More composability

Kaveen Kumarasinghe 2 years ago
parent 258a87945c
commit fd2ab04d93

@ -9,7 +9,7 @@
[![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square)](http://makeapullrequest.com)
# Overview
A robust, all-in-one GPT3 interface for Discord. Chat just like ChatGPT right inside Discord! Generate beautiful AI art using DALL-E 2! Automatically moderate your server using AI! A thorough integration with permanent conversation memory, automatic request retry, fault tolerance and reliability for servers of any scale, and much more.
A robust, all-in-one GPT3 interface for Discord. Chat just like ChatGPT right inside Discord! Generate beautiful AI art using DALL-E 2! Automatically moderate your server using AI! Upload documents, videos, and files to get AI-assisted insights! A thorough integration with permanent conversation memory, automatic request retry, fault tolerance and reliability for servers of any scale, and much more.
SUPPORT SERVER FOR BOT SETUP: https://discord.gg/WvAHXDMS7Q (You can try out the bot here also in a limited fashion)
# Screenshots
@ -25,6 +25,10 @@ SUPPORT SERVER FOR BOT SETUP: https://discord.gg/WvAHXDMS7Q (You can try out the
# Recent Notable Updates
- **CUSTOM INDEXES** - This is a huge update. You can now upload files to your server and use them as custom context when asking GPT3 questions. You can also use webpage links as context, images, full documents, csvs, powerpoints, audio files, and even **youtube videos**! Read more in the 'Custom Indexes' section below.
<p align="center"/>
<img src="https://i.imgur.com/rlJxXRX.png"/>
</p>
# Features
- **Directly prompt GPT3 with `/gpt ask <prompt>`**
@ -181,12 +185,18 @@ Permanent memory using pinecone is still in alpha, I will be working on cleaning
# Custom Indexes / Knowledgebase
This bot supports per-user custom indexes. This means that users can upload files of their choosing, such as PDFs and ask GPT to answer questions based on those files. We also support using URLs for indexes.
**This feature uses a large amount of tokens and money, and you should restrict it to trusted users.**
Supported filetypes:
- All text and data based files (PDF, TXT, DOCX, PPTX, CSV etc)
- Images (JPG, PNG, etc) (Note: The bot will do OCR on the images to extract the text; this can sometimes require a lot of processing power)
- Videos/Audio (MP4, MP3, etc) (Note: The bot will use OpenAI to transcribe the audio and extract the text; this can sometimes require a lot of processing power)
- **Youtube Videos** - For all youtube videos that are transcribable, the bot will index the entire transcription of the given youtube video URL!
Index Compositions:
Indexes can be combined with other indexes through a composition. To combine indexes, you can run the `/index compose` command, and select the indexes that you want to combine together. You should only combine relevant indexes together; combining irrelevant indexes will result in poor results (for example, don't upload a math textbook and then upload a large set of poems and combine them together). When creating a composition, you will be given the option to do a "Deep" composition. Deep compositions are more detailed and will give you better results, but they are incredibly costly and will sometimes take multiple minutes to compose.
You can also compose a singular index with itself with "Deep Compose", this will give you a more detailed version of the index, but will be costly and will sometimes take multiple minutes to compose.
# Translations with DeepL
This bot supports and uses DeepL for translations (optionally). If you want to enable the translations service, you can add a line in your `.env` file as follows:

@ -28,15 +28,6 @@ def get_and_query(user_id, index_storage, query, llm_predictor):
if isinstance(index, GPTSimpleVectorIndex):
response = index.query(query,verbose=True)
else:
query_configs = [
QueryConfig(
index_struct_type=IndexStructType.TREE,
query_mode=QueryMode.RECURSIVE,
query_kwargs={
"child_branch_factor": 4
}
)
]
response = index.query(query, verbose=True, query_configs=[])
return response
@ -54,6 +45,12 @@ class IndexData:
def queryable(self):
return self.queryable_index is not None
def has_indexes(self, user_id):
    """Return True if the user has more than one saved index file on disk.

    More than one is required because composition only makes sense with
    multiple indexes. A missing or unreadable per-user index directory
    counts as "no indexes" rather than raising.
    """
    try:
        return len(os.listdir(f"{app_root_path()}/indexes/{user_id}")) > 1
    except OSError:
        # os.listdir raises FileNotFoundError/OSError when the user's
        # directory doesn't exist yet (they never saved an index).
        # Was a bare `except:` — narrowed so real bugs aren't swallowed.
        return False
def add_index(self, index, user_id, file_name):
self.individual_indexes.append(index)
self.queryable_index = index
@ -61,7 +58,7 @@ class IndexData:
# Create a folder called "indexes/{USER_ID}" if it doesn't exist already
Path(f"{app_root_path()}/indexes/{user_id}").mkdir(parents=True, exist_ok=True)
# Save the index to file under the user id
index.save_to_disk(app_root_path() / "indexes" / f"{str(user_id)}"/f"{file_name}_{date.today()}-H{datetime.now().hour}.json")
index.save_to_disk(app_root_path() / "indexes" / f"{str(user_id)}"/f"{file_name}_{date.today()}.json")
def reset_indexes(self, user_id):
self.individual_indexes = []
@ -227,7 +224,7 @@ class Index_handler:
except Exception as e:
await ctx.respond(e)
async def compose_indexes(self, user_id, indexes, name):
async def compose_indexes(self, user_id, indexes, name, deep_compose):
# Load all the indexes first
index_objects = []
for _index in indexes:
@ -236,30 +233,46 @@ class Index_handler:
index_objects.append(index)
# For each index object, add its documents to a GPTTreeIndex
tree_indexes = []
for _index in index_objects:
# Get all the document objects out of _index.docstore.docs
document_ids = [docmeta for docmeta in _index.docstore.docs.keys()]
documents = list([_index.docstore.get_document(doc_id) for doc_id in document_ids if isinstance(_index.docstore.get_document(doc_id), Document)])
tree_index = GPTTreeIndex(documents=documents)
summary = tree_index.query(
"What is a summary of this document?", mode="summarize"
)
tree_index.set_text(str(summary))
tree_indexes.append(tree_index)
if deep_compose:
tree_indexes = []
for _index in index_objects:
# Get all the document objects out of _index.docstore.docs
document_ids = [docmeta for docmeta in _index.docstore.docs.keys()]
documents = list([_index.docstore.get_document(doc_id) for doc_id in document_ids if isinstance(_index.docstore.get_document(doc_id), Document)])
tree_index = GPTTreeIndex(documents=documents)
summary = tree_index.query(
"What is a summary of this document?", mode="summarize"
)
tree_index.set_text(str(summary))
tree_indexes.append(tree_index)
# Now we have a list of tree indexes, we can compose them
list_index = GPTListIndex(tree_indexes)
graph = ComposableGraph.build_from_index(list_index)
# Now we have a list of tree indexes, we can compose them
list_index = GPTListIndex(tree_indexes)
graph = ComposableGraph.build_from_index(list_index)
if not name:
name = f"composed_deep_index_{date.today()}.json"
# Save the composed index
graph.save_to_disk(f"indexes/{user_id}/{name}.json")
self.index_storage[user_id].queryable_index = graph
else:
documents = []
for _index in index_objects:
[documents.append(_index.docstore.get_document(doc_id)) for doc_id in [docmeta for docmeta in _index.docstore.docs.keys()] if isinstance(_index.docstore.get_document(doc_id), Document)]
if not name:
name = f"composed_index_{date.today()}-H{datetime.now().hour}.json"
# Add everything into a simple vector index
simple_index = GPTSimpleVectorIndex(documents=documents)
# Save the composed index
graph.save_to_disk(f"indexes/{user_id}/{name}.json")
if not name:
name = f"composed_index_{date.today()}.json"
self.index_storage[user_id].queryable_index = graph
# Save the composed index
simple_index.save_to_disk(f"indexes/{user_id}/{name}.json")
self.index_storage[user_id].queryable_index = simple_index
async def backup_discord(self, ctx: discord.ApplicationContext, user_api_key):
@ -275,7 +288,7 @@ class Index_handler:
document = await self.load_data(channel_ids=channel_ids, limit=3000, oldest_first=False)
index = await self.loop.run_in_executor(None, partial(self.index_discord, document))
Path(app_root_path() / "indexes").mkdir(parents = True, exist_ok=True)
index.save_to_disk(app_root_path() / "indexes" / f"{ctx.guild.name.replace(' ', '-')}_{date.today()}-H{datetime.now().hour}.json")
index.save_to_disk(app_root_path() / "indexes" / f"{ctx.guild.name.replace(' ', '-')}_{date.today()}.json")
await ctx.respond("Backup saved")
except Exception:
@ -381,19 +394,20 @@ class Index_handler:
else:
os.environ["OPENAI_API_KEY"] = user_api_key
if not self.index_storage[ctx.user.id].queryable():
if not self.index_storage[ctx.user.id].has_indexes(ctx.user.id):
await ctx.respond("You must load at least two indexes before composing")
return
await ctx.respond("Select the indexes to compose.", view=ComposeModal(self, ctx.user.id, name))
await ctx.respond("Select the indexes to compose.", view=ComposeModal(self, ctx.user.id, name), ephemeral=True)
class ComposeModal(discord.ui.View):
def __init__(self, index_cog, user_id, name=None) -> None:
def __init__(self, index_cog, user_id, name=None, deep=None) -> None:
super().__init__()
# Get the argument named "user_key_db" and save it as USER_KEY_DB
self.index_cog = index_cog
self.user_id = user_id
self.deep = deep
# Get all the indexes for the user
self.indexes = [
@ -404,20 +418,51 @@ class ComposeModal(discord.ui.View):
# A text entry field for the name of the composed index
self.name = name
# A discord UI select menu with all the indexes
# A discord UI select menu with all the indexes. Limited to 25 entries
self.index_select = discord.ui.Select(
placeholder="Select an index",
placeholder="Select multiple indexes to query",
options=[
discord.SelectOption(label=index, value=index)
for index in self.indexes
],
max_values=len(self.indexes),
][0:25],
max_values=len(self.indexes) if len(self.indexes) < 25 else 25,
min_values=1,
)
# Add the select menu to the modal
self.add_item(self.index_select)
# If we have more than 25 entries, add more Select fields as neccessary
self.extra_index_selects = []
if len(self.indexes) > 25:
for i in range(25, len(self.indexes), 25):
self.extra_index_selects.append(discord.ui.Select(
placeholder="Select multiple indexes to query",
options=[
discord.SelectOption(label=index, value=index)
for index in self.indexes
][i:i+25],
max_values=len(self.indexes[i:i+25]),
min_values=1,
))
self.add_item(self.extra_index_selects[-1])
# Add an input field for "Deep", a "yes" or "no" option, default no
self.deep_select = discord.ui.Select(
placeholder="Deep Compose",
options=[
discord.SelectOption(label="Yes", value="yes"),
discord.SelectOption(label="No", value="no")
],
max_values=1,
min_values=1
)
self.add_item(self.deep_select)
# Add a button to the modal called "Compose"
self.add_item(discord.ui.Button(label="Compose", style=discord.ButtonStyle.green, custom_id="compose"))
@ -426,12 +471,16 @@ class ComposeModal(discord.ui.View):
# Check that the interaction was for custom_id "compose"
if interaction.data["custom_id"] == "compose":
# Check that the user selected at least one index
if len(self.index_select.values) < 2:
await interaction.response.send_message("You must select at least two indexes")
# The total list of indexes is the union of the values of all the select menus
indexes = self.index_select.values + [select.values[0] for select in self.extra_index_selects]
if len(indexes) < 1:
await interaction.response.send_message("You must select at least 1 index", ephemeral=True)
else:
composing_message = await interaction.response.send_message("Composing indexes, this may take a long time...", ephemeral=True, delete_after=120)
# Compose the indexes
await self.index_cog.compose_indexes(self.user_id,self.index_select.values,self.name)
await self.index_cog.compose_indexes(self.user_id,indexes,self.name, False if not self.deep_select.values or self.deep_select.values[0] == "no" else True)
await interaction.followup.send("Composed indexes", ephemeral=True, delete_after=10)
try:
@ -439,4 +488,4 @@ class ComposeModal(discord.ui.View):
except:
pass
else:
await interaction.response.defer()
await interaction.response.defer(ephemeral=True)
Loading…
Cancel
Save