Merge pull request #58 from Hikari-Haru/autocomplete

Add autocomplete, and change the way opener files are loaded to use a folder instead
Committed by Kaveen Kumarasinghe via GitHub
commit 3775432aae

@ -59,7 +59,9 @@ These commands are grouped, so each group has a prefix but you can easily tab co
`/gpt converse private:yes` - Start a private conversation with the bot, like ChatGPT
`/gpt converse opener:<opener text> | <opener file name>.txt` - Start a conversation with the bot, with a custom opener text (this is useful if you want it to take on a custom personality from the start). You can also load the opener text from a file by saving it in the bot root directory as .txt and referencing it in the opener param.
`/gpt converse opener:<opener text>` - Start a conversation with the bot, with a custom opener text (this is useful if you want it to take on a custom personality from the start).
`/gpt converse opener_file:<opener file name>.txt` - Start a conversation with the bot using a custom opener file; using this option also enables the minimal conversation starter. Files are loaded from the `/openers` folder, with autocomplete support so the files in that folder show up as suggestions. The file text is added before the `opener` text, so both can be used at the same time (see the combined example below)
`/gpt converse minimal:yes` - Start a conversation with the bot, like ChatGPT, with minimal context (saves tokens)
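For example, the two options can be combined; this assumes a file named `english_translator.txt` has been saved in the `/openers` folder (the filename here is purely illustrative):
`/gpt converse opener_file:english_translator.txt opener:Please stay in character for the whole conversation.`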
@ -73,7 +75,7 @@ These commands are grouped, so each group has a prefix but you can easily tab co
`/system settings` - Display settings for the model (temperature, top_p, etc)
`/system settings <setting> <value>` - Change a model setting to a new value
`/system settings <setting> <value>` - Change a model setting to a new value. Has autocomplete support; certain settings also autocomplete their values (see the example below).
`/system usage` Estimate current usage details (based on davinci)
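For example, value autocomplete offers preset choices for settings like `image_size`, while numeric settings such as `temp` simply take a typed value (the value below is purely illustrative):
`/system settings temp 0.8`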

@ -2,6 +2,7 @@ import datetime
import json
import re
import traceback
import sys
from pathlib import Path
import aiofiles
@ -13,10 +14,15 @@ from models.env_service_model import EnvService
from models.message_model import Message
from models.user_model import User, RedoUser
from models.check_model import Check
from models.autocomplete_model import Settings_autocompleter, File_autocompleter
from collections import defaultdict
original_message = {}
ALLOWED_GUILDS = EnvService.get_allowed_guilds()
if sys.platform == "win32":
    separator = "\\"
else:
    separator = "/"
class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
@ -821,7 +827,15 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
        guild_ids=ALLOWED_GUILDS,
    )
    @discord.option(
        name="opener", description="Which sentence to start with", required=False
        name="opener",
        description="Which sentence to start with, added after the file",
        required=False,
    )
    @discord.option(
        name="opener_file",
        description="Which file to start with, added before the opener, sets minimal starter",
        required=False,
        autocomplete=File_autocompleter.get_openers,
    )
    @discord.option(
        name="private",
@ -831,13 +845,13 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
    )
    @discord.option(
        name="minimal",
        description="Use minimal starter text",
        description="Use minimal starter text, saves tokens and has a more open personality",
        required=False,
        choices=["yes"],
    )
    @discord.guild_only()
    async def converse(
        self, ctx: discord.ApplicationContext, opener: str, private, minimal
        self, ctx: discord.ApplicationContext, opener: str, opener_file: str, private, minimal
    ):
        if private:
            await ctx.defer(ephemeral=True)
@ -853,22 +867,28 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
            await self.deletion_queue(message)
            return
        if not opener:
        if not opener and not opener_file:
            user_id_normalized = user.id
        else:
            user_id_normalized = ctx.author.id
            # Pre-check for opener, check if they provided a valid file if it is indeed a file.
            # If the opener ends in .txt, its a file and we want to load it
            if opener.endswith(".txt"):
                # Load the file and read it into opener
                opener = await self.load_file(opener, ctx)
                if not opener:
                    return
            if opener_file:  # only load in files if it's included in the command, if not pass on as normal
                if opener_file.endswith(".txt"):
                    # Load the file and read it into opener
                    opener_file = f"openers{separator}{opener_file}"
                    opener_file = await self.load_file(opener_file, ctx)
                    if not opener:  # if we only use opener_file then only pass on opener_file for the opening prompt
                        opener = opener_file
                    else:
                        opener = opener_file + opener
                    if not opener_file:
                        return
                else:
                    pass
        self.conversating_users[user_id_normalized] = User(user_id_normalized)
        # Append the starter text for gpt3 to the user's history so it gets concatenated with the prompt later
        if minimal:
        if minimal or opener_file:
            self.conversating_users[user_id_normalized].history.append(
                self.CONVERSATION_STARTER_TEXT_MINIMAL
            )
@ -971,28 +991,13 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
name="parameter",
description="The setting to change",
required=False,
choices=[
"mode",
"temp",
"top_p",
"max_tokens",
"presence_penalty",
"frequency_penalty",
"best_of",
"prompt_min_length",
"max_conversation_length",
"model",
"low_usage_mode",
"image_size",
"num_images",
"summarize_conversations",
"summarize_threshold",
"welcome_message_enabled",
"IMAGE_SAVE_PATH",
],
autocomplete=Settings_autocompleter.get_settings
)
@discord.option(
name="value", description="The value to set the setting to", required=False
name="value",
description="The value to set the setting to",
required=False,
autocomplete=Settings_autocompleter.get_value
)
@discord.guild_only()
async def settings(

@ -0,0 +1,37 @@
from pathlib import Path
import os
import re
import discord
from models.usage_service_model import UsageService
from models.openai_model import Model

usage_service = UsageService(Path(os.environ.get("DATA_DIR", os.getcwd())))
model = Model(usage_service)


class Settings_autocompleter:
    async def get_settings(ctx: discord.AutocompleteContext):
        # Settable parameters are the model's attributes, minus the hidden ones, with the leading underscore stripped
        SETTINGS = [re.sub("^_", "", key) for key in model.__dict__.keys() if key not in model._hidden_attributes]
        return [parameter for parameter in SETTINGS if parameter.startswith(ctx.value.lower())][:25]

    async def get_value(ctx: discord.AutocompleteContext):  # Behaves a bit weird if you go back and edit the parameter without typing in a new command
        values = {
            "mode": ["temperature", "top_p"],
            "model": ["text-davinci-003", "text-curie-001"],
            "low_usage_mode": ["True", "False"],
            "image_size": ["256x256", "512x512", "1024x1024"],
            "summarize_conversations": ["True", "False"],
            "welcome_message_enabled": ["True", "False"],
        }
        if ctx.options["parameter"] in values.keys():
            return [value for value in values[ctx.options["parameter"]]]
        else:
            await ctx.interaction.response.defer()  # defer so the autocomplete in int values doesn't error but rather just says not found
            return []


class File_autocompleter:
    async def get_openers(ctx: discord.AutocompleteContext):
        try:
            return [file for file in os.listdir("openers") if file.startswith(ctx.value.lower())][:25]  # returns the first 25 files matching your current input
        except Exception:
            return ["No 'openers' folder"]

@ -0,0 +1 @@
I want you to act as an English translator, spelling corrector and improver. I will speak to you in any language and you will detect the language, translate it and answer in the corrected and improved version of my text, in English. I want you to replace my simplified A0-level words and sentences with more beautiful and elegant, upper-level English words and sentences. Keep the meaning the same, but make them more literary. I want you to only reply with the correction and the improvements, and nothing else; do not write explanations.

@ -0,0 +1 @@
I want you to act as a JavaScript console. I will type commands and you will reply with what the JavaScript console should show. I want you to only reply with the terminal output inside one unique code block, and nothing else. Do not write explanations. Do not type commands unless I instruct you to do so. When I need to tell you something in English, I will do so by putting text inside curly brackets {like this}.