Add support for permanent custom model in threads

There is currently no way to set it yourself
Rene Teigen 2 years ago
parent b261aa6df4
commit e401927dcd

@@ -72,21 +72,23 @@ These commands are grouped, so each group has a prefix but you can easily tab co
`/gpt edit <instruction> <input> <temp> <top_p> <codex>` - Use the bot to edit text according to the given instructions; this is currently an alpha OpenAI feature, so results may vary. Codex uses a model trained on code. Editing is currently free
`/gpt converse` - Start a conversation with the bot, like ChatGPT
`/gpt converse <opener> <opener_file> <private> <minimal>` - Start a conversation with the bot, like ChatGPT
`/gpt converse private:yes` - Start a private conversation with the bot, like ChatGPT
- `opener:<opener text>` - Start a conversation with the bot, with a custom opener text (this is useful if you want it to take on a custom personality from the start).
`/gpt converse opener:<opener text>` - Start a conversation with the bot, with a custom opener text (this is useful if you want it to take on a custom personality from the start).
- `opener_file:<opener file name>.txt|.json` - Starts a conversation with the bot, using a custom file.
`/gpt converse opener_file:<opener file name>.txt` - Starts a conversation with the bot using a custom opener file; using this option also enables the minimal conversation starter. Loads files from the `/openers` folder, with autocomplete support, so files in the folder will show up. The opener file is added before the `opener` text, as both can be used at the same time
- Loads files from the `/openers` folder, has autocomplete support so files in the folder will show up. Added before the `opener` as both can be used at the same time
- Custom openers need to be placed as a .txt file in the `openers` directory, in the same directory as `gpt3discord.py`
- Can use .json files in the `{"text": your prompt, "temp":0, "top_p":0,"frequency_penalty":0,"presence_penalty":0}` format to include permanent overrides
- Enables minimal
`/gpt converse minimal:yes` - Start a conversation with the bot, like ChatGPT, with minimal context (saves tokens)
- Can use .json files in the `{"text": "your prompt", "temp":0, "top_p":0,"frequency_penalty":0,"presence_penalty":0}` format to include permanent overrides (see the example after this command list)
- Note that the above options for `/gpt converse` can be combined (you can combine minimal, private, and opener!)
- `private` - Start a private conversation with the bot, like ChatGPT
- `minimal` - Start a conversation with the bot, like ChatGPT, with minimal context (saves tokens)
`/gpt end` - End a conversation with the bot.
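
As a concrete illustration of the `.json` opener format described in the list above, here is a minimal sketch that writes an example opener into the `openers` directory. The file name `pirate.json` and the prompt text are made up for illustration; only the keys follow the documented format.

```python
import json
from pathlib import Path

# Hypothetical opener for illustration; only the keys follow the documented
# {"text", "temp", "top_p", "frequency_penalty", "presence_penalty"} format.
opener = {
    "text": "You are a friendly pirate. Answer every question in pirate speak.",
    "temp": 0.8,
    "top_p": 1,
    "frequency_penalty": 0,
    "presence_penalty": 0,
}

# Openers are loaded from the `openers` folder next to gpt3discord.py,
# so dropping the file there is enough for it to show up in autocomplete.
openers_dir = Path("openers")
openers_dir.mkdir(exist_ok=True)
(openers_dir / "pirate.json").write_text(json.dumps(opener, indent=2))
```

It could then be selected with `/gpt converse opener_file:pirate.json`.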

@@ -667,6 +667,7 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
top_p_override=overrides["top_p"],
frequency_penalty_override=overrides["frequency_penalty"],
presence_penalty_override=overrides["presence_penalty"],
model=self.conversation_threads[after.channel.id].model,
edited_request=True,
)
@@ -821,6 +822,7 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
top_p_override=overrides["top_p"],
frequency_penalty_override=overrides["frequency_penalty"],
presence_penalty_override=overrides["presence_penalty"],
model=self.conversation_threads[message.channel.id].model,
custom_api_key=user_api_key,
)
@@ -860,6 +862,7 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
instruction=None,
from_edit_command=False,
codex=False,
model=None,
custom_api_key=None,
edited_request=False,
redo_request=False,
@@ -1056,6 +1059,7 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
top_p_override=top_p_override,
frequency_penalty_override=frequency_penalty_override,
presence_penalty_override=presence_penalty_override,
model=model,
custom_api_key=custom_api_key,
)
@@ -1372,13 +1376,11 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
name="private",
description="Converse in a private thread",
required=False,
choices=["yes"],
)
@discord.option(
name="minimal",
description="Use minimal starter text, saves tokens and has a more open personality",
required=False,
choices=["yes"],
)
@discord.guild_only()
async def converse(
@@ -1386,8 +1388,8 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
ctx: discord.ApplicationContext,
opener: str,
opener_file: str,
private,
minimal,
private: bool,
minimal: bool,
):
user = ctx.user
@@ -1426,6 +1428,7 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
)
self.conversation_threads[thread.id] = Thread(thread.id)
self.conversation_threads[thread.id].model = self.model.model
if opener:
opener = await self.mention_to_username(ctx, opener)
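
The hunk above is where the new behaviour comes from: when a conversation thread is opened, the bot's currently configured model (`self.model.model`) is copied onto the thread's state, so the thread keeps that model even if the global setting changes later. A minimal sketch of that pinning, using a plain dict in place of the bot's `Thread` objects (the dict layout, function names, and `"text-davinci-003"` are illustrative stand-ins, not the cog's actual code):

```python
conversation_threads = {}  # thread_id -> per-thread state (sketch only)

def open_conversation(thread_id, current_model):
    """Record per-thread state and pin whatever model is active right now."""
    conversation_threads[thread_id] = {"history": [], "model": current_model}

def model_for(thread_id, fallback):
    """Later requests read the pinned model back, falling back if none was stored."""
    return conversation_threads.get(thread_id, {}).get("model") or fallback

open_conversation(1234, current_model="text-davinci-003")
# Even if the global default changes afterwards, this thread keeps its model:
assert model_for(1234, fallback="some-other-model") == "text-davinci-003"
```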
@@ -1496,6 +1499,7 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
await thread.send(
f"<@{str(user_id_normalized)}> You are now conversing with GPT3. *Say hi to start!*\n"
f"Overrides for this thread is **temp={overrides['temperature']}**, **top_p={overrides['top_p']}**, **frequency penalty={overrides['frequency_penalty']}**, **presence penalty={overrides['presence_penalty']}**\n"
f"The model used is **{self.conversation_threads[thread.id].model}**\n"
f"End the conversation by saying `end`.\n\n"
f"If you want GPT3 to ignore your messages, start your messages with `~`\n\n"
f"Your conversation will remain active even if you leave this thread and talk in other GPT supported channels, unless you end the conversation!"
@@ -1530,6 +1534,7 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
top_p_override=overrides["top_p"],
frequency_penalty_override=overrides["frequency_penalty"],
presence_penalty_override=overrides["presence_penalty"],
model=self.conversation_threads[thread.id].model,
custom_api_key=user_api_key,
)
self.awaiting_responses.remove(user_id_normalized)

@@ -475,6 +475,7 @@ class Model:
frequency_penalty_override=None,
presence_penalty_override=None,
max_tokens_override=None,
model=None,
custom_api_key=None,
) -> (
dict,
@@ -495,7 +496,7 @@
async with aiohttp.ClientSession() as session:
payload = {
"model": self.model,
"model": self.model if model is None else model,
"prompt": prompt,
"temperature": self.temp if temp_override is None else temp_override,
"top_p": self.top_p if top_p_override is None else top_p_override,

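The `Model` change above follows a per-call override pattern: the request payload uses the instance's default model unless a `model` argument is supplied. Below is a stripped-down sketch of that pattern with no HTTP and hypothetical class and method names; only the `self.model if model is None else model` expression is taken from the diff.

```python
class ModelSketch:
    """Stripped-down stand-in for the bot's Model class (illustration only)."""

    def __init__(self, default_model, temp=0.7, top_p=1.0):
        self.model = default_model
        self.temp = temp
        self.top_p = top_p

    def build_payload(self, prompt, model=None, temp_override=None, top_p_override=None):
        # Each per-call override falls back to the instance default when None,
        # mirroring the "self.model if model is None else model" line in the diff.
        return {
            "model": self.model if model is None else model,
            "prompt": prompt,
            "temperature": self.temp if temp_override is None else temp_override,
            "top_p": self.top_p if top_p_override is None else top_p_override,
        }

m = ModelSketch(default_model="text-davinci-003")
print(m.build_payload("hello"))                            # uses the default model
print(m.build_payload("hello", model="code-davinci-002"))  # per-thread override wins
```
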
@@ -57,6 +57,7 @@ class Thread:
self.id = id
self.history = []
self.count = 0
self.model = None
self.temperature = None
self.top_p = None
self.frequency_penalty = None

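For reference, the `Thread` constructor change amounts to one more per-thread override slot that starts out unset. A dataclass-style sketch of the same shape (field names are taken from the diff; the dataclass form is just for brevity and is not the bot's actual class):

```python
from dataclasses import dataclass, field
from typing import Optional

@dataclass
class ThreadSketch:
    """Mirror of the per-thread state shown in the diff (illustration only)."""
    id: int
    history: list = field(default_factory=list)
    count: int = 0
    model: Optional[str] = None          # new field: pinned at thread creation
    temperature: Optional[float] = None
    top_p: Optional[float] = None
    frequency_penalty: Optional[float] = None
```
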