Merge pull request #89 from Hikari-Haru/override_threads

Add parameter overrides to opener files using json
Kaveen Kumarasinghe 1 year ago committed by GitHub
commit 9637ace4ec
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23

@ -80,6 +80,8 @@ These commands are grouped, so each group has a prefix but you can easily tab co
- Custom openers need to be placed as a .txt file in the `openers` directory, in the same directory as `gpt3discord.py`
- Can use .json files in the `{"text": "your prompt", "temperature":0, "top_p":0, "frequency_penalty":0, "presence_penalty":0}` format to include permanent overrides
`/gpt converse minimal:yes` - Start a conversation with the bot, like ChatGPT, with minimal context (saves tokens)
- Note that the above options for `/gpt converse` can be combined (you can combine minimal, private, and opener!)
@ -147,6 +149,18 @@ Moreover, an important thing to keep in mind is: pinecone indexes are currently
Permanent memory using pinecone is still in alpha, I will be working on cleaning up this work, adding auto-clearing, and optimizing for stability and reliability, any help and feedback is appreciated (**add me on Discord Kaveen#0001 for pinecone help**)! If at any time you're having too many issues with pinecone, simply remove the `PINECONE_TOKEN` line in your `.env` file and the bot will revert to using conversation summarizations.
# Permanent overrides in threads
This bot now supports having overrides be permanent in an entire conversation if you use an opener file which includes them. The new opener files should be .json files formatted like this. `text` corresponds to what you want the conversational opener to be and the rest map 1:1 to the appropriate model settings. An example .json file is included by the name of `english_translator.json` in the `openers` folder
```json
{
"text": "your prompt",
"temp":0,
"top_p":0,
"frequency_penalty":0,
"presence_penalty":0
}
```
# User-Input API Keys (Multi-key tenancy)
This bot supports multi-user tenancy in regards to API keys. This means that, if you wanted, you could make it such that each user needs to enter their own API key in order to use commands that use GPT3 and DALLE.

@ -8,6 +8,7 @@ from pathlib import Path
import aiofiles
import json
import discord
from pycord.multicog import add_to_group
@ -650,11 +651,17 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
self.conversation_threads[after.channel.id].count += 1
overrides = self.conversation_threads[after.channel.id].get_overrides()
await self.encapsulated_send(
id=after.channel.id,
prompt=edited_content,
ctx=ctx,
response_message=response_message,
temp_override=overrides["temperature"],
top_p_override=overrides["top_p"],
frequency_penalty_override=overrides["frequency_penalty"],
presence_penalty_override=overrides["presence_penalty"],
edited_request=True,
)
@ -798,10 +805,17 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
]
)
#set conversation overrides
overrides = self.conversation_threads[message.channel.id].get_overrides()
await self.encapsulated_send(
message.channel.id,
primary_prompt,
message,
temp_override=overrides["temperature"],
top_p_override=overrides["top_p"],
frequency_penalty_override=overrides["frequency_penalty"],
presence_penalty_override=overrides["presence_penalty"],
custom_api_key=user_api_key,
)
@ -1300,33 +1314,6 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
)
return
if opener:
opener = await self.mention_to_username(ctx, opener)
if not opener and not opener_file:
user_id_normalized = user.id
else:
user_id_normalized = ctx.author.id
if (
opener_file
): # only load in files if it's included in the command, if not pass on as normal
if opener_file.endswith(".txt"):
# Load the file and read it into opener
opener_file = EnvService.find_shared_file(
f"openers{separator}{opener_file}"
)
opener_file = await self.load_file(opener_file, ctx)
if (
not opener
): # if we only use opener_file then only pass on opener_file for the opening prompt
opener = opener_file
else:
opener = opener_file + opener
if not opener_file:
return
else:
pass
if private:
await ctx.respond(user.name + "'s private conversation with GPT3")
thread = await ctx.channel.create_thread(
@ -1344,6 +1331,46 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
self.conversation_threads[thread.id] = Thread(thread.id)
if opener:
opener = await self.mention_to_username(ctx, opener)
if not opener and not opener_file:
user_id_normalized = user.id
else:
user_id_normalized = ctx.author.id
if not opener_file:
pass
else:
if not opener_file.endswith((".txt", ".json")):
opener_file = None # Just start a regular thread if the file fails to load
else:
# Load the file and read it into opener
try:
opener_file = re.sub(".+(?=[\\//])", "", opener_file) # remove paths from the opener file
opener_file = EnvService.find_shared_file(
f"openers{separator}{opener_file}"
)
opener_file = await self.load_file(opener_file, ctx)
try: # Try opening as json, if it fails it'll just pass the whole txt or json to the opener
opener_file = json.loads(opener_file)
temperature=opener_file.get("temperature", None)
top_p=opener_file.get("top_p", None)
frequency_penalty=opener_file.get("frequency_penalty", None)
presence_penalty=opener_file.get("presence_penalty", None)
self.conversation_threads[thread.id].set_overrides(temperature, top_p, frequency_penalty, presence_penalty)
if not opener: # if we only use opener_file then only pass on opener_file for the opening prompt
opener = opener_file.get('text', "error getting text")
else:
opener = opener_file.get('text', "error getting text") + opener
except: # Parse as just regular text
if not opener:
opener = opener_file
else:
opener = opener_file + opener
except:
opener_file = None # Just start a regular thread if the file fails to load
# Append the starter text for gpt3 to the user's history so it gets concatenated with the prompt later
if minimal or opener_file:
self.conversation_threads[thread.id].history.append(
@ -1354,15 +1381,21 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
EmbeddedConversationItem(self.CONVERSATION_STARTER_TEXT, 0)
)
# Set user as thread owner before sending anything that can error and leave the thread unowned
self.conversation_thread_owners[user_id_normalized] = thread.id
overrides = self.conversation_threads[thread.id].get_overrides()
await thread.send(
"<@"
+ str(user_id_normalized)
+ "> You are now conversing with GPT3. *Say hi to start!*\n End the conversation by saying `end`.\n\n If you want GPT3 to ignore your messages, start your messages with `~`\n\nYour conversation will remain active even if you leave this thread and talk in other GPT supported channels, unless you end the conversation!"
f"<@{str(user_id_normalized)}> You are now conversing with GPT3. *Say hi to start!*\n"
f"Overrides for this thread is **temp={overrides['temperature']}**, **top_p={overrides['top_p']}**, **frequency penalty={overrides['frequency_penalty']}**, **presence penalty={overrides['presence_penalty']}**\n"
f"End the conversation by saying `end`.\n\n"
f"If you want GPT3 to ignore your messages, start your messages with `~`\n\n"
f"Your conversation will remain active even if you leave this thread and talk in other GPT supported channels, unless you end the conversation!"
)
# send opening
if opener:
thread_message = await thread.send("***Opening prompt*** \n" + opener)
thread_message = await thread.send("***Opening prompt*** \n" + str(opener))
if thread.id in self.conversation_threads:
self.awaiting_responses.append(user_id_normalized)
self.awaiting_thread_responses.append(thread.id)
@ -1385,14 +1418,16 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
[item.text for item in self.conversation_threads[thread.id].history]
),
thread_message,
temp_override=overrides["temperature"],
top_p_override=overrides["top_p"],
frequency_penalty_override=overrides["frequency_penalty"],
presence_penalty_override=overrides["presence_penalty"],
custom_api_key=user_api_key,
)
self.awaiting_responses.remove(user_id_normalized)
if thread.id in self.awaiting_thread_responses:
self.awaiting_thread_responses.remove(thread.id)
self.conversation_thread_owners[user_id_normalized] = thread.id
@add_to_group("system")
@discord.slash_command(
name="moderations-test",

@ -24,7 +24,7 @@ from models.openai_model import Model
from models.usage_service_model import UsageService
from models.env_service_model import EnvService
__version__ = "5.2"
__version__ = "5.3"
"""
The pinecone service is used to store and retrieve conversation embeddings.

@ -57,6 +57,24 @@ class Thread:
self.id = id
self.history = []
self.count = 0
self.temperature = None
self.top_p = None
self.frequency_penalty = None
self.presence_penalty = None
def set_overrides(self, temperature=None, top_p=None, frequency_penalty=None, presence_penalty=None):
    """Store per-thread model parameter overrides on this conversation thread.

    Any argument left as None clears that override back to "no override".
    """
    # Assign all four override fields in one parallel unpacking.
    self.temperature, self.top_p, self.frequency_penalty, self.presence_penalty = (
        temperature,
        top_p,
        frequency_penalty,
        presence_penalty,
    )
def get_overrides(self):
    """Return this thread's model parameter overrides as a dict.

    Keys are "temperature", "top_p", "frequency_penalty" and
    "presence_penalty"; values are None when no override is set.
    """
    # Build the mapping by reading each attribute of the same name.
    return {
        key: getattr(self, key)
        for key in ("temperature", "top_p", "frequency_penalty", "presence_penalty")
    }
# These user objects should be accessible by ID, for example if we had a bunch of user
# objects in a list, and we did `if 1203910293001 in user_list`, it would return True

@ -0,0 +1,8 @@
{
"text":"I want you to act as an English translator, spelling corrector and improver. I will speak to you in any language and you will detect the language, translate it and answer in the corrected and improved version of my text, in English. I want you to replace my simplified A0-level words and sentences with more beautiful and elegant, upper level English words and sentences. Keep the meaning same, but make them more literary. I want you to only reply the correction, the improvements and nothing else, do not write explanations. ",
"temperature":0.77,
"top_p":0.9,
"frequency_penalty":0.95,
"presence_penalty":0.95
}
Loading…
Cancel
Save