Add stop sequence in conversation requests, plus some formatting fixes

Added ping on thread open
Removed extra newlines from ask formatting
Rene Teigen 2 years ago
parent f0860d1f66
commit 7260ab7b6b

@ -841,7 +841,7 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
elif not private:
message_thread = await ctx.respond(
embed=discord.Embed(
title=f"{user.name} 's conversation with GPT3", color=0x808080
title=f"{user.name}'s conversation with GPT3", color=0x808080
)
)
# Get the actual message object for the message_thread
@ -920,6 +920,10 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
self.conversation_thread_owners[user_id_normalized] = thread.id
overrides = self.conversation_threads[thread.id].get_overrides()
await thread.send(
f"<@{str(ctx.user.id)}> is the thread owner."
)
await thread.send(
embed=EmbedStatics.generate_conversation_embed(
self.conversation_threads, thread, opener, overrides
@ -938,7 +942,7 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
if not self.pinecone_service:
self.conversation_threads[thread.id].history.append(
EmbeddedConversationItem(
f"\n'{ctx.author.display_name}': {opener} <|endofstatement|>\n",
f"\n{ctx.author.display_name}: {opener} <|endofstatement|>\n",
0,
)
)
@ -958,6 +962,7 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
top_p_override=overrides["top_p"],
frequency_penalty_override=overrides["frequency_penalty"],
presence_penalty_override=overrides["presence_penalty"],
user=user,
model=self.conversation_threads[thread.id].model,
custom_api_key=user_api_key,
)

@ -1,11 +1,13 @@
Instructions for GPTie:
Instructions for you:
The conversations are in this format; there can be an arbitrary number of newlines between chat entries. <username> can be any name, pay attention to who's talking. The text "<|endofstatement|>" is used to separate chat entries and make it easier for you to understand the context:
<username>: [MESSAGE 1] <|endofstatement|>
GPTie: [RESPONSE TO MESSAGE 1] <|endofstatement|>
<YOUR NAME>: [RESPONSE TO MESSAGE 1] <|endofstatement|>
<username>: [MESSAGE 2] <|endofstatement|>
GPTie: [RESPONSE TO MESSAGE 2] <|endofstatement|>
<YOUR NAME>: [RESPONSE TO MESSAGE 2] <|endofstatement|>
<YOUR NAME> will be given to you in an actual conversation.
...
Never say "<|endofstatement|>". Never say "GPTie:" in your response either.
Never say "<|endofstatement|>". Never say <YOUR NAME> or <username> in your response either.

@ -533,6 +533,7 @@ class Model:
presence_penalty_override=None,
max_tokens_override=None,
model=None,
stop=None,
custom_api_key=None,
) -> (
Tuple[dict, bool]
@ -554,6 +555,7 @@ class Model:
payload = {
"model": self.model if model is None else model,
"prompt": prompt,
"stop": "" if stop is None else stop,
"temperature": self.temp if temp_override is None else temp_override,
"top_p": self.top_p if top_p_override is None else top_p_override,
"max_tokens": self.max_tokens - tokens

@ -35,6 +35,7 @@ class TextService:
from_edit_command=False,
codex=False,
model=None,
user=None,
custom_api_key=None,
edited_request=False,
redo_request=False,
@ -68,6 +69,8 @@ class TextService:
else prompt
)
stop = f"{ctx.author.display_name if user is None else user.display_name}:"
from_context = isinstance(ctx, discord.ApplicationContext)
if not instruction:
@ -273,6 +276,7 @@ class TextService:
frequency_penalty_override=frequency_penalty_override,
presence_penalty_override=presence_penalty_override,
model=model,
stop=stop if not from_ask_command else None,
custom_api_key=custom_api_key,
)
@ -282,9 +286,7 @@ class TextService:
)
if from_ask_command or from_action:
# Append the prompt to the beginning of the response, in italics, then a new line
response_text = response_text.strip()
response_text = f"***{prompt}***\n\n{response_text}"
response_text = f"***{prompt}***{response_text}"
elif from_edit_command:
if codex:
response_text = response_text.strip()
@ -597,7 +599,7 @@ class TextService:
message.channel.id
].history.append(
EmbeddedConversationItem(
f"\n'{message.author.display_name}': {prompt} <|endofstatement|>\n",
f"\n{message.author.display_name}: {prompt} <|endofstatement|>\n",
0,
)
)

Loading…
Cancel
Save