From 4de4ab3f6e9677fff6b67a67ac5f1344c1b09f2f Mon Sep 17 00:00:00 2001
From: Kaveen Kumarasinghe
Date: Sun, 25 Dec 2022 21:41:00 -0500
Subject: [PATCH] bug squash!

---
 cogs/draw_image_generation.py        |  2 +-
 cogs/gpt_3_commands_and_converser.py | 13 +++++++++----
 models/openai_model.py               |  2 +-
 3 files changed, 11 insertions(+), 6 deletions(-)

diff --git a/cogs/draw_image_generation.py b/cogs/draw_image_generation.py
index c906edd..1cad590 100644
--- a/cogs/draw_image_generation.py
+++ b/cogs/draw_image_generation.py
@@ -350,7 +350,7 @@ class RedoButton(discord.ui.Button["SaveView"]):
         # Get the message and the prompt and call encapsulated_send
         message = redo_users[user_id].message
         prompt = redo_users[user_id].prompt
-        response_message = redo_users[user_id].response_message
+        response_message = redo_users[user_id].response
         message = await interaction.response.send_message(
             f"Regenerating the image for your original prompt, check the original message.",
             ephemeral=True,
diff --git a/cogs/gpt_3_commands_and_converser.py b/cogs/gpt_3_commands_and_converser.py
index 727225c..9d92412 100644
--- a/cogs/gpt_3_commands_and_converser.py
+++ b/cogs/gpt_3_commands_and_converser.py
@@ -350,8 +350,8 @@ class GPT3ComCon(commands.Cog, name="GPT3ComCon"):
             )
             await self.end_conversation(message)
 
-    def summarize_conversation(self, message, prompt):
-        response = self.model.send_summary_request(message, prompt)
+    async def summarize_conversation(self, message, prompt):
+        response = await self.model.send_summary_request(message, prompt)
         summarized_text = response["choices"][0]["text"]
 
         new_conversation_history = []
@@ -389,7 +389,7 @@ class GPT3ComCon(commands.Cog, name="GPT3ComCon"):
                     "give me one moment!"
                 )
 
-                self.summarize_conversation(message, new_prompt)
+                await self.summarize_conversation(message, new_prompt)
 
                 # Check again if the prompt is about to go past the token limit
                 new_prompt = (
@@ -445,7 +445,7 @@ class GPT3ComCon(commands.Cog, name="GPT3ComCon"):
             if not response_message:
                 if len(response_text) > self.TEXT_CUTOFF:
                     await self.paginate_and_send(
-                        response_text, message
+                        response_text.replace("<|endofstatement|>", ""), message
                     )  # No paginations for multi-messages.
                 else:
                     response_message = await message.reply(
@@ -550,9 +550,14 @@ class GPT3ComCon(commands.Cog, name="GPT3ComCon"):
         if not message.content.startswith("!g") and not conversing:
             return
 
+        # Dont accept !draw in a conversation
         if conversing and "!draw" in message.content:
             return
 
+        # Don't accept !imgoptimize in a conversation
+        if conversing and "!imgoptimize" in message.content:
+            return
+
         # If the user is conversing and they want to end it, end it immediately before we continue any further.
         if conversing and message.content.lower() in self.END_PROMPTS:
             await self.end_conversation(message)
diff --git a/models/openai_model.py b/models/openai_model.py
index 652afec..055e650 100644
--- a/models/openai_model.py
+++ b/models/openai_model.py
@@ -297,7 +297,7 @@ class Model:
         summary_request_text = []
         summary_request_text.append(
             "The following is a conversation instruction set and a conversation"
-            " between two people named Human, and GPTie. Do not summarize the instructions for GPTie, only the conversation. Summarize the conversation in a detailed fashion. If Human mentioned their name, be sure to mention it in the summary. Pay close attention to things the Human has told you, such as personal details."
+            " between two people, a Human, and GPTie. Firstly, determine the Human's name from the conversation history, then summarize the conversation. Do not summarize the instructions for GPTie, only the conversation. Summarize the conversation in a detailed fashion. If Human mentioned their name, be sure to mention it in the summary. Pay close attention to things the Human has told you, such as personal details."
         )
         summary_request_text.append(prompt + "\nDetailed summary of conversation: \n")
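
Below is a minimal, self-contained sketch of the coroutine pattern the gpt_3_commands_and_converser.py hunks depend on: once send_summary_request is a coroutine, every caller up the chain has to be declared async and must await it, otherwise the caller gets back an un-awaited coroutine object instead of the response dict it indexes with response["choices"][0]["text"]. The Model and Cog classes here are illustrative stand-ins, not the project's real implementations.

import asyncio


class Model:
    async def send_summary_request(self, message, prompt):
        # Stand-in for the real OpenAI call; only the response shape matters here.
        await asyncio.sleep(0)  # placeholder for network I/O
        return {"choices": [{"text": "The Human introduced themselves and asked for help."}]}


class Cog:
    def __init__(self):
        self.model = Model()

    async def summarize_conversation(self, message, prompt):
        # Mirrors the patch: the method is async and the request is awaited.
        response = await self.model.send_summary_request(message, prompt)
        return response["choices"][0]["text"]


async def main():
    cog = Cog()
    summary = await cog.summarize_conversation("message", "prompt")
    print(summary)


asyncio.run(main())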