remove prints

Kaveen Kumarasinghe 1 year ago
parent 585638c457
commit bb02300107
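The deleted lines were ad-hoc debug dumps of self.conversation_threads[...].history printed at several checkpoints before the model request. As a rough sketch only (not part of this commit; the logger name and helper below are hypothetical), the same diagnostics could instead be kept behind Python's standard logging module at DEBUG level rather than unconditional print() calls:

import logging

logger = logging.getLogger("gpt3_cog")  # hypothetical logger name

def log_history(conversation_threads, channel_id):
    # Emit the conversation history only when the logger is configured
    # for DEBUG, instead of printing it unconditionally to stdout.
    logger.debug(
        "Conversation history for channel %s: %s",
        channel_id,
        conversation_threads[channel_id].history,
    )

Enabling the output would then be a single logging.basicConfig(level=logging.DEBUG) call in the bot's entry point.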

@@ -650,9 +650,6 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
 self.conversation_threads[after.channel.id].count += 1
-print("-------------------------- Conversation POINT 1")
-print(self.conversation_threads[ctx.channel.id].history)
-print("---------------------------- END Conersation POINT 1")
 await self.encapsulated_send(
 id=after.channel.id,
@@ -888,9 +885,6 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
 new_prompt_item
 )
-print("-------------------------- Conversation POINT 2")
-print(self.conversation_threads[ctx.channel.id].history)
-print("---------------------------- END Conersation POINT 2")
 if edited_request:
 new_prompt = "".join(
 [
@@ -934,10 +928,6 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
 for prompt, timestamp in similar_prompts
 ]
-print("-------------------------- Conversation POINT 3")
-print(self.conversation_threads[ctx.channel.id].history)
-print("---------------------------- END Conersation POINT 3")
 # iterate UP TO the last X prompts in the history
 for i in range(
 1,
@@ -1023,10 +1013,6 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
 await self.end_conversation(ctx)
 return
-print("-------------------------- BEFORE MODEL REQUEST")
-print(self.conversation_threads[ctx.channel.id].history)
-print("---------------------------- BEFORE MODEL REQUEST")
 # Send the request to the model
 response = await self.model.send_request(
 new_prompt,
