Final redo fix; the id parameter can likely be removed from encapsulated_send, as it is not used

Rene Teigen 1 year ago
parent a5d675ff5c
commit 62e86595b8

@@ -182,14 +182,20 @@ class TextService:
         # remove duplicates from prompt_with_history and set the conversation history
         prompt_with_history = list(dict.fromkeys(prompt_with_history))
-        converser_cog.conversation_threads[
-            ctx.channel.id
-        ].history = prompt_with_history
         # Sort the prompt_with_history by increasing timestamp if pinecone is enabled
         if converser_cog.pinecone_service:
             prompt_with_history.sort(key=lambda x: x.timestamp)
+        # Remove the last two entries after sort, this is from the end of the list as prompt(redo), answer, prompt(original), leaving only prompt(original) and further history
+        if redo_request:
+            prompt_with_history = prompt_with_history[:-2]
+        converser_cog.conversation_threads[
+            ctx.channel.id
+        ].history = prompt_with_history
         # Ensure that the last prompt in this list is the prompt we just sent (new_prompt_item)
         if prompt_with_history[-1].text != new_prompt_item.text:
             try:
@@ -301,12 +307,12 @@ class TextService:
         # If the user is conversing, add the GPT response to their conversation history.
         if (
-            id in converser_cog.conversation_threads
+            ctx.channel.id in converser_cog.conversation_threads
             and not from_ask_command
             and not converser_cog.pinecone_service
         ):
             if not redo_request:
-                converser_cog.conversation_threads[id].history.append(
+                converser_cog.conversation_threads[ctx.channel.id].history.append(
                     EmbeddedConversationItem(
                         "\n"
                         + BOT_NAME
@@ -318,12 +324,12 @@ class TextService:
         # Embeddings case!
         elif (
-            id in converser_cog.conversation_threads
+            ctx.channel.id in converser_cog.conversation_threads
             and not from_ask_command
             and not from_edit_command
             and converser_cog.pinecone_service
         ):
-            conversation_id = id
+            conversation_id = ctx.channel.id
             # Create an embedding and timestamp for the prompt
             response_text = (

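For context, the redo handling that the first hunk settles on can be sketched in isolation. The snippet below is a minimal illustration, not the project's code: HistoryItem stands in for EmbeddedConversationItem, and the texts and timestamps are made up. It shows the same sequence as the committed logic: sort the per-channel history by increasing timestamp, then on a redo drop the two trailing entries (the answer and the redo prompt), so the original prompt is the last item before the request is re-sent.

    from dataclasses import dataclass

    @dataclass
    class HistoryItem:
        # Stand-in for EmbeddedConversationItem: only the fields the hunk relies on.
        text: str
        timestamp: int

    # Illustrative per-channel history, not yet sorted by timestamp.
    history = [
        HistoryItem("Bot: why did the chicken...", 101),  # answer
        HistoryItem("User: tell me a joke", 100),         # prompt (original)
        HistoryItem("User: tell me a joke", 102),         # prompt (redo)
    ]
    redo_request = True

    # Same shape as the committed change: sort by increasing timestamp, then
    # trim the trailing (answer, redo prompt) pair when this is a redo request,
    # leaving the original prompt and any earlier history.
    history.sort(key=lambda item: item.timestamp)
    if redo_request:
        history = history[:-2]

    assert len(history) == 1
    assert history[-1].text == "User: tell me a joke"

The other two hunks are unrelated to the trimming itself: they only switch the conversation-thread lookups from the id argument to ctx.channel.id, which is why the commit message notes that the id parameter of encapsulated_send can probably be dropped.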