Merge pull request #215 from Hikari-Haru/fix-wrong-apikey

Fix broken per-user API key handling on query and compose
Kaveen Kumarasinghe authored 1 year ago, committed by GitHub
commit df5bb8b48d
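
The fix drops the single LLMPredictor that __init__ built with the bot's own
OPENAI_TOKEN and instead constructs a fresh LLMPredictor inside the compose and
query paths, after OPENAI_API_KEY has been pointed at the requesting user's key.
A minimal sketch of that pattern follows; query_with_user_key is a hypothetical
helper, not a function from this codebase, and the imports assume the
llama_index and langchain versions the repo pins.

    import os
    from typing import Optional

    from langchain.llms import OpenAIChat
    from llama_index import LLMPredictor


    def query_with_user_key(user_api_key: Optional[str], bot_key: str) -> LLMPredictor:
        # Point the environment at the key for THIS request before any
        # OpenAI client object is created.
        os.environ["OPENAI_API_KEY"] = user_api_key if user_api_key else bot_key

        # Build the predictor per request. The old code created one predictor
        # in __init__ with the bot's key, so the per-user key set above was
        # never picked up.
        return LLMPredictor(
            llm=OpenAIChat(temperature=0, model_name="gpt-3.5-turbo")
        )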

@@ -167,13 +167,6 @@ class Index_handler:
     def __init__(self, bot, usage_service):
         self.bot = bot
         self.openai_key = os.getenv("OPENAI_TOKEN")
-        self.llm_predictor = LLMPredictor(
-            llm=OpenAIChat(
-                temperature=0,
-                model_name="gpt-3.5-turbo",
-                openai_api_key=self.openai_key,
-            )
-        )
         self.index_storage = defaultdict(IndexData)
         self.loop = asyncio.get_running_loop()
         self.usage_service = usage_service
@@ -753,6 +746,13 @@ class Index_handler:
             )
             index_objects.append(index)
+        llm_predictor = LLMPredictor(
+            llm=OpenAIChat(
+                temperature=0,
+                model_name="gpt-3.5-turbo"
+            )
+        )
         # For each index object, add its documents to a GPTTreeIndex
         if deep_compose:
             documents = []
@@ -791,14 +791,14 @@ class Index_handler:
                 partial(
                     GPTTreeIndex,
                     documents=documents,
-                    llm_predictor=self.llm_predictor,
+                    llm_predictor=llm_predictor,
                     embed_model=embedding_model,
                     use_async=True,
                 ),
             )
             await self.usage_service.update_usage(
-                self.llm_predictor.last_token_usage, chatgpt=True
+                llm_predictor.last_token_usage, chatgpt=True
             )
             await self.usage_service.update_usage(
                 embedding_model.last_token_usage, embeddings=True
@@ -915,6 +915,13 @@ class Index_handler:
         else:
             os.environ["OPENAI_API_KEY"] = user_api_key
+        llm_predictor = LLMPredictor(
+            llm=OpenAIChat(
+                temperature=0,
+                model_name="gpt-3.5-turbo"
+            )
+        )
         ctx_response = await ctx.respond(
             embed=EmbedStatics.build_index_query_progress_embed(query)
         )
@@ -928,13 +935,13 @@ class Index_handler:
                 query,
                 response_mode,
                 nodes,
-                self.llm_predictor,
+                llm_predictor,
                 embedding_model,
                 child_branch_factor,
             )
-            print("The last token usage was ", self.llm_predictor.last_token_usage)
+            print("The last token usage was ", llm_predictor.last_token_usage)
             await self.usage_service.update_usage(
-                self.llm_predictor.last_token_usage, chatgpt=True
+                llm_predictor.last_token_usage, chatgpt=True
             )
             await self.usage_service.update_usage(
                 embedding_model.last_token_usage, embeddings=True
@@ -943,7 +950,7 @@ class Index_handler:
         try:
             total_price = round(
                 await self.usage_service.get_price(
-                    self.llm_predictor.last_token_usage, chatgpt=True
+                    llm_predictor.last_token_usage, chatgpt=True
                 )
                 + await self.usage_service.get_price(
                     embedding_model.last_token_usage, embeddings=True
