Format Python code with psf/black push

github-actions 2 years ago
parent d780235d8d
commit 55fddbc37b
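This commit is the output of an automated run of the black formatter: over-long signatures, decorator argument lists, and call sites are reflowed to one argument per line with trailing commas, as the hunks below show. For anyone wanting to reproduce the same reflow locally, the sketch below uses black's public Python API (black.format_str and black.FileMode); the sample source string is a made-up stand-in shaped like one of the signatures touched here, not code taken from the repository.

import black

# A deliberately over-long signature, similar in shape to the ones reformatted below.
sample_source = (
    "async def load_index(self, ctx: discord.ApplicationContext, "
    "user_index: str, server_index: str):\n"
    "    await self.index_cog.load_index_command(ctx, user_index, server_index)\n"
)

# format_str only parses the text, so undefined names are fine; with the default
# 88-column limit the signature comes back split across multiple lines.
print(black.format_str(sample_source, mode=black.FileMode()))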

@@ -543,7 +543,9 @@ class Commands(discord.Cog, name="Commands"):
         required=False,
         autocomplete=File_autocompleter.get_server_indexes,
     )
-    async def load_index(self, ctx: discord.ApplicationContext, user_index: str, server_index: str):
+    async def load_index(
+        self, ctx: discord.ApplicationContext, user_index: str, server_index: str
+    ):
         await self.index_cog.load_index_command(ctx, user_index, server_index)

     @add_to_group("index")
@@ -617,7 +619,7 @@ class Commands(discord.Cog, name="Commands"):
         name="discord_backup",
         description="Save an index made from the whole server",
         guild_ids=ALLOWED_GUILDS,
-        checks=[Check.check_admin_roles(), Check.check_index_roles()]
+        checks=[Check.check_admin_roles(), Check.check_index_roles()],
     )
     @discord.guild_only()
     async def discord_backup(self, ctx: discord.ApplicationContext):
@@ -647,7 +649,11 @@ class Commands(discord.Cog, name="Commands"):
         choices=["default", "compact", "tree_summarize"],
     )
     async def query(
-        self, ctx: discord.ApplicationContext, query: str, nodes:int, response_mode: str
+        self,
+        ctx: discord.ApplicationContext,
+        query: str,
+        nodes: int,
+        response_mode: str,
     ):
         await self.index_cog.query_command(ctx, query, nodes, response_mode)

@@ -167,7 +167,6 @@ class File_autocompleter:
         except Exception:
             return ["No user indexes found, add an index"]

-
     async def get_server_indexes(ctx: discord.AutocompleteContext):
         """get all files in the indexes folder"""
         try:

@@ -44,9 +44,22 @@ def get_and_query(user_id, index_storage, query, response_mode, nodes, llm_predictor):
     ].get_index_or_throw()
     prompthelper = PromptHelper(4096, 500, 20)
     if isinstance(index, GPTTreeIndex):
-        response = index.query(query, verbose=True, child_branch_factor=2, llm_predictor=llm_predictor, prompt_helper=prompthelper)
+        response = index.query(
+            query,
+            verbose=True,
+            child_branch_factor=2,
+            llm_predictor=llm_predictor,
+            prompt_helper=prompthelper,
+        )
     else:
-        response = index.query(query, response_mode=response_mode, verbose=True, llm_predictor=llm_predictor, similarity_top_k=nodes, prompt_helper=prompthelper)
+        response = index.query(
+            query,
+            response_mode=response_mode,
+            verbose=True,
+            llm_predictor=llm_predictor,
+            similarity_top_k=nodes,
+            prompt_helper=prompthelper,
+        )
     return response
@@ -273,7 +286,9 @@ class Index_handler:
             await ctx.respond("Failed to set index")
             traceback.print_exc()

-    async def load_index(self, ctx: discord.ApplicationContext, index, server, user_api_key):
+    async def load_index(
+        self, ctx: discord.ApplicationContext, index, server, user_api_key
+    ):
         if not user_api_key:
             os.environ["OPENAI_API_KEY"] = self.openai_key
         else:
@@ -281,9 +296,13 @@ class Index_handler:
         try:
             if server:
-                index_file = EnvService.find_shared_file(f"indexes/{ctx.guild.id}/{index}")
+                index_file = EnvService.find_shared_file(
+                    f"indexes/{ctx.guild.id}/{index}"
+                )
             else:
-                index_file = EnvService.find_shared_file(f"indexes/{ctx.user.id}/{index}")
+                index_file = EnvService.find_shared_file(
+                    f"indexes/{ctx.user.id}/{index}"
+                )
             index = await self.loop.run_in_executor(
                 None, partial(self.index_load_file, index_file)
             )
@@ -361,7 +380,9 @@ class Index_handler:
             index = await self.loop.run_in_executor(
                 None, partial(self.index_discord, document)
             )
-            Path(app_root_path() / "indexes" / str(ctx.guild.id)).mkdir(parents=True, exist_ok=True)
+            Path(app_root_path() / "indexes" / str(ctx.guild.id)).mkdir(
+                parents=True, exist_ok=True
+            )
             index.save_to_disk(
                 app_root_path()
                 / "indexes"
@@ -375,7 +396,12 @@ class Index_handler:
             traceback.print_exc()

     async def query(
-        self, ctx: discord.ApplicationContext, query: str, response_mode, nodes, user_api_key
+        self,
+        ctx: discord.ApplicationContext,
+        query: str,
+        response_mode,
+        nodes,
+        user_api_key,
     ):
         if not user_api_key:
             os.environ["OPENAI_API_KEY"] = self.openai_key
@@ -387,7 +413,13 @@ class Index_handler:
             response = await self.loop.run_in_executor(
                 None,
                 partial(
-                    get_and_query, ctx.user.id, self.index_storage, query, response_mode, nodes, llm_predictor
+                    get_and_query,
+                    ctx.user.id,
+                    self.index_storage,
+                    query,
+                    response_mode,
+                    nodes,
+                    llm_predictor,
                 ),
             )
             print("The last token usage was ", llm_predictor.last_token_usage)

@@ -18,7 +18,9 @@ class UsageService:
         tokens_used = int(tokens_used)
         price = (tokens_used / 1000) * 0.02
         usage = await self.get_usage()
-        print(f"Cost -> Old: {str(usage)} | New: {str(usage + float(price))}, used {str(float(price))} credits")
+        print(
+            f"Cost -> Old: {str(usage)} | New: {str(usage + float(price))}, used {str(float(price))} credits"
+        )
         # Do the same as above but with aiofiles
         async with aiofiles.open(self.usage_file_path, "w") as f:
             await f.write(str(usage + float(price)))
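The usage hunk above prices consumption at 0.02 credits per 1,000 tokens before logging and persisting the new total. As a quick sanity check of that formula (the token count here is an arbitrary example, not a value from this commit):

# Same arithmetic as the price line in the hunk above, with a made-up token count.
tokens_used = 2500
price = (tokens_used / 1000) * 0.02
print(price)  # 0.05 credits added to the stored usage total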
