Multi-tenancy API key support

Kaveen Kumarasinghe 1 year ago
parent 413c1ae921
commit cfb7eb0903

@ -26,4 +26,5 @@ COPY --from=builder /install /usr/local/lib/python${PY_VERSION}/site-packages
RUN mkdir -p /opt/gpt3discord/etc
COPY gpt3discord.py /opt/gpt3discord/bin/
COPY image_optimizer_pretext.txt conversation_starter_pretext.txt conversation_starter_pretext_minimal.txt /opt/gpt3discord/share/
COPY openers /opt/gpt3discord/share/openers
CMD ["python3", "/opt/gpt3discord/bin/gpt3discord.py"]

@ -23,6 +23,9 @@
A big shoutout to `CrypticHeaven-Lab` for hitting our first sponsorship goal!
# Recent Notable Updates
- **Allow each individual user to enter their own API Key!** - Each request that a user makes will be made using their own API key! Check out the User-Input API Key section in this README for more details.
- **Permanent memory with embeddings and PineconeDB finished!** - An initial alpha version of permanent memory is now done! This allows you to chat with GPT3 infinitely and accurately, and save tokens, by using embeddings. *Please read the Permanent Memory section for more information!*
@ -35,8 +38,6 @@ A big shoutout to `CrypticHeaven-Lab` for hitting our first sponsorship goal!
- **AUTOMATIC CHAT SUMMARIZATION!** - When the context limit of a conversation is reached, the bot will use GPT3 itself to summarize the conversation to reduce the tokens and continue conversing with you. This allows you to chat for a long time!
- Custom conversation openers from https://github.com/f/awesome-chatgpt-prompts were integrated into the bot; check out `/gpt converse opener_file`! The bot now has built-in support to make GPT3 behave like various personalities, such as a life coach, Python interpreter, interviewer, text-based adventure game, and much more!
# Features
- **Directly prompt GPT3 with `/gpt ask <prompt>`**
@ -147,6 +148,26 @@ Moreover, an important thing to keep in mind is: pinecone indexes are currently
Permanent memory using Pinecone is still in alpha. I will be working on cleaning up this work, adding auto-clearing, and optimizing for stability and reliability; any help and feedback is appreciated (**add me on Discord Kaveen#0001 for pinecone help**)! If at any time you're having too many issues with Pinecone, simply remove the `PINECONE_TOKEN` line in your `.env` file and the bot will revert to using conversation summarizations.
# User-Input API Keys (Multi-key tenancy)
This bot supports multi-user tenancy with regard to API keys. This means that, if you want, you can require each user to enter their own API key in order to use commands that rely on GPT3 and DALL-E.
To enable this, add the following line to the end of your `.env` file:
```env
USER_INPUT_API_KEYS="True"
```
Then, restart the bot, and it will set up the system for everyone to input their own API keys.
The bot uses SQLite to store users' API keys: each user's key is saved as a USER_ID <> API_KEY mapping and persists across restarts. All the data is stored in a file called `user_key_db.sqlite` in the bot's current working directory.
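As a rough illustration, the lookup amounts to a `sqlitedict` mapping like the sketch below (the helper names are illustrative, not the bot's actual API):
```python
from sqlitedict import SqliteDict

# Illustrative sketch only; the file and mapping mirror what the bot uses.
USER_KEY_DB = SqliteDict("user_key_db.sqlite")

def get_stored_key(user_id):
    # Returns the stored key, or None if this user has not set one up yet.
    return None if user_id not in USER_KEY_DB else USER_KEY_DB[user_id]

def store_key(user_id, api_key):
    USER_KEY_DB[user_id] = api_key
    USER_KEY_DB.commit()  # sqlitedict needs an explicit commit to persist to disk
```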
With this feature enabled, any attempt to use a GPT3 or DALL-E command without a valid API key set for the user will pop up the following modal for them to enter their API key:
<img src="https://i.imgur.com/ZDScoWk.png"/>
Once the user enters their key, the bot sends a small test request to OpenAI to validate that the key actually works. If it doesn't, the bot tells the user why the validation failed and asks them to try again.
After the user's key is validated, they will be able to use the GPT3 and DALL-E commands.
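Under the hood, that validation is just a tiny completion request signed with the user's key, mirroring the `send_test_request` helper added in this commit. A minimal sketch (the literal model name here is an assumption; the commit uses the `Models.CURIE` constant):
```python
import aiohttp

async def validate_openai_key(api_key: str) -> bool:
    # Tiny completion request made with the user's key; a bad key comes back
    # with an "error" field instead of a "usage" field in the response.
    payload = {"model": "text-curie-001", "prompt": "test.", "max_tokens": 10}  # model name assumed
    headers = {"Authorization": f"Bearer {api_key}"}
    async with aiohttp.ClientSession() as session:
        async with session.post(
            "https://api.openai.com/v1/completions", json=payload, headers=headers
        ) as resp:
            response = await resp.json()
    return "usage" in response
```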
The Moderations service still uses the main API key defined in the `.env` file. The Pinecone and Discord tokens are likewise configured per host, not per user.
# Configuration

@ -10,6 +10,9 @@ from pycord.multicog import add_to_group
# We don't use the converser cog here because we want to be able to redo for the last images and text prompts at the same time
from sqlitedict import SqliteDict
from cogs.gpt_3_commands_and_converser import GPT3ComCon
from models.env_service_model import EnvService
from models.user_model import RedoUser
@ -17,6 +20,11 @@ redo_users = {}
users_to_interactions = {}
ALLOWED_GUILDS = EnvService.get_allowed_guilds()
USER_INPUT_API_KEYS = EnvService.get_user_input_api_keys()
USER_KEY_DB = None
if USER_INPUT_API_KEYS:
USER_KEY_DB = SqliteDict("user_key_db.sqlite")
class DrawDallEService(discord.Cog, name="DrawDallEService"):
def __init__(
@ -40,6 +48,7 @@ class DrawDallEService(discord.Cog, name="DrawDallEService"):
response_message=None,
vary=None,
draw_from_optimizer=None,
custom_api_key=None,
):
await asyncio.sleep(0)
# send the prompt to the model
@ -47,7 +56,7 @@ class DrawDallEService(discord.Cog, name="DrawDallEService"):
try:
file, image_urls = await self.model.send_image_request(
ctx, prompt, vary=vary if not draw_from_optimizer else None
ctx, prompt, vary=vary if not draw_from_optimizer else None, custom_api_key=custom_api_key
)
except ValueError as e:
(
@ -87,7 +96,7 @@ class DrawDallEService(discord.Cog, name="DrawDallEService"):
)
await result_message.edit(
view=SaveView(ctx, image_urls, self, self.converser_cog, result_message)
view=SaveView(ctx, image_urls, self, self.converser_cog, result_message, custom_api_key=custom_api_key)
)
self.converser_cog.users_to_interactions[user_id] = []
@ -106,7 +115,7 @@ class DrawDallEService(discord.Cog, name="DrawDallEService"):
file=file,
)
await message.edit(
view=SaveView(ctx, image_urls, self, self.converser_cog, message)
view=SaveView(ctx, image_urls, self, self.converser_cog, message, custom_api_key=custom_api_key)
)
else: # Varying case
if not draw_from_optimizer:
@ -123,6 +132,7 @@ class DrawDallEService(discord.Cog, name="DrawDallEService"):
self.converser_cog,
result_message,
True,
custom_api_key=custom_api_key,
)
)
@ -134,7 +144,7 @@ class DrawDallEService(discord.Cog, name="DrawDallEService"):
)
await result_message.edit(
view=SaveView(
ctx, image_urls, self, self.converser_cog, result_message
ctx, image_urls, self, self.converser_cog, result_message, custom_api_key=custom_api_key
)
)
@ -155,6 +165,12 @@ class DrawDallEService(discord.Cog, name="DrawDallEService"):
)
@discord.option(name="prompt", description="The prompt to draw from", required=True)
async def draw(self, ctx: discord.ApplicationContext, prompt: str):
user_api_key = None
if USER_INPUT_API_KEYS:
user_api_key = await GPT3ComCon.get_user_api_key(ctx.user.id, ctx)
if not user_api_key:
return
await ctx.defer()
user = ctx.user
@ -163,7 +179,7 @@ class DrawDallEService(discord.Cog, name="DrawDallEService"):
return
try:
asyncio.ensure_future(self.encapsulated_send(user.id, prompt, ctx))
asyncio.ensure_future(self.encapsulated_send(user.id, prompt, ctx, custom_api_key=user_api_key))
except Exception as e:
print(e)
@ -226,6 +242,7 @@ class SaveView(discord.ui.View):
message,
no_retry=False,
only_save=None,
custom_api_key=None,
):
super().__init__(
timeout=3600 if not only_save else None
@ -236,15 +253,16 @@ class SaveView(discord.ui.View):
self.no_retry = no_retry
self.converser_cog = converser_cog
self.message = message
self.custom_api_key = custom_api_key
for x in range(1, len(image_urls) + 1):
self.add_item(SaveButton(x, image_urls[x - 1]))
if not only_save:
if not no_retry:
self.add_item(RedoButton(self.cog, converser_cog=self.converser_cog))
self.add_item(RedoButton(self.cog, converser_cog=self.converser_cog, custom_api_key=self.custom_api_key))
for x in range(1, len(image_urls) + 1):
self.add_item(
VaryButton(
x, image_urls[x - 1], self.cog, converser_cog=self.converser_cog
x, image_urls[x - 1], self.cog, converser_cog=self.converser_cog, custom_api_key=self.custom_api_key
)
)
@ -270,12 +288,13 @@ class SaveView(discord.ui.View):
class VaryButton(discord.ui.Button):
def __init__(self, number, image_url, cog, converser_cog):
def __init__(self, number, image_url, cog, converser_cog, custom_api_key):
super().__init__(style=discord.ButtonStyle.blurple, label="Vary " + str(number))
self.number = number
self.image_url = image_url
self.cog = cog
self.converser_cog = converser_cog
self.custom_api_key = custom_api_key
async def callback(self, interaction: discord.Interaction):
user_id = interaction.user.id
@ -318,6 +337,7 @@ class VaryButton(discord.ui.Button):
interaction.message,
response_message=response_message,
vary=self.image_url,
custom_api_key=self.custom_api_key,
)
)
@ -354,10 +374,11 @@ class SaveButton(discord.ui.Button["SaveView"]):
class RedoButton(discord.ui.Button["SaveView"]):
def __init__(self, cog, converser_cog):
def __init__(self, cog, converser_cog, custom_api_key):
super().__init__(style=discord.ButtonStyle.danger, label="Retry")
self.cog = cog
self.converser_cog = converser_cog
self.custom_api_key = custom_api_key
async def callback(self, interaction: discord.Interaction):
user_id = interaction.user.id
@ -383,5 +404,5 @@ class RedoButton(discord.ui.Button["SaveView"]):
self.converser_cog.users_to_interactions[user_id].append(message.id)
asyncio.ensure_future(
self.cog.encapsulated_send(user_id, prompt, ctx, response_message)
self.cog.encapsulated_send(user_id, prompt, ctx, response_message, custom_api_key=self.custom_api_key)
)

@ -6,6 +6,7 @@ import traceback
import sys
from pathlib import Path
import aiofiles
import discord
from pycord.multicog import add_to_group
@ -14,10 +15,12 @@ from models.deletion_service_model import Deletion
from models.env_service_model import EnvService
from models.message_model import Message
from models.moderations_service_model import Moderation
from models.openai_model import Model
from models.user_model import RedoUser, Thread, EmbeddedConversationItem
from models.check_model import Check
from models.autocomplete_model import Settings_autocompleter, File_autocompleter
from collections import defaultdict
from sqlitedict import SqliteDict
original_message = {}
ALLOWED_GUILDS = EnvService.get_allowed_guilds()
@ -26,6 +29,14 @@ if sys.platform == "win32":
else:
separator = "/"
USER_INPUT_API_KEYS = EnvService.get_user_input_api_keys()
USER_KEY_DB = None
if USER_INPUT_API_KEYS:
print("This server was configured to enforce user input API keys. Doing the required database setup now")
USER_KEY_DB = SqliteDict("user_key_db.sqlite")
print("Retrieved/created the user key database")
class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
def __init__(
@ -130,6 +141,18 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
checks=[Check.check_admin_roles()],
)
@staticmethod
async def get_user_api_key(user_id, ctx):
user_api_key = None if user_id not in USER_KEY_DB else USER_KEY_DB[user_id]
if user_api_key is None or user_api_key == "":
modal = SetupModal(title="API Key Setup")
if isinstance(ctx, discord.ApplicationContext):
await ctx.send_modal(modal)
await ctx.send_followup("You must set up your API key before using this command.")
else:
await ctx.reply("You must set up your API key before typing in a GPT3 powered channel, type `/setup` to enter your API key.")
return user_api_key
async def load_file(self, file, ctx):
try:
async with aiofiles.open(file, "r") as f:
@ -175,6 +198,9 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
self.debug_channel = self.bot.get_guild(self.DEBUG_GUILD).get_channel(
self.DEBUG_CHANNEL
)
if USER_INPUT_API_KEYS:
print("This bot was set to use user input API keys. Doing the required SQLite setup now")
await self.bot.sync_commands(
commands=None,
method="individual",
@ -616,6 +642,12 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
# GPT3 command
if conversing:
# Extract all the text after the !g and use it as the prompt.
user_api_key = None
if USER_INPUT_API_KEYS:
user_api_key = await GPT3ComCon.get_user_api_key(message.author.id, message)
if not user_api_key:
return
prompt = content
await self.check_conversation_limit(message)
@ -690,6 +722,7 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
message.channel.id,
primary_prompt,
message,
custom_api_key=user_api_key,
)
def cleanse_response(self, response_text):
@ -711,7 +744,9 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
frequency_penalty_override=None,
presence_penalty_override=None,
from_g_command=False,
custom_api_key=None,
):
print("The custom API key was given as: " + str(custom_api_key))
new_prompt = prompt + "\nGPTie: " if not from_g_command else prompt
from_context = isinstance(ctx, discord.ApplicationContext)
@ -721,7 +756,7 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
try:
# This is the EMBEDDINGS CASE
if self.pinecone_service and not from_g_command:
if self.pinecone_service and ctx.channel.id in self.conversation_threads:
# The conversation_id is the id of the thread
conversation_id = ctx.channel.id
@ -756,11 +791,11 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
# Create and upsert the embedding for the conversation id, prompt, timestamp
embedding = await self.pinecone_service.upsert_conversation_embedding(
self.model, conversation_id, new_prompt, timestamp
self.model, conversation_id, new_prompt, timestamp, custom_api_key=custom_api_key,
)
embedding_prompt_less_author = await self.model.send_embedding_request(
prompt_less_author
prompt_less_author, custom_api_key=custom_api_key
) # Use the version of
# the prompt without the author's name for better clarity on retrieval.
@ -866,6 +901,7 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
top_p_override=top_p_override,
frequency_penalty_override=frequency_penalty_override,
presence_penalty_override=presence_penalty_override,
custom_api_key=custom_api_key,
)
# Clean the request response
@ -918,7 +954,7 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
# Create and upsert the embedding for the conversation id, prompt, timestamp
embedding = await self.pinecone_service.upsert_conversation_embedding(
self.model, conversation_id, response_text, timestamp
self.model, conversation_id, response_text, timestamp, custom_api_key=custom_api_key
)
# Cleanse
@ -932,12 +968,12 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
response_message = (
await ctx.respond(
response_text,
view=ConversationView(ctx, self, ctx.channel.id),
view=ConversationView(ctx, self, ctx.channel.id, custom_api_key),
)
if from_context
else await ctx.reply(
response_text,
view=ConversationView(ctx, self, ctx.channel.id),
view=ConversationView(ctx, self, ctx.channel.id, custom_api_key),
)
)
@ -1053,11 +1089,17 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
frequency_penalty: float,
presence_penalty: float,
):
await ctx.defer()
user = ctx.user
prompt = prompt.strip()
user_api_key = None
if USER_INPUT_API_KEYS:
user_api_key = await GPT3ComCon.get_user_api_key(user.id, ctx)
if not user_api_key:
return
await ctx.defer()
# CONVERSE Checks here TODO
# Send the request to the model
# If conversing, the prompt to send is the history, otherwise, it's just the prompt
@ -1071,6 +1113,7 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
frequency_penalty_override=frequency_penalty,
presence_penalty_override=presence_penalty,
from_g_command=True,
custom_api_key=user_api_key,
)
@add_to_group("gpt")
@ -1111,13 +1154,20 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
private,
minimal,
):
user = ctx.user
# If we are in user input api keys mode, check if the user has entered their api key before letting them continue
user_api_key = None
if USER_INPUT_API_KEYS:
user_api_key = await GPT3ComCon.get_user_api_key(user.id, ctx)
if not user_api_key:
return
if private:
await ctx.defer(ephemeral=True)
elif not private:
await ctx.defer()
user = ctx.user
if user.id in self.conversation_thread_owners:
message = await ctx.respond(
"You've already created a thread, end it before creating a new one",
@ -1202,6 +1252,7 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
if thread.id not in self.conversation_threads or self.pinecone_service
else "".join(self.conversation_threads[thread.id].history),
thread_message,
custom_api_key=user_api_key,
)
self.awaiting_responses.remove(user_id_normalized)
if thread.id in self.awaiting_thread_responses:
@ -1317,6 +1368,17 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
await ctx.defer()
await self.send_help_text(ctx)
@discord.slash_command(
name="setup", description="Setup your API key for use with GPT3Discord", guild_ids=ALLOWED_GUILDS
)
@discord.guild_only()
async def setup(self, ctx: discord.ApplicationContext):
if not USER_INPUT_API_KEYS:
await ctx.respond("This server doesn't support user input API keys.", ephemeral=True, delete_after=30)
modal = SetupModal(title="API Key Setup")
await ctx.send_modal(modal)
@add_to_group("system")
@discord.slash_command(
name="usage",
@ -1372,11 +1434,12 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
class ConversationView(discord.ui.View):
def __init__(self, ctx, converser_cog, id):
def __init__(self, ctx, converser_cog, id, custom_api_key=None):
super().__init__(timeout=3600) # 1 hour interval to redo.
self.converser_cog = converser_cog
self.ctx = ctx
self.add_item(RedoButton(self.converser_cog))
self.custom_api_key= custom_api_key
self.add_item(RedoButton(self.converser_cog, self.custom_api_key))
if id in self.converser_cog.conversation_threads:
self.add_item(EndConvoButton(self.converser_cog))
@ -1427,9 +1490,10 @@ class EndConvoButton(discord.ui.Button["ConversationView"]):
class RedoButton(discord.ui.Button["ConversationView"]):
def __init__(self, converser_cog):
def __init__(self, converser_cog, custom_api_key):
super().__init__(style=discord.ButtonStyle.danger, label="Retry")
self.converser_cog = converser_cog
self.custom_api_key = custom_api_key
async def callback(self, interaction: discord.Interaction):
@ -1448,7 +1512,7 @@ class RedoButton(discord.ui.Button["ConversationView"]):
)
await self.converser_cog.encapsulated_send(
id=user_id, prompt=prompt, ctx=ctx, response_message=response_message
id=user_id, prompt=prompt, ctx=ctx, response_message=response_message, custom_api_key=self.custom_api_key
)
else:
await interaction.response.send_message(
@ -1456,3 +1520,38 @@ class RedoButton(discord.ui.Button["ConversationView"]):
ephemeral=True,
delete_after=10,
)
class SetupModal(discord.ui.Modal):
def __init__(self, *args, **kwargs) -> None:
super().__init__(*args, **kwargs)
self.add_item(discord.ui.InputText(label="OpenAI API Key", placeholder="sk--......", ))
async def callback(self, interaction: discord.Interaction):
user = interaction.user
api_key = self.children[0].value
# Validate that api_key is indeed in this format
if not re.match(r"sk-[a-zA-Z0-9]{32}", api_key):
await interaction.response.send_message("Your API key looks invalid, please check that it is correct before proceeding. Please run the /setup command to set your key.", ephemeral=True, delete_after=100)
else:
# We can save the key for the user to the database.
# Make a test request using the api key to ensure that it is valid.
try:
await Model.send_test_request(api_key)
await interaction.response.send_message("Your API key was successfully validated.", ephemeral=True, delete_after=10)
except Exception as e:
await interaction.response.send_message(f"Your API key looks invalid, the API returned: {e}. Please check that your API key is correct before proceeding", ephemeral=True, delete_after=30)
return
# Save the key to the database
try:
USER_KEY_DB[user.id] = api_key
USER_KEY_DB.commit()
await interaction.followup.send("Your API key was successfully saved.", ephemeral=True, delete_after=10)
except Exception as e:
traceback.print_exc()
await interaction.followup.send("There was an error saving your API key.", ephemeral=True, delete_after=30)
return
pass

@ -2,13 +2,18 @@ import re
import traceback
import discord
from sqlitedict import SqliteDict
from cogs.gpt_3_commands_and_converser import GPT3ComCon
from models.env_service_model import EnvService
from models.user_model import RedoUser
from pycord.multicog import add_to_group
ALLOWED_GUILDS = EnvService.get_allowed_guilds()
USER_INPUT_API_KEYS = EnvService.get_user_input_api_keys()
USER_KEY_DB = None
if USER_INPUT_API_KEYS:
USER_KEY_DB = SqliteDict("user_key_db.sqlite")
class ImgPromptOptimizer(discord.Cog, name="ImgPromptOptimizer"):
_OPTIMIZER_PRETEXT = "Optimize the following text for DALL-E image generation to have the most detailed and realistic image possible. Prompt:"
@ -57,6 +62,12 @@ class ImgPromptOptimizer(discord.Cog, name="ImgPromptOptimizer"):
)
@discord.guild_only()
async def optimize(self, ctx: discord.ApplicationContext, prompt: str):
user_api_key = None
if USER_INPUT_API_KEYS:
user_api_key = await GPT3ComCon.get_user_api_key(ctx.user.id, ctx)
if not user_api_key:
return
await ctx.defer()
user = ctx.user
@ -80,6 +91,7 @@ class ImgPromptOptimizer(discord.Cog, name="ImgPromptOptimizer"):
presence_penalty_override=0.5,
best_of_override=2,
max_tokens_override=80,
custom_api_key=user_api_key,
)
# THIS USES MORE TOKENS THAN A NORMAL REQUEST! This will use roughly 4000 tokens, and will repeat the query
@ -111,7 +123,7 @@ class ImgPromptOptimizer(discord.Cog, name="ImgPromptOptimizer"):
self.converser_cog.redo_users[user.id].add_interaction(response_message.id)
await response_message.edit(
view=OptimizeView(
self.converser_cog, self.image_service_cog, self.deletion_queue
self.converser_cog, self.image_service_cog, self.deletion_queue, custom_api_key=user_api_key,
)
)
@ -130,21 +142,23 @@ class ImgPromptOptimizer(discord.Cog, name="ImgPromptOptimizer"):
class OptimizeView(discord.ui.View):
def __init__(self, converser_cog, image_service_cog, deletion_queue):
def __init__(self, converser_cog, image_service_cog, deletion_queue, custom_api_key=None):
super().__init__(timeout=None)
self.cog = converser_cog
self.image_service_cog = image_service_cog
self.deletion_queue = deletion_queue
self.add_item(RedoButton(self.cog, self.image_service_cog, self.deletion_queue))
self.add_item(DrawButton(self.cog, self.image_service_cog, self.deletion_queue))
self.custom_api_key = custom_api_key
self.add_item(RedoButton(self.cog, self.image_service_cog, self.deletion_queue, self.custom_api_key))
self.add_item(DrawButton(self.cog, self.image_service_cog, self.deletion_queue, self.custom_api_key))
class DrawButton(discord.ui.Button["OptimizeView"]):
def __init__(self, converser_cog, image_service_cog, deletion_queue):
def __init__(self, converser_cog, image_service_cog, deletion_queue, custom_api_key):
super().__init__(style=discord.ButtonStyle.green, label="Draw")
self.converser_cog = converser_cog
self.image_service_cog = image_service_cog
self.deletion_queue = deletion_queue
self.custom_api_key = custom_api_key
async def callback(self, interaction: discord.Interaction):
@ -187,15 +201,17 @@ class DrawButton(discord.ui.Button["OptimizeView"]):
msg,
True,
True,
custom_api_key=self.custom_api_key,
)
class RedoButton(discord.ui.Button["OptimizeView"]):
def __init__(self, converser_cog, image_service_cog, deletion_queue):
def __init__(self, converser_cog, image_service_cog, deletion_queue, custom_api_key=None):
super().__init__(style=discord.ButtonStyle.danger, label="Retry")
self.converser_cog = converser_cog
self.image_service_cog = image_service_cog
self.deletion_queue = deletion_queue
self.custom_api_key = custom_api_key
async def callback(self, interaction: discord.Interaction):
interaction_id = interaction.message.id
@ -219,6 +235,7 @@ class RedoButton(discord.ui.Button["OptimizeView"]):
prompt=prompt,
ctx=ctx,
response_message=response_message,
custom_api_key=self.custom_api_key,
)
else:
await interaction.response.send_message(

@ -24,7 +24,7 @@ from models.openai_model import Model
from models.usage_service_model import UsageService
from models.env_service_model import EnvService
__version__ = "4.2.6"
__version__ = "5.0"
"""
The pinecone service is used to store and retrieve conversation embeddings.

@ -44,8 +44,9 @@ class EnvService:
def find_shared_file(file_name):
share_file_paths = []
share_dir = os.getenv("SHARE_DIR")
if share_dir != None:
share_file_paths.append(share_dir)
if share_dir is not None:
share_file_paths.append(Path(share_dir) / file_name)
share_file_paths.extend(
[
app_root_path() / "share" / file_name,
@ -181,3 +182,14 @@ class EnvService:
except:
moderations_alert_channel = None
return moderations_alert_channel
@staticmethod
def get_user_input_api_keys():
try:
user_input_api_keys = os.getenv("USER_INPUT_API_KEYS")
if user_input_api_keys.lower().strip() == "true":
return True
else:
return False
except:
return False

@ -351,7 +351,7 @@ class Model:
+ str(response["error"]["message"])
)
async def send_embedding_request(self, text):
async def send_embedding_request(self, text, custom_api_key=None):
async with aiohttp.ClientSession() as session:
payload = {
"model": Models.EMBEDDINGS,
@ -359,7 +359,7 @@ class Model:
}
headers = {
"Content-Type": "application/json",
"Authorization": f"Bearer {self.openai_key}",
"Authorization": f"Bearer {self.openai_key if not custom_api_key else custom_api_key}",
}
async with session.post(
"https://api.openai.com/v1/embeddings", json=payload, headers=headers
@ -388,7 +388,7 @@ class Model:
) as response:
return await response.json()
async def send_summary_request(self, prompt):
async def send_summary_request(self, prompt, custom_api_key=None):
"""
Sends a summary request to the OpenAI API
"""
@ -416,7 +416,7 @@ class Model:
}
headers = {
"Content-Type": "application/json",
"Authorization": f"Bearer {self.openai_key}",
"Authorization": f"Bearer {self.openai_key if not custom_api_key else custom_api_key}",
}
async with session.post(
"https://api.openai.com/v1/completions", json=payload, headers=headers
@ -439,6 +439,7 @@ class Model:
frequency_penalty_override=None,
presence_penalty_override=None,
max_tokens_override=None,
custom_api_key=None,
) -> (
dict,
bool,
@ -455,6 +456,10 @@ class Model:
print(
f"Overrides -> temp:{temp_override}, top_p:{top_p_override} frequency:{frequency_penalty_override}, presence:{presence_penalty_override}"
)
if custom_api_key:
print("USING A CUSTOM API KEY FOR THIS!!!")
print(custom_api_key)
print("END API KEY")
async with aiohttp.ClientSession() as session:
payload = {
@ -473,7 +478,7 @@ class Model:
else frequency_penalty_override,
"best_of": self.best_of if not best_of_override else best_of_override,
}
headers = {"Authorization": f"Bearer {self.openai_key}"}
headers = {"Authorization": f"Bearer {self.openai_key if not custom_api_key else custom_api_key}"}
async with session.post(
"https://api.openai.com/v1/completions", json=payload, headers=headers
) as resp:
@ -485,8 +490,31 @@ class Model:
return response
@staticmethod
async def send_test_request(api_key):
async with aiohttp.ClientSession() as session:
payload = {
"model": Models.CURIE,
"prompt": "test.",
"temperature": 1,
"top_p": 1,
"max_tokens": 10,
}
headers = {"Authorization": f"Bearer {api_key}"}
async with session.post(
"https://api.openai.com/v1/completions", json=payload, headers=headers
) as resp:
response = await resp.json()
try:
int(response["usage"]["total_tokens"])
except:
raise ValueError(str(response["error"]["message"]))
return response
async def send_image_request(
self, ctx, prompt, vary=None
self, ctx, prompt, vary=None, custom_api_key=None
) -> tuple[File, list[Any]]:
# Validate that all the parameters are in a good state before we send the request
words = len(prompt.split(" "))
@ -505,7 +533,7 @@ class Model:
payload = {"prompt": prompt, "n": self.num_images, "size": self.image_size}
headers = {
"Content-Type": "application/json",
"Authorization": f"Bearer {self.openai_key}",
"Authorization": f"Bearer {self.openai_key if not custom_api_key else custom_api_key}",
}
async with aiohttp.ClientSession() as session:
async with session.post(
@ -526,14 +554,18 @@ class Model:
async with session.post(
"https://api.openai.com/v1/images/variations",
headers={
"Authorization": "Bearer " + self.openai_key,
},
headers={
"Authorization": f"Bearer {self.openai_key if not custom_api_key else custom_api_key}",
},
data=data,
) as resp:
response = await resp.json()
print(response)
if custom_api_key:
print("USING A CUSTOM API KEY FOR THIS!!!")
print(custom_api_key)
print("END API KEY")
image_urls = []
for result in response["data"]:

@ -15,7 +15,7 @@ class PineconeService:
return response
async def upsert_conversation_embedding(
self, model, conversation_id: int, text, timestamp
self, model, conversation_id: int, text, timestamp, custom_api_key=None
):
# If the text is > 512 characters, we need to split it up into multiple entries.
first_embedding = None
@ -26,7 +26,7 @@ class PineconeService:
print("The split chunk is ", chunk)
# Create an embedding for the split chunk
embedding = await model.send_embedding_request(chunk)
embedding = await model.send_embedding_request(chunk, custom_api_key=custom_api_key)
if not first_embedding:
first_embedding = embedding
self.index.upsert(
@ -38,7 +38,7 @@ class PineconeService:
)
return first_embedding
else:
embedding = await model.send_embedding_request(text)
embedding = await model.send_embedding_request(text, custom_api_key=custom_api_key)
self.index.upsert(
[
(

@ -25,7 +25,8 @@ dependencies = [
"transformers",
"pycord-multicog",
"aiofiles",
"pinecone-client"
"pinecone-client",
"sqlitedict",
]
dynamic = ["version"]
[project.scripts]

@ -6,3 +6,4 @@ transformers==4.25.1
pycord-multicog==1.0.2
aiofiles==22.1.0
pinecone-client==2.1.0
sqlitedict==2.1.0

@ -14,3 +14,5 @@ DALLE_ROLES = "Admin,Openai,Dalle,gpt"
GPT_ROLES = "openai,gpt"
WELCOME_MESSAGE = "Hi There! Welcome to our Discord server. We hope you'll enjoy our server and we look forward to engaging with you!" # This is a fallback message if gpt3 fails to generate a welcome message.
USER_INPUT_API_KEYS="False" # If True, users must use their own API keys for OpenAI. If False, the bot will use the API key in the .env file.