Merge pull request #53 from justinmcp/feat/docker-env

Changes to environment loading: the Docker image now installs under `/opt/gpt3discord`, the environment file is read from `etc/environment` (or a local `.env`), and shared data files are resolved through the new `EnvService` helpers.
Kaveen Kumarasinghe authored 2 years ago · committed by GitHub
commit 4b191ac9d8

@@ -23,5 +23,7 @@ RUN pip install --target="/install" /src
 FROM python:${PY_VERSION}-slim
 ARG PY_VERSION
 COPY --from=builder /install /usr/local/lib/python${PY_VERSION}/site-packages
-COPY gpt3discord.py /bin/gpt3discord
-CMD ["python3", "/bin/gpt3discord"]
+RUN mkdir -p /opt/gpt3discord/etc
+COPY gpt3discord.py /opt/gpt3discord/bin/
+COPY image_optimizer_pretext.txt conversation_starter_pretext.txt conversation_starter_pretext_minimal.txt /opt/gpt3discord/share/
+CMD ["python3", "/opt/gpt3discord/bin/gpt3discord.py"]

@@ -302,12 +302,15 @@ To build:
 - Clone repository and build *(hopefully eventually we'll add CI to automatically build + push to docker hub)*
   - `docker build -t gpt3discord .`
   - *From repository root or supply path to repository*
-- Make a env file to bind mount to /bin/.env
+- Make a .env file to bind mount to `/opt/gpt3discord/etc/environment`
 - Optional: Make a data directory + bind mount it
-  - Add `DATA_DIR=/data` to env file
+  - Add `DATA_DIR=/data` to the env file -> `usage.txt` is created here
+  - Add `SHARE_DIR=/data/share` to the env file -> this is where the conversation starters, the optimizer pretext and the `openers` folder are loaded from
+  - If `SHARE_DIR` is not included it'll load from the files added during the docker image build
 - Run via docker:
-  - `docker run [-d] --name gpt3discord -v env_file:/bin/.env [-v /containers/gpt3discord:/data] gpt3discord`
-  - You can also mount a second volume and set `DATA_DIR` in the env file to keep persistent data
+  - `docker run [-d] --name gpt3discord -v env_file:/opt/gpt3discord/etc/environment [-v /containers/gpt3discord:/data] [-v /containers/gpt3discord/share:/data/share] gpt3discord`
+  - You can also mount extra volumes and set `DATA_DIR` and `SHARE_DIR` in the env file to keep persistent data
   - `env_file` should be replaced with the path to your .env file on the host machine

 This can also be run via screen/tmux or detached like a daemon.
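A concrete invocation, assuming the env file lives at `/home/user/gpt3discord.env` and persistent data should live under `/containers/gpt3discord` (both paths are placeholders): `docker run -d --name gpt3discord -v /home/user/gpt3discord.env:/opt/gpt3discord/etc/environment -v /containers/gpt3discord:/data -v /containers/gpt3discord/share:/data/share gpt3discord`, with `DATA_DIR=/data` and `SHARE_DIR=/data/share` set in the env file.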

@@ -71,7 +71,7 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
         self.pinecone_service = pinecone_service
         try:
-            conversation_file_path = data_path / "conversation_starter_pretext.txt"
+            conversation_file_path = EnvService.find_shared_file("conversation_starter_pretext.txt")
             # Attempt to read a conversation starter text string from the file.
             with conversation_file_path.open("r") as f:
                 self.CONVERSATION_STARTER_TEXT = f.read()
@@ -80,9 +80,7 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
             )
             assert self.CONVERSATION_STARTER_TEXT is not None
-            conversation_file_path_minimal = (
-                data_path / "conversation_starter_pretext_minimal.txt"
-            )
+            conversation_file_path_minimal = EnvService.find_shared_file("conversation_starter_pretext_minimal.txt")
             with conversation_file_path_minimal.open("r") as f:
                 self.CONVERSATION_STARTER_TEXT_MINIMAL = f.read()
             print(
@@ -1136,7 +1134,7 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
         ):  # only load in files if it's included in the command, if not pass on as normal
             if opener_file.endswith(".txt"):
                 # Load the file and read it into opener
-                opener_file = f"openers{separator}{opener_file}"
+                opener_file = EnvService.find_shared_file(f"openers{separator}{opener_file}")
                 opener_file = await self.load_file(opener_file, ctx)
                 if (
                     not opener
@@ -1204,7 +1202,8 @@ class GPT3ComCon(discord.Cog, name="GPT3ComCon"):
                 thread_message,
             )
             self.awaiting_responses.remove(user_id_normalized)
-            self.awaiting_thread_responses.remove(thread.id)
+            if thread.id in self.awaiting_thread_responses:
+                self.awaiting_thread_responses.remove(thread.id)
             self.conversation_thread_owners[user_id_normalized] = thread.id
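The unconditional `remove()` could raise `ValueError` when the thread id had already been cleared by another code path; the membership check makes the cleanup safe to repeat. A minimal standalone illustration (the ids are made up):

```python
# Hypothetical thread ids for illustration only.
awaiting_thread_responses = [1001, 1002]

thread_id = 1003  # already removed, or never tracked

# list.remove() raises ValueError for a missing element, so guard first.
if thread_id in awaiting_thread_responses:
    awaiting_thread_responses.remove(thread_id)
```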

@@ -34,9 +34,7 @@ class ImgPromptOptimizer(discord.Cog, name="ImgPromptOptimizer"):
         self.deletion_queue = deletion_queue
         try:
-            image_pretext_path = (
-                self.converser_cog.data_path / "image_optimizer_pretext.txt"
-            )
+            image_pretext_path = EnvService.find_shared_file("image_optimizer_pretext.txt")
             # Try to read the image optimizer pretext from
             # the file system
             with image_pretext_path.open("r") as file:

@@ -5,7 +5,6 @@ from pathlib import Path
 import discord
 import pinecone
-from dotenv import load_dotenv
 from pycord.multicog import apply_multicog
 import os
@@ -16,9 +15,6 @@ if sys.platform == "win32":
 else:
     separator = "/"
-print("The environment file is located at " + os.getcwd() + separator + ".env")
-load_dotenv(dotenv_path=os.getcwd() + separator + ".env")
 from cogs.draw_image_generation import DrawDallEService
 from cogs.gpt_3_commands_and_converser import GPT3ComCon
 from cogs.image_prompt_optimizer import ImgPromptOptimizer
@@ -26,6 +22,7 @@ from models.deletion_service_model import Deletion
 from models.message_model import Message
 from models.openai_model import Model
 from models.usage_service_model import UsageService
+from models.env_service_model import EnvService

 __version__ = "4.0.1"
@@ -86,12 +83,12 @@ async def on_application_command_error(
 async def main():
-    data_path = Path(os.environ.get("DATA_DIR", os.getcwd()))
+    data_path = EnvService.environment_path_with_fallback("DATA_DIR")
     debug_guild = int(os.getenv("DEBUG_GUILD"))
     debug_channel = int(os.getenv("DEBUG_CHANNEL"))
     if not data_path.exists():
-        raise OSError(f"{data_path} does not exist ... create it?")
+        raise OSError(f"Data path: {data_path} does not exist ... create it?")
     # Load the main GPT3 Bot service
     bot.add_cog(

@@ -5,6 +5,7 @@ import re
 import discord
 from models.usage_service_model import UsageService
 from models.openai_model import Model
+from models.env_service_model import EnvService

 usage_service = UsageService(Path(os.environ.get("DATA_DIR", os.getcwd())))
 model = Model(usage_service)
@@ -51,7 +52,7 @@ class File_autocompleter:
         try:
             return [
                 file
-                for file in os.listdir("openers")
+                for file in os.listdir(EnvService.find_shared_file("openers"))
                 if file.startswith(ctx.value.lower())
             ][
                 :25
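`find_shared_file` returns a resolved `pathlib.Path`, which `os.listdir` accepts as a path-like object, so the autocompleter now lists openers from `SHARE_DIR` (when set) or from an `openers` folder found relative to the app root, instead of relying on the current working directory alone.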

@@ -1,7 +1,24 @@
+import os
+import sys
 from pathlib import Path
 from dotenv import load_dotenv
-load_dotenv()
-import os
+
+
+def app_root_path():
+    app_path = Path(sys.argv[0]).resolve()
+    try:
+        if app_path.parent.name == "bin":  # Installed in a unixy hierarchy
+            return app_path.parents[1]
+    except IndexError:
+        pass
+    return app_path.parent
+
+
+# None will let direnv do its thing
+env_paths = [Path(".env"), app_root_path() / "etc/environment", None]
+
+for env_path in env_paths:
+    print("Loading environment from " + str(env_path))
+    load_dotenv(dotenv_path=env_path)
+
+
 class EnvService:
@@ -9,6 +26,33 @@ class EnvService:
     def __init__(self):
         self.env = {}

+    @staticmethod
+    def environment_path_with_fallback(env_name, relative_fallback=None):
+        dir = os.getenv(env_name)
+        if dir != None:
+            return Path(dir).resolve()
+
+        if relative_fallback:
+            app_relative = (app_root_path() / relative_fallback).resolve()
+            if app_relative.exists():
+                return app_relative
+
+        return Path.cwd()
+
+    @staticmethod
+    def find_shared_file(file_name):
+        share_file_paths = []
+        share_dir = os.getenv("SHARE_DIR")
+        if share_dir != None:
+            share_file_paths.append(Path(share_dir) / file_name)
+
+        share_file_paths.extend(
+            [
+                app_root_path() / "share" / file_name,
+                app_root_path() / file_name,
+                Path(file_name),
+            ]
+        )
+
+        for share_file_path in share_file_paths:
+            if share_file_path.exists():
+                return share_file_path.resolve()
+
+        raise ValueError(f"Unable to find shared data file {file_name}")
+
     @staticmethod
     def get_allowed_guilds():
         # ALLOWED_GUILDS is a comma separated list of guild ids
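Taken together, the new helpers give the cogs one lookup path for data directories and shared files. A rough usage sketch; the environment values are examples only:

```python
import os

from models.env_service_model import EnvService

# Example values only; either variable may be left unset.
os.environ.setdefault("DATA_DIR", "/data")
os.environ.setdefault("SHARE_DIR", "/data/share")

# Returns Path("/data") because DATA_DIR is set; with DATA_DIR unset it falls
# back to an optional path relative to the app root, then to the current directory.
data_path = EnvService.environment_path_with_fallback("DATA_DIR")
print(data_path)

# Checked in order: $SHARE_DIR/<name>, <app root>/share/<name>, <app root>/<name>,
# then <name> relative to the working directory; raises ValueError if none exist.
try:
    pretext = EnvService.find_shared_file("conversation_starter_pretext.txt")
    print(pretext)
except ValueError:
    print("conversation_starter_pretext.txt not found in any shared location")
```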
