diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 0000000..a16fc47
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,24 @@
+ARG PY_VERSION=3.9
+
+FROM python:${PY_VERSION} as base
+
+FROM base as builder
+ARG PY_VERSION
+
+RUN mkdir /install
+WORKDIR /install
+RUN pip install --target="/install" --upgrade pip setuptools wheel
+ADD requirements.txt /install
+RUN pip install --target="/install" -r requirements.txt
+
+
+FROM python:${PY_VERSION}-slim
+
+ARG PY_VERSION
+
+COPY --from=builder /install /usr/local/lib/python${PY_VERSION}/site-packages
+COPY cogs /usr/local/lib/python${PY_VERSION}/site-packages/cogs
+COPY models /usr/local/lib/python${PY_VERSION}/site-packages/models
+COPY main.py /bin/gpt3discord
+
+CMD ["python3", "/bin/gpt3discord"]
diff --git a/README.md b/README.md
index b83990c..453f70b 100644
--- a/README.md
+++ b/README.md
@@ -122,12 +122,29 @@ After login, we need to install the various dependencies that the bot needs. To
 21 screen -r
 ```
 
+## Docker Installation
+
+We now have a `Dockerfile` in the repository. It builds and installs all dependencies and puts a `gpt3discord` binary (main.py) on the PATH.
+To build:
+
+- [Install docker](https://docs.docker.com/get-docker/)
+- Clone the repository and build *(hopefully we'll eventually add CI to automatically build and push to Docker Hub)*
+  - `docker build -t gpt3discord .`
+  - *From the repository root, or supply a path to the repository*
+- Make an env file to bind mount to /bin/.env
+- Optional: Make a data directory and bind mount it
+  - Add `DATA_DIR=/data` to the env file
+- Run via Docker:
+  - `docker run [-d] --name gpt3discord -v env_file:/bin/.env [-v /containers/gpt3discord:/data] gpt3discord`
+  - You can also mount a second volume and set `DATA_DIR` in the env file to keep data persistent
+
+This can also be run via screen/tmux, or detached like a daemon.
 
 ## Bot on discord:
 
 - Create a new Bot on Discord Developer Portal:
   - Applications -> New Application
-- Generate Toker for the app (discord_bot_token)
+- Generate Token for the app (discord_bot_token)
   - Select App (Bot) -> Bot -> Reset Token
 - Toogle PRESENCE INTENT:
   - Select App (Bot) -> Bot -> PRESENCE INTENT, SERVER MEMBERS INTENT, MESSAGES INTENT, (basically turn on all intents)
diff --git a/cogs/gpt_3_commands_and_converser.py b/cogs/gpt_3_commands_and_converser.py
index c0f4977..5602955 100644
--- a/cogs/gpt_3_commands_and_converser.py
+++ b/cogs/gpt_3_commands_and_converser.py
@@ -7,6 +7,7 @@ import re
 import threading
 import time
 import traceback
+from pathlib import Path
 
 import discord
 from discord.ext import commands
@@ -32,7 +33,9 @@ class GPT3ComCon(commands.Cog, name="GPT3ComCon"):
         deletion_queue,
         DEBUG_GUILD,
         DEBUG_CHANNEL,
+        data_path: Path,
     ):
+        self.data_path = data_path
         self.debug_channel = None
         self.bot = bot
         self._last_member_ = None
@@ -58,13 +61,14 @@ class GPT3ComCon(commands.Cog, name="GPT3ComCon"):
         self.awaiting_responses = []
 
         try:
+            conversation_file_path = data_path / "conversation_starter_pretext.txt"
             # Attempt to read a conversation starter text string from the file.
-            with open("conversation_starter_pretext.txt", "r") as f:
+            with conversation_file_path.open("r") as f:
                 self.CONVERSATION_STARTER_TEXT = f.read()
-            print("Conversation starter text loaded from file.")
+            print(f"Conversation starter text loaded from {conversation_file_path}.")
             assert self.CONVERSATION_STARTER_TEXT is not None
-        except:
+        except Exception:
             self.CONVERSATION_STARTER_TEXT = (
                 "You are an artificial intelligence that is able to do anything, and answer any question,"
                 "I want you to be my personal assistant and help me with some tasks. "
diff --git a/cogs/image_prompt_optimizer.py b/cogs/image_prompt_optimizer.py
index 74ebc02..428b8ba 100644
--- a/cogs/image_prompt_optimizer.py
+++ b/cogs/image_prompt_optimizer.py
@@ -34,11 +34,12 @@ class ImgPromptOptimizer(commands.Cog, name="ImgPromptOptimizer"):
         self.deletion_queue = deletion_queue
 
         try:
+            image_pretext_path = self.converser_cog.data_path / "image_optimizer_pretext.txt"
             # Try to read the image optimizer pretext from
             # the file system
-            with open("image_optimizer_pretext.txt", "r") as file:
+            with image_pretext_path.open("r") as file:
                 self.OPTIMIZER_PRETEXT = file.read()
-            print("Loaded image optimizer pretext from file system")
+            print(f"Loaded image optimizer pretext from {image_pretext_path}")
         except:
             traceback.print_exc()
             self.OPTIMIZER_PRETEXT = self._OPTIMIZER_PRETEXT
diff --git a/main.py b/main.py
index e53bbaf..4e26331 100644
--- a/main.py
+++ b/main.py
@@ -1,6 +1,7 @@
 import asyncio
 import sys
 import traceback
+from pathlib import Path
 
 import discord
 from discord.ext import commands
@@ -29,7 +30,7 @@ asyncio.ensure_future(Deletion.process_deletion_queue(deletion_queue, 1, 1))
 Settings for the bot
 """
 bot = commands.Bot(intents=discord.Intents.all(), command_prefix="!")
-usage_service = UsageService()
+usage_service = UsageService(Path(os.environ.get("DATA_DIR", os.getcwd())))
 model = Model(usage_service)
 
 
@@ -44,9 +45,13 @@ async def on_ready(): # I can make self optional by
 
 
 async def main():
+    data_path = Path(os.environ.get("DATA_DIR", os.getcwd()))
     debug_guild = int(os.getenv("DEBUG_GUILD"))
     debug_channel = int(os.getenv("DEBUG_CHANNEL"))
 
+    if not data_path.exists():
+        raise OSError(f"{data_path} does not exist ... create it?")
+
     # Load the main GPT3 Bot service
     bot.add_cog(
         GPT3ComCon(
@@ -57,6 +62,7 @@ async def main():
             deletion_queue,
             debug_guild,
             debug_channel,
+            data_path,
         )
     )
 
diff --git a/models/usage_service_model.py b/models/usage_service_model.py
index 8deac09..e34d733 100644
--- a/models/usage_service_model.py
+++ b/models/usage_service_model.py
@@ -1,13 +1,15 @@
 import os
+from pathlib import Path
 
 from transformers import GPT2TokenizerFast
 
 
 class UsageService:
-    def __init__(self):
+    def __init__(self, data_dir: Path):
+        self.usage_file_path = data_dir / "usage.txt"
         # If the usage.txt file doesn't currently exist in the directory, create it and write 0.00 to it.
-        if not os.path.exists("usage.txt"):
-            with open("usage.txt", "w") as f:
+        if not self.usage_file_path.exists():
+            with self.usage_file_path.open("w") as f:
                 f.write("0.00")
                 f.close()
         self.tokenizer = GPT2TokenizerFast.from_pretrained("gpt2")
@@ -18,17 +20,17 @@ class UsageService:
         print("This request cost " + str(price) + " credits")
         usage = self.get_usage()
         print("The current usage is " + str(usage) + " credits")
-        with open("usage.txt", "w") as f:
+        with self.usage_file_path.open("w") as f:
             f.write(str(usage + float(price)))
             f.close()
 
     def set_usage(self, usage):
-        with open("usage.txt", "w") as f:
+        with self.usage_file_path.open("w") as f:
             f.write(str(usage))
             f.close()
 
     def get_usage(self):
-        with open("usage.txt", "r") as f:
+        with self.usage_file_path.open("r") as f:
             usage = float(f.read().strip())
             f.close()
         return usage
@@ -53,6 +55,6 @@ class UsageService:
 
         usage = self.get_usage()
 
-        with open("usage.txt", "w") as f:
+        with self.usage_file_path.open("w") as f:
             f.write(str(usage + float(price)))
             f.close()
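For reference, a minimal sketch (not part of the patch) of how the patched code resolves its data directory: both `main.py` and `UsageService` read the `DATA_DIR` environment variable and fall back to the current working directory, and the file names below are the ones the patch derives from it. The local variable names in the sketch are illustrative only.

```python
import os
from pathlib import Path

# Resolve the data directory the way the patched main.py and UsageService do:
# use DATA_DIR when set, otherwise fall back to the current working directory.
data_path = Path(os.environ.get("DATA_DIR", os.getcwd()))

if not data_path.exists():
    raise OSError(f"{data_path} does not exist ... create it?")

# Files the bot reads or writes under that directory, per the patch above.
usage_file = data_path / "usage.txt"                                    # UsageService
conversation_pretext = data_path / "conversation_starter_pretext.txt"   # GPT3ComCon
image_pretext = data_path / "image_optimizer_pretext.txt"               # ImgPromptOptimizer

print(usage_file, conversation_pretext, image_pretext)
```

With the Docker setup described in the README section above, setting `DATA_DIR=/data` in the env file and bind mounting a host directory at `/data` keeps these files outside the container.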