Kaveen Kumarasinghe 2 years ago
commit e802be26f2

@ -0,0 +1,96 @@
name: Docker

# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.

on:
  schedule:
    - cron: '28 2 * * *'
  push:
    branches: [ "main" ]
    # Publish semver tags as releases.
    tags: [ 'v*.*.*' ]
  pull_request:
    branches: [ "main" ]

env:
  # Use docker.io for Docker Hub if empty
  REGISTRY: ghcr.io
  # github.repository as <account>/<repo>
  IMAGE_NAME: ${{ github.repository }}

jobs:
  build:
    runs-on: ubuntu-latest
    permissions:
      contents: read
      packages: write
      # This is used to complete the identity challenge
      # with sigstore/fulcio when running outside of PRs.
      id-token: write

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      # Install the cosign tool except on PR
      # https://github.com/sigstore/cosign-installer
      - name: Install cosign
        if: github.event_name != 'pull_request'
        uses: sigstore/cosign-installer@f3c664df7af409cb4873aa5068053ba9d61a57b6 #v2.6.0
        with:
          cosign-release: 'v1.13.1'

      # Workaround: https://github.com/docker/build-push-action/issues/461
      - name: Setup Docker buildx
        uses: docker/setup-buildx-action@79abd3f86f79a9d68a23c75a09a9a85889262adf

      # Login against a Docker registry except on PR
      # https://github.com/docker/login-action
      - name: Log into registry ${{ env.REGISTRY }}
        if: github.event_name != 'pull_request'
        uses: docker/login-action@28218f9b04b4f3f62068d7b6ce6ca5b26e35336c
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      # Extract metadata (tags, labels) for Docker
      # https://github.com/docker/metadata-action
      - name: Extract Docker metadata
        id: meta
        uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

      # Build and push Docker image with Buildx (don't push on PR)
      # https://github.com/docker/build-push-action
      - name: Build and push Docker image
        id: build-and-push
        uses: docker/build-push-action@ac9327eae2b366085ac7f6a2d02df8aa8ead720a
        with:
          context: .
          push: ${{ github.event_name != 'pull_request' }}
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          cache-from: type=gha
          cache-to: type=gha,mode=max

      # Sign the resulting Docker image digest except on PRs.
      # This will only write to the public Rekor transparency log when the Docker
      # repository is public to avoid leaking data. If you would like to publish
      # transparency data even for private images, pass --force to cosign below.
      # https://github.com/sigstore/cosign
      - name: Sign the published Docker image
        if: ${{ github.event_name != 'pull_request' }}
        env:
          COSIGN_EXPERIMENTAL: "true"
        # This step uses the identity token to provision an ephemeral certificate
        # against the sigstore community Fulcio instance.
        run: echo "${{ steps.meta.outputs.tags }}" | xargs -I {} cosign sign {}@${{ steps.build-and-push.outputs.digest }}

@ -1,5 +1,7 @@
ARG PY_VERSION=3.9
# Build container
FROM python:${PY_VERSION} as base
FROM base as builder
@ -17,6 +19,7 @@ RUN apt-get update
RUN curl https://sh.rustup.rs -sSf | bash -s -- -y
ENV PATH="/root/.cargo/bin:${PATH}"
RUN mkdir /install /src
WORKDIR /install
RUN pip install --target="/install" --upgrade pip setuptools wheel
@ -40,6 +43,21 @@ RUN pip install --target="/install" /src
# Copy minimal to main image (to keep as small as possible)
FROM python:${PY_VERSION}-slim
ENV OPENAI_TOKEN=""
ENV DISCORD_TOKEN=""
ENV PINECONE_TOKEN=""
ENV DEBUG_GUILD=""
ENV DEBUG_CHANNEL=""
ENV ALLOWED_GUILDS=""
ENV ADMIN_ROLES=""
ENV DALLE_ROLES=""
ENV GPT_ROLES=""
ENV WELCOME_MESSAGE=""
ENV USER_INPUT_API_KEYS=""
ENV MODERATIONS_ALERT_CHANNEL=""
ENV USER_KEY_DB_PATH=""
ARG PY_VERSION
COPY . .
COPY --from=builder /install /usr/local/lib/python${PY_VERSION}/site-packages

@ -415,7 +415,7 @@ As a last resort, you can try to run the bot using python in a basic way, with s
cd (the folder where the files for GPT3Discord are located/cloned)
python3.9 gpt3discord.py
```
<!--
### Docker Installation
We now have a `Dockerfile` in the repository. This will build / install all dependencies and put a `gpt3discord` binary (main.py) into path.
@ -435,7 +435,47 @@ To build:
- You can also mount extra volumes and set `DATA_DIR` and `SHARE_DIR` in the env file to keep persistent data
- `env_file` should be replaced with where you have your .env file stored on your machine
This can also be run via screen/tmux or detached like a daemon.
This can also be run via screen/tmux or detached like a daemon. -->
### Docker and Docker Compose
To run the bot with Docker, use the following command:
```
docker run -d --name gpt3discord -v /containers/gpt3discord:/data -v /containers/gpt3discord/share:/data/share --env-file /path/to/.env ghcr.io/kav-k/gpt3discord:main
```
Make sure that the host directories mounted to `/data` and `/data/share` exist and that the `--env-file` path is correct.
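For example, a minimal setup might look like the sketch below. The host paths match the `docker run` command above, and the variable names mirror the ones defined in the Dockerfile; all values are placeholders that you need to replace with your own tokens and IDs.
```
# Create the host directories that are mounted into the container
mkdir -p /containers/gpt3discord/share

# A minimal .env file (placeholder values, not a complete list of options)
cat > /path/to/.env <<'EOF'
OPENAI_TOKEN=<openai_api_token>
DISCORD_TOKEN=<discord_bot_token>
ALLOWED_GUILDS=<guild_id>
ADMIN_ROLES=Admin,Owner
DALLE_ROLES=Admin,Dalle
GPT_ROLES=Admin,gpt
EOF
```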
#### Docker Compose
To use Docker Compose, you need to have Docker and Docker Compose installed on your system. You can download and install them from the following links:
- [Docker](https://docs.docker.com/get-docker/)
- [Docker Compose](https://docs.docker.com/compose/install/)

[You will need to install Docker Desktop if you are trying to run this on a desktop machine such as Windows or Mac.]
To start the gpt3discord container with Docker Compose, follow these steps:
1. Open a terminal or command prompt and navigate to the directory that contains the docker-compose.yml file.
2. Open the docker-compose.yml file and replace the environment variable values with your actual tokens and IDs.
3. Run the following command to start the container in detached mode:
```
docker-compose up -d
```
This will start the container and use the settings in the docker-compose.yml file. The -d option tells Docker Compose to run the container in the background (detached mode).
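Because the container runs in the background, it will not print output to your terminal. One way to check that the bot started correctly is to follow the container logs, for example:
```
docker-compose logs -f gpt3discord
```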
To stop the gpt3discord container, run the following command:
```
docker-compose down
```
This will stop the container and remove the containers and networks that were created from the docker-compose.yml file.
That's it! With these simple steps, you can start and stop the gpt3discord container using Docker Compose.
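If you later want to move to a newer image, one common approach is to pull the latest tag and recreate the container, for example:
```
docker-compose pull
docker-compose up -d
```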
### Non-Server, Non-Docker installation (Windows included)

@ -14,6 +14,7 @@ ALLOWED_GUILDS = EnvService.get_allowed_guilds()
USER_INPUT_API_KEYS = EnvService.get_user_input_api_keys()
USER_KEY_DB = EnvService.get_api_db()
class RedoSearchUser:
def __init__(self, ctx, query, search_scope, nodes):
self.ctx = ctx
@ -21,6 +22,7 @@ class RedoSearchUser:
self.search_scope = search_scope
self.nodes = nodes
class SearchService(discord.Cog, name="SearchService"):
"""Cog containing translation commands and retrieval of translation services"""
@ -141,5 +143,13 @@ class RedoButton(discord.ui.View):
@discord.ui.button(label="Redo", style=discord.ButtonStyle.danger)
async def redo(self, button: discord.ui.Button, interaction: discord.Interaction):
"""Redo the translation"""
await interaction.response.send_message("Redoing search...", ephemeral=True, delete_after=15)
await self.search_cog.search_command(self.search_cog.redo_users[self.ctx.user.id].ctx, self.search_cog.redo_users[self.ctx.user.id].query, self.search_cog.redo_users[self.ctx.user.id].search_scope, self.search_cog.redo_users[self.ctx.user.id].nodes, redo=True)
await interaction.response.send_message(
"Redoing search...", ephemeral=True, delete_after=15
)
await self.search_cog.search_command(
self.search_cog.redo_users[self.ctx.user.id].ctx,
self.search_cog.redo_users[self.ctx.user.id].query,
self.search_cog.redo_users[self.ctx.user.id].search_scope,
self.search_cog.redo_users[self.ctx.user.id].nodes,
redo=True,
)

@ -0,0 +1,22 @@
version: '3.8'

services:
  gpt3discord:
    image: ghcr.io/kav-k/gpt3discord:main
    container_name: gpt3discord
    environment:
      OPENAI_TOKEN: "<openai_api_token>"
      DISCORD_TOKEN: "<discord_bot_token>"
      DEBUG_GUILD: "974519864045756446"
      DEBUG_CHANNEL: "977697652147892304"
      ALLOWED_GUILDS: "971268468148166697,971268468148166697"
      ADMIN_ROLES: "Admin,Owner"
      DALLE_ROLES: "Admin,Openai,Dalle,gpt"
      GPT_ROLES: "openai,gpt"
      WELCOME_MESSAGE: "Hi There! Welcome to our Discord server. We hope you'll enjoy our server and we look forward to engaging with you!"
      USER_INPUT_API_KEYS: "False"
      MODERATIONS_ALERT_CHANNEL: "977697652147892304"
      USER_KEY_DB_PATH: "user_key_db.sqlite"
    volumes:
      - /containers/gpt3discord:/data
      - /containers/gpt3discord/share:/data/share
    restart: always

@ -695,7 +695,7 @@ class ComposeModal(discord.ui.View):
# Map everything into the short to long cache
for index in self.indexes:
if len(index) > 94:
index_name = index[:94] + "-" + str(random.randint(0000,9999))
index_name = index[:94] + "-" + str(random.randint(0000, 9999))
SHORT_TO_LONG_CACHE[index_name] = index
else:
SHORT_TO_LONG_CACHE[index[:99]] = index
@ -711,7 +711,9 @@ class ComposeModal(discord.ui.View):
self.index_select = discord.ui.Select(
placeholder="Select index(es) to compose",
options=[
discord.SelectOption(label=LONG_TO_SHORT_CACHE[index], value=LONG_TO_SHORT_CACHE[index])
discord.SelectOption(
label=LONG_TO_SHORT_CACHE[index], value=LONG_TO_SHORT_CACHE[index]
)
for index in self.indexes
][0:25],
max_values=len(self.indexes) if len(self.indexes) < 25 else 25,
@ -728,7 +730,10 @@ class ComposeModal(discord.ui.View):
discord.ui.Select(
placeholder="Select index(es) to compose",
options=[
discord.SelectOption(label=LONG_TO_SHORT_CACHE[index], value=LONG_TO_SHORT_CACHE[index])
discord.SelectOption(
label=LONG_TO_SHORT_CACHE[index],
value=LONG_TO_SHORT_CACHE[index],
)
for index in self.indexes
][i : i + 25],
max_values=len(self.indexes[i : i + 25]),

@ -160,7 +160,13 @@ class Search:
pass
async def search(
self, ctx: discord.ApplicationContext, query, user_api_key, search_scope, nodes, redo=None
self,
ctx: discord.ApplicationContext,
query,
user_api_key,
search_scope,
nodes,
redo=None,
):
DEFAULT_SEARCH_NODES = 1
if not user_api_key:
@ -169,9 +175,11 @@ class Search:
os.environ["OPENAI_API_KEY"] = user_api_key
if ctx:
in_progress_message = await ctx.respond(
embed=self.build_search_started_embed()
) if not redo else await ctx.channel.send(embed=self.build_search_started_embed())
in_progress_message = (
await ctx.respond(embed=self.build_search_started_embed())
if not redo
else await ctx.channel.send(embed=self.build_search_started_embed())
)
llm_predictor = LLMPredictor(llm=OpenAI(model_name="text-davinci-003"))
try:
@ -181,7 +189,8 @@ class Search:
# Refine a query to send to google custom search API
query_refined = llm_predictor_presearch.generate(
prompts=[f"You are to be given a search query for google. Change the query such that putting it into the Google Custom Search API will return the most relevant websites to assist in answering the original query. If the original query is asking about something that is relevant to the current day, insert the current_date into the refined query. If the user is asking about something that may be relevant to the current month, insert the current year and month into the refined query, if the query is asking for something relevant to the current year, insert the current year into the refined query. There is no need to insert a day, month, or year for queries that purely ask about facts and about things that don't have much time-relevance. The current_date is {str(datetime.now().date())}. Do not insert the current_date if not neccessary. Respond with only the refined query for the original query. Dont use punctuation or quotation marks.\n\nExamples:\n---\nOriginal Query: Who is Harald Baldr?\nRefined Query: Harald Baldr biography\n---\nOriginal Query: What happened today with the Ohio train derailment?\nRefined Query: Ohio train derailment details {str(datetime.now().date())}\n---\nOriginal Query: Is copper in drinking water bad for you?\nRefined Query: copper in drinking water adverse effects\n---\nOriginal Query: What's the current time in Mississauga?\nRefined Query: current time Mississauga\nNow, refine the user input query.\nOriginal Query: {query}\nRefined Query:"
prompts=[
f"You are to be given a search query for google. Change the query such that putting it into the Google Custom Search API will return the most relevant websites to assist in answering the original query. If the original query is asking about something that is relevant to the current day, insert the current_date into the refined query. If the user is asking about something that may be relevant to the current month, insert the current year and month into the refined query, if the query is asking for something relevant to the current year, insert the current year into the refined query. There is no need to insert a day, month, or year for queries that purely ask about facts and about things that don't have much time-relevance. The current_date is {str(datetime.now().date())}. Do not insert the current_date if not neccessary. Respond with only the refined query for the original query. Dont use punctuation or quotation marks.\n\nExamples:\n---\nOriginal Query: Who is Harald Baldr?\nRefined Query: Harald Baldr biography\n---\nOriginal Query: What happened today with the Ohio train derailment?\nRefined Query: Ohio train derailment details {str(datetime.now().date())}\n---\nOriginal Query: Is copper in drinking water bad for you?\nRefined Query: copper in drinking water adverse effects\n---\nOriginal Query: What's the current time in Mississauga?\nRefined Query: current time Mississauga\nNow, refine the user input query.\nOriginal Query: {query}\nRefined Query:"
]
)
query_refined_text = query_refined.generations[0][0].text
