diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 00000000..a88db12e
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,10 @@
+FROM python:3.11-slim
+
+ENV PIP_NO_CACHE_DIR=true
+WORKDIR /tmp
+COPY requirements.txt /tmp/requirements.txt
+RUN pip install -r requirements.txt
+
+WORKDIR /app
+COPY . /app
+ENTRYPOINT ["./babyagi.py"]
diff --git a/README.md b/README.md
index e37b359a..8b6149b7 100644
--- a/README.md
+++ b/README.md
@@ -59,6 +59,15 @@ To use the script, you will need to follow these steps:
 
 All optional values above can also be specified on the command line.
 
+# Running Inside a Docker Container
+As a prerequisite, you will need Docker and Docker Compose installed. Docker Desktop is the simplest option: https://www.docker.com/products/docker-desktop/
+
+To run the system inside a Docker container, set up your .env file as per the steps above, then run the following:
+
+```
+docker-compose up
+```
+
 # Supported Models
 
 This script works with all OpenAI models, as well as Llama through Llama.cpp. Default model is **gpt-3.5-turbo**. To use a different model, specify it through OPENAI_API_MODEL or use the command line.
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 00000000..e2bad9a5
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,9 @@
+version: "3.9"
+
+services:
+  babyagi:
+    build: ./
+    volumes:
+      - "./:/app"
+    stdin_open: true
+    tty: true
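
For reference, the Dockerfile above can also be used on its own, without Compose. The commands below are a minimal sketch and not part of the patch: the image tag `babyagi` is an arbitrary, hypothetical name, and they assume a populated `.env` file in the repository root (passed in with `--env-file`; `-it` mirrors the `stdin_open`/`tty` settings in the compose service).

```
# Build the image from the Dockerfile in the repository root
# (the tag "babyagi" is an arbitrary, hypothetical name)
docker build -t babyagi .

# Run it interactively, loading variables from .env and mounting the
# working directory at /app, mirroring the compose file's volume mount
docker run --rm -it --env-file .env -v "$PWD:/app" babyagi
```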
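
Because the image installs requirements.txt at build time, dependency changes are not picked up by the bind mount alone; the image has to be rebuilt. The following is a sketch of the usual Compose workflow for that case, using standard docker-compose flags rather than anything introduced by this patch.

```
# Rebuild the image and start the service in one step
docker-compose up --build

# Or rebuild explicitly, then start a one-off interactive run
docker-compose build
docker-compose run --rm babyagi
```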