From 0d93a77cb81740d0f5e1b20cc55871dead4e4204 Mon Sep 17 00:00:00 2001
From: einfachtorben
Date: Sun, 22 Sep 2024 10:45:32 +0200
Subject: [PATCH 1/3] add amd-gpu capability in docker compose

---
 docker-compose.yml | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/docker-compose.yml b/docker-compose.yml
index c17d929..36e837b 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -114,6 +114,14 @@ services:
               count: 1
               capabilities: [gpu]

+  ollama-gpu-amd:
+    profiles: ["gpu-amd"]
+    <<: *service-ollama
+    image: ollama/ollama:rocm
+    devices:
+      - "/dev/kfd"
+      - "/dev/dri"
+
   ollama-pull-llama-cpu:
     profiles: ["cpu"]
     <<: *init-ollama
     depends_on:
       - ollama-cpu
@@ -125,3 +133,10 @@ services:
     <<: *init-ollama
     depends_on:
       - ollama-gpu
+
+  ollama-pull-llama-gpu-amd:
+    profiles: [gpu-amd]
+    <<: *init-ollama
+    image: ollama/ollama:rocm
+    depends_on:
+      - ollama-gpu-amd

From cb67ae615dcaa13605f6ff54f34a2821a27e6ed1 Mon Sep 17 00:00:00 2001
From: einfachtorben
Date: Sun, 22 Sep 2024 10:50:39 +0200
Subject: [PATCH 2/3] changed readme.md to reflect changes in the docker-compose.yml file

---
 README.md | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/README.md b/README.md
index 894448b..b2e6d5e 100644
--- a/README.md
+++ b/README.md
@@ -74,6 +74,14 @@ docker compose up

 After you followed the quick start set-up below, change the Ollama credentials by using `http://host.docker.internal:11434/` as the host.

+### For AMD GPU users
+
+```
+git clone https://github.com/n8n-io/self-hosted-ai-starter-kit.git
+cd self-hosted-ai-starter-kit
+docker compose --profile gpu-amd up
+```
+
 ### For everyone else

 ```

From f2168bbb9083901b6de7ec8dac65d21cb8d52036 Mon Sep 17 00:00:00 2001
From: einfachtorben
Date: Sun, 22 Sep 2024 10:57:23 +0200
Subject: [PATCH 3/3] changed readme to specify the added AMD support works (only) on Linux

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index b2e6d5e..15e0737 100644
--- a/README.md
+++ b/README.md
@@ -74,7 +74,7 @@ docker compose up

 After you followed the quick start set-up below, change the Ollama credentials by using `http://host.docker.internal:11434/` as the host.

-### For AMD GPU users
+### For AMD GPU users on Linux

 ```
 git clone https://github.com/n8n-io/self-hosted-ai-starter-kit.git