From f2cdb576f99b33f51caf0b0952bd36cf0d8fd170 Mon Sep 17 00:00:00 2001
From: Jeff Scott Ward
Date: Tue, 26 Nov 2024 16:31:02 -0500
Subject: [PATCH] Update docker-compose.yml to include Pipelines

NOTE: their docs also say to do the following.

Connect to Open WebUI:

- Navigate to the Settings > Connections > OpenAI API section in Open WebUI.
- Set the API URL to http://localhost:9099 and the API key to 0p3n-w3bu!.

(Note: http://localhost:9099 only resolves from the Docker host via the
published port; since both services share the 'demo' network here, the
Open WebUI container itself reaches Pipelines at
http://open-webui-pipelines:9099.)

Your pipelines should now be active.
---
 local-ai-packaged/docker-compose.yml | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/local-ai-packaged/docker-compose.yml b/local-ai-packaged/docker-compose.yml
index 1998c35d..9a28f8b5 100644
--- a/local-ai-packaged/docker-compose.yml
+++ b/local-ai-packaged/docker-compose.yml
@@ -45,6 +45,21 @@ x-init-ollama: &init-ollama
     - "sleep 3; OLLAMA_HOST=ollama:11434 ollama pull llama3.1; OLLAMA_HOST=ollama:11434 ollama pull nomic-embed-text"
 
 services:
+  open-webui-pipelines:
+    image: ghcr.io/open-webui/pipelines:main
+    networks: ['demo']
+    restart: unless-stopped
+    container_name: open-webui-pipelines
+    ports:
+      - "9099:9099"
+    extra_hosts:
+      - "host.docker.internal:host-gateway"
+    volumes:
+      - ./open-webui-pipelines:/app/pipelines
+    depends_on:
+      - open-webui
+      - postgres
+
   open-webui:
     image: ghcr.io/open-webui/open-webui:main
     networks: ['demo']