This commit is contained in:
2024-09-03 10:28:57 +08:00
parent 9fcc584b5c
commit 65addae2b5

View File

@@ -0,0 +1,59 @@
version: '3.8'

services:
  ollama:
    # GPU support via the NVIDIA Container Toolkit.
    # NOTE(review): the original comment said "Uncomment below for GPU support"
    # but the deploy section is active — remove it to run CPU-only.
    deploy:
      resources:
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    volumes:
      - /mnt/c/.ollama:/root/.ollama
      - /mnt/c/ollama/.ollama:/usr/share/ollama/.ollama
    environment:
      # Environment variable setting the model storage path
      - OLLAMA_MODELS=/usr/share/ollama/.ollama/models
      - NVIDIA_VISIBLE_DEVICES=all
      - NVIDIA_DRIVER_CAPABILITIES=compute,utility
    # Exposes the Ollama API outside the container stack
    ports:
      # Quoted: unquoted digit:digit scalars hit YAML 1.1 sexagesimal parsing
      - "11434:11434"
    container_name: ollama
    pull_policy: always
    # tty: true
    restart: unless-stopped
    image: ollama/ollama:latest
    networks:
      - network_ollama

  ollama-webui:
    build:
      context: .
      args:
        OLLAMA_API_BASE_URL: '/ollama/api'
      dockerfile: Dockerfile
    image: ollamawebui/ollama-webui:latest
    container_name: ollama-webui_new
    depends_on:
      - ollama
      # Uncomment below for WIP: Auth support
      # - ollama-webui-db
    ports:
      - "3000:8080"
    environment:
      - "OLLAMA_API_BASE_URL=http://ollama:11434/api"
      # Uncomment below for WIP: Auth support
      # - "WEBUI_AUTH=TRUE"
      # - "WEBUI_DB_URL=mongodb://root:example@ollama-webui-db:27017/"
      # - "WEBUI_JWT_SECRET_KEY=SECRET_KEY"
    extra_hosts:
      - host.docker.internal:host-gateway
    restart: unless-stopped
    networks:
      - network_ollama

networks:
  network_ollama:
    name: network_ollama