---
# Docker Compose stack: Ollama (GPU-enabled LLM server) + Ollama Web UI.
version: '3.6'

services:
  ollama:
    image: ollama/ollama:latest
    deploy:
      resources:
        reservations:
          devices:
            # Reserve one NVIDIA GPU for the Ollama container.
            - driver: nvidia
              count: 1
              capabilities:
                - gpu
    volumes:
      # Persist downloaded models and config across container restarts.
      - ollama:/root/.ollama
    # Exposes the Ollama API outside the container stack; remove this
    # mapping if the API should only be reachable by ollama-webui.
    ports:
      - "11434:11434"
    pull_policy: always
    tty: true
    restart: unless-stopped

  ollama-webui:
    build:
      context: ./ollama-webui
      args:
        # Build-time base URL used by the web UI bundle.
        OLLAMA_API_BASE_URL: '/ollama/api'
      dockerfile: Dockerfile
    image: reaweb.uk/ollama-webui
    depends_on:
      - ollama
    ports:
      - "3000:8080"
    environment:
      # Runtime override: point the UI at the ollama service on the
      # compose network.
      - "OLLAMA_API_BASE_URL=http://ollama:11434/api"
    extra_hosts:
      # Allow the container to reach services on the Docker host.
      - host.docker.internal:host-gateway
    restart: unless-stopped

volumes:
  # Named volume backing /root/.ollama in the ollama service.
  ollama: {}