# Compose stack: Open WebUI (GPU build) fronting an Ollama backend,
# plus an OpenAI-compatible edge-TTS sidecar.
services:
  openWebUI:
    container_name: ollama-webui
    image: ghcr.io/open-webui/open-webui:cuda
    restart: unless-stopped
    depends_on:
      - ollama
    ports:
      # Host 3000 -> WebUI's internal port 8080.
      - "3000:8080"
    extra_hosts:
      # Lets the container reach services bound on the Docker host.
      - "host.docker.internal:host-gateway"
    volumes:
      # Persist WebUI data (users, chats) next to the compose file.
      - ./open-webui-local:/app/backend/data
    environment:
      # FIX: use the ollama *container* port (11434). The service DNS name
      # "ollama" resolves inside the compose network, where only the
      # container port is listening; 11435 is the host-published port and
      # is unreachable via the service name.
      - "OLLAMA_API_BASE_URL=http://ollama:11434/api"

  ollama:
    container_name: ollama
    image: ollama/ollama:latest
    ports:
      # Host 11435 -> Ollama's default API port 11434 (avoids clashing
      # with a natively installed Ollama on the host).
      - "11435:11434"
    volumes:
      # Persist downloaded models between container recreations.
      - ./ollama-local:/root/.ollama
    restart: unless-stopped
    deploy:
      resources:
        reservations:
          devices:
            # Reserve one NVIDIA GPU (requires nvidia-container-toolkit).
            - driver: nvidia
              count: 1
              capabilities: [gpu]

  edge:
    container_name: ollama-edge
    image: travisvn/openai-edge-tts:latest
    ports:
      # FIX: quoted for consistency with the other mappings and to avoid
      # YAML's colon-separated-integer (sexagesimal) pitfall.
      - "5051:5050"
    restart: unless-stopped