services:
  openwebui:
    image: ghcr.io/open-webui/open-webui:main
    ports:
      - "3080:8080"              # Open WebUI listens on 8080 inside the container
    volumes:
      - /data/llm/webui-data:/app/backend/data
    networks:
      - openwebui-net
    # Note: Open WebUI may need OLLAMA_BASE_URL=http://ollama:11434 set in its
    # environment (or configured in the admin settings) to reach the ollama
    # service below over the shared network.

  ollama:
    image: docker.io/ollama/ollama:latest
    ports:
      - "3081:11434"             # Ollama API (11434 is its default port)
    volumes:
      - /data/llm/ollama/code:/code
      - /data/llm/ollama/ollama:/root/.ollama
    container_name: ollama
    pull_policy: always
    tty: true
    environment:
      - OLLAMA_KEEP_ALIVE=24h    # keep loaded models in memory for 24 hours
      - OLLAMA_HOST=0.0.0.0      # listen on all interfaces inside the container
    networks:
      - openwebui-net

networks:
  openwebui-net:
    # Podman-style network option; not part of the Docker Compose spec.
    dns_enabled: true
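# Usage sketch (assumes Docker Compose v2 or podman-compose; "llama3" below is
# just an example model tag):
#   docker compose up -d                       # or: podman-compose up -d
#   docker exec -it ollama ollama pull llama3
# Open WebUI is then reachable at http://localhost:3080 and the Ollama API at
# http://localhost:3081 on the host, per the port mappings above.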