# Docker Compose stack: Open WebUI frontend backed by an Ollama model server.
# Both services sit on a dedicated bridge network with static IPv4 addresses
# so they can reach each other by hostname/IP regardless of start order.
version: '3.8'

services:
  open-webui:
    image: ghcr.io/open-webui/open-webui:latest
    container_name: open-webui
    networks:
      my-network:
        ipv4_address: 172.30.0.100
    ports:
      - "3000:8080"  # host 3000 -> container 8080 (web UI)
    environment:
      # REQUIRED: points the UI at the Ollama API over the internal network
      - 'OLLAMA_BASE_URL=http://ollama:11434'
    volumes:
      - open-webui-data:/app/data
    restart: always

  ollama:
    image: ollama/ollama:latest
    container_name: ollama
    networks:
      my-network:
        ipv4_address: 172.30.0.101
    ports:
      - "5005:11434"  # host 5005 -> Ollama API (container 11434)
    volumes:
      - ollama-models:/root/.ollama  # correct path for downloaded models
    restart: always

networks:
  my-network:
    driver: bridge
    ipam:
      config:
        - subnet: "172.30.0.0/16"

volumes:
  open-webui-data:
  ollama-models:  # persistent storage for AI models