services:
  lavalink:
    # pin the image version to Lavalink v4
    image: ghcr.io/lavalink-devs/lavalink:4.0.8
    container_name: lavalink
    restart: unless-stopped
    environment:
      # set Java options here
      - _JAVA_OPTIONS=-Xmx6G
      # set lavalink server port
      - SERVER_PORT=2333
      # set password for lavalink
      - LAVALINK_SERVER_PASSWORD=youshallnotpass
    volumes:
      # mount application.yml from the same directory or use environment variables
      - ./application.yml:/opt/Lavalink/application.yml
      # persist plugins between restarts, make sure to set the correct permissions (user: 322, group: 322)
      - ./plugins/:/opt/Lavalink/plugins/
    networks:
      - lavalink
    expose:
      # lavalink exposes port 2333 to connect to for other containers (this is for documentation purposes only)
      - 2333
    ports:
      # you only need this if you want to make your lavalink accessible from outside of containers
      - "2333:2333"

  ollama:
    image: ollama/ollama:latest
    ports:
      # quoted to avoid YAML implicit-typing surprises on port mappings
      - "7869:11434"
    volumes:
      - .:/code
      - ./ollama/ollama:/root/.ollama
    container_name: ollama
    pull_policy: always
    tty: true
    restart: always
    environment:
      - OLLAMA_KEEP_ALIVE=24h
      # bind on all interfaces so other containers can reach the API
      - OLLAMA_HOST=0.0.0.0
    networks:
      - ollama-docker

  ollama-webui:
    image: ghcr.io/open-webui/open-webui:main
    container_name: ollama-webui
    volumes:
      - ./ollama/ollama-webui:/app/backend/data
    depends_on:
      - ollama
    ports:
      - "8080:8080"
    environment:
      # https://docs.openwebui.com/getting-started/env-configuration#default_models
      - OLLAMA_BASE_URLS=http://host.docker.internal:7869 # comma separated ollama hosts
      - ENV=dev
      - WEBUI_AUTH=False
      - WEBUI_NAME=valiantlynx AI
      - WEBUI_URL=http://localhost:8080
      - WEBUI_SECRET_KEY=t0p-s3cr3t
    extra_hosts:
      # lets the container resolve the host machine (needed for OLLAMA_BASE_URLS above)
      - host.docker.internal:host-gateway
    restart: unless-stopped
    networks:
      - ollama-docker

volumes:
  # NOTE(review): named volume "ollama" is declared but not referenced by any
  # service above (the ollama service uses bind mounts) — confirm whether it
  # is still needed or whether a service was meant to use it
  ollama: {}

networks:
  # create a lavalink network you can add other containers to, to give them access to Lavalink
  lavalink:
    name: lavalink
  ollama-docker:
    external: false