services:
  lavalink:
    # pin the image version to Lavalink v3 (3.7.11); bump the tag explicitly to upgrade
    image: ghcr.io/lavalink-devs/lavalink:3.7.11
    container_name: lavalink
    restart: unless-stopped
    environment:
      # set Java options here
      - _JAVA_OPTIONS=-Xmx6G
      # set lavalink server port
      - SERVER_PORT=2333
      # set password for lavalink
      - LAVALINK_SERVER_PASSWORD=youshallnotpass
    volumes:
      # mount application.yml from the same directory or use environment variables
      - ./application.yml:/opt/Lavalink/application.yml
      # persist plugins between restarts, make sure to set the correct permissions (user: 322, group: 322)
      - ./plugins/:/opt/Lavalink/plugins/
    networks:
      - lavalink
    expose:
      # lavalink exposes port 2333 to connect to for other containers (this is for documentation purposes only)
      - 2333
    ports:
      # you only need this if you want to make your lavalink accessible from outside of containers
      - "2333:2333"

  ollama:
    volumes:
      - ollama:/root/.ollama
    # comment below to not expose Ollama API outside the container stack
    ports:
      # quoted to avoid YAML treating host:container pairs as non-string scalars
      - "11434:11434"
    container_name: ollama
    pull_policy: always
    tty: true
    restart: unless-stopped
    image: ollama/ollama:latest

  ollama-webui:
    build:
      context: .
      args:
        OLLAMA_API_BASE_URL: '/ollama/api'
      dockerfile: Dockerfile
    image: ollama-webui:latest
    container_name: ollama-webui
    depends_on:
      - ollama
    ports:
      # host port 3000 maps to the web UI listening on 8080 inside the container
      - "3000:8080"
    environment:
      - "OLLAMA_API_BASE_URL=http://ollama:11434/api"
    extra_hosts:
      # lets the container reach services on the Docker host
      - host.docker.internal:host-gateway
    restart: unless-stopped

volumes:
  ollama: {}

networks:
  # create a lavalink network you can add other containers to, to give them access to Lavalink
  lavalink:
    name: lavalink