Compare commits

...

2 Commits
0.1.2 ... 0.1.4

Author SHA1 Message Date
3ce0df7eaf Added Ollama and Ollama Web UI 2024-06-19 12:21:05 +02:00
e88e67f913 Fix broken appsettings file
Invalid json
2024-06-19 12:12:05 +02:00
2 changed files with 35 additions and 1 deletion

View File

@@ -9,7 +9,7 @@
"Token": "discordToken",
"LavaLinkPassword": "youshallnotpass",
"LavaLinkHostname": "127.0.0.1",
"LavaLinkPort": 2333
"LavaLinkPort": 2333,
"LLM": {
"Url": "http://192.168.50.54:11434",
"Model": "gemma"

View File

@@ -24,6 +24,40 @@ services:
ports:
# you only need this if you want to make your lavalink accessible from outside of containers
- "2333:2333"
ollama:
volumes:
- ollama:/root/.ollama
# comment below to not expose Ollama API outside the container stack
ports:
- 11434:11434
container_name: ollama
pull_policy: always
tty: true
restart: unless-stopped
image: ollama/ollama:latest
ollama-webui:
build:
context: .
args:
OLLAMA_API_BASE_URL: '/ollama/api'
dockerfile: Dockerfile
image: ollama-webui:latest
container_name: ollama-webui
depends_on:
- ollama
ports:
- 3000:8080
environment:
- "OLLAMA_API_BASE_URL=http://ollama:11434/api"
extra_hosts:
- host.docker.internal:host-gateway
restart: unless-stopped
volumes:
ollama: {}
networks:
# create a lavalink network you can add other containers to, to give them access to Lavalink
lavalink: