Compare commits


4 Commits
0.1.1 ... 0.1.5

SHA1         Message                                      Date
b30d47e351   Add LLM info into main Readme                2024-06-19 12:33:19 +02:00
3ce0df7eaf   Added Ollama and Ollama Web UI               2024-06-19 12:21:05 +02:00
e88e67f913   Fix broken appsettings file (Invalid json)   2024-06-19 12:12:05 +02:00
5053553182   Update dotnet.yml                            2024-06-02 19:12:45 +02:00
4 changed files with 44 additions and 4 deletions

.github/workflows/dotnet.yml

@@ -30,7 +30,7 @@ jobs:
       run: dotnet publish ./Bot/Lunaris2.csproj --configuration Release --output ./out
     - name: Zip the build
-      run: 7z a -tzip ./out/Bot.zip ./out/*
+      run: 7z a -tzip ./out/Lunaris.zip ./out/*
     - name: Get previous tag
       id: previoustag
@@ -62,6 +62,6 @@ jobs:
         GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
       with:
         upload_url: ${{ steps.create_release.outputs.upload_url }}
-        asset_path: ./out/Bot.zip
-        asset_name: Bot.zip
+        asset_path: ./out/Lunaris.zip
+        asset_name: Lunaris.zip
         asset_content_type: application/zip

Bot/appsettings.json

@@ -9,7 +9,7 @@
   "Token": "discordToken",
   "LavaLinkPassword": "youshallnotpass",
   "LavaLinkHostname": "127.0.0.1",
-  "LavaLinkPort": 2333
+  "LavaLinkPort": 2333,
   "LLM": {
     "Url": "http://192.168.50.54:11434",
     "Model": "gemma"

README.md

@@ -7,6 +7,7 @@ Lunaris2 is a Discord bot designed to play music in your server's voice channels
 - Play music from YouTube directly in your Discord server.
 - Skip tracks, pause, and resume playback.
 - Queue system to line up your favorite tracks.
+- Local LLM (AI chatbot) that responds to @mentions in Discord chat. See more about it below.
 
 ## Setup
@@ -17,6 +18,11 @@ Lunaris2 is a Discord bot designed to play music in your server's voice channels
 5. Make sure you have Docker installed, then run the file ``start-services.sh`` (git-bash is required to run it).
 6. Now you can start the project and run the application.
 
+## LLM
+Lunaris supports AI chat through a large language model (LLM) hosted locally; Docker sets it up for you when you run the start-services script.
+The LLM runs on Ollama; see more about Ollama [here](https://ollama.com/). Running an LLM locally demands significant system resources: at least 8 GB of RAM is required. If you don't have enough RAM, pick a model in the [appsettings file](https://github.com/Myxelium/Lunaris2.0/blob/master/Bot/appsettings.json#L15) that asks less of your system.
+
 ## Usage
 - `/play <song>`: Plays the specified song in the voice channel you're currently in.
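The README explains what the feature does but not how the bot talks to the model. Below is a minimal sketch of a request against Ollama's documented /api/generate endpoint, using the Url and Model values from appsettings.json; the surrounding class is an illustrative assumption, not the bot's actual implementation:

```csharp
// Minimal sketch: asking the locally hosted model for a reply when the bot is @mentioned.
// The endpoint and payload follow Ollama's /api/generate API; the class, method and
// parameter names are illustrative, not the bot's actual code.
using System.Net.Http;
using System.Net.Http.Json;
using System.Text.Json;
using System.Threading.Tasks;

public static class OllamaChat
{
    private static readonly HttpClient Http = new();

    public static async Task<string> AskAsync(string baseUrl, string model, string prompt)
    {
        // e.g. baseUrl = "http://192.168.50.54:11434", model = "gemma" (from appsettings.json)
        var response = await Http.PostAsJsonAsync($"{baseUrl}/api/generate", new
        {
            model,          // which local model Ollama should run
            prompt,         // the mention text, stripped of the bot tag
            stream = false  // ask for a single JSON object instead of a token stream
        });

        response.EnsureSuccessStatusCode();

        using var doc = JsonDocument.Parse(await response.Content.ReadAsStringAsync());
        return doc.RootElement.GetProperty("response").GetString() ?? string.Empty;
    }
}
```

With the configuration shown earlier this would be called roughly as `await OllamaChat.AskAsync("http://192.168.50.54:11434", "gemma", messageText)`.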

docker-compose.yml

@@ -24,6 +24,40 @@ services:
     ports:
       # you only need this if you want to make your lavalink accessible from outside of containers
       - "2333:2333"
+  ollama:
+    volumes:
+      - ollama:/root/.ollama
+    # comment below to not expose Ollama API outside the container stack
+    ports:
+      - 11434:11434
+    container_name: ollama
+    pull_policy: always
+    tty: true
+    restart: unless-stopped
+    image: ollama/ollama:latest
+
+  ollama-webui:
+    build:
+      context: .
+      args:
+        OLLAMA_API_BASE_URL: '/ollama/api'
+      dockerfile: Dockerfile
+    image: ollama-webui:latest
+    container_name: ollama-webui
+    depends_on:
+      - ollama
+    ports:
+      - 3000:8080
+    environment:
+      - "OLLAMA_API_BASE_URL=http://ollama:11434/api"
+    extra_hosts:
+      - host.docker.internal:host-gateway
+    restart: unless-stopped
+
+volumes:
+  ollama: {}
+
 networks:
   # create a lavalink network you can add other containers to, to give them access to Lavalink
   lavalink:
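The compose file only declares the services, so a quick reachability check can help before pointing the bot at them. The sketch below is an assumption rather than project code: it hits Ollama's /api/tags endpoint (which lists locally available models) on the host port mapped above:

```csharp
// Sketch: confirm the Ollama container answers before the bot starts chatting.
// Uses Ollama's /api/tags endpoint; the default host/port mirror the compose
// mapping above and may differ in your setup.
using System;
using System.Net.Http;
using System.Threading.Tasks;

public static class OllamaHealthCheck
{
    public static async Task<bool> IsUpAsync(string baseUrl = "http://localhost:11434")
    {
        using var http = new HttpClient { Timeout = TimeSpan.FromSeconds(5) };
        try
        {
            var response = await http.GetAsync($"{baseUrl}/api/tags");
            return response.IsSuccessStatusCode;
        }
        catch (HttpRequestException)
        {
            return false; // container not reachable
        }
        catch (TaskCanceledException)
        {
            return false; // request timed out
        }
    }
}
```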