Compare commits

..

9 Commits

Author SHA1 Message Date
Myx
f08d89f322 Improve chat 2024-08-11 01:18:00 +02:00
b30d47e351 Add LLM info into main Readme 2024-06-19 12:33:19 +02:00
3ce0df7eaf Added Ollama and Ollama Web UI 2024-06-19 12:21:05 +02:00
e88e67f913 Fix broken appsettings file
Invalid json
2024-06-19 12:12:05 +02:00
5053553182 Update dotnet.yml 2024-06-02 19:12:45 +02:00
327ccc9675 Update dotnet.yml 2024-06-02 18:57:56 +02:00
cbc99c2773 Update dotnet.yml 2024-06-02 18:55:14 +02:00
d56215f685 Update README.md 2024-06-02 18:53:02 +02:00
967bee923a Update dotnet.yml 2024-06-02 18:51:38 +02:00
9 changed files with 100 additions and 23 deletions

View File

@@ -7,10 +7,13 @@ on:
jobs:
build:
runs-on: windows-latest
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Checkout
uses: actions/checkout@v2
with:
fetch-depth: 0 # required for github-action-get-previous-tag
- name: Setup .NET
uses: actions/setup-dotnet@v1
@@ -27,15 +30,19 @@ jobs:
run: dotnet publish ./Bot/Lunaris2.csproj --configuration Release --output ./out
- name: Zip the build
run: 7z a -tzip ./out/Bot.zip ./out/*
run: 7z a -tzip ./out/Lunaris.zip ./out/*
- name: Get the tag name
id: get_tag
run: echo "::set-output name=tag::${GITHUB_REF#refs/tags/}"
- name: Get previous tag
id: previoustag
uses: 'WyriHaximus/github-action-get-previous-tag@v1'
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
- name: Get the version
id: get_version
run: echo "::set-output name=version::$(date +%s).${{ github.run_id }}"
- name: Get next minor version
id: semver
uses: 'WyriHaximus/github-action-next-semvers@v1'
with:
version: ${{ steps.previoustag.outputs.tag }}
- name: Create Release
id: create_release
@@ -43,8 +50,8 @@ jobs:
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # This token is provided by Actions, you do not need to create your own token
with:
tag_name: ${{ steps.get_version.outputs.version }}
release_name: Release v${{ steps.get_version.outputs.version }}
tag_name: ${{ steps.semver.outputs.patch }}
release_name: Release ${{ steps.semver.outputs.patch }}
draft: false
prerelease: false
@@ -55,6 +62,6 @@ jobs:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./out/Bot.zip
asset_name: Bot.zip
asset_path: ./out/Lunaris.zip
asset_name: Lunaris.zip
asset_content_type: application/zip

View File

@@ -12,9 +12,11 @@ namespace Lunaris2.Handler.ChatCommand
{
private readonly OllamaApiClient _ollama;
private readonly Dictionary<ulong, Chat?> _chatContexts = new();
private readonly ChatSettings _chatSettings;
public ChatHandler(IOptions<ChatSettings> chatSettings)
{
_chatSettings = chatSettings.Value;
var uri = new Uri(chatSettings.Value.Url);
_ollama = new OllamaApiClient(uri)
@@ -30,6 +32,10 @@ namespace Lunaris2.Handler.ChatCommand
var userMessage = command.FilteredMessage;
var randomPersonality = _chatSettings.Personalities[new Random().Next(_chatSettings.Personalities.Count)];
userMessage = $"{randomPersonality.Instruction} {userMessage}";
using var setTyping = command.Message.Channel.EnterTypingState();
if (string.IsNullOrWhiteSpace(userMessage))

View File

@@ -4,4 +4,11 @@ public class ChatSettings
{
public string Url { get; set; }
public string Model { get; set; }
public List<Personality> Personalities { get; set; }
}
public class Personality
{
public string Name { get; set; }
public string Instruction { get; set; }
}

View File

@@ -16,6 +16,7 @@
<PackageReference Include="Discord.Net.Rest" Version="3.13.1" />
<PackageReference Include="MediatR" Version="12.2.0" />
<PackageReference Include="Microsoft.Extensions.Configuration" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="8.0.1" />
<PackageReference Include="Microsoft.Extensions.Configuration.Json" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="8.0.0" />
<PackageReference Include="Microsoft.Extensions.Hosting" Version="8.0.0" />

View File

@@ -42,7 +42,8 @@ public class Program
.AddJsonFile("appsettings.json")
.Build();
services.AddSingleton(client)
services
.AddSingleton(client)
.AddSingleton(commands)
.AddMediatR(configuration => configuration.RegisterServicesFromAssembly(Assembly.GetExecutingAssembly()))
.AddSingleton<DiscordEventListener>()

View File

@@ -9,9 +9,15 @@
"Token": "discordToken",
"LavaLinkPassword": "youshallnotpass",
"LavaLinkHostname": "127.0.0.1",
"LavaLinkPort": 2333
"LavaLinkPort": 2333,
"LLM": {
"Url": "http://192.168.50.54:11434",
"Model": "gemma"
"Url": "http://localhost:7869",
"Model": "gemma",
"personalities": [
{
"name": "Lunaris",
"instruction": "Let's role play. Your name is Lunaris and can be quite rude. You are supposed to answer on following question with a maximum of 2 sentences:"
}
]
}
}

View File

@@ -7,6 +7,7 @@ Lunaris2 is a Discord bot designed to play music in your server's voice channels
- Play music from YouTube directly in your Discord server.
- Skip tracks, pause, and resume playback.
- Queue system to line up your favorite tracks.
- Local LLM (AI chatbot) that responds to @mentions in Discord chat. See more about it below.
## Setup
@@ -17,6 +18,11 @@ Lunaris2 is a Discord bot designed to play music in your server's voice channels
5. Make sure you have Docker installed, then run the file ``start-services.sh`` (git-bash is required to run it).
6. Now you can start the project and run the application.
## LLM
Lunaris supports AI chat using a locally hosted large language model (LLM); Docker will set it up for you when you run the start-services script.
The LLM runs on Ollama; see more about Ollama [here](https://ollama.com/). Running an LLM locally demands significant system resources — the minimum requirement is at least 8GB of RAM. If you don't have enough RAM, select an LLM model in the [appsettings file](https://github.com/Myxelium/Lunaris2.0/blob/master/Bot/appsettings.json#L15) that requires fewer system resources.
## Usage
- `/play <song>`: Plays the specified song in the voice channel you're currently in.
@@ -25,7 +31,3 @@ Lunaris2 is a Discord bot designed to play music in your server's voice channels
## Contributing
Pull requests are welcome. For major changes, please open an issue first to discuss what you would like to change.
## License
[MIT](https://choosealicense.com/licenses/mit/)

View File

@@ -24,7 +24,52 @@ services:
ports:
# you only need this if you want to make your lavalink accessible from outside of containers
- "2333:2333"
ollama:
image: ollama/ollama:latest
ports:
- 7869:11434
volumes:
- .:/code
- ./ollama/ollama:/root/.ollama
container_name: ollama
pull_policy: always
tty: true
restart: always
environment:
- OLLAMA_KEEP_ALIVE=24h
- OLLAMA_HOST=0.0.0.0
networks:
- ollama-docker
ollama-webui:
image: ghcr.io/open-webui/open-webui:main
container_name: ollama-webui
volumes:
- ./ollama/ollama-webui:/app/backend/data
depends_on:
- ollama
ports:
- 8080:8080
environment: # https://docs.openwebui.com/getting-started/env-configuration#default_models
- OLLAMA_BASE_URLS=http://host.docker.internal:7869 #comma separated ollama hosts
- ENV=dev
- WEBUI_AUTH=False
- WEBUI_NAME=valiantlynx AI
- WEBUI_URL=http://localhost:8080
- WEBUI_SECRET_KEY=t0p-s3cr3t
extra_hosts:
- host.docker.internal:host-gateway
restart: unless-stopped
networks:
- ollama-docker
volumes:
ollama: {}
networks:
# create a lavalink network you can add other containers to, to give them access to Lavalink
lavalink:
name: lavalink
ollama-docker:
external: false

View File

@@ -1 +1,3 @@
docker compose up -d
read -p "Press enter to continue"