feat: Add TURN server support
All checks were successful
Queue Release Build / prepare (push) Successful in 15s
Deploy Web Apps / deploy (push) Successful in 5m35s
Queue Release Build / build-linux (push) Successful in 24m45s
Queue Release Build / build-windows (push) Successful in 13m52s
Queue Release Build / finalize (push) Successful in 23s

This commit is contained in:
2026-04-18 21:27:04 +02:00
parent 167c45ba8d
commit 44588e8789
60 changed files with 2404 additions and 365 deletions

View File

@@ -190,7 +190,7 @@
[class.text-destructive]="!!att.requestError"
[class.text-muted-foreground]="!att.requestError"
>
{{ att.requestError || 'Waiting for image source' }}
{{ att.requestError || 'Waiting for image source...' }}
</div>
</div>
</div>

View File

@@ -419,8 +419,8 @@ export class ChatMessageItemComponent {
}
return this.isVideoAttachment(attachment)
? 'Waiting for video source'
: 'Waiting for audio source';
? 'Waiting for video source...'
: 'Waiting for audio source...';
}
getMediaAttachmentActionLabel(attachment: Attachment): string {
@@ -502,8 +502,8 @@ export class ChatMessageItemComponent {
? 'Large video. Accept the download to watch it in chat.'
: 'Large audio file. Accept the download to play it in chat.'
: isVideo
? 'Waiting for video source'
: 'Waiting for audio source',
? 'Waiting for video source...'
: 'Waiting for audio source...',
progressPercent: attachment.size > 0
? ((attachment.receivedBytes || 0) * 100) / attachment.size
: 0

View File

@@ -7,7 +7,7 @@
@if (syncing() && !loading()) {
<div class="flex items-center justify-center gap-2 py-1.5 text-xs text-muted-foreground">
<div class="h-3 w-3 animate-spin rounded-full border-b-2 border-primary"></div>
<span>Syncing messages</span>
<span>Syncing messages...</span>
</div>
}

View File

@@ -62,7 +62,7 @@
@if (loading() && results().length === 0) {
<div class="flex h-full min-h-56 flex-col items-center justify-center gap-3 text-muted-foreground">
<span class="h-6 w-6 animate-spin rounded-full border-2 border-primary/20 border-t-primary"></span>
<p class="text-sm">Loading GIFs from KLIPY</p>
<p class="text-sm">Loading GIFs from KLIPY...</p>
</div>
} @else if (results().length === 0) {
<div
@@ -125,7 +125,7 @@
[disabled]="loading()"
class="rounded-full border border-border/80 bg-background/60 px-4 py-2 text-xs font-medium text-foreground transition-colors hover:bg-secondary disabled:cursor-not-allowed disabled:opacity-60"
>
{{ loading() ? 'Loading' : 'Load more' }}
{{ loading() ? 'Loading...' : 'Load more' }}
</button>
}
</div>

View File

@@ -151,7 +151,7 @@ function formatMessagePreview(senderName: string, content: string): string {
}
const preview = normalisedContent.length > MESSAGE_PREVIEW_LIMIT
? `${normalisedContent.slice(0, MESSAGE_PREVIEW_LIMIT - 1)}`
? `${normalisedContent.slice(0, MESSAGE_PREVIEW_LIMIT - 1)}...`
: normalisedContent;
return `${senderName}: ${preview}`;

View File

@@ -27,7 +27,7 @@ export class InviteComponent implements OnInit {
readonly currentUser = inject(Store).selectSignal(selectCurrentUser);
readonly invite = signal<ServerInviteInfo | null>(null);
readonly status = signal<'loading' | 'redirecting' | 'joining' | 'error'>('loading');
readonly message = signal('Loading invite');
readonly message = signal('Loading invite...');
private readonly route = inject(ActivatedRoute);
private readonly router = inject(Router);
@@ -121,7 +121,7 @@ export class InviteComponent implements OnInit {
this.invite.set(invite);
this.status.set('joining');
this.message.set(`Joining ${invite.server.name}`);
this.message.set(`Joining ${invite.server.name}...`);
const currentUser = await this.hydrateCurrentUser();
const joinResponse = await firstValueFrom(this.serverDirectory.requestJoin({
@@ -163,7 +163,7 @@ export class InviteComponent implements OnInit {
private async redirectToLogin(): Promise<void> {
this.status.set('redirecting');
this.message.set('Redirecting to login');
this.message.set('Redirecting to login...');
await this.router.navigate(['/login'], {
queryParams: {

View File

@@ -0,0 +1,139 @@
import {
Injectable,
inject,
computed,
type Signal
} from '@angular/core';
import { Store } from '@ngrx/store';
import { selectCurrentUser, selectOnlineUsers } from '../../../../store/users/users.selectors';
import { toSignal } from '@angular/core/rxjs-interop';
import { RealtimeSessionFacade } from '../../../../core/realtime';
import type { User } from '../../../../shared-kernel';
/**
 * Connectivity health snapshot for a single peer in a voice channel.
 */
export interface PeerConnectivityHealth {
  /** Stable identifier of the peer this snapshot describes. */
  peerId: string;
  /** Number of voice peers this peer can send/receive audio to/from. */
  connectedPeerCount: number;
  /** Total peers expected in voice. */
  totalVoicePeers: number;
  /** true when this peer has the fewest connections -> warning target. */
  hasDesync: boolean;
}
/**
 * Tracks per-peer voice connectivity health by comparing the number
 * of connected audio streams each peer has. Peers with fewest
 * bidirectional audio connections are flagged.
 *
 * Uses peer latency data as a proxy for a healthy bidirectional
 * connection: a peer we can ping has a working data channel, and
 * therefore a working RTCPeerConnection.
 */
@Injectable({ providedIn: 'root' })
export class VoiceConnectivityHealthService {
  /** Current user from the store (may be null/undefined before hydration). */
  readonly currentUser: Signal<User | null | undefined>;
  /** All users currently known to be online. */
  readonly onlineUsers: Signal<User[]>;
  /** Peer keys currently flagged as having connectivity issues. */
  readonly desyncPeerIds: Signal<ReadonlySet<string>>;
  /** Whether the LOCAL user is the one with connectivity issues. */
  readonly localUserHasDesync: Signal<boolean>;
  private readonly webrtc = inject(RealtimeSessionFacade);

  constructor() {
    const store = inject(Store);
    this.currentUser = toSignal(store.select(selectCurrentUser));
    this.onlineUsers = toSignal(store.select(selectOnlineUsers), { initialValue: [] });

    /**
     * Set of peer keys with connectivity issues, recomputed whenever the
     * store's user state or the WebRTC connected-peer list changes. A peer
     * is flagged when it has fewer healthy connections than the majority
     * of users in the same voice channel.
     */
    this.desyncPeerIds = computed<ReadonlySet<string>>(() => {
      const localUser = this.currentUser();
      const localVoice = localUser?.voiceState;
      // Not connected to a voice room -> nothing to diagnose.
      if (!localVoice?.isConnected || !localVoice.roomId || !localVoice.serverId) {
        return new Set<string>();
      }
      // Everyone sharing our voice room on the same server.
      const roomMembers = this.onlineUsers().filter((user) => {
        const voice = user.voiceState;
        return !!voice?.isConnected
          && voice.roomId === localVoice.roomId
          && voice.serverId === localVoice.serverId;
      });
      if (roomMembers.length < 2) {
        return new Set<string>();
      }
      // Peers with a latency measurement are reachable; anyone without
      // one is treated as unreachable from our vantage point.
      const reachable = new Set(this.webrtc.connectedPeers());
      // NOTE(review): "oderId" is presumably an id field on User — looks
      // like a possible typo for "orderId"/"orderKey"; confirm upstream.
      const localKey = localUser?.oderId || localUser?.id;
      if (!localKey) {
        return new Set<string>();
      }
      // We can only see our own connections — but if WE cannot reach
      // peer X while we CAN reach peers Y and Z, X is the likely culprit.
      const unreachable = new Set<string>();
      for (const member of roomMembers) {
        const memberKey = member.oderId || member.id;
        if (memberKey === localKey) {
          continue;
        }
        const connected = reachable.has(memberKey)
          || reachable.has(member.id)
          || reachable.has(member.oderId ?? '');
        if (!connected) {
          unreachable.add(memberKey);
        }
      }
      // Full connectivity from our side -> no desync anywhere.
      if (unreachable.size === 0) {
        return new Set<string>();
      }
      // If we reach nobody at all (in a room of 3+), the local user is
      // the outlier, not everyone else.
      const othersReachable = roomMembers.length - 1 - unreachable.size;
      if (othersReachable === 0 && roomMembers.length > 2) {
        return new Set([localKey]);
      }
      return unreachable;
    });

    /** True when the local user's own key is in the desync set. */
    this.localUserHasDesync = computed(() => {
      const localUser = this.currentUser();
      const localKey = localUser?.oderId || localUser?.id;
      return !!localKey && this.desyncPeerIds().has(localKey);
    });
  }

  /**
   * Check if a specific peer has a desync warning.
   *
   * @param peerKey the peer's key (oderId or id, as used in the set)
   * @returns true when the peer is currently flagged
   */
  hasPeerDesync(peerKey: string): boolean {
    return this.desyncPeerIds().has(peerKey);
  }
}

View File

@@ -209,7 +209,7 @@ export class VoicePlaybackService {
* ↓
* muted <audio> element (Chrome workaround - primes the stream)
* ↓
* MediaStreamSource GainNode MediaStreamDestination output <audio>
* MediaStreamSource -> GainNode -> MediaStreamDestination -> output <audio>
*/
private createPipeline(peerId: string, stream: MediaStream): void {
// Chromium/Electron needs a muted <audio> element before Web Audio can read the stream.

View File

@@ -1,4 +1,5 @@
export * from './application/facades/voice-connection.facade';
export * from './application/services/voice-activity.service';
export * from './application/services/voice-playback.service';
export * from './application/services/voice-connectivity-health.service';
export * from './domain/models/voice-connection.model';