feat: Add TURN server support
All checks were successful
Queue Release Build / prepare (push) Successful in 15s
Deploy Web Apps / deploy (push) Successful in 5m35s
Queue Release Build / build-linux (push) Successful in 24m45s
Queue Release Build / build-windows (push) Successful in 13m52s
Queue Release Build / finalize (push) Successful in 23s

This commit is contained in:
2026-04-18 21:27:04 +02:00
parent 167c45ba8d
commit 44588e8789
60 changed files with 2404 additions and 365 deletions

View File

@@ -1,6 +1,6 @@
# Realtime Infrastructure
Low-level WebRTC and WebSocket plumbing that the rest of the app sits on top of. Nothing in here knows about Angular components, NgRx, or domain logic. It exposes observables, signals, and callbacks that higher layers (facades, effects, components) consume.
Low-level WebRTC and WebSocket plumbing plus the Angular-facing runtime boundary that the rest of the app sits on top of. Most files here stay technical and framework-light, but this area does use Angular signals and DI, shared-kernel contracts, and a small screen-share domain adapter at the composition boundary. It exposes observables, signals, and callbacks that higher layers (facades, effects, components) consume.
## Module map
@@ -9,6 +9,8 @@ realtime/
├── realtime-session.service.ts Composition root (WebRTCService)
├── realtime.types.ts PeerData, credentials, tracker types
├── realtime.constants.ts ICE servers, signal types, bitrates, intervals
├── ice-server-settings.service.ts Persisted STUN/TURN configuration
├── screen-share.config.ts Shared screen-share options and presets
├── signaling/ WebSocket layer
│ ├── signaling.manager.ts One WebSocket per signaling URL
@@ -56,7 +58,7 @@ realtime/
## How it all fits together
`WebRTCService` is the composition root. It instantiates every other manager, then wires their callbacks together after construction (to avoid circular references). No manager imports another manager directly.
`WebRTCService` is the composition root. It instantiates the main managers, then wires their callbacks together after construction to avoid the old monolithic tangle and break circular initialization. Some focused helpers still depend on other managers or their types, but cross-cutting orchestration stays centralized here instead of being spread across the runtime.
```mermaid
graph TD
@@ -378,9 +380,10 @@ Instead of connecting peers directly:
This approach is more reliable in restrictive network environments but introduces additional latency and bandwidth overhead, since all traffic flows through the relay instead of directly between peers.
Toju/Zoracord does not use TURN and does not have code written to support it.
MetoYou ships with STUN-only defaults in `ICE_SERVERS`, but the runtime does support TURN entries through `IceServerSettingsService` and standard `RTCIceServer` credentials. There is no bundled TURN service or default TURN configuration in the repo.
### Summary
- **ICE** coordinates connection establishment by trying multiple network paths
- **STUN** provides public-facing address discovery for NAT traversal
- **TURN** is an optional relay fallback that this runtime can be configured to use, but it is not bundled or enabled by default

View File

@@ -0,0 +1,122 @@
import {
Injectable,
signal,
computed,
type Signal
} from '@angular/core';
import { STORAGE_KEY_ICE_SERVERS } from '../../core/constants';
import { ICE_SERVERS } from './realtime.constants';
/**
 * One user-editable ICE server record as persisted to localStorage.
 * `type` discriminates credential handling: only 'turn' entries are
 * mapped with username/credential when converted to RTCIceServer.
 */
export interface IceServerEntry {
  /** Stable unique id used to address the entry in update/remove/move calls. */
  id: string;
  /** Server kind; determines whether credentials are included in the mapping. */
  type: 'stun' | 'turn';
  /** A single server URL string, e.g. "stun:stun.example.org:3478". */
  urls: string;
  /** TURN username; falls back to '' when the entry is mapped to RTCIceServer. */
  username?: string;
  /** TURN credential; falls back to '' when the entry is mapped to RTCIceServer. */
  credential?: string;
}
/**
 * Bundled STUN defaults derived from ICE_SERVERS, each tagged with a
 * stable synthetic id so list operations can address them.
 * Entries whose `urls` is an array are collapsed to their first URL.
 */
const DEFAULT_ENTRIES: IceServerEntry[] = ICE_SERVERS.map((server, index) => {
  const firstUrl = Array.isArray(server.urls) ? server.urls[0] : server.urls;
  return {
    id: `default-stun-${index}`,
    type: 'stun' as const,
    urls: firstUrl
  };
});
/**
 * Stores the user's STUN/TURN configuration in localStorage and exposes it
 * both as editable {@link IceServerEntry} records and as the `RTCIceServer[]`
 * shape that `RTCPeerConnection` consumes.
 *
 * Falls back to the bundled STUN defaults whenever persisted data is missing
 * or unusable, so the runtime never ends up with zero ICE servers.
 */
@Injectable({ providedIn: 'root' })
export class IceServerSettingsService {
  /** Read-only view of the configured entries, in priority order. */
  readonly entries: Signal<IceServerEntry[]>;
  /** Entries projected into the shape accepted by RTCPeerConnection. */
  readonly rtcIceServers: Signal<RTCIceServer[]>;

  private readonly _entries = signal<IceServerEntry[]>(this.load());

  constructor() {
    this.entries = this._entries.asReadonly();
    this.rtcIceServers = computed<RTCIceServer[]>(() =>
      this._entries().map((entry) => {
        if (entry.type === 'turn') {
          // TURN needs credentials; default to empty strings so incomplete
          // entries still produce a structurally valid RTCIceServer.
          return {
            urls: entry.urls,
            username: entry.username ?? '',
            credential: entry.credential ?? ''
          };
        }
        return { urls: entry.urls };
      })
    );
  }

  /** Appends a new entry (with a generated unique id) and persists the list. */
  addEntry(entry: Omit<IceServerEntry, 'id'>): void {
    const suffix = Math.random().toString(36).slice(2, 8);
    const id = `${entry.type}-${Date.now()}-${suffix}`;
    this.commit([...this._entries(), { ...entry, id }]);
  }

  /** Removes the entry with the given id; no-op if the id is unknown. */
  removeEntry(id: string): void {
    this.commit(this._entries().filter((entry) => entry.id !== id));
  }

  /** Shallow-merges `changes` into the entry with the given id. */
  updateEntry(id: string, changes: Partial<Omit<IceServerEntry, 'id'>>): void {
    this.commit(
      this._entries().map((entry) =>
        entry.id === id ? { ...entry, ...changes } : entry
      )
    );
  }

  /** Reorders an entry within the list; ignores out-of-range indices. */
  moveEntry(fromIndex: number, toIndex: number): void {
    const entries = [...this._entries()];
    const inRange = (i: number): boolean => i >= 0 && i < entries.length;
    if (!inRange(fromIndex) || !inRange(toIndex)) {
      return;
    }
    const [moved] = entries.splice(fromIndex, 1);
    entries.splice(toIndex, 0, moved);
    this.commit(entries);
  }

  /** Replaces the current configuration with the bundled STUN defaults. */
  restoreDefaults(): void {
    // Commit fresh copies so stored state never aliases DEFAULT_ENTRIES
    // (the original persisted the shared array/objects directly).
    this.commit(this.defaultEntries());
  }

  /** Applies and persists a new entry list in one step. */
  private commit(entries: IceServerEntry[]): void {
    this._entries.set(entries);
    this.save(entries);
  }

  /** Fresh copies of the bundled defaults. */
  private defaultEntries(): IceServerEntry[] {
    return DEFAULT_ENTRIES.map((entry) => ({ ...entry }));
  }

  /** Structural validation for one persisted record. */
  private isValidEntry(entry: unknown): entry is IceServerEntry {
    if (typeof entry !== 'object' || entry === null) {
      return false;
    }
    const candidate = entry as Partial<IceServerEntry>;
    return (
      typeof candidate.id === 'string'
      && (candidate.type === 'stun' || candidate.type === 'turn')
      && typeof candidate.urls === 'string'
    );
  }

  /**
   * Loads persisted entries, discarding malformed records.
   * Returns the defaults when storage is empty or unreadable — and, fixed
   * here, when every persisted record fails validation (previously that
   * case yielded an empty list, i.e. no ICE servers at all).
   */
  private load(): IceServerEntry[] {
    try {
      const raw = localStorage.getItem(STORAGE_KEY_ICE_SERVERS);
      if (!raw) {
        return this.defaultEntries();
      }
      const parsed: unknown = JSON.parse(raw);
      if (!Array.isArray(parsed)) {
        return this.defaultEntries();
      }
      const valid = parsed.filter(
        (entry): entry is IceServerEntry => this.isValidEntry(entry)
      );
      return valid.length > 0 ? valid : this.defaultEntries();
    } catch {
      return this.defaultEntries();
    }
  }

  /** Best-effort persistence; storage failures (quota, private mode) must not
   *  break the already-applied in-memory update. */
  private save(entries: IceServerEntry[]): void {
    try {
      localStorage.setItem(STORAGE_KEY_ICE_SERVERS, JSON.stringify(entries));
    } catch {
      // Persistence is best-effort; the signal state is already correct.
    }
  }
}

View File

@@ -10,6 +10,7 @@ import { LatencyProfile } from '../realtime.constants';
import { PeerData } from '../realtime.types';
import { WebRTCLogger } from '../logging/webrtc-logger';
import { NoiseReductionManager } from './noise-reduction.manager';
import { loadVoiceSettingsFromStorage } from '../../../domains/voice-session/infrastructure/util/voice-settings-storage.util';
import {
TRACK_KIND_AUDIO,
TRACK_KIND_VIDEO,
@@ -105,6 +106,12 @@ export class MediaManager {
private callbacks: MediaManagerCallbacks
) {
this.noiseReduction = new NoiseReductionManager(logger);
// Read the persisted noise-reduction preference so enableVoice()
// uses the correct value even before voice-controls loads.
try {
this._noiseReductionDesired = loadVoiceSettingsFromStorage().noiseReduction;
} catch { /* keep default */ }
}
/**
@@ -226,7 +233,7 @@ export class MediaManager {
: stream;
// Apply input gain (mic volume) before sending to peers
this.applyInputGainToCurrentStream();
await this.applyInputGainToCurrentStream();
this.logger.logStream('localVoice', this.localMediaStream);
@@ -296,7 +303,7 @@ export class MediaManager {
}
// Apply input gain (mic volume) before sending to peers
this.applyInputGainToCurrentStream();
await this.applyInputGainToCurrentStream();
this.bindLocalTracksToAllPeers();
this.isVoiceActive = true;
@@ -447,7 +454,7 @@ export class MediaManager {
}
// Re-apply input gain to the (possibly new) stream
this.applyInputGainToCurrentStream();
await this.applyInputGainToCurrentStream();
// Propagate the new audio track to every peer connection
this.bindLocalTracksToAllPeers();
@@ -479,8 +486,7 @@ export class MediaManager {
}
if (this.localMediaStream) {
this.applyInputGainToCurrentStream();
this.bindLocalTracksToAllPeers();
void this.applyInputGainToCurrentStream().then(() => this.bindLocalTracksToAllPeers());
}
}
@@ -840,12 +846,22 @@ export class MediaManager {
* If a gain pipeline already exists for the same source stream the gain
* value is simply updated. Otherwise a new pipeline is created.
*/
private applyInputGainToCurrentStream(): void {
private async applyInputGainToCurrentStream(): Promise<void> {
const stream = this.localMediaStream;
if (!stream)
return;
// When gain is unity (1.0) skip the Web Audio pipeline entirely and
// use the raw microphone stream. This avoids unnecessary AudioContext
// overhead when no volume adjustment is needed.
if (this.inputGainVolume === 1.0) {
this.teardownInputGain();
this.preGainStream = stream;
this.applyCurrentMuteState();
return;
}
// If the source stream hasn't changed, just update gain
if (this.preGainStream === stream && this.inputGainNode && this.inputGainCtx) {
this.inputGainNode.gain.value = this.inputGainVolume;
@@ -855,9 +871,15 @@ export class MediaManager {
// Tear down the old pipeline (if any)
this.teardownInputGain();
// Build new pipeline: source → gain → destination
// Build new pipeline: source -> gain -> destination
this.preGainStream = stream;
this.inputGainCtx = new AudioContext();
// Ensure the AudioContext is running before connecting nodes.
if (this.inputGainCtx.state !== 'running') {
await this.inputGainCtx.resume();
}
this.inputGainSourceNode = this.inputGainCtx.createMediaStreamSource(stream);
this.inputGainNode = this.inputGainCtx.createGain();
this.inputGainNode.gain.value = this.inputGainVolume;

View File

@@ -7,9 +7,9 @@
* a clean output stream that can be sent to peers instead.
*
* Architecture:
 * raw mic → AudioContext.createMediaStreamSource
 * → NoiseSuppressorWorklet (AudioWorkletNode)
 * → MediaStreamDestination → clean MediaStream
* raw mic -> AudioContext.createMediaStreamSource
* -> NoiseSuppressorWorklet (AudioWorkletNode)
* -> MediaStreamDestination -> clean MediaStream
*
* The manager is intentionally stateless w.r.t. Angular signals;
* the owning MediaManager / WebRTCService drives signals.
@@ -138,7 +138,7 @@ export class NoiseReductionManager {
/**
* Build the AudioWorklet processing graph:
 * rawStream → source → workletNode → destination
* rawStream -> source -> workletNode -> destination
*/
private async buildProcessingGraph(rawStream: MediaStream): Promise<void> {
// Reuse or create the AudioContext (must be 48 kHz for RNNoise)

View File

@@ -4,7 +4,6 @@ import {
CONNECTION_STATE_DISCONNECTED,
CONNECTION_STATE_FAILED,
DATA_CHANNEL_LABEL,
ICE_SERVERS,
SIGNALING_TYPE_ICE_CANDIDATE,
TRACK_KIND_AUDIO,
TRACK_KIND_VIDEO,
@@ -28,7 +27,7 @@ export function createPeerConnection(
logger.info('Creating peer connection', { remotePeerId, isInitiator });
const connection = new RTCPeerConnection({ iceServers: ICE_SERVERS });
const connection = new RTCPeerConnection({ iceServers: callbacks.getIceServers() });
let dataChannel: RTCDataChannel | null = null;
let peerData: PeerData | null = null;

View File

@@ -29,6 +29,8 @@ export interface PeerConnectionCallbacks {
isScreenSharingActive(): boolean;
/** Whether the local camera is active. */
isCameraEnabled(): boolean;
/** Returns the user-configured ICE servers for peer connection creation. */
getIceServers(): RTCIceServer[];
}
export interface PeerConnectionManagerState {

View File

@@ -2,11 +2,11 @@
* WebRTCService - thin Angular service that composes specialised managers.
*
* Each concern lives in its own file under `./`:
* SignalingManager - WebSocket lifecycle & reconnection
* PeerConnectionManager - RTCPeerConnection, offers/answers, ICE, data channels
* MediaManager - mic voice, mute, deafen, bitrate
* ScreenShareManager - screen capture & mixed audio
* WebRTCLogger - debug / diagnostic logging
* - SignalingManager - WebSocket lifecycle & reconnection
* - PeerConnectionManager - RTCPeerConnection, offers/answers, ICE, data channels
* - MediaManager - mic voice, mute, deafen, bitrate
* - ScreenShareManager - screen capture & mixed audio
* - WebRTCLogger - debug / diagnostic logging
*
* This file wires them together and exposes a public API that is
* identical to the old monolithic service so consumers don't change.
@@ -26,6 +26,7 @@ import { ScreenShareSourcePickerService } from '../../domains/screen-share';
import { MediaManager } from './media/media.manager';
import { ScreenShareManager } from './media/screen-share.manager';
import { VoiceSessionController } from './media/voice-session-controller';
import { IceServerSettingsService } from './ice-server-settings.service';
import type { PeerData, VoiceStateSnapshot } from './realtime.types';
import { LatencyProfile } from './realtime.constants';
import { ScreenShareStartOptions } from './screen-share.config';
@@ -47,6 +48,7 @@ export class WebRTCService implements OnDestroy {
private readonly timeSync = inject(TimeSyncService);
private readonly debugging = inject(DebuggingService);
private readonly screenShareSourcePicker = inject(ScreenShareSourcePickerService);
private readonly iceServerSettings = inject(IceServerSettingsService);
private readonly logger = new WebRTCLogger(() => this.debugging.enabled());
private readonly state = new WebRtcStateController();
@@ -151,7 +153,8 @@ export class WebRTCService implements OnDestroy {
getIdentifyCredentials: () => this.signalingTransportHandler.getIdentifyCredentials(),
getLocalPeerId: (): string => this.state.getLocalPeerId(),
isScreenSharingActive: (): boolean => this.state.isScreenSharingActive(),
isCameraEnabled: (): boolean => this.state.isCameraEnabledActive()
isCameraEnabled: (): boolean => this.state.isCameraEnabledActive(),
getIceServers: (): RTCIceServer[] => this.iceServerSettings.rtcIceServers()
});
this.mediaManager.setCallbacks({
@@ -211,7 +214,7 @@ export class WebRTCService implements OnDestroy {
this.remoteScreenShareRequestController.handlePeerControlMessage(event)
);
// Peer manager → connected peers signal
// Peer manager -> connected peers signal
this.peerManager.connectedPeersChanged$.subscribe((peers: string[]) =>
this.state.setConnectedPeers(peers)
);
@@ -232,12 +235,12 @@ export class WebRTCService implements OnDestroy {
this.remoteScreenShareRequestController.handlePeerDisconnected(peerId);
});
// Media manager → voice connected signal
// Media manager -> voice connected signal
this.mediaManager.voiceConnected$.subscribe(() => {
this.voiceSessionController.handleVoiceConnected();
});
// Peer manager → latency updates
// Peer manager -> latency updates
this.peerManager.peerLatencyChanged$.subscribe(() =>
this.state.syncPeerLatencies(this.peerManager.peerLatencies)
);

View File

@@ -49,7 +49,7 @@ export { ELECTRON_ENTIRE_SCREEN_SOURCE_NAME } from '../../shared-kernel';
export const AUDIO_BITRATE_MIN_BPS = 16_000;
/** Maximum audio bitrate (bps) */
export const AUDIO_BITRATE_MAX_BPS = 256_000;
/** Multiplier to convert kbps → bps */
/** Multiplier to convert kbps -> bps */
export const KBPS_TO_BPS = 1_000;
/** Pre-defined latency-to-bitrate mappings (bps) */
export const LATENCY_PROFILE_BITRATES: Record<LatencyProfile, number> = {