Noise reduction beta

This commit is contained in:
2026-03-03 02:01:42 +01:00
parent 50e7a66812
commit d684fc5632
14 changed files with 1516 additions and 159 deletions

View File

@@ -5,6 +5,9 @@
"main": "electron/main.js",
"scripts": {
"ng": "ng",
"prebuild": "npm run bundle:rnnoise",
"prestart": "npm run bundle:rnnoise",
"bundle:rnnoise": "esbuild node_modules/@timephy/rnnoise-wasm/dist/NoiseSuppressorWorklet.js --bundle --format=esm --outfile=public/rnnoise-worklet.js",
"start": "ng serve",
"build": "ng build",
"build:all": "npm run build && cd server && npm run build",
@@ -57,6 +60,7 @@
"@spartan-ng/brain": "^0.0.1-alpha.589",
"@spartan-ng/cli": "^0.0.1-alpha.589",
"@spartan-ng/ui-core": "^0.0.1-alpha.380",
"@timephy/rnnoise-wasm": "^1.0.0",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"mermaid": "^11.12.3",

585
public/rnnoise-worklet.js Normal file

File diff suppressed because one or more lines are too long

View File

@@ -71,10 +71,17 @@ export class WebRTCService implements OnDestroy {
private readonly _isMuted = signal(false);
private readonly _isDeafened = signal(false);
private readonly _isScreenSharing = signal(false);
private readonly _isNoiseReductionEnabled = signal(false);
private readonly _screenStreamSignal = signal<MediaStream | null>(null);
private readonly _hasConnectionError = signal(false);
private readonly _connectionErrorMessage = signal<string | null>(null);
private readonly _hasEverConnected = signal(false);
/**
* Reactive snapshot of per-peer latencies (ms).
* Updated whenever a ping/pong round-trip completes.
* Keyed by remote peer (oderId).
*/
private readonly _peerLatencies = signal<ReadonlyMap<string, number>>(new Map());
// Public computed signals (unchanged external API)
readonly peerId = computed(() => this._localPeerId());
@@ -85,6 +92,7 @@ export class WebRTCService implements OnDestroy {
readonly isMuted = computed(() => this._isMuted());
readonly isDeafened = computed(() => this._isDeafened());
readonly isScreenSharing = computed(() => this._isScreenSharing());
readonly isNoiseReductionEnabled = computed(() => this._isNoiseReductionEnabled());
readonly screenStream = computed(() => this._screenStreamSignal());
readonly hasConnectionError = computed(() => this._hasConnectionError());
readonly connectionErrorMessage = computed(() => this._connectionErrorMessage());
@@ -93,6 +101,8 @@ export class WebRTCService implements OnDestroy {
if (this._isVoiceConnected() && this._connectedPeers().length > 0) return false;
return true;
});
/** Per-peer latency map (ms). Read via `peerLatencies()`. */
readonly peerLatencies = computed(() => this._peerLatencies());
private readonly signalingMessage$ = new Subject<SignalingMessage>();
readonly onSignalingMessage = this.signalingMessage$.asObservable();
@@ -184,6 +194,12 @@ export class WebRTCService implements OnDestroy {
this.mediaManager.voiceConnected$.subscribe(() => {
this._isVoiceConnected.set(true);
});
// Peer manager → latency updates
this.peerManager.peerLatencyChanged$.subscribe(({ peerId, latencyMs }) => {
const next = new Map(this.peerManager.peerLatencies);
this._peerLatencies.set(next);
});
}
private handleSignalingMessage(message: any): void {
@@ -505,8 +521,8 @@ export class WebRTCService implements OnDestroy {
*
* @param stream - The media stream to use.
*/
setLocalStream(stream: MediaStream): void {
this.mediaManager.setLocalStream(stream);
async setLocalStream(stream: MediaStream): Promise<void> {
await this.mediaManager.setLocalStream(stream);
this.syncMediaSignals();
}
@@ -530,6 +546,20 @@ export class WebRTCService implements OnDestroy {
this._isDeafened.set(this.mediaManager.getIsSelfDeafened());
}
/**
* Toggle RNNoise noise reduction on the local microphone.
*
* When enabled, the raw mic audio is routed through an AudioWorklet
* that applies neural-network noise suppression before being sent
* to peers.
*
* @param enabled - Explicit state; if omitted, the current state is toggled.
*/
async toggleNoiseReduction(enabled?: boolean): Promise<void> {
await this.mediaManager.toggleNoiseReduction(enabled);
this._isNoiseReductionEnabled.set(this.mediaManager.getIsNoiseReductionEnabled());
}
/**
* Set the output volume for remote audio playback.
*

View File

@@ -11,3 +11,4 @@ export * from './signaling.manager';
export * from './peer-connection.manager';
export * from './media.manager';
export * from './screen-share.manager';
export * from './noise-reduction.manager';

View File

@@ -1,10 +1,12 @@
/**
* Manages local voice media: getUserMedia, mute, deafen,
* attaching/detaching audio tracks to peer connections, and bitrate tuning.
* attaching/detaching audio tracks to peer connections, bitrate tuning,
* and optional RNNoise-based noise reduction.
*/
import { Subject } from 'rxjs';
import { WebRTCLogger } from './webrtc-logger';
import { PeerData } from './webrtc.types';
import { NoiseReductionManager } from './noise-reduction.manager';
import {
TRACK_KIND_AUDIO,
TRACK_KIND_VIDEO,
@@ -39,9 +41,16 @@ export interface MediaManagerCallbacks {
}
export class MediaManager {
/** The current local media stream (mic audio). */
/** The stream sent to peers (may be raw or denoised). */
private localMediaStream: MediaStream | null = null;
/**
* The raw microphone stream from `getUserMedia`.
* Kept separately so noise reduction can be toggled
* without re-acquiring the mic.
*/
private rawMicStream: MediaStream | null = null;
/** Remote audio output volume (0-1). */
private remoteAudioVolume = VOLUME_MAX;
@@ -51,6 +60,16 @@ export class MediaManager {
/** Emitted when voice is successfully connected. */
readonly voiceConnected$ = new Subject<void>();
/** RNNoise noise-reduction processor. */
private readonly noiseReduction: NoiseReductionManager;
/**
* Tracks the user's *desired* noise-reduction state, independent of
* whether the worklet is actually running. This lets us honour the
* preference even when it is set before the mic stream is acquired.
*/
private _noiseReductionDesired = false;
// State tracked locally (the service exposes these via signals)
private isVoiceActive = false;
private isMicMuted = false;
@@ -64,7 +83,9 @@ export class MediaManager {
constructor(
private readonly logger: WebRTCLogger,
private callbacks: MediaManagerCallbacks,
) {}
) {
this.noiseReduction = new NoiseReductionManager(logger);
}
/**
* Replace the callback set at runtime.
@@ -77,19 +98,37 @@ export class MediaManager {
}
/** Returns the current local media stream, or `null` if voice is disabled. */
getLocalStream(): MediaStream | null { return this.localMediaStream; }
getLocalStream(): MediaStream | null {
return this.localMediaStream;
}
/** Whether voice is currently active (mic captured). */
getIsVoiceActive(): boolean { return this.isVoiceActive; }
getIsVoiceActive(): boolean {
return this.isVoiceActive;
}
/** Whether the local microphone is muted. */
getIsMicMuted(): boolean { return this.isMicMuted; }
getIsMicMuted(): boolean {
return this.isMicMuted;
}
/** Whether the user has self-deafened. */
getIsSelfDeafened(): boolean { return this.isSelfDeafened; }
getIsSelfDeafened(): boolean {
return this.isSelfDeafened;
}
/** Current remote audio output volume (normalised 0–1). */
getRemoteAudioVolume(): number { return this.remoteAudioVolume; }
getRemoteAudioVolume(): number {
return this.remoteAudioVolume;
}
/** The voice channel room ID, if currently in voice. */
getCurrentVoiceRoomId(): string | undefined { return this.currentVoiceRoomId; }
getCurrentVoiceRoomId(): string | undefined {
return this.currentVoiceRoomId;
}
/** The voice channel server ID, if currently in voice. */
getCurrentVoiceServerId(): string | undefined { return this.currentVoiceServerId; }
getCurrentVoiceServerId(): string | undefined {
return this.currentVoiceServerId;
}
/** Whether the user wants noise reduction (may or may not be running yet). */
getIsNoiseReductionEnabled(): boolean {
return this._noiseReductionDesired;
}
/**
* Request microphone access via `getUserMedia` and bind the resulting
@@ -123,14 +162,19 @@ export class MediaManager {
throw new Error(
'navigator.mediaDevices is not available. ' +
'This requires a secure context (HTTPS or localhost). ' +
'If accessing from an external device, use HTTPS.'
'If accessing from an external device, use HTTPS.',
);
}
const stream = await navigator.mediaDevices.getUserMedia(mediaConstraints);
this.rawMicStream = stream;
this.localMediaStream = stream;
this.logger.logStream('localVoice', stream);
// If the user wants noise reduction, pipe through the denoiser
this.localMediaStream = this._noiseReductionDesired
? await this.noiseReduction.enable(stream)
: stream;
this.logger.logStream('localVoice', this.localMediaStream);
this.bindLocalTracksToAllPeers();
@@ -148,15 +192,20 @@ export class MediaManager {
* The peer connections themselves are kept alive.
*/
disableVoice(): void {
if (this.localMediaStream) {
this.localMediaStream.getTracks().forEach((track) => track.stop());
this.localMediaStream = null;
this.noiseReduction.disable();
// Stop the raw mic tracks (the denoised stream's tracks are
// derived nodes and will stop once their source is gone).
if (this.rawMicStream) {
this.rawMicStream.getTracks().forEach((track) => track.stop());
this.rawMicStream = null;
}
this.localMediaStream = null;
// Remove audio senders but keep connections alive
this.callbacks.getActivePeers().forEach((peerData) => {
const senders = peerData.connection.getSenders();
senders.forEach(sender => {
senders.forEach((sender) => {
if (sender.track?.kind === TRACK_KIND_AUDIO) {
peerData.connection.removeTrack(sender);
}
@@ -168,9 +217,25 @@ export class MediaManager {
this.currentVoiceServerId = undefined;
}
/** Set the local stream from an external source (e.g. voice-controls component). */
setLocalStream(stream: MediaStream): void {
/**
* Set the local stream from an external source (e.g. voice-controls component).
*
* The raw stream is saved so noise reduction can be toggled on/off later.
* If noise reduction is already enabled the stream is piped through the
* denoiser before being sent to peers.
*/
async setLocalStream(stream: MediaStream): Promise<void> {
this.rawMicStream = stream;
this.logger.info('setLocalStream — noiseReductionDesired =', this._noiseReductionDesired);
// Pipe through the denoiser when the user wants noise reduction
if (this._noiseReductionDesired) {
this.logger.info('Piping new stream through noise reduction');
this.localMediaStream = await this.noiseReduction.enable(stream);
} else {
this.localMediaStream = stream;
}
this.bindLocalTracksToAllPeers();
this.isVoiceActive = true;
this.voiceConnected$.next();
@@ -185,7 +250,9 @@ export class MediaManager {
if (this.localMediaStream) {
const audioTracks = this.localMediaStream.getAudioTracks();
const newMutedState = muted !== undefined ? muted : !this.isMicMuted;
audioTracks.forEach((track) => { track.enabled = !newMutedState; });
audioTracks.forEach((track) => {
track.enabled = !newMutedState;
});
this.isMicMuted = newMutedState;
}
}
@@ -199,6 +266,50 @@ export class MediaManager {
this.isSelfDeafened = deafened !== undefined ? deafened : !this.isSelfDeafened;
}
/**
* Toggle RNNoise noise reduction on the local microphone.
*
* When enabled the raw mic stream is routed through the RNNoise
* AudioWorklet and peer senders are updated with the denoised track.
* When disabled the original raw mic track is restored.
*
* @param enabled - Explicit state; if omitted, the current state is toggled.
*/
async toggleNoiseReduction(enabled?: boolean): Promise<void> {
  const shouldEnable = enabled !== undefined ? enabled : !this._noiseReductionDesired;
  // Always persist the preference, even when we cannot act on it yet —
  // enableVoice()/setLocalStream() consult this flag once a mic stream exists.
  this._noiseReductionDesired = shouldEnable;
  this.logger.info(
    'Noise reduction desired =',
    shouldEnable,
    '| worklet active =',
    this.noiseReduction.isEnabled,
  );
  // Desired state already matches the running worklet — nothing to change.
  if (shouldEnable === this.noiseReduction.isEnabled) return;
  if (shouldEnable) {
    if (!this.rawMicStream) {
      // No mic captured yet; the persisted flag above makes the
      // denoiser kick in when the stream is acquired.
      this.logger.warn(
        'Cannot enable noise reduction — no mic stream yet (will apply on connect)',
      );
      return;
    }
    this.logger.info('Enabling noise reduction on raw mic stream');
    const cleanStream = await this.noiseReduction.enable(this.rawMicStream);
    this.localMediaStream = cleanStream;
  } else {
    this.noiseReduction.disable();
    // Fall back to the untouched raw mic stream kept aside for this purpose.
    if (this.rawMicStream) {
      this.localMediaStream = this.rawMicStream;
    }
  }
  // Propagate the new audio track to every peer connection
  this.bindLocalTracksToAllPeers();
}
/**
* Set the output volume for remote audio.
*
@@ -217,15 +328,25 @@ export class MediaManager {
* @param kbps - Target bitrate in kilobits per second.
*/
async setAudioBitrate(kbps: number): Promise<void> {
const targetBps = Math.max(AUDIO_BITRATE_MIN_BPS, Math.min(AUDIO_BITRATE_MAX_BPS, Math.floor(kbps * KBPS_TO_BPS)));
const targetBps = Math.max(
AUDIO_BITRATE_MIN_BPS,
Math.min(AUDIO_BITRATE_MAX_BPS, Math.floor(kbps * KBPS_TO_BPS)),
);
this.callbacks.getActivePeers().forEach(async (peerData) => {
const sender = peerData.audioSender || peerData.connection.getSenders().find(s => s.track?.kind === TRACK_KIND_AUDIO);
const sender =
peerData.audioSender ||
peerData.connection.getSenders().find((s) => s.track?.kind === TRACK_KIND_AUDIO);
if (!sender?.track) return;
if (peerData.connection.signalingState !== 'stable') return;
let params: RTCRtpSendParameters;
try { params = sender.getParameters(); } catch (error) { this.logger.warn('getParameters failed; skipping bitrate apply', error as any); return; }
try {
params = sender.getParameters();
} catch (error) {
this.logger.warn('getParameters failed; skipping bitrate apply', error as any);
return;
}
params.encodings = params.encodings || [{}];
params.encodings[0].maxBitrate = targetBps;
@@ -296,36 +417,58 @@ export class MediaManager {
peers.forEach((peerData, peerId) => {
if (localAudioTrack) {
let audioSender = peerData.audioSender || peerData.connection.getSenders().find(s => s.track?.kind === TRACK_KIND_AUDIO);
let audioSender =
peerData.audioSender ||
peerData.connection.getSenders().find((s) => s.track?.kind === TRACK_KIND_AUDIO);
if (!audioSender) {
audioSender = peerData.connection.addTransceiver(TRACK_KIND_AUDIO, { direction: TRANSCEIVER_SEND_RECV }).sender;
audioSender = peerData.connection.addTransceiver(TRACK_KIND_AUDIO, {
direction: TRANSCEIVER_SEND_RECV,
}).sender;
}
peerData.audioSender = audioSender;
// Restore direction after removeTrack (which sets it to recvonly)
const audioTransceiver = peerData.connection.getTransceivers().find(t => t.sender === audioSender);
if (audioTransceiver && (audioTransceiver.direction === TRANSCEIVER_RECV_ONLY || audioTransceiver.direction === TRANSCEIVER_INACTIVE)) {
const audioTransceiver = peerData.connection
.getTransceivers()
.find((t) => t.sender === audioSender);
if (
audioTransceiver &&
(audioTransceiver.direction === TRANSCEIVER_RECV_ONLY ||
audioTransceiver.direction === TRANSCEIVER_INACTIVE)
) {
audioTransceiver.direction = TRANSCEIVER_SEND_RECV;
}
audioSender.replaceTrack(localAudioTrack)
audioSender
.replaceTrack(localAudioTrack)
.then(() => this.logger.info('audio replaceTrack ok', { peerId }))
.catch((e) => this.logger.error('audio replaceTrack failed', e));
}
if (localVideoTrack) {
let videoSender = peerData.videoSender || peerData.connection.getSenders().find(s => s.track?.kind === TRACK_KIND_VIDEO);
let videoSender =
peerData.videoSender ||
peerData.connection.getSenders().find((s) => s.track?.kind === TRACK_KIND_VIDEO);
if (!videoSender) {
videoSender = peerData.connection.addTransceiver(TRACK_KIND_VIDEO, { direction: TRANSCEIVER_SEND_RECV }).sender;
videoSender = peerData.connection.addTransceiver(TRACK_KIND_VIDEO, {
direction: TRANSCEIVER_SEND_RECV,
}).sender;
}
peerData.videoSender = videoSender;
const videoTransceiver = peerData.connection.getTransceivers().find(t => t.sender === videoSender);
if (videoTransceiver && (videoTransceiver.direction === TRANSCEIVER_RECV_ONLY || videoTransceiver.direction === TRANSCEIVER_INACTIVE)) {
const videoTransceiver = peerData.connection
.getTransceivers()
.find((t) => t.sender === videoSender);
if (
videoTransceiver &&
(videoTransceiver.direction === TRANSCEIVER_RECV_ONLY ||
videoTransceiver.direction === TRANSCEIVER_INACTIVE)
) {
videoTransceiver.direction = TRANSCEIVER_SEND_RECV;
}
videoSender.replaceTrack(localVideoTrack)
videoSender
.replaceTrack(localVideoTrack)
.then(() => this.logger.info('video replaceTrack ok', { peerId }))
.catch((e) => this.logger.error('video replaceTrack failed', e));
}
@@ -356,6 +499,7 @@ export class MediaManager {
destroy(): void {
this.disableVoice();
this.stopVoiceHeartbeat();
this.noiseReduction.destroy();
this.voiceConnected$.complete();
}
}

View File

@@ -0,0 +1,203 @@
/**
* Manages RNNoise-based noise reduction for microphone audio.
*
* Uses the `@timephy/rnnoise-wasm` AudioWorklet to process the raw
* microphone stream through a neural-network noise gate, producing
* a clean output stream that can be sent to peers instead.
*
* Architecture:
* raw mic → AudioContext.createMediaStreamSource
* → NoiseSuppressorWorklet (AudioWorkletNode)
* → MediaStreamDestination → clean MediaStream
*
* The manager is intentionally stateless w.r.t. Angular signals;
* the owning MediaManager / WebRTCService drives signals.
*/
import { WebRTCLogger } from './webrtc-logger';
/**
 * Name used to register / instantiate the AudioWorklet processor.
 * Must match the processor name registered by the bundled worklet
 * script — presumably `@timephy/rnnoise-wasm`'s default; confirm if
 * the bundle step changes.
 */
const WORKLET_PROCESSOR_NAME = 'NoiseSuppressorWorklet';
/** RNNoise is trained on 48 kHz audio — the AudioContext must match. */
const RNNOISE_SAMPLE_RATE = 48_000;
/**
 * Relative path (from the served application root) to the **bundled**
 * worklet script placed in `public/` and served as a static asset
 * (see the `bundle:rnnoise` npm script).
 */
const WORKLET_MODULE_PATH = 'rnnoise-worklet.js';
export class NoiseReductionManager {
  /** The AudioContext used for the noise-reduction graph. */
  private audioContext: AudioContext | null = null;

  /** Source node wrapping the raw microphone stream. */
  private sourceNode: MediaStreamAudioSourceNode | null = null;

  /** The RNNoise AudioWorklet node. */
  private workletNode: AudioWorkletNode | null = null;

  /** Destination node that exposes the cleaned stream. */
  private destinationNode: MediaStreamAudioDestinationNode | null = null;

  /** Whether the worklet module has been loaded into the current AudioContext. */
  private workletLoaded = false;

  /** Whether noise reduction is currently active. */
  private _isEnabled = false;

  constructor(private readonly logger: WebRTCLogger) {}

  // ─── Public API ──────────────────────────────────────────────────

  /** Whether noise reduction is currently active. */
  get isEnabled(): boolean {
    return this._isEnabled;
  }

  /**
   * Enable noise reduction on a raw microphone stream.
   *
   * Builds the AudioWorklet processing graph and returns a new
   * {@link MediaStream} whose audio has been denoised.
   *
   * If noise reduction is already running, the given stream is piped
   * through the existing graph instead (see {@link replaceInputStream}),
   * so a changed microphone stream is never silently ignored.
   *
   * If the worklet cannot be loaded (e.g. unsupported browser),
   * the original stream is returned unchanged and an error is logged.
   *
   * @param rawStream - The raw `getUserMedia` microphone stream.
   * @returns A denoised {@link MediaStream}, or the original if setup fails.
   */
  async enable(rawStream: MediaStream): Promise<MediaStream> {
    if (this._isEnabled && this.destinationNode) {
      // Fix: previously this returned the existing clean stream, which is
      // still fed by the *previous* source node — a caller passing a new
      // mic stream (e.g. MediaManager.setLocalStream while enabled) would
      // have its new audio silently dropped. Re-pipe the input instead.
      this.logger.info('Noise reduction already enabled — re-piping input stream');
      return this.replaceInputStream(rawStream);
    }
    try {
      await this.buildProcessingGraph(rawStream);
      this._isEnabled = true;
      this.logger.info('Noise reduction enabled');
      // destinationNode is always assigned by buildProcessingGraph on success.
      return this.destinationNode!.stream;
    } catch (err) {
      this.logger.error('Failed to enable noise reduction, returning raw stream', err);
      this.teardownGraph();
      return rawStream;
    }
  }

  /**
   * Disable noise reduction and tear down the processing graph.
   *
   * After calling this, the original raw microphone stream should be
   * used again (the caller is responsible for re-binding tracks).
   */
  disable(): void {
    if (!this._isEnabled) return;
    this.teardownGraph();
    this._isEnabled = false;
    this.logger.info('Noise reduction disabled');
  }

  /**
   * Re-pipe a new raw stream through the existing noise-reduction graph.
   *
   * Useful when the microphone device changes but noise reduction
   * should stay active.
   *
   * @param rawStream - The new raw microphone stream.
   * @returns The denoised stream, or the raw stream on failure.
   */
  async replaceInputStream(rawStream: MediaStream): Promise<MediaStream> {
    if (!this._isEnabled) return rawStream;
    try {
      // Validate the graph BEFORE disconnecting the old source, so a
      // failure here cannot leave the graph half-torn-down.
      if (!this.audioContext || !this.workletNode || !this.destinationNode) {
        throw new Error('Processing graph not initialised');
      }
      this.sourceNode?.disconnect();
      this.sourceNode = this.audioContext.createMediaStreamSource(rawStream);
      this.sourceNode.connect(this.workletNode);
      this.logger.info('Noise reduction input stream replaced');
      return this.destinationNode.stream;
    } catch (err) {
      this.logger.error('Failed to replace noise reduction input', err);
      return rawStream;
    }
  }

  /** Clean up all resources. Safe to call multiple times. */
  destroy(): void {
    this.disable();
    this.audioContext = null;
    this.workletLoaded = false;
  }

  // ─── Internals ───────────────────────────────────────────────────

  /**
   * Build the AudioWorklet processing graph:
   *   rawStream → source → workletNode → destination
   *
   * @param rawStream - The raw microphone stream to denoise.
   * @throws If the worklet module cannot be loaded or the graph
   *   cannot be constructed (caller handles fallback).
   */
  private async buildProcessingGraph(rawStream: MediaStream): Promise<void> {
    // Reuse or create the AudioContext (must be 48 kHz for RNNoise)
    if (!this.audioContext || this.audioContext.state === 'closed') {
      this.audioContext = new AudioContext({ sampleRate: RNNOISE_SAMPLE_RATE });
      // A fresh context needs the worklet module loaded again.
      this.workletLoaded = false;
    }
    // Resume if suspended (browsers auto-suspend until user gesture)
    if (this.audioContext.state === 'suspended') {
      await this.audioContext.resume();
    }
    // Load the worklet module once per AudioContext lifetime
    if (!this.workletLoaded) {
      await this.audioContext.audioWorklet.addModule(WORKLET_MODULE_PATH);
      this.workletLoaded = true;
      this.logger.info('RNNoise worklet module loaded');
    }
    // Build the node graph
    this.sourceNode = this.audioContext.createMediaStreamSource(rawStream);
    this.workletNode = new AudioWorkletNode(this.audioContext, WORKLET_PROCESSOR_NAME);
    this.destinationNode = this.audioContext.createMediaStreamDestination();
    this.sourceNode.connect(this.workletNode).connect(this.destinationNode);
  }

  /** Disconnect and release all graph nodes. */
  private teardownGraph(): void {
    try {
      this.sourceNode?.disconnect();
    } catch {
      /* already disconnected */
    }
    try {
      this.workletNode?.disconnect();
    } catch {
      /* already disconnected */
    }
    try {
      this.destinationNode?.disconnect();
    } catch {
      /* already disconnected */
    }
    this.sourceNode = null;
    this.workletNode = null;
    this.destinationNode = null;
    // Close the context to free hardware resources
    if (this.audioContext && this.audioContext.state !== 'closed') {
      this.audioContext.close().catch(() => {
        /* best-effort */
      });
    }
    this.audioContext = null;
    this.workletLoaded = false;
  }
}

View File

@@ -31,6 +31,9 @@ import {
P2P_TYPE_VOICE_STATE_REQUEST,
P2P_TYPE_VOICE_STATE,
P2P_TYPE_SCREEN_STATE,
P2P_TYPE_PING,
P2P_TYPE_PONG,
PEER_PING_INTERVAL_MS,
SIGNALING_TYPE_OFFER,
SIGNALING_TYPE_ANSWER,
SIGNALING_TYPE_ICE_CANDIDATE,
@@ -69,6 +72,15 @@ export class PeerConnectionManager {
private disconnectedPeerTracker = new Map<string, DisconnectedPeerEntry>();
private peerReconnectTimers = new Map<string, ReturnType<typeof setInterval>>();
/** Pending ping timestamps keyed by peer ID. */
private readonly pendingPings = new Map<string, number>();
/** Per-peer ping interval timers. */
private readonly peerPingTimers = new Map<string, ReturnType<typeof setInterval>>();
/** Last measured latency (ms) per peer. */
readonly peerLatencies = new Map<string, number>();
/** Emitted whenever a peer latency value changes. */
readonly peerLatencyChanged$ = new Subject<{ peerId: string; latencyMs: number }>();
/**
* Per-peer promise chain that serialises all SDP operations
* (handleOffer, handleAnswer, renegotiate) so they never run
@@ -544,6 +556,7 @@ export class PeerConnectionManager {
} catch {
/* ignore */
}
this.startPingInterval(remotePeerId);
};
channel.onclose = () => {
@@ -582,6 +595,22 @@ export class PeerConnectionManager {
return;
}
// Ping/pong latency measurement handled internally, not forwarded
if (message.type === P2P_TYPE_PING) {
this.sendToPeer(peerId, { type: P2P_TYPE_PONG, ts: message.ts } as any);
return;
}
if (message.type === P2P_TYPE_PONG) {
const sent = this.pendingPings.get(peerId);
if (sent && typeof message.ts === 'number' && message.ts === sent) {
const latencyMs = Math.round(performance.now() - sent);
this.peerLatencies.set(peerId, latencyMs);
this.peerLatencyChanged$.next({ peerId, latencyMs });
}
this.pendingPings.delete(peerId);
return;
}
const enriched = { ...message, fromPeerId: peerId };
this.messageReceived$.next(enriched);
}
@@ -782,6 +811,9 @@ export class PeerConnectionManager {
this.activePeerConnections.delete(peerId);
this.peerNegotiationQueue.delete(peerId);
this.removeFromConnectedPeers(peerId);
this.stopPingInterval(peerId);
this.peerLatencies.delete(peerId);
this.pendingPings.delete(peerId);
this.peerDisconnected$.next(peerId);
}
}
@@ -789,12 +821,15 @@ export class PeerConnectionManager {
/** Close every active peer connection and clear internal state. */
closeAllPeers(): void {
this.clearAllPeerReconnectTimers();
this.clearAllPingTimers();
this.activePeerConnections.forEach((peerData) => {
if (peerData.dataChannel) peerData.dataChannel.close();
peerData.connection.close();
});
this.activePeerConnections.clear();
this.peerNegotiationQueue.clear();
this.peerLatencies.clear();
this.pendingPings.clear();
this.connectedPeersChanged$.next([]);
}
@@ -909,6 +944,48 @@ export class PeerConnectionManager {
this.connectedPeersChanged$.next([]);
}
// ═══════════════════════════════════════════════════════════════════
// Ping / Latency helpers
// ═══════════════════════════════════════════════════════════════════
/**
 * Start periodic pings to a peer to measure round-trip latency.
 * Any existing timer for the peer is cleared first, so repeated calls
 * (e.g. on data-channel reopen) never stack intervals.
 */
private startPingInterval(peerId: string): void {
  this.stopPingInterval(peerId);
  // Send an immediate ping so the first latency sample does not have
  // to wait a full PEER_PING_INTERVAL_MS.
  this.sendPing(peerId);
  const timer = setInterval(() => this.sendPing(peerId), PEER_PING_INTERVAL_MS);
  this.peerPingTimers.set(peerId, timer);
}
/** Stop the periodic ping for a specific peer. */
private stopPingInterval(peerId: string): void {
const timer = this.peerPingTimers.get(peerId);
if (timer) {
clearInterval(timer);
this.peerPingTimers.delete(peerId);
}
}
/** Cancel all active ping timers. */
private clearAllPingTimers(): void {
this.peerPingTimers.forEach((timer) => clearInterval(timer));
this.peerPingTimers.clear();
}
/**
 * Send a single ping to a peer over its data channel.
 * No-op when the channel is missing or not open. Overwrites any
 * still-pending ping timestamp for the peer, so only the most recent
 * round-trip is ever measured.
 */
private sendPing(peerId: string): void {
  const peerData = this.activePeerConnections.get(peerId);
  if (!peerData?.dataChannel || peerData.dataChannel.readyState !== DATA_CHANNEL_STATE_OPEN)
    return;
  // performance.now() timestamp doubles as the ping's correlation ID.
  const ts = performance.now();
  this.pendingPings.set(peerId, ts);
  try {
    peerData.dataChannel.send(JSON.stringify({ type: P2P_TYPE_PING, ts }));
  } catch {
    /* ignore — best-effort send */
  }
}
/** Clean up all resources. */
destroy(): void {
this.closeAllPeers();
@@ -917,5 +994,6 @@ export class PeerConnectionManager {
this.remoteStream$.complete();
this.messageReceived$.complete();
this.connectedPeersChanged$.complete();
this.peerLatencyChanged$.complete();
}
}

View File

@@ -84,6 +84,11 @@ export const P2P_TYPE_STATE_REQUEST = 'state-request';
export const P2P_TYPE_VOICE_STATE_REQUEST = 'voice-state-request';
export const P2P_TYPE_VOICE_STATE = 'voice-state';
export const P2P_TYPE_SCREEN_STATE = 'screen-state';
export const P2P_TYPE_PING = 'ping';
export const P2P_TYPE_PONG = 'pong';
/** Interval (ms) between peer latency pings */
export const PEER_PING_INTERVAL_MS = 5_000;
/** Default display name fallback */
export const DEFAULT_DISPLAY_NAME = 'User';

View File

@@ -26,7 +26,9 @@
>
<ng-icon name="lucideUsers" class="w-4 h-4" />
<span>Users</span>
<span class="text-xs px-1.5 py-0.5 rounded-full bg-primary/15 text-primary">{{ onlineUsers().length }}</span>
<span class="text-xs px-1.5 py-0.5 rounded-full bg-primary/15 text-primary">{{
onlineUsers().length
}}</span>
</button>
</div>
</div>
@@ -37,9 +39,15 @@
<!-- Text Channels -->
<div class="p-3">
<div class="flex items-center justify-between mb-2 px-1">
<h4 class="text-xs uppercase tracking-wide text-muted-foreground font-medium">Text Channels</h4>
<h4 class="text-xs uppercase tracking-wide text-muted-foreground font-medium">
Text Channels
</h4>
@if (canManageChannels()) {
<button (click)="createChannel('text')" class="text-muted-foreground hover:text-foreground transition-colors" title="Create Text Channel">
<button
(click)="createChannel('text')"
class="text-muted-foreground hover:text-foreground transition-colors"
title="Create Text Channel"
>
<ng-icon name="lucidePlus" class="w-3.5 h-3.5" />
</button>
}
@@ -80,9 +88,15 @@
<!-- Voice Channels -->
<div class="p-3 pt-0">
<div class="flex items-center justify-between mb-2 px-1">
<h4 class="text-xs uppercase tracking-wide text-muted-foreground font-medium">Voice Channels</h4>
<h4 class="text-xs uppercase tracking-wide text-muted-foreground font-medium">
Voice Channels
</h4>
@if (canManageChannels()) {
<button (click)="createChannel('voice')" class="text-muted-foreground hover:text-foreground transition-colors" title="Create Voice Channel">
<button
(click)="createChannel('voice')"
class="text-muted-foreground hover:text-foreground transition-colors"
title="Create Voice Channel"
>
<ng-icon name="lucidePlus" class="w-3.5 h-3.5" />
</button>
}
@@ -130,9 +144,29 @@
[name]="u.displayName"
[avatarUrl]="u.avatarUrl"
size="xs"
[ringClass]="u.voiceState?.isDeafened ? 'ring-2 ring-red-500' : u.voiceState?.isMuted ? 'ring-2 ring-yellow-500' : voiceActivity.isSpeaking(u.oderId || u.id)() ? 'ring-2 ring-green-400 shadow-[0_0_8px_2px_rgba(74,222,128,0.6)]' : 'ring-2 ring-green-500/40'"
[ringClass]="
u.voiceState?.isDeafened
? 'ring-2 ring-red-500'
: u.voiceState?.isMuted
? 'ring-2 ring-yellow-500'
: voiceActivity.isSpeaking(u.oderId || u.id)()
? 'ring-2 ring-green-400 shadow-[0_0_8px_2px_rgba(74,222,128,0.6)]'
: 'ring-2 ring-green-500/40'
"
/>
<span class="text-sm text-foreground/80 truncate flex-1">{{ u.displayName }}</span>
<span class="text-sm text-foreground/80 truncate flex-1">{{
u.displayName
}}</span>
<!-- Ping latency indicator -->
@if (u.id !== currentUser()?.id) {
<span
class="w-2 h-2 rounded-full shrink-0"
[class]="getPingColorClass(u)"
[title]="
getPeerLatency(u) !== null ? getPeerLatency(u) + ' ms' : 'Measuring...'
"
></span>
}
@if (u.screenShareState?.isSharing || isUserSharing(u.id)) {
<button
(click)="viewStream(u.id); $event.stopPropagation()"
@@ -161,11 +195,19 @@
<!-- Current User (You) -->
@if (currentUser()) {
<div class="mb-4">
<h4 class="text-xs uppercase tracking-wide text-muted-foreground font-medium mb-2 px-1">You</h4>
<h4 class="text-xs uppercase tracking-wide text-muted-foreground font-medium mb-2 px-1">
You
</h4>
<div class="flex items-center gap-2 px-2 py-1.5 rounded bg-secondary/30">
<div class="relative">
<app-user-avatar [name]="currentUser()?.displayName || '?'" [avatarUrl]="currentUser()?.avatarUrl" size="sm" />
<span class="absolute bottom-0 right-0 w-2.5 h-2.5 rounded-full bg-green-500 ring-2 ring-card"></span>
<app-user-avatar
[name]="currentUser()?.displayName || '?'"
[avatarUrl]="currentUser()?.avatarUrl"
size="sm"
/>
<span
class="absolute bottom-0 right-0 w-2.5 h-2.5 rounded-full bg-green-500 ring-2 ring-card"
></span>
</div>
<div class="flex-1 min-w-0">
<p class="text-sm text-foreground truncate">{{ currentUser()?.displayName }}</p>
@@ -176,8 +218,13 @@
In voice
</p>
}
@if (currentUser()?.screenShareState?.isSharing || (currentUser()?.id && isUserSharing(currentUser()!.id))) {
<span class="text-[10px] bg-red-500 text-white px-1.5 py-0.5 rounded-sm font-medium flex items-center gap-1 animate-pulse">
@if (
currentUser()?.screenShareState?.isSharing ||
(currentUser()?.id && isUserSharing(currentUser()!.id))
) {
<span
class="text-[10px] bg-red-500 text-white px-1.5 py-0.5 rounded-sm font-medium flex items-center gap-1 animate-pulse"
>
<ng-icon name="lucideMonitor" class="w-2.5 h-2.5" />
LIVE
</span>
@@ -201,18 +248,33 @@
(contextmenu)="openUserContextMenu($event, user)"
>
<div class="relative">
<app-user-avatar [name]="user.displayName" [avatarUrl]="user.avatarUrl" size="sm" />
<span class="absolute bottom-0 right-0 w-2.5 h-2.5 rounded-full bg-green-500 ring-2 ring-card"></span>
<app-user-avatar
[name]="user.displayName"
[avatarUrl]="user.avatarUrl"
size="sm"
/>
<span
class="absolute bottom-0 right-0 w-2.5 h-2.5 rounded-full bg-green-500 ring-2 ring-card"
></span>
</div>
<div class="flex-1 min-w-0">
<div class="flex items-center gap-1.5">
<p class="text-sm text-foreground truncate">{{ user.displayName }}</p>
@if (user.role === 'host') {
<span class="text-[10px] bg-yellow-500/20 text-yellow-400 px-1 py-0.5 rounded font-medium">Owner</span>
<span
class="text-[10px] bg-yellow-500/20 text-yellow-400 px-1 py-0.5 rounded font-medium"
>Owner</span
>
} @else if (user.role === 'admin') {
<span class="text-[10px] bg-blue-500/20 text-blue-400 px-1 py-0.5 rounded font-medium">Admin</span>
<span
class="text-[10px] bg-blue-500/20 text-blue-400 px-1 py-0.5 rounded font-medium"
>Admin</span
>
} @else if (user.role === 'moderator') {
<span class="text-[10px] bg-green-500/20 text-green-400 px-1 py-0.5 rounded font-medium">Mod</span>
<span
class="text-[10px] bg-green-500/20 text-green-400 px-1 py-0.5 rounded font-medium"
>Mod</span
>
}
</div>
<div class="flex items-center gap-2">
@@ -258,7 +320,12 @@
<!-- Channel context menu -->
@if (showChannelMenu()) {
<app-context-menu [x]="channelMenuX()" [y]="channelMenuY()" (closed)="closeChannelMenu()" [width]="'w-44'">
<app-context-menu
[x]="channelMenuX()"
[y]="channelMenuY()"
(closed)="closeChannelMenu()"
[width]="'w-44'"
>
<button (click)="resyncMessages()" class="context-menu-item">Resync Messages</button>
@if (canManageChannels()) {
<div class="context-menu-divider"></div>
@@ -273,15 +340,25 @@
<app-context-menu [x]="userMenuX()" [y]="userMenuY()" (closed)="closeUserMenu()">
@if (isAdmin()) {
@if (contextMenuUser()?.role === 'member') {
<button (click)="changeUserRole('moderator')" class="context-menu-item">Promote to Moderator</button>
<button (click)="changeUserRole('admin')" class="context-menu-item">Promote to Admin</button>
<button (click)="changeUserRole('moderator')" class="context-menu-item">
Promote to Moderator
</button>
<button (click)="changeUserRole('admin')" class="context-menu-item">
Promote to Admin
</button>
}
@if (contextMenuUser()?.role === 'moderator') {
<button (click)="changeUserRole('admin')" class="context-menu-item">Promote to Admin</button>
<button (click)="changeUserRole('member')" class="context-menu-item">Demote to Member</button>
<button (click)="changeUserRole('admin')" class="context-menu-item">
Promote to Admin
</button>
<button (click)="changeUserRole('member')" class="context-menu-item">
Demote to Member
</button>
}
@if (contextMenuUser()?.role === 'admin') {
<button (click)="changeUserRole('member')" class="context-menu-item">Demote to Member</button>
<button (click)="changeUserRole('member')" class="context-menu-item">
Demote to Member
</button>
}
<div class="context-menu-divider"></div>
<button (click)="kickUserAction()" class="context-menu-item-danger">Kick User</button>

View File

@@ -3,9 +3,27 @@ import { CommonModule } from '@angular/common';
import { FormsModule } from '@angular/forms';
import { Store } from '@ngrx/store';
import { NgIcon, provideIcons } from '@ng-icons/core';
import { lucideMessageSquare, lucideMic, lucideMicOff, lucideChevronLeft, lucideMonitor, lucideHash, lucideUsers, lucidePlus } from '@ng-icons/lucide';
import { selectOnlineUsers, selectCurrentUser, selectIsCurrentUserAdmin } from '../../../store/users/users.selectors';
import { selectCurrentRoom, selectActiveChannelId, selectTextChannels, selectVoiceChannels } from '../../../store/rooms/rooms.selectors';
import {
lucideMessageSquare,
lucideMic,
lucideMicOff,
lucideChevronLeft,
lucideMonitor,
lucideHash,
lucideUsers,
lucidePlus,
} from '@ng-icons/lucide';
import {
selectOnlineUsers,
selectCurrentUser,
selectIsCurrentUserAdmin,
} from '../../../store/users/users.selectors';
import {
selectCurrentRoom,
selectActiveChannelId,
selectTextChannels,
selectVoiceChannels,
} from '../../../store/rooms/rooms.selectors';
import { UsersActions } from '../../../store/users/users.actions';
import { RoomsActions } from '../../../store/rooms/rooms.actions';
import { MessagesActions } from '../../../store/messages/messages.actions';
@@ -22,9 +40,26 @@ type TabView = 'channels' | 'users';
@Component({
selector: 'app-rooms-side-panel',
standalone: true,
imports: [CommonModule, FormsModule, NgIcon, VoiceControlsComponent, ContextMenuComponent, UserAvatarComponent, ConfirmDialogComponent],
imports: [
CommonModule,
FormsModule,
NgIcon,
VoiceControlsComponent,
ContextMenuComponent,
UserAvatarComponent,
ConfirmDialogComponent,
],
viewProviders: [
provideIcons({ lucideMessageSquare, lucideMic, lucideMicOff, lucideChevronLeft, lucideMonitor, lucideHash, lucideUsers, lucidePlus })
provideIcons({
lucideMessageSquare,
lucideMic,
lucideMicOff,
lucideChevronLeft,
lucideMonitor,
lucideHash,
lucideUsers,
lucidePlus,
}),
],
templateUrl: './rooms-side-panel.component.html',
})
@@ -73,7 +108,9 @@ export class RoomsSidePanelComponent {
const current = this.currentUser();
const currentId = current?.id;
const currentOderId = current?.oderId;
return this.onlineUsers().filter(user => user.id !== currentId && user.oderId !== currentOderId);
return this.onlineUsers().filter(
(user) => user.id !== currentId && user.oderId !== currentOderId,
);
}
/** Check whether the current user has permission to manage channels. */
@@ -264,10 +301,18 @@ export class RoomsSidePanelComponent {
if (!this.webrtc.isVoiceConnected()) {
// Stale state clear it so the user can proceed
if (current.id) {
this.store.dispatch(UsersActions.updateVoiceState({
this.store.dispatch(
UsersActions.updateVoiceState({
userId: current.id,
voiceState: { isConnected: false, isMuted: false, isDeafened: false, roomId: undefined, serverId: undefined }
}));
voiceState: {
isConnected: false,
isMuted: false,
isDeafened: false,
roomId: undefined,
serverId: undefined,
},
}),
);
}
} else {
// Already connected to voice in another server; must disconnect first
@@ -276,19 +321,29 @@ export class RoomsSidePanelComponent {
}
// If switching channels within the same server, just update the room
const isSwitchingChannels = current?.voiceState?.isConnected &&
const isSwitchingChannels =
current?.voiceState?.isConnected &&
current.voiceState.serverId === room?.id &&
current.voiceState.roomId !== roomId;
// Enable microphone and broadcast voice-state
const enableVoicePromise = isSwitchingChannels ? Promise.resolve() : this.webrtc.enableVoice();
enableVoicePromise.then(() => {
enableVoicePromise
.then(() => {
if (current?.id && room) {
this.store.dispatch(UsersActions.updateVoiceState({
this.store.dispatch(
UsersActions.updateVoiceState({
userId: current.id,
voiceState: { isConnected: true, isMuted: current.voiceState?.isMuted ?? false, isDeafened: current.voiceState?.isDeafened ?? false, roomId: roomId, serverId: room.id }
}));
voiceState: {
isConnected: true,
isMuted: current.voiceState?.isMuted ?? false,
isDeafened: current.voiceState?.isDeafened ?? false,
roomId: roomId,
serverId: room.id,
},
}),
);
}
// Start voice heartbeat to broadcast presence every 5 seconds
this.webrtc.startVoiceHeartbeat(roomId, room?.id);
@@ -296,13 +351,19 @@ export class RoomsSidePanelComponent {
type: 'voice-state',
oderId: current?.oderId || current?.id,
displayName: current?.displayName || 'User',
voiceState: { isConnected: true, isMuted: current?.voiceState?.isMuted ?? false, isDeafened: current?.voiceState?.isDeafened ?? false, roomId: roomId, serverId: room?.id }
voiceState: {
isConnected: true,
isMuted: current?.voiceState?.isMuted ?? false,
isDeafened: current?.voiceState?.isDeafened ?? false,
roomId: roomId,
serverId: room?.id,
},
});
// Update voice session for floating controls
if (room) {
// Find label from channel list
const voiceChannel = this.voiceChannels().find(channel => channel.id === roomId);
const voiceChannel = this.voiceChannels().find((channel) => channel.id === roomId);
const voiceRoomName = voiceChannel ? `🔊 ${voiceChannel.name}` : roomId;
this.voiceSessionService.startSession({
serverId: room.id,
@@ -314,7 +375,8 @@ export class RoomsSidePanelComponent {
serverRoute: `/room/${room.id}`,
});
}
}).catch((_error) => {
})
.catch((_error) => {
// Failed to join voice room
});
}
@@ -333,10 +395,18 @@ export class RoomsSidePanelComponent {
// Update store voice state
if (current?.id) {
this.store.dispatch(UsersActions.updateVoiceState({
this.store.dispatch(
UsersActions.updateVoiceState({
userId: current.id,
voiceState: { isConnected: false, isMuted: false, isDeafened: false, roomId: undefined, serverId: undefined }
}));
voiceState: {
isConnected: false,
isMuted: false,
isDeafened: false,
roomId: undefined,
serverId: undefined,
},
}),
);
}
// Broadcast disconnect
@@ -344,7 +414,13 @@ export class RoomsSidePanelComponent {
type: 'voice-state',
oderId: current?.oderId || current?.id,
displayName: current?.displayName || 'User',
voiceState: { isConnected: false, isMuted: false, isDeafened: false, roomId: undefined, serverId: undefined }
voiceState: {
isConnected: false,
isMuted: false,
isDeafened: false,
roomId: undefined,
serverId: undefined,
},
});
// End voice session
@@ -355,10 +431,11 @@ export class RoomsSidePanelComponent {
voiceOccupancy(roomId: string): number {
const users = this.onlineUsers();
const room = this.currentRoom();
return users.filter(user =>
return users.filter(
(user) =>
!!user.voiceState?.isConnected &&
user.voiceState?.roomId === roomId &&
user.voiceState?.serverId === room?.id
user.voiceState?.serverId === room?.id,
).length;
}
@@ -380,7 +457,9 @@ export class RoomsSidePanelComponent {
if (me?.id === userId) {
return this.webrtc.isScreenSharing();
}
const user = this.onlineUsers().find(onlineUser => onlineUser.id === userId || onlineUser.oderId === userId);
const user = this.onlineUsers().find(
(onlineUser) => onlineUser.id === userId || onlineUser.oderId === userId,
);
if (user?.screenShareState?.isSharing === false) {
return false;
}
@@ -391,10 +470,11 @@ export class RoomsSidePanelComponent {
/** Return all users currently connected to a specific voice channel. */
voiceUsersInRoom(roomId: string) {
const room = this.currentRoom();
return this.onlineUsers().filter(user =>
return this.onlineUsers().filter(
(user) =>
!!user.voiceState?.isConnected &&
user.voiceState?.roomId === roomId &&
user.voiceState?.serverId === room?.id
user.voiceState?.serverId === room?.id,
);
}
@@ -414,4 +494,31 @@ export class RoomsSidePanelComponent {
const room = this.currentRoom();
return room?.permissions?.allowVoice !== false;
}
/**
 * Look up the most recently measured round-trip latency (in ms) for a voice user.
 * Returns `null` until at least one ping/pong round-trip has completed for that peer.
 */
getPeerLatency(user: User): number | null {
  const measured = this.webrtc.peerLatencies();
  // The latency map is keyed by oderId (primary peer key) when available;
  // fall back to the plain user id for peers that joined without one.
  const byOderId = measured.get(user.oderId ?? '');
  if (byOderId !== undefined) {
    return byOderId;
  }
  return measured.get(user.id) ?? null;
}
/**
 * Map a user's measured latency to a Tailwind `bg-*` class for the ping dot.
 *
 * Thresholds:
 *   - below 100 ms   -> green
 *   - 100 to 199 ms  -> yellow
 *   - 200 to 349 ms  -> orange
 *   - 350 ms and up  -> red
 *   - no sample yet  -> gray
 */
getPingColorClass(user: User): string {
  const latency = this.getPeerLatency(user);
  if (latency === null) {
    return 'bg-gray-500';
  }
  if (latency < 100) {
    return 'bg-green-500';
  }
  if (latency < 200) {
    return 'bg-yellow-500';
  }
  return latency < 350 ? 'bg-orange-500' : 'bg-red-500';
}
}

View File

@@ -29,8 +29,8 @@
</div>
<p class="text-sm text-muted-foreground mb-4">
Add multiple server directories to search for rooms across different networks.
The active server will be used for creating and registering new rooms.
Add multiple server directories to search for rooms across different networks. The active
server will be used for creating and registering new rooms.
</p>
<!-- Server List -->
@@ -58,7 +58,9 @@
<div class="flex items-center gap-2">
<span class="font-medium text-foreground truncate">{{ server.name }}</span>
@if (server.isActive) {
<span class="text-xs bg-primary text-primary-foreground px-2 py-0.5 rounded-full">Active</span>
<span class="text-xs bg-primary text-primary-foreground px-2 py-0.5 rounded-full"
>Active</span
>
}
</div>
<p class="text-sm text-muted-foreground truncate">{{ server.url }}</p>
@@ -75,7 +77,10 @@
class="p-2 hover:bg-secondary rounded-lg transition-colors"
title="Set as active"
>
<ng-icon name="lucideCheck" class="w-4 h-4 text-muted-foreground hover:text-primary" />
<ng-icon
name="lucideCheck"
class="w-4 h-4 text-muted-foreground hover:text-primary"
/>
</button>
}
@if (!server.isDefault) {
@@ -84,7 +89,10 @@
class="p-2 hover:bg-destructive/10 rounded-lg transition-colors"
title="Remove server"
>
<ng-icon name="lucideTrash2" class="w-4 h-4 text-muted-foreground hover:text-destructive" />
<ng-icon
name="lucideTrash2"
class="w-4 h-4 text-muted-foreground hover:text-destructive"
/>
</button>
}
</div>
@@ -125,7 +133,7 @@
</div>
<!-- Connection Settings -->
<div class="bg-card border border-border rounded-lg p-6">
<div class="bg-card border border-border rounded-lg p-6 mb-6">
<div class="flex items-center gap-2 mb-4">
<ng-icon name="lucideServer" class="w-5 h-5 text-muted-foreground" />
<h2 class="text-lg font-semibold text-foreground">Connection Settings</h2>
@@ -135,7 +143,9 @@
<div class="flex items-center justify-between">
<div>
<p class="font-medium text-foreground">Auto-reconnect</p>
<p class="text-sm text-muted-foreground">Automatically reconnect when connection is lost</p>
<p class="text-sm text-muted-foreground">
Automatically reconnect when connection is lost
</p>
</div>
<label class="relative inline-flex items-center cursor-pointer">
<input
@@ -144,14 +154,18 @@
(change)="saveConnectionSettings()"
class="sr-only peer"
/>
<div class="w-11 h-6 bg-secondary rounded-full peer peer-checked:bg-primary peer-checked:after:translate-x-full after:content-[''] after:absolute after:top-0.5 after:left-[2px] after:bg-white after:rounded-full after:h-5 after:w-5 after:transition-all"></div>
<div
class="w-11 h-6 bg-secondary rounded-full peer peer-checked:bg-primary peer-checked:after:translate-x-full after:content-[''] after:absolute after:top-0.5 after:left-[2px] after:bg-white after:rounded-full after:h-5 after:w-5 after:transition-all"
></div>
</label>
</div>
<div class="flex items-center justify-between">
<div>
<p class="font-medium text-foreground">Search all servers</p>
<p class="text-sm text-muted-foreground">Search across all configured server directories</p>
<p class="text-sm text-muted-foreground">
Search across all configured server directories
</p>
</div>
<label class="relative inline-flex items-center cursor-pointer">
<input
@@ -160,7 +174,39 @@
(change)="saveConnectionSettings()"
class="sr-only peer"
/>
<div class="w-11 h-6 bg-secondary rounded-full peer peer-checked:bg-primary peer-checked:after:translate-x-full after:content-[''] after:absolute after:top-0.5 after:left-[2px] after:bg-white after:rounded-full after:h-5 after:w-5 after:transition-all"></div>
<div
class="w-11 h-6 bg-secondary rounded-full peer peer-checked:bg-primary peer-checked:after:translate-x-full after:content-[''] after:absolute after:top-0.5 after:left-[2px] after:bg-white after:rounded-full after:h-5 after:w-5 after:transition-all"
></div>
</label>
</div>
</div>
</div>
<!-- Voice Settings -->
<div class="bg-card border border-border rounded-lg p-6">
<div class="flex items-center gap-2 mb-4">
<ng-icon name="lucideAudioLines" class="w-5 h-5 text-muted-foreground" />
<h2 class="text-lg font-semibold text-foreground">Voice Settings</h2>
</div>
<div class="space-y-4">
<div class="flex items-center justify-between">
<div>
<p class="font-medium text-foreground">Noise reduction</p>
<p class="text-sm text-muted-foreground">
Use RNNoise to suppress background noise from your microphone
</p>
</div>
<label class="relative inline-flex items-center cursor-pointer">
<input
type="checkbox"
[(ngModel)]="noiseReduction"
(change)="saveVoiceSettings()"
class="sr-only peer"
/>
<div
class="w-11 h-6 bg-secondary rounded-full peer peer-checked:bg-primary peer-checked:after:translate-x-full after:content-[''] after:absolute after:top-0.5 after:left-[2px] after:bg-white after:rounded-full after:h-5 after:w-5 after:transition-all"
></div>
</label>
</div>
</div>

View File

@@ -13,10 +13,12 @@ import {
lucideRefreshCw,
lucideGlobe,
lucideArrowLeft,
lucideAudioLines,
} from '@ng-icons/lucide';
import { ServerDirectoryService } from '../../core/services/server-directory.service';
import { STORAGE_KEY_CONNECTION_SETTINGS } from '../../core/constants';
import { WebRTCService } from '../../core/services/webrtc.service';
import { STORAGE_KEY_CONNECTION_SETTINGS, STORAGE_KEY_VOICE_SETTINGS } from '../../core/constants';
@Component({
selector: 'app-settings',
@@ -33,6 +35,7 @@ import { STORAGE_KEY_CONNECTION_SETTINGS } from '../../core/constants';
lucideRefreshCw,
lucideGlobe,
lucideArrowLeft,
lucideAudioLines,
}),
],
templateUrl: './settings.component.html',
@@ -42,6 +45,7 @@ import { STORAGE_KEY_CONNECTION_SETTINGS } from '../../core/constants';
*/
export class SettingsComponent implements OnInit {
private serverDirectory = inject(ServerDirectoryService);
private webrtcService = inject(WebRTCService);
private router = inject(Router);
servers = this.serverDirectory.servers;
@@ -52,10 +56,12 @@ export class SettingsComponent implements OnInit {
newServerUrl = '';
autoReconnect = true;
searchAllServers = true;
noiseReduction = false;
/** Load persisted connection settings on component init. */
ngOnInit(): void {
this.loadConnectionSettings();
this.loadVoiceSettings();
}
/** Add a new signaling server after URL validation and duplicate checking. */
@@ -128,7 +134,7 @@ export class SettingsComponent implements OnInit {
JSON.stringify({
autoReconnect: this.autoReconnect,
searchAllServers: this.searchAllServers,
})
}),
);
this.serverDirectory.setSearchAllServers(this.searchAllServers);
}
@@ -137,4 +143,32 @@ export class SettingsComponent implements OnInit {
goBack(): void {
this.router.navigate(['/']);
}
/**
 * Load voice settings (noise reduction) from localStorage and sync the live
 * WebRTC state with the persisted preference.
 *
 * Unlike the original, the JSON parse is guarded: a corrupt or legacy payload
 * in localStorage must not crash component init. `saveVoiceSettings()` guards
 * its parse the same way, so this also keeps the two methods consistent.
 */
loadVoiceSettings(): void {
  const settings = localStorage.getItem(STORAGE_KEY_VOICE_SETTINGS);
  if (settings) {
    try {
      const parsed = JSON.parse(settings) as { noiseReduction?: boolean };
      this.noiseReduction = parsed.noiseReduction ?? false;
    } catch {
      // Unreadable payload — fall back to the default (off).
      this.noiseReduction = false;
    }
  }
  // Sync the live WebRTC state with the persisted preference.
  // Fire-and-forget: toggleNoiseReduction is async but init must not block on it.
  if (this.noiseReduction !== this.webrtcService.isNoiseReductionEnabled()) {
    void this.webrtcService.toggleNoiseReduction(this.noiseReduction);
  }
}
/** Persist noise reduction preference (merged into existing voice settings) and apply immediately. */
async saveVoiceSettings(): Promise<void> {
// Merge into existing voice settings so we don't overwrite device/volume prefs
let existing: Record<string, unknown> = {};
try {
const raw = localStorage.getItem(STORAGE_KEY_VOICE_SETTINGS);
if (raw) existing = JSON.parse(raw);
} catch {}
localStorage.setItem(
STORAGE_KEY_VOICE_SETTINGS,
JSON.stringify({ ...existing, noiseReduction: this.noiseReduction }),
);
await this.webrtcService.toggleNoiseReduction(this.noiseReduction);
}
}

View File

@@ -1,10 +1,16 @@
<div class="bg-card border-border p-4">
<!-- Connection Error Banner -->
@if (showConnectionError()) {
<div class="mb-3 p-2 bg-destructive/20 border border-destructive/30 rounded-lg flex items-center gap-2">
<div
class="mb-3 p-2 bg-destructive/20 border border-destructive/30 rounded-lg flex items-center gap-2"
>
<span class="w-2 h-2 rounded-full bg-destructive animate-pulse"></span>
<span class="text-xs text-destructive">{{ connectionErrorMessage() || 'Connection error' }}</span>
<button (click)="retryConnection()" class="ml-auto text-xs text-destructive hover:underline">Retry</button>
<span class="text-xs text-destructive">{{
connectionErrorMessage() || 'Connection error'
}}</span>
<button (click)="retryConnection()" class="ml-auto text-xs text-destructive hover:underline">
Retry
</button>
</div>
}
@@ -25,10 +31,7 @@
}
</p>
</div>
<button
(click)="toggleSettings()"
class="p-2 hover:bg-secondary rounded-lg transition-colors"
>
<button (click)="toggleSettings()" class="p-2 hover:bg-secondary rounded-lg transition-colors">
<ng-icon name="lucideSettings" class="w-4 h-4 text-muted-foreground" />
</button>
</div>
@@ -37,10 +40,7 @@
<div class="flex items-center justify-center gap-2">
@if (isConnected()) {
<!-- Mute Toggle -->
<button
(click)="toggleMute()"
[class]="getMuteButtonClass()"
>
<button (click)="toggleMute()" [class]="getMuteButtonClass()">
@if (isMuted()) {
<ng-icon name="lucideMicOff" class="w-5 h-5" />
} @else {
@@ -49,18 +49,12 @@
</button>
<!-- Deafen Toggle -->
<button
(click)="toggleDeafen()"
[class]="getDeafenButtonClass()"
>
<button (click)="toggleDeafen()" [class]="getDeafenButtonClass()">
<ng-icon name="lucideHeadphones" class="w-5 h-5" />
</button>
<!-- Screen Share Toggle -->
<button
(click)="toggleScreenShare()"
[class]="getScreenShareButtonClass()"
>
<button (click)="toggleScreenShare()" [class]="getScreenShareButtonClass()">
@if (isScreenSharing()) {
<ng-icon name="lucideMonitorOff" class="w-5 h-5" />
} @else {
@@ -80,8 +74,14 @@
<!-- Settings Modal -->
@if (showSettings()) {
<div class="fixed inset-0 bg-black/50 flex items-center justify-center z-50" (click)="closeSettings()">
<div class="bg-card border border-border rounded-lg p-6 w-full max-w-md m-4" (click)="$event.stopPropagation()">
<div
class="fixed inset-0 bg-black/50 flex items-center justify-center z-50"
(click)="closeSettings()"
>
<div
class="bg-card border border-border rounded-lg p-6 w-full max-w-md m-4"
(click)="$event.stopPropagation()"
>
<h2 class="text-xl font-semibold text-foreground mb-4">Voice Settings</h2>
<div class="space-y-4">
@@ -92,7 +92,10 @@
class="w-full px-3 py-2 bg-secondary rounded-lg border border-border text-foreground text-sm focus:outline-none focus:ring-2 focus:ring-primary"
>
@for (device of inputDevices(); track device.deviceId) {
<option [value]="device.deviceId" [selected]="device.deviceId === selectedInputDevice()">
<option
[value]="device.deviceId"
[selected]="device.deviceId === selectedInputDevice()"
>
{{ device.label || 'Microphone ' + $index }}
</option>
}
@@ -106,7 +109,10 @@
class="w-full px-3 py-2 bg-secondary rounded-lg border border-border text-foreground text-sm focus:outline-none focus:ring-2 focus:ring-primary"
>
@for (device of outputDevices(); track device.deviceId) {
<option [value]="device.deviceId" [selected]="device.deviceId === selectedOutputDevice()">
<option
[value]="device.deviceId"
[selected]="device.deviceId === selectedOutputDevice()"
>
{{ device.label || 'Speaker ' + $index }}
</option>
}
@@ -154,14 +160,36 @@
</div>
<div>
<label class="block text-sm font-medium text-foreground mb-1">Include system audio when sharing screen</label>
<label class="block text-sm font-medium text-foreground mb-1"
>Include system audio when sharing screen</label
>
<input
type="checkbox"
[checked]="includeSystemAudio()"
(change)="onIncludeSystemAudioChange($event)"
class="accent-primary"
/>
<p class="text-xs text-muted-foreground">Off by default; viewers will still hear your mic.</p>
<p class="text-xs text-muted-foreground">
Off by default; viewers will still hear your mic.
</p>
</div>
<div class="flex items-center justify-between">
<div>
<label class="block text-sm font-medium text-foreground">Noise reduction</label>
<p class="text-xs text-muted-foreground">Suppress background noise using RNNoise</p>
</div>
<label class="relative inline-flex items-center cursor-pointer">
<input
type="checkbox"
[checked]="noiseReduction()"
(change)="onNoiseReductionChange($event)"
class="sr-only peer"
/>
<div
class="w-11 h-6 bg-secondary rounded-full peer peer-checked:bg-primary peer-checked:after:translate-x-full after:content-[''] after:absolute after:top-0.5 after:left-[2px] after:bg-white after:rounded-full after:h-5 after:w-5 after:transition-all"
></div>
</label>
</div>
<div>

View File

@@ -86,6 +86,7 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
audioBitrate = signal(96);
latencyProfile = signal<'low' | 'balanced' | 'high'>('balanced');
includeSystemAudio = signal(false);
noiseReduction = signal(false);
private voiceConnectedSubscription: Subscription | null = null;
@@ -264,7 +265,7 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
},
});
this.webrtcService.setLocalStream(stream);
await this.webrtcService.setLocalStream(stream);
// Track local mic for voice-activity visualisation
const userId = this.currentUser()?.id;
@@ -496,6 +497,13 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
this.saveSettings();
}
/**
 * Handle the noise-reduction checkbox: update the local signal, apply the
 * change to the active WebRTC pipeline, then persist all voice settings.
 */
async onNoiseReductionChange(event: Event): Promise<void> {
  const checkbox = event.target as HTMLInputElement;
  const enabled = checkbox.checked === true;
  this.noiseReduction.set(enabled);
  await this.webrtcService.toggleNoiseReduction(enabled);
  this.saveSettings();
}
private loadSettings(): void {
try {
const raw = localStorage.getItem(STORAGE_KEY_VOICE_SETTINGS);
@@ -508,6 +516,7 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
audioBitrate?: number;
latencyProfile?: 'low' | 'balanced' | 'high';
includeSystemAudio?: boolean;
noiseReduction?: boolean;
};
if (settings.inputDevice) this.selectedInputDevice.set(settings.inputDevice);
if (settings.outputDevice) this.selectedOutputDevice.set(settings.outputDevice);
@@ -517,6 +526,8 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
if (settings.latencyProfile) this.latencyProfile.set(settings.latencyProfile);
if (typeof settings.includeSystemAudio === 'boolean')
this.includeSystemAudio.set(settings.includeSystemAudio);
if (typeof settings.noiseReduction === 'boolean')
this.noiseReduction.set(settings.noiseReduction);
} catch {}
}
@@ -530,6 +541,7 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
audioBitrate: this.audioBitrate(),
latencyProfile: this.latencyProfile(),
includeSystemAudio: this.includeSystemAudio(),
noiseReduction: this.noiseReduction(),
};
localStorage.setItem(STORAGE_KEY_VOICE_SETTINGS, JSON.stringify(voiceSettings));
} catch {}
@@ -541,6 +553,9 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
this.webrtcService.setAudioBitrate(this.audioBitrate());
this.webrtcService.setLatencyProfile(this.latencyProfile());
this.applyOutputDevice();
// Always sync the desired noise-reduction preference (even before
// a mic stream exists — the flag will be honoured on connect).
this.webrtcService.toggleNoiseReduction(this.noiseReduction());
} catch {}
}