Fix bugs and clean up noise reduction

This commit is contained in:
2026-03-06 02:22:43 +01:00
parent 0ed9ca93d3
commit 2d84fbd91a
39 changed files with 3443 additions and 1544 deletions

View File

@@ -19,7 +19,8 @@ export async function handleSaveRoom(command: SaveRoomCommand, dataSource: DataS
icon: room.icon ?? null,
iconUpdatedAt: room.iconUpdatedAt ?? null,
permissions: room.permissions != null ? JSON.stringify(room.permissions) : null,
channels: room.channels != null ? JSON.stringify(room.channels) : null
channels: room.channels != null ? JSON.stringify(room.channels) : null,
members: room.members != null ? JSON.stringify(room.members) : null
});
await repo.save(entity);

View File

@@ -12,7 +12,8 @@ const ROOM_TRANSFORMS: TransformMap = {
isPrivate: boolToInt,
userCount: (val) => (val ?? 0),
permissions: jsonOrNull,
channels: jsonOrNull
channels: jsonOrNull,
members: jsonOrNull
};
export async function handleUpdateRoom(command: UpdateRoomCommand, dataSource: DataSource): Promise<void> {

View File

@@ -60,7 +60,8 @@ export function rowToRoom(row: RoomEntity) {
icon: row.icon ?? undefined,
iconUpdatedAt: row.iconUpdatedAt ?? undefined,
permissions: row.permissions ? JSON.parse(row.permissions) : undefined,
channels: row.channels ? JSON.parse(row.channels) : undefined
channels: row.channels ? JSON.parse(row.channels) : undefined,
members: row.members ? JSON.parse(row.members) : undefined
};
}

View File

@@ -103,6 +103,7 @@ export interface RoomPayload {
iconUpdatedAt?: number;
permissions?: unknown;
channels?: unknown[];
members?: unknown[];
}
export interface BanPayload {

View File

@@ -47,4 +47,7 @@ export class RoomEntity {
@Column('text', { nullable: true })
channels!: string | null;
@Column('text', { nullable: true })
members!: string | null;
}

View File

@@ -0,0 +1,19 @@
import { MigrationInterface, QueryRunner } from 'typeorm';
/**
 * Adds a nullable "members" TEXT column to the "rooms" table so the
 * persisted member roster can be stored as serialized JSON.
 */
export class AddRoomMembers1000000000001 implements MigrationInterface {
  name = 'AddRoomMembers1000000000001';

  public async up(queryRunner: QueryRunner): Promise<void> {
    // PRAGMA table_info lists every column of the table; guard so the
    // ALTER is idempotent when the column was already added out-of-band.
    const existing = await queryRunner.query(`PRAGMA table_info("rooms")`) as Array<{ name?: string }>;
    const alreadyPresent = Array.isArray(existing)
      && existing.some((col) => col.name === 'members');
    if (alreadyPresent) {
      return;
    }
    await queryRunner.query(`ALTER TABLE "rooms" ADD COLUMN "members" TEXT`);
  }

  public async down(_queryRunner: QueryRunner): Promise<void> {
    // Forward-only migration: SQLite column removal is intentionally omitted.
  }
}

View File

@@ -62,6 +62,13 @@ module.exports = tseslint.config(
],
processor: angular.processInlineTemplates,
rules: {
'@angular-eslint/component-max-inline-declarations': [
'error',
{
template: 3,
styles: 0
}
],
'no-dashes/no-unicode-dashes': 'error',
'@typescript-eslint/no-extraneous-class': 'off',
'@angular-eslint/component-class-suffix': [ 'error', { suffixes: ['Component','Page','Stub'] } ],
@@ -141,6 +148,15 @@ module.exports = tseslint.config(
'@stylistic/js/space-in-parens': 'error',
'@stylistic/js/space-unary-ops': 'error',
'@stylistic/js/spaced-comment': ['error','always',{ markers:['/'] }],
'@stylistic/js/array-bracket-spacing': 'error',
'@stylistic/js/array-element-newline': ['error', {
multiline: true,
minItems: 3
}],
'@stylistic/js/array-bracket-newline': ['error', {
multiline: true,
minItems: 3
}],
"import-newlines/enforce": [
"error",
2

File diff suppressed because it is too large Load Diff

View File

@@ -1,442 +0,0 @@
/**
* VoiceLevelingProcessor — AudioWorkletProcessor that implements
* broadcast-grade per-speaker automatic gain control (AGC).
*
* ═══════════════════════════════════════════════════════════════════
* DSP DESIGN NOTES
* ═══════════════════════════════════════════════════════════════════
*
* This processor mimics WebRTC's Gain Controller 2 (AGC2) behaviour
* using a lightweight algorithm suitable for real-time voice in an
* AudioWorklet thread.
*
* Pipeline (per 128-sample render quantum ≈ 2.67 ms @ 48 kHz):
*
* 1. RMS level estimation (short-term envelope)
* 2. Silence gate (freeze gain when below noise floor)
* 3. Target gain compute (desired dBFS → linear gain)
* 4. Gain smoothing (exponential attack / release)
* 5. Max-gain clamp (prevent runaway boost)
* 6. Soft-clip limiter (prevent digital overs)
*
* Key properties:
* • No per-frame allocation — all buffers pre-allocated.
* • Synchronous processing — no message passing in hot path.
* • Uses Float32 throughout — native AudioWorklet format.
* • 128-sample quantum fits within 10 ms at 48 kHz (2.67 ms).
*
* The processor receives configuration via AudioWorkletNode.port
* messages and applies them on the next render quantum.
*
* ═══════════════════════════════════════════════════════════════════
*/
/* ──────────────────────────────────────────────────────────────── */
/* Constants                                                        */
/* ──────────────────────────────────────────────────────────────── */

/** Processor name registered with `registerProcessor`. */
const PROCESSOR_NAME = 'VoiceLevelingProcessor';

/**
 * Web Audio render quantum size — the number of samples processed
 * in each call to `process()`. The AudioWorklet spec mandates 128.
 */
const RENDER_QUANTUM_FRAMES = 128;

/**
 * Minimum RMS level (linear) below which the input is considered
 * silence. Gain is frozen/decayed when the signal is this quiet.
 * Roughly -60 dBFS (0.001 linear).
 */
const DEFAULT_SILENCE_THRESHOLD = 0.001;

/**
 * The target RMS level in dBFS. -18 dBFS is a comfortable
 * conversational loudness for headphone listening.
 */
const DEFAULT_TARGET_DBFS = -18;

/** Default maximum gain boost in dB. */
const DEFAULT_MAX_GAIN_DB = 12;

/** Soft-clip ceiling (linear amplitude) — prevents digital overs. */
const SOFT_CLIP_THRESHOLD = 0.95;

/**
 * Speed presets: attack and release time constants (seconds).
 *
 * Attack  = how fast gain *decreases* when a loud signal arrives.
 * Release = how fast gain *increases* when the signal gets quieter.
 *
 * Asymmetric: fast attack prevents clipping, slow release sounds
 * natural and avoids "pumping".
 */
const SPEED_PRESETS = {
  slow: { attack: 0.015, release: 0.800 },
  medium: { attack: 0.010, release: 0.400 },
  fast: { attack: 0.005, release: 0.150 },
};

/**
 * AGC strength presets: scale the computed gain adjustment.
 * 1.0 = full correction toward target; lower = gentler leveling.
 */
const STRENGTH_PRESETS = {
  low: 0.5,
  medium: 0.75,
  high: 1.0,
};

/**
 * When silence is detected, the gain decays toward 1.0 (unity)
 * at this rate (seconds). This prevents the gain from sitting at
 * a huge value after long silence and then blasting when speech
 * resumes.
 */
const SILENCE_DECAY_TC = 2.0;
/* ──────────────────────────────────────────────────────────────── */
/* Helpers */
/* ──────────────────────────────────────────────────────────────── */
/**
 * Convert a decibel value to its linear gain equivalent.
 *
 * @param {number} db Gain in decibels.
 * @returns {number} Linear gain multiplier (10^(db/20)).
 */
function dbToLinear(db) {
  return 10 ** (db / 20);
}
/**
 * Convert linear amplitude to dBFS.
 *
 * (Doc fix: the previous comment claimed "Infinity for 0", but the
 * implementation returns negative infinity for silence.)
 *
 * @param {number} linear Linear amplitude; expected >= 0.
 * @returns {number} Level in dBFS; -Infinity for zero or negative input.
 */
function linearToDb(linear) {
  if (linear <= 0) return -Infinity;
  return 20 * Math.log10(linear);
}
/**
 * Compute the exponential smoothing coefficient (alpha) for a given
 * time constant and **frame rate** (not sample rate!).
 *
 * Because the envelope / gain update runs once per render quantum
 * (128 samples), the rate passed here must be frames-per-second
 * (sampleRate / 128), NOT samples-per-second. Using the raw
 * sampleRate would produce absurdly small alpha values, making the
 * AGC appear frozen.
 *
 *   alpha = 1 - e^(-1 / (tc * fps))
 *
 * (Doc fix: the minus signs in the formula above had been stripped,
 * leaving a garbled expression.)
 *
 * Larger alpha -> faster response.
 *
 * @param {number} tc Time constant in seconds; non-positive yields 1.0.
 * @param {number} fps Frame rate (render quanta per second).
 * @returns {number} Smoothing coefficient in (0-1].
 */
function timeConstantToAlpha(tc, fps) {
  if (tc <= 0) return 1.0;
  return 1.0 - Math.exp(-1.0 / (tc * fps));
}
/**
 * Allocate a Float32Array of the given length, preferring a
 * SharedArrayBuffer backing when the environment provides one and
 * silently falling back to ordinary heap memory otherwise.
 *
 * @param {number} length Number of elements.
 * @returns {Float32Array} Zero-initialised buffer of `length` floats.
 */
function allocateBuffer(length) {
  let backing = null;
  if (typeof SharedArrayBuffer !== 'undefined') {
    try {
      backing = new SharedArrayBuffer(length * 4);
    } catch {
      backing = null; // SAB constructor may be disabled; fall back below
    }
  }
  return backing !== null ? new Float32Array(backing) : new Float32Array(length);
}
/**
 * Tanh-based soft clipper that prevents digital overs while keeping
 * the waveform shape intact.
 *
 * Samples whose magnitude is at or below SOFT_CLIP_THRESHOLD pass
 * through untouched; anything louder is compressed symmetrically into
 * the (threshold..1) range via tanh.
 *
 * @param {number} sample Input sample.
 * @returns {number} Clipped sample.
 */
function softClip(sample) {
  const magnitude = Math.abs(sample);
  if (magnitude <= SOFT_CLIP_THRESHOLD) return sample;
  // Normalise the overshoot into tanh's input domain, then map it back
  // into the remaining headroom above the threshold.
  const headroom = 1 - SOFT_CLIP_THRESHOLD;
  const overshoot = (magnitude - SOFT_CLIP_THRESHOLD) / headroom;
  const shaped = SOFT_CLIP_THRESHOLD + headroom * Math.tanh(overshoot);
  return sample < 0 ? -shaped : shaped;
}
/* ──────────────────────────────────────────────────────────────── */
/* Processor */
/* ──────────────────────────────────────────────────────────────── */
class VoiceLevelingProcessor extends AudioWorkletProcessor {
  /* ── State ──────────────────────────────────────────────────── */

  /** Whether processing is enabled (bypass when false). */
  _enabled = true;

  /** Target loudness in dBFS. */
  _targetDbfs = DEFAULT_TARGET_DBFS;

  /** Maximum gain boost in dB. */
  _maxGainDb = DEFAULT_MAX_GAIN_DB;

  /** Linear ceiling for the gain multiplier (kept in sync with _maxGainDb). */
  _maxGainLinear = dbToLinear(DEFAULT_MAX_GAIN_DB);

  /** AGC strength factor (0-1). Scales the gain correction. */
  _strength = STRENGTH_PRESETS.medium;

  /** Whether the silence/noise gate is active. */
  _noiseGateEnabled = false;

  /** RMS threshold (linear) below which input is treated as silence. */
  _silenceThreshold = DEFAULT_SILENCE_THRESHOLD;

  /** Attack smoothing coefficient (recomputed by _applySpeed). */
  _alphaAttack = 0;

  /** Release smoothing coefficient (recomputed by _applySpeed). */
  _alphaRelease = 0;

  /** Silence decay smoothing coefficient (recomputed by _applySpeed). */
  _alphaSilenceDecay = 0;

  /**
   * Running RMS envelope (squared, to avoid sqrt every frame).
   * Smoothed with a one-pole filter.
   */
  _envelopeSq = 0;

  /** Current applied gain (linear). Smoothed toward target. */
  _currentGain = 1.0;

  /**
   * Pre-allocated buffer used for RMS computation.
   * Sized to the largest possible render quantum (128 samples).
   * NOTE(review): not referenced anywhere in process() below — confirm
   * whether this is dead state or reserved for a future RMS path.
   */
  _scratchBuffer = allocateBuffer(128);

  /* ── Constructor ────────────────────────────────────────────── */

  constructor(options) {
    super(options);
    // Compute smoothing coefficients from the default speed preset.
    this._applySpeed('medium');
    // Listen for configuration changes from the main thread.
    // Messages are consumed before the next render quantum.
    this.port.onmessage = (event) => this._handleMessage(event.data);
  }

  /* ── Configuration ──────────────────────────────────────────── */

  /**
   * Handle a configuration message from the main thread.
   *
   * Accepted keys (all optional; unknown keys are ignored):
   *   enabled    : boolean
   *   targetDbfs : number (-30 .. -12, clamped)
   *   maxGainDb  : number (3 .. 20, clamped)
   *   strength   : 'low' | 'medium' | 'high'
   *   speed      : 'slow' | 'medium' | 'fast'
   *   noiseGate  : boolean
   *
   * @param {object} msg
   */
  _handleMessage(msg) {
    if (msg == null || typeof msg !== 'object') return;
    if (typeof msg.enabled === 'boolean') {
      this._enabled = msg.enabled;
      if (!msg.enabled) {
        // Reset gain to unity on disable so re-enabling starts clean
        this._currentGain = 1.0;
        this._envelopeSq = 0;
      }
    }
    if (typeof msg.targetDbfs === 'number') {
      this._targetDbfs = Math.max(-30, Math.min(-12, msg.targetDbfs));
    }
    if (typeof msg.maxGainDb === 'number') {
      const clamped = Math.max(3, Math.min(20, msg.maxGainDb));
      this._maxGainDb = clamped;
      this._maxGainLinear = dbToLinear(clamped);
    }
    if (typeof msg.strength === 'string' && STRENGTH_PRESETS[msg.strength] != null) {
      this._strength = STRENGTH_PRESETS[msg.strength];
    }
    if (typeof msg.speed === 'string' && SPEED_PRESETS[msg.speed] != null) {
      this._applySpeed(msg.speed);
    }
    if (typeof msg.noiseGate === 'boolean') {
      this._noiseGateEnabled = msg.noiseGate;
    }
  }

  /**
   * Recompute attack/release/silence-decay coefficients for
   * the current sample rate.
   *
   * IMPORTANT: We use frames-per-second (sampleRate / 128), NOT
   * the raw sampleRate, because the smoothing filter is applied
   * once per render quantum — not once per sample.
   * (`sampleRate` is the AudioWorkletGlobalScope global.)
   *
   * @param {'slow' | 'medium' | 'fast'} preset
   */
  _applySpeed(preset) {
    const { attack, release } = SPEED_PRESETS[preset];
    const fps = sampleRate / RENDER_QUANTUM_FRAMES;
    this._alphaAttack = timeConstantToAlpha(attack, fps);
    this._alphaRelease = timeConstantToAlpha(release, fps);
    this._alphaSilenceDecay = timeConstantToAlpha(SILENCE_DECAY_TC, fps);
  }

  /* ── DSP ────────────────────────────────────────────────────── */

  /**
   * Main audio processing callback.
   *
   * @param {Float32Array[][]} inputs Input channels.
   * @param {Float32Array[][]} outputs Output channels.
   * @returns {boolean} `true` to keep the processor alive.
   */
  process(inputs, outputs) {
    const input = inputs[0];
    const output = outputs[0];
    // No input → silence pass-through
    if (!input || input.length === 0 || !input[0]) {
      return true;
    }
    const inputChannel = input[0];
    const outputChannel = output[0];
    const numSamples = inputChannel.length;
    // ── Bypass mode ──────────────────────────────────────────
    if (!this._enabled) {
      // Copy input → output unchanged
      for (let i = 0; i < numSamples; i++) {
        outputChannel[i] = inputChannel[i];
      }
      // Also copy any additional channels (stereo, etc.)
      for (let ch = 1; ch < input.length; ch++) {
        if (output[ch] && input[ch]) {
          for (let i = 0; i < numSamples; i++) {
            output[ch][i] = input[ch][i];
          }
        }
      }
      return true;
    }
    // ── 1. RMS level estimation ──────────────────────────────
    //
    // Compute the RMS of this render quantum and smooth it with
    // a one-pole IIR filter (exponential moving average).
    //
    // We work in the squared domain to avoid a sqrt per sample;
    // the sqrt is taken only once per quantum for the gain calc.
    let sumSq = 0;
    for (let i = 0; i < numSamples; i++) {
      const s = inputChannel[i];
      sumSq += s * s;
    }
    const frameMeanSq = sumSq / numSamples;
    // Smooth envelope: use attack for rising levels, release for falling
    const alpha = frameMeanSq > this._envelopeSq
      ? this._alphaAttack
      : this._alphaRelease;
    this._envelopeSq += alpha * (frameMeanSq - this._envelopeSq);
    // Current smoothed RMS (linear); floor avoids sqrt of exact zero
    const rms = Math.sqrt(Math.max(this._envelopeSq, 1e-12));
    // ── 2. Silence gate ──────────────────────────────────────
    //
    // If the RMS is below the silence threshold, do NOT compute
    // a new gain target. Instead, decay the current gain slowly
    // toward unity (1.0) so we don't slam the listener when
    // speech resumes.
    const isSilence = rms < this._silenceThreshold;
    if (isSilence && this._noiseGateEnabled) {
      // Decay gain toward 1.0
      this._currentGain += this._alphaSilenceDecay * (1.0 - this._currentGain);
    } else if (!isSilence) {
      // ── 3. Target gain computation ───────────────────────
      //
      // Desired gain = 10^((targetDbfs - currentDbfs) / 20)
      //
      // We scale the correction by the strength factor so that
      // "low" strength applies only 50 % of the correction.
      const currentDbfs = linearToDb(rms);
      const errorDb = this._targetDbfs - currentDbfs;
      // Scale the correction by strength.
      // A strength of 1.0 means "correct fully to target".
      const correctionDb = errorDb * this._strength;
      let desiredGain = dbToLinear(correctionDb);
      // Clamp to max gain
      if (desiredGain > this._maxGainLinear) {
        desiredGain = this._maxGainLinear;
      }
      // Never attenuate below a certain floor (we're leveling UP,
      // but very loud signals still need to be pulled down).
      // Allow attenuation down to -6 dB (0.5 linear).
      if (desiredGain < 0.5) {
        desiredGain = 0.5;
      }
      // ── 4. Gain smoothing ──────────────────────────────
      //
      // Exponentially interpolate the current gain toward the
      // desired gain. Use fast attack (gain DOWN) and slow
      // release (gain UP) for natural dynamics.
      const gainAlpha = desiredGain < this._currentGain
        ? this._alphaAttack // Gain is decreasing (loud signal arrived)
        : this._alphaRelease; // Gain is increasing (signal got quieter)
      this._currentGain += gainAlpha * (desiredGain - this._currentGain);
    }
    // If isSilence && !noiseGateEnabled → gain stays as-is (frozen)
    // ── 5. Apply gain & soft-clip ─────────────────────────────
    const gain = this._currentGain;
    for (let i = 0; i < numSamples; i++) {
      outputChannel[i] = softClip(inputChannel[i] * gain);
    }
    // Copy any additional channels with same gain
    for (let ch = 1; ch < input.length; ch++) {
      if (output[ch] && input[ch]) {
        for (let i = 0; i < numSamples; i++) {
          output[ch][i] = softClip(input[ch][i] * gain);
        }
      }
    }
    return true;
  }
}

registerProcessor(PROCESSOR_NAME, VoiceLevelingProcessor);

Binary file not shown.

View File

@@ -17,6 +17,7 @@ import { MessagesEffects } from './store/messages/messages.effects';
import { MessagesSyncEffects } from './store/messages/messages-sync.effects';
import { UsersEffects } from './store/users/users.effects';
import { RoomsEffects } from './store/rooms/rooms.effects';
import { RoomMembersSyncEffects } from './store/rooms/room-members-sync.effects';
import { STORE_DEVTOOLS_MAX_AGE } from './core/constants';
/** Root application configuration providing routing, HTTP, NgRx store, and devtools. */
@@ -34,7 +35,8 @@ export const appConfig: ApplicationConfig = {
MessagesEffects,
MessagesSyncEffects,
UsersEffects,
RoomsEffects
RoomsEffects,
RoomMembersSyncEffects
]),
provideStoreDevtools({
maxAge: STORE_DEVTOOLS_MAX_AGE,

View File

@@ -17,6 +17,9 @@ export const STORAGE_KEY_CONNECTION_SETTINGS = 'metoyou_connection_settings';
/** Key used to persist voice settings (input/output devices, volume). */
export const STORAGE_KEY_VOICE_SETTINGS = 'metoyou_voice_settings';
/** Key used to persist per-user volume overrides (0-200%). */
export const STORAGE_KEY_USER_VOLUMES = 'metoyou_user_volumes';
/** Regex that extracts a roomId from a `/room/:roomId` URL path. */
export const ROOM_URL_PATTERN = /\/room\/([^/]+)/;
@@ -34,6 +37,3 @@ export const DEFAULT_VOLUME = 100;
/** Default search debounce time in milliseconds. */
export const SEARCH_DEBOUNCE_MS = 300;
/** Key used to persist voice leveling (AGC) settings. */
export const STORAGE_KEY_VOICE_LEVELING_SETTINGS = 'metoyou_voice_leveling_settings';

View File

@@ -49,6 +49,31 @@ export interface User {
screenShareState?: ScreenShareState;
}
/**
 * Persisted membership record for a room/server.
 *
 * Unlike `User`, this survives when a member goes offline so the UI can
 * continue to list known server members.
 */
export interface RoomMember {
/** The member's local application/database identifier. */
id: string;
/**
 * Optional network-wide peer identifier.
 * NOTE(review): the field name 'oderId' looks like a typo (perhaps
 * 'nodeId' or 'orderId') — confirm before renaming, since persisted
 * rows and serialized event payloads may already use this key.
 */
oderId?: string;
/** Login username (best effort; may be synthesized from display name). */
username: string;
/** Human-readable display name shown in the UI. */
displayName: string;
/** Optional avatar URL. */
avatarUrl?: string;
/** Role within the room/server. */
role: UserRole;
/** Epoch timestamp (ms) when the member first joined. */
joinedAt: number;
/** Epoch timestamp (ms) when the member was last seen online. */
lastSeenAt: number;
}
/**
* A communication channel within a server (either text or voice).
*/
@@ -141,6 +166,8 @@ export interface Room {
permissions?: RoomPermissions;
/** Text and voice channels within the server. */
channels?: Channel[];
/** Persisted member roster, including offline users. */
members?: RoomMember[];
}
/**
@@ -307,6 +334,9 @@ export type ChatEventType =
| 'room-settings-update'
| 'voice-state'
| 'chat-inventory-request'
| 'member-roster-request'
| 'member-roster'
| 'member-leave'
| 'voice-state-request'
| 'state-request'
| 'screen-state'
@@ -362,6 +392,8 @@ export interface ChatEvent {
role?: UserRole;
/** Updated channel list. */
channels?: Channel[];
/** Synced room member roster. */
members?: RoomMember[];
}
/**

View File

@@ -9,4 +9,3 @@ export * from './voice-session.service';
export * from './voice-activity.service';
export * from './external-link.service';
export * from './settings-modal.service';
export * from './voice-leveling.service';

View File

@@ -1,281 +0,0 @@
/**
* VoiceLevelingService - Angular service that manages the
* per-speaker voice leveling (AGC) system.
*
* ═══════════════════════════════════════════════════════════════════
* RESPONSIBILITIES
* ═══════════════════════════════════════════════════════════════════
*
* 1. Owns the {@link VoiceLevelingManager} singleton and proxies
* its methods to the rest of the application.
*
* 2. Persists user settings in localStorage and restores them on
* construction so preferences survive across sessions.
*
* 3. Exposes reactive Angular signals for the current settings so
* UI components can bind declaratively.
*
* 4. Provides an `enable` / `disable` / `disableAll` API that
* the voice-controls component uses to insert and remove the
* AGC pipeline from the remote audio playback chain - mirroring
* the {@link NoiseReductionManager} toggle pattern.
*
* 5. Fires a callback when the user toggles the enabled state so
* the voice-controls component can rebuild audio elements live.
*
* ═══════════════════════════════════════════════════════════════════
*/
/* eslint-disable @typescript-eslint/member-ordering */
import {
Injectable,
signal,
computed,
OnDestroy
} from '@angular/core';
import {
VoiceLevelingManager,
VoiceLevelingSettings,
DEFAULT_VOICE_LEVELING_SETTINGS
} from './webrtc/voice-leveling.manager';
import { WebRTCLogger } from './webrtc/webrtc-logger';
import { STORAGE_KEY_VOICE_LEVELING_SETTINGS } from '../constants';
/**
 * Manages the per-speaker voice leveling (AGC) system: owns the
 * {@link VoiceLevelingManager}, persists user preferences in
 * localStorage, and exposes reactive signals for declarative UI binding.
 */
@Injectable({ providedIn: 'root' })
export class VoiceLevelingService implements OnDestroy {
  /** Underlying per-speaker AGC pipeline manager. */
  private readonly manager: VoiceLevelingManager;

  /* ── Writable state (defaults mirror DEFAULT_VOICE_LEVELING_SETTINGS) ── */
  private readonly enabledState = signal(DEFAULT_VOICE_LEVELING_SETTINGS.enabled);
  private readonly targetDbfsState = signal(DEFAULT_VOICE_LEVELING_SETTINGS.targetDbfs);
  private readonly strengthState = signal<'low' | 'medium' | 'high'>(DEFAULT_VOICE_LEVELING_SETTINGS.strength);
  private readonly maxGainDbState = signal(DEFAULT_VOICE_LEVELING_SETTINGS.maxGainDb);
  private readonly speedState = signal<'slow' | 'medium' | 'fast'>(DEFAULT_VOICE_LEVELING_SETTINGS.speed);
  private readonly noiseGateState = signal(DEFAULT_VOICE_LEVELING_SETTINGS.noiseGate);

  /** Whether voice leveling is enabled. */
  readonly enabled = computed(() => this.enabledState());
  /** Target loudness in dBFS. */
  readonly targetDbfs = computed(() => this.targetDbfsState());
  /** AGC strength preset. */
  readonly strength = computed(() => this.strengthState());
  /** Maximum gain boost in dB. */
  readonly maxGainDb = computed(() => this.maxGainDbState());
  /** Gain response speed preset. */
  readonly speed = computed(() => this.speedState());
  /** Whether the noise floor gate is active. */
  readonly noiseGate = computed(() => this.noiseGateState());
  /** Number of speakers currently being processed. */
  readonly activeSpeakerCount = computed(() => this.manager.activePipelineCount);

  /** Callbacks fired whenever the user flips the master enable toggle. */
  private listeners: ((enabled: boolean) => void)[] = [];

  constructor() {
    const logger = new WebRTCLogger(/* debugEnabled */ false);
    this.manager = new VoiceLevelingManager(logger);
    // Restore persisted preferences before anything reads the signals.
    this.restoreSettings();
  }

  /* ── Settings API ────────────────────────────────────────────── */

  /**
   * Master enable toggle (user facing).
   *
   * Persists the preference and notifies registered listeners so the
   * voice-controls component can rebuild its Audio elements immediately.
   * Per-peer pipelines are attached/detached via enable()/disable(), so
   * the manager is deliberately not reconfigured here.
   */
  setEnabled(enabled: boolean): void {
    this.enabledState.set(enabled);
    this.persistSettings();
    for (const listener of this.listeners) {
      listener(enabled);
    }
  }

  /** Set the target loudness in dBFS (clamped to -30 .. -12). */
  setTargetDbfs(value: number): void {
    const clamped = Math.min(-12, Math.max(-30, value));
    this.targetDbfsState.set(clamped);
    this.applyAndPersist({ targetDbfs: clamped });
  }

  /** Set the AGC strength preset. */
  setStrength(strength: 'low' | 'medium' | 'high'): void {
    this.strengthState.set(strength);
    this.applyAndPersist({ strength });
  }

  /** Set the maximum gain boost in dB (clamped to 3 .. 20). */
  setMaxGainDb(value: number): void {
    const clamped = Math.min(20, Math.max(3, value));
    this.maxGainDbState.set(clamped);
    this.applyAndPersist({ maxGainDb: clamped });
  }

  /** Set the gain response speed preset. */
  setSpeed(speed: 'slow' | 'medium' | 'fast'): void {
    this.speedState.set(speed);
    this.applyAndPersist({ speed });
  }

  /** Toggle the noise floor gate. */
  setNoiseGate(enabled: boolean): void {
    this.noiseGateState.set(enabled);
    this.applyAndPersist({ noiseGate: enabled });
  }

  /* ── Pipeline API (mirrors NoiseReductionManager pattern) ───── */

  /**
   * Build the AGC pipeline for a remote speaker and return the leveled
   * stream. The caller assigns it to `audio.srcObject`.
   *
   * @param peerId The remote peer's unique identifier.
   * @param stream The remote peer's raw {@link MediaStream}.
   * @returns The leveled {@link MediaStream} for playback.
   */
  async enable(peerId: string, stream: MediaStream): Promise<MediaStream> {
    return this.manager.enable(peerId, stream);
  }

  /**
   * Tear down the AGC pipeline for a single speaker; the caller swaps
   * the Audio element back to the raw stream.
   *
   * @param peerId The peer to clean up.
   */
  disable(peerId: string): void {
    this.manager.disable(peerId);
  }

  /** Tear down all speaker pipelines at once. */
  disableAll(): void {
    this.manager.disableAll();
  }

  /**
   * Set the post-AGC volume for a specific speaker.
   *
   * @param peerId The speaker's peer ID.
   * @param volume Normalised volume (0-1).
   */
  setSpeakerVolume(peerId: string, volume: number): void {
    this.manager.setSpeakerVolume(peerId, volume);
  }

  /**
   * Set the master volume applied after AGC to all speakers.
   *
   * @param volume Normalised volume (0-1).
   */
  setMasterVolume(volume: number): void {
    this.manager.setMasterVolume(volume);
  }

  /* ── Live toggle notification ────────────────────────────────── */

  /**
   * Register a callback fired whenever the user toggles the enabled
   * state.
   *
   * @returns An unsubscribe function that removes the callback.
   */
  onEnabledChange(callback: (enabled: boolean) => void): () => void {
    this.listeners.push(callback);
    return () => {
      this.listeners = this.listeners.filter((registered) => registered !== callback);
    };
  }

  /* ── Persistence ─────────────────────────────────────────────── */

  /** Snapshot the current signal values as one settings object. */
  private currentSettings(): VoiceLevelingSettings {
    return {
      enabled: this.enabledState(),
      targetDbfs: this.targetDbfsState(),
      strength: this.strengthState(),
      maxGainDb: this.maxGainDbState(),
      speed: this.speedState(),
      noiseGate: this.noiseGateState()
    };
  }

  /** Push a partial config update to the manager, then persist everything. */
  private applyAndPersist(partial: Partial<VoiceLevelingSettings>): void {
    this.manager.updateSettings(partial);
    this.persistSettings();
  }

  /** Persist all current settings to localStorage (best effort). */
  private persistSettings(): void {
    try {
      localStorage.setItem(
        STORAGE_KEY_VOICE_LEVELING_SETTINGS,
        JSON.stringify(this.currentSettings())
      );
    } catch { /* localStorage unavailable - ignore */ }
  }

  /** Load settings from localStorage, validate each field, and apply. */
  private restoreSettings(): void {
    try {
      const raw = localStorage.getItem(STORAGE_KEY_VOICE_LEVELING_SETTINGS);
      if (!raw) {
        return;
      }
      const saved = JSON.parse(raw) as Partial<VoiceLevelingSettings>;
      if (typeof saved.enabled === 'boolean') {
        this.enabledState.set(saved.enabled);
      }
      if (typeof saved.targetDbfs === 'number') {
        this.targetDbfsState.set(saved.targetDbfs);
      }
      if (saved.strength === 'low' || saved.strength === 'medium' || saved.strength === 'high') {
        this.strengthState.set(saved.strength);
      }
      if (typeof saved.maxGainDb === 'number') {
        this.maxGainDbState.set(saved.maxGainDb);
      }
      if (saved.speed === 'slow' || saved.speed === 'medium' || saved.speed === 'fast') {
        this.speedState.set(saved.speed);
      }
      if (typeof saved.noiseGate === 'boolean') {
        this.noiseGateState.set(saved.noiseGate);
      }
      // Push the restored (validated) values into the manager.
      this.manager.updateSettings(this.currentSettings());
    } catch { /* corrupted data - use defaults */ }
  }

  /* ── Cleanup ─────────────────────────────────────────────────── */

  /** Release all pipelines and drop registered listeners. */
  ngOnDestroy(): void {
    this.manager.destroy();
    this.listeners = [];
  }
}

View File

@@ -576,6 +576,15 @@ export class WebRTCService implements OnDestroy {
return this.mediaManager.getLocalStream();
}
/**
* Get the raw local microphone stream before gain / RNNoise processing.
*
* @returns The raw microphone {@link MediaStream}, or `null` if voice is not active.
*/
getRawMicStream(): MediaStream | null {
return this.mediaManager.getRawMicStream();
}
/**
* Request microphone access and start sending audio to all peers.
*
@@ -648,6 +657,18 @@ export class WebRTCService implements OnDestroy {
this.mediaManager.setOutputVolume(volume);
}
/**
* Set the input (microphone) volume.
*
* Adjusts a Web Audio GainNode on the local mic stream so the level
* sent to peers changes in real time without renegotiation.
*
* @param volume - Normalised volume (0-1).
*/
setInputVolume(volume: number): void {
this.mediaManager.setInputVolume(volume);
}
/**
* Set the maximum audio bitrate for all peer connections.
*

View File

@@ -12,4 +12,3 @@ export * from './peer-connection.manager';
export * from './media.manager';
export * from './screen-share.manager';
export * from './noise-reduction.manager';
export * from './voice-leveling.manager';

View File

@@ -55,6 +55,16 @@ export class MediaManager {
/** Remote audio output volume (0-1). */
private remoteAudioVolume = VOLUME_MAX;
// -- Input gain pipeline (mic volume) --
/** The stream BEFORE gain is applied (for identity checks). */
private preGainStream: MediaStream | null = null;
private inputGainCtx: AudioContext | null = null;
private inputGainSourceNode: MediaStreamAudioSourceNode | null = null;
private inputGainNode: GainNode | null = null;
private inputGainDest: MediaStreamAudioDestinationNode | null = null;
/** Normalised 0-1 input gain (1 = 100%). */
private inputGainVolume = 1.0;
/** Voice-presence heartbeat timer. */
private voicePresenceTimer: ReturnType<typeof setInterval> | null = null;
@@ -69,7 +79,7 @@ export class MediaManager {
* whether the worklet is actually running. This lets us honour the
* preference even when it is set before the mic stream is acquired.
*/
private _noiseReductionDesired = false;
private _noiseReductionDesired = true;
// State tracked locally (the service exposes these via signals)
private isVoiceActive = false;
@@ -102,6 +112,10 @@ export class MediaManager {
getLocalStream(): MediaStream | null {
return this.localMediaStream;
}
/** Returns the raw microphone stream before processing, if available. */
getRawMicStream(): MediaStream | null {
return this.rawMicStream;
}
/** Whether voice is currently active (mic captured). */
getIsVoiceActive(): boolean {
return this.isVoiceActive;
@@ -152,7 +166,7 @@ export class MediaManager {
const mediaConstraints: MediaStreamConstraints = {
audio: {
echoCancellation: true,
noiseSuppression: true,
noiseSuppression: !this._noiseReductionDesired,
autoGainControl: true
},
video: false
@@ -177,6 +191,9 @@ export class MediaManager {
? await this.noiseReduction.enable(stream)
: stream;
// Apply input gain (mic volume) before sending to peers
this.applyInputGainToCurrentStream();
this.logger.logStream('localVoice', this.localMediaStream);
this.bindLocalTracksToAllPeers();
@@ -196,6 +213,7 @@ export class MediaManager {
*/
disableVoice(): void {
this.noiseReduction.disable();
this.teardownInputGain();
// Stop the raw mic tracks (the denoised stream's tracks are
// derived nodes and will stop once their source is gone).
@@ -241,6 +259,9 @@ export class MediaManager {
this.localMediaStream = stream;
}
// Apply input gain (mic volume) before sending to peers
this.applyInputGainToCurrentStream();
this.bindLocalTracksToAllPeers();
this.isVoiceActive = true;
this.voiceConnected$.next();
@@ -252,16 +273,10 @@ export class MediaManager {
* @param muted - Explicit state; if omitted, the current state is toggled.
*/
toggleMute(muted?: boolean): void {
if (this.localMediaStream) {
const audioTracks = this.localMediaStream.getAudioTracks();
const newMutedState = muted !== undefined ? muted : !this.isMicMuted;
audioTracks.forEach((track) => {
track.enabled = !newMutedState;
});
this.isMicMuted = newMutedState;
}
this.applyCurrentMuteState();
}
/**
@@ -294,6 +309,11 @@ export class MediaManager {
this.noiseReduction.isEnabled
);
// Do not update the browser's built-in noiseSuppression constraint on the
// live mic track here. Chromium may share the underlying capture source,
// which can leak the constraint change into other active streams. We only
// apply the browser constraint when the microphone stream is acquired.
if (shouldEnable === this.noiseReduction.isEnabled)
return;
@@ -318,6 +338,9 @@ export class MediaManager {
}
}
// Re-apply input gain to the (possibly new) stream
this.applyInputGainToCurrentStream();
// Propagate the new audio track to every peer connection
this.bindLocalTracksToAllPeers();
}
@@ -331,6 +354,32 @@ export class MediaManager {
this.remoteAudioVolume = Math.max(VOLUME_MIN, Math.min(VOLUME_MAX, volume));
}
/**
* Set the input (microphone) volume.
*
* If a local stream is active the gain node is updated in real time.
* If no stream exists yet the value is stored and applied on connect.
*
* @param volume - Normalised 0-1 (0 = silent, 1 = 100%).
*/
/**
 * Set the input (microphone) volume.
 *
 * Updates the live gain node when the pipeline exists; otherwise stores
 * the value and, if a stream is already active, builds the gain pipeline
 * now so the level takes effect immediately.
 *
 * @param volume - Normalised 0-1 (0 = silent, 1 = 100%).
 */
setInputVolume(volume: number): void {
  const clamped = Math.min(1, Math.max(0, volume));
  this.inputGainVolume = clamped;
  if (this.inputGainNode) {
    // Pipeline already running - adjust gain in real time
    this.inputGainNode.gain.value = clamped;
    return;
  }
  if (this.localMediaStream) {
    // Stream is active but no gain pipeline yet - create it and rebind peers
    this.applyInputGainToCurrentStream();
    this.bindLocalTracksToAllPeers();
  }
}
/** Get the current input gain value (normalised 0-1). */
getInputVolume(): number {
return this.inputGainVolume;
}
/**
* Set the maximum audio bitrate on every active peer's audio sender.
*
@@ -525,8 +574,79 @@ export class MediaManager {
});
}
// -- Input gain helpers --
/**
* Route the current `localMediaStream` through a Web Audio GainNode so
* the microphone level can be adjusted without renegotiating peers.
*
* If a gain pipeline already exists for the same source stream the gain
* value is simply updated. Otherwise a new pipeline is created.
*/
/**
 * Route the current `localMediaStream` through a Web Audio GainNode so
 * the microphone level can be adjusted without renegotiating peers.
 *
 * If a gain pipeline already exists for the same source stream the gain
 * value is simply updated. Otherwise a new pipeline is created.
 */
private applyInputGainToCurrentStream(): void {
  const stream = this.localMediaStream;
  if (!stream)
    return;
  // Guard: localMediaStream may already BE the pipeline's output (this
  // method replaces it below). Rebuilding in that case would tear down
  // the pipeline and use its now-dead destination stream as the new
  // source, silencing the mic. Just refresh the gain instead.
  if (this.inputGainDest && stream === this.inputGainDest.stream && this.inputGainNode) {
    this.inputGainNode.gain.value = this.inputGainVolume;
    return;
  }
  // If the source stream hasn't changed, just update gain
  if (this.preGainStream === stream && this.inputGainNode && this.inputGainCtx) {
    this.inputGainNode.gain.value = this.inputGainVolume;
    return;
  }
  // Tear down the old pipeline (if any)
  this.teardownInputGain();
  // Build new pipeline: source → gain → destination
  this.preGainStream = stream;
  this.inputGainCtx = new AudioContext();
  // Autoplay policies can start the context suspended; resume best-effort
  // so the gained stream actually produces audio.
  if (this.inputGainCtx.state === 'suspended') {
    this.inputGainCtx.resume().catch(() => { /* needs user gesture */ });
  }
  this.inputGainSourceNode = this.inputGainCtx.createMediaStreamSource(stream);
  this.inputGainNode = this.inputGainCtx.createGain();
  this.inputGainNode.gain.value = this.inputGainVolume;
  this.inputGainDest = this.inputGainCtx.createMediaStreamDestination();
  this.inputGainSourceNode.connect(this.inputGainNode);
  this.inputGainNode.connect(this.inputGainDest);
  // Replace localMediaStream with the gained stream
  this.localMediaStream = this.inputGainDest.stream;
  // New destination tracks default to enabled - re-apply the mute state
  this.applyCurrentMuteState();
}
/** Keep the active outbound track aligned with the stored mute state. */
private applyCurrentMuteState(): void {
if (!this.localMediaStream)
return;
const enabled = !this.isMicMuted;
this.localMediaStream.getAudioTracks().forEach((track) => {
track.enabled = enabled;
});
}
/** Disconnect the gain pipeline nodes and close their AudioContext. */
private teardownInputGain(): void {
  try {
    this.inputGainSourceNode?.disconnect();
    this.inputGainNode?.disconnect();
  } catch {
    /* nodes were already disconnected - nothing to do */
  }
  const ctx = this.inputGainCtx;
  if (ctx && ctx.state !== 'closed') {
    // Best-effort close; failures are irrelevant during teardown
    ctx.close().catch(() => {});
  }
  this.inputGainCtx = null;
  this.inputGainSourceNode = null;
  this.inputGainNode = null;
  this.inputGainDest = null;
  this.preGainStream = null;
}
/** Clean up all resources. */
destroy(): void {
this.teardownInputGain();
this.disableVoice();
this.stopVoiceHeartbeat();
this.noiseReduction.destroy();

View File

@@ -1,382 +0,0 @@
/* eslint-disable id-length, max-statements-per-line */
/**
* VoiceLevelingManager - manages per-speaker automatic gain control
* pipelines for remote voice streams.
*
* ═══════════════════════════════════════════════════════════════════
* ARCHITECTURE
* ═══════════════════════════════════════════════════════════════════
*
* For every remote MediaStream a dedicated processing chain is built:
*
* Remote MediaStreamTrack
* ↓
* MediaStreamSource (AudioContext)
* ↓
* AudioWorkletNode (VoiceLevelingProcessor - per-speaker AGC)
* ↓
* GainNode (post fine-tuning - master volume knob)
* ↓
* MediaStreamDestination → leveled MediaStream
*
* Each speaker gets its own AudioWorkletNode instance so that the
* AGC adapts independently to each person's microphone level.
*
* A fallback mode using {@link DynamicsCompressorNode} is provided
* for browsers that don't support AudioWorklet or SharedArrayBuffer.
*
* ═══════════════════════════════════════════════════════════════════
* DESIGN - mirrors the NoiseReductionManager pattern
* ═══════════════════════════════════════════════════════════════════
*
* • `enable(peerId, rawStream)` builds the pipeline and returns a
* processed stream.
* • `disable(peerId)` tears down the pipeline. The caller swaps
* the Audio element's srcObject back to the raw stream.
* • `disableAll()` tears down every pipeline at once.
*
* The calling component keeps a reference to the original raw stream
* and swaps the Audio element's `srcObject` between the raw stream
* and the leveled stream when the user toggles the feature - exactly
* like noise reduction does for the local mic.
*
* ═══════════════════════════════════════════════════════════════════
*/
import { WebRTCLogger } from './webrtc-logger';
/* ──────────────────────────────────────────────────────────────── */
/* Types */
/* ──────────────────────────────────────────────────────────────── */
/** User-configurable voice leveling parameters. */
export interface VoiceLevelingSettings {
/** Master on/off toggle. When false, audio passes through unchanged. */
enabled: boolean;
/** Target loudness in dBFS (30 … 12). Default 18. */
targetDbfs: number;
/** AGC strength preset. Default 'medium'. */
strength: 'low' | 'medium' | 'high';
/** Maximum gain boost in dB (3 … 20). Default 12. */
maxGainDb: number;
/** Gain response speed preset. Default 'medium'. */
speed: 'slow' | 'medium' | 'fast';
/** Whether the silence noise gate is active. Default false. */
noiseGate: boolean;
}
/**
 * Default settings used when none are explicitly provided.
 * Leveling is opt-in: `enabled` defaults to false.
 */
export const DEFAULT_VOICE_LEVELING_SETTINGS: VoiceLevelingSettings = {
enabled: false,
targetDbfs: -18,
strength: 'medium',
maxGainDb: 12,
speed: 'medium',
noiseGate: false
};
/**
 * Internal bookkeeping for a single speaker's processing chain.
 */
interface SpeakerPipeline {
// AudioContext the nodes live in (shared across pipelines)
ctx: AudioContext;
// Source node wrapping the remote MediaStream
source: MediaStreamAudioSourceNode;
// Per-speaker AGC worklet; null in fallback mode
workletNode: AudioWorkletNode | null;
// Fallback compressor; null when the worklet is in use
compressorNode: DynamicsCompressorNode | null;
// Post-AGC gain (per-speaker / master volume knob)
gainNode: GainNode;
// Destination producing the leveled output stream
destination: MediaStreamAudioDestinationNode;
// The raw remote stream this pipeline was built from
originalStream: MediaStream;
// True when using the DynamicsCompressor fallback
isFallback: boolean;
}
/** AudioWorklet module path, resolved relative to the page (served from public/). */
const WORKLET_MODULE_PATH = 'voice-leveling-worklet.js';
/** Processor name - must match `registerProcessor` in the worklet. */
const WORKLET_PROCESSOR_NAME = 'VoiceLevelingProcessor';
/* ──────────────────────────────────────────────────────────────── */
/* Manager */
/* ──────────────────────────────────────────────────────────────── */
export class VoiceLevelingManager {
  /** Active per-speaker pipelines keyed by peer ID. */
  private readonly pipelines = new Map<string, SpeakerPipeline>();
  /** Cached DSP settings pushed to worklets. */
  private _settings: VoiceLevelingSettings = { ...DEFAULT_VOICE_LEVELING_SETTINGS };
  /** Whether the AudioWorklet module is available. */
  private _workletAvailable: boolean | null = null;
  /** Shared AudioContext (avoids browser per-page limits). */
  private _sharedCtx: AudioContext | null = null;
  /** Whether the worklet module has been loaded. */
  private _workletLoaded = false;
  constructor(private readonly logger: WebRTCLogger) {}
  /* ── Public API ─────────────────────────────────────────────── */
  /** Current DSP settings snapshot (treat as read-only). */
  get settings(): Readonly<VoiceLevelingSettings> {
    return this._settings;
  }
  /** Peer IDs that currently have an active leveling pipeline. */
  get activePeerIds(): string[] {
    return Array.from(this.pipelines.keys());
  }
  /** Number of active per-speaker pipelines. */
  get activePipelineCount(): number {
    return this.pipelines.size;
  }
  /**
   * Update DSP settings and propagate to all active worklets.
   * Only provided keys are updated; the rest stay unchanged.
   * Fallback (compressor) pipelines have fixed parameters and are
   * unaffected by this call.
   */
  updateSettings(partial: Partial<VoiceLevelingSettings>): void {
    this._settings = { ...this._settings,
      ...partial };
    this.pipelines.forEach((p) => this._pushSettingsToPipeline(p));
  }
  /**
   * Enable voice leveling for a single speaker.
   *
   * Builds the processing pipeline and returns the leveled
   * {@link MediaStream}. The caller sets this as the Audio
   * element's `srcObject`.
   *
   * If a pipeline already exists for this peer with the **same**
   * raw stream, the existing leveled stream is returned (no rebuild).
   *
   * @param peerId Remote peer identifier.
   * @param stream The remote peer's raw MediaStream.
   * @returns The leveled MediaStream (or raw on failure).
   */
  async enable(peerId: string, stream: MediaStream): Promise<MediaStream> {
    // Reuse existing pipeline if it targets the same stream
    const existing = this.pipelines.get(peerId);
    if (existing && existing.originalStream === stream) {
      return existing.destination.stream;
    }
    // Tear down stale pipeline for this peer
    if (existing) {
      this._disposePipeline(existing);
      this.pipelines.delete(peerId);
    }
    // No audio tracks → nothing to process
    if (stream.getAudioTracks().length === 0) {
      this.logger.info('VoiceLeveling: no audio tracks, skipping', { peerId });
      return stream;
    }
    try {
      const pipeline = await this._buildPipeline(stream);
      this.pipelines.set(peerId, pipeline);
      this.logger.info('VoiceLeveling: pipeline created', {
        peerId,
        fallback: pipeline.isFallback
      });
      return pipeline.destination.stream;
    } catch (err) {
      // Degrade gracefully: the caller keeps playing the unprocessed stream
      this.logger.error('VoiceLeveling: pipeline build failed, returning raw stream', err);
      return stream;
    }
  }
  /**
   * Disable voice leveling for a single speaker.
   *
   * Tears down the pipeline. The caller is responsible for swapping
   * the Audio element's `srcObject` back to the raw stream.
   */
  disable(peerId: string): void {
    const pipeline = this.pipelines.get(peerId);
    if (!pipeline)
      return;
    this._disposePipeline(pipeline);
    this.pipelines.delete(peerId);
    this.logger.info('VoiceLeveling: pipeline removed', { peerId });
  }
  /** Tear down ALL speaker pipelines. */
  disableAll(): void {
    this.pipelines.forEach((p) => this._disposePipeline(p));
    this.pipelines.clear();
  }
  /** Set the post-AGC playback gain for one speaker (clamped 0-1). No-op if no pipeline. */
  setSpeakerVolume(peerId: string, volume: number): void {
    const pipeline = this.pipelines.get(peerId);
    if (!pipeline)
      return;
    pipeline.gainNode.gain.setValueAtTime(
      Math.max(0, Math.min(1, volume)),
      pipeline.ctx.currentTime
    );
  }
  /** Apply the same post-AGC gain (clamped 0-1) to every active pipeline. */
  setMasterVolume(volume: number): void {
    const clamped = Math.max(0, Math.min(1, volume));
    this.pipelines.forEach((pipeline) => {
      pipeline.gainNode.gain.setValueAtTime(clamped, pipeline.ctx.currentTime);
    });
  }
  /** Tear down all pipelines and release all resources. */
  destroy(): void {
    this.disableAll();
    if (this._sharedCtx && this._sharedCtx.state !== 'closed') {
      this._sharedCtx.close().catch(() => { /* best-effort */ });
    }
    this._sharedCtx = null;
    this._workletLoaded = false;
    this._workletAvailable = null;
  }
  /* ── Pipeline construction ──────────────────────────────────── */
  /**
   * Build a processing chain for one raw stream: source → AGC (worklet
   * or compressor fallback) → gain → destination. Throws if node
   * construction fails; `enable` catches and degrades to the raw stream.
   */
  private async _buildPipeline(stream: MediaStream): Promise<SpeakerPipeline> {
    const ctx = await this._getOrCreateContext();
    // Autoplay policies may have suspended the shared context
    if (ctx.state === 'suspended') {
      await ctx.resume();
    }
    const source = ctx.createMediaStreamSource(stream);
    const gainNode = ctx.createGain();
    gainNode.gain.value = 1.0;
    const destination = ctx.createMediaStreamDestination();
    const workletOk = await this._ensureWorkletLoaded(ctx);
    if (workletOk) {
      const workletNode = new AudioWorkletNode(ctx, WORKLET_PROCESSOR_NAME);
      source.connect(workletNode);
      workletNode.connect(gainNode);
      gainNode.connect(destination);
      const pipeline: SpeakerPipeline = {
        ctx,
        source,
        workletNode,
        compressorNode: null,
        gainNode,
        destination,
        originalStream: stream,
        isFallback: false
      };
      // Push current settings so the new worklet starts configured
      this._pushSettingsToPipeline(pipeline);
      return pipeline;
    } else {
      this.logger.warn('VoiceLeveling: AudioWorklet unavailable, using fallback compressor');
      const compressor = this._createFallbackCompressor(ctx);
      source.connect(compressor);
      compressor.connect(gainNode);
      gainNode.connect(destination);
      return {
        ctx,
        source,
        workletNode: null,
        compressorNode: compressor,
        gainNode,
        destination,
        originalStream: stream,
        isFallback: true
      };
    }
  }
  /**
   * Get or create the shared AudioContext.
   *
   * Uses the system default sample rate (instead of forcing 48 kHz)
   * to avoid resampling issues with remote WebRTC streams whose
   * sample rate is determined by the sender's codec.
   */
  private async _getOrCreateContext(): Promise<AudioContext> {
    if (this._sharedCtx && this._sharedCtx.state !== 'closed') {
      return this._sharedCtx;
    }
    this._sharedCtx = new AudioContext();
    // A fresh context needs the worklet module loaded again
    this._workletLoaded = false;
    return this._sharedCtx;
  }
  /**
   * Load the worklet module into `ctx` once, caching availability.
   * A previous hard failure (`_workletAvailable === false`) is sticky
   * and short-circuits further attempts.
   */
  private async _ensureWorkletLoaded(ctx: AudioContext): Promise<boolean> {
    if (this._workletAvailable === false)
      return false;
    if (this._workletLoaded && this._workletAvailable === true)
      return true;
    try {
      await ctx.audioWorklet.addModule(WORKLET_MODULE_PATH);
      this._workletLoaded = true;
      this._workletAvailable = true;
      this.logger.info('VoiceLeveling: worklet module loaded');
      return true;
    } catch (err) {
      this.logger.error('VoiceLeveling: worklet module failed to load', err);
      this._workletAvailable = false;
      return false;
    }
  }
  /** Build the fixed-parameter DynamicsCompressor used when no worklet is available. */
  private _createFallbackCompressor(ctx: AudioContext): DynamicsCompressorNode {
    const compressor = ctx.createDynamicsCompressor();
    compressor.threshold.setValueAtTime(-24, ctx.currentTime);
    compressor.knee.setValueAtTime(30, ctx.currentTime);
    compressor.ratio.setValueAtTime(3, ctx.currentTime);
    compressor.attack.setValueAtTime(0.01, ctx.currentTime);
    compressor.release.setValueAtTime(0.25, ctx.currentTime);
    return compressor;
  }
  /* ── Settings propagation ───────────────────────────────────── */
  /** Post the cached settings to a pipeline's worklet (no-op for fallback pipelines). */
  private _pushSettingsToPipeline(pipeline: SpeakerPipeline): void {
    if (pipeline.workletNode) {
      pipeline.workletNode.port.postMessage({
        enabled: true, // Pipeline only exists when leveling is on; DSP always active
        targetDbfs: this._settings.targetDbfs,
        maxGainDb: this._settings.maxGainDb,
        strength: this._settings.strength,
        speed: this._settings.speed,
        noiseGate: this._settings.noiseGate
      });
    }
  }
  /* ── Cleanup ────────────────────────────────────────────────── */
  /** Disconnect every node of a pipeline; safe to call on partially-dead chains. */
  private _disposePipeline(pipeline: SpeakerPipeline): void {
    try { pipeline.source.disconnect(); } catch { /* already disconnected */ }
    try { pipeline.workletNode?.disconnect(); } catch { /* ok */ }
    try { pipeline.compressorNode?.disconnect(); } catch { /* ok */ }
    try { pipeline.gainNode.disconnect(); } catch { /* ok */ }
    try { pipeline.destination.disconnect(); } catch { /* ok */ }
  }
}

View File

@@ -205,11 +205,14 @@ export class AdminPanelComponent {
/** Change a member's role and broadcast the update to all peers. */
changeRole(user: User, role: 'admin' | 'moderator' | 'member'): void {
const roomId = this.currentRoom()?.id;
this.store.dispatch(UsersActions.updateUserRole({ userId: user.id,
role }));
this.webrtc.broadcastMessage({
type: 'role-change',
roomId,
targetUserId: user.id,
role
});

View File

@@ -33,7 +33,7 @@
class="w-4 h-4"
/>
<span>Users</span>
<span class="text-xs px-1.5 py-0.5 rounded-full bg-primary/15 text-primary">{{ onlineUsers().length }}</span>
<span class="text-xs px-1.5 py-0.5 rounded-full bg-primary/15 text-primary">{{ knownUserCount() }}</span>
</button>
</div>
</div>
@@ -149,7 +149,10 @@
@if (voiceUsersInRoom(ch.id).length > 0) {
<div class="ml-5 mt-1 space-y-1">
@for (u of voiceUsersInRoom(ch.id); track u.id) {
<div class="flex items-center gap-2 px-2 py-1.5 rounded hover:bg-secondary/40">
<div
class="flex items-center gap-2 px-2 py-1.5 rounded hover:bg-secondary/40"
(contextmenu)="openVoiceUserVolumeMenu($event, u)"
>
<app-user-avatar
[name]="u.displayName"
[avatarUrl]="u.avatarUrl"
@@ -187,6 +190,13 @@
class="w-4 h-4 text-muted-foreground"
/>
}
@if (isUserLocallyMuted(u)) {
<ng-icon
name="lucideVolumeX"
class="w-4 h-4 text-destructive"
title="Muted by you"
/>
}
</div>
}
</div>
@@ -300,8 +310,42 @@
</div>
}
<!-- Offline Users -->
@if (offlineRoomMembers().length > 0) {
<div class="mb-4">
<h4 class="text-xs uppercase tracking-wide text-muted-foreground font-medium mb-2 px-1">Offline - {{ offlineRoomMembers().length }}</h4>
<div class="space-y-1">
@for (member of offlineRoomMembers(); track member.oderId || member.id) {
<div class="flex items-center gap-2 px-2 py-1.5 rounded opacity-80">
<div class="relative">
<app-user-avatar
[name]="member.displayName"
[avatarUrl]="member.avatarUrl"
size="sm"
/>
<span class="absolute bottom-0 right-0 w-2.5 h-2.5 rounded-full bg-gray-500 ring-2 ring-card"></span>
</div>
<div class="flex-1 min-w-0">
<div class="flex items-center gap-1.5">
<p class="text-sm text-foreground/80 truncate">{{ member.displayName }}</p>
@if (member.role === 'host') {
<span class="text-[10px] bg-yellow-500/20 text-yellow-400 px-1 py-0.5 rounded font-medium">Owner</span>
} @else if (member.role === 'admin') {
<span class="text-[10px] bg-blue-500/20 text-blue-400 px-1 py-0.5 rounded font-medium">Admin</span>
} @else if (member.role === 'moderator') {
<span class="text-[10px] bg-green-500/20 text-green-400 px-1 py-0.5 rounded font-medium">Mod</span>
}
</div>
<p class="text-[10px] text-muted-foreground">Offline</p>
</div>
</div>
}
</div>
</div>
}
<!-- No other users message -->
@if (onlineUsersFiltered().length === 0) {
@if (onlineUsersFiltered().length === 0 && offlineRoomMembers().length === 0) {
<div class="text-center py-4 text-muted-foreground">
<p class="text-sm">No other users in this server</p>
</div>
@@ -406,6 +450,17 @@
</app-context-menu>
}
<!-- Per-user volume context menu -->
@if (showVolumeMenu()) {
<app-user-volume-menu
[x]="volumeMenuX()"
[y]="volumeMenuY()"
[peerId]="volumeMenuPeerId()"
[displayName]="volumeMenuDisplayName()"
(closed)="showVolumeMenu.set(false)"
/>
}
<!-- Create channel dialog -->
@if (showCreateChannelDialog()) {
<app-confirm-dialog

View File

@@ -2,6 +2,7 @@
import {
Component,
inject,
computed,
signal
} from '@angular/core';
import { CommonModule } from '@angular/common';
@@ -16,7 +17,8 @@ import {
lucideMonitor,
lucideHash,
lucideUsers,
lucidePlus
lucidePlus,
lucideVolumeX
} from '@ng-icons/lucide';
import {
selectOnlineUsers,
@@ -35,15 +37,18 @@ import { MessagesActions } from '../../../store/messages/messages.actions';
import { WebRTCService } from '../../../core/services/webrtc.service';
import { VoiceSessionService } from '../../../core/services/voice-session.service';
import { VoiceActivityService } from '../../../core/services/voice-activity.service';
import { VoicePlaybackService } from '../../voice/voice-controls/services/voice-playback.service';
import { VoiceControlsComponent } from '../../voice/voice-controls/voice-controls.component';
import {
ContextMenuComponent,
UserAvatarComponent,
ConfirmDialogComponent
ConfirmDialogComponent,
UserVolumeMenuComponent
} from '../../../shared';
import {
Channel,
ChatEvent,
RoomMember,
Room,
User
} from '../../../core/models';
@@ -54,7 +59,16 @@ type TabView = 'channels' | 'users';
@Component({
selector: 'app-rooms-side-panel',
standalone: true,
imports: [CommonModule, FormsModule, NgIcon, VoiceControlsComponent, ContextMenuComponent, UserAvatarComponent, ConfirmDialogComponent],
imports: [
CommonModule,
FormsModule,
NgIcon,
VoiceControlsComponent,
ContextMenuComponent,
UserVolumeMenuComponent,
UserAvatarComponent,
ConfirmDialogComponent
],
viewProviders: [
provideIcons({
lucideMessageSquare,
@@ -64,7 +78,8 @@ type TabView = 'channels' | 'users';
lucideMonitor,
lucideHash,
lucideUsers,
lucidePlus
lucidePlus,
lucideVolumeX
})
],
templateUrl: './rooms-side-panel.component.html'
@@ -76,6 +91,7 @@ export class RoomsSidePanelComponent {
private store = inject(Store);
private webrtc = inject(WebRTCService);
private voiceSessionService = inject(VoiceSessionService);
private voicePlayback = inject(VoicePlaybackService);
voiceActivity = inject(VoiceActivityService);
activeTab = signal<TabView>('channels');
@@ -87,6 +103,31 @@ export class RoomsSidePanelComponent {
activeChannelId = this.store.selectSignal(selectActiveChannelId);
textChannels = this.store.selectSignal(selectTextChannels);
voiceChannels = this.store.selectSignal(selectVoiceChannels);
// Persisted member list of the current room (empty when no room is active)
roomMembers = computed(() => this.currentRoom()?.members ?? []);
// Room members that are not currently online; the local user is always
// treated as online and therefore excluded.
offlineRoomMembers = computed(() => {
  const current = this.currentUser();
  // NOTE(review): assumes `oderId` is the stable cross-session key, with
  // `id` as fallback - confirm against the User model
  const onlineIds = new Set(this.onlineUsers().map((user) => user.oderId || user.id));
  if (current) {
    onlineIds.add(current.oderId || current.id);
  }
  return this.roomMembers().filter((member) => !onlineIds.has(this.roomMemberKey(member)));
});
// Count of distinct known users: persisted members plus the local user
knownUserCount = computed(() => {
  const memberIds = new Set(
    this.roomMembers()
      .map((member) => this.roomMemberKey(member))
      .filter(Boolean)
  );
  const current = this.currentUser();
  if (current) {
    memberIds.add(current.oderId || current.id);
  }
  return memberIds.size;
});
// Channel context menu state
showChannelMenu = signal(false);
@@ -108,6 +149,13 @@ export class RoomsSidePanelComponent {
userMenuY = signal(0);
contextMenuUser = signal<User | null>(null);
// Per-user volume context menu state
showVolumeMenu = signal(false);
volumeMenuX = signal(0);
volumeMenuY = signal(0);
volumeMenuPeerId = signal('');
volumeMenuDisplayName = signal('');
/** Return online users excluding the current user. */
// Filter out current user from online users list
onlineUsersFiltered() {
@@ -118,6 +166,10 @@ export class RoomsSidePanelComponent {
return this.onlineUsers().filter((user) => user.id !== currentId && user.oderId !== currentOderId);
}
/** Stable identity key for a room member (prefers oderId, falls back to id). */
private roomMemberKey(member: RoomMember): string {
  return member.oderId || member.id;
}
/** Check whether the current user has permission to manage channels. */
canManageChannels(): boolean {
const room = this.currentRoom();
@@ -287,9 +339,27 @@ export class RoomsSidePanelComponent {
this.showUserMenu.set(false);
}
/** Open the per-user volume context menu for a voice channel participant. */
openVoiceUserVolumeMenu(evt: MouseEvent, user: User) {
  evt.preventDefault();
  // The local user adjusts their own levels in settings - no menu for self
  const me = this.currentUser();
  const isSelf = user.id === me?.id || user.oderId === me?.oderId;
  if (isSelf) {
    return;
  }
  this.volumeMenuPeerId.set(user.oderId || user.id);
  this.volumeMenuDisplayName.set(user.displayName);
  this.volumeMenuX.set(evt.clientX);
  this.volumeMenuY.set(evt.clientY);
  this.showVolumeMenu.set(true);
}
/** Change a user's role and broadcast the update to connected peers. */
changeUserRole(role: 'admin' | 'moderator' | 'member') {
const user = this.contextMenuUser();
const roomId = this.currentRoom()?.id;
this.closeUserMenu();
@@ -298,6 +368,7 @@ export class RoomsSidePanelComponent {
// Broadcast role change to peers
this.webrtc.broadcastMessage({
type: 'role-change',
roomId,
targetUserId: user.id,
role
});
@@ -377,11 +448,29 @@ export class RoomsSidePanelComponent {
private onVoiceJoinSucceeded(roomId: string, room: Room, current: User | null): void {
this.updateVoiceStateStore(roomId, room, current);
this.trackCurrentUserMic();
this.startVoiceHeartbeat(roomId, room);
this.broadcastVoiceConnected(roomId, room, current);
this.startVoiceSession(roomId, room);
}
/** Start local voice-activity tracking on the raw (pre-processing) mic stream. */
private trackCurrentUserMic(): void {
  const userId = this.currentUser()?.oderId || this.currentUser()?.id;
  const micStream = this.webrtc.getRawMicStream();
  // Both a user identity and an acquired mic stream are required
  if (userId && micStream) {
    this.voiceActivity.trackLocalMic(userId, micStream);
  }
}
/** Stop local voice-activity tracking for the current user, if identified. */
private untrackCurrentUserMic(): void {
  const userId = this.currentUser()?.oderId || this.currentUser()?.id;
  if (userId) {
    this.voiceActivity.untrackLocalMic(userId);
  }
}
private updateVoiceStateStore(roomId: string, room: Room, current: User | null): void {
if (!current?.id)
return;
@@ -445,6 +534,8 @@ export class RoomsSidePanelComponent {
// Stop voice heartbeat
this.webrtc.stopVoiceHeartbeat();
this.untrackCurrentUserMic();
// Disable voice locally
this.webrtc.disableVoice();
@@ -484,11 +575,7 @@ export class RoomsSidePanelComponent {
/**
 * Count the number of users connected to a voice channel in the current
 * room, including the local user. Delegates to voiceUsersInRoom so the
 * count and the rendered participant list can never disagree.
 */
voiceOccupancy(roomId: string): number {
  return this.voiceUsersInRoom(roomId).length;
}
/** Dispatch a viewer:focus event to display a remote user's screen share. */
@@ -505,6 +592,13 @@ export class RoomsSidePanelComponent {
window.dispatchEvent(evt);
}
/** Check whether the local user has muted a specific voice user. */
isUserLocallyMuted(user: User): boolean {
  return this.voicePlayback.isUserMuted(user.oderId || user.id);
}
/** Check whether a user is currently sharing their screen. */
isUserSharing(userId: string): boolean {
const me = this.currentUser();
@@ -524,13 +618,33 @@ export class RoomsSidePanelComponent {
return !!stream && stream.getVideoTracks().length > 0;
}
/**
 * Return all users currently connected to a specific voice channel,
 * including the local user (prepended when not already present in the
 * online-users list).
 */
voiceUsersInRoom(roomId: string) {
  const room = this.currentRoom();
  const me = this.currentUser();
  const remoteUsers = this.onlineUsers().filter(
    (user) => !!user.voiceState?.isConnected && user.voiceState?.roomId === roomId && user.voiceState?.serverId === room?.id
  );
  // Include the local user at the top if they are in this voice channel
  if (
    me?.voiceState?.isConnected &&
    me.voiceState?.roomId === roomId &&
    me.voiceState?.serverId === room?.id
  ) {
    // Avoid duplicates if the current user is already in onlineUsers
    const alreadyIncluded = remoteUsers.some(
      (user) => user.id === me.id || user.oderId === me.oderId
    );
    if (!alreadyIncluded) {
      return [me, ...remoteUsers];
    }
  }
  return remoteUsers;
}
/** Check whether the current user is connected to the specified voice channel. */

View File

@@ -52,11 +52,14 @@ export class MembersSettingsComponent {
}
changeRole(user: User, role: 'admin' | 'moderator' | 'member'): void {
const roomId = this.server()?.id;
this.store.dispatch(UsersActions.updateUserRole({ userId: user.id,
role }));
this.webrtcService.broadcastMessage({
type: 'role-change',
roomId,
targetUserId: user.id,
role
});

View File

@@ -229,178 +229,4 @@
</div>
</div>
</section>
<!-- Voice Leveling -->
<section>
<div class="flex items-center gap-2 mb-3">
<ng-icon
name="lucideActivity"
class="w-5 h-5 text-muted-foreground"
/>
<h4 class="text-sm font-semibold text-foreground">Voice Leveling</h4>
</div>
<div class="space-y-3">
<!-- Master toggle -->
<div class="flex items-center justify-between">
<div>
<p class="text-sm font-medium text-foreground">Voice Leveling</p>
<p class="text-xs text-muted-foreground">Automatically equalise volume across speakers</p>
</div>
<label class="relative inline-flex items-center cursor-pointer">
<input
type="checkbox"
[checked]="voiceLeveling.enabled()"
(change)="onVoiceLevelingToggle()"
id="voice-leveling-toggle"
aria-label="Toggle voice leveling"
class="sr-only peer"
/>
<div
class="w-10 h-5 bg-secondary rounded-full peer peer-checked:bg-primary peer-checked:after:translate-x-full after:content-[''] after:absolute after:top-0.5 after:left-[2px] after:bg-white after:rounded-full after:h-4 after:w-4 after:transition-all"
></div>
</label>
</div>
<!-- Advanced controls - visible only when enabled -->
@if (voiceLeveling.enabled()) {
<div class="space-y-3 pl-1 border-l-2 border-primary/20 ml-1">
<!-- Target Loudness -->
<div class="pl-3">
<label
for="target-loudness-slider"
class="block text-xs font-medium text-muted-foreground mb-1"
>
Target Loudness: {{ voiceLeveling.targetDbfs() }} dBFS
</label>
<input
type="range"
[value]="voiceLeveling.targetDbfs()"
(input)="onTargetDbfsChange($event)"
min="-30"
max="-12"
step="1"
id="target-loudness-slider"
class="w-full h-1.5 bg-secondary rounded-lg appearance-none cursor-pointer accent-primary"
/>
<div class="flex justify-between text-[10px] text-muted-foreground/60 mt-0.5">
<span>-30 (quiet)</span>
<span>-12 (loud)</span>
</div>
</div>
<!-- AGC Strength -->
<div class="pl-3">
<label
for="agc-strength-select"
class="block text-xs font-medium text-muted-foreground mb-1"
>AGC Strength</label
>
<select
(change)="onStrengthChange($event)"
id="agc-strength-select"
class="w-full px-3 py-2 bg-secondary rounded-lg border border-border text-foreground text-sm focus:outline-none focus:ring-2 focus:ring-primary"
>
<option
value="low"
[selected]="voiceLeveling.strength() === 'low'"
>
Low (gentle)
</option>
<option
value="medium"
[selected]="voiceLeveling.strength() === 'medium'"
>
Medium
</option>
<option
value="high"
[selected]="voiceLeveling.strength() === 'high'"
>
High (aggressive)
</option>
</select>
</div>
<!-- Max Gain Boost -->
<div class="pl-3">
<label
for="max-gain-slider"
class="block text-xs font-medium text-muted-foreground mb-1"
>
Max Gain Boost: {{ voiceLeveling.maxGainDb() }} dB
</label>
<input
type="range"
[value]="voiceLeveling.maxGainDb()"
(input)="onMaxGainDbChange($event)"
min="3"
max="20"
step="1"
id="max-gain-slider"
class="w-full h-1.5 bg-secondary rounded-lg appearance-none cursor-pointer accent-primary"
/>
<div class="flex justify-between text-[10px] text-muted-foreground/60 mt-0.5">
<span>3 dB (subtle)</span>
<span>20 dB (strong)</span>
</div>
</div>
<!-- Response Speed -->
<div class="pl-3">
<label
for="response-speed-select"
class="block text-xs font-medium text-muted-foreground mb-1"
>
Response Speed
</label>
<select
(change)="onSpeedChange($event)"
id="response-speed-select"
class="w-full px-3 py-2 bg-secondary rounded-lg border border-border text-foreground text-sm focus:outline-none focus:ring-2 focus:ring-primary"
>
<option
value="slow"
[selected]="voiceLeveling.speed() === 'slow'"
>
Slow (natural)
</option>
<option
value="medium"
[selected]="voiceLeveling.speed() === 'medium'"
>
Medium
</option>
<option
value="fast"
[selected]="voiceLeveling.speed() === 'fast'"
>
Fast (aggressive)
</option>
</select>
</div>
<!-- Noise Floor Gate -->
<div class="pl-3 flex items-center justify-between">
<div>
<p class="text-sm font-medium text-foreground">Noise Floor Gate</p>
<p class="text-xs text-muted-foreground">Prevents boosting silence</p>
</div>
<label class="relative inline-flex items-center cursor-pointer">
<input
type="checkbox"
[checked]="voiceLeveling.noiseGate()"
(change)="onNoiseGateToggle()"
id="noise-gate-toggle"
aria-label="Toggle noise floor gate"
class="sr-only peer"
/>
<div
class="w-10 h-5 bg-secondary rounded-full peer peer-checked:bg-primary peer-checked:after:translate-x-full after:content-[''] after:absolute after:top-0.5 after:left-[2px] after:bg-white after:rounded-full after:h-4 after:w-4 after:transition-all"
></div>
</label>
</div>
</div>
}
</div>
</section>
</div>

View File

@@ -10,12 +10,11 @@ import { NgIcon, provideIcons } from '@ng-icons/core';
import {
lucideMic,
lucideHeadphones,
lucideAudioLines,
lucideActivity
lucideAudioLines
} from '@ng-icons/lucide';
import { WebRTCService } from '../../../../core/services/webrtc.service';
import { VoiceLevelingService } from '../../../../core/services/voice-leveling.service';
import { VoicePlaybackService } from '../../../voice/voice-controls/services/voice-playback.service';
import { NotificationAudioService, AppSound } from '../../../../core/services/notification-audio.service';
import { STORAGE_KEY_VOICE_SETTINGS } from '../../../../core/constants';
@@ -36,15 +35,14 @@ interface AudioDevice {
provideIcons({
lucideMic,
lucideHeadphones,
lucideAudioLines,
lucideActivity
lucideAudioLines
})
],
templateUrl: './voice-settings.component.html'
})
export class VoiceSettingsComponent {
private webrtcService = inject(WebRTCService);
readonly voiceLeveling = inject(VoiceLevelingService);
private voicePlayback = inject(VoicePlaybackService);
readonly audioService = inject(NotificationAudioService);
inputDevices = signal<AudioDevice[]>([]);
@@ -56,7 +54,7 @@ export class VoiceSettingsComponent {
audioBitrate = signal(96);
latencyProfile = signal<'low' | 'balanced' | 'high'>('balanced');
includeSystemAudio = signal(false);
noiseReduction = signal(false);
noiseReduction = signal(true);
constructor() {
this.loadVoiceSettings();
@@ -123,6 +121,11 @@ export class VoiceSettingsComponent {
if (this.noiseReduction() !== this.webrtcService.isNoiseReductionEnabled()) {
this.webrtcService.toggleNoiseReduction(this.noiseReduction());
}
// Apply persisted volume levels to the live audio pipelines
this.webrtcService.setInputVolume(this.inputVolume() / 100);
this.webrtcService.setOutputVolume(this.outputVolume() / 100);
this.voicePlayback.updateOutputVolume(this.outputVolume() / 100);
}
saveVoiceSettings(): void {
@@ -162,6 +165,7 @@ export class VoiceSettingsComponent {
const input = event.target as HTMLInputElement;
this.inputVolume.set(parseInt(input.value, 10));
this.webrtcService.setInputVolume(this.inputVolume() / 100);
this.saveVoiceSettings();
}
@@ -170,6 +174,7 @@ export class VoiceSettingsComponent {
this.outputVolume.set(parseInt(input.value, 10));
this.webrtcService.setOutputVolume(this.outputVolume() / 100);
this.voicePlayback.updateOutputVolume(this.outputVolume() / 100);
this.saveVoiceSettings();
}
@@ -203,40 +208,6 @@ export class VoiceSettingsComponent {
this.saveVoiceSettings();
}
/* ── Voice Leveling handlers ───────────────────────────────── */
onVoiceLevelingToggle(): void {
this.voiceLeveling.setEnabled(!this.voiceLeveling.enabled());
}
onTargetDbfsChange(event: Event): void {
const input = event.target as HTMLInputElement;
this.voiceLeveling.setTargetDbfs(parseInt(input.value, 10));
}
onStrengthChange(event: Event): void {
const select = event.target as HTMLSelectElement;
this.voiceLeveling.setStrength(select.value as 'low' | 'medium' | 'high');
}
onMaxGainDbChange(event: Event): void {
const input = event.target as HTMLInputElement;
this.voiceLeveling.setMaxGainDb(parseInt(input.value, 10));
}
onSpeedChange(event: Event): void {
const select = event.target as HTMLSelectElement;
this.voiceLeveling.setSpeed(select.value as 'slow' | 'medium' | 'fast');
}
onNoiseGateToggle(): void {
this.voiceLeveling.setNoiseGate(!this.voiceLeveling.noiseGate());
}
onNotificationVolumeChange(event: Event): void {
const input = event.target as HTMLInputElement;

View File

@@ -68,7 +68,7 @@ export class SettingsComponent implements OnInit {
newServerUrl = '';
autoReconnect = true;
searchAllServers = true;
noiseReduction = false;
noiseReduction = true;
/** Load persisted connection settings on component init. */
ngOnInit(): void {

View File

@@ -1,6 +1,6 @@
import { Injectable, inject } from '@angular/core';
import { WebRTCService } from '../../../../core/services/webrtc.service';
import { VoiceLevelingService } from '../../../../core/services/voice-leveling.service';
import { STORAGE_KEY_USER_VOLUMES } from '../../../../core/constants';
export interface PlaybackOptions {
isConnected: boolean;
@@ -8,14 +8,58 @@ export interface PlaybackOptions {
isDeafened: boolean;
}
/**
* Per-peer Web Audio pipeline that routes the remote MediaStream
* through a GainNode so volume can be amplified beyond 100% (up to 200%).
*
* Chrome/Electron workaround: a muted HTMLAudioElement is attached to
* the stream first so that `createMediaStreamSource` actually outputs
* audio. The element itself is silent - all audible output comes from
* the GainNode -> AudioContext.destination path.
*/
interface PeerAudioPipeline {
/** Muted <audio> element that "primes" the stream for Web Audio. */
audioElement: HTMLAudioElement;
/** AudioContext for this peer's pipeline. */
context: AudioContext;
/** Source node created from the remote stream. */
sourceNode: MediaStreamAudioSourceNode;
/** GainNode used to control per-user volume (0.0-2.0). */
gainNode: GainNode;
}
@Injectable({ providedIn: 'root' })
export class VoicePlaybackService {
private voiceLeveling = inject(VoiceLevelingService);
private webrtc = inject(WebRTCService);
private remoteAudioElements = new Map<string, HTMLAudioElement>();
/** Active Web Audio pipelines keyed by peer ID. */
private peerPipelines = new Map<string, PeerAudioPipeline>();
private pendingRemoteStreams = new Map<string, MediaStream>();
private rawRemoteStreams = new Map<string, MediaStream>();
/**
* Per-user volume overrides (0-200 integer, maps to 0.0-2.0 gain).
* Keyed by oderId so the setting persists across reconnections.
*/
private userVolumes = new Map<string, number>();
/** Per-user mute state. Keyed by oderId. */
private userMuted = new Map<string, boolean>();
/** Global master output volume (0.0-1.0 from the settings slider). */
private masterVolume = 1;
/** Whether the local user is deafened. */
private deafened = false;
constructor() {
this.loadPersistedVolumes();
}
// ---------------------------------------------------------------------------
// Public API - stream lifecycle
// ---------------------------------------------------------------------------
handleRemoteStream(peerId: string, stream: MediaStream, options: PlaybackOptions): void {
if (!options.isConnected) {
this.pendingRemoteStreams.set(peerId, stream);
@@ -26,39 +70,17 @@ export class VoicePlaybackService {
return;
}
this.removeAudioElement(peerId);
// Always stash the raw stream so we can re-wire on toggle
this.removePipeline(peerId);
this.rawRemoteStreams.set(peerId, stream);
// Start playback immediately with the raw stream
const audio = new Audio();
audio.srcObject = stream;
audio.autoplay = true;
audio.volume = options.outputVolume;
audio.muted = options.isDeafened;
audio.play().catch(() => {});
this.remoteAudioElements.set(peerId, audio);
// Swap to leveled stream if enabled
if (this.voiceLeveling.enabled()) {
this.voiceLeveling.enable(peerId, stream).then((leveledStream) => {
const currentAudio = this.remoteAudioElements.get(peerId);
if (currentAudio && leveledStream !== stream) {
currentAudio.srcObject = leveledStream;
}
})
.catch(() => {});
}
this.masterVolume = options.outputVolume;
this.deafened = options.isDeafened;
this.createPipeline(peerId, stream);
}
removeRemoteAudio(peerId: string): void {
this.pendingRemoteStreams.delete(peerId);
this.rawRemoteStreams.delete(peerId);
this.voiceLeveling.disable(peerId);
this.removeAudioElement(peerId);
this.removePipeline(peerId);
}
playPendingStreams(options: PlaybackOptions): void {
@@ -88,81 +110,232 @@ export class VoicePlaybackService {
}
}
async rebuildAllRemoteAudio(enabled: boolean, options: PlaybackOptions): Promise<void> {
if (enabled) {
for (const [peerId, rawStream] of this.rawRemoteStreams) {
try {
const leveledStream = await this.voiceLeveling.enable(peerId, rawStream);
const audio = this.remoteAudioElements.get(peerId);
if (audio && leveledStream !== rawStream) {
audio.srcObject = leveledStream;
}
} catch {}
}
} else {
this.voiceLeveling.disableAll();
for (const [peerId, rawStream] of this.rawRemoteStreams) {
const audio = this.remoteAudioElements.get(peerId);
if (audio) {
audio.srcObject = rawStream;
}
}
}
this.updateOutputVolume(options.outputVolume);
this.updateDeafened(options.isDeafened);
}
// ---------------------------------------------------------------------------
// Global volume / deafen (master slider from settings)
// ---------------------------------------------------------------------------
updateOutputVolume(volume: number): void {
this.remoteAudioElements.forEach((audio) => {
audio.volume = volume;
});
this.masterVolume = volume;
this.recalcAllGains();
}
updateDeafened(isDeafened: boolean): void {
this.remoteAudioElements.forEach((audio) => {
audio.muted = isDeafened;
});
this.deafened = isDeafened;
this.recalcAllGains();
}
// ---------------------------------------------------------------------------
// Per-user volume (0-200%) and mute
// ---------------------------------------------------------------------------
/** Get the per-user volume for a peer (0-200). Defaults to 100. */
getUserVolume(peerId: string): number {
return this.userVolumes.get(peerId) ?? 100;
}
/** Set per-user volume (0-200) and update the gain node in real time. */
setUserVolume(peerId: string, volume: number): void {
const clamped = Math.max(0, Math.min(200, volume));
this.userVolumes.set(peerId, clamped);
this.applyGain(peerId);
this.persistVolumes();
}
/** Whether a specific user is muted by the local user. */
isUserMuted(peerId: string): boolean {
return this.userMuted.get(peerId) ?? false;
}
/** Toggle per-user mute. */
setUserMuted(peerId: string, muted: boolean): void {
this.userMuted.set(peerId, muted);
this.applyGain(peerId);
this.persistVolumes();
}
// ---------------------------------------------------------------------------
// Output device routing
// ---------------------------------------------------------------------------
applyOutputDevice(deviceId: string): void {
if (!deviceId)
return;
this.remoteAudioElements.forEach((audio) => {
const anyAudio = audio as any;
this.peerPipelines.forEach((pipeline) => {
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const anyAudio = pipeline.audioElement as any;
if (typeof anyAudio.setSinkId === 'function') {
anyAudio.setSinkId(deviceId).catch(() => {});
}
// Also try setting sink on the AudioContext destination (Chromium ≥ 110)
// eslint-disable-next-line @typescript-eslint/no-explicit-any
const anyCtx = pipeline.context as any;
if (typeof anyCtx.setSinkId === 'function') {
anyCtx.setSinkId(deviceId).catch(() => {});
}
});
}
teardownAll(): void {
this.remoteAudioElements.forEach((audio) => {
audio.srcObject = null;
audio.remove();
});
// ---------------------------------------------------------------------------
// Teardown
// ---------------------------------------------------------------------------
this.remoteAudioElements.clear();
teardownAll(): void {
this.peerPipelines.forEach((_pipeline, peerId) => this.removePipeline(peerId));
this.peerPipelines.clear();
this.rawRemoteStreams.clear();
this.pendingRemoteStreams.clear();
}
// ---------------------------------------------------------------------------
// Private - Web Audio pipeline
// ---------------------------------------------------------------------------
/**
* Build the Web Audio graph for a remote peer:
*
* remoteStream
* ↓
* muted <audio> element (Chrome workaround - primes the stream)
* ↓
* MediaStreamSource → GainNode → AudioContext.destination
*/
private createPipeline(peerId: string, stream: MediaStream): void {
// 1) Chrome/Electron workaround: attach stream to a muted <audio>
const audioEl = new Audio();
audioEl.srcObject = stream;
audioEl.muted = true; // silent - we route audio through Web Audio API
audioEl.play().catch(() => {});
// 2) Set up Web Audio graph
const ctx = new AudioContext();
const sourceNode = ctx.createMediaStreamSource(stream);
const gainNode = ctx.createGain();
sourceNode.connect(gainNode);
gainNode.connect(ctx.destination);
// 3) Store pipeline
const pipeline: PeerAudioPipeline = { audioElement: audioEl, context: ctx, sourceNode, gainNode };
this.peerPipelines.set(peerId, pipeline);
// 4) Apply current gain
this.applyGain(peerId);
}
/** Disconnect and clean up all nodes for a single peer. */
private removePipeline(peerId: string): void {
const pipeline = this.peerPipelines.get(peerId);
if (!pipeline)
return;
try {
pipeline.gainNode.disconnect();
pipeline.sourceNode.disconnect();
} catch {
// nodes may already be disconnected
}
pipeline.audioElement.srcObject = null;
pipeline.audioElement.remove();
if (pipeline.context.state !== 'closed') {
pipeline.context.close().catch(() => {});
}
this.peerPipelines.delete(peerId);
}
/**
* Compute and apply the effective gain for a peer.
*
* effectiveGain = masterVolume × (userVolume / 100)
*
* If the user is deafened or the peer is individually muted the gain
* is set to 0.
*/
private applyGain(peerId: string): void {
const pipeline = this.peerPipelines.get(peerId);
if (!pipeline)
return;
if (this.deafened || this.isUserMuted(peerId)) {
pipeline.gainNode.gain.value = 0;
return;
}
const userVol = this.getUserVolume(peerId) / 100; // 0.0-2.0
const effective = this.masterVolume * userVol;
pipeline.gainNode.gain.value = effective;
}
/** Recalculate gain for every active pipeline. */
private recalcAllGains(): void {
this.peerPipelines.forEach((_pipeline, peerId) => this.applyGain(peerId));
}
// ---------------------------------------------------------------------------
// Persistence helpers
// ---------------------------------------------------------------------------
private persistVolumes(): void {
try {
const data: Record<string, { volume: number; muted: boolean }> = {};
this.userVolumes.forEach((vol, id) => {
data[id] = { volume: vol, muted: this.userMuted.get(id) ?? false };
});
// Also persist any muted-only entries
this.userMuted.forEach((muted, id) => {
if (!data[id]) {
data[id] = { volume: 100, muted };
}
});
localStorage.setItem(STORAGE_KEY_USER_VOLUMES, JSON.stringify(data));
} catch {
// localStorage not available
}
}
private loadPersistedVolumes(): void {
try {
const raw = localStorage.getItem(STORAGE_KEY_USER_VOLUMES);
if (!raw)
return;
const data = JSON.parse(raw) as Record<string, { volume: number; muted: boolean }>;
Object.entries(data).forEach(([id, entry]) => {
if (typeof entry.volume === 'number') {
this.userVolumes.set(id, entry.volume);
}
if (entry.muted) {
this.userMuted.set(id, true);
}
});
} catch {
// corrupted data - ignore
}
}
// ---------------------------------------------------------------------------
// Utility
// ---------------------------------------------------------------------------
private hasAudio(stream: MediaStream): boolean {
return stream.getAudioTracks().length > 0;
}
private removeAudioElement(peerId: string): void {
const audio = this.remoteAudioElements.get(peerId);
if (audio) {
audio.srcObject = null;
audio.remove();
this.remoteAudioElements.delete(peerId);
}
}
}

View File

@@ -26,7 +26,6 @@ import {
import { WebRTCService } from '../../../core/services/webrtc.service';
import { VoiceSessionService } from '../../../core/services/voice-session.service';
import { VoiceActivityService } from '../../../core/services/voice-activity.service';
import { VoiceLevelingService } from '../../../core/services/voice-leveling.service';
import { UsersActions } from '../../../store/users/users.actions';
import { selectCurrentUser } from '../../../store/users/users.selectors';
import { selectCurrentRoom } from '../../../store/rooms/rooms.selectors';
@@ -67,13 +66,10 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
private webrtcService = inject(WebRTCService);
private voiceSessionService = inject(VoiceSessionService);
private voiceActivity = inject(VoiceActivityService);
private voiceLeveling = inject(VoiceLevelingService);
private voicePlayback = inject(VoicePlaybackService);
private store = inject(Store);
private settingsModal = inject(SettingsModalService);
private remoteStreamSubscription: Subscription | null = null;
/** Unsubscribe function for live voice-leveling toggle notifications. */
private voiceLevelingUnsubscribe: (() => void) | null = null;
currentUser = this.store.selectSignal(selectCurrentUser);
currentRoom = this.store.selectSignal(selectCurrentRoom);
@@ -95,7 +91,7 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
audioBitrate = signal(96);
latencyProfile = signal<'low' | 'balanced' | 'high'>('balanced');
includeSystemAudio = signal(false);
noiseReduction = signal(false);
noiseReduction = signal(true);
private playbackOptions(): PlaybackOptions {
return {
@@ -121,12 +117,6 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
}
);
// Listen for live voice-leveling toggle changes so we can
// rebuild all remote Audio elements immediately (no reconnect).
this.voiceLevelingUnsubscribe = this.voiceLeveling.onEnabledChange(
(enabled) => this.voicePlayback.rebuildAllRemoteAudio(enabled, this.playbackOptions())
);
// Subscribe to voice connected event to play pending streams and ensure all remote audio is set up
this.voiceConnectedSubscription = this.webrtcService.onVoiceConnected.subscribe(() => {
const options = this.playbackOptions();
@@ -150,11 +140,9 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
}
this.voicePlayback.teardownAll();
this.voiceLeveling.disableAll();
this.remoteStreamSubscription?.unsubscribe();
this.voiceConnectedSubscription?.unsubscribe();
this.voiceLevelingUnsubscribe?.();
}
async loadAudioDevices(): Promise<void> {
@@ -198,7 +186,7 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
audio: {
deviceId: this.selectedInputDevice() || undefined,
echoCancellation: true,
noiseSuppression: true
noiseSuppression: !this.noiseReduction()
}
});
@@ -219,6 +207,25 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
this.webrtcService.startVoiceHeartbeat(roomId, serverId);
// Update local user's voice state in the store so the side panel
// shows us in the voice channel with a speaking indicator.
const user = this.currentUser();
if (user?.id) {
this.store.dispatch(
UsersActions.updateVoiceState({
userId: user.id,
voiceState: {
isConnected: true,
isMuted: this.isMuted(),
isDeafened: this.isDeafened(),
roomId,
serverId
}
})
);
}
// Broadcast voice state to other users
this.webrtcService.broadcastMessage({
type: 'voice-state',
@@ -279,9 +286,6 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
// Disable voice (stops audio tracks but keeps peer connections open for chat)
this.webrtcService.disableVoice();
// Tear down all voice leveling pipelines
this.voiceLeveling.disableAll();
this.voicePlayback.teardownAll();
const user = this.currentUser();
@@ -313,6 +317,22 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
this.isMuted.update((current) => !current);
this.webrtcService.toggleMute(this.isMuted());
// Update local store so the side panel reflects the mute state
const user = this.currentUser();
if (user?.id) {
this.store.dispatch(
UsersActions.updateVoiceState({
userId: user.id,
voiceState: {
isConnected: this.isConnected(),
isMuted: this.isMuted(),
isDeafened: this.isDeafened()
}
})
);
}
// Broadcast mute state change
this.webrtcService.broadcastMessage({
type: 'voice-state',
@@ -349,6 +369,22 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
isDeafened: this.isDeafened()
}
});
// Update local store so the side panel reflects the deafen/mute state
const user = this.currentUser();
if (user?.id) {
this.store.dispatch(
UsersActions.updateVoiceState({
userId: user.id,
voiceState: {
isConnected: this.isConnected(),
isMuted: this.isMuted(),
isDeafened: this.isDeafened()
}
})
);
}
}
async toggleScreenShare(): Promise<void> {
@@ -397,6 +433,7 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
const input = event.target as HTMLInputElement;
this.inputVolume.set(parseInt(input.value, 10));
this.webrtcService.setInputVolume(this.inputVolume() / 100);
this.saveSettings();
}
@@ -506,6 +543,8 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
private applySettingsToWebRTC(): void {
try {
this.webrtcService.setOutputVolume(this.outputVolume() / 100);
this.voicePlayback.updateOutputVolume(this.outputVolume() / 100);
this.webrtcService.setInputVolume(this.inputVolume() / 100);
this.webrtcService.setAudioBitrate(this.audioBitrate());
this.webrtcService.setLatencyProfile(this.latencyProfile());
this.applyOutputDevice();

View File

@@ -0,0 +1,22 @@
<!-- Invisible backdrop that captures clicks outside -->
<!-- Closes the menu on click, right-click (default context menu suppressed),
     Enter or Space; role/tabindex make it reachable by keyboard and
     assistive technology. -->
<div
class="fixed inset-0 z-40"
(click)="closed.emit(undefined)"
(contextmenu)="$event.preventDefault(); closed.emit(undefined)"
(keydown.enter)="closed.emit(undefined)"
(keydown.space)="closed.emit(undefined)"
role="button"
tabindex="0"
aria-label="Close menu"
></div>
<!-- Positioned menu panel -->
<!-- Placed at the viewport-clamped coordinates; a fixed pixel width (widthPx)
     takes precedence over the Tailwind width class when provided. -->
<div
#panel
class="fixed z-50 bg-card border border-border rounded-lg shadow-lg py-1"
[class]="widthPx() ? '' : width()"
[style.left.px]="clampedX()"
[style.top.px]="clampedY()"
[style.width.px]="widthPx() || null"
>
<ng-content />
</div>

View File

@@ -0,0 +1,28 @@
/* The component box itself renders nothing; projected children participate
   in the surrounding layout directly. */
:host {
display: contents;
}
/* Convenience classes consumers can use on projected buttons */
/* Standard menu entry. */
:host ::ng-deep .context-menu-item {
@apply w-full text-left px-3 py-1.5 text-sm hover:bg-secondary transition-colors text-foreground;
}
/* Destructive (red) menu entry. */
:host ::ng-deep .context-menu-item-danger {
@apply w-full text-left px-3 py-1.5 text-sm hover:bg-secondary transition-colors text-destructive;
}
/* Menu entry with a leading icon (extra padding + flex row). */
:host ::ng-deep .context-menu-item-icon {
@apply w-full text-left px-3 py-2 text-sm hover:bg-secondary transition-colors text-foreground flex items-center gap-2;
}
/* Destructive menu entry with a leading icon. */
:host ::ng-deep .context-menu-item-icon-danger {
@apply w-full text-left px-3 py-2 text-sm hover:bg-destructive/10 transition-colors text-destructive flex items-center gap-2;
}
/* Horizontal separator between groups of entries. */
:host ::ng-deep .context-menu-divider {
@apply border-t border-border my-1;
}
/* Muted placeholder row shown when the menu has no actionable items. */
:host ::ng-deep .context-menu-empty {
@apply px-3 py-1.5 text-sm text-muted-foreground;
}

View File

@@ -2,11 +2,16 @@ import {
Component,
input,
output,
HostListener
signal,
HostListener,
ViewChild,
ElementRef,
AfterViewInit,
OnInit
} from '@angular/core';
/**
* Generic positioned context-menu overlay.
* Generic positioned context-menu overlay with automatic viewport clamping.
*
* Usage:
* ```html
@@ -17,6 +22,13 @@ import {
* }
* ```
*
* For pixel-based widths (e.g. sliders), use `[widthPx]` instead of `[width]`:
* ```html
* <app-context-menu [x]="menuX()" [y]="menuY()" [widthPx]="240" (closed)="closeMenu()">
* ...custom content...
* </app-context-menu>
* ```
*
* Built-in item classes are available via the host styles:
* - `.context-menu-item` - normal item
* - `.context-menu-item-danger` - destructive (red) item
@@ -25,68 +37,73 @@ import {
@Component({
selector: 'app-context-menu',
standalone: true,
template: `
<!-- Invisible backdrop that captures clicks outside -->
<div
class="fixed inset-0 z-40"
(click)="closed.emit(undefined)"
(keydown.enter)="closed.emit(undefined)"
(keydown.space)="closed.emit(undefined)"
role="button"
tabindex="0"
aria-label="Close menu"
></div>
<!-- Positioned menu panel -->
<div
class="fixed z-50 bg-card border border-border rounded-lg shadow-lg py-1"
[class]="width()"
[style.left.px]="x()"
[style.top.px]="y()"
>
<ng-content />
</div>
`,
styles: [
`
:host {
display: contents;
}
/* Convenience classes consumers can use on projected buttons */
:host ::ng-deep .context-menu-item {
@apply w-full text-left px-3 py-1.5 text-sm hover:bg-secondary transition-colors text-foreground;
}
:host ::ng-deep .context-menu-item-danger {
@apply w-full text-left px-3 py-1.5 text-sm hover:bg-secondary transition-colors text-destructive;
}
:host ::ng-deep .context-menu-item-icon {
@apply w-full text-left px-3 py-2 text-sm hover:bg-secondary transition-colors text-foreground flex items-center gap-2;
}
:host ::ng-deep .context-menu-item-icon-danger {
@apply w-full text-left px-3 py-2 text-sm hover:bg-destructive/10 transition-colors text-destructive flex items-center gap-2;
}
:host ::ng-deep .context-menu-divider {
@apply border-t border-border my-1;
}
:host ::ng-deep .context-menu-empty {
@apply px-3 py-1.5 text-sm text-muted-foreground;
}
`
]
templateUrl: './context-menu.component.html',
styleUrl: './context-menu.component.scss'
})
export class ContextMenuComponent {
/* eslint-disable @typescript-eslint/member-ordering */
export class ContextMenuComponent implements OnInit, AfterViewInit {
/** Horizontal position (px from left). */
// eslint-disable-next-line id-length, id-denylist
x = input.required<number>();
/** Vertical position (px from top). */
// eslint-disable-next-line id-length, id-denylist
y = input.required<number>();
/** Tailwind width class for the panel (default `w-48`). */
/** Tailwind width class for the panel (default `w-48`). Ignored when `widthPx` is set. */
width = input<string>('w-48');
/** Optional fixed width in pixels (overrides `width`). Useful for custom content like sliders. */
widthPx = input<number | null>(null);
/** Emitted when the menu should close (backdrop click or Escape). */
closed = output<undefined>();
@ViewChild('panel', { static: true }) panelRef!: ElementRef<HTMLDivElement>;
/** Viewport-clamped X position. */
clampedX = signal(0);
/** Viewport-clamped Y position. */
clampedY = signal(0);
/**
 * First positioning pass, before the panel has rendered: clamp using an
 * estimated width (from `widthPx` or the Tailwind class) and a rough 80px
 * height placeholder. Refined with real dimensions in `ngAfterViewInit`.
 */
ngOnInit(): void {
// Initial clamp with estimated dimensions
this.clampedX.set(this.clampX(this.x(), this.estimateWidth()));
this.clampedY.set(this.clampY(this.y(), 80));
}
/**
 * Second positioning pass: re-clamp with the panel's actual rendered size
 * (via getBoundingClientRect) so the menu never overflows the viewport.
 */
ngAfterViewInit(): void {
// Refine with actual rendered dimensions
const rect = this.panelRef.nativeElement.getBoundingClientRect();
this.clampedX.set(this.clampX(this.x(), rect.width));
this.clampedY.set(this.clampY(this.y(), rect.height));
}
/** Close the menu when Escape is pressed anywhere in the document. */
@HostListener('document:keydown.escape')
onEscape(): void {
this.closed.emit(undefined);
}
/**
 * Best-effort panel width before the first render, used by the initial clamp.
 * Prefers the explicit pixel width; otherwise converts a Tailwind `w-N` class
 * to pixels (N x 4px, e.g. w-48 -> 192) and falls back to 192 when the class
 * has no numeric suffix.
 * NOTE: a falsy `widthPx` (null or 0) falls through to the class-based path.
 */
private estimateWidth(): number {
const px = this.widthPx();
if (px)
return px;
// Parse Tailwind w-XX class to approximate pixel width
const match = this.width().match(/w-(\d+)/);
return match ? parseInt(match[1], 10) * 4 : 192;
}
/**
 * Clamp a horizontal position so the panel stays fully on-screen,
 * keeping an 8px gap from both viewport edges.
 */
private clampX(rawX: number, panelWidth: number): number {
  const edgeGap = 8;
  const rightLimit = window.innerWidth - panelWidth - edgeGap;
  if (rawX > rightLimit) {
    return Math.max(edgeGap, rightLimit);
  }
  return Math.max(edgeGap, rawX);
}
/**
 * Clamp a vertical position so the panel stays fully on-screen,
 * keeping an 8px gap from both viewport edges.
 */
private clampY(rawY: number, panelHeight: number): number {
  const edgeGap = 8;
  const bottomLimit = window.innerHeight - panelHeight - edgeGap;
  if (rawY > bottomLimit) {
    return Math.max(edgeGap, bottomLimit);
  }
  return Math.max(edgeGap, rawY);
}
}

View File

@@ -0,0 +1,46 @@
<!-- Per-user volume menu: reuses the shared context-menu shell at a fixed
     240px width so the slider has a stable layout. -->
<app-context-menu
[x]="x()"
[y]="y()"
[widthPx]="240"
(closed)="closed.emit(undefined)"
>
<!-- Header -->
<p class="text-xs font-medium text-muted-foreground mb-2 px-2 truncate">{{ displayName() }}</p>
<!-- Mute button + slider + percentage in one row -->
<div class="flex items-center gap-2 px-2 pb-1">
<!-- Mute toggle button -->
<button
type="button"
(click)="toggleMute()"
class="shrink-0 w-7 h-7 inline-flex items-center justify-center rounded transition-colors"
[class]="muteButtonClass()"
[title]="isMuted() ? 'Unmute' : 'Mute'"
>
<ng-icon
[name]="isMuted() ? 'lucideVolumeX' : 'lucideVolume2'"
class="w-4 h-4"
/>
</button>
<!-- Slider: 0-200% playback volume; disabled and dimmed while muted -->
<input
type="range"
min="0"
max="200"
step="1"
[value]="volume()"
(input)="onSliderInput($event)"
class="volume-slider flex-1"
[class.opacity-40]="isMuted()"
[disabled]="isMuted()"
/>
<!-- Percentage label -->
<span
class="text-xs w-10 text-right tabular-nums shrink-0"
[class]="isMuted() ? 'text-muted-foreground line-through' : 'text-foreground'"
>{{ volume() }}%</span
>
</div>
</app-context-menu>

View File

@@ -0,0 +1,47 @@
/* The component box renders nothing; the wrapped context menu participates
   in the surrounding stacking context directly. */
:host {
display: contents;
}
/* Custom-styled range input for the 0-200% per-user volume slider. */
.volume-slider {
-webkit-appearance: none;
appearance: none;
height: 6px;
border-radius: 3px;
background: hsl(var(--secondary));
outline: none;
cursor: pointer;
}
.volume-slider:disabled {
cursor: not-allowed;
}
/* Track */
.volume-slider::-webkit-slider-runnable-track {
height: 6px;
border-radius: 3px;
background: hsl(var(--secondary));
}
/* Thumb */
/* NOTE(review): only -webkit- pseudo-elements are styled; presumably the app
   runs in Chromium/Electron only - confirm, otherwise add -moz- equivalents. */
.volume-slider::-webkit-slider-thumb {
-webkit-appearance: none;
appearance: none;
width: 14px;
height: 14px;
border-radius: 50%;
background: hsl(var(--primary));
border: 2px solid hsl(var(--card));
margin-top: -4px;
cursor: pointer;
box-shadow: 0 1px 3px rgba(0, 0, 0, 0.3);
}
/* Slight grow on hover to signal interactivity. */
.volume-slider::-webkit-slider-thumb:hover {
transform: scale(1.15);
}
/* Greyed-out thumb while the user is muted (input disabled). */
.volume-slider:disabled::-webkit-slider-thumb {
background: hsl(var(--muted-foreground));
cursor: not-allowed;
}

View File

@@ -0,0 +1,89 @@
import {
Component,
input,
output,
inject,
signal,
OnInit
} from '@angular/core';
import { NgIcon, provideIcons } from '@ng-icons/core';
import { lucideVolume2, lucideVolumeX } from '@ng-icons/lucide';
import { VoicePlaybackService } from '../../../features/voice/voice-controls/services/voice-playback.service';
import { ContextMenuComponent } from '../context-menu/context-menu.component';
/**
* Context-menu overlay that lets the local user adjust the playback
* volume of a specific remote voice-channel participant (0%-200%)
* and toggle per-user mute.
*
* Wraps `<app-context-menu>` for consistent positioning, backdrop,
* escape handling and viewport clamping.
*
* Usage:
* ```html
* @if (showVolumeMenu()) {
* <app-user-volume-menu
* [x]="menuX()"
* [y]="menuY()"
* [peerId]="targetPeerId()"
* [displayName]="targetName()"
* (closed)="showVolumeMenu.set(false)"
* />
* }
* ```
*/
@Component({
  selector: 'app-user-volume-menu',
  standalone: true,
  imports: [NgIcon, ContextMenuComponent],
  viewProviders: [provideIcons({ lucideVolume2, lucideVolumeX })],
  templateUrl: './user-volume-menu.component.html',
  styleUrl: './user-volume-menu.component.scss'
})
/* eslint-disable @typescript-eslint/member-ordering */
export class UserVolumeMenuComponent implements OnInit {
  /** Horizontal position (px from left). */
  // eslint-disable-next-line id-length, id-denylist
  x = input.required<number>();
  /** Vertical position (px from top). */
  // eslint-disable-next-line id-length, id-denylist
  y = input.required<number>();
  /** Remote peer identifier (oderId). */
  peerId = input.required<string>();
  /** Display name shown in the header. */
  displayName = input.required<string>();
  /** Emitted when the menu should close. */
  closed = output<undefined>();

  private playback = inject(VoicePlaybackService);

  /** Current slider value in percent (0-200); seeded from the service on init. */
  volume = signal(100);
  /** Local mute flag for this peer; seeded from the service on init. */
  isMuted = signal(false);

  ngOnInit(): void {
    // Load the persisted per-user state so the menu opens showing current values.
    const peer = this.peerId();
    this.volume.set(this.playback.getUserVolume(peer));
    this.isMuted.set(this.playback.isUserMuted(peer));
  }

  /** Slider handler: mirror the value locally and apply it to playback immediately. */
  onSliderInput(event: Event): void {
    const slider = event.target as HTMLInputElement;
    const nextVolume = parseInt(slider.value, 10);
    this.volume.set(nextVolume);
    this.playback.setUserVolume(this.peerId(), nextVolume);
  }

  /** Flip the per-user mute flag and propagate it to the playback service. */
  toggleMute(): void {
    const muted = !this.isMuted();
    this.isMuted.set(muted);
    this.playback.setUserMuted(this.peerId(), muted);
  }

  /** CSS classes for the mute button, reflecting the current mute state. */
  muteButtonClass(): string {
    if (this.isMuted()) {
      return 'bg-destructive/15 text-destructive hover:bg-destructive/25';
    }
    return 'text-muted-foreground hover:bg-secondary hover:text-foreground';
  }
}

View File

@@ -4,3 +4,4 @@
export { ContextMenuComponent } from './components/context-menu/context-menu.component';
export { UserAvatarComponent } from './components/user-avatar/user-avatar.component';
export { ConfirmDialogComponent } from './components/confirm-dialog/confirm-dialog.component';
export { UserVolumeMenuComponent } from './components/user-volume-menu/user-volume-menu.component';

View File

@@ -0,0 +1,422 @@
/* eslint-disable @typescript-eslint/member-ordering */
import { Injectable, inject } from '@angular/core';
import {
Actions,
createEffect,
ofType
} from '@ngrx/effects';
import { Action } from '@ngrx/store';
import { Store } from '@ngrx/store';
import { EMPTY } from 'rxjs';
import {
mergeMap,
tap,
withLatestFrom
} from 'rxjs/operators';
import {
Room,
RoomMember,
User
} from '../../core/models';
import { WebRTCService } from '../../core/services/webrtc.service';
import { UsersActions } from '../users/users.actions';
import { selectCurrentUser } from '../users/users.selectors';
import { RoomsActions } from './rooms.actions';
import {
selectCurrentRoom,
selectSavedRooms
} from './rooms.selectors';
import {
areRoomMembersEqual,
findRoomMember,
mergeRoomMembers,
removeRoomMember,
roomMemberFromUser,
touchRoomMemberLastSeen,
updateRoomMemberRole,
upsertRoomMember
} from './room-members.helpers';
@Injectable()
export class RoomMembersSyncEffects {
  // Dependencies acquired via field-initializer inject() (Angular DI).
  private readonly actions$ = inject(Actions);
  private readonly store = inject(Store);
  private readonly webrtc = inject(WebRTCService);
  /** Ensure the local user is recorded in a room as soon as it becomes active. */
  ensureCurrentMemberOnRoomEntry$ = createEffect(() =>
    this.actions$.pipe(
      ofType(
        RoomsActions.createRoomSuccess,
        RoomsActions.joinRoomSuccess,
        RoomsActions.viewServerSuccess
      ),
      withLatestFrom(this.store.select(selectCurrentUser)),
      mergeMap(([{ room }, currentUser]) => {
        if (!currentUser)
          return EMPTY;
        const members = upsertRoomMember(
          room.members ?? [],
          this.buildCurrentUserMember(room, currentUser, true)
        );
        // Only dispatch when the roster actually changed (avoids update loops).
        const actions = this.createRoomMemberUpdateActions(room, members);
        return actions.length > 0 ? actions : EMPTY;
      })
    )
  );
  /** Keep the viewed room's local member record aligned with the current profile. */
  syncCurrentUserIntoCurrentRoom$ = createEffect(() =>
    this.actions$.pipe(
      ofType(
        UsersActions.loadCurrentUserSuccess,
        UsersActions.setCurrentUser,
        UsersActions.updateCurrentUser
      ),
      withLatestFrom(
        this.store.select(selectCurrentUser),
        this.store.select(selectCurrentRoom)
      ),
      mergeMap(([, currentUser, currentRoom]) => {
        if (!currentUser || !currentRoom)
          return EMPTY;
        const members = upsertRoomMember(
          currentRoom.members ?? [],
          this.buildCurrentUserMember(currentRoom, currentUser, true)
        );
        const actions = this.createRoomMemberUpdateActions(currentRoom, members);
        return actions.length > 0 ? actions : EMPTY;
      })
    )
  );
  /** Persist room-role changes into the stored member roster for the active room. */
  syncRoleChangesIntoCurrentRoom$ = createEffect(() =>
    this.actions$.pipe(
      ofType(UsersActions.updateUserRole),
      withLatestFrom(this.store.select(selectCurrentRoom)),
      mergeMap(([{ userId, role }, currentRoom]) => {
        if (!currentRoom)
          return EMPTY;
        const members = updateRoomMemberRole(currentRoom.members ?? [], userId, role);
        const actions = this.createRoomMemberUpdateActions(currentRoom, members);
        return actions.length > 0 ? actions : EMPTY;
      })
    )
  );
  /** Update persisted room rosters when signaling presence changes arrive. */
  signalingPresenceIntoRoomMembers$ = createEffect(() =>
    this.webrtc.onSignalingMessage.pipe(
      withLatestFrom(
        this.store.select(selectCurrentRoom),
        this.store.select(selectSavedRooms),
        this.store.select(selectCurrentUser)
      ),
      mergeMap(([message, currentRoom, savedRooms, currentUser]) => {
        // NOTE(review): signaling payloads are untyped here; every field is
        // checked individually before use below.
        const signalingMessage = message as any;
        const roomId = typeof signalingMessage.serverId === 'string' ? signalingMessage.serverId : undefined;
        const room = this.resolveRoom(roomId, currentRoom, savedRooms);
        if (!room)
          return EMPTY;
        // Our own presence is handled by the dedicated effects above,
        // so entries matching myId are skipped in each case.
        const myId = currentUser?.oderId || currentUser?.id;
        switch (signalingMessage.type) {
          case 'server_users': {
            if (!Array.isArray(signalingMessage.users))
              return EMPTY;
            let members = room.members ?? [];
            for (const user of signalingMessage.users as Array<{ oderId: string; displayName: string }>) {
              if (!user?.oderId || user.oderId === myId)
                continue;
              members = upsertRoomMember(members, this.buildPresenceMember(room, user));
            }
            const actions = this.createRoomMemberUpdateActions(room, members);
            return actions.length > 0 ? actions : EMPTY;
          }
          case 'user_joined': {
            if (!signalingMessage.oderId || signalingMessage.oderId === myId)
              return EMPTY;
            const members = upsertRoomMember(
              room.members ?? [],
              this.buildPresenceMember(room, signalingMessage)
            );
            const actions = this.createRoomMemberUpdateActions(room, members);
            return actions.length > 0 ? actions : EMPTY;
          }
          case 'user_left': {
            if (!signalingMessage.oderId)
              return EMPTY;
            // Leaving does not remove the member record; only its last-seen
            // timestamp is refreshed so pruning can age it out later.
            const members = touchRoomMemberLastSeen(room.members ?? [], signalingMessage.oderId, Date.now());
            const actions = this.createRoomMemberUpdateActions(room, members);
            return actions.length > 0 ? actions : EMPTY;
          }
          default:
            return EMPTY;
        }
      })
    )
  );
  /** Request the latest member roster whenever a new peer data channel opens. */
  peerConnectedRosterSync$ = createEffect(
    () =>
      this.webrtc.onPeerConnected.pipe(
        withLatestFrom(this.store.select(selectCurrentRoom)),
        tap(([peerId, currentRoom]) => {
          if (!currentRoom)
            return;
          this.webrtc.sendToPeer(peerId, {
            type: 'member-roster-request',
            roomId: currentRoom.id
          } as any);
        })
      ),
    // Pure side-effect stream (peer messaging); dispatches no actions.
    { dispatch: false }
  );
  /** Kick off room-member sync when entering or switching to a room. */
  roomEntryRosterSync$ = createEffect(
    () =>
      this.actions$.pipe(
        ofType(
          RoomsActions.createRoomSuccess,
          RoomsActions.joinRoomSuccess,
          RoomsActions.viewServerSuccess
        ),
        tap(({ room }) => {
          // Ask every connected peer for their view of this room's roster.
          for (const peerId of this.webrtc.getConnectedPeers()) {
            try {
              this.webrtc.sendToPeer(peerId, {
                type: 'member-roster-request',
                roomId: room.id
              } as any);
            } catch {
              /* peer may have disconnected */
            }
          }
        })
      ),
    { dispatch: false }
  );
  /** Handle peer-to-peer member roster sync and explicit leave messages. */
  incomingRoomMemberEvents$ = createEffect(() =>
    this.webrtc.onMessageReceived.pipe(
      withLatestFrom(
        this.store.select(selectCurrentRoom),
        this.store.select(selectSavedRooms),
        this.store.select(selectCurrentUser)
      ),
      mergeMap(([event, currentRoom, savedRooms, currentUser]) => {
        switch (event.type) {
          case 'member-roster-request': {
            const actions = this.handleMemberRosterRequest(event, currentRoom, savedRooms, currentUser ?? null);
            return actions.length > 0 ? actions : EMPTY;
          }
          case 'member-roster': {
            const actions = this.handleMemberRoster(event, currentRoom, savedRooms, currentUser ?? null);
            return actions.length > 0 ? actions : EMPTY;
          }
          case 'member-leave': {
            const actions = this.handleMemberLeave(event, currentRoom, savedRooms);
            return actions.length > 0 ? actions : EMPTY;
          }
          case 'role-change': {
            const actions = this.handleIncomingRoleChange(event, currentRoom, savedRooms);
            return actions.length > 0 ? actions : EMPTY;
          }
          default:
            return EMPTY;
        }
      })
    )
  );
  /** Resolve a room by ID, preferring the currently viewed room instance over saved copies. */
  private resolveRoom(roomId: string | undefined, currentRoom: Room | null, savedRooms: Room[]): Room | null {
    if (!roomId)
      return null;
    if (currentRoom?.id === roomId)
      return currentRoom;
    return savedRooms.find((room) => room.id === roomId) ?? null;
  }
  /**
   * Build the local user's member record for `room`.
   * Role precedence: room host > live profile role (when this is the viewed
   * room) > previously persisted role > 'member'.
   */
  private buildCurrentUserMember(room: Room, currentUser: User, isCurrentRoom: boolean): RoomMember {
    const existingMember = findRoomMember(room.members ?? [], currentUser.oderId || currentUser.id);
    const role = room.hostId === currentUser.id
      ? 'host'
      : (isCurrentRoom ? currentUser.role : existingMember?.role ?? 'member');
    return {
      ...roomMemberFromUser(currentUser, Date.now(), role),
      // Preserve previously persisted identity/join data when available.
      id: existingMember?.id ?? currentUser.id,
      joinedAt: existingMember?.joinedAt ?? currentUser.joinedAt ?? Date.now(),
      avatarUrl: currentUser.avatarUrl ?? existingMember?.avatarUrl,
      role
    };
  }
  /**
   * Build a member record from a bare signaling presence payload, reusing any
   * persisted data for the same member (identified by `oderId`).
   */
  private buildPresenceMember(
    room: Room,
    data: { oderId: string; displayName?: string }
  ): RoomMember {
    const existingMember = findRoomMember(room.members ?? [], data.oderId);
    const now = Date.now();
    return {
      id: existingMember?.id ?? data.oderId,
      oderId: data.oderId,
      // Derive a username slug from the display name if none was persisted.
      username:
        existingMember?.username ??
        (data.displayName || 'User').toLowerCase().replace(/\s+/g, '_'),
      displayName: data.displayName || existingMember?.displayName || 'User',
      avatarUrl: existingMember?.avatarUrl,
      role: existingMember?.role ?? 'member',
      joinedAt: existingMember?.joinedAt ?? now,
      lastSeenAt: now
    };
  }
  /** Returns an updateRoom action only when the roster actually changed. */
  private createRoomMemberUpdateActions(room: Room, members: RoomMember[]): Action[] {
    return areRoomMembersEqual(room.members ?? [], members)
      ? []
      : [RoomsActions.updateRoom({ roomId: room.id, changes: { members } })];
  }
  /** Answer a peer's roster request and persist our own refreshed roster. */
  private handleMemberRosterRequest(
    event: any,
    currentRoom: Room | null,
    savedRooms: Room[],
    currentUser: User | null
  ): Action[] {
    const room = this.resolveRoom(event.roomId, currentRoom, savedRooms);
    if (!room || !event.fromPeerId)
      return [];
    const isCurrentRoom = currentRoom?.id === room.id;
    let members = room.members ?? [];
    if (currentUser) {
      members = upsertRoomMember(
        members,
        this.buildCurrentUserMember(room, currentUser, isCurrentRoom)
      );
    }
    // Side effect: reply directly to the requesting peer with our roster.
    this.webrtc.sendToPeer(event.fromPeerId, {
      type: 'member-roster',
      roomId: room.id,
      members
    } as any);
    return this.createRoomMemberUpdateActions(room, members);
  }
  /** Merge a roster received from a peer into the locally persisted roster. */
  private handleMemberRoster(
    event: any,
    currentRoom: Room | null,
    savedRooms: Room[],
    currentUser: User | null
  ): Action[] {
    const room = this.resolveRoom(event.roomId, currentRoom, savedRooms);
    if (!room || !Array.isArray(event.members))
      return [];
    let members = mergeRoomMembers(room.members ?? [], event.members);
    if (currentUser) {
      // Our own record always overrides whatever the peer knew about us.
      members = upsertRoomMember(
        members,
        this.buildCurrentUserMember(room, currentUser, currentRoom?.id === room.id)
      );
    }
    return this.createRoomMemberUpdateActions(room, members);
  }
  /** Remove a member who explicitly left; also update live users when viewing that room. */
  private handleMemberLeave(
    event: any,
    currentRoom: Room | null,
    savedRooms: Room[]
  ): Action[] {
    const roomId = typeof event.roomId === 'string' ? event.roomId : currentRoom?.id;
    const room = this.resolveRoom(roomId, currentRoom, savedRooms);
    if (!room)
      return [];
    const actions = this.createRoomMemberUpdateActions(
      room,
      removeRoomMember(room.members ?? [], event.targetUserId, event.oderId)
    );
    if (currentRoom?.id === room.id && (event.oderId || event.targetUserId)) {
      actions.push(
        UsersActions.userLeft({ userId: event.oderId || event.targetUserId })
      );
    }
    return actions;
  }
  /** Apply a peer-announced role change; also update live users when viewing that room. */
  private handleIncomingRoleChange(
    event: any,
    currentRoom: Room | null,
    savedRooms: Room[]
  ): Action[] {
    const roomId = typeof event.roomId === 'string' ? event.roomId : currentRoom?.id;
    const room = this.resolveRoom(roomId, currentRoom, savedRooms);
    if (!room || !event.targetUserId || !event.role)
      return [];
    const actions = this.createRoomMemberUpdateActions(
      room,
      updateRoomMemberRole(room.members ?? [], event.targetUserId, event.role)
    );
    if (currentRoom?.id === room.id) {
      actions.push(
        UsersActions.updateUserRole({
          userId: event.targetUserId,
          role: event.role
        })
      );
    }
    return actions;
  }
}

View File

@@ -0,0 +1,279 @@
import {
RoomMember,
User
} from '../../core/models';
/** Staleness window for persisted members: entries not seen for 60 days are pruned. */
export const ROOM_MEMBER_STALE_MS = 1000 * 60 * 60 * 24 * 60;
/** First non-empty human-readable label for a (possibly partial) member record. */
function fallbackDisplayName(member: Partial<RoomMember>): string {
  const candidates = [member.displayName, member.username, member.oderId, member.id];
  return candidates.find((value) => !!value) ?? 'User';
}
/** Slugified username derived from the best display name (lowercase, underscores). */
function fallbackUsername(member: Partial<RoomMember>): string {
  const slug = fallbackDisplayName(member)
    .trim()
    .toLowerCase()
    .replace(/\s+/g, '_');
  if (slug)
    return slug;
  return member.oderId || member.id || 'user';
}
/**
 * Coerce a member record into a fully-populated shape: fills missing
 * identity fields from fallbacks and repairs invalid timestamps.
 */
function normalizeMember(member: RoomMember, now = Date.now()): RoomMember {
  const isTimestamp = (value: unknown): value is number =>
    typeof value === 'number' && Number.isFinite(value);
  // lastSeenAt falls back to joinedAt, then to "now"; joinedAt falls back
  // to the resolved lastSeenAt so joinedAt <= lastSeenAt ordering holds.
  const lastSeenAt = isTimestamp(member.lastSeenAt)
    ? member.lastSeenAt
    : isTimestamp(member.joinedAt)
      ? member.joinedAt
      : now;
  const joinedAt = isTimestamp(member.joinedAt) ? member.joinedAt : lastSeenAt;
  return {
    id: member.id || getRoomMemberKey(member),
    oderId: member.oderId || undefined,
    username: member.username || fallbackUsername(member),
    displayName: fallbackDisplayName(member),
    avatarUrl: member.avatarUrl || undefined,
    role: member.role || 'member',
    joinedAt,
    lastSeenAt
  };
}
/** Sort order: case-insensitive display name, then the stable member key. */
function compareMembers(firstMember: RoomMember, secondMember: RoomMember): number {
  const byName = firstMember.displayName.localeCompare(
    secondMember.displayName,
    undefined,
    { sensitivity: 'base' }
  );
  return byName !== 0
    ? byName
    : getRoomMemberKey(firstMember).localeCompare(getRoomMemberKey(secondMember));
}
/**
 * Pick the surviving role when two records for the same member disagree.
 * An elevated role always beats the plain 'member' default; otherwise the
 * record preferred by recency wins.
 */
function mergeRole(
  existingRole: RoomMember['role'],
  incomingRole: RoomMember['role'],
  preferIncoming: boolean
): RoomMember['role'] {
  const existingIsPlain = existingRole === 'member';
  const incomingIsPlain = incomingRole === 'member';
  if (existingIsPlain !== incomingIsPlain) {
    // Exactly one side is the plain default: keep the elevated one.
    return existingIsPlain ? incomingRole : existingRole;
  }
  return preferIncoming ? incomingRole : existingRole;
}
/**
 * Combine two records for the same member, keeping the best-known fields.
 * The record seen more recently wins ties on descriptive fields; identity
 * (`id`) sticks with the existing record, `oderId` prefers the incoming one.
 */
function mergeMembers(
  existingMember: RoomMember | undefined,
  incomingMember: RoomMember,
  now = Date.now()
): RoomMember {
  const incoming = normalizeMember(incomingMember, now);
  if (!existingMember)
    return incoming;
  const existing = normalizeMember(existingMember, now);
  const preferIncoming = incoming.lastSeenAt >= existing.lastSeenAt;
  const [primary, secondary] = preferIncoming ? [incoming, existing] : [existing, incoming];
  return {
    id: existing.id || incoming.id,
    oderId: incoming.oderId || existing.oderId,
    username: primary.username || secondary.username,
    displayName: primary.displayName || secondary.displayName,
    avatarUrl: primary.avatarUrl || secondary.avatarUrl,
    role: mergeRole(existing.role, incoming.role, preferIncoming),
    // Earliest join, latest sighting.
    joinedAt: Math.min(existing.joinedAt, incoming.joinedAt),
    lastSeenAt: Math.max(existing.lastSeenAt, incoming.lastSeenAt)
  };
}
/** Stable member key, preferring `oderId` when available. */
export function getRoomMemberKey(member: Pick<RoomMember, 'id' | 'oderId'>): string {
  if (member.oderId)
    return member.oderId;
  return member.id || '';
}
/** Find a room member by either their local ID or their `oderId`. */
export function findRoomMember(
  members: RoomMember[] = [],
  identifier?: string
): RoomMember | undefined {
  if (!identifier)
    return undefined;
  for (const member of members) {
    if (member.id === identifier || member.oderId === identifier)
      return member;
  }
  return undefined;
}
/**
 * Convert a live `User` into a persisted room-member record.
 * `roleOverride` takes precedence over the user's own role when given.
 */
export function roomMemberFromUser(
  user: User,
  seenAt = Date.now(),
  roleOverride?: RoomMember['role']
): RoomMember {
  const draft: RoomMember = {
    id: user.id || user.oderId,
    oderId: user.oderId || undefined,
    username: user.username || '',
    displayName: user.displayName || user.username || 'User',
    avatarUrl: user.avatarUrl,
    role: roleOverride || user.role || 'member',
    joinedAt: user.joinedAt || seenAt,
    lastSeenAt: seenAt
  };
  // Normalization fills any remaining gaps (e.g. empty username slug).
  return normalizeMember(draft, seenAt);
}
/** Deduplicate, sanitize, sort, and prune stale room members. */
export function pruneRoomMembers(
members: RoomMember[] = [],
now = Date.now()
): RoomMember[] {
const cutoff = now - ROOM_MEMBER_STALE_MS;
const deduplicatedMembers = new Map<string, RoomMember>();
for (const member of members) {
const key = getRoomMemberKey(member);
if (!key)
continue;
const normalizedMember = normalizeMember(member, now);
if (normalizedMember.lastSeenAt < cutoff)
continue;
deduplicatedMembers.set(
key,
mergeMembers(deduplicatedMembers.get(key), normalizedMember, now)
);
}
return Array.from(deduplicatedMembers.values()).sort(compareMembers);
}
/** Upsert a member into a room roster while preserving the best known data. */
export function upsertRoomMember(
members: RoomMember[] = [],
member: RoomMember,
now = Date.now()
): RoomMember[] {
const key = getRoomMemberKey(member);
const nextMembers = pruneRoomMembers(members, now);
if (!key)
return nextMembers;
const memberIndex = nextMembers.findIndex((entry) => getRoomMemberKey(entry) === key);
const mergedMember = mergeMembers(memberIndex >= 0 ? nextMembers[memberIndex] : undefined, member, now);
if (memberIndex >= 0) {
const updatedMembers = [...nextMembers];
updatedMembers[memberIndex] = mergedMember;
return pruneRoomMembers(updatedMembers, now);
}
return pruneRoomMembers([...nextMembers, mergedMember], now);
}
/** Merge a remote roster into the local roster. */
export function mergeRoomMembers(
localMembers: RoomMember[] = [],
incomingMembers: RoomMember[] = [],
now = Date.now()
): RoomMember[] {
let mergedMembers = pruneRoomMembers(localMembers, now);
for (const incomingMember of incomingMembers) {
mergedMembers = upsertRoomMember(mergedMembers, incomingMember, now);
}
return pruneRoomMembers(mergedMembers, now);
}
/** Update the last-seen timestamp of a known room member. */
export function touchRoomMemberLastSeen(
  members: RoomMember[] = [],
  identifier: string,
  seenAt = Date.now()
): RoomMember[] {
  const sanitized = pruneRoomMembers(members, seenAt);
  const index = sanitized.findIndex(
    (member) => member.id === identifier || member.oderId === identifier
  );
  if (index < 0)
    return sanitized;
  const target = sanitized[index];
  // Never move the timestamp backwards.
  const touched = normalizeMember(
    { ...target, lastSeenAt: Math.max(target.lastSeenAt, seenAt) },
    seenAt
  );
  const next = [...sanitized];
  next[index] = touched;
  return pruneRoomMembers(next, seenAt);
}
/** Remove a member from a room roster by either ID flavor. */
export function removeRoomMember(
  members: RoomMember[] = [],
  ...identifiers: Array<string | undefined>
): RoomMember[] {
  const idsToRemove = new Set<string>();
  for (const identifier of identifiers) {
    if (identifier)
      idsToRemove.add(identifier);
  }
  const sanitized = pruneRoomMembers(members);
  if (idsToRemove.size === 0)
    return sanitized;
  return sanitized.filter(
    (member) => !idsToRemove.has(member.id) && !idsToRemove.has(member.oderId || '')
  );
}
/** Update a persisted member role without touching presence timestamps. */
export function updateRoomMemberRole(
  members: RoomMember[] = [],
  identifier: string,
  role: RoomMember['role']
): RoomMember[] {
  const sanitized = pruneRoomMembers(members);
  const index = sanitized.findIndex(
    (member) => member.id === identifier || member.oderId === identifier
  );
  if (index < 0)
    return sanitized;
  const next = sanitized.map((member, memberIndex) =>
    memberIndex === index ? { ...member, role } : member
  );
  return pruneRoomMembers(next);
}
/** Compare two room rosters after normalization and pruning. */
export function areRoomMembersEqual(
  firstMembers: RoomMember[] = [],
  secondMembers: RoomMember[] = []
): boolean {
  // Same `now` for both sides so the staleness cutoff is identical.
  const now = Date.now();
  const snapshot = (members: RoomMember[]): string =>
    JSON.stringify(pruneRoomMembers(members, now));
  return snapshot(firstMembers) === snapshot(secondMembers);
}

View File

@@ -40,6 +40,7 @@ import {
VoiceState
} from '../../core/models';
import { NotificationAudioService, AppSound } from '../../core/services/notification-audio.service';
import { findRoomMember } from './room-members.helpers';
/** Build a minimal User object from signaling payload. */
function buildSignalingUser(
@@ -59,6 +60,21 @@ function buildSignalingUser(
};
}
/** Best-known persisted member metadata for a signaling user in the viewed room. */
function buildKnownUserExtras(room: Room | null, identifier: string): Record<string, unknown> {
  if (!room)
    return {};
  const knownMember = findRoomMember(room.members ?? [], identifier);
  if (!knownMember)
    return {};
  const { username, avatarUrl, role, joinedAt } = knownMember;
  return { username, avatarUrl, role, joinedAt };
}
/** Returns true when the message's server ID does not match the viewed server. */
function isWrongServer(
msgServerId: string | undefined,
@@ -77,6 +93,13 @@ export class RoomsEffects {
private serverDirectory = inject(ServerDirectoryService);
private audioService = inject(NotificationAudioService);
/**
* Tracks user IDs we already know are in voice. Lives outside the
* NgRx store so it survives `clearUsers()` dispatched on server switches
* and prevents false join/leave sounds during state re-syncs.
*/
private knownVoiceUsers = new Set<string>();
/** Loads all saved rooms from the local database. */
loadRooms$ = createEffect(() =>
this.actions$.pipe(
@@ -318,8 +341,18 @@ export class RoomsEffects {
forgetRoom$ = createEffect(() =>
this.actions$.pipe(
ofType(RoomsActions.forgetRoom),
withLatestFrom(this.store.select(selectCurrentRoom)),
switchMap(([{ roomId }, currentRoom]) => {
withLatestFrom(this.store.select(selectCurrentUser)),
switchMap(([{ roomId }, currentUser]) => {
if (currentUser) {
this.webrtc.broadcastMessage({
type: 'member-leave',
roomId,
targetUserId: currentUser.id,
oderId: currentUser.oderId,
displayName: currentUser.displayName
});
}
// Delete from local DB
this.db.deleteRoom(roomId);
@@ -383,11 +416,8 @@ export class RoomsEffects {
() =>
this.actions$.pipe(
ofType(RoomsActions.updateRoom),
withLatestFrom(this.store.select(selectCurrentRoom)),
tap(([{ roomId, changes }, currentRoom]) => {
if (currentRoom && currentRoom.id === roomId) {
tap(({ roomId, changes }) => {
this.db.updateRoom(roomId, changes);
}
})
),
{ dispatch: false }
@@ -522,7 +552,10 @@ export class RoomsEffects {
onLeaveRoom$ = createEffect(() =>
this.actions$.pipe(
ofType(RoomsActions.leaveRoomSuccess),
mergeMap(() => [MessagesActions.clearMessages(), UsersActions.clearUsers()])
mergeMap(() => {
this.knownVoiceUsers.clear();
return [MessagesActions.clearMessages(), UsersActions.clearUsers()];
})
)
);
@@ -545,7 +578,11 @@ export class RoomsEffects {
const joinActions = (message.users as { oderId: string; displayName: string }[])
.filter((u) => u.oderId !== myId)
.map((u) => UsersActions.userJoined({ user: buildSignalingUser(u) }));
.map((u) =>
UsersActions.userJoined({
user: buildSignalingUser(u, buildKnownUserExtras(currentRoom, u.oderId))
})
);
return [UsersActions.clearUsers(), ...joinActions];
}
@@ -554,13 +591,18 @@ export class RoomsEffects {
if (isWrongServer(message.serverId, viewedServerId) || message.oderId === myId)
return EMPTY;
return [UsersActions.userJoined({ user: buildSignalingUser(message) })];
return [
UsersActions.userJoined({
user: buildSignalingUser(message, buildKnownUserExtras(currentRoom, message.oderId))
})
];
}
case 'user_left': {
if (isWrongServer(message.serverId, viewedServerId))
return EMPTY;
this.knownVoiceUsers.delete(message.oderId);
return [UsersActions.userLeft({ userId: message.oderId })];
}
@@ -619,20 +661,28 @@ export class RoomsEffects {
return EMPTY;
// Detect voice-connection transitions to play join/leave sounds.
// Use the local knownVoiceUsers set (not the store) so that
// clearUsers() from server-switching doesn't create false transitions.
const weAreInVoice = this.webrtc.isVoiceConnected();
if (weAreInVoice) {
const existingUser = allUsers.find((u) => u.id === userId || u.oderId === userId) as any;
const wasConnected = existingUser?.voiceState?.isConnected ?? false;
const nowConnected = vs.isConnected ?? false;
if (!wasConnected && nowConnected) {
if (weAreInVoice) {
const wasKnown = this.knownVoiceUsers.has(userId);
if (!wasKnown && nowConnected) {
this.audioService.play(AppSound.Joining);
} else if (wasConnected && !nowConnected) {
} else if (wasKnown && !nowConnected) {
this.audioService.play(AppSound.Leave);
}
}
// Keep the tracking set in sync
if (nowConnected) {
this.knownVoiceUsers.add(userId);
} else {
this.knownVoiceUsers.delete(userId);
}
if (!userExists) {
return of(
UsersActions.userJoined({

View File

@@ -6,6 +6,7 @@ import {
Channel
} from '../../core/models';
import { RoomsActions } from './rooms.actions';
import { pruneRoomMembers } from './room-members.helpers';
/** Default channels for a new server */
export function defaultChannels(): Channel[] {
@@ -40,18 +41,28 @@ function deduplicateRooms(rooms: Room[]): Room[] {
return Array.from(seen.values());
}
/** Normalize room defaults and prune any stale persisted member entries. */
function enrichRoom(room: Room): Room {
  const channels = room.channels || defaultChannels();
  const members = pruneRoomMembers(room.members || []);
  return { ...room, channels, members };
}
/** Upsert a room into a saved-rooms list (add or replace by id) */
function upsertRoom(savedRooms: Room[], room: Room): Room[] {
const normalizedRoom = enrichRoom(room);
const idx = savedRooms.findIndex(existingRoom => existingRoom.id === room.id);
if (idx >= 0) {
const updated = [...savedRooms];
updated[idx] = room;
updated[idx] = normalizedRoom;
return updated;
}
return [...savedRooms, room];
return [...savedRooms, normalizedRoom];
}
/** State shape for the rooms feature slice. */
@@ -103,7 +114,7 @@ export const roomsReducer = createReducer(
on(RoomsActions.loadRoomsSuccess, (state, { rooms }) => ({
...state,
savedRooms: deduplicateRooms(rooms),
savedRooms: deduplicateRooms(rooms.map(enrichRoom)),
loading: false
})),
@@ -140,8 +151,7 @@ export const roomsReducer = createReducer(
})),
on(RoomsActions.createRoomSuccess, (state, { room }) => {
const enriched = { ...room,
channels: room.channels || defaultChannels() };
const enriched = enrichRoom(room);
return {
...state,
@@ -167,8 +177,7 @@ export const roomsReducer = createReducer(
})),
on(RoomsActions.joinRoomSuccess, (state, { room }) => {
const enriched = { ...room,
channels: room.channels || defaultChannels() };
const enriched = enrichRoom(room);
return {
...state,
@@ -208,8 +217,7 @@ export const roomsReducer = createReducer(
})),
on(RoomsActions.viewServerSuccess, (state, { room }) => {
const enriched = { ...room,
channels: room.channels || defaultChannels() };
const enriched = enrichRoom(room);
return {
...state,
@@ -231,7 +239,21 @@ export const roomsReducer = createReducer(
...state,
roomSettings: settings,
currentRoom: state.currentRoom
? {
? enrichRoom({
...state.currentRoom,
name: settings.name,
description: settings.description,
topic: settings.topic,
isPrivate: settings.isPrivate,
password: settings.password,
maxUsers: settings.maxUsers
})
: null,
savedRooms:
state.currentRoom
? upsertRoom(
state.savedRooms,
{
...state.currentRoom,
name: settings.name,
description: settings.description,
@@ -240,7 +262,8 @@ export const roomsReducer = createReducer(
password: settings.password,
maxUsers: settings.maxUsers
}
: null
)
: state.savedRooms
})),
on(RoomsActions.updateRoomSettingsFailure, (state, { error }) => ({
@@ -265,7 +288,8 @@ export const roomsReducer = createReducer(
// Set current room
on(RoomsActions.setCurrentRoom, (state, { room }) => ({
...state,
currentRoom: room,
currentRoom: enrichRoom(room),
savedRooms: upsertRoom(state.savedRooms, room),
isConnected: true
})),
@@ -279,13 +303,19 @@ export const roomsReducer = createReducer(
// Update room
on(RoomsActions.updateRoom, (state, { roomId, changes }) => {
if (state.currentRoom?.id !== roomId)
const baseRoom = state.savedRooms.find((savedRoom) => savedRoom.id === roomId)
|| (state.currentRoom?.id === roomId ? state.currentRoom : null);
if (!baseRoom)
return state;
const updatedRoom = enrichRoom({ ...baseRoom,
...changes });
return {
...state,
currentRoom: { ...state.currentRoom,
...changes }
currentRoom: state.currentRoom?.id === roomId ? updatedRoom : state.currentRoom,
savedRooms: upsertRoom(state.savedRooms, updatedRoom)
};
}),
@@ -294,20 +324,31 @@ export const roomsReducer = createReducer(
if (state.currentRoom?.id !== roomId)
return state;
const updatedRoom = enrichRoom({ ...state.currentRoom,
icon,
iconUpdatedAt });
return {
...state,
currentRoom: { ...state.currentRoom,
icon,
iconUpdatedAt }
currentRoom: updatedRoom,
savedRooms: upsertRoom(state.savedRooms, updatedRoom)
};
}),
// Receive room update
on(RoomsActions.receiveRoomUpdate, (state, { room }) => ({
on(RoomsActions.receiveRoomUpdate, (state, { room }) => {
if (!state.currentRoom)
return state;
const updatedRoom = enrichRoom({ ...state.currentRoom,
...room });
return {
...state,
currentRoom: state.currentRoom ? { ...state.currentRoom,
...room } : null
})),
currentRoom: updatedRoom,
savedRooms: upsertRoom(state.savedRooms, updatedRoom)
};
}),
// Clear search results
on(RoomsActions.clearSearchResults, (state) => ({