Files
Toju/src/app/features/voice/voice-controls/voice-controls.component.ts
Myx cb2c0495b9
All checks were successful
Queue Release Build / prepare (push) Successful in 16s
Deploy Web Apps / deploy (push) Successful in 10m15s
Queue Release Build / build-linux (push) Successful in 26m14s
Queue Release Build / build-windows (push) Successful in 25m41s
Queue Release Build / finalize (push) Successful in 1m51s
hotfix handshake issue
2026-03-19 03:34:26 +01:00

576 lines
17 KiB
TypeScript

/* eslint-disable @typescript-eslint/member-ordering, @typescript-eslint/no-unused-vars, complexity */
import {
Component,
inject,
signal,
OnInit,
OnDestroy,
computed
} from '@angular/core';
import { CommonModule } from '@angular/common';
import { Store } from '@ngrx/store';
import { NgIcon, provideIcons } from '@ng-icons/core';
import {
lucideMic,
lucideMicOff,
lucideVideo,
lucideVideoOff,
lucideMonitor,
lucideMonitorOff,
lucidePhoneOff,
lucideSettings,
lucideHeadphones
} from '@ng-icons/lucide';
import { WebRTCService } from '../../../core/services/webrtc.service';
import { VoiceSessionService } from '../../../core/services/voice-session.service';
import { VoiceActivityService } from '../../../core/services/voice-activity.service';
import { UsersActions } from '../../../store/users/users.actions';
import { selectCurrentUser } from '../../../store/users/users.selectors';
import { selectCurrentRoom } from '../../../store/rooms/rooms.selectors';
import { SettingsModalService } from '../../../core/services/settings-modal.service';
import { loadVoiceSettingsFromStorage, saveVoiceSettingsToStorage } from '../../../core/services/voice-settings.storage';
import { ScreenShareQuality } from '../../../core/services/webrtc';
import {
DebugConsoleComponent,
ScreenShareQualityDialogComponent,
UserAvatarComponent
} from '../../../shared';
import { PlaybackOptions, VoicePlaybackService } from './services/voice-playback.service';
/**
 * Minimal descriptor for one entry returned by
 * `navigator.mediaDevices.enumerateDevices()`, used to populate the
 * input/output device pickers.
 */
interface AudioDevice {
  // Stable identifier passed back to getUserMedia / output-device selection.
  deviceId: string;
  // Human-readable device name; browsers may report an empty string until
  // microphone permission has been granted.
  label: string;
}
@Component({
  selector: 'app-voice-controls',
  standalone: true,
  imports: [
    CommonModule,
    NgIcon,
    DebugConsoleComponent,
    ScreenShareQualityDialogComponent,
    UserAvatarComponent
  ],
  viewProviders: [
    provideIcons({
      lucideMic,
      lucideMicOff,
      lucideVideo,
      lucideVideoOff,
      lucideMonitor,
      lucideMonitorOff,
      lucidePhoneOff,
      lucideSettings,
      lucideHeadphones
    })
  ],
  templateUrl: './voice-controls.component.html'
})
/**
 * Voice call toolbar: connect/disconnect, mute, deafen, screen share and
 * per-user audio settings. Mirrors local voice state into the NgRx store
 * (so the rooms side panel can render it) and broadcasts it to peers via
 * the WebRTC data channel.
 */
export class VoiceControlsComponent implements OnInit, OnDestroy {
  private webrtcService = inject(WebRTCService);
  private voiceSessionService = inject(VoiceSessionService);
  private voiceActivity = inject(VoiceActivityService);
  private voicePlayback = inject(VoicePlaybackService);
  private store = inject(Store);
  private settingsModal = inject(SettingsModalService);

  // Shared Tailwind classes for the round control buttons; hoisted so the
  // three get*ButtonClass() helpers cannot drift apart.
  private static readonly BUTTON_BASE =
    'w-10 h-10 inline-flex items-center justify-center rounded-full transition-colors disabled:opacity-50 disabled:cursor-not-allowed';

  currentUser = this.store.selectSignal(selectCurrentUser);
  currentRoom = this.store.selectSignal(selectCurrentRoom);
  isConnected = computed(() => this.webrtcService.isVoiceConnected());
  showConnectionError = computed(() => this.webrtcService.shouldShowConnectionError());
  connectionErrorMessage = computed(() => this.webrtcService.connectionErrorMessage());
  isMuted = signal(false);
  isDeafened = signal(false);
  isScreenSharing = this.webrtcService.isScreenSharing;
  showSettings = signal(false);
  inputDevices = signal<AudioDevice[]>([]);
  outputDevices = signal<AudioDevice[]>([]);
  selectedInputDevice = signal<string>('');
  selectedOutputDevice = signal<string>('');
  inputVolume = signal(100);
  outputVolume = signal(100);
  audioBitrate = signal(96);
  latencyProfile = signal<'low' | 'balanced' | 'high'>('balanced');
  includeSystemAudio = signal(false);
  noiseReduction = signal(true);
  screenShareQuality = signal<ScreenShareQuality>('balanced');
  askScreenShareQuality = signal(true);
  showScreenShareQualityDialog = signal(false);

  /** Snapshot of the playback-related settings the playback service needs. */
  private playbackOptions(): PlaybackOptions {
    return {
      isConnected: this.isConnected(),
      outputVolume: this.outputVolume() / 100,
      isDeafened: this.isDeafened()
    };
  }

  async ngOnInit(): Promise<void> {
    await this.loadAudioDevices();
    // Load persisted voice settings and apply them to the WebRTC layer.
    this.loadSettings();
    await this.applySettingsToWebRTC();
  }

  ngOnDestroy(): void {
    // Only tear down remote playback when voice is not active: the floating
    // session controls keep the call alive after this component is destroyed.
    if (!this.webrtcService.isVoiceConnected()) {
      this.voicePlayback.teardownAll();
    }
  }

  /** Enumerate audio devices into the input/output picker signals. Best effort. */
  async loadAudioDevices(): Promise<void> {
    try {
      if (!navigator.mediaDevices?.enumerateDevices) {
        return;
      }
      const devices = await navigator.mediaDevices.enumerateDevices();
      this.inputDevices.set(
        devices
          .filter((device) => device.kind === 'audioinput')
          .map((device) => ({ deviceId: device.deviceId,
            label: device.label }))
      );
      this.outputDevices.set(
        devices
          .filter((device) => device.kind === 'audiooutput')
          .map((device) => ({ deviceId: device.deviceId,
            label: device.label }))
      );
    } catch (_error) {
      // Best effort: leave the device lists empty if enumeration fails.
    }
  }

  /**
   * Join voice: acquire the mic, hand the stream to WebRTC, start the
   * presence heartbeat and publish our voice state locally and to peers.
   */
  async connect(): Promise<void> {
    try {
      // Require signaling connectivity first
      const ok = await this.webrtcService.ensureSignalingConnected();
      if (!ok) {
        return;
      }
      if (!navigator.mediaDevices?.getUserMedia) {
        return;
      }
      const stream = await navigator.mediaDevices.getUserMedia({
        audio: {
          deviceId: this.selectedInputDevice() || undefined,
          echoCancellation: true,
          // NOTE(review): browser noiseSuppression is enabled only when the
          // app-level noise reduction is OFF. This reads as an inversion —
          // confirm it is the intended "browser fallback" behaviour and not
          // a flipped boolean.
          noiseSuppression: !this.noiseReduction()
        }
      });
      await this.webrtcService.setLocalStream(stream);
      // Track local mic for voice-activity visualisation
      // Use oderId||id to match the key used by the rooms-side-panel template.
      const userId = this.currentUser()?.oderId || this.currentUser()?.id;
      if (userId) {
        this.voiceActivity.trackLocalMic(userId, stream);
      }
      // Start voice heartbeat to broadcast presence every 5 seconds
      const room = this.currentRoom();
      const roomId = this.currentUser()?.voiceState?.roomId || room?.id;
      const serverId = room?.id;
      this.webrtcService.startVoiceHeartbeat(roomId, serverId);
      // Update local user's voice state in the store so the side panel
      // shows us in the voice channel with a speaking indicator.
      const user = this.currentUser();
      if (user?.id) {
        this.store.dispatch(
          UsersActions.updateVoiceState({
            userId: user.id,
            voiceState: {
              isConnected: true,
              isMuted: this.isMuted(),
              isDeafened: this.isDeafened(),
              roomId,
              serverId
            }
          })
        );
      }
      // Broadcast voice state to other users
      this.webrtcService.broadcastMessage({
        type: 'voice-state',
        oderId: this.currentUser()?.oderId || this.currentUser()?.id,
        displayName: this.currentUser()?.displayName || 'User',
        voiceState: {
          isConnected: true,
          isMuted: this.isMuted(),
          isDeafened: this.isDeafened(),
          roomId,
          serverId
        }
      });
      // Play any pending remote streams now that we're connected
      this.voicePlayback.playPendingStreams(this.playbackOptions());
      // Persist settings after successful connection
      this.saveSettings();
    } catch (_error) {
      // Best effort: a denied mic permission or signaling failure simply
      // leaves us disconnected; the error banner is driven by the service.
    }
  }

  /** Retry connection when there's a connection error. */
  async retryConnection(): Promise<void> {
    try {
      await this.webrtcService.ensureSignalingConnected(10000);
    } catch (_error) {
      // Swallow: the connection-error computed signals surface the failure.
    }
  }

  /**
   * Leave voice: stop heartbeat/screen share, notify peers and the store,
   * tear down playback and reset the local mute/deafen flags.
   */
  disconnect(): void {
    // Stop voice heartbeat
    this.webrtcService.stopVoiceHeartbeat();
    // Broadcast voice disconnect to other users
    this.webrtcService.broadcastMessage({
      type: 'voice-state',
      oderId: this.currentUser()?.oderId || this.currentUser()?.id,
      displayName: this.currentUser()?.displayName || 'User',
      voiceState: {
        isConnected: false,
        isMuted: false,
        isDeafened: false,
        serverId: this.currentRoom()?.id
      }
    });
    // Stop screen sharing if active
    if (this.isScreenSharing()) {
      this.webrtcService.stopScreenShare();
    }
    // Untrack local mic from voice-activity visualisation
    const userId = this.currentUser()?.oderId || this.currentUser()?.id;
    if (userId) {
      this.voiceActivity.untrackLocalMic(userId);
    }
    // Disable voice (stops audio tracks but keeps peer connections open for chat)
    this.webrtcService.disableVoice();
    this.voicePlayback.teardownAll();
    this.voicePlayback.updateDeafened(false);
    const user = this.currentUser();
    if (user?.id) {
      this.store.dispatch(
        UsersActions.updateVoiceState({
          userId: user.id,
          voiceState: {
            isConnected: false,
            isMuted: false,
            isDeafened: false,
            roomId: undefined,
            serverId: undefined
          }
        })
      );
    }
    // End voice session for floating controls
    this.voiceSessionService.endSession();
    this.isMuted.set(false);
    this.isDeafened.set(false);
  }

  /** Toggle microphone mute, updating store and peers. */
  toggleMute(): void {
    this.isMuted.update((current) => !current);
    this.webrtcService.toggleMute(this.isMuted());
    // Update local store so the side panel reflects the mute state
    const user = this.currentUser();
    if (user?.id) {
      this.store.dispatch(
        UsersActions.updateVoiceState({
          userId: user.id,
          voiceState: {
            isConnected: this.isConnected(),
            isMuted: this.isMuted(),
            isDeafened: this.isDeafened()
          }
        })
      );
    }
    // Broadcast mute state change
    this.webrtcService.broadcastMessage({
      type: 'voice-state',
      oderId: this.currentUser()?.oderId || this.currentUser()?.id,
      displayName: this.currentUser()?.displayName || 'User',
      voiceState: {
        isConnected: this.isConnected(),
        isMuted: this.isMuted(),
        isDeafened: this.isDeafened()
      }
    });
  }

  /** Toggle deafen (muting incoming audio); deafening also forces mute. */
  toggleDeafen(): void {
    this.isDeafened.update((current) => !current);
    this.webrtcService.toggleDeafen(this.isDeafened());
    this.voicePlayback.updateDeafened(this.isDeafened());
    // When deafening, also mute
    if (this.isDeafened() && !this.isMuted()) {
      this.isMuted.set(true);
      this.webrtcService.toggleMute(true);
    }
    // Broadcast deafen state change
    this.webrtcService.broadcastMessage({
      type: 'voice-state',
      oderId: this.currentUser()?.oderId || this.currentUser()?.id,
      displayName: this.currentUser()?.displayName || 'User',
      voiceState: {
        isConnected: this.isConnected(),
        isMuted: this.isMuted(),
        isDeafened: this.isDeafened()
      }
    });
    // Update local store so the side panel reflects the deafen/mute state
    const user = this.currentUser();
    if (user?.id) {
      this.store.dispatch(
        UsersActions.updateVoiceState({
          userId: user.id,
          voiceState: {
            isConnected: this.isConnected(),
            isMuted: this.isMuted(),
            isDeafened: this.isDeafened()
          }
        })
      );
    }
  }

  /** Start/stop screen share, optionally prompting for quality first. */
  async toggleScreenShare(): Promise<void> {
    if (this.isScreenSharing()) {
      this.webrtcService.stopScreenShare();
    } else {
      this.syncScreenShareSettings();
      if (this.askScreenShareQuality()) {
        this.showScreenShareQualityDialog.set(true);
        return;
      }
      await this.startScreenShareWithOptions(this.screenShareQuality());
    }
  }

  onScreenShareQualityCancelled(): void {
    this.showScreenShareQualityDialog.set(false);
  }

  async onScreenShareQualityConfirmed(quality: ScreenShareQuality): Promise<void> {
    this.showScreenShareQualityDialog.set(false);
    this.screenShareQuality.set(quality);
    this.saveSettings();
    await this.startScreenShareWithOptions(quality);
  }

  /** Opens the shared settings modal on its voice tab. */
  toggleSettings(): void {
    this.settingsModal.open('voice');
  }

  closeSettings(): void {
    this.showSettings.set(false);
  }

  /** Input device changed: persist and, if in a call, reconnect with it. */
  onInputDeviceChange(event: Event): void {
    const select = event.target as HTMLSelectElement;
    this.selectedInputDevice.set(select.value);
    // Reconnect with new device if connected. Fire-and-forget: connect()
    // reports its own failures via the connection-error signals.
    if (this.isConnected()) {
      this.disconnect();
      void this.connect();
    }
    this.saveSettings();
  }

  /** Output device changed: apply to playback elements and persist. */
  onOutputDeviceChange(event: Event): void {
    const select = event.target as HTMLSelectElement;
    this.selectedOutputDevice.set(select.value);
    void this.applyOutputDevice();
    this.saveSettings();
  }

  onInputVolumeChange(event: Event): void {
    const input = event.target as HTMLInputElement;
    this.inputVolume.set(parseInt(input.value, 10));
    // Services expect a 0..1 gain; the UI works in 0..100.
    this.webrtcService.setInputVolume(this.inputVolume() / 100);
    this.saveSettings();
  }

  onOutputVolumeChange(event: Event): void {
    const input = event.target as HTMLInputElement;
    this.outputVolume.set(parseInt(input.value, 10));
    this.webrtcService.setOutputVolume(this.outputVolume() / 100);
    this.voicePlayback.updateOutputVolume(this.outputVolume() / 100);
    this.saveSettings();
  }

  onLatencyProfileChange(event: Event): void {
    const select = event.target as HTMLSelectElement;
    const profile = select.value as 'low' | 'balanced' | 'high';
    this.latencyProfile.set(profile);
    this.webrtcService.setLatencyProfile(profile);
    this.saveSettings();
  }

  onAudioBitrateChange(event: Event): void {
    const input = event.target as HTMLInputElement;
    const kbps = parseInt(input.value, 10);
    this.audioBitrate.set(kbps);
    this.webrtcService.setAudioBitrate(kbps);
    this.saveSettings();
  }

  onIncludeSystemAudioChange(event: Event): void {
    const input = event.target as HTMLInputElement;
    this.includeSystemAudio.set(!!input.checked);
    this.saveSettings();
  }

  async onNoiseReductionChange(event: Event): Promise<void> {
    const input = event.target as HTMLInputElement;
    this.noiseReduction.set(!!input.checked);
    await this.webrtcService.toggleNoiseReduction(this.noiseReduction());
    this.saveSettings();
  }

  /** Restore all persisted voice settings into the component signals. */
  private loadSettings(): void {
    const settings = loadVoiceSettingsFromStorage();
    this.selectedInputDevice.set(settings.inputDevice);
    this.selectedOutputDevice.set(settings.outputDevice);
    this.inputVolume.set(settings.inputVolume);
    this.outputVolume.set(settings.outputVolume);
    this.audioBitrate.set(settings.audioBitrate);
    this.latencyProfile.set(settings.latencyProfile);
    this.includeSystemAudio.set(settings.includeSystemAudio);
    this.noiseReduction.set(settings.noiseReduction);
    this.screenShareQuality.set(settings.screenShareQuality);
    this.askScreenShareQuality.set(settings.askScreenShareQuality);
  }

  /** Persist the current signal values to local storage. */
  private saveSettings(): void {
    saveVoiceSettingsToStorage({
      inputDevice: this.selectedInputDevice(),
      outputDevice: this.selectedOutputDevice(),
      inputVolume: this.inputVolume(),
      outputVolume: this.outputVolume(),
      audioBitrate: this.audioBitrate(),
      latencyProfile: this.latencyProfile(),
      includeSystemAudio: this.includeSystemAudio(),
      noiseReduction: this.noiseReduction(),
      screenShareQuality: this.screenShareQuality(),
      askScreenShareQuality: this.askScreenShareQuality()
    });
  }

  /**
   * Push the loaded settings into the WebRTC/playback services.
   * Fix: the async calls are now awaited inside the try block — previously
   * their rejections escaped the catch and became unhandled rejections.
   */
  private async applySettingsToWebRTC(): Promise<void> {
    try {
      this.webrtcService.setOutputVolume(this.outputVolume() / 100);
      this.voicePlayback.updateOutputVolume(this.outputVolume() / 100);
      this.webrtcService.setInputVolume(this.inputVolume() / 100);
      this.webrtcService.setAudioBitrate(this.audioBitrate());
      this.webrtcService.setLatencyProfile(this.latencyProfile());
      await this.applyOutputDevice();
      // Always sync the desired noise-reduction preference (even before
      // a mic stream exists - the flag will be honoured on connect).
      await this.webrtcService.toggleNoiseReduction(this.noiseReduction());
    } catch {
      // Best effort: an uninitialised audio stack must not break init.
    }
  }

  /** Route playback to the selected output device, if one is chosen. */
  private async applyOutputDevice(): Promise<void> {
    const deviceId = this.selectedOutputDevice();
    if (!deviceId) {
      return;
    }
    this.voicePlayback.applyOutputDevice(deviceId);
  }

  /** Re-read screen-share-related settings in case another view changed them. */
  private syncScreenShareSettings(): void {
    const settings = loadVoiceSettingsFromStorage();
    this.includeSystemAudio.set(settings.includeSystemAudio);
    this.screenShareQuality.set(settings.screenShareQuality);
    this.askScreenShareQuality.set(settings.askScreenShareQuality);
  }

  private async startScreenShareWithOptions(quality: ScreenShareQuality): Promise<void> {
    try {
      await this.webrtcService.startScreenShare({
        includeSystemAudio: this.includeSystemAudio(),
        quality
      });
    } catch (_error) {
      // Best effort: user cancelling the picker is the common failure.
    }
  }

  getMuteButtonClass(): string {
    const base = VoiceControlsComponent.BUTTON_BASE;
    if (this.isMuted()) {
      return `${base} bg-destructive/20 text-destructive hover:bg-destructive/30`;
    }
    return `${base} bg-secondary text-foreground hover:bg-secondary/80`;
  }

  getDeafenButtonClass(): string {
    const base = VoiceControlsComponent.BUTTON_BASE;
    if (this.isDeafened()) {
      return `${base} bg-destructive/20 text-destructive hover:bg-destructive/30`;
    }
    return `${base} bg-secondary text-foreground hover:bg-secondary/80`;
  }

  getScreenShareButtonClass(): string {
    const base = VoiceControlsComponent.BUTTON_BASE;
    if (this.isScreenSharing()) {
      return `${base} bg-primary/20 text-primary hover:bg-primary/30`;
    }
    return `${base} bg-secondary text-foreground hover:bg-secondary/80`;
  }
}