import {
|
|
Injectable,
|
|
effect,
|
|
inject
|
|
} from '@angular/core';
|
|
import { WebRTCService } from '../../../../core/services/webrtc.service';
|
|
import { STORAGE_KEY_USER_VOLUMES } from '../../../../core/constants';
|
|
|
|
/**
 * Snapshot of playback-related session state supplied by callers when a
 * remote stream arrives or pending streams are flushed.
 */
export interface PlaybackOptions {
  /** Whether the local user is connected to voice; streams are queued until true. */
  isConnected: boolean;
  /** Master volume multiplier applied on top of per-user volume (assumed 0.0–1.0 scale — TODO confirm with callers). */
  outputVolume: number;
  /** When true, every remote peer's gain is forced to 0 (local deafen). */
  isDeafened: boolean;
}
|
|
|
|
/**
 * Per-peer Web Audio pipeline that routes the remote MediaStream
 * through a GainNode so volume can be amplified beyond 100% (up to 200%).
 *
 * Chrome/Electron workaround: a muted HTMLAudioElement is attached to
 * the stream first so that `createMediaStreamSource` actually outputs
 * audio. The priming element itself is silent; audible output is routed
 * through a separate output element fed by
 * `GainNode -> MediaStreamDestination` so output-device switching stays
 * reliable during Linux screen sharing.
 */
interface PeerAudioPipeline {
  /** Muted "priming" element attached to the raw remote stream; never audible. */
  audioElement: HTMLAudioElement;
  /** Audible element playing the MediaStreamDestination output; this is the `setSinkId` target. */
  outputElement: HTMLAudioElement;
  /** Dedicated AudioContext for this peer; closed when the pipeline is removed. */
  context: AudioContext;
  /** One source node per live audio track of the remote stream, all feeding the gain node. */
  sourceNodes: MediaStreamAudioSourceNode[];
  /** Single gain stage controlling this peer's effective volume (0 when muted/deafened). */
  gainNode: GainNode;
}
|
|
|
|
@Injectable({ providedIn: 'root' })
export class VoicePlaybackService {
  private webrtc = inject(WebRTCService);

  // Active Web Audio graphs, one per connected peer.
  private peerPipelines = new Map<string, PeerAudioPipeline>();
  // Streams received before the local user was connected; flushed by playPendingStreams().
  private pendingRemoteStreams = new Map<string, MediaStream>();
  // The exact MediaStream object currently wired into each peer's pipeline,
  // used to detect when WebRTC hands us a replacement stream.
  private rawRemoteStreams = new Map<string, MediaStream>();
  // Per-user volume in percent (0–200); absent key means 100.
  private userVolumes = new Map<string, number>();
  // Per-user local mute flag; absent key means not muted.
  private userMuted = new Map<string, boolean>();
  // Output device chosen by the user; 'default' when unset.
  private preferredOutputDeviceId = 'default';
  // When non-null, overrides preferredOutputDeviceId (forced by the WebRTC layer).
  private temporaryOutputDeviceId: string | null = null;
  // Master volume multiplier applied to all peers (snapshotted from PlaybackOptions).
  private masterVolume = 1;
  // Local deafen state; forces every peer's gain to 0.
  private deafened = false;
  // True while the WebRTC layer suppresses remote playback to avoid
  // capturing it during screen share; also forces gain to 0.
  private captureEchoSuppressed = false;

  constructor() {
    this.loadPersistedVolumes();

    // React to screen-share echo suppression: silence/restore all peers.
    effect(() => {
      this.captureEchoSuppressed = this.webrtc.isScreenShareRemotePlaybackSuppressed();
      this.recalcAllGains();
    });

    // React to the WebRTC layer forcing playback onto the default output device.
    effect(() => {
      this.temporaryOutputDeviceId = this.webrtc.forceDefaultRemotePlaybackOutput()
        ? 'default'
        : null;
      void this.applyEffectiveOutputDeviceToAllPipelines();
    });
  }

  /**
   * Wire up (or re-wire) playback for a peer's remote stream.
   *
   * Not connected yet → queue the stream for playPendingStreams().
   * Stream has no audio tracks → tear down any existing pipeline.
   * Otherwise → rebuild the pipeline from scratch, snapshotting the
   * caller's master volume and deafen state.
   */
  handleRemoteStream(peerId: string, stream: MediaStream, options: PlaybackOptions): void {
    if (!options.isConnected) {
      this.pendingRemoteStreams.set(peerId, stream);
      return;
    }

    if (!this.hasAudio(stream)) {
      this.rawRemoteStreams.delete(peerId);
      this.removePipeline(peerId);
      return;
    }

    // Always rebuild: an existing pipeline may be bound to a stale stream.
    this.removePipeline(peerId);
    this.rawRemoteStreams.set(peerId, stream);
    this.masterVolume = options.outputVolume;
    this.deafened = options.isDeafened;
    this.createPipeline(peerId, stream);
  }

  /** Drop all playback state for a peer (queued, tracked, and live pipeline). */
  removeRemoteAudio(peerId: string): void {
    this.pendingRemoteStreams.delete(peerId);
    this.rawRemoteStreams.delete(peerId);
    this.removePipeline(peerId);
  }

  /** Flush streams that arrived before connection; no-op until connected. */
  playPendingStreams(options: PlaybackOptions): void {
    if (!options.isConnected)
      return;

    this.pendingRemoteStreams.forEach((stream, peerId) => this.handleRemoteStream(peerId, stream, options));
    this.pendingRemoteStreams.clear();
  }

  /**
   * Reconcile pipelines against the WebRTC layer: for every connected peer
   * whose current voice stream is not the one we have wired up, rebuild
   * its pipeline. Safety net for missed/replaced streams.
   */
  ensureAllRemoteStreamsPlaying(options: PlaybackOptions): void {
    if (!options.isConnected)
      return;

    const peers = this.webrtc.getConnectedPeers();

    for (const peerId of peers) {
      const stream = this.webrtc.getRemoteVoiceStream(peerId);

      if (stream && this.hasAudio(stream)) {
        const trackedRaw = this.rawRemoteStreams.get(peerId);

        // Rebuild when untracked or when WebRTC swapped in a new stream object.
        if (!trackedRaw || trackedRaw !== stream) {
          this.handleRemoteStream(peerId, stream, options);
        }
      }
    }
  }

  /** Set the master volume multiplier and reapply gains to all peers. */
  updateOutputVolume(volume: number): void {
    this.masterVolume = volume;
    this.recalcAllGains();
  }

  /** Set the local deafen state and reapply gains to all peers. */
  updateDeafened(isDeafened: boolean): void {
    this.deafened = isDeafened;
    this.recalcAllGains();
  }

  /** Per-user volume in percent; defaults to 100 when never set. */
  getUserVolume(peerId: string): number {
    return this.userVolumes.get(peerId) ?? 100;
  }

  /** Set a per-user volume (clamped to 0–200%), apply it, and persist. */
  setUserVolume(peerId: string, volume: number): void {
    const clamped = Math.max(0, Math.min(200, volume));

    this.userVolumes.set(peerId, clamped);
    this.applyGain(peerId);
    this.persistVolumes();
  }

  /** Whether this user is locally muted; defaults to false. */
  isUserMuted(peerId: string): boolean {
    return this.userMuted.get(peerId) ?? false;
  }

  /** Set a per-user local mute, apply it, and persist. */
  setUserMuted(peerId: string, muted: boolean): void {
    this.userMuted.set(peerId, muted);
    this.applyGain(peerId);
    this.persistVolumes();
  }

  /** Choose the preferred output device (falls back to 'default') and retarget all pipelines. */
  applyOutputDevice(deviceId: string): void {
    this.preferredOutputDeviceId = deviceId || 'default';
    void this.applyEffectiveOutputDeviceToAllPipelines();
  }

  /** Tear down every pipeline and clear all stream bookkeeping. */
  teardownAll(): void {
    // Deleting entries during Map.forEach is safe in JS; removePipeline
    // deletes only the entry currently being visited.
    this.peerPipelines.forEach((_pipeline, peerId) => this.removePipeline(peerId));
    this.peerPipelines.clear();
    this.rawRemoteStreams.clear();
    this.pendingRemoteStreams.clear();
  }

  /**
   * Build the Web Audio graph for a remote peer:
   *
   * remoteStream
   *     ↓
   * muted <audio> element (Chrome workaround - primes the stream)
   *     ↓
   * MediaStreamSource → GainNode → MediaStreamDestination → output <audio>
   */
  private createPipeline(peerId: string, stream: MediaStream): void {
    // Chromium/Electron needs a muted <audio> element before Web Audio can read the stream.
    const audioEl = new Audio();
    const outputEl = new Audio();
    // Ended tracks produce no audio; only wire up live ones.
    const audioTracks = stream.getAudioTracks().filter((track) => track.readyState === 'live');

    audioEl.srcObject = stream;
    audioEl.muted = true;
    // Autoplay may be rejected before a user gesture; ignore — gesture handlers retry elsewhere.
    audioEl.play().catch(() => {});

    const ctx = new AudioContext();
    const gainNode = ctx.createGain();
    const mediaDestination = ctx.createMediaStreamDestination();
    // One source per track, each wrapped in its own MediaStream.
    const sourceNodes = audioTracks.map((track) => ctx.createMediaStreamSource(new MediaStream([track])));

    sourceNodes.forEach((sourceNode) => sourceNode.connect(gainNode));
    gainNode.connect(mediaDestination);

    // Audible path: the destination stream plays through a normal element so
    // setSinkId() can retarget output devices.
    outputEl.srcObject = mediaDestination.stream;
    outputEl.muted = false;
    outputEl.volume = 1;
    outputEl.play().catch(() => {});

    const pipeline: PeerAudioPipeline = {
      audioElement: audioEl,
      outputElement: outputEl,
      context: ctx,
      sourceNodes,
      gainNode
    };

    this.peerPipelines.set(peerId, pipeline);

    this.applyGain(peerId);
    void this.applyEffectiveOutputDeviceToPipeline(pipeline);
  }

  /** Re-apply the effective output device to every live pipeline in parallel. */
  private async applyEffectiveOutputDeviceToAllPipelines(): Promise<void> {
    await Promise.all(Array.from(this.peerPipelines.values(), (pipeline) =>
      this.applyEffectiveOutputDeviceToPipeline(pipeline)
    ));
  }

  /**
   * Route one pipeline's audible element to the effective output device.
   * setSinkId is feature-detected (not available in all Chromium/Electron
   * builds) and its failures are swallowed — playback continues on the
   * previous device.
   */
  private async applyEffectiveOutputDeviceToPipeline(pipeline: PeerAudioPipeline): Promise<void> {
    const deviceId = this.getEffectiveOutputDeviceId();

    if (!deviceId) {
      return;
    }

    // eslint-disable-next-line
    const anyAudio = pipeline.outputElement as any;
    const tasks: Promise<unknown>[] = [];

    if (typeof anyAudio.setSinkId === 'function') {
      tasks.push(anyAudio.setSinkId(deviceId).catch(() => undefined));
    }

    if (tasks.length > 0) {
      await Promise.all(tasks);
    }
  }

  /** Temporary override (forced default) wins over the user's preferred device. */
  private getEffectiveOutputDeviceId(): string {
    return this.temporaryOutputDeviceId ?? this.preferredOutputDeviceId;
  }

  /**
   * Fully dismantle a peer's pipeline: disconnect the graph, detach and
   * remove both <audio> elements, close the AudioContext, and drop the map
   * entry. Idempotent — no-op when no pipeline exists.
   */
  private removePipeline(peerId: string): void {
    const pipeline = this.peerPipelines.get(peerId);

    if (!pipeline)
      return;

    try {
      pipeline.gainNode.disconnect();
    } catch {
      // nodes may already be disconnected
    }

    pipeline.sourceNodes.forEach((sourceNode) => {
      try {
        sourceNode.disconnect();
      } catch {
        // nodes may already be disconnected
      }
    });

    // Detaching srcObject stops element playback; remove() detaches from DOM if attached.
    pipeline.audioElement.srcObject = null;
    pipeline.audioElement.remove();
    pipeline.outputElement.srcObject = null;
    pipeline.outputElement.remove();

    if (pipeline.context.state !== 'closed') {
      pipeline.context.close().catch(() => {});
    }

    this.peerPipelines.delete(peerId);
  }

  /**
   * Compute and set one peer's gain:
   * 0 when deafened, echo-suppressed, or user-muted;
   * otherwise masterVolume × (per-user percent / 100).
   */
  private applyGain(peerId: string): void {
    const pipeline = this.peerPipelines.get(peerId);

    if (!pipeline)
      return;

    if (this.deafened || this.captureEchoSuppressed || this.isUserMuted(peerId)) {
      pipeline.gainNode.gain.value = 0;
      return;
    }

    const userVol = this.getUserVolume(peerId) / 100; // 0.0-2.0
    const effective = this.masterVolume * userVol;

    pipeline.gainNode.gain.value = effective;
  }

  /** Re-run applyGain for every live pipeline. */
  private recalcAllGains(): void {
    this.peerPipelines.forEach((_pipeline, peerId) => this.applyGain(peerId));
  }

  /**
   * Persist per-user volume/mute settings to localStorage as
   * { [peerId]: { volume, muted } }. Best-effort: storage failures are ignored.
   */
  private persistVolumes(): void {
    try {
      const data: Record<string, { volume: number; muted: boolean }> = {};

      this.userVolumes.forEach((vol, id) => {
        data[id] = { volume: vol, muted: this.userMuted.get(id) ?? false };
      });

      // Also persist any muted-only entries
      this.userMuted.forEach((muted, id) => {
        if (!data[id]) {
          data[id] = { volume: 100, muted };
        }
      });

      localStorage.setItem(STORAGE_KEY_USER_VOLUMES, JSON.stringify(data));
    } catch {
      // localStorage not available
    }
  }

  /**
   * Load persisted volume/mute settings. Entries are validated loosely:
   * volume must be a number, mute is taken truthy-only. Any parse or
   * storage error leaves the in-memory maps untouched.
   * NOTE(review): loaded volumes are not re-clamped to 0–200 here; they
   * are assumed to have been clamped by setUserVolume when persisted.
   */
  private loadPersistedVolumes(): void {
    try {
      const raw = localStorage.getItem(STORAGE_KEY_USER_VOLUMES);

      if (!raw)
        return;

      const data = JSON.parse(raw) as Record<string, { volume: number; muted: boolean }>;

      Object.entries(data).forEach(([id, entry]) => {
        if (typeof entry.volume === 'number') {
          this.userVolumes.set(id, entry.volume);
        }

        if (entry.muted) {
          this.userMuted.set(id, true);
        }
      });
    } catch {
      // corrupted data - ignore
    }
  }

  /** True when the stream carries at least one audio track (live or not). */
  private hasAudio(stream: MediaStream): boolean {
    return stream.getAudioTracks().length > 0;
  }
}
|