Toju/src/app/core/services/webrtc/screen-share.manager.ts

/* eslint-disable @typescript-eslint/no-explicit-any, @typescript-eslint/no-non-null-assertion, @typescript-eslint/member-ordering, id-length, id-denylist, max-statements-per-line, max-len */
/**
* Manages screen sharing: getDisplayMedia / Electron desktop capturer,
* mixed audio (screen + mic), and attaching screen tracks to peers.
*/
import { WebRTCLogger } from './webrtc-logger';
import { PeerData } from './webrtc.types';
import {
TRACK_KIND_AUDIO,
TRACK_KIND_VIDEO,
TRANSCEIVER_SEND_RECV,
TRANSCEIVER_RECV_ONLY,
SCREEN_SHARE_IDEAL_WIDTH,
SCREEN_SHARE_IDEAL_HEIGHT,
SCREEN_SHARE_IDEAL_FRAME_RATE,
ELECTRON_ENTIRE_SCREEN_SOURCE_NAME
} from './webrtc.constants';
/**
* Callbacks the ScreenShareManager needs from the owning service.
*/
export interface ScreenShareCallbacks {
getActivePeers(): Map<string, PeerData>;
getLocalMediaStream(): MediaStream | null;
renegotiate(peerId: string): Promise<void>;
broadcastCurrentStates(): void;
}
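/*
* Illustrative sketch (not part of this file): how an owning service might satisfy
* ScreenShareCallbacks. The `webrtcService` object, its `peers` map, `localStream`
* field and `negotiationManager` are assumptions made only for this example.
*
*   const screenShareManager = new ScreenShareManager(logger, {
*     getActivePeers: () => webrtcService.peers,
*     getLocalMediaStream: () => webrtcService.localStream,
*     renegotiate: (peerId) => webrtcService.negotiationManager.renegotiate(peerId),
*     broadcastCurrentStates: () => webrtcService.broadcastCurrentStates()
*   });
*/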
export class ScreenShareManager {
/** The active screen-capture stream. */
private activeScreenStream: MediaStream | null = null;
/** Mixed audio stream (screen audio + mic). */
private combinedAudioStream: MediaStream | null = null;
/** AudioContext used to mix screen + mic audio. */
private audioMixingContext: AudioContext | null = null;
/** Whether screen sharing is currently active. */
private isScreenActive = false;
constructor(
private readonly logger: WebRTCLogger,
private callbacks: ScreenShareCallbacks
) {}
/**
* Replace the callbacks after construction.
* Needed because the managers initialise each other circularly, so the final
* callbacks can only be wired once every manager exists.
*
* @param cb - The new callback interface to wire into this manager.
*/
setCallbacks(cb: ScreenShareCallbacks): void {
this.callbacks = cb;
}
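/*
* Illustrative wiring order (assumed caller code, based on the circular-initialisation
* note above): construct the manager with placeholder callbacks, then re-wire it once
* the managers it depends on exist.
*
*   const screenShare = new ScreenShareManager(logger, placeholderCallbacks);
*   // ... construct the other managers that the real callbacks delegate to ...
*   screenShare.setCallbacks(realCallbacks);
*/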
/** Returns the current screen-capture stream, or `null` if inactive. */
getScreenStream(): MediaStream | null { return this.activeScreenStream; }
/** Whether screen sharing is currently active. */
getIsScreenActive(): boolean { return this.isScreenActive; }
/**
* Begin screen sharing.
*
* Tries the Electron desktop capturer API first (for Electron builds),
* then falls back to standard `getDisplayMedia`.
* Optionally includes system audio mixed with the microphone.
*
* @param includeSystemAudio - Whether to capture system / tab audio alongside the video.
* @returns The captured screen {@link MediaStream}.
* @throws If both Electron and browser screen capture fail.
*/
async startScreenShare(includeSystemAudio = false): Promise<MediaStream> {
try {
this.logger.info('startScreenShare invoked', { includeSystemAudio });
// Try Electron desktop capturer first
if (typeof window !== 'undefined' && (window as any).electronAPI?.getSources) {
try {
const sources = await (window as any).electronAPI.getSources();
const screenSource = sources.find((s: any) => s.name === ELECTRON_ENTIRE_SCREEN_SOURCE_NAME) || sources[0];
const electronConstraints: any = {
video: { mandatory: { chromeMediaSource: 'desktop', chromeMediaSourceId: screenSource.id } }
};
if (includeSystemAudio) {
electronConstraints.audio = { mandatory: { chromeMediaSource: 'desktop', chromeMediaSourceId: screenSource.id } };
} else {
electronConstraints.audio = false;
}
this.logger.info('desktopCapturer constraints', electronConstraints);
if (!navigator.mediaDevices?.getUserMedia) {
throw new Error('navigator.mediaDevices is not available (requires HTTPS or localhost).');
}
this.activeScreenStream = await navigator.mediaDevices.getUserMedia(electronConstraints);
} catch (e) {
this.logger.warn('Electron desktop capture failed; falling back to getDisplayMedia', e as any);
}
}
// Fallback to standard getDisplayMedia
if (!this.activeScreenStream) {
const displayConstraints: DisplayMediaStreamOptions = {
video: {
width: { ideal: SCREEN_SHARE_IDEAL_WIDTH },
height: { ideal: SCREEN_SHARE_IDEAL_HEIGHT },
frameRate: { ideal: SCREEN_SHARE_IDEAL_FRAME_RATE }
},
audio: includeSystemAudio
? { echoCancellation: false, noiseSuppression: false, autoGainControl: false }
: false
} as any;
this.logger.info('getDisplayMedia constraints', displayConstraints);
if (!navigator.mediaDevices) {
throw new Error('navigator.mediaDevices is not available (requires HTTPS or localhost).');
}
this.activeScreenStream = await (navigator.mediaDevices as any).getDisplayMedia(displayConstraints);
}
this.logger.logStream('screen', this.activeScreenStream);
// Prepare mixed audio if system audio is included
this.prepareMixedAudio(includeSystemAudio);
// Attach tracks to peers
this.attachScreenTracksToPeers(includeSystemAudio);
this.isScreenActive = true;
// Auto-stop when user ends share via browser UI
const screenVideoTrack = this.activeScreenStream!.getVideoTracks()[0];
if (screenVideoTrack) {
screenVideoTrack.onended = () => {
this.logger.warn('Screen video track ended');
this.stopScreenShare();
};
}
return this.activeScreenStream!;
} catch (error) {
this.logger.error('Failed to start screen share', error);
throw error;
}
}
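/*
* Illustrative call site (assumed component code, not part of this file):
*
*   try {
*     const stream = await screenShareManager.startScreenShare(true); // include system audio
*     previewVideoElement.srcObject = stream;
*   } catch (err) {
*     // Both Electron capture and getDisplayMedia failed, or the user dismissed the picker.
*   }
*
* Note that the user cancelling the share picker rejects getDisplayMedia, so it
* also surfaces here as a thrown error.
*/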
/**
* Stop screen sharing and restore the microphone audio track on all peers.
*
* Stops all screen-capture tracks, tears down mixed audio,
* resets video transceivers to receive-only, and triggers renegotiation.
*/
stopScreenShare(): void {
if (this.activeScreenStream) {
this.activeScreenStream.getTracks().forEach((track) => track.stop());
this.activeScreenStream = null;
this.isScreenActive = false;
this.callbacks.broadcastCurrentStates();
}
// Clean up mixed audio
if (this.combinedAudioStream) {
try {
this.combinedAudioStream.getTracks().forEach((track) => track.stop());
} catch (error) {
this.logger.warn('Failed to stop combined screen-share audio tracks', error as any);
}
this.combinedAudioStream = null;
}
// Remove video track and restore mic on all peers
this.callbacks.getActivePeers().forEach((peerData, peerId) => {
const transceivers = peerData.connection.getTransceivers();
const videoTransceiver = transceivers.find(transceiver => transceiver.sender === peerData.videoSender || transceiver.sender === peerData.screenVideoSender);
if (videoTransceiver) {
videoTransceiver.sender.replaceTrack(null).catch((error) => {
this.logger.error('Failed to clear screen video sender track', error, { peerId });
});
if (videoTransceiver.direction === TRANSCEIVER_SEND_RECV) {
videoTransceiver.direction = TRANSCEIVER_RECV_ONLY;
}
}
peerData.screenVideoSender = undefined;
peerData.screenAudioSender = undefined;
// Restore mic track
const micTrack = this.callbacks.getLocalMediaStream()?.getAudioTracks()[0] || null;
if (micTrack) {
let audioSender = peerData.audioSender || peerData.connection.getSenders().find(s => s.track?.kind === TRACK_KIND_AUDIO);
if (!audioSender) {
const transceiver = peerData.connection.addTransceiver(TRACK_KIND_AUDIO, { direction: TRANSCEIVER_SEND_RECV });
audioSender = transceiver.sender;
}
peerData.audioSender = audioSender;
audioSender.replaceTrack(micTrack).catch((error) => this.logger.error('Restore mic replaceTrack failed', error));
}
this.callbacks.renegotiate(peerId);
});
}
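/*
* Illustrative toggle (assumed caller code): stopScreenShare() is synchronous while
* startScreenShare() is async, so a simple toggle looks like
*
*   if (screenShareManager.getIsScreenActive()) {
*     screenShareManager.stopScreenShare();
*   } else {
*     await screenShareManager.startScreenShare(includeSystemAudio);
*   }
*/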
/**
* Create a mixed audio stream from screen audio + microphone audio
* using the Web Audio API ({@link AudioContext}).
*
* Falls back to screen-audio-only if mixing fails.
*
* @param includeSystemAudio - Whether system audio should be mixed in.
*/
private prepareMixedAudio(includeSystemAudio: boolean): void {
const screenAudioTrack = includeSystemAudio ? (this.activeScreenStream?.getAudioTracks()[0] || null) : null;
const micAudioTrack = this.callbacks.getLocalMediaStream()?.getAudioTracks()[0] || null;
if (includeSystemAudio && screenAudioTrack) {
try {
if (!this.audioMixingContext && (window as any).AudioContext) {
this.audioMixingContext = new (window as any).AudioContext();
}
if (!this.audioMixingContext)
throw new Error('AudioContext not available');
const destination = this.audioMixingContext.createMediaStreamDestination();
const screenAudioSource = this.audioMixingContext.createMediaStreamSource(new MediaStream([screenAudioTrack]));
screenAudioSource.connect(destination);
if (micAudioTrack) {
const micAudioSource = this.audioMixingContext.createMediaStreamSource(new MediaStream([micAudioTrack]));
micAudioSource.connect(destination);
this.logger.info('Mixed mic + screen audio together');
}
this.combinedAudioStream = destination.stream;
this.logger.logStream('combinedAudio(screen+mic)', this.combinedAudioStream);
} catch (e) {
this.logger.warn('Mixed audio creation failed; fallback to screen audio only', e as any);
this.combinedAudioStream = screenAudioTrack ? new MediaStream([screenAudioTrack]) : null;
this.logger.logStream('combinedAudio(fallback)', this.combinedAudioStream);
}
} else {
this.combinedAudioStream = null;
}
}
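/*
* The mixing above builds a small Web Audio graph, roughly:
*
*   screen audio track --> MediaStreamAudioSourceNode --\
*                                                        +--> MediaStreamAudioDestinationNode --> combinedAudioStream
*   mic audio track ------> MediaStreamAudioSourceNode --/
*
* The destination node exposes a single mixed audio track, which is then sent to
* peers in place of the raw microphone track.
*/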
/**
* Attach screen video and (optionally) combined audio tracks to all
* active peer connections, then trigger SDP renegotiation.
*
* @param includeSystemAudio - Whether the combined audio track should replace the mic sender.
*/
private attachScreenTracksToPeers(includeSystemAudio: boolean): void {
this.callbacks.getActivePeers().forEach((peerData, peerId) => {
if (!this.activeScreenStream)
return;
const screenVideoTrack = this.activeScreenStream.getVideoTracks()[0];
if (!screenVideoTrack)
return;
this.logger.attachTrackDiagnostics(screenVideoTrack, `screenVideo:${peerId}`);
// Use primary video sender/transceiver
let videoSender = peerData.videoSender || peerData.connection.getSenders().find(s => s.track?.kind === TRACK_KIND_VIDEO);
if (!videoSender) {
const videoTransceiver = peerData.connection.addTransceiver(TRACK_KIND_VIDEO, { direction: TRANSCEIVER_SEND_RECV });
videoSender = videoTransceiver.sender;
peerData.videoSender = videoSender;
} else {
const transceivers = peerData.connection.getTransceivers();
const videoTransceiver = transceivers.find(t => t.sender === videoSender);
if (videoTransceiver?.direction === TRANSCEIVER_RECV_ONLY) {
videoTransceiver.direction = TRANSCEIVER_SEND_RECV;
}
}
peerData.screenVideoSender = videoSender;
videoSender.replaceTrack(screenVideoTrack)
.then(() => this.logger.info('screen video replaceTrack ok', { peerId }))
.catch((e) => this.logger.error('screen video replaceTrack failed', e));
// Audio handling
const micTrack = this.callbacks.getLocalMediaStream()?.getAudioTracks()[0] || null;
if (includeSystemAudio) {
const combinedTrack = this.combinedAudioStream?.getAudioTracks()[0] || null;
if (combinedTrack) {
this.logger.attachTrackDiagnostics(combinedTrack, `combinedAudio:${peerId}`);
let audioSender = peerData.audioSender || peerData.connection.getSenders().find(s => s.track?.kind === TRACK_KIND_AUDIO);
if (!audioSender) {
const transceiver = peerData.connection.addTransceiver(TRACK_KIND_AUDIO, { direction: TRANSCEIVER_SEND_RECV });
audioSender = transceiver.sender;
}
peerData.audioSender = audioSender;
audioSender.replaceTrack(combinedTrack)
.then(() => this.logger.info('screen audio(combined) replaceTrack ok', { peerId }))
.catch((e) => this.logger.error('audio replaceTrack (combined) failed', e));
}
} else if (micTrack) {
this.logger.attachTrackDiagnostics(micTrack, `micAudio:${peerId}`);
let audioSender = peerData.audioSender || peerData.connection.getSenders().find(s => s.track?.kind === TRACK_KIND_AUDIO);
if (!audioSender) {
const transceiver = peerData.connection.addTransceiver(TRACK_KIND_AUDIO, { direction: TRANSCEIVER_SEND_RECV });
audioSender = transceiver.sender;
}
peerData.audioSender = audioSender;
audioSender.replaceTrack(micTrack)
.then(() => this.logger.info('screen audio(mic) replaceTrack ok', { peerId }))
.catch((e) => this.logger.error('audio replaceTrack (mic) failed', e));
}
this.callbacks.renegotiate(peerId);
});
}
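/*
* Design note (as read from the code above): the screen video track reuses the
* existing primary video sender via replaceTrack() instead of adding a dedicated
* screen transceiver, which keeps the number of SDP m-lines stable per peer.
* Only the transceiver direction may flip from receive-only to send/receive, and
* that change is what the per-peer renegotiate() call covers.
*/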
/** Clean up all resources. */
destroy(): void {
this.stopScreenShare();
if (this.audioMixingContext) {
try {
this.audioMixingContext.close();
} catch (error) {
this.logger.warn('Failed to close audio mixing context during destroy', error as any);
}
this.audioMixingContext = null;
}
}
}