Files
Toju/src/app/core/services/webrtc/screen-share.manager.ts
2026-03-09 23:02:52 +01:00

1065 lines
36 KiB
TypeScript

/* eslint-disable @typescript-eslint/no-non-null-assertion, @typescript-eslint/member-ordering, id-denylist */
/**
* Manages screen sharing: getDisplayMedia / Electron desktop capturer,
* system-audio capture, and attaching screen tracks to peers.
*/
import { WebRTCLogger } from './webrtc-logger';
import { PeerData } from './webrtc.types';
import {
TRACK_KIND_AUDIO,
TRACK_KIND_VIDEO,
TRANSCEIVER_SEND_RECV,
TRANSCEIVER_RECV_ONLY,
ELECTRON_ENTIRE_SCREEN_SOURCE_NAME
} from './webrtc.constants';
import {
DEFAULT_SCREEN_SHARE_START_OPTIONS,
SCREEN_SHARE_QUALITY_PRESETS,
ScreenShareQualityPreset,
ScreenShareStartOptions
} from './screen-share.config';
/**
* Callbacks the ScreenShareManager needs from the owning service.
*/
/**
 * Callbacks the ScreenShareManager needs from the owning service.
 */
export interface ScreenShareCallbacks {
  /** Returns the currently active peer connections, keyed by peer id. */
  getActivePeers(): Map<string, PeerData>;
  /** Returns the local user-media stream, if any. (Not referenced by this manager's visible code.) */
  getLocalMediaStream(): MediaStream | null;
  /** Triggers SDP renegotiation with the given peer. */
  renegotiate(peerId: string): Promise<void>;
  /** Broadcasts the local media states (e.g. screen-share active/inactive) to peers. */
  broadcastCurrentStates(): void;
}
/**
 * Status report from the Electron main process about the Linux
 * PulseAudio/PipeWire audio-routing feature.
 */
interface LinuxScreenShareAudioRoutingInfo {
  /** Whether the routing feature is usable at all; when false, `reason` explains why. */
  available: boolean;
  /** Whether the routing is currently switched on (checked after activation). */
  active: boolean;
  /** Whether monitor-source capture is supported; false requires an app restart (see assertLinuxAudioRoutingReady). */
  monitorCaptureSupported: boolean;
  // Names of the PulseAudio sinks/sources involved in the routing.
  // NOTE(review): only voiceSinkName is read here (for logging); the others
  // are presumably consumed by the main process — confirm against preload code.
  screenShareSinkName: string;
  screenShareMonitorSourceName: string;
  voiceSinkName: string;
  /** Optional human-readable explanation when unavailable or inactive. */
  reason?: string;
}
/**
 * PCM format description returned when a Linux monitor-source capture starts.
 * Only 16-bit samples are accepted downstream (see handleLinuxScreenShareMonitorAudioChunk).
 */
interface LinuxScreenShareMonitorCaptureInfo {
  /** Sample width in bits; the renderer pipeline only decodes 16. */
  bitsPerSample: number;
  /** Identifier matching subsequent chunk/ended payloads to this capture. */
  captureId: string;
  /** Number of interleaved channels per frame. */
  channelCount: number;
  /** Sample rate in Hz; used to create the matching AudioContext. */
  sampleRate: number;
  /** Name of the PulseAudio monitor source being captured (logged for diagnostics). */
  sourceName: string;
}
/** One raw PCM chunk delivered from the Electron main process. */
interface LinuxScreenShareMonitorAudioChunkPayload {
  /** Capture this chunk belongs to; chunks for other captures are queued or ignored. */
  captureId: string;
  /** Raw interleaved PCM bytes (assumed s16le — see the decoder). */
  chunk: Uint8Array;
}
/** Notification that a monitor capture ended on the main-process side. */
interface LinuxScreenShareMonitorAudioEndedPayload {
  /** Capture that ended. */
  captureId: string;
  /** Optional human-readable cause. */
  reason?: string;
}
/**
 * Renderer-side decoding pipeline that turns raw PCM chunks from the
 * Linux monitor capture into a live MediaStreamTrack via Web Audio.
 */
interface LinuxScreenShareMonitorAudioPipeline {
  /** AudioContext created at the capture's sample rate. */
  audioContext: AudioContext;
  /** The outgoing track taken from `mediaDestination.stream`. */
  audioTrack: MediaStreamTrack;
  /** Sample width in bits (copied from the capture info; must be 16 to decode). */
  bitsPerSample: number;
  /** Capture this pipeline is bound to. */
  captureId: string;
  /** Interleaved channels per frame. */
  channelCount: number;
  /** Destination node whose stream backs `audioTrack`. */
  mediaDestination: MediaStreamAudioDestinationNode;
  /** Next scheduled playback time (AudioContext clock) for gapless chunk playback. */
  nextStartTime: number;
  /** Leftover bytes from a chunk that did not end on a frame boundary. */
  pendingBytes: Uint8Array;
  /** Sample rate in Hz. */
  sampleRate: number;
  /** Unsubscribes the chunk listener registered with the Electron API. */
  unsubscribeChunk: () => void;
  /** Unsubscribes the ended listener registered with the Electron API. */
  unsubscribeEnded: () => void;
}
/** One capturable desktop source as reported by the Electron desktop capturer. */
interface DesktopSource {
  /** Chromium desktop-capture source id, passed as `chromeMediaSourceId`. */
  id: string;
  /** Display name; matched against ELECTRON_ENTIRE_SCREEN_SOURCE_NAME to prefer full-screen capture. */
  name: string;
  /** Thumbnail image (string-encoded; presumably a data URL — confirm against preload). */
  thumbnail: string;
}
/**
 * Subset of the Electron preload API this manager uses. All members are
 * optional because the app may also run in a plain browser.
 */
interface ScreenShareElectronApi {
  /** Lists capturable desktop sources. */
  getSources?: () => Promise<DesktopSource[]>;
  /** Probes whether Linux PulseAudio/PipeWire routing can be used. */
  prepareLinuxScreenShareAudioRouting?: () => Promise<LinuxScreenShareAudioRoutingInfo>;
  /** Switches the Linux audio routing on. */
  activateLinuxScreenShareAudioRouting?: () => Promise<LinuxScreenShareAudioRoutingInfo>;
  /** Switches the Linux audio routing off. */
  deactivateLinuxScreenShareAudioRouting?: () => Promise<boolean>;
  /** Starts streaming PCM from the monitor source; resolves with the capture format. */
  startLinuxScreenShareMonitorCapture?: () => Promise<LinuxScreenShareMonitorCaptureInfo>;
  /** Stops the monitor capture (optionally a specific capture id). */
  stopLinuxScreenShareMonitorCapture?: (captureId?: string) => Promise<boolean>;
  /** Subscribes to PCM chunks; returns an unsubscribe function. */
  onLinuxScreenShareMonitorAudioChunk?: (listener: (payload: LinuxScreenShareMonitorAudioChunkPayload) => void) => () => void;
  /** Subscribes to capture-ended events; returns an unsubscribe function. */
  onLinuxScreenShareMonitorAudioEnded?: (listener: (payload: LinuxScreenShareMonitorAudioEndedPayload) => void) => () => void;
}
/**
 * Chromium's legacy `mandatory` desktop-capture video constraint shape,
 * required when feeding a desktopCapturer source id to getUserMedia.
 */
type ElectronDesktopVideoConstraint = MediaTrackConstraints & {
  mandatory: {
    chromeMediaSource: 'desktop';
    chromeMediaSourceId: string;
    maxWidth: number;
    maxHeight: number;
    maxFrameRate: number;
  };
};
/** Chromium's legacy `mandatory` desktop-capture audio constraint shape. */
type ElectronDesktopAudioConstraint = MediaTrackConstraints & {
  mandatory: {
    chromeMediaSource: 'desktop';
    chromeMediaSourceId: string;
  };
};
/** Full getUserMedia constraints for Electron desktop capture (video required, audio optional). */
interface ElectronDesktopMediaStreamConstraints extends MediaStreamConstraints {
  video: ElectronDesktopVideoConstraint;
  audio?: false | ElectronDesktopAudioConstraint;
}
/** `window` augmented with the Electron preload bridge, when running in Electron. */
type ScreenShareWindow = Window & {
  electronAPI?: ScreenShareElectronApi;
};
/**
 * Manages screen sharing for the WebRTC layer: capture via
 * `getDisplayMedia` or the Electron desktop capturer, optional
 * system-audio capture (including a Linux-specific PulseAudio/PipeWire
 * monitor-capture path), and attaching/detaching screen tracks on peers.
 */
export class ScreenShareManager {
  /** The active screen-capture stream. */
  private activeScreenStream: MediaStream | null = null;
  /** Optional system-audio stream captured alongside the screen. */
  private screenAudioStream: MediaStream | null = null;
  /** The quality preset currently applied to the active share. */
  private activeScreenPreset: ScreenShareQualityPreset | null = null;
  /** Remote peers that explicitly requested screen-share video. */
  private readonly requestedViewerPeerIds = new Set<string>();
  /** Whether screen sharing is currently active. */
  private isScreenActive = false;
  /** Whether Linux-specific Electron audio routing is currently active. */
  private linuxElectronAudioRoutingActive = false;
  /** Pending teardown of Linux-specific Electron audio routing. */
  private linuxAudioRoutingResetPromise: Promise<void> | null = null;
  /** Renderer-side audio pipeline for Linux monitor-source capture. */
  private linuxMonitorAudioPipeline: LinuxScreenShareMonitorAudioPipeline | null = null;

  /**
   * @param logger - Diagnostics sink for streams/tracks and lifecycle events.
   * @param callbacks - Bridge to the owning service (peers, renegotiation, state broadcast).
   */
  constructor(
    private readonly logger: WebRTCLogger,
    private callbacks: ScreenShareCallbacks
  ) {}

  /**
   * Replace the callback set at runtime.
   * Needed because of circular initialisation between managers.
   *
   * @param nextCallbacks - The new callback interface to wire into this manager.
   */
  setCallbacks(nextCallbacks: ScreenShareCallbacks): void {
    this.callbacks = nextCallbacks;
  }

  /** Returns the current screen-capture stream, or `null` if inactive. */
  getScreenStream(): MediaStream | null { return this.activeScreenStream; }

  /** Whether screen sharing is currently active. */
  getIsScreenActive(): boolean { return this.isScreenActive; }

  /**
   * Begin screen sharing.
   *
   * On Linux Electron builds, prefers a dedicated PulseAudio/PipeWire routing
   * path so remote voice playback is kept out of captured system audio.
   * Otherwise prefers `getDisplayMedia` when system audio is requested so the
   * browser can filter MeToYou's own playback via `restrictOwnAudio`, then
   * falls back to Electron desktop capture when needed.
   *
   * @param options - Screen-share capture options.
   * @returns The captured screen {@link MediaStream}.
   * @throws If both Electron and browser screen capture fail.
   */
  async startScreenShare(options: ScreenShareStartOptions = DEFAULT_SCREEN_SHARE_START_OPTIONS): Promise<MediaStream> {
    // Merge caller options over the defaults so partial option objects work.
    const shareOptions = {
      ...DEFAULT_SCREEN_SHARE_START_OPTIONS,
      ...options
    };
    const preset = SCREEN_SHARE_QUALITY_PRESETS[shareOptions.quality];
    try {
      this.logger.info('startScreenShare invoked', shareOptions);
      // Restart cleanly if a share is already running.
      if (this.activeScreenStream) {
        this.stopScreenShare();
      }
      // Let any in-flight Linux routing teardown finish before re-activating.
      await this.awaitPendingLinuxAudioRoutingReset();
      this.activeScreenStream = null;
      // Capture attempt #1: Linux Electron PulseAudio/PipeWire routing.
      if (shareOptions.includeSystemAudio && this.isLinuxElectronAudioRoutingSupported()) {
        try {
          this.activeScreenStream = await this.startWithLinuxElectronAudioRouting(shareOptions, preset);
        } catch (error) {
          this.logger.warn('Linux Electron audio routing failed; falling back to standard capture', error);
        }
      }
      // Capture attempt #2: getDisplayMedia, only kept if it actually delivered audio.
      if (!this.activeScreenStream && shareOptions.includeSystemAudio) {
        try {
          this.activeScreenStream = await this.startWithDisplayMedia(shareOptions, preset);
          if (this.activeScreenStream.getAudioTracks().length === 0) {
            this.logger.warn('getDisplayMedia did not provide system audio; trying Electron desktop capture');
            this.activeScreenStream.getTracks().forEach((track) => track.stop());
            this.activeScreenStream = null;
          }
        } catch (error) {
          this.logger.warn('getDisplayMedia with system audio failed; falling back to Electron desktop capture', error);
        }
      }
      // Capture attempt #3: Electron desktop capturer, when the preload API exists.
      if (!this.activeScreenStream && this.getElectronApi()?.getSources) {
        try {
          this.activeScreenStream = await this.startWithElectronDesktopCapturer(shareOptions, preset);
        } catch (error) {
          this.logger.warn('Electron desktop capture failed; falling back to getDisplayMedia', error);
        }
      }
      // Final fallback: plain getDisplayMedia (throws to the caller on failure).
      if (!this.activeScreenStream) {
        this.activeScreenStream = await this.startWithDisplayMedia(shareOptions, preset);
      }
      this.configureScreenStream(preset);
      this.prepareScreenAudio(shareOptions.includeSystemAudio);
      this.activeScreenPreset = preset;
      this.attachScreenTracksToPeers(preset);
      this.isScreenActive = true;
      this.callbacks.broadcastCurrentStates();
      // Re-read into a local so the narrowed non-null type survives the closures below.
      const activeScreenStream = this.activeScreenStream;
      if (!activeScreenStream) {
        throw new Error('Screen sharing did not produce an active stream.');
      }
      // Auto-stop when the user ends the share from the browser/OS UI.
      const screenVideoTrack = activeScreenStream.getVideoTracks()[0];
      if (screenVideoTrack) {
        screenVideoTrack.onended = () => {
          this.logger.warn('Screen video track ended');
          this.stopScreenShare();
        };
      }
      return activeScreenStream;
    } catch (error) {
      this.logger.error('Failed to start screen share', error);
      throw error;
    }
  }

  /**
   * Stop screen sharing and remove screen-share tracks on all peers.
   *
   * Stops all screen-capture tracks, resets screen transceivers to receive-only,
   * and triggers renegotiation.
   */
  stopScreenShare(): void {
    if (this.activeScreenStream) {
      this.activeScreenStream.getTracks().forEach((track) => track.stop());
      this.activeScreenStream = null;
    }
    // Linux routing teardown runs asynchronously in the background.
    this.scheduleLinuxAudioRoutingReset();
    this.screenAudioStream = null;
    this.activeScreenPreset = null;
    this.isScreenActive = false;
    this.callbacks.broadcastCurrentStates();
    this.callbacks.getActivePeers().forEach((peerData, peerId) => {
      this.detachScreenTracksFromPeer(peerData, peerId);
    });
  }

  /**
   * Mark a peer as a screen-share viewer and, if a share is already running,
   * attach the current screen tracks to that peer immediately.
   *
   * @param peerId - The requesting peer.
   */
  requestScreenShareForPeer(peerId: string): void {
    this.requestedViewerPeerIds.add(peerId);
    if (!this.isScreenActive || !this.activeScreenPreset) {
      return;
    }
    const peerData = this.callbacks.getActivePeers().get(peerId);
    if (!peerData) {
      return;
    }
    this.attachScreenTracksToPeer(peerData, peerId, this.activeScreenPreset);
  }

  /**
   * Stop sending screen-share tracks to a single peer and forget its request.
   *
   * @param peerId - The peer to detach.
   */
  stopScreenShareForPeer(peerId: string): void {
    this.requestedViewerPeerIds.delete(peerId);
    const peerData = this.callbacks.getActivePeers().get(peerId);
    if (!peerData) {
      return;
    }
    this.detachScreenTracksFromPeer(peerData, peerId);
  }

  /**
   * Forget a peer's viewer request without touching its transceivers
   * (e.g. when the peer disconnects entirely).
   *
   * @param peerId - The peer whose request is cleared.
   */
  clearScreenShareRequest(peerId: string): void {
    this.requestedViewerPeerIds.delete(peerId);
  }

  /**
   * Attach the current screen-share tracks to a newly-connected peer.
   *
   * This is needed when a peer connects after screen sharing already started,
   * because `startScreenShare()` only pushes tracks to peers that existed at
   * the time sharing began.
   */
  syncScreenShareToPeer(peerId: string): void {
    if (
      !this.requestedViewerPeerIds.has(peerId)
      || !this.isScreenActive
      || !this.activeScreenStream
      || !this.activeScreenPreset
    ) {
      return;
    }
    const peerData = this.callbacks.getActivePeers().get(peerId);
    if (!peerData) {
      return;
    }
    this.attachScreenTracksToPeer(peerData, peerId, this.activeScreenPreset);
  }

  /** Clean up all resources. */
  destroy(): void {
    this.stopScreenShare();
  }

  /** Returns the Electron preload API if running inside Electron, else `null`. */
  private getElectronApi(): ScreenShareElectronApi | null {
    return typeof window !== 'undefined'
      ? (window as ScreenShareWindow).electronAPI ?? null
      : null;
  }

  /**
   * Returns the full set of Linux-routing API functions, throwing if any
   * is missing so callers can rely on all of them being defined.
   *
   * @throws If any of the required preload functions is unavailable.
   */
  private getRequiredLinuxElectronApi(): Required<Pick<
    ScreenShareElectronApi,
    | 'prepareLinuxScreenShareAudioRouting'
    | 'activateLinuxScreenShareAudioRouting'
    | 'deactivateLinuxScreenShareAudioRouting'
    | 'startLinuxScreenShareMonitorCapture'
    | 'stopLinuxScreenShareMonitorCapture'
    | 'onLinuxScreenShareMonitorAudioChunk'
    | 'onLinuxScreenShareMonitorAudioEnded'
  >> {
    const electronApi = this.getElectronApi();
    if (!electronApi?.prepareLinuxScreenShareAudioRouting
      || !electronApi.activateLinuxScreenShareAudioRouting
      || !electronApi.deactivateLinuxScreenShareAudioRouting
      || !electronApi.startLinuxScreenShareMonitorCapture
      || !electronApi.stopLinuxScreenShareMonitorCapture
      || !electronApi.onLinuxScreenShareMonitorAudioChunk
      || !electronApi.onLinuxScreenShareMonitorAudioEnded) {
      throw new Error('Linux Electron audio routing is unavailable.');
    }
    return {
      prepareLinuxScreenShareAudioRouting: electronApi.prepareLinuxScreenShareAudioRouting,
      activateLinuxScreenShareAudioRouting: electronApi.activateLinuxScreenShareAudioRouting,
      deactivateLinuxScreenShareAudioRouting: electronApi.deactivateLinuxScreenShareAudioRouting,
      startLinuxScreenShareMonitorCapture: electronApi.startLinuxScreenShareMonitorCapture,
      stopLinuxScreenShareMonitorCapture: electronApi.stopLinuxScreenShareMonitorCapture,
      onLinuxScreenShareMonitorAudioChunk: electronApi.onLinuxScreenShareMonitorAudioChunk,
      onLinuxScreenShareMonitorAudioEnded: electronApi.onLinuxScreenShareMonitorAudioEnded
    };
  }

  /**
   * Throws unless the routing info reports both availability and
   * monitor-capture support.
   *
   * @param routingInfo - Status returned by the Electron main process.
   * @param unavailableReason - Fallback error message when no reason is given.
   * @throws When the routing is unavailable or monitor capture is unsupported.
   */
  private assertLinuxAudioRoutingReady(
    routingInfo: LinuxScreenShareAudioRoutingInfo,
    unavailableReason: string
  ): void {
    if (!routingInfo.available) {
      throw new Error(routingInfo.reason || unavailableReason);
    }
    if (!routingInfo.monitorCaptureSupported) {
      throw new Error('Linux screen-share monitor capture requires restarting the desktop app so the new Electron main process can load.');
    }
  }

  /**
   * Create a dedicated stream for system audio captured alongside the screen.
   *
   * @param includeSystemAudio - Whether system audio should be sent.
   */
  private prepareScreenAudio(includeSystemAudio: boolean): void {
    // Only the first audio track of the screen stream is considered.
    const screenAudioTrack = includeSystemAudio ? (this.activeScreenStream?.getAudioTracks()[0] || null) : null;
    if (!screenAudioTrack) {
      if (includeSystemAudio) {
        this.logger.warn('System audio was requested, but no screen audio track was captured');
      }
      this.screenAudioStream = null;
      return;
    }
    this.screenAudioStream = new MediaStream([screenAudioTrack]);
    this.logger.attachTrackDiagnostics(screenAudioTrack, 'screenAudio');
    this.logger.logStream('screenAudio', this.screenAudioStream);
  }

  /**
   * Attach screen video and optional system-audio tracks to every peer
   * that explicitly requested screen-share video, then trigger SDP
   * renegotiation per peer.
   *
   * @param preset - Selected quality preset for sender tuning.
   */
  private attachScreenTracksToPeers(
    preset: ScreenShareQualityPreset
  ): void {
    this.callbacks.getActivePeers().forEach((peerData, peerId) => {
      if (!this.requestedViewerPeerIds.has(peerId)) {
        return;
      }
      this.attachScreenTracksToPeer(peerData, peerId, preset);
    });
  }

  /**
   * Attach the screen video track (reusing or creating the peer's video
   * sender) and, when present, the system-audio track on a dedicated audio
   * sender, then renegotiate.
   *
   * @param peerData - The peer's connection state.
   * @param peerId - The peer's identifier.
   * @param preset - Quality preset used for sender encoding parameters.
   */
  private attachScreenTracksToPeer(
    peerData: PeerData,
    peerId: string,
    preset: ScreenShareQualityPreset
  ): void {
    if (!this.activeScreenStream) {
      return;
    }
    const screenVideoTrack = this.activeScreenStream.getVideoTracks()[0];
    if (!screenVideoTrack) {
      return;
    }
    this.logger.attachTrackDiagnostics(screenVideoTrack, `screenVideo:${peerId}`);
    // Reuse the existing video sender when one exists (camera sender or any
    // sender currently carrying a video track); otherwise add a transceiver.
    let videoSender = peerData.videoSender || peerData.connection.getSenders().find((sender) => sender.track?.kind === TRACK_KIND_VIDEO);
    if (!videoSender) {
      const videoTransceiver = peerData.connection.addTransceiver(TRACK_KIND_VIDEO, {
        direction: TRANSCEIVER_SEND_RECV
      });
      videoSender = videoTransceiver.sender;
      peerData.videoSender = videoSender;
    } else {
      // A previously-detached transceiver may be recvonly; flip it back.
      const videoTransceiver = peerData.connection.getTransceivers().find(
        (transceiver) => transceiver.sender === videoSender
      );
      if (videoTransceiver?.direction === TRANSCEIVER_RECV_ONLY) {
        videoTransceiver.direction = TRANSCEIVER_SEND_RECV;
      }
    }
    peerData.screenVideoSender = videoSender;
    videoSender.replaceTrack(screenVideoTrack)
      .then(() => {
        this.logger.info('screen video replaceTrack ok', { peerId });
        // Apply bitrate/framerate caps only after the track is live.
        void this.applyScreenShareVideoParameters(videoSender, preset, peerId);
      })
      .catch((error) => this.logger.error('screen video replaceTrack failed', error));
    const screenAudioTrack = this.screenAudioStream?.getAudioTracks()[0] || null;
    if (screenAudioTrack) {
      this.logger.attachTrackDiagnostics(screenAudioTrack, `screenAudio:${peerId}`);
      let screenAudioSender = peerData.screenAudioSender;
      if (!screenAudioSender) {
        const screenAudioTransceiver = peerData.connection.addTransceiver(TRACK_KIND_AUDIO, {
          direction: TRANSCEIVER_SEND_RECV
        });
        screenAudioSender = screenAudioTransceiver.sender;
      } else {
        const screenAudioTransceiver = peerData.connection.getTransceivers().find(
          (transceiver) => transceiver.sender === screenAudioSender
        );
        if (screenAudioTransceiver?.direction === TRANSCEIVER_RECV_ONLY) {
          screenAudioTransceiver.direction = TRANSCEIVER_SEND_RECV;
        }
      }
      peerData.screenAudioSender = screenAudioSender;
      screenAudioSender.replaceTrack(screenAudioTrack)
        .then(() => this.logger.info('screen audio replaceTrack ok', { peerId }))
        .catch((error) => this.logger.error('screen audio replaceTrack failed', error));
    }
    // NOTE(review): fire-and-forget — renegotiate() returns a Promise that is
    // neither awaited nor voided here; consider `void`-prefixing with a .catch.
    this.callbacks.renegotiate(peerId);
  }

  /**
   * Clear the screen video/audio tracks on a peer, flip their transceivers
   * back to receive-only, and renegotiate.
   *
   * @param peerData - The peer's connection state.
   * @param peerId - The peer's identifier (for logging/renegotiation).
   */
  private detachScreenTracksFromPeer(peerData: PeerData, peerId: string): void {
    const transceivers = peerData.connection.getTransceivers();
    // The screen video may ride on either the camera sender or a dedicated one.
    const videoTransceiver = transceivers.find(
      (transceiver) => transceiver.sender === peerData.videoSender || transceiver.sender === peerData.screenVideoSender
    );
    const screenAudioTransceiver = transceivers.find(
      (transceiver) => transceiver.sender === peerData.screenAudioSender
    );
    if (videoTransceiver) {
      videoTransceiver.sender.replaceTrack(null).catch((error) => {
        this.logger.error('Failed to clear screen video sender track', error, { peerId });
      });
      if (videoTransceiver.direction === TRANSCEIVER_SEND_RECV) {
        videoTransceiver.direction = TRANSCEIVER_RECV_ONLY;
      }
    }
    if (screenAudioTransceiver) {
      screenAudioTransceiver.sender.replaceTrack(null).catch((error) => {
        this.logger.error('Failed to clear screen audio sender track', error, { peerId });
      });
      if (screenAudioTransceiver.direction === TRANSCEIVER_SEND_RECV) {
        screenAudioTransceiver.direction = TRANSCEIVER_RECV_ONLY;
      }
    }
    peerData.screenVideoSender = undefined;
    peerData.screenAudioSender = undefined;
    // NOTE(review): fire-and-forget renegotiation (promise not handled).
    this.callbacks.renegotiate(peerId);
  }

  /**
   * Capture the screen via the standard `getDisplayMedia` API.
   *
   * @param options - Screen-share capture options.
   * @param preset - Quality preset used to build video constraints.
   * @throws If `getDisplayMedia` is unavailable or the user denies capture.
   */
  private async startWithDisplayMedia(
    options: ScreenShareStartOptions,
    preset: ScreenShareQualityPreset
  ): Promise<MediaStream> {
    const displayConstraints = this.buildDisplayMediaConstraints(options, preset);
    this.logger.info('getDisplayMedia constraints', displayConstraints);
    if (!navigator.mediaDevices?.getDisplayMedia) {
      throw new Error('navigator.mediaDevices.getDisplayMedia is not available.');
    }
    return await navigator.mediaDevices.getDisplayMedia(displayConstraints);
  }

  /**
   * Capture the screen via Electron's desktop capturer, preferring the
   * "entire screen" source and otherwise taking the first available one.
   *
   * @param options - Screen-share capture options.
   * @param preset - Quality preset used to build video constraints.
   * @throws If the Electron API, sources, or `getUserMedia` are unavailable.
   */
  private async startWithElectronDesktopCapturer(
    options: ScreenShareStartOptions,
    preset: ScreenShareQualityPreset
  ): Promise<MediaStream> {
    const electronApi = this.getElectronApi();
    if (!electronApi?.getSources) {
      throw new Error('Electron desktop capture is unavailable.');
    }
    const sources = await electronApi.getSources();
    const screenSource = sources.find((source) => source.name === ELECTRON_ENTIRE_SCREEN_SOURCE_NAME) ?? sources[0];
    if (!screenSource) {
      throw new Error('No desktop capture sources were available.');
    }
    const electronConstraints = this.buildElectronDesktopConstraints(screenSource.id, options, preset);
    this.logger.info('desktopCapturer constraints', electronConstraints);
    if (!navigator.mediaDevices?.getUserMedia) {
      throw new Error('navigator.mediaDevices.getUserMedia is not available (requires HTTPS or localhost).');
    }
    return await navigator.mediaDevices.getUserMedia(electronConstraints);
  }

  /**
   * Whether the Linux Electron routing path can be attempted: all preload
   * functions must exist and the user agent/platform must look like Linux.
   */
  private isLinuxElectronAudioRoutingSupported(): boolean {
    if (typeof window === 'undefined' || typeof navigator === 'undefined') {
      return false;
    }
    const electronApi = this.getElectronApi();
    const platformHint = `${navigator.userAgent} ${navigator.platform}`;
    return !!electronApi?.prepareLinuxScreenShareAudioRouting
      && !!electronApi?.activateLinuxScreenShareAudioRouting
      && !!electronApi?.deactivateLinuxScreenShareAudioRouting
      && !!electronApi?.startLinuxScreenShareMonitorCapture
      && !!electronApi?.stopLinuxScreenShareMonitorCapture
      && !!electronApi?.onLinuxScreenShareMonitorAudioChunk
      && !!electronApi?.onLinuxScreenShareMonitorAudioEnded
      && /linux/i.test(platformHint);
  }

  /**
   * Start screen capture using the Linux audio-routing path: activate the
   * PulseAudio/PipeWire routing, capture video (only) via the desktop
   * capturer, and build the audio track from the monitor-source pipeline.
   * On any failure the partial capture and routing are torn down.
   *
   * @param options - Screen-share capture options.
   * @param preset - Quality preset for video constraints.
   * @throws If routing preparation/activation or any capture step fails.
   */
  private async startWithLinuxElectronAudioRouting(
    options: ScreenShareStartOptions,
    preset: ScreenShareQualityPreset
  ): Promise<MediaStream> {
    const electronApi = this.getRequiredLinuxElectronApi();
    const routingInfo = await electronApi.prepareLinuxScreenShareAudioRouting();
    this.assertLinuxAudioRoutingReady(routingInfo, 'Linux Electron audio routing is unavailable.');
    let desktopStream: MediaStream | null = null;
    try {
      const activation = await electronApi.activateLinuxScreenShareAudioRouting();
      this.assertLinuxAudioRoutingReady(activation, 'Failed to activate Linux Electron audio routing.');
      if (!activation.active) {
        throw new Error(activation.reason || 'Failed to activate Linux Electron audio routing.');
      }
      // Video only: audio comes from the monitor pipeline, not desktop capture.
      desktopStream = await this.startWithElectronDesktopCapturer({
        ...options,
        includeSystemAudio: false
      }, preset);
      const { audioTrack, captureInfo } = await this.startLinuxScreenShareMonitorTrack();
      const stream = new MediaStream([...desktopStream.getVideoTracks(), audioTrack]);
      // Defensively stop any audio tracks the desktop capturer produced anyway.
      desktopStream.getAudioTracks().forEach((track) => track.stop());
      this.linuxElectronAudioRoutingActive = true;
      this.logger.info('Linux Electron screen-share audio routing enabled', {
        screenShareMonitorSourceName: captureInfo.sourceName,
        voiceSinkName: activation.voiceSinkName
      });
      return stream;
    } catch (error) {
      desktopStream?.getTracks().forEach((track) => track.stop());
      await this.resetLinuxElectronAudioRouting();
      throw error;
    }
  }

  /**
   * Kick off asynchronous teardown of the Linux routing, deduplicated so
   * only one reset runs at a time. Errors are logged, never thrown.
   */
  private scheduleLinuxAudioRoutingReset(): void {
    if (!this.linuxElectronAudioRoutingActive || this.linuxAudioRoutingResetPromise) {
      return;
    }
    this.linuxAudioRoutingResetPromise = this.resetLinuxElectronAudioRouting()
      .catch((error) => {
        this.logger.warn('Failed to reset Linux Electron audio routing', error);
      })
      .finally(() => {
        this.linuxAudioRoutingResetPromise = null;
      });
  }

  /** Waits for any scheduled Linux routing teardown to finish. */
  private async awaitPendingLinuxAudioRoutingReset(): Promise<void> {
    if (!this.linuxAudioRoutingResetPromise) {
      return;
    }
    await this.linuxAudioRoutingResetPromise;
  }

  /**
   * Tear down the Linux routing: dispose the renderer pipeline, stop the
   * monitor capture, and deactivate the main-process routing. Each step is
   * best-effort; failures are logged and do not block the next step.
   */
  private async resetLinuxElectronAudioRouting(): Promise<void> {
    const electronApi = this.getElectronApi();
    // Capture the id before disposing, since disposal nulls the pipeline.
    const captureId = this.linuxMonitorAudioPipeline?.captureId;
    this.linuxElectronAudioRoutingActive = false;
    this.disposeLinuxScreenShareMonitorAudioPipeline();
    try {
      if (captureId && electronApi?.stopLinuxScreenShareMonitorCapture) {
        await electronApi.stopLinuxScreenShareMonitorCapture(captureId);
      }
    } catch (error) {
      this.logger.warn('Failed to stop Linux screen-share monitor capture', error);
    }
    try {
      if (electronApi?.deactivateLinuxScreenShareAudioRouting) {
        await electronApi.deactivateLinuxScreenShareAudioRouting();
      }
    } catch (error) {
      this.logger.warn('Failed to deactivate Linux Electron audio routing', error);
    }
  }

  /**
   * Start the monitor-source capture and build the renderer-side Web Audio
   * pipeline that produces the screen-share audio track.
   *
   * Chunk/ended listeners are registered *before* the capture starts, so
   * events arriving during the async startup window are queued by capture id
   * and replayed once the pipeline exists.
   *
   * @returns The live audio track and the capture's PCM format info.
   * @throws If the API is unavailable, startup fails, or the capture ends
   *         before initialisation completes.
   */
  private async startLinuxScreenShareMonitorTrack(): Promise<{
    audioTrack: MediaStreamTrack;
    captureInfo: LinuxScreenShareMonitorCaptureInfo;
  }> {
    const electronApi = this.getElectronApi();
    if (!electronApi?.startLinuxScreenShareMonitorCapture
      || !electronApi?.stopLinuxScreenShareMonitorCapture
      || !electronApi?.onLinuxScreenShareMonitorAudioChunk
      || !electronApi?.onLinuxScreenShareMonitorAudioEnded) {
      throw new Error('Linux screen-share monitor capture is unavailable.');
    }
    // Events that arrive before the pipeline exists are buffered here.
    const queuedChunksByCaptureId = new Map<string, Uint8Array[]>();
    const queuedEndedReasons = new Map<string, string | undefined>();
    let pipeline: LinuxScreenShareMonitorAudioPipeline | null = null;
    let captureInfo: LinuxScreenShareMonitorCaptureInfo | null = null;
    const queueChunk = (captureId: string, chunk: Uint8Array): void => {
      const queuedChunks = queuedChunksByCaptureId.get(captureId) || [];
      // Copy: the underlying buffer may be reused by the IPC layer.
      queuedChunks.push(this.copyLinuxMonitorAudioBytes(chunk));
      queuedChunksByCaptureId.set(captureId, queuedChunks);
    };
    const onChunk = (payload: LinuxScreenShareMonitorAudioChunkPayload): void => {
      if (!pipeline || payload.captureId !== pipeline.captureId) {
        queueChunk(payload.captureId, payload.chunk);
        return;
      }
      this.handleLinuxScreenShareMonitorAudioChunk(pipeline, payload.chunk);
    };
    const onEnded = (payload: LinuxScreenShareMonitorAudioEndedPayload): void => {
      if (!pipeline || payload.captureId !== pipeline.captureId) {
        queuedEndedReasons.set(payload.captureId, payload.reason);
        return;
      }
      this.logger.warn('Linux screen-share monitor capture ended', payload);
      // Only react if this is still the active pipeline's capture.
      if (this.isScreenActive && this.linuxMonitorAudioPipeline?.captureId === payload.captureId) {
        this.stopScreenShare();
      }
    };
    const unsubscribeChunk = electronApi.onLinuxScreenShareMonitorAudioChunk(onChunk) as () => void;
    const unsubscribeEnded = electronApi.onLinuxScreenShareMonitorAudioEnded(onEnded) as () => void;
    try {
      captureInfo = await electronApi.startLinuxScreenShareMonitorCapture() as LinuxScreenShareMonitorCaptureInfo;
      const audioContext = new AudioContext({ sampleRate: captureInfo.sampleRate });
      const mediaDestination = audioContext.createMediaStreamDestination();
      await audioContext.resume();
      const audioTrack = mediaDestination.stream.getAudioTracks()[0];
      if (!audioTrack) {
        // NOTE(review): audioContext created above is not closed on this path
        // (pipeline is still null in the catch) — potential leak; confirm.
        throw new Error('Renderer audio pipeline did not produce a screen-share monitor track.');
      }
      pipeline = {
        audioContext,
        audioTrack,
        bitsPerSample: captureInfo.bitsPerSample,
        captureId: captureInfo.captureId,
        channelCount: captureInfo.channelCount,
        mediaDestination,
        // 50 ms of scheduling headroom before the first chunk plays.
        nextStartTime: audioContext.currentTime + 0.05,
        pendingBytes: new Uint8Array(0),
        sampleRate: captureInfo.sampleRate,
        unsubscribeChunk,
        unsubscribeEnded
      };
      this.linuxMonitorAudioPipeline = pipeline;
      const activeCaptureId = captureInfo.captureId;
      audioTrack.addEventListener('ended', () => {
        if (this.isScreenActive && this.linuxMonitorAudioPipeline?.captureId === activeCaptureId) {
          this.stopScreenShare();
        }
      }, { once: true });
      // Replay any chunks that arrived while the pipeline was being built.
      const queuedChunks = queuedChunksByCaptureId.get(captureInfo.captureId) || [];
      const activePipeline = pipeline;
      queuedChunks.forEach((chunk) => {
        this.handleLinuxScreenShareMonitorAudioChunk(activePipeline, chunk);
      });
      queuedChunksByCaptureId.delete(captureInfo.captureId);
      // If the capture already ended during startup, fail fast.
      if (queuedEndedReasons.has(captureInfo.captureId)) {
        throw new Error(queuedEndedReasons.get(captureInfo.captureId)
          || 'Linux screen-share monitor capture ended before audio initialisation completed.');
      }
      return {
        audioTrack,
        captureInfo
      };
    } catch (error) {
      if (pipeline) {
        this.disposeLinuxScreenShareMonitorAudioPipeline(pipeline.captureId);
      } else {
        // Pipeline never existed; just drop the IPC listeners.
        unsubscribeChunk();
        unsubscribeEnded();
      }
      try {
        await electronApi.stopLinuxScreenShareMonitorCapture(captureInfo?.captureId);
      } catch (stopError) {
        this.logger.warn('Failed to stop Linux screen-share monitor capture after startup failure', stopError);
      }
      throw error;
    }
  }

  /**
   * Dispose the renderer-side monitor-audio pipeline: unsubscribe IPC
   * listeners, stop the track, drop buffered bytes, and close the
   * AudioContext (close failure is logged, not thrown).
   *
   * @param captureId - When given, only dispose if it matches the current
   *   pipeline's capture id (guards against tearing down a newer pipeline).
   */
  private disposeLinuxScreenShareMonitorAudioPipeline(captureId?: string): void {
    if (!this.linuxMonitorAudioPipeline) {
      return;
    }
    if (captureId && captureId !== this.linuxMonitorAudioPipeline.captureId) {
      return;
    }
    const pipeline = this.linuxMonitorAudioPipeline;
    this.linuxMonitorAudioPipeline = null;
    pipeline.unsubscribeChunk();
    pipeline.unsubscribeEnded();
    pipeline.audioTrack.stop();
    pipeline.pendingBytes = new Uint8Array(0);
    void pipeline.audioContext.close().catch((error) => {
      this.logger.warn('Failed to close Linux screen-share monitor audio context', error);
    });
  }

  /**
   * Decode one raw PCM chunk and schedule it for gapless playback into the
   * pipeline's destination node.
   *
   * Bytes that do not complete a full frame (all channels x 2 bytes) are
   * held in `pendingBytes` and prepended to the next chunk. Only 16-bit
   * samples are supported; other widths are logged and dropped.
   *
   * @param pipeline - The active pipeline to feed.
   * @param chunk - Raw interleaved PCM bytes.
   */
  private handleLinuxScreenShareMonitorAudioChunk(
    pipeline: LinuxScreenShareMonitorAudioPipeline,
    chunk: Uint8Array
  ): void {
    if (pipeline.bitsPerSample !== 16) {
      this.logger.warn('Unsupported Linux screen-share monitor capture sample size', {
        bitsPerSample: pipeline.bitsPerSample,
        captureId: pipeline.captureId
      });
      return;
    }
    const bytesPerSample = pipeline.bitsPerSample / 8;
    const bytesPerFrame = bytesPerSample * pipeline.channelCount;
    if (!Number.isFinite(bytesPerFrame) || bytesPerFrame <= 0) {
      return;
    }
    const combinedBytes = this.concatLinuxMonitorAudioBytes(pipeline.pendingBytes, chunk);
    // Keep only whole frames; the remainder carries over to the next chunk.
    const completeByteLength = combinedBytes.byteLength - (combinedBytes.byteLength % bytesPerFrame);
    if (completeByteLength <= 0) {
      pipeline.pendingBytes = combinedBytes;
      return;
    }
    const completeBytes = combinedBytes.subarray(0, completeByteLength);
    pipeline.pendingBytes = this.copyLinuxMonitorAudioBytes(combinedBytes.subarray(completeByteLength));
    if (pipeline.audioContext.state !== 'running') {
      void pipeline.audioContext.resume().catch((error) => {
        this.logger.warn('Failed to resume Linux screen-share monitor audio context', error);
      });
    }
    const frameCount = completeByteLength / bytesPerFrame;
    const audioBuffer = this.createLinuxScreenShareAudioBuffer(pipeline, completeBytes, frameCount);
    const source = pipeline.audioContext.createBufferSource();
    source.buffer = audioBuffer;
    source.connect(pipeline.mediaDestination);
    source.onended = () => {
      source.disconnect();
    };
    const now = pipeline.audioContext.currentTime;
    // Start no earlier than 20 ms from now, and never before the previous
    // buffer finishes, to keep playback continuous.
    const startTime = Math.max(pipeline.nextStartTime, now + 0.02);
    source.start(startTime);
    pipeline.nextStartTime = startTime + audioBuffer.duration;
  }

  /**
   * Convert interleaved little-endian signed 16-bit PCM bytes into a
   * de-interleaved float AudioBuffer (samples scaled by 1/32768).
   *
   * @param pipeline - Supplies channel count, sample rate, and sample width.
   * @param bytes - Whole-frame PCM bytes.
   * @param frameCount - Number of frames contained in `bytes`.
   */
  private createLinuxScreenShareAudioBuffer(
    pipeline: LinuxScreenShareMonitorAudioPipeline,
    bytes: Uint8Array,
    frameCount: number
  ): AudioBuffer {
    const audioBuffer = pipeline.audioContext.createBuffer(pipeline.channelCount, frameCount, pipeline.sampleRate);
    const sampleData = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
    const channelData = Array.from({ length: pipeline.channelCount }, (_, channelIndex) => audioBuffer.getChannelData(channelIndex));
    const bytesPerSample = pipeline.bitsPerSample / 8;
    const bytesPerFrame = bytesPerSample * pipeline.channelCount;
    for (let frameIndex = 0; frameIndex < frameCount; frameIndex += 1) {
      const frameOffset = frameIndex * bytesPerFrame;
      for (let channelIndex = 0; channelIndex < pipeline.channelCount; channelIndex += 1) {
        const sampleOffset = frameOffset + (channelIndex * bytesPerSample);
        // s16le -> float in [-1, 1); `true` selects little-endian.
        channelData[channelIndex][frameIndex] = sampleData.getInt16(sampleOffset, true) / 32768;
      }
    }
    return audioBuffer;
  }

  /**
   * Concatenate two byte arrays into a fresh buffer. When either side is
   * empty, a copy of the other is returned (never a shared reference).
   */
  private concatLinuxMonitorAudioBytes(first: Uint8Array, second: Uint8Array): Uint8Array {
    if (first.byteLength === 0) {
      return this.copyLinuxMonitorAudioBytes(second);
    }
    if (second.byteLength === 0) {
      return this.copyLinuxMonitorAudioBytes(first);
    }
    const combined = new Uint8Array(first.byteLength + second.byteLength);
    combined.set(first, 0);
    combined.set(second, first.byteLength);
    return combined;
  }

  /** Returns an owned copy of `bytes` (empty input yields a fresh empty array). */
  private copyLinuxMonitorAudioBytes(bytes: Uint8Array): Uint8Array {
    return bytes.byteLength > 0 ? new Uint8Array(bytes) : new Uint8Array(0);
  }

  /**
   * Build `getDisplayMedia` constraints from the options and preset.
   *
   * The non-standard `restrictOwnAudio` / `suppressLocalAudioPlayback`
   * flags are only added when the browser advertises support for them via
   * `getSupportedConstraints`, hence the string-keyed access and the final
   * `as DisplayMediaStreamOptions` cast for the non-standard surface keys.
   *
   * @param options - Screen-share capture options.
   * @param preset - Quality preset for resolution/frame-rate limits.
   */
  private buildDisplayMediaConstraints(
    options: ScreenShareStartOptions,
    preset: ScreenShareQualityPreset
  ): DisplayMediaStreamOptions {
    const supportedConstraints = navigator.mediaDevices?.getSupportedConstraints?.() as Record<string, boolean> | undefined;
    const audioConstraints: Record<string, unknown> | false = options.includeSystemAudio
      ? {
        // Raw system audio: disable all voice-oriented processing.
        echoCancellation: false,
        noiseSuppression: false,
        autoGainControl: false
      }
      : false;
    if (audioConstraints && supportedConstraints?.['restrictOwnAudio']) {
      audioConstraints['restrictOwnAudio'] = true;
    }
    if (audioConstraints && supportedConstraints?.['suppressLocalAudioPlayback']) {
      audioConstraints['suppressLocalAudioPlayback'] = true;
    }
    return {
      video: {
        width: { ideal: preset.width, max: preset.width },
        height: { ideal: preset.height, max: preset.height },
        frameRate: { ideal: preset.frameRate, max: preset.frameRate }
      },
      audio: audioConstraints,
      monitorTypeSurfaces: 'include',
      selfBrowserSurface: 'exclude',
      surfaceSwitching: 'include',
      systemAudio: options.includeSystemAudio ? 'include' : 'exclude'
    } as DisplayMediaStreamOptions;
  }

  /**
   * Build legacy Chromium `mandatory` desktop-capture constraints for a
   * given desktopCapturer source id.
   *
   * @param sourceId - The Chromium desktop-capture source id.
   * @param options - Screen-share capture options (controls audio).
   * @param preset - Quality preset for the max width/height/frame-rate caps.
   */
  private buildElectronDesktopConstraints(
    sourceId: string,
    options: ScreenShareStartOptions,
    preset: ScreenShareQualityPreset
  ): ElectronDesktopMediaStreamConstraints {
    const electronConstraints: ElectronDesktopMediaStreamConstraints = {
      video: {
        mandatory: {
          chromeMediaSource: 'desktop',
          chromeMediaSourceId: sourceId,
          maxWidth: preset.width,
          maxHeight: preset.height,
          maxFrameRate: preset.frameRate
        }
      }
    };
    if (options.includeSystemAudio) {
      electronConstraints.audio = {
        mandatory: {
          chromeMediaSource: 'desktop',
          chromeMediaSourceId: sourceId
        }
      };
    } else {
      electronConstraints.audio = false;
    }
    return electronConstraints;
  }

  /**
   * Apply preset-derived settings to the captured video track: content
   * hint (when supported) and re-applied resolution/frame-rate constraints.
   *
   * @param preset - Quality preset to enforce on the track.
   * @throws If the active stream has no video track.
   */
  private configureScreenStream(preset: ScreenShareQualityPreset): void {
    const screenVideoTrack = this.activeScreenStream?.getVideoTracks()[0];
    if (!screenVideoTrack) {
      throw new Error('Screen capture returned no video track.');
    }
    if ('contentHint' in screenVideoTrack) {
      screenVideoTrack.contentHint = preset.contentHint;
    }
    this.logger.attachTrackDiagnostics(screenVideoTrack, 'screenVideo');
    this.logger.logStream('screen', this.activeScreenStream);
    if (typeof screenVideoTrack.applyConstraints === 'function') {
      screenVideoTrack.applyConstraints({
        width: { ideal: preset.width, max: preset.width },
        height: { ideal: preset.height, max: preset.height },
        frameRate: { ideal: preset.frameRate, max: preset.frameRate }
      }).catch((error) => {
        this.logger.warn('Failed to re-apply screen video constraints', error);
      });
    }
  }

  /**
   * Apply bitrate/frame-rate/scale caps from the preset to the first
   * encoding of the screen video sender, plus a degradation preference.
   * Failures are logged, never thrown.
   *
   * @param sender - The RTP sender carrying the screen video track.
   * @param preset - Quality preset supplying the encoding caps.
   * @param peerId - Peer id, for logging context only.
   */
  private async applyScreenShareVideoParameters(
    sender: RTCRtpSender,
    preset: ScreenShareQualityPreset,
    peerId: string
  ): Promise<void> {
    try {
      const params = sender.getParameters();
      // Some browsers return empty encodings; seed one so caps can be set.
      const encodings = params.encodings?.length ? params.encodings : [{} as RTCRtpEncodingParameters];
      params.encodings = encodings.map((encoding, index) => index === 0
        ? {
          ...encoding,
          maxBitrate: preset.maxBitrateBps,
          maxFramerate: preset.frameRate,
          scaleResolutionDownBy: preset.scaleResolutionDownBy ?? encoding.scaleResolutionDownBy ?? 1
        }
        : encoding);
      // degradationPreference is not in all TS lib versions, hence the cast.
      (params as RTCRtpSendParameters & { degradationPreference?: string }).degradationPreference = preset.degradationPreference;
      await sender.setParameters(params);
      this.logger.info('Applied screen-share sender parameters', {
        peerId,
        maxBitrate: preset.maxBitrateBps,
        maxFramerate: preset.frameRate
      });
    } catch (error) {
      this.logger.warn('Failed to apply screen-share sender parameters', error, { peerId });
    }
  }
}