[Experimental screenshare audio fix] Separate logic into its own files (minor change; can be reverted if needed)

This commit is contained in:
2026-03-13 02:26:55 +01:00
parent 15c5952e29
commit 22d355a522
12 changed files with 1211 additions and 800 deletions

View File

@@ -32,6 +32,11 @@ interface SinkInputDetails extends ShortSinkInputEntry {
properties: Record<string, string>;
}
/**
 * Identity of this process tree: the PIDs of the root process and all of its
 * descendants, plus the normalized (lowercased basename) executable names
 * observed for those PIDs. Used to decide whether a PulseAudio sink input
 * belongs to this application.
 */
interface DescendantProcessInfo {
    // Process ids (as strings, matching pactl's property format).
    ids: ReadonlySet<string>;
    // Lowercased executable basenames of those processes.
    binaryNames: ReadonlySet<string>;
}
interface PactlJsonSinkInputEntry {
index?: number | string;
properties?: Record<string, unknown>;
@@ -44,6 +49,7 @@ interface LinuxScreenShareAudioRoutingState {
screenShareLoopbackModuleId: string | null;
voiceLoopbackModuleId: string | null;
rerouteIntervalId: ReturnType<typeof setInterval> | null;
subscribeProcess: ChildProcess | null;
}
interface LinuxScreenShareMonitorCaptureState {
@@ -77,7 +83,8 @@ const routingState: LinuxScreenShareAudioRoutingState = {
restoreSinkName: null,
screenShareLoopbackModuleId: null,
voiceLoopbackModuleId: null,
rerouteIntervalId: null
rerouteIntervalId: null,
subscribeProcess: null
};
const monitorCaptureState: LinuxScreenShareMonitorCaptureState = {
captureId: null,
@@ -126,12 +133,21 @@ export async function activateLinuxScreenShareAudioRouting(): Promise<LinuxScree
routingState.screenShareLoopbackModuleId = await loadLoopbackModule(SCREEN_SHARE_MONITOR_SOURCE_NAME, restoreSinkName);
routingState.voiceLoopbackModuleId = await loadLoopbackModule(`${VOICE_SINK_NAME}.monitor`, restoreSinkName);
await setDefaultSink(SCREEN_SHARE_SINK_NAME);
await moveSinkInputs(SCREEN_SHARE_SINK_NAME, (sinkName) => !!sinkName && sinkName !== SCREEN_SHARE_SINK_NAME && sinkName !== VOICE_SINK_NAME);
// Set the default sink to the voice sink so that new app audio
// streams (received WebRTC voice) never land on the screenshare
// capture sink. This prevents the feedback loop where remote
// voice audio was picked up by parec before the reroute interval
// could move the stream away.
await setDefaultSink(VOICE_SINK_NAME);
routingState.active = true;
await rerouteAppSinkInputsToVoiceSink();
// Let the combined reroute decide placement for every existing
// stream. This avoids briefly shoving the app's own playback to the
// screenshare sink before ownership detection can move it back.
await rerouteSinkInputs();
startSinkInputRerouteLoop();
startSubscribeWatcher();
return buildRoutingInfo(true, true);
} catch (error) {
@@ -148,6 +164,7 @@ export async function activateLinuxScreenShareAudioRouting(): Promise<LinuxScree
export async function deactivateLinuxScreenShareAudioRouting(): Promise<boolean> {
const restoreSinkName = routingState.restoreSinkName;
stopSubscribeWatcher();
stopSinkInputRerouteLoop();
await stopLinuxScreenShareMonitorCapture();
@@ -166,6 +183,7 @@ export async function deactivateLinuxScreenShareAudioRouting(): Promise<boolean>
routingState.restoreSinkName = null;
routingState.screenShareLoopbackModuleId = null;
routingState.voiceLoopbackModuleId = null;
routingState.subscribeProcess = null;
return true;
}
@@ -425,34 +443,52 @@ async function setDefaultSink(sinkName: string): Promise<void> {
await runPactl('set-default-sink', sinkName);
}
async function rerouteAppSinkInputsToVoiceSink(): Promise<void> {
/**
* Combined reroute that enforces sink placement in both directions:
* - App-owned sink inputs that are NOT on the voice sink are moved there.
* - Non-app sink inputs that ARE on the voice sink are moved to the
* screenshare sink so they are captured by parec.
*
* This two-way approach, combined with the voice sink being the PulseAudio
* default, ensures that received WebRTC voice audio can never leak into the
* screenshare monitor source.
*/
async function rerouteSinkInputs(): Promise<void> {
const [
sinks,
sinkInputs,
descendantProcessIds
descendantProcessInfo
] = await Promise.all([
listSinks(),
listSinkInputDetails(),
collectDescendantProcessIds(process.pid)
collectDescendantProcessInfo(process.pid)
]);
const sinkNamesByIndex = new Map(sinks.map((sink) => [sink.index, sink.name]));
await Promise.all(
sinkInputs.map(async (sinkInput) => {
if (!isAppOwnedSinkInput(sinkInput, descendantProcessIds)) {
return;
}
const sinkName = sinkNamesByIndex.get(sinkInput.sinkIndex) ?? null;
const appOwned = isAppOwnedSinkInput(sinkInput, descendantProcessInfo);
// App-owned streams must stay on the voice sink.
if (appOwned && sinkName !== VOICE_SINK_NAME) {
try {
await runPactl('move-sink-input', sinkInput.index, VOICE_SINK_NAME);
} catch {
// Streams can disappear or be recreated while rerouting.
}
if (sinkName === VOICE_SINK_NAME) {
return;
}
try {
await runPactl('move-sink-input', sinkInput.index, VOICE_SINK_NAME);
} catch {
// Streams can disappear or be recreated while rerouting.
// Non-app streams sitting on the voice sink should be moved to the
// screenshare sink for desktop-audio capture.
if (!appOwned && sinkName === VOICE_SINK_NAME) {
try {
await runPactl('move-sink-input', sinkInput.index, SCREEN_SHARE_SINK_NAME);
} catch {
// Streams can disappear or be recreated while rerouting.
}
}
})
);
@@ -515,7 +551,7 @@ function startSinkInputRerouteLoop(): void {
}
routingState.rerouteIntervalId = setInterval(() => {
void rerouteAppSinkInputsToVoiceSink();
void rerouteSinkInputs();
}, REROUTE_INTERVAL_MS);
}
@@ -528,13 +564,108 @@ function stopSinkInputRerouteLoop(): void {
routingState.rerouteIntervalId = null;
}
/**
 * Launches a `pactl subscribe` child process that streams PulseAudio events
 * and triggers a reroute as soon as a sink-input appears or changes, instead
 * of waiting for the next interval tick. Rapid event bursts are debounced
 * into a single reroute via a 50 ms timer. A no-op when a watcher is already
 * running; spawn failures are swallowed because the interval loop still
 * covers rerouting.
 */
function startSubscribeWatcher(): void {
    if (routingState.subscribeProcess) {
        return;
    }
    let watcher: ChildProcess;
    try {
        watcher = spawn('pactl', ['subscribe'], {
            env: process.env,
            stdio: [
                'ignore',
                'pipe',
                'ignore'
            ]
        });
    } catch {
        // pactl may be missing entirely; the interval loop remains active.
        return;
    }
    routingState.subscribeProcess = watcher;

    const sinkInputEventPattern = /Event '(?:new|change)' on sink-input/;
    let rerouteScheduled = false;
    watcher.stdout?.on('data', (chunk: Buffer) => {
        if (!routingState.active) {
            return;
        }
        if (!sinkInputEventPattern.test(chunk.toString())) {
            return;
        }
        if (rerouteScheduled) {
            return;
        }
        // Collapse a burst of events into one reroute pass.
        rerouteScheduled = true;
        setTimeout(() => {
            rerouteScheduled = false;
            void rerouteSinkInputs();
        }, 50);
    });

    const clearIfCurrent = (): void => {
        if (routingState.subscribeProcess === watcher) {
            routingState.subscribeProcess = null;
        }
    };
    watcher.on('close', clearIfCurrent);
    watcher.on('error', clearIfCurrent);
}
/** Detaches and terminates the `pactl subscribe` watcher, if one is running. */
function stopSubscribeWatcher(): void {
    const watcher = routingState.subscribeProcess;
    if (!watcher) {
        return;
    }
    // Clear the reference first so the 'close' handler's identity check
    // does not race with a subsequent start.
    routingState.subscribeProcess = null;
    if (watcher.killed) {
        return;
    }
    watcher.kill('SIGTERM');
}
function isAppOwnedSinkInput(
sinkInput: SinkInputDetails,
descendantProcessIds: ReadonlySet<string>
descendantProcessInfo: DescendantProcessInfo
): boolean {
const processId = sinkInput.properties['application.process.id'];
return typeof processId === 'string' && descendantProcessIds.has(processId);
if (typeof processId === 'string' && descendantProcessInfo.ids.has(processId)) {
return true;
}
const processBinary = normalizeProcessBinary(sinkInput.properties['application.process.binary']);
if (processBinary && descendantProcessInfo.binaryNames.has(processBinary)) {
return true;
}
const applicationName = normalizeProcessBinary(sinkInput.properties['application.name']);
if (applicationName && descendantProcessInfo.binaryNames.has(applicationName)) {
return true;
}
return false;
}
async function moveSinkInputs(
@@ -697,31 +828,45 @@ async function listSinkInputDetails(): Promise<SinkInputDetails[]> {
return entries.filter((entry) => !!entry.sinkIndex);
}
async function collectDescendantProcessIds(rootProcessId: number): Promise<Set<string>> {
const { stdout } = await execFileAsync('ps', ['-eo', 'pid=,ppid='], {
async function collectDescendantProcessInfo(rootProcessId: number): Promise<DescendantProcessInfo> {
const { stdout } = await execFileAsync('ps', ['-eo', 'pid=,ppid=,comm='], {
env: process.env
});
const childrenByParentId = new Map<string, string[]>();
const binaryNameByProcessId = new Map<string, string>();
stdout
.split(/\r?\n/)
.map((line) => line.trim())
.filter(Boolean)
.forEach((line) => {
const [pid, ppid] = line.split(/\s+/);
const match = line.match(/^(\d+)\s+(\d+)\s+(.+)$/);
if (!pid || !ppid) {
if (!match) {
return;
}
const [
,
pid,
ppid,
command
] = match;
const siblings = childrenByParentId.get(ppid) ?? [];
siblings.push(pid);
childrenByParentId.set(ppid, siblings);
const normalizedBinaryName = normalizeProcessBinary(command);
if (normalizedBinaryName) {
binaryNameByProcessId.set(pid, normalizedBinaryName);
}
});
const rootId = `${rootProcessId}`;
const descendantIds = new Set<string>([rootId]);
const descendantBinaryNames = new Set<string>();
const queue = [rootId];
while (queue.length > 0) {
@@ -731,6 +876,12 @@ async function collectDescendantProcessIds(rootProcessId: number): Promise<Set<s
continue;
}
const binaryName = binaryNameByProcessId.get(currentId);
if (binaryName) {
descendantBinaryNames.add(binaryName);
}
for (const childId of childrenByParentId.get(currentId) ?? []) {
if (descendantIds.has(childId)) {
continue;
@@ -741,7 +892,30 @@ async function collectDescendantProcessIds(rootProcessId: number): Promise<Set<s
}
}
return descendantIds;
return {
ids: descendantIds,
binaryNames: descendantBinaryNames
};
}
/**
 * Normalizes a process binary/application name to a comparable key: the
 * lowercased basename of the path (both `/` and `\` separators handled).
 * Returns null for missing, blank, or separator-only input.
 */
function normalizeProcessBinary(value: string | undefined): string | null {
    const trimmed = value?.trim();
    if (!trimmed) {
        return null;
    }
    const segments = trimmed.split(/[\\/]/);
    const lastSegment = segments[segments.length - 1] ?? '';
    const normalized = lastSegment.trim().toLowerCase();
    return normalized.length > 0 ? normalized : null;
}
function stripSurroundingQuotes(value: string): string {

View File

@@ -109,6 +109,7 @@ export class WebRTCService implements OnDestroy {
private readonly _isNoiseReductionEnabled = signal(false);
private readonly _screenStreamSignal = signal<MediaStream | null>(null);
private readonly _isScreenShareRemotePlaybackSuppressed = signal(false);
private readonly _forceDefaultRemotePlaybackOutput = signal(false);
private readonly _hasConnectionError = signal(false);
private readonly _connectionErrorMessage = signal<string | null>(null);
private readonly _hasEverConnected = signal(false);
@@ -131,6 +132,7 @@ export class WebRTCService implements OnDestroy {
readonly isNoiseReductionEnabled = computed(() => this._isNoiseReductionEnabled());
readonly screenStream = computed(() => this._screenStreamSignal());
readonly isScreenShareRemotePlaybackSuppressed = computed(() => this._isScreenShareRemotePlaybackSuppressed());
readonly forceDefaultRemotePlaybackOutput = computed(() => this._forceDefaultRemotePlaybackOutput());
readonly hasConnectionError = computed(() => this._hasConnectionError());
readonly connectionErrorMessage = computed(() => this._connectionErrorMessage());
readonly shouldShowConnectionError = computed(() => {
@@ -220,6 +222,7 @@ export class WebRTCService implements OnDestroy {
this._isScreenSharing.set(state.active);
this._screenStreamSignal.set(state.stream);
this._isScreenShareRemotePlaybackSuppressed.set(state.suppressRemotePlayback);
this._forceDefaultRemotePlaybackOutput.set(state.forceDefaultRemotePlaybackOutput);
}
});
@@ -912,6 +915,7 @@ export class WebRTCService implements OnDestroy {
this._isScreenSharing.set(false);
this._screenStreamSignal.set(null);
this._isScreenShareRemotePlaybackSuppressed.set(false);
this._forceDefaultRemotePlaybackOutput.set(false);
}
/** Synchronise Angular signals from the MediaManager's internal state. */

View File

@@ -103,10 +103,10 @@ export class MediaManager {
* Replace the callback set at runtime.
* Needed because of circular initialisation between managers.
*
* @param cb - The new callback interface to wire into this manager.
* @param nextCallbacks - The new callback interface to wire into this manager.
*/
setCallbacks(cb: MediaManagerCallbacks): void {
this.callbacks = cb;
setCallbacks(nextCallbacks: MediaManagerCallbacks): void {
this.callbacks = nextCallbacks;
}
/** Returns the current local media stream, or `null` if voice is disabled. */
@@ -485,28 +485,21 @@ export class MediaManager {
if (!this.localMediaStream)
return;
const localAudioTrack = this.localMediaStream.getAudioTracks()[0] || null;
const localVideoTrack = this.localMediaStream.getVideoTracks()[0] || null;
const localStream = this.localMediaStream;
const localAudioTrack = localStream.getAudioTracks()[0] || null;
const localVideoTrack = localStream.getVideoTracks()[0] || null;
peers.forEach((peerData, peerId) => {
if (localAudioTrack) {
let audioSender =
peerData.audioSender ||
peerData.connection.getSenders().find((s) => s.track?.kind === TRACK_KIND_AUDIO);
if (!audioSender) {
audioSender = peerData.connection.addTransceiver(TRACK_KIND_AUDIO, {
direction: TRANSCEIVER_SEND_RECV
}).sender;
}
const audioTransceiver = this.getOrCreateReusableTransceiver(peerData, TRACK_KIND_AUDIO, {
preferredSender: peerData.audioSender,
excludedSenders: [peerData.screenAudioSender]
});
const audioSender = audioTransceiver.sender;
peerData.audioSender = audioSender;
// Restore direction after removeTrack (which sets it to recvonly)
const audioTransceiver = peerData.connection
.getTransceivers()
.find((t) => t.sender === audioSender);
if (
audioTransceiver &&
(audioTransceiver.direction === TRANSCEIVER_RECV_ONLY ||
@@ -515,29 +508,25 @@ export class MediaManager {
audioTransceiver.direction = TRANSCEIVER_SEND_RECV;
}
if (typeof audioSender.setStreams === 'function') {
audioSender.setStreams(localStream);
}
audioSender
.replaceTrack(localAudioTrack)
.then(() => this.logger.info('audio replaceTrack ok', { peerId }))
.catch((e) => this.logger.error('audio replaceTrack failed', e));
.catch((error) => this.logger.error('audio replaceTrack failed', error));
}
if (localVideoTrack) {
let videoSender =
peerData.videoSender ||
peerData.connection.getSenders().find((s) => s.track?.kind === TRACK_KIND_VIDEO);
if (!videoSender) {
videoSender = peerData.connection.addTransceiver(TRACK_KIND_VIDEO, {
direction: TRANSCEIVER_SEND_RECV
}).sender;
}
const videoTransceiver = this.getOrCreateReusableTransceiver(peerData, TRACK_KIND_VIDEO, {
preferredSender: peerData.videoSender,
excludedSenders: [peerData.screenVideoSender]
});
const videoSender = videoTransceiver.sender;
peerData.videoSender = videoSender;
const videoTransceiver = peerData.connection
.getTransceivers()
.find((t) => t.sender === videoSender);
if (
videoTransceiver &&
(videoTransceiver.direction === TRANSCEIVER_RECV_ONLY ||
@@ -546,16 +535,64 @@ export class MediaManager {
videoTransceiver.direction = TRANSCEIVER_SEND_RECV;
}
if (typeof videoSender.setStreams === 'function') {
videoSender.setStreams(localStream);
}
videoSender
.replaceTrack(localVideoTrack)
.then(() => this.logger.info('video replaceTrack ok', { peerId }))
.catch((e) => this.logger.error('video replaceTrack failed', e));
.catch((error) => this.logger.error('video replaceTrack failed', error));
}
this.callbacks.renegotiate(peerId);
});
}
private getOrCreateReusableTransceiver(
peerData: PeerData,
kind: typeof TRACK_KIND_AUDIO | typeof TRACK_KIND_VIDEO,
options: {
preferredSender?: RTCRtpSender;
excludedSenders?: (RTCRtpSender | undefined)[];
}
): RTCRtpTransceiver {
const excludedSenders = new Set(
(options.excludedSenders ?? []).filter((sender): sender is RTCRtpSender => !!sender)
);
const existingTransceivers = peerData.connection.getTransceivers();
const preferredTransceiver = options.preferredSender
? existingTransceivers.find((transceiver) => transceiver.sender === options.preferredSender)
: null;
if (preferredTransceiver) {
return preferredTransceiver;
}
const attachedSenderTransceiver = existingTransceivers.find((transceiver) =>
!excludedSenders.has(transceiver.sender)
&& transceiver.sender.track?.kind === kind
);
if (attachedSenderTransceiver) {
return attachedSenderTransceiver;
}
const reusableReceiverTransceiver = existingTransceivers.find((transceiver) =>
!excludedSenders.has(transceiver.sender)
&& !transceiver.sender.track
&& transceiver.receiver.track?.kind === kind
);
if (reusableReceiverTransceiver) {
return reusableReceiverTransceiver;
}
return peerData.connection.addTransceiver(kind, {
direction: TRANSCEIVER_SEND_RECV
});
}
/** Broadcast a voice-presence state event to all connected peers. */
private broadcastVoicePresence(): void {
const oderId = this.callbacks.getIdentifyOderId();

View File

@@ -127,7 +127,9 @@ export function createPeerConnection(
isInitiator,
pendingIceCandidates: [],
audioSender: undefined,
videoSender: undefined
videoSender: undefined,
remoteVoiceStreamIds: new Set<string>(),
remoteScreenShareStreamIds: new Set<string>()
};
if (isInitiator) {
@@ -151,6 +153,10 @@ export function createPeerConnection(
localStream.getTracks().forEach((track) => {
if (track.kind === TRACK_KIND_AUDIO && peerData.audioSender) {
if (typeof peerData.audioSender.setStreams === 'function') {
peerData.audioSender.setStreams(localStream);
}
peerData.audioSender
.replaceTrack(track)
.then(() => logger.info('audio replaceTrack (init) ok', { remotePeerId }))
@@ -158,6 +164,10 @@ export function createPeerConnection(
logger.error('audio replaceTrack failed at createPeerConnection', error)
);
} else if (track.kind === TRACK_KIND_VIDEO && peerData.videoSender) {
if (typeof peerData.videoSender.setStreams === 'function') {
peerData.videoSender.setStreams(localStream);
}
peerData.videoSender
.replaceTrack(track)
.then(() => logger.info('video replaceTrack (init) ok', { remotePeerId }))

View File

@@ -9,6 +9,7 @@ export function handleRemoteTrack(
): void {
const { logger, state } = context;
const track = event.track;
const isScreenAudio = isScreenShareAudioTrack(context, event, remotePeerId);
const settings =
typeof track.getSettings === 'function' ? track.getSettings() : ({} as MediaTrackSettings);
@@ -34,10 +35,10 @@ export function handleRemoteTrack(
}
const compositeStream = buildCompositeRemoteStream(state, remotePeerId, track);
const voiceStream = isVoiceAudioTrack(context, event, remotePeerId)
const voiceStream = isVoiceAudioTrack(track, isScreenAudio)
? buildAudioOnlyStream(state.remotePeerVoiceStreams.get(remotePeerId), track)
: null;
const screenShareStream = isScreenShareTrack(context, event, remotePeerId)
const screenShareStream = isScreenShareTrack(track, isScreenAudio)
? buildScreenShareStream(state.remotePeerScreenShareStreams.get(remotePeerId), track)
: null;
@@ -53,6 +54,12 @@ export function handleRemoteTrack(
state.remotePeerScreenShareStreams.set(remotePeerId, screenShareStream);
}
rememberIncomingStreamIds(state, event, remotePeerId, {
isScreenAudio,
isVoiceAudio: !!voiceStream,
isScreenTrack: !!screenShareStream
});
publishRemoteStreamUpdate(context, remotePeerId, compositeStream);
}
@@ -61,6 +68,7 @@ export function clearRemoteScreenShareStream(
remotePeerId: string
): void {
const { state } = context;
const peerData = state.activePeerConnections.get(remotePeerId);
const screenShareStream = state.remotePeerScreenShareStreams.get(remotePeerId);
if (!screenShareStream) {
@@ -79,6 +87,8 @@ export function clearRemoteScreenShareStream(
removeTracksFromStreamMap(state.remotePeerVoiceStreams, remotePeerId, screenShareTrackIds);
state.remotePeerScreenShareStreams.delete(remotePeerId);
peerData?.remoteScreenShareStreamIds.clear();
publishRemoteStreamUpdate(context, remotePeerId, compositeStream);
}
@@ -152,11 +162,20 @@ function removeRemoteTrack(
trackId: string
): void {
const { state } = context;
const peerData = state.activePeerConnections.get(remotePeerId);
const compositeStream = removeTrackFromStreamMap(state.remotePeerStreams, remotePeerId, trackId);
removeTrackFromStreamMap(state.remotePeerVoiceStreams, remotePeerId, trackId);
removeTrackFromStreamMap(state.remotePeerScreenShareStreams, remotePeerId, trackId);
if (!state.remotePeerVoiceStreams.has(remotePeerId)) {
peerData?.remoteVoiceStreamIds.clear();
}
if (!state.remotePeerScreenShareStreams.has(remotePeerId)) {
peerData?.remoteScreenShareStreamIds.clear();
}
publishRemoteStreamUpdate(context, remotePeerId, compositeStream);
}
@@ -224,20 +243,12 @@ function publishRemoteStreamUpdate(
});
}
function isVoiceAudioTrack(
context: PeerConnectionManagerContext,
event: RTCTrackEvent,
remotePeerId: string
): boolean {
return event.track.kind === TRACK_KIND_AUDIO && !isScreenShareAudioTrack(context, event, remotePeerId);
function isVoiceAudioTrack(track: MediaStreamTrack, isScreenAudio: boolean): boolean {
return track.kind === TRACK_KIND_AUDIO && !isScreenAudio;
}
function isScreenShareTrack(
context: PeerConnectionManagerContext,
event: RTCTrackEvent,
remotePeerId: string
): boolean {
return event.track.kind === TRACK_KIND_VIDEO || isScreenShareAudioTrack(context, event, remotePeerId);
function isScreenShareTrack(track: MediaStreamTrack, isScreenAudio: boolean): boolean {
return track.kind === TRACK_KIND_VIDEO || isScreenAudio;
}
function isScreenShareAudioTrack(
@@ -255,12 +266,34 @@ function isScreenShareAudioTrack(
return false;
}
const incomingStreamIds = getIncomingStreamIds(event);
if (incomingStreamIds.some((streamId) => peerData.remoteScreenShareStreamIds.has(streamId))) {
return true;
}
if (incomingStreamIds.some((streamId) => peerData.remoteVoiceStreamIds.has(streamId))) {
return false;
}
if (event.streams.some((stream) => stream.getVideoTracks().some((track) => track.readyState === 'live'))) {
return true;
}
const screenAudioTransceiver = peerData.connection.getTransceivers().find(
(transceiver) => transceiver.sender === peerData.screenAudioSender
);
if (screenAudioTransceiver && matchesTransceiver(event.transceiver, screenAudioTransceiver)) {
return true;
}
const voiceAudioTransceiver = peerData.connection.getTransceivers().find(
(transceiver) => transceiver.sender === peerData.audioSender
);
if (voiceAudioTransceiver) {
return event.transceiver !== voiceAudioTransceiver;
return !matchesTransceiver(event.transceiver, voiceAudioTransceiver);
}
const audioTransceivers = peerData.connection.getTransceivers().filter((transceiver) =>
@@ -272,3 +305,52 @@ function isScreenShareAudioTrack(
return transceiverIndex > 0;
}
function rememberIncomingStreamIds(
state: PeerConnectionManagerContext['state'],
event: RTCTrackEvent,
remotePeerId: string,
options: {
isScreenAudio: boolean;
isVoiceAudio: boolean;
isScreenTrack: boolean;
}
): void {
const peerData = state.activePeerConnections.get(remotePeerId);
if (!peerData) {
return;
}
const incomingStreamIds = getIncomingStreamIds(event);
if (incomingStreamIds.length === 0) {
return;
}
if (event.track.kind === TRACK_KIND_VIDEO || options.isScreenAudio || options.isScreenTrack) {
incomingStreamIds.forEach((streamId) => {
peerData.remoteScreenShareStreamIds.add(streamId);
peerData.remoteVoiceStreamIds.delete(streamId);
});
return;
}
if (options.isVoiceAudio) {
incomingStreamIds.forEach((streamId) => {
peerData.remoteVoiceStreamIds.add(streamId);
peerData.remoteScreenShareStreamIds.delete(streamId);
});
}
}
/** Collects the non-empty ids of all MediaStreams attached to a track event. */
function getIncomingStreamIds(event: RTCTrackEvent): string[] {
    const streamIds: string[] = [];
    for (const stream of event.streams) {
        if (stream.id) {
            streamIds.push(stream.id);
        }
    }
    return streamIds;
}
/**
 * Two transceivers match when they are the same object, or when both have a
 * negotiated (non-null, non-empty) mid and the mids are equal.
 */
function matchesTransceiver(left: RTCRtpTransceiver, right: RTCRtpTransceiver): boolean {
    if (left === right) {
        return true;
    }
    const leftMid = left.mid;
    const rightMid = right.mid;
    return !!leftMid && !!rightMid && leftMid === rightMid;
}

View File

@@ -0,0 +1,56 @@
import { ScreenShareQualityPreset, ScreenShareStartOptions } from '../screen-share.config';
import { WebRTCLogger } from '../webrtc-logger';
/**
 * Screen-share capture backend for plain browsers, built on the standard
 * `navigator.mediaDevices.getDisplayMedia` API. Applies the quality preset
 * as video constraints and, when requested, asks for raw (unprocessed)
 * system audio.
 */
export class BrowserScreenShareCapture {
    constructor(private readonly logger: WebRTCLogger) {}

    /**
     * Requests a display-media stream for the given options and preset.
     * @throws when `getDisplayMedia` is not available in this environment.
     */
    async startCapture(
        options: ScreenShareStartOptions,
        preset: ScreenShareQualityPreset
    ): Promise<MediaStream> {
        const displayConstraints = this.buildDisplayMediaConstraints(options, preset);
        this.logger.info('getDisplayMedia constraints', displayConstraints);
        if (!navigator.mediaDevices?.getDisplayMedia) {
            throw new Error('navigator.mediaDevices.getDisplayMedia is not available.');
        }
        return await navigator.mediaDevices.getDisplayMedia(displayConstraints);
    }

    // Translates options + preset into DisplayMediaStreamOptions. Audio
    // processing (echo cancellation etc.) is disabled for system audio, and
    // the optional restrictOwnAudio / suppressLocalAudioPlayback constraints
    // are added only when the browser reports support for them.
    private buildDisplayMediaConstraints(
        options: ScreenShareStartOptions,
        preset: ScreenShareQualityPreset
    ): DisplayMediaStreamOptions {
        const supported = navigator.mediaDevices?.getSupportedConstraints?.() as
            | Record<string, boolean>
            | undefined;
        let audio: Record<string, unknown> | false = false;
        if (options.includeSystemAudio) {
            audio = {
                echoCancellation: false,
                noiseSuppression: false,
                autoGainControl: false
            };
            if (supported?.['restrictOwnAudio']) {
                audio['restrictOwnAudio'] = true;
            }
            if (supported?.['suppressLocalAudioPlayback']) {
                audio['suppressLocalAudioPlayback'] = true;
            }
        }
        return {
            video: {
                width: { ideal: preset.width, max: preset.width },
                height: { ideal: preset.height, max: preset.height },
                frameRate: { ideal: preset.frameRate, max: preset.frameRate }
            },
            audio,
            monitorTypeSurfaces: 'include',
            selfBrowserSurface: 'exclude',
            surfaceSwitching: 'include',
            systemAudio: options.includeSystemAudio ? 'include' : 'exclude'
        } as DisplayMediaStreamOptions;
    }
}

View File

@@ -0,0 +1,163 @@
import { ScreenShareQualityPreset, ScreenShareStartOptions } from '../screen-share.config';
import { ELECTRON_ENTIRE_SCREEN_SOURCE_NAME } from '../webrtc.constants';
import { WebRTCLogger } from '../webrtc-logger';
import {
DesktopSource,
ElectronDesktopCaptureResult,
ElectronDesktopMediaStreamConstraints,
ElectronDesktopSourceSelection,
ScreenShareElectronApi
} from './shared';
/**
 * Late-bound collaborators for {@link DesktopElectronScreenShareCapture}:
 * access to the Electron preload API and an optional user-facing picker for
 * choosing among multiple desktop sources.
 */
interface DesktopElectronScreenShareCaptureDependencies {
    // Returns the Electron preload bridge, or null outside Electron.
    getElectronApi(): ScreenShareElectronApi | null;
    // Returns the source-picker callback, if one has been wired up.
    getSelectDesktopSource(): ((
        sources: readonly DesktopSource[],
        options: { includeSystemAudio: boolean }
    ) => Promise<ElectronDesktopSourceSelection>) | undefined;
}
/**
 * Screen-share capture backend for Electron on non-Linux desktops, using
 * `desktopCapturer` sources and `getUserMedia` with Chromium's
 * `chromeMediaSource: 'desktop'` mandatory constraints.
 */
export class DesktopElectronScreenShareCapture {
    constructor(
        private readonly logger: WebRTCLogger,
        private readonly dependencies: DesktopElectronScreenShareCaptureDependencies
    ) {}

    /** True when the Electron source API is present and this is not Linux Electron. */
    isAvailable(): boolean {
        return !!this.dependencies.getElectronApi()?.getSources && !this.isLinuxElectron();
    }

    /**
     * Whether remote playback should be suppressed while sharing: only on
     * Windows Electron when system audio is included — presumably so the
     * system-audio loopback does not re-capture the remote voices (TODO
     * confirm against the playback code).
     */
    shouldSuppressRemotePlaybackDuringShare(includeSystemAudio: boolean): boolean {
        return includeSystemAudio && this.isWindowsElectron();
    }

    /**
     * Resolves a desktop source (optionally via the user-facing picker) and
     * captures it with getUserMedia.
     *
     * @throws when the Electron API, any capture source, or getUserMedia is
     *   unavailable.
     */
    async startCapture(
        options: ScreenShareStartOptions,
        preset: ScreenShareQualityPreset
    ): Promise<ElectronDesktopCaptureResult> {
        const electronApi = this.dependencies.getElectronApi();
        if (!electronApi?.getSources) {
            throw new Error('Electron desktop capture is unavailable.');
        }
        const sources = await electronApi.getSources();
        const selection = await this.resolveSourceSelection(sources, options.includeSystemAudio);
        // The selection may override the caller's system-audio choice.
        const captureOptions = {
            ...options,
            includeSystemAudio: selection.includeSystemAudio
        };
        if (!selection.source) {
            throw new Error('No desktop capture sources were available.');
        }
        this.logger.info('Selected Electron desktop source', {
            includeSystemAudio: selection.includeSystemAudio,
            sourceId: selection.source.id,
            sourceName: selection.source.name
        });
        const constraints = this.buildConstraints(selection.source.id, captureOptions, preset);
        this.logger.info('desktopCapturer constraints', constraints);
        if (!navigator.mediaDevices?.getUserMedia) {
            throw new Error('navigator.mediaDevices.getUserMedia is not available (requires HTTPS or localhost).');
        }
        return {
            includeSystemAudio: selection.includeSystemAudio,
            stream: await navigator.mediaDevices.getUserMedia(constraints)
        };
    }

    // Picks the default source (the "entire screen" entry, else the first
    // after sorting). Delegates to the user-facing picker only on Windows
    // Electron with at least two sources and a picker callback available.
    private async resolveSourceSelection(
        sources: DesktopSource[],
        includeSystemAudio: boolean
    ): Promise<ElectronDesktopSourceSelection> {
        const orderedSources = this.sortSources(sources);
        const defaultSource = orderedSources.find((source) => source.name === ELECTRON_ENTIRE_SCREEN_SOURCE_NAME)
            ?? orderedSources[0];
        if (orderedSources.length === 0) {
            throw new Error('No desktop capture sources were available.');
        }
        const selectDesktopSource = this.dependencies.getSelectDesktopSource();
        if (!this.isWindowsElectron() || orderedSources.length < 2 || !selectDesktopSource) {
            return {
                includeSystemAudio,
                source: defaultSource
            };
        }
        return await selectDesktopSource(orderedSources, { includeSystemAudio });
    }

    // Stable ordering: screen sources first, then others, ties broken
    // alphabetically by name. Does not mutate the input array.
    private sortSources(sources: DesktopSource[]): DesktopSource[] {
        return [...sources].sort((left, right) => {
            const weightDiff = this.getSourceWeight(left) - this.getSourceWeight(right);
            if (weightDiff !== 0) {
                return weightDiff;
            }
            return left.name.localeCompare(right.name);
        });
    }

    // 0 = whole-screen source (entire-screen name or 'screen' id prefix),
    // 1 = anything else (e.g. individual windows).
    private getSourceWeight(source: DesktopSource): number {
        return source.name === ELECTRON_ENTIRE_SCREEN_SOURCE_NAME || source.id.startsWith('screen')
            ? 0
            : 1;
    }

    // Builds Chromium desktop-capture constraints for the chosen source id;
    // an audio loopback constraint is included only when system audio was
    // requested.
    private buildConstraints(
        sourceId: string,
        options: ScreenShareStartOptions,
        preset: ScreenShareQualityPreset
    ): ElectronDesktopMediaStreamConstraints {
        const constraints: ElectronDesktopMediaStreamConstraints = {
            video: {
                mandatory: {
                    chromeMediaSource: 'desktop',
                    chromeMediaSourceId: sourceId,
                    maxWidth: preset.width,
                    maxHeight: preset.height,
                    maxFrameRate: preset.frameRate
                }
            }
        };
        if (options.includeSystemAudio) {
            constraints.audio = {
                mandatory: {
                    chromeMediaSource: 'desktop',
                    chromeMediaSourceId: sourceId
                }
            };
        } else {
            constraints.audio = false;
        }
        return constraints;
    }

    // NOTE(review): platform detection is userAgent/platform substring
    // matching; verify it behaves as intended on unusual UA strings.
    private isLinuxElectron(): boolean {
        if (!this.dependencies.getElectronApi() || typeof navigator === 'undefined') {
            return false;
        }
        return /linux/i.test(`${navigator.userAgent} ${navigator.platform}`);
    }

    private isWindowsElectron(): boolean {
        if (!this.isAvailable() || typeof navigator === 'undefined') {
            return false;
        }
        return /win/i.test(`${navigator.userAgent} ${navigator.platform}`);
    }
}

View File

@@ -0,0 +1,439 @@
import { ScreenShareQualityPreset, ScreenShareStartOptions } from '../screen-share.config';
import { WebRTCLogger } from '../webrtc-logger';
import {
LinuxScreenShareAudioRoutingInfo,
LinuxScreenShareMonitorAudioChunkPayload,
LinuxScreenShareMonitorAudioEndedPayload,
LinuxScreenShareMonitorCaptureInfo,
ScreenShareElectronApi
} from './shared';
/**
 * State of the Web Audio pipeline that turns PCM chunks from the main
 * process's monitor capture into a live MediaStreamTrack.
 */
interface LinuxScreenShareMonitorAudioPipeline {
    // Context that schedules and plays the decoded PCM buffers.
    audioContext: AudioContext;
    // The track handed to the screen-share stream.
    audioTrack: MediaStreamTrack;
    bitsPerSample: number;
    // Identifies the main-process capture this pipeline consumes.
    captureId: string;
    channelCount: number;
    mediaDestination: MediaStreamAudioDestinationNode;
    // Scheduled start time (audio-context clock) for the next buffer.
    nextStartTime: number;
    // Bytes carried over between chunks that did not fill a whole frame.
    pendingBytes: Uint8Array;
    sampleRate: number;
    // Teardown callbacks for the IPC chunk/ended subscriptions.
    unsubscribeChunk: () => void;
    unsubscribeEnded: () => void;
}

/**
 * Late-bound collaborators for {@link LinuxElectronScreenShareCapture}.
 */
interface LinuxElectronScreenShareCaptureDependencies {
    // Returns the Electron preload bridge, or null outside Electron.
    getElectronApi(): ScreenShareElectronApi | null;
    // Invoked when the monitor capture ends unexpectedly.
    onCaptureEnded(): void;
    // Delegates the actual display (video) capture to another backend.
    startDisplayMedia(options: ScreenShareStartOptions, preset: ScreenShareQualityPreset): Promise<MediaStream>;
}
/**
 * Linux Electron screen-share capture path.
 *
 * Video is captured with the browser's getDisplayMedia (delegated via
 * dependencies.startDisplayMedia) while system audio is routed by the
 * Electron main process and streamed back to the renderer as raw PCM
 * chunks. The chunks are scheduled onto a WebAudio graph whose
 * MediaStreamAudioDestinationNode provides the audio track merged with
 * the captured video.
 */
export class LinuxElectronScreenShareCapture {
  /** True while main-process audio routing is active and must be reset on stop. */
  private audioRoutingActive = false;
  /** In-flight teardown, if any; prevents overlapping resets and is awaitable. */
  private audioRoutingResetPromise: Promise<void> | null = null;
  /** Renderer-side pipeline turning PCM chunks into a live MediaStream track. */
  private monitorAudioPipeline: LinuxScreenShareMonitorAudioPipeline | null = null;

  constructor(
    private readonly logger: WebRTCLogger,
    private readonly dependencies: LinuxElectronScreenShareCaptureDependencies
  ) {}

  /**
   * Whether this capture path can run: needs a browser context, the complete
   * Linux screen-share Electron API surface, and a Linux user agent.
   */
  isSupported(): boolean {
    if (typeof window === 'undefined' || typeof navigator === 'undefined') {
      return false;
    }
    const electronApi = this.dependencies.getElectronApi();
    const platformHint = `${navigator.userAgent} ${navigator.platform}`;
    return !!electronApi?.prepareLinuxScreenShareAudioRouting
      && !!electronApi?.activateLinuxScreenShareAudioRouting
      && !!electronApi?.deactivateLinuxScreenShareAudioRouting
      && !!electronApi?.startLinuxScreenShareMonitorCapture
      && !!electronApi?.stopLinuxScreenShareMonitorCapture
      && !!electronApi?.onLinuxScreenShareMonitorAudioChunk
      && !!electronApi?.onLinuxScreenShareMonitorAudioEnded
      && /linux/i.test(platformHint);
  }

  /** Wait for a previously scheduled teardown (if any) to finish. */
  async awaitPendingReset(): Promise<void> {
    if (!this.audioRoutingResetPromise) {
      return;
    }
    await this.audioRoutingResetPromise;
  }

  /**
   * Fire-and-forget teardown of audio routing and monitor capture.
   * No-op when routing is inactive or a reset is already pending.
   */
  scheduleReset(): void {
    if (!this.audioRoutingActive || this.audioRoutingResetPromise) {
      return;
    }
    this.audioRoutingResetPromise = this.resetAudioRouting()
      .catch((error) => {
        this.logger.warn('Failed to reset Linux Electron audio routing', error);
      })
      .finally(() => {
        this.audioRoutingResetPromise = null;
      });
  }

  /**
   * Start a Linux screen share: prepare and activate main-process audio
   * routing, capture video only via getDisplayMedia, then attach an audio
   * track fed from the screen-share monitor capture. On any failure the
   * partially created capture and the routing are torn down before rethrow.
   */
  async startCapture(
    options: ScreenShareStartOptions,
    preset: ScreenShareQualityPreset
  ): Promise<MediaStream> {
    const electronApi = this.getRequiredElectronApi();
    const routingInfo = await electronApi.prepareLinuxScreenShareAudioRouting();
    this.assertAudioRoutingReady(routingInfo, 'Linux Electron audio routing is unavailable.');
    let desktopStream: MediaStream | null = null;
    try {
      const activation = await electronApi.activateLinuxScreenShareAudioRouting();
      this.assertAudioRoutingReady(activation, 'Failed to activate Linux Electron audio routing.');
      if (!activation.active) {
        throw new Error(activation.reason || 'Failed to activate Linux Electron audio routing.');
      }
      // Video only: system audio arrives separately through the monitor capture.
      desktopStream = await this.dependencies.startDisplayMedia({
        ...options,
        includeSystemAudio: false
      }, preset);
      const { audioTrack, captureInfo } = await this.startMonitorTrack();
      const stream = new MediaStream([...desktopStream.getVideoTracks(), audioTrack]);
      // Drop any browser-provided audio tracks; the monitor track replaces them.
      desktopStream.getAudioTracks().forEach((track) => track.stop());
      this.audioRoutingActive = true;
      this.logger.info('Linux Electron screen-share audio routing enabled', {
        screenShareMonitorSourceName: captureInfo.sourceName,
        voiceSinkName: activation.voiceSinkName
      });
      return stream;
    } catch (error) {
      desktopStream?.getTracks().forEach((track) => track.stop());
      await this.resetAudioRouting();
      throw error;
    }
  }

  /**
   * Narrow the optional Electron API surface to the Linux routing subset,
   * throwing when any required entry point is missing.
   */
  private getRequiredElectronApi(): Required<Pick<
    ScreenShareElectronApi,
    | 'prepareLinuxScreenShareAudioRouting'
    | 'activateLinuxScreenShareAudioRouting'
    | 'deactivateLinuxScreenShareAudioRouting'
    | 'startLinuxScreenShareMonitorCapture'
    | 'stopLinuxScreenShareMonitorCapture'
    | 'onLinuxScreenShareMonitorAudioChunk'
    | 'onLinuxScreenShareMonitorAudioEnded'
  >> {
    const electronApi = this.dependencies.getElectronApi();
    if (!electronApi?.prepareLinuxScreenShareAudioRouting
      || !electronApi.activateLinuxScreenShareAudioRouting
      || !electronApi.deactivateLinuxScreenShareAudioRouting
      || !electronApi.startLinuxScreenShareMonitorCapture
      || !electronApi.stopLinuxScreenShareMonitorCapture
      || !electronApi.onLinuxScreenShareMonitorAudioChunk
      || !electronApi.onLinuxScreenShareMonitorAudioEnded) {
      throw new Error('Linux Electron audio routing is unavailable.');
    }
    return {
      prepareLinuxScreenShareAudioRouting: electronApi.prepareLinuxScreenShareAudioRouting,
      activateLinuxScreenShareAudioRouting: electronApi.activateLinuxScreenShareAudioRouting,
      deactivateLinuxScreenShareAudioRouting: electronApi.deactivateLinuxScreenShareAudioRouting,
      startLinuxScreenShareMonitorCapture: electronApi.startLinuxScreenShareMonitorCapture,
      stopLinuxScreenShareMonitorCapture: electronApi.stopLinuxScreenShareMonitorCapture,
      onLinuxScreenShareMonitorAudioChunk: electronApi.onLinuxScreenShareMonitorAudioChunk,
      onLinuxScreenShareMonitorAudioEnded: electronApi.onLinuxScreenShareMonitorAudioEnded
    };
  }

  /**
   * Throw unless the main process reports routing as available AND the
   * monitor-capture IPC is present (absent until the desktop app is
   * restarted on a freshly updated main process).
   */
  private assertAudioRoutingReady(
    routingInfo: LinuxScreenShareAudioRoutingInfo,
    unavailableReason: string
  ): void {
    if (!routingInfo.available) {
      throw new Error(routingInfo.reason || unavailableReason);
    }
    if (!routingInfo.monitorCaptureSupported) {
      throw new Error('Linux screen-share monitor capture requires restarting the desktop app so the new Electron main process can load.');
    }
  }

  /**
   * Best-effort teardown: dispose the renderer pipeline, stop the monitor
   * capture, then deactivate main-process routing. Failures are logged,
   * never rethrown, so teardown always runs to completion.
   */
  private async resetAudioRouting(): Promise<void> {
    const electronApi = this.dependencies.getElectronApi();
    // Read the capture id before disposal clears the pipeline reference.
    const captureId = this.monitorAudioPipeline?.captureId;
    this.audioRoutingActive = false;
    this.disposeMonitorAudioPipeline();
    try {
      if (captureId && electronApi?.stopLinuxScreenShareMonitorCapture) {
        await electronApi.stopLinuxScreenShareMonitorCapture(captureId);
      }
    } catch (error) {
      this.logger.warn('Failed to stop Linux screen-share monitor capture', error);
    }
    try {
      if (electronApi?.deactivateLinuxScreenShareAudioRouting) {
        await electronApi.deactivateLinuxScreenShareAudioRouting();
      }
    } catch (error) {
      this.logger.warn('Failed to deactivate Linux Electron audio routing', error);
    }
  }

  /**
   * Start the main-process monitor capture and build the renderer audio
   * pipeline that converts its PCM chunks into a MediaStream audio track.
   *
   * Chunk/ended events that arrive before the pipeline exists are queued per
   * capture id and replayed (or surfaced as a startup error) once the
   * pipeline is registered. On failure, the IPC listeners, the capture and
   * any already-created AudioContext are all released before rethrowing.
   */
  private async startMonitorTrack(): Promise<{
    audioTrack: MediaStreamTrack;
    captureInfo: LinuxScreenShareMonitorCaptureInfo;
  }> {
    const electronApi = this.dependencies.getElectronApi();
    if (!electronApi?.startLinuxScreenShareMonitorCapture
      || !electronApi?.stopLinuxScreenShareMonitorCapture
      || !electronApi?.onLinuxScreenShareMonitorAudioChunk
      || !electronApi?.onLinuxScreenShareMonitorAudioEnded) {
      throw new Error('Linux screen-share monitor capture is unavailable.');
    }
    const queuedChunksByCaptureId = new Map<string, Uint8Array[]>();
    const queuedEndedReasons = new Map<string, string | undefined>();
    let pipeline: LinuxScreenShareMonitorAudioPipeline | null = null;
    let captureInfo: LinuxScreenShareMonitorCaptureInfo | null = null;
    // Tracked separately so a failure after context creation but before the
    // pipeline is registered can still close the context (fixes a leak where
    // e.g. a missing audio track left the AudioContext open forever).
    let audioContext: AudioContext | null = null;
    const queueChunk = (captureId: string, chunk: Uint8Array): void => {
      const queuedChunks = queuedChunksByCaptureId.get(captureId) || [];
      // Copy: the IPC buffer may be reused by the sender.
      queuedChunks.push(this.copyBytes(chunk));
      queuedChunksByCaptureId.set(captureId, queuedChunks);
    };
    const onChunk = (payload: LinuxScreenShareMonitorAudioChunkPayload): void => {
      if (!pipeline || payload.captureId !== pipeline.captureId) {
        queueChunk(payload.captureId, payload.chunk);
        return;
      }
      this.handleMonitorAudioChunk(pipeline, payload.chunk);
    };
    const onEnded = (payload: LinuxScreenShareMonitorAudioEndedPayload): void => {
      if (!pipeline || payload.captureId !== pipeline.captureId) {
        queuedEndedReasons.set(payload.captureId, payload.reason);
        return;
      }
      this.logger.warn('Linux screen-share monitor capture ended', payload);
      this.dependencies.onCaptureEnded();
    };
    const unsubscribeChunk = electronApi.onLinuxScreenShareMonitorAudioChunk(onChunk);
    const unsubscribeEnded = electronApi.onLinuxScreenShareMonitorAudioEnded(onEnded);
    try {
      captureInfo = await electronApi.startLinuxScreenShareMonitorCapture();
      audioContext = new AudioContext({ sampleRate: captureInfo.sampleRate });
      const mediaDestination = audioContext.createMediaStreamDestination();
      await audioContext.resume();
      const audioTrack = mediaDestination.stream.getAudioTracks()[0];
      if (!audioTrack) {
        throw new Error('Renderer audio pipeline did not produce a screen-share monitor track.');
      }
      pipeline = {
        audioContext,
        audioTrack,
        bitsPerSample: captureInfo.bitsPerSample,
        captureId: captureInfo.captureId,
        channelCount: captureInfo.channelCount,
        mediaDestination,
        // Small scheduling lead so the first buffers do not start in the past.
        nextStartTime: audioContext.currentTime + 0.05,
        pendingBytes: new Uint8Array(0),
        sampleRate: captureInfo.sampleRate,
        unsubscribeChunk,
        unsubscribeEnded
      };
      this.monitorAudioPipeline = pipeline;
      const activeCaptureId = captureInfo.captureId;
      audioTrack.addEventListener('ended', () => {
        // Ignore 'ended' from a stale pipeline after a restart.
        if (this.monitorAudioPipeline?.captureId === activeCaptureId) {
          this.dependencies.onCaptureEnded();
        }
      }, { once: true });
      // Replay audio that arrived while the pipeline was being built.
      const queuedChunks = queuedChunksByCaptureId.get(captureInfo.captureId) || [];
      const activePipeline = pipeline;
      queuedChunks.forEach((chunk) => {
        this.handleMonitorAudioChunk(activePipeline, chunk);
      });
      queuedChunksByCaptureId.delete(captureInfo.captureId);
      if (queuedEndedReasons.has(captureInfo.captureId)) {
        throw new Error(queuedEndedReasons.get(captureInfo.captureId)
          || 'Linux screen-share monitor capture ended before audio initialisation completed.');
      }
      return {
        audioTrack,
        captureInfo
      };
    } catch (error) {
      if (pipeline) {
        this.disposeMonitorAudioPipeline(pipeline.captureId);
      } else {
        unsubscribeChunk();
        unsubscribeEnded();
        // The pipeline never registered, so dispose cannot close the
        // context for us — close it here to avoid leaking it.
        if (audioContext) {
          void audioContext.close().catch((closeError) => {
            this.logger.warn('Failed to close Linux screen-share monitor audio context', closeError);
          });
        }
      }
      try {
        await electronApi.stopLinuxScreenShareMonitorCapture(captureInfo?.captureId);
      } catch (stopError) {
        this.logger.warn('Failed to stop Linux screen-share monitor capture after startup failure', stopError);
      }
      throw error;
    }
  }

  /**
   * Release the active pipeline: unsubscribe IPC listeners, stop the track,
   * drop buffered bytes, and close the AudioContext. When captureId is given,
   * only disposes if it matches the active pipeline (guards stale callers).
   */
  private disposeMonitorAudioPipeline(captureId?: string): void {
    if (!this.monitorAudioPipeline) {
      return;
    }
    if (captureId && captureId !== this.monitorAudioPipeline.captureId) {
      return;
    }
    const pipeline = this.monitorAudioPipeline;
    this.monitorAudioPipeline = null;
    pipeline.unsubscribeChunk();
    pipeline.unsubscribeEnded();
    pipeline.audioTrack.stop();
    pipeline.pendingBytes = new Uint8Array(0);
    void pipeline.audioContext.close().catch((error) => {
      this.logger.warn('Failed to close Linux screen-share monitor audio context', error);
    });
  }

  /**
   * Schedule one PCM chunk onto the WebAudio graph.
   *
   * Bytes that do not complete a full frame are held in pendingBytes until
   * the next chunk arrives. Only 16-bit samples are supported; other sizes
   * are logged and dropped.
   */
  private handleMonitorAudioChunk(
    pipeline: LinuxScreenShareMonitorAudioPipeline,
    chunk: Uint8Array
  ): void {
    if (pipeline.bitsPerSample !== 16) {
      this.logger.warn('Unsupported Linux screen-share monitor capture sample size', {
        bitsPerSample: pipeline.bitsPerSample,
        captureId: pipeline.captureId
      });
      return;
    }
    const bytesPerSample = pipeline.bitsPerSample / 8;
    const bytesPerFrame = bytesPerSample * pipeline.channelCount;
    if (!Number.isFinite(bytesPerFrame) || bytesPerFrame <= 0) {
      return;
    }
    const combinedBytes = this.concatBytes(pipeline.pendingBytes, chunk);
    const completeByteLength = combinedBytes.byteLength - (combinedBytes.byteLength % bytesPerFrame);
    if (completeByteLength <= 0) {
      // Not even one whole frame yet; keep accumulating.
      pipeline.pendingBytes = combinedBytes;
      return;
    }
    const completeBytes = combinedBytes.subarray(0, completeByteLength);
    pipeline.pendingBytes = this.copyBytes(combinedBytes.subarray(completeByteLength));
    if (pipeline.audioContext.state !== 'running') {
      void pipeline.audioContext.resume().catch((error) => {
        this.logger.warn('Failed to resume Linux screen-share monitor audio context', error);
      });
    }
    const frameCount = completeByteLength / bytesPerFrame;
    const audioBuffer = this.createAudioBuffer(pipeline, completeBytes, frameCount);
    const source = pipeline.audioContext.createBufferSource();
    source.buffer = audioBuffer;
    source.connect(pipeline.mediaDestination);
    source.onended = () => {
      source.disconnect();
    };
    // Schedule buffers back-to-back; 20ms lead avoids starting in the past.
    const now = pipeline.audioContext.currentTime;
    const startTime = Math.max(pipeline.nextStartTime, now + 0.02);
    source.start(startTime);
    pipeline.nextStartTime = startTime + audioBuffer.duration;
  }

  /**
   * De-interleave little-endian signed 16-bit PCM into an AudioBuffer,
   * scaling each sample to [-1, 1).
   */
  private createAudioBuffer(
    pipeline: LinuxScreenShareMonitorAudioPipeline,
    bytes: Uint8Array,
    frameCount: number
  ): AudioBuffer {
    const audioBuffer = pipeline.audioContext.createBuffer(pipeline.channelCount, frameCount, pipeline.sampleRate);
    const sampleData = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
    const channelData = Array.from(
      { length: pipeline.channelCount },
      (_, channelIndex) => audioBuffer.getChannelData(channelIndex)
    );
    const bytesPerSample = pipeline.bitsPerSample / 8;
    const bytesPerFrame = bytesPerSample * pipeline.channelCount;
    for (let frameIndex = 0; frameIndex < frameCount; frameIndex += 1) {
      const frameOffset = frameIndex * bytesPerFrame;
      for (let channelIndex = 0; channelIndex < pipeline.channelCount; channelIndex += 1) {
        const sampleOffset = frameOffset + (channelIndex * bytesPerSample);
        channelData[channelIndex][frameIndex] = sampleData.getInt16(sampleOffset, true) / 32768;
      }
    }
    return audioBuffer;
  }

  /** Concatenate two byte arrays; always returns a fresh array. */
  private concatBytes(first: Uint8Array, second: Uint8Array): Uint8Array {
    if (first.byteLength === 0) {
      return this.copyBytes(second);
    }
    if (second.byteLength === 0) {
      return this.copyBytes(first);
    }
    const combined = new Uint8Array(first.byteLength + second.byteLength);
    combined.set(first, 0);
    combined.set(second, first.byteLength);
    return combined;
  }

  /** Defensive copy so callers never hold a view into a shared IPC buffer. */
  private copyBytes(bytes: Uint8Array): Uint8Array {
    return bytes.byteLength > 0 ? new Uint8Array(bytes) : new Uint8Array(0);
  }
}

View File

@@ -0,0 +1,80 @@
/** A capturable desktop surface (screen or window) reported by Electron. */
export interface DesktopSource {
  id: string;
  name: string;
  /** Preview image for source pickers — presumably a data URL; confirm in preload. */
  thumbnail: string;
}
/** The user's (or default) choice from the desktop-source picker. */
export interface ElectronDesktopSourceSelection {
  includeSystemAudio: boolean;
  source: DesktopSource;
}
/** Result of an Electron desktopCapturer capture attempt. */
export interface ElectronDesktopCaptureResult {
  /** Whether the returned stream actually carries system audio. */
  includeSystemAudio: boolean;
  stream: MediaStream;
}
/** Main-process report on Linux screen-share audio routing state. */
export interface LinuxScreenShareAudioRoutingInfo {
  available: boolean;
  active: boolean;
  monitorCaptureSupported: boolean;
  screenShareSinkName: string;
  screenShareMonitorSourceName: string;
  voiceSinkName: string;
  /** Human-readable explanation when unavailable or activation failed. */
  reason?: string;
}
/** PCM format and identity of a started monitor capture. */
export interface LinuxScreenShareMonitorCaptureInfo {
  bitsPerSample: number;
  captureId: string;
  channelCount: number;
  sampleRate: number;
  sourceName: string;
}
/** One raw PCM chunk streamed from the main process. */
export interface LinuxScreenShareMonitorAudioChunkPayload {
  captureId: string;
  chunk: Uint8Array;
}
/** Notification that a monitor capture stopped producing audio. */
export interface LinuxScreenShareMonitorAudioEndedPayload {
  captureId: string;
  reason?: string;
}
/**
 * Preload-exposed Electron API surface used by the screen-share code.
 * All members are optional: older builds may lack some entry points.
 * The on* subscriptions return an unsubscribe function.
 */
export interface ScreenShareElectronApi {
  getSources?: () => Promise<DesktopSource[]>;
  prepareLinuxScreenShareAudioRouting?: () => Promise<LinuxScreenShareAudioRoutingInfo>;
  activateLinuxScreenShareAudioRouting?: () => Promise<LinuxScreenShareAudioRoutingInfo>;
  deactivateLinuxScreenShareAudioRouting?: () => Promise<boolean>;
  startLinuxScreenShareMonitorCapture?: () => Promise<LinuxScreenShareMonitorCaptureInfo>;
  stopLinuxScreenShareMonitorCapture?: (captureId?: string) => Promise<boolean>;
  onLinuxScreenShareMonitorAudioChunk?: (listener: (payload: LinuxScreenShareMonitorAudioChunkPayload) => void) => () => void;
  onLinuxScreenShareMonitorAudioEnded?: (listener: (payload: LinuxScreenShareMonitorAudioEndedPayload) => void) => () => void;
}
/** Chromium desktop-capture video constraints (legacy 'mandatory' form). */
export type ElectronDesktopVideoConstraint = MediaTrackConstraints & {
  mandatory: {
    chromeMediaSource: 'desktop';
    chromeMediaSourceId: string;
    maxWidth: number;
    maxHeight: number;
    maxFrameRate: number;
  };
};
/** Chromium desktop-capture audio constraints (legacy 'mandatory' form). */
export type ElectronDesktopAudioConstraint = MediaTrackConstraints & {
  mandatory: {
    chromeMediaSource: 'desktop';
    chromeMediaSourceId: string;
  };
};
/** getUserMedia constraints for an Electron desktop capture. */
export interface ElectronDesktopMediaStreamConstraints extends MediaStreamConstraints {
  video: ElectronDesktopVideoConstraint;
  audio?: false | ElectronDesktopAudioConstraint;
}
/** Window augmented with the Electron preload bridge, when present. */
export type ScreenShareWindow = Window & {
  electronAPI?: ScreenShareElectronApi;
};

View File

@@ -9,8 +9,7 @@ import {
TRACK_KIND_AUDIO,
TRACK_KIND_VIDEO,
TRANSCEIVER_SEND_RECV,
TRANSCEIVER_RECV_ONLY,
ELECTRON_ENTIRE_SCREEN_SOURCE_NAME
TRANSCEIVER_RECV_ONLY
} from './webrtc.constants';
import {
DEFAULT_SCREEN_SHARE_START_OPTIONS,
@@ -18,6 +17,10 @@ import {
ScreenShareQualityPreset,
ScreenShareStartOptions
} from './screen-share.config';
import { BrowserScreenShareCapture } from './screen-share-platforms/browser-screen-share.capture';
import { DesktopElectronScreenShareCapture } from './screen-share-platforms/desktop-electron-screen-share.capture';
import { LinuxElectronScreenShareCapture } from './screen-share-platforms/linux-electron-screen-share.capture';
import { ScreenShareElectronApi, ScreenShareWindow } from './screen-share-platforms/shared';
/**
* Callbacks the ScreenShareManager needs from the owning service.
@@ -45,103 +48,9 @@ export interface LocalScreenShareState {
includeSystemAudio: boolean;
stream: MediaStream | null;
suppressRemotePlayback: boolean;
forceDefaultRemotePlaybackOutput: boolean;
}
/** Main-process report on Linux screen-share audio routing state. */
interface LinuxScreenShareAudioRoutingInfo {
  available: boolean;
  active: boolean;
  monitorCaptureSupported: boolean;
  screenShareSinkName: string;
  screenShareMonitorSourceName: string;
  voiceSinkName: string;
  /** Human-readable explanation when unavailable or activation failed. */
  reason?: string;
}
/** PCM format and identity of a started monitor capture. */
interface LinuxScreenShareMonitorCaptureInfo {
  bitsPerSample: number;
  captureId: string;
  channelCount: number;
  sampleRate: number;
  sourceName: string;
}
/** One raw PCM chunk streamed from the main process. */
interface LinuxScreenShareMonitorAudioChunkPayload {
  captureId: string;
  chunk: Uint8Array;
}
/** Notification that a monitor capture stopped producing audio. */
interface LinuxScreenShareMonitorAudioEndedPayload {
  captureId: string;
  reason?: string;
}
/** Renderer-side WebAudio pipeline feeding monitor PCM into a track. */
interface LinuxScreenShareMonitorAudioPipeline {
  audioContext: AudioContext;
  audioTrack: MediaStreamTrack;
  bitsPerSample: number;
  captureId: string;
  channelCount: number;
  mediaDestination: MediaStreamAudioDestinationNode;
  /** WebAudio timestamp at which the next buffer should start. */
  nextStartTime: number;
  /** Bytes left over from the last chunk that did not fill a whole frame. */
  pendingBytes: Uint8Array;
  sampleRate: number;
  unsubscribeChunk: () => void;
  unsubscribeEnded: () => void;
}
/** A capturable desktop surface (screen or window) reported by Electron. */
export interface DesktopSource {
  id: string;
  name: string;
  thumbnail: string;
}
/** The user's (or default) choice from the desktop-source picker. */
interface ElectronDesktopSourceSelection {
  includeSystemAudio: boolean;
  source: DesktopSource;
}
/** Result of an Electron desktopCapturer capture attempt. */
interface ElectronDesktopCaptureResult {
  includeSystemAudio: boolean;
  stream: MediaStream;
}
/**
 * Preload-exposed Electron API surface; all members optional because
 * older builds may lack some entry points.
 */
interface ScreenShareElectronApi {
  getSources?: () => Promise<DesktopSource[]>;
  prepareLinuxScreenShareAudioRouting?: () => Promise<LinuxScreenShareAudioRoutingInfo>;
  activateLinuxScreenShareAudioRouting?: () => Promise<LinuxScreenShareAudioRoutingInfo>;
  deactivateLinuxScreenShareAudioRouting?: () => Promise<boolean>;
  startLinuxScreenShareMonitorCapture?: () => Promise<LinuxScreenShareMonitorCaptureInfo>;
  stopLinuxScreenShareMonitorCapture?: (captureId?: string) => Promise<boolean>;
  onLinuxScreenShareMonitorAudioChunk?: (listener: (payload: LinuxScreenShareMonitorAudioChunkPayload) => void) => () => void;
  onLinuxScreenShareMonitorAudioEnded?: (listener: (payload: LinuxScreenShareMonitorAudioEndedPayload) => void) => () => void;
}
/** Chromium desktop-capture video constraints (legacy 'mandatory' form). */
type ElectronDesktopVideoConstraint = MediaTrackConstraints & {
  mandatory: {
    chromeMediaSource: 'desktop';
    chromeMediaSourceId: string;
    maxWidth: number;
    maxHeight: number;
    maxFrameRate: number;
  };
};
/** Chromium desktop-capture audio constraints (legacy 'mandatory' form). */
type ElectronDesktopAudioConstraint = MediaTrackConstraints & {
  mandatory: {
    chromeMediaSource: 'desktop';
    chromeMediaSourceId: string;
  };
};
/** getUserMedia constraints for an Electron desktop capture. */
interface ElectronDesktopMediaStreamConstraints extends MediaStreamConstraints {
  video: ElectronDesktopVideoConstraint;
  audio?: false | ElectronDesktopAudioConstraint;
}
/** Window augmented with the Electron preload bridge, when present. */
type ScreenShareWindow = Window & {
  electronAPI?: ScreenShareElectronApi;
};
export class ScreenShareManager {
/** The active screen-capture stream. */
private activeScreenStream: MediaStream | null = null;
@@ -155,22 +64,39 @@ export class ScreenShareManager {
/** Remote peers that explicitly requested screen-share video. */
private readonly requestedViewerPeerIds = new Set<string>();
/** Browser `getDisplayMedia` capture path. */
private readonly browserScreenShareCapture: BrowserScreenShareCapture;
/** Desktop Electron capture path for non-Linux desktop builds. */
private readonly desktopElectronScreenShareCapture: DesktopElectronScreenShareCapture;
/** Linux Electron screen/audio capture path with isolated audio routing. */
private readonly linuxElectronScreenShareCapture: LinuxElectronScreenShareCapture;
/** Whether screen sharing is currently active. */
private isScreenActive = false;
/** Whether Linux-specific Electron audio routing is currently active. */
private linuxElectronAudioRoutingActive = false;
/** Pending teardown of Linux-specific Electron audio routing. */
private linuxAudioRoutingResetPromise: Promise<void> | null = null;
/** Renderer-side audio pipeline for Linux monitor-source capture. */
private linuxMonitorAudioPipeline: LinuxScreenShareMonitorAudioPipeline | null = null;
constructor(
private readonly logger: WebRTCLogger,
private callbacks: ScreenShareCallbacks
) {}
) {
this.browserScreenShareCapture = new BrowserScreenShareCapture(this.logger);
this.desktopElectronScreenShareCapture = new DesktopElectronScreenShareCapture(this.logger, {
getElectronApi: () => this.getElectronApi(),
getSelectDesktopSource: () => this.callbacks.selectDesktopSource
});
this.linuxElectronScreenShareCapture = new LinuxElectronScreenShareCapture(this.logger, {
getElectronApi: () => this.getElectronApi(),
onCaptureEnded: () => {
if (this.isScreenActive) {
this.stopScreenShare();
}
},
startDisplayMedia: async (options, preset) =>
await this.browserScreenShareCapture.startCapture(options, preset)
});
}
/**
* Replace the callback set at runtime.
@@ -207,7 +133,7 @@ export class ScreenShareManager {
...options
};
const preset = SCREEN_SHARE_QUALITY_PRESETS[shareOptions.quality];
const electronDesktopCaptureAvailable = this.isElectronDesktopCaptureAvailable();
const electronDesktopCaptureAvailable = this.desktopElectronScreenShareCapture.isAvailable();
let captureMethod: ScreenShareCaptureMethod | null = null;
@@ -218,13 +144,13 @@ export class ScreenShareManager {
this.stopScreenShare();
}
await this.awaitPendingLinuxAudioRoutingReset();
await this.linuxElectronScreenShareCapture.awaitPendingReset();
this.activeScreenStream = null;
if (shareOptions.includeSystemAudio && this.isLinuxElectronAudioRoutingSupported()) {
if (shareOptions.includeSystemAudio && this.linuxElectronScreenShareCapture.isSupported()) {
try {
this.activeScreenStream = await this.startWithLinuxElectronAudioRouting(shareOptions, preset);
this.activeScreenStream = await this.linuxElectronScreenShareCapture.startCapture(shareOptions, preset);
captureMethod = 'linux-electron';
} catch (error) {
this.rethrowIfScreenShareAborted(error);
@@ -234,7 +160,7 @@ export class ScreenShareManager {
if (!this.activeScreenStream && shareOptions.includeSystemAudio) {
try {
this.activeScreenStream = await this.startWithDisplayMedia(shareOptions, preset);
this.activeScreenStream = await this.browserScreenShareCapture.startCapture(shareOptions, preset);
captureMethod = 'display-media';
if (this.activeScreenStream.getAudioTracks().length === 0) {
@@ -246,6 +172,7 @@ export class ScreenShareManager {
'getDisplayMedia did not provide system audio; '
+ 'continuing without system audio to preserve mic stream'
);
shareOptions.includeSystemAudio = false;
} else {
this.logger.warn('getDisplayMedia did not provide system audio; trying next capture method');
@@ -262,7 +189,7 @@ export class ScreenShareManager {
if (!this.activeScreenStream && electronDesktopCaptureAvailable) {
try {
const electronCapture = await this.startWithElectronDesktopCapturer(shareOptions, preset);
const electronCapture = await this.desktopElectronScreenShareCapture.startCapture(shareOptions, preset);
this.activeScreenStream = electronCapture.stream;
shareOptions.includeSystemAudio = electronCapture.includeSystemAudio;
@@ -274,7 +201,7 @@ export class ScreenShareManager {
}
if (!this.activeScreenStream) {
this.activeScreenStream = await this.startWithDisplayMedia(shareOptions, preset);
this.activeScreenStream = await this.browserScreenShareCapture.startCapture(shareOptions, preset);
captureMethod = 'display-media';
}
@@ -321,7 +248,7 @@ export class ScreenShareManager {
this.activeScreenStream = null;
}
this.scheduleLinuxAudioRoutingReset();
this.linuxElectronScreenShareCapture.scheduleReset();
this.screenAudioStream = null;
this.activeScreenPreset = null;
@@ -403,26 +330,6 @@ export class ScreenShareManager {
: null;
}
/**
 * The desktopCapturer path is usable when the preload API exposes
 * getSources and we are not on Linux Electron (which has its own path).
 */
private isElectronDesktopCaptureAvailable(): boolean {
  const hasSourceListing = !!this.getElectronApi()?.getSources;
  return hasSourceListing && !this.isLinuxElectron();
}
/** True when running inside Electron on a Linux platform (per user agent). */
private isLinuxElectron(): boolean {
  const electronApi = this.getElectronApi();
  if (!electronApi || typeof navigator === 'undefined') {
    return false;
  }
  const platformHint = `${navigator.userAgent} ${navigator.platform}`;
  return /linux/i.test(platformHint);
}
/** True when the Electron desktop-capture path is present on Windows. */
private isWindowsElectron(): boolean {
  if (typeof navigator === 'undefined' || !this.isElectronDesktopCaptureAvailable()) {
    return false;
  }
  const platformHint = `${navigator.userAgent} ${navigator.platform}`;
  return /win/i.test(platformHint);
}
private publishLocalScreenShareState(
includeSystemAudio: boolean,
captureMethod: ScreenShareCaptureMethod | null
@@ -433,66 +340,13 @@ export class ScreenShareManager {
includeSystemAudio: this.isScreenActive ? includeSystemAudio : false,
stream: this.isScreenActive ? this.activeScreenStream : null,
suppressRemotePlayback: this.isScreenActive
&& this.shouldSuppressRemotePlaybackDuringShare(includeSystemAudio, captureMethod)
&& this.desktopElectronScreenShareCapture.shouldSuppressRemotePlaybackDuringShare(includeSystemAudio),
forceDefaultRemotePlaybackOutput: this.isScreenActive
&& includeSystemAudio
&& captureMethod === 'linux-electron'
});
}
/**
 * Whether remote voice playback must be muted while sharing.
 * On Windows Electron, system-audio capture includes all output audio,
 * so playback is suppressed regardless of the capture method used.
 */
private shouldSuppressRemotePlaybackDuringShare(
  includeSystemAudio: boolean,
  _captureMethod: ScreenShareCaptureMethod | null
): boolean {
  if (!includeSystemAudio) {
    return false;
  }
  return this.isWindowsElectron();
}
/**
 * Narrow the optional Electron API surface to the Linux routing subset,
 * throwing when any required entry point is missing.
 */
private getRequiredLinuxElectronApi(): Required<Pick<
  ScreenShareElectronApi,
  | 'prepareLinuxScreenShareAudioRouting'
  | 'activateLinuxScreenShareAudioRouting'
  | 'deactivateLinuxScreenShareAudioRouting'
  | 'startLinuxScreenShareMonitorCapture'
  | 'stopLinuxScreenShareMonitorCapture'
  | 'onLinuxScreenShareMonitorAudioChunk'
  | 'onLinuxScreenShareMonitorAudioEnded'
>> {
  const electronApi = this.getElectronApi();
  const prepareLinuxScreenShareAudioRouting = electronApi?.prepareLinuxScreenShareAudioRouting;
  const activateLinuxScreenShareAudioRouting = electronApi?.activateLinuxScreenShareAudioRouting;
  const deactivateLinuxScreenShareAudioRouting = electronApi?.deactivateLinuxScreenShareAudioRouting;
  const startLinuxScreenShareMonitorCapture = electronApi?.startLinuxScreenShareMonitorCapture;
  const stopLinuxScreenShareMonitorCapture = electronApi?.stopLinuxScreenShareMonitorCapture;
  const onLinuxScreenShareMonitorAudioChunk = electronApi?.onLinuxScreenShareMonitorAudioChunk;
  const onLinuxScreenShareMonitorAudioEnded = electronApi?.onLinuxScreenShareMonitorAudioEnded;
  if (!prepareLinuxScreenShareAudioRouting
    || !activateLinuxScreenShareAudioRouting
    || !deactivateLinuxScreenShareAudioRouting
    || !startLinuxScreenShareMonitorCapture
    || !stopLinuxScreenShareMonitorCapture
    || !onLinuxScreenShareMonitorAudioChunk
    || !onLinuxScreenShareMonitorAudioEnded) {
    throw new Error('Linux Electron audio routing is unavailable.');
  }
  return {
    prepareLinuxScreenShareAudioRouting,
    activateLinuxScreenShareAudioRouting,
    deactivateLinuxScreenShareAudioRouting,
    startLinuxScreenShareMonitorCapture,
    stopLinuxScreenShareMonitorCapture,
    onLinuxScreenShareMonitorAudioChunk,
    onLinuxScreenShareMonitorAudioEnded
  };
}
private assertLinuxAudioRoutingReady(
routingInfo: LinuxScreenShareAudioRoutingInfo,
unavailableReason: string
): void {
if (!routingInfo.available) {
throw new Error(routingInfo.reason || unavailableReason);
}
if (!routingInfo.monitorCaptureSupported) {
throw new Error('Linux screen-share monitor capture requires restarting the desktop app so the new Electron main process can load.');
}
}
/**
* Create a dedicated stream for system audio captured alongside the screen.
*
@@ -571,6 +425,11 @@ export class ScreenShareManager {
}
peerData.screenVideoSender = videoSender;
if (typeof videoSender.setStreams === 'function') {
videoSender.setStreams(this.activeScreenStream);
}
videoSender.replaceTrack(screenVideoTrack)
.then(() => {
this.logger.info('screen video replaceTrack ok', { peerId });
@@ -601,6 +460,11 @@ export class ScreenShareManager {
}
peerData.screenAudioSender = screenAudioSender;
if (typeof screenAudioSender.setStreams === 'function') {
screenAudioSender.setStreams(this.activeScreenStream);
}
screenAudioSender.replaceTrack(screenAudioTrack)
.then(() => this.logger.info('screen audio replaceTrack ok', { peerId }))
.catch((error) => this.logger.error('screen audio replaceTrack failed', error));
@@ -644,109 +508,6 @@ export class ScreenShareManager {
this.callbacks.renegotiate(peerId);
}
/** Capture the screen via the browser's getDisplayMedia API. */
private async startWithDisplayMedia(
  options: ScreenShareStartOptions,
  preset: ScreenShareQualityPreset
): Promise<MediaStream> {
  const constraints = this.buildDisplayMediaConstraints(options, preset);
  this.logger.info('getDisplayMedia constraints', constraints);
  const mediaDevices = navigator.mediaDevices;
  if (!mediaDevices?.getDisplayMedia) {
    throw new Error('navigator.mediaDevices.getDisplayMedia is not available.');
  }
  return await mediaDevices.getDisplayMedia(constraints);
}
/**
 * Capture via Electron's desktopCapturer: list sources, resolve a
 * selection (possibly prompting the user), then open the stream with
 * getUserMedia desktop constraints.
 */
private async startWithElectronDesktopCapturer(
  options: ScreenShareStartOptions,
  preset: ScreenShareQualityPreset
): Promise<ElectronDesktopCaptureResult> {
  const electronApi = this.getElectronApi();
  if (!electronApi?.getSources) {
    throw new Error('Electron desktop capture is unavailable.');
  }
  const availableSources = await electronApi.getSources();
  const selection = await this.resolveElectronDesktopSource(availableSources, options.includeSystemAudio);
  if (!selection.source) {
    throw new Error('No desktop capture sources were available.');
  }
  this.logger.info('Selected Electron desktop source', {
    includeSystemAudio: selection.includeSystemAudio,
    sourceId: selection.source.id,
    sourceName: selection.source.name
  });
  const captureOptions = {
    ...options,
    includeSystemAudio: selection.includeSystemAudio
  };
  const electronConstraints = this.buildElectronDesktopConstraints(selection.source.id, captureOptions, preset);
  this.logger.info('desktopCapturer constraints', electronConstraints);
  if (!navigator.mediaDevices?.getUserMedia) {
    throw new Error('navigator.mediaDevices.getUserMedia is not available (requires HTTPS or localhost).');
  }
  const stream = await navigator.mediaDevices.getUserMedia(electronConstraints);
  return {
    includeSystemAudio: selection.includeSystemAudio,
    stream
  };
}
/**
 * Pick the desktop source to capture. The entire-screen source (or the
 * first after sorting) is the default; on Windows Electron with multiple
 * sources and a picker callback, the user chooses.
 *
 * @throws Error when no capture sources exist.
 */
private async resolveElectronDesktopSource(
  sources: DesktopSource[],
  includeSystemAudio: boolean
): Promise<ElectronDesktopSourceSelection> {
  const orderedSources = this.sortElectronDesktopSources(sources);
  // Guard before indexing so an empty list can never produce an undefined
  // default source (the original indexed orderedSources[0] first, which is
  // unsound under noUncheckedIndexedAccess).
  if (orderedSources.length === 0) {
    throw new Error('No desktop capture sources were available.');
  }
  const defaultSource = orderedSources.find((source) => source.name === ELECTRON_ENTIRE_SCREEN_SOURCE_NAME)
    ?? orderedSources[0];
  // Prompt only when a real choice exists and a picker was supplied.
  if (!this.isWindowsElectron() || orderedSources.length < 2 || !this.callbacks.selectDesktopSource) {
    return {
      includeSystemAudio,
      source: defaultSource
    };
  }
  return await this.callbacks.selectDesktopSource(orderedSources, { includeSystemAudio });
}
/** Order sources whole-screen first, then alphabetically by name. */
private sortElectronDesktopSources(sources: DesktopSource[]): DesktopSource[] {
  const compareSources = (left: DesktopSource, right: DesktopSource): number => {
    const byWeight = this.getElectronDesktopSourceWeight(left) - this.getElectronDesktopSourceWeight(right);
    return byWeight !== 0 ? byWeight : left.name.localeCompare(right.name);
  };
  return [...sources].sort(compareSources);
}
/** Whole-screen sources weigh 0 (sort first); window sources weigh 1. */
private getElectronDesktopSourceWeight(source: DesktopSource): number {
  const isWholeScreen = source.name === ELECTRON_ENTIRE_SCREEN_SOURCE_NAME
    || source.id.startsWith('screen');
  return isWholeScreen ? 0 : 1;
}
private isScreenShareSelectionAborted(error: unknown): boolean {
return error instanceof Error
&& (error.name === 'AbortError' || error.name === 'NotAllowedError');
@@ -758,425 +519,6 @@ export class ScreenShareManager {
}
}
/**
 * Linux routing support requires a browser context, every routing and
 * monitor-capture IPC entry point, and a Linux user agent.
 */
private isLinuxElectronAudioRoutingSupported(): boolean {
  if (typeof window === 'undefined' || typeof navigator === 'undefined') {
    return false;
  }
  const electronApi = this.getElectronApi();
  if (!electronApi) {
    return false;
  }
  const requiredEntryPoints = [
    electronApi.prepareLinuxScreenShareAudioRouting,
    electronApi.activateLinuxScreenShareAudioRouting,
    electronApi.deactivateLinuxScreenShareAudioRouting,
    electronApi.startLinuxScreenShareMonitorCapture,
    electronApi.stopLinuxScreenShareMonitorCapture,
    electronApi.onLinuxScreenShareMonitorAudioChunk,
    electronApi.onLinuxScreenShareMonitorAudioEnded
  ];
  if (requiredEntryPoints.some((entryPoint) => !entryPoint)) {
    return false;
  }
  return /linux/i.test(`${navigator.userAgent} ${navigator.platform}`);
}
/**
 * Start a Linux screen share with isolated PulseAudio routing: prepare and
 * activate main-process routing, capture video only via getDisplayMedia,
 * then attach an audio track fed from the screen-share monitor capture.
 * On any failure the partial capture and the routing are torn down
 * before rethrowing.
 */
private async startWithLinuxElectronAudioRouting(
  options: ScreenShareStartOptions,
  preset: ScreenShareQualityPreset
): Promise<MediaStream> {
  const electronApi = this.getRequiredLinuxElectronApi();
  const routingInfo = await electronApi.prepareLinuxScreenShareAudioRouting();
  this.assertLinuxAudioRoutingReady(routingInfo, 'Linux Electron audio routing is unavailable.');
  let desktopStream: MediaStream | null = null;
  try {
    const activation = await electronApi.activateLinuxScreenShareAudioRouting();
    this.assertLinuxAudioRoutingReady(activation, 'Failed to activate Linux Electron audio routing.');
    if (!activation.active) {
      throw new Error(activation.reason || 'Failed to activate Linux Electron audio routing.');
    }
    // Video only: system audio arrives separately through the monitor capture.
    desktopStream = await this.startWithDisplayMedia({
      ...options,
      includeSystemAudio: false
    }, preset);
    const { audioTrack, captureInfo } = await this.startLinuxScreenShareMonitorTrack();
    const stream = new MediaStream([...desktopStream.getVideoTracks(), audioTrack]);
    // Drop any browser-provided audio tracks; the monitor track replaces them.
    desktopStream.getAudioTracks().forEach((track) => track.stop());
    this.linuxElectronAudioRoutingActive = true;
    this.logger.info('Linux Electron screen-share audio routing enabled', {
      screenShareMonitorSourceName: captureInfo.sourceName,
      voiceSinkName: activation.voiceSinkName
    });
    return stream;
  } catch (error) {
    desktopStream?.getTracks().forEach((track) => track.stop());
    await this.resetLinuxElectronAudioRouting();
    throw error;
  }
}
private scheduleLinuxAudioRoutingReset(): void {
if (!this.linuxElectronAudioRoutingActive || this.linuxAudioRoutingResetPromise) {
return;
}
this.linuxAudioRoutingResetPromise = this.resetLinuxElectronAudioRouting()
.catch((error) => {
this.logger.warn('Failed to reset Linux Electron audio routing', error);
})
.finally(() => {
this.linuxAudioRoutingResetPromise = null;
});
}
private async awaitPendingLinuxAudioRoutingReset(): Promise<void> {
if (!this.linuxAudioRoutingResetPromise) {
return;
}
await this.linuxAudioRoutingResetPromise;
}
/**
 * Tears down the Linux audio-routing state: disposes the renderer-side
 * monitor pipeline, then best-effort stops the main-process capture and
 * deactivates routing. Each main-process step only logs on failure so a
 * partial teardown never throws.
 */
private async resetLinuxElectronAudioRouting(): Promise<void> {
  const api = this.getElectronApi();
  // Capture the id before the pipeline is disposed (disposal nulls it out).
  const pendingCaptureId = this.linuxMonitorAudioPipeline?.captureId;
  this.linuxElectronAudioRoutingActive = false;
  this.disposeLinuxScreenShareMonitorAudioPipeline();
  if (pendingCaptureId && api?.stopLinuxScreenShareMonitorCapture) {
    try {
      await api.stopLinuxScreenShareMonitorCapture(pendingCaptureId);
    } catch (error) {
      this.logger.warn('Failed to stop Linux screen-share monitor capture', error);
    }
  }
  if (api?.deactivateLinuxScreenShareAudioRouting) {
    try {
      await api.deactivateLinuxScreenShareAudioRouting();
    } catch (error) {
      this.logger.warn('Failed to deactivate Linux Electron audio routing', error);
    }
  }
}
/**
 * Starts the main-process PulseAudio monitor capture and builds a live
 * MediaStreamTrack from the PCM chunks it streams over IPC.
 *
 * The chunk/ended listeners are registered BEFORE the capture is started:
 * payloads that arrive while the pipeline object is still being
 * constructed are queued per captureId and replayed once the pipeline
 * exists, so startup audio is not dropped.
 *
 * @returns the renderer-side audio track plus the capture metadata
 *   (captureId, sampleRate, channelCount, bitsPerSample, sourceName).
 * @throws when the Electron bridge lacks the capture API, the renderer
 *   pipeline produces no track, or the capture ends during initialisation.
 */
private async startLinuxScreenShareMonitorTrack(): Promise<{
  audioTrack: MediaStreamTrack;
  captureInfo: LinuxScreenShareMonitorCaptureInfo;
}> {
  const electronApi = this.getElectronApi();
  if (!electronApi?.startLinuxScreenShareMonitorCapture
    || !electronApi?.stopLinuxScreenShareMonitorCapture
    || !electronApi?.onLinuxScreenShareMonitorAudioChunk
    || !electronApi?.onLinuxScreenShareMonitorAudioEnded) {
    throw new Error('Linux screen-share monitor capture is unavailable.');
  }
  // Buffers for events that arrive before `pipeline` is assigned below.
  const queuedChunksByCaptureId = new Map<string, Uint8Array[]>();
  const queuedEndedReasons = new Map<string, string | undefined>();
  let pipeline: LinuxScreenShareMonitorAudioPipeline | null = null;
  let captureInfo: LinuxScreenShareMonitorCaptureInfo | null = null;
  const queueChunk = (captureId: string, chunk: Uint8Array): void => {
    const queuedChunks = queuedChunksByCaptureId.get(captureId) || [];
    // Bytes are copied before queueing; NOTE(review): presumably because the
    // IPC layer may reuse the underlying buffer — confirm against the bridge.
    queuedChunks.push(this.copyLinuxMonitorAudioBytes(chunk));
    queuedChunksByCaptureId.set(captureId, queuedChunks);
  };
  const onChunk = (payload: LinuxScreenShareMonitorAudioChunkPayload): void => {
    // Queue until the matching pipeline exists; afterwards feed directly.
    if (!pipeline || payload.captureId !== pipeline.captureId) {
      queueChunk(payload.captureId, payload.chunk);
      return;
    }
    this.handleLinuxScreenShareMonitorAudioChunk(pipeline, payload.chunk);
  };
  const onEnded = (payload: LinuxScreenShareMonitorAudioEndedPayload): void => {
    if (!pipeline || payload.captureId !== pipeline.captureId) {
      // Remember early terminations so startup can fail loudly below.
      queuedEndedReasons.set(payload.captureId, payload.reason);
      return;
    }
    this.logger.warn('Linux screen-share monitor capture ended', payload);
    // The capture died mid-session: tear down the whole screen share.
    if (this.isScreenActive && this.linuxMonitorAudioPipeline?.captureId === payload.captureId) {
      this.stopScreenShare();
    }
  };
  const unsubscribeChunk = electronApi.onLinuxScreenShareMonitorAudioChunk(onChunk) as () => void;
  const unsubscribeEnded = electronApi.onLinuxScreenShareMonitorAudioEnded(onEnded) as () => void;
  try {
    captureInfo = await electronApi.startLinuxScreenShareMonitorCapture() as LinuxScreenShareMonitorCaptureInfo;
    // Context runs at the capture's sample rate, so decoded buffers can be
    // scheduled without resampling.
    const audioContext = new AudioContext({ sampleRate: captureInfo.sampleRate });
    const mediaDestination = audioContext.createMediaStreamDestination();
    await audioContext.resume();
    const audioTrack = mediaDestination.stream.getAudioTracks()[0];
    if (!audioTrack) {
      throw new Error('Renderer audio pipeline did not produce a screen-share monitor track.');
    }
    pipeline = {
      audioContext,
      audioTrack,
      bitsPerSample: captureInfo.bitsPerSample,
      captureId: captureInfo.captureId,
      channelCount: captureInfo.channelCount,
      mediaDestination,
      // 50 ms lead time before the first scheduled buffer.
      nextStartTime: audioContext.currentTime + 0.05,
      pendingBytes: new Uint8Array(0),
      sampleRate: captureInfo.sampleRate,
      unsubscribeChunk,
      unsubscribeEnded
    };
    this.linuxMonitorAudioPipeline = pipeline;
    const activeCaptureId = captureInfo.captureId;
    // If the renderer-side track itself ends, stop the share as well.
    audioTrack.addEventListener('ended', () => {
      if (this.isScreenActive && this.linuxMonitorAudioPipeline?.captureId === activeCaptureId) {
        this.stopScreenShare();
      }
    }, { once: true });
    // Replay audio that arrived while the pipeline was being constructed.
    const queuedChunks = queuedChunksByCaptureId.get(captureInfo.captureId) || [];
    const activePipeline = pipeline;
    queuedChunks.forEach((chunk) => {
      this.handleLinuxScreenShareMonitorAudioChunk(activePipeline, chunk);
    });
    queuedChunksByCaptureId.delete(captureInfo.captureId);
    // The capture already ended during startup: surface it as a failure.
    if (queuedEndedReasons.has(captureInfo.captureId)) {
      throw new Error(queuedEndedReasons.get(captureInfo.captureId)
        || 'Linux screen-share monitor capture ended before audio initialisation completed.');
    }
    return {
      audioTrack,
      captureInfo
    };
  } catch (error) {
    // Cleanup is asymmetric: once a pipeline exists its disposal removes the
    // IPC subscriptions; before that they must be removed manually.
    if (pipeline) {
      this.disposeLinuxScreenShareMonitorAudioPipeline(pipeline.captureId);
    } else {
      unsubscribeChunk();
      unsubscribeEnded();
    }
    try {
      await electronApi.stopLinuxScreenShareMonitorCapture(captureInfo?.captureId);
    } catch (stopError) {
      this.logger.warn('Failed to stop Linux screen-share monitor capture after startup failure', stopError);
    }
    throw error;
  }
}
private disposeLinuxScreenShareMonitorAudioPipeline(captureId?: string): void {
if (!this.linuxMonitorAudioPipeline) {
return;
}
if (captureId && captureId !== this.linuxMonitorAudioPipeline.captureId) {
return;
}
const pipeline = this.linuxMonitorAudioPipeline;
this.linuxMonitorAudioPipeline = null;
pipeline.unsubscribeChunk();
pipeline.unsubscribeEnded();
pipeline.audioTrack.stop();
pipeline.pendingBytes = new Uint8Array(0);
void pipeline.audioContext.close().catch((error) => {
this.logger.warn('Failed to close Linux screen-share monitor audio context', error);
});
}
/**
 * Feeds one raw PCM chunk into the pipeline's Web Audio graph.
 *
 * Incoming bytes are merged with any leftover from the previous chunk and
 * truncated to a whole number of frames (bytesPerSample * channelCount);
 * the trailing partial frame is carried over in `pipeline.pendingBytes`.
 * Complete frames are decoded into an AudioBuffer and scheduled
 * back-to-back via `pipeline.nextStartTime` so playback stays gapless.
 *
 * Only 16-bit PCM is supported; other sample sizes are logged and dropped.
 */
private handleLinuxScreenShareMonitorAudioChunk(
  pipeline: LinuxScreenShareMonitorAudioPipeline,
  chunk: Uint8Array
): void {
  if (pipeline.bitsPerSample !== 16) {
    this.logger.warn('Unsupported Linux screen-share monitor capture sample size', {
      bitsPerSample: pipeline.bitsPerSample,
      captureId: pipeline.captureId
    });
    return;
  }
  const bytesPerSample = pipeline.bitsPerSample / 8;
  const bytesPerFrame = bytesPerSample * pipeline.channelCount;
  // Guards against channelCount of 0/NaN producing a divide-by-zero below.
  if (!Number.isFinite(bytesPerFrame) || bytesPerFrame <= 0) {
    return;
  }
  const combinedBytes = this.concatLinuxMonitorAudioBytes(pipeline.pendingBytes, chunk);
  // Keep only whole frames; the remainder waits for the next chunk.
  const completeByteLength = combinedBytes.byteLength - (combinedBytes.byteLength % bytesPerFrame);
  if (completeByteLength <= 0) {
    pipeline.pendingBytes = combinedBytes;
    return;
  }
  const completeBytes = combinedBytes.subarray(0, completeByteLength);
  pipeline.pendingBytes = this.copyLinuxMonitorAudioBytes(combinedBytes.subarray(completeByteLength));
  // Resume is fire-and-forget so a suspended context does not delay decoding.
  if (pipeline.audioContext.state !== 'running') {
    void pipeline.audioContext.resume().catch((error) => {
      this.logger.warn('Failed to resume Linux screen-share monitor audio context', error);
    });
  }
  const frameCount = completeByteLength / bytesPerFrame;
  const audioBuffer = this.createLinuxScreenShareAudioBuffer(pipeline, completeBytes, frameCount);
  const source = pipeline.audioContext.createBufferSource();
  source.buffer = audioBuffer;
  source.connect(pipeline.mediaDestination);
  // Free the graph node once it has finished playing.
  source.onended = () => {
    source.disconnect();
  };
  // Schedule contiguously after the previous buffer, but never earlier than
  // 20 ms from now so the start of the buffer is not clipped.
  const now = pipeline.audioContext.currentTime;
  const startTime = Math.max(pipeline.nextStartTime, now + 0.02);
  source.start(startTime);
  pipeline.nextStartTime = startTime + audioBuffer.duration;
}
/**
 * Decodes interleaved 16-bit little-endian PCM bytes into a planar
 * AudioBuffer, scaling samples to the [-1, 1) float range.
 *
 * @param pipeline supplies channel count, sample rate and bits per sample.
 * @param bytes interleaved PCM data, exactly frameCount complete frames.
 * @param frameCount number of frames to decode.
 */
private createLinuxScreenShareAudioBuffer(
  pipeline: LinuxScreenShareMonitorAudioPipeline,
  bytes: Uint8Array,
  frameCount: number
): AudioBuffer {
  const buffer = pipeline.audioContext.createBuffer(pipeline.channelCount, frameCount, pipeline.sampleRate);
  const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
  const bytesPerSample = pipeline.bitsPerSample / 8;
  const stride = bytesPerSample * pipeline.channelCount;
  // De-interleave channel by channel: each channel's samples sit at a fixed
  // byte offset within every frame.
  for (let channelIndex = 0; channelIndex < pipeline.channelCount; channelIndex += 1) {
    const samples = buffer.getChannelData(channelIndex);
    const channelByteOffset = channelIndex * bytesPerSample;
    for (let frameIndex = 0; frameIndex < frameCount; frameIndex += 1) {
      samples[frameIndex] = view.getInt16((frameIndex * stride) + channelByteOffset, true) / 32768;
    }
  }
  return buffer;
}
/**
 * Concatenates two byte arrays into a fresh Uint8Array. When either side
 * is empty, returns a copy of the other so callers always own the result.
 */
private concatLinuxMonitorAudioBytes(first: Uint8Array, second: Uint8Array): Uint8Array {
  if (first.byteLength === 0) {
    return this.copyLinuxMonitorAudioBytes(second);
  }
  if (second.byteLength === 0) {
    return this.copyLinuxMonitorAudioBytes(first);
  }
  const merged = new Uint8Array(first.byteLength + second.byteLength);
  merged.set(first);
  merged.set(second, first.byteLength);
  return merged;
}
/** Returns an owned copy of the bytes (a fresh empty array for empty input). */
private copyLinuxMonitorAudioBytes(bytes: Uint8Array): Uint8Array {
  if (bytes.byteLength === 0) {
    return new Uint8Array(0);
  }
  return new Uint8Array(bytes);
}
/**
 * Builds getDisplayMedia constraints for the given options and quality
 * preset. When system audio is requested, processing (AEC/NS/AGC) is
 * disabled and, where the browser advertises support, own-audio
 * restriction and local-playback suppression are enabled.
 */
private buildDisplayMediaConstraints(
  options: ScreenShareStartOptions,
  preset: ScreenShareQualityPreset
): DisplayMediaStreamOptions {
  const supported = navigator.mediaDevices?.getSupportedConstraints?.() as Record<string, boolean> | undefined;
  let audio: Record<string, unknown> | false = false;
  if (options.includeSystemAudio) {
    audio = {
      echoCancellation: false,
      noiseSuppression: false,
      autoGainControl: false
    };
    // Non-standard constraints: only set when the UA reports support.
    if (supported?.['restrictOwnAudio']) {
      audio['restrictOwnAudio'] = true;
    }
    if (supported?.['suppressLocalAudioPlayback']) {
      audio['suppressLocalAudioPlayback'] = true;
    }
  }
  return {
    video: {
      width: { ideal: preset.width, max: preset.width },
      height: { ideal: preset.height, max: preset.height },
      frameRate: { ideal: preset.frameRate, max: preset.frameRate }
    },
    audio,
    monitorTypeSurfaces: 'include',
    selfBrowserSurface: 'exclude',
    surfaceSwitching: 'include',
    systemAudio: options.includeSystemAudio ? 'include' : 'exclude'
  } as DisplayMediaStreamOptions;
}
/**
 * Builds Chromium desktop-capture (`chromeMediaSource: 'desktop'`)
 * constraints for an Electron source id, capping video at the preset's
 * resolution/frame rate and optionally capturing system audio from the
 * same source.
 */
private buildElectronDesktopConstraints(
  sourceId: string,
  options: ScreenShareStartOptions,
  preset: ScreenShareQualityPreset
): ElectronDesktopMediaStreamConstraints {
  const constraints: ElectronDesktopMediaStreamConstraints = {
    video: {
      mandatory: {
        chromeMediaSource: 'desktop',
        chromeMediaSourceId: sourceId,
        maxWidth: preset.width,
        maxHeight: preset.height,
        maxFrameRate: preset.frameRate
      }
    },
    audio: false
  };
  if (options.includeSystemAudio) {
    // Desktop audio must reference the same source as the video capture.
    constraints.audio = {
      mandatory: {
        chromeMediaSource: 'desktop',
        chromeMediaSourceId: sourceId
      }
    };
  }
  return constraints;
}
private configureScreenStream(preset: ScreenShareQualityPreset): void {
const screenVideoTrack = this.activeScreenStream?.getVideoTracks()[0];

View File

@@ -20,6 +20,10 @@ export interface PeerData {
screenVideoSender?: RTCRtpSender;
/** The RTP sender carrying the screen-share audio track. */
screenAudioSender?: RTCRtpSender;
/** Known remote stream ids that carry the peer's voice audio. */
remoteVoiceStreamIds: Set<string>;
/** Known remote stream ids that carry the peer's screen-share audio/video. */
remoteScreenShareStreamIds: Set<string>;
}
/** Credentials cached for automatic re-identification after reconnect. */

View File

@@ -18,11 +18,14 @@ export interface PlaybackOptions {
*
* Chrome/Electron workaround: a muted HTMLAudioElement is attached to
* the stream first so that `createMediaStreamSource` actually outputs
* audio. The element itself is silent - all audible output comes from
* the GainNode -> AudioContext.destination path.
* audio. The priming element itself is silent; audible output is routed
* through a separate output element fed by
* `GainNode -> MediaStreamDestination` so output-device switching stays
* reliable during Linux screen sharing.
*/
interface PeerAudioPipeline {
audioElement: HTMLAudioElement;
outputElement: HTMLAudioElement;
context: AudioContext;
sourceNodes: MediaStreamAudioSourceNode[];
gainNode: GainNode;
@@ -38,6 +41,7 @@ export class VoicePlaybackService {
private userVolumes = new Map<string, number>();
private userMuted = new Map<string, boolean>();
private preferredOutputDeviceId = 'default';
private temporaryOutputDeviceId: string | null = null;
private masterVolume = 1;
private deafened = false;
private captureEchoSuppressed = false;
@@ -49,6 +53,13 @@ export class VoicePlaybackService {
this.captureEchoSuppressed = this.webrtc.isScreenShareRemotePlaybackSuppressed();
this.recalcAllGains();
});
effect(() => {
this.temporaryOutputDeviceId = this.webrtc.forceDefaultRemotePlaybackOutput()
? 'default'
: null;
void this.applyEffectiveOutputDeviceToAllPipelines();
});
}
handleRemoteStream(peerId: string, stream: MediaStream, options: PlaybackOptions): void {
@@ -154,11 +165,12 @@ export class VoicePlaybackService {
* ↓
* muted <audio> element (Chrome workaround - primes the stream)
* ↓
* MediaStreamSource → GainNode → AudioContext.destination
* MediaStreamSource → GainNode → MediaStreamDestination → output <audio>
*/
private createPipeline(peerId: string, stream: MediaStream): void {
// Chromium/Electron needs a muted <audio> element before Web Audio can read the stream.
const audioEl = new Audio();
const outputEl = new Audio();
const audioTracks = stream.getAudioTracks().filter((track) => track.readyState === 'live');
audioEl.srcObject = stream;
@@ -167,12 +179,24 @@ export class VoicePlaybackService {
const ctx = new AudioContext();
const gainNode = ctx.createGain();
const mediaDestination = ctx.createMediaStreamDestination();
const sourceNodes = audioTracks.map((track) => ctx.createMediaStreamSource(new MediaStream([track])));
sourceNodes.forEach((sourceNode) => sourceNode.connect(gainNode));
gainNode.connect(ctx.destination);
gainNode.connect(mediaDestination);
const pipeline: PeerAudioPipeline = { audioElement: audioEl, context: ctx, sourceNodes, gainNode };
outputEl.srcObject = mediaDestination.stream;
outputEl.muted = false;
outputEl.volume = 1;
outputEl.play().catch(() => {});
const pipeline: PeerAudioPipeline = {
audioElement: audioEl,
outputElement: outputEl,
context: ctx,
sourceNodes,
gainNode
};
this.peerPipelines.set(peerId, pipeline);
@@ -194,26 +218,20 @@ export class VoicePlaybackService {
}
// eslint-disable-next-line
const anyAudio = pipeline.audioElement as any;
// eslint-disable-next-line
const anyCtx = pipeline.context as any;
const anyAudio = pipeline.outputElement as any;
const tasks: Promise<unknown>[] = [];
if (typeof anyAudio.setSinkId === 'function') {
tasks.push(anyAudio.setSinkId(deviceId).catch(() => undefined));
}
if (typeof anyCtx.setSinkId === 'function') {
tasks.push(anyCtx.setSinkId(deviceId).catch(() => undefined));
}
if (tasks.length > 0) {
await Promise.all(tasks);
}
}
private getEffectiveOutputDeviceId(): string {
return this.preferredOutputDeviceId;
return this.temporaryOutputDeviceId ?? this.preferredOutputDeviceId;
}
private removePipeline(peerId: string): void {
@@ -238,6 +256,8 @@ export class VoicePlaybackService {
pipeline.audioElement.srcObject = null;
pipeline.audioElement.remove();
pipeline.outputElement.srcObject = null;
pipeline.outputElement.remove();
if (pipeline.context.state !== 'closed') {
pipeline.context.close().catch(() => {});