Compare commits
5 Commits
7bf37ba510
...
2b6e477c9a
| Author | SHA1 | Date | |
|---|---|---|---|
| 2b6e477c9a | |||
| 22d355a522 | |||
| 15c5952e29 | |||
| 781c05294f | |||
| 778e75bef5 |
@@ -32,6 +32,11 @@ interface SinkInputDetails extends ShortSinkInputEntry {
|
||||
properties: Record<string, string>;
|
||||
}
|
||||
|
||||
interface DescendantProcessInfo {
|
||||
ids: ReadonlySet<string>;
|
||||
binaryNames: ReadonlySet<string>;
|
||||
}
|
||||
|
||||
interface PactlJsonSinkInputEntry {
|
||||
index?: number | string;
|
||||
properties?: Record<string, unknown>;
|
||||
@@ -44,6 +49,7 @@ interface LinuxScreenShareAudioRoutingState {
|
||||
screenShareLoopbackModuleId: string | null;
|
||||
voiceLoopbackModuleId: string | null;
|
||||
rerouteIntervalId: ReturnType<typeof setInterval> | null;
|
||||
subscribeProcess: ChildProcess | null;
|
||||
}
|
||||
|
||||
interface LinuxScreenShareMonitorCaptureState {
|
||||
@@ -77,7 +83,8 @@ const routingState: LinuxScreenShareAudioRoutingState = {
|
||||
restoreSinkName: null,
|
||||
screenShareLoopbackModuleId: null,
|
||||
voiceLoopbackModuleId: null,
|
||||
rerouteIntervalId: null
|
||||
rerouteIntervalId: null,
|
||||
subscribeProcess: null
|
||||
};
|
||||
const monitorCaptureState: LinuxScreenShareMonitorCaptureState = {
|
||||
captureId: null,
|
||||
@@ -126,12 +133,21 @@ export async function activateLinuxScreenShareAudioRouting(): Promise<LinuxScree
|
||||
routingState.screenShareLoopbackModuleId = await loadLoopbackModule(SCREEN_SHARE_MONITOR_SOURCE_NAME, restoreSinkName);
|
||||
routingState.voiceLoopbackModuleId = await loadLoopbackModule(`${VOICE_SINK_NAME}.monitor`, restoreSinkName);
|
||||
|
||||
await setDefaultSink(SCREEN_SHARE_SINK_NAME);
|
||||
await moveSinkInputs(SCREEN_SHARE_SINK_NAME, (sinkName) => !!sinkName && sinkName !== SCREEN_SHARE_SINK_NAME && sinkName !== VOICE_SINK_NAME);
|
||||
// Set the default sink to the voice sink so that new app audio
|
||||
// streams (received WebRTC voice) never land on the screenshare
|
||||
// capture sink. This prevents the feedback loop where remote
|
||||
// voice audio was picked up by parec before the reroute interval
|
||||
// could move the stream away.
|
||||
await setDefaultSink(VOICE_SINK_NAME);
|
||||
|
||||
routingState.active = true;
|
||||
await rerouteAppSinkInputsToVoiceSink();
|
||||
|
||||
// Let the combined reroute decide placement for every existing
|
||||
// stream. This avoids briefly shoving the app's own playback to the
|
||||
// screenshare sink before ownership detection can move it back.
|
||||
await rerouteSinkInputs();
|
||||
startSinkInputRerouteLoop();
|
||||
startSubscribeWatcher();
|
||||
|
||||
return buildRoutingInfo(true, true);
|
||||
} catch (error) {
|
||||
@@ -148,6 +164,7 @@ export async function activateLinuxScreenShareAudioRouting(): Promise<LinuxScree
|
||||
export async function deactivateLinuxScreenShareAudioRouting(): Promise<boolean> {
|
||||
const restoreSinkName = routingState.restoreSinkName;
|
||||
|
||||
stopSubscribeWatcher();
|
||||
stopSinkInputRerouteLoop();
|
||||
await stopLinuxScreenShareMonitorCapture();
|
||||
|
||||
@@ -166,6 +183,7 @@ export async function deactivateLinuxScreenShareAudioRouting(): Promise<boolean>
|
||||
routingState.restoreSinkName = null;
|
||||
routingState.screenShareLoopbackModuleId = null;
|
||||
routingState.voiceLoopbackModuleId = null;
|
||||
routingState.subscribeProcess = null;
|
||||
|
||||
return true;
|
||||
}
|
||||
@@ -425,34 +443,52 @@ async function setDefaultSink(sinkName: string): Promise<void> {
|
||||
await runPactl('set-default-sink', sinkName);
|
||||
}
|
||||
|
||||
async function rerouteAppSinkInputsToVoiceSink(): Promise<void> {
|
||||
/**
|
||||
* Combined reroute that enforces sink placement in both directions:
|
||||
* - App-owned sink inputs that are NOT on the voice sink are moved there.
|
||||
* - Non-app sink inputs that ARE on the voice sink are moved to the
|
||||
* screenshare sink so they are captured by parec.
|
||||
*
|
||||
* This two-way approach, combined with the voice sink being the PulseAudio
|
||||
* default, ensures that received WebRTC voice audio can never leak into the
|
||||
* screenshare monitor source.
|
||||
*/
|
||||
async function rerouteSinkInputs(): Promise<void> {
|
||||
const [
|
||||
sinks,
|
||||
sinkInputs,
|
||||
descendantProcessIds
|
||||
descendantProcessInfo
|
||||
] = await Promise.all([
|
||||
listSinks(),
|
||||
listSinkInputDetails(),
|
||||
collectDescendantProcessIds(process.pid)
|
||||
collectDescendantProcessInfo(process.pid)
|
||||
]);
|
||||
const sinkNamesByIndex = new Map(sinks.map((sink) => [sink.index, sink.name]));
|
||||
|
||||
await Promise.all(
|
||||
sinkInputs.map(async (sinkInput) => {
|
||||
if (!isAppOwnedSinkInput(sinkInput, descendantProcessIds)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const sinkName = sinkNamesByIndex.get(sinkInput.sinkIndex) ?? null;
|
||||
const appOwned = isAppOwnedSinkInput(sinkInput, descendantProcessInfo);
|
||||
|
||||
// App-owned streams must stay on the voice sink.
|
||||
if (appOwned && sinkName !== VOICE_SINK_NAME) {
|
||||
try {
|
||||
await runPactl('move-sink-input', sinkInput.index, VOICE_SINK_NAME);
|
||||
} catch {
|
||||
// Streams can disappear or be recreated while rerouting.
|
||||
}
|
||||
|
||||
if (sinkName === VOICE_SINK_NAME) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await runPactl('move-sink-input', sinkInput.index, VOICE_SINK_NAME);
|
||||
} catch {
|
||||
// Streams can disappear or be recreated while rerouting.
|
||||
// Non-app streams sitting on the voice sink should be moved to the
|
||||
// screenshare sink for desktop-audio capture.
|
||||
if (!appOwned && sinkName === VOICE_SINK_NAME) {
|
||||
try {
|
||||
await runPactl('move-sink-input', sinkInput.index, SCREEN_SHARE_SINK_NAME);
|
||||
} catch {
|
||||
// Streams can disappear or be recreated while rerouting.
|
||||
}
|
||||
}
|
||||
})
|
||||
);
|
||||
@@ -515,7 +551,7 @@ function startSinkInputRerouteLoop(): void {
|
||||
}
|
||||
|
||||
routingState.rerouteIntervalId = setInterval(() => {
|
||||
void rerouteAppSinkInputsToVoiceSink();
|
||||
void rerouteSinkInputs();
|
||||
}, REROUTE_INTERVAL_MS);
|
||||
}
|
||||
|
||||
@@ -528,13 +564,108 @@ function stopSinkInputRerouteLoop(): void {
|
||||
routingState.rerouteIntervalId = null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Spawns `pactl subscribe` to receive PulseAudio events in real time.
|
||||
* When a new or changed sink-input is detected, a reroute is triggered
|
||||
* immediately instead of waiting for the next interval tick. This
|
||||
* drastically reduces the time non-app desktop audio spends on the
|
||||
* voice sink before being moved to the screenshare sink.
|
||||
*/
|
||||
function startSubscribeWatcher(): void {
|
||||
if (routingState.subscribeProcess) {
|
||||
return;
|
||||
}
|
||||
|
||||
let proc: ChildProcess;
|
||||
|
||||
try {
|
||||
proc = spawn('pactl', ['subscribe'], {
|
||||
env: process.env,
|
||||
stdio: [
|
||||
'ignore',
|
||||
'pipe',
|
||||
'ignore'
|
||||
]
|
||||
});
|
||||
} catch {
|
||||
// If pactl subscribe fails to spawn, the interval loop still covers us.
|
||||
return;
|
||||
}
|
||||
|
||||
routingState.subscribeProcess = proc;
|
||||
|
||||
let pending = false;
|
||||
|
||||
proc.stdout?.on('data', (chunk: Buffer) => {
|
||||
if (!routingState.active) {
|
||||
return;
|
||||
}
|
||||
|
||||
const text = chunk.toString();
|
||||
|
||||
if (/Event '(?:new|change)' on sink-input/.test(text)) {
|
||||
if (!pending) {
|
||||
pending = true;
|
||||
|
||||
// Batch rapid-fire events with a short delay.
|
||||
setTimeout(() => {
|
||||
pending = false;
|
||||
void rerouteSinkInputs();
|
||||
}, 50);
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
proc.on('close', () => {
|
||||
if (routingState.subscribeProcess === proc) {
|
||||
routingState.subscribeProcess = null;
|
||||
}
|
||||
});
|
||||
|
||||
proc.on('error', () => {
|
||||
if (routingState.subscribeProcess === proc) {
|
||||
routingState.subscribeProcess = null;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
function stopSubscribeWatcher(): void {
|
||||
const proc = routingState.subscribeProcess;
|
||||
|
||||
if (!proc) {
|
||||
return;
|
||||
}
|
||||
|
||||
routingState.subscribeProcess = null;
|
||||
|
||||
if (!proc.killed) {
|
||||
proc.kill('SIGTERM');
|
||||
}
|
||||
}
|
||||
|
||||
function isAppOwnedSinkInput(
|
||||
sinkInput: SinkInputDetails,
|
||||
descendantProcessIds: ReadonlySet<string>
|
||||
descendantProcessInfo: DescendantProcessInfo
|
||||
): boolean {
|
||||
const processId = sinkInput.properties['application.process.id'];
|
||||
|
||||
return typeof processId === 'string' && descendantProcessIds.has(processId);
|
||||
if (typeof processId === 'string' && descendantProcessInfo.ids.has(processId)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const processBinary = normalizeProcessBinary(sinkInput.properties['application.process.binary']);
|
||||
|
||||
if (processBinary && descendantProcessInfo.binaryNames.has(processBinary)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const applicationName = normalizeProcessBinary(sinkInput.properties['application.name']);
|
||||
|
||||
if (applicationName && descendantProcessInfo.binaryNames.has(applicationName)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
async function moveSinkInputs(
|
||||
@@ -697,31 +828,45 @@ async function listSinkInputDetails(): Promise<SinkInputDetails[]> {
|
||||
return entries.filter((entry) => !!entry.sinkIndex);
|
||||
}
|
||||
|
||||
async function collectDescendantProcessIds(rootProcessId: number): Promise<Set<string>> {
|
||||
const { stdout } = await execFileAsync('ps', ['-eo', 'pid=,ppid='], {
|
||||
async function collectDescendantProcessInfo(rootProcessId: number): Promise<DescendantProcessInfo> {
|
||||
const { stdout } = await execFileAsync('ps', ['-eo', 'pid=,ppid=,comm='], {
|
||||
env: process.env
|
||||
});
|
||||
const childrenByParentId = new Map<string, string[]>();
|
||||
const binaryNameByProcessId = new Map<string, string>();
|
||||
|
||||
stdout
|
||||
.split(/\r?\n/)
|
||||
.map((line) => line.trim())
|
||||
.filter(Boolean)
|
||||
.forEach((line) => {
|
||||
const [pid, ppid] = line.split(/\s+/);
|
||||
const match = line.match(/^(\d+)\s+(\d+)\s+(.+)$/);
|
||||
|
||||
if (!pid || !ppid) {
|
||||
if (!match) {
|
||||
return;
|
||||
}
|
||||
|
||||
const [
|
||||
,
|
||||
pid,
|
||||
ppid,
|
||||
command
|
||||
] = match;
|
||||
const siblings = childrenByParentId.get(ppid) ?? [];
|
||||
|
||||
siblings.push(pid);
|
||||
childrenByParentId.set(ppid, siblings);
|
||||
|
||||
const normalizedBinaryName = normalizeProcessBinary(command);
|
||||
|
||||
if (normalizedBinaryName) {
|
||||
binaryNameByProcessId.set(pid, normalizedBinaryName);
|
||||
}
|
||||
});
|
||||
|
||||
const rootId = `${rootProcessId}`;
|
||||
const descendantIds = new Set<string>([rootId]);
|
||||
const descendantBinaryNames = new Set<string>();
|
||||
const queue = [rootId];
|
||||
|
||||
while (queue.length > 0) {
|
||||
@@ -731,6 +876,12 @@ async function collectDescendantProcessIds(rootProcessId: number): Promise<Set<s
|
||||
continue;
|
||||
}
|
||||
|
||||
const binaryName = binaryNameByProcessId.get(currentId);
|
||||
|
||||
if (binaryName) {
|
||||
descendantBinaryNames.add(binaryName);
|
||||
}
|
||||
|
||||
for (const childId of childrenByParentId.get(currentId) ?? []) {
|
||||
if (descendantIds.has(childId)) {
|
||||
continue;
|
||||
@@ -741,7 +892,30 @@ async function collectDescendantProcessIds(rootProcessId: number): Promise<Set<s
|
||||
}
|
||||
}
|
||||
|
||||
return descendantIds;
|
||||
return {
|
||||
ids: descendantIds,
|
||||
binaryNames: descendantBinaryNames
|
||||
};
|
||||
}
|
||||
|
||||
function normalizeProcessBinary(value: string | undefined): string | null {
|
||||
if (!value) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const trimmed = value.trim();
|
||||
|
||||
if (!trimmed) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const basename = trimmed
|
||||
.split(/[\\/]/)
|
||||
.pop()
|
||||
?.trim()
|
||||
.toLowerCase() ?? '';
|
||||
|
||||
return basename || null;
|
||||
}
|
||||
|
||||
function stripSurroundingQuotes(value: string): string {
|
||||
|
||||
@@ -10,7 +10,7 @@ interface WsMessage {
|
||||
/** Sends the current user list for a given server to a single connected user. */
|
||||
function sendServerUsers(user: ConnectedUser, serverId: string): void {
|
||||
const users = Array.from(connectedUsers.values())
|
||||
.filter(cu => cu.serverIds.has(serverId) && cu.oderId !== user.oderId && cu.displayName)
|
||||
.filter(cu => cu.serverIds.has(serverId) && cu.oderId !== user.oderId)
|
||||
.map(cu => ({ oderId: cu.oderId, displayName: cu.displayName ?? 'Anonymous' }));
|
||||
|
||||
user.ws.send(JSON.stringify({ type: 'server_users', serverId, users }));
|
||||
|
||||
@@ -9,13 +9,73 @@ import { connectedUsers } from './state';
|
||||
import { broadcastToServer } from './broadcast';
|
||||
import { handleWebSocketMessage } from './handler';
|
||||
|
||||
/** How often to ping all connected clients (ms). */
|
||||
const PING_INTERVAL_MS = 30_000;
|
||||
/** Maximum time a client can go without a pong before we consider it dead (ms). */
|
||||
const PONG_TIMEOUT_MS = 45_000;
|
||||
|
||||
function removeDeadConnection(connectionId: string): void {
|
||||
const user = connectedUsers.get(connectionId);
|
||||
|
||||
if (user) {
|
||||
console.log(`Removing dead connection: ${user.displayName ?? 'Unknown'} (${user.oderId})`);
|
||||
|
||||
user.serverIds.forEach((sid) => {
|
||||
broadcastToServer(sid, {
|
||||
type: 'user_left',
|
||||
oderId: user.oderId,
|
||||
displayName: user.displayName,
|
||||
serverId: sid
|
||||
}, user.oderId);
|
||||
});
|
||||
|
||||
try {
|
||||
user.ws.terminate();
|
||||
} catch {
|
||||
console.warn(`Failed to terminate WebSocket for ${user.displayName ?? 'Unknown'} (${user.oderId})`);
|
||||
}
|
||||
}
|
||||
|
||||
connectedUsers.delete(connectionId);
|
||||
}
|
||||
|
||||
export function setupWebSocket(server: Server<typeof IncomingMessage, typeof ServerResponse>): void {
|
||||
const wss = new WebSocketServer({ server });
|
||||
// Periodically ping all clients and reap dead connections
|
||||
const pingInterval = setInterval(() => {
|
||||
const now = Date.now();
|
||||
|
||||
connectedUsers.forEach((user, connectionId) => {
|
||||
if (now - user.lastPong > PONG_TIMEOUT_MS) {
|
||||
removeDeadConnection(connectionId);
|
||||
return;
|
||||
}
|
||||
|
||||
if (user.ws.readyState === WebSocket.OPEN) {
|
||||
try {
|
||||
user.ws.ping();
|
||||
} catch {
|
||||
console.warn(`Failed to ping client ${user.displayName ?? 'Unknown'} (${user.oderId})`);
|
||||
}
|
||||
}
|
||||
});
|
||||
}, PING_INTERVAL_MS);
|
||||
|
||||
wss.on('close', () => clearInterval(pingInterval));
|
||||
|
||||
wss.on('connection', (ws: WebSocket) => {
|
||||
const connectionId = uuidv4();
|
||||
const now = Date.now();
|
||||
|
||||
connectedUsers.set(connectionId, { oderId: connectionId, ws, serverIds: new Set() });
|
||||
connectedUsers.set(connectionId, { oderId: connectionId, ws, serverIds: new Set(), lastPong: now });
|
||||
|
||||
ws.on('pong', () => {
|
||||
const user = connectedUsers.get(connectionId);
|
||||
|
||||
if (user) {
|
||||
user.lastPong = Date.now();
|
||||
}
|
||||
});
|
||||
|
||||
ws.on('message', (data) => {
|
||||
try {
|
||||
@@ -28,20 +88,7 @@ export function setupWebSocket(server: Server<typeof IncomingMessage, typeof Ser
|
||||
});
|
||||
|
||||
ws.on('close', () => {
|
||||
const user = connectedUsers.get(connectionId);
|
||||
|
||||
if (user) {
|
||||
user.serverIds.forEach((sid) => {
|
||||
broadcastToServer(sid, {
|
||||
type: 'user_left',
|
||||
oderId: user.oderId,
|
||||
displayName: user.displayName,
|
||||
serverId: sid
|
||||
}, user.oderId);
|
||||
});
|
||||
}
|
||||
|
||||
connectedUsers.delete(connectionId);
|
||||
removeDeadConnection(connectionId);
|
||||
});
|
||||
|
||||
ws.send(JSON.stringify({ type: 'connected', connectionId, serverTime: Date.now() }));
|
||||
|
||||
@@ -6,4 +6,6 @@ export interface ConnectedUser {
|
||||
serverIds: Set<string>;
|
||||
viewedServerId?: string;
|
||||
displayName?: string;
|
||||
/** Timestamp of the last pong received (used to detect dead connections). */
|
||||
lastPong: number;
|
||||
}
|
||||
|
||||
@@ -218,7 +218,16 @@ export class DebuggingService {
|
||||
|
||||
const rawMessage = args.map((arg) => this.stringifyPreview(arg)).join(' ')
|
||||
.trim() || '(empty console call)';
|
||||
const consoleMetadata = this.extractConsoleMetadata(rawMessage);
|
||||
|
||||
// Use only string args for label/message extraction so that
|
||||
// stringified object payloads don't pollute the parsed message.
|
||||
// Object payloads are captured separately via extractConsolePayload.
|
||||
const metadataSource = args
|
||||
.filter((arg): arg is string => typeof arg === 'string')
|
||||
.join(' ')
|
||||
.trim() || rawMessage;
|
||||
|
||||
const consoleMetadata = this.extractConsoleMetadata(metadataSource);
|
||||
const payload = this.extractConsolePayload(args);
|
||||
const payloadText = payload === undefined
|
||||
? null
|
||||
|
||||
@@ -109,6 +109,7 @@ export class WebRTCService implements OnDestroy {
|
||||
private readonly _isNoiseReductionEnabled = signal(false);
|
||||
private readonly _screenStreamSignal = signal<MediaStream | null>(null);
|
||||
private readonly _isScreenShareRemotePlaybackSuppressed = signal(false);
|
||||
private readonly _forceDefaultRemotePlaybackOutput = signal(false);
|
||||
private readonly _hasConnectionError = signal(false);
|
||||
private readonly _connectionErrorMessage = signal<string | null>(null);
|
||||
private readonly _hasEverConnected = signal(false);
|
||||
@@ -131,6 +132,7 @@ export class WebRTCService implements OnDestroy {
|
||||
readonly isNoiseReductionEnabled = computed(() => this._isNoiseReductionEnabled());
|
||||
readonly screenStream = computed(() => this._screenStreamSignal());
|
||||
readonly isScreenShareRemotePlaybackSuppressed = computed(() => this._isScreenShareRemotePlaybackSuppressed());
|
||||
readonly forceDefaultRemotePlaybackOutput = computed(() => this._forceDefaultRemotePlaybackOutput());
|
||||
readonly hasConnectionError = computed(() => this._hasConnectionError());
|
||||
readonly connectionErrorMessage = computed(() => this._connectionErrorMessage());
|
||||
readonly shouldShowConnectionError = computed(() => {
|
||||
@@ -220,6 +222,7 @@ export class WebRTCService implements OnDestroy {
|
||||
this._isScreenSharing.set(state.active);
|
||||
this._screenStreamSignal.set(state.stream);
|
||||
this._isScreenShareRemotePlaybackSuppressed.set(state.suppressRemotePlayback);
|
||||
this._forceDefaultRemotePlaybackOutput.set(state.forceDefaultRemotePlaybackOutput);
|
||||
}
|
||||
});
|
||||
|
||||
@@ -513,6 +516,11 @@ export class WebRTCService implements OnDestroy {
|
||||
this.activeServerId = serverId;
|
||||
}
|
||||
|
||||
/** The server ID currently being viewed / active, or `null`. */
|
||||
get currentServerId(): string | null {
|
||||
return this.activeServerId;
|
||||
}
|
||||
|
||||
/**
|
||||
* Send an identify message to the signaling server.
|
||||
*
|
||||
@@ -907,6 +915,7 @@ export class WebRTCService implements OnDestroy {
|
||||
this._isScreenSharing.set(false);
|
||||
this._screenStreamSignal.set(null);
|
||||
this._isScreenShareRemotePlaybackSuppressed.set(false);
|
||||
this._forceDefaultRemotePlaybackOutput.set(false);
|
||||
}
|
||||
|
||||
/** Synchronise Angular signals from the MediaManager's internal state. */
|
||||
|
||||
@@ -103,10 +103,10 @@ export class MediaManager {
|
||||
* Replace the callback set at runtime.
|
||||
* Needed because of circular initialisation between managers.
|
||||
*
|
||||
* @param cb - The new callback interface to wire into this manager.
|
||||
* @param nextCallbacks - The new callback interface to wire into this manager.
|
||||
*/
|
||||
setCallbacks(cb: MediaManagerCallbacks): void {
|
||||
this.callbacks = cb;
|
||||
setCallbacks(nextCallbacks: MediaManagerCallbacks): void {
|
||||
this.callbacks = nextCallbacks;
|
||||
}
|
||||
|
||||
/** Returns the current local media stream, or `null` if voice is disabled. */
|
||||
@@ -485,28 +485,21 @@ export class MediaManager {
|
||||
if (!this.localMediaStream)
|
||||
return;
|
||||
|
||||
const localAudioTrack = this.localMediaStream.getAudioTracks()[0] || null;
|
||||
const localVideoTrack = this.localMediaStream.getVideoTracks()[0] || null;
|
||||
const localStream = this.localMediaStream;
|
||||
const localAudioTrack = localStream.getAudioTracks()[0] || null;
|
||||
const localVideoTrack = localStream.getVideoTracks()[0] || null;
|
||||
|
||||
peers.forEach((peerData, peerId) => {
|
||||
if (localAudioTrack) {
|
||||
let audioSender =
|
||||
peerData.audioSender ||
|
||||
peerData.connection.getSenders().find((s) => s.track?.kind === TRACK_KIND_AUDIO);
|
||||
|
||||
if (!audioSender) {
|
||||
audioSender = peerData.connection.addTransceiver(TRACK_KIND_AUDIO, {
|
||||
direction: TRANSCEIVER_SEND_RECV
|
||||
}).sender;
|
||||
}
|
||||
const audioTransceiver = this.getOrCreateReusableTransceiver(peerData, TRACK_KIND_AUDIO, {
|
||||
preferredSender: peerData.audioSender,
|
||||
excludedSenders: [peerData.screenAudioSender]
|
||||
});
|
||||
const audioSender = audioTransceiver.sender;
|
||||
|
||||
peerData.audioSender = audioSender;
|
||||
|
||||
// Restore direction after removeTrack (which sets it to recvonly)
|
||||
const audioTransceiver = peerData.connection
|
||||
.getTransceivers()
|
||||
.find((t) => t.sender === audioSender);
|
||||
|
||||
if (
|
||||
audioTransceiver &&
|
||||
(audioTransceiver.direction === TRANSCEIVER_RECV_ONLY ||
|
||||
@@ -515,29 +508,25 @@ export class MediaManager {
|
||||
audioTransceiver.direction = TRANSCEIVER_SEND_RECV;
|
||||
}
|
||||
|
||||
if (typeof audioSender.setStreams === 'function') {
|
||||
audioSender.setStreams(localStream);
|
||||
}
|
||||
|
||||
audioSender
|
||||
.replaceTrack(localAudioTrack)
|
||||
.then(() => this.logger.info('audio replaceTrack ok', { peerId }))
|
||||
.catch((e) => this.logger.error('audio replaceTrack failed', e));
|
||||
.catch((error) => this.logger.error('audio replaceTrack failed', error));
|
||||
}
|
||||
|
||||
if (localVideoTrack) {
|
||||
let videoSender =
|
||||
peerData.videoSender ||
|
||||
peerData.connection.getSenders().find((s) => s.track?.kind === TRACK_KIND_VIDEO);
|
||||
|
||||
if (!videoSender) {
|
||||
videoSender = peerData.connection.addTransceiver(TRACK_KIND_VIDEO, {
|
||||
direction: TRANSCEIVER_SEND_RECV
|
||||
}).sender;
|
||||
}
|
||||
const videoTransceiver = this.getOrCreateReusableTransceiver(peerData, TRACK_KIND_VIDEO, {
|
||||
preferredSender: peerData.videoSender,
|
||||
excludedSenders: [peerData.screenVideoSender]
|
||||
});
|
||||
const videoSender = videoTransceiver.sender;
|
||||
|
||||
peerData.videoSender = videoSender;
|
||||
|
||||
const videoTransceiver = peerData.connection
|
||||
.getTransceivers()
|
||||
.find((t) => t.sender === videoSender);
|
||||
|
||||
if (
|
||||
videoTransceiver &&
|
||||
(videoTransceiver.direction === TRANSCEIVER_RECV_ONLY ||
|
||||
@@ -546,16 +535,64 @@ export class MediaManager {
|
||||
videoTransceiver.direction = TRANSCEIVER_SEND_RECV;
|
||||
}
|
||||
|
||||
if (typeof videoSender.setStreams === 'function') {
|
||||
videoSender.setStreams(localStream);
|
||||
}
|
||||
|
||||
videoSender
|
||||
.replaceTrack(localVideoTrack)
|
||||
.then(() => this.logger.info('video replaceTrack ok', { peerId }))
|
||||
.catch((e) => this.logger.error('video replaceTrack failed', e));
|
||||
.catch((error) => this.logger.error('video replaceTrack failed', error));
|
||||
}
|
||||
|
||||
this.callbacks.renegotiate(peerId);
|
||||
});
|
||||
}
|
||||
|
||||
private getOrCreateReusableTransceiver(
|
||||
peerData: PeerData,
|
||||
kind: typeof TRACK_KIND_AUDIO | typeof TRACK_KIND_VIDEO,
|
||||
options: {
|
||||
preferredSender?: RTCRtpSender;
|
||||
excludedSenders?: (RTCRtpSender | undefined)[];
|
||||
}
|
||||
): RTCRtpTransceiver {
|
||||
const excludedSenders = new Set(
|
||||
(options.excludedSenders ?? []).filter((sender): sender is RTCRtpSender => !!sender)
|
||||
);
|
||||
const existingTransceivers = peerData.connection.getTransceivers();
|
||||
const preferredTransceiver = options.preferredSender
|
||||
? existingTransceivers.find((transceiver) => transceiver.sender === options.preferredSender)
|
||||
: null;
|
||||
|
||||
if (preferredTransceiver) {
|
||||
return preferredTransceiver;
|
||||
}
|
||||
|
||||
const attachedSenderTransceiver = existingTransceivers.find((transceiver) =>
|
||||
!excludedSenders.has(transceiver.sender)
|
||||
&& transceiver.sender.track?.kind === kind
|
||||
);
|
||||
|
||||
if (attachedSenderTransceiver) {
|
||||
return attachedSenderTransceiver;
|
||||
}
|
||||
|
||||
const reusableReceiverTransceiver = existingTransceivers.find((transceiver) =>
|
||||
!excludedSenders.has(transceiver.sender)
|
||||
&& !transceiver.sender.track
|
||||
&& transceiver.receiver.track?.kind === kind
|
||||
);
|
||||
|
||||
if (reusableReceiverTransceiver) {
|
||||
return reusableReceiverTransceiver;
|
||||
}
|
||||
|
||||
return peerData.connection.addTransceiver(kind, {
|
||||
direction: TRANSCEIVER_SEND_RECV
|
||||
});
|
||||
}
|
||||
|
||||
/** Broadcast a voice-presence state event to all connected peers. */
|
||||
private broadcastVoicePresence(): void {
|
||||
const oderId = this.callbacks.getIdentifyOderId();
|
||||
|
||||
@@ -127,7 +127,9 @@ export function createPeerConnection(
|
||||
isInitiator,
|
||||
pendingIceCandidates: [],
|
||||
audioSender: undefined,
|
||||
videoSender: undefined
|
||||
videoSender: undefined,
|
||||
remoteVoiceStreamIds: new Set<string>(),
|
||||
remoteScreenShareStreamIds: new Set<string>()
|
||||
};
|
||||
|
||||
if (isInitiator) {
|
||||
@@ -151,6 +153,10 @@ export function createPeerConnection(
|
||||
|
||||
localStream.getTracks().forEach((track) => {
|
||||
if (track.kind === TRACK_KIND_AUDIO && peerData.audioSender) {
|
||||
if (typeof peerData.audioSender.setStreams === 'function') {
|
||||
peerData.audioSender.setStreams(localStream);
|
||||
}
|
||||
|
||||
peerData.audioSender
|
||||
.replaceTrack(track)
|
||||
.then(() => logger.info('audio replaceTrack (init) ok', { remotePeerId }))
|
||||
@@ -158,6 +164,10 @@ export function createPeerConnection(
|
||||
logger.error('audio replaceTrack failed at createPeerConnection', error)
|
||||
);
|
||||
} else if (track.kind === TRACK_KIND_VIDEO && peerData.videoSender) {
|
||||
if (typeof peerData.videoSender.setStreams === 'function') {
|
||||
peerData.videoSender.setStreams(localStream);
|
||||
}
|
||||
|
||||
peerData.videoSender
|
||||
.replaceTrack(track)
|
||||
.then(() => logger.info('video replaceTrack (init) ok', { remotePeerId }))
|
||||
|
||||
@@ -9,6 +9,7 @@ export function handleRemoteTrack(
|
||||
): void {
|
||||
const { logger, state } = context;
|
||||
const track = event.track;
|
||||
const isScreenAudio = isScreenShareAudioTrack(context, event, remotePeerId);
|
||||
const settings =
|
||||
typeof track.getSettings === 'function' ? track.getSettings() : ({} as MediaTrackSettings);
|
||||
|
||||
@@ -34,10 +35,10 @@ export function handleRemoteTrack(
|
||||
}
|
||||
|
||||
const compositeStream = buildCompositeRemoteStream(state, remotePeerId, track);
|
||||
const voiceStream = isVoiceAudioTrack(context, event, remotePeerId)
|
||||
const voiceStream = isVoiceAudioTrack(track, isScreenAudio)
|
||||
? buildAudioOnlyStream(state.remotePeerVoiceStreams.get(remotePeerId), track)
|
||||
: null;
|
||||
const screenShareStream = isScreenShareTrack(context, event, remotePeerId)
|
||||
const screenShareStream = isScreenShareTrack(track, isScreenAudio)
|
||||
? buildScreenShareStream(state.remotePeerScreenShareStreams.get(remotePeerId), track)
|
||||
: null;
|
||||
|
||||
@@ -53,6 +54,12 @@ export function handleRemoteTrack(
|
||||
state.remotePeerScreenShareStreams.set(remotePeerId, screenShareStream);
|
||||
}
|
||||
|
||||
rememberIncomingStreamIds(state, event, remotePeerId, {
|
||||
isScreenAudio,
|
||||
isVoiceAudio: !!voiceStream,
|
||||
isScreenTrack: !!screenShareStream
|
||||
});
|
||||
|
||||
publishRemoteStreamUpdate(context, remotePeerId, compositeStream);
|
||||
}
|
||||
|
||||
@@ -61,6 +68,7 @@ export function clearRemoteScreenShareStream(
|
||||
remotePeerId: string
|
||||
): void {
|
||||
const { state } = context;
|
||||
const peerData = state.activePeerConnections.get(remotePeerId);
|
||||
const screenShareStream = state.remotePeerScreenShareStreams.get(remotePeerId);
|
||||
|
||||
if (!screenShareStream) {
|
||||
@@ -79,6 +87,8 @@ export function clearRemoteScreenShareStream(
|
||||
removeTracksFromStreamMap(state.remotePeerVoiceStreams, remotePeerId, screenShareTrackIds);
|
||||
state.remotePeerScreenShareStreams.delete(remotePeerId);
|
||||
|
||||
peerData?.remoteScreenShareStreamIds.clear();
|
||||
|
||||
publishRemoteStreamUpdate(context, remotePeerId, compositeStream);
|
||||
}
|
||||
|
||||
@@ -152,11 +162,20 @@ function removeRemoteTrack(
|
||||
trackId: string
|
||||
): void {
|
||||
const { state } = context;
|
||||
const peerData = state.activePeerConnections.get(remotePeerId);
|
||||
const compositeStream = removeTrackFromStreamMap(state.remotePeerStreams, remotePeerId, trackId);
|
||||
|
||||
removeTrackFromStreamMap(state.remotePeerVoiceStreams, remotePeerId, trackId);
|
||||
removeTrackFromStreamMap(state.remotePeerScreenShareStreams, remotePeerId, trackId);
|
||||
|
||||
if (!state.remotePeerVoiceStreams.has(remotePeerId)) {
|
||||
peerData?.remoteVoiceStreamIds.clear();
|
||||
}
|
||||
|
||||
if (!state.remotePeerScreenShareStreams.has(remotePeerId)) {
|
||||
peerData?.remoteScreenShareStreamIds.clear();
|
||||
}
|
||||
|
||||
publishRemoteStreamUpdate(context, remotePeerId, compositeStream);
|
||||
}
|
||||
|
||||
@@ -224,20 +243,12 @@ function publishRemoteStreamUpdate(
|
||||
});
|
||||
}
|
||||
|
||||
function isVoiceAudioTrack(
|
||||
context: PeerConnectionManagerContext,
|
||||
event: RTCTrackEvent,
|
||||
remotePeerId: string
|
||||
): boolean {
|
||||
return event.track.kind === TRACK_KIND_AUDIO && !isScreenShareAudioTrack(context, event, remotePeerId);
|
||||
function isVoiceAudioTrack(track: MediaStreamTrack, isScreenAudio: boolean): boolean {
|
||||
return track.kind === TRACK_KIND_AUDIO && !isScreenAudio;
|
||||
}
|
||||
|
||||
function isScreenShareTrack(
|
||||
context: PeerConnectionManagerContext,
|
||||
event: RTCTrackEvent,
|
||||
remotePeerId: string
|
||||
): boolean {
|
||||
return event.track.kind === TRACK_KIND_VIDEO || isScreenShareAudioTrack(context, event, remotePeerId);
|
||||
function isScreenShareTrack(track: MediaStreamTrack, isScreenAudio: boolean): boolean {
|
||||
return track.kind === TRACK_KIND_VIDEO || isScreenAudio;
|
||||
}
|
||||
|
||||
function isScreenShareAudioTrack(
|
||||
@@ -255,12 +266,34 @@ function isScreenShareAudioTrack(
|
||||
return false;
|
||||
}
|
||||
|
||||
const incomingStreamIds = getIncomingStreamIds(event);
|
||||
|
||||
if (incomingStreamIds.some((streamId) => peerData.remoteScreenShareStreamIds.has(streamId))) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (incomingStreamIds.some((streamId) => peerData.remoteVoiceStreamIds.has(streamId))) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (event.streams.some((stream) => stream.getVideoTracks().some((track) => track.readyState === 'live'))) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const screenAudioTransceiver = peerData.connection.getTransceivers().find(
|
||||
(transceiver) => transceiver.sender === peerData.screenAudioSender
|
||||
);
|
||||
|
||||
if (screenAudioTransceiver && matchesTransceiver(event.transceiver, screenAudioTransceiver)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const voiceAudioTransceiver = peerData.connection.getTransceivers().find(
|
||||
(transceiver) => transceiver.sender === peerData.audioSender
|
||||
);
|
||||
|
||||
if (voiceAudioTransceiver) {
|
||||
return event.transceiver !== voiceAudioTransceiver;
|
||||
return !matchesTransceiver(event.transceiver, voiceAudioTransceiver);
|
||||
}
|
||||
|
||||
const audioTransceivers = peerData.connection.getTransceivers().filter((transceiver) =>
|
||||
@@ -272,3 +305,52 @@ function isScreenShareAudioTrack(
|
||||
|
||||
return transceiverIndex > 0;
|
||||
}
|
||||
|
||||
function rememberIncomingStreamIds(
|
||||
state: PeerConnectionManagerContext['state'],
|
||||
event: RTCTrackEvent,
|
||||
remotePeerId: string,
|
||||
options: {
|
||||
isScreenAudio: boolean;
|
||||
isVoiceAudio: boolean;
|
||||
isScreenTrack: boolean;
|
||||
}
|
||||
): void {
|
||||
const peerData = state.activePeerConnections.get(remotePeerId);
|
||||
|
||||
if (!peerData) {
|
||||
return;
|
||||
}
|
||||
|
||||
const incomingStreamIds = getIncomingStreamIds(event);
|
||||
|
||||
if (incomingStreamIds.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (event.track.kind === TRACK_KIND_VIDEO || options.isScreenAudio || options.isScreenTrack) {
|
||||
incomingStreamIds.forEach((streamId) => {
|
||||
peerData.remoteScreenShareStreamIds.add(streamId);
|
||||
peerData.remoteVoiceStreamIds.delete(streamId);
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (options.isVoiceAudio) {
|
||||
incomingStreamIds.forEach((streamId) => {
|
||||
peerData.remoteVoiceStreamIds.add(streamId);
|
||||
peerData.remoteScreenShareStreamIds.delete(streamId);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function getIncomingStreamIds(event: RTCTrackEvent): string[] {
|
||||
return event.streams
|
||||
.map((stream) => stream.id)
|
||||
.filter((streamId): streamId is string => !!streamId);
|
||||
}
|
||||
|
||||
function matchesTransceiver(left: RTCRtpTransceiver, right: RTCRtpTransceiver): boolean {
|
||||
return left === right || (!!left.mid && !!right.mid && left.mid === right.mid);
|
||||
}
|
||||
|
||||
@@ -0,0 +1,56 @@
|
||||
import { ScreenShareQualityPreset, ScreenShareStartOptions } from '../screen-share.config';
|
||||
import { WebRTCLogger } from '../webrtc-logger';
|
||||
|
||||
export class BrowserScreenShareCapture {
|
||||
constructor(private readonly logger: WebRTCLogger) {}
|
||||
|
||||
async startCapture(
|
||||
options: ScreenShareStartOptions,
|
||||
preset: ScreenShareQualityPreset
|
||||
): Promise<MediaStream> {
|
||||
const displayConstraints = this.buildDisplayMediaConstraints(options, preset);
|
||||
|
||||
this.logger.info('getDisplayMedia constraints', displayConstraints);
|
||||
|
||||
if (!navigator.mediaDevices?.getDisplayMedia) {
|
||||
throw new Error('navigator.mediaDevices.getDisplayMedia is not available.');
|
||||
}
|
||||
|
||||
return await navigator.mediaDevices.getDisplayMedia(displayConstraints);
|
||||
}
|
||||
|
||||
private buildDisplayMediaConstraints(
|
||||
options: ScreenShareStartOptions,
|
||||
preset: ScreenShareQualityPreset
|
||||
): DisplayMediaStreamOptions {
|
||||
const supportedConstraints = navigator.mediaDevices?.getSupportedConstraints?.() as Record<string, boolean> | undefined;
|
||||
const audioConstraints: Record<string, unknown> | false = options.includeSystemAudio
|
||||
? {
|
||||
echoCancellation: false,
|
||||
noiseSuppression: false,
|
||||
autoGainControl: false
|
||||
}
|
||||
: false;
|
||||
|
||||
if (audioConstraints && supportedConstraints?.['restrictOwnAudio']) {
|
||||
audioConstraints['restrictOwnAudio'] = true;
|
||||
}
|
||||
|
||||
if (audioConstraints && supportedConstraints?.['suppressLocalAudioPlayback']) {
|
||||
audioConstraints['suppressLocalAudioPlayback'] = true;
|
||||
}
|
||||
|
||||
return {
|
||||
video: {
|
||||
width: { ideal: preset.width, max: preset.width },
|
||||
height: { ideal: preset.height, max: preset.height },
|
||||
frameRate: { ideal: preset.frameRate, max: preset.frameRate }
|
||||
},
|
||||
audio: audioConstraints,
|
||||
monitorTypeSurfaces: 'include',
|
||||
selfBrowserSurface: 'exclude',
|
||||
surfaceSwitching: 'include',
|
||||
systemAudio: options.includeSystemAudio ? 'include' : 'exclude'
|
||||
} as DisplayMediaStreamOptions;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,163 @@
|
||||
import { ScreenShareQualityPreset, ScreenShareStartOptions } from '../screen-share.config';
|
||||
import { ELECTRON_ENTIRE_SCREEN_SOURCE_NAME } from '../webrtc.constants';
|
||||
import { WebRTCLogger } from '../webrtc-logger';
|
||||
import {
|
||||
DesktopSource,
|
||||
ElectronDesktopCaptureResult,
|
||||
ElectronDesktopMediaStreamConstraints,
|
||||
ElectronDesktopSourceSelection,
|
||||
ScreenShareElectronApi
|
||||
} from './shared';
|
||||
|
||||
interface DesktopElectronScreenShareCaptureDependencies {
|
||||
getElectronApi(): ScreenShareElectronApi | null;
|
||||
getSelectDesktopSource(): ((
|
||||
sources: readonly DesktopSource[],
|
||||
options: { includeSystemAudio: boolean }
|
||||
) => Promise<ElectronDesktopSourceSelection>) | undefined;
|
||||
}
|
||||
|
||||
export class DesktopElectronScreenShareCapture {
|
||||
constructor(
|
||||
private readonly logger: WebRTCLogger,
|
||||
private readonly dependencies: DesktopElectronScreenShareCaptureDependencies
|
||||
) {}
|
||||
|
||||
isAvailable(): boolean {
|
||||
return !!this.dependencies.getElectronApi()?.getSources && !this.isLinuxElectron();
|
||||
}
|
||||
|
||||
shouldSuppressRemotePlaybackDuringShare(includeSystemAudio: boolean): boolean {
|
||||
return includeSystemAudio && this.isWindowsElectron();
|
||||
}
|
||||
|
||||
async startCapture(
|
||||
options: ScreenShareStartOptions,
|
||||
preset: ScreenShareQualityPreset
|
||||
): Promise<ElectronDesktopCaptureResult> {
|
||||
const electronApi = this.dependencies.getElectronApi();
|
||||
|
||||
if (!electronApi?.getSources) {
|
||||
throw new Error('Electron desktop capture is unavailable.');
|
||||
}
|
||||
|
||||
const sources = await electronApi.getSources();
|
||||
const selection = await this.resolveSourceSelection(sources, options.includeSystemAudio);
|
||||
const captureOptions = {
|
||||
...options,
|
||||
includeSystemAudio: selection.includeSystemAudio
|
||||
};
|
||||
|
||||
if (!selection.source) {
|
||||
throw new Error('No desktop capture sources were available.');
|
||||
}
|
||||
|
||||
this.logger.info('Selected Electron desktop source', {
|
||||
includeSystemAudio: selection.includeSystemAudio,
|
||||
sourceId: selection.source.id,
|
||||
sourceName: selection.source.name
|
||||
});
|
||||
|
||||
const constraints = this.buildConstraints(selection.source.id, captureOptions, preset);
|
||||
|
||||
this.logger.info('desktopCapturer constraints', constraints);
|
||||
|
||||
if (!navigator.mediaDevices?.getUserMedia) {
|
||||
throw new Error('navigator.mediaDevices.getUserMedia is not available (requires HTTPS or localhost).');
|
||||
}
|
||||
|
||||
return {
|
||||
includeSystemAudio: selection.includeSystemAudio,
|
||||
stream: await navigator.mediaDevices.getUserMedia(constraints)
|
||||
};
|
||||
}
|
||||
|
||||
private async resolveSourceSelection(
|
||||
sources: DesktopSource[],
|
||||
includeSystemAudio: boolean
|
||||
): Promise<ElectronDesktopSourceSelection> {
|
||||
const orderedSources = this.sortSources(sources);
|
||||
const defaultSource = orderedSources.find((source) => source.name === ELECTRON_ENTIRE_SCREEN_SOURCE_NAME)
|
||||
?? orderedSources[0];
|
||||
|
||||
if (orderedSources.length === 0) {
|
||||
throw new Error('No desktop capture sources were available.');
|
||||
}
|
||||
|
||||
const selectDesktopSource = this.dependencies.getSelectDesktopSource();
|
||||
|
||||
if (!this.isWindowsElectron() || orderedSources.length < 2 || !selectDesktopSource) {
|
||||
return {
|
||||
includeSystemAudio,
|
||||
source: defaultSource
|
||||
};
|
||||
}
|
||||
|
||||
return await selectDesktopSource(orderedSources, { includeSystemAudio });
|
||||
}
|
||||
|
||||
private sortSources(sources: DesktopSource[]): DesktopSource[] {
|
||||
return [...sources].sort((left, right) => {
|
||||
const weightDiff = this.getSourceWeight(left) - this.getSourceWeight(right);
|
||||
|
||||
if (weightDiff !== 0) {
|
||||
return weightDiff;
|
||||
}
|
||||
|
||||
return left.name.localeCompare(right.name);
|
||||
});
|
||||
}
|
||||
|
||||
private getSourceWeight(source: DesktopSource): number {
|
||||
return source.name === ELECTRON_ENTIRE_SCREEN_SOURCE_NAME || source.id.startsWith('screen')
|
||||
? 0
|
||||
: 1;
|
||||
}
|
||||
|
||||
private buildConstraints(
|
||||
sourceId: string,
|
||||
options: ScreenShareStartOptions,
|
||||
preset: ScreenShareQualityPreset
|
||||
): ElectronDesktopMediaStreamConstraints {
|
||||
const constraints: ElectronDesktopMediaStreamConstraints = {
|
||||
video: {
|
||||
mandatory: {
|
||||
chromeMediaSource: 'desktop',
|
||||
chromeMediaSourceId: sourceId,
|
||||
maxWidth: preset.width,
|
||||
maxHeight: preset.height,
|
||||
maxFrameRate: preset.frameRate
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if (options.includeSystemAudio) {
|
||||
constraints.audio = {
|
||||
mandatory: {
|
||||
chromeMediaSource: 'desktop',
|
||||
chromeMediaSourceId: sourceId
|
||||
}
|
||||
};
|
||||
} else {
|
||||
constraints.audio = false;
|
||||
}
|
||||
|
||||
return constraints;
|
||||
}
|
||||
|
||||
private isLinuxElectron(): boolean {
|
||||
if (!this.dependencies.getElectronApi() || typeof navigator === 'undefined') {
|
||||
return false;
|
||||
}
|
||||
|
||||
return /linux/i.test(`${navigator.userAgent} ${navigator.platform}`);
|
||||
}
|
||||
|
||||
private isWindowsElectron(): boolean {
|
||||
if (!this.isAvailable() || typeof navigator === 'undefined') {
|
||||
return false;
|
||||
}
|
||||
|
||||
return /win/i.test(`${navigator.userAgent} ${navigator.platform}`);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,439 @@
|
||||
import { ScreenShareQualityPreset, ScreenShareStartOptions } from '../screen-share.config';
|
||||
import { WebRTCLogger } from '../webrtc-logger';
|
||||
import {
|
||||
LinuxScreenShareAudioRoutingInfo,
|
||||
LinuxScreenShareMonitorAudioChunkPayload,
|
||||
LinuxScreenShareMonitorAudioEndedPayload,
|
||||
LinuxScreenShareMonitorCaptureInfo,
|
||||
ScreenShareElectronApi
|
||||
} from './shared';
|
||||
|
||||
interface LinuxScreenShareMonitorAudioPipeline {
|
||||
audioContext: AudioContext;
|
||||
audioTrack: MediaStreamTrack;
|
||||
bitsPerSample: number;
|
||||
captureId: string;
|
||||
channelCount: number;
|
||||
mediaDestination: MediaStreamAudioDestinationNode;
|
||||
nextStartTime: number;
|
||||
pendingBytes: Uint8Array;
|
||||
sampleRate: number;
|
||||
unsubscribeChunk: () => void;
|
||||
unsubscribeEnded: () => void;
|
||||
}
|
||||
|
||||
interface LinuxElectronScreenShareCaptureDependencies {
|
||||
getElectronApi(): ScreenShareElectronApi | null;
|
||||
onCaptureEnded(): void;
|
||||
startDisplayMedia(options: ScreenShareStartOptions, preset: ScreenShareQualityPreset): Promise<MediaStream>;
|
||||
}
|
||||
|
||||
export class LinuxElectronScreenShareCapture {
|
||||
private audioRoutingActive = false;
|
||||
private audioRoutingResetPromise: Promise<void> | null = null;
|
||||
private monitorAudioPipeline: LinuxScreenShareMonitorAudioPipeline | null = null;
|
||||
|
||||
constructor(
|
||||
private readonly logger: WebRTCLogger,
|
||||
private readonly dependencies: LinuxElectronScreenShareCaptureDependencies
|
||||
) {}
|
||||
|
||||
isSupported(): boolean {
|
||||
if (typeof window === 'undefined' || typeof navigator === 'undefined') {
|
||||
return false;
|
||||
}
|
||||
|
||||
const electronApi = this.dependencies.getElectronApi();
|
||||
const platformHint = `${navigator.userAgent} ${navigator.platform}`;
|
||||
|
||||
return !!electronApi?.prepareLinuxScreenShareAudioRouting
|
||||
&& !!electronApi?.activateLinuxScreenShareAudioRouting
|
||||
&& !!electronApi?.deactivateLinuxScreenShareAudioRouting
|
||||
&& !!electronApi?.startLinuxScreenShareMonitorCapture
|
||||
&& !!electronApi?.stopLinuxScreenShareMonitorCapture
|
||||
&& !!electronApi?.onLinuxScreenShareMonitorAudioChunk
|
||||
&& !!electronApi?.onLinuxScreenShareMonitorAudioEnded
|
||||
&& /linux/i.test(platformHint);
|
||||
}
|
||||
|
||||
async awaitPendingReset(): Promise<void> {
|
||||
if (!this.audioRoutingResetPromise) {
|
||||
return;
|
||||
}
|
||||
|
||||
await this.audioRoutingResetPromise;
|
||||
}
|
||||
|
||||
scheduleReset(): void {
|
||||
if (!this.audioRoutingActive || this.audioRoutingResetPromise) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.audioRoutingResetPromise = this.resetAudioRouting()
|
||||
.catch((error) => {
|
||||
this.logger.warn('Failed to reset Linux Electron audio routing', error);
|
||||
})
|
||||
.finally(() => {
|
||||
this.audioRoutingResetPromise = null;
|
||||
});
|
||||
}
|
||||
|
||||
async startCapture(
|
||||
options: ScreenShareStartOptions,
|
||||
preset: ScreenShareQualityPreset
|
||||
): Promise<MediaStream> {
|
||||
const electronApi = this.getRequiredElectronApi();
|
||||
const routingInfo = await electronApi.prepareLinuxScreenShareAudioRouting();
|
||||
|
||||
this.assertAudioRoutingReady(routingInfo, 'Linux Electron audio routing is unavailable.');
|
||||
|
||||
let desktopStream: MediaStream | null = null;
|
||||
|
||||
try {
|
||||
const activation = await electronApi.activateLinuxScreenShareAudioRouting();
|
||||
|
||||
this.assertAudioRoutingReady(activation, 'Failed to activate Linux Electron audio routing.');
|
||||
|
||||
if (!activation.active) {
|
||||
throw new Error(activation.reason || 'Failed to activate Linux Electron audio routing.');
|
||||
}
|
||||
|
||||
desktopStream = await this.dependencies.startDisplayMedia({
|
||||
...options,
|
||||
includeSystemAudio: false
|
||||
}, preset);
|
||||
|
||||
const { audioTrack, captureInfo } = await this.startMonitorTrack();
|
||||
const stream = new MediaStream([...desktopStream.getVideoTracks(), audioTrack]);
|
||||
|
||||
desktopStream.getAudioTracks().forEach((track) => track.stop());
|
||||
|
||||
this.audioRoutingActive = true;
|
||||
this.logger.info('Linux Electron screen-share audio routing enabled', {
|
||||
screenShareMonitorSourceName: captureInfo.sourceName,
|
||||
voiceSinkName: activation.voiceSinkName
|
||||
});
|
||||
|
||||
return stream;
|
||||
} catch (error) {
|
||||
desktopStream?.getTracks().forEach((track) => track.stop());
|
||||
await this.resetAudioRouting();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private getRequiredElectronApi(): Required<Pick<
|
||||
ScreenShareElectronApi,
|
||||
| 'prepareLinuxScreenShareAudioRouting'
|
||||
| 'activateLinuxScreenShareAudioRouting'
|
||||
| 'deactivateLinuxScreenShareAudioRouting'
|
||||
| 'startLinuxScreenShareMonitorCapture'
|
||||
| 'stopLinuxScreenShareMonitorCapture'
|
||||
| 'onLinuxScreenShareMonitorAudioChunk'
|
||||
| 'onLinuxScreenShareMonitorAudioEnded'
|
||||
>> {
|
||||
const electronApi = this.dependencies.getElectronApi();
|
||||
|
||||
if (!electronApi?.prepareLinuxScreenShareAudioRouting
|
||||
|| !electronApi.activateLinuxScreenShareAudioRouting
|
||||
|| !electronApi.deactivateLinuxScreenShareAudioRouting
|
||||
|| !electronApi.startLinuxScreenShareMonitorCapture
|
||||
|| !electronApi.stopLinuxScreenShareMonitorCapture
|
||||
|| !electronApi.onLinuxScreenShareMonitorAudioChunk
|
||||
|| !electronApi.onLinuxScreenShareMonitorAudioEnded) {
|
||||
throw new Error('Linux Electron audio routing is unavailable.');
|
||||
}
|
||||
|
||||
return {
|
||||
prepareLinuxScreenShareAudioRouting: electronApi.prepareLinuxScreenShareAudioRouting,
|
||||
activateLinuxScreenShareAudioRouting: electronApi.activateLinuxScreenShareAudioRouting,
|
||||
deactivateLinuxScreenShareAudioRouting: electronApi.deactivateLinuxScreenShareAudioRouting,
|
||||
startLinuxScreenShareMonitorCapture: electronApi.startLinuxScreenShareMonitorCapture,
|
||||
stopLinuxScreenShareMonitorCapture: electronApi.stopLinuxScreenShareMonitorCapture,
|
||||
onLinuxScreenShareMonitorAudioChunk: electronApi.onLinuxScreenShareMonitorAudioChunk,
|
||||
onLinuxScreenShareMonitorAudioEnded: electronApi.onLinuxScreenShareMonitorAudioEnded
|
||||
};
|
||||
}
|
||||
|
||||
private assertAudioRoutingReady(
|
||||
routingInfo: LinuxScreenShareAudioRoutingInfo,
|
||||
unavailableReason: string
|
||||
): void {
|
||||
if (!routingInfo.available) {
|
||||
throw new Error(routingInfo.reason || unavailableReason);
|
||||
}
|
||||
|
||||
if (!routingInfo.monitorCaptureSupported) {
|
||||
throw new Error('Linux screen-share monitor capture requires restarting the desktop app so the new Electron main process can load.');
|
||||
}
|
||||
}
|
||||
|
||||
private async resetAudioRouting(): Promise<void> {
|
||||
const electronApi = this.dependencies.getElectronApi();
|
||||
const captureId = this.monitorAudioPipeline?.captureId;
|
||||
|
||||
this.audioRoutingActive = false;
|
||||
|
||||
this.disposeMonitorAudioPipeline();
|
||||
|
||||
try {
|
||||
if (captureId && electronApi?.stopLinuxScreenShareMonitorCapture) {
|
||||
await electronApi.stopLinuxScreenShareMonitorCapture(captureId);
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.warn('Failed to stop Linux screen-share monitor capture', error);
|
||||
}
|
||||
|
||||
try {
|
||||
if (electronApi?.deactivateLinuxScreenShareAudioRouting) {
|
||||
await electronApi.deactivateLinuxScreenShareAudioRouting();
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.warn('Failed to deactivate Linux Electron audio routing', error);
|
||||
}
|
||||
}
|
||||
|
||||
private async startMonitorTrack(): Promise<{
|
||||
audioTrack: MediaStreamTrack;
|
||||
captureInfo: LinuxScreenShareMonitorCaptureInfo;
|
||||
}> {
|
||||
const electronApi = this.dependencies.getElectronApi();
|
||||
|
||||
if (!electronApi?.startLinuxScreenShareMonitorCapture
|
||||
|| !electronApi?.stopLinuxScreenShareMonitorCapture
|
||||
|| !electronApi?.onLinuxScreenShareMonitorAudioChunk
|
||||
|| !electronApi?.onLinuxScreenShareMonitorAudioEnded) {
|
||||
throw new Error('Linux screen-share monitor capture is unavailable.');
|
||||
}
|
||||
|
||||
const queuedChunksByCaptureId = new Map<string, Uint8Array[]>();
|
||||
const queuedEndedReasons = new Map<string, string | undefined>();
|
||||
|
||||
let pipeline: LinuxScreenShareMonitorAudioPipeline | null = null;
|
||||
let captureInfo: LinuxScreenShareMonitorCaptureInfo | null = null;
|
||||
|
||||
const queueChunk = (captureId: string, chunk: Uint8Array): void => {
|
||||
const queuedChunks = queuedChunksByCaptureId.get(captureId) || [];
|
||||
|
||||
queuedChunks.push(this.copyBytes(chunk));
|
||||
queuedChunksByCaptureId.set(captureId, queuedChunks);
|
||||
};
|
||||
const onChunk = (payload: LinuxScreenShareMonitorAudioChunkPayload): void => {
|
||||
if (!pipeline || payload.captureId !== pipeline.captureId) {
|
||||
queueChunk(payload.captureId, payload.chunk);
|
||||
return;
|
||||
}
|
||||
|
||||
this.handleMonitorAudioChunk(pipeline, payload.chunk);
|
||||
};
|
||||
const onEnded = (payload: LinuxScreenShareMonitorAudioEndedPayload): void => {
|
||||
if (!pipeline || payload.captureId !== pipeline.captureId) {
|
||||
queuedEndedReasons.set(payload.captureId, payload.reason);
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.warn('Linux screen-share monitor capture ended', payload);
|
||||
this.dependencies.onCaptureEnded();
|
||||
};
|
||||
const unsubscribeChunk = electronApi.onLinuxScreenShareMonitorAudioChunk(onChunk) as () => void;
|
||||
const unsubscribeEnded = electronApi.onLinuxScreenShareMonitorAudioEnded(onEnded) as () => void;
|
||||
|
||||
try {
|
||||
captureInfo = await electronApi.startLinuxScreenShareMonitorCapture() as LinuxScreenShareMonitorCaptureInfo;
|
||||
|
||||
const audioContext = new AudioContext({ sampleRate: captureInfo.sampleRate });
|
||||
const mediaDestination = audioContext.createMediaStreamDestination();
|
||||
|
||||
await audioContext.resume();
|
||||
|
||||
const audioTrack = mediaDestination.stream.getAudioTracks()[0];
|
||||
|
||||
if (!audioTrack) {
|
||||
throw new Error('Renderer audio pipeline did not produce a screen-share monitor track.');
|
||||
}
|
||||
|
||||
pipeline = {
|
||||
audioContext,
|
||||
audioTrack,
|
||||
bitsPerSample: captureInfo.bitsPerSample,
|
||||
captureId: captureInfo.captureId,
|
||||
channelCount: captureInfo.channelCount,
|
||||
mediaDestination,
|
||||
nextStartTime: audioContext.currentTime + 0.05,
|
||||
pendingBytes: new Uint8Array(0),
|
||||
sampleRate: captureInfo.sampleRate,
|
||||
unsubscribeChunk,
|
||||
unsubscribeEnded
|
||||
};
|
||||
|
||||
this.monitorAudioPipeline = pipeline;
|
||||
const activeCaptureId = captureInfo.captureId;
|
||||
|
||||
audioTrack.addEventListener('ended', () => {
|
||||
if (this.monitorAudioPipeline?.captureId === activeCaptureId) {
|
||||
this.dependencies.onCaptureEnded();
|
||||
}
|
||||
}, { once: true });
|
||||
|
||||
const queuedChunks = queuedChunksByCaptureId.get(captureInfo.captureId) || [];
|
||||
const activePipeline = pipeline;
|
||||
|
||||
queuedChunks.forEach((chunk) => {
|
||||
this.handleMonitorAudioChunk(activePipeline, chunk);
|
||||
});
|
||||
|
||||
queuedChunksByCaptureId.delete(captureInfo.captureId);
|
||||
|
||||
if (queuedEndedReasons.has(captureInfo.captureId)) {
|
||||
throw new Error(queuedEndedReasons.get(captureInfo.captureId)
|
||||
|| 'Linux screen-share monitor capture ended before audio initialisation completed.');
|
||||
}
|
||||
|
||||
return {
|
||||
audioTrack,
|
||||
captureInfo
|
||||
};
|
||||
} catch (error) {
|
||||
if (pipeline) {
|
||||
this.disposeMonitorAudioPipeline(pipeline.captureId);
|
||||
} else {
|
||||
unsubscribeChunk();
|
||||
unsubscribeEnded();
|
||||
}
|
||||
|
||||
try {
|
||||
await electronApi.stopLinuxScreenShareMonitorCapture(captureInfo?.captureId);
|
||||
} catch (stopError) {
|
||||
this.logger.warn('Failed to stop Linux screen-share monitor capture after startup failure', stopError);
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private disposeMonitorAudioPipeline(captureId?: string): void {
|
||||
if (!this.monitorAudioPipeline) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (captureId && captureId !== this.monitorAudioPipeline.captureId) {
|
||||
return;
|
||||
}
|
||||
|
||||
const pipeline = this.monitorAudioPipeline;
|
||||
|
||||
this.monitorAudioPipeline = null;
|
||||
pipeline.unsubscribeChunk();
|
||||
pipeline.unsubscribeEnded();
|
||||
pipeline.audioTrack.stop();
|
||||
pipeline.pendingBytes = new Uint8Array(0);
|
||||
|
||||
void pipeline.audioContext.close().catch((error) => {
|
||||
this.logger.warn('Failed to close Linux screen-share monitor audio context', error);
|
||||
});
|
||||
}
|
||||
|
||||
private handleMonitorAudioChunk(
|
||||
pipeline: LinuxScreenShareMonitorAudioPipeline,
|
||||
chunk: Uint8Array
|
||||
): void {
|
||||
if (pipeline.bitsPerSample !== 16) {
|
||||
this.logger.warn('Unsupported Linux screen-share monitor capture sample size', {
|
||||
bitsPerSample: pipeline.bitsPerSample,
|
||||
captureId: pipeline.captureId
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
const bytesPerSample = pipeline.bitsPerSample / 8;
|
||||
const bytesPerFrame = bytesPerSample * pipeline.channelCount;
|
||||
|
||||
if (!Number.isFinite(bytesPerFrame) || bytesPerFrame <= 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const combinedBytes = this.concatBytes(pipeline.pendingBytes, chunk);
|
||||
const completeByteLength = combinedBytes.byteLength - (combinedBytes.byteLength % bytesPerFrame);
|
||||
|
||||
if (completeByteLength <= 0) {
|
||||
pipeline.pendingBytes = combinedBytes;
|
||||
return;
|
||||
}
|
||||
|
||||
const completeBytes = combinedBytes.subarray(0, completeByteLength);
|
||||
|
||||
pipeline.pendingBytes = this.copyBytes(combinedBytes.subarray(completeByteLength));
|
||||
|
||||
if (pipeline.audioContext.state !== 'running') {
|
||||
void pipeline.audioContext.resume().catch((error) => {
|
||||
this.logger.warn('Failed to resume Linux screen-share monitor audio context', error);
|
||||
});
|
||||
}
|
||||
|
||||
const frameCount = completeByteLength / bytesPerFrame;
|
||||
const audioBuffer = this.createAudioBuffer(pipeline, completeBytes, frameCount);
|
||||
const source = pipeline.audioContext.createBufferSource();
|
||||
|
||||
source.buffer = audioBuffer;
|
||||
source.connect(pipeline.mediaDestination);
|
||||
|
||||
source.onended = () => {
|
||||
source.disconnect();
|
||||
};
|
||||
|
||||
const now = pipeline.audioContext.currentTime;
|
||||
const startTime = Math.max(pipeline.nextStartTime, now + 0.02);
|
||||
|
||||
source.start(startTime);
|
||||
pipeline.nextStartTime = startTime + audioBuffer.duration;
|
||||
}
|
||||
|
||||
private createAudioBuffer(
|
||||
pipeline: LinuxScreenShareMonitorAudioPipeline,
|
||||
bytes: Uint8Array,
|
||||
frameCount: number
|
||||
): AudioBuffer {
|
||||
const audioBuffer = pipeline.audioContext.createBuffer(pipeline.channelCount, frameCount, pipeline.sampleRate);
|
||||
const sampleData = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
|
||||
const channelData = Array.from(
|
||||
{ length: pipeline.channelCount },
|
||||
(_, channelIndex) => audioBuffer.getChannelData(channelIndex)
|
||||
);
|
||||
const bytesPerSample = pipeline.bitsPerSample / 8;
|
||||
const bytesPerFrame = bytesPerSample * pipeline.channelCount;
|
||||
|
||||
for (let frameIndex = 0; frameIndex < frameCount; frameIndex += 1) {
|
||||
const frameOffset = frameIndex * bytesPerFrame;
|
||||
|
||||
for (let channelIndex = 0; channelIndex < pipeline.channelCount; channelIndex += 1) {
|
||||
const sampleOffset = frameOffset + (channelIndex * bytesPerSample);
|
||||
|
||||
channelData[channelIndex][frameIndex] = sampleData.getInt16(sampleOffset, true) / 32768;
|
||||
}
|
||||
}
|
||||
|
||||
return audioBuffer;
|
||||
}
|
||||
|
||||
private concatBytes(first: Uint8Array, second: Uint8Array): Uint8Array {
|
||||
if (first.byteLength === 0) {
|
||||
return this.copyBytes(second);
|
||||
}
|
||||
|
||||
if (second.byteLength === 0) {
|
||||
return this.copyBytes(first);
|
||||
}
|
||||
|
||||
const combined = new Uint8Array(first.byteLength + second.byteLength);
|
||||
|
||||
combined.set(first, 0);
|
||||
combined.set(second, first.byteLength);
|
||||
|
||||
return combined;
|
||||
}
|
||||
|
||||
private copyBytes(bytes: Uint8Array): Uint8Array {
|
||||
return bytes.byteLength > 0 ? new Uint8Array(bytes) : new Uint8Array(0);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,80 @@
|
||||
export interface DesktopSource {
|
||||
id: string;
|
||||
name: string;
|
||||
thumbnail: string;
|
||||
}
|
||||
|
||||
export interface ElectronDesktopSourceSelection {
|
||||
includeSystemAudio: boolean;
|
||||
source: DesktopSource;
|
||||
}
|
||||
|
||||
export interface ElectronDesktopCaptureResult {
|
||||
includeSystemAudio: boolean;
|
||||
stream: MediaStream;
|
||||
}
|
||||
|
||||
export interface LinuxScreenShareAudioRoutingInfo {
|
||||
available: boolean;
|
||||
active: boolean;
|
||||
monitorCaptureSupported: boolean;
|
||||
screenShareSinkName: string;
|
||||
screenShareMonitorSourceName: string;
|
||||
voiceSinkName: string;
|
||||
reason?: string;
|
||||
}
|
||||
|
||||
export interface LinuxScreenShareMonitorCaptureInfo {
|
||||
bitsPerSample: number;
|
||||
captureId: string;
|
||||
channelCount: number;
|
||||
sampleRate: number;
|
||||
sourceName: string;
|
||||
}
|
||||
|
||||
export interface LinuxScreenShareMonitorAudioChunkPayload {
|
||||
captureId: string;
|
||||
chunk: Uint8Array;
|
||||
}
|
||||
|
||||
export interface LinuxScreenShareMonitorAudioEndedPayload {
|
||||
captureId: string;
|
||||
reason?: string;
|
||||
}
|
||||
|
||||
export interface ScreenShareElectronApi {
|
||||
getSources?: () => Promise<DesktopSource[]>;
|
||||
prepareLinuxScreenShareAudioRouting?: () => Promise<LinuxScreenShareAudioRoutingInfo>;
|
||||
activateLinuxScreenShareAudioRouting?: () => Promise<LinuxScreenShareAudioRoutingInfo>;
|
||||
deactivateLinuxScreenShareAudioRouting?: () => Promise<boolean>;
|
||||
startLinuxScreenShareMonitorCapture?: () => Promise<LinuxScreenShareMonitorCaptureInfo>;
|
||||
stopLinuxScreenShareMonitorCapture?: (captureId?: string) => Promise<boolean>;
|
||||
onLinuxScreenShareMonitorAudioChunk?: (listener: (payload: LinuxScreenShareMonitorAudioChunkPayload) => void) => () => void;
|
||||
onLinuxScreenShareMonitorAudioEnded?: (listener: (payload: LinuxScreenShareMonitorAudioEndedPayload) => void) => () => void;
|
||||
}
|
||||
|
||||
export type ElectronDesktopVideoConstraint = MediaTrackConstraints & {
|
||||
mandatory: {
|
||||
chromeMediaSource: 'desktop';
|
||||
chromeMediaSourceId: string;
|
||||
maxWidth: number;
|
||||
maxHeight: number;
|
||||
maxFrameRate: number;
|
||||
};
|
||||
};
|
||||
|
||||
export type ElectronDesktopAudioConstraint = MediaTrackConstraints & {
|
||||
mandatory: {
|
||||
chromeMediaSource: 'desktop';
|
||||
chromeMediaSourceId: string;
|
||||
};
|
||||
};
|
||||
|
||||
export interface ElectronDesktopMediaStreamConstraints extends MediaStreamConstraints {
|
||||
video: ElectronDesktopVideoConstraint;
|
||||
audio?: false | ElectronDesktopAudioConstraint;
|
||||
}
|
||||
|
||||
export type ScreenShareWindow = Window & {
|
||||
electronAPI?: ScreenShareElectronApi;
|
||||
};
|
||||
@@ -9,8 +9,7 @@ import {
|
||||
TRACK_KIND_AUDIO,
|
||||
TRACK_KIND_VIDEO,
|
||||
TRANSCEIVER_SEND_RECV,
|
||||
TRANSCEIVER_RECV_ONLY,
|
||||
ELECTRON_ENTIRE_SCREEN_SOURCE_NAME
|
||||
TRANSCEIVER_RECV_ONLY
|
||||
} from './webrtc.constants';
|
||||
import {
|
||||
DEFAULT_SCREEN_SHARE_START_OPTIONS,
|
||||
@@ -18,6 +17,10 @@ import {
|
||||
ScreenShareQualityPreset,
|
||||
ScreenShareStartOptions
|
||||
} from './screen-share.config';
|
||||
import { BrowserScreenShareCapture } from './screen-share-platforms/browser-screen-share.capture';
|
||||
import { DesktopElectronScreenShareCapture } from './screen-share-platforms/desktop-electron-screen-share.capture';
|
||||
import { LinuxElectronScreenShareCapture } from './screen-share-platforms/linux-electron-screen-share.capture';
|
||||
import { ScreenShareElectronApi, ScreenShareWindow } from './screen-share-platforms/shared';
|
||||
|
||||
/**
|
||||
* Callbacks the ScreenShareManager needs from the owning service.
|
||||
@@ -45,103 +48,9 @@ export interface LocalScreenShareState {
|
||||
includeSystemAudio: boolean;
|
||||
stream: MediaStream | null;
|
||||
suppressRemotePlayback: boolean;
|
||||
forceDefaultRemotePlaybackOutput: boolean;
|
||||
}
|
||||
|
||||
interface LinuxScreenShareAudioRoutingInfo {
|
||||
available: boolean;
|
||||
active: boolean;
|
||||
monitorCaptureSupported: boolean;
|
||||
screenShareSinkName: string;
|
||||
screenShareMonitorSourceName: string;
|
||||
voiceSinkName: string;
|
||||
reason?: string;
|
||||
}
|
||||
|
||||
interface LinuxScreenShareMonitorCaptureInfo {
|
||||
bitsPerSample: number;
|
||||
captureId: string;
|
||||
channelCount: number;
|
||||
sampleRate: number;
|
||||
sourceName: string;
|
||||
}
|
||||
|
||||
interface LinuxScreenShareMonitorAudioChunkPayload {
|
||||
captureId: string;
|
||||
chunk: Uint8Array;
|
||||
}
|
||||
|
||||
interface LinuxScreenShareMonitorAudioEndedPayload {
|
||||
captureId: string;
|
||||
reason?: string;
|
||||
}
|
||||
|
||||
interface LinuxScreenShareMonitorAudioPipeline {
|
||||
audioContext: AudioContext;
|
||||
audioTrack: MediaStreamTrack;
|
||||
bitsPerSample: number;
|
||||
captureId: string;
|
||||
channelCount: number;
|
||||
mediaDestination: MediaStreamAudioDestinationNode;
|
||||
nextStartTime: number;
|
||||
pendingBytes: Uint8Array;
|
||||
sampleRate: number;
|
||||
unsubscribeChunk: () => void;
|
||||
unsubscribeEnded: () => void;
|
||||
}
|
||||
|
||||
export interface DesktopSource {
|
||||
id: string;
|
||||
name: string;
|
||||
thumbnail: string;
|
||||
}
|
||||
|
||||
interface ElectronDesktopSourceSelection {
|
||||
includeSystemAudio: boolean;
|
||||
source: DesktopSource;
|
||||
}
|
||||
|
||||
interface ElectronDesktopCaptureResult {
|
||||
includeSystemAudio: boolean;
|
||||
stream: MediaStream;
|
||||
}
|
||||
|
||||
interface ScreenShareElectronApi {
|
||||
getSources?: () => Promise<DesktopSource[]>;
|
||||
prepareLinuxScreenShareAudioRouting?: () => Promise<LinuxScreenShareAudioRoutingInfo>;
|
||||
activateLinuxScreenShareAudioRouting?: () => Promise<LinuxScreenShareAudioRoutingInfo>;
|
||||
deactivateLinuxScreenShareAudioRouting?: () => Promise<boolean>;
|
||||
startLinuxScreenShareMonitorCapture?: () => Promise<LinuxScreenShareMonitorCaptureInfo>;
|
||||
stopLinuxScreenShareMonitorCapture?: (captureId?: string) => Promise<boolean>;
|
||||
onLinuxScreenShareMonitorAudioChunk?: (listener: (payload: LinuxScreenShareMonitorAudioChunkPayload) => void) => () => void;
|
||||
onLinuxScreenShareMonitorAudioEnded?: (listener: (payload: LinuxScreenShareMonitorAudioEndedPayload) => void) => () => void;
|
||||
}
|
||||
|
||||
type ElectronDesktopVideoConstraint = MediaTrackConstraints & {
|
||||
mandatory: {
|
||||
chromeMediaSource: 'desktop';
|
||||
chromeMediaSourceId: string;
|
||||
maxWidth: number;
|
||||
maxHeight: number;
|
||||
maxFrameRate: number;
|
||||
};
|
||||
};
|
||||
|
||||
type ElectronDesktopAudioConstraint = MediaTrackConstraints & {
|
||||
mandatory: {
|
||||
chromeMediaSource: 'desktop';
|
||||
chromeMediaSourceId: string;
|
||||
};
|
||||
};
|
||||
|
||||
interface ElectronDesktopMediaStreamConstraints extends MediaStreamConstraints {
|
||||
video: ElectronDesktopVideoConstraint;
|
||||
audio?: false | ElectronDesktopAudioConstraint;
|
||||
}
|
||||
|
||||
type ScreenShareWindow = Window & {
|
||||
electronAPI?: ScreenShareElectronApi;
|
||||
};
|
||||
|
||||
export class ScreenShareManager {
|
||||
/** The active screen-capture stream. */
|
||||
private activeScreenStream: MediaStream | null = null;
|
||||
@@ -155,22 +64,39 @@ export class ScreenShareManager {
|
||||
/** Remote peers that explicitly requested screen-share video. */
|
||||
private readonly requestedViewerPeerIds = new Set<string>();
|
||||
|
||||
/** Browser `getDisplayMedia` capture path. */
|
||||
private readonly browserScreenShareCapture: BrowserScreenShareCapture;
|
||||
|
||||
/** Desktop Electron capture path for non-Linux desktop builds. */
|
||||
private readonly desktopElectronScreenShareCapture: DesktopElectronScreenShareCapture;
|
||||
|
||||
/** Linux Electron screen/audio capture path with isolated audio routing. */
|
||||
private readonly linuxElectronScreenShareCapture: LinuxElectronScreenShareCapture;
|
||||
|
||||
/** Whether screen sharing is currently active. */
|
||||
private isScreenActive = false;
|
||||
|
||||
/** Whether Linux-specific Electron audio routing is currently active. */
|
||||
private linuxElectronAudioRoutingActive = false;
|
||||
|
||||
/** Pending teardown of Linux-specific Electron audio routing. */
|
||||
private linuxAudioRoutingResetPromise: Promise<void> | null = null;
|
||||
|
||||
/** Renderer-side audio pipeline for Linux monitor-source capture. */
|
||||
private linuxMonitorAudioPipeline: LinuxScreenShareMonitorAudioPipeline | null = null;
|
||||
|
||||
constructor(
|
||||
private readonly logger: WebRTCLogger,
|
||||
private callbacks: ScreenShareCallbacks
|
||||
) {}
|
||||
) {
|
||||
this.browserScreenShareCapture = new BrowserScreenShareCapture(this.logger);
|
||||
this.desktopElectronScreenShareCapture = new DesktopElectronScreenShareCapture(this.logger, {
|
||||
getElectronApi: () => this.getElectronApi(),
|
||||
getSelectDesktopSource: () => this.callbacks.selectDesktopSource
|
||||
});
|
||||
|
||||
this.linuxElectronScreenShareCapture = new LinuxElectronScreenShareCapture(this.logger, {
|
||||
getElectronApi: () => this.getElectronApi(),
|
||||
onCaptureEnded: () => {
|
||||
if (this.isScreenActive) {
|
||||
this.stopScreenShare();
|
||||
}
|
||||
},
|
||||
startDisplayMedia: async (options, preset) =>
|
||||
await this.browserScreenShareCapture.startCapture(options, preset)
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Replace the callback set at runtime.
|
||||
@@ -190,10 +116,12 @@ export class ScreenShareManager {
|
||||
/**
|
||||
* Begin screen sharing.
|
||||
*
|
||||
* On Linux Electron builds, prefers a dedicated PulseAudio/PipeWire routing
|
||||
* path so remote voice playback is kept out of captured system audio.
|
||||
* On other Electron builds, uses desktop capture. In browser contexts, uses
|
||||
* `getDisplayMedia`.
|
||||
* On Linux Electron builds, prefers a dedicated PulseAudio/PipeWire routing
|
||||
* path so remote voice playback is kept out of captured system audio.
|
||||
* On Windows Electron builds, prefers `getDisplayMedia` with system audio
|
||||
* so the separate mic `getUserMedia` stream is not disrupted; falls back to
|
||||
* Electron desktop capture only when `getDisplayMedia` fails entirely.
|
||||
* In browser contexts, uses `getDisplayMedia`.
|
||||
*
|
||||
* @param options - Screen-share capture options.
|
||||
* @returns The captured screen {@link MediaStream}.
|
||||
@@ -205,7 +133,7 @@ export class ScreenShareManager {
|
||||
...options
|
||||
};
|
||||
const preset = SCREEN_SHARE_QUALITY_PRESETS[shareOptions.quality];
|
||||
const electronDesktopCaptureAvailable = this.isElectronDesktopCaptureAvailable();
|
||||
const electronDesktopCaptureAvailable = this.desktopElectronScreenShareCapture.isAvailable();
|
||||
|
||||
let captureMethod: ScreenShareCaptureMethod | null = null;
|
||||
|
||||
@@ -216,13 +144,13 @@ export class ScreenShareManager {
|
||||
this.stopScreenShare();
|
||||
}
|
||||
|
||||
await this.awaitPendingLinuxAudioRoutingReset();
|
||||
await this.linuxElectronScreenShareCapture.awaitPendingReset();
|
||||
|
||||
this.activeScreenStream = null;
|
||||
|
||||
if (shareOptions.includeSystemAudio && this.isLinuxElectronAudioRoutingSupported()) {
|
||||
if (shareOptions.includeSystemAudio && this.linuxElectronScreenShareCapture.isSupported()) {
|
||||
try {
|
||||
this.activeScreenStream = await this.startWithLinuxElectronAudioRouting(shareOptions, preset);
|
||||
this.activeScreenStream = await this.linuxElectronScreenShareCapture.startCapture(shareOptions, preset);
|
||||
captureMethod = 'linux-electron';
|
||||
} catch (error) {
|
||||
this.rethrowIfScreenShareAborted(error);
|
||||
@@ -230,16 +158,28 @@ export class ScreenShareManager {
|
||||
}
|
||||
}
|
||||
|
||||
if (!this.activeScreenStream && shareOptions.includeSystemAudio && !electronDesktopCaptureAvailable) {
|
||||
if (!this.activeScreenStream && shareOptions.includeSystemAudio) {
|
||||
try {
|
||||
this.activeScreenStream = await this.startWithDisplayMedia(shareOptions, preset);
|
||||
this.activeScreenStream = await this.browserScreenShareCapture.startCapture(shareOptions, preset);
|
||||
captureMethod = 'display-media';
|
||||
|
||||
if (this.activeScreenStream.getAudioTracks().length === 0) {
|
||||
this.logger.warn('getDisplayMedia did not provide system audio; trying Electron desktop capture');
|
||||
this.activeScreenStream.getTracks().forEach((track) => track.stop());
|
||||
this.activeScreenStream = null;
|
||||
captureMethod = null;
|
||||
if (electronDesktopCaptureAvailable) {
|
||||
// On Windows Electron, keep the getDisplayMedia stream for video
|
||||
// rather than falling through to getUserMedia desktop audio which
|
||||
// can replace or kill the active mic stream.
|
||||
this.logger.warn(
|
||||
'getDisplayMedia did not provide system audio; '
|
||||
+ 'continuing without system audio to preserve mic stream'
|
||||
);
|
||||
|
||||
shareOptions.includeSystemAudio = false;
|
||||
} else {
|
||||
this.logger.warn('getDisplayMedia did not provide system audio; trying next capture method');
|
||||
this.activeScreenStream.getTracks().forEach((track) => track.stop());
|
||||
this.activeScreenStream = null;
|
||||
captureMethod = null;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
this.rethrowIfScreenShareAborted(error);
|
||||
@@ -249,7 +189,7 @@ export class ScreenShareManager {
|
||||
|
||||
if (!this.activeScreenStream && electronDesktopCaptureAvailable) {
|
||||
try {
|
||||
const electronCapture = await this.startWithElectronDesktopCapturer(shareOptions, preset);
|
||||
const electronCapture = await this.desktopElectronScreenShareCapture.startCapture(shareOptions, preset);
|
||||
|
||||
this.activeScreenStream = electronCapture.stream;
|
||||
shareOptions.includeSystemAudio = electronCapture.includeSystemAudio;
|
||||
@@ -261,7 +201,7 @@ export class ScreenShareManager {
|
||||
}
|
||||
|
||||
if (!this.activeScreenStream) {
|
||||
this.activeScreenStream = await this.startWithDisplayMedia(shareOptions, preset);
|
||||
this.activeScreenStream = await this.browserScreenShareCapture.startCapture(shareOptions, preset);
|
||||
captureMethod = 'display-media';
|
||||
}
|
||||
|
||||
@@ -308,7 +248,7 @@ export class ScreenShareManager {
|
||||
this.activeScreenStream = null;
|
||||
}
|
||||
|
||||
this.scheduleLinuxAudioRoutingReset();
|
||||
this.linuxElectronScreenShareCapture.scheduleReset();
|
||||
|
||||
this.screenAudioStream = null;
|
||||
this.activeScreenPreset = null;
|
||||
@@ -390,26 +330,6 @@ export class ScreenShareManager {
|
||||
: null;
|
||||
}
|
||||
|
||||
private isElectronDesktopCaptureAvailable(): boolean {
|
||||
return !!this.getElectronApi()?.getSources && !this.isLinuxElectron();
|
||||
}
|
||||
|
||||
private isLinuxElectron(): boolean {
|
||||
if (!this.getElectronApi() || typeof navigator === 'undefined') {
|
||||
return false;
|
||||
}
|
||||
|
||||
return /linux/i.test(`${navigator.userAgent} ${navigator.platform}`);
|
||||
}
|
||||
|
||||
private isWindowsElectron(): boolean {
|
||||
if (!this.isElectronDesktopCaptureAvailable() || typeof navigator === 'undefined') {
|
||||
return false;
|
||||
}
|
||||
|
||||
return /win/i.test(`${navigator.userAgent} ${navigator.platform}`);
|
||||
}
|
||||
|
||||
private publishLocalScreenShareState(
|
||||
includeSystemAudio: boolean,
|
||||
captureMethod: ScreenShareCaptureMethod | null
|
||||
@@ -420,63 +340,13 @@ export class ScreenShareManager {
|
||||
includeSystemAudio: this.isScreenActive ? includeSystemAudio : false,
|
||||
stream: this.isScreenActive ? this.activeScreenStream : null,
|
||||
suppressRemotePlayback: this.isScreenActive
|
||||
&& this.shouldSuppressRemotePlaybackDuringShare(includeSystemAudio, captureMethod)
|
||||
&& this.desktopElectronScreenShareCapture.shouldSuppressRemotePlaybackDuringShare(includeSystemAudio),
|
||||
forceDefaultRemotePlaybackOutput: this.isScreenActive
|
||||
&& includeSystemAudio
|
||||
&& captureMethod === 'linux-electron'
|
||||
});
|
||||
}
|
||||
|
||||
private shouldSuppressRemotePlaybackDuringShare(
|
||||
includeSystemAudio: boolean,
|
||||
captureMethod: ScreenShareCaptureMethod | null
|
||||
): boolean {
|
||||
return includeSystemAudio && captureMethod === 'electron-desktop' && this.isWindowsElectron();
|
||||
}
|
||||
|
||||
private getRequiredLinuxElectronApi(): Required<Pick<
|
||||
ScreenShareElectronApi,
|
||||
| 'prepareLinuxScreenShareAudioRouting'
|
||||
| 'activateLinuxScreenShareAudioRouting'
|
||||
| 'deactivateLinuxScreenShareAudioRouting'
|
||||
| 'startLinuxScreenShareMonitorCapture'
|
||||
| 'stopLinuxScreenShareMonitorCapture'
|
||||
| 'onLinuxScreenShareMonitorAudioChunk'
|
||||
| 'onLinuxScreenShareMonitorAudioEnded'
|
||||
>> {
|
||||
const electronApi = this.getElectronApi();
|
||||
|
||||
if (!electronApi?.prepareLinuxScreenShareAudioRouting
|
||||
|| !electronApi.activateLinuxScreenShareAudioRouting
|
||||
|| !electronApi.deactivateLinuxScreenShareAudioRouting
|
||||
|| !electronApi.startLinuxScreenShareMonitorCapture
|
||||
|| !electronApi.stopLinuxScreenShareMonitorCapture
|
||||
|| !electronApi.onLinuxScreenShareMonitorAudioChunk
|
||||
|| !electronApi.onLinuxScreenShareMonitorAudioEnded) {
|
||||
throw new Error('Linux Electron audio routing is unavailable.');
|
||||
}
|
||||
|
||||
return {
|
||||
prepareLinuxScreenShareAudioRouting: electronApi.prepareLinuxScreenShareAudioRouting,
|
||||
activateLinuxScreenShareAudioRouting: electronApi.activateLinuxScreenShareAudioRouting,
|
||||
deactivateLinuxScreenShareAudioRouting: electronApi.deactivateLinuxScreenShareAudioRouting,
|
||||
startLinuxScreenShareMonitorCapture: electronApi.startLinuxScreenShareMonitorCapture,
|
||||
stopLinuxScreenShareMonitorCapture: electronApi.stopLinuxScreenShareMonitorCapture,
|
||||
onLinuxScreenShareMonitorAudioChunk: electronApi.onLinuxScreenShareMonitorAudioChunk,
|
||||
onLinuxScreenShareMonitorAudioEnded: electronApi.onLinuxScreenShareMonitorAudioEnded
|
||||
};
|
||||
}
|
||||
|
||||
private assertLinuxAudioRoutingReady(
|
||||
routingInfo: LinuxScreenShareAudioRoutingInfo,
|
||||
unavailableReason: string
|
||||
): void {
|
||||
if (!routingInfo.available) {
|
||||
throw new Error(routingInfo.reason || unavailableReason);
|
||||
}
|
||||
|
||||
if (!routingInfo.monitorCaptureSupported) {
|
||||
throw new Error('Linux screen-share monitor capture requires restarting the desktop app so the new Electron main process can load.');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a dedicated stream for system audio captured alongside the screen.
|
||||
*
|
||||
@@ -555,6 +425,11 @@ export class ScreenShareManager {
|
||||
}
|
||||
|
||||
peerData.screenVideoSender = videoSender;
|
||||
|
||||
if (typeof videoSender.setStreams === 'function') {
|
||||
videoSender.setStreams(this.activeScreenStream);
|
||||
}
|
||||
|
||||
videoSender.replaceTrack(screenVideoTrack)
|
||||
.then(() => {
|
||||
this.logger.info('screen video replaceTrack ok', { peerId });
|
||||
@@ -585,6 +460,11 @@ export class ScreenShareManager {
|
||||
}
|
||||
|
||||
peerData.screenAudioSender = screenAudioSender;
|
||||
|
||||
if (typeof screenAudioSender.setStreams === 'function') {
|
||||
screenAudioSender.setStreams(this.activeScreenStream);
|
||||
}
|
||||
|
||||
screenAudioSender.replaceTrack(screenAudioTrack)
|
||||
.then(() => this.logger.info('screen audio replaceTrack ok', { peerId }))
|
||||
.catch((error) => this.logger.error('screen audio replaceTrack failed', error));
|
||||
@@ -628,109 +508,6 @@ export class ScreenShareManager {
|
||||
this.callbacks.renegotiate(peerId);
|
||||
}
|
||||
|
||||
private async startWithDisplayMedia(
|
||||
options: ScreenShareStartOptions,
|
||||
preset: ScreenShareQualityPreset
|
||||
): Promise<MediaStream> {
|
||||
const displayConstraints = this.buildDisplayMediaConstraints(options, preset);
|
||||
|
||||
this.logger.info('getDisplayMedia constraints', displayConstraints);
|
||||
|
||||
if (!navigator.mediaDevices?.getDisplayMedia) {
|
||||
throw new Error('navigator.mediaDevices.getDisplayMedia is not available.');
|
||||
}
|
||||
|
||||
return await navigator.mediaDevices.getDisplayMedia(displayConstraints);
|
||||
}
|
||||
|
||||
private async startWithElectronDesktopCapturer(
|
||||
options: ScreenShareStartOptions,
|
||||
preset: ScreenShareQualityPreset
|
||||
): Promise<ElectronDesktopCaptureResult> {
|
||||
const electronApi = this.getElectronApi();
|
||||
|
||||
if (!electronApi?.getSources) {
|
||||
throw new Error('Electron desktop capture is unavailable.');
|
||||
}
|
||||
|
||||
const sources = await electronApi.getSources();
|
||||
const selection = await this.resolveElectronDesktopSource(sources, options.includeSystemAudio);
|
||||
const captureOptions = {
|
||||
...options,
|
||||
includeSystemAudio: selection.includeSystemAudio
|
||||
};
|
||||
|
||||
if (!selection.source) {
|
||||
throw new Error('No desktop capture sources were available.');
|
||||
}
|
||||
|
||||
this.logger.info('Selected Electron desktop source', {
|
||||
includeSystemAudio: selection.includeSystemAudio,
|
||||
sourceId: selection.source.id,
|
||||
sourceName: selection.source.name
|
||||
});
|
||||
|
||||
const electronConstraints = this.buildElectronDesktopConstraints(selection.source.id, captureOptions, preset);
|
||||
|
||||
this.logger.info('desktopCapturer constraints', electronConstraints);
|
||||
|
||||
if (!navigator.mediaDevices?.getUserMedia) {
|
||||
throw new Error('navigator.mediaDevices.getUserMedia is not available (requires HTTPS or localhost).');
|
||||
}
|
||||
|
||||
return {
|
||||
includeSystemAudio: selection.includeSystemAudio,
|
||||
stream: await navigator.mediaDevices.getUserMedia(electronConstraints)
|
||||
};
|
||||
}
|
||||
|
||||
private async resolveElectronDesktopSource(
|
||||
sources: DesktopSource[],
|
||||
includeSystemAudio: boolean
|
||||
): Promise<ElectronDesktopSourceSelection> {
|
||||
const orderedSources = this.sortElectronDesktopSources(sources);
|
||||
const defaultSource = orderedSources.find((source) => source.name === ELECTRON_ENTIRE_SCREEN_SOURCE_NAME)
|
||||
?? orderedSources[0];
|
||||
|
||||
if (orderedSources.length === 0) {
|
||||
throw new Error('No desktop capture sources were available.');
|
||||
}
|
||||
|
||||
if (!this.isWindowsElectron() || orderedSources.length < 2) {
|
||||
return {
|
||||
includeSystemAudio,
|
||||
source: defaultSource
|
||||
};
|
||||
}
|
||||
|
||||
if (!this.callbacks.selectDesktopSource) {
|
||||
return {
|
||||
includeSystemAudio,
|
||||
source: defaultSource
|
||||
};
|
||||
}
|
||||
|
||||
return await this.callbacks.selectDesktopSource(orderedSources, { includeSystemAudio });
|
||||
}
|
||||
|
||||
private sortElectronDesktopSources(sources: DesktopSource[]): DesktopSource[] {
|
||||
return [...sources].sort((left, right) => {
|
||||
const weightDiff = this.getElectronDesktopSourceWeight(left) - this.getElectronDesktopSourceWeight(right);
|
||||
|
||||
if (weightDiff !== 0) {
|
||||
return weightDiff;
|
||||
}
|
||||
|
||||
return left.name.localeCompare(right.name);
|
||||
});
|
||||
}
|
||||
|
||||
private getElectronDesktopSourceWeight(source: DesktopSource): number {
|
||||
return source.name === ELECTRON_ENTIRE_SCREEN_SOURCE_NAME || source.id.startsWith('screen')
|
||||
? 0
|
||||
: 1;
|
||||
}
|
||||
|
||||
private isScreenShareSelectionAborted(error: unknown): boolean {
|
||||
return error instanceof Error
|
||||
&& (error.name === 'AbortError' || error.name === 'NotAllowedError');
|
||||
@@ -742,425 +519,6 @@ export class ScreenShareManager {
|
||||
}
|
||||
}
|
||||
|
||||
private isLinuxElectronAudioRoutingSupported(): boolean {
|
||||
if (typeof window === 'undefined' || typeof navigator === 'undefined') {
|
||||
return false;
|
||||
}
|
||||
|
||||
const electronApi = this.getElectronApi();
|
||||
const platformHint = `${navigator.userAgent} ${navigator.platform}`;
|
||||
|
||||
return !!electronApi?.prepareLinuxScreenShareAudioRouting
|
||||
&& !!electronApi?.activateLinuxScreenShareAudioRouting
|
||||
&& !!electronApi?.deactivateLinuxScreenShareAudioRouting
|
||||
&& !!electronApi?.startLinuxScreenShareMonitorCapture
|
||||
&& !!electronApi?.stopLinuxScreenShareMonitorCapture
|
||||
&& !!electronApi?.onLinuxScreenShareMonitorAudioChunk
|
||||
&& !!electronApi?.onLinuxScreenShareMonitorAudioEnded
|
||||
&& /linux/i.test(platformHint);
|
||||
}
|
||||
|
||||
private async startWithLinuxElectronAudioRouting(
|
||||
options: ScreenShareStartOptions,
|
||||
preset: ScreenShareQualityPreset
|
||||
): Promise<MediaStream> {
|
||||
const electronApi = this.getRequiredLinuxElectronApi();
|
||||
const routingInfo = await electronApi.prepareLinuxScreenShareAudioRouting();
|
||||
|
||||
this.assertLinuxAudioRoutingReady(routingInfo, 'Linux Electron audio routing is unavailable.');
|
||||
|
||||
let desktopStream: MediaStream | null = null;
|
||||
|
||||
try {
|
||||
const activation = await electronApi.activateLinuxScreenShareAudioRouting();
|
||||
|
||||
this.assertLinuxAudioRoutingReady(activation, 'Failed to activate Linux Electron audio routing.');
|
||||
|
||||
if (!activation.active) {
|
||||
throw new Error(activation.reason || 'Failed to activate Linux Electron audio routing.');
|
||||
}
|
||||
|
||||
desktopStream = await this.startWithDisplayMedia({
|
||||
...options,
|
||||
includeSystemAudio: false
|
||||
}, preset);
|
||||
|
||||
const { audioTrack, captureInfo } = await this.startLinuxScreenShareMonitorTrack();
|
||||
const stream = new MediaStream([...desktopStream.getVideoTracks(), audioTrack]);
|
||||
|
||||
desktopStream.getAudioTracks().forEach((track) => track.stop());
|
||||
|
||||
this.linuxElectronAudioRoutingActive = true;
|
||||
this.logger.info('Linux Electron screen-share audio routing enabled', {
|
||||
screenShareMonitorSourceName: captureInfo.sourceName,
|
||||
voiceSinkName: activation.voiceSinkName
|
||||
});
|
||||
|
||||
return stream;
|
||||
} catch (error) {
|
||||
desktopStream?.getTracks().forEach((track) => track.stop());
|
||||
await this.resetLinuxElectronAudioRouting();
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private scheduleLinuxAudioRoutingReset(): void {
|
||||
if (!this.linuxElectronAudioRoutingActive || this.linuxAudioRoutingResetPromise) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.linuxAudioRoutingResetPromise = this.resetLinuxElectronAudioRouting()
|
||||
.catch((error) => {
|
||||
this.logger.warn('Failed to reset Linux Electron audio routing', error);
|
||||
})
|
||||
.finally(() => {
|
||||
this.linuxAudioRoutingResetPromise = null;
|
||||
});
|
||||
}
|
||||
|
||||
private async awaitPendingLinuxAudioRoutingReset(): Promise<void> {
|
||||
if (!this.linuxAudioRoutingResetPromise) {
|
||||
return;
|
||||
}
|
||||
|
||||
await this.linuxAudioRoutingResetPromise;
|
||||
}
|
||||
|
||||
private async resetLinuxElectronAudioRouting(): Promise<void> {
|
||||
const electronApi = this.getElectronApi();
|
||||
const captureId = this.linuxMonitorAudioPipeline?.captureId;
|
||||
|
||||
this.linuxElectronAudioRoutingActive = false;
|
||||
|
||||
this.disposeLinuxScreenShareMonitorAudioPipeline();
|
||||
|
||||
try {
|
||||
if (captureId && electronApi?.stopLinuxScreenShareMonitorCapture) {
|
||||
await electronApi.stopLinuxScreenShareMonitorCapture(captureId);
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.warn('Failed to stop Linux screen-share monitor capture', error);
|
||||
}
|
||||
|
||||
try {
|
||||
if (electronApi?.deactivateLinuxScreenShareAudioRouting) {
|
||||
await electronApi.deactivateLinuxScreenShareAudioRouting();
|
||||
}
|
||||
} catch (error) {
|
||||
this.logger.warn('Failed to deactivate Linux Electron audio routing', error);
|
||||
}
|
||||
}
|
||||
|
||||
private async startLinuxScreenShareMonitorTrack(): Promise<{
|
||||
audioTrack: MediaStreamTrack;
|
||||
captureInfo: LinuxScreenShareMonitorCaptureInfo;
|
||||
}> {
|
||||
const electronApi = this.getElectronApi();
|
||||
|
||||
if (!electronApi?.startLinuxScreenShareMonitorCapture
|
||||
|| !electronApi?.stopLinuxScreenShareMonitorCapture
|
||||
|| !electronApi?.onLinuxScreenShareMonitorAudioChunk
|
||||
|| !electronApi?.onLinuxScreenShareMonitorAudioEnded) {
|
||||
throw new Error('Linux screen-share monitor capture is unavailable.');
|
||||
}
|
||||
|
||||
const queuedChunksByCaptureId = new Map<string, Uint8Array[]>();
|
||||
const queuedEndedReasons = new Map<string, string | undefined>();
|
||||
|
||||
let pipeline: LinuxScreenShareMonitorAudioPipeline | null = null;
|
||||
let captureInfo: LinuxScreenShareMonitorCaptureInfo | null = null;
|
||||
|
||||
const queueChunk = (captureId: string, chunk: Uint8Array): void => {
|
||||
const queuedChunks = queuedChunksByCaptureId.get(captureId) || [];
|
||||
|
||||
queuedChunks.push(this.copyLinuxMonitorAudioBytes(chunk));
|
||||
queuedChunksByCaptureId.set(captureId, queuedChunks);
|
||||
};
|
||||
const onChunk = (payload: LinuxScreenShareMonitorAudioChunkPayload): void => {
|
||||
if (!pipeline || payload.captureId !== pipeline.captureId) {
|
||||
queueChunk(payload.captureId, payload.chunk);
|
||||
return;
|
||||
}
|
||||
|
||||
this.handleLinuxScreenShareMonitorAudioChunk(pipeline, payload.chunk);
|
||||
};
|
||||
const onEnded = (payload: LinuxScreenShareMonitorAudioEndedPayload): void => {
|
||||
if (!pipeline || payload.captureId !== pipeline.captureId) {
|
||||
queuedEndedReasons.set(payload.captureId, payload.reason);
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.warn('Linux screen-share monitor capture ended', payload);
|
||||
|
||||
if (this.isScreenActive && this.linuxMonitorAudioPipeline?.captureId === payload.captureId) {
|
||||
this.stopScreenShare();
|
||||
}
|
||||
};
|
||||
const unsubscribeChunk = electronApi.onLinuxScreenShareMonitorAudioChunk(onChunk) as () => void;
|
||||
const unsubscribeEnded = electronApi.onLinuxScreenShareMonitorAudioEnded(onEnded) as () => void;
|
||||
|
||||
try {
|
||||
captureInfo = await electronApi.startLinuxScreenShareMonitorCapture() as LinuxScreenShareMonitorCaptureInfo;
|
||||
|
||||
const audioContext = new AudioContext({ sampleRate: captureInfo.sampleRate });
|
||||
const mediaDestination = audioContext.createMediaStreamDestination();
|
||||
|
||||
await audioContext.resume();
|
||||
|
||||
const audioTrack = mediaDestination.stream.getAudioTracks()[0];
|
||||
|
||||
if (!audioTrack) {
|
||||
throw new Error('Renderer audio pipeline did not produce a screen-share monitor track.');
|
||||
}
|
||||
|
||||
pipeline = {
|
||||
audioContext,
|
||||
audioTrack,
|
||||
bitsPerSample: captureInfo.bitsPerSample,
|
||||
captureId: captureInfo.captureId,
|
||||
channelCount: captureInfo.channelCount,
|
||||
mediaDestination,
|
||||
nextStartTime: audioContext.currentTime + 0.05,
|
||||
pendingBytes: new Uint8Array(0),
|
||||
sampleRate: captureInfo.sampleRate,
|
||||
unsubscribeChunk,
|
||||
unsubscribeEnded
|
||||
};
|
||||
|
||||
this.linuxMonitorAudioPipeline = pipeline;
|
||||
const activeCaptureId = captureInfo.captureId;
|
||||
|
||||
audioTrack.addEventListener('ended', () => {
|
||||
if (this.isScreenActive && this.linuxMonitorAudioPipeline?.captureId === activeCaptureId) {
|
||||
this.stopScreenShare();
|
||||
}
|
||||
}, { once: true });
|
||||
|
||||
const queuedChunks = queuedChunksByCaptureId.get(captureInfo.captureId) || [];
|
||||
const activePipeline = pipeline;
|
||||
|
||||
queuedChunks.forEach((chunk) => {
|
||||
this.handleLinuxScreenShareMonitorAudioChunk(activePipeline, chunk);
|
||||
});
|
||||
|
||||
queuedChunksByCaptureId.delete(captureInfo.captureId);
|
||||
|
||||
if (queuedEndedReasons.has(captureInfo.captureId)) {
|
||||
throw new Error(queuedEndedReasons.get(captureInfo.captureId)
|
||||
|| 'Linux screen-share monitor capture ended before audio initialisation completed.');
|
||||
}
|
||||
|
||||
return {
|
||||
audioTrack,
|
||||
captureInfo
|
||||
};
|
||||
} catch (error) {
|
||||
if (pipeline) {
|
||||
this.disposeLinuxScreenShareMonitorAudioPipeline(pipeline.captureId);
|
||||
} else {
|
||||
unsubscribeChunk();
|
||||
unsubscribeEnded();
|
||||
}
|
||||
|
||||
try {
|
||||
await electronApi.stopLinuxScreenShareMonitorCapture(captureInfo?.captureId);
|
||||
} catch (stopError) {
|
||||
this.logger.warn('Failed to stop Linux screen-share monitor capture after startup failure', stopError);
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private disposeLinuxScreenShareMonitorAudioPipeline(captureId?: string): void {
|
||||
if (!this.linuxMonitorAudioPipeline) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (captureId && captureId !== this.linuxMonitorAudioPipeline.captureId) {
|
||||
return;
|
||||
}
|
||||
|
||||
const pipeline = this.linuxMonitorAudioPipeline;
|
||||
|
||||
this.linuxMonitorAudioPipeline = null;
|
||||
pipeline.unsubscribeChunk();
|
||||
pipeline.unsubscribeEnded();
|
||||
pipeline.audioTrack.stop();
|
||||
pipeline.pendingBytes = new Uint8Array(0);
|
||||
|
||||
void pipeline.audioContext.close().catch((error) => {
|
||||
this.logger.warn('Failed to close Linux screen-share monitor audio context', error);
|
||||
});
|
||||
}
|
||||
|
||||
private handleLinuxScreenShareMonitorAudioChunk(
|
||||
pipeline: LinuxScreenShareMonitorAudioPipeline,
|
||||
chunk: Uint8Array
|
||||
): void {
|
||||
if (pipeline.bitsPerSample !== 16) {
|
||||
this.logger.warn('Unsupported Linux screen-share monitor capture sample size', {
|
||||
bitsPerSample: pipeline.bitsPerSample,
|
||||
captureId: pipeline.captureId
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
const bytesPerSample = pipeline.bitsPerSample / 8;
|
||||
const bytesPerFrame = bytesPerSample * pipeline.channelCount;
|
||||
|
||||
if (!Number.isFinite(bytesPerFrame) || bytesPerFrame <= 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
const combinedBytes = this.concatLinuxMonitorAudioBytes(pipeline.pendingBytes, chunk);
|
||||
const completeByteLength = combinedBytes.byteLength - (combinedBytes.byteLength % bytesPerFrame);
|
||||
|
||||
if (completeByteLength <= 0) {
|
||||
pipeline.pendingBytes = combinedBytes;
|
||||
return;
|
||||
}
|
||||
|
||||
const completeBytes = combinedBytes.subarray(0, completeByteLength);
|
||||
|
||||
pipeline.pendingBytes = this.copyLinuxMonitorAudioBytes(combinedBytes.subarray(completeByteLength));
|
||||
|
||||
if (pipeline.audioContext.state !== 'running') {
|
||||
void pipeline.audioContext.resume().catch((error) => {
|
||||
this.logger.warn('Failed to resume Linux screen-share monitor audio context', error);
|
||||
});
|
||||
}
|
||||
|
||||
const frameCount = completeByteLength / bytesPerFrame;
|
||||
const audioBuffer = this.createLinuxScreenShareAudioBuffer(pipeline, completeBytes, frameCount);
|
||||
const source = pipeline.audioContext.createBufferSource();
|
||||
|
||||
source.buffer = audioBuffer;
|
||||
source.connect(pipeline.mediaDestination);
|
||||
|
||||
source.onended = () => {
|
||||
source.disconnect();
|
||||
};
|
||||
|
||||
const now = pipeline.audioContext.currentTime;
|
||||
const startTime = Math.max(pipeline.nextStartTime, now + 0.02);
|
||||
|
||||
source.start(startTime);
|
||||
pipeline.nextStartTime = startTime + audioBuffer.duration;
|
||||
}
|
||||
|
||||
private createLinuxScreenShareAudioBuffer(
|
||||
pipeline: LinuxScreenShareMonitorAudioPipeline,
|
||||
bytes: Uint8Array,
|
||||
frameCount: number
|
||||
): AudioBuffer {
|
||||
const audioBuffer = pipeline.audioContext.createBuffer(pipeline.channelCount, frameCount, pipeline.sampleRate);
|
||||
const sampleData = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
|
||||
const channelData = Array.from({ length: pipeline.channelCount }, (_, channelIndex) => audioBuffer.getChannelData(channelIndex));
|
||||
const bytesPerSample = pipeline.bitsPerSample / 8;
|
||||
const bytesPerFrame = bytesPerSample * pipeline.channelCount;
|
||||
|
||||
for (let frameIndex = 0; frameIndex < frameCount; frameIndex += 1) {
|
||||
const frameOffset = frameIndex * bytesPerFrame;
|
||||
|
||||
for (let channelIndex = 0; channelIndex < pipeline.channelCount; channelIndex += 1) {
|
||||
const sampleOffset = frameOffset + (channelIndex * bytesPerSample);
|
||||
|
||||
channelData[channelIndex][frameIndex] = sampleData.getInt16(sampleOffset, true) / 32768;
|
||||
}
|
||||
}
|
||||
|
||||
return audioBuffer;
|
||||
}
|
||||
|
||||
private concatLinuxMonitorAudioBytes(first: Uint8Array, second: Uint8Array): Uint8Array {
|
||||
if (first.byteLength === 0) {
|
||||
return this.copyLinuxMonitorAudioBytes(second);
|
||||
}
|
||||
|
||||
if (second.byteLength === 0) {
|
||||
return this.copyLinuxMonitorAudioBytes(first);
|
||||
}
|
||||
|
||||
const combined = new Uint8Array(first.byteLength + second.byteLength);
|
||||
|
||||
combined.set(first, 0);
|
||||
combined.set(second, first.byteLength);
|
||||
|
||||
return combined;
|
||||
}
|
||||
|
||||
private copyLinuxMonitorAudioBytes(bytes: Uint8Array): Uint8Array {
|
||||
return bytes.byteLength > 0 ? new Uint8Array(bytes) : new Uint8Array(0);
|
||||
}
|
||||
|
||||
/**
 * Builds the `getDisplayMedia` options for a screen-share session.
 *
 * Video caps are taken from the quality preset. When system audio is
 * requested, audio processing (echo cancellation, noise suppression, AGC)
 * is disabled, and the optional `restrictOwnAudio` /
 * `suppressLocalAudioPlayback` constraints are enabled only when the
 * browser reports support for them.
 *
 * @param options Screen-share start options (controls system-audio capture).
 * @param preset  Resolution/frame-rate preset for the video track.
 * @returns Options object suitable for `navigator.mediaDevices.getDisplayMedia`.
 */
private buildDisplayMediaConstraints(
  options: ScreenShareStartOptions,
  preset: ScreenShareQualityPreset
): DisplayMediaStreamOptions {
  const supportedConstraints = navigator.mediaDevices?.getSupportedConstraints?.() as Record<string, boolean> | undefined;

  let audioConstraints: Record<string, unknown> | false = false;

  if (options.includeSystemAudio) {
    // Raw system audio: no voice processing.
    audioConstraints = {
      echoCancellation: false,
      noiseSuppression: false,
      autoGainControl: false
    };

    // Opt into newer capture constraints only where the UA supports them.
    if (supportedConstraints?.['restrictOwnAudio']) {
      audioConstraints['restrictOwnAudio'] = true;
    }

    if (supportedConstraints?.['suppressLocalAudioPlayback']) {
      audioConstraints['suppressLocalAudioPlayback'] = true;
    }
  }

  const videoConstraints = {
    width: { ideal: preset.width, max: preset.width },
    height: { ideal: preset.height, max: preset.height },
    frameRate: { ideal: preset.frameRate, max: preset.frameRate }
  };

  return {
    video: videoConstraints,
    audio: audioConstraints,
    monitorTypeSurfaces: 'include',
    selfBrowserSurface: 'exclude',
    surfaceSwitching: 'include',
    systemAudio: options.includeSystemAudio ? 'include' : 'exclude'
  } as DisplayMediaStreamOptions;
}
|
||||
|
||||
/**
 * Builds Chromium desktop-capture constraints for Electron's
 * `chromeMediaSource: 'desktop'` path, targeting a specific capture source.
 *
 * @param sourceId Desktop capturer source id (screen or window).
 * @param options  Screen-share start options (controls system-audio capture).
 * @param preset   Resolution/frame-rate caps for the captured video.
 * @returns Constraints object for `getUserMedia` with desktop capture.
 */
private buildElectronDesktopConstraints(
  sourceId: string,
  options: ScreenShareStartOptions,
  preset: ScreenShareQualityPreset
): ElectronDesktopMediaStreamConstraints {
  const constraints: ElectronDesktopMediaStreamConstraints = {
    video: {
      mandatory: {
        chromeMediaSource: 'desktop',
        chromeMediaSourceId: sourceId,
        maxWidth: preset.width,
        maxHeight: preset.height,
        maxFrameRate: preset.frameRate
      }
    }
  };

  // Desktop audio must reference the same capture source; otherwise audio
  // is disabled outright.
  constraints.audio = options.includeSystemAudio
    ? {
        mandatory: {
          chromeMediaSource: 'desktop',
          chromeMediaSourceId: sourceId
        }
      }
    : false;

  return constraints;
}
|
||||
|
||||
private configureScreenStream(preset: ScreenShareQualityPreset): void {
|
||||
const screenVideoTrack = this.activeScreenStream?.getVideoTracks()[0];
|
||||
|
||||
|
||||
@@ -20,6 +20,10 @@ export interface PeerData {
|
||||
screenVideoSender?: RTCRtpSender;
|
||||
/** The RTP sender carrying the screen-share audio track. */
|
||||
screenAudioSender?: RTCRtpSender;
|
||||
/** Known remote stream ids that carry the peer's voice audio. */
|
||||
remoteVoiceStreamIds: Set<string>;
|
||||
/** Known remote stream ids that carry the peer's screen-share audio/video. */
|
||||
remoteScreenShareStreamIds: Set<string>;
|
||||
}
|
||||
|
||||
/** Credentials cached for automatic re-identification after reconnect. */
|
||||
|
||||
@@ -107,7 +107,7 @@ export class ChatMessagesComponent {
|
||||
|
||||
handleTypingStarted(): void {
|
||||
try {
|
||||
this.webrtc.sendRawMessage({ type: 'typing' });
|
||||
this.webrtc.sendRawMessage({ type: 'typing', serverId: this.webrtc.currentServerId });
|
||||
} catch {
|
||||
/* ignore */
|
||||
}
|
||||
|
||||
@@ -18,11 +18,14 @@ export interface PlaybackOptions {
|
||||
*
|
||||
* Chrome/Electron workaround: a muted HTMLAudioElement is attached to
|
||||
* the stream first so that `createMediaStreamSource` actually outputs
|
||||
* audio. The element itself is silent - all audible output comes from
|
||||
* the GainNode -> AudioContext.destination path.
|
||||
* audio. The priming element itself is silent; audible output is routed
|
||||
* through a separate output element fed by
|
||||
* `GainNode -> MediaStreamDestination` so output-device switching stays
|
||||
* reliable during Linux screen sharing.
|
||||
*/
|
||||
interface PeerAudioPipeline {
|
||||
audioElement: HTMLAudioElement;
|
||||
outputElement: HTMLAudioElement;
|
||||
context: AudioContext;
|
||||
sourceNodes: MediaStreamAudioSourceNode[];
|
||||
gainNode: GainNode;
|
||||
@@ -38,6 +41,7 @@ export class VoicePlaybackService {
|
||||
private userVolumes = new Map<string, number>();
|
||||
private userMuted = new Map<string, boolean>();
|
||||
private preferredOutputDeviceId = 'default';
|
||||
private temporaryOutputDeviceId: string | null = null;
|
||||
private masterVolume = 1;
|
||||
private deafened = false;
|
||||
private captureEchoSuppressed = false;
|
||||
@@ -49,6 +53,13 @@ export class VoicePlaybackService {
|
||||
this.captureEchoSuppressed = this.webrtc.isScreenShareRemotePlaybackSuppressed();
|
||||
this.recalcAllGains();
|
||||
});
|
||||
|
||||
effect(() => {
|
||||
this.temporaryOutputDeviceId = this.webrtc.forceDefaultRemotePlaybackOutput()
|
||||
? 'default'
|
||||
: null;
|
||||
void this.applyEffectiveOutputDeviceToAllPipelines();
|
||||
});
|
||||
}
|
||||
|
||||
handleRemoteStream(peerId: string, stream: MediaStream, options: PlaybackOptions): void {
|
||||
@@ -154,11 +165,12 @@ export class VoicePlaybackService {
|
||||
* ↓
|
||||
* muted <audio> element (Chrome workaround - primes the stream)
|
||||
* ↓
|
||||
* MediaStreamSource → GainNode → AudioContext.destination
|
||||
* MediaStreamSource → GainNode → MediaStreamDestination → output <audio>
|
||||
*/
|
||||
private createPipeline(peerId: string, stream: MediaStream): void {
|
||||
// Chromium/Electron needs a muted <audio> element before Web Audio can read the stream.
|
||||
const audioEl = new Audio();
|
||||
const outputEl = new Audio();
|
||||
const audioTracks = stream.getAudioTracks().filter((track) => track.readyState === 'live');
|
||||
|
||||
audioEl.srcObject = stream;
|
||||
@@ -167,12 +179,24 @@ export class VoicePlaybackService {
|
||||
|
||||
const ctx = new AudioContext();
|
||||
const gainNode = ctx.createGain();
|
||||
const mediaDestination = ctx.createMediaStreamDestination();
|
||||
const sourceNodes = audioTracks.map((track) => ctx.createMediaStreamSource(new MediaStream([track])));
|
||||
|
||||
sourceNodes.forEach((sourceNode) => sourceNode.connect(gainNode));
|
||||
gainNode.connect(ctx.destination);
|
||||
gainNode.connect(mediaDestination);
|
||||
|
||||
const pipeline: PeerAudioPipeline = { audioElement: audioEl, context: ctx, sourceNodes, gainNode };
|
||||
outputEl.srcObject = mediaDestination.stream;
|
||||
outputEl.muted = false;
|
||||
outputEl.volume = 1;
|
||||
outputEl.play().catch(() => {});
|
||||
|
||||
const pipeline: PeerAudioPipeline = {
|
||||
audioElement: audioEl,
|
||||
outputElement: outputEl,
|
||||
context: ctx,
|
||||
sourceNodes,
|
||||
gainNode
|
||||
};
|
||||
|
||||
this.peerPipelines.set(peerId, pipeline);
|
||||
|
||||
@@ -194,26 +218,20 @@ export class VoicePlaybackService {
|
||||
}
|
||||
|
||||
// eslint-disable-next-line
|
||||
const anyAudio = pipeline.audioElement as any;
|
||||
// eslint-disable-next-line
|
||||
const anyCtx = pipeline.context as any;
|
||||
const anyAudio = pipeline.outputElement as any;
|
||||
const tasks: Promise<unknown>[] = [];
|
||||
|
||||
if (typeof anyAudio.setSinkId === 'function') {
|
||||
tasks.push(anyAudio.setSinkId(deviceId).catch(() => undefined));
|
||||
}
|
||||
|
||||
if (typeof anyCtx.setSinkId === 'function') {
|
||||
tasks.push(anyCtx.setSinkId(deviceId).catch(() => undefined));
|
||||
}
|
||||
|
||||
if (tasks.length > 0) {
|
||||
await Promise.all(tasks);
|
||||
}
|
||||
}
|
||||
|
||||
private getEffectiveOutputDeviceId(): string {
|
||||
return this.preferredOutputDeviceId;
|
||||
return this.temporaryOutputDeviceId ?? this.preferredOutputDeviceId;
|
||||
}
|
||||
|
||||
private removePipeline(peerId: string): void {
|
||||
@@ -238,6 +256,8 @@ export class VoicePlaybackService {
|
||||
|
||||
pipeline.audioElement.srcObject = null;
|
||||
pipeline.audioElement.remove();
|
||||
pipeline.outputElement.srcObject = null;
|
||||
pipeline.outputElement.remove();
|
||||
|
||||
if (pipeline.context.state !== 'closed') {
|
||||
pipeline.context.close().catch(() => {});
|
||||
|
||||
@@ -42,6 +42,65 @@
|
||||
{{ autoScroll() ? 'Pause auto-scroll' : 'Resume auto-scroll' }}
|
||||
</button>
|
||||
|
||||
<!-- Export dropdown -->
|
||||
<div
|
||||
class="relative"
|
||||
data-export-menu
|
||||
>
|
||||
<button
|
||||
type="button"
|
||||
(click)="toggleExportMenu()"
|
||||
class="inline-flex items-center gap-1.5 rounded-lg bg-secondary px-2.5 py-2 text-xs font-medium text-foreground transition-colors hover:bg-secondary/80"
|
||||
[attr.aria-expanded]="exportMenuOpen()"
|
||||
aria-haspopup="true"
|
||||
title="Export logs"
|
||||
>
|
||||
<ng-icon
|
||||
name="lucideDownload"
|
||||
class="h-3.5 w-3.5"
|
||||
/>
|
||||
Export
|
||||
</button>
|
||||
|
||||
@if (exportMenuOpen()) {
|
||||
<div class="absolute right-0 top-full z-10 mt-1 min-w-[11rem] rounded-lg border border-border bg-card p-1 shadow-xl">
|
||||
@if (activeTab() === 'logs') {
|
||||
<p class="px-2.5 py-1.5 text-[10px] font-semibold uppercase tracking-wider text-muted-foreground">Logs</p>
|
||||
<button
|
||||
type="button"
|
||||
(click)="exportLogs('csv')"
|
||||
class="flex w-full items-center gap-2 rounded-md px-2.5 py-1.5 text-xs text-foreground transition-colors hover:bg-secondary"
|
||||
>
|
||||
Export as CSV
|
||||
</button>
|
||||
<button
|
||||
type="button"
|
||||
(click)="exportLogs('txt')"
|
||||
class="flex w-full items-center gap-2 rounded-md px-2.5 py-1.5 text-xs text-foreground transition-colors hover:bg-secondary"
|
||||
>
|
||||
Export as TXT
|
||||
</button>
|
||||
} @else {
|
||||
<p class="px-2.5 py-1.5 text-[10px] font-semibold uppercase tracking-wider text-muted-foreground">Network</p>
|
||||
<button
|
||||
type="button"
|
||||
(click)="exportNetwork('csv')"
|
||||
class="flex w-full items-center gap-2 rounded-md px-2.5 py-1.5 text-xs text-foreground transition-colors hover:bg-secondary"
|
||||
>
|
||||
Export as CSV
|
||||
</button>
|
||||
<button
|
||||
type="button"
|
||||
(click)="exportNetwork('txt')"
|
||||
class="flex w-full items-center gap-2 rounded-md px-2.5 py-1.5 text-xs text-foreground transition-colors hover:bg-secondary"
|
||||
>
|
||||
Export as TXT
|
||||
</button>
|
||||
}
|
||||
</div>
|
||||
}
|
||||
</div>
|
||||
|
||||
<button
|
||||
type="button"
|
||||
(click)="clear()"
|
||||
|
||||
@@ -1,11 +1,14 @@
|
||||
import {
|
||||
Component,
|
||||
HostListener,
|
||||
input,
|
||||
output
|
||||
output,
|
||||
signal
|
||||
} from '@angular/core';
|
||||
import { CommonModule } from '@angular/common';
|
||||
import { NgIcon, provideIcons } from '@ng-icons/core';
|
||||
import {
|
||||
lucideDownload,
|
||||
lucideFilter,
|
||||
lucidePause,
|
||||
lucidePlay,
|
||||
@@ -15,6 +18,7 @@ import {
|
||||
} from '@ng-icons/lucide';
|
||||
|
||||
type DebugLogLevel = 'event' | 'info' | 'warn' | 'error' | 'debug';
|
||||
type DebugExportFormat = 'csv' | 'txt';
|
||||
|
||||
interface DebugNetworkSummary {
|
||||
clientCount: number;
|
||||
@@ -34,6 +38,7 @@ interface DebugNetworkSummary {
|
||||
imports: [CommonModule, NgIcon],
|
||||
viewProviders: [
|
||||
provideIcons({
|
||||
lucideDownload,
|
||||
lucideFilter,
|
||||
lucidePause,
|
||||
lucidePlay,
|
||||
@@ -64,6 +69,10 @@ export class DebugConsoleToolbarComponent {
|
||||
readonly autoScrollToggled = output<undefined>();
|
||||
readonly clearRequested = output<undefined>();
|
||||
readonly closeRequested = output<undefined>();
|
||||
readonly exportLogsRequested = output<DebugExportFormat>();
|
||||
readonly exportNetworkRequested = output<DebugExportFormat>();
|
||||
|
||||
readonly exportMenuOpen = signal(false);
|
||||
|
||||
readonly levels: DebugLogLevel[] = [
|
||||
'event',
|
||||
@@ -111,6 +120,35 @@ export class DebugConsoleToolbarComponent {
|
||||
this.closeRequested.emit(undefined);
|
||||
}
|
||||
|
||||
toggleExportMenu(): void {
|
||||
this.exportMenuOpen.update((open) => !open);
|
||||
}
|
||||
|
||||
closeExportMenu(): void {
|
||||
this.exportMenuOpen.set(false);
|
||||
}
|
||||
|
||||
exportLogs(format: DebugExportFormat): void {
|
||||
this.exportLogsRequested.emit(format);
|
||||
this.closeExportMenu();
|
||||
}
|
||||
|
||||
exportNetwork(format: DebugExportFormat): void {
|
||||
this.exportNetworkRequested.emit(format);
|
||||
this.closeExportMenu();
|
||||
}
|
||||
|
||||
@HostListener('document:click', ['$event'])
|
||||
onDocumentClick(event: MouseEvent): void {
|
||||
if (!this.exportMenuOpen())
|
||||
return;
|
||||
|
||||
const target = event.target as HTMLElement;
|
||||
|
||||
if (!target.closest('[data-export-menu]'))
|
||||
this.closeExportMenu();
|
||||
}
|
||||
|
||||
getDetachLabel(): string {
|
||||
return this.detached() ? 'Dock' : 'Detach';
|
||||
}
|
||||
|
||||
@@ -102,10 +102,11 @@
|
||||
[style.left.px]="detached() ? panelLeft() : null"
|
||||
[style.top.px]="detached() ? panelTop() : null"
|
||||
>
|
||||
<!-- Left resize bar -->
|
||||
<button
|
||||
type="button"
|
||||
class="group absolute inset-y-0 left-0 z-[1] w-3 cursor-col-resize bg-transparent"
|
||||
(mousedown)="startWidthResize($event)"
|
||||
(mousedown)="startLeftResize($event)"
|
||||
aria-label="Resize debug console width"
|
||||
>
|
||||
<span
|
||||
@@ -113,10 +114,23 @@
|
||||
></span>
|
||||
</button>
|
||||
|
||||
<!-- Right resize bar -->
|
||||
<button
|
||||
type="button"
|
||||
class="group absolute inset-y-0 right-0 z-[1] w-3 cursor-col-resize bg-transparent"
|
||||
(mousedown)="startRightResize($event)"
|
||||
aria-label="Resize debug console width from right"
|
||||
>
|
||||
<span
|
||||
class="absolute left-1/2 top-1/2 h-20 w-1 -translate-x-1/2 -translate-y-1/2 rounded-full bg-border/60 transition-colors group-hover:bg-primary/60"
|
||||
></span>
|
||||
</button>
|
||||
|
||||
<!-- Top resize bar -->
|
||||
<button
|
||||
type="button"
|
||||
class="group relative h-3 w-full cursor-row-resize bg-transparent"
|
||||
(mousedown)="startResize($event)"
|
||||
(mousedown)="startTopResize($event)"
|
||||
aria-label="Resize debug console"
|
||||
>
|
||||
<span
|
||||
@@ -154,6 +168,8 @@
|
||||
(autoScrollToggled)="toggleAutoScroll()"
|
||||
(clearRequested)="clearLogs()"
|
||||
(closeRequested)="closeConsole()"
|
||||
(exportLogsRequested)="exportLogs($event)"
|
||||
(exportNetworkRequested)="exportNetwork($event)"
|
||||
/>
|
||||
|
||||
@if (activeTab() === 'logs') {
|
||||
@@ -168,6 +184,48 @@
|
||||
[snapshot]="networkSnapshot()"
|
||||
/>
|
||||
}
|
||||
|
||||
<!-- Bottom resize bar -->
|
||||
<button
|
||||
type="button"
|
||||
class="group relative h-3 w-full cursor-row-resize bg-transparent"
|
||||
(mousedown)="startBottomResize($event)"
|
||||
aria-label="Resize debug console height from bottom"
|
||||
>
|
||||
<span
|
||||
class="absolute left-1/2 top-1/2 h-1 w-16 -translate-x-1/2 -translate-y-1/2 rounded-full bg-border transition-colors group-hover:bg-primary/50"
|
||||
></span>
|
||||
</button>
|
||||
|
||||
<!-- Bottom-right corner drag handle -->
|
||||
<button
|
||||
type="button"
|
||||
class="group absolute bottom-0 right-0 z-[2] flex h-5 w-5 cursor-nwse-resize items-center justify-center bg-transparent"
|
||||
(mousedown)="startCornerResize($event)"
|
||||
aria-label="Resize debug console from corner"
|
||||
>
|
||||
<svg
|
||||
class="h-3 w-3 text-border/80 transition-colors group-hover:text-primary/70"
|
||||
viewBox="0 0 10 10"
|
||||
fill="currentColor"
|
||||
>
|
||||
<circle
|
||||
cx="8"
|
||||
cy="8"
|
||||
r="1.2"
|
||||
/>
|
||||
<circle
|
||||
cx="4"
|
||||
cy="8"
|
||||
r="1.2"
|
||||
/>
|
||||
<circle
|
||||
cx="8"
|
||||
cy="4"
|
||||
r="1.2"
|
||||
/>
|
||||
</svg>
|
||||
</button>
|
||||
</section>
|
||||
</div>
|
||||
}
|
||||
|
||||
@@ -15,6 +15,9 @@ import { DebuggingService, type DebugLogLevel } from '../../../core/services/deb
|
||||
import { DebugConsoleEntryListComponent } from './debug-console-entry-list/debug-console-entry-list.component';
|
||||
import { DebugConsoleNetworkMapComponent } from './debug-console-network-map/debug-console-network-map.component';
|
||||
import { DebugConsoleToolbarComponent } from './debug-console-toolbar/debug-console-toolbar.component';
|
||||
import { DebugConsoleResizeService } from './services/debug-console-resize.service';
|
||||
import { DebugConsoleExportService, type DebugExportFormat } from './services/debug-console-export.service';
|
||||
import { DebugConsoleEnvironmentService } from './services/debug-console-environment.service';
|
||||
|
||||
type DebugLevelState = Record<DebugLogLevel, boolean>;
|
||||
|
||||
@@ -44,6 +47,9 @@ type DebugConsoleLauncherVariant = 'floating' | 'inline' | 'compact';
|
||||
})
|
||||
export class DebugConsoleComponent {
|
||||
readonly debugging = inject(DebuggingService);
|
||||
readonly resizeService = inject(DebugConsoleResizeService);
|
||||
readonly exportService = inject(DebugConsoleExportService);
|
||||
readonly envService = inject(DebugConsoleEnvironmentService);
|
||||
readonly entries = this.debugging.entries;
|
||||
readonly isOpen = this.debugging.isConsoleOpen;
|
||||
readonly networkSnapshot = this.debugging.networkSnapshot;
|
||||
@@ -56,10 +62,10 @@ export class DebugConsoleComponent {
|
||||
readonly searchTerm = signal('');
|
||||
readonly selectedSource = signal('all');
|
||||
readonly autoScroll = signal(true);
|
||||
readonly panelHeight = signal(360);
|
||||
readonly panelWidth = signal(832);
|
||||
readonly panelLeft = signal(0);
|
||||
readonly panelTop = signal(0);
|
||||
readonly panelHeight = this.resizeService.panelHeight;
|
||||
readonly panelWidth = this.resizeService.panelWidth;
|
||||
readonly panelLeft = this.resizeService.panelLeft;
|
||||
readonly panelTop = this.resizeService.panelTop;
|
||||
readonly levelState = signal<DebugLevelState>({
|
||||
event: true,
|
||||
info: true,
|
||||
@@ -123,18 +129,8 @@ export class DebugConsoleComponent {
|
||||
readonly hasErrors = computed(() => this.levelCounts().error > 0);
|
||||
readonly networkSummary = computed(() => this.networkSnapshot().summary);
|
||||
|
||||
private dragging = false;
|
||||
private resizingHeight = false;
|
||||
private resizingWidth = false;
|
||||
private resizeOriginY = 0;
|
||||
private resizeOriginX = 0;
|
||||
private resizeOriginHeight = 360;
|
||||
private resizeOriginWidth = 832;
|
||||
private panelOriginLeft = 0;
|
||||
private panelOriginTop = 0;
|
||||
|
||||
constructor() {
|
||||
this.syncPanelBounds();
|
||||
this.resizeService.syncBounds(this.detached());
|
||||
|
||||
effect(() => {
|
||||
const selectedSource = this.selectedSource();
|
||||
@@ -147,32 +143,17 @@ export class DebugConsoleComponent {
|
||||
|
||||
@HostListener('window:mousemove', ['$event'])
|
||||
onResizeMove(event: MouseEvent): void {
|
||||
if (this.dragging) {
|
||||
this.updateDetachedPosition(event);
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.resizingWidth) {
|
||||
this.updatePanelWidth(event);
|
||||
return;
|
||||
}
|
||||
|
||||
if (!this.resizingHeight)
|
||||
return;
|
||||
|
||||
this.updatePanelHeight(event);
|
||||
this.resizeService.onMouseMove(event, this.detached());
|
||||
}
|
||||
|
||||
@HostListener('window:mouseup')
|
||||
onResizeEnd(): void {
|
||||
this.dragging = false;
|
||||
this.resizingHeight = false;
|
||||
this.resizingWidth = false;
|
||||
this.resizeService.onMouseUp();
|
||||
}
|
||||
|
||||
@HostListener('window:resize')
|
||||
onWindowResize(): void {
|
||||
this.syncPanelBounds();
|
||||
this.resizeService.syncBounds(this.detached());
|
||||
}
|
||||
|
||||
toggleConsole(): void {
|
||||
@@ -195,14 +176,38 @@ export class DebugConsoleComponent {
|
||||
this.activeTab.set(tab);
|
||||
}
|
||||
|
||||
exportLogs(format: DebugExportFormat): void {
|
||||
const env = this.envService.getEnvironment();
|
||||
const name = this.envService.getFilenameSafeDisplayName();
|
||||
|
||||
this.exportService.exportLogs(
|
||||
this.filteredEntries(),
|
||||
format,
|
||||
env,
|
||||
name
|
||||
);
|
||||
}
|
||||
|
||||
exportNetwork(format: DebugExportFormat): void {
|
||||
const env = this.envService.getEnvironment();
|
||||
const name = this.envService.getFilenameSafeDisplayName();
|
||||
|
||||
this.exportService.exportNetwork(
|
||||
this.networkSnapshot(),
|
||||
format,
|
||||
env,
|
||||
name
|
||||
);
|
||||
}
|
||||
|
||||
toggleDetached(): void {
|
||||
const nextDetached = !this.detached();
|
||||
|
||||
this.detached.set(nextDetached);
|
||||
this.syncPanelBounds();
|
||||
this.resizeService.syncBounds(nextDetached);
|
||||
|
||||
if (nextDetached)
|
||||
this.initializeDetachedPosition();
|
||||
this.resizeService.initializeDetachedPosition();
|
||||
}
|
||||
|
||||
toggleLevel(level: DebugLogLevel): void {
|
||||
@@ -220,35 +225,31 @@ export class DebugConsoleComponent {
|
||||
this.debugging.clear();
|
||||
}
|
||||
|
||||
startResize(event: MouseEvent): void {
|
||||
event.preventDefault();
|
||||
event.stopPropagation();
|
||||
this.resizingHeight = true;
|
||||
this.resizeOriginY = event.clientY;
|
||||
this.resizeOriginHeight = this.panelHeight();
|
||||
this.panelOriginTop = this.panelTop();
|
||||
startTopResize(event: MouseEvent): void {
|
||||
this.resizeService.startTopResize(event);
|
||||
}
|
||||
|
||||
startWidthResize(event: MouseEvent): void {
|
||||
event.preventDefault();
|
||||
event.stopPropagation();
|
||||
this.resizingWidth = true;
|
||||
this.resizeOriginX = event.clientX;
|
||||
this.resizeOriginWidth = this.panelWidth();
|
||||
this.panelOriginLeft = this.panelLeft();
|
||||
startBottomResize(event: MouseEvent): void {
|
||||
this.resizeService.startBottomResize(event);
|
||||
}
|
||||
|
||||
startLeftResize(event: MouseEvent): void {
|
||||
this.resizeService.startLeftResize(event);
|
||||
}
|
||||
|
||||
startRightResize(event: MouseEvent): void {
|
||||
this.resizeService.startRightResize(event);
|
||||
}
|
||||
|
||||
startCornerResize(event: MouseEvent): void {
|
||||
this.resizeService.startCornerResize(event);
|
||||
}
|
||||
|
||||
startDrag(event: MouseEvent): void {
|
||||
if (!this.detached())
|
||||
return;
|
||||
|
||||
event.preventDefault();
|
||||
event.stopPropagation();
|
||||
this.dragging = true;
|
||||
this.resizeOriginX = event.clientX;
|
||||
this.resizeOriginY = event.clientY;
|
||||
this.panelOriginLeft = this.panelLeft();
|
||||
this.panelOriginTop = this.panelTop();
|
||||
this.resizeService.startDrag(event);
|
||||
}
|
||||
|
||||
formatBadgeCount(count: number): string {
|
||||
@@ -257,92 +258,4 @@ export class DebugConsoleComponent {
|
||||
|
||||
return count.toString();
|
||||
}
|
||||
|
||||
private updatePanelHeight(event: MouseEvent): void {
|
||||
const delta = this.resizeOriginY - event.clientY;
|
||||
const nextHeight = this.clampPanelHeight(this.resizeOriginHeight + delta);
|
||||
|
||||
this.panelHeight.set(nextHeight);
|
||||
|
||||
if (!this.detached())
|
||||
return;
|
||||
|
||||
const originBottom = this.panelOriginTop + this.resizeOriginHeight;
|
||||
const maxTop = this.getMaxPanelTop(nextHeight);
|
||||
|
||||
this.panelTop.set(this.clampValue(originBottom - nextHeight, 16, maxTop));
|
||||
}
|
||||
|
||||
private updatePanelWidth(event: MouseEvent): void {
|
||||
const delta = this.resizeOriginX - event.clientX;
|
||||
const nextWidth = this.clampPanelWidth(this.resizeOriginWidth + delta);
|
||||
|
||||
this.panelWidth.set(nextWidth);
|
||||
|
||||
if (!this.detached())
|
||||
return;
|
||||
|
||||
const originRight = this.panelOriginLeft + this.resizeOriginWidth;
|
||||
const maxLeft = this.getMaxPanelLeft(nextWidth);
|
||||
|
||||
this.panelLeft.set(this.clampValue(originRight - nextWidth, 16, maxLeft));
|
||||
}
|
||||
|
||||
private updateDetachedPosition(event: MouseEvent): void {
|
||||
const nextLeft = this.panelOriginLeft + (event.clientX - this.resizeOriginX);
|
||||
const nextTop = this.panelOriginTop + (event.clientY - this.resizeOriginY);
|
||||
|
||||
this.panelLeft.set(this.clampValue(nextLeft, 16, this.getMaxPanelLeft(this.panelWidth())));
|
||||
this.panelTop.set(this.clampValue(nextTop, 16, this.getMaxPanelTop(this.panelHeight())));
|
||||
}
|
||||
|
||||
private initializeDetachedPosition(): void {
|
||||
if (this.panelLeft() > 0 || this.panelTop() > 0) {
|
||||
this.clampDetachedPosition();
|
||||
return;
|
||||
}
|
||||
|
||||
this.panelLeft.set(this.getMaxPanelLeft(this.panelWidth()));
|
||||
this.panelTop.set(this.clampValue(window.innerHeight - this.panelHeight() - 96, 16, this.getMaxPanelTop(this.panelHeight())));
|
||||
}
|
||||
|
||||
private clampPanelHeight(height: number): number {
|
||||
const maxHeight = this.detached()
|
||||
? Math.max(260, window.innerHeight - 32)
|
||||
: Math.floor(window.innerHeight * 0.75);
|
||||
|
||||
return Math.min(Math.max(height, 260), maxHeight);
|
||||
}
|
||||
|
||||
private clampPanelWidth(width: number): number {
|
||||
const maxWidth = Math.max(360, window.innerWidth - 32);
|
||||
const minWidth = Math.min(460, maxWidth);
|
||||
|
||||
return Math.min(Math.max(width, minWidth), maxWidth);
|
||||
}
|
||||
|
||||
private clampDetachedPosition(): void {
|
||||
this.panelLeft.set(this.clampValue(this.panelLeft(), 16, this.getMaxPanelLeft(this.panelWidth())));
|
||||
this.panelTop.set(this.clampValue(this.panelTop(), 16, this.getMaxPanelTop(this.panelHeight())));
|
||||
}
|
||||
|
||||
private getMaxPanelLeft(width: number): number {
|
||||
return Math.max(16, window.innerWidth - width - 16);
|
||||
}
|
||||
|
||||
private getMaxPanelTop(height: number): number {
|
||||
return Math.max(16, window.innerHeight - height - 16);
|
||||
}
|
||||
|
||||
private syncPanelBounds(): void {
|
||||
this.panelWidth.update((width) => this.clampPanelWidth(width));
|
||||
this.panelHeight.update((height) => this.clampPanelHeight(height));
|
||||
|
||||
if (this.detached())
|
||||
this.clampDetachedPosition();
|
||||
}
|
||||
|
||||
private clampValue(value: number, min: number, max: number): number {
|
||||
return Math.min(Math.max(value, min), max);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,214 @@
|
||||
import { Injectable, inject } from '@angular/core';
|
||||
import { Store } from '@ngrx/store';
|
||||
|
||||
import { selectCurrentUser } from '../../../../store/users/users.selectors';
|
||||
import { PlatformService } from '../../../../core/services/platform.service';
|
||||
|
||||
/**
 * Snapshot of the runtime environment embedded in debug-console exports,
 * as assembled by {@link DebugConsoleEnvironmentService.getEnvironment}.
 * All fields are best-effort strings; probes that fail fall back to
 * placeholder values such as 'Unknown' or 'Unavailable'.
 */
export interface DebugExportEnvironment {
  /** App version string, e.g. '1.2.3 (Electron)', or 'web' in a browser. */
  appVersion: string;
  /** Current user's display name, or 'Unknown' when not signed in. */
  displayName: string;
  /** Linux display server ('Wayland', 'X11', …) or 'N/A' off Linux. */
  displayServer: string;
  /** GPU renderer/vendor string from WebGL debug info, when obtainable. */
  gpu: string;
  /** Human-readable OS description derived from the user agent. */
  operatingSystem: string;
  /** Coarse platform label, e.g. 'Browser' or 'Linux Electron'. */
  platform: string;
  /** Raw `navigator.userAgent` string. */
  userAgent: string;
  /** Current user's id, or 'Unknown' when not signed in. */
  userId: string;
}
|
||||
|
||||
@Injectable({ providedIn: 'root' })
/**
 * Collects best-effort environment metadata (app version, OS, GPU, display
 * server, current user) for inclusion in debug-console exports.
 *
 * Every probe is defensive: failures and unknowns resolve to placeholder
 * strings ('Unknown', 'Unavailable', 'N/A') rather than throwing, so export
 * generation can never fail because of an environment query.
 */
export class DebugConsoleEnvironmentService {
  private readonly store = inject(Store);
  private readonly platformService = inject(PlatformService);
  // Reactive signal for the signed-in user; may be null/undefined when
  // nobody is signed in (all consumers below null-coalesce to 'Unknown').
  private readonly currentUser = this.store.selectSignal(selectCurrentUser);

  /**
   * Assembles the full environment snapshot used as export metadata.
   * @returns A {@link DebugExportEnvironment} with every field populated
   *          (placeholders where a probe could not determine a value).
   */
  getEnvironment(): DebugExportEnvironment {
    return {
      appVersion: this.resolveAppVersion(),
      displayName: this.resolveDisplayName(),
      displayServer: this.resolveDisplayServer(),
      gpu: this.resolveGpu(),
      operatingSystem: this.resolveOperatingSystem(),
      platform: this.resolvePlatform(),
      userAgent: navigator.userAgent,
      userId: this.currentUser()?.id ?? 'Unknown'
    };
  }

  /**
   * Returns the current display name sanitized for use in a filename:
   * disallowed characters become '_', runs of '_' collapse, and
   * leading/trailing '_' are stripped. Falls back to 'unknown' when the
   * sanitized result is empty.
   */
  getFilenameSafeDisplayName(): string {
    const name = this.resolveDisplayName();
    const sanitized = name
      .replace(/[^a-zA-Z0-9._-]/g, '_')
      .replace(/_+/g, '_')
      .replace(/^_|_$/g, '');

    return sanitized || 'unknown';
  }

  /** Current user's display name, or 'Unknown' when not signed in. */
  private resolveDisplayName(): string {
    return this.currentUser()?.displayName ?? 'Unknown';
  }

  /**
   * App version label: 'web' outside Electron; otherwise the version parsed
   * from the user agent, tagged '(Electron)', or a generic fallback.
   */
  private resolveAppVersion(): string {
    if (!this.platformService.isElectron)
      return 'web';

    const electronVersion = this.readElectronVersion();

    return electronVersion
      ? `${electronVersion} (Electron)`
      : 'Electron (unknown version)';
  }

  /**
   * Coarse platform label: 'Browser' outside Electron; inside Electron the
   * OS string is matched (order: windows, linux, mac) to pick a
   * '<OS> Electron' label, defaulting to plain 'Electron'.
   */
  private resolvePlatform(): string {
    if (!this.platformService.isElectron)
      return 'Browser';

    const os = this.resolveOperatingSystem().toLowerCase();

    if (os.includes('windows'))
      return 'Windows Electron';

    if (os.includes('linux'))
      return 'Linux Electron';

    if (os.includes('mac'))
      return 'macOS Electron';

    return 'Electron';
  }

  /**
   * Derives a human-readable OS description from `navigator.userAgent`.
   * NOTE(review): UA-based detection is heuristic — e.g. 'Windows NT 10.0'
   * covers both Windows 10 and 11, and macOS UA versions are frozen in
   * modern browsers, so the reported macOS version may be stale.
   */
  private resolveOperatingSystem(): string {
    const ua = navigator.userAgent;

    if (ua.includes('Windows NT 10.0'))
      return 'Windows 10/11';

    if (ua.includes('Windows NT'))
      return 'Windows';

    if (ua.includes('Mac OS X')) {
      // UA encodes the version with underscores, e.g. 'Mac OS X 10_15_7'.
      const match = ua.match(/Mac OS X ([\d._]+)/);
      const version = match?.[1]?.replace(/_/g, '.') ?? '';

      return version ? `macOS ${version}` : 'macOS';
    }

    if (ua.includes('Linux')) {
      const parts: string[] = ['Linux'];

      // Some distro-patched browsers advertise the distro in the UA.
      if (ua.includes('Ubuntu'))
        parts.push('(Ubuntu)');
      else if (ua.includes('Fedora'))
        parts.push('(Fedora)');
      else if (ua.includes('Debian'))
        parts.push('(Debian)');

      return parts.join(' ');
    }

    // navigator.platform is deprecated but serves as a last resort here.
    return navigator.platform || 'Unknown';
  }

  /**
   * Best-effort Linux display-server detection. Non-Linux hosts get 'N/A'.
   * Tries UA keywords first ('wayland', 'x11', 'ozone'), then falls back to
   * {@link detectDisplayServerFromEnv}.
   * NOTE(review): browsers rarely expose the display server in the UA, so
   * the fallback path is the common case — verify against real UAs.
   */
  private resolveDisplayServer(): string {
    if (!navigator.userAgent.includes('Linux'))
      return 'N/A';

    try {
      const ua = navigator.userAgent.toLowerCase();

      if (ua.includes('wayland'))
        return 'Wayland';

      if (ua.includes('x11'))
        return 'X11';

      const isOzone = ua.includes('ozone');

      if (isOzone)
        return 'Ozone (Wayland likely)';
    } catch {
      // Ignore
    }

    return this.detectDisplayServerFromEnv();
  }

  /**
   * Fallback display-server probe. Without an Electron bridge this resolves
   * to 'Unknown (Linux)'; with one present it still only consults the WebGL
   * renderer string for a 'wayland' marker.
   * NOTE(review): the Electron API object is fetched but not otherwise
   * queried — presumably a placeholder for reading env vars via the bridge.
   */
  private detectDisplayServerFromEnv(): string {
    try {
      // Electron may expose env vars
      const api = this.getElectronApi() as
        Record<string, unknown> | null;

      if (!api)
        return 'Unknown (Linux)';
    } catch {
      // Not available
    }

    // Best-effort heuristic: check if WebGL context
    // mentions wayland in renderer string
    const gpu = this.resolveGpu().toLowerCase();

    if (gpu.includes('wayland'))
      return 'Wayland';

    return 'Unknown (Linux)';
  }

  /**
   * Reads the GPU renderer/vendor via the WEBGL_debug_renderer_info
   * extension on a throwaway canvas. Returns 'Unavailable' when WebGL or
   * the extension cannot be obtained, 'Unknown' when both strings are empty.
   */
  private resolveGpu(): string {
    try {
      const canvas = document.createElement('canvas');
      const gl = canvas.getContext('webgl')
        ?? canvas.getContext('experimental-webgl');

      if (!gl || !(gl instanceof WebGLRenderingContext))
        return 'Unavailable';

      const ext = gl.getExtension('WEBGL_debug_renderer_info');

      if (!ext)
        return 'Unavailable (no debug info)';

      const vendor = gl.getParameter(ext.UNMASKED_VENDOR_WEBGL);
      const renderer = gl.getParameter(
        ext.UNMASKED_RENDERER_WEBGL
      );
      const parts: string[] = [];

      if (typeof renderer === 'string' && renderer.length > 0)
        parts.push(renderer);

      if (typeof vendor === 'string' && vendor.length > 0)
        parts.push(`(${vendor})`);

      return parts.length > 0
        ? parts.join(' ')
        : 'Unknown';
    } catch {
      return 'Unavailable';
    }
  }

  /**
   * Extracts a version from the user agent: the app's own 'metoyou/x.y.z'
   * token is preferred over the generic 'Electron/x.y.z' token.
   * @returns The version string, or null when neither token is present.
   */
  private readElectronVersion(): string | null {
    try {
      const ua = navigator.userAgent;
      const match = ua.match(/metoyou\/([\d.]+)/i)
        ?? ua.match(/Electron\/([\d.]+)/);

      return match?.[1] ?? null;
    } catch {
      return null;
    }
  }

  /** Preload-exposed Electron bridge (`window.electronAPI`), or null. */
  private getElectronApi(): Record<string, unknown> | null {
    try {
      const win = window as Window &
        { electronAPI?: Record<string, unknown> };

      return win.electronAPI ?? null;
    } catch {
      return null;
    }
  }
}
|
||||
@@ -0,0 +1,517 @@
|
||||
import { Injectable } from '@angular/core';
|
||||
|
||||
import type {
|
||||
DebugLogEntry,
|
||||
DebugLogLevel,
|
||||
DebugNetworkEdge,
|
||||
DebugNetworkNode,
|
||||
DebugNetworkSnapshot
|
||||
} from '../../../../core/services/debugging.service';
|
||||
import type { DebugExportEnvironment } from './debug-console-environment.service';
|
||||
|
||||
export type DebugExportFormat = 'csv' | 'txt';
|
||||
|
||||
@Injectable({ providedIn: 'root' })
|
||||
export class DebugConsoleExportService {
|
||||
exportLogs(
|
||||
entries: readonly DebugLogEntry[],
|
||||
format: DebugExportFormat,
|
||||
env: DebugExportEnvironment,
|
||||
filenameName: string
|
||||
): void {
|
||||
const content = format === 'csv'
|
||||
? this.buildLogsCsv(entries, env)
|
||||
: this.buildLogsTxt(entries, env);
|
||||
const extension = format === 'csv' ? 'csv' : 'txt';
|
||||
const mime = format === 'csv'
|
||||
? 'text/csv;charset=utf-8;'
|
||||
: 'text/plain;charset=utf-8;';
|
||||
const filename = this.buildFilename(
|
||||
'debug-logs',
|
||||
filenameName,
|
||||
extension
|
||||
);
|
||||
|
||||
this.downloadFile(filename, content, mime);
|
||||
}
|
||||
|
||||
exportNetwork(
|
||||
snapshot: DebugNetworkSnapshot,
|
||||
format: DebugExportFormat,
|
||||
env: DebugExportEnvironment,
|
||||
filenameName: string
|
||||
): void {
|
||||
const content = format === 'csv'
|
||||
? this.buildNetworkCsv(snapshot, env)
|
||||
: this.buildNetworkTxt(snapshot, env);
|
||||
const extension = format === 'csv' ? 'csv' : 'txt';
|
||||
const mime = format === 'csv'
|
||||
? 'text/csv;charset=utf-8;'
|
||||
: 'text/plain;charset=utf-8;';
|
||||
const filename = this.buildFilename(
|
||||
'debug-network',
|
||||
filenameName,
|
||||
extension
|
||||
);
|
||||
|
||||
this.downloadFile(filename, content, mime);
|
||||
}
|
||||
|
||||
private buildLogsCsv(
|
||||
entries: readonly DebugLogEntry[],
|
||||
env: DebugExportEnvironment
|
||||
): string {
|
||||
const meta = this.buildCsvMetaSection(env);
|
||||
const header = 'Timestamp,DateTime,Level,Source,Message,Payload,Count';
|
||||
const rows = entries.map((entry) =>
|
||||
[
|
||||
entry.timeLabel,
|
||||
entry.dateTimeLabel,
|
||||
entry.level,
|
||||
this.escapeCsvField(entry.source),
|
||||
this.escapeCsvField(entry.message),
|
||||
this.escapeCsvField(entry.payloadText ?? ''),
|
||||
entry.count
|
||||
].join(',')
|
||||
);
|
||||
|
||||
return [
|
||||
meta,
|
||||
'',
|
||||
header,
|
||||
...rows
|
||||
].join('\n');
|
||||
}
|
||||
|
||||
private buildLogsTxt(
|
||||
entries: readonly DebugLogEntry[],
|
||||
env: DebugExportEnvironment
|
||||
): string {
|
||||
const lines: string[] = [
|
||||
`Debug Logs Export - ${new Date().toISOString()}`,
|
||||
this.buildSeparator(),
|
||||
...this.buildTxtEnvLines(env),
|
||||
this.buildSeparator(),
|
||||
`Total entries: ${entries.length}`,
|
||||
this.buildSeparator()
|
||||
];
|
||||
|
||||
for (const entry of entries) {
|
||||
const prefix = this.buildLevelPrefix(entry.level);
|
||||
const countSuffix = entry.count > 1 ? ` (×${entry.count})` : '';
|
||||
|
||||
lines.push(`[${entry.dateTimeLabel}] ${prefix} [${entry.source}] ${entry.message}${countSuffix}`);
|
||||
|
||||
if (entry.payloadText)
|
||||
lines.push(` Payload: ${entry.payloadText}`);
|
||||
}
|
||||
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
private buildNetworkCsv(
|
||||
snapshot: DebugNetworkSnapshot,
|
||||
env: DebugExportEnvironment
|
||||
): string {
|
||||
const sections: string[] = [];
|
||||
|
||||
sections.push(this.buildCsvMetaSection(env));
|
||||
sections.push('');
|
||||
sections.push(this.buildNetworkNodesCsv(snapshot.nodes));
|
||||
sections.push('');
|
||||
sections.push(this.buildNetworkEdgesCsv(snapshot.edges));
|
||||
sections.push('');
|
||||
sections.push(this.buildNetworkConnectionsCsv(snapshot));
|
||||
|
||||
return sections.join('\n');
|
||||
}
|
||||
|
||||
private buildNetworkNodesCsv(nodes: readonly DebugNetworkNode[]): string {
|
||||
const headerParts = [
|
||||
'NodeId',
|
||||
'Kind',
|
||||
'Label',
|
||||
'UserId',
|
||||
'Identity',
|
||||
'Active',
|
||||
'VoiceConnected',
|
||||
'Typing',
|
||||
'Speaking',
|
||||
'Muted',
|
||||
'Deafened',
|
||||
'Streaming',
|
||||
'ConnectionDrops',
|
||||
'PingMs',
|
||||
'TextSent',
|
||||
'TextReceived',
|
||||
'AudioStreams',
|
||||
'VideoStreams',
|
||||
'OffersSent',
|
||||
'OffersReceived',
|
||||
'AnswersSent',
|
||||
'AnswersReceived',
|
||||
'IceSent',
|
||||
'IceReceived',
|
||||
'DownloadFileMbps',
|
||||
'DownloadAudioMbps',
|
||||
'DownloadVideoMbps'
|
||||
];
|
||||
const header = headerParts.join(',');
|
||||
const rows = nodes.map((node) =>
|
||||
[
|
||||
this.escapeCsvField(node.id),
|
||||
node.kind,
|
||||
this.escapeCsvField(node.label),
|
||||
this.escapeCsvField(node.userId ?? ''),
|
||||
this.escapeCsvField(node.identity ?? ''),
|
||||
node.isActive,
|
||||
node.isVoiceConnected,
|
||||
node.isTyping,
|
||||
node.isSpeaking,
|
||||
node.isMuted,
|
||||
node.isDeafened,
|
||||
node.isStreaming,
|
||||
node.connectionDrops,
|
||||
node.pingMs ?? '',
|
||||
node.textMessages.sent,
|
||||
node.textMessages.received,
|
||||
node.streams.audio,
|
||||
node.streams.video,
|
||||
node.handshake.offersSent,
|
||||
node.handshake.offersReceived,
|
||||
node.handshake.answersSent,
|
||||
node.handshake.answersReceived,
|
||||
node.handshake.iceSent,
|
||||
node.handshake.iceReceived,
|
||||
node.downloads.fileMbps ?? '',
|
||||
node.downloads.audioMbps ?? '',
|
||||
node.downloads.videoMbps ?? ''
|
||||
].join(',')
|
||||
);
|
||||
|
||||
return [
|
||||
'# Nodes',
|
||||
header,
|
||||
...rows
|
||||
].join('\n');
|
||||
}
|
||||
|
||||
private buildNetworkEdgesCsv(edges: readonly DebugNetworkEdge[]): string {
|
||||
const header = 'EdgeId,Kind,SourceId,TargetId,SourceLabel,TargetLabel,Active,PingMs,State,MessageTotal';
|
||||
const rows = edges.map((edge) =>
|
||||
[
|
||||
this.escapeCsvField(edge.id),
|
||||
edge.kind,
|
||||
this.escapeCsvField(edge.sourceId),
|
||||
this.escapeCsvField(edge.targetId),
|
||||
this.escapeCsvField(edge.sourceLabel),
|
||||
this.escapeCsvField(edge.targetLabel),
|
||||
edge.isActive,
|
||||
edge.pingMs ?? '',
|
||||
this.escapeCsvField(edge.stateLabel),
|
||||
edge.messageTotal
|
||||
].join(',')
|
||||
);
|
||||
|
||||
return [
|
||||
'# Edges',
|
||||
header,
|
||||
...rows
|
||||
].join('\n');
|
||||
}
|
||||
|
||||
private buildNetworkConnectionsCsv(snapshot: DebugNetworkSnapshot): string {
|
||||
const header = 'SourceNode,TargetNode,EdgeKind,Direction,Active';
|
||||
const rows: string[] = [];
|
||||
|
||||
for (const edge of snapshot.edges) {
|
||||
rows.push(
|
||||
[
|
||||
this.escapeCsvField(edge.sourceLabel),
|
||||
this.escapeCsvField(edge.targetLabel),
|
||||
edge.kind,
|
||||
`${edge.sourceLabel} → ${edge.targetLabel}`,
|
||||
edge.isActive
|
||||
].join(',')
|
||||
);
|
||||
}
|
||||
|
||||
return [
|
||||
'# Connections',
|
||||
header,
|
||||
...rows
|
||||
].join('\n');
|
||||
}
|
||||
|
||||
private buildNetworkTxt(
|
||||
snapshot: DebugNetworkSnapshot,
|
||||
env: DebugExportEnvironment
|
||||
): string {
|
||||
const lines: string[] = [];
|
||||
|
||||
lines.push(`Network Export - ${new Date().toISOString()}`);
|
||||
lines.push(this.buildSeparator());
|
||||
lines.push(...this.buildTxtEnvLines(env));
|
||||
lines.push(this.buildSeparator());
|
||||
|
||||
lines.push('SUMMARY');
|
||||
lines.push(` Clients: ${snapshot.summary.clientCount}`);
|
||||
lines.push(` Servers: ${snapshot.summary.serverCount}`);
|
||||
lines.push(` Signaling servers: ${snapshot.summary.signalingServerCount}`);
|
||||
lines.push(` Peer connections: ${snapshot.summary.peerConnectionCount}`);
|
||||
lines.push(` Memberships: ${snapshot.summary.membershipCount}`);
|
||||
lines.push(` Messages: ${snapshot.summary.messageCount}`);
|
||||
lines.push(` Typing: ${snapshot.summary.typingCount}`);
|
||||
lines.push(` Speaking: ${snapshot.summary.speakingCount}`);
|
||||
lines.push(` Streaming: ${snapshot.summary.streamingCount}`);
|
||||
lines.push('');
|
||||
|
||||
lines.push(this.buildSeparator());
|
||||
lines.push('NODES');
|
||||
lines.push(this.buildSeparator());
|
||||
|
||||
for (const node of snapshot.nodes)
|
||||
this.appendNodeTxt(lines, node);
|
||||
|
||||
lines.push(this.buildSeparator());
|
||||
lines.push('EDGES / CONNECTIONS');
|
||||
lines.push(this.buildSeparator());
|
||||
|
||||
for (const edge of snapshot.edges)
|
||||
this.appendEdgeTxt(lines, edge);
|
||||
|
||||
lines.push(this.buildSeparator());
|
||||
lines.push('CONNECTION MAP');
|
||||
lines.push(this.buildSeparator());
|
||||
this.appendConnectionMap(lines, snapshot);
|
||||
|
||||
return lines.join('\n');
|
||||
}
|
||||
|
||||
private appendNodeTxt(lines: string[], node: DebugNetworkNode): void {
|
||||
lines.push(` [${node.kind}] ${node.label} (${node.id})`);
|
||||
|
||||
if (node.userId)
|
||||
lines.push(` User ID: ${node.userId}`);
|
||||
|
||||
if (node.identity)
|
||||
lines.push(` Identity: ${node.identity}`);
|
||||
|
||||
const statuses: string[] = [];
|
||||
|
||||
if (node.isActive)
|
||||
statuses.push('Active');
|
||||
|
||||
if (node.isVoiceConnected)
|
||||
statuses.push('Voice');
|
||||
|
||||
if (node.isTyping)
|
||||
statuses.push('Typing');
|
||||
|
||||
if (node.isSpeaking)
|
||||
statuses.push('Speaking');
|
||||
|
||||
if (node.isMuted)
|
||||
statuses.push('Muted');
|
||||
|
||||
if (node.isDeafened)
|
||||
statuses.push('Deafened');
|
||||
|
||||
if (node.isStreaming)
|
||||
statuses.push('Streaming');
|
||||
|
||||
if (statuses.length > 0)
|
||||
lines.push(` Status: ${statuses.join(', ')}`);
|
||||
|
||||
if (node.pingMs !== null)
|
||||
lines.push(` Ping: ${node.pingMs} ms`);
|
||||
|
||||
lines.push(` Connection drops: ${node.connectionDrops}`);
|
||||
lines.push(` Text messages: ↑${node.textMessages.sent} ↓${node.textMessages.received}`);
|
||||
lines.push(` Streams: Audio ${node.streams.audio}, Video ${node.streams.video}`);
|
||||
const handshakeLine = [
|
||||
`Offers ${node.handshake.offersSent}/${node.handshake.offersReceived}`,
|
||||
`Answers ${node.handshake.answersSent}/${node.handshake.answersReceived}`,
|
||||
`ICE ${node.handshake.iceSent}/${node.handshake.iceReceived}`
|
||||
].join(', ');
|
||||
|
||||
lines.push(` Handshake: ${handshakeLine}`);
|
||||
|
||||
if (node.downloads.fileMbps !== null || node.downloads.audioMbps !== null || node.downloads.videoMbps !== null) {
|
||||
const parts = [
|
||||
`File ${this.formatMbps(node.downloads.fileMbps)}`,
|
||||
`Audio ${this.formatMbps(node.downloads.audioMbps)}`,
|
||||
`Video ${this.formatMbps(node.downloads.videoMbps)}`
|
||||
];
|
||||
|
||||
lines.push(` Downloads: ${parts.join(', ')}`);
|
||||
}
|
||||
|
||||
lines.push('');
|
||||
}
|
||||
|
||||
private appendEdgeTxt(lines: string[], edge: DebugNetworkEdge): void {
|
||||
const activeLabel = edge.isActive ? 'active' : 'inactive';
|
||||
|
||||
lines.push(` [${edge.kind}] ${edge.sourceLabel} → ${edge.targetLabel} (${activeLabel})`);
|
||||
|
||||
if (edge.pingMs !== null)
|
||||
lines.push(` Ping: ${edge.pingMs} ms`);
|
||||
|
||||
if (edge.stateLabel)
|
||||
lines.push(` State: ${edge.stateLabel}`);
|
||||
|
||||
lines.push(` Total messages: ${edge.messageTotal}`);
|
||||
|
||||
if (edge.messageGroups.length > 0) {
|
||||
lines.push(' Message groups:');
|
||||
|
||||
for (const group of edge.messageGroups) {
|
||||
const dir = group.direction === 'outbound' ? '↑' : '↓';
|
||||
|
||||
lines.push(` ${dir} [${group.scope}] ${group.type} ×${group.count}`);
|
||||
}
|
||||
}
|
||||
|
||||
lines.push('');
|
||||
}
|
||||
|
||||
private appendConnectionMap(lines: string[], snapshot: DebugNetworkSnapshot): void {
|
||||
const nodeMap = new Map(snapshot.nodes.map((node) => [node.id, node]));
|
||||
|
||||
for (const node of snapshot.nodes) {
|
||||
const outgoing = snapshot.edges.filter((edge) => edge.sourceId === node.id);
|
||||
const incoming = snapshot.edges.filter((edge) => edge.targetId === node.id);
|
||||
|
||||
lines.push(` ${node.label} (${node.kind})`);
|
||||
|
||||
if (outgoing.length > 0) {
|
||||
lines.push(' Outgoing:');
|
||||
|
||||
for (const edge of outgoing) {
|
||||
const target = nodeMap.get(edge.targetId);
|
||||
|
||||
lines.push(` → ${target?.label ?? edge.targetId} [${edge.kind}] ${edge.isActive ? '●' : '○'}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (incoming.length > 0) {
|
||||
lines.push(' Incoming:');
|
||||
|
||||
for (const edge of incoming) {
|
||||
const source = nodeMap.get(edge.sourceId);
|
||||
|
||||
lines.push(` ← ${source?.label ?? edge.sourceId} [${edge.kind}] ${edge.isActive ? '●' : '○'}`);
|
||||
}
|
||||
}
|
||||
|
||||
if (outgoing.length === 0 && incoming.length === 0)
|
||||
lines.push(' (no connections)');
|
||||
|
||||
lines.push('');
|
||||
}
|
||||
}
|
||||
|
||||
private buildCsvMetaSection(env: DebugExportEnvironment): string {
|
||||
return [
|
||||
'# Export Metadata',
|
||||
'Property,Value',
|
||||
`Exported By,${this.escapeCsvField(env.displayName)}`,
|
||||
`User ID,${this.escapeCsvField(env.userId)}`,
|
||||
`Export Date,${new Date().toISOString()}`,
|
||||
`App Version,${this.escapeCsvField(env.appVersion)}`,
|
||||
`Platform,${this.escapeCsvField(env.platform)}`,
|
||||
`Operating System,${this.escapeCsvField(env.operatingSystem)}`,
|
||||
`Display Server,${this.escapeCsvField(env.displayServer)}`,
|
||||
`GPU,${this.escapeCsvField(env.gpu)}`,
|
||||
`User Agent,${this.escapeCsvField(env.userAgent)}`
|
||||
].join('\n');
|
||||
}
|
||||
|
||||
private buildTxtEnvLines(
|
||||
env: DebugExportEnvironment
|
||||
): string[] {
|
||||
return [
|
||||
`Exported by: ${env.displayName}`,
|
||||
`User ID: ${env.userId}`,
|
||||
`App version: ${env.appVersion}`,
|
||||
`Platform: ${env.platform}`,
|
||||
`OS: ${env.operatingSystem}`,
|
||||
`Display server: ${env.displayServer}`,
|
||||
`GPU: ${env.gpu}`,
|
||||
`User agent: ${env.userAgent}`
|
||||
];
|
||||
}
|
||||
|
||||
private buildFilename(
|
||||
prefix: string,
|
||||
userLabel: string,
|
||||
extension: string
|
||||
): string {
|
||||
const stamp = this.buildTimestamp();
|
||||
|
||||
return `${prefix}_${userLabel}_${stamp}.${extension}`;
|
||||
}
|
||||
|
||||
private escapeCsvField(value: string): string {
|
||||
if (value.includes(',') || value.includes('"') || value.includes('\n'))
|
||||
return `"${value.replace(/"/g, '""')}"`;
|
||||
|
||||
return value;
|
||||
}
|
||||
|
||||
private buildLevelPrefix(level: DebugLogLevel): string {
|
||||
switch (level) {
|
||||
case 'event':
|
||||
return 'EVT';
|
||||
case 'info':
|
||||
return 'INF';
|
||||
case 'warn':
|
||||
return 'WRN';
|
||||
case 'error':
|
||||
return 'ERR';
|
||||
case 'debug':
|
||||
return 'DBG';
|
||||
}
|
||||
}
|
||||
|
||||
private formatMbps(value: number | null): string {
|
||||
if (value === null)
|
||||
return '-';
|
||||
|
||||
return `${value >= 10 ? value.toFixed(1) : value.toFixed(2)} Mbps`;
|
||||
}
|
||||
|
||||
private buildTimestamp(): string {
|
||||
const now = new Date();
|
||||
const year = now.getFullYear();
|
||||
const month = String(now.getMonth() + 1).padStart(2, '0');
|
||||
const day = String(now.getDate()).padStart(2, '0');
|
||||
const hours = String(now.getHours()).padStart(2, '0');
|
||||
const minutes = String(now.getMinutes()).padStart(2, '0');
|
||||
const seconds = String(now.getSeconds()).padStart(2, '0');
|
||||
|
||||
return `${year}${month}${day}-${hours}${minutes}${seconds}`;
|
||||
}
|
||||
|
||||
private buildSeparator(): string {
|
||||
return '─'.repeat(60);
|
||||
}
|
||||
|
||||
private downloadFile(filename: string, content: string, mimeType: string): void {
|
||||
const blob = new Blob([content], { type: mimeType });
|
||||
const url = URL.createObjectURL(blob);
|
||||
const anchor = document.createElement('a');
|
||||
|
||||
anchor.href = url;
|
||||
anchor.download = filename;
|
||||
anchor.style.display = 'none';
|
||||
document.body.appendChild(anchor);
|
||||
anchor.click();
|
||||
|
||||
requestAnimationFrame(() => {
|
||||
document.body.removeChild(anchor);
|
||||
URL.revokeObjectURL(url);
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,284 @@
|
||||
import { Injectable, signal } from '@angular/core';
|
||||
|
||||
// localStorage key under which the panel layout is persisted.
const STORAGE_KEY = 'metoyou_debug_console_layout';
const DEFAULT_HEIGHT = 520;
const DEFAULT_WIDTH = 832;
// Hard lower bounds; clamping below never shrinks past these.
const MIN_HEIGHT = 260;
const MIN_WIDTH = 460;

// Shape of the JSON blob stored at STORAGE_KEY (size only —
// position is not persisted).
interface PersistedLayout {
  height: number;
  width: number;
}

/**
 * Drag/resize state machine for the debug console panel.
 *
 * The panel has two modes: docked (only size matters) and detached
 * (size + left/top position). Exactly one of the dragging/resizing
 * flags is active at a time; each start*() call snapshots the mouse
 * position and the panel's current geometry ("origin" fields), and
 * onMouseMove computes the new geometry relative to that snapshot so
 * the opposite edge stays anchored. Size is persisted to
 * localStorage on mouse-up.
 */
@Injectable({ providedIn: 'root' })
export class DebugConsoleResizeService {
  // Reactive panel geometry, read by the component template.
  readonly panelHeight = signal(DEFAULT_HEIGHT);
  readonly panelWidth = signal(DEFAULT_WIDTH);
  readonly panelLeft = signal(0);
  readonly panelTop = signal(0);

  // Interaction-mode flags; at most one is true at any time.
  private dragging = false;
  private resizingTop = false;
  private resizingBottom = false;
  private resizingLeft = false;
  private resizingRight = false;
  private resizingCorner = false;
  // Snapshot of mouse position and panel geometry taken at the
  // start of the current gesture; all deltas are relative to these.
  private resizeOriginX = 0;
  private resizeOriginY = 0;
  private resizeOriginHeight = DEFAULT_HEIGHT;
  private resizeOriginWidth = DEFAULT_WIDTH;
  private panelOriginLeft = 0;
  private panelOriginTop = 0;

  constructor() {
    // Restore persisted size (if any) before first render.
    this.loadLayout();
  }

  // True while any edge/corner resize gesture is in progress.
  get isResizing(): boolean {
    return this.resizingTop || this.resizingBottom || this.resizingLeft || this.resizingRight || this.resizingCorner;
  }

  get isDragging(): boolean {
    return this.dragging;
  }

  // Begin resizing from the top edge (grows upward; in detached
  // mode the bottom edge stays anchored via panelOriginTop).
  startTopResize(event: MouseEvent): void {
    event.preventDefault();
    event.stopPropagation();
    this.resizingTop = true;
    this.resizeOriginY = event.clientY;
    this.resizeOriginHeight = this.panelHeight();
    this.panelOriginTop = this.panelTop();
  }

  // Begin resizing from the bottom edge (top edge stays put).
  startBottomResize(event: MouseEvent): void {
    event.preventDefault();
    event.stopPropagation();
    this.resizingBottom = true;
    this.resizeOriginY = event.clientY;
    this.resizeOriginHeight = this.panelHeight();
  }

  // Begin resizing from the left edge (right edge anchored when
  // detached, via panelOriginLeft).
  startLeftResize(event: MouseEvent): void {
    event.preventDefault();
    event.stopPropagation();
    this.resizingLeft = true;
    this.resizeOriginX = event.clientX;
    this.resizeOriginWidth = this.panelWidth();
    this.panelOriginLeft = this.panelLeft();
  }

  // Begin resizing from the right edge.
  startRightResize(event: MouseEvent): void {
    event.preventDefault();
    event.stopPropagation();
    this.resizingRight = true;
    this.resizeOriginX = event.clientX;
    this.resizeOriginWidth = this.panelWidth();
  }

  // Begin resizing from the bottom-right corner (both axes).
  startCornerResize(event: MouseEvent): void {
    event.preventDefault();
    event.stopPropagation();
    this.resizingCorner = true;
    this.resizeOriginX = event.clientX;
    this.resizeOriginY = event.clientY;
    this.resizeOriginWidth = this.panelWidth();
    this.resizeOriginHeight = this.panelHeight();
  }

  // Begin moving the (detached) panel; snapshots both mouse and
  // panel position so movement is a pure delta.
  startDrag(event: MouseEvent): void {
    event.preventDefault();
    event.stopPropagation();
    this.dragging = true;
    this.resizeOriginX = event.clientX;
    this.resizeOriginY = event.clientY;
    this.panelOriginLeft = this.panelLeft();
    this.panelOriginTop = this.panelTop();
  }

  // Dispatches mouse movement to the active gesture handler.
  // Order matters: drag wins over resize, corner over single edges.
  onMouseMove(event: MouseEvent, detached: boolean): void {
    if (this.dragging) {
      this.updateDetachedPosition(event);
      return;
    }

    if (this.resizingCorner) {
      this.updateCornerResize(event, detached);
      return;
    }

    if (this.resizingLeft) {
      this.updateLeftResize(event, detached);
      return;
    }

    if (this.resizingRight) {
      this.updateRightResize(event, detached);
      return;
    }

    if (this.resizingTop) {
      this.updateTopResize(event, detached);
      return;
    }

    if (this.resizingBottom) {
      this.updateBottomResize(event, detached);
    }
  }

  // Ends any gesture; persists the layout only if one was active.
  onMouseUp(): void {
    const wasActive = this.isResizing || this.dragging;

    this.dragging = false;
    this.resizingTop = false;
    this.resizingBottom = false;
    this.resizingLeft = false;
    this.resizingRight = false;
    this.resizingCorner = false;

    if (wasActive)
      this.persistLayout();
  }

  // Re-clamps size (and, when detached, position) after a viewport
  // change so the panel never exceeds the window.
  syncBounds(detached: boolean): void {
    this.panelWidth.update((width) => this.clampWidth(width, detached));
    this.panelHeight.update((height) => this.clampHeight(height, detached));

    if (detached)
      this.clampDetachedPosition();
  }

  // Places a freshly detached panel: keeps an existing position
  // (clamped), otherwise defaults to bottom-right of the viewport.
  initializeDetachedPosition(): void {
    if (this.panelLeft() > 0 || this.panelTop() > 0) {
      this.clampDetachedPosition();
      return;
    }

    this.panelLeft.set(this.getMaxLeft(this.panelWidth()));
    this.panelTop.set(
      this.clamp(window.innerHeight - this.panelHeight() - 96, 16, this.getMaxTop(this.panelHeight()))
    );
  }

  // Top-edge resize: height grows as the mouse moves up; when
  // detached, top is recomputed so the original bottom edge stays fixed.
  private updateTopResize(event: MouseEvent, detached: boolean): void {
    const delta = this.resizeOriginY - event.clientY;
    const nextHeight = this.clampHeight(this.resizeOriginHeight + delta, detached);

    this.panelHeight.set(nextHeight);

    if (!detached)
      return;

    const originBottom = this.panelOriginTop + this.resizeOriginHeight;

    this.panelTop.set(this.clamp(originBottom - nextHeight, 16, this.getMaxTop(nextHeight)));
  }

  // Bottom-edge resize: height follows the mouse directly.
  private updateBottomResize(event: MouseEvent, detached: boolean): void {
    const delta = event.clientY - this.resizeOriginY;

    this.panelHeight.set(this.clampHeight(this.resizeOriginHeight + delta, detached));
  }

  // Left-edge resize: mirror of updateTopResize on the X axis
  // (right edge anchored when detached).
  private updateLeftResize(event: MouseEvent, detached: boolean): void {
    const delta = this.resizeOriginX - event.clientX;
    const nextWidth = this.clampWidth(this.resizeOriginWidth + delta, detached);

    this.panelWidth.set(nextWidth);

    if (!detached)
      return;

    const originRight = this.panelOriginLeft + this.resizeOriginWidth;

    this.panelLeft.set(this.clamp(originRight - nextWidth, 16, this.getMaxLeft(nextWidth)));
  }

  // Right-edge resize: width follows the mouse directly.
  private updateRightResize(event: MouseEvent, detached: boolean): void {
    const delta = event.clientX - this.resizeOriginX;

    this.panelWidth.set(this.clampWidth(this.resizeOriginWidth + delta, detached));
  }

  // Corner resize: independent X/Y deltas applied to both axes.
  private updateCornerResize(event: MouseEvent, detached: boolean): void {
    const deltaX = event.clientX - this.resizeOriginX;
    const deltaY = event.clientY - this.resizeOriginY;

    this.panelWidth.set(this.clampWidth(this.resizeOriginWidth + deltaX, detached));
    this.panelHeight.set(this.clampHeight(this.resizeOriginHeight + deltaY, detached));
  }

  // Drag: move panel by the mouse delta, clamped to a 16px margin.
  private updateDetachedPosition(event: MouseEvent): void {
    const nextLeft = this.panelOriginLeft + (event.clientX - this.resizeOriginX);
    const nextTop = this.panelOriginTop + (event.clientY - this.resizeOriginY);

    this.panelLeft.set(this.clamp(nextLeft, 16, this.getMaxLeft(this.panelWidth())));
    this.panelTop.set(this.clamp(nextTop, 16, this.getMaxTop(this.panelHeight())));
  }

  // Detached: up to viewport minus margins; docked: at most 75% of
  // the viewport height.
  private clampHeight(height: number, detached?: boolean): number {
    const maxHeight = detached
      ? Math.max(MIN_HEIGHT, window.innerHeight - 32)
      : Math.floor(window.innerHeight * 0.75);

    return Math.min(Math.max(height, MIN_HEIGHT), maxHeight);
  }

  // Width clamp is mode-independent; _detached kept for signature
  // symmetry with clampHeight. minWidth yields to maxWidth on very
  // narrow viewports.
  private clampWidth(width: number, _detached?: boolean): number {
    const maxWidth = Math.max(MIN_WIDTH, window.innerWidth - 32);
    const minWidth = Math.min(MIN_WIDTH, maxWidth);

    return Math.min(Math.max(width, minWidth), maxWidth);
  }

  // Keeps the detached panel fully inside the viewport margins.
  private clampDetachedPosition(): void {
    this.panelLeft.set(this.clamp(this.panelLeft(), 16, this.getMaxLeft(this.panelWidth())));
    this.panelTop.set(this.clamp(this.panelTop(), 16, this.getMaxTop(this.panelHeight())));
  }

  // Largest allowed left offset for the given width (16px margin).
  private getMaxLeft(width: number): number {
    return Math.max(16, window.innerWidth - width - 16);
  }

  // Largest allowed top offset for the given height (16px margin).
  private getMaxTop(height: number): number {
    return Math.max(16, window.innerHeight - height - 16);
  }

  private clamp(value: number, min: number, max: number): number {
    return Math.min(Math.max(value, min), max);
  }

  // Restores persisted size; values below the minimums (or a
  // corrupted blob) are ignored. Upper bounds are re-applied later
  // by syncBounds().
  private loadLayout(): void {
    try {
      const raw = localStorage.getItem(STORAGE_KEY);

      if (!raw)
        return;

      const parsed = JSON.parse(raw) as PersistedLayout;

      if (typeof parsed.height === 'number' && parsed.height >= MIN_HEIGHT)
        this.panelHeight.set(parsed.height);

      if (typeof parsed.width === 'number' && parsed.width >= MIN_WIDTH)
        this.panelWidth.set(parsed.width);
    } catch {
      // Ignore corrupted storage
    }
  }

  // Persists current size (position intentionally not saved).
  private persistLayout(): void {
    try {
      const layout: PersistedLayout = {
        height: this.panelHeight(),
        width: this.panelWidth()
      };

      localStorage.setItem(STORAGE_KEY, JSON.stringify(layout));
    } catch {
      // Ignore storage failures
    }
  }
}
|
||||
@@ -64,7 +64,12 @@
|
||||
"scripts": [],
|
||||
"server": "src/main.server.ts",
|
||||
"security": {
|
||||
"allowedHosts": []
|
||||
"allowedHosts": [
|
||||
"toju.app",
|
||||
"www.toju.app",
|
||||
"localhost",
|
||||
"127.0.0.1"
|
||||
]
|
||||
},
|
||||
"prerender": true,
|
||||
"ssr": {
|
||||
|
||||
@@ -246,15 +246,6 @@ export class ReleaseService {
|
||||
}
|
||||
|
||||
private getReleaseEndpoints(): string[] {
|
||||
if (!isPlatformBrowser(this.platformId)) {
|
||||
return [PROXY_RELEASES_API_URL, DIRECT_RELEASES_API_URL];
|
||||
}
|
||||
|
||||
const hostname = window.location.hostname;
|
||||
const isLocalHost = hostname === 'localhost' || hostname === '127.0.0.1';
|
||||
|
||||
return isLocalHost
|
||||
? [PROXY_RELEASES_API_URL, DIRECT_RELEASES_API_URL]
|
||||
: [DIRECT_RELEASES_API_URL, PROXY_RELEASES_API_URL];
|
||||
return [PROXY_RELEASES_API_URL, DIRECT_RELEASES_API_URL];
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,7 +13,16 @@ const serverDistFolder = dirname(fileURLToPath(import.meta.url));
|
||||
const browserDistFolder = resolve(serverDistFolder, '../browser');
|
||||
const indexHtml = join(serverDistFolder, 'index.server.html');
|
||||
const app = express();
|
||||
const commonEngine = new CommonEngine();
|
||||
const commonEngine = new CommonEngine({
|
||||
allowedHosts: [
|
||||
'toju.app',
|
||||
'www.toju.app',
|
||||
'localhost',
|
||||
'127.0.0.1'
|
||||
]
|
||||
});
|
||||
|
||||
app.set('trust proxy', 'loopback');
|
||||
|
||||
/**
|
||||
* Proxy endpoint for Gitea releases API to avoid CORS issues.
|
||||
@@ -51,7 +60,8 @@ app.get(
|
||||
'**',
|
||||
express.static(browserDistFolder, {
|
||||
maxAge: '1y',
|
||||
index: 'index.html'
|
||||
index: 'index.html',
|
||||
redirect: false
|
||||
})
|
||||
);
|
||||
|
||||
|
||||
Reference in New Issue
Block a user