Screensharing rework
Split Linux screensharing audio tracks; rework screensharing functionality and layout. This will need some refactoring soon.
This commit is contained in:
@@ -1,10 +1,18 @@
|
||||
import { app } from 'electron';
|
||||
import { readDesktopSettings } from '../desktop-settings';
|
||||
|
||||
export function configureAppFlags(): void {
|
||||
const desktopSettings = readDesktopSettings();
|
||||
|
||||
if (!desktopSettings.hardwareAcceleration) {
|
||||
app.disableHardwareAcceleration();
|
||||
}
|
||||
|
||||
// Disable sandbox on Linux to avoid SUID / /tmp shared-memory issues
|
||||
if (process.platform === 'linux') {
|
||||
app.commandLine.appendSwitch('no-sandbox');
|
||||
app.commandLine.appendSwitch('disable-dev-shm-usage');
|
||||
app.commandLine.appendSwitch('enable-features', 'AudioServiceOutOfProcess');
|
||||
}
|
||||
|
||||
// Suppress Autofill devtools errors
|
||||
|
||||
@@ -1,4 +1,5 @@
|
||||
import { app, BrowserWindow } from 'electron';
|
||||
import { cleanupLinuxScreenShareAudioRouting } from '../audio/linux-screen-share-routing';
|
||||
import {
|
||||
initializeDatabase,
|
||||
destroyDatabase,
|
||||
@@ -38,6 +39,7 @@ export function registerAppLifecycle(): void {
|
||||
app.on('before-quit', async (event) => {
|
||||
if (getDataSource()?.isInitialized) {
|
||||
event.preventDefault();
|
||||
await cleanupLinuxScreenShareAudioRouting();
|
||||
await destroyDatabase();
|
||||
app.quit();
|
||||
}
|
||||
|
||||
753
electron/audio/linux-screen-share-routing.ts
Normal file
753
electron/audio/linux-screen-share-routing.ts
Normal file
@@ -0,0 +1,753 @@
|
||||
import {
|
||||
ChildProcess,
|
||||
execFile,
|
||||
spawn
|
||||
} from 'child_process';
|
||||
import { randomUUID } from 'crypto';
|
||||
import { WebContents } from 'electron';
|
||||
import { promisify } from 'util';
|
||||
const execFileAsync = promisify(execFile);
|
||||
const SCREEN_SHARE_SINK_NAME = 'metoyou_screenshare_sink';
|
||||
const SCREEN_SHARE_MONITOR_SOURCE_NAME = `${SCREEN_SHARE_SINK_NAME}.monitor`;
|
||||
const VOICE_SINK_NAME = 'metoyou_voice_sink';
|
||||
const REROUTE_INTERVAL_MS = 750;
|
||||
const MONITOR_CAPTURE_SAMPLE_RATE = 48_000;
|
||||
const MONITOR_CAPTURE_CHANNEL_COUNT = 2;
|
||||
const MONITOR_CAPTURE_BITS_PER_SAMPLE = 16;
|
||||
const MONITOR_CAPTURE_STOP_TIMEOUT_MS = 1_000;
|
||||
const MONITOR_AUDIO_CHUNK_CHANNEL = 'linux-screen-share-monitor-audio-chunk';
|
||||
const MONITOR_AUDIO_ENDED_CHANNEL = 'linux-screen-share-monitor-audio-ended';
|
||||
|
||||
// One row of `pactl list short sinks`: the sink's numeric index and its name
// (both kept as strings, exactly as pactl prints them).
interface ShortSinkEntry {
  index: string;
  name: string;
}

// One row of `pactl list short sink-inputs`: the stream's index and the index
// of the sink it is currently routed to.
interface ShortSinkInputEntry {
  index: string;
  sinkIndex: string;
}

// A sink input plus its PulseAudio property list (e.g.
// `application.process.id`), used to decide stream ownership.
interface SinkInputDetails extends ShortSinkInputEntry {
  properties: Record<string, string>;
}

// Loosely-typed shape of one entry from `pactl --format=json list
// sink-inputs`; fields are optional because pactl versions differ.
interface PactlJsonSinkInputEntry {
  index?: number | string;
  properties?: Record<string, unknown>;
  sink?: number | string;
}

// Mutable state for one routing session: which sink to restore on
// deactivation, the loaded loopback module ids, and the reroute timer.
interface LinuxScreenShareAudioRoutingState {
  active: boolean;
  restoreSinkName: string | null;
  screenShareLoopbackModuleId: string | null;
  voiceLoopbackModuleId: string | null;
  rerouteIntervalId: ReturnType<typeof setInterval> | null;
}

// Mutable state for the single active `parec` monitor-capture process.
// `stopRequested` distinguishes deliberate stops from unexpected exits.
interface LinuxScreenShareMonitorCaptureState {
  captureId: string | null;
  process: ChildProcess | null;
  stderr: string;
  stopRequested: boolean;
  targetWebContents: WebContents | null;
}

// Result reported to the renderer: whether routing is possible/active and
// the sink/source names it should use. `reason` explains a false `available`.
export interface LinuxScreenShareAudioRoutingInfo {
  available: boolean;
  active: boolean;
  monitorCaptureSupported: boolean;
  screenShareSinkName: string;
  screenShareMonitorSourceName: string;
  voiceSinkName: string;
  reason?: string;
}

// PCM format metadata for an active monitor capture, so the renderer can
// decode the raw chunks it receives over IPC.
export interface LinuxScreenShareMonitorCaptureInfo {
  bitsPerSample: number;
  captureId: string;
  channelCount: number;
  sampleRate: number;
  sourceName: string;
}
|
||||
|
||||
// Module-level singleton tracking the current routing session. There is at
// most one routing session per process.
const routingState: LinuxScreenShareAudioRoutingState = {
  active: false,
  restoreSinkName: null,
  screenShareLoopbackModuleId: null,
  voiceLoopbackModuleId: null,
  rerouteIntervalId: null
};
// State for the single active `parec` capture process (at most one at a time;
// starting a new capture stops the previous one first).
const monitorCaptureState: LinuxScreenShareMonitorCaptureState = {
  captureId: null,
  process: null,
  stderr: '',
  stopRequested: false,
  targetWebContents: null
};

// Memoized result of probing `pactl info`; null until the first probe.
let pactlAvailableCache: boolean | null = null;
|
||||
|
||||
export async function prepareLinuxScreenShareAudioRouting(): Promise<LinuxScreenShareAudioRoutingInfo> {
|
||||
if (process.platform !== 'linux') {
|
||||
return buildRoutingInfo(false, false, 'Linux-only audio routing is unavailable on this platform.');
|
||||
}
|
||||
|
||||
if (!await isPactlAvailable()) {
|
||||
return buildRoutingInfo(false, false, 'pactl is unavailable; falling back to standard desktop audio capture.');
|
||||
}
|
||||
|
||||
await ensureNullSink(SCREEN_SHARE_SINK_NAME);
|
||||
await ensureNullSink(VOICE_SINK_NAME);
|
||||
|
||||
return buildRoutingInfo(true, routingState.active);
|
||||
}
|
||||
|
||||
export async function activateLinuxScreenShareAudioRouting(): Promise<LinuxScreenShareAudioRoutingInfo> {
|
||||
const prepared = await prepareLinuxScreenShareAudioRouting();
|
||||
|
||||
if (!prepared.available) {
|
||||
return prepared;
|
||||
}
|
||||
|
||||
if (routingState.active) {
|
||||
return buildRoutingInfo(true, true);
|
||||
}
|
||||
|
||||
const restoreSinkName = await getPreferredRestoreSinkName();
|
||||
|
||||
if (!restoreSinkName) {
|
||||
return buildRoutingInfo(false, false, 'Unable to determine a playback sink for Linux screen-share audio routing.');
|
||||
}
|
||||
|
||||
try {
|
||||
routingState.restoreSinkName = restoreSinkName;
|
||||
routingState.screenShareLoopbackModuleId = await loadLoopbackModule(SCREEN_SHARE_MONITOR_SOURCE_NAME, restoreSinkName);
|
||||
routingState.voiceLoopbackModuleId = await loadLoopbackModule(`${VOICE_SINK_NAME}.monitor`, restoreSinkName);
|
||||
|
||||
await setDefaultSink(SCREEN_SHARE_SINK_NAME);
|
||||
await moveSinkInputs(SCREEN_SHARE_SINK_NAME, (sinkName) => !!sinkName && sinkName !== SCREEN_SHARE_SINK_NAME && sinkName !== VOICE_SINK_NAME);
|
||||
|
||||
routingState.active = true;
|
||||
await rerouteAppSinkInputsToVoiceSink();
|
||||
startSinkInputRerouteLoop();
|
||||
|
||||
return buildRoutingInfo(true, true);
|
||||
} catch (error) {
|
||||
await deactivateLinuxScreenShareAudioRouting();
|
||||
|
||||
return buildRoutingInfo(
|
||||
false,
|
||||
false,
|
||||
error instanceof Error ? error.message : 'Failed to activate Linux screen-share audio routing.'
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
export async function deactivateLinuxScreenShareAudioRouting(): Promise<boolean> {
|
||||
const restoreSinkName = routingState.restoreSinkName;
|
||||
|
||||
stopSinkInputRerouteLoop();
|
||||
await stopLinuxScreenShareMonitorCapture();
|
||||
|
||||
try {
|
||||
if (restoreSinkName) {
|
||||
await setDefaultSink(restoreSinkName);
|
||||
await moveSinkInputs(restoreSinkName, (sinkName) => sinkName === SCREEN_SHARE_SINK_NAME || sinkName === VOICE_SINK_NAME);
|
||||
}
|
||||
} catch {
|
||||
// Best-effort cleanup only.
|
||||
}
|
||||
|
||||
await Promise.all([unloadModuleIfLoaded(routingState.screenShareLoopbackModuleId), unloadModuleIfLoaded(routingState.voiceLoopbackModuleId)]);
|
||||
|
||||
routingState.active = false;
|
||||
routingState.restoreSinkName = null;
|
||||
routingState.screenShareLoopbackModuleId = null;
|
||||
routingState.voiceLoopbackModuleId = null;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
export async function cleanupLinuxScreenShareAudioRouting(): Promise<void> {
|
||||
await deactivateLinuxScreenShareAudioRouting();
|
||||
}
|
||||
|
||||
/**
 * Spawns a `parec` process that captures raw PCM from the screen-share
 * monitor source and streams each chunk to the given renderer over IPC.
 *
 * Preconditions: Linux only, and routing must already be active (see
 * activateLinuxScreenShareAudioRouting). Any previous capture is stopped
 * first, so at most one capture exists at a time.
 *
 * @param targetWebContents renderer that receives chunk/ended IPC events
 * @returns capture id plus the fixed PCM format (s16le, 48 kHz, stereo)
 * @throws Error on wrong platform, inactive routing, or spawn failure
 */
export async function startLinuxScreenShareMonitorCapture(
  targetWebContents: WebContents
): Promise<LinuxScreenShareMonitorCaptureInfo> {
  if (process.platform !== 'linux') {
    throw new Error('Linux screen-share monitor capture is unavailable on this platform.');
  }

  if (!routingState.active) {
    throw new Error('Linux screen-share audio routing must be active before monitor capture starts.');
  }

  // Only one capture at a time: tear down any previous parec first.
  await stopLinuxScreenShareMonitorCapture();

  const captureId = randomUUID();
  // Raw little-endian 16-bit PCM; format must match the constants reported
  // back to the renderer below.
  const captureProcess = spawn('parec', [
    '--device',
    SCREEN_SHARE_MONITOR_SOURCE_NAME,
    '--raw',
    '--format=s16le',
    '--rate',
    `${MONITOR_CAPTURE_SAMPLE_RATE}`,
    '--channels',
    `${MONITOR_CAPTURE_CHANNEL_COUNT}`
  ], {
    env: process.env,
    // stdin ignored; stdout carries PCM, stderr collected for diagnostics.
    stdio: [
      'ignore',
      'pipe',
      'pipe'
    ]
  });

  monitorCaptureState.captureId = captureId;
  monitorCaptureState.process = captureProcess;
  monitorCaptureState.stderr = '';
  monitorCaptureState.stopRequested = false;
  monitorCaptureState.targetWebContents = targetWebContents;

  let started = false;

  // Resolves once parec actually spawned; rejects if spawning fails.
  const startPromise = new Promise<void>((resolve, reject) => {
    const onError = (error: Error): void => {
      if (!started) {
        // Spawn failed: clean up and surface the error to the caller.
        cleanupMonitorCaptureState(captureId, error.message);
        reject(error);
        return;
      }

      // Runtime error after a successful spawn: notify the renderer only.
      cleanupMonitorCaptureState(captureId, error.message);
    };

    captureProcess.on('error', onError);
    captureProcess.once('spawn', () => {
      started = true;
      resolve();
    });
  });

  captureProcess.stdout.on('data', (chunk: Buffer) => {
    // Drop chunks from a superseded capture.
    if (monitorCaptureState.captureId !== captureId) {
      return;
    }

    const target = monitorCaptureState.targetWebContents;

    if (!target || target.isDestroyed()) {
      return;
    }

    // Copy into a plain Uint8Array so it survives structured-clone IPC.
    target.send(MONITOR_AUDIO_CHUNK_CHANNEL, {
      captureId,
      chunk: Uint8Array.from(chunk)
    });
  });

  captureProcess.stderr.on('data', (chunk: Buffer) => {
    if (monitorCaptureState.captureId !== captureId) {
      return;
    }

    const nextStderr = `${monitorCaptureState.stderr}${chunk.toString()}`;

    // Keep only the last 4 KiB of stderr for the close-reason message.
    monitorCaptureState.stderr = nextStderr.slice(-4_096);
  });

  captureProcess.once('close', (code, signal) => {
    const reason = buildMonitorCaptureCloseReason(captureId, code, signal);

    cleanupMonitorCaptureState(captureId, reason);
  });

  await startPromise;

  return {
    bitsPerSample: MONITOR_CAPTURE_BITS_PER_SAMPLE,
    captureId,
    channelCount: MONITOR_CAPTURE_CHANNEL_COUNT,
    sampleRate: MONITOR_CAPTURE_SAMPLE_RATE,
    sourceName: SCREEN_SHARE_MONITOR_SOURCE_NAME
  };
}
|
||||
|
||||
/**
 * Stops the active `parec` monitor capture, if any.
 *
 * Sends SIGTERM first and escalates to SIGKILL after a short timeout.
 *
 * @param captureId when given, only a capture with this exact id is stopped;
 *                  a mismatch returns false without touching the process.
 * @returns true when the capture identified at entry is gone afterwards
 */
export async function stopLinuxScreenShareMonitorCapture(captureId?: string): Promise<boolean> {
  if (!monitorCaptureState.captureId || !monitorCaptureState.process) {
    return true;
  }

  if (captureId && captureId !== monitorCaptureState.captureId) {
    return false;
  }

  const currentCaptureId = monitorCaptureState.captureId;
  const captureProcess = monitorCaptureState.process;

  // Mark as deliberate so the close handler reports no error reason.
  monitorCaptureState.stopRequested = true;

  await new Promise<void>((resolve) => {
    const forceKillTimeout = setTimeout(() => {
      if (!captureProcess.killed) {
        captureProcess.kill('SIGKILL');
      }
    }, MONITOR_CAPTURE_STOP_TIMEOUT_MS);

    captureProcess.once('close', () => {
      clearTimeout(forceKillTimeout);
      resolve();
    });

    if (!captureProcess.killed) {
      captureProcess.kill('SIGTERM');
      return;
    }

    // NOTE(review): if the process was already signalled we resolve
    // immediately without waiting for 'close'; state cleanup then happens
    // later in the close handler, so the comparison below may run before
    // captureId is cleared and return false for an already-dying process —
    // presumably acceptable as callers treat false as "nothing stopped";
    // confirm.
    clearTimeout(forceKillTimeout);
    resolve();
  });

  // The 'close' handler (via cleanupMonitorCaptureState) nulls captureId,
  // so inequality here means the capture we targeted is fully torn down.
  return monitorCaptureState.captureId !== currentCaptureId;
}
|
||||
|
||||
function buildRoutingInfo(
|
||||
available: boolean,
|
||||
active: boolean,
|
||||
reason?: string
|
||||
): LinuxScreenShareAudioRoutingInfo {
|
||||
return {
|
||||
available,
|
||||
active,
|
||||
monitorCaptureSupported: true,
|
||||
screenShareSinkName: SCREEN_SHARE_SINK_NAME,
|
||||
screenShareMonitorSourceName: SCREEN_SHARE_MONITOR_SOURCE_NAME,
|
||||
voiceSinkName: VOICE_SINK_NAME,
|
||||
...(reason ? { reason } : {})
|
||||
};
|
||||
}
|
||||
|
||||
async function isPactlAvailable(): Promise<boolean> {
|
||||
if (pactlAvailableCache !== null) {
|
||||
return pactlAvailableCache;
|
||||
}
|
||||
|
||||
try {
|
||||
await runPactl('info');
|
||||
pactlAvailableCache = true;
|
||||
} catch {
|
||||
pactlAvailableCache = false;
|
||||
}
|
||||
|
||||
return pactlAvailableCache;
|
||||
}
|
||||
|
||||
async function runPactl(...args: string[]): Promise<string> {
|
||||
const { stdout } = await execFileAsync('pactl', args, {
|
||||
env: process.env
|
||||
});
|
||||
|
||||
return stdout.trim();
|
||||
}
|
||||
|
||||
async function ensureNullSink(sinkName: string): Promise<void> {
|
||||
const sinks = await listSinks();
|
||||
|
||||
if (sinks.some((sink) => sink.name === sinkName)) {
|
||||
return;
|
||||
}
|
||||
|
||||
await runPactl(
|
||||
'load-module',
|
||||
'module-null-sink',
|
||||
`sink_name=${sinkName}`,
|
||||
`sink_properties=device.description=${sinkName}`
|
||||
);
|
||||
}
|
||||
|
||||
async function loadLoopbackModule(sourceName: string, sinkName: string): Promise<string> {
|
||||
const moduleId = await runPactl(
|
||||
'load-module',
|
||||
'module-loopback',
|
||||
`source=${sourceName}`,
|
||||
`sink=${sinkName}`,
|
||||
'latency_msec=10',
|
||||
'source_dont_move=true',
|
||||
'sink_dont_move=true'
|
||||
);
|
||||
|
||||
return moduleId.split(/\s+/)[0] || moduleId;
|
||||
}
|
||||
|
||||
async function unloadModuleIfLoaded(moduleId: string | null): Promise<void> {
|
||||
if (!moduleId) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await runPactl('unload-module', moduleId);
|
||||
} catch {
|
||||
// Module may have already been unloaded externally.
|
||||
}
|
||||
}
|
||||
|
||||
async function getPreferredRestoreSinkName(): Promise<string | null> {
|
||||
const defaultSinkName = await getDefaultSinkName();
|
||||
|
||||
if (defaultSinkName && defaultSinkName !== SCREEN_SHARE_SINK_NAME && defaultSinkName !== VOICE_SINK_NAME) {
|
||||
return defaultSinkName;
|
||||
}
|
||||
|
||||
const sinks = await listSinks();
|
||||
|
||||
return sinks.find((sink) => sink.name !== SCREEN_SHARE_SINK_NAME && sink.name !== VOICE_SINK_NAME)?.name ?? null;
|
||||
}
|
||||
|
||||
async function getDefaultSinkName(): Promise<string | null> {
|
||||
const info = await runPactl('info');
|
||||
const defaultSinkLine = info
|
||||
.split(/\r?\n/)
|
||||
.find((line) => line.startsWith('Default Sink:'));
|
||||
|
||||
if (!defaultSinkLine) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const sinkName = defaultSinkLine.replace('Default Sink:', '').trim();
|
||||
|
||||
return sinkName || null;
|
||||
}
|
||||
|
||||
async function setDefaultSink(sinkName: string): Promise<void> {
|
||||
await runPactl('set-default-sink', sinkName);
|
||||
}
|
||||
|
||||
async function rerouteAppSinkInputsToVoiceSink(): Promise<void> {
|
||||
const [
|
||||
sinks,
|
||||
sinkInputs,
|
||||
descendantProcessIds
|
||||
] = await Promise.all([
|
||||
listSinks(),
|
||||
listSinkInputDetails(),
|
||||
collectDescendantProcessIds(process.pid)
|
||||
]);
|
||||
const sinkNamesByIndex = new Map(sinks.map((sink) => [sink.index, sink.name]));
|
||||
|
||||
await Promise.all(
|
||||
sinkInputs.map(async (sinkInput) => {
|
||||
if (!isAppOwnedSinkInput(sinkInput, descendantProcessIds)) {
|
||||
return;
|
||||
}
|
||||
|
||||
const sinkName = sinkNamesByIndex.get(sinkInput.sinkIndex) ?? null;
|
||||
|
||||
if (sinkName === VOICE_SINK_NAME) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await runPactl('move-sink-input', sinkInput.index, VOICE_SINK_NAME);
|
||||
} catch {
|
||||
// Streams can disappear or be recreated while rerouting.
|
||||
}
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
function cleanupMonitorCaptureState(captureId: string, reason?: string): void {
|
||||
if (monitorCaptureState.captureId !== captureId) {
|
||||
return;
|
||||
}
|
||||
|
||||
const target = monitorCaptureState.targetWebContents;
|
||||
|
||||
monitorCaptureState.captureId = null;
|
||||
monitorCaptureState.process = null;
|
||||
monitorCaptureState.stderr = '';
|
||||
monitorCaptureState.stopRequested = false;
|
||||
monitorCaptureState.targetWebContents = null;
|
||||
|
||||
if (!target || target.isDestroyed()) {
|
||||
return;
|
||||
}
|
||||
|
||||
target.send(MONITOR_AUDIO_ENDED_CHANNEL, {
|
||||
captureId,
|
||||
...(reason ? { reason } : {})
|
||||
});
|
||||
}
|
||||
|
||||
function buildMonitorCaptureCloseReason(
|
||||
captureId: string,
|
||||
code: number | null,
|
||||
signal: NodeJS.Signals | null
|
||||
): string | undefined {
|
||||
if (monitorCaptureState.captureId !== captureId) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (monitorCaptureState.stopRequested) {
|
||||
return undefined;
|
||||
}
|
||||
|
||||
if (monitorCaptureState.stderr.trim()) {
|
||||
return monitorCaptureState.stderr.trim();
|
||||
}
|
||||
|
||||
if (signal) {
|
||||
return `Linux screen-share monitor capture stopped with signal ${signal}.`;
|
||||
}
|
||||
|
||||
if (typeof code === 'number' && code !== 0) {
|
||||
return `Linux screen-share monitor capture exited with code ${code}.`;
|
||||
}
|
||||
|
||||
return undefined;
|
||||
}
|
||||
|
||||
function startSinkInputRerouteLoop(): void {
|
||||
if (routingState.rerouteIntervalId) {
|
||||
return;
|
||||
}
|
||||
|
||||
routingState.rerouteIntervalId = setInterval(() => {
|
||||
void rerouteAppSinkInputsToVoiceSink();
|
||||
}, REROUTE_INTERVAL_MS);
|
||||
}
|
||||
|
||||
function stopSinkInputRerouteLoop(): void {
|
||||
if (!routingState.rerouteIntervalId) {
|
||||
return;
|
||||
}
|
||||
|
||||
clearInterval(routingState.rerouteIntervalId);
|
||||
routingState.rerouteIntervalId = null;
|
||||
}
|
||||
|
||||
function isAppOwnedSinkInput(
|
||||
sinkInput: SinkInputDetails,
|
||||
descendantProcessIds: ReadonlySet<string>
|
||||
): boolean {
|
||||
const processId = sinkInput.properties['application.process.id'];
|
||||
|
||||
return typeof processId === 'string' && descendantProcessIds.has(processId);
|
||||
}
|
||||
|
||||
async function moveSinkInputs(
|
||||
targetSinkName: string,
|
||||
shouldMove: (sinkName: string | null) => boolean
|
||||
): Promise<void> {
|
||||
const [sinks, sinkInputs] = await Promise.all([listSinks(), listSinkInputs()]);
|
||||
const sinkNamesByIndex = new Map(sinks.map((sink) => [sink.index, sink.name]));
|
||||
|
||||
await Promise.all(
|
||||
sinkInputs.map(async (sinkInput) => {
|
||||
const sinkName = sinkNamesByIndex.get(sinkInput.sinkIndex) ?? null;
|
||||
|
||||
if (!shouldMove(sinkName)) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await runPactl('move-sink-input', sinkInput.index, targetSinkName);
|
||||
} catch {
|
||||
// Streams can disappear while iterating.
|
||||
}
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
async function listSinks(): Promise<ShortSinkEntry[]> {
|
||||
const output = await runPactl('list', 'short', 'sinks');
|
||||
|
||||
return output
|
||||
.split(/\r?\n/)
|
||||
.map((line) => line.trim())
|
||||
.filter(Boolean)
|
||||
.map((line) => line.split(/\s+/))
|
||||
.filter((columns) => columns.length >= 2)
|
||||
.map((columns) => ({
|
||||
index: columns[0],
|
||||
name: columns[1]
|
||||
}));
|
||||
}
|
||||
|
||||
async function listSinkInputs(): Promise<ShortSinkInputEntry[]> {
|
||||
const output = await runPactl('list', 'short', 'sink-inputs');
|
||||
|
||||
return output
|
||||
.split(/\r?\n/)
|
||||
.map((line) => line.trim())
|
||||
.filter(Boolean)
|
||||
.map((line) => line.split(/\s+/))
|
||||
.filter((columns) => columns.length >= 2)
|
||||
.map((columns) => ({
|
||||
index: columns[0],
|
||||
sinkIndex: columns[1]
|
||||
}));
|
||||
}
|
||||
|
||||
/**
 * Lists sink inputs with their full PulseAudio property maps.
 *
 * Tries `pactl --format=json` first (newer pactl); if that fails for any
 * reason, falls back to a stateful line parser over the legacy
 * `pactl list sink-inputs` text format. Entries without a resolvable sink
 * index are dropped in both paths.
 */
async function listSinkInputDetails(): Promise<SinkInputDetails[]> {
  try {
    const output = await runPactl('--format=json', 'list', 'sink-inputs');
    const entries = JSON.parse(output) as PactlJsonSinkInputEntry[];

    if (!Array.isArray(entries)) {
      return [];
    }

    return entries
      .map((entry) => {
        // Normalize numeric or string indices to strings; '' marks missing.
        const index = typeof entry.index === 'number' || typeof entry.index === 'string'
          ? `${entry.index}`
          : '';
        const sinkIndex = typeof entry.sink === 'number' || typeof entry.sink === 'string'
          ? `${entry.sink}`
          : '';
        // Coerce all property values to strings (null/undefined become '').
        const properties = Object.fromEntries(
          Object.entries(entry.properties || {}).map(([key, value]) => [key, typeof value === 'string' ? value : `${value ?? ''}`])
        );

        return {
          index,
          sinkIndex,
          properties
        } satisfies SinkInputDetails;
      })
      .filter((entry) => !!entry.index && !!entry.sinkIndex);
  } catch {
    // Fall back to the legacy text format parser below.
  }

  const output = await runPactl('list', 'sink-inputs');
  const entries: SinkInputDetails[] = [];

  // Parser state: the entry being built and whether we are inside its
  // indented "Properties:" section.
  let currentEntry: SinkInputDetails | null = null;
  let parsingProperties = false;

  const pushCurrentEntry = (): void => {
    if (currentEntry) {
      entries.push(currentEntry);
    }
  };

  for (const rawLine of output.split(/\r?\n/)) {
    // "Sink Input #N" starts a new entry; flush the previous one.
    const sinkInputMatch = rawLine.match(/^Sink Input #(\d+)/);

    if (sinkInputMatch) {
      pushCurrentEntry();
      currentEntry = {
        index: sinkInputMatch[1],
        sinkIndex: '',
        properties: {}
      };

      parsingProperties = false;
      continue;
    }

    if (!currentEntry) {
      continue;
    }

    // "Sink: N" records which sink the stream currently plays to.
    const sinkMatch = rawLine.match(/^\s*Sink:\s*(\d+)/);

    if (sinkMatch) {
      currentEntry.sinkIndex = sinkMatch[1];
      continue;
    }

    if (/^\s*Properties:\s*$/.test(rawLine)) {
      parsingProperties = true;
      continue;
    }

    if (!parsingProperties) {
      continue;
    }

    // A blank line ends the properties section.
    if (rawLine.trim().length === 0) {
      parsingProperties = false;
      continue;
    }

    const propertyLine = rawLine.trim();
    const separatorIndex = propertyLine.indexOf(' = ');

    if (separatorIndex === -1) {
      // A non-property line (new top-level entry or a "Something:" section
      // header) also terminates the properties section.
      if (/^\S/.test(rawLine) || /^\s+\S[^=]*:\s*$/.test(rawLine)) {
        parsingProperties = false;
      }

      continue;
    }

    const key = propertyLine.slice(0, separatorIndex).trim();
    const rawValue = propertyLine.slice(separatorIndex + 3).trim();

    // pactl quotes property values; store them unquoted.
    currentEntry.properties[key] = stripSurroundingQuotes(rawValue);
  }

  pushCurrentEntry();

  return entries.filter((entry) => !!entry.sinkIndex);
}
|
||||
|
||||
async function collectDescendantProcessIds(rootProcessId: number): Promise<Set<string>> {
|
||||
const { stdout } = await execFileAsync('ps', ['-eo', 'pid=,ppid='], {
|
||||
env: process.env
|
||||
});
|
||||
const childrenByParentId = new Map<string, string[]>();
|
||||
|
||||
stdout
|
||||
.split(/\r?\n/)
|
||||
.map((line) => line.trim())
|
||||
.filter(Boolean)
|
||||
.forEach((line) => {
|
||||
const [pid, ppid] = line.split(/\s+/);
|
||||
|
||||
if (!pid || !ppid) {
|
||||
return;
|
||||
}
|
||||
|
||||
const siblings = childrenByParentId.get(ppid) ?? [];
|
||||
|
||||
siblings.push(pid);
|
||||
childrenByParentId.set(ppid, siblings);
|
||||
});
|
||||
|
||||
const rootId = `${rootProcessId}`;
|
||||
const descendantIds = new Set<string>([rootId]);
|
||||
const queue = [rootId];
|
||||
|
||||
while (queue.length > 0) {
|
||||
const currentId = queue.shift();
|
||||
|
||||
if (!currentId) {
|
||||
continue;
|
||||
}
|
||||
|
||||
for (const childId of childrenByParentId.get(currentId) ?? []) {
|
||||
if (descendantIds.has(childId)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
descendantIds.add(childId);
|
||||
queue.push(childId);
|
||||
}
|
||||
}
|
||||
|
||||
return descendantIds;
|
||||
}
|
||||
|
||||
function stripSurroundingQuotes(value: string): string {
|
||||
if ((value.startsWith('"') && value.endsWith('"')) || (value.startsWith('\'') && value.endsWith('\''))) {
|
||||
return value.slice(1, -1);
|
||||
}
|
||||
|
||||
return value;
|
||||
}
|
||||
65
electron/desktop-settings.ts
Normal file
65
electron/desktop-settings.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import { app } from 'electron';
|
||||
import * as fs from 'fs';
|
||||
import * as path from 'path';
|
||||
|
||||
// Settings persisted to disk; they take effect at the next app launch.
export interface DesktopSettings {
  // Whether Electron hardware acceleration should be enabled on startup.
  hardwareAcceleration: boolean;
}

// Stored settings plus the value actually in effect for this process, and
// whether a restart is needed for the stored value to apply.
export interface DesktopSettingsSnapshot extends DesktopSettings {
  runtimeHardwareAcceleration: boolean;
  restartRequired: boolean;
}

// Values used when the settings file is missing or unreadable.
const DEFAULT_DESKTOP_SETTINGS: DesktopSettings = {
  hardwareAcceleration: true
};
|
||||
|
||||
export function getDesktopSettingsSnapshot(): DesktopSettingsSnapshot {
|
||||
const storedSettings = readDesktopSettings();
|
||||
const runtimeHardwareAcceleration = app.isHardwareAccelerationEnabled();
|
||||
|
||||
return {
|
||||
...storedSettings,
|
||||
runtimeHardwareAcceleration,
|
||||
restartRequired: storedSettings.hardwareAcceleration !== runtimeHardwareAcceleration
|
||||
};
|
||||
}
|
||||
|
||||
export function readDesktopSettings(): DesktopSettings {
|
||||
const filePath = getDesktopSettingsPath();
|
||||
|
||||
try {
|
||||
if (!fs.existsSync(filePath)) {
|
||||
return { ...DEFAULT_DESKTOP_SETTINGS };
|
||||
}
|
||||
|
||||
const raw = fs.readFileSync(filePath, 'utf8');
|
||||
const parsed = JSON.parse(raw) as Partial<DesktopSettings>;
|
||||
|
||||
return {
|
||||
hardwareAcceleration: typeof parsed.hardwareAcceleration === 'boolean'
|
||||
? parsed.hardwareAcceleration
|
||||
: DEFAULT_DESKTOP_SETTINGS.hardwareAcceleration
|
||||
};
|
||||
} catch {
|
||||
return { ...DEFAULT_DESKTOP_SETTINGS };
|
||||
}
|
||||
}
|
||||
|
||||
export function updateDesktopSettings(patch: Partial<DesktopSettings>): DesktopSettingsSnapshot {
|
||||
const nextSettings: DesktopSettings = {
|
||||
...readDesktopSettings(),
|
||||
...patch
|
||||
};
|
||||
const filePath = getDesktopSettingsPath();
|
||||
|
||||
fs.mkdirSync(path.dirname(filePath), { recursive: true });
|
||||
fs.writeFileSync(filePath, JSON.stringify(nextSettings, null, 2), 'utf8');
|
||||
|
||||
return getDesktopSettingsSnapshot();
|
||||
}
|
||||
|
||||
function getDesktopSettingsPath(): string {
|
||||
return path.join(app.getPath('userData'), 'desktop-settings.json');
|
||||
}
|
||||
@@ -7,6 +7,14 @@ import {
|
||||
} from 'electron';
|
||||
import * as fs from 'fs';
|
||||
import * as fsp from 'fs/promises';
|
||||
import { getDesktopSettingsSnapshot, updateDesktopSettings } from '../desktop-settings';
|
||||
import {
|
||||
activateLinuxScreenShareAudioRouting,
|
||||
deactivateLinuxScreenShareAudioRouting,
|
||||
prepareLinuxScreenShareAudioRouting,
|
||||
startLinuxScreenShareMonitorCapture,
|
||||
stopLinuxScreenShareMonitorCapture
|
||||
} from '../audio/linux-screen-share-routing';
|
||||
|
||||
export function setupSystemHandlers(): void {
|
||||
ipcMain.handle('open-external', async (_event, url: string) => {
|
||||
@@ -31,8 +39,40 @@ export function setupSystemHandlers(): void {
|
||||
}));
|
||||
});
|
||||
|
||||
ipcMain.handle('prepare-linux-screen-share-audio-routing', async () => {
|
||||
return await prepareLinuxScreenShareAudioRouting();
|
||||
});
|
||||
|
||||
ipcMain.handle('activate-linux-screen-share-audio-routing', async () => {
|
||||
return await activateLinuxScreenShareAudioRouting();
|
||||
});
|
||||
|
||||
ipcMain.handle('deactivate-linux-screen-share-audio-routing', async () => {
|
||||
return await deactivateLinuxScreenShareAudioRouting();
|
||||
});
|
||||
|
||||
ipcMain.handle('start-linux-screen-share-monitor-capture', async (event) => {
|
||||
return await startLinuxScreenShareMonitorCapture(event.sender);
|
||||
});
|
||||
|
||||
ipcMain.handle('stop-linux-screen-share-monitor-capture', async (_event, captureId?: string) => {
|
||||
return await stopLinuxScreenShareMonitorCapture(captureId);
|
||||
});
|
||||
|
||||
ipcMain.handle('get-app-data-path', () => app.getPath('userData'));
|
||||
|
||||
ipcMain.handle('get-desktop-settings', () => getDesktopSettingsSnapshot());
|
||||
|
||||
ipcMain.handle('set-desktop-settings', (_event, patch: { hardwareAcceleration?: boolean }) => {
|
||||
return updateDesktopSettings(patch);
|
||||
});
|
||||
|
||||
ipcMain.handle('relaunch-app', () => {
|
||||
app.relaunch();
|
||||
app.exit(0);
|
||||
return true;
|
||||
});
|
||||
|
||||
ipcMain.handle('file-exists', async (_event, filePath: string) => {
|
||||
try {
|
||||
await fsp.access(filePath, fs.constants.F_OK);
|
||||
|
||||
@@ -1,6 +1,37 @@
|
||||
import { contextBridge, ipcRenderer } from 'electron';
|
||||
import { Command, Query } from './cqrs/types';
|
||||
|
||||
const LINUX_SCREEN_SHARE_MONITOR_AUDIO_CHUNK_CHANNEL = 'linux-screen-share-monitor-audio-chunk';
|
||||
const LINUX_SCREEN_SHARE_MONITOR_AUDIO_ENDED_CHANNEL = 'linux-screen-share-monitor-audio-ended';
|
||||
|
||||
export interface LinuxScreenShareAudioRoutingInfo {
|
||||
available: boolean;
|
||||
active: boolean;
|
||||
monitorCaptureSupported: boolean;
|
||||
screenShareSinkName: string;
|
||||
screenShareMonitorSourceName: string;
|
||||
voiceSinkName: string;
|
||||
reason?: string;
|
||||
}
|
||||
|
||||
export interface LinuxScreenShareMonitorCaptureInfo {
|
||||
bitsPerSample: number;
|
||||
captureId: string;
|
||||
channelCount: number;
|
||||
sampleRate: number;
|
||||
sourceName: string;
|
||||
}
|
||||
|
||||
export interface LinuxScreenShareMonitorAudioChunkPayload {
|
||||
captureId: string;
|
||||
chunk: Uint8Array;
|
||||
}
|
||||
|
||||
export interface LinuxScreenShareMonitorAudioEndedPayload {
|
||||
captureId: string;
|
||||
reason?: string;
|
||||
}
|
||||
|
||||
export interface ElectronAPI {
|
||||
minimizeWindow: () => void;
|
||||
maximizeWindow: () => void;
|
||||
@@ -8,7 +39,25 @@ export interface ElectronAPI {
|
||||
|
||||
openExternal: (url: string) => Promise<boolean>;
|
||||
getSources: () => Promise<{ id: string; name: string; thumbnail: string }[]>;
|
||||
prepareLinuxScreenShareAudioRouting: () => Promise<LinuxScreenShareAudioRoutingInfo>;
|
||||
activateLinuxScreenShareAudioRouting: () => Promise<LinuxScreenShareAudioRoutingInfo>;
|
||||
deactivateLinuxScreenShareAudioRouting: () => Promise<boolean>;
|
||||
startLinuxScreenShareMonitorCapture: () => Promise<LinuxScreenShareMonitorCaptureInfo>;
|
||||
stopLinuxScreenShareMonitorCapture: (captureId?: string) => Promise<boolean>;
|
||||
onLinuxScreenShareMonitorAudioChunk: (listener: (payload: LinuxScreenShareMonitorAudioChunkPayload) => void) => () => void;
|
||||
onLinuxScreenShareMonitorAudioEnded: (listener: (payload: LinuxScreenShareMonitorAudioEndedPayload) => void) => () => void;
|
||||
getAppDataPath: () => Promise<string>;
|
||||
getDesktopSettings: () => Promise<{
|
||||
hardwareAcceleration: boolean;
|
||||
runtimeHardwareAcceleration: boolean;
|
||||
restartRequired: boolean;
|
||||
}>;
|
||||
setDesktopSettings: (patch: { hardwareAcceleration?: boolean }) => Promise<{
|
||||
hardwareAcceleration: boolean;
|
||||
runtimeHardwareAcceleration: boolean;
|
||||
restartRequired: boolean;
|
||||
}>;
|
||||
relaunchApp: () => Promise<boolean>;
|
||||
readFile: (filePath: string) => Promise<string>;
|
||||
writeFile: (filePath: string, data: string) => Promise<boolean>;
|
||||
saveFileAs: (defaultFileName: string, data: string) => Promise<{ saved: boolean; cancelled: boolean }>;
|
||||
@@ -26,7 +75,44 @@ const electronAPI: ElectronAPI = {
|
||||
|
||||
openExternal: (url) => ipcRenderer.invoke('open-external', url),
|
||||
getSources: () => ipcRenderer.invoke('get-sources'),
|
||||
prepareLinuxScreenShareAudioRouting: () => ipcRenderer.invoke('prepare-linux-screen-share-audio-routing'),
|
||||
activateLinuxScreenShareAudioRouting: () => ipcRenderer.invoke('activate-linux-screen-share-audio-routing'),
|
||||
deactivateLinuxScreenShareAudioRouting: () => ipcRenderer.invoke('deactivate-linux-screen-share-audio-routing'),
|
||||
startLinuxScreenShareMonitorCapture: () => ipcRenderer.invoke('start-linux-screen-share-monitor-capture'),
|
||||
stopLinuxScreenShareMonitorCapture: (captureId) => ipcRenderer.invoke('stop-linux-screen-share-monitor-capture', captureId),
|
||||
// Subscribe to monitor-audio chunks from the main process; returns an
// unsubscribe function that detaches the underlying IPC listener.
onLinuxScreenShareMonitorAudioChunk: (listener) => {
  const handleChunk = (_event: Electron.IpcRendererEvent, payload: LinuxScreenShareMonitorAudioChunkPayload) => {
    // The chunk may not survive IPC serialization as a Uint8Array
    // (presumably arriving as a plain iterable) — normalise defensively.
    let chunk: Uint8Array;

    if (payload.chunk instanceof Uint8Array) {
      chunk = payload.chunk;
    } else {
      const rawChunk = (payload as { chunk?: Iterable<number> }).chunk;
      chunk = Uint8Array.from(rawChunk || []);
    }

    listener({
      ...payload,
      chunk
    });
  };

  ipcRenderer.on(LINUX_SCREEN_SHARE_MONITOR_AUDIO_CHUNK_CHANNEL, handleChunk);

  return () => ipcRenderer.removeListener(LINUX_SCREEN_SHARE_MONITOR_AUDIO_CHUNK_CHANNEL, handleChunk);
},
|
||||
// Subscribe to capture-ended notifications; returns an unsubscribe function.
onLinuxScreenShareMonitorAudioEnded: (listener) => {
  const handleEnded = (_event: Electron.IpcRendererEvent, payload: LinuxScreenShareMonitorAudioEndedPayload) => listener(payload);

  ipcRenderer.on(LINUX_SCREEN_SHARE_MONITOR_AUDIO_ENDED_CHANNEL, handleEnded);

  return () => ipcRenderer.removeListener(LINUX_SCREEN_SHARE_MONITOR_AUDIO_ENDED_CHANNEL, handleEnded);
},
|
||||
getAppDataPath: () => ipcRenderer.invoke('get-app-data-path'),
|
||||
getDesktopSettings: () => ipcRenderer.invoke('get-desktop-settings'),
|
||||
setDesktopSettings: (patch) => ipcRenderer.invoke('set-desktop-settings', patch),
|
||||
relaunchApp: () => ipcRenderer.invoke('relaunch-app'),
|
||||
readFile: (filePath) => ipcRenderer.invoke('read-file', filePath),
|
||||
writeFile: (filePath, data) => ipcRenderer.invoke('write-file', filePath, data),
|
||||
saveFileAs: (defaultFileName, data) => ipcRenderer.invoke('save-file-as', defaultFileName, data),
|
||||
|
||||
@@ -49,6 +49,7 @@ export async function createWindow(): Promise<void> {
|
||||
backgroundColor: '#0a0a0f',
|
||||
...(windowIconPath ? { icon: windowIconPath } : {}),
|
||||
webPreferences: {
|
||||
backgroundThrottling: false,
|
||||
nodeIntegration: false,
|
||||
contextIsolation: true,
|
||||
preload: path.join(__dirname, '..', 'preload.js'),
|
||||
|
||||
@@ -182,6 +182,8 @@ export type ChatEventType =
|
||||
| 'voice-state-request'
|
||||
| 'state-request'
|
||||
| 'screen-state'
|
||||
| 'screen-share-request'
|
||||
| 'screen-share-stop'
|
||||
| 'role-change'
|
||||
| 'room-permissions-update'
|
||||
| 'server-icon-summary'
|
||||
@@ -196,6 +198,7 @@ export type ChatEventType =
|
||||
/** Optional fields depend on `type`. */
|
||||
export interface ChatEvent {
|
||||
type: ChatEventType;
|
||||
fromPeerId?: string;
|
||||
messageId?: string;
|
||||
message?: Message;
|
||||
reaction?: Reaction;
|
||||
|
||||
@@ -35,7 +35,7 @@ const FFT_SIZE = 256;
|
||||
|
||||
interface TrackedStream {
|
||||
ctx: AudioContext;
|
||||
source: MediaStreamAudioSourceNode;
|
||||
sources: MediaStreamAudioSourceNode[];
|
||||
analyser: AnalyserNode;
|
||||
dataArray: Uint8Array<ArrayBuffer>;
|
||||
volumeSignal: ReturnType<typeof signal<number>>;
|
||||
@@ -58,8 +58,15 @@ export class VoiceActivityService implements OnDestroy {
|
||||
|
||||
constructor() {
|
||||
this.subs.push(
|
||||
this.webrtc.onRemoteStream.subscribe(({ peerId, stream }) => {
|
||||
this.trackStream(peerId, stream);
|
||||
this.webrtc.onRemoteStream.subscribe(({ peerId }) => {
|
||||
const voiceStream = this.webrtc.getRemoteVoiceStream(peerId);
|
||||
|
||||
if (!voiceStream) {
|
||||
this.untrackStream(peerId);
|
||||
return;
|
||||
}
|
||||
|
||||
this.trackStream(peerId, voiceStream);
|
||||
})
|
||||
);
|
||||
|
||||
@@ -98,6 +105,7 @@ export class VoiceActivityService implements OnDestroy {
|
||||
|
||||
trackStream(id: string, stream: MediaStream): void {
|
||||
const existing = this.tracked.get(id);
|
||||
const audioTracks = stream.getAudioTracks().filter((track) => track.readyState === 'live');
|
||||
|
||||
if (existing && existing.stream === stream)
|
||||
return;
|
||||
@@ -105,12 +113,22 @@ export class VoiceActivityService implements OnDestroy {
|
||||
if (existing)
|
||||
this.disposeEntry(existing);
|
||||
|
||||
if (audioTracks.length === 0) {
|
||||
this.tracked.delete(id);
|
||||
this.publishSpeakingMap();
|
||||
|
||||
if (this.tracked.size === 0)
|
||||
this.stopPolling();
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
const ctx = new AudioContext();
|
||||
const source = ctx.createMediaStreamSource(stream);
|
||||
const analyser = ctx.createAnalyser();
|
||||
const sources = audioTracks.map((track) => ctx.createMediaStreamSource(new MediaStream([track])));
|
||||
|
||||
analyser.fftSize = FFT_SIZE;
|
||||
source.connect(analyser);
|
||||
sources.forEach((source) => source.connect(analyser));
|
||||
|
||||
const dataArray = new Uint8Array(analyser.fftSize) as Uint8Array<ArrayBuffer>;
|
||||
const volumeSignal = signal(0);
|
||||
@@ -118,7 +136,7 @@ export class VoiceActivityService implements OnDestroy {
|
||||
|
||||
this.tracked.set(id, {
|
||||
ctx,
|
||||
source,
|
||||
sources,
|
||||
analyser,
|
||||
dataArray,
|
||||
volumeSignal,
|
||||
@@ -228,7 +246,9 @@ export class VoiceActivityService implements OnDestroy {
|
||||
}
|
||||
|
||||
private disposeEntry(entry: TrackedStream): void {
|
||||
try { entry.source.disconnect(); } catch { /* already disconnected */ }
|
||||
entry.sources.forEach((source) => {
|
||||
try { source.disconnect(); } catch { /* already disconnected */ }
|
||||
});
|
||||
|
||||
try { entry.ctx.close(); } catch { /* already closed */ }
|
||||
}
|
||||
|
||||
105
src/app/core/services/voice-settings.storage.ts
Normal file
105
src/app/core/services/voice-settings.storage.ts
Normal file
@@ -0,0 +1,105 @@
|
||||
import { STORAGE_KEY_VOICE_SETTINGS } from '../constants';
|
||||
import { LatencyProfile } from './webrtc/webrtc.constants';
|
||||
import { DEFAULT_SCREEN_SHARE_QUALITY, ScreenShareQuality } from './webrtc/screen-share.config';
|
||||
|
||||
// Whitelists of valid persisted values, used by normaliseVoiceSettings to
// reject anything unexpected read back from localStorage.
const LATENCY_PROFILES: LatencyProfile[] = [
  'low',
  'balanced',
  'high'
];
const SCREEN_SHARE_QUALITIES: ScreenShareQuality[] = [
  'performance',
  'balanced',
  'high-fps',
  'quality'
];
|
||||
|
||||
/** User-configurable voice & screen-share settings persisted to localStorage. */
export interface VoiceSettings {
  /** Preferred input (microphone) device ID; empty string = system default. */
  inputDevice: string;
  /** Preferred output (playback) device ID; empty string = system default. */
  outputDevice: string;
  /** Input gain, clamped to 0–100. */
  inputVolume: number;
  /** Output gain, clamped to 0–100. */
  outputVolume: number;
  /** Audio bitrate, clamped to 32–256 (presumably kbps — confirm against encoder config). */
  audioBitrate: number;
  /** Latency/quality trade-off profile; one of LATENCY_PROFILES. */
  latencyProfile: LatencyProfile;
  /** Whether screen shares should capture system audio. */
  includeSystemAudio: boolean;
  /** Whether microphone noise reduction is enabled. */
  noiseReduction: boolean;
  /** Selected screen-share quality preset; one of SCREEN_SHARE_QUALITIES. */
  screenShareQuality: ScreenShareQuality;
  /** Whether to prompt for a quality preset each time a share starts. */
  askScreenShareQuality: boolean;
}
|
||||
|
||||
/**
 * Defaults applied when no settings are stored, or when a stored field fails
 * validation in normaliseVoiceSettings.
 */
export const DEFAULT_VOICE_SETTINGS: VoiceSettings = {
  inputDevice: '',
  outputDevice: '',
  inputVolume: 100,
  outputVolume: 100,
  audioBitrate: 96,
  latencyProfile: 'balanced',
  includeSystemAudio: false,
  noiseReduction: true,
  screenShareQuality: DEFAULT_SCREEN_SHARE_QUALITY,
  askScreenShareQuality: true
};
|
||||
|
||||
export function loadVoiceSettingsFromStorage(): VoiceSettings {
|
||||
try {
|
||||
const raw = localStorage.getItem(STORAGE_KEY_VOICE_SETTINGS);
|
||||
|
||||
if (!raw)
|
||||
return { ...DEFAULT_VOICE_SETTINGS };
|
||||
|
||||
return normaliseVoiceSettings(JSON.parse(raw) as Partial<VoiceSettings>);
|
||||
} catch {
|
||||
return { ...DEFAULT_VOICE_SETTINGS };
|
||||
}
|
||||
}
|
||||
|
||||
export function saveVoiceSettingsToStorage(patch: Partial<VoiceSettings>): VoiceSettings {
|
||||
const nextSettings = normaliseVoiceSettings({
|
||||
...loadVoiceSettingsFromStorage(),
|
||||
...patch
|
||||
});
|
||||
|
||||
try {
|
||||
localStorage.setItem(STORAGE_KEY_VOICE_SETTINGS, JSON.stringify(nextSettings));
|
||||
} catch {}
|
||||
|
||||
return nextSettings;
|
||||
}
|
||||
|
||||
function normaliseVoiceSettings(raw: Partial<VoiceSettings>): VoiceSettings {
|
||||
return {
|
||||
inputDevice: typeof raw.inputDevice === 'string' ? raw.inputDevice : DEFAULT_VOICE_SETTINGS.inputDevice,
|
||||
outputDevice: typeof raw.outputDevice === 'string' ? raw.outputDevice : DEFAULT_VOICE_SETTINGS.outputDevice,
|
||||
inputVolume: clampNumber(raw.inputVolume, 0, 100, DEFAULT_VOICE_SETTINGS.inputVolume),
|
||||
outputVolume: clampNumber(raw.outputVolume, 0, 100, DEFAULT_VOICE_SETTINGS.outputVolume),
|
||||
audioBitrate: clampNumber(raw.audioBitrate, 32, 256, DEFAULT_VOICE_SETTINGS.audioBitrate),
|
||||
latencyProfile: LATENCY_PROFILES.includes(raw.latencyProfile as LatencyProfile)
|
||||
? raw.latencyProfile as LatencyProfile
|
||||
: DEFAULT_VOICE_SETTINGS.latencyProfile,
|
||||
includeSystemAudio: typeof raw.includeSystemAudio === 'boolean'
|
||||
? raw.includeSystemAudio
|
||||
: DEFAULT_VOICE_SETTINGS.includeSystemAudio,
|
||||
noiseReduction: typeof raw.noiseReduction === 'boolean'
|
||||
? raw.noiseReduction
|
||||
: DEFAULT_VOICE_SETTINGS.noiseReduction,
|
||||
screenShareQuality: SCREEN_SHARE_QUALITIES.includes(raw.screenShareQuality as ScreenShareQuality)
|
||||
? raw.screenShareQuality as ScreenShareQuality
|
||||
: DEFAULT_VOICE_SETTINGS.screenShareQuality,
|
||||
askScreenShareQuality: typeof raw.askScreenShareQuality === 'boolean'
|
||||
? raw.askScreenShareQuality
|
||||
: DEFAULT_VOICE_SETTINGS.askScreenShareQuality
|
||||
};
|
||||
}
|
||||
|
||||
function clampNumber(
|
||||
value: unknown,
|
||||
min: number,
|
||||
max: number,
|
||||
fallback: number
|
||||
): number {
|
||||
if (typeof value !== 'number' || Number.isNaN(value)) {
|
||||
return fallback;
|
||||
}
|
||||
|
||||
return Math.max(min, Math.min(max, value));
|
||||
}
|
||||
131
src/app/core/services/voice-workspace.service.ts
Normal file
131
src/app/core/services/voice-workspace.service.ts
Normal file
@@ -0,0 +1,131 @@
|
||||
/* eslint-disable @typescript-eslint/member-ordering */
|
||||
import {
|
||||
Injectable,
|
||||
computed,
|
||||
effect,
|
||||
inject,
|
||||
signal
|
||||
} from '@angular/core';
|
||||
|
||||
import { VoiceSessionService } from './voice-session.service';
|
||||
|
||||
/** Display state of the voice workspace: hidden, full overlay, or mini window. */
export type VoiceWorkspaceMode = 'hidden' | 'expanded' | 'minimized';

/** Position of the minimized window (left/top offsets — presumably CSS pixels). */
export interface VoiceWorkspacePosition {
  left: number;
  top: number;
}

// Initial placement of the mini window before the user drags it anywhere.
const DEFAULT_MINI_WINDOW_POSITION: VoiceWorkspacePosition = {
  left: 24,
  top: 24
};
|
||||
|
||||
/**
 * UI state for the voice workspace overlay (expanded view, minimized mini
 * window, and which remote stream is focused).
 *
 * The effective `mode` is derived: it reports 'hidden' whenever there is no
 * active voice session or the user is not viewing the voice server, regardless
 * of the internally requested mode.
 */
@Injectable({ providedIn: 'root' })
export class VoiceWorkspaceService {
  private readonly voiceSession = inject(VoiceSessionService);

  // Requested mode; the public `mode` computed below may override it to 'hidden'.
  private readonly _mode = signal<VoiceWorkspaceMode>('hidden');
  // Stream currently focused in the expanded view, if any.
  private readonly _focusedStreamId = signal<string | null>(null);
  // Whether remote screen shares should be connected while the workspace is visible.
  private readonly _connectRemoteShares = signal(false);
  private readonly _miniWindowPosition = signal<VoiceWorkspacePosition>(
    DEFAULT_MINI_WINDOW_POSITION
  );
  // True once the user has explicitly placed the mini window.
  private readonly _hasCustomMiniWindowPosition = signal(false);

  // Effective mode: forced to 'hidden' without a session / outside the voice server.
  readonly mode = computed<VoiceWorkspaceMode>(() => {
    if (!this.voiceSession.voiceSession() || !this.voiceSession.isViewingVoiceServer()) {
      return 'hidden';
    }

    return this._mode();
  });

  readonly isExpanded = computed(() => this.mode() === 'expanded');
  readonly isMinimized = computed(() => this.mode() === 'minimized');
  readonly isVisible = computed(() => this.mode() !== 'hidden');
  readonly focusedStreamId = computed(() => this._focusedStreamId());
  // Remote shares are only connected while the workspace is actually visible.
  readonly shouldConnectRemoteShares = computed(
    () => this.isVisible() && this._connectRemoteShares()
  );
  readonly miniWindowPosition = computed(() => this._miniWindowPosition());
  readonly hasCustomMiniWindowPosition = computed(() => this._hasCustomMiniWindowPosition());

  constructor() {
    // Reset all workspace state whenever the voice session ends.
    // allowSignalWrites: reset() writes signals from inside an effect.
    effect(
      () => {
        if (this.voiceSession.voiceSession()) {
          return;
        }

        this.reset();
      },
      { allowSignalWrites: true }
    );
  }

  /**
   * Expand the workspace, optionally focusing a stream.
   * No-op when there is no active voice session.
   *
   * `connectRemoteShares` is only updated when the option is explicitly
   * present (hasOwnProperty check), so callers that omit it keep the
   * previous connection preference.
   */
  open(
    focusedStreamId: string | null = null,
    options?: { connectRemoteShares?: boolean }
  ): void {
    if (!this.voiceSession.voiceSession()) {
      return;
    }

    if (options && Object.prototype.hasOwnProperty.call(options, 'connectRemoteShares')) {
      this._connectRemoteShares.set(options.connectRemoteShares === true);
    }

    this._focusedStreamId.set(focusedStreamId);
    this._mode.set('expanded');
  }

  /** Expand the workspace with the given stream focused (alias for open). */
  focusStream(streamId: string, options?: { connectRemoteShares?: boolean }): void {
    this.open(streamId, options);
  }

  /** Collapse to the mini window. No-op without an active voice session. */
  minimize(): void {
    if (!this.voiceSession.voiceSession()) {
      return;
    }

    this._mode.set('minimized');
  }

  /** Re-expand from the mini window, keeping the previously focused stream. */
  restore(): void {
    this.open(this._focusedStreamId());
  }

  /** Hide the workspace and stop connecting remote shares. */
  close(): void {
    this._mode.set('hidden');
    this._connectRemoteShares.set(false);
  }

  /** Hide the expanded view to reveal chat; the minimized state is left alone. */
  showChat(): void {
    if (this._mode() === 'expanded') {
      this._mode.set('hidden');
      this._connectRemoteShares.set(false);
    }
  }

  /** Drop the focused-stream selection without changing visibility. */
  clearFocusedStream(): void {
    this._focusedStreamId.set(null);
  }

  /**
   * Move the mini window.
   *
   * @param position - New left/top placement.
   * @param markCustom - When true (default), remember that the user placed it.
   */
  setMiniWindowPosition(position: VoiceWorkspacePosition, markCustom = true): void {
    this._miniWindowPosition.set(position);
    this._hasCustomMiniWindowPosition.set(markCustom);
  }

  /** Restore the default mini-window placement and clear the custom flag. */
  resetMiniWindowPosition(): void {
    this._miniWindowPosition.set(DEFAULT_MINI_WINDOW_POSITION);
    this._hasCustomMiniWindowPosition.set(false);
  }

  /** Return every piece of workspace state to its initial value. */
  reset(): void {
    this._mode.set('hidden');
    this._focusedStreamId.set(null);
    this._connectRemoteShares.set(false);
    this.resetMiniWindowPosition();
  }
}
|
||||
@@ -35,6 +35,7 @@ import {
|
||||
JoinedServerInfo,
|
||||
VoiceStateSnapshot,
|
||||
LatencyProfile,
|
||||
ScreenShareStartOptions,
|
||||
SIGNALING_TYPE_IDENTIFY,
|
||||
SIGNALING_TYPE_JOIN_SERVER,
|
||||
SIGNALING_TYPE_VIEW_SERVER,
|
||||
@@ -47,6 +48,8 @@ import {
|
||||
SIGNALING_TYPE_USER_JOINED,
|
||||
SIGNALING_TYPE_USER_LEFT,
|
||||
DEFAULT_DISPLAY_NAME,
|
||||
P2P_TYPE_SCREEN_SHARE_REQUEST,
|
||||
P2P_TYPE_SCREEN_SHARE_STOP,
|
||||
P2P_TYPE_VOICE_STATE,
|
||||
P2P_TYPE_SCREEN_STATE
|
||||
} from './webrtc';
|
||||
@@ -69,6 +72,9 @@ export class WebRTCService implements OnDestroy {
|
||||
/** Maps each remote peer ID to the server they were discovered from. */
|
||||
private readonly peerServerMap = new Map<string, string>();
|
||||
private readonly serviceDestroyed$ = new Subject<void>();
|
||||
private remoteScreenShareRequestsEnabled = false;
|
||||
private readonly desiredRemoteScreenSharePeers = new Set<string>();
|
||||
private readonly activeRemoteScreenSharePeers = new Set<string>();
|
||||
|
||||
private readonly _localPeerId = signal<string>(uuidv4());
|
||||
private readonly _isSignalingConnected = signal(false);
|
||||
@@ -204,11 +210,37 @@ export class WebRTCService implements OnDestroy {
|
||||
// Signaling → heartbeat → broadcast states
|
||||
this.signalingManager.heartbeatTick$.subscribe(() => this.peerManager.broadcastCurrentStates());
|
||||
|
||||
// Internal control-plane messages for on-demand screen-share delivery.
|
||||
this.peerManager.messageReceived$.subscribe((event) => this.handlePeerControlMessage(event));
|
||||
|
||||
// Peer manager → connected peers signal
|
||||
this.peerManager.connectedPeersChanged$.subscribe((peers: string[]) =>
|
||||
this._connectedPeers.set(peers)
|
||||
);
|
||||
|
||||
// If we are already sharing when a new peer connection finishes, push the
|
||||
// current screen-share tracks to that peer and renegotiate.
|
||||
this.peerManager.peerConnected$.subscribe((peerId) => {
|
||||
if (!this.screenShareManager.getIsScreenActive()) {
|
||||
if (this.remoteScreenShareRequestsEnabled && this.desiredRemoteScreenSharePeers.has(peerId)) {
|
||||
this.requestRemoteScreenShares([peerId]);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
this.screenShareManager.syncScreenShareToPeer(peerId);
|
||||
|
||||
if (this.remoteScreenShareRequestsEnabled && this.desiredRemoteScreenSharePeers.has(peerId)) {
|
||||
this.requestRemoteScreenShares([peerId]);
|
||||
}
|
||||
});
|
||||
|
||||
this.peerManager.peerDisconnected$.subscribe((peerId) => {
|
||||
this.activeRemoteScreenSharePeers.delete(peerId);
|
||||
this.screenShareManager.clearScreenShareRequest(peerId);
|
||||
});
|
||||
|
||||
// Media manager → voice connected signal
|
||||
this.mediaManager.voiceConnected$.subscribe(() => {
|
||||
this._isVoiceConnected.set(true);
|
||||
@@ -544,6 +576,31 @@ export class WebRTCService implements OnDestroy {
|
||||
this.peerManager.sendToPeer(peerId, event);
|
||||
}
|
||||
|
||||
/**
 * Reconcile which peers we request remote screen shares from.
 *
 * @param peerIds - Peers whose shares we want (falsy entries are dropped).
 * @param enabled - When false, disable requesting entirely and stop every
 *   currently active remote share.
 */
syncRemoteScreenShareRequests(peerIds: string[], enabled: boolean): void {
  const nextDesiredPeers = new Set(
    peerIds.filter((peerId): peerId is string => !!peerId)
  );

  if (!enabled) {
    this.remoteScreenShareRequestsEnabled = false;
    this.desiredRemoteScreenSharePeers.clear();
    // Copy before iterating: stopRemoteScreenShares mutates the active set.
    this.stopRemoteScreenShares([...this.activeRemoteScreenSharePeers]);
    return;
  }

  this.remoteScreenShareRequestsEnabled = true;

  // Stop shares from peers that are active but no longer desired.
  // NOTE: this must happen BEFORE desiredRemoteScreenSharePeers is rebuilt.
  for (const activePeerId of [...this.activeRemoteScreenSharePeers]) {
    if (!nextDesiredPeers.has(activePeerId)) {
      this.stopRemoteScreenShares([activePeerId]);
    }
  }

  // Rebuild the desired set, then (re)request shares from every desired peer;
  // requestRemoteScreenShares skips peers that are already active or not connected.
  this.desiredRemoteScreenSharePeers.clear();
  nextDesiredPeers.forEach((peerId) => this.desiredRemoteScreenSharePeers.add(peerId));
  this.requestRemoteScreenShares([...nextDesiredPeers]);
}
|
||||
|
||||
/**
|
||||
* Send a {@link ChatEvent} to a peer with back-pressure awareness.
|
||||
*
|
||||
@@ -569,6 +626,29 @@ export class WebRTCService implements OnDestroy {
|
||||
return this.peerManager.remotePeerStreams.get(peerId) ?? null;
|
||||
}
|
||||
|
||||
/**
 * Get the remote voice-only stream for a connected peer.
 *
 * @param peerId - The remote peer whose voice stream to retrieve.
 * @returns The stream, or `null` if the peer has no active voice audio.
 */
getRemoteVoiceStream(peerId: string): MediaStream | null {
  const voiceStream = this.peerManager.remotePeerVoiceStreams.get(peerId);

  return voiceStream ?? null;
}
|
||||
|
||||
/**
 * Get the remote screen-share stream for a connected peer.
 *
 * This contains the screen video track and any audio track that belongs to
 * the screen share itself, not the peer's normal voice-chat audio.
 *
 * @param peerId - The remote peer whose screen-share stream to retrieve.
 * @returns The stream, or `null` if the peer has no active screen share.
 */
getRemoteScreenShareStream(peerId: string): MediaStream | null {
  const screenShareStream = this.peerManager.remotePeerScreenShareStreams.get(peerId);

  return screenShareStream ?? null;
}
|
||||
|
||||
/**
|
||||
* Get the current local media stream (microphone audio).
|
||||
*
|
||||
@@ -715,11 +795,11 @@ export class WebRTCService implements OnDestroy {
|
||||
/**
|
||||
* Start sharing the screen (or a window) with all connected peers.
|
||||
*
|
||||
* @param includeAudio - Whether to capture and mix system audio.
|
||||
* @param options - Screen-share capture options.
|
||||
* @returns The screen-capture {@link MediaStream}.
|
||||
*/
|
||||
async startScreenShare(includeAudio = false): Promise<MediaStream> {
|
||||
const stream = await this.screenShareManager.startScreenShare(includeAudio);
|
||||
async startScreenShare(options: ScreenShareStartOptions): Promise<MediaStream> {
|
||||
const stream = await this.screenShareManager.startScreenShare(options);
|
||||
|
||||
this._isScreenSharing.set(true);
|
||||
this._screenStreamSignal.set(stream);
|
||||
@@ -755,6 +835,9 @@ export class WebRTCService implements OnDestroy {
|
||||
private fullCleanup(): void {
|
||||
this.voiceServerId = null;
|
||||
this.peerServerMap.clear();
|
||||
this.remoteScreenShareRequestsEnabled = false;
|
||||
this.desiredRemoteScreenSharePeers.clear();
|
||||
this.activeRemoteScreenSharePeers.clear();
|
||||
this.peerManager.closeAllPeers();
|
||||
this._connectedPeers.set([]);
|
||||
this.mediaManager.disableVoice();
|
||||
@@ -782,6 +865,50 @@ export class WebRTCService implements OnDestroy {
|
||||
return connState === 'connected' && dcState === 'open';
|
||||
}
|
||||
|
||||
private handlePeerControlMessage(event: ChatEvent): void {
|
||||
if (!event.fromPeerId) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (event.type === P2P_TYPE_SCREEN_SHARE_REQUEST) {
|
||||
this.screenShareManager.requestScreenShareForPeer(event.fromPeerId);
|
||||
return;
|
||||
}
|
||||
|
||||
if (event.type === P2P_TYPE_SCREEN_SHARE_STOP) {
|
||||
this.screenShareManager.stopScreenShareForPeer(event.fromPeerId);
|
||||
}
|
||||
}
|
||||
|
||||
private requestRemoteScreenShares(peerIds: string[]): void {
|
||||
const connectedPeerIds = new Set(this.peerManager.getConnectedPeerIds());
|
||||
|
||||
for (const peerId of peerIds) {
|
||||
if (!connectedPeerIds.has(peerId) || this.activeRemoteScreenSharePeers.has(peerId)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
this.peerManager.sendToPeer(peerId, { type: P2P_TYPE_SCREEN_SHARE_REQUEST });
|
||||
this.activeRemoteScreenSharePeers.add(peerId);
|
||||
}
|
||||
}
|
||||
|
||||
private stopRemoteScreenShares(peerIds: string[]): void {
|
||||
const connectedPeerIds = new Set(this.peerManager.getConnectedPeerIds());
|
||||
|
||||
for (const peerId of peerIds) {
|
||||
if (!this.activeRemoteScreenSharePeers.has(peerId)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (connectedPeerIds.has(peerId)) {
|
||||
this.peerManager.sendToPeer(peerId, { type: P2P_TYPE_SCREEN_SHARE_STOP });
|
||||
}
|
||||
|
||||
this.activeRemoteScreenSharePeers.delete(peerId);
|
||||
}
|
||||
}
|
||||
|
||||
ngOnDestroy(): void {
|
||||
this.disconnect();
|
||||
this.serviceDestroyed$.complete();
|
||||
|
||||
@@ -11,4 +11,5 @@ export * from './signaling.manager';
|
||||
export * from './peer-connection.manager';
|
||||
export * from './media.manager';
|
||||
export * from './screen-share.manager';
|
||||
export * from './screen-share.config';
|
||||
export * from './noise-reduction.manager';
|
||||
|
||||
@@ -70,6 +70,12 @@ export class PeerConnectionManager {
|
||||
/** Remote composite streams keyed by remote peer ID. */
|
||||
readonly remotePeerStreams = this.state.remotePeerStreams;
|
||||
|
||||
/** Remote voice-only streams keyed by remote peer ID. */
|
||||
readonly remotePeerVoiceStreams = this.state.remotePeerVoiceStreams;
|
||||
|
||||
/** Remote screen-share streams keyed by remote peer ID. */
|
||||
readonly remotePeerScreenShareStreams = this.state.remotePeerScreenShareStreams;
|
||||
|
||||
/** Last measured latency (ms) per peer. */
|
||||
readonly peerLatencies = this.state.peerLatencies;
|
||||
|
||||
|
||||
@@ -34,6 +34,8 @@ export function removePeer(
|
||||
}
|
||||
|
||||
state.remotePeerStreams.delete(peerId);
|
||||
state.remotePeerVoiceStreams.delete(peerId);
|
||||
state.remotePeerScreenShareStreams.delete(peerId);
|
||||
|
||||
if (peerData) {
|
||||
if (peerData.dataChannel)
|
||||
@@ -65,6 +67,8 @@ export function closeAllPeers(state: PeerConnectionManagerState): void {
|
||||
|
||||
state.activePeerConnections.clear();
|
||||
state.remotePeerStreams.clear();
|
||||
state.remotePeerVoiceStreams.clear();
|
||||
state.remotePeerScreenShareStreams.clear();
|
||||
state.peerNegotiationQueue.clear();
|
||||
state.peerLatencies.clear();
|
||||
state.pendingPings.clear();
|
||||
|
||||
@@ -32,6 +32,8 @@ export interface PeerConnectionCallbacks {
|
||||
export interface PeerConnectionManagerState {
|
||||
activePeerConnections: Map<string, PeerData>;
|
||||
remotePeerStreams: Map<string, MediaStream>;
|
||||
remotePeerVoiceStreams: Map<string, MediaStream>;
|
||||
remotePeerScreenShareStreams: Map<string, MediaStream>;
|
||||
disconnectedPeerTracker: Map<string, DisconnectedPeerEntry>;
|
||||
peerReconnectTimers: Map<string, ReturnType<typeof setInterval>>;
|
||||
peerDisconnectGraceTimers: Map<string, ReturnType<typeof setTimeout>>;
|
||||
@@ -85,6 +87,8 @@ export function createPeerConnectionManagerState(): PeerConnectionManagerState {
|
||||
return {
|
||||
activePeerConnections: new Map<string, PeerData>(),
|
||||
remotePeerStreams: new Map<string, MediaStream>(),
|
||||
remotePeerVoiceStreams: new Map<string, MediaStream>(),
|
||||
remotePeerScreenShareStreams: new Map<string, MediaStream>(),
|
||||
disconnectedPeerTracker: new Map<string, DisconnectedPeerEntry>(),
|
||||
peerReconnectTimers: new Map<string, ReturnType<typeof setInterval>>(),
|
||||
peerDisconnectGraceTimers: new Map<string, ReturnType<typeof setTimeout>>(),
|
||||
|
||||
@@ -1,4 +1,7 @@
|
||||
import { TRACK_KIND_VIDEO } from '../../webrtc.constants';
|
||||
import {
|
||||
TRACK_KIND_AUDIO,
|
||||
TRACK_KIND_VIDEO
|
||||
} from '../../webrtc.constants';
|
||||
import { recordDebugNetworkStreams } from '../../../debug-network-metrics.service';
|
||||
import { PeerConnectionManagerContext } from '../shared';
|
||||
|
||||
@@ -34,10 +37,25 @@ export function handleRemoteTrack(
|
||||
}
|
||||
|
||||
const compositeStream = buildCompositeRemoteStream(state, remotePeerId, track);
|
||||
const voiceStream = isVoiceAudioTrack(context, event, remotePeerId)
|
||||
? buildAudioOnlyStream(state.remotePeerVoiceStreams.get(remotePeerId), track)
|
||||
: null;
|
||||
const screenShareStream = isScreenShareTrack(context, event, remotePeerId)
|
||||
? buildScreenShareStream(state.remotePeerScreenShareStreams.get(remotePeerId), track)
|
||||
: null;
|
||||
|
||||
track.addEventListener('ended', () => removeRemoteTrack(context, remotePeerId, track.id));
|
||||
|
||||
state.remotePeerStreams.set(remotePeerId, compositeStream);
|
||||
|
||||
if (voiceStream) {
|
||||
state.remotePeerVoiceStreams.set(remotePeerId, voiceStream);
|
||||
}
|
||||
|
||||
if (screenShareStream) {
|
||||
state.remotePeerScreenShareStreams.set(remotePeerId, screenShareStream);
|
||||
}
|
||||
|
||||
state.remoteStream$.next({
|
||||
peerId: remotePeerId,
|
||||
stream: compositeStream
|
||||
@@ -61,14 +79,59 @@ function buildCompositeRemoteStream(
|
||||
remotePeerId: string,
|
||||
incomingTrack: MediaStreamTrack
|
||||
): MediaStream {
|
||||
const existingStream = state.remotePeerStreams.get(remotePeerId);
|
||||
return buildMergedStream(state.remotePeerStreams.get(remotePeerId), incomingTrack, {
|
||||
replaceVideoTrack: true
|
||||
});
|
||||
}
|
||||
|
||||
function buildAudioOnlyStream(
|
||||
existingStream: MediaStream | undefined,
|
||||
incomingTrack: MediaStreamTrack
|
||||
): MediaStream {
|
||||
return buildMergedStream(existingStream, incomingTrack, {
|
||||
allowedKinds: [TRACK_KIND_AUDIO],
|
||||
replaceVideoTrack: false
|
||||
});
|
||||
}
|
||||
|
||||
function buildScreenShareStream(
|
||||
existingStream: MediaStream | undefined,
|
||||
incomingTrack: MediaStreamTrack
|
||||
): MediaStream {
|
||||
return buildMergedStream(existingStream, incomingTrack, {
|
||||
replaceVideoTrack: true
|
||||
});
|
||||
}
|
||||
|
||||
function buildMergedStream(
|
||||
existingStream: MediaStream | undefined,
|
||||
incomingTrack: MediaStreamTrack,
|
||||
options: {
|
||||
allowedKinds?: string[];
|
||||
replaceVideoTrack: boolean;
|
||||
}
|
||||
): MediaStream {
|
||||
const allowedKinds = options.allowedKinds ?? [TRACK_KIND_AUDIO, TRACK_KIND_VIDEO];
|
||||
|
||||
let preservedTracks: MediaStreamTrack[] = [];
|
||||
|
||||
if (existingStream) {
|
||||
preservedTracks = existingStream.getTracks().filter(
|
||||
(existingTrack) =>
|
||||
existingTrack.kind !== incomingTrack.kind && existingTrack.readyState === 'live'
|
||||
(existingTrack) => {
|
||||
if (existingTrack.readyState !== 'live') {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!allowedKinds.includes(existingTrack.kind)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (options.replaceVideoTrack && incomingTrack.kind === TRACK_KIND_VIDEO) {
|
||||
return existingTrack.kind !== TRACK_KIND_VIDEO;
|
||||
}
|
||||
|
||||
return existingTrack.id !== incomingTrack.id;
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
@@ -81,20 +144,12 @@ function removeRemoteTrack(
|
||||
trackId: string
|
||||
): void {
|
||||
const { logger, state } = context;
|
||||
const currentStream = state.remotePeerStreams.get(remotePeerId);
|
||||
const compositeStream = removeTrackFromStreamMap(state.remotePeerStreams, remotePeerId, trackId);
|
||||
|
||||
if (!currentStream)
|
||||
return;
|
||||
removeTrackFromStreamMap(state.remotePeerVoiceStreams, remotePeerId, trackId);
|
||||
removeTrackFromStreamMap(state.remotePeerScreenShareStreams, remotePeerId, trackId);
|
||||
|
||||
const remainingTracks = currentStream
|
||||
.getTracks()
|
||||
.filter((existingTrack) => existingTrack.id !== trackId && existingTrack.readyState === 'live');
|
||||
|
||||
if (remainingTracks.length === currentStream.getTracks().length)
|
||||
return;
|
||||
|
||||
if (remainingTracks.length === 0) {
|
||||
state.remotePeerStreams.delete(remotePeerId);
|
||||
if (!compositeStream) {
|
||||
recordDebugNetworkStreams(remotePeerId, { audio: 0,
|
||||
video: 0 });
|
||||
|
||||
@@ -108,23 +163,99 @@ function removeRemoteTrack(
|
||||
return;
|
||||
}
|
||||
|
||||
const nextStream = new MediaStream(remainingTracks);
|
||||
|
||||
state.remotePeerStreams.set(remotePeerId, nextStream);
|
||||
state.remoteStream$.next({
|
||||
peerId: remotePeerId,
|
||||
stream: nextStream
|
||||
stream: compositeStream
|
||||
});
|
||||
|
||||
recordDebugNetworkStreams(remotePeerId, {
|
||||
audio: nextStream.getAudioTracks().length,
|
||||
video: nextStream.getVideoTracks().length
|
||||
audio: compositeStream.getAudioTracks().length,
|
||||
video: compositeStream.getVideoTracks().length
|
||||
});
|
||||
|
||||
logger.info('Remote stream updated', {
|
||||
audioTrackCount: nextStream.getAudioTracks().length,
|
||||
audioTrackCount: compositeStream.getAudioTracks().length,
|
||||
remotePeerId,
|
||||
trackCount: nextStream.getTracks().length,
|
||||
videoTrackCount: nextStream.getVideoTracks().length
|
||||
trackCount: compositeStream.getTracks().length,
|
||||
videoTrackCount: compositeStream.getVideoTracks().length
|
||||
});
|
||||
}
|
||||
|
||||
function removeTrackFromStreamMap(
|
||||
streamMap: Map<string, MediaStream>,
|
||||
remotePeerId: string,
|
||||
trackId: string
|
||||
): MediaStream | null {
|
||||
const currentStream = streamMap.get(remotePeerId);
|
||||
|
||||
if (!currentStream) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const remainingTracks = currentStream
|
||||
.getTracks()
|
||||
.filter((existingTrack) => existingTrack.id !== trackId && existingTrack.readyState === 'live');
|
||||
|
||||
if (remainingTracks.length === currentStream.getTracks().length) {
|
||||
return currentStream;
|
||||
}
|
||||
|
||||
if (remainingTracks.length === 0) {
|
||||
streamMap.delete(remotePeerId);
|
||||
return null;
|
||||
}
|
||||
|
||||
const nextStream = new MediaStream(remainingTracks);
|
||||
|
||||
streamMap.set(remotePeerId, nextStream);
|
||||
return nextStream;
|
||||
}
|
||||
|
||||
function isVoiceAudioTrack(
|
||||
context: PeerConnectionManagerContext,
|
||||
event: RTCTrackEvent,
|
||||
remotePeerId: string
|
||||
): boolean {
|
||||
return event.track.kind === TRACK_KIND_AUDIO && !isScreenShareAudioTrack(context, event, remotePeerId);
|
||||
}
|
||||
|
||||
function isScreenShareTrack(
|
||||
context: PeerConnectionManagerContext,
|
||||
event: RTCTrackEvent,
|
||||
remotePeerId: string
|
||||
): boolean {
|
||||
return event.track.kind === TRACK_KIND_VIDEO || isScreenShareAudioTrack(context, event, remotePeerId);
|
||||
}
|
||||
|
||||
/**
 * Decides whether an incoming audio track carries screen-share audio rather
 * than the peer's voice channel.
 *
 * Strategy: if we can identify the transceiver that carries our own voice
 * audio (matched via the stored audioSender), any audio arriving on a
 * different transceiver is treated as screen-share audio. Otherwise fall back
 * to ordering among audio transceivers: index 0 is assumed to be voice, any
 * later index is screen share.
 * NOTE(review): the fallback assumes transceiver ordering is consistent
 * across both ends of the connection — confirm against the offer/answer code.
 */
function isScreenShareAudioTrack(
  context: PeerConnectionManagerContext,
  event: RTCTrackEvent,
  remotePeerId: string
): boolean {
  // Only audio tracks can be screen-share audio.
  if (event.track.kind !== TRACK_KIND_AUDIO) {
    return false;
  }

  const peerData = context.state.activePeerConnections.get(remotePeerId);

  // Unknown peer: cannot classify, default to "not screen share".
  if (!peerData) {
    return false;
  }

  // Preferred path: locate the transceiver whose sender is our voice audio sender.
  const voiceAudioTransceiver = peerData.connection.getTransceivers().find(
    (transceiver) => transceiver.sender === peerData.audioSender
  );

  if (voiceAudioTransceiver) {
    // Any audio arriving on a different transceiver is screen-share audio.
    return event.transceiver !== voiceAudioTransceiver;
  }

  // Fallback: collect audio transceivers; the event's own transceiver is
  // force-included in case its receiver track is not populated yet.
  const audioTransceivers = peerData.connection.getTransceivers().filter((transceiver) =>
    transceiver.receiver.track?.kind === TRACK_KIND_AUDIO || transceiver === event.transceiver
  );
  // Match by identity first, then by mid (covers renegotiated transceivers).
  const transceiverIndex = audioTransceivers.findIndex((transceiver) =>
    transceiver === event.transceiver || (!!transceiver.mid && transceiver.mid === event.transceiver.mid)
  );

  // Position 0 is assumed to be the voice channel; anything later is screen share.
  return transceiverIndex > 0;
}
|
||||
|
||||
79
src/app/core/services/webrtc/screen-share.config.ts
Normal file
79
src/app/core/services/webrtc/screen-share.config.ts
Normal file
@@ -0,0 +1,79 @@
|
||||
export type ScreenShareQuality = 'performance' | 'balanced' | 'high-fps' | 'quality';
|
||||
|
||||
export interface ScreenShareStartOptions {
|
||||
includeSystemAudio: boolean;
|
||||
quality: ScreenShareQuality;
|
||||
}
|
||||
|
||||
export interface ScreenShareQualityPreset {
|
||||
label: string;
|
||||
description: string;
|
||||
width: number;
|
||||
height: number;
|
||||
frameRate: number;
|
||||
maxBitrateBps: number;
|
||||
contentHint: 'motion' | 'detail';
|
||||
degradationPreference: 'maintain-framerate' | 'maintain-resolution';
|
||||
scaleResolutionDownBy?: number;
|
||||
}
|
||||
|
||||
export const DEFAULT_SCREEN_SHARE_QUALITY: ScreenShareQuality = 'balanced';
|
||||
|
||||
export const DEFAULT_SCREEN_SHARE_START_OPTIONS: ScreenShareStartOptions = {
|
||||
includeSystemAudio: false,
|
||||
quality: DEFAULT_SCREEN_SHARE_QUALITY
|
||||
};
|
||||
|
||||
export const SCREEN_SHARE_QUALITY_PRESETS: Record<ScreenShareQuality, ScreenShareQualityPreset> = {
|
||||
performance: {
|
||||
label: 'Performance saver',
|
||||
description: '720p / 30 FPS with lower CPU and bandwidth usage.',
|
||||
width: 1280,
|
||||
height: 720,
|
||||
frameRate: 30,
|
||||
maxBitrateBps: 2_000_000,
|
||||
contentHint: 'motion',
|
||||
degradationPreference: 'maintain-framerate',
|
||||
scaleResolutionDownBy: 1
|
||||
},
|
||||
balanced: {
|
||||
label: 'Balanced',
|
||||
description: '1080p / 30 FPS for stable quality in most cases.',
|
||||
width: 1920,
|
||||
height: 1080,
|
||||
frameRate: 30,
|
||||
maxBitrateBps: 4_000_000,
|
||||
contentHint: 'detail',
|
||||
degradationPreference: 'maintain-resolution',
|
||||
scaleResolutionDownBy: 1
|
||||
},
|
||||
'high-fps': {
|
||||
label: 'High FPS',
|
||||
description: '1080p / 60 FPS for games and fast motion.',
|
||||
width: 1920,
|
||||
height: 1080,
|
||||
frameRate: 60,
|
||||
maxBitrateBps: 6_000_000,
|
||||
contentHint: 'motion',
|
||||
degradationPreference: 'maintain-framerate',
|
||||
scaleResolutionDownBy: 1
|
||||
},
|
||||
quality: {
|
||||
label: 'Sharp text',
|
||||
description: '1440p / 30 FPS for detailed UI and text clarity.',
|
||||
width: 2560,
|
||||
height: 1440,
|
||||
frameRate: 30,
|
||||
maxBitrateBps: 8_000_000,
|
||||
contentHint: 'detail',
|
||||
degradationPreference: 'maintain-resolution',
|
||||
scaleResolutionDownBy: 1
|
||||
}
|
||||
};
|
||||
|
||||
export const SCREEN_SHARE_QUALITY_OPTIONS = (
|
||||
Object.entries(SCREEN_SHARE_QUALITY_PRESETS) as [ScreenShareQuality, ScreenShareQualityPreset][]
|
||||
).map(([id, preset]) => ({
|
||||
id,
|
||||
...preset
|
||||
}));
|
||||
File diff suppressed because it is too large
Load Diff
@@ -86,6 +86,8 @@ export const P2P_TYPE_STATE_REQUEST = 'state-request';
|
||||
// Type discriminators for P2P messages (see the sibling P2P_TYPE_* constants).
export const P2P_TYPE_VOICE_STATE_REQUEST = 'voice-state-request';
export const P2P_TYPE_VOICE_STATE = 'voice-state';
// Screen-share signalling: announce share state, request a peer's stream, stop it.
export const P2P_TYPE_SCREEN_STATE = 'screen-state';
export const P2P_TYPE_SCREEN_SHARE_REQUEST = 'screen-share-request';
export const P2P_TYPE_SCREEN_SHARE_STOP = 'screen-share-stop';
// Liveness probes.
export const P2P_TYPE_PING = 'ping';
export const P2P_TYPE_PONG = 'pong';
|
||||
|
||||
|
||||
@@ -1,23 +1,36 @@
|
||||
<div class="h-full flex flex-col bg-background">
|
||||
@if (currentRoom()) {
|
||||
<!-- Channel header bar -->
|
||||
<div class="h-12 flex items-center gap-2 px-4 border-b border-border bg-card flex-shrink-0">
|
||||
<span class="text-muted-foreground text-lg">#</span>
|
||||
<span class="font-medium text-foreground text-sm">{{ activeChannelName }}</span>
|
||||
<div class="flex-1"></div>
|
||||
</div>
|
||||
@if (!isVoiceWorkspaceExpanded()) {
|
||||
<div class="h-12 flex items-center gap-2 px-4 border-b border-border bg-card flex-shrink-0">
|
||||
<ng-icon
|
||||
[name]="isVoiceWorkspaceExpanded() ? 'lucideMonitor' : 'lucideHash'"
|
||||
class="w-4 h-4 text-muted-foreground"
|
||||
/>
|
||||
<span class="font-medium text-foreground text-sm">{{ headerTitle() }}</span>
|
||||
|
||||
@if (isVoiceWorkspaceExpanded()) {
|
||||
<span class="rounded-full bg-primary/10 px-2 py-0.5 text-[10px] font-semibold uppercase tracking-[0.2em] text-primary">
|
||||
Voice streams
|
||||
</span>
|
||||
}
|
||||
|
||||
<div class="flex-1"></div>
|
||||
</div>
|
||||
}
|
||||
|
||||
<!-- Main Content -->
|
||||
<div class="flex-1 flex overflow-hidden">
|
||||
<!-- Chat Area -->
|
||||
<main class="flex-1 flex flex-col min-w-0">
|
||||
<!-- Screen Share Viewer -->
|
||||
<app-screen-share-viewer />
|
||||
|
||||
<!-- Messages -->
|
||||
<div class="flex-1 overflow-hidden">
|
||||
<main class="relative flex-1 min-w-0">
|
||||
<div
|
||||
class="h-full overflow-hidden"
|
||||
[class.hidden]="isVoiceWorkspaceExpanded()"
|
||||
>
|
||||
<app-chat-messages />
|
||||
</div>
|
||||
|
||||
<app-screen-share-workspace />
|
||||
</main>
|
||||
|
||||
<!-- Sidebar always visible -->
|
||||
|
||||
@@ -1,15 +1,16 @@
|
||||
/* eslint-disable @typescript-eslint/member-ordering */
|
||||
import {
|
||||
Component,
|
||||
computed,
|
||||
inject,
|
||||
signal
|
||||
} from '@angular/core';
|
||||
import { Router } from '@angular/router';
|
||||
import { CommonModule } from '@angular/common';
|
||||
import { Store } from '@ngrx/store';
|
||||
import { NgIcon, provideIcons } from '@ng-icons/core';
|
||||
import {
|
||||
lucideHash,
|
||||
lucideMonitor,
|
||||
lucideSettings,
|
||||
lucideUsers,
|
||||
lucideMenu,
|
||||
@@ -18,16 +19,21 @@ import {
|
||||
} from '@ng-icons/lucide';
|
||||
|
||||
import { ChatMessagesComponent } from '../../chat/chat-messages/chat-messages.component';
|
||||
import { ScreenShareViewerComponent } from '../../voice/screen-share-viewer/screen-share-viewer.component';
|
||||
import { ScreenShareWorkspaceComponent } from '../../voice/screen-share-workspace/screen-share-workspace.component';
|
||||
import { RoomsSidePanelComponent } from '../rooms-side-panel/rooms-side-panel.component';
|
||||
|
||||
import {
|
||||
selectCurrentRoom,
|
||||
selectActiveChannelId,
|
||||
selectTextChannels
|
||||
selectTextChannels,
|
||||
selectVoiceChannels
|
||||
} from '../../../store/rooms/rooms.selectors';
|
||||
import { SettingsModalService } from '../../../core/services/settings-modal.service';
|
||||
import { selectIsCurrentUserAdmin } from '../../../store/users/users.selectors';
|
||||
import {
|
||||
selectCurrentUser,
|
||||
selectIsCurrentUserAdmin
|
||||
} from '../../../store/users/users.selectors';
|
||||
import { VoiceWorkspaceService } from '../../../core/services/voice-workspace.service';
|
||||
|
||||
@Component({
|
||||
selector: 'app-chat-room',
|
||||
@@ -36,12 +42,13 @@ import { selectIsCurrentUserAdmin } from '../../../store/users/users.selectors';
|
||||
CommonModule,
|
||||
NgIcon,
|
||||
ChatMessagesComponent,
|
||||
ScreenShareViewerComponent,
|
||||
ScreenShareWorkspaceComponent,
|
||||
RoomsSidePanelComponent
|
||||
],
|
||||
viewProviders: [
|
||||
provideIcons({
|
||||
lucideHash,
|
||||
lucideMonitor,
|
||||
lucideSettings,
|
||||
lucideUsers,
|
||||
lucideMenu,
|
||||
@@ -56,23 +63,38 @@ import { selectIsCurrentUserAdmin } from '../../../store/users/users.selectors';
|
||||
*/
|
||||
export class ChatRoomComponent {
|
||||
private store = inject(Store);
|
||||
private router = inject(Router);
|
||||
private settingsModal = inject(SettingsModalService);
|
||||
private voiceWorkspace = inject(VoiceWorkspaceService);
|
||||
showMenu = signal(false);
|
||||
showAdminPanel = signal(false);
|
||||
|
||||
currentRoom = this.store.selectSignal(selectCurrentRoom);
|
||||
isAdmin = this.store.selectSignal(selectIsCurrentUserAdmin);
|
||||
currentUser = this.store.selectSignal(selectCurrentUser);
|
||||
activeChannelId = this.store.selectSignal(selectActiveChannelId);
|
||||
textChannels = this.store.selectSignal(selectTextChannels);
|
||||
voiceChannels = this.store.selectSignal(selectVoiceChannels);
|
||||
isVoiceWorkspaceExpanded = this.voiceWorkspace.isExpanded;
|
||||
|
||||
/** Returns the display name of the currently active text channel. */
|
||||
get activeChannelName(): string {
|
||||
activeTextChannelName = computed(() => {
|
||||
const id = this.activeChannelId();
|
||||
const activeChannel = this.textChannels().find((channel) => channel.id === id);
|
||||
|
||||
return activeChannel ? activeChannel.name : id;
|
||||
}
|
||||
});
|
||||
|
||||
connectedVoiceChannelName = computed(() => {
|
||||
const voiceChannelId = this.currentUser()?.voiceState?.roomId;
|
||||
const voiceChannel = this.voiceChannels().find((channel) => channel.id === voiceChannelId);
|
||||
|
||||
return voiceChannel?.name || 'Voice Lounge';
|
||||
});
|
||||
|
||||
headerTitle = computed(() =>
|
||||
this.isVoiceWorkspaceExpanded()
|
||||
? this.connectedVoiceChannelName()
|
||||
: this.activeTextChannelName()
|
||||
);
|
||||
|
||||
/** Open the settings modal to the Server admin page for the current room. */
|
||||
toggleAdminPanel() {
|
||||
|
||||
@@ -120,6 +120,7 @@
|
||||
(contextmenu)="openChannelContextMenu($event, ch)"
|
||||
[class.bg-secondary/40]="isCurrentRoom(ch.id)"
|
||||
[disabled]="!voiceEnabled()"
|
||||
[title]="isCurrentRoom(ch.id) ? 'Open stream workspace' : 'Join voice channel'"
|
||||
>
|
||||
<span class="flex items-center gap-2 text-foreground/80">
|
||||
<ng-icon
|
||||
@@ -141,7 +142,12 @@
|
||||
<span>{{ ch.name }}</span>
|
||||
}
|
||||
</span>
|
||||
@if (voiceOccupancy(ch.id) > 0) {
|
||||
|
||||
@if (isCurrentRoom(ch.id)) {
|
||||
<span class="rounded-full bg-primary/15 px-2 py-0.5 text-[10px] font-semibold uppercase tracking-wide text-primary">
|
||||
{{ isVoiceWorkspaceExpanded() ? 'Open' : 'View' }}
|
||||
</span>
|
||||
} @else if (voiceOccupancy(ch.id) > 0) {
|
||||
<span class="text-xs text-muted-foreground">{{ voiceOccupancy(ch.id) }}</span>
|
||||
}
|
||||
</button>
|
||||
@@ -178,7 +184,7 @@
|
||||
}
|
||||
@if (u.screenShareState?.isSharing || isUserSharing(u.id)) {
|
||||
<button
|
||||
(click)="viewStream(u.id); $event.stopPropagation()"
|
||||
(click)="viewStream(u.oderId || u.id); $event.stopPropagation()"
|
||||
class="px-1.5 py-0.5 text-[10px] font-bold bg-red-500 text-white rounded animate-pulse hover:bg-red-600 transition-colors"
|
||||
>
|
||||
LIVE
|
||||
@@ -237,13 +243,16 @@
|
||||
</p>
|
||||
}
|
||||
@if (currentUser()?.screenShareState?.isSharing || (currentUser()?.id && isUserSharing(currentUser()!.id))) {
|
||||
<span class="text-[10px] bg-red-500 text-white px-1.5 py-0.5 rounded-sm font-medium flex items-center gap-1 animate-pulse">
|
||||
<button
|
||||
class="text-[10px] bg-red-500 text-white px-1.5 py-0.5 rounded-sm font-medium flex items-center gap-1 animate-pulse hover:bg-red-600 transition-colors"
|
||||
(click)="viewStream(currentUser()!.oderId || currentUser()!.id); $event.stopPropagation()"
|
||||
>
|
||||
<ng-icon
|
||||
name="lucideMonitor"
|
||||
class="w-2.5 h-2.5"
|
||||
/>
|
||||
LIVE
|
||||
</span>
|
||||
</button>
|
||||
}
|
||||
</div>
|
||||
</div>
|
||||
@@ -292,7 +301,7 @@
|
||||
}
|
||||
@if (user.screenShareState?.isSharing || isUserSharing(user.id)) {
|
||||
<button
|
||||
(click)="viewStream(user.id); $event.stopPropagation()"
|
||||
(click)="viewStream(user.oderId || user.id); $event.stopPropagation()"
|
||||
class="text-[10px] bg-red-500 text-white px-1.5 py-0.5 rounded-sm font-medium hover:bg-red-600 transition-colors flex items-center gap-1 animate-pulse"
|
||||
>
|
||||
<ng-icon
|
||||
|
||||
@@ -36,6 +36,7 @@ import { RoomsActions } from '../../../store/rooms/rooms.actions';
|
||||
import { MessagesActions } from '../../../store/messages/messages.actions';
|
||||
import { WebRTCService } from '../../../core/services/webrtc.service';
|
||||
import { VoiceSessionService } from '../../../core/services/voice-session.service';
|
||||
import { VoiceWorkspaceService } from '../../../core/services/voice-workspace.service';
|
||||
import { VoiceActivityService } from '../../../core/services/voice-activity.service';
|
||||
import { VoicePlaybackService } from '../../voice/voice-controls/services/voice-playback.service';
|
||||
import { VoiceControlsComponent } from '../../voice/voice-controls/voice-controls.component';
|
||||
@@ -88,11 +89,13 @@ export class RoomsSidePanelComponent {
|
||||
private store = inject(Store);
|
||||
private webrtc = inject(WebRTCService);
|
||||
private voiceSessionService = inject(VoiceSessionService);
|
||||
private voiceWorkspace = inject(VoiceWorkspaceService);
|
||||
private voicePlayback = inject(VoicePlaybackService);
|
||||
voiceActivity = inject(VoiceActivityService);
|
||||
|
||||
activeTab = signal<TabView>('channels');
|
||||
showFloatingControls = this.voiceSessionService.showFloatingControls;
|
||||
isVoiceWorkspaceExpanded = this.voiceWorkspace.isExpanded;
|
||||
onlineUsers = this.store.selectSignal(selectOnlineUsers);
|
||||
currentUser = this.store.selectSignal(selectCurrentUser);
|
||||
currentRoom = this.store.selectSignal(selectCurrentRoom);
|
||||
@@ -185,6 +188,7 @@ export class RoomsSidePanelComponent {
|
||||
if (this.renamingChannelId())
|
||||
return;
|
||||
|
||||
this.voiceWorkspace.showChat();
|
||||
this.store.dispatch(RoomsActions.selectChannel({ channelId }));
|
||||
}
|
||||
|
||||
@@ -346,6 +350,17 @@ export class RoomsSidePanelComponent {
|
||||
|
||||
joinVoice(roomId: string) {
|
||||
const room = this.currentRoom();
|
||||
const current = this.currentUser();
|
||||
|
||||
if (
|
||||
room
|
||||
&& current?.voiceState?.isConnected
|
||||
&& current.voiceState.roomId === roomId
|
||||
&& current.voiceState.serverId === room.id
|
||||
) {
|
||||
this.voiceWorkspace.open(null, { connectRemoteShares: true });
|
||||
return;
|
||||
}
|
||||
|
||||
if (room && room.permissions && room.permissions.allowVoice === false) {
|
||||
return;
|
||||
@@ -354,8 +369,6 @@ export class RoomsSidePanelComponent {
|
||||
if (!room)
|
||||
return;
|
||||
|
||||
const current = this.currentUser();
|
||||
|
||||
if (current?.voiceState?.isConnected && current.voiceState.serverId !== room?.id) {
|
||||
if (!this.webrtc.isVoiceConnected()) {
|
||||
if (current.id) {
|
||||
@@ -510,15 +523,11 @@ export class RoomsSidePanelComponent {
|
||||
}
|
||||
|
||||
/**
 * Focus a user's shared stream in the voice workspace.
 * Identical intent to viewStream(); delegate so the focus logic lives in one
 * place. (The superseded window-event dispatch lines were removed.)
 */
viewShare(userId: string) {
  this.viewStream(userId);
}
|
||||
|
||||
/**
 * Focus a user's shared stream in the voice workspace, asking the service to
 * connect remote shares on demand. (Only the current focusStream call is
 * kept; the superseded 'viewer:focus' window-event dispatch was removed.)
 */
viewStream(userId: string) {
  this.voiceWorkspace.focusStream(userId, { connectRemoteShares: true });
}
|
||||
|
||||
isUserLocallyMuted(user: User): boolean {
|
||||
@@ -540,7 +549,13 @@ export class RoomsSidePanelComponent {
|
||||
return false;
|
||||
}
|
||||
|
||||
const stream = this.webrtc.getRemoteStream(userId);
|
||||
const peerKeys = [user?.oderId, user?.id, userId].filter(
|
||||
(candidate): candidate is string => !!candidate
|
||||
);
|
||||
|
||||
const stream = peerKeys
|
||||
.map((peerKey) => this.webrtc.getRemoteScreenShareStream(peerKey))
|
||||
.find((candidate) => !!candidate && candidate.getVideoTracks().length > 0) || null;
|
||||
|
||||
return !!stream && stream.getVideoTracks().length > 0;
|
||||
}
|
||||
|
||||
@@ -189,6 +189,49 @@
|
||||
class="w-full h-1.5 bg-secondary rounded-lg appearance-none cursor-pointer accent-primary"
|
||||
/>
|
||||
</div>
|
||||
<div>
|
||||
<label
|
||||
for="screen-share-quality-select"
|
||||
class="block text-xs font-medium text-muted-foreground mb-1"
|
||||
>Screen share quality</label
|
||||
>
|
||||
<select
|
||||
(change)="onScreenShareQualityChange($event)"
|
||||
id="screen-share-quality-select"
|
||||
class="w-full px-3 py-2 bg-secondary rounded-lg border border-border text-foreground text-sm focus:outline-none focus:ring-2 focus:ring-primary"
|
||||
>
|
||||
@for (option of screenShareQualityOptions; track option.id) {
|
||||
<option
|
||||
[value]="option.id"
|
||||
[selected]="screenShareQuality() === option.id"
|
||||
>
|
||||
{{ option.label }}
|
||||
</option>
|
||||
}
|
||||
</select>
|
||||
<p class="text-[10px] text-muted-foreground/60 mt-1">
|
||||
{{ selectedScreenShareQualityDescription() }}
|
||||
</p>
|
||||
</div>
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-sm font-medium text-foreground">Ask before screen sharing</p>
|
||||
<p class="text-xs text-muted-foreground">Let the user confirm quality before each new screen share</p>
|
||||
</div>
|
||||
<label class="relative inline-flex items-center cursor-pointer">
|
||||
<input
|
||||
type="checkbox"
|
||||
[checked]="askScreenShareQuality()"
|
||||
(change)="onAskScreenShareQualityChange($event)"
|
||||
id="ask-screen-share-quality-toggle"
|
||||
aria-label="Toggle screen share quality prompt"
|
||||
class="sr-only peer"
|
||||
/>
|
||||
<div
|
||||
class="w-10 h-5 bg-secondary rounded-full peer peer-checked:bg-primary peer-checked:after:translate-x-full after:content-[''] after:absolute after:top-0.5 after:left-[2px] after:bg-white after:rounded-full after:h-4 after:w-4 after:transition-all"
|
||||
></div>
|
||||
</label>
|
||||
</div>
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-sm font-medium text-foreground">Noise reduction</p>
|
||||
@@ -211,7 +254,7 @@
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-sm font-medium text-foreground">Screen share system audio</p>
|
||||
<p class="text-xs text-muted-foreground">Include system audio when sharing screen</p>
|
||||
<p class="text-xs text-muted-foreground">Share other computer audio while filtering MeToYou audio when supported</p>
|
||||
</div>
|
||||
<label class="relative inline-flex items-center cursor-pointer">
|
||||
<input
|
||||
@@ -227,6 +270,58 @@
|
||||
></div>
|
||||
</label>
|
||||
</div>
|
||||
<p class="text-[10px] text-muted-foreground/60 -mt-1">
|
||||
Your microphone stays on the normal voice channel. The shared screen audio should only contain desktop sound.
|
||||
</p>
|
||||
</div>
|
||||
</section>
|
||||
|
||||
@if (isElectron) {
|
||||
<section>
|
||||
<div class="flex items-center gap-2 mb-3">
|
||||
<ng-icon
|
||||
name="lucideCpu"
|
||||
class="w-5 h-5 text-muted-foreground"
|
||||
/>
|
||||
<h4 class="text-sm font-semibold text-foreground">Desktop Performance</h4>
|
||||
</div>
|
||||
<div class="space-y-3">
|
||||
<div class="flex items-center justify-between">
|
||||
<div>
|
||||
<p class="text-sm font-medium text-foreground">Hardware acceleration</p>
|
||||
<p class="text-xs text-muted-foreground">Use GPU acceleration for rendering and WebRTC when available</p>
|
||||
</div>
|
||||
<label class="relative inline-flex items-center cursor-pointer">
|
||||
<input
|
||||
type="checkbox"
|
||||
[checked]="hardwareAcceleration()"
|
||||
(change)="onHardwareAccelerationChange($event)"
|
||||
id="hardware-acceleration-toggle"
|
||||
aria-label="Toggle hardware acceleration"
|
||||
class="sr-only peer"
|
||||
/>
|
||||
<div
|
||||
class="w-10 h-5 bg-secondary rounded-full peer peer-checked:bg-primary peer-checked:after:translate-x-full after:content-[''] after:absolute after:top-0.5 after:left-[2px] after:bg-white after:rounded-full after:h-4 after:w-4 after:transition-all"
|
||||
></div>
|
||||
</label>
|
||||
</div>
|
||||
|
||||
@if (hardwareAccelerationRestartRequired()) {
|
||||
<div class="rounded-lg border border-primary/30 bg-primary/10 p-3 flex items-center justify-between gap-3">
|
||||
<div>
|
||||
<p class="text-sm font-medium text-foreground">Restart required</p>
|
||||
<p class="text-xs text-muted-foreground">Restart MeToYou to apply the new hardware acceleration setting.</p>
|
||||
</div>
|
||||
<button
|
||||
type="button"
|
||||
(click)="restartDesktopApp()"
|
||||
class="px-3 py-1.5 rounded-lg bg-primary text-primary-foreground text-xs font-medium hover:bg-primary/90 transition-colors"
|
||||
>
|
||||
Restart app
|
||||
</button>
|
||||
</div>
|
||||
}
|
||||
</div>
|
||||
</section>
|
||||
}
|
||||
</div>
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
import {
|
||||
Component,
|
||||
inject,
|
||||
computed,
|
||||
signal
|
||||
} from '@angular/core';
|
||||
import { CommonModule } from '@angular/common';
|
||||
@@ -10,19 +11,41 @@ import { NgIcon, provideIcons } from '@ng-icons/core';
|
||||
import {
|
||||
lucideMic,
|
||||
lucideHeadphones,
|
||||
lucideAudioLines
|
||||
lucideAudioLines,
|
||||
lucideMonitor,
|
||||
lucideCpu
|
||||
} from '@ng-icons/lucide';
|
||||
|
||||
import { WebRTCService } from '../../../../core/services/webrtc.service';
|
||||
import { VoicePlaybackService } from '../../../voice/voice-controls/services/voice-playback.service';
|
||||
import { NotificationAudioService, AppSound } from '../../../../core/services/notification-audio.service';
|
||||
import { STORAGE_KEY_VOICE_SETTINGS } from '../../../../core/constants';
|
||||
import { PlatformService } from '../../../../core/services/platform.service';
|
||||
import {
|
||||
loadVoiceSettingsFromStorage,
|
||||
saveVoiceSettingsToStorage
|
||||
} from '../../../../core/services/voice-settings.storage';
|
||||
import {
|
||||
SCREEN_SHARE_QUALITY_OPTIONS,
|
||||
ScreenShareQuality
|
||||
} from '../../../../core/services/webrtc';
|
||||
|
||||
interface AudioDevice {
|
||||
deviceId: string;
|
||||
label: string;
|
||||
}
|
||||
|
||||
interface DesktopSettingsSnapshot {
|
||||
hardwareAcceleration: boolean;
|
||||
runtimeHardwareAcceleration: boolean;
|
||||
restartRequired: boolean;
|
||||
}
|
||||
|
||||
interface DesktopSettingsElectronApi {
|
||||
getDesktopSettings?: () => Promise<DesktopSettingsSnapshot>;
|
||||
setDesktopSettings?: (patch: { hardwareAcceleration?: boolean }) => Promise<DesktopSettingsSnapshot>;
|
||||
relaunchApp?: () => Promise<boolean>;
|
||||
}
|
||||
|
||||
@Component({
|
||||
selector: 'app-voice-settings',
|
||||
standalone: true,
|
||||
@@ -35,7 +58,9 @@ interface AudioDevice {
|
||||
provideIcons({
|
||||
lucideMic,
|
||||
lucideHeadphones,
|
||||
lucideAudioLines
|
||||
lucideAudioLines,
|
||||
lucideMonitor,
|
||||
lucideCpu
|
||||
})
|
||||
],
|
||||
templateUrl: './voice-settings.component.html'
|
||||
@@ -43,7 +68,10 @@ interface AudioDevice {
|
||||
export class VoiceSettingsComponent {
|
||||
private webrtcService = inject(WebRTCService);
|
||||
private voicePlayback = inject(VoicePlaybackService);
|
||||
private platform = inject(PlatformService);
|
||||
readonly audioService = inject(NotificationAudioService);
|
||||
readonly isElectron = this.platform.isElectron;
|
||||
readonly screenShareQualityOptions = SCREEN_SHARE_QUALITY_OPTIONS;
|
||||
|
||||
inputDevices = signal<AudioDevice[]>([]);
|
||||
outputDevices = signal<AudioDevice[]>([]);
|
||||
@@ -55,10 +83,19 @@ export class VoiceSettingsComponent {
|
||||
latencyProfile = signal<'low' | 'balanced' | 'high'>('balanced');
|
||||
includeSystemAudio = signal(false);
|
||||
noiseReduction = signal(true);
|
||||
screenShareQuality = signal<ScreenShareQuality>('balanced');
|
||||
askScreenShareQuality = signal(true);
|
||||
hardwareAcceleration = signal(true);
|
||||
hardwareAccelerationRestartRequired = signal(false);
|
||||
readonly selectedScreenShareQualityDescription = computed(() => this.screenShareQualityOptions.find((option) => option.id === this.screenShareQuality())?.description ?? '');
|
||||
|
||||
constructor() {
|
||||
this.loadVoiceSettings();
|
||||
this.loadAudioDevices();
|
||||
|
||||
if (this.isElectron) {
|
||||
void this.loadDesktopSettings();
|
||||
}
|
||||
}
|
||||
|
||||
async loadAudioDevices(): Promise<void> {
|
||||
@@ -85,38 +122,18 @@ export class VoiceSettingsComponent {
|
||||
}
|
||||
|
||||
loadVoiceSettings(): void {
|
||||
try {
|
||||
const raw = localStorage.getItem(STORAGE_KEY_VOICE_SETTINGS);
|
||||
const settings = loadVoiceSettingsFromStorage();
|
||||
|
||||
if (!raw)
|
||||
return;
|
||||
|
||||
const settings = JSON.parse(raw);
|
||||
|
||||
if (settings.inputDevice)
|
||||
this.selectedInputDevice.set(settings.inputDevice);
|
||||
|
||||
if (settings.outputDevice)
|
||||
this.selectedOutputDevice.set(settings.outputDevice);
|
||||
|
||||
if (typeof settings.inputVolume === 'number')
|
||||
this.inputVolume.set(settings.inputVolume);
|
||||
|
||||
if (typeof settings.outputVolume === 'number')
|
||||
this.outputVolume.set(settings.outputVolume);
|
||||
|
||||
if (typeof settings.audioBitrate === 'number')
|
||||
this.audioBitrate.set(settings.audioBitrate);
|
||||
|
||||
if (settings.latencyProfile)
|
||||
this.latencyProfile.set(settings.latencyProfile);
|
||||
|
||||
if (typeof settings.includeSystemAudio === 'boolean')
|
||||
this.includeSystemAudio.set(settings.includeSystemAudio);
|
||||
|
||||
if (typeof settings.noiseReduction === 'boolean')
|
||||
this.noiseReduction.set(settings.noiseReduction);
|
||||
} catch {}
|
||||
this.selectedInputDevice.set(settings.inputDevice);
|
||||
this.selectedOutputDevice.set(settings.outputDevice);
|
||||
this.inputVolume.set(settings.inputVolume);
|
||||
this.outputVolume.set(settings.outputVolume);
|
||||
this.audioBitrate.set(settings.audioBitrate);
|
||||
this.latencyProfile.set(settings.latencyProfile);
|
||||
this.includeSystemAudio.set(settings.includeSystemAudio);
|
||||
this.noiseReduction.set(settings.noiseReduction);
|
||||
this.screenShareQuality.set(settings.screenShareQuality);
|
||||
this.askScreenShareQuality.set(settings.askScreenShareQuality);
|
||||
|
||||
if (this.noiseReduction() !== this.webrtcService.isNoiseReductionEnabled()) {
|
||||
this.webrtcService.toggleNoiseReduction(this.noiseReduction());
|
||||
@@ -129,21 +146,18 @@ export class VoiceSettingsComponent {
|
||||
}
|
||||
|
||||
saveVoiceSettings(): void {
|
||||
try {
|
||||
localStorage.setItem(
|
||||
STORAGE_KEY_VOICE_SETTINGS,
|
||||
JSON.stringify({
|
||||
inputDevice: this.selectedInputDevice(),
|
||||
outputDevice: this.selectedOutputDevice(),
|
||||
inputVolume: this.inputVolume(),
|
||||
outputVolume: this.outputVolume(),
|
||||
audioBitrate: this.audioBitrate(),
|
||||
latencyProfile: this.latencyProfile(),
|
||||
includeSystemAudio: this.includeSystemAudio(),
|
||||
noiseReduction: this.noiseReduction()
|
||||
})
|
||||
);
|
||||
} catch {}
|
||||
saveVoiceSettingsToStorage({
|
||||
inputDevice: this.selectedInputDevice(),
|
||||
outputDevice: this.selectedOutputDevice(),
|
||||
inputVolume: this.inputVolume(),
|
||||
outputVolume: this.outputVolume(),
|
||||
audioBitrate: this.audioBitrate(),
|
||||
latencyProfile: this.latencyProfile(),
|
||||
includeSystemAudio: this.includeSystemAudio(),
|
||||
noiseReduction: this.noiseReduction(),
|
||||
screenShareQuality: this.screenShareQuality(),
|
||||
askScreenShareQuality: this.askScreenShareQuality()
|
||||
});
|
||||
}
|
||||
|
||||
onInputDeviceChange(event: Event): void {
|
||||
@@ -202,6 +216,20 @@ export class VoiceSettingsComponent {
|
||||
this.saveVoiceSettings();
|
||||
}
|
||||
|
||||
onScreenShareQualityChange(event: Event): void {
|
||||
const select = event.target as HTMLSelectElement;
|
||||
|
||||
this.screenShareQuality.set(select.value as ScreenShareQuality);
|
||||
this.saveVoiceSettings();
|
||||
}
|
||||
|
||||
onAskScreenShareQualityChange(event: Event): void {
|
||||
const input = event.target as HTMLInputElement;
|
||||
|
||||
this.askScreenShareQuality.set(!!input.checked);
|
||||
this.saveVoiceSettings();
|
||||
}
|
||||
|
||||
async onNoiseReductionChange(): Promise<void> {
|
||||
this.noiseReduction.update((currentValue) => !currentValue);
|
||||
await this.webrtcService.toggleNoiseReduction(this.noiseReduction());
|
||||
@@ -217,4 +245,56 @@ export class VoiceSettingsComponent {
|
||||
previewNotificationSound(): void {
|
||||
this.audioService.play(AppSound.Notification);
|
||||
}
|
||||
|
||||
async onHardwareAccelerationChange(event: Event): Promise<void> {
|
||||
const input = event.target as HTMLInputElement;
|
||||
const enabled = !!input.checked;
|
||||
const api = this.getElectronApi();
|
||||
|
||||
if (!api?.setDesktopSettings) {
|
||||
this.hardwareAcceleration.set(enabled);
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const snapshot = await api.setDesktopSettings({ hardwareAcceleration: enabled });
|
||||
|
||||
this.applyDesktopSettings(snapshot);
|
||||
} catch {
|
||||
input.checked = this.hardwareAcceleration();
|
||||
}
|
||||
}
|
||||
|
||||
async restartDesktopApp(): Promise<void> {
|
||||
const api = this.getElectronApi();
|
||||
|
||||
if (api?.relaunchApp) {
|
||||
await api.relaunchApp();
|
||||
}
|
||||
}
|
||||
|
||||
private async loadDesktopSettings(): Promise<void> {
|
||||
const api = this.getElectronApi();
|
||||
|
||||
if (!api?.getDesktopSettings) {
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const snapshot = await api.getDesktopSettings();
|
||||
|
||||
this.applyDesktopSettings(snapshot);
|
||||
} catch {}
|
||||
}
|
||||
|
||||
private applyDesktopSettings(snapshot: DesktopSettingsSnapshot): void {
|
||||
this.hardwareAcceleration.set(snapshot.hardwareAcceleration);
|
||||
this.hardwareAccelerationRestartRequired.set(snapshot.restartRequired);
|
||||
}
|
||||
|
||||
/**
 * Resolve the Electron preload bridge, or null when running in a plain
 * browser. Normalizes an absent `window.electronAPI` to null — previously the
 * `as` cast let `undefined` leak through despite the declared `| null` return
 * type.
 */
private getElectronApi(): DesktopSettingsElectronApi | null {
  if (typeof window === 'undefined') {
    return null;
  }

  const bridge = (window as unknown as { electronAPI?: DesktopSettingsElectronApi }).electronAPI;

  return bridge ?? null;
}
|
||||
}
|
||||
|
||||
@@ -93,3 +93,12 @@
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
|
||||
@if (showScreenShareQualityDialog()) {
|
||||
<app-screen-share-quality-dialog
|
||||
[selectedQuality]="screenShareQuality()"
|
||||
[includeSystemAudio]="includeSystemAudio()"
|
||||
(cancelled)="onScreenShareQualityCancelled()"
|
||||
(confirmed)="onScreenShareQualityConfirmed($event)"
|
||||
/>
|
||||
}
|
||||
|
||||
@@ -23,14 +23,22 @@ import {
|
||||
|
||||
import { WebRTCService } from '../../../core/services/webrtc.service';
|
||||
import { VoiceSessionService } from '../../../core/services/voice-session.service';
|
||||
import {
|
||||
loadVoiceSettingsFromStorage,
|
||||
saveVoiceSettingsToStorage
|
||||
} from '../../../core/services/voice-settings.storage';
|
||||
import { ScreenShareQuality } from '../../../core/services/webrtc';
|
||||
import { UsersActions } from '../../../store/users/users.actions';
|
||||
import { selectCurrentUser } from '../../../store/users/users.selectors';
|
||||
import { DebugConsoleComponent } from '../../../shared';
|
||||
import {
|
||||
DebugConsoleComponent,
|
||||
ScreenShareQualityDialogComponent
|
||||
} from '../../../shared';
|
||||
|
||||
@Component({
|
||||
selector: 'app-floating-voice-controls',
|
||||
standalone: true,
|
||||
imports: [CommonModule, NgIcon, DebugConsoleComponent],
|
||||
imports: [CommonModule, NgIcon, DebugConsoleComponent, ScreenShareQualityDialogComponent],
|
||||
viewProviders: [
|
||||
provideIcons({
|
||||
lucideMic,
|
||||
@@ -63,6 +71,10 @@ export class FloatingVoiceControlsComponent implements OnInit, OnDestroy {
|
||||
isMuted = signal(false);
|
||||
isDeafened = signal(false);
|
||||
isScreenSharing = signal(false);
|
||||
includeSystemAudio = signal(false);
|
||||
screenShareQuality = signal<ScreenShareQuality>('balanced');
|
||||
askScreenShareQuality = signal(true);
|
||||
showScreenShareQualityDialog = signal(false);
|
||||
|
||||
private stateSubscription: Subscription | null = null;
|
||||
|
||||
@@ -72,6 +84,7 @@ export class FloatingVoiceControlsComponent implements OnInit, OnDestroy {
|
||||
this.isMuted.set(this.webrtcService.isMuted());
|
||||
this.isDeafened.set(this.webrtcService.isDeafened());
|
||||
this.isScreenSharing.set(this.webrtcService.isScreenSharing());
|
||||
this.syncScreenShareSettings();
|
||||
}
|
||||
|
||||
ngOnDestroy(): void {
|
||||
@@ -131,15 +144,28 @@ export class FloatingVoiceControlsComponent implements OnInit, OnDestroy {
|
||||
this.webrtcService.stopScreenShare();
|
||||
this.isScreenSharing.set(false);
|
||||
} else {
|
||||
try {
|
||||
await this.webrtcService.startScreenShare(false);
|
||||
this.isScreenSharing.set(true);
|
||||
} catch (_error) {
|
||||
// Screen share request was denied or failed
|
||||
this.syncScreenShareSettings();
|
||||
|
||||
if (this.askScreenShareQuality()) {
|
||||
this.showScreenShareQualityDialog.set(true);
|
||||
return;
|
||||
}
|
||||
|
||||
await this.startScreenShareWithOptions(this.screenShareQuality());
|
||||
}
|
||||
}
|
||||
|
||||
/** User dismissed the quality dialog — close it without starting a share. */
onScreenShareQualityCancelled(): void {
  this.showScreenShareQualityDialog.set(false);
}
|
||||
|
||||
/**
 * User confirmed a quality choice in the dialog: persist it to storage so it
 * becomes the default next time, then start the share with that quality.
 */
async onScreenShareQualityConfirmed(quality: ScreenShareQuality): Promise<void> {
  this.showScreenShareQualityDialog.set(false);
  this.screenShareQuality.set(quality);
  saveVoiceSettingsToStorage({ screenShareQuality: quality });
  await this.startScreenShareWithOptions(quality);
}
|
||||
|
||||
/** Disconnect from the voice session entirely, cleaning up all voice state. */
|
||||
disconnect(): void {
|
||||
// Stop voice heartbeat
|
||||
@@ -242,4 +268,24 @@ export class FloatingVoiceControlsComponent implements OnInit, OnDestroy {
|
||||
|
||||
return base + ' bg-secondary text-foreground hover:bg-secondary/80';
|
||||
}
|
||||
|
||||
/**
 * Re-read the persisted voice settings and mirror the screen-share related
 * fields into the component's signals, so the UI reflects what is in storage.
 */
private syncScreenShareSettings(): void {
  const settings = loadVoiceSettingsFromStorage();

  this.includeSystemAudio.set(settings.includeSystemAudio);
  this.screenShareQuality.set(settings.screenShareQuality);
  this.askScreenShareQuality.set(settings.askScreenShareQuality);
}
|
||||
|
||||
/**
 * Start a screen share with the given quality plus the current
 * system-audio preference. On failure (permission denied, picker cancelled)
 * `isScreenSharing` is left untouched, i.e. stays false.
 */
private async startScreenShareWithOptions(quality: ScreenShareQuality): Promise<void> {
  try {
    await this.webrtcService.startScreenShare({
      includeSystemAudio: this.includeSystemAudio(),
      quality
    });
    this.isScreenSharing.set(true);
  } catch (_error) {
    // Screen share request was denied or failed
  }
}
|
||||
}
|
||||
|
||||
@@ -35,7 +35,7 @@
|
||||
<input
|
||||
type="range"
|
||||
min="0"
|
||||
max="100"
|
||||
max="200"
|
||||
[value]="screenVolume()"
|
||||
(input)="onScreenVolumeChange($event)"
|
||||
class="w-32 accent-white"
|
||||
|
||||
@@ -23,6 +23,7 @@ import { WebRTCService } from '../../../core/services/webrtc.service';
|
||||
import { selectOnlineUsers } from '../../../store/users/users.selectors';
|
||||
import { User } from '../../../core/models/index';
|
||||
import { DEFAULT_VOLUME } from '../../../core/constants';
|
||||
import { VoicePlaybackService } from '../voice-controls/services/voice-playback.service';
|
||||
|
||||
@Component({
|
||||
selector: 'app-screen-share-viewer',
|
||||
@@ -46,6 +47,7 @@ export class ScreenShareViewerComponent implements OnDestroy {
|
||||
@ViewChild('screenVideo') videoRef!: ElementRef<HTMLVideoElement>;
|
||||
|
||||
private webrtcService = inject(WebRTCService);
|
||||
private voicePlayback = inject(VoicePlaybackService);
|
||||
private store = inject(Store);
|
||||
private remoteStreamSub: Subscription | null = null;
|
||||
|
||||
@@ -67,7 +69,7 @@ export class ScreenShareViewerComponent implements OnDestroy {
|
||||
if (!userId)
|
||||
return;
|
||||
|
||||
const stream = this.webrtcService.getRemoteStream(userId);
|
||||
const stream = this.webrtcService.getRemoteScreenShareStream(userId);
|
||||
const user = this.onlineUsers().find((onlineUser) => onlineUser.id === userId || onlineUser.oderId === userId) || null;
|
||||
|
||||
if (stream && stream.getVideoTracks().length > 0) {
|
||||
@@ -75,9 +77,12 @@ export class ScreenShareViewerComponent implements OnDestroy {
|
||||
this.setRemoteStream(stream, user);
|
||||
} else if (this.videoRef) {
|
||||
this.videoRef.nativeElement.srcObject = stream;
|
||||
this.videoRef.nativeElement.volume = 0;
|
||||
this.videoRef.nativeElement.muted = true;
|
||||
this.hasStream.set(true);
|
||||
this.activeScreenSharer.set(null);
|
||||
this.watchingUserId.set(userId);
|
||||
this.screenVolume.set(this.voicePlayback.getUserVolume(userId));
|
||||
this.isLocalShare.set(false);
|
||||
}
|
||||
}
|
||||
@@ -124,7 +129,7 @@ export class ScreenShareViewerComponent implements OnDestroy {
|
||||
}
|
||||
|
||||
// Also check if the stream's video tracks are still available
|
||||
const stream = this.webrtcService.getRemoteStream(watchingId);
|
||||
const stream = this.webrtcService.getRemoteScreenShareStream(watchingId);
|
||||
const hasActiveVideo = stream?.getVideoTracks().some(track => track.readyState === 'live');
|
||||
|
||||
if (!hasActiveVideo) {
|
||||
@@ -218,31 +223,16 @@ export class ScreenShareViewerComponent implements OnDestroy {
|
||||
this.activeScreenSharer.set(user);
|
||||
this.watchingUserId.set(user.id || user.oderId || null);
|
||||
this.isLocalShare.set(false);
|
||||
this.screenVolume.set(this.voicePlayback.getUserVolume(user.id || user.oderId || ''));
|
||||
|
||||
if (this.videoRef) {
|
||||
const el = this.videoRef.nativeElement;
|
||||
|
||||
el.srcObject = stream;
|
||||
// For autoplay policies, try muted first, then unmute per volume setting
|
||||
// Keep the viewer muted so screen-share audio only plays once via VoicePlaybackService.
|
||||
el.muted = true;
|
||||
el.volume = 0;
|
||||
el.play().then(() => {
|
||||
// After playback starts, apply viewer volume settings
|
||||
el.volume = this.screenVolume() / 100;
|
||||
el.muted = this.screenVolume() === 0;
|
||||
})
|
||||
.catch(() => {
|
||||
// If autoplay fails, keep muted to allow play, then apply volume
|
||||
try {
|
||||
el.muted = true;
|
||||
el.volume = 0;
|
||||
el.play().then(() => {
|
||||
el.volume = this.screenVolume() / 100;
|
||||
el.muted = this.screenVolume() === 0;
|
||||
})
|
||||
.catch(() => {});
|
||||
} catch {}
|
||||
});
|
||||
el.play().catch(() => {});
|
||||
|
||||
this.hasStream.set(true);
|
||||
}
|
||||
@@ -266,16 +256,16 @@ export class ScreenShareViewerComponent implements OnDestroy {
|
||||
/** Handle volume slider changes, applying only to remote streams. */
|
||||
onScreenVolumeChange(event: Event): void {
|
||||
const input = event.target as HTMLInputElement;
|
||||
const val = Math.max(0, Math.min(100, parseInt(input.value, 10)));
|
||||
const val = Math.max(0, Math.min(200, parseInt(input.value, 10)));
|
||||
|
||||
this.screenVolume.set(val);
|
||||
|
||||
if (this.videoRef?.nativeElement) {
|
||||
// Volume applies only to remote streams; keep local share muted
|
||||
const isLocal = this.isLocalShare();
|
||||
if (!this.isLocalShare()) {
|
||||
const userId = this.watchingUserId();
|
||||
|
||||
this.videoRef.nativeElement.volume = isLocal ? 0 : val / 100;
|
||||
this.videoRef.nativeElement.muted = isLocal ? true : val === 0;
|
||||
if (userId) {
|
||||
this.voicePlayback.setUserVolume(userId, val);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -0,0 +1,77 @@
|
||||
import { Injectable, signal } from '@angular/core';
|
||||
|
||||
interface ScreenSharePlaybackSettings {
|
||||
muted: boolean;
|
||||
volume: number;
|
||||
}
|
||||
|
||||
const DEFAULT_SETTINGS: ScreenSharePlaybackSettings = {
|
||||
muted: false,
|
||||
volume: 100
|
||||
};
|
||||
|
||||
@Injectable({ providedIn: 'root' })
|
||||
export class ScreenSharePlaybackService {
|
||||
private readonly _settings = signal<ReadonlyMap<string, ScreenSharePlaybackSettings>>(new Map());
|
||||
|
||||
settings(): ReadonlyMap<string, ScreenSharePlaybackSettings> {
|
||||
return this._settings();
|
||||
}
|
||||
|
||||
getUserVolume(peerId: string): number {
|
||||
return this._settings().get(peerId)?.volume ?? DEFAULT_SETTINGS.volume;
|
||||
}
|
||||
|
||||
isUserMuted(peerId: string): boolean {
|
||||
return this._settings().get(peerId)?.muted ?? DEFAULT_SETTINGS.muted;
|
||||
}
|
||||
|
||||
setUserVolume(peerId: string, volume: number): void {
|
||||
const nextVolume = Math.max(0, Math.min(100, volume));
|
||||
const current = this._settings().get(peerId) ?? DEFAULT_SETTINGS;
|
||||
|
||||
this._settings.update((settings) => {
|
||||
const next = new Map(settings);
|
||||
|
||||
next.set(peerId, {
|
||||
...current,
|
||||
muted: nextVolume === 0 ? current.muted : false,
|
||||
volume: nextVolume
|
||||
});
|
||||
|
||||
return next;
|
||||
});
|
||||
}
|
||||
|
||||
setUserMuted(peerId: string, muted: boolean): void {
|
||||
const current = this._settings().get(peerId) ?? DEFAULT_SETTINGS;
|
||||
|
||||
this._settings.update((settings) => {
|
||||
const next = new Map(settings);
|
||||
|
||||
next.set(peerId, {
|
||||
...current,
|
||||
muted
|
||||
});
|
||||
|
||||
return next;
|
||||
});
|
||||
}
|
||||
|
||||
resetUser(peerId: string): void {
|
||||
this._settings.update((settings) => {
|
||||
if (!settings.has(peerId)) {
|
||||
return settings;
|
||||
}
|
||||
|
||||
const next = new Map(settings);
|
||||
|
||||
next.delete(peerId);
|
||||
return next;
|
||||
});
|
||||
}
|
||||
|
||||
teardownAll(): void {
|
||||
// Screen-share audio is played directly by the video element.
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,220 @@
|
||||
<div
|
||||
#tileRoot
|
||||
class="group relative flex h-full min-h-0 flex-col overflow-hidden bg-black/85 transition duration-200"
|
||||
tabindex="0"
|
||||
role="button"
|
||||
[attr.aria-label]="mini() ? 'Focus ' + displayName() + ' stream' : 'Open ' + displayName() + ' stream in widescreen mode'"
|
||||
[attr.title]="canToggleFullscreen() ? (isFullscreen() ? 'Double-click to exit fullscreen' : 'Double-click for fullscreen') : null"
|
||||
[ngClass]="{
|
||||
'ring-2 ring-primary/70': focused() && !immersive() && !mini() && !isFullscreen(),
|
||||
'min-h-[24rem] rounded-[1.75rem] border border-white/10 shadow-2xl': featured() && !compact() && !immersive() && !mini() && !isFullscreen(),
|
||||
'rounded-[1.75rem] border border-white/10 shadow-2xl': !featured() && !compact() && !immersive() && !mini() && !isFullscreen(),
|
||||
'rounded-2xl border border-white/10 shadow-2xl': compact() && !immersive() && !mini() && !isFullscreen(),
|
||||
'rounded-2xl border border-white/10 shadow-xl': mini() && !isFullscreen(),
|
||||
'shadow-none': immersive() || isFullscreen()
|
||||
}"
|
||||
(click)="requestFocus()"
|
||||
(dblclick)="onTileDoubleClick($event)"
|
||||
(mousemove)="onTilePointerMove()"
|
||||
(keydown.enter)="requestFocus()"
|
||||
(keydown.space)="requestFocus(); $event.preventDefault()"
|
||||
>
|
||||
<video
|
||||
#streamVideo
|
||||
autoplay
|
||||
playsinline
|
||||
class="absolute inset-0 h-full w-full bg-black object-contain"
|
||||
></video>
|
||||
|
||||
<div class="pointer-events-none absolute inset-0 bg-gradient-to-b from-black/70 via-black/10 to-black/80"></div>
|
||||
|
||||
@if (isFullscreen()) {
|
||||
<div
|
||||
class="pointer-events-none absolute inset-x-3 top-3 z-20 transition-all duration-300 sm:inset-x-4 sm:top-4"
|
||||
[class.opacity-0]="!showFullscreenHeader()"
|
||||
[class.translate-y-[-12px]]="!showFullscreenHeader()"
|
||||
>
|
||||
<div
|
||||
class="pointer-events-auto flex items-center gap-3 rounded-2xl border border-white/10 bg-black/45 px-4 py-3 backdrop-blur-lg"
|
||||
[class.pointer-events-none]="!showFullscreenHeader()"
|
||||
>
|
||||
<div class="flex min-w-0 flex-1 items-center gap-3">
|
||||
<app-user-avatar
|
||||
[name]="displayName()"
|
||||
[avatarUrl]="item().user.avatarUrl"
|
||||
size="xs"
|
||||
/>
|
||||
|
||||
<div class="min-w-0 flex-1">
|
||||
<div class="flex flex-wrap items-center gap-2">
|
||||
<p class="truncate text-sm font-semibold text-white sm:text-base">{{ displayName() }}</p>
|
||||
<span class="rounded-full bg-primary/10 px-2 py-0.5 text-[10px] font-semibold uppercase tracking-[0.18em] text-primary"> Live </span>
|
||||
</div>
|
||||
|
||||
<p class="mt-1 text-xs text-white/60">
|
||||
{{ item().isLocal ? 'Local preview in fullscreen' : 'Fullscreen stream view' }}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@if (!item().isLocal) {
|
||||
<button
|
||||
type="button"
|
||||
class="inline-flex h-9 w-9 items-center justify-center rounded-full border border-white/10 bg-black/45 text-white/75 transition hover:bg-black/60 hover:text-white"
|
||||
[title]="muted() ? 'Unmute stream audio' : 'Mute stream audio'"
|
||||
(click)="toggleMuted(); $event.stopPropagation()"
|
||||
>
|
||||
<ng-icon
|
||||
[name]="muted() ? 'lucideVolumeX' : 'lucideVolume2'"
|
||||
class="h-4 w-4"
|
||||
/>
|
||||
</button>
|
||||
}
|
||||
|
||||
<button
|
||||
type="button"
|
||||
class="inline-flex h-9 w-9 items-center justify-center rounded-full border border-white/10 bg-black/45 text-white/75 transition hover:bg-black/60 hover:text-white"
|
||||
title="Exit fullscreen"
|
||||
(click)="exitFullscreen($event)"
|
||||
>
|
||||
<ng-icon
|
||||
name="lucideMinimize"
|
||||
class="h-4 w-4"
|
||||
/>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
|
||||
@if (mini()) {
|
||||
<div class="absolute inset-x-0 bottom-0 p-2">
|
||||
<div class="rounded-xl border border-white/10 bg-black/55 px-2.5 py-2 backdrop-blur-md">
|
||||
<div class="flex items-center gap-2">
|
||||
<app-user-avatar
|
||||
[name]="displayName()"
|
||||
[avatarUrl]="item().user.avatarUrl"
|
||||
size="xs"
|
||||
/>
|
||||
<div class="min-w-0 flex-1">
|
||||
<p class="truncate text-xs font-semibold text-white">{{ displayName() }}</p>
|
||||
<p class="text-[10px] uppercase tracking-[0.16em] text-white/60">Live stream</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
} @else if (!immersive()) {
|
||||
<div
|
||||
class="absolute left-4 top-4 flex items-center gap-3 bg-black/50 backdrop-blur-md"
|
||||
[ngClass]="compact() ? 'max-w-[calc(100%-5rem)] rounded-xl px-2.5 py-2' : 'max-w-[calc(100%-8rem)] rounded-full px-3 py-2'"
|
||||
>
|
||||
<app-user-avatar
|
||||
[name]="displayName()"
|
||||
[avatarUrl]="item().user.avatarUrl"
|
||||
size="xs"
|
||||
/>
|
||||
<div class="min-w-0">
|
||||
<p
|
||||
class="truncate font-semibold text-white"
|
||||
[class.text-xs]="compact()"
|
||||
[class.text-sm]="!compact()"
|
||||
>
|
||||
{{ displayName() }}
|
||||
</p>
|
||||
<p
|
||||
class="flex items-center gap-1 uppercase text-white/65"
|
||||
[class.text-[10px]]="compact()"
|
||||
[class.text-[11px]]="!compact()"
|
||||
[class.tracking-[0.18em]]="compact()"
|
||||
[class.tracking-[0.24em]]="!compact()"
|
||||
>
|
||||
<ng-icon
|
||||
name="lucideMonitor"
|
||||
class="h-3 w-3"
|
||||
/>
|
||||
Live
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="absolute right-4 top-4 flex items-center gap-2 opacity-100 transition md:opacity-0 md:group-hover:opacity-100">
|
||||
<button
|
||||
type="button"
|
||||
class="inline-flex items-center justify-center rounded-full border border-white/15 bg-black/45 text-white/90 backdrop-blur-md transition hover:bg-black/65"
|
||||
[class.h-8]="compact()"
|
||||
[class.w-8]="compact()"
|
||||
[class.h-10]="!compact()"
|
||||
[class.w-10]="!compact()"
|
||||
[title]="focused() ? 'Viewing in widescreen' : 'View in widescreen'"
|
||||
(click)="requestFocus(); $event.stopPropagation()"
|
||||
>
|
||||
<ng-icon
|
||||
name="lucideMaximize"
|
||||
[class.h-3.5]="compact()"
|
||||
[class.w-3.5]="compact()"
|
||||
[class.h-4]="!compact()"
|
||||
[class.w-4]="!compact()"
|
||||
/>
|
||||
</button>
|
||||
|
||||
@if (!item().isLocal) {
|
||||
<button
|
||||
type="button"
|
||||
class="inline-flex items-center justify-center rounded-full border border-white/15 bg-black/45 text-white/90 backdrop-blur-md transition hover:bg-black/65"
|
||||
[class.h-8]="compact()"
|
||||
[class.w-8]="compact()"
|
||||
[class.h-10]="!compact()"
|
||||
[class.w-10]="!compact()"
|
||||
[title]="muted() ? 'Unmute stream audio' : 'Mute stream audio'"
|
||||
(click)="toggleMuted(); $event.stopPropagation()"
|
||||
>
|
||||
<ng-icon
|
||||
[name]="muted() ? 'lucideVolumeX' : 'lucideVolume2'"
|
||||
[class.h-3.5]="compact()"
|
||||
[class.w-3.5]="compact()"
|
||||
[class.h-4]="!compact()"
|
||||
[class.w-4]="!compact()"
|
||||
/>
|
||||
</button>
|
||||
}
|
||||
</div>
|
||||
|
||||
<div class="absolute inset-x-0 bottom-0 p-4">
|
||||
@if (item().isLocal) {
|
||||
@if (!compact()) {
|
||||
<div class="rounded-2xl bg-black/50 px-4 py-3 text-xs text-white/75 backdrop-blur-md">
|
||||
Your preview stays muted locally to avoid audio feedback.
|
||||
</div>
|
||||
}
|
||||
} @else {
|
||||
@if (compact()) {
|
||||
<div class="rounded-xl bg-black/50 px-3 py-2 text-[11px] text-white/80 backdrop-blur-md">
|
||||
{{ muted() ? 'Muted' : volume() + '% audio' }}
|
||||
</div>
|
||||
} @else {
|
||||
<div class="rounded-2xl bg-black/50 px-4 py-3 backdrop-blur-md">
|
||||
<div class="mb-2 flex items-center justify-between text-xs text-white/80">
|
||||
<span class="flex items-center gap-2 font-medium">
|
||||
<ng-icon
|
||||
[name]="muted() ? 'lucideVolumeX' : 'lucideVolume2'"
|
||||
class="h-3.5 w-3.5"
|
||||
/>
|
||||
Stream audio
|
||||
</span>
|
||||
<span>{{ muted() ? 'Muted' : volume() + '%' }}</span>
|
||||
</div>
|
||||
|
||||
<input
|
||||
type="range"
|
||||
min="0"
|
||||
max="100"
|
||||
[value]="volume()"
|
||||
class="w-full accent-primary"
|
||||
(click)="$event.stopPropagation()"
|
||||
(input)="updateVolume($event)"
|
||||
/>
|
||||
</div>
|
||||
}
|
||||
}
|
||||
</div>
|
||||
}
|
||||
</div>
|
||||
@@ -0,0 +1,263 @@
|
||||
/* eslint-disable @typescript-eslint/member-ordering */
import { CommonModule } from '@angular/common';
import {
  Component,
  ElementRef,
  effect,
  HostListener,
  inject,
  input,
  OnDestroy,
  output,
  signal,
  viewChild
} from '@angular/core';
import { NgIcon, provideIcons } from '@ng-icons/core';
import {
  lucideMaximize,
  lucideMinimize,
  lucideMonitor,
  lucideVolume2,
  lucideVolumeX
} from '@ng-icons/lucide';

import { UserAvatarComponent } from '../../../shared';
import { ScreenSharePlaybackService } from './screen-share-playback.service';
import { ScreenShareWorkspaceStreamItem } from './screen-share-workspace.models';

/**
 * A single screen-share stream tile: renders the stream's video element,
 * applies per-peer volume/mute from ScreenSharePlaybackService, and manages
 * an optional fullscreen mode with an auto-hiding header overlay.
 *
 * Local previews are always kept muted at volume 0 to avoid audio feedback.
 */
@Component({
  selector: 'app-screen-share-stream-tile',
  standalone: true,
  imports: [
    CommonModule,
    NgIcon,
    UserAvatarComponent
  ],
  viewProviders: [
    provideIcons({
      lucideMaximize,
      lucideMinimize,
      lucideMonitor,
      lucideVolume2,
      lucideVolumeX
    })
  ],
  templateUrl: './screen-share-stream-tile.component.html',
  host: {
    class: 'block h-full'
  }
})
export class ScreenShareStreamTileComponent implements OnDestroy {
  private readonly screenSharePlayback = inject(ScreenSharePlaybackService);
  // Pending timer for hiding the fullscreen header overlay; null when inactive.
  private fullscreenHeaderHideTimeoutId: ReturnType<typeof setTimeout> | null = null;

  // Stream item to render (stream, user, isLocal, peerKey).
  readonly item = input.required<ScreenShareWorkspaceStreamItem>();
  // Layout flags controlled by the parent workspace.
  readonly focused = input(false);
  readonly featured = input(false);
  readonly compact = input(false);
  readonly mini = input(false);
  readonly immersive = input(false);
  // Emits the item's peerKey when the user asks to focus this tile.
  readonly focusRequested = output<string>();
  readonly tileRef = viewChild<ElementRef<HTMLElement>>('tileRoot');
  readonly videoRef = viewChild<ElementRef<HTMLVideoElement>>('streamVideo');

  readonly isFullscreen = signal(false);
  readonly showFullscreenHeader = signal(true);
  // Local mirror of this peer's playback settings (0-100 / muted).
  readonly volume = signal(100);
  readonly muted = signal(false);

  constructor() {
    // Attach the stream to the <video> element whenever the item or the
    // view query changes; re-setting srcObject only when it actually differs.
    effect(() => {
      const ref = this.videoRef();
      const item = this.item();

      if (!ref) {
        return;
      }

      const video = ref.nativeElement;

      if (video.srcObject !== item.stream) {
        video.srcObject = item.stream;
      }

      // Autoplay may be rejected by the browser; failure is non-fatal.
      void video.play().catch(() => {});
    });

    // Mirror the playback service's per-peer settings into local signals.
    // Reading settings() first registers the dependency on the service state.
    effect(
      () => {
        this.screenSharePlayback.settings();

        const item = this.item();

        if (item.isLocal) {
          // Local preview: force silent regardless of stored settings.
          this.volume.set(0);
          this.muted.set(false);
          return;
        }

        this.volume.set(this.screenSharePlayback.getUserVolume(item.peerKey));
        this.muted.set(this.screenSharePlayback.isUserMuted(item.peerKey));
      },
      // This effect writes to volume/muted signals, which requires opt-in.
      { allowSignalWrites: true }
    );

    // Apply the mirrored volume/mute state to the actual video element.
    effect(() => {
      const ref = this.videoRef();
      const item = this.item();
      const muted = this.muted();
      const volume = this.volume();

      if (!ref) {
        return;
      }

      const video = ref.nativeElement;

      if (item.isLocal) {
        // Keep the local preview hard-muted to avoid audio feedback.
        video.muted = true;
        video.volume = 0;
        return;
      }

      video.muted = muted;
      // HTMLMediaElement.volume expects 0..1; clamp defensively.
      video.volume = Math.max(0, Math.min(1, volume / 100));
      void video.play().catch(() => {});
    });

  }

  /**
   * Track whether THIS tile is the fullscreen element. Listening on the
   * document catches both our own toggles and user-initiated exits (Esc).
   */
  @HostListener('document:fullscreenchange')
  onFullscreenChange(): void {
    const tile = this.tileRef()?.nativeElement;
    const isFullscreen = !!tile && document.fullscreenElement === tile;

    this.isFullscreen.set(isFullscreen);

    if (isFullscreen) {
      // Show the header briefly on entering fullscreen, then auto-hide.
      this.revealFullscreenHeader();
      return;
    }

    this.clearFullscreenHeaderHideTimeout();
    this.showFullscreenHeader.set(true);
  }

  ngOnDestroy(): void {
    this.clearFullscreenHeaderHideTimeout();

    const tile = this.tileRef()?.nativeElement;

    // If the tile is destroyed while fullscreen, leave fullscreen cleanly.
    if (tile && document.fullscreenElement === tile) {
      void document.exitFullscreen().catch(() => {});
    }
  }

  /** Fullscreen is only offered for full-size tiles that are focused/immersive. */
  canToggleFullscreen(): boolean {
    return !this.mini() && !this.compact() && (this.immersive() || this.focused());
  }

  /** Mouse movement over the tile re-reveals the auto-hiding fullscreen header. */
  onTilePointerMove(): void {
    if (!this.isFullscreen()) {
      return;
    }

    this.revealFullscreenHeader();
  }

  /** Double-click toggles native fullscreen on the tile (when allowed). */
  async onTileDoubleClick(event: MouseEvent): Promise<void> {
    event.preventDefault();
    event.stopPropagation();

    if (!this.canToggleFullscreen()) {
      return;
    }

    const tile = this.tileRef()?.nativeElement;

    // requestFullscreen may be unavailable (unsupported browser/iframe policy).
    if (!tile || !tile.requestFullscreen) {
      return;
    }

    if (document.fullscreenElement === tile) {
      await document.exitFullscreen().catch(() => {});
      return;
    }

    await tile.requestFullscreen().catch(() => {});
  }

  /** Exit fullscreen via the header button; ignores clicks when not fullscreen. */
  async exitFullscreen(event?: Event): Promise<void> {
    event?.preventDefault();
    event?.stopPropagation();

    if (!this.isFullscreen()) {
      return;
    }

    await document.exitFullscreen().catch(() => {});
  }

  /** Ask the parent workspace to focus this tile's stream. */
  requestFocus(): void {
    this.focusRequested.emit(this.item().peerKey);
  }

  /** Toggle the peer's mute flag; the local preview is never unmutable. */
  toggleMuted(): void {
    const item = this.item();

    if (item.isLocal) {
      return;
    }

    const nextMuted = !this.muted();

    this.muted.set(nextMuted);
    this.screenSharePlayback.setUserMuted(item.peerKey, nextMuted);
  }

  /**
   * Handle the volume slider: clamp to 0-100, persist to the playback
   * service, and unmute when the user raises the volume above zero.
   */
  updateVolume(event: Event): void {
    const item = this.item();

    if (item.isLocal) {
      return;
    }

    const input = event.target as HTMLInputElement;
    // `|| 0` guards against parseInt returning NaN on an empty value.
    const nextVolume = Math.max(0, Math.min(100, parseInt(input.value, 10) || 0));

    this.volume.set(nextVolume);
    this.screenSharePlayback.setUserVolume(item.peerKey, nextVolume);

    if (nextVolume > 0 && this.muted()) {
      this.muted.set(false);
      this.screenSharePlayback.setUserMuted(item.peerKey, false);
    }
  }

  /** Name shown on the tile; the local preview is labelled "You". */
  displayName(): string {
    return this.item().isLocal ? 'You' : this.item().user.displayName;
  }

  /** Restart the countdown that hides the fullscreen header overlay. */
  private scheduleFullscreenHeaderHide(): void {
    this.clearFullscreenHeaderHideTimeout();

    this.fullscreenHeaderHideTimeoutId = setTimeout(() => {
      this.showFullscreenHeader.set(false);
      this.fullscreenHeaderHideTimeoutId = null;
    }, 2200);
  }

  /** Show the fullscreen header and arm its auto-hide timer. */
  private revealFullscreenHeader(): void {
    this.showFullscreenHeader.set(true);
    this.scheduleFullscreenHeaderHide();
  }

  /** Cancel a pending header-hide timer, if any. */
  private clearFullscreenHeaderHideTimeout(): void {
    if (this.fullscreenHeaderHideTimeoutId === null) {
      return;
    }

    clearTimeout(this.fullscreenHeaderHideTimeoutId);
    this.fullscreenHeaderHideTimeoutId = null;
  }
}
|
||||
@@ -0,0 +1,365 @@
|
||||
<!-- eslint-disable @angular-eslint/template/cyclomatic-complexity -->
|
||||
<div class="absolute inset-0">
|
||||
@if (showExpanded()) {
|
||||
<section
|
||||
class="pointer-events-auto absolute inset-0 bg-background/95 backdrop-blur-xl"
|
||||
(mouseenter)="onWorkspacePointerMove()"
|
||||
(mousemove)="onWorkspacePointerMove()"
|
||||
>
|
||||
<div class="flex h-full min-h-0 flex-col">
|
||||
<div class="relative flex-1 min-h-0 overflow-hidden">
|
||||
<div
|
||||
class="pointer-events-none absolute inset-x-3 top-3 z-10 transition-all duration-300 sm:inset-x-4 sm:top-4"
|
||||
[class.opacity-0]="!showWorkspaceHeader()"
|
||||
[class.translate-y-[-12px]]="!showWorkspaceHeader()"
|
||||
>
|
||||
<div
|
||||
class="pointer-events-auto flex flex-wrap items-center gap-3 rounded-2xl border border-white/10 bg-black/45 px-4 py-3 backdrop-blur-lg"
|
||||
[class.pointer-events-none]="!showWorkspaceHeader()"
|
||||
>
|
||||
<div class="flex min-w-0 flex-1 items-center gap-3">
|
||||
<div class="flex h-10 w-10 items-center justify-center rounded-2xl bg-primary/10 text-primary">
|
||||
<ng-icon
|
||||
name="lucideMonitor"
|
||||
class="h-5 w-5"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<div class="min-w-0 flex-1">
|
||||
<div class="flex flex-wrap items-center gap-2">
|
||||
<h2 class="truncate text-sm font-semibold text-white sm:text-base">{{ connectedVoiceChannelName() }}</h2>
|
||||
<span class="rounded-full bg-primary/10 px-2 py-0.5 text-[10px] font-semibold uppercase tracking-[0.18em] text-primary">
|
||||
Streams
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<div class="mt-1 flex flex-wrap items-center gap-2 text-xs text-white/65">
|
||||
<span>{{ serverName() }}</span>
|
||||
<span class="h-1 w-1 rounded-full bg-white/25"></span>
|
||||
<span>{{ connectedVoiceUsers().length }} in voice</span>
|
||||
<span class="h-1 w-1 rounded-full bg-white/25"></span>
|
||||
<span>{{ liveShareCount() }} live {{ liveShareCount() === 1 ? 'stream' : 'streams' }}</span>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@if (connectedVoiceUsers().length > 0) {
|
||||
<div class="hidden items-center gap-2 lg:flex">
|
||||
@for (participant of connectedVoiceUsers().slice(0, 4); track trackUser($index, participant)) {
|
||||
<app-user-avatar
|
||||
[name]="participant.displayName"
|
||||
[avatarUrl]="participant.avatarUrl"
|
||||
size="xs"
|
||||
[ringClass]="'ring-2 ring-white/10'"
|
||||
/>
|
||||
}
|
||||
|
||||
@if (connectedVoiceUsers().length > 4) {
|
||||
<div class="rounded-full bg-white/10 px-2.5 py-1 text-[11px] font-medium text-white/70">
|
||||
+{{ connectedVoiceUsers().length - 4 }}
|
||||
</div>
|
||||
}
|
||||
</div>
|
||||
}
|
||||
|
||||
@if (isWidescreenMode() && widescreenShare()) {
|
||||
<div class="flex min-w-0 items-center gap-2 rounded-2xl border border-white/10 bg-black/35 px-2.5 py-2 text-white/85">
|
||||
<app-user-avatar
|
||||
[name]="focusedShareTitle()"
|
||||
[avatarUrl]="widescreenShare()!.user.avatarUrl"
|
||||
size="xs"
|
||||
/>
|
||||
|
||||
<div class="min-w-0">
|
||||
<p class="truncate text-xs font-semibold text-white">{{ focusedShareTitle() }}</p>
|
||||
<p class="text-[10px] uppercase tracking-[0.18em] text-white/55">
|
||||
{{ widescreenShare()!.isLocal ? 'Local preview' : 'Focused stream' }}
|
||||
</p>
|
||||
</div>
|
||||
|
||||
@if (focusedAudioShare()) {
|
||||
<div class="mx-1 hidden h-6 w-px bg-white/10 sm:block"></div>
|
||||
|
||||
<div class="flex items-center gap-2">
|
||||
<button
|
||||
type="button"
|
||||
class="inline-flex h-8 w-8 items-center justify-center rounded-full border border-white/10 bg-black/45 text-white/75 transition hover:bg-black/60 hover:text-white"
|
||||
[title]="focusedShareMuted() ? 'Unmute stream audio' : 'Mute stream audio'"
|
||||
(click)="toggleFocusedShareMuted()"
|
||||
>
|
||||
<ng-icon
|
||||
[name]="focusedShareMuted() ? 'lucideVolumeX' : 'lucideVolume2'"
|
||||
class="h-3.5 w-3.5"
|
||||
/>
|
||||
</button>
|
||||
|
||||
<input
|
||||
type="range"
|
||||
min="0"
|
||||
max="100"
|
||||
[value]="focusedShareVolume()"
|
||||
class="h-1.5 w-20 accent-primary sm:w-24"
|
||||
(input)="updateFocusedShareVolume($event)"
|
||||
/>
|
||||
|
||||
<span class="w-10 text-right text-[11px] text-white/65">
|
||||
{{ focusedShareMuted() ? 'Muted' : focusedShareVolume() + '%' }}
|
||||
</span>
|
||||
</div>
|
||||
}
|
||||
</div>
|
||||
}
|
||||
|
||||
<div class="ml-auto flex items-center gap-2">
|
||||
@if (isWidescreenMode() && hasMultipleShares()) {
|
||||
<button
|
||||
type="button"
|
||||
class="inline-flex items-center gap-2 rounded-full border border-white/10 bg-black/35 px-3 py-2 text-xs font-medium text-white/80 transition hover:bg-black/55 hover:text-white"
|
||||
title="Show all streams"
|
||||
(click)="showAllStreams()"
|
||||
>
|
||||
<ng-icon
|
||||
name="lucideUsers"
|
||||
class="h-3.5 w-3.5"
|
||||
/>
|
||||
All streams
|
||||
</button>
|
||||
}
|
||||
|
||||
<button
|
||||
type="button"
|
||||
class="inline-flex h-10 w-10 items-center justify-center rounded-full border border-white/10 bg-black/35 text-white/70 transition hover:bg-black/55 hover:text-white"
|
||||
title="Minimize stream workspace"
|
||||
(click)="minimizeWorkspace()"
|
||||
>
|
||||
<ng-icon
|
||||
name="lucideMinimize"
|
||||
class="h-4 w-4"
|
||||
/>
|
||||
</button>
|
||||
|
||||
<button
|
||||
type="button"
|
||||
class="inline-flex h-10 w-10 items-center justify-center rounded-full border border-white/10 bg-black/35 text-white/70 transition hover:bg-black/55 hover:text-white"
|
||||
title="Return to chat"
|
||||
(click)="closeWorkspace()"
|
||||
>
|
||||
<ng-icon
|
||||
name="lucideX"
|
||||
class="h-4 w-4"
|
||||
/>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@if (isWidescreenMode() && thumbnailShares().length > 0) {
|
||||
<div
|
||||
class="pointer-events-none absolute inset-x-3 bottom-3 z-10 transition-all duration-300 sm:inset-x-4 sm:bottom-4"
|
||||
[class.opacity-0]="!showWorkspaceHeader()"
|
||||
[class.translate-y-[12px]]="!showWorkspaceHeader()"
|
||||
>
|
||||
<div
|
||||
class="pointer-events-auto rounded-2xl border border-white/10 bg-black/45 p-2.5 backdrop-blur-lg"
|
||||
[class.pointer-events-none]="!showWorkspaceHeader()"
|
||||
>
|
||||
<div class="mb-2 flex items-center justify-between px-1">
|
||||
<span class="text-[10px] font-semibold uppercase tracking-[0.18em] text-white/55">Other live streams</span>
|
||||
<span class="text-[10px] text-white/40">{{ thumbnailShares().length }}</span>
|
||||
</div>
|
||||
|
||||
<div class="flex gap-2 overflow-x-auto pb-1">
|
||||
@for (share of thumbnailShares(); track trackShare($index, share)) {
|
||||
<div class="h-[5.25rem] w-[9.5rem] shrink-0 sm:h-[5.5rem] sm:w-[10rem]">
|
||||
<app-screen-share-stream-tile
|
||||
[item]="share"
|
||||
[mini]="true"
|
||||
[focused]="false"
|
||||
(focusRequested)="focusShare($event)"
|
||||
/>
|
||||
</div>
|
||||
}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
|
||||
<div
|
||||
class="h-full min-h-0"
|
||||
[ngClass]="isWidescreenMode() ? 'p-0' : 'p-3 pt-20 sm:p-4 sm:pt-24'"
|
||||
>
|
||||
@if (activeShares().length > 0) {
|
||||
@if (isWidescreenMode() && widescreenShare()) {
|
||||
<div class="h-full min-h-0">
|
||||
<app-screen-share-stream-tile
|
||||
[item]="widescreenShare()!"
|
||||
[featured]="true"
|
||||
[focused]="true"
|
||||
[immersive]="true"
|
||||
(focusRequested)="focusShare($event)"
|
||||
/>
|
||||
</div>
|
||||
} @else {
|
||||
<div
|
||||
class="grid h-full min-h-0 auto-rows-[minmax(15rem,1fr)] grid-cols-1 gap-3 overflow-auto sm:grid-cols-2 sm:gap-4"
|
||||
[ngClass]="{ '2xl:grid-cols-3': activeShares().length > 2 }"
|
||||
>
|
||||
@for (share of activeShares(); track trackShare($index, share)) {
|
||||
<div class="min-h-[15rem]">
|
||||
<app-screen-share-stream-tile
|
||||
[item]="share"
|
||||
[focused]="false"
|
||||
(focusRequested)="focusShare($event)"
|
||||
/>
|
||||
</div>
|
||||
}
|
||||
</div>
|
||||
}
|
||||
} @else {
|
||||
<div class="flex h-full items-center justify-center">
|
||||
<div class="w-full max-w-3xl rounded-[2rem] border border-dashed border-white/10 bg-card/60 p-8 text-center shadow-2xl sm:p-10">
|
||||
<div class="mx-auto mb-5 flex h-16 w-16 items-center justify-center rounded-3xl bg-primary/10 text-primary">
|
||||
<ng-icon
|
||||
name="lucideMonitor"
|
||||
class="h-8 w-8"
|
||||
/>
|
||||
</div>
|
||||
|
||||
<h2 class="text-2xl font-semibold text-foreground">No live screen shares yet</h2>
|
||||
<p class="mx-auto mt-3 max-w-2xl text-sm leading-6 text-muted-foreground">
|
||||
Click Screen Share below to start streaming, or wait for someone in {{ connectedVoiceChannelName() }} to go live.
|
||||
</p>
|
||||
|
||||
@if (connectedVoiceUsers().length > 0) {
|
||||
<div class="mt-6 flex flex-wrap items-center justify-center gap-3">
|
||||
@for (participant of connectedVoiceUsers().slice(0, 4); track trackUser($index, participant)) {
|
||||
<div class="flex items-center gap-2 rounded-full border border-white/10 bg-black/30 px-3 py-2">
|
||||
<app-user-avatar
|
||||
[name]="participant.displayName"
|
||||
[avatarUrl]="participant.avatarUrl"
|
||||
size="xs"
|
||||
/>
|
||||
<span class="text-sm text-foreground">{{ participant.displayName }}</span>
|
||||
</div>
|
||||
}
|
||||
</div>
|
||||
}
|
||||
|
||||
<div class="mt-8 flex flex-wrap items-center justify-center gap-3 text-sm text-muted-foreground">
|
||||
<span class="inline-flex items-center gap-2 rounded-full bg-secondary/70 px-4 py-2">
|
||||
<ng-icon
|
||||
name="lucideUsers"
|
||||
class="h-4 w-4"
|
||||
/>
|
||||
{{ connectedVoiceUsers().length }} participants ready
|
||||
</span>
|
||||
<button
|
||||
type="button"
|
||||
class="inline-flex items-center gap-2 rounded-full bg-primary px-5 py-2.5 font-medium text-primary-foreground transition hover:bg-primary/90"
|
||||
(click)="toggleScreenShare()"
|
||||
>
|
||||
<ng-icon
|
||||
name="lucideMonitor"
|
||||
class="h-4 w-4"
|
||||
/>
|
||||
Start screen sharing
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</section>
|
||||
}
|
||||
|
||||
<!-- Draggable picture-in-picture mini window, shown while the workspace is minimized.
     Position comes from VoiceWorkspaceService (miniPosition signal); double-click anywhere
     on the window restores the full workspace. -->
@if (showMiniWindow()) {
  <div
    class="pointer-events-auto absolute z-20 w-[20rem] select-none overflow-hidden rounded-[1.75rem] border border-white/10 bg-card/95 shadow-2xl backdrop-blur-xl"
    [style.left.px]="miniPosition().left"
    [style.top.px]="miniPosition().top"
    (dblclick)="restoreWorkspace()"
  >
    <!-- Header doubles as the drag handle (mousedown starts the drag; buttons/inputs are excluded
         inside startMiniWindowDrag). -->
    <div
      class="flex cursor-move items-center gap-3 border-b border-white/10 bg-black/25 px-4 py-3"
      (mousedown)="startMiniWindowDrag($event)"
    >
      <div class="flex h-9 w-9 items-center justify-center rounded-2xl bg-primary/10 text-primary">
        <ng-icon
          name="lucideMonitor"
          class="h-4 w-4"
        />
      </div>

      <div class="min-w-0 flex-1">
        <p class="truncate text-sm font-semibold text-foreground">{{ connectedVoiceChannelName() }}</p>
        <p class="truncate text-xs text-muted-foreground">
          {{ liveShareCount() }} live {{ liveShareCount() === 1 ? 'stream' : 'streams' }} · double-click to expand
        </p>
      </div>

      <button
        type="button"
        class="inline-flex h-8 w-8 items-center justify-center rounded-full text-muted-foreground transition hover:bg-black/30 hover:text-foreground"
        title="Expand"
        (click)="restoreWorkspace()"
      >
        <ng-icon
          name="lucideMaximize"
          class="h-4 w-4"
        />
      </button>

      <button
        type="button"
        class="inline-flex h-8 w-8 items-center justify-center rounded-full text-muted-foreground transition hover:bg-black/30 hover:text-foreground"
        title="Close"
        (click)="closeWorkspace()"
      >
        <ng-icon
          name="lucideX"
          class="h-4 w-4"
        />
      </button>
    </div>

    <!-- Preview surface: the #miniPreview video element is wired up imperatively in a
         constructor effect (srcObject assignment, forced mute) rather than via bindings. -->
    <div class="relative aspect-video bg-black">
      @if (miniPreviewShare()) {
        <video
          #miniPreview
          autoplay
          playsinline
          class="h-full w-full bg-black object-cover"
        ></video>
      } @else {
        <div class="flex h-full items-center justify-center text-muted-foreground">
          <div class="text-center">
            <ng-icon
              name="lucideMonitor"
              class="mx-auto h-8 w-8 opacity-50"
            />
            <p class="mt-2 text-sm">Waiting for a live stream</p>
          </div>
        </div>
      }

      <div class="absolute inset-x-0 bottom-0 bg-gradient-to-t from-black/85 via-black/50 to-transparent px-4 py-3 text-white">
        <p class="truncate text-sm font-semibold">
          {{ miniPreviewTitle() }}
        </p>
        <p class="truncate text-xs text-white/75">Connected to {{ serverName() }}</p>
      </div>
    </div>
  </div>
}
|
||||
|
||||
<!-- Pre-share quality dialog; shown by toggleScreenShare() when the user has not opted out
     of being asked. Confirm persists the choice and starts the share. -->
@if (showScreenShareQualityDialog()) {
  <app-screen-share-quality-dialog
    [selectedQuality]="screenShareQuality()"
    [includeSystemAudio]="includeSystemAudio()"
    (cancelled)="onScreenShareQualityCancelled()"
    (confirmed)="onScreenShareQualityConfirmed($event)"
  />
}
|
||||
</div>
|
||||
@@ -0,0 +1,957 @@
|
||||
/* eslint-disable @typescript-eslint/member-ordering, complexity */
|
||||
import { CommonModule } from '@angular/common';
|
||||
import {
|
||||
Component,
|
||||
DestroyRef,
|
||||
ElementRef,
|
||||
HostListener,
|
||||
computed,
|
||||
effect,
|
||||
inject,
|
||||
signal,
|
||||
viewChild
|
||||
} from '@angular/core';
|
||||
import { takeUntilDestroyed } from '@angular/core/rxjs-interop';
|
||||
import { Store } from '@ngrx/store';
|
||||
import { NgIcon, provideIcons } from '@ng-icons/core';
|
||||
import {
|
||||
lucideHeadphones,
|
||||
lucideMaximize,
|
||||
lucideMic,
|
||||
lucideMicOff,
|
||||
lucideMinimize,
|
||||
lucideMonitor,
|
||||
lucideMonitorOff,
|
||||
lucidePhoneOff,
|
||||
lucideUsers,
|
||||
lucideVolume2,
|
||||
lucideVolumeX,
|
||||
lucideX
|
||||
} from '@ng-icons/lucide';
|
||||
|
||||
import { User } from '../../../core/models';
|
||||
import { VoiceSessionService } from '../../../core/services/voice-session.service';
|
||||
import { loadVoiceSettingsFromStorage, saveVoiceSettingsToStorage } from '../../../core/services/voice-settings.storage';
|
||||
import { VoiceWorkspacePosition, VoiceWorkspaceService } from '../../../core/services/voice-workspace.service';
|
||||
import { ScreenShareQuality, ScreenShareStartOptions } from '../../../core/services/webrtc';
|
||||
import { WebRTCService } from '../../../core/services/webrtc.service';
|
||||
import { selectCurrentRoom } from '../../../store/rooms/rooms.selectors';
|
||||
import { UsersActions } from '../../../store/users/users.actions';
|
||||
import { selectCurrentUser, selectOnlineUsers } from '../../../store/users/users.selectors';
|
||||
import { ScreenShareQualityDialogComponent, UserAvatarComponent } from '../../../shared';
|
||||
import { VoicePlaybackService } from '../voice-controls/services/voice-playback.service';
|
||||
import { ScreenSharePlaybackService } from './screen-share-playback.service';
|
||||
import { ScreenShareStreamTileComponent } from './screen-share-stream-tile.component';
|
||||
import { ScreenShareWorkspaceStreamItem } from './screen-share-workspace.models';
|
||||
|
||||
@Component({
|
||||
selector: 'app-screen-share-workspace',
|
||||
standalone: true,
|
||||
imports: [
|
||||
CommonModule,
|
||||
NgIcon,
|
||||
ScreenShareQualityDialogComponent,
|
||||
ScreenShareStreamTileComponent,
|
||||
UserAvatarComponent
|
||||
],
|
||||
viewProviders: [
|
||||
provideIcons({
|
||||
lucideHeadphones,
|
||||
lucideMaximize,
|
||||
lucideMic,
|
||||
lucideMicOff,
|
||||
lucideMinimize,
|
||||
lucideMonitor,
|
||||
lucideMonitorOff,
|
||||
lucidePhoneOff,
|
||||
lucideUsers,
|
||||
lucideVolume2,
|
||||
lucideVolumeX,
|
||||
lucideX
|
||||
})
|
||||
],
|
||||
templateUrl: './screen-share-workspace.component.html',
|
||||
host: {
|
||||
class: 'pointer-events-none absolute inset-0 z-20 block'
|
||||
}
|
||||
})
|
||||
export class ScreenShareWorkspaceComponent {
|
||||
// --- Injected collaborators -------------------------------------------------
private readonly destroyRef = inject(DestroyRef);
// NOTE(review): elementRef is not referenced in this chunk — presumably used by
// getWorkspaceBounds() further down the class; confirm before removing.
private readonly elementRef = inject<ElementRef<HTMLElement>>(ElementRef);
private readonly store = inject(Store);
private readonly webrtc = inject(WebRTCService);
private readonly voicePlayback = inject(VoicePlaybackService);
private readonly screenSharePlayback = inject(ScreenSharePlaybackService);
private readonly voiceSession = inject(VoiceSessionService);
private readonly voiceWorkspace = inject(VoiceWorkspaceService);

// Monotonic invalidation tick: bumped whenever a remote stream arrives or a peer
// disconnects so that computed()s depending on non-signal WebRTC state re-run.
private readonly remoteStreamRevision = signal(0);

// --- Mini-window geometry and drag state (plain fields; not reactive) -------
private readonly miniWindowWidth = 320;
private readonly miniWindowHeight = 228;
private miniWindowDragging = false;
private miniDragOffsetX = 0;   // cursor offset from the window's left edge at drag start
private miniDragOffsetY = 0;   // cursor offset from the window's top edge at drag start
private wasExpanded = false;
private wasAutoHideChrome = false;
private headerHideTimeoutId: ReturnType<typeof setTimeout> | null = null;
// Remote streams we attached track listeners to, keyed by peer key, with a
// cleanup callback to detach those listeners.
private readonly observedRemoteStreams = new Map<string, {
  stream: MediaStream;
  cleanup: () => void;
}>();

// Video element inside the minimized preview window (template ref #miniPreview).
readonly miniPreviewRef = viewChild<ElementRef<HTMLVideoElement>>('miniPreview');

// --- Store / service signals ------------------------------------------------
readonly currentRoom = this.store.selectSignal(selectCurrentRoom);
readonly currentUser = this.store.selectSignal(selectCurrentUser);
readonly onlineUsers = this.store.selectSignal(selectOnlineUsers);
readonly voiceSessionInfo = this.voiceSession.voiceSession;

// Workspace visibility state is owned by VoiceWorkspaceService; these are aliases.
readonly showExpanded = this.voiceWorkspace.isExpanded;
readonly showMiniWindow = this.voiceWorkspace.isMinimized;
readonly shouldConnectRemoteShares = this.voiceWorkspace.shouldConnectRemoteShares;
readonly miniPosition = this.voiceWorkspace.miniWindowPosition;
readonly showWorkspaceHeader = signal(true);

// Thin computed views over WebRTCService state.
readonly isConnected = computed(() => this.webrtc.isVoiceConnected());
readonly isMuted = computed(() => this.webrtc.isMuted());
readonly isDeafened = computed(() => this.webrtc.isDeafened());
readonly isScreenSharing = computed(() => this.webrtc.isScreenSharing());

// Screen-share preferences; refreshed from persisted settings in
// syncScreenShareSettings() before a share starts.
readonly includeSystemAudio = signal(false);
readonly screenShareQuality = signal<ScreenShareQuality>('balanced');
readonly askScreenShareQuality = signal(true);
readonly showScreenShareQualityDialog = signal(false);
||||
// Users currently connected to the same voice channel as the local user.
// Returns [] unless the local user's voice state points at the current room.
// The local user is prepended when missing from the online list (e.g. presence
// update lag) so the UI always shows them as a participant.
readonly connectedVoiceUsers = computed(() => {
  const room = this.currentRoom();
  const me = this.currentUser();
  const roomId = me?.voiceState?.roomId;
  const serverId = me?.voiceState?.serverId;

  if (!room || !roomId || !serverId || serverId !== room.id) {
    return [] as User[];
  }

  const voiceUsers = this.onlineUsers().filter(
    (user) =>
      !!user.voiceState?.isConnected
      && user.voiceState.roomId === roomId
      && user.voiceState.serverId === room.id
  );

  if (!me?.voiceState?.isConnected) {
    return voiceUsers;
  }

  // NOTE(review): `oderId` appears to be the stable peer identifier on User (it is
  // used as the WebRTC peer key throughout this component) — confirm spelling
  // against the model; `id` is the fallback key.
  const currentKeys = new Set(voiceUsers.map((user) => user.oderId || user.id));
  const meKey = me.oderId || me.id;

  if (meKey && !currentKeys.has(meKey)) {
    return [me, ...voiceUsers];
  }

  return voiceUsers;
});
||||
|
||||
// All live screen shares visible in the workspace: the local share (if any)
// plus one entry per remote peer with an active video track. Sorted so remote
// shares come first and the local share last; ties sorted by display name.
readonly activeShares = computed<ScreenShareWorkspaceStreamItem[]>(() => {
  // Read the revision tick so this computed re-runs when remote streams
  // arrive/disappear even though they are not signals themselves.
  this.remoteStreamRevision();

  const room = this.currentRoom();
  const me = this.currentUser();
  const connectedRoomId = me?.voiceState?.roomId;
  const connectedServerId = me?.voiceState?.serverId;

  if (!room || !me || !connectedRoomId || connectedServerId !== room.id) {
    return [];
  }

  const shares: ScreenShareWorkspaceStreamItem[] = [];
  const localStream = this.webrtc.screenStream();
  const localPeerKey = this.getUserPeerKey(me);

  if (localStream && localPeerKey) {
    shares.push({
      id: localPeerKey,
      peerKey: localPeerKey,
      user: me,
      stream: localStream,
      isLocal: true
    });
  }

  for (const user of this.onlineUsers()) {
    const peerKey = this.getUserPeerKey(user);

    if (!peerKey || peerKey === localPeerKey) {
      continue;
    }

    // Skip users not in our voice channel.
    if (
      !user.voiceState?.isConnected
      || user.voiceState.roomId !== connectedRoomId
      || user.voiceState.serverId !== room.id
    ) {
      continue;
    }

    // Only an explicit `isSharing: false` excludes the user; an undefined
    // screenShareState still falls through to the stream check below.
    if (user.screenShareState?.isSharing === false) {
      continue;
    }

    const remoteShare = this.getRemoteShareStream(user);

    if (!remoteShare) {
      continue;
    }

    shares.push({
      id: remoteShare.peerKey,
      peerKey: remoteShare.peerKey,
      user,
      stream: remoteShare.stream,
      isLocal: false
    });
  }

  // Remote shares first, local share last; stable within each group by name.
  return shares.sort((shareA, shareB) => {
    if (shareA.isLocal !== shareB.isLocal) {
      return shareA.isLocal ? 1 : -1;
    }

    return shareA.user.displayName.localeCompare(shareB.user.displayName);
  });
});
|
||||
|
||||
// Peer key of the share shown full-screen ("widescreen" mode), or null when the
// grid of all shares should be shown. An explicit focus request wins; with
// exactly one active share it is auto-focused.
readonly widescreenShareId = computed(() => {
  const requested = this.voiceWorkspace.focusedStreamId();
  const activeShares = this.activeShares();

  if (requested && activeShares.some((share) => share.peerKey === requested)) {
    return requested;
  }

  if (activeShares.length === 1) {
    return activeShares[0].peerKey;
  }

  return null;
});

readonly isWidescreenMode = computed(() => this.widescreenShareId() !== null);
// Header/footer chrome auto-hides only when expanded, in widescreen mode, with
// at least one live share on screen.
readonly shouldAutoHideChrome = computed(
  () => this.showExpanded() && this.isWidescreenMode() && this.activeShares().length > 0
);
readonly hasMultipleShares = computed(() => this.activeShares().length > 1);
// Full share item backing the widescreen view (null when not in widescreen mode).
readonly widescreenShare = computed(
  () => this.activeShares().find((share) => share.peerKey === this.widescreenShareId()) ?? null
);
// Volume/mute controls only apply to remote shares; the local share has no
// playback audio to control.
readonly focusedAudioShare = computed(() => {
  const share = this.widescreenShare();

  return share && !share.isLocal ? share : null;
});
readonly focusedShareTitle = computed(() => {
  const share = this.widescreenShare();

  if (!share) {
    return 'Focused stream';
  }

  return share.isLocal ? 'Your stream' : share.user.displayName;
});
// Every active share except the widescreen one, rendered as the thumbnail strip.
readonly thumbnailShares = computed(() => {
  const widescreenShareId = this.widescreenShareId();

  if (!widescreenShareId) {
    return [] as ScreenShareWorkspaceStreamItem[];
  }

  return this.activeShares().filter((share) => share.peerKey !== widescreenShareId);
});
// Share displayed in the minimized preview window: focused one, else first active.
readonly miniPreviewShare = computed(
  () => this.widescreenShare() ?? this.activeShares()[0] ?? null
);
readonly miniPreviewTitle = computed(() => {
  const previewShare = this.miniPreviewShare();

  if (!previewShare) {
    return 'Voice workspace';
  }

  return previewShare.isLocal ? 'Your stream' : previewShare.user.displayName;
});
readonly liveShareCount = computed(() => this.activeShares().length);
// Display name of the connected voice channel, resolved from the room's channel
// list, falling back to the session's room name (with any leading speaker emoji
// stripped) and finally a generic label.
readonly connectedVoiceChannelName = computed(() => {
  const me = this.currentUser();
  const room = this.currentRoom();
  const channelId = me?.voiceState?.roomId ?? this.voiceSessionInfo()?.roomId;
  const channel = room?.channels?.find(
    (candidate) => candidate.id === channelId && candidate.type === 'voice'
  );

  if (channel) {
    return channel.name;
  }

  const sessionRoomName = this.voiceSessionInfo()?.roomName?.replace(/^🔊\s*/, '');

  return sessionRoomName || 'Voice Lounge';
});
readonly serverName = computed(
  () => this.currentRoom()?.name || this.voiceSessionInfo()?.serverName || 'Voice server'
);
|
||||
|
||||
constructor() {
  // Tear down everything we own when the component is destroyed: pending
  // chrome-hide timer, stream listeners, outstanding remote-share requests,
  // and all playback elements.
  this.destroyRef.onDestroy(() => {
    this.clearHeaderHideTimeout();
    this.cleanupObservedRemoteStreams();
    this.webrtc.syncRemoteScreenShareRequests([], false);
    this.screenSharePlayback.teardownAll();
  });

  // New remote stream: start observing its tracks and invalidate computeds.
  this.webrtc.onRemoteStream
    .pipe(takeUntilDestroyed(this.destroyRef))
    .subscribe(({ peerId }) => {
      this.observeRemoteStream(peerId);
      this.bumpRemoteStreamRevision();
    });

  // Peer left: just invalidate; pruning happens in the effect below.
  this.webrtc.onPeerDisconnected
    .pipe(takeUntilDestroyed(this.destroyRef))
    .subscribe(() => this.bumpRemoteStreamRevision());

  // Keep the mini-preview <video> wired to the current preview share.
  // Done imperatively because srcObject has no template binding.
  effect(() => {
    const ref = this.miniPreviewRef();
    const previewShare = this.miniPreviewShare();
    const showMiniWindow = this.showMiniWindow();

    if (!ref) {
      return;
    }

    const video = ref.nativeElement;

    if (!showMiniWindow || !previewShare) {
      video.srcObject = null;
      return;
    }

    if (video.srcObject !== previewShare.stream) {
      video.srcObject = previewShare.stream;
    }

    // The mini preview is always silent; audio is handled by the playback services.
    video.muted = true;
    video.volume = 0;
    // play() may reject (e.g. autoplay policy); intentionally ignored.
    void video.play().catch(() => {});
  });

  // When the mini window appears, position it after layout has settled.
  effect(() => {
    if (!this.showMiniWindow()) {
      return;
    }

    requestAnimationFrame(() => this.ensureMiniWindowPosition());
  });

  // Request (or release) remote screen-share connections for every other
  // connected voice user, depending on workspace visibility.
  effect(() => {
    const shouldConnectRemoteShares = this.shouldConnectRemoteShares();
    const currentUserPeerKey = this.getUserPeerKey(this.currentUser());
    const peerKeys = Array.from(new Set(
      this.connectedVoiceUsers()
        .map((user) => this.getUserPeerKey(user))
        .filter((peerKey): peerKey is string => !!peerKey && peerKey !== currentUserPeerKey)
    ));

    this.webrtc.syncRemoteScreenShareRequests(peerKeys, shouldConnectRemoteShares);

    if (!shouldConnectRemoteShares) {
      this.screenSharePlayback.teardownAll();
    }
  });

  // Keep the set of observed remote streams in sync with who is actually in
  // our voice channel; anything not in `peerKeys` gets its listeners pruned.
  effect(() => {
    this.remoteStreamRevision();

    const room = this.currentRoom();
    const currentUser = this.currentUser();
    const connectedRoomId = currentUser?.voiceState?.roomId;
    const connectedServerId = currentUser?.voiceState?.serverId;
    const peerKeys = new Set<string>();

    if (room && connectedRoomId && connectedServerId === room.id) {
      for (const user of this.onlineUsers()) {
        if (
          !user.voiceState?.isConnected
          || user.voiceState.roomId !== connectedRoomId
          || user.voiceState.serverId !== room.id
        ) {
          continue;
        }

        // Observe under both possible peer keys, since either may be used.
        for (const peerKey of [user.oderId, user.id]) {
          if (!peerKey || peerKey === this.getUserPeerKey(currentUser)) {
            continue;
          }

          peerKeys.add(peerKey);
          this.observeRemoteStream(peerKey);
        }
      }
    }

    this.pruneObservedRemoteStreams(peerKeys);
  });

  // Chrome auto-hide state machine. wasExpanded/wasAutoHideChrome track the
  // previous tick so the reveal-then-hide cycle only restarts on transitions.
  // allowSignalWrites is required because this effect writes showWorkspaceHeader.
  effect(
    () => {
      const isExpanded = this.showExpanded();
      const shouldAutoHideChrome = this.shouldAutoHideChrome();

      if (!isExpanded) {
        this.clearHeaderHideTimeout();
        this.showWorkspaceHeader.set(true);
        this.wasExpanded = false;
        this.wasAutoHideChrome = false;
        return;
      }

      if (!shouldAutoHideChrome) {
        this.clearHeaderHideTimeout();
        this.showWorkspaceHeader.set(true);
        this.wasExpanded = true;
        this.wasAutoHideChrome = false;
        return;
      }

      const shouldRevealChrome = !this.wasExpanded || !this.wasAutoHideChrome;

      this.wasExpanded = true;
      this.wasAutoHideChrome = true;

      if (shouldRevealChrome) {
        this.revealWorkspaceChrome();
      }
    },
    { allowSignalWrites: true }
  );
}
|
||||
|
||||
onWorkspacePointerMove(): void {
|
||||
if (!this.shouldAutoHideChrome()) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.revealWorkspaceChrome();
|
||||
}
|
||||
|
||||
// Global mousemove drives the mini-window drag (listening on window, not the
// element, so the drag survives the cursor leaving the window).
@HostListener('window:mousemove', ['$event'])
onWindowMouseMove(event: MouseEvent): void {
  if (!this.miniWindowDragging) {
    return;
  }

  // Prevent text selection while dragging.
  event.preventDefault();

  const bounds = this.getWorkspaceBounds();
  // Convert viewport coords to workspace-local coords, keeping the grab offset,
  // then clamp so the window stays inside the workspace.
  const nextPosition = this.clampMiniWindowPosition({
    left: event.clientX - bounds.left - this.miniDragOffsetX,
    top: event.clientY - bounds.top - this.miniDragOffsetY
  });

  this.voiceWorkspace.setMiniWindowPosition(nextPosition);
}

// Releasing the mouse anywhere ends the drag.
@HostListener('window:mouseup')
onWindowMouseUp(): void {
  this.miniWindowDragging = false;
}

// Re-clamp the mini window into view whenever the viewport resizes.
@HostListener('window:resize')
onWindowResize(): void {
  if (!this.showMiniWindow()) {
    return;
  }

  this.ensureMiniWindowPosition();
}
|
||||
|
||||
/** trackBy for participant lists: stable peer key, index as a last resort. */
trackUser(index: number, user: User): string {
  const peerKey = this.getUserPeerKey(user);
  return peerKey ? peerKey : `${index}`;
}

/** trackBy for share tiles: the share id, index as a last resort. */
trackShare(index: number, share: ScreenShareWorkspaceStreamItem): string {
  const shareId = share.id;
  return shareId ? shareId : `${index}`;
}
|
||||
|
||||
// Focus one share into widescreen mode (no-op if it is already focused).
focusShare(peerKey: string): void {
  if (this.widescreenShareId() === peerKey) {
    return;
  }

  this.voiceWorkspace.focusStream(peerKey);
}

// Leave widescreen mode and show the grid of all shares.
showAllStreams(): void {
  this.voiceWorkspace.clearFocusedStream();
}

// Collapse to the draggable mini window, making sure it lands on-screen.
minimizeWorkspace(): void {
  this.voiceWorkspace.minimize();
  this.ensureMiniWindowPosition();
}

// Expand from the mini window back to the full workspace.
restoreWorkspace(): void {
  this.voiceWorkspace.restore();
}

// Close the workspace entirely (also drops any stream focus).
closeWorkspace(): void {
  this.voiceWorkspace.clearFocusedStream();
  this.voiceWorkspace.close();
}
|
||||
|
||||
/**
 * Playback volume (0-100) of the focused remote share; defaults to 100 when no
 * remote share is focused (local shares have no playback audio).
 */
focusedShareVolume(): number {
  const audioShare = this.focusedAudioShare();

  return audioShare
    ? this.screenSharePlayback.getUserVolume(audioShare.peerKey)
    : 100;
}

/** Whether the focused remote share's audio is muted; false when none is focused. */
focusedShareMuted(): boolean {
  const audioShare = this.focusedAudioShare();

  return audioShare
    ? this.screenSharePlayback.isUserMuted(audioShare.peerKey)
    : false;
}
|
||||
|
||||
toggleFocusedShareMuted(): void {
|
||||
const share = this.focusedAudioShare();
|
||||
|
||||
if (!share) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.screenSharePlayback.setUserMuted(
|
||||
share.peerKey,
|
||||
!this.screenSharePlayback.isUserMuted(share.peerKey)
|
||||
);
|
||||
}
|
||||
|
||||
updateFocusedShareVolume(event: Event): void {
|
||||
const share = this.focusedAudioShare();
|
||||
|
||||
if (!share) {
|
||||
return;
|
||||
}
|
||||
|
||||
const input = event.target as HTMLInputElement;
|
||||
const nextVolume = Math.max(0, Math.min(100, parseInt(input.value, 10) || 0));
|
||||
|
||||
this.screenSharePlayback.setUserVolume(share.peerKey, nextVolume);
|
||||
|
||||
if (nextVolume > 0 && this.screenSharePlayback.isUserMuted(share.peerKey)) {
|
||||
this.screenSharePlayback.setUserMuted(share.peerKey, false);
|
||||
}
|
||||
}
|
||||
|
||||
// Begin dragging the mini window from its header. Records the grab offset so
// the window does not jump to the cursor; the actual movement happens in
// onWindowMouseMove.
startMiniWindowDrag(event: MouseEvent): void {
  const target = event.target as HTMLElement | null;

  // Clicks on the header's buttons/inputs must keep their normal behavior.
  if (target?.closest('button, input')) {
    return;
  }

  event.preventDefault();

  const bounds = this.getWorkspaceBounds();
  const currentPosition = this.voiceWorkspace.miniWindowPosition();

  this.miniWindowDragging = true;
  this.miniDragOffsetX = event.clientX - bounds.left - currentPosition.left;
  this.miniDragOffsetY = event.clientY - bounds.top - currentPosition.top;
}
|
||||
|
||||
// Toggle the microphone and propagate the new state to WebRTC, the local
// store, and (via broadcast) other peers.
toggleMute(): void {
  const nextMuted = !this.isMuted();

  this.webrtc.toggleMute(nextMuted);
  this.syncVoiceState({
    isConnected: this.isConnected(),
    isMuted: nextMuted,
    isDeafened: this.isDeafened()
  });

  this.broadcastVoiceState(nextMuted, this.isDeafened());
}

// Toggle deafen. Deafening also force-mutes the microphone; undeafening
// deliberately leaves the mute state as-is.
toggleDeafen(): void {
  const nextDeafened = !this.isDeafened();

  let nextMuted = this.isMuted();

  this.webrtc.toggleDeafen(nextDeafened);
  this.voicePlayback.updateDeafened(nextDeafened);

  if (nextDeafened && !nextMuted) {
    nextMuted = true;
    this.webrtc.toggleMute(true);
  }

  this.syncVoiceState({
    isConnected: this.isConnected(),
    isMuted: nextMuted,
    isDeafened: nextDeafened
  });

  this.broadcastVoiceState(nextMuted, nextDeafened);
}
|
||||
|
||||
// Start or stop screen sharing. On start, refreshes persisted preferences and
// either shows the quality dialog (when the user wants to be asked) or starts
// immediately with the stored quality.
async toggleScreenShare(): Promise<void> {
  if (this.isScreenSharing()) {
    this.webrtc.stopScreenShare();
    return;
  }

  this.syncScreenShareSettings();

  if (this.askScreenShareQuality()) {
    this.showScreenShareQualityDialog.set(true);
    return;
  }

  await this.startScreenShareWithOptions(this.screenShareQuality());
}

// Quality dialog dismissed without starting a share.
onScreenShareQualityCancelled(): void {
  this.showScreenShareQualityDialog.set(false);
}

// Quality chosen: persist it, then start the share with that quality.
async onScreenShareQualityConfirmed(quality: ScreenShareQuality): Promise<void> {
  this.showScreenShareQualityDialog.set(false);
  this.screenShareQuality.set(quality);
  saveVoiceSettingsToStorage({ screenShareQuality: quality });
  await this.startScreenShareWithOptions(quality);
}
|
||||
|
||||
disconnect(): void {
|
||||
this.webrtc.stopVoiceHeartbeat();
|
||||
|
||||
this.webrtc.broadcastMessage({
|
||||
type: 'voice-state',
|
||||
oderId: this.currentUser()?.oderId || this.currentUser()?.id,
|
||||
displayName: this.currentUser()?.displayName || 'User',
|
||||
voiceState: {
|
||||
isConnected: false,
|
||||
isMuted: false,
|
||||
isDeafened: false,
|
||||
roomId: undefined,
|
||||
serverId: undefined
|
||||
}
|
||||
});
|
||||
|
||||
if (this.isScreenSharing()) {
|
||||
this.webrtc.stopScreenShare();
|
||||
}
|
||||
|
||||
this.webrtc.disableVoice();
|
||||
this.voicePlayback.teardownAll();
|
||||
this.voicePlayback.updateDeafened(false);
|
||||
|
||||
const user = this.currentUser();
|
||||
|
||||
if (user?.id) {
|
||||
this.store.dispatch(
|
||||
UsersActions.updateVoiceState({
|
||||
userId: user.id,
|
||||
voiceState: {
|
||||
isConnected: false,
|
||||
isMuted: false,
|
||||
isDeafened: false,
|
||||
roomId: undefined,
|
||||
serverId: undefined
|
||||
}
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
this.voiceSession.endSession();
|
||||
this.voiceWorkspace.reset();
|
||||
}
|
||||
|
||||
/**
 * Tailwind class string for a workspace control button.
 * 'danger' always wins; 'primary' accent and active state share the same
 * highlighted look; everything else gets the neutral secondary look.
 */
getControlButtonClass(
  isActive: boolean,
  accent: 'default' | 'primary' | 'danger' = 'default'
): string {
  const base = 'inline-flex min-w-[5.5rem] flex-col items-center gap-2 rounded-2xl px-4 py-3 text-sm font-medium transition-colors';

  switch (accent) {
    case 'danger':
      return `${base} bg-destructive text-destructive-foreground hover:bg-destructive/90`;
    case 'primary':
      return `${base} bg-primary/15 text-primary hover:bg-primary/25`;
    default:
      return isActive
        ? `${base} bg-primary/15 text-primary hover:bg-primary/25`
        : `${base} bg-secondary/80 text-foreground hover:bg-secondary`;
  }
}
|
||||
|
||||
// Invalidate all computeds that read remoteStreamRevision (see activeShares).
private bumpRemoteStreamRevision(): void {
  this.remoteStreamRevision.update((value) => value + 1);
}

// Write the given connection/mute/deafen flags into the store for the current
// user, attaching the room/server identifiers of the active voice session.
private syncVoiceState(voiceState: {
  isConnected: boolean;
  isMuted: boolean;
  isDeafened: boolean;
}): void {
  const user = this.currentUser();
  const identifiers = this.getCurrentVoiceIdentifiers();

  if (!user?.id) {
    return;
  }

  this.store.dispatch(
    UsersActions.updateVoiceState({
      userId: user.id,
      voiceState: {
        ...voiceState,
        roomId: identifiers.roomId,
        serverId: identifiers.serverId
      }
    })
  );
}
|
||||
|
||||
// Broadcast the local user's voice state (mute/deafen plus session identifiers)
// to all connected peers over the data channel.
private broadcastVoiceState(isMuted: boolean, isDeafened: boolean): void {
  const identifiers = this.getCurrentVoiceIdentifiers();

  this.webrtc.broadcastMessage({
    type: 'voice-state',
    oderId: this.currentUser()?.oderId || this.currentUser()?.id,
    displayName: this.currentUser()?.displayName || 'User',
    voiceState: {
      isConnected: this.isConnected(),
      isMuted,
      isDeafened,
      roomId: identifiers.roomId,
      serverId: identifiers.serverId
    }
  });
}

// Resolve the room/server ids of the active voice session, preferring the
// user's own voiceState and falling back to room/session info.
private getCurrentVoiceIdentifiers(): {
  roomId: string | undefined;
  serverId: string | undefined;
} {
  const me = this.currentUser();

  return {
    roomId: me?.voiceState?.roomId ?? this.voiceSessionInfo()?.roomId,
    serverId: me?.voiceState?.serverId ?? this.currentRoom()?.id ?? this.voiceSessionInfo()?.serverId
  };
}
|
||||
|
||||
// Refresh the screen-share preference signals from persisted storage so the
// UI reflects what the user last chose.
private syncScreenShareSettings(): void {
  const settings = loadVoiceSettingsFromStorage();

  this.includeSystemAudio.set(settings.includeSystemAudio);
  this.screenShareQuality.set(settings.screenShareQuality);
  this.askScreenShareQuality.set(settings.askScreenShareQuality);
}

// Start capturing with the chosen quality/audio options, then open the
// workspace (no specific stream focused) on success.
private async startScreenShareWithOptions(quality: ScreenShareQuality): Promise<void> {
  const options: ScreenShareStartOptions = {
    includeSystemAudio: this.includeSystemAudio(),
    quality
  };

  try {
    await this.webrtc.startScreenShare(options);

    this.voiceWorkspace.open(null);
  } catch {
    // Screen-share prompt was dismissed or failed.
  }
}
|
||||
|
||||
/**
 * Returns the key under which a user's WebRTC peer is tracked: oderId first,
 * then id, or null when neither is available.
 */
private getUserPeerKey(user: User | null | undefined): string | null {
  const peerKey = user?.oderId || user?.id;

  return peerKey ? peerKey : null;
}
|
||||
|
||||
/**
 * Looks up the remote screen-share stream for a user, trying both of their
 * possible peer keys (oderId, then id). Only a stream with at least one live
 * video track counts as an active share.
 *
 * @returns The matching peer key and stream, or null when the user is not sharing.
 */
private getRemoteShareStream(user: User): { peerKey: string; stream: MediaStream } | null {
  for (const candidate of [user.oderId, user.id]) {
    if (!candidate) {
      continue;
    }

    const shareStream = this.webrtc.getRemoteScreenShareStream(candidate);

    if (shareStream && this.hasActiveVideo(shareStream)) {
      return { peerKey: candidate, stream: shareStream };
    }
  }

  return null;
}
|
||||
|
||||
/** True when the stream carries at least one video track that is still live. */
private hasActiveVideo(stream: MediaStream): boolean {
  for (const videoTrack of stream.getVideoTracks()) {
    if (videoTrack.readyState === 'live') {
      return true;
    }
  }

  return false;
}
|
||||
|
||||
private ensureMiniWindowPosition(): void {
|
||||
const bounds = this.getWorkspaceBounds();
|
||||
|
||||
if (bounds.width === 0 || bounds.height === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (!this.voiceWorkspace.hasCustomMiniWindowPosition()) {
|
||||
this.voiceWorkspace.setMiniWindowPosition(
|
||||
this.clampMiniWindowPosition({
|
||||
left: bounds.width - this.miniWindowWidth - 20,
|
||||
top: bounds.height - this.miniWindowHeight - 20
|
||||
}),
|
||||
false
|
||||
);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
this.voiceWorkspace.setMiniWindowPosition(
|
||||
this.clampMiniWindowPosition(this.voiceWorkspace.miniWindowPosition()),
|
||||
true
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Clamps a mini-window position so the window keeps at least an 8px margin
 * from every edge of the workspace.
 */
private clampMiniWindowPosition(position: VoiceWorkspacePosition): VoiceWorkspacePosition {
  const bounds = this.getWorkspaceBounds();
  const margin = 8;
  const maxLeft = Math.max(margin, bounds.width - this.miniWindowWidth - margin);
  const maxTop = Math.max(margin, bounds.height - this.miniWindowHeight - margin);
  const left = this.clamp(position.left, margin, maxLeft);
  const top = this.clamp(position.top, margin, maxTop);

  return { left, top };
}
|
||||
|
||||
/** Current bounding rectangle of this component's host element. */
private getWorkspaceBounds(): DOMRect {
  return this.elementRef.nativeElement.getBoundingClientRect();
}
|
||||
|
||||
/**
 * Attaches change listeners to a peer's remote screen-share stream so the UI
 * re-renders when its video tracks start, stop, mute, or unmute.
 *
 * Keeps at most one observer per peer in `observedRemoteStreams`:
 * - If the peer no longer has a stream, any existing observer is torn down.
 * - If the same stream is already observed, nothing changes.
 * - Otherwise the old observer (if any) is cleaned up and a fresh one is
 *   wired to the new stream.
 *
 * Every relevant event funnels into `bumpRemoteStreamRevision()` so the
 * component recomputes its derived stream state.
 */
private observeRemoteStream(peerKey: string): void {
  const stream = this.webrtc.getRemoteScreenShareStream(peerKey);
  const existing = this.observedRemoteStreams.get(peerKey);

  if (!stream) {
    // Peer stopped sharing — drop any observer we still hold for it.
    if (existing) {
      existing.cleanup();
      this.observedRemoteStreams.delete(peerKey);
    }

    return;
  }

  // Same MediaStream instance as before: listeners are already in place.
  if (existing?.stream === stream) {
    return;
  }

  // Stream was replaced — unhook listeners from the old one first.
  existing?.cleanup();

  const onChanged = () => this.bumpRemoteStreamRevision();
  const trackCleanups: (() => void)[] = [];
  // Wires lifecycle listeners onto a single video track and records how to
  // remove them again. Non-video tracks are ignored.
  const bindTrack = (track: MediaStreamTrack) => {
    if (track.kind !== 'video') {
      return;
    }

    const onTrackChanged = () => onChanged();

    track.addEventListener('ended', onTrackChanged);
    track.addEventListener('mute', onTrackChanged);
    track.addEventListener('unmute', onTrackChanged);

    trackCleanups.push(() => {
      track.removeEventListener('ended', onTrackChanged);
      track.removeEventListener('mute', onTrackChanged);
      track.removeEventListener('unmute', onTrackChanged);
    });
  };

  stream.getVideoTracks().forEach((track) => bindTrack(track));

  // Tracks added later (e.g. renegotiation) get the same treatment.
  const onAddTrack = (event: MediaStreamTrackEvent) => {
    bindTrack(event.track);
    onChanged();
  };
  const onRemoveTrack = () => onChanged();

  stream.addEventListener('addtrack', onAddTrack);
  stream.addEventListener('removetrack', onRemoveTrack);

  this.observedRemoteStreams.set(peerKey, {
    stream,
    // Cleanup removes the stream-level listeners and every per-track listener
    // registered via bindTrack (including tracks added after the fact).
    cleanup: () => {
      stream.removeEventListener('addtrack', onAddTrack);
      stream.removeEventListener('removetrack', onRemoveTrack);
      trackCleanups.forEach((cleanup) => cleanup());
    }
  });

  // Force one revision bump so the UI picks up the newly observed stream.
  onChanged();
}
|
||||
|
||||
private pruneObservedRemoteStreams(activePeerKeys: Set<string>): void {
|
||||
for (const [peerKey, observed] of this.observedRemoteStreams.entries()) {
|
||||
if (activePeerKeys.has(peerKey)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
observed.cleanup();
|
||||
this.observedRemoteStreams.delete(peerKey);
|
||||
}
|
||||
}
|
||||
|
||||
private cleanupObservedRemoteStreams(): void {
|
||||
for (const observed of this.observedRemoteStreams.values()) {
|
||||
observed.cleanup();
|
||||
}
|
||||
|
||||
this.observedRemoteStreams.clear();
|
||||
}
|
||||
|
||||
/**
 * (Re)arms the auto-hide timer for the workspace header: after 2.2s without
 * another reveal, the header chrome is hidden and the timer handle cleared.
 */
private scheduleHeaderHide(): void {
  // Restart the countdown — only one pending hide at a time.
  this.clearHeaderHideTimeout();

  this.headerHideTimeoutId = setTimeout(() => {
    this.showWorkspaceHeader.set(false);
    this.headerHideTimeoutId = null;
  }, 2200);
}
|
||||
|
||||
/** Shows the workspace header and restarts its auto-hide countdown. */
private revealWorkspaceChrome(): void {
  this.showWorkspaceHeader.set(true);
  this.scheduleHeaderHide();
}
|
||||
|
||||
private clearHeaderHideTimeout(): void {
|
||||
if (this.headerHideTimeoutId === null) {
|
||||
return;
|
||||
}
|
||||
|
||||
clearTimeout(this.headerHideTimeoutId);
|
||||
this.headerHideTimeoutId = null;
|
||||
}
|
||||
|
||||
/** Restricts value to the inclusive [min, max] range. */
private clamp(value: number, min: number, max: number): number {
  const atLeastMin = Math.max(min, value);

  return Math.min(max, atLeastMin);
}
|
||||
}
|
||||
@@ -0,0 +1,9 @@
|
||||
import { User } from '../../../core/models';
|
||||
|
||||
/**
 * A single screen-share entry shown in the voice workspace: the sharing user,
 * the peer key the stream is registered under, and the media stream itself.
 */
export interface ScreenShareWorkspaceStreamItem {
  id: string;
  peerKey: string;
  user: User;
  stream: MediaStream;
  // True when this entry is the local user's own share rather than a remote peer's.
  isLocal: boolean;
}
|
||||
@@ -20,7 +20,7 @@ export interface PlaybackOptions {
|
||||
interface PeerAudioPipeline {
|
||||
audioElement: HTMLAudioElement;
|
||||
context: AudioContext;
|
||||
sourceNode: MediaStreamAudioSourceNode;
|
||||
sourceNodes: MediaStreamAudioSourceNode[];
|
||||
gainNode: GainNode;
|
||||
}
|
||||
|
||||
@@ -33,6 +33,7 @@ export class VoicePlaybackService {
|
||||
private rawRemoteStreams = new Map<string, MediaStream>();
|
||||
private userVolumes = new Map<string, number>();
|
||||
private userMuted = new Map<string, boolean>();
|
||||
private preferredOutputDeviceId = 'default';
|
||||
private masterVolume = 1;
|
||||
private deafened = false;
|
||||
|
||||
@@ -80,7 +81,7 @@ export class VoicePlaybackService {
|
||||
const peers = this.webrtc.getConnectedPeers();
|
||||
|
||||
for (const peerId of peers) {
|
||||
const stream = this.webrtc.getRemoteStream(peerId);
|
||||
const stream = this.webrtc.getRemoteVoiceStream(peerId);
|
||||
|
||||
if (stream && this.hasAudio(stream)) {
|
||||
const trackedRaw = this.rawRemoteStreams.get(peerId);
|
||||
@@ -125,25 +126,8 @@ export class VoicePlaybackService {
|
||||
}
|
||||
|
||||
applyOutputDevice(deviceId: string): void {
|
||||
if (!deviceId)
|
||||
return;
|
||||
|
||||
this.peerPipelines.forEach((pipeline) => {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const anyAudio = pipeline.audioElement as any;
|
||||
|
||||
if (typeof anyAudio.setSinkId === 'function') {
|
||||
anyAudio.setSinkId(deviceId).catch(() => {});
|
||||
}
|
||||
|
||||
// Also try setting sink on the AudioContext destination (Chromium ≥ 110)
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const anyCtx = pipeline.context as any;
|
||||
|
||||
if (typeof anyCtx.setSinkId === 'function') {
|
||||
anyCtx.setSinkId(deviceId).catch(() => {});
|
||||
}
|
||||
});
|
||||
this.preferredOutputDeviceId = deviceId || 'default';
|
||||
void this.applyEffectiveOutputDeviceToAllPipelines();
|
||||
}
|
||||
|
||||
teardownAll(): void {
|
||||
@@ -165,23 +149,61 @@ export class VoicePlaybackService {
|
||||
private createPipeline(peerId: string, stream: MediaStream): void {
|
||||
// Chromium/Electron needs a muted <audio> element before Web Audio can read the stream.
|
||||
const audioEl = new Audio();
|
||||
const audioTracks = stream.getAudioTracks().filter((track) => track.readyState === 'live');
|
||||
|
||||
audioEl.srcObject = stream;
|
||||
audioEl.muted = true;
|
||||
audioEl.play().catch(() => {});
|
||||
|
||||
const ctx = new AudioContext();
|
||||
const sourceNode = ctx.createMediaStreamSource(stream);
|
||||
const gainNode = ctx.createGain();
|
||||
const sourceNodes = audioTracks.map((track) => ctx.createMediaStreamSource(new MediaStream([track])));
|
||||
|
||||
sourceNode.connect(gainNode);
|
||||
sourceNodes.forEach((sourceNode) => sourceNode.connect(gainNode));
|
||||
gainNode.connect(ctx.destination);
|
||||
|
||||
const pipeline: PeerAudioPipeline = { audioElement: audioEl, context: ctx, sourceNode, gainNode };
|
||||
const pipeline: PeerAudioPipeline = { audioElement: audioEl, context: ctx, sourceNodes, gainNode };
|
||||
|
||||
this.peerPipelines.set(peerId, pipeline);
|
||||
|
||||
this.applyGain(peerId);
|
||||
void this.applyEffectiveOutputDeviceToPipeline(pipeline);
|
||||
}
|
||||
|
||||
private async applyEffectiveOutputDeviceToAllPipelines(): Promise<void> {
|
||||
await Promise.all(Array.from(this.peerPipelines.values(), (pipeline) =>
|
||||
this.applyEffectiveOutputDeviceToPipeline(pipeline)
|
||||
));
|
||||
}
|
||||
|
||||
private async applyEffectiveOutputDeviceToPipeline(pipeline: PeerAudioPipeline): Promise<void> {
|
||||
const deviceId = this.getEffectiveOutputDeviceId();
|
||||
|
||||
if (!deviceId) {
|
||||
return;
|
||||
}
|
||||
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const anyAudio = pipeline.audioElement as any;
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const anyCtx = pipeline.context as any;
|
||||
const tasks: Promise<unknown>[] = [];
|
||||
|
||||
if (typeof anyAudio.setSinkId === 'function') {
|
||||
tasks.push(anyAudio.setSinkId(deviceId).catch(() => undefined));
|
||||
}
|
||||
|
||||
if (typeof anyCtx.setSinkId === 'function') {
|
||||
tasks.push(anyCtx.setSinkId(deviceId).catch(() => undefined));
|
||||
}
|
||||
|
||||
if (tasks.length > 0) {
|
||||
await Promise.all(tasks);
|
||||
}
|
||||
}
|
||||
|
||||
private getEffectiveOutputDeviceId(): string {
|
||||
return this.preferredOutputDeviceId;
|
||||
}
|
||||
|
||||
private removePipeline(peerId: string): void {
|
||||
@@ -192,11 +214,18 @@ export class VoicePlaybackService {
|
||||
|
||||
try {
|
||||
pipeline.gainNode.disconnect();
|
||||
pipeline.sourceNode.disconnect();
|
||||
} catch {
|
||||
// nodes may already be disconnected
|
||||
}
|
||||
|
||||
pipeline.sourceNodes.forEach((sourceNode) => {
|
||||
try {
|
||||
sourceNode.disconnect();
|
||||
} catch {
|
||||
// nodes may already be disconnected
|
||||
}
|
||||
});
|
||||
|
||||
pipeline.audioElement.srcObject = null;
|
||||
pipeline.audioElement.remove();
|
||||
|
||||
|
||||
@@ -119,3 +119,12 @@
|
||||
}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@if (showScreenShareQualityDialog()) {
|
||||
<app-screen-share-quality-dialog
|
||||
[selectedQuality]="screenShareQuality()"
|
||||
[includeSystemAudio]="includeSystemAudio()"
|
||||
(cancelled)="onScreenShareQualityCancelled()"
|
||||
(confirmed)="onScreenShareQualityConfirmed($event)"
|
||||
/>
|
||||
}
|
||||
|
||||
@@ -29,10 +29,12 @@ import { VoiceActivityService } from '../../../core/services/voice-activity.serv
|
||||
import { UsersActions } from '../../../store/users/users.actions';
|
||||
import { selectCurrentUser } from '../../../store/users/users.selectors';
|
||||
import { selectCurrentRoom } from '../../../store/rooms/rooms.selectors';
|
||||
import { STORAGE_KEY_VOICE_SETTINGS } from '../../../core/constants';
|
||||
import { SettingsModalService } from '../../../core/services/settings-modal.service';
|
||||
import { loadVoiceSettingsFromStorage, saveVoiceSettingsToStorage } from '../../../core/services/voice-settings.storage';
|
||||
import { ScreenShareQuality } from '../../../core/services/webrtc';
|
||||
import {
|
||||
DebugConsoleComponent,
|
||||
ScreenShareQualityDialogComponent,
|
||||
UserAvatarComponent
|
||||
} from '../../../shared';
|
||||
import { PlaybackOptions, VoicePlaybackService } from './services/voice-playback.service';
|
||||
@@ -49,6 +51,7 @@ interface AudioDevice {
|
||||
CommonModule,
|
||||
NgIcon,
|
||||
DebugConsoleComponent,
|
||||
ScreenShareQualityDialogComponent,
|
||||
UserAvatarComponent
|
||||
],
|
||||
viewProviders: [
|
||||
@@ -96,6 +99,9 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
|
||||
latencyProfile = signal<'low' | 'balanced' | 'high'>('balanced');
|
||||
includeSystemAudio = signal(false);
|
||||
noiseReduction = signal(true);
|
||||
screenShareQuality = signal<ScreenShareQuality>('balanced');
|
||||
askScreenShareQuality = signal(true);
|
||||
showScreenShareQualityDialog = signal(false);
|
||||
|
||||
private playbackOptions(): PlaybackOptions {
|
||||
return {
|
||||
@@ -116,8 +122,15 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
|
||||
|
||||
// Subscribe to remote streams to play audio from peers
|
||||
this.remoteStreamSubscription = this.webrtcService.onRemoteStream.subscribe(
|
||||
({ peerId, stream }) => {
|
||||
this.voicePlayback.handleRemoteStream(peerId, stream, this.playbackOptions());
|
||||
({ peerId }) => {
|
||||
const voiceStream = this.webrtcService.getRemoteVoiceStream(peerId);
|
||||
|
||||
if (!voiceStream) {
|
||||
this.voicePlayback.removeRemoteAudio(peerId);
|
||||
return;
|
||||
}
|
||||
|
||||
this.voicePlayback.handleRemoteStream(peerId, voiceStream, this.playbackOptions());
|
||||
}
|
||||
);
|
||||
|
||||
@@ -396,13 +409,28 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
|
||||
this.webrtcService.stopScreenShare();
|
||||
this.isScreenSharing.set(false);
|
||||
} else {
|
||||
try {
|
||||
await this.webrtcService.startScreenShare(this.includeSystemAudio());
|
||||
this.isScreenSharing.set(true);
|
||||
} catch (_error) {}
|
||||
this.syncScreenShareSettings();
|
||||
|
||||
if (this.askScreenShareQuality()) {
|
||||
this.showScreenShareQualityDialog.set(true);
|
||||
return;
|
||||
}
|
||||
|
||||
await this.startScreenShareWithOptions(this.screenShareQuality());
|
||||
}
|
||||
}
|
||||
|
||||
onScreenShareQualityCancelled(): void {
|
||||
this.showScreenShareQualityDialog.set(false);
|
||||
}
|
||||
|
||||
async onScreenShareQualityConfirmed(quality: ScreenShareQuality): Promise<void> {
|
||||
this.showScreenShareQualityDialog.set(false);
|
||||
this.screenShareQuality.set(quality);
|
||||
this.saveSettings();
|
||||
await this.startScreenShareWithOptions(quality);
|
||||
}
|
||||
|
||||
toggleSettings(): void {
|
||||
this.settingsModal.open('voice');
|
||||
}
|
||||
@@ -484,64 +512,33 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
|
||||
}
|
||||
|
||||
private loadSettings(): void {
|
||||
try {
|
||||
const raw = localStorage.getItem(STORAGE_KEY_VOICE_SETTINGS);
|
||||
const settings = loadVoiceSettingsFromStorage();
|
||||
|
||||
if (!raw)
|
||||
return;
|
||||
|
||||
const settings = JSON.parse(raw) as {
|
||||
inputDevice?: string;
|
||||
outputDevice?: string;
|
||||
inputVolume?: number;
|
||||
outputVolume?: number;
|
||||
audioBitrate?: number;
|
||||
latencyProfile?: 'low' | 'balanced' | 'high';
|
||||
includeSystemAudio?: boolean;
|
||||
noiseReduction?: boolean;
|
||||
};
|
||||
|
||||
if (settings.inputDevice)
|
||||
this.selectedInputDevice.set(settings.inputDevice);
|
||||
|
||||
if (settings.outputDevice)
|
||||
this.selectedOutputDevice.set(settings.outputDevice);
|
||||
|
||||
if (typeof settings.inputVolume === 'number')
|
||||
this.inputVolume.set(settings.inputVolume);
|
||||
|
||||
if (typeof settings.outputVolume === 'number')
|
||||
this.outputVolume.set(settings.outputVolume);
|
||||
|
||||
if (typeof settings.audioBitrate === 'number')
|
||||
this.audioBitrate.set(settings.audioBitrate);
|
||||
|
||||
if (settings.latencyProfile)
|
||||
this.latencyProfile.set(settings.latencyProfile);
|
||||
|
||||
if (typeof settings.includeSystemAudio === 'boolean')
|
||||
this.includeSystemAudio.set(settings.includeSystemAudio);
|
||||
|
||||
if (typeof settings.noiseReduction === 'boolean')
|
||||
this.noiseReduction.set(settings.noiseReduction);
|
||||
} catch {}
|
||||
this.selectedInputDevice.set(settings.inputDevice);
|
||||
this.selectedOutputDevice.set(settings.outputDevice);
|
||||
this.inputVolume.set(settings.inputVolume);
|
||||
this.outputVolume.set(settings.outputVolume);
|
||||
this.audioBitrate.set(settings.audioBitrate);
|
||||
this.latencyProfile.set(settings.latencyProfile);
|
||||
this.includeSystemAudio.set(settings.includeSystemAudio);
|
||||
this.noiseReduction.set(settings.noiseReduction);
|
||||
this.screenShareQuality.set(settings.screenShareQuality);
|
||||
this.askScreenShareQuality.set(settings.askScreenShareQuality);
|
||||
}
|
||||
|
||||
private saveSettings(): void {
|
||||
try {
|
||||
const voiceSettings = {
|
||||
inputDevice: this.selectedInputDevice(),
|
||||
outputDevice: this.selectedOutputDevice(),
|
||||
inputVolume: this.inputVolume(),
|
||||
outputVolume: this.outputVolume(),
|
||||
audioBitrate: this.audioBitrate(),
|
||||
latencyProfile: this.latencyProfile(),
|
||||
includeSystemAudio: this.includeSystemAudio(),
|
||||
noiseReduction: this.noiseReduction()
|
||||
};
|
||||
|
||||
localStorage.setItem(STORAGE_KEY_VOICE_SETTINGS, JSON.stringify(voiceSettings));
|
||||
} catch {}
|
||||
saveVoiceSettingsToStorage({
|
||||
inputDevice: this.selectedInputDevice(),
|
||||
outputDevice: this.selectedOutputDevice(),
|
||||
inputVolume: this.inputVolume(),
|
||||
outputVolume: this.outputVolume(),
|
||||
audioBitrate: this.audioBitrate(),
|
||||
latencyProfile: this.latencyProfile(),
|
||||
includeSystemAudio: this.includeSystemAudio(),
|
||||
noiseReduction: this.noiseReduction(),
|
||||
screenShareQuality: this.screenShareQuality(),
|
||||
askScreenShareQuality: this.askScreenShareQuality()
|
||||
});
|
||||
}
|
||||
|
||||
private applySettingsToWebRTC(): void {
|
||||
@@ -567,6 +564,25 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
|
||||
this.voicePlayback.applyOutputDevice(deviceId);
|
||||
}
|
||||
|
||||
private syncScreenShareSettings(): void {
|
||||
const settings = loadVoiceSettingsFromStorage();
|
||||
|
||||
this.includeSystemAudio.set(settings.includeSystemAudio);
|
||||
this.screenShareQuality.set(settings.screenShareQuality);
|
||||
this.askScreenShareQuality.set(settings.askScreenShareQuality);
|
||||
}
|
||||
|
||||
private async startScreenShareWithOptions(quality: ScreenShareQuality): Promise<void> {
|
||||
try {
|
||||
await this.webrtcService.startScreenShare({
|
||||
includeSystemAudio: this.includeSystemAudio(),
|
||||
quality
|
||||
});
|
||||
|
||||
this.isScreenSharing.set(true);
|
||||
} catch (_error) {}
|
||||
}
|
||||
|
||||
getMuteButtonClass(): string {
|
||||
const base =
|
||||
'w-10 h-10 inline-flex items-center justify-center rounded-full transition-colors disabled:opacity-50 disabled:cursor-not-allowed';
|
||||
|
||||
@@ -0,0 +1,86 @@
|
||||
<div
|
||||
class="fixed inset-0 z-[110] bg-black/70 backdrop-blur-sm"
|
||||
(click)="cancelled.emit(undefined)"
|
||||
(keydown.enter)="cancelled.emit(undefined)"
|
||||
(keydown.space)="cancelled.emit(undefined)"
|
||||
role="button"
|
||||
tabindex="0"
|
||||
aria-label="Close screen share quality dialog"
|
||||
></div>
|
||||
|
||||
<div class="fixed inset-0 z-[111] flex items-center justify-center p-4 pointer-events-none">
|
||||
<div
|
||||
class="pointer-events-auto w-full max-w-2xl rounded-2xl border border-border bg-card shadow-2xl"
|
||||
(click)="$event.stopPropagation()"
|
||||
(keydown.enter)="$event.stopPropagation()"
|
||||
(keydown.space)="$event.stopPropagation()"
|
||||
role="dialog"
|
||||
aria-modal="true"
|
||||
tabindex="-1"
|
||||
>
|
||||
<div class="border-b border-border p-5">
|
||||
<h3 class="text-lg font-semibold text-foreground">Choose screen share quality</h3>
|
||||
<p class="mt-1 text-sm text-muted-foreground">
|
||||
Pick the profile that best matches what you are sharing. You can change the default later in Voice settings.
|
||||
</p>
|
||||
@if (includeSystemAudio()) {
|
||||
<p class="mt-3 rounded-lg bg-primary/10 px-3 py-2 text-xs text-primary">
|
||||
Computer audio will be shared. MeToYou audio is filtered when supported, and your microphone stays on its normal voice track.
|
||||
</p>
|
||||
}
|
||||
</div>
|
||||
|
||||
<div class="grid gap-3 p-5 md:grid-cols-2">
|
||||
@for (option of qualityOptions; track option.id) {
|
||||
<button
|
||||
type="button"
|
||||
(click)="chooseQuality(option.id)"
|
||||
class="rounded-xl border px-4 py-4 text-left transition-colors"
|
||||
[class.border-primary]="activeQuality() === option.id"
|
||||
[class.bg-primary/10]="activeQuality() === option.id"
|
||||
[class.text-primary]="activeQuality() === option.id"
|
||||
[class.border-border]="activeQuality() !== option.id"
|
||||
[class.bg-secondary/30]="activeQuality() !== option.id"
|
||||
[class.text-foreground]="activeQuality() !== option.id"
|
||||
>
|
||||
<div class="flex items-start justify-between gap-3">
|
||||
<div>
|
||||
<p class="font-medium">{{ option.label }}</p>
|
||||
<p class="mt-1 text-sm text-muted-foreground">
|
||||
{{ option.description }}
|
||||
</p>
|
||||
</div>
|
||||
<span
|
||||
class="mt-0.5 inline-flex h-5 w-5 flex-shrink-0 items-center justify-center rounded-full border text-[10px]"
|
||||
[class.border-primary]="activeQuality() === option.id"
|
||||
[class.bg-primary]="activeQuality() === option.id"
|
||||
[class.text-primary-foreground]="activeQuality() === option.id"
|
||||
[class.border-border]="activeQuality() !== option.id"
|
||||
>
|
||||
@if (activeQuality() === option.id) {
|
||||
✓
|
||||
}
|
||||
</span>
|
||||
</div>
|
||||
</button>
|
||||
}
|
||||
</div>
|
||||
|
||||
<div class="flex items-center justify-end gap-2 border-t border-border p-4">
|
||||
<button
|
||||
type="button"
|
||||
(click)="cancelled.emit(undefined)"
|
||||
class="rounded-lg bg-secondary px-4 py-2 text-sm text-foreground transition-colors hover:bg-secondary/80"
|
||||
>
|
||||
Cancel
|
||||
</button>
|
||||
<button
|
||||
type="button"
|
||||
(click)="confirm()"
|
||||
class="rounded-lg bg-primary px-4 py-2 text-sm font-medium text-primary-foreground transition-colors hover:bg-primary/90"
|
||||
>
|
||||
Start sharing
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -0,0 +1,44 @@
|
||||
import {
|
||||
Component,
|
||||
HostListener,
|
||||
OnInit,
|
||||
input,
|
||||
output,
|
||||
signal
|
||||
} from '@angular/core';
|
||||
import { CommonModule } from '@angular/common';
|
||||
import { ScreenShareQuality, SCREEN_SHARE_QUALITY_OPTIONS } from '../../../core/services/webrtc/screen-share.config';
|
||||
|
||||
/**
 * Modal dialog that lets the user pick a screen-share quality profile before
 * sharing starts. Emits `confirmed` with the chosen quality, or `cancelled`
 * when dismissed (Cancel button, backdrop click, or Escape).
 */
@Component({
  selector: 'app-screen-share-quality-dialog',
  standalone: true,
  imports: [CommonModule],
  templateUrl: './screen-share-quality-dialog.component.html'
})
export class ScreenShareQualityDialogComponent implements OnInit {
  // Quality profile pre-selected when the dialog opens.
  selectedQuality = input.required<ScreenShareQuality>();
  // Whether system audio capture is enabled; the template shows an info note when true.
  includeSystemAudio = input(false);

  confirmed = output<ScreenShareQuality>();
  cancelled = output<undefined>();

  // Selectable profiles rendered by the template.
  readonly qualityOptions = SCREEN_SHARE_QUALITY_OPTIONS;
  // Profile currently highlighted in the dialog.
  readonly activeQuality = signal<ScreenShareQuality>('balanced');

  /** Escape pressed anywhere in the document dismisses the dialog. */
  @HostListener('document:keydown.escape')
  onEscape(): void {
    this.cancelled.emit(undefined);
  }

  ngOnInit(): void {
    // Seed the highlighted option from the caller-provided default.
    this.activeQuality.set(this.selectedQuality());
  }

  /** Highlights a profile without confirming it yet. */
  chooseQuality(quality: ScreenShareQuality): void {
    this.activeQuality.set(quality);
  }

  /** Confirms the currently highlighted profile. */
  confirm(): void {
    this.confirmed.emit(this.activeQuality());
  }
}
|
||||
@@ -8,4 +8,5 @@ export { LeaveServerDialogComponent } from './components/leave-server-dialog/lea
|
||||
export { ChatAudioPlayerComponent } from './components/chat-audio-player/chat-audio-player.component';
|
||||
export { ChatVideoPlayerComponent } from './components/chat-video-player/chat-video-player.component';
|
||||
export { DebugConsoleComponent } from './components/debug-console/debug-console.component';
|
||||
export { ScreenShareQualityDialogComponent } from './components/screen-share-quality-dialog/screen-share-quality-dialog.component';
|
||||
export { UserVolumeMenuComponent } from './components/user-volume-menu/user-volume-menu.component';
|
||||
|
||||
Reference in New Issue
Block a user