test: Add 8-person voice tests

This commit is contained in:
2026-04-18 14:19:59 +02:00
parent bd21568726
commit 167c45ba8d
17 changed files with 2044 additions and 232 deletions

View File

@@ -3,6 +3,15 @@ import { type BrowserContext, type Page } from '@playwright/test';
const SERVER_ENDPOINTS_STORAGE_KEY = 'metoyou_server_endpoints';
const REMOVED_DEFAULT_KEYS_STORAGE_KEY = 'metoyou_removed_default_server_keys';
/**
 * Shape of a server endpoint seeded into localStorage for e2e tests.
 * Optional flags fall back to defaults when the storage state is built:
 * isActive → true, isDefault → false, status → 'unknown'.
 */
export interface SeededEndpointInput {
  id: string;
  name: string;
  url: string;
  isActive?: boolean;
  isDefault?: boolean;
  status?: string;
}
interface SeededEndpointStorageState {
key: string;
removedKey: string;
@@ -17,21 +26,32 @@ interface SeededEndpointStorageState {
}
function buildSeededEndpointStorageState(
port: number = Number(process.env.TEST_SERVER_PORT) || 3099
endpointsOrPort: ReadonlyArray<SeededEndpointInput> | number = Number(process.env.TEST_SERVER_PORT) || 3099
): SeededEndpointStorageState {
const endpoint = {
id: 'e2e-test-server',
name: 'E2E Test Server',
url: `http://localhost:${port}`,
isActive: true,
isDefault: false,
status: 'unknown'
};
const endpoints = Array.isArray(endpointsOrPort)
? endpointsOrPort.map((endpoint) => ({
id: endpoint.id,
name: endpoint.name,
url: endpoint.url,
isActive: endpoint.isActive ?? true,
isDefault: endpoint.isDefault ?? false,
status: endpoint.status ?? 'unknown'
}))
: [
{
id: 'e2e-test-server',
name: 'E2E Test Server',
url: `http://localhost:${endpointsOrPort}`,
isActive: true,
isDefault: false,
status: 'unknown'
}
];
return {
key: SERVER_ENDPOINTS_STORAGE_KEY,
removedKey: REMOVED_DEFAULT_KEYS_STORAGE_KEY,
endpoints: [endpoint]
endpoints
};
}
@@ -59,6 +79,15 @@ export async function installTestServerEndpoint(
await context.addInitScript(applySeededEndpointStorageState, storageState);
}
/**
 * Register an init script on the browser context that seeds localStorage
 * with the given list of server endpoints before any page script runs.
 */
export async function installTestServerEndpoints(
  context: BrowserContext,
  endpoints: ReadonlyArray<SeededEndpointInput>
): Promise<void> {
  await context.addInitScript(
    applySeededEndpointStorageState,
    buildSeededEndpointStorageState(endpoints)
  );
}
/**
* Seed localStorage with a single signal endpoint pointing at the test server.
* Must be called AFTER navigating to the app origin (localStorage is per-origin)
@@ -79,3 +108,12 @@ export async function seedTestServerEndpoint(
await page.evaluate(applySeededEndpointStorageState, storageState);
}
/**
 * Seed the current page's localStorage with the given server endpoints.
 * Must be called AFTER navigating to the app origin (localStorage is
 * per-origin).
 */
export async function seedTestServerEndpoints(
  page: Page,
  endpoints: ReadonlyArray<SeededEndpointInput>
): Promise<void> {
  await page.evaluate(
    applySeededEndpointStorageState,
    buildSeededEndpointStorageState(endpoints)
  );
}

132
e2e/helpers/test-server.ts Normal file
View File

@@ -0,0 +1,132 @@
import { spawn, type ChildProcess } from 'node:child_process';
import { once } from 'node:events';
import { createServer } from 'node:net';
import { join } from 'node:path';
/** Handle for a spawned e2e test server process. */
export interface TestServerHandle {
  port: number;
  url: string;
  // Stops the child process; resolves once it has exited.
  stop: () => Promise<void>;
}
// Paths are resolved relative to the e2e directory that contains this helper.
const E2E_DIR = join(__dirname, '..');
const START_SERVER_SCRIPT = join(E2E_DIR, 'helpers', 'start-test-server.js');
/**
 * Spawn the e2e test server on a freshly allocated ephemeral port.
 *
 * Runs the allocate/spawn/wait-for-ready cycle up to `retries` times; a
 * failed attempt's child process is always stopped before retrying or
 * rethrowing.
 *
 * @param retries maximum number of spawn attempts (default 3)
 * @returns a handle exposing the chosen port, base URL and a stop() callback
 */
export async function startTestServer(retries = 3): Promise<TestServerHandle> {
  for (let attempt = 1; attempt <= retries; attempt++) {
    const port = await allocatePort();
    const child = spawn(process.execPath, [START_SERVER_SCRIPT], {
      cwd: E2E_DIR,
      env: { ...process.env, TEST_SERVER_PORT: String(port) },
      stdio: 'pipe'
    });
    // Mirror server output into the test runner's own streams.
    child.stdout?.on('data', (chunk: Buffer | string) => process.stdout.write(String(chunk)));
    child.stderr?.on('data', (chunk: Buffer | string) => process.stderr.write(String(chunk)));
    try {
      await waitForServerReady(port, child);
      return {
        port,
        url: `http://localhost:${port}`,
        stop: async () => {
          await stopServer(child);
        }
      };
    } catch (error) {
      await stopServer(child);
      if (attempt >= retries) {
        throw error;
      }
      console.log(`[E2E Server] Attempt ${attempt} failed, retrying...`);
    }
  }
  throw new Error('startTestServer: unreachable');
}
/**
 * Ask the OS for a free TCP port by briefly binding an ephemeral listener
 * on 127.0.0.1, reading the assigned port, and closing the probe again.
 *
 * @returns the allocated port number
 */
async function allocatePort(): Promise<number> {
  return await new Promise<number>((resolve, reject) => {
    const probe = createServer();
    probe.once('error', reject);
    probe.listen(0, '127.0.0.1', () => {
      const address = probe.address();
      if (address === null || typeof address === 'string') {
        probe.close();
        reject(new Error('Failed to resolve an ephemeral test server port'));
        return;
      }
      const allocated = address.port;
      probe.close((closeError) => {
        if (closeError) {
          reject(closeError);
        } else {
          resolve(allocated);
        }
      });
    });
  });
}
/**
 * Poll the spawned server's HTTP API until it answers, the child dies,
 * or the timeout elapses.
 *
 * @param port port the server is expected to listen on
 * @param child the spawned server process, watched for early termination
 * @param timeoutMs how long to keep polling before giving up
 * @throws if the child exits (or is killed by a signal) before becoming
 *   ready, or if the deadline passes first
 */
async function waitForServerReady(port: number, child: ChildProcess, timeoutMs = 30_000): Promise<void> {
  const readyUrl = `http://127.0.0.1:${port}/api/servers?limit=1`;
  const deadline = Date.now() + timeoutMs;
  while (Date.now() < deadline) {
    // A signal-terminated child has exitCode === null but signalCode set;
    // check both so we fail fast instead of polling until the timeout.
    if (child.exitCode !== null || child.signalCode !== null) {
      throw new Error(
        `Test server exited before becoming ready (exit code ${child.exitCode}, signal ${child.signalCode})`
      );
    }
    try {
      const response = await fetch(readyUrl);
      if (response.ok) {
        return;
      }
    } catch {
      // Server still starting.
    }
    await wait(250);
  }
  throw new Error(`Timed out waiting for test server on port ${port}`);
}
/**
 * Terminate the test server child process.
 *
 * Sends SIGTERM and waits up to 3 s for exit, escalating to SIGKILL if
 * the process is still alive. A child that already exited (normally or
 * via a signal) is left alone.
 */
async function stopServer(child: ChildProcess): Promise<void> {
  // exitCode stays null for signal-terminated children, so check
  // signalCode too; otherwise we would re-kill an already-dead process.
  if (child.exitCode !== null || child.signalCode !== null) {
    return;
  }
  // Subscribe to 'exit' once, BEFORE killing, and reuse the same promise
  // for the escalation path. Re-subscribing after the race could hang
  // forever if the exit event fired between the timeout and the new
  // once() call.
  const exitPromise = once(child, 'exit');
  child.kill('SIGTERM');
  const exited = await Promise.race([exitPromise.then(() => true), wait(3_000).then(() => false)]);
  if (!exited && child.exitCode === null && child.signalCode === null) {
    child.kill('SIGKILL');
    await exitPromise;
  }
}
/** Resolve after the given number of milliseconds. */
function wait(durationMs: number): Promise<void> {
  return new Promise((resolve) => setTimeout(resolve, durationMs));
}

View File

@@ -46,75 +46,6 @@ export async function installWebRTCTracking(page: Page): Promise<void> {
(window as any).RTCPeerConnection.prototype = OriginalRTCPeerConnection.prototype;
Object.setPrototypeOf((window as any).RTCPeerConnection, OriginalRTCPeerConnection);
// Patch getUserMedia to use an AudioContext oscillator for audio
// instead of the hardware capture device. Chromium's fake audio
// device intermittently fails to produce frames after renegotiation.
const origGetUserMedia = navigator.mediaDevices.getUserMedia.bind(navigator.mediaDevices);
navigator.mediaDevices.getUserMedia = async (constraints?: MediaStreamConstraints) => {
const wantsAudio = !!constraints?.audio;
if (!wantsAudio) {
return origGetUserMedia(constraints);
}
// Get the original stream (may include video)
const originalStream = await origGetUserMedia(constraints);
const audioCtx = new AudioContext();
const noiseBuffer = audioCtx.createBuffer(1, audioCtx.sampleRate * 2, audioCtx.sampleRate);
const noiseData = noiseBuffer.getChannelData(0);
for (let sampleIndex = 0; sampleIndex < noiseData.length; sampleIndex++) {
noiseData[sampleIndex] = (Math.random() * 2 - 1) * 0.18;
}
const source = audioCtx.createBufferSource();
const gain = audioCtx.createGain();
source.buffer = noiseBuffer;
source.loop = true;
gain.gain.value = 0.12;
const dest = audioCtx.createMediaStreamDestination();
source.connect(gain);
gain.connect(dest);
source.start();
if (audioCtx.state === 'suspended') {
try {
await audioCtx.resume();
} catch {}
}
const synthAudioTrack = dest.stream.getAudioTracks()[0];
const resultStream = new MediaStream();
syntheticMediaResources.push({ audioCtx, source });
resultStream.addTrack(synthAudioTrack);
// Keep any video tracks from the original stream
for (const videoTrack of originalStream.getVideoTracks()) {
resultStream.addTrack(videoTrack);
}
// Stop original audio tracks since we're not using them
for (const track of originalStream.getAudioTracks()) {
track.stop();
}
synthAudioTrack.addEventListener('ended', () => {
try {
source.stop();
} catch {}
void audioCtx.close().catch(() => {});
}, { once: true });
return resultStream;
};
// Patch getDisplayMedia to return a synthetic screen share stream
// (canvas-based video + 880Hz oscillator audio) so the browser
// picker dialog is never shown.
@@ -218,6 +149,177 @@ export async function isPeerStillConnected(page: Page): Promise<boolean> {
);
}
/** Returns the number of tracked peer connections in `connected` state. */
export async function getConnectedPeerCount(page: Page): Promise<number> {
  return page.evaluate(() => {
    const tracked = (window as any).__rtcConnections as RTCPeerConnection[] | undefined;
    if (!tracked) {
      return 0;
    }
    return tracked.filter((pc) => pc.connectionState === 'connected').length;
  });
}
/** Wait until the expected number of peer connections are `connected`. */
export async function waitForConnectedPeerCount(page: Page, expectedCount: number, timeout = 45_000): Promise<void> {
  await page.waitForFunction(
    (target) => {
      const tracked = (window as any).__rtcConnections as RTCPeerConnection[] | undefined;
      return tracked?.filter((pc) => pc.connectionState === 'connected').length === target;
    },
    expectedCount,
    { timeout }
  );
}
/**
 * Resume all suspended AudioContext instances created by the synthetic
 * media patch. Uses CDP `Runtime.evaluate` with `userGesture: true` so
 * Chrome treats the call as a user-gesture — this satisfies the autoplay
 * policy that otherwise blocks `AudioContext.resume()`.
 *
 * @returns the number of contexts that were actually resumed
 */
export async function resumeSyntheticAudioContexts(page: Page): Promise<number> {
  const cdpSession = await page.context().newCDPSession(page);
  try {
    const { result } = await cdpSession.send('Runtime.evaluate', {
      expression: `(async () => {
        const resources = window.__rtcSyntheticMediaResources;
        if (!resources) return 0;
        let resumed = 0;
        for (const r of resources) {
          if (r.audioCtx.state === 'suspended') {
            await r.audioCtx.resume();
            resumed++;
          }
        }
        return resumed;
      })()`,
      awaitPromise: true,
      userGesture: true
    });
    return result.value ?? 0;
  } finally {
    // Always release the CDP session, even if the evaluation failed.
    await cdpSession.detach();
  }
}
/**
 * Snapshot of one RTCPeerConnection's aggregated audio RTP counters.
 * Byte/packet totals are cumulative since the connection was created.
 */
interface PerPeerAudioStat {
  connectionState: string;
  inboundBytes: number;
  inboundPackets: number;
  outboundBytes: number;
  outboundPackets: number;
}
/** Get per-peer audio stats for every tracked RTCPeerConnection. */
export async function getPerPeerAudioStats(page: Page): Promise<PerPeerAudioStat[]> {
  return page.evaluate(async () => {
    const tracked = (window as any).__rtcConnections as RTCPeerConnection[] | undefined;
    if (!tracked?.length) {
      return [];
    }
    const results: PerPeerAudioStat[] = [];
    for (const connection of tracked) {
      const totals = { inboundBytes: 0, inboundPackets: 0, outboundBytes: 0, outboundPackets: 0 };
      try {
        const reports = await connection.getStats();
        reports.forEach((report: any) => {
          // Older implementations expose mediaType instead of kind.
          const mediaKind = report.kind ?? report.mediaType;
          if (mediaKind !== 'audio') {
            return;
          }
          if (report.type === 'outbound-rtp') {
            totals.outboundBytes += report.bytesSent ?? 0;
            totals.outboundPackets += report.packetsSent ?? 0;
          } else if (report.type === 'inbound-rtp') {
            totals.inboundBytes += report.bytesReceived ?? 0;
            totals.inboundPackets += report.packetsReceived ?? 0;
          }
        });
      } catch {
        // Closed connection.
      }
      results.push({ connectionState: connection.connectionState, ...totals });
    }
    return results;
  });
}
/**
 * Wait until every connected peer connection shows inbound and outbound audio flow.
 *
 * Polls getPerPeerAudioStats every `pollIntervalMs` and compares each sample
 * against the previous one: a peer counts as "flowing" once both its inbound
 * and outbound byte/packet counters grew between two consecutive samples.
 * Samples are matched by array index — assumes the tracked connection list
 * is append-only so indices stay stable across polls (TODO confirm).
 *
 * @param page page whose tracked RTCPeerConnections are inspected
 * @param expectedConnectedPeers minimum number of peers that must be confirmed flowing
 * @param timeoutMs total time budget before giving up
 * @param pollIntervalMs delay between stat samples
 * @throws if the deadline passes before enough peers are confirmed
 */
export async function waitForAllPeerAudioFlow(
  page: Page,
  expectedConnectedPeers: number,
  timeoutMs = 45_000,
  pollIntervalMs = 1_000
): Promise<void> {
  const deadline = Date.now() + timeoutMs;
  // Track which peer indices have been confirmed flowing at least once.
  // This prevents a peer from being missed just because it briefly paused
  // during one specific poll interval.
  const confirmedFlowing = new Set<number>();
  let previous = await getPerPeerAudioStats(page);
  while (Date.now() < deadline) {
    await page.waitForTimeout(pollIntervalMs);
    const current = await getPerPeerAudioStats(page);
    const connectedPeers = current.filter((stat) => stat.connectionState === 'connected');
    if (connectedPeers.length >= expectedConnectedPeers) {
      for (let index = 0; index < current.length; index++) {
        const curr = current[index];
        if (!curr || curr.connectionState !== 'connected') {
          continue;
        }
        // A peer with no previous sample (it just appeared) is compared
        // against zeros, so any traffic at all counts as growth.
        const prev = previous[index] ?? {
          connectionState: 'new',
          inboundBytes: 0,
          inboundPackets: 0,
          outboundBytes: 0,
          outboundPackets: 0
        };
        const inboundFlowing = curr.inboundBytes > prev.inboundBytes || curr.inboundPackets > prev.inboundPackets;
        const outboundFlowing = curr.outboundBytes > prev.outboundBytes || curr.outboundPackets > prev.outboundPackets;
        if (inboundFlowing && outboundFlowing) {
          confirmedFlowing.add(index);
        }
      }
      // Check if enough peers have been confirmed across all samples
      const connectedIndices = current
        .map((stat, idx) => stat.connectionState === 'connected' ? idx : -1)
        .filter((idx) => idx >= 0);
      const confirmedCount = connectedIndices.filter((idx) => confirmedFlowing.has(idx)).length;
      if (confirmedCount >= expectedConnectedPeers) {
        return;
      }
    }
    previous = current;
  }
  throw new Error(`Timed out waiting for ${expectedConnectedPeers} peers with bidirectional audio flow`);
}
/**
* Get outbound and inbound audio RTP stats aggregated across all peer
* connections. Uses a per-connection high water mark stored on `window` so