test: Add playwright main usage test
Some checks failed
Deploy Web Apps / deploy (push) Has been cancelled
Queue Release Build / prepare (push) Successful in 21s
Queue Release Build / build-linux (push) Successful in 27m44s
Queue Release Build / build-windows (push) Successful in 32m16s
Queue Release Build / finalize (push) Successful in 1m54s

This commit is contained in:
2026-04-11 16:48:26 +02:00
parent f33440a827
commit 391d9235f1
25 changed files with 2968 additions and 67 deletions

View File

@@ -0,0 +1,717 @@
/* eslint-disable @typescript-eslint/no-explicit-any */
import { type Page } from '@playwright/test';
/**
 * Install RTCPeerConnection monkey-patch on a page BEFORE navigating.
 * Tracks all created peer connections and their remote tracks so tests
 * can inspect WebRTC state via `page.evaluate()`.
 *
 * Also replaces `getUserMedia` (synthetic 440Hz oscillator audio) and
 * `getDisplayMedia` (animated canvas video + 880Hz oscillator audio) so
 * tests never depend on capture hardware or the browser picker dialog.
 *
 * Call immediately after page creation, before any `goto()`.
 */
export async function installWebRTCTracking(page: Page): Promise<void> {
  await page.addInitScript(() => {
    // Registry of every peer connection the app creates, exposed on
    // `window` so tests can inspect it via page.evaluate().
    const connections: RTCPeerConnection[] = [];
    (window as any).__rtcConnections = connections;
    (window as any).__rtcRemoteTracks = [] as { kind: string; id: string; readyState: string }[];
    const OriginalRTCPeerConnection = window.RTCPeerConnection;
    (window as any).RTCPeerConnection = function(this: RTCPeerConnection, ...args: any[]) {
      const pc: RTCPeerConnection = new OriginalRTCPeerConnection(...args);
      connections.push(pc);
      // Remember the most recent state transition for quick assertions.
      pc.addEventListener('connectionstatechange', () => {
        (window as any).__lastRtcState = pc.connectionState;
      });
      // Record every remote track as it arrives.
      pc.addEventListener('track', (event: RTCTrackEvent) => {
        (window as any).__rtcRemoteTracks.push({
          kind: event.track.kind,
          id: event.track.id,
          readyState: event.track.readyState
        });
      });
      return pc;
    } as any;
    // Keep `instanceof` working and inherit statics via the prototype chain.
    (window as any).RTCPeerConnection.prototype = OriginalRTCPeerConnection.prototype;
    Object.setPrototypeOf((window as any).RTCPeerConnection, OriginalRTCPeerConnection);
    // Patch getUserMedia to use an AudioContext oscillator for audio
    // instead of the hardware capture device. Chromium's fake audio
    // device intermittently fails to produce frames after renegotiation.
    const origGetUserMedia = navigator.mediaDevices.getUserMedia.bind(navigator.mediaDevices);
    navigator.mediaDevices.getUserMedia = async (constraints?: MediaStreamConstraints) => {
      const wantsAudio = !!constraints?.audio;
      if (!wantsAudio) {
        return origGetUserMedia(constraints);
      }
      // Get the original stream (may include video)
      const originalStream = await origGetUserMedia(constraints);
      const audioCtx = new AudioContext();
      const oscillator = audioCtx.createOscillator();
      oscillator.frequency.value = 440; // 440Hz identifies the "voice" track
      const dest = audioCtx.createMediaStreamDestination();
      oscillator.connect(dest);
      oscillator.start();
      const synthAudioTrack = dest.stream.getAudioTracks()[0];
      const resultStream = new MediaStream();
      resultStream.addTrack(synthAudioTrack);
      // Keep any video tracks from the original stream
      for (const videoTrack of originalStream.getVideoTracks()) {
        resultStream.addTrack(videoTrack);
      }
      // Stop original audio tracks since we're not using them
      for (const track of originalStream.getAudioTracks()) {
        track.stop();
      }
      return resultStream;
    };
    // Patch getDisplayMedia to return a synthetic screen share stream
    // (canvas-based video + 880Hz oscillator audio) so the browser
    // picker dialog is never shown.
    navigator.mediaDevices.getDisplayMedia = async (_constraints?: DisplayMediaStreamOptions) => {
      const canvas = document.createElement('canvas');
      canvas.width = 640;
      canvas.height = 480;
      const ctx = canvas.getContext('2d');
      if (!ctx) {
        throw new Error('Canvas 2D context unavailable');
      }
      let frameCount = 0;
      // Draw animated frames so video stats show increasing bytes
      const drawFrame = () => {
        frameCount++;
        ctx.fillStyle = `hsl(${frameCount % 360}, 70%, 50%)`;
        ctx.fillRect(0, 0, canvas.width, canvas.height);
        ctx.fillStyle = '#fff';
        ctx.font = '24px monospace';
        ctx.fillText(`Screen Share Frame ${frameCount}`, 40, 60);
      };
      drawFrame();
      const drawInterval = setInterval(drawFrame, 100);
      const videoStream = canvas.captureStream(10); // 10 fps
      const videoTrack = videoStream.getVideoTracks()[0];
      // Create 880Hz oscillator for screen share audio (distinct from 440Hz voice)
      const audioCtx = new AudioContext();
      const osc = audioCtx.createOscillator();
      osc.frequency.value = 880;
      const dest = audioCtx.createMediaStreamDestination();
      osc.connect(dest);
      osc.start();
      const audioTrack = dest.stream.getAudioTracks()[0];
      // Release the draw interval and audio graph exactly once
      // (oscillator.stop() throws if called twice).
      let cleanedUp = false;
      const cleanup = () => {
        if (cleanedUp)
          return;
        cleanedUp = true;
        clearInterval(drawInterval);
        osc.stop();
        void audioCtx.close();
      };
      // BUG FIX: 'ended' does NOT fire when the page itself calls
      // track.stop(), so the draw interval and oscillator previously
      // leaked after a test ended the share. Wrap stop() to clean up too.
      videoTrack.addEventListener('ended', cleanup);
      const origVideoStop = videoTrack.stop.bind(videoTrack);
      videoTrack.stop = () => {
        cleanup();
        origVideoStop();
      };
      // Combine video + audio into one stream
      const resultStream = new MediaStream([videoTrack, audioTrack]);
      // Tag the stream so tests can identify it
      (resultStream as any).__isScreenShare = true;
      return resultStream;
    };
  });
}
/**
 * Block until at least one tracked RTCPeerConnection reports
 * `connectionState === 'connected'`. Requires {@link installWebRTCTracking}
 * to have been installed on the page.
 */
export async function waitForPeerConnected(page: Page, timeout = 30_000): Promise<void> {
  await page.waitForFunction(
    () => {
      const pcs = (window as any).__rtcConnections as RTCPeerConnection[] | undefined;
      if (!pcs)
        return false;
      return pcs.some((pc) => pc.connectionState === 'connected');
    },
    { timeout }
  );
}
/**
 * Report whether any tracked peer connection is currently in the
 * 'connected' state (i.e. has not failed or disconnected).
 */
export async function isPeerStillConnected(page: Page): Promise<boolean> {
  return page.evaluate(() => {
    const pcs = (window as any).__rtcConnections as RTCPeerConnection[] | undefined;
    if (!pcs)
      return false;
    return pcs.some((pc) => pc.connectionState === 'connected');
  });
}
/**
 * Get outbound and inbound audio RTP stats aggregated across all peer
 * connections. Uses a per-connection high water mark stored on `window` so
 * that connections that close mid-measurement still contribute their last
 * known counters, preventing the aggregate from going backwards.
 *
 * @returns totals per direction, or `null` for a direction that has never
 *          produced a matching RTP report.
 */
export async function getAudioStats(page: Page): Promise<{
  outbound: { bytesSent: number; packetsSent: number } | null;
  inbound: { bytesReceived: number; packetsReceived: number } | null;
}> {
  return page.evaluate(async () => {
    const connections = (window as any).__rtcConnections as RTCPeerConnection[] | undefined;
    if (!connections?.length)
      return { outbound: null, inbound: null };
    // Last successfully read counters for one connection.
    interface HWMEntry {
      outBytesSent: number;
      outPacketsSent: number;
      inBytesReceived: number;
      inPacketsReceived: number;
      hasOutbound: boolean;
      hasInbound: boolean;
    }
    // High-water marks persist on `window` across evaluate() calls so a
    // connection that closes between reads keeps its last known counters.
    const hwm: Record<number, HWMEntry> = (window as any).__rtcStatsHWM =
      ((window as any).__rtcStatsHWM as Record<number, HWMEntry> | undefined) ?? {};
    for (let idx = 0; idx < connections.length; idx++) {
      let stats: RTCStatsReport;
      try {
        stats = await connections[idx].getStats();
      } catch {
        continue; // closed connection - keep its last HWM
      }
      let obytes = 0;
      let opackets = 0;
      let ibytes = 0;
      let ipackets = 0;
      let hasOut = false;
      let hasIn = false;
      stats.forEach((report: any) => {
        // Fall back to `mediaType` when `kind` is absent on the report.
        const kind = report.kind ?? report.mediaType;
        if (report.type === 'outbound-rtp' && kind === 'audio') {
          hasOut = true;
          obytes += report.bytesSent ?? 0;
          opackets += report.packetsSent ?? 0;
        }
        if (report.type === 'inbound-rtp' && kind === 'audio') {
          hasIn = true;
          ibytes += report.bytesReceived ?? 0;
          ipackets += report.packetsReceived ?? 0;
        }
      });
      // Only overwrite the HWM when this read actually saw audio RTP reports.
      if (hasOut || hasIn) {
        hwm[idx] = {
          outBytesSent: obytes,
          outPacketsSent: opackets,
          inBytesReceived: ibytes,
          inPacketsReceived: ipackets,
          hasOutbound: hasOut,
          hasInbound: hasIn
        };
      }
    }
    // Aggregate across every remembered connection.
    let totalOutBytes = 0;
    let totalOutPackets = 0;
    let totalInBytes = 0;
    let totalInPackets = 0;
    let anyOutbound = false;
    let anyInbound = false;
    for (const entry of Object.values(hwm)) {
      totalOutBytes += entry.outBytesSent;
      totalOutPackets += entry.outPacketsSent;
      totalInBytes += entry.inBytesReceived;
      totalInPackets += entry.inPacketsReceived;
      if (entry.hasOutbound)
        anyOutbound = true;
      if (entry.hasInbound)
        anyInbound = true;
    }
    return {
      outbound: anyOutbound
        ? { bytesSent: totalOutBytes, packetsSent: totalOutPackets }
        : null,
      inbound: anyInbound
        ? { bytesReceived: totalInBytes, packetsReceived: totalInPackets }
        : null
    };
  });
}
/**
 * Snapshot audio stats, wait `durationMs`, snapshot again, and return the
 * delta between the two reads. Useful for verifying audio is actively
 * flowing (bytes increasing).
 */
export async function getAudioStatsDelta(page: Page, durationMs = 3_000): Promise<{
  outboundBytesDelta: number;
  inboundBytesDelta: number;
  outboundPacketsDelta: number;
  inboundPacketsDelta: number;
}> {
  const first = await getAudioStats(page);
  await page.waitForTimeout(durationMs);
  const second = await getAudioStats(page);
  // Missing direction (null) counts as zero.
  const diff = (a: number | undefined, b: number | undefined): number => (a ?? 0) - (b ?? 0);
  return {
    outboundBytesDelta: diff(second.outbound?.bytesSent, first.outbound?.bytesSent),
    inboundBytesDelta: diff(second.inbound?.bytesReceived, first.inbound?.bytesReceived),
    outboundPacketsDelta: diff(second.outbound?.packetsSent, first.outbound?.packetsSent),
    inboundPacketsDelta: diff(second.inbound?.packetsReceived, first.inbound?.packetsReceived)
  };
}
/**
 * Wait until at least one connection exposes both outbound-rtp and
 * inbound-rtp audio reports. Call after `waitForPeerConnected` to ensure
 * the audio pipeline is ready before measuring deltas.
 */
export async function waitForAudioStatsPresent(page: Page, timeout = 15_000): Promise<void> {
  await page.waitForFunction(
    async () => {
      const pcs = (window as any).__rtcConnections as RTCPeerConnection[] | undefined;
      if (!pcs?.length)
        return false;
      for (const pc of pcs) {
        let report: RTCStatsReport;
        try {
          report = await pc.getStats();
        } catch {
          continue; // closed connection - try the next one
        }
        let outboundSeen = false;
        let inboundSeen = false;
        report.forEach((entry: any) => {
          if ((entry.kind ?? entry.mediaType) !== 'audio')
            return;
          if (entry.type === 'outbound-rtp')
            outboundSeen = true;
          else if (entry.type === 'inbound-rtp')
            inboundSeen = true;
        });
        if (outboundSeen && inboundSeen)
          return true;
      }
      return false;
    },
    { timeout }
  );
}
/** Per-direction change in audio RTP counters between two snapshots. */
interface AudioFlowDelta {
  outboundBytesDelta: number;
  inboundBytesDelta: number;
  outboundPacketsDelta: number;
  inboundPacketsDelta: number;
}
/** Compute per-counter differences between two audio stats snapshots. */
function snapshotToDelta(
  curr: Awaited<ReturnType<typeof getAudioStats>>,
  prev: Awaited<ReturnType<typeof getAudioStats>>
): AudioFlowDelta {
  // A null direction (no reports yet) counts as zero.
  const diff = (a: number | undefined, b: number | undefined): number => (a ?? 0) - (b ?? 0);
  return {
    outboundBytesDelta: diff(curr.outbound?.bytesSent, prev.outbound?.bytesSent),
    inboundBytesDelta: diff(curr.inbound?.bytesReceived, prev.inbound?.bytesReceived),
    outboundPacketsDelta: diff(curr.outbound?.packetsSent, prev.outbound?.packetsSent),
    inboundPacketsDelta: diff(curr.inbound?.packetsReceived, prev.inbound?.packetsReceived)
  };
}
/** True only when BOTH directions show progress in bytes or packets. */
function isDeltaFlowing(delta: AudioFlowDelta): boolean {
  const sending = delta.outboundBytesDelta > 0 || delta.outboundPacketsDelta > 0;
  const receiving = delta.inboundBytesDelta > 0 || delta.inboundPacketsDelta > 0;
  return sending && receiving;
}
/**
 * Poll until two consecutive HWM-based reads show both outbound and inbound
 * audio byte counts increasing. Combines per-connection high-water marks
 * (which prevent totals from going backwards after connection churn) with
 * consecutive comparison (which avoids a stale single baseline).
 *
 * @returns the first flowing delta, or all-zero deltas on timeout so the
 *          caller's assertion reports the failure.
 */
export async function waitForAudioFlow(
  page: Page,
  timeoutMs = 30_000,
  pollIntervalMs = 1_000
): Promise<AudioFlowDelta> {
  const stopAt = Date.now() + timeoutMs;
  let baseline = await getAudioStats(page);
  while (Date.now() < stopAt) {
    await page.waitForTimeout(pollIntervalMs);
    const sample = await getAudioStats(page);
    const delta = snapshotToDelta(sample, baseline);
    if (isDeltaFlowing(delta))
      return delta;
    baseline = sample;
  }
  return {
    outboundBytesDelta: 0,
    inboundBytesDelta: 0,
    outboundPacketsDelta: 0,
    inboundPacketsDelta: 0
  };
}
/**
 * Get outbound and inbound video RTP stats aggregated across all peer
 * connections. Uses the same HWM pattern as {@link getAudioStats}.
 */
export async function getVideoStats(page: Page): Promise<{
  outbound: { bytesSent: number; packetsSent: number } | null;
  inbound: { bytesReceived: number; packetsReceived: number } | null;
}> {
  return page.evaluate(async () => {
    const tracked = (window as any).__rtcConnections as RTCPeerConnection[] | undefined;
    if (!tracked?.length)
      return { outbound: null, inbound: null };
    // Last successfully read counters for one connection.
    interface VHWM {
      outBytesSent: number;
      outPacketsSent: number;
      inBytesReceived: number;
      inPacketsReceived: number;
      hasOutbound: boolean;
      hasInbound: boolean;
    }
    // High-water marks persist on `window` so closed connections keep
    // contributing their last known counters.
    const marks: Record<number, VHWM> = (window as any).__rtcVideoStatsHWM =
      ((window as any).__rtcVideoStatsHWM as Record<number, VHWM> | undefined) ?? {};
    for (let i = 0; i < tracked.length; i++) {
      let report: RTCStatsReport;
      try {
        report = await tracked[i].getStats();
      } catch {
        continue; // closed connection - keep its previous mark
      }
      const snap: VHWM = {
        outBytesSent: 0,
        outPacketsSent: 0,
        inBytesReceived: 0,
        inPacketsReceived: 0,
        hasOutbound: false,
        hasInbound: false
      };
      report.forEach((entry: any) => {
        if ((entry.kind ?? entry.mediaType) !== 'video')
          return;
        if (entry.type === 'outbound-rtp') {
          snap.hasOutbound = true;
          snap.outBytesSent += entry.bytesSent ?? 0;
          snap.outPacketsSent += entry.packetsSent ?? 0;
        } else if (entry.type === 'inbound-rtp') {
          snap.hasInbound = true;
          snap.inBytesReceived += entry.bytesReceived ?? 0;
          snap.inPacketsReceived += entry.packetsReceived ?? 0;
        }
      });
      // Only replace the mark when this read saw video RTP reports.
      if (snap.hasOutbound || snap.hasInbound)
        marks[i] = snap;
    }
    // Sum the remembered marks for the aggregate view.
    let outBytes = 0;
    let outPackets = 0;
    let inBytes = 0;
    let inPackets = 0;
    let anyOut = false;
    let anyIn = false;
    for (const mark of Object.values(marks)) {
      outBytes += mark.outBytesSent;
      outPackets += mark.outPacketsSent;
      inBytes += mark.inBytesReceived;
      inPackets += mark.inPacketsReceived;
      if (mark.hasOutbound)
        anyOut = true;
      if (mark.hasInbound)
        anyIn = true;
    }
    return {
      outbound: anyOut ? { bytesSent: outBytes, packetsSent: outPackets } : null,
      inbound: anyIn ? { bytesReceived: inBytes, packetsReceived: inPackets } : null
    };
  });
}
/**
 * Wait until at least one connection exposes both outbound-rtp and
 * inbound-rtp video reports.
 */
export async function waitForVideoStatsPresent(page: Page, timeout = 15_000): Promise<void> {
  await page.waitForFunction(
    async () => {
      const pcs = (window as any).__rtcConnections as RTCPeerConnection[] | undefined;
      if (!pcs?.length)
        return false;
      for (const pc of pcs) {
        let report: RTCStatsReport;
        try {
          report = await pc.getStats();
        } catch {
          continue; // closed connection - try the next one
        }
        let outboundSeen = false;
        let inboundSeen = false;
        report.forEach((entry: any) => {
          if ((entry.kind ?? entry.mediaType) !== 'video')
            return;
          if (entry.type === 'outbound-rtp')
            outboundSeen = true;
          else if (entry.type === 'inbound-rtp')
            inboundSeen = true;
        });
        if (outboundSeen && inboundSeen)
          return true;
      }
      return false;
    },
    { timeout }
  );
}
/** Per-direction change in video RTP counters between two snapshots. */
interface VideoFlowDelta {
  outboundBytesDelta: number;
  inboundBytesDelta: number;
  outboundPacketsDelta: number;
  inboundPacketsDelta: number;
}
/** Compute per-counter differences between two video stats snapshots. */
function videoSnapshotToDelta(
  curr: Awaited<ReturnType<typeof getVideoStats>>,
  prev: Awaited<ReturnType<typeof getVideoStats>>
): VideoFlowDelta {
  // A null direction (no reports yet) counts as zero.
  const diff = (a: number | undefined, b: number | undefined): number => (a ?? 0) - (b ?? 0);
  return {
    outboundBytesDelta: diff(curr.outbound?.bytesSent, prev.outbound?.bytesSent),
    inboundBytesDelta: diff(curr.inbound?.bytesReceived, prev.inbound?.bytesReceived),
    outboundPacketsDelta: diff(curr.outbound?.packetsSent, prev.outbound?.packetsSent),
    inboundPacketsDelta: diff(curr.inbound?.packetsReceived, prev.inbound?.packetsReceived)
  };
}
/** True only when BOTH video directions show progress in bytes or packets. */
function isVideoDeltaFlowing(delta: VideoFlowDelta): boolean {
  const sending = delta.outboundBytesDelta > 0 || delta.outboundPacketsDelta > 0;
  const receiving = delta.inboundBytesDelta > 0 || delta.inboundPacketsDelta > 0;
  return sending && receiving;
}
/**
 * Poll until two consecutive HWM-based reads show both outbound and inbound
 * video byte counts increasing - proving screen share video is flowing.
 *
 * @returns the first flowing delta, or all-zero deltas on timeout so the
 *          caller's assertion reports the failure.
 */
export async function waitForVideoFlow(
  page: Page,
  timeoutMs = 30_000,
  pollIntervalMs = 1_000
): Promise<VideoFlowDelta> {
  const stopAt = Date.now() + timeoutMs;
  let baseline = await getVideoStats(page);
  while (Date.now() < stopAt) {
    await page.waitForTimeout(pollIntervalMs);
    const sample = await getVideoStats(page);
    const delta = videoSnapshotToDelta(sample, baseline);
    if (isVideoDeltaFlowing(delta))
      return delta;
    baseline = sample;
  }
  return {
    outboundBytesDelta: 0,
    inboundBytesDelta: 0,
    outboundPacketsDelta: 0,
    inboundPacketsDelta: 0
  };
}
/**
 * Wait until outbound video bytes are increasing (sender side).
 * Use on the page that is sharing its screen.
 *
 * @returns the first delta with outbound progress, or all-zero deltas
 *          on timeout.
 */
export async function waitForOutboundVideoFlow(
  page: Page,
  timeoutMs = 30_000,
  pollIntervalMs = 1_000
): Promise<VideoFlowDelta> {
  const stopAt = Date.now() + timeoutMs;
  let baseline = await getVideoStats(page);
  while (Date.now() < stopAt) {
    await page.waitForTimeout(pollIntervalMs);
    const sample = await getVideoStats(page);
    const delta = videoSnapshotToDelta(sample, baseline);
    const sending = delta.outboundBytesDelta > 0 || delta.outboundPacketsDelta > 0;
    if (sending)
      return delta;
    baseline = sample;
  }
  return {
    outboundBytesDelta: 0,
    inboundBytesDelta: 0,
    outboundPacketsDelta: 0,
    inboundPacketsDelta: 0
  };
}
/**
 * Wait until inbound video bytes are increasing (receiver side).
 * Use on the page that is viewing someone else's screen share.
 *
 * @returns the first delta with inbound progress, or all-zero deltas
 *          on timeout.
 */
export async function waitForInboundVideoFlow(
  page: Page,
  timeoutMs = 30_000,
  pollIntervalMs = 1_000
): Promise<VideoFlowDelta> {
  const stopAt = Date.now() + timeoutMs;
  let baseline = await getVideoStats(page);
  while (Date.now() < stopAt) {
    await page.waitForTimeout(pollIntervalMs);
    const sample = await getVideoStats(page);
    const delta = videoSnapshotToDelta(sample, baseline);
    const receiving = delta.inboundBytesDelta > 0 || delta.inboundPacketsDelta > 0;
    if (receiving)
      return delta;
    baseline = sample;
  }
  return {
    outboundBytesDelta: 0,
    inboundBytesDelta: 0,
    outboundPacketsDelta: 0,
    inboundPacketsDelta: 0
  };
}
/**
 * Dump full RTC connection diagnostics for debugging audio flow failures:
 * per-connection states, sender/receiver track summaries, and RTP counters.
 */
export async function dumpRtcDiagnostics(page: Page): Promise<string> {
  return page.evaluate(async () => {
    const tracked = (window as any).__rtcConnections as RTCPeerConnection[] | undefined;
    if (!tracked?.length)
      return 'No connections tracked';
    // Summarize a sender/receiver track as kind:enabled:readyState.
    const describeTrack = (track: MediaStreamTrack | null) =>
      `${track?.kind ?? 'none'}:enabled=${track?.enabled}:${track?.readyState ?? 'null'}`;
    const out: string[] = [`Total connections: ${tracked.length}`];
    for (let i = 0; i < tracked.length; i++) {
      const pc = tracked[i];
      out.push(`PC[${i}]: connection=${pc.connectionState}, signaling=${pc.signalingState}`);
      out.push(`  senders=[${pc.getSenders().map((s) => describeTrack(s.track)).join(', ')}]`);
      out.push(`  receivers=[${pc.getReceivers().map((r) => describeTrack(r.track)).join(', ')}]`);
      try {
        const report = await pc.getStats();
        report.forEach((entry: any) => {
          if (entry.type !== 'outbound-rtp' && entry.type !== 'inbound-rtp')
            return;
          const kind = entry.kind ?? entry.mediaType;
          const bytes = entry.type === 'outbound-rtp' ? entry.bytesSent : entry.bytesReceived;
          const packets = entry.type === 'outbound-rtp' ? entry.packetsSent : entry.packetsReceived;
          out.push(`  ${entry.type}: kind=${kind}, bytes=${bytes}, packets=${packets}`);
        });
      } catch (err: any) {
        out.push(`  getStats() failed: ${err?.message ?? err}`);
      }
    }
    return out.join('\n');
  });
}