test: Add playwright main usage test
Some checks failed
Deploy Web Apps / deploy (push) Has been cancelled
Queue Release Build / prepare (push) Successful in 21s
Queue Release Build / build-linux (push) Successful in 27m44s
Queue Release Build / build-windows (push) Successful in 32m16s
Queue Release Build / finalize (push) Successful in 1m54s
Some checks failed
Deploy Web Apps / deploy (push) Has been cancelled
Queue Release Build / prepare (push) Successful in 21s
Queue Release Build / build-linux (push) Successful in 27m44s
Queue Release Build / build-windows (push) Successful in 32m16s
Queue Release Build / finalize (push) Successful in 1m54s
This commit is contained in:
4
e2e/fixtures/base.ts
Normal file
4
e2e/fixtures/base.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
import { test as base } from '@playwright/test';
|
||||
|
||||
export const test = base;
|
||||
export { expect } from '@playwright/test';
|
||||
202
e2e/fixtures/multi-client.ts
Normal file
202
e2e/fixtures/multi-client.ts
Normal file
@@ -0,0 +1,202 @@
|
||||
import {
|
||||
test as base,
|
||||
chromium,
|
||||
type Page,
|
||||
type BrowserContext,
|
||||
type Browser
|
||||
} from '@playwright/test';
|
||||
import { spawn, type ChildProcess } from 'node:child_process';
|
||||
import { once } from 'node:events';
|
||||
import { createServer } from 'node:net';
|
||||
import { join } from 'node:path';
|
||||
import { installTestServerEndpoint } from '../helpers/seed-test-endpoint';
|
||||
|
||||
/** One simulated user: a dedicated Playwright page plus its isolated browser context. */
export interface Client {
  page: Page;
  context: BrowserContext;
}

/** Handle to a spawned signaling-server child process. */
interface TestServerHandle {
  // Port the server listens on (allocated dynamically per run).
  port: number;
  // Convenience base URL, i.e. `http://localhost:${port}`.
  url: string;
  // Terminates the child process (SIGTERM, escalating to SIGKILL — see stopServer).
  stop: () => Promise<void>;
}

/** Fixtures contributed by this file's extended `test`. */
interface MultiClientFixture {
  createClient: () => Promise<Client>;
  testServer: TestServerHandle;
}

// Pre-recorded tone fed to Chromium's fake audio-capture device.
const FAKE_AUDIO_FILE = join(__dirname, 'test-tone.wav');
// Chromium flags: fake mic/camera devices, auto-accept the permission UI,
// and capture audio from the WAV file above instead of real hardware.
const CHROMIUM_FAKE_MEDIA_ARGS = [
  '--use-fake-device-for-media-stream',
  '--use-fake-ui-for-media-stream',
  `--use-file-for-fake-audio-capture=${FAKE_AUDIO_FILE}`
];
// e2e/ root and the Node script that boots an isolated signaling server.
const E2E_DIR = join(__dirname, '..');
const START_SERVER_SCRIPT = join(E2E_DIR, 'helpers', 'start-test-server.js');
|
||||
|
||||
export const test = base.extend<MultiClientFixture>({
|
||||
testServer: async ({ playwright: _playwright }, use: (testServer: TestServerHandle) => Promise<void>) => {
|
||||
const testServer = await startTestServer();
|
||||
|
||||
await use(testServer);
|
||||
await testServer.stop();
|
||||
},
|
||||
|
||||
createClient: async ({ testServer }, use) => {
|
||||
const browsers: Browser[] = [];
|
||||
const clients: Client[] = [];
|
||||
const factory = async (): Promise<Client> => {
|
||||
// Launch a dedicated browser per client so each gets its own fake
|
||||
// audio device - shared browsers can starve the first context's
|
||||
// audio capture under load.
|
||||
const browser = await chromium.launch({ args: CHROMIUM_FAKE_MEDIA_ARGS });
|
||||
|
||||
browsers.push(browser);
|
||||
|
||||
const context = await browser.newContext({
|
||||
permissions: ['microphone', 'camera'],
|
||||
baseURL: 'http://localhost:4200'
|
||||
});
|
||||
|
||||
await installTestServerEndpoint(context, testServer.port);
|
||||
|
||||
const page = await context.newPage();
|
||||
|
||||
clients.push({ page, context });
|
||||
return { page, context };
|
||||
};
|
||||
|
||||
await use(factory);
|
||||
|
||||
for (const client of clients) {
|
||||
await client.context.close();
|
||||
}
|
||||
|
||||
for (const browser of browsers) {
|
||||
await browser.close();
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
export { expect } from '@playwright/test';
|
||||
|
||||
async function startTestServer(retries = 3): Promise<TestServerHandle> {
|
||||
for (let attempt = 1; attempt <= retries; attempt++) {
|
||||
const port = await allocatePort();
|
||||
const child = spawn(process.execPath, [START_SERVER_SCRIPT], {
|
||||
cwd: E2E_DIR,
|
||||
env: {
|
||||
...process.env,
|
||||
TEST_SERVER_PORT: String(port)
|
||||
},
|
||||
stdio: 'pipe'
|
||||
});
|
||||
|
||||
child.stdout?.on('data', (chunk: Buffer | string) => {
|
||||
process.stdout.write(chunk.toString());
|
||||
});
|
||||
|
||||
child.stderr?.on('data', (chunk: Buffer | string) => {
|
||||
process.stderr.write(chunk.toString());
|
||||
});
|
||||
|
||||
try {
|
||||
await waitForServerReady(port, child);
|
||||
} catch (error) {
|
||||
await stopServer(child);
|
||||
|
||||
if (attempt < retries) {
|
||||
console.log(`[E2E Server] Attempt ${attempt} failed, retrying...`);
|
||||
continue;
|
||||
}
|
||||
|
||||
throw error;
|
||||
}
|
||||
|
||||
return {
|
||||
port,
|
||||
url: `http://localhost:${port}`,
|
||||
stop: async () => {
|
||||
await stopServer(child);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
throw new Error('startTestServer: unreachable');
|
||||
}
|
||||
|
||||
async function allocatePort(): Promise<number> {
|
||||
return new Promise<number>((resolve, reject) => {
|
||||
const probe = createServer();
|
||||
|
||||
probe.once('error', reject);
|
||||
probe.listen(0, '127.0.0.1', () => {
|
||||
const address = probe.address();
|
||||
|
||||
if (!address || typeof address === 'string') {
|
||||
probe.close();
|
||||
reject(new Error('Failed to resolve an ephemeral test server port'));
|
||||
return;
|
||||
}
|
||||
|
||||
const { port } = address;
|
||||
|
||||
probe.close((error) => {
|
||||
if (error) {
|
||||
reject(error);
|
||||
return;
|
||||
}
|
||||
|
||||
resolve(port);
|
||||
});
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async function waitForServerReady(port: number, child: ChildProcess, timeoutMs = 30_000): Promise<void> {
|
||||
const readyUrl = `http://127.0.0.1:${port}/api/servers?limit=1`;
|
||||
const deadline = Date.now() + timeoutMs;
|
||||
|
||||
while (Date.now() < deadline) {
|
||||
if (child.exitCode !== null) {
|
||||
throw new Error(`Test server exited before becoming ready (exit code ${child.exitCode})`);
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await fetch(readyUrl);
|
||||
|
||||
if (response.ok) {
|
||||
return;
|
||||
}
|
||||
} catch {
|
||||
// Server still starting.
|
||||
}
|
||||
|
||||
await wait(250);
|
||||
}
|
||||
|
||||
throw new Error(`Timed out waiting for test server on port ${port}`);
|
||||
}
|
||||
|
||||
async function stopServer(child: ChildProcess): Promise<void> {
|
||||
if (child.exitCode !== null) {
|
||||
return;
|
||||
}
|
||||
|
||||
child.kill('SIGTERM');
|
||||
|
||||
const exited = await Promise.race([once(child, 'exit').then(() => true), wait(3_000).then(() => false)]);
|
||||
|
||||
if (!exited && child.exitCode === null) {
|
||||
child.kill('SIGKILL');
|
||||
await once(child, 'exit');
|
||||
}
|
||||
}
|
||||
|
||||
function wait(durationMs: number): Promise<void> {
|
||||
return new Promise((resolve) => {
|
||||
setTimeout(resolve, durationMs);
|
||||
});
|
||||
}
|
||||
BIN
e2e/fixtures/test-tone.wav
Normal file
BIN
e2e/fixtures/test-tone.wav
Normal file
Binary file not shown.
77
e2e/helpers/seed-test-endpoint.ts
Normal file
77
e2e/helpers/seed-test-endpoint.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
import { type BrowserContext, type Page } from '@playwright/test';
|
||||
|
||||
const SERVER_ENDPOINTS_STORAGE_KEY = 'metoyou_server_endpoints';
|
||||
const REMOVED_DEFAULT_KEYS_STORAGE_KEY = 'metoyou_removed_default_server_keys';
|
||||
|
||||
type SeededEndpointStorageState = {
|
||||
key: string;
|
||||
removedKey: string;
|
||||
endpoints: {
|
||||
id: string;
|
||||
name: string;
|
||||
url: string;
|
||||
isActive: boolean;
|
||||
isDefault: boolean;
|
||||
status: string;
|
||||
}[];
|
||||
};
|
||||
|
||||
function buildSeededEndpointStorageState(
|
||||
port: number = Number(process.env.TEST_SERVER_PORT) || 3099
|
||||
): SeededEndpointStorageState {
|
||||
const endpoint = {
|
||||
id: 'e2e-test-server',
|
||||
name: 'E2E Test Server',
|
||||
url: `http://localhost:${port}`,
|
||||
isActive: true,
|
||||
isDefault: false,
|
||||
status: 'unknown'
|
||||
};
|
||||
|
||||
return {
|
||||
key: SERVER_ENDPOINTS_STORAGE_KEY,
|
||||
removedKey: REMOVED_DEFAULT_KEYS_STORAGE_KEY,
|
||||
endpoints: [endpoint]
|
||||
};
|
||||
}
|
||||
|
||||
function applySeededEndpointStorageState(storageState: SeededEndpointStorageState): void {
|
||||
try {
|
||||
const storage = window.localStorage;
|
||||
|
||||
storage.setItem(storageState.key, JSON.stringify(storageState.endpoints));
|
||||
storage.setItem(storageState.removedKey, JSON.stringify(['default', 'toju-primary', 'toju-sweden']));
|
||||
} catch {
|
||||
// about:blank and some Playwright UI pages deny localStorage access.
|
||||
}
|
||||
}
|
||||
|
||||
export async function installTestServerEndpoint(
|
||||
context: BrowserContext,
|
||||
port: number = Number(process.env.TEST_SERVER_PORT) || 3099
|
||||
): Promise<void> {
|
||||
const storageState = buildSeededEndpointStorageState(port);
|
||||
|
||||
await context.addInitScript(applySeededEndpointStorageState, storageState);
|
||||
}
|
||||
|
||||
/**
|
||||
* Seed localStorage with a single signal endpoint pointing at the test server.
|
||||
* Must be called AFTER navigating to the app origin (localStorage is per-origin)
|
||||
* but BEFORE the app reads from storage (i.e. before the Angular bootstrap is
|
||||
* relied upon — calling it in the first goto() landing page is fine since the
|
||||
* page will re-read on next navigation/reload).
|
||||
*
|
||||
* Typical usage:
|
||||
* await page.goto('/');
|
||||
* await seedTestServerEndpoint(page);
|
||||
* await page.reload(); // App now picks up the test endpoint
|
||||
*/
|
||||
export async function seedTestServerEndpoint(
|
||||
page: Page,
|
||||
port: number = Number(process.env.TEST_SERVER_PORT) || 3099
|
||||
): Promise<void> {
|
||||
const storageState = buildSeededEndpointStorageState(port);
|
||||
|
||||
await page.evaluate(applySeededEndpointStorageState, storageState);
|
||||
}
|
||||
107
e2e/helpers/start-test-server.js
Normal file
107
e2e/helpers/start-test-server.js
Normal file
@@ -0,0 +1,107 @@
|
||||
/**
 * Launches an isolated MetoYou signaling server for E2E tests.
 *
 * Creates a temporary data directory so the test server gets its own
 * fresh SQLite database. The server process inherits stdio so Playwright
 * can watch stdout for readiness and the developer can see logs.
 *
 * Cleanup: the temp directory is removed when the process exits.
 */
const { mkdtempSync, writeFileSync, mkdirSync, rmSync } = require('fs');
const { join } = require('path');
const { tmpdir } = require('os');
const { spawn } = require('child_process');

const TEST_PORT = process.env.TEST_SERVER_PORT || '3099';
const SERVER_DIR = join(__dirname, '..', '..', 'server');
const SERVER_ENTRY = join(SERVER_DIR, 'src', 'index.ts');
const SERVER_TSCONFIG = join(SERVER_DIR, 'tsconfig.json');

// ── Create isolated temp data directory ──────────────────────────────
const tmpDir = mkdtempSync(join(tmpdir(), 'metoyou-e2e-'));
const dataDir = join(tmpDir, 'data');

mkdirSync(dataDir, { recursive: true });

// Minimal server configuration; link previews disabled for determinism.
writeFileSync(
  join(dataDir, 'variables.json'),
  JSON.stringify({
    serverPort: parseInt(TEST_PORT, 10),
    serverProtocol: 'http',
    serverHost: '',
    klipyApiKey: '',
    releaseManifestUrl: '',
    linkPreview: { enabled: false, cacheTtlMinutes: 60, maxCacheSizeMb: 10 },
  })
);

console.log(`[E2E Server] Temp data dir: ${tmpDir}`);
console.log(`[E2E Server] Starting on port ${TEST_PORT}...`);

// ── Spawn the server with cwd = temp dir ─────────────────────────────
// process.cwd() is used by getRuntimeBaseDir() in the server, so data/
// (database, variables.json) will resolve to our temp directory.
// Module resolution (require/import) uses __dirname, so server source
// and node_modules are found from the real server/ directory.
const child = spawn(
  'npx',
  ['ts-node', '--project', SERVER_TSCONFIG, SERVER_ENTRY],
  {
    cwd: tmpDir,
    env: {
      ...process.env,
      PORT: TEST_PORT,
      SSL: 'false',
      NODE_ENV: 'test',
      DB_SYNCHRONIZE: 'true',
    },
    stdio: 'inherit',
    shell: true,
  }
);

let shuttingDown = false;

child.on('error', (err) => {
  console.error('[E2E Server] Failed to start:', err.message);
  cleanup();
  process.exit(1);
});

child.on('exit', (code) => {
  console.log(`[E2E Server] Exited with code ${code}`);
  cleanup();

  if (shuttingDown) {
    process.exit(0);
  }
});

// ── Cleanup on signals ───────────────────────────────────────────────
function cleanup() {
  try {
    rmSync(tmpDir, { recursive: true, force: true });
    console.log(`[E2E Server] Cleaned up temp dir: ${tmpDir}`);
  } catch {
    // already gone
  }
}

function shutdown() {
  if (shuttingDown) {
    return;
  }

  shuttingDown = true;
  child.kill('SIGTERM');

  // Give child 3s to exit, then force kill. Also check signalCode: a child
  // already terminated by a signal has exitCode === null, and the original
  // check would SIGKILL a dead process.
  const killTimer = setTimeout(() => {
    if (child.exitCode === null && child.signalCode === null) {
      child.kill('SIGKILL');
    }
  }, 3_000);

  // Fix: without unref() this timer keeps the parent's event loop alive for
  // the full 3s even when the child exits promptly.
  killTimer.unref();
}

process.on('SIGTERM', shutdown);
process.on('SIGINT', shutdown);
process.on('exit', cleanup);
|
||||
717
e2e/helpers/webrtc-helpers.ts
Normal file
717
e2e/helpers/webrtc-helpers.ts
Normal file
@@ -0,0 +1,717 @@
|
||||
/* eslint-disable @typescript-eslint/no-explicit-any */
|
||||
import { type Page } from '@playwright/test';
|
||||
|
||||
/**
 * Install RTCPeerConnection monkey-patch on a page BEFORE navigating.
 * Tracks all created peer connections and their remote tracks so tests
 * can inspect WebRTC state via `page.evaluate()`.
 *
 * Exposes on `window`: `__rtcConnections` (all constructed peer connections),
 * `__rtcRemoteTracks` (kind/id/readyState of each received track), and
 * `__lastRtcState` (most recent connectionState transition).
 *
 * Call immediately after page creation, before any `goto()`.
 */
export async function installWebRTCTracking(page: Page): Promise<void> {
  await page.addInitScript(() => {
    const connections: RTCPeerConnection[] = [];

    (window as any).__rtcConnections = connections;
    (window as any).__rtcRemoteTracks = [] as { kind: string; id: string; readyState: string }[];

    const OriginalRTCPeerConnection = window.RTCPeerConnection;

    // Wrap the constructor so every connection the app creates is recorded.
    (window as any).RTCPeerConnection = function(this: RTCPeerConnection, ...args: any[]) {
      const pc: RTCPeerConnection = new OriginalRTCPeerConnection(...args);

      connections.push(pc);

      pc.addEventListener('connectionstatechange', () => {
        (window as any).__lastRtcState = pc.connectionState;
      });

      pc.addEventListener('track', (event: RTCTrackEvent) => {
        (window as any).__rtcRemoteTracks.push({
          kind: event.track.kind,
          id: event.track.id,
          readyState: event.track.readyState
        });
      });

      return pc;
    } as any;

    // Preserve instanceof checks and static members on the wrapped constructor.
    (window as any).RTCPeerConnection.prototype = OriginalRTCPeerConnection.prototype;
    Object.setPrototypeOf((window as any).RTCPeerConnection, OriginalRTCPeerConnection);

    // Patch getUserMedia to use an AudioContext oscillator for audio
    // instead of the hardware capture device. Chromium's fake audio
    // device intermittently fails to produce frames after renegotiation.
    const origGetUserMedia = navigator.mediaDevices.getUserMedia.bind(navigator.mediaDevices);

    navigator.mediaDevices.getUserMedia = async (constraints?: MediaStreamConstraints) => {
      const wantsAudio = !!constraints?.audio;

      if (!wantsAudio) {
        return origGetUserMedia(constraints);
      }

      // Get the original stream (may include video)
      const originalStream = await origGetUserMedia(constraints);
      // NOTE(review): this AudioContext/oscillator is never closed/stopped;
      // acceptable for short-lived test pages, but it does leak per call.
      const audioCtx = new AudioContext();
      const oscillator = audioCtx.createOscillator();

      // 440Hz test tone stands in for microphone audio.
      oscillator.frequency.value = 440;

      const dest = audioCtx.createMediaStreamDestination();

      oscillator.connect(dest);
      oscillator.start();

      const synthAudioTrack = dest.stream.getAudioTracks()[0];
      const resultStream = new MediaStream();

      resultStream.addTrack(synthAudioTrack);

      // Keep any video tracks from the original stream
      for (const videoTrack of originalStream.getVideoTracks()) {
        resultStream.addTrack(videoTrack);
      }

      // Stop original audio tracks since we're not using them
      for (const track of originalStream.getAudioTracks()) {
        track.stop();
      }

      return resultStream;
    };

    // Patch getDisplayMedia to return a synthetic screen share stream
    // (canvas-based video + 880Hz oscillator audio) so the browser
    // picker dialog is never shown.
    navigator.mediaDevices.getDisplayMedia = async (_constraints?: DisplayMediaStreamOptions) => {
      const canvas = document.createElement('canvas');

      canvas.width = 640;
      canvas.height = 480;

      const ctx = canvas.getContext('2d');

      if (!ctx) {
        throw new Error('Canvas 2D context unavailable');
      }

      let frameCount = 0;

      // Draw animated frames so video stats show increasing bytes
      const drawFrame = () => {
        frameCount++;
        ctx.fillStyle = `hsl(${frameCount % 360}, 70%, 50%)`;
        ctx.fillRect(0, 0, canvas.width, canvas.height);
        ctx.fillStyle = '#fff';
        ctx.font = '24px monospace';
        ctx.fillText(`Screen Share Frame ${frameCount}`, 40, 60);
      };

      drawFrame();
      const drawInterval = setInterval(drawFrame, 100);
      const videoStream = canvas.captureStream(10); // 10 fps
      const videoTrack = videoStream.getVideoTracks()[0];

      // Stop drawing when the track ends
      videoTrack.addEventListener('ended', () => clearInterval(drawInterval));

      // Create 880Hz oscillator for screen share audio (distinct from 440Hz voice)
      const audioCtx = new AudioContext();
      const osc = audioCtx.createOscillator();

      osc.frequency.value = 880;

      const dest = audioCtx.createMediaStreamDestination();

      osc.connect(dest);
      osc.start();

      const audioTrack = dest.stream.getAudioTracks()[0];
      // Combine video + audio into one stream
      const resultStream = new MediaStream([videoTrack, audioTrack]);

      // Tag the stream so tests can identify it
      (resultStream as any).__isScreenShare = true;

      return resultStream;
    };
  });
}
|
||||
|
||||
/**
|
||||
* Wait until at least one RTCPeerConnection reaches the 'connected' state.
|
||||
*/
|
||||
export async function waitForPeerConnected(page: Page, timeout = 30_000): Promise<void> {
|
||||
await page.waitForFunction(
|
||||
() => (window as any).__rtcConnections?.some(
|
||||
(pc: RTCPeerConnection) => pc.connectionState === 'connected'
|
||||
) ?? false,
|
||||
{ timeout }
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check that a peer connection is still in 'connected' state (not failed/disconnected).
|
||||
*/
|
||||
export async function isPeerStillConnected(page: Page): Promise<boolean> {
|
||||
return page.evaluate(
|
||||
() => (window as any).__rtcConnections?.some(
|
||||
(pc: RTCPeerConnection) => pc.connectionState === 'connected'
|
||||
) ?? false
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Get outbound and inbound audio RTP stats aggregated across all peer
 * connections. Uses a per-connection high water mark stored on `window` so
 * that connections that close mid-measurement still contribute their last
 * known counters, preventing the aggregate from going backwards.
 *
 * @returns `outbound`/`inbound` totals, each null when no connection has
 *          ever reported that direction for audio.
 */
export async function getAudioStats(page: Page): Promise<{
  outbound: { bytesSent: number; packetsSent: number } | null;
  inbound: { bytesReceived: number; packetsReceived: number } | null;
}> {
  return page.evaluate(async () => {
    const connections = (window as any).__rtcConnections as RTCPeerConnection[] | undefined;

    if (!connections?.length)
      return { outbound: null, inbound: null };

    // Last-seen counters per connection index ("high water mark").
    interface HWMEntry {
      outBytesSent: number;
      outPacketsSent: number;
      inBytesReceived: number;
      inPacketsReceived: number;
      hasOutbound: boolean;
      hasInbound: boolean;
    };

    // Persist the HWM map on window so repeated calls accumulate across
    // connection churn within the same page.
    const hwm: Record<number, HWMEntry> = (window as any).__rtcStatsHWM =
      ((window as any).__rtcStatsHWM as Record<number, HWMEntry> | undefined) ?? {};

    for (let idx = 0; idx < connections.length; idx++) {
      let stats: RTCStatsReport;

      try {
        stats = await connections[idx].getStats();
      } catch {
        continue; // closed connection - keep its last HWM
      }

      let obytes = 0;
      let opackets = 0;
      let ibytes = 0;
      let ipackets = 0;
      let hasOut = false;
      let hasIn = false;

      stats.forEach((report: any) => {
        // `mediaType` is the legacy spelling of `kind` on older reports.
        const kind = report.kind ?? report.mediaType;

        if (report.type === 'outbound-rtp' && kind === 'audio') {
          hasOut = true;
          obytes += report.bytesSent ?? 0;
          opackets += report.packetsSent ?? 0;
        }

        if (report.type === 'inbound-rtp' && kind === 'audio') {
          hasIn = true;
          ibytes += report.bytesReceived ?? 0;
          ipackets += report.packetsReceived ?? 0;
        }
      });

      // Only overwrite the HWM when this poll actually saw audio reports.
      if (hasOut || hasIn) {
        hwm[idx] = {
          outBytesSent: obytes,
          outPacketsSent: opackets,
          inBytesReceived: ibytes,
          inPacketsReceived: ipackets,
          hasOutbound: hasOut,
          hasInbound: hasIn
        };
      }
    }

    // Aggregate across every connection ever observed (live or closed).
    let totalOutBytes = 0;
    let totalOutPackets = 0;
    let totalInBytes = 0;
    let totalInPackets = 0;
    let anyOutbound = false;
    let anyInbound = false;

    for (const entry of Object.values(hwm)) {
      totalOutBytes += entry.outBytesSent;
      totalOutPackets += entry.outPacketsSent;
      totalInBytes += entry.inBytesReceived;
      totalInPackets += entry.inPacketsReceived;

      if (entry.hasOutbound)
        anyOutbound = true;

      if (entry.hasInbound)
        anyInbound = true;
    }

    return {
      outbound: anyOutbound
        ? { bytesSent: totalOutBytes, packetsSent: totalOutPackets }
        : null,
      inbound: anyInbound
        ? { bytesReceived: totalInBytes, packetsReceived: totalInPackets }
        : null
    };
  });
}
|
||||
|
||||
/**
|
||||
* Snapshot audio stats, wait `durationMs`, snapshot again, and return the delta.
|
||||
* Useful for verifying audio is actively flowing (bytes increasing).
|
||||
*/
|
||||
export async function getAudioStatsDelta(page: Page, durationMs = 3_000): Promise<{
|
||||
outboundBytesDelta: number;
|
||||
inboundBytesDelta: number;
|
||||
outboundPacketsDelta: number;
|
||||
inboundPacketsDelta: number;
|
||||
}> {
|
||||
const before = await getAudioStats(page);
|
||||
|
||||
await page.waitForTimeout(durationMs);
|
||||
|
||||
const after = await getAudioStats(page);
|
||||
|
||||
return {
|
||||
outboundBytesDelta: (after.outbound?.bytesSent ?? 0) - (before.outbound?.bytesSent ?? 0),
|
||||
inboundBytesDelta: (after.inbound?.bytesReceived ?? 0) - (before.inbound?.bytesReceived ?? 0),
|
||||
outboundPacketsDelta: (after.outbound?.packetsSent ?? 0) - (before.outbound?.packetsSent ?? 0),
|
||||
inboundPacketsDelta: (after.inbound?.packetsReceived ?? 0) - (before.inbound?.packetsReceived ?? 0)
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Wait until at least one connection has both outbound-rtp and inbound-rtp
 * audio reports. Call after `waitForPeerConnected` to ensure the audio
 * pipeline is ready before measuring deltas.
 *
 * @throws Playwright timeout error when no connection reports both
 *         directions within `timeout` ms.
 */
export async function waitForAudioStatsPresent(page: Page, timeout = 15_000): Promise<void> {
  await page.waitForFunction(
    // Async predicate: Playwright polls it and awaits the returned promise.
    async () => {
      const connections = (window as any).__rtcConnections as RTCPeerConnection[] | undefined;

      if (!connections?.length)
        return false;

      for (const pc of connections) {
        let stats: RTCStatsReport;

        try {
          stats = await pc.getStats();
        } catch {
          // getStats rejects on closed connections - skip them.
          continue;
        }

        let hasOut = false;
        let hasIn = false;

        stats.forEach((report: any) => {
          // `mediaType` is the legacy spelling of `kind` on older reports.
          const kind = report.kind ?? report.mediaType;

          if (report.type === 'outbound-rtp' && kind === 'audio')
            hasOut = true;

          if (report.type === 'inbound-rtp' && kind === 'audio')
            hasIn = true;
        });

        // A single connection must carry both directions.
        if (hasOut && hasIn)
          return true;
      }

      return false;
    },
    { timeout }
  );
}
|
||||
|
||||
interface AudioFlowDelta {
|
||||
outboundBytesDelta: number;
|
||||
inboundBytesDelta: number;
|
||||
outboundPacketsDelta: number;
|
||||
inboundPacketsDelta: number;
|
||||
}
|
||||
|
||||
function snapshotToDelta(
|
||||
curr: Awaited<ReturnType<typeof getAudioStats>>,
|
||||
prev: Awaited<ReturnType<typeof getAudioStats>>
|
||||
): AudioFlowDelta {
|
||||
return {
|
||||
outboundBytesDelta: (curr.outbound?.bytesSent ?? 0) - (prev.outbound?.bytesSent ?? 0),
|
||||
inboundBytesDelta: (curr.inbound?.bytesReceived ?? 0) - (prev.inbound?.bytesReceived ?? 0),
|
||||
outboundPacketsDelta: (curr.outbound?.packetsSent ?? 0) - (prev.outbound?.packetsSent ?? 0),
|
||||
inboundPacketsDelta: (curr.inbound?.packetsReceived ?? 0) - (prev.inbound?.packetsReceived ?? 0)
|
||||
};
|
||||
}
|
||||
|
||||
function isDeltaFlowing(delta: AudioFlowDelta): boolean {
|
||||
const outFlowing = delta.outboundBytesDelta > 0 || delta.outboundPacketsDelta > 0;
|
||||
const inFlowing = delta.inboundBytesDelta > 0 || delta.inboundPacketsDelta > 0;
|
||||
|
||||
return outFlowing && inFlowing;
|
||||
}
|
||||
|
||||
/**
|
||||
* Poll until two consecutive HWM-based reads show both outbound and inbound
|
||||
* audio byte counts increasing. Combines per-connection high-water marks
|
||||
* (which prevent totals from going backwards after connection churn) with
|
||||
* consecutive comparison (which avoids a stale single baseline).
|
||||
*/
|
||||
export async function waitForAudioFlow(
|
||||
page: Page,
|
||||
timeoutMs = 30_000,
|
||||
pollIntervalMs = 1_000
|
||||
): Promise<AudioFlowDelta> {
|
||||
const deadline = Date.now() + timeoutMs;
|
||||
|
||||
let prev = await getAudioStats(page);
|
||||
|
||||
while (Date.now() < deadline) {
|
||||
await page.waitForTimeout(pollIntervalMs);
|
||||
const curr = await getAudioStats(page);
|
||||
const delta = snapshotToDelta(curr, prev);
|
||||
|
||||
if (isDeltaFlowing(delta)) {
|
||||
return delta;
|
||||
}
|
||||
|
||||
prev = curr;
|
||||
}
|
||||
|
||||
// Timeout - return zero deltas so the caller's assertion reports the failure.
|
||||
return {
|
||||
outboundBytesDelta: 0,
|
||||
inboundBytesDelta: 0,
|
||||
outboundPacketsDelta: 0,
|
||||
inboundPacketsDelta: 0
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Get outbound and inbound video RTP stats aggregated across all peer
 * connections. Uses the same HWM pattern as {@link getAudioStats}.
 *
 * @returns `outbound`/`inbound` totals, each null when no connection has
 *          ever reported that direction for video.
 */
export async function getVideoStats(page: Page): Promise<{
  outbound: { bytesSent: number; packetsSent: number } | null;
  inbound: { bytesReceived: number; packetsReceived: number } | null;
}> {
  return page.evaluate(async () => {
    const connections = (window as any).__rtcConnections as RTCPeerConnection[] | undefined;

    if (!connections?.length)
      return { outbound: null, inbound: null };

    // Last-seen video counters per connection index ("high water mark").
    interface VHWM {
      outBytesSent: number;
      outPacketsSent: number;
      inBytesReceived: number;
      inPacketsReceived: number;
      hasOutbound: boolean;
      hasInbound: boolean;
    }

    // Separate window key from the audio HWM so the two never mix.
    const hwm: Record<number, VHWM> = (window as any).__rtcVideoStatsHWM =
      ((window as any).__rtcVideoStatsHWM as Record<number, VHWM> | undefined) ?? {};

    for (let idx = 0; idx < connections.length; idx++) {
      let stats: RTCStatsReport;

      try {
        stats = await connections[idx].getStats();
      } catch {
        // Closed connection - keep its last HWM contribution.
        continue;
      }

      let obytes = 0;
      let opackets = 0;
      let ibytes = 0;
      let ipackets = 0;
      let hasOut = false;
      let hasIn = false;

      stats.forEach((report: any) => {
        // `mediaType` is the legacy spelling of `kind` on older reports.
        const kind = report.kind ?? report.mediaType;

        if (report.type === 'outbound-rtp' && kind === 'video') {
          hasOut = true;
          obytes += report.bytesSent ?? 0;
          opackets += report.packetsSent ?? 0;
        }

        if (report.type === 'inbound-rtp' && kind === 'video') {
          hasIn = true;
          ibytes += report.bytesReceived ?? 0;
          ipackets += report.packetsReceived ?? 0;
        }
      });

      // Only overwrite the HWM when this poll actually saw video reports.
      if (hasOut || hasIn) {
        hwm[idx] = {
          outBytesSent: obytes,
          outPacketsSent: opackets,
          inBytesReceived: ibytes,
          inPacketsReceived: ipackets,
          hasOutbound: hasOut,
          hasInbound: hasIn
        };
      }
    }

    // Aggregate across every connection ever observed (live or closed).
    let totalOutBytes = 0;
    let totalOutPackets = 0;
    let totalInBytes = 0;
    let totalInPackets = 0;
    let anyOutbound = false;
    let anyInbound = false;

    for (const entry of Object.values(hwm)) {
      totalOutBytes += entry.outBytesSent;
      totalOutPackets += entry.outPacketsSent;
      totalInBytes += entry.inBytesReceived;
      totalInPackets += entry.inPacketsReceived;

      if (entry.hasOutbound)
        anyOutbound = true;

      if (entry.hasInbound)
        anyInbound = true;
    }

    return {
      outbound: anyOutbound
        ? { bytesSent: totalOutBytes, packetsSent: totalOutPackets }
        : null,
      inbound: anyInbound
        ? { bytesReceived: totalInBytes, packetsReceived: totalInPackets }
        : null
    };
  });
}
|
||||
|
||||
/**
 * Wait until at least one connection has both outbound-rtp and inbound-rtp
 * video reports.
 *
 * @throws Playwright timeout error when no connection reports both
 *         directions within `timeout` ms.
 */
export async function waitForVideoStatsPresent(page: Page, timeout = 15_000): Promise<void> {
  await page.waitForFunction(
    // Async predicate: Playwright polls it and awaits the returned promise.
    async () => {
      const connections = (window as any).__rtcConnections as RTCPeerConnection[] | undefined;

      if (!connections?.length)
        return false;

      for (const pc of connections) {
        let stats: RTCStatsReport;

        try {
          stats = await pc.getStats();
        } catch {
          // getStats rejects on closed connections - skip them.
          continue;
        }

        let hasOut = false;
        let hasIn = false;

        stats.forEach((report: any) => {
          // `mediaType` is the legacy spelling of `kind` on older reports.
          const kind = report.kind ?? report.mediaType;

          if (report.type === 'outbound-rtp' && kind === 'video')
            hasOut = true;

          if (report.type === 'inbound-rtp' && kind === 'video')
            hasIn = true;
        });

        // A single connection must carry both directions.
        if (hasOut && hasIn)
          return true;
      }

      return false;
    },
    { timeout }
  );
}
|
||||
|
||||
interface VideoFlowDelta {
|
||||
outboundBytesDelta: number;
|
||||
inboundBytesDelta: number;
|
||||
outboundPacketsDelta: number;
|
||||
inboundPacketsDelta: number;
|
||||
}
|
||||
|
||||
function videoSnapshotToDelta(
|
||||
curr: Awaited<ReturnType<typeof getVideoStats>>,
|
||||
prev: Awaited<ReturnType<typeof getVideoStats>>
|
||||
): VideoFlowDelta {
|
||||
return {
|
||||
outboundBytesDelta: (curr.outbound?.bytesSent ?? 0) - (prev.outbound?.bytesSent ?? 0),
|
||||
inboundBytesDelta: (curr.inbound?.bytesReceived ?? 0) - (prev.inbound?.bytesReceived ?? 0),
|
||||
outboundPacketsDelta: (curr.outbound?.packetsSent ?? 0) - (prev.outbound?.packetsSent ?? 0),
|
||||
inboundPacketsDelta: (curr.inbound?.packetsReceived ?? 0) - (prev.inbound?.packetsReceived ?? 0)
|
||||
};
|
||||
}
|
||||
|
||||
function isVideoDeltaFlowing(delta: VideoFlowDelta): boolean {
|
||||
const outFlowing = delta.outboundBytesDelta > 0 || delta.outboundPacketsDelta > 0;
|
||||
const inFlowing = delta.inboundBytesDelta > 0 || delta.inboundPacketsDelta > 0;
|
||||
|
||||
return outFlowing && inFlowing;
|
||||
}
|
||||
|
||||
/**
|
||||
* Poll until two consecutive HWM-based reads show both outbound and inbound
|
||||
* video byte counts increasing - proving screen share video is flowing.
|
||||
*/
|
||||
export async function waitForVideoFlow(
|
||||
page: Page,
|
||||
timeoutMs = 30_000,
|
||||
pollIntervalMs = 1_000
|
||||
): Promise<VideoFlowDelta> {
|
||||
const deadline = Date.now() + timeoutMs;
|
||||
|
||||
let prev = await getVideoStats(page);
|
||||
|
||||
while (Date.now() < deadline) {
|
||||
await page.waitForTimeout(pollIntervalMs);
|
||||
const curr = await getVideoStats(page);
|
||||
const delta = videoSnapshotToDelta(curr, prev);
|
||||
|
||||
if (isVideoDeltaFlowing(delta)) {
|
||||
return delta;
|
||||
}
|
||||
|
||||
prev = curr;
|
||||
}
|
||||
|
||||
return {
|
||||
outboundBytesDelta: 0,
|
||||
inboundBytesDelta: 0,
|
||||
outboundPacketsDelta: 0,
|
||||
inboundPacketsDelta: 0
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Wait until outbound video bytes are increasing (sender side).
|
||||
* Use on the page that is sharing its screen.
|
||||
*/
|
||||
export async function waitForOutboundVideoFlow(
|
||||
page: Page,
|
||||
timeoutMs = 30_000,
|
||||
pollIntervalMs = 1_000
|
||||
): Promise<VideoFlowDelta> {
|
||||
const deadline = Date.now() + timeoutMs;
|
||||
|
||||
let prev = await getVideoStats(page);
|
||||
|
||||
while (Date.now() < deadline) {
|
||||
await page.waitForTimeout(pollIntervalMs);
|
||||
const curr = await getVideoStats(page);
|
||||
const delta = videoSnapshotToDelta(curr, prev);
|
||||
|
||||
if (delta.outboundBytesDelta > 0 || delta.outboundPacketsDelta > 0) {
|
||||
return delta;
|
||||
}
|
||||
|
||||
prev = curr;
|
||||
}
|
||||
|
||||
return {
|
||||
outboundBytesDelta: 0,
|
||||
inboundBytesDelta: 0,
|
||||
outboundPacketsDelta: 0,
|
||||
inboundPacketsDelta: 0
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Wait until inbound video bytes are increasing (receiver side).
|
||||
* Use on the page that is viewing someone else's screen share.
|
||||
*/
|
||||
export async function waitForInboundVideoFlow(
|
||||
page: Page,
|
||||
timeoutMs = 30_000,
|
||||
pollIntervalMs = 1_000
|
||||
): Promise<VideoFlowDelta> {
|
||||
const deadline = Date.now() + timeoutMs;
|
||||
|
||||
let prev = await getVideoStats(page);
|
||||
|
||||
while (Date.now() < deadline) {
|
||||
await page.waitForTimeout(pollIntervalMs);
|
||||
const curr = await getVideoStats(page);
|
||||
const delta = videoSnapshotToDelta(curr, prev);
|
||||
|
||||
if (delta.inboundBytesDelta > 0 || delta.inboundPacketsDelta > 0) {
|
||||
return delta;
|
||||
}
|
||||
|
||||
prev = curr;
|
||||
}
|
||||
|
||||
return {
|
||||
outboundBytesDelta: 0,
|
||||
inboundBytesDelta: 0,
|
||||
outboundPacketsDelta: 0,
|
||||
inboundPacketsDelta: 0
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Dump full RTC connection diagnostics for debugging audio flow failures.
|
||||
*/
|
||||
export async function dumpRtcDiagnostics(page: Page): Promise<string> {
|
||||
return page.evaluate(async () => {
|
||||
const conns = (window as any).__rtcConnections as RTCPeerConnection[] | undefined;
|
||||
|
||||
if (!conns?.length)
|
||||
return 'No connections tracked';
|
||||
|
||||
const lines: string[] = [`Total connections: ${conns.length}`];
|
||||
|
||||
for (let idx = 0; idx < conns.length; idx++) {
|
||||
const pc = conns[idx];
|
||||
|
||||
lines.push(`PC[${idx}]: connection=${pc.connectionState}, signaling=${pc.signalingState}`);
|
||||
|
||||
const senders = pc.getSenders().map(
|
||||
(sender) => `${sender.track?.kind ?? 'none'}:enabled=${sender.track?.enabled}:${sender.track?.readyState ?? 'null'}`
|
||||
);
|
||||
const receivers = pc.getReceivers().map(
|
||||
(recv) => `${recv.track?.kind ?? 'none'}:enabled=${recv.track?.enabled}:${recv.track?.readyState ?? 'null'}`
|
||||
);
|
||||
|
||||
lines.push(` senders=[${senders.join(', ')}]`);
|
||||
lines.push(` receivers=[${receivers.join(', ')}]`);
|
||||
|
||||
try {
|
||||
const stats = await pc.getStats();
|
||||
|
||||
stats.forEach((report: any) => {
|
||||
if (report.type !== 'outbound-rtp' && report.type !== 'inbound-rtp')
|
||||
return;
|
||||
|
||||
const kind = report.kind ?? report.mediaType;
|
||||
const bytes = report.type === 'outbound-rtp' ? report.bytesSent : report.bytesReceived;
|
||||
const packets = report.type === 'outbound-rtp' ? report.packetsSent : report.packetsReceived;
|
||||
|
||||
lines.push(` ${report.type}: kind=${kind}, bytes=${bytes}, packets=${packets}`);
|
||||
});
|
||||
} catch (err: any) {
|
||||
lines.push(` getStats() failed: ${err?.message ?? err}`);
|
||||
}
|
||||
}
|
||||
|
||||
return lines.join('\n');
|
||||
});
|
||||
}
|
||||
143
e2e/pages/chat-messages.page.ts
Normal file
143
e2e/pages/chat-messages.page.ts
Normal file
@@ -0,0 +1,143 @@
|
||||
import {
|
||||
expect,
|
||||
type Locator,
|
||||
type Page
|
||||
} from '@playwright/test';
|
||||
|
||||
/**
 * In-memory file descriptor used to synthesize drag-and-drop uploads.
 * The payload crosses into page.evaluate(), so the file bytes travel as
 * base64 text and are rebuilt into a File inside the browser context.
 */
export type ChatDropFilePayload = {
  // File name the synthetic File object will carry.
  name: string;
  // MIME type attached to the synthetic File object.
  mimeType: string;
  // File contents, base64-encoded for transfer into the page.
  base64: string;
};
|
||||
|
||||
export class ChatMessagesPage {
|
||||
readonly composer: Locator;
|
||||
readonly composerInput: Locator;
|
||||
readonly sendButton: Locator;
|
||||
readonly typingIndicator: Locator;
|
||||
readonly gifButton: Locator;
|
||||
readonly gifPicker: Locator;
|
||||
readonly messageItems: Locator;
|
||||
|
||||
constructor(private page: Page) {
|
||||
this.composer = page.locator('app-chat-message-composer');
|
||||
this.composerInput = page.getByPlaceholder('Type a message...');
|
||||
this.sendButton = page.getByRole('button', { name: 'Send message' });
|
||||
this.typingIndicator = page.locator('app-typing-indicator');
|
||||
this.gifButton = page.getByRole('button', { name: 'Search KLIPY GIFs' });
|
||||
this.gifPicker = page.getByRole('dialog', { name: 'KLIPY GIF picker' });
|
||||
this.messageItems = page.locator('[data-message-id]');
|
||||
}
|
||||
|
||||
async waitForReady(): Promise<void> {
|
||||
await expect(this.composerInput).toBeVisible({ timeout: 30_000 });
|
||||
}
|
||||
|
||||
async sendMessage(content: string): Promise<void> {
|
||||
await this.waitForReady();
|
||||
await this.composerInput.fill(content);
|
||||
await this.sendButton.click();
|
||||
}
|
||||
|
||||
async typeDraft(content: string): Promise<void> {
|
||||
await this.waitForReady();
|
||||
await this.composerInput.fill(content);
|
||||
}
|
||||
|
||||
async clearDraft(): Promise<void> {
|
||||
await this.waitForReady();
|
||||
await this.composerInput.fill('');
|
||||
}
|
||||
|
||||
async attachFiles(files: ChatDropFilePayload[]): Promise<void> {
|
||||
await this.waitForReady();
|
||||
|
||||
await this.composerInput.evaluate((element, payloads: ChatDropFilePayload[]) => {
|
||||
const dataTransfer = new DataTransfer();
|
||||
|
||||
for (const payload of payloads) {
|
||||
const binary = atob(payload.base64);
|
||||
const bytes = new Uint8Array(binary.length);
|
||||
|
||||
for (let index = 0; index < binary.length; index++) {
|
||||
bytes[index] = binary.charCodeAt(index);
|
||||
}
|
||||
|
||||
dataTransfer.items.add(new File([bytes], payload.name, { type: payload.mimeType }));
|
||||
}
|
||||
|
||||
element.dispatchEvent(new DragEvent('drop', {
|
||||
bubbles: true,
|
||||
cancelable: true,
|
||||
dataTransfer
|
||||
}));
|
||||
}, files);
|
||||
}
|
||||
|
||||
async openGifPicker(): Promise<void> {
|
||||
await this.waitForReady();
|
||||
await this.gifButton.click();
|
||||
await expect(this.gifPicker).toBeVisible({ timeout: 10_000 });
|
||||
}
|
||||
|
||||
async selectFirstGif(): Promise<void> {
|
||||
const gifCard = this.gifPicker.getByRole('button', { name: /click to select/i }).first();
|
||||
|
||||
await expect(gifCard).toBeVisible({ timeout: 10_000 });
|
||||
await gifCard.click();
|
||||
}
|
||||
|
||||
getMessageItemByText(text: string): Locator {
|
||||
return this.messageItems.filter({
|
||||
has: this.page.getByText(text, { exact: false })
|
||||
}).last();
|
||||
}
|
||||
|
||||
getMessageImageByAlt(altText: string): Locator {
|
||||
return this.page.locator(`[data-message-id] img[alt="${altText}"]`).last();
|
||||
}
|
||||
|
||||
async expectMessageImageLoaded(altText: string): Promise<void> {
|
||||
const image = this.getMessageImageByAlt(altText);
|
||||
|
||||
await expect(image).toBeVisible({ timeout: 20_000 });
|
||||
await expect.poll(async () =>
|
||||
image.evaluate((element) => {
|
||||
const img = element as HTMLImageElement;
|
||||
|
||||
return img.complete && img.naturalWidth > 0 && img.naturalHeight > 0;
|
||||
}), {
|
||||
timeout: 20_000,
|
||||
message: `Image ${altText} should fully load in chat`
|
||||
}).toBe(true);
|
||||
}
|
||||
|
||||
getEmbedCardByTitle(title: string): Locator {
|
||||
return this.page.locator('app-chat-link-embed').filter({
|
||||
has: this.page.getByText(title, { exact: true })
|
||||
}).last();
|
||||
}
|
||||
|
||||
async editOwnMessage(originalText: string, updatedText: string): Promise<void> {
|
||||
const messageItem = this.getMessageItemByText(originalText);
|
||||
const editButton = messageItem.locator('button:has(ng-icon[name="lucideEdit"])').first();
|
||||
const editTextarea = this.page.locator('textarea.edit-textarea').first();
|
||||
const saveButton = this.page.locator('button:has(ng-icon[name="lucideCheck"])').first();
|
||||
|
||||
await expect(messageItem).toBeVisible({ timeout: 15_000 });
|
||||
await messageItem.hover();
|
||||
await editButton.click();
|
||||
await expect(editTextarea).toBeVisible({ timeout: 10_000 });
|
||||
await editTextarea.fill(updatedText);
|
||||
await saveButton.click();
|
||||
}
|
||||
|
||||
async deleteOwnMessage(text: string): Promise<void> {
|
||||
const messageItem = this.getMessageItemByText(text);
|
||||
const deleteButton = messageItem.locator('button:has(ng-icon[name="lucideTrash2"])').first();
|
||||
|
||||
await expect(messageItem).toBeVisible({ timeout: 15_000 });
|
||||
await messageItem.hover();
|
||||
await deleteButton.click();
|
||||
}
|
||||
}
|
||||
390
e2e/pages/chat-room.page.ts
Normal file
390
e2e/pages/chat-room.page.ts
Normal file
@@ -0,0 +1,390 @@
|
||||
import {
|
||||
expect,
|
||||
type Page,
|
||||
type Locator
|
||||
} from '@playwright/test';
|
||||
|
||||
/**
 * Page object for an open chat room: channel-sidebar navigation, channel
 * creation, voice controls, and screen-share toggling. Several private
 * helpers reach into Angular internals (window.ng debug API) and talk to the
 * REST API directly to work around async NGRX effects that can race with the
 * tests.
 */
export class ChatRoomPage {
  readonly chatMessages: Locator;      // <app-chat-messages> host element
  readonly voiceWorkspace: Locator;    // <app-voice-workspace> host element
  readonly channelsSidePanel: Locator; // first side panel: channel list
  readonly usersSidePanel: Locator;    // last side panel: user list

  constructor(private page: Page) {
    this.chatMessages = page.locator('app-chat-messages');
    this.voiceWorkspace = page.locator('app-voice-workspace');
    this.channelsSidePanel = page.locator('app-rooms-side-panel').first();
    this.usersSidePanel = page.locator('app-rooms-side-panel').last();
  }

  /** Click a voice channel by name in the channels sidebar to join voice. */
  async joinVoiceChannel(channelName: string) {
    const channelButton = this.page.locator('app-rooms-side-panel')
      .getByRole('button', { name: channelName, exact: true });

    await expect(channelButton).toBeVisible({ timeout: 15_000 });
    await channelButton.click();
  }

  /** Click a text channel by name in the channels sidebar to switch chat rooms. */
  async joinTextChannel(channelName: string) {
    const channelButton = this.getTextChannelButton(channelName);

    // Channel may exist server-side but not locally yet; re-fetch metadata.
    if (await channelButton.count() === 0) {
      await this.refreshRoomMetadata();
    }

    await expect(channelButton).toBeVisible({ timeout: 15_000 });
    await channelButton.click();
  }

  /** Creates a text channel and waits until it appears locally. */
  async ensureTextChannelExists(channelName: string) {
    const channelButton = this.getTextChannelButton(channelName);

    // Already present locally - nothing to do.
    if (await channelButton.count() > 0) {
      return;
    }

    await this.openCreateTextChannelDialog();
    await this.createChannel(channelName);

    try {
      await expect(channelButton).toBeVisible({ timeout: 5_000 });
    } catch {
      // Dialog path did not materialize the channel in time; drive the
      // Angular component directly as a fallback.
      await this.createTextChannelThroughComponent(channelName);
    }

    // Persist so other clients (and refreshes) see the channel too.
    await this.persistCurrentChannelsToServer(channelName);
    await expect(channelButton).toBeVisible({ timeout: 15_000 });
  }

  /** Click "Create Voice Channel" button in the channels sidebar. */
  async openCreateVoiceChannelDialog() {
    await this.page.locator('button[title="Create Voice Channel"]').click();
  }

  /** Click "Create Text Channel" button in the channels sidebar. */
  async openCreateTextChannelDialog() {
    await this.page.locator('button[title="Create Text Channel"]').click();
  }

  /** Fill the channel name in the create channel dialog and confirm. */
  async createChannel(name: string) {
    const dialog = this.page.locator('app-confirm-dialog');
    const channelNameInput = dialog.getByPlaceholder('Channel name');
    const createButton = dialog.getByRole('button', { name: 'Create', exact: true });

    await expect(channelNameInput).toBeVisible({ timeout: 10_000 });
    await channelNameInput.fill(name);
    await channelNameInput.press('Enter');

    // Enter may or may not have closed the dialog; click Create if it is
    // still open, tolerating the race where it disappears mid-click.
    if (await dialog.isVisible()) {
      try {
        await createButton.click();
      } catch {
        // Enter may already have confirmed and removed the dialog.
      }
    }

    await expect(dialog).not.toBeVisible({ timeout: 10_000 });
  }

  /** Get the voice controls component. */
  get voiceControls() {
    return this.page.locator('app-voice-controls');
  }

  /** Get the mute toggle button inside voice controls. */
  get muteButton() {
    // Icon name flips between Mic and MicOff, so match either.
    return this.voiceControls.locator('button:has(ng-icon[name="lucideMic"]), button:has(ng-icon[name="lucideMicOff"])').first();
  }

  /** Get the disconnect/hang-up button (destructive styled). */
  get disconnectButton() {
    return this.voiceControls.locator('button:has(ng-icon[name="lucidePhoneOff"])').first();
  }

  /** Get all voice stream tiles. */
  get streamTiles() {
    return this.page.locator('app-voice-workspace-stream-tile');
  }

  /** Get the count of voice users listed under a voice channel. */
  async getVoiceUserCountInChannel(channelName: string): Promise<number> {
    // Walk up from the channel button to its section, then count avatars.
    const channelSection = this.page.locator('app-rooms-side-panel')
      .getByRole('button', { name: channelName })
      .locator('..');
    const userAvatars = channelSection.locator('app-user-avatar');

    return userAvatars.count();
  }

  /** Get the screen share toggle button inside voice controls. */
  get screenShareButton() {
    // Icon flips between Monitor (idle) and MonitorOff (sharing).
    return this.voiceControls.locator(
      'button:has(ng-icon[name="lucideMonitor"]), button:has(ng-icon[name="lucideMonitorOff"])'
    ).first();
  }

  /** Start screen sharing. Bypasses the quality dialog via localStorage preset. */
  async startScreenShare() {
    // Disable quality dialog so clicking the button starts sharing immediately
    await this.page.evaluate(() => {
      const key = 'metoyou_voice_settings';
      const raw = localStorage.getItem(key);
      const settings = raw ? JSON.parse(raw) : {};

      settings.askScreenShareQuality = false;
      settings.screenShareQuality = 'balanced';
      localStorage.setItem(key, JSON.stringify(settings));
    });

    await this.screenShareButton.click();
  }

  /** Stop screen sharing by clicking the active screen share button. */
  async stopScreenShare() {
    await this.screenShareButton.click();
  }

  /** Check whether the screen share button shows the active (MonitorOff) icon. */
  get isScreenShareActive() {
    return this.voiceControls.locator('button:has(ng-icon[name="lucideMonitorOff"])').first();
  }

  // Resolve the sidebar button for a text channel; matches a label ending in
  // "# <name>", case-insensitively, with regex metacharacters escaped.
  private getTextChannelButton(channelName: string): Locator {
    const channelPattern = new RegExp(`#\\s*${escapeRegExp(channelName)}$`, 'i');

    return this.channelsSidePanel.getByRole('button', { name: channelPattern }).first();
  }

  // Fallback channel creation: drive the sidebar component's own
  // createChannel/confirmCreateChannel methods via the Angular debug API.
  private async createTextChannelThroughComponent(channelName: string): Promise<void> {
    await this.page.evaluate((name) => {
      // Minimal typings for the slice of the component we touch.
      interface ChannelSidebarComponent {
        createChannel: (type: 'text' | 'voice') => void;
        newChannelName: string;
        confirmCreateChannel: () => void;
      }
      interface AngularDebugApi {
        getComponent: (element: Element) => ChannelSidebarComponent;
      }
      interface WindowWithAngularDebug extends Window {
        ng?: AngularDebugApi;
      }

      const host = document.querySelector('app-rooms-side-panel');
      const debugApi = (window as WindowWithAngularDebug).ng;

      if (!host || !debugApi?.getComponent) {
        throw new Error('Angular debug API unavailable for text channel fallback');
      }

      const component = debugApi.getComponent(host);

      // Mirror the UI flow: open the create form, set the name, confirm.
      component.createChannel('text');
      component.newChannelName = name;
      component.confirmCreateChannel();
    }, channelName);
  }

  // Persist the current channel list (plus `channelName` if missing) to the
  // server via a direct REST PUT, then push the result into the NGRX store.
  // Throws if room/user/endpoint context is unavailable.
  private async persistCurrentChannelsToServer(channelName: string): Promise<void> {
    const result = await this.page.evaluate(async (requestedChannelName) => {
      // Minimal typings for the data read from localStorage and Angular.
      interface ServerEndpoint {
        isActive?: boolean;
        url: string;
      }

      interface ChannelShape {
        id: string;
        name: string;
        type: 'text' | 'voice';
        position: number;
      }

      interface RoomShape {
        id: string;
        sourceUrl?: string;
        channels?: ChannelShape[];
      }

      interface UserShape {
        id: string;
      }

      interface ChannelSidebarComponent {
        currentRoom: () => RoomShape | null;
        currentUser: () => UserShape | null;
      }

      interface AngularDebugApi {
        getComponent: (element: Element) => ChannelSidebarComponent;
      }

      interface WindowWithAngularDebug extends Window {
        ng?: AngularDebugApi;
      }

      const host = document.querySelector('app-rooms-side-panel');
      const debugApi = (window as WindowWithAngularDebug).ng;

      if (!host || !debugApi?.getComponent) {
        throw new Error('Angular debug API unavailable for channel persistence');
      }

      const component = debugApi.getComponent(host);
      const room = component.currentRoom();
      const currentUser = component.currentUser();
      // Prefer the room's own source URL, else the active saved endpoint.
      const endpoints = JSON.parse(localStorage.getItem('metoyou_server_endpoints') || '[]') as ServerEndpoint[];
      const activeEndpoint = endpoints.find((endpoint) => endpoint.isActive) || endpoints[0] || null;
      const apiBaseUrl = room?.sourceUrl || activeEndpoint?.url;
      // Compare names whitespace-normalized and case-insensitively so we do
      // not append a duplicate of an existing text channel.
      const normalizedChannelName = requestedChannelName.trim().replace(/\s+/g, ' ');
      const existingChannels = Array.isArray(room?.channels) ? room.channels : [];
      const hasTextChannel = existingChannels.some((channel) =>
        channel.type === 'text' && channel.name.trim().toLowerCase() === normalizedChannelName.toLowerCase()
      );
      const nextChannels = hasTextChannel
        ? existingChannels
        : [
          ...existingChannels,
          {
            id: globalThis.crypto.randomUUID(),
            name: normalizedChannelName,
            type: 'text' as const,
            position: existingChannels.length
          }
        ];

      if (!room?.id || !currentUser?.id || !apiBaseUrl) {
        throw new Error('Missing room, user, or endpoint when persisting channels');
      }

      const response = await fetch(`${apiBaseUrl}/api/servers/${room.id}`, {
        method: 'PUT',
        headers: {
          'Content-Type': 'application/json'
        },
        body: JSON.stringify({
          currentOwnerId: currentUser.id,
          channels: nextChannels
        })
      });

      if (!response.ok) {
        throw new Error(`Failed to persist channels: ${response.status}`);
      }

      return { roomId: room.id, channels: nextChannels };
    }, channelName);

    // Update NGRX store directly so the UI reflects the new channel
    // immediately, without waiting for an async effect round-trip.
    await this.dispatchRoomChannelsUpdate(result.roomId, result.channels);
  }

  // Dispatch an "[Rooms] Update Room" action straight into the NGRX store
  // via the sidebar component's injected store. Best-effort: silently skips
  // when the Angular debug API or store is unavailable.
  private async dispatchRoomChannelsUpdate(
    roomId: string,
    channels: { id: string; name: string; type: string; position: number }[]
  ): Promise<void> {
    await this.page.evaluate(({ rid, chs }) => {
      interface AngularDebugApi {
        getComponent: (element: Element) => Record<string, unknown>;
      }

      const host = document.querySelector('app-rooms-side-panel');
      const debugApi = (window as { ng?: AngularDebugApi }).ng;

      if (!host || !debugApi?.getComponent) {
        return;
      }

      const component = debugApi.getComponent(host);
      const store = component['store'] as { dispatch: (a: Record<string, unknown>) => void } | undefined;

      if (store?.dispatch) {
        store.dispatch({
          type: '[Rooms] Update Room',
          roomId: rid,
          changes: { channels: chs }
        });
      }
    }, { rid: roomId, chs: channels });
  }

  // Re-fetch the current room's channels from the REST API and push them
  // into the NGRX store. Throws when the debug API, room, or store is
  // missing; a failed fetch is silently ignored.
  private async refreshRoomMetadata(): Promise<void> {
    await this.page.evaluate(async () => {
      interface ServerEndpoint {
        isActive?: boolean;
        url: string;
      }

      interface ChannelShape {
        id: string;
        name: string;
        type: 'text' | 'voice';
        position: number;
      }

      interface AngularDebugApi {
        getComponent: (element: Element) => Record<string, unknown>;
      }

      interface WindowWithAngularDebug extends Window {
        ng?: AngularDebugApi;
      }

      const host = document.querySelector('app-rooms-side-panel');
      const debugApi = (window as WindowWithAngularDebug).ng;

      if (!host || !debugApi?.getComponent) {
        throw new Error('Angular debug API unavailable for room refresh');
      }

      const component = debugApi.getComponent(host);
      // currentRoom is a signal-style accessor; guard in case the component
      // shape differs on this build.
      const currentRoom = typeof component['currentRoom'] === 'function'
        ? (component['currentRoom'] as () => { id: string; sourceUrl?: string; channels?: ChannelShape[] } | null)()
        : null;

      if (!currentRoom) {
        throw new Error('No current room to refresh');
      }

      const store = component['store'] as { dispatch: (action: Record<string, unknown>) => void } | undefined;

      if (!store?.dispatch) {
        throw new Error('NGRX store not available on component');
      }

      // Fetch server data directly via REST API instead of triggering
      // an async NGRX effect that can race with pending writes.
      const endpoints = JSON.parse(localStorage.getItem('metoyou_server_endpoints') || '[]') as ServerEndpoint[];
      const activeEndpoint = endpoints.find((ep) => ep.isActive) || endpoints[0] || null;
      const apiBaseUrl = currentRoom.sourceUrl || activeEndpoint?.url;

      if (!apiBaseUrl) {
        throw new Error('No API base URL available for room refresh');
      }

      const response = await fetch(`${apiBaseUrl}/api/servers/${currentRoom.id}`);

      if (response.ok) {
        const serverData = await response.json() as { channels?: ChannelShape[] };

        if (serverData.channels?.length) {
          store.dispatch({
            type: '[Rooms] Update Room',
            roomId: currentRoom.id,
            changes: { channels: serverData.channels }
          });
        }
      }
    });

    // Brief wait for Angular change detection to propagate
    await this.page.waitForTimeout(500);
  }
}
|
||||
|
||||
function escapeRegExp(value: string): string {
|
||||
return value.replace(/[.*+?^${}()|[\]\\]/g, '\\$&');
|
||||
}
|
||||
29
e2e/pages/login.page.ts
Normal file
29
e2e/pages/login.page.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import { type Page, type Locator } from '@playwright/test';
|
||||
|
||||
export class LoginPage {
|
||||
readonly usernameInput: Locator;
|
||||
readonly passwordInput: Locator;
|
||||
readonly serverSelect: Locator;
|
||||
readonly submitButton: Locator;
|
||||
readonly errorText: Locator;
|
||||
readonly registerLink: Locator;
|
||||
|
||||
constructor(private page: Page) {
|
||||
this.usernameInput = page.locator('#login-username');
|
||||
this.passwordInput = page.locator('#login-password');
|
||||
this.serverSelect = page.locator('#login-server');
|
||||
this.submitButton = page.getByRole('button', { name: 'Login' });
|
||||
this.errorText = page.locator('.text-destructive');
|
||||
this.registerLink = page.getByRole('button', { name: 'Register' });
|
||||
}
|
||||
|
||||
async goto() {
|
||||
await this.page.goto('/login');
|
||||
}
|
||||
|
||||
async login(username: string, password: string) {
|
||||
await this.usernameInput.fill(username);
|
||||
await this.passwordInput.fill(password);
|
||||
await this.submitButton.click();
|
||||
}
|
||||
}
|
||||
45
e2e/pages/register.page.ts
Normal file
45
e2e/pages/register.page.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import { expect, type Page, type Locator } from '@playwright/test';
|
||||
|
||||
export class RegisterPage {
|
||||
readonly usernameInput: Locator;
|
||||
readonly displayNameInput: Locator;
|
||||
readonly passwordInput: Locator;
|
||||
readonly serverSelect: Locator;
|
||||
readonly submitButton: Locator;
|
||||
readonly errorText: Locator;
|
||||
readonly loginLink: Locator;
|
||||
|
||||
constructor(private page: Page) {
|
||||
this.usernameInput = page.locator('#register-username');
|
||||
this.displayNameInput = page.locator('#register-display-name');
|
||||
this.passwordInput = page.locator('#register-password');
|
||||
this.serverSelect = page.locator('#register-server');
|
||||
this.submitButton = page.getByRole('button', { name: 'Create Account' });
|
||||
this.errorText = page.locator('.text-destructive');
|
||||
this.loginLink = page.getByRole('button', { name: 'Login' });
|
||||
}
|
||||
|
||||
async goto() {
|
||||
await this.page.goto('/register', { waitUntil: 'domcontentloaded' });
|
||||
|
||||
try {
|
||||
await expect(this.usernameInput).toBeVisible({ timeout: 10_000 });
|
||||
} catch {
|
||||
// Angular router may redirect to /login on first load; click through.
|
||||
const registerLink = this.page.getByRole('link', { name: 'Register' })
|
||||
.or(this.page.getByText('Register'));
|
||||
|
||||
await registerLink.first().click();
|
||||
await expect(this.usernameInput).toBeVisible({ timeout: 30_000 });
|
||||
}
|
||||
|
||||
await expect(this.submitButton).toBeVisible({ timeout: 30_000 });
|
||||
}
|
||||
|
||||
async register(username: string, displayName: string, password: string) {
|
||||
await this.usernameInput.fill(username);
|
||||
await this.displayNameInput.fill(displayName);
|
||||
await this.passwordInput.fill(password);
|
||||
await this.submitButton.click();
|
||||
}
|
||||
}
|
||||
65
e2e/pages/server-search.page.ts
Normal file
65
e2e/pages/server-search.page.ts
Normal file
@@ -0,0 +1,65 @@
|
||||
import {
|
||||
type Page,
|
||||
type Locator,
|
||||
expect
|
||||
} from '@playwright/test';
|
||||
|
||||
export class ServerSearchPage {
|
||||
readonly searchInput: Locator;
|
||||
readonly createServerButton: Locator;
|
||||
readonly settingsButton: Locator;
|
||||
|
||||
// Create server dialog
|
||||
readonly serverNameInput: Locator;
|
||||
readonly serverDescriptionInput: Locator;
|
||||
readonly serverTopicInput: Locator;
|
||||
readonly signalEndpointSelect: Locator;
|
||||
readonly privateCheckbox: Locator;
|
||||
readonly serverPasswordInput: Locator;
|
||||
readonly dialogCreateButton: Locator;
|
||||
readonly dialogCancelButton: Locator;
|
||||
|
||||
constructor(private page: Page) {
|
||||
this.searchInput = page.getByPlaceholder('Search servers...');
|
||||
this.createServerButton = page.getByRole('button', { name: 'Create New Server' });
|
||||
this.settingsButton = page.locator('button[title="Settings"]');
|
||||
|
||||
// Create dialog elements
|
||||
this.serverNameInput = page.locator('#create-server-name');
|
||||
this.serverDescriptionInput = page.locator('#create-server-description');
|
||||
this.serverTopicInput = page.locator('#create-server-topic');
|
||||
this.signalEndpointSelect = page.locator('#create-server-signal-endpoint');
|
||||
this.privateCheckbox = page.locator('#private');
|
||||
this.serverPasswordInput = page.locator('#create-server-password');
|
||||
this.dialogCreateButton = page.locator('div[role="dialog"]').getByRole('button', { name: 'Create' });
|
||||
this.dialogCancelButton = page.locator('div[role="dialog"]').getByRole('button', { name: 'Cancel' });
|
||||
}
|
||||
|
||||
async goto() {
|
||||
await this.page.goto('/search');
|
||||
}
|
||||
|
||||
async createServer(name: string, options?: { description?: string; topic?: string }) {
|
||||
await this.createServerButton.click();
|
||||
await expect(this.serverNameInput).toBeVisible();
|
||||
await this.serverNameInput.fill(name);
|
||||
|
||||
if (options?.description) {
|
||||
await this.serverDescriptionInput.fill(options.description);
|
||||
}
|
||||
|
||||
if (options?.topic) {
|
||||
await this.serverTopicInput.fill(options.topic);
|
||||
}
|
||||
|
||||
await this.dialogCreateButton.click();
|
||||
}
|
||||
|
||||
async joinSavedRoom(name: string) {
|
||||
await this.page.getByRole('button', { name }).click();
|
||||
}
|
||||
|
||||
async joinServerFromSearch(name: string) {
|
||||
await this.page.locator('button', { hasText: name }).click();
|
||||
}
|
||||
}
|
||||
39
e2e/playwright.config.ts
Normal file
39
e2e/playwright.config.ts
Normal file
@@ -0,0 +1,39 @@
|
||||
import { defineConfig, devices } from '@playwright/test';
|
||||
|
||||
export default defineConfig({
  testDir: './tests',
  // Generous per-test budget: multi-client voice/chat flows are slow to set up.
  timeout: 90_000,
  expect: { timeout: 10_000 },
  // Retry only on CI; local failures should surface immediately.
  retries: process.env.CI ? 2 : 0,
  // Single worker: tests share one dev server and real signaling state.
  workers: 1,
  reporter: [['html', { outputFolder: '../test-results/html-report' }], ['list']],
  outputDir: '../test-results/artifacts',
  use: {
    baseURL: 'http://localhost:4200',
    // Capture debugging artifacts only when something goes wrong.
    trace: 'on-first-retry',
    screenshot: 'only-on-failure',
    video: 'on-first-retry',
    actionTimeout: 15_000,
  },
  projects: [
    {
      name: 'chromium',
      use: {
        ...devices['Desktop Chrome'],
        // Auto-grant mic/camera and back them with Chromium's fake media
        // devices so voice tests run headlessly without real hardware.
        permissions: ['microphone', 'camera'],
        launchOptions: {
          args: [
            '--use-fake-device-for-media-stream',
            '--use-fake-ui-for-media-stream',
          ],
        },
      },
    },
  ],
  // Serve the Angular app for the run; reuse an already-running dev server
  // locally, but always start fresh on CI.
  webServer: {
    command: 'cd ../toju-app && npx ng serve',
    port: 4200,
    reuseExistingServer: !process.env.CI,
    timeout: 120_000,
  },
});
|
||||
295
e2e/tests/chat/chat-message-features.spec.ts
Normal file
295
e2e/tests/chat/chat-message-features.spec.ts
Normal file
@@ -0,0 +1,295 @@
|
||||
import { type Page } from '@playwright/test';
|
||||
import { test, expect, type Client } from '../../fixtures/multi-client';
|
||||
import { RegisterPage } from '../../pages/register.page';
|
||||
import { ServerSearchPage } from '../../pages/server-search.page';
|
||||
import { ChatRoomPage } from '../../pages/chat-room.page';
|
||||
import {
|
||||
ChatMessagesPage,
|
||||
type ChatDropFilePayload
|
||||
} from '../../pages/chat-messages.page';
|
||||
|
||||
// URL intercepted by the mocked `**/api/link-metadata**` route below.
const MOCK_EMBED_URL = 'https://example.test/mock-embed';
// Title/description the mocked link-metadata endpoint returns for MOCK_EMBED_URL.
const MOCK_EMBED_TITLE = 'Mock Embed Title';
const MOCK_EMBED_DESCRIPTION = 'Mock embed description for chat E2E coverage.';
// Tiny GIF as a data URL; reused as both the mocked KLIPY GIF and the embed image.
const MOCK_GIF_IMAGE_URL = 'data:image/gif;base64,R0lGODlhAQABAPAAAP///wAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==';
// Placeholder text asserted after a message is deleted.
const DELETED_MESSAGE_CONTENT = '[Message deleted]';
|
||||
|
||||
// Two-client chat suite: every test builds a fresh server + two registered
// users via createChatScenario, then asserts cross-client sync of a feature.
test.describe('Chat messaging features', () => {
  // Scenario setup (register x2, create + join server) is slow; raise per-test timeout.
  test.describe.configure({ timeout: 180_000 });

  test('syncs messages in a newly created text channel', async ({ createClient }) => {
    const scenario = await createChatScenario(createClient);
    const channelName = uniqueName('updates');
    const aliceMessage = `Alice text channel message ${uniqueName('msg')}`;
    const bobMessage = `Bob text channel reply ${uniqueName('msg')}`;

    await test.step('Alice creates a new text channel and both users join it', async () => {
      await scenario.aliceRoom.ensureTextChannelExists(channelName);
      await scenario.aliceRoom.joinTextChannel(channelName);
      await scenario.bobRoom.joinTextChannel(channelName);
    });

    await test.step('Alice and Bob see synced messages in the new text channel', async () => {
      await scenario.aliceMessages.sendMessage(aliceMessage);
      await expect(scenario.bobMessages.getMessageItemByText(aliceMessage)).toBeVisible({ timeout: 20_000 });

      await scenario.bobMessages.sendMessage(bobMessage);
      await expect(scenario.aliceMessages.getMessageItemByText(bobMessage)).toBeVisible({ timeout: 20_000 });
    });
  });

  test('shows typing indicators to other users', async ({ createClient }) => {
    const scenario = await createChatScenario(createClient);
    const draftMessage = `Typing indicator draft ${uniqueName('draft')}`;

    await test.step('Alice starts typing in general channel', async () => {
      // Only types a draft — never sends — so the indicator is the sole signal.
      await scenario.aliceMessages.typeDraft(draftMessage);
    });

    await test.step('Bob sees Alice typing', async () => {
      await expect(scenario.bob.page.getByText('Alice is typing...')).toBeVisible({ timeout: 10_000 });
    });
  });

  test('edits and removes messages for both users', async ({ createClient }) => {
    const scenario = await createChatScenario(createClient);
    const originalMessage = `Editable message ${uniqueName('edit')}`;
    const updatedMessage = `Edited message ${uniqueName('edit')}`;

    await test.step('Alice sends a message and Bob receives it', async () => {
      await scenario.aliceMessages.sendMessage(originalMessage);
      await expect(scenario.bobMessages.getMessageItemByText(originalMessage)).toBeVisible({ timeout: 20_000 });
    });

    await test.step('Alice edits the message and both users see updated content', async () => {
      await scenario.aliceMessages.editOwnMessage(originalMessage, updatedMessage);
      await expect(scenario.aliceMessages.getMessageItemByText(updatedMessage)).toBeVisible({ timeout: 20_000 });
      await expect(scenario.alice.page.getByText('(edited)')).toBeVisible({ timeout: 10_000 });
      await expect(scenario.bobMessages.getMessageItemByText(updatedMessage)).toBeVisible({ timeout: 20_000 });
    });

    await test.step('Alice deletes the message and both users see deletion state', async () => {
      await scenario.aliceMessages.deleteOwnMessage(updatedMessage);
      await expect(scenario.aliceMessages.getMessageItemByText(DELETED_MESSAGE_CONTENT)).toBeVisible({ timeout: 20_000 });
      await expect(scenario.bobMessages.getMessageItemByText(DELETED_MESSAGE_CONTENT)).toBeVisible({ timeout: 20_000 });
    });
  });

  test('syncs image and file attachments between users', async ({ createClient }) => {
    const scenario = await createChatScenario(createClient);
    const imageName = `${uniqueName('diagram')}.svg`;
    const fileName = `${uniqueName('notes')}.txt`;
    const imageCaption = `Image upload ${uniqueName('caption')}`;
    const fileCaption = `File upload ${uniqueName('caption')}`;
    // Attachments are built in-memory (base64) — no fixture files on disk needed.
    const imageAttachment = createTextFilePayload(imageName, 'image/svg+xml', buildMockSvgMarkup(imageName));
    const fileAttachment = createTextFilePayload(fileName, 'text/plain', `Attachment body for ${fileName}`);

    await test.step('Alice sends image attachment and Bob receives it', async () => {
      await scenario.aliceMessages.attachFiles([imageAttachment]);
      await scenario.aliceMessages.sendMessage(imageCaption);

      await scenario.aliceMessages.expectMessageImageLoaded(imageName);
      await expect(scenario.bobMessages.getMessageItemByText(imageCaption)).toBeVisible({ timeout: 20_000 });
      await scenario.bobMessages.expectMessageImageLoaded(imageName);
    });

    await test.step('Alice sends generic file attachment and Bob receives it', async () => {
      await scenario.aliceMessages.attachFiles([fileAttachment]);
      await scenario.aliceMessages.sendMessage(fileCaption);

      await expect(scenario.bobMessages.getMessageItemByText(fileCaption)).toBeVisible({ timeout: 20_000 });
      await expect(scenario.bob.page.getByText(fileName, { exact: false })).toBeVisible({ timeout: 20_000 });
    });
  });

  test('renders link embeds for shared links', async ({ createClient }) => {
    const scenario = await createChatScenario(createClient);
    const messageText = `Useful docs ${MOCK_EMBED_URL}`;

    await test.step('Alice shares a link in chat', async () => {
      await scenario.aliceMessages.sendMessage(messageText);
      await expect(scenario.bobMessages.getMessageItemByText(messageText)).toBeVisible({ timeout: 20_000 });
    });

    await test.step('Both users see mocked link embed metadata', async () => {
      // Metadata comes from the mocked link-metadata route in installChatFeatureMocks.
      await expect(scenario.aliceMessages.getEmbedCardByTitle(MOCK_EMBED_TITLE)).toBeVisible({ timeout: 20_000 });
      await expect(scenario.bobMessages.getEmbedCardByTitle(MOCK_EMBED_TITLE)).toBeVisible({ timeout: 20_000 });
      await expect(scenario.bob.page.getByText(MOCK_EMBED_DESCRIPTION)).toBeVisible({ timeout: 20_000 });
    });
  });

  test('sends KLIPY GIF messages with mocked API responses', async ({ createClient }) => {
    const scenario = await createChatScenario(createClient);

    await test.step('Alice opens GIF picker and sends mocked GIF', async () => {
      // Picker is fed by the mocked **/api/klipy/gifs** route (single result).
      await scenario.aliceMessages.openGifPicker();
      await scenario.aliceMessages.selectFirstGif();
    });

    await test.step('Bob sees GIF message sync', async () => {
      await scenario.aliceMessages.expectMessageImageLoaded('KLIPY GIF');
      await scenario.bobMessages.expectMessageImageLoaded('KLIPY GIF');
    });
  });
});
|
||||
|
||||
// Bundle of raw clients and page objects returned by createChatScenario:
// two registered users already joined to the same server's chat view.
type ChatScenario = {
  alice: Client;
  bob: Client;
  aliceRoom: ChatRoomPage;
  bobRoom: ChatRoomPage;
  aliceMessages: ChatMessagesPage;
  bobMessages: ChatMessagesPage;
};
|
||||
|
||||
/**
 * Build a fresh two-user chat scenario: register Alice and Bob on separate
 * clients, have Alice create a uniquely named server, have Bob find and join
 * it from search, and wait until both chat message views are ready.
 *
 * API mocks (KLIPY / link metadata) are installed on both pages before any
 * navigation so every request in the flow is intercepted.
 */
async function createChatScenario(createClient: () => Promise<Client>): Promise<ChatScenario> {
  // Unique suffix keeps usernames/server name collision-free across runs.
  const suffix = uniqueName('chat');
  const serverName = `Chat Server ${suffix}`;
  const aliceCredentials = {
    username: `alice_${suffix}`,
    displayName: 'Alice',
    password: 'TestPass123!'
  };
  const bobCredentials = {
    username: `bob_${suffix}`,
    displayName: 'Bob',
    password: 'TestPass123!'
  };
  const alice = await createClient();
  const bob = await createClient();

  // Install route mocks before registration/navigation starts.
  await installChatFeatureMocks(alice.page);
  await installChatFeatureMocks(bob.page);

  const aliceRegisterPage = new RegisterPage(alice.page);
  const bobRegisterPage = new RegisterPage(bob.page);

  // Register both users; success redirects to /search.
  await aliceRegisterPage.goto();
  await aliceRegisterPage.register(
    aliceCredentials.username,
    aliceCredentials.displayName,
    aliceCredentials.password
  );
  await expect(alice.page).toHaveURL(/\/search/, { timeout: 15_000 });

  await bobRegisterPage.goto();
  await bobRegisterPage.register(
    bobCredentials.username,
    bobCredentials.displayName,
    bobCredentials.password
  );
  await expect(bob.page).toHaveURL(/\/search/, { timeout: 15_000 });

  const aliceSearchPage = new ServerSearchPage(alice.page);

  // Alice creates the server; creation navigates her into the room.
  await aliceSearchPage.createServer(serverName, {
    description: 'E2E chat server for messaging feature coverage'
  });
  await expect(alice.page).toHaveURL(/\/room\//, { timeout: 15_000 });

  // Bob searches for the server by name and joins via its result card.
  const bobSearchPage = new ServerSearchPage(bob.page);
  const serverCard = bob.page.locator('button', { hasText: serverName }).first();

  await bobSearchPage.searchInput.fill(serverName);
  await expect(serverCard).toBeVisible({ timeout: 15_000 });
  await serverCard.click();
  await expect(bob.page).toHaveURL(/\/room\//, { timeout: 15_000 });

  const aliceRoom = new ChatRoomPage(alice.page);
  const bobRoom = new ChatRoomPage(bob.page);
  const aliceMessages = new ChatMessagesPage(alice.page);
  const bobMessages = new ChatMessagesPage(bob.page);

  // Wait for both message views to be usable before handing control to the test.
  await aliceMessages.waitForReady();
  await bobMessages.waitForReady();

  return {
    alice,
    bob,
    aliceRoom,
    bobRoom,
    aliceMessages,
    bobMessages
  };
}
|
||||
|
||||
/**
 * Intercept chat-feature backend endpoints on the given page so tests are
 * deterministic and network-independent:
 *  - KLIPY config  → feature enabled
 *  - KLIPY GIFs    → a single mocked GIF (data-URL image)
 *  - link metadata → fixed embed payload for MOCK_EMBED_URL, `failed` otherwise
 */
async function installChatFeatureMocks(page: Page): Promise<void> {
  // GIF feature flag: always on.
  await page.route('**/api/klipy/config', async (route) => {
    await route.fulfill({
      status: 200,
      contentType: 'application/json',
      body: JSON.stringify({ enabled: true })
    });
  });

  // GIF search/list: one result, no pagination (hasNext: false).
  await page.route('**/api/klipy/gifs**', async (route) => {
    await route.fulfill({
      status: 200,
      contentType: 'application/json',
      body: JSON.stringify({
        enabled: true,
        hasNext: false,
        results: [
          {
            id: 'mock-gif-1',
            slug: 'mock-gif-1',
            title: 'Mock Celebration GIF',
            url: MOCK_GIF_IMAGE_URL,
            previewUrl: MOCK_GIF_IMAGE_URL,
            width: 64,
            height: 64
          }
        ]
      })
    });
  });

  // Link embeds: serve fixed metadata only for the known mock URL.
  await page.route('**/api/link-metadata**', async (route) => {
    const requestUrl = new URL(route.request().url());
    const requestedTargetUrl = requestUrl.searchParams.get('url') ?? '';

    if (requestedTargetUrl === MOCK_EMBED_URL) {
      await route.fulfill({
        status: 200,
        contentType: 'application/json',
        body: JSON.stringify({
          title: MOCK_EMBED_TITLE,
          description: MOCK_EMBED_DESCRIPTION,
          imageUrl: MOCK_GIF_IMAGE_URL,
          siteName: 'Mock Docs'
        })
      });
      return;
    }

    // Any other URL: HTTP 200 but flagged as failed, so no embed card renders.
    await route.fulfill({
      status: 200,
      contentType: 'application/json',
      body: JSON.stringify({ failed: true })
    });
  });
}
|
||||
|
||||
function createTextFilePayload(name: string, mimeType: string, content: string): ChatDropFilePayload {
|
||||
return {
|
||||
name,
|
||||
mimeType,
|
||||
base64: Buffer.from(content, 'utf8').toString('base64')
|
||||
};
|
||||
}
|
||||
|
||||
function buildMockSvgMarkup(label: string): string {
|
||||
return [
|
||||
'<svg xmlns="http://www.w3.org/2000/svg" width="160" height="120" viewBox="0 0 160 120">',
|
||||
'<rect width="160" height="120" rx="18" fill="#0f172a" />',
|
||||
'<circle cx="38" cy="36" r="18" fill="#38bdf8" />',
|
||||
'<rect x="66" y="28" width="64" height="16" rx="8" fill="#f8fafc" />',
|
||||
'<rect x="24" y="74" width="112" height="12" rx="6" fill="#22c55e" />',
|
||||
`<text x="24" y="104" fill="#e2e8f0" font-size="12" font-family="Arial, sans-serif">${label}</text>`,
|
||||
'</svg>'
|
||||
].join('');
|
||||
}
|
||||
|
||||
function uniqueName(prefix: string): string {
|
||||
return `${prefix}-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
|
||||
}
|
||||
396
e2e/tests/screen-share/screen-share.spec.ts
Normal file
396
e2e/tests/screen-share/screen-share.spec.ts
Normal file
@@ -0,0 +1,396 @@
|
||||
import { test, expect } from '../../fixtures/multi-client';
|
||||
import {
|
||||
installWebRTCTracking,
|
||||
waitForPeerConnected,
|
||||
isPeerStillConnected,
|
||||
waitForAudioFlow,
|
||||
waitForAudioStatsPresent,
|
||||
waitForVideoFlow,
|
||||
waitForOutboundVideoFlow,
|
||||
waitForInboundVideoFlow,
|
||||
dumpRtcDiagnostics
|
||||
} from '../../helpers/webrtc-helpers';
|
||||
import { RegisterPage } from '../../pages/register.page';
|
||||
import { ServerSearchPage } from '../../pages/server-search.page';
|
||||
import { ChatRoomPage } from '../../pages/chat-room.page';
|
||||
|
||||
/**
|
||||
* Screen sharing E2E tests: verify video, screen-share audio, and voice audio
|
||||
* flow correctly between users during screen sharing.
|
||||
*
|
||||
* Uses the same dedicated-browser-per-client infrastructure as voice tests.
|
||||
* getDisplayMedia is monkey-patched to return a synthetic canvas video stream
|
||||
* + 880 Hz oscillator audio, bypassing the browser picker dialog.
|
||||
*/
|
||||
|
||||
// Per-run unique credentials and server name: Date.now() suffix avoids
// collisions with accounts/servers left over from earlier runs.
const ALICE = { username: `alice_ss_${Date.now()}`, displayName: 'Alice', password: 'TestPass123!' };
const BOB = { username: `bob_ss_${Date.now()}`, displayName: 'Bob', password: 'TestPass123!' };
const SERVER_NAME = `SS Test ${Date.now()}`;
// Voice channel the tests create (if missing) and join.
const VOICE_CHANNEL = 'General';
|
||||
|
||||
/** Register a user and navigate to /search. */
|
||||
async function registerUser(page: import('@playwright/test').Page, user: typeof ALICE) {
|
||||
const registerPage = new RegisterPage(page);
|
||||
|
||||
await registerPage.goto();
|
||||
await expect(registerPage.submitButton).toBeVisible();
|
||||
await registerPage.register(user.username, user.displayName, user.password);
|
||||
await expect(page).toHaveURL(/\/search/, { timeout: 15_000 });
|
||||
}
|
||||
|
||||
/**
 * Both users register → Alice creates server → Bob joins.
 * Postcondition: both pages are on a /room/ URL for SERVER_NAME.
 */
async function setupServerWithBothUsers(
  alice: { page: import('@playwright/test').Page },
  bob: { page: import('@playwright/test').Page }
) {
  await registerUser(alice.page, ALICE);
  await registerUser(bob.page, BOB);

  // Alice creates server (creation navigates her straight into the room).
  const aliceSearch = new ServerSearchPage(alice.page);

  await aliceSearch.createServer(SERVER_NAME, { description: 'Screen share E2E' });
  await expect(alice.page).toHaveURL(/\/room\//, { timeout: 15_000 });

  // Bob joins server via the search results card.
  const bobSearch = new ServerSearchPage(bob.page);

  await bobSearch.searchInput.fill(SERVER_NAME);

  const serverCard = bob.page.locator('button', { hasText: SERVER_NAME }).first();

  await expect(serverCard).toBeVisible({ timeout: 10_000 });
  await serverCard.click();
  await expect(bob.page).toHaveURL(/\/room\//, { timeout: 15_000 });
}
|
||||
|
||||
/**
 * Ensure the voice channel exists, have both users join it, wait for the
 * WebRTC peer connection and audio stats, then expand the voice workspace
 * on both clients (required for the screen-share request flow).
 *
 * Statement order matters: join → peer connected → stats → expand → re-check.
 */
async function joinVoiceTogether(
  alice: { page: import('@playwright/test').Page },
  bob: { page: import('@playwright/test').Page }
) {
  const aliceRoom = new ChatRoomPage(alice.page);
  const existingChannel = alice.page
    .locator('app-rooms-side-panel')
    .getByRole('button', { name: VOICE_CHANNEL, exact: true });

  // Create the channel only if it isn't already in the side panel.
  if (await existingChannel.count() === 0) {
    await aliceRoom.openCreateVoiceChannelDialog();
    await aliceRoom.createChannel(VOICE_CHANNEL);
    await expect(existingChannel).toBeVisible({ timeout: 10_000 });
  }

  await aliceRoom.joinVoiceChannel(VOICE_CHANNEL);
  await expect(alice.page.locator('app-voice-controls')).toBeVisible({ timeout: 15_000 });

  const bobRoom = new ChatRoomPage(bob.page);

  await bobRoom.joinVoiceChannel(VOICE_CHANNEL);
  await expect(bob.page.locator('app-voice-controls')).toBeVisible({ timeout: 15_000 });

  // Wait for WebRTC + audio pipeline
  await waitForPeerConnected(alice.page, 30_000);
  await waitForPeerConnected(bob.page, 30_000);
  await waitForAudioStatsPresent(alice.page, 20_000);
  await waitForAudioStatsPresent(bob.page, 20_000);

  // Expand voice workspace on both clients so the demand-driven screen
  // share request flow can fire (requires connectRemoteShares = true).
  // Click the "VIEW" badge that appears next to the active voice channel.
  const aliceView = alice.page.locator('app-rooms-side-panel')
    .getByRole('button', { name: /view/i })
    .first();
  const bobView = bob.page.locator('app-rooms-side-panel')
    .getByRole('button', { name: /view/i })
    .first();

  await expect(aliceView).toBeVisible({ timeout: 10_000 });
  await aliceView.click();
  await expect(alice.page.locator('app-voice-workspace')).toBeVisible({ timeout: 10_000 });

  await expect(bobView).toBeVisible({ timeout: 10_000 });
  await bobView.click();
  await expect(bob.page.locator('app-voice-workspace')).toBeVisible({ timeout: 10_000 });

  // Re-verify audio stats are present after workspace expansion (the VIEW
  // click can trigger renegotiation which briefly disrupts audio).
  await waitForAudioStatsPresent(alice.page, 20_000);
  await waitForAudioStatsPresent(bob.page, 20_000);
}
|
||||
|
||||
function expectFlowing(
|
||||
delta: { outboundBytesDelta: number; inboundBytesDelta: number; outboundPacketsDelta: number; inboundPacketsDelta: number },
|
||||
label: string
|
||||
) {
|
||||
expect(
|
||||
delta.outboundBytesDelta > 0 || delta.outboundPacketsDelta > 0,
|
||||
`${label} should be sending`
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
delta.inboundBytesDelta > 0 || delta.inboundPacketsDelta > 0,
|
||||
`${label} should be receiving`
|
||||
).toBe(true);
|
||||
}
|
||||
|
||||
// Screen-share suite: two real browser clients per test; getDisplayMedia is
// patched by the fixture (see file header) so no picker dialog appears.
test.describe('Screen sharing', () => {
  test('single user screen share: video and audio flow to receiver, voice audio continues', async ({ createClient }) => {
    test.setTimeout(180_000);

    const alice = await createClient();
    const bob = await createClient();

    // Tracking must be installed before any navigation creates peer connections.
    await installWebRTCTracking(alice.page);
    await installWebRTCTracking(bob.page);

    alice.page.on('console', msg => console.log('[Alice]', msg.text()));
    bob.page.on('console', msg => console.log('[Bob]', msg.text()));

    // ── Setup: register, server, voice ────────────────────────────

    await test.step('Setup server and voice channel', async () => {
      await setupServerWithBothUsers(alice, bob);
      await joinVoiceTogether(alice, bob);
    });

    // ── Verify voice audio before screen share ────────────────────

    await test.step('Voice audio flows before screen share', async () => {
      const aliceDelta = await waitForAudioFlow(alice.page, 30_000);
      const bobDelta = await waitForAudioFlow(bob.page, 30_000);

      expectFlowing(aliceDelta, 'Alice voice');
      expectFlowing(bobDelta, 'Bob voice');
    });

    // ── Alice starts screen sharing ───────────────────────────────

    await test.step('Alice starts screen sharing', async () => {
      const aliceRoom = new ChatRoomPage(alice.page);

      await aliceRoom.startScreenShare();

      // Screen share button should show active state (MonitorOff icon)
      await expect(aliceRoom.isScreenShareActive).toBeVisible({ timeout: 10_000 });
    });

    // ── Verify screen share video flows ───────────────────────────

    await test.step('Screen share video flows from Alice to Bob', async () => {
      // Screen share is unidirectional: Alice sends video, Bob receives it.
      const aliceVideo = await waitForOutboundVideoFlow(alice.page, 30_000);
      const bobVideo = await waitForInboundVideoFlow(bob.page, 30_000);

      // Dump full RTC diagnostics before failing, to aid CI debugging.
      if (aliceVideo.outboundBytesDelta === 0 || bobVideo.inboundBytesDelta === 0) {
        console.log('[Alice RTC]\n' + await dumpRtcDiagnostics(alice.page));
        console.log('[Bob RTC]\n' + await dumpRtcDiagnostics(bob.page));
      }

      expect(
        aliceVideo.outboundBytesDelta > 0 || aliceVideo.outboundPacketsDelta > 0,
        'Alice should be sending screen share video'
      ).toBe(true);

      expect(
        bobVideo.inboundBytesDelta > 0 || bobVideo.inboundPacketsDelta > 0,
        'Bob should be receiving screen share video'
      ).toBe(true);
    });

    // ── Verify voice audio continues during screen share ──────────

    await test.step('Voice audio continues during screen share', async () => {
      const aliceAudio = await waitForAudioFlow(alice.page, 20_000);
      const bobAudio = await waitForAudioFlow(bob.page, 20_000);

      expectFlowing(aliceAudio, 'Alice voice during screen share');
      expectFlowing(bobAudio, 'Bob voice during screen share');
    });

    // ── Bob can hear Alice talk while she screen shares ───────────

    await test.step('Bob receives audio from Alice during screen share', async () => {
      // Specifically check Bob is receiving audio (from Alice's voice)
      const bobAudio = await waitForAudioFlow(bob.page, 15_000);

      expect(
        bobAudio.inboundBytesDelta > 0,
        'Bob should receive voice audio while Alice screen shares'
      ).toBe(true);
    });

    // ── Alice stops screen sharing ────────────────────────────────

    await test.step('Alice stops screen sharing', async () => {
      const aliceRoom = new ChatRoomPage(alice.page);

      await aliceRoom.stopScreenShare();

      // Active icon should disappear - regular Monitor icon shown instead
      await expect(
        aliceRoom.voiceControls.locator('button:has(ng-icon[name="lucideMonitor"])').first()
      ).toBeVisible({ timeout: 10_000 });
    });

    // ── Voice audio still works after screen share ends ───────────

    await test.step('Voice audio resumes normally after screen share stops', async () => {
      const aliceAudio = await waitForAudioFlow(alice.page, 20_000);
      const bobAudio = await waitForAudioFlow(bob.page, 20_000);

      expectFlowing(aliceAudio, 'Alice voice after screen share');
      expectFlowing(bobAudio, 'Bob voice after screen share');
    });
  });

  test('multiple users screen share simultaneously', async ({ createClient }) => {
    test.setTimeout(180_000);

    const alice = await createClient();
    const bob = await createClient();

    await installWebRTCTracking(alice.page);
    await installWebRTCTracking(bob.page);

    alice.page.on('console', msg => console.log('[Alice]', msg.text()));
    bob.page.on('console', msg => console.log('[Bob]', msg.text()));

    await test.step('Setup server and voice channel', async () => {
      await setupServerWithBothUsers(alice, bob);
      await joinVoiceTogether(alice, bob);
    });

    // ── Both users start screen sharing ───────────────────────────

    await test.step('Alice starts screen sharing', async () => {
      const aliceRoom = new ChatRoomPage(alice.page);

      await aliceRoom.startScreenShare();
      await expect(aliceRoom.isScreenShareActive).toBeVisible({ timeout: 10_000 });
    });

    await test.step('Bob starts screen sharing', async () => {
      const bobRoom = new ChatRoomPage(bob.page);

      await bobRoom.startScreenShare();
      await expect(bobRoom.isScreenShareActive).toBeVisible({ timeout: 10_000 });
    });

    // ── Verify video flows in both directions ─────────────────────

    await test.step('Video flows bidirectionally with both screen shares active', async () => {
      // Both sharing: each page sends and receives video
      const aliceVideo = await waitForVideoFlow(alice.page, 30_000);
      const bobVideo = await waitForVideoFlow(bob.page, 30_000);

      expectFlowing(aliceVideo, 'Alice screen share video');
      expectFlowing(bobVideo, 'Bob screen share video');
    });

    // ── Voice audio continues with dual screen shares ─────────────

    await test.step('Voice audio continues with both users screen sharing', async () => {
      const aliceAudio = await waitForAudioFlow(alice.page, 20_000);
      const bobAudio = await waitForAudioFlow(bob.page, 20_000);

      expectFlowing(aliceAudio, 'Alice voice during dual screen share');
      expectFlowing(bobAudio, 'Bob voice during dual screen share');
    });

    // ── Both stop screen sharing ──────────────────────────────────

    await test.step('Both users stop screen sharing', async () => {
      const aliceRoom = new ChatRoomPage(alice.page);
      const bobRoom = new ChatRoomPage(bob.page);

      await aliceRoom.stopScreenShare();
      await expect(
        aliceRoom.voiceControls.locator('button:has(ng-icon[name="lucideMonitor"])').first()
      ).toBeVisible({ timeout: 10_000 });

      await bobRoom.stopScreenShare();
      await expect(
        bobRoom.voiceControls.locator('button:has(ng-icon[name="lucideMonitor"])').first()
      ).toBeVisible({ timeout: 10_000 });
    });
  });

  test('screen share connection stays stable for 10+ seconds', async ({ createClient }) => {
    test.setTimeout(180_000);

    const alice = await createClient();
    const bob = await createClient();

    await installWebRTCTracking(alice.page);
    await installWebRTCTracking(bob.page);

    alice.page.on('console', msg => console.log('[Alice]', msg.text()));
    bob.page.on('console', msg => console.log('[Bob]', msg.text()));

    await test.step('Setup server and voice channel', async () => {
      await setupServerWithBothUsers(alice, bob);
      await joinVoiceTogether(alice, bob);
    });

    await test.step('Alice starts screen sharing', async () => {
      const aliceRoom = new ChatRoomPage(alice.page);

      await aliceRoom.startScreenShare();
      await expect(aliceRoom.isScreenShareActive).toBeVisible({ timeout: 10_000 });

      // Wait for video pipeline to fully establish
      await waitForOutboundVideoFlow(alice.page, 30_000);
      await waitForInboundVideoFlow(bob.page, 30_000);
    });

    // ── Stability checkpoints at 0s, 5s, 10s ─────────────────────

    await test.step('Connection stays stable for 10+ seconds during screen share', async () => {
      // Waits are cumulative: checks at t=0s, t=5s, t=10s.
      for (const checkpoint of [
        0,
        5_000,
        5_000
      ]) {
        if (checkpoint > 0) {
          await alice.page.waitForTimeout(checkpoint);
        }

        const aliceConnected = await isPeerStillConnected(alice.page);
        const bobConnected = await isPeerStillConnected(bob.page);

        expect(aliceConnected, 'Alice should still be connected').toBe(true);
        expect(bobConnected, 'Bob should still be connected').toBe(true);
      }

      // After 10s - verify both video and audio still flowing
      const aliceVideo = await waitForOutboundVideoFlow(alice.page, 15_000);
      const bobVideo = await waitForInboundVideoFlow(bob.page, 15_000);

      expect(
        aliceVideo.outboundBytesDelta > 0,
        'Alice still sending screen share video after 10s'
      ).toBe(true);

      expect(
        bobVideo.inboundBytesDelta > 0,
        'Bob still receiving screen share video after 10s'
      ).toBe(true);

      const aliceAudio = await waitForAudioFlow(alice.page, 15_000);
      const bobAudio = await waitForAudioFlow(bob.page, 15_000);

      expectFlowing(aliceAudio, 'Alice voice after 10s screen share');
      expectFlowing(bobAudio, 'Bob voice after 10s screen share');
    });

    // ── Clean disconnect ──────────────────────────────────────────

    await test.step('Alice stops screen share and disconnects', async () => {
      const aliceRoom = new ChatRoomPage(alice.page);

      await aliceRoom.stopScreenShare();
      await aliceRoom.disconnectButton.click();
      await expect(aliceRoom.disconnectButton).not.toBeVisible({ timeout: 10_000 });
    });
  });
});
|
||||
260
e2e/tests/voice/voice-full-journey.spec.ts
Normal file
260
e2e/tests/voice/voice-full-journey.spec.ts
Normal file
@@ -0,0 +1,260 @@
|
||||
import { test, expect } from '../../fixtures/multi-client';
|
||||
import {
|
||||
installWebRTCTracking,
|
||||
waitForPeerConnected,
|
||||
isPeerStillConnected,
|
||||
getAudioStatsDelta,
|
||||
waitForAudioFlow,
|
||||
waitForAudioStatsPresent,
|
||||
dumpRtcDiagnostics
|
||||
} from '../../helpers/webrtc-helpers';
|
||||
import { RegisterPage } from '../../pages/register.page';
|
||||
import { ServerSearchPage } from '../../pages/server-search.page';
|
||||
import { ChatRoomPage } from '../../pages/chat-room.page';
|
||||
|
||||
/**
|
||||
* Full user journey: register → create server → join → voice → verify audio
|
||||
* for 10+ seconds of stable connectivity.
|
||||
*
|
||||
* Uses two independent browser contexts (Alice & Bob) to simulate real
|
||||
* multi-user WebRTC voice chat.
|
||||
*/
|
||||
|
||||
// Per-run unique credentials and server name: Date.now() suffix avoids
// collisions with accounts/servers left over from earlier runs.
const ALICE = { username: `alice_${Date.now()}`, displayName: 'Alice', password: 'TestPass123!' };
const BOB = { username: `bob_${Date.now()}`, displayName: 'Bob', password: 'TestPass123!' };
const SERVER_NAME = `E2E Test Server ${Date.now()}`;
// Voice channel name used throughout the journey.
const VOICE_CHANNEL = 'General';
|
||||
|
||||
test.describe('Full user journey: register → server → voice chat', () => {
  // Single linear scenario rather than separate tests: each step depends on
  // the state produced by the previous one (accounts → server → voice).
  test('two users register, create server, join voice, and stay connected 10+ seconds with audio', async ({ createClient }) => {
    test.setTimeout(180_000); // 3 min - covers registration, server creation, voice establishment, and 10s stability check

    // Two independent browser contexts from the multi-client fixture,
    // simulating two real users on separate machines.
    const alice = await createClient();
    const bob = await createClient();

    // Install WebRTC tracking before any navigation so every
    // RTCPeerConnection the app creates is observed from the start.
    await installWebRTCTracking(alice.page);
    await installWebRTCTracking(bob.page);

    // Forward browser console for debugging (shows up in the test runner log).
    alice.page.on('console', msg => console.log('[Alice]', msg.text()));
    bob.page.on('console', msg => console.log('[Bob]', msg.text()));

    // ── Step 1: Register both users ──────────────────────────────────

    await test.step('Alice registers an account', async () => {
      const registerPage = new RegisterPage(alice.page);

      await registerPage.goto();
      await expect(registerPage.submitButton).toBeVisible();
      await registerPage.register(ALICE.username, ALICE.displayName, ALICE.password);

      // After registration, app should navigate to /search
      await expect(alice.page).toHaveURL(/\/search/, { timeout: 15_000 });
    });

    await test.step('Bob registers an account', async () => {
      const registerPage = new RegisterPage(bob.page);

      await registerPage.goto();
      await expect(registerPage.submitButton).toBeVisible();
      await registerPage.register(BOB.username, BOB.displayName, BOB.password);

      // Same post-registration redirect expected as for Alice.
      await expect(bob.page).toHaveURL(/\/search/, { timeout: 15_000 });
    });

    // ── Step 2: Alice creates a server ───────────────────────────────

    await test.step('Alice creates a new server', async () => {
      const searchPage = new ServerSearchPage(alice.page);

      await searchPage.createServer(SERVER_NAME, {
        description: 'E2E test server for voice testing'
      });

      // After server creation, app navigates to the room
      await expect(alice.page).toHaveURL(/\/room\//, { timeout: 15_000 });
    });

    // ── Step 3: Bob joins the server ─────────────────────────────────

    await test.step('Bob finds and joins the server', async () => {
      const searchPage = new ServerSearchPage(bob.page);

      // Search for the server by the unique per-run name.
      await searchPage.searchInput.fill(SERVER_NAME);

      // Wait for search results and click the server card.
      const serverCard = bob.page.locator('button', { hasText: SERVER_NAME }).first();

      await expect(serverCard).toBeVisible({ timeout: 10_000 });
      await serverCard.click();

      // Bob should be in the room now
      await expect(bob.page).toHaveURL(/\/room\//, { timeout: 15_000 });
    });

    // ── Step 4: Create a voice channel (if one doesn't exist) ────────

    await test.step('Alice ensures a voice channel is available', async () => {
      const chatRoom = new ChatRoomPage(alice.page);
      // Scope the lookup to the side panel so a matching text elsewhere on
      // the page cannot produce a false positive.
      const existingVoiceChannel = alice.page.locator('app-rooms-side-panel')
        .getByRole('button', { name: VOICE_CHANNEL, exact: true });
      const voiceChannelExists = await existingVoiceChannel.count() > 0;

      if (!voiceChannelExists) {
        // Click "Create Voice Channel" plus button
        await chatRoom.openCreateVoiceChannelDialog();
        await chatRoom.createChannel(VOICE_CHANNEL);

        // Wait for the channel to appear
        await expect(existingVoiceChannel).toBeVisible({ timeout: 10_000 });
      }
    });

    // ── Step 5: Both users join the voice channel ────────────────────

    await test.step('Alice joins the voice channel', async () => {
      const chatRoom = new ChatRoomPage(alice.page);

      await chatRoom.joinVoiceChannel(VOICE_CHANNEL);

      // Voice controls should appear (indicates voice is connected)
      await expect(alice.page.locator('app-voice-controls')).toBeVisible({ timeout: 15_000 });
    });

    await test.step('Bob joins the voice channel', async () => {
      const chatRoom = new ChatRoomPage(bob.page);

      await chatRoom.joinVoiceChannel(VOICE_CHANNEL);

      await expect(bob.page.locator('app-voice-controls')).toBeVisible({ timeout: 15_000 });
    });

    // ── Step 6: Verify WebRTC connection establishes ─────────────────

    await test.step('WebRTC peer connection reaches "connected" state', async () => {
      await waitForPeerConnected(alice.page, 30_000);
      await waitForPeerConnected(bob.page, 30_000);

      // Wait for audio RTP pipeline to appear before measuring deltas -
      // renegotiation after initial connect can temporarily remove stats.
      await waitForAudioStatsPresent(alice.page, 20_000);
      await waitForAudioStatsPresent(bob.page, 20_000);
    });

    // ── Step 7: Verify audio is flowing in both directions ───────────

    await test.step('Audio packets are flowing between Alice and Bob', async () => {
      const aliceDelta = await waitForAudioFlow(alice.page, 30_000);
      const bobDelta = await waitForAudioFlow(bob.page, 30_000);

      // On failure (any direction silent), dump full RTC diagnostics BEFORE
      // asserting so the log explains what broke.
      if (aliceDelta.outboundBytesDelta === 0 || aliceDelta.inboundBytesDelta === 0
        || bobDelta.outboundBytesDelta === 0 || bobDelta.inboundBytesDelta === 0) {
        console.log('[Alice RTC Diagnostics]\n' + await dumpRtcDiagnostics(alice.page));
        console.log('[Bob RTC Diagnostics]\n' + await dumpRtcDiagnostics(bob.page));
      }

      expectAudioFlow(aliceDelta, 'Alice');
      expectAudioFlow(bobDelta, 'Bob');
    });

    // ── Step 8: Verify UI states are correct ─────────────────────────

    await test.step('Voice UI shows correct state for both users', async () => {
      const aliceRoom = new ChatRoomPage(alice.page);
      const bobRoom = new ChatRoomPage(bob.page);

      // Both should see voice controls with "Connected" status
      await expect(alice.page.locator('app-voice-controls')).toBeVisible();
      await expect(bob.page.locator('app-voice-controls')).toBeVisible();

      // Both should see the voice workspace or at least voice users listed
      // Check that both users appear in the voice channel user list
      const aliceSeesBob = aliceRoom.channelsSidePanel.getByText(BOB.displayName).first();
      const bobSeesAlice = bobRoom.channelsSidePanel.getByText(ALICE.displayName).first();

      await expect(aliceSeesBob).toBeVisible({ timeout: 10_000 });
      await expect(bobSeesAlice).toBeVisible({ timeout: 10_000 });
    });

    // ── Step 9: Stay connected for 10+ seconds, verify stability ─────

    await test.step('Connection remains stable for 10+ seconds', async () => {
      // Each list entry is a WAIT duration before the next check, not an
      // absolute timestamp: 0 → check immediately, then wait 5s and check,
      // then wait another 5s and check — i.e. checks at ~0s, ~5s, ~10s.
      for (const checkpoint of [
        0,
        5_000,
        5_000
      ]) {
        if (checkpoint > 0) {
          await alice.page.waitForTimeout(checkpoint);
        }

        const aliceConnected = await isPeerStillConnected(alice.page);
        const bobConnected = await isPeerStillConnected(bob.page);

        expect(aliceConnected, 'Alice should still be connected').toBe(true);
        expect(bobConnected, 'Bob should still be connected').toBe(true);
      }

      // After 10s total, verify audio is still flowing
      const aliceDelta = await waitForAudioFlow(alice.page, 15_000);
      const bobDelta = await waitForAudioFlow(bob.page, 15_000);

      expectAudioFlow(aliceDelta, 'Alice after 10s');
      expectAudioFlow(bobDelta, 'Bob after 10s');
    });

    // ── Step 10: Verify mute/unmute works correctly ──────────────────

    await test.step('Mute toggle works correctly', async () => {
      const aliceRoom = new ChatRoomPage(alice.page);

      // Alice mutes - click the first button in voice controls (mute button)
      await aliceRoom.muteButton.click();

      // After muting, Alice's outbound audio should stop increasing.
      // When muted, bytesSent may still show small comfort noise or zero growth,
      // so no assertion is made on the muted delta — the call below only
      // lets a 2s sampling window elapse while muted.
      // NOTE(review): a stronger check would assert Bob's inbound delta for
      // Alice's stream drops — confirm helpers expose per-peer streams first.
      await getAudioStatsDelta(alice.page, 2_000);

      // Alice unmutes
      await aliceRoom.muteButton.click();

      // After unmuting, outbound should resume
      const unmutedDelta = await waitForAudioFlow(alice.page, 15_000);

      expectAudioFlow(unmutedDelta, 'Alice after unmuting');
    });

    // ── Step 11: Clean disconnect ────────────────────────────────────

    await test.step('Alice disconnects from voice', async () => {
      const aliceRoom = new ChatRoomPage(alice.page);

      // Click the disconnect/hang-up button
      await aliceRoom.disconnectButton.click();

      // Connected controls should collapse for Alice after disconnect
      await expect(aliceRoom.disconnectButton).not.toBeVisible({ timeout: 10_000 });
    });
  });
});
|
||||
|
||||
function expectAudioFlow(delta: {
|
||||
outboundBytesDelta: number;
|
||||
inboundBytesDelta: number;
|
||||
outboundPacketsDelta: number;
|
||||
inboundPacketsDelta: number;
|
||||
}, label: string): void {
|
||||
expect(
|
||||
delta.outboundBytesDelta > 0 || delta.outboundPacketsDelta > 0,
|
||||
`${label} should be sending audio`
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
delta.inboundBytesDelta > 0 || delta.inboundPacketsDelta > 0,
|
||||
`${label} should be receiving audio`
|
||||
).toBe(true);
|
||||
}
|
||||
Reference in New Issue
Block a user