feat: Add TURN server support
All checks were successful
Queue Release Build / prepare (push) Successful in 15s
Deploy Web Apps / deploy (push) Successful in 5m35s
Queue Release Build / build-linux (push) Successful in 24m45s
Queue Release Build / build-windows (push) Successful in 13m52s
Queue Release Build / finalize (push) Successful in 23s

This commit is contained in:
2026-04-18 21:27:04 +02:00
parent 167c45ba8d
commit 44588e8789
60 changed files with 2404 additions and 365 deletions

36
e2e/README.md Normal file
View File

@@ -0,0 +1,36 @@
# End-to-End Tests
Playwright suite for the MetoYou / Toju product client. The tests exercise browser flows such as authentication, chat, voice, screen sharing, and settings with reusable page objects and helpers.
## Commands
Run these from the repository root:
- `npm run test:e2e` runs the full Playwright suite.
- `npm run test:e2e:ui` opens Playwright UI mode.
- `npm run test:e2e:debug` runs the suite in debug mode.
- `npm run test:e2e:report` opens the HTML report in `test-results/html-report`.
You can also run `npx playwright test` from `e2e/` directly.
## Runtime
- `playwright.config.ts` starts `cd ../toju-app && npx ng serve` as the test web server.
- The suite targets `http://localhost:4200`.
- Tests currently run with a single Chromium worker.
- The browser launches with fake media-device flags and grants microphone/camera permissions.
- Artifacts are written to `../test-results/artifacts`, and the HTML report is written to `../test-results/html-report`.
## Structure
| Path | Description |
| --- | --- |
| `tests/` | Test specs grouped by feature area such as `auth/`, `chat/`, `voice/`, `screen-share/`, and `settings/` |
| `pages/` | Reusable Playwright page objects |
| `helpers/` | Test helpers, fake-server utilities, and WebRTC helpers |
| `fixtures/` | Shared test fixtures |
## Notes
- The suite is product-client focused; it does not currently spin up the marketing website.
- Keep reusable browser flows in `pages/` and cross-test utilities in `helpers/`.

View File

@@ -26,7 +26,7 @@ interface SeededEndpointStorageState {
}
function buildSeededEndpointStorageState(
endpointsOrPort: ReadonlyArray<SeededEndpointInput> | number = Number(process.env.TEST_SERVER_PORT) || 3099
endpointsOrPort: readonly SeededEndpointInput[] | number = Number(process.env.TEST_SERVER_PORT) || 3099
): SeededEndpointStorageState {
const endpoints = Array.isArray(endpointsOrPort)
? endpointsOrPort.map((endpoint) => ({
@@ -81,7 +81,7 @@ export async function installTestServerEndpoint(
export async function installTestServerEndpoints(
context: BrowserContext,
endpoints: ReadonlyArray<SeededEndpointInput>
endpoints: readonly SeededEndpointInput[]
): Promise<void> {
const storageState = buildSeededEndpointStorageState(endpoints);
@@ -111,7 +111,7 @@ export async function seedTestServerEndpoint(
export async function seedTestServerEndpoints(
page: Page,
endpoints: ReadonlyArray<SeededEndpointInput>
endpoints: readonly SeededEndpointInput[]
): Promise<void> {
const storageState = buildSeededEndpointStorageState(endpoints);

View File

@@ -129,6 +129,48 @@ export async function installWebRTCTracking(page: Page): Promise<void> {
/**
* Wait until at least one RTCPeerConnection reaches the 'connected' state.
*/
/**
 * Patch `window.AudioContext` so that every context the page creates
 * resumes itself whenever it enters the "suspended" state, keeping the
 * input-gain Web Audio pipeline (`source -> gain -> destination`) from
 * stalling.
 *
 * On Linux with multiple headless Chromium instances, `new AudioContext()`
 * can start suspended even without a user-gesture gate, which leaves the
 * media pipeline emitting only a single RTP packet.
 *
 * Install once per page, BEFORE navigating, alongside
 * `installWebRTCTracking`.
 */
export async function installAutoResumeAudioContext(page: Page): Promise<void> {
  await page.addInitScript(() => {
    const NativeAudioContext = window.AudioContext;

    const resumeIfSuspended = (ctx: AudioContext): void => {
      if (ctx.state === 'suspended') {
        ctx.resume().catch(() => { /* noop */ });
      }
    };

    (window as any).AudioContext = function(this: AudioContext, ...ctorArgs: any[]) {
      const ctx: AudioContext = new NativeAudioContext(...ctorArgs);

      // Record every created context so tests can inspect them later.
      const registry = ((window as any).__trackedAudioContexts ??= []) as AudioContext[];
      registry.push(ctx);

      // Kick a suspended context immediately...
      resumeIfSuspended(ctx);
      // ...and again on any later transition back to "suspended".
      ctx.addEventListener('statechange', () => resumeIfSuspended(ctx));

      return ctx;
    } as any;

    // Keep instanceof checks and static members working on the wrapper.
    (window as any).AudioContext.prototype = NativeAudioContext.prototype;
    Object.setPrototypeOf((window as any).AudioContext, NativeAudioContext);
  });
}
export async function waitForPeerConnected(page: Page, timeout = 30_000): Promise<void> {
await page.waitForFunction(
() => (window as any).__rtcConnections?.some(
@@ -172,7 +214,7 @@ export async function waitForConnectedPeerCount(page: Page, expectedCount: numbe
/**
* Resume all suspended AudioContext instances created by the synthetic
* media patch. Uses CDP `Runtime.evaluate` with `userGesture: true` so
* Chrome treats the call as a user-gesture this satisfies the autoplay
* Chrome treats the call as a user-gesture - this satisfies the autoplay
* policy that otherwise blocks `AudioContext.resume()`.
*/
export async function resumeSyntheticAudioContexts(page: Page): Promise<number> {

View File

@@ -22,7 +22,11 @@ export default defineConfig({
...devices['Desktop Chrome'],
permissions: ['microphone', 'camera'],
launchOptions: {
args: ['--use-fake-device-for-media-stream', '--use-fake-ui-for-media-stream', '--autoplay-policy=no-user-gesture-required']
args: [
'--use-fake-device-for-media-stream',
'--use-fake-ui-for-media-stream',
'--autoplay-policy=no-user-gesture-required'
]
}
}
}

View File

@@ -8,7 +8,8 @@ import {
waitForVideoFlow,
waitForOutboundVideoFlow,
waitForInboundVideoFlow,
dumpRtcDiagnostics
dumpRtcDiagnostics,
installAutoResumeAudioContext
} from '../../helpers/webrtc-helpers';
import { RegisterPage } from '../../pages/register.page';
import { ServerSearchPage } from '../../pages/server-search.page';
@@ -38,7 +39,7 @@ async function registerUser(page: import('@playwright/test').Page, user: typeof
await expect(page).toHaveURL(/\/search/, { timeout: 15_000 });
}
/** Both users register Alice creates server Bob joins. */
/** Both users register -> Alice creates server -> Bob joins. */
async function setupServerWithBothUsers(
alice: { page: import('@playwright/test').Page },
bob: { page: import('@playwright/test').Page }
@@ -80,19 +81,45 @@ async function joinVoiceTogether(
await expect(existingChannel).toBeVisible({ timeout: 10_000 });
}
await aliceRoom.joinVoiceChannel(VOICE_CHANNEL);
await expect(alice.page.locator('app-voice-controls')).toBeVisible({ timeout: 15_000 });
const bobRoom = new ChatRoomPage(bob.page);
const doJoin = async () => {
await aliceRoom.joinVoiceChannel(VOICE_CHANNEL);
await expect(alice.page.locator('app-voice-controls')).toBeVisible({ timeout: 15_000 });
await bobRoom.joinVoiceChannel(VOICE_CHANNEL);
await expect(bob.page.locator('app-voice-controls')).toBeVisible({ timeout: 15_000 });
await bobRoom.joinVoiceChannel(VOICE_CHANNEL);
await expect(bob.page.locator('app-voice-controls')).toBeVisible({ timeout: 15_000 });
// Wait for WebRTC + audio pipeline
await waitForPeerConnected(alice.page, 30_000);
await waitForPeerConnected(bob.page, 30_000);
await waitForAudioStatsPresent(alice.page, 20_000);
await waitForAudioStatsPresent(bob.page, 20_000);
// Wait for WebRTC + audio pipeline
await waitForPeerConnected(alice.page, 30_000);
await waitForPeerConnected(bob.page, 30_000);
await waitForAudioStatsPresent(alice.page, 20_000);
await waitForAudioStatsPresent(bob.page, 20_000);
};
await doJoin();
// Chromium's --use-fake-device-for-media-stream can produce a silent
// capture track on the very first getUserMedia call. If bidirectional
// audio doesn't flow within a short window, leave and rejoin voice to
// re-acquire the mic (second getUserMedia on a warm device works).
const aliceDelta = await waitForAudioFlow(alice.page, 10_000);
const bobDelta = await waitForAudioFlow(bob.page, 10_000);
const aliceFlowing =
(aliceDelta.outboundBytesDelta > 0 || aliceDelta.outboundPacketsDelta > 0) &&
(aliceDelta.inboundBytesDelta > 0 || aliceDelta.inboundPacketsDelta > 0);
const bobFlowing =
(bobDelta.outboundBytesDelta > 0 || bobDelta.outboundPacketsDelta > 0) &&
(bobDelta.inboundBytesDelta > 0 || bobDelta.inboundPacketsDelta > 0);
if (!aliceFlowing || !bobFlowing) {
// Leave voice
await aliceRoom.disconnectButton.click();
await bobRoom.disconnectButton.click();
await alice.page.waitForTimeout(2_000);
// Rejoin
await doJoin();
}
// Expand voice workspace on both clients so the demand-driven screen
// share request flow can fire (requires connectRemoteShares = true).
@@ -142,6 +169,20 @@ test.describe('Screen sharing', () => {
await installWebRTCTracking(alice.page);
await installWebRTCTracking(bob.page);
await installAutoResumeAudioContext(alice.page);
await installAutoResumeAudioContext(bob.page);
// Seed deterministic voice settings so noise reduction doesn't
// swallow the fake audio tone.
const voiceSettings = JSON.stringify({
inputVolume: 100, outputVolume: 100, audioBitrate: 96,
latencyProfile: 'balanced', includeSystemAudio: false,
noiseReduction: false, screenShareQuality: 'balanced',
askScreenShareQuality: false
});
await alice.page.addInitScript((settingsValue: string) => localStorage.setItem('metoyou_voice_settings', settingsValue), voiceSettings);
await bob.page.addInitScript((settingsValue: string) => localStorage.setItem('metoyou_voice_settings', settingsValue), voiceSettings);
alice.page.on('console', msg => console.log('[Alice]', msg.text()));
bob.page.on('console', msg => console.log('[Bob]', msg.text()));
@@ -251,6 +292,18 @@ test.describe('Screen sharing', () => {
await installWebRTCTracking(alice.page);
await installWebRTCTracking(bob.page);
await installAutoResumeAudioContext(alice.page);
await installAutoResumeAudioContext(bob.page);
const voiceSettings = JSON.stringify({
inputVolume: 100, outputVolume: 100, audioBitrate: 96,
latencyProfile: 'balanced', includeSystemAudio: false,
noiseReduction: false, screenShareQuality: 'balanced',
askScreenShareQuality: false
});
await alice.page.addInitScript((settingsValue: string) => localStorage.setItem('metoyou_voice_settings', settingsValue), voiceSettings);
await bob.page.addInitScript((settingsValue: string) => localStorage.setItem('metoyou_voice_settings', settingsValue), voiceSettings);
alice.page.on('console', msg => console.log('[Alice]', msg.text()));
bob.page.on('console', msg => console.log('[Bob]', msg.text()));
@@ -323,6 +376,18 @@ test.describe('Screen sharing', () => {
await installWebRTCTracking(alice.page);
await installWebRTCTracking(bob.page);
await installAutoResumeAudioContext(alice.page);
await installAutoResumeAudioContext(bob.page);
const voiceSettings = JSON.stringify({
inputVolume: 100, outputVolume: 100, audioBitrate: 96,
latencyProfile: 'balanced', includeSystemAudio: false,
noiseReduction: false, screenShareQuality: 'balanced',
askScreenShareQuality: false
});
await alice.page.addInitScript((settingsValue: string) => localStorage.setItem('metoyou_voice_settings', settingsValue), voiceSettings);
await bob.page.addInitScript((settingsValue: string) => localStorage.setItem('metoyou_voice_settings', settingsValue), voiceSettings);
alice.page.on('console', msg => console.log('[Alice]', msg.text()));
bob.page.on('console', msg => console.log('[Bob]', msg.text()));

View File

@@ -0,0 +1,227 @@
import { test, expect } from '../../fixtures/multi-client';
import { RegisterPage } from '../../pages/register.page';
import { ServerSearchPage } from '../../pages/server-search.page';
import { ChatRoomPage } from '../../pages/chat-room.page';
import {
installAutoResumeAudioContext,
installWebRTCTracking,
waitForConnectedPeerCount
} from '../../helpers/webrtc-helpers';
// Pre-serialized `metoyou_voice_settings` localStorage payload seeded into
// each page before load. Full volumes and disabled noise reduction keep the
// fake capture tone from being attenuated or gated during voice tests.
const VOICE_SETTINGS = JSON.stringify({
  inputVolume: 100,
  outputVolume: 100,
  audioBitrate: 96,
  latencyProfile: 'balanced',
  includeSystemAudio: false,
  noiseReduction: false,
  screenShareQuality: 'balanced',
  askScreenShareQuality: false
});
/**
 * Write deterministic voice settings into the page's localStorage before
 * navigation so noise reduction and input gating don't interfere with the
 * fake audio tone.
 */
async function seedVoiceSettings(page: import('@playwright/test').Page): Promise<void> {
  await page.addInitScript(
    (settings: string) => localStorage.setItem('metoyou_voice_settings', settings),
    VOICE_SETTINGS
  );
}
/**
 * Close all of a client's RTCPeerConnections and prevent any
 * reconnection by sabotaging the SDP negotiation methods on the
 * prototype - new connections get created but can never complete ICE.
 *
 * Chromium doesn't fire `connectionstatechange` on programmatic
 * `close()`, so we dispatch the event manually so the app's recovery
 * code runs and updates the connected-peers signal.
 */
async function killAndBlockPeerConnections(page: import('@playwright/test').Page): Promise<void> {
  await page.evaluate(() => {
    // Every negotiation attempt on a NEW connection fails immediately.
    const rejectNegotiation = () =>
      Promise.reject(new DOMException('blocked', 'NotAllowedError'));

    const proto = RTCPeerConnection.prototype;
    proto.createOffer = rejectNegotiation;
    proto.createAnswer = rejectNegotiation;
    proto.setLocalDescription = rejectNegotiation;
    proto.setRemoteDescription = rejectNegotiation;

    // Tear down every live connection, manually firing the state-change
    // event that Chromium omits when close() is called from JS.
    const tracked = (window as { __rtcConnections?: RTCPeerConnection[] }).__rtcConnections;
    (tracked ?? []).forEach((pc) => {
      try {
        pc.close();
        pc.dispatchEvent(new Event('connectionstatechange'));
      } catch { /* already closed */ }
    });
  });
}
test.describe('Connectivity warning', () => {
  // Three clients plus a 15s desync-detection window make this slow.
  test.describe.configure({ timeout: 180_000 });

  test('shows warning icon when a peer loses all connections', async ({ createClient }) => {
    const suffix = `connwarn_${Date.now()}`;
    const serverName = `ConnWarn ${suffix}`;
    const alice = await createClient();
    const bob = await createClient();
    const charlie = await createClient();

    // ── Install WebRTC tracking & AudioContext auto-resume ──
    // Init scripts must be registered before any navigation happens.
    for (const client of [
      alice,
      bob,
      charlie
    ]) {
      await installWebRTCTracking(client.page);
      await installAutoResumeAudioContext(client.page);
      await seedVoiceSettings(client.page);
    }

    // ── Register all three users ──
    await test.step('Register Alice', async () => {
      const register = new RegisterPage(alice.page);
      await register.goto();
      await register.register(`alice_${suffix}`, 'Alice', 'TestPass123!');
      await expect(alice.page.getByPlaceholder('Search servers...')).toBeVisible({ timeout: 30_000 });
    });
    await test.step('Register Bob', async () => {
      const register = new RegisterPage(bob.page);
      await register.goto();
      await register.register(`bob_${suffix}`, 'Bob', 'TestPass123!');
      await expect(bob.page.getByPlaceholder('Search servers...')).toBeVisible({ timeout: 30_000 });
    });
    await test.step('Register Charlie', async () => {
      const register = new RegisterPage(charlie.page);
      await register.goto();
      await register.register(`charlie_${suffix}`, 'Charlie', 'TestPass123!');
      await expect(charlie.page.getByPlaceholder('Search servers...')).toBeVisible({ timeout: 30_000 });
    });

    // ── Create server and have everyone join ──
    await test.step('Alice creates a server', async () => {
      const search = new ServerSearchPage(alice.page);
      await search.createServer(serverName);
    });
    await test.step('Bob joins the server', async () => {
      const search = new ServerSearchPage(bob.page);
      await search.searchInput.fill(serverName);
      const card = bob.page.locator('button', { hasText: serverName }).first();
      await expect(card).toBeVisible({ timeout: 15_000 });
      await card.click();
      await expect(bob.page).toHaveURL(/\/room\//, { timeout: 15_000 });
    });
    await test.step('Charlie joins the server', async () => {
      const search = new ServerSearchPage(charlie.page);
      await search.searchInput.fill(serverName);
      const card = charlie.page.locator('button', { hasText: serverName }).first();
      await expect(card).toBeVisible({ timeout: 15_000 });
      await card.click();
      await expect(charlie.page).toHaveURL(/\/room\//, { timeout: 15_000 });
    });

    const aliceRoom = new ChatRoomPage(alice.page);
    const bobRoom = new ChatRoomPage(bob.page);
    const charlieRoom = new ChatRoomPage(charlie.page);

    // ── Everyone joins voice ──
    await test.step('All three join voice', async () => {
      await aliceRoom.joinVoiceChannel('General');
      await bobRoom.joinVoiceChannel('General');
      await charlieRoom.joinVoiceChannel('General');
    });
    await test.step('All users see each other in voice', async () => {
      // Each user should see the other two in the voice channel list.
      await expect(aliceRoom.channelsSidePanel.getByText('Bob')).toBeVisible({ timeout: 20_000 });
      await expect(aliceRoom.channelsSidePanel.getByText('Charlie')).toBeVisible({ timeout: 20_000 });
      await expect(bobRoom.channelsSidePanel.getByText('Alice')).toBeVisible({ timeout: 20_000 });
      await expect(bobRoom.channelsSidePanel.getByText('Charlie')).toBeVisible({ timeout: 20_000 });
      await expect(charlieRoom.channelsSidePanel.getByText('Alice')).toBeVisible({ timeout: 20_000 });
      await expect(charlieRoom.channelsSidePanel.getByText('Bob')).toBeVisible({ timeout: 20_000 });
    });

    // ── Wait for full mesh to establish ──
    await test.step('All peer connections establish', async () => {
      // Each client should have 2 connected peers (full mesh of 3).
      await waitForConnectedPeerCount(alice.page, 2, 30_000);
      await waitForConnectedPeerCount(bob.page, 2, 30_000);
      await waitForConnectedPeerCount(charlie.page, 2, 30_000);
    });

    // ── Break Charlie's connections ──
    await test.step('Kill Charlie peer connections and block reconnection', async () => {
      await killAndBlockPeerConnections(charlie.page);
      // Give the health service time to detect the desync.
      // Peer latency pings stop -> connectedPeers updates -> desyncPeerIds recalculates.
      await alice.page.waitForTimeout(15_000);
    });

    // ── Assert connectivity warnings ──
    //
    // The warning icon (lucideAlertTriangle) is a direct sibling of the
    // user-name span inside the same voice-row div. Using the CSS
    // general-sibling combinator (~) avoids accidentally matching a
    // parent container that holds multiple rows.
    await test.step('Alice sees warning icon next to Charlie', async () => {
      const charlieWarning = aliceRoom.channelsSidePanel
        .locator('span.truncate:has-text("Charlie") ~ ng-icon[name="lucideAlertTriangle"]');
      await expect(charlieWarning).toBeVisible({ timeout: 30_000 });
    });
    await test.step('Bob sees warning icon next to Charlie', async () => {
      const charlieWarning = bobRoom.channelsSidePanel
        .locator('span.truncate:has-text("Charlie") ~ ng-icon[name="lucideAlertTriangle"]');
      await expect(charlieWarning).toBeVisible({ timeout: 30_000 });
    });
    // Negative check: healthy peers must NOT carry the warning icon.
    await test.step('Alice does NOT see warning icon next to Bob', async () => {
      const bobWarning = aliceRoom.channelsSidePanel
        .locator('span.truncate:has-text("Bob") ~ ng-icon[name="lucideAlertTriangle"]');
      await expect(bobWarning).not.toBeVisible();
    });
    // The broken client itself should surface a local banner.
    await test.step('Charlie sees local desync banner', async () => {
      const desyncBanner = charlie.page.locator('text=You may have connectivity issues');
      await expect(desyncBanner).toBeVisible({ timeout: 30_000 });
    });
  });
});

View File

@@ -0,0 +1,126 @@
import { test, expect } from '../../fixtures/multi-client';
import { RegisterPage } from '../../pages/register.page';
test.describe('ICE server settings', () => {
  test.describe.configure({ timeout: 120_000 });

  /**
   * Register a fresh user and navigate to the Network tab of the
   * settings dialog, where the ICE server list lives.
   */
  async function registerAndOpenNetworkSettings(page: import('@playwright/test').Page, suffix: string) {
    const register = new RegisterPage(page);
    await register.goto();
    await register.register(`user_${suffix}`, 'IceTestUser', 'TestPass123!');
    await expect(page.getByPlaceholder('Search servers...')).toBeVisible({ timeout: 30_000 });
    await page.getByTitle('Settings').click();
    await expect(page.getByRole('dialog')).toBeVisible({ timeout: 10_000 });
    await page.getByRole('button', { name: 'Network' }).click();
  }

  test('allows adding, removing, and reordering ICE servers', async ({ createClient }) => {
    const client = await createClient();
    const { page } = client;
    const suffix = `ice_${Date.now()}`;

    await test.step('Register and open Network settings', async () => {
      await registerAndOpenNetworkSettings(page, suffix);
    });

    const iceSection = page.getByTestId('ice-server-settings');

    await test.step('Default STUN servers are listed', async () => {
      await expect(iceSection).toBeVisible({ timeout: 5_000 });
      const entries = page.getByTestId('ice-server-list').locator('[data-testid^="ice-entry-"]');
      await expect(entries.first()).toBeVisible({ timeout: 5_000 });
      const count = await entries.count();
      expect(count).toBeGreaterThanOrEqual(1);
    });

    await test.step('Add a STUN server', async () => {
      await page.getByTestId('ice-type-select').selectOption('stun');
      await page.getByTestId('ice-url-input').fill('stun:custom.example.com:3478');
      await page.getByTestId('ice-add-button').click();
      await expect(page.getByText('stun:custom.example.com:3478')).toBeVisible({ timeout: 5_000 });
    });

    await test.step('Add a TURN server with credentials', async () => {
      await page.getByTestId('ice-type-select').selectOption('turn');
      await page.getByTestId('ice-url-input').fill('turn:relay.example.com:443');
      await page.getByTestId('ice-username-input').fill('testuser');
      await page.getByTestId('ice-credential-input').fill('testpass');
      await page.getByTestId('ice-add-button').click();
      await expect(page.getByText('turn:relay.example.com:443')).toBeVisible({ timeout: 5_000 });
      await expect(page.getByText('User: testuser')).toBeVisible({ timeout: 5_000 });
    });

    await test.step('Remove first entry and verify count decreases', async () => {
      const entries = page.getByTestId('ice-server-list').locator('[data-testid^="ice-entry-"]');
      const countBefore = await entries.count();
      await entries.first().getByTitle('Remove').click();
      await expect(entries).toHaveCount(countBefore - 1, { timeout: 5_000 });
    });

    await test.step('Reorder: move second entry up', async () => {
      const entries = page.getByTestId('ice-server-list').locator('[data-testid^="ice-entry-"]');
      const count = await entries.count();
      // Guard: after the removal above the list may hold only one entry,
      // in which case there is nothing to reorder.
      if (count >= 2) {
        const secondText = await entries.nth(1).locator('p').first().textContent();
        if (!secondText) {
          throw new Error('Expected ICE server entry text before reordering');
        }
        await entries.nth(1).getByTitle('Move up (higher priority)').click();
        // Wait for the moved entry text to appear at position 0
        await expect(entries.first().locator('p').first()).toHaveText(secondText, { timeout: 5_000 });
      }
    });

    await test.step('Restore defaults resets list', async () => {
      await page.getByTestId('ice-restore-defaults').click();
      // The custom TURN entry must be gone, but defaults repopulate.
      await expect(page.getByText('turn:relay.example.com:443')).not.toBeVisible({ timeout: 3_000 });
      const entries = page.getByTestId('ice-server-list').locator('[data-testid^="ice-entry-"]');
      await expect(entries.first()).toBeVisible({ timeout: 5_000 });
    });

    await test.step('Settings persist after page reload', async () => {
      await page.getByTestId('ice-type-select').selectOption('stun');
      await page.getByTestId('ice-url-input').fill('stun:persist-test.example.com:3478');
      await page.getByTestId('ice-add-button').click();
      await expect(page.getByText('stun:persist-test.example.com:3478')).toBeVisible({ timeout: 5_000 });
      await page.reload({ waitUntil: 'domcontentloaded' });
      // Re-open the dialog after reload; the entry should be rehydrated.
      await page.getByTitle('Settings').click();
      await expect(page.getByRole('dialog')).toBeVisible({ timeout: 10_000 });
      await page.getByRole('button', { name: 'Network' }).click();
      await expect(page.getByText('stun:persist-test.example.com:3478')).toBeVisible({ timeout: 10_000 });
    });
  });

  test('validates TURN entries require credentials', async ({ createClient }) => {
    const client = await createClient();
    const { page } = client;
    const suffix = `iceval_${Date.now()}`;

    await test.step('Register and open Network settings', async () => {
      await registerAndOpenNetworkSettings(page, suffix);
    });

    await test.step('Adding TURN without credentials shows error', async () => {
      await page.getByTestId('ice-type-select').selectOption('turn');
      await page.getByTestId('ice-url-input').fill('turn:noncred.example.com:443');
      await page.getByTestId('ice-add-button').click();
      await expect(page.getByText('Username is required for TURN servers')).toBeVisible({ timeout: 5_000 });
    });
  });
});

View File

@@ -0,0 +1,216 @@
import { test, expect } from '../../fixtures/multi-client';
import { RegisterPage } from '../../pages/register.page';
import { ServerSearchPage } from '../../pages/server-search.page';
import { ChatRoomPage } from '../../pages/chat-room.page';
import {
dumpRtcDiagnostics,
installAutoResumeAudioContext,
installWebRTCTracking,
waitForAllPeerAudioFlow,
waitForPeerConnected,
waitForConnectedPeerCount,
waitForAudioStatsPresent
} from '../../helpers/webrtc-helpers';
// localStorage key under which the app persists user-configured ICE servers.
const ICE_STORAGE_KEY = 'metoyou_ice_servers';

// Minimal shape of a persisted ICE server entry — only the fields this
// spec asserts on. Stored entries also carry id/username/credential,
// which are not inspected here.
interface StoredIceServerEntry {
  type?: string;
  urls?: string;
}
/**
 * Tests that user-configured ICE servers are persisted and used by peer connections.
 *
 * On localhost TURN relay is never needed (direct always succeeds), so this test:
 * 1. Seeds Bob's browser with an additional TURN entry via localStorage.
 * 2. Has both users join voice with differing ICE configs.
 * 3. Verifies both can connect and Bob's TURN entry is still in storage.
 */
test.describe('STUN/TURN fallback behaviour', () => {
  test.describe.configure({ timeout: 180_000 });

  test('users with different ICE configs can voice chat together', async ({ createClient }) => {
    const suffix = `turnfb_${Date.now()}`;
    const serverName = `Fallback ${suffix}`;
    const alice = await createClient();
    const bob = await createClient();

    // Install WebRTC tracking before any navigation so we can inspect
    // peer connections and audio stats.
    await installWebRTCTracking(alice.page);
    await installWebRTCTracking(bob.page);

    // Ensure AudioContexts auto-resume so the input-gain pipeline
    // (source -> gain -> destination) never stalls in "suspended" state.
    await installAutoResumeAudioContext(alice.page);
    await installAutoResumeAudioContext(bob.page);

    // Set deterministic voice settings so noise reduction and input gating
    // don't swallow the fake audio tone.
    const voiceSettings = JSON.stringify({
      inputVolume: 100,
      outputVolume: 100,
      audioBitrate: 96,
      latencyProfile: 'balanced',
      includeSystemAudio: false,
      noiseReduction: false,
      screenShareQuality: 'balanced',
      askScreenShareQuality: false
    });
    await alice.page.addInitScript((settings: string) => {
      localStorage.setItem('metoyou_voice_settings', settings);
    }, voiceSettings);
    await bob.page.addInitScript((settings: string) => {
      localStorage.setItem('metoyou_voice_settings', settings);
    }, voiceSettings);

    // Seed Bob with an extra TURN entry before the app reads localStorage.
    // Registered on the context so every page Bob opens receives it.
    await bob.context.addInitScript((key: string) => {
      try {
        const existing = JSON.parse(localStorage.getItem(key) || '[]');
        existing.push({
          id: 'e2e-turn',
          type: 'turn',
          urls: 'turn:localhost:3478',
          username: 'e2euser',
          credential: 'e2epass'
        });
        localStorage.setItem(key, JSON.stringify(existing));
      } catch { /* noop */ }
    }, ICE_STORAGE_KEY);

    await test.step('Register Alice', async () => {
      const register = new RegisterPage(alice.page);
      await register.goto();
      await register.register(`alice_${suffix}`, 'Alice', 'TestPass123!');
      await expect(alice.page.getByPlaceholder('Search servers...')).toBeVisible({ timeout: 30_000 });
    });
    await test.step('Register Bob', async () => {
      const register = new RegisterPage(bob.page);
      await register.goto();
      await register.register(`bob_${suffix}`, 'Bob', 'TestPass123!');
      await expect(bob.page.getByPlaceholder('Search servers...')).toBeVisible({ timeout: 30_000 });
    });
    await test.step('Alice creates a server', async () => {
      const search = new ServerSearchPage(alice.page);
      await search.createServer(serverName);
    });
    await test.step('Bob joins Alice server', async () => {
      const search = new ServerSearchPage(bob.page);
      await search.searchInput.fill(serverName);
      const serverCard = bob.page.locator('button', { hasText: serverName }).first();
      await expect(serverCard).toBeVisible({ timeout: 15_000 });
      await serverCard.click();
      await expect(bob.page).toHaveURL(/\/room\//, { timeout: 15_000 });
    });

    const aliceRoom = new ChatRoomPage(alice.page);
    const bobRoom = new ChatRoomPage(bob.page);

    await test.step('Both join voice', async () => {
      await aliceRoom.joinVoiceChannel('General');
      await bobRoom.joinVoiceChannel('General');
    });
    await test.step('Both users see each other in voice', async () => {
      await expect(aliceRoom.channelsSidePanel.getByText('Bob')).toBeVisible({ timeout: 20_000 });
      await expect(bobRoom.channelsSidePanel.getByText('Alice')).toBeVisible({ timeout: 20_000 });
    });

    await test.step('Peer connections establish and audio flows bidirectionally', async () => {
      await waitForPeerConnected(alice.page, 30_000);
      await waitForPeerConnected(bob.page, 30_000);
      await waitForConnectedPeerCount(alice.page, 1, 30_000);
      await waitForConnectedPeerCount(bob.page, 1, 30_000);
      // Wait for audio RTP stats to appear (tracks negotiated)
      await waitForAudioStatsPresent(alice.page, 30_000);
      await waitForAudioStatsPresent(bob.page, 30_000);
      // Allow mesh to settle - voice routing and renegotiation can
      // cause a second offer/answer cycle after the initial connection.
      await alice.page.waitForTimeout(5_000);
      // Chromium's --use-fake-device-for-media-stream can produce a
      // silent capture track on the very first getUserMedia call. If
      // bidirectional audio does not flow within a short window, leave
      // and rejoin voice to re-acquire the mic (the second getUserMedia
      // on a warm device always works).
      let audioFlowing = false;
      try {
        await Promise.all([waitForAllPeerAudioFlow(alice.page, 1, 15_000), waitForAllPeerAudioFlow(bob.page, 1, 15_000)]);
        audioFlowing = true;
      } catch {
        // Silent sender detected - rejoin voice to work around Chromium bug
      }
      if (!audioFlowing) {
        // Leave voice
        await aliceRoom.disconnectButton.click();
        await bobRoom.disconnectButton.click();
        await alice.page.waitForTimeout(2_000);
        // Rejoin
        await aliceRoom.joinVoiceChannel('General');
        await bobRoom.joinVoiceChannel('General');
        await expect(aliceRoom.channelsSidePanel.getByText('Bob')).toBeVisible({ timeout: 20_000 });
        await expect(bobRoom.channelsSidePanel.getByText('Alice')).toBeVisible({ timeout: 20_000 });
        await waitForPeerConnected(alice.page, 30_000);
        await waitForPeerConnected(bob.page, 30_000);
        await waitForConnectedPeerCount(alice.page, 1, 30_000);
        await waitForConnectedPeerCount(bob.page, 1, 30_000);
        await waitForAudioStatsPresent(alice.page, 30_000);
        await waitForAudioStatsPresent(bob.page, 30_000);
        await alice.page.waitForTimeout(3_000);
      }
      // Final assertion - must succeed after the (optional) rejoin.
      try {
        await Promise.all([waitForAllPeerAudioFlow(alice.page, 1, 60_000), waitForAllPeerAudioFlow(bob.page, 1, 60_000)]);
      } catch (error) {
        // Log RTC diagnostics from both sides before failing so CI
        // output shows why audio never flowed.
        console.log('[Alice RTC Diagnostics]\n' + await dumpRtcDiagnostics(alice.page));
        console.log('[Bob RTC Diagnostics]\n' + await dumpRtcDiagnostics(bob.page));
        throw error;
      }
    });

    await test.step('Bob still has TURN entry in localStorage', async () => {
      const stored: StoredIceServerEntry[] = await bob.page.evaluate(
        (key) => JSON.parse(localStorage.getItem(key) || '[]') as StoredIceServerEntry[],
        ICE_STORAGE_KEY
      );
      const hasTurn = stored.some(
        (entry) => entry.type === 'turn' && entry.urls === 'turn:localhost:3478'
      );
      expect(hasTurn).toBe(true);
    });
  });
});

View File

@@ -1,9 +1,6 @@
import { expect, type Page } from '@playwright/test';
import { test, type Client } from '../../fixtures/multi-client';
import {
installTestServerEndpoints,
type SeededEndpointInput
} from '../../helpers/seed-test-endpoint';
import { installTestServerEndpoints, type SeededEndpointInput } from '../../helpers/seed-test-endpoint';
import { startTestServer } from '../../helpers/test-server';
import {
dumpRtcDiagnostics,
@@ -22,12 +19,10 @@ import { ChatMessagesPage } from '../../pages/chat-messages.page';
// ── Signal endpoint identifiers ──────────────────────────────────────
const PRIMARY_SIGNAL_ID = 'e2e-mixed-signal-a';
const SECONDARY_SIGNAL_ID = 'e2e-mixed-signal-b';
// ── Room / channel names ─────────────────────────────────────────────
const VOICE_ROOM_NAME = `Mixed Signal Voice ${Date.now()}`;
const SECONDARY_ROOM_NAME = `Mixed Signal Chat ${Date.now()}`;
const VOICE_CHANNEL = 'General';
// ── User constants ───────────────────────────────────────────────────
const USER_PASSWORD = 'TestPass123!';
const USER_COUNT = 8;
@@ -37,7 +32,7 @@ const STABILITY_WINDOW_MS = 20_000;
// ── User signal configuration groups ─────────────────────────────────
//
// Group A (users 0-1): Both signal servers in network config (normal)
// Group B (users 2-3): Only primary signal secondary NOT in config.
// Group B (users 2-3): Only primary signal - secondary NOT in config.
// They join the secondary room via invite link,
// which auto-adds the endpoint.
// Group C (users 4-5): Both signals initially, but secondary is removed
@@ -66,23 +61,43 @@ function endpointsForGroup(
switch (group) {
case 'both':
return [
{ id: PRIMARY_SIGNAL_ID, name: 'E2E Signal A', url: primaryUrl, isActive: true, status: 'online' },
{ id: SECONDARY_SIGNAL_ID, name: 'E2E Signal B', url: secondaryUrl, isActive: true, status: 'online' }
{
id: PRIMARY_SIGNAL_ID,
name: 'E2E Signal A',
url: primaryUrl,
isActive: true,
status: 'online'
},
{
id: SECONDARY_SIGNAL_ID,
name: 'E2E Signal B',
url: secondaryUrl,
isActive: true,
status: 'online'
}
];
case 'primary-only':
return [
{ id: PRIMARY_SIGNAL_ID, name: 'E2E Signal A', url: primaryUrl, isActive: true, status: 'online' }
];
return [{ id: PRIMARY_SIGNAL_ID, name: 'E2E Signal A', url: primaryUrl, isActive: true, status: 'online' }];
case 'both-then-remove-secondary':
// Seed both initially; test will remove secondary after registration.
return [
{ id: PRIMARY_SIGNAL_ID, name: 'E2E Signal A', url: primaryUrl, isActive: true, status: 'online' },
{ id: SECONDARY_SIGNAL_ID, name: 'E2E Signal B', url: secondaryUrl, isActive: true, status: 'online' }
{
id: PRIMARY_SIGNAL_ID,
name: 'E2E Signal A',
url: primaryUrl,
isActive: true,
status: 'online'
},
{
id: SECONDARY_SIGNAL_ID,
name: 'E2E Signal B',
url: secondaryUrl,
isActive: true,
status: 'online'
}
];
case 'secondary-only':
return [
{ id: SECONDARY_SIGNAL_ID, name: 'E2E Signal B', url: secondaryUrl, isActive: true, status: 'online' }
];
return [{ id: SECONDARY_SIGNAL_ID, name: 'E2E Signal B', url: secondaryUrl, isActive: true, status: 'online' }];
}
}
@@ -96,11 +111,6 @@ test.describe('Mixed signal-config voice', () => {
const secondaryServer = await startTestServer();
try {
const allEndpoints: SeededEndpointInput[] = [
{ id: PRIMARY_SIGNAL_ID, name: 'E2E Signal A', url: testServer.url, isActive: true, status: 'online' },
{ id: SECONDARY_SIGNAL_ID, name: 'E2E Signal B', url: secondaryServer.url, isActive: true, status: 'online' }
];
const users = buildUsers();
const clients: TestClient[] = [];
@@ -139,12 +149,14 @@ test.describe('Mixed signal-config voice', () => {
description: 'Voice room on primary signal',
sourceId: PRIMARY_SIGNAL_ID
});
await expect(clients[0].page).toHaveURL(/\/room\//, { timeout: 20_000 });
await searchPage.createServer(SECONDARY_ROOM_NAME, {
description: 'Chat room on secondary signal',
sourceId: SECONDARY_SIGNAL_ID
});
await expect(clients[0].page).toHaveURL(/\/room\//, { timeout: 20_000 });
});
@@ -164,7 +176,6 @@ test.describe('Mixed signal-config voice', () => {
// Navigate to secondary room to get its ID
await openSavedRoomByName(clients[0].page, SECONDARY_ROOM_NAME);
const secondaryRoomId = await getCurrentRoomId(clients[0].page);
// Create invite for primary room (voice) via API
const primaryInvite = await createInviteViaApi(
testServer.url,
@@ -172,6 +183,7 @@ test.describe('Mixed signal-config voice', () => {
userId,
clients[0].user.displayName
);
primaryRoomInviteUrl = `/invite/${primaryInvite.id}?server=${encodeURIComponent(testServer.url)}`;
// Create invite for secondary room (chat) via API
@@ -181,12 +193,13 @@ test.describe('Mixed signal-config voice', () => {
userId,
clients[0].user.displayName
);
secondaryRoomInviteUrl = `/invite/${secondaryInvite.id}?server=${encodeURIComponent(secondaryServer.url)}`;
});
// ── Remove secondary endpoint for group C ───────────────────
await test.step('Remove secondary signal from group C users', async () => {
for (const client of clients.filter((c) => c.user.group === 'both-then-remove-secondary')) {
for (const client of clients.filter((clientItem) => clientItem.user.group === 'both-then-remove-secondary')) {
await client.page.evaluate((primaryEndpoint) => {
localStorage.setItem('metoyou_server_endpoints', JSON.stringify([primaryEndpoint]));
}, { id: PRIMARY_SIGNAL_ID, name: 'E2E Signal A', url: testServer.url, isActive: true, isDefault: false, status: 'online' });
@@ -197,11 +210,11 @@ test.describe('Mixed signal-config voice', () => {
await test.step('All users join the voice room (some via search, some via invite)', async () => {
for (const client of clients.slice(1)) {
if (client.user.group === 'secondary-only') {
// Group D: no primary signal join voice room via invite
// Group D: no primary signal -> join voice room via invite
await client.page.goto(primaryRoomInviteUrl);
await waitForInviteJoin(client.page);
} else {
// Groups A, B, C: have primary signal join via search
// Groups A, B, C: have primary signal -> join via search
await joinRoomFromSearch(client.page, VOICE_ROOM_NAME);
}
}
@@ -213,11 +226,11 @@ test.describe('Mixed signal-config voice', () => {
await test.step('All users also join the secondary chat room', async () => {
for (const client of clients.slice(1)) {
if (client.user.group === 'primary-only') {
// Group B: no secondary signal join chat room via invite
// Group B: no secondary signal -> join chat room via invite
await client.page.goto(secondaryRoomInviteUrl);
await waitForInviteJoin(client.page);
} else if (client.user.group === 'secondary-only') {
// Group D: has secondary join via search
// Group D: has secondary -> join via search
await openSearchView(client.page);
await joinRoomFromSearch(client.page, SECONDARY_ROOM_NAME);
} else {
@@ -285,7 +298,7 @@ test.describe('Mixed signal-config voice', () => {
await test.step('Voice stays stable 20s while some users navigate and chat on other servers', async () => {
// Pick 2 users from different groups to navigate away and chat
const chatters = [clients[2], clients[6]]; // group C + group D
const stayers = clients.filter((c) => !chatters.includes(c));
const stayers = clients.filter((clientItem) => !chatters.includes(clientItem));
// Chatters navigate to secondary room and send messages
for (const chatter of chatters) {
@@ -303,11 +316,12 @@ test.describe('Mixed signal-config voice', () => {
await expect(
chatPage0.getMessageItemByText(`Reply from ${chatters[1].user.displayName}`)
).toBeVisible({ timeout: 15_000 });
await expect(
chatPage1.getMessageItemByText(`Hello from ${chatters[0].user.displayName}`)
).toBeVisible({ timeout: 15_000 });
// Meanwhile stability loop on all clients (including chatters voice still active)
// Meanwhile stability loop on all clients (including chatters - voice still active)
const deadline = Date.now() + STABILITY_WINDOW_MS;
while (Date.now() < deadline) {
@@ -391,7 +405,7 @@ test.describe('Mixed signal-config voice', () => {
await room.deafenButton.click();
await client.page.waitForTimeout(500);
// Un-deafen does NOT restore mute user stays muted
// Un-deafen does NOT restore mute - user stays muted
await waitForVoiceStateAcrossPages(clients, client.user.displayName, {
isMuted: true,
isDeafened: false
@@ -429,10 +443,14 @@ test.describe('Mixed signal-config voice', () => {
function buildUsers(): TestUser[] {
const groups: SignalGroup[] = [
'both', 'both', // 0-1
'primary-only', 'primary-only', // 2-3
'both-then-remove-secondary', 'both-then-remove-secondary', // 4-5
'secondary-only', 'secondary-only' // 6-7
'both',
'both', // 0-1
'primary-only',
'primary-only', // 2-3
'both-then-remove-secondary',
'both-then-remove-secondary', // 4-5
'secondary-only',
'secondary-only' // 6-7
];
return groups.map((group, index) => ({
@@ -574,7 +592,7 @@ async function openSavedRoomByName(page: Page, roomName: string): Promise<void>
}
async function waitForInviteJoin(page: Page): Promise<void> {
// Invite page loads auto-joins redirects to room
// Invite page loads -> auto-joins -> redirects to room
await expect(page).toHaveURL(/\/room\//, { timeout: 30_000 });
await expect(page.locator('app-rooms-side-panel').first()).toBeVisible({ timeout: 20_000 });
}
@@ -605,11 +623,13 @@ async function waitForCurrentRoomName(page: Page, roomName: string, timeout = 20
}
async function openVoiceWorkspace(page: Page): Promise<void> {
if (await page.locator('app-voice-workspace').isVisible().catch(() => false)) {
if (await page.locator('app-voice-workspace').isVisible()
.catch(() => false)) {
return;
}
const viewButton = page.locator('app-rooms-side-panel').getByRole('button', { name: /view|open/i }).first();
const viewButton = page.locator('app-rooms-side-panel').getByRole('button', { name: /view|open/i })
.first();
await expect(viewButton).toBeVisible({ timeout: 10_000 });
await viewButton.click();
@@ -619,6 +639,7 @@ async function openVoiceWorkspace(page: Page): Promise<void> {
async function joinVoiceChannelUntilConnected(page: Page, channelName: string, attempts = 3): Promise<void> {
const room = new ChatRoomPage(page);
let lastError: unknown;
for (let attempt = 1; attempt <= attempts; attempt++) {
@@ -634,10 +655,11 @@ async function joinVoiceChannelUntilConnected(page: Page, channelName: string, a
}
}
throw new Error([
`Failed to connect ${page.url()} to voice channel ${channelName}.`,
lastError instanceof Error ? `Last error: ${lastError.message}` : 'Last error: unavailable'
].join('\n'));
const lastErrorMessage = lastError instanceof Error
? `Last error: ${lastError.message}`
: 'Last error: unavailable';
throw new Error(`Failed to connect ${page.url()} to voice channel ${channelName}.\n${lastErrorMessage}`);
}
async function waitForLocalVoiceChannelConnection(page: Page, channelName: string, timeout = 20_000): Promise<void> {
@@ -691,7 +713,7 @@ async function waitForVoiceWorkspaceUserCount(page: Page, expectedCount: number)
}
const component = debugApi.getComponent(host);
const connectedUsers = (component['connectedVoiceUsers'] as (() => Array<unknown>) | undefined)?.() ?? [];
const connectedUsers = (component['connectedVoiceUsers'] as (() => unknown[]) | undefined)?.() ?? [];
return connectedUsers.length === count;
},
@@ -724,7 +746,7 @@ async function waitForVoiceRosterCount(page: Page, channelName: string, expected
return false;
}
const roster = (component['voiceUsersInRoom'] as ((roomId: string) => Array<unknown>) | undefined)?.(channelId) ?? [];
const roster = (component['voiceUsersInRoom'] as ((roomId: string) => unknown[]) | undefined)?.(channelId) ?? [];
return roster.length === expected;
},
@@ -734,7 +756,7 @@ async function waitForVoiceRosterCount(page: Page, channelName: string, expected
}
async function waitForVoiceStateAcrossPages(
clients: ReadonlyArray<TestClient>,
clients: readonly TestClient[],
displayName: string,
expectedState: { isMuted: boolean; isDeafened: boolean }
): Promise<void> {
@@ -765,7 +787,7 @@ async function waitForVoiceStateAcrossPages(
}
const roster = (component['voiceUsersInRoom'] as ((roomId: string) => UserShape[]) | undefined)?.(voiceChannel.id) ?? [];
const entry = roster.find((u) => u.displayName === expectedDisplayName);
const entry = roster.find((userEntry) => userEntry.displayName === expectedDisplayName);
return entry?.voiceState?.isMuted === expectedMuted
&& entry?.voiceState?.isDeafened === expectedDeafened;

View File

@@ -1,9 +1,6 @@
import { expect, type Page } from '@playwright/test';
import { test, type Client } from '../../fixtures/multi-client';
import {
installTestServerEndpoints,
type SeededEndpointInput
} from '../../helpers/seed-test-endpoint';
import { installTestServerEndpoints, type SeededEndpointInput } from '../../helpers/seed-test-endpoint';
import { startTestServer } from '../../helpers/test-server';
import {
dumpRtcDiagnostics,
@@ -28,11 +25,11 @@ const USER_COUNT = 8;
const EXPECTED_REMOTE_PEERS = USER_COUNT - 1;
const STABILITY_WINDOW_MS = 20_000;
type TestUser = {
interface TestUser {
username: string;
displayName: string;
password: string;
};
}
type TestClient = Client & {
user: TestUser;
@@ -64,7 +61,6 @@ test.describe('Dual-signal multi-user voice', () => {
status: 'online'
}
];
const users = buildUsers();
const clients = await createTrackedClients(createClient, users, endpoints);
@@ -86,12 +82,14 @@ test.describe('Dual-signal multi-user voice', () => {
description: 'Primary signal room for 8-user voice mesh',
sourceId: PRIMARY_SIGNAL_ID
});
await expect(clients[0].page).toHaveURL(/\/room\//, { timeout: 20_000 });
await searchPage.createServer(SECONDARY_ROOM_NAME, {
description: 'Secondary signal room for dual-socket coverage',
sourceId: SECONDARY_SIGNAL_ID
});
await expect(clients[0].page).toHaveURL(/\/room\//, { timeout: 20_000 });
});
@@ -141,7 +139,7 @@ test.describe('Dual-signal multi-user voice', () => {
waitForAudioStatsPresent(client.page, 30_000)
));
// Allow the mesh to settle voice routing, allowed-peer-id
// Allow the mesh to settle - voice routing, allowed-peer-id
// propagation and renegotiation all need time after the last
// user joins.
await clients[0].page.waitForTimeout(5_000);
@@ -173,6 +171,7 @@ test.describe('Dual-signal multi-user voice', () => {
timeout: 10_000,
intervals: [500, 1_000]
}).toBe(EXPECTED_REMOTE_PEERS);
await expect.poll(async () => await getConnectedSignalManagerCount(client.page), {
timeout: 10_000,
intervals: [500, 1_000]
@@ -236,7 +235,7 @@ test.describe('Dual-signal multi-user voice', () => {
await room.deafenButton.click();
await client.page.waitForTimeout(500);
// Un-deafen does NOT restore mute the user stays muted
// Un-deafen does NOT restore mute - the user stays muted
await waitForVoiceStateAcrossPages(clients, client.user.displayName, {
isMuted: true,
isDeafened: false
@@ -245,7 +244,7 @@ test.describe('Dual-signal multi-user voice', () => {
});
await test.step('Unmute all users and verify audio flows end-to-end', async () => {
// Every user is left muted after deafen cycling unmute them all
// Every user is left muted after deafen cycling - unmute them all
for (const client of clients) {
const room = new ChatRoomPage(client.page);
@@ -256,7 +255,7 @@ test.describe('Dual-signal multi-user voice', () => {
});
}
// Final audio flow check on every peer confirms the full
// Final audio flow check on every peer - confirms the full
// send/receive pipeline still works after mute+deafen cycling
for (const client of clients) {
try {
@@ -284,7 +283,7 @@ function buildUsers(): TestUser[] {
async function createTrackedClients(
createClient: () => Promise<Client>,
users: TestUser[],
endpoints: ReadonlyArray<SeededEndpointInput>
endpoints: readonly SeededEndpointInput[]
): Promise<TestClient[]> {
const clients: TestClient[] = [];
@@ -384,9 +383,11 @@ async function waitForCurrentRoomName(page: Page, roomName: string, timeout = 20
}
async function openVoiceWorkspace(page: Page): Promise<void> {
const viewButton = page.locator('app-rooms-side-panel').getByRole('button', { name: /view|open/i }).first();
const viewButton = page.locator('app-rooms-side-panel').getByRole('button', { name: /view|open/i })
.first();
if (await page.locator('app-voice-workspace').isVisible().catch(() => false)) {
if (await page.locator('app-voice-workspace').isVisible()
.catch(() => false)) {
return;
}
@@ -396,6 +397,7 @@ async function openVoiceWorkspace(page: Page): Promise<void> {
async function joinVoiceChannelUntilConnected(page: Page, channelName: string, attempts = 3): Promise<void> {
const room = new ChatRoomPage(page);
let lastError: unknown;
for (let attempt = 1; attempt <= attempts; attempt++) {
@@ -559,7 +561,7 @@ async function getVoiceJoinDiagnostics(page: Page, channelName: string): Promise
const realtime = component['realtime'] as {
connectionErrorMessage?: () => string | null;
signalingTransportHandler?: {
getConnectedSignalingManagers?: () => Array<{ signalUrl: string }>;
getConnectedSignalingManagers?: () => { signalUrl: string }[];
};
} | undefined;
@@ -596,7 +598,7 @@ async function waitForConnectedSignalManagerCount(page: Page, expectedCount: num
const component = debugApi.getComponent(host);
const realtime = component['realtime'] as {
signalingTransportHandler?: {
getConnectedSignalingManagers?: () => Array<{ signalUrl: string }>;
getConnectedSignalingManagers?: () => { signalUrl: string }[];
};
} | undefined;
const countValue = realtime?.signalingTransportHandler?.getConnectedSignalingManagers?.().length ?? 0;
@@ -624,7 +626,7 @@ async function getConnectedSignalManagerCount(page: Page): Promise<number> {
const component = debugApi.getComponent(host);
const realtime = component['realtime'] as {
signalingTransportHandler?: {
getConnectedSignalingManagers?: () => Array<{ signalUrl: string }>;
getConnectedSignalingManagers?: () => { signalUrl: string }[];
};
} | undefined;
@@ -647,7 +649,7 @@ async function waitForVoiceWorkspaceUserCount(page: Page, expectedCount: number)
}
const component = debugApi.getComponent(host);
const connectedUsers = (component['connectedVoiceUsers'] as (() => Array<unknown>) | undefined)?.() ?? [];
const connectedUsers = (component['connectedVoiceUsers'] as (() => unknown[]) | undefined)?.() ?? [];
return connectedUsers.length === count;
},
@@ -688,7 +690,7 @@ async function waitForVoiceRosterCount(page: Page, channelName: string, expected
return false;
}
const roster = (component['voiceUsersInRoom'] as ((roomId: string) => Array<unknown>) | undefined)?.(channelId) ?? [];
const roster = (component['voiceUsersInRoom'] as ((roomId: string) => unknown[]) | undefined)?.(channelId) ?? [];
return roster.length === expected;
},
@@ -698,7 +700,7 @@ async function waitForVoiceRosterCount(page: Page, channelName: string, expected
}
async function waitForVoiceStateAcrossPages(
clients: ReadonlyArray<TestClient>,
clients: readonly TestClient[],
displayName: string,
expectedState: { isMuted: boolean; isDeafened: boolean }
): Promise<void> {

View File

@@ -1,6 +1,7 @@
import { test, expect } from '../../fixtures/multi-client';
import {
installWebRTCTracking,
installAutoResumeAudioContext,
waitForPeerConnected,
isPeerStillConnected,
getAudioStatsDelta,
@@ -13,7 +14,7 @@ import { ServerSearchPage } from '../../pages/server-search.page';
import { ChatRoomPage } from '../../pages/chat-room.page';
/**
* Full user journey: register create server join voice verify audio
* Full user journey: register -> create server -> join -> voice -> verify audio
* for 10+ seconds of stable connectivity.
*
* Uses two independent browser contexts (Alice & Bob) to simulate real
@@ -25,7 +26,7 @@ const BOB = { username: `bob_${Date.now()}`, displayName: 'Bob', password: 'Test
const SERVER_NAME = `E2E Test Server ${Date.now()}`;
const VOICE_CHANNEL = 'General';
test.describe('Full user journey: register server voice chat', () => {
test.describe('Full user journey: register -> server -> voice chat', () => {
test('two users register, create server, join voice, and stay connected 10+ seconds with audio', async ({ createClient }) => {
test.setTimeout(180_000); // 3 min - covers registration, server creation, voice establishment, and 10s stability check
@@ -35,6 +36,20 @@ test.describe('Full user journey: register → server → voice chat', () => {
// Install WebRTC tracking before any navigation
await installWebRTCTracking(alice.page);
await installWebRTCTracking(bob.page);
await installAutoResumeAudioContext(alice.page);
await installAutoResumeAudioContext(bob.page);
// Seed deterministic voice settings so noise reduction doesn't
// swallow the fake audio tone.
const voiceSettings = JSON.stringify({
inputVolume: 100, outputVolume: 100, audioBitrate: 96,
latencyProfile: 'balanced', includeSystemAudio: false,
noiseReduction: false, screenShareQuality: 'balanced',
askScreenShareQuality: false
});
await alice.page.addInitScript((settingsValue: string) => localStorage.setItem('metoyou_voice_settings', settingsValue), voiceSettings);
await bob.page.addInitScript((settingsValue: string) => localStorage.setItem('metoyou_voice_settings', settingsValue), voiceSettings);
// Forward browser console for debugging
alice.page.on('console', msg => console.log('[Alice]', msg.text()));
@@ -146,8 +161,38 @@ test.describe('Full user journey: register → server → voice chat', () => {
// ── Step 7: Verify audio is flowing in both directions ───────────
await test.step('Audio packets are flowing between Alice and Bob', async () => {
const aliceDelta = await waitForAudioFlow(alice.page, 30_000);
const bobDelta = await waitForAudioFlow(bob.page, 30_000);
// Chromium's --use-fake-device-for-media-stream can produce a
// silent capture track on the very first getUserMedia call. If
// bidirectional audio doesn't flow within a short window, leave
// and rejoin voice to re-acquire the mic.
let aliceDelta = await waitForAudioFlow(alice.page, 15_000);
let bobDelta = await waitForAudioFlow(bob.page, 15_000);
const isFlowing = (delta: typeof aliceDelta) =>
(delta.outboundBytesDelta > 0 || delta.outboundPacketsDelta > 0) &&
(delta.inboundBytesDelta > 0 || delta.inboundPacketsDelta > 0);
if (!isFlowing(aliceDelta) || !isFlowing(bobDelta)) {
const aliceRoom = new ChatRoomPage(alice.page);
const bobRoom = new ChatRoomPage(bob.page);
await aliceRoom.disconnectButton.click();
await bobRoom.disconnectButton.click();
await alice.page.waitForTimeout(2_000);
await aliceRoom.joinVoiceChannel(VOICE_CHANNEL);
await expect(alice.page.locator('app-voice-controls')).toBeVisible({ timeout: 15_000 });
await bobRoom.joinVoiceChannel(VOICE_CHANNEL);
await expect(bob.page.locator('app-voice-controls')).toBeVisible({ timeout: 15_000 });
await waitForPeerConnected(alice.page, 30_000);
await waitForPeerConnected(bob.page, 30_000);
await waitForAudioStatsPresent(alice.page, 20_000);
await waitForAudioStatsPresent(bob.page, 20_000);
aliceDelta = await waitForAudioFlow(alice.page, 30_000);
bobDelta = await waitForAudioFlow(bob.page, 30_000);
}
if (aliceDelta.outboundBytesDelta === 0 || aliceDelta.inboundBytesDelta === 0
|| bobDelta.outboundBytesDelta === 0 || bobDelta.inboundBytesDelta === 0) {