test: Add playwright main usage test
Some checks failed
Deploy Web Apps / deploy (push) Has been cancelled
Queue Release Build / prepare (push) Successful in 21s
Queue Release Build / build-linux (push) Successful in 27m44s
Queue Release Build / build-windows (push) Successful in 32m16s
Queue Release Build / finalize (push) Successful in 1m54s

This commit is contained in:
2026-04-11 16:48:26 +02:00
parent f33440a827
commit 391d9235f1
25 changed files with 2968 additions and 67 deletions

View File

@@ -0,0 +1,295 @@
import { type Page } from '@playwright/test';
import { test, expect, type Client } from '../../fixtures/multi-client';
import { RegisterPage } from '../../pages/register.page';
import { ServerSearchPage } from '../../pages/server-search.page';
import { ChatRoomPage } from '../../pages/chat-room.page';
import {
ChatMessagesPage,
type ChatDropFilePayload
} from '../../pages/chat-messages.page';
// Values used by the route interceptors installed in installChatFeatureMocks;
// tests assert against these exact strings, so keep them in sync.
const MOCK_EMBED_URL = 'https://example.test/mock-embed';
const MOCK_EMBED_TITLE = 'Mock Embed Title';
const MOCK_EMBED_DESCRIPTION = 'Mock embed description for chat E2E coverage.';
// 1x1 GIF as a data URI - serves as both the mocked GIF result and the embed image.
const MOCK_GIF_IMAGE_URL = 'data:image/gif;base64,R0lGODlhAQABAPAAAP///wAAACH5BAAAAAAALAAAAAABAAEAAAICRAEAOw==';
// Tombstone text the app renders in place of a deleted message.
const DELETED_MESSAGE_CONTENT = '[Message deleted]';
// End-to-end coverage of chat messaging between two live clients:
// channel message sync, typing indicators, edit/delete, attachments,
// link embeds and GIF messages. Each test builds a fresh two-user
// scenario (Alice + Bob on the same server) via createChatScenario.
test.describe('Chat messaging features', () => {
// Per-test registration + server creation is slow; allow 3 minutes each.
test.describe.configure({ timeout: 180_000 });
test('syncs messages in a newly created text channel', async ({ createClient }) => {
const scenario = await createChatScenario(createClient);
const channelName = uniqueName('updates');
const aliceMessage = `Alice text channel message ${uniqueName('msg')}`;
const bobMessage = `Bob text channel reply ${uniqueName('msg')}`;
await test.step('Alice creates a new text channel and both users join it', async () => {
await scenario.aliceRoom.ensureTextChannelExists(channelName);
await scenario.aliceRoom.joinTextChannel(channelName);
await scenario.bobRoom.joinTextChannel(channelName);
});
await test.step('Alice and Bob see synced messages in the new text channel', async () => {
// Each message must appear on the *other* user's page to prove server sync.
await scenario.aliceMessages.sendMessage(aliceMessage);
await expect(scenario.bobMessages.getMessageItemByText(aliceMessage)).toBeVisible({ timeout: 20_000 });
await scenario.bobMessages.sendMessage(bobMessage);
await expect(scenario.aliceMessages.getMessageItemByText(bobMessage)).toBeVisible({ timeout: 20_000 });
});
});
test('shows typing indicators to other users', async ({ createClient }) => {
const scenario = await createChatScenario(createClient);
const draftMessage = `Typing indicator draft ${uniqueName('draft')}`;
await test.step('Alice starts typing in general channel', async () => {
// The draft is typed but never sent - only the typing event should fire.
await scenario.aliceMessages.typeDraft(draftMessage);
});
await test.step('Bob sees Alice typing', async () => {
await expect(scenario.bob.page.getByText('Alice is typing...')).toBeVisible({ timeout: 10_000 });
});
});
test('edits and removes messages for both users', async ({ createClient }) => {
const scenario = await createChatScenario(createClient);
const originalMessage = `Editable message ${uniqueName('edit')}`;
const updatedMessage = `Edited message ${uniqueName('edit')}`;
await test.step('Alice sends a message and Bob receives it', async () => {
await scenario.aliceMessages.sendMessage(originalMessage);
await expect(scenario.bobMessages.getMessageItemByText(originalMessage)).toBeVisible({ timeout: 20_000 });
});
await test.step('Alice edits the message and both users see updated content', async () => {
await scenario.aliceMessages.editOwnMessage(originalMessage, updatedMessage);
await expect(scenario.aliceMessages.getMessageItemByText(updatedMessage)).toBeVisible({ timeout: 20_000 });
await expect(scenario.alice.page.getByText('(edited)')).toBeVisible({ timeout: 10_000 });
await expect(scenario.bobMessages.getMessageItemByText(updatedMessage)).toBeVisible({ timeout: 20_000 });
});
await test.step('Alice deletes the message and both users see deletion state', async () => {
// Deletion replaces the content with a tombstone rather than removing the item.
await scenario.aliceMessages.deleteOwnMessage(updatedMessage);
await expect(scenario.aliceMessages.getMessageItemByText(DELETED_MESSAGE_CONTENT)).toBeVisible({ timeout: 20_000 });
await expect(scenario.bobMessages.getMessageItemByText(DELETED_MESSAGE_CONTENT)).toBeVisible({ timeout: 20_000 });
});
});
test('syncs image and file attachments between users', async ({ createClient }) => {
const scenario = await createChatScenario(createClient);
const imageName = `${uniqueName('diagram')}.svg`;
const fileName = `${uniqueName('notes')}.txt`;
const imageCaption = `Image upload ${uniqueName('caption')}`;
const fileCaption = `File upload ${uniqueName('caption')}`;
// Attachments are generated in-memory; no fixture files on disk.
const imageAttachment = createTextFilePayload(imageName, 'image/svg+xml', buildMockSvgMarkup(imageName));
const fileAttachment = createTextFilePayload(fileName, 'text/plain', `Attachment body for ${fileName}`);
await test.step('Alice sends image attachment and Bob receives it', async () => {
await scenario.aliceMessages.attachFiles([imageAttachment]);
await scenario.aliceMessages.sendMessage(imageCaption);
await scenario.aliceMessages.expectMessageImageLoaded(imageName);
await expect(scenario.bobMessages.getMessageItemByText(imageCaption)).toBeVisible({ timeout: 20_000 });
await scenario.bobMessages.expectMessageImageLoaded(imageName);
});
await test.step('Alice sends generic file attachment and Bob receives it', async () => {
await scenario.aliceMessages.attachFiles([fileAttachment]);
await scenario.aliceMessages.sendMessage(fileCaption);
await expect(scenario.bobMessages.getMessageItemByText(fileCaption)).toBeVisible({ timeout: 20_000 });
await expect(scenario.bob.page.getByText(fileName, { exact: false })).toBeVisible({ timeout: 20_000 });
});
});
test('renders link embeds for shared links', async ({ createClient }) => {
const scenario = await createChatScenario(createClient);
// MOCK_EMBED_URL is intercepted by installChatFeatureMocks, so the embed
// metadata below is deterministic and needs no external network access.
const messageText = `Useful docs ${MOCK_EMBED_URL}`;
await test.step('Alice shares a link in chat', async () => {
await scenario.aliceMessages.sendMessage(messageText);
await expect(scenario.bobMessages.getMessageItemByText(messageText)).toBeVisible({ timeout: 20_000 });
});
await test.step('Both users see mocked link embed metadata', async () => {
await expect(scenario.aliceMessages.getEmbedCardByTitle(MOCK_EMBED_TITLE)).toBeVisible({ timeout: 20_000 });
await expect(scenario.bobMessages.getEmbedCardByTitle(MOCK_EMBED_TITLE)).toBeVisible({ timeout: 20_000 });
await expect(scenario.bob.page.getByText(MOCK_EMBED_DESCRIPTION)).toBeVisible({ timeout: 20_000 });
});
});
test('sends KLIPY GIF messages with mocked API responses', async ({ createClient }) => {
const scenario = await createChatScenario(createClient);
await test.step('Alice opens GIF picker and sends mocked GIF', async () => {
await scenario.aliceMessages.openGifPicker();
// The picker is fed by the mocked /api/klipy/gifs route, which always
// returns exactly one deterministic result.
await scenario.aliceMessages.selectFirstGif();
});
await test.step('Bob sees GIF message sync', async () => {
await scenario.aliceMessages.expectMessageImageLoaded('KLIPY GIF');
await scenario.bobMessages.expectMessageImageLoaded('KLIPY GIF');
});
});
});
// Everything a chat test needs for a two-user session: both raw clients
// plus room- and message-level page objects for each user.
type ChatScenario = {
alice: Client;
bob: Client;
aliceRoom: ChatRoomPage;
bobRoom: ChatRoomPage;
aliceMessages: ChatMessagesPage;
bobMessages: ChatMessagesPage;
};
/**
 * Shared fixture builder for all chat tests: registers Alice and Bob on
 * separate clients, has Alice create a uniquely named server, has Bob join
 * it via search, and waits until the message UI is ready on both pages.
 * Route mocks are installed before any navigation so the very first
 * requests are already intercepted.
 */
async function createChatScenario(createClient: () => Promise<Client>): Promise<ChatScenario> {
const suffix = uniqueName('chat');
const serverName = `Chat Server ${suffix}`;
const aliceCredentials = {
username: `alice_${suffix}`,
displayName: 'Alice',
password: 'TestPass123!'
};
const bobCredentials = {
username: `bob_${suffix}`,
displayName: 'Bob',
password: 'TestPass123!'
};
const alice = await createClient();
const bob = await createClient();
// Mocks first - no real KLIPY / link-metadata request may escape.
await installChatFeatureMocks(alice.page);
await installChatFeatureMocks(bob.page);
const aliceRegisterPage = new RegisterPage(alice.page);
const bobRegisterPage = new RegisterPage(bob.page);
await aliceRegisterPage.goto();
await aliceRegisterPage.register(
aliceCredentials.username,
aliceCredentials.displayName,
aliceCredentials.password
);
// Successful registration redirects to the server search screen.
await expect(alice.page).toHaveURL(/\/search/, { timeout: 15_000 });
await bobRegisterPage.goto();
await bobRegisterPage.register(
bobCredentials.username,
bobCredentials.displayName,
bobCredentials.password
);
await expect(bob.page).toHaveURL(/\/search/, { timeout: 15_000 });
const aliceSearchPage = new ServerSearchPage(alice.page);
await aliceSearchPage.createServer(serverName, {
description: 'E2E chat server for messaging feature coverage'
});
await expect(alice.page).toHaveURL(/\/room\//, { timeout: 15_000 });
// Bob locates the freshly created server by name and joins it.
const bobSearchPage = new ServerSearchPage(bob.page);
const serverCard = bob.page.locator('button', { hasText: serverName }).first();
await bobSearchPage.searchInput.fill(serverName);
await expect(serverCard).toBeVisible({ timeout: 15_000 });
await serverCard.click();
await expect(bob.page).toHaveURL(/\/room\//, { timeout: 15_000 });
const aliceRoom = new ChatRoomPage(alice.page);
const bobRoom = new ChatRoomPage(bob.page);
const aliceMessages = new ChatMessagesPage(alice.page);
const bobMessages = new ChatMessagesPage(bob.page);
// Tests assume the message composer is interactive from the first step.
await aliceMessages.waitForReady();
await bobMessages.waitForReady();
return {
alice,
bob,
aliceRoom,
bobRoom,
aliceMessages,
bobMessages
};
}
/**
 * Installs Playwright network mocks for the chat feature endpoints (KLIPY
 * GIF config/search and link-embed metadata) so E2E runs stay hermetic.
 * Must run before navigation so the first requests are intercepted.
 */
async function installChatFeatureMocks(page: Page): Promise<void> {
  // KLIPY integration reports itself enabled.
  await page.route('**/api/klipy/config', (route) =>
    route.fulfill({
      status: 200,
      contentType: 'application/json',
      body: JSON.stringify({ enabled: true })
    })
  );

  // GIF search always yields a single deterministic result.
  const gifSearchPayload = {
    enabled: true,
    hasNext: false,
    results: [
      {
        id: 'mock-gif-1',
        slug: 'mock-gif-1',
        title: 'Mock Celebration GIF',
        url: MOCK_GIF_IMAGE_URL,
        previewUrl: MOCK_GIF_IMAGE_URL,
        width: 64,
        height: 64
      }
    ]
  };
  await page.route('**/api/klipy/gifs**', (route) =>
    route.fulfill({
      status: 200,
      contentType: 'application/json',
      body: JSON.stringify(gifSearchPayload)
    })
  );

  // Link metadata: the known mock URL gets full embed data; any other
  // target is answered with a 200 "failed" body (no embed rendered).
  await page.route('**/api/link-metadata**', (route) => {
    const target = new URL(route.request().url()).searchParams.get('url') ?? '';
    const payload =
      target === MOCK_EMBED_URL
        ? {
            title: MOCK_EMBED_TITLE,
            description: MOCK_EMBED_DESCRIPTION,
            imageUrl: MOCK_GIF_IMAGE_URL,
            siteName: 'Mock Docs'
          }
        : { failed: true };
    return route.fulfill({
      status: 200,
      contentType: 'application/json',
      body: JSON.stringify(payload)
    });
  });
}
/** Builds a drop-file payload from plain text, base64-encoding the UTF-8 body. */
function createTextFilePayload(name: string, mimeType: string, content: string): ChatDropFilePayload {
  const base64 = Buffer.from(content, 'utf8').toString('base64');
  return { name, mimeType, base64 };
}
/**
 * Returns deterministic inline SVG markup (160x120 card with a few shapes)
 * whose caption is `label`; used as in-memory image-attachment content.
 */
function buildMockSvgMarkup(label: string): string {
  const header = '<svg xmlns="http://www.w3.org/2000/svg" width="160" height="120" viewBox="0 0 160 120">';
  const shapes =
    '<rect width="160" height="120" rx="18" fill="#0f172a" />' +
    '<circle cx="38" cy="36" r="18" fill="#38bdf8" />' +
    '<rect x="66" y="28" width="64" height="16" rx="8" fill="#f8fafc" />' +
    '<rect x="24" y="74" width="112" height="12" rx="6" fill="#22c55e" />';
  const caption = `<text x="24" y="104" fill="#e2e8f0" font-size="12" font-family="Arial, sans-serif">${label}</text>`;
  return `${header}${shapes}${caption}</svg>`;
}
/** Produces a collision-resistant name: `<prefix>-<epochMillis>-<up to 6 base36 chars>`. */
function uniqueName(prefix: string): string {
  const randomPart = Math.random().toString(36).slice(2, 8);
  return [prefix, Date.now(), randomPart].join('-');
}

View File

@@ -0,0 +1,396 @@
import { test, expect } from '../../fixtures/multi-client';
import {
installWebRTCTracking,
waitForPeerConnected,
isPeerStillConnected,
waitForAudioFlow,
waitForAudioStatsPresent,
waitForVideoFlow,
waitForOutboundVideoFlow,
waitForInboundVideoFlow,
dumpRtcDiagnostics
} from '../../helpers/webrtc-helpers';
import { RegisterPage } from '../../pages/register.page';
import { ServerSearchPage } from '../../pages/server-search.page';
import { ChatRoomPage } from '../../pages/chat-room.page';
/**
* Screen sharing E2E tests: verify video, screen-share audio, and voice audio
* flow correctly between users during screen sharing.
*
* Uses the same dedicated-browser-per-client infrastructure as voice tests.
* getDisplayMedia is monkey-patched to return a synthetic canvas video stream
* + 880 Hz oscillator audio, bypassing the browser picker dialog.
*/
// Timestamp-suffixed credentials and server name keep reruns against a
// persistent backend from colliding on usernames or server names.
const ALICE = { username: `alice_ss_${Date.now()}`, displayName: 'Alice', password: 'TestPass123!' };
const BOB = { username: `bob_ss_${Date.now()}`, displayName: 'Bob', password: 'TestPass123!' };
const SERVER_NAME = `SS Test ${Date.now()}`;
// Voice channel created (if needed) and joined by both users.
const VOICE_CHANNEL = 'General';
/** Register a user and navigate to /search. */
async function registerUser(page: import('@playwright/test').Page, user: typeof ALICE) {
const registerPage = new RegisterPage(page);
await registerPage.goto();
await expect(registerPage.submitButton).toBeVisible();
await registerPage.register(user.username, user.displayName, user.password);
await expect(page).toHaveURL(/\/search/, { timeout: 15_000 });
}
/**
 * Registers both users, has Alice create the shared server, then has Bob
 * find it via search and join. Both pages end up on a /room/ URL.
 */
async function setupServerWithBothUsers(
  alice: { page: import('@playwright/test').Page },
  bob: { page: import('@playwright/test').Page }
) {
  await registerUser(alice.page, ALICE);
  await registerUser(bob.page, BOB);

  // Alice creates the server and lands directly in its room.
  const aliceSearch = new ServerSearchPage(alice.page);
  await aliceSearch.createServer(SERVER_NAME, { description: 'Screen share E2E' });
  await expect(alice.page).toHaveURL(/\/room\//, { timeout: 15_000 });

  // Bob searches for the server by name and clicks its result card.
  const bobSearch = new ServerSearchPage(bob.page);
  await bobSearch.searchInput.fill(SERVER_NAME);
  const card = bob.page.locator('button', { hasText: SERVER_NAME }).first();
  await expect(card).toBeVisible({ timeout: 10_000 });
  await card.click();
  await expect(bob.page).toHaveURL(/\/room\//, { timeout: 15_000 });
}
/**
 * Ensure voice channel exists and both users join it.
 *
 * Afterwards both peers are verified connected, the audio RTP stats are
 * confirmed present, and the voice workspace is expanded on both clients
 * (required for the demand-driven screen-share request flow).
 */
async function joinVoiceTogether(
alice: { page: import('@playwright/test').Page },
bob: { page: import('@playwright/test').Page }
) {
const aliceRoom = new ChatRoomPage(alice.page);
const existingChannel = alice.page
.locator('app-rooms-side-panel')
.getByRole('button', { name: VOICE_CHANNEL, exact: true });
// NOTE(review): count() does not auto-wait - this assumes the side panel has
// already rendered when we get here; confirm if flakes appear at this line.
if (await existingChannel.count() === 0) {
await aliceRoom.openCreateVoiceChannelDialog();
await aliceRoom.createChannel(VOICE_CHANNEL);
await expect(existingChannel).toBeVisible({ timeout: 10_000 });
}
await aliceRoom.joinVoiceChannel(VOICE_CHANNEL);
// app-voice-controls appearing signals the voice connection is established.
await expect(alice.page.locator('app-voice-controls')).toBeVisible({ timeout: 15_000 });
const bobRoom = new ChatRoomPage(bob.page);
await bobRoom.joinVoiceChannel(VOICE_CHANNEL);
await expect(bob.page.locator('app-voice-controls')).toBeVisible({ timeout: 15_000 });
// Wait for WebRTC + audio pipeline
await waitForPeerConnected(alice.page, 30_000);
await waitForPeerConnected(bob.page, 30_000);
await waitForAudioStatsPresent(alice.page, 20_000);
await waitForAudioStatsPresent(bob.page, 20_000);
// Expand voice workspace on both clients so the demand-driven screen
// share request flow can fire (requires connectRemoteShares = true).
// Click the "VIEW" badge that appears next to the active voice channel.
const aliceView = alice.page.locator('app-rooms-side-panel')
.getByRole('button', { name: /view/i })
.first();
const bobView = bob.page.locator('app-rooms-side-panel')
.getByRole('button', { name: /view/i })
.first();
await expect(aliceView).toBeVisible({ timeout: 10_000 });
await aliceView.click();
await expect(alice.page.locator('app-voice-workspace')).toBeVisible({ timeout: 10_000 });
await expect(bobView).toBeVisible({ timeout: 10_000 });
await bobView.click();
await expect(bob.page.locator('app-voice-workspace')).toBeVisible({ timeout: 10_000 });
// Re-verify audio stats are present after workspace expansion (the VIEW
// click can trigger renegotiation which briefly disrupts audio).
await waitForAudioStatsPresent(alice.page, 20_000);
await waitForAudioStatsPresent(bob.page, 20_000);
}
/**
 * Asserts the stats delta shows traffic in both directions: at least one
 * outbound counter and at least one inbound counter must have increased.
 */
function expectFlowing(
  delta: { outboundBytesDelta: number; inboundBytesDelta: number; outboundPacketsDelta: number; inboundPacketsDelta: number },
  label: string
) {
  const isSending = delta.outboundBytesDelta > 0 || delta.outboundPacketsDelta > 0;
  const isReceiving = delta.inboundBytesDelta > 0 || delta.inboundPacketsDelta > 0;
  expect(isSending, `${label} should be sending`).toBe(true);
  expect(isReceiving, `${label} should be receiving`).toBe(true);
}
// Three scenarios: single-sharer flow, dual simultaneous shares, and a
// 10-second stability soak. All rely on the synthetic getDisplayMedia
// stream described in the file header.
test.describe('Screen sharing', () => {
test('single user screen share: video and audio flow to receiver, voice audio continues', async ({ createClient }) => {
test.setTimeout(180_000);
const alice = await createClient();
const bob = await createClient();
// Tracking must be installed before navigation so every RTCPeerConnection
// the app creates is observed.
await installWebRTCTracking(alice.page);
await installWebRTCTracking(bob.page);
// Forward browser console output into the test log for debugging.
alice.page.on('console', msg => console.log('[Alice]', msg.text()));
bob.page.on('console', msg => console.log('[Bob]', msg.text()));
// ── Setup: register, server, voice ────────────────────────────
await test.step('Setup server and voice channel', async () => {
await setupServerWithBothUsers(alice, bob);
await joinVoiceTogether(alice, bob);
});
// ── Verify voice audio before screen share ────────────────────
await test.step('Voice audio flows before screen share', async () => {
const aliceDelta = await waitForAudioFlow(alice.page, 30_000);
const bobDelta = await waitForAudioFlow(bob.page, 30_000);
expectFlowing(aliceDelta, 'Alice voice');
expectFlowing(bobDelta, 'Bob voice');
});
// ── Alice starts screen sharing ───────────────────────────────
await test.step('Alice starts screen sharing', async () => {
const aliceRoom = new ChatRoomPage(alice.page);
await aliceRoom.startScreenShare();
// Screen share button should show active state (MonitorOff icon)
await expect(aliceRoom.isScreenShareActive).toBeVisible({ timeout: 10_000 });
});
// ── Verify screen share video flows ───────────────────────────
await test.step('Screen share video flows from Alice to Bob', async () => {
// Screen share is unidirectional: Alice sends video, Bob receives it.
const aliceVideo = await waitForOutboundVideoFlow(alice.page, 30_000);
const bobVideo = await waitForInboundVideoFlow(bob.page, 30_000);
// Dump full RTC diagnostics before failing so CI logs explain the stall.
if (aliceVideo.outboundBytesDelta === 0 || bobVideo.inboundBytesDelta === 0) {
console.log('[Alice RTC]\n' + await dumpRtcDiagnostics(alice.page));
console.log('[Bob RTC]\n' + await dumpRtcDiagnostics(bob.page));
}
expect(
aliceVideo.outboundBytesDelta > 0 || aliceVideo.outboundPacketsDelta > 0,
'Alice should be sending screen share video'
).toBe(true);
expect(
bobVideo.inboundBytesDelta > 0 || bobVideo.inboundPacketsDelta > 0,
'Bob should be receiving screen share video'
).toBe(true);
});
// ── Verify voice audio continues during screen share ──────────
await test.step('Voice audio continues during screen share', async () => {
const aliceAudio = await waitForAudioFlow(alice.page, 20_000);
const bobAudio = await waitForAudioFlow(bob.page, 20_000);
expectFlowing(aliceAudio, 'Alice voice during screen share');
expectFlowing(bobAudio, 'Bob voice during screen share');
});
// ── Bob can hear Alice talk while she screen shares ───────────
await test.step('Bob receives audio from Alice during screen share', async () => {
// Specifically check Bob is receiving audio (from Alice's voice)
const bobAudio = await waitForAudioFlow(bob.page, 15_000);
expect(
bobAudio.inboundBytesDelta > 0,
'Bob should receive voice audio while Alice screen shares'
).toBe(true);
});
// ── Alice stops screen sharing ────────────────────────────────
await test.step('Alice stops screen sharing', async () => {
const aliceRoom = new ChatRoomPage(alice.page);
await aliceRoom.stopScreenShare();
// Active icon should disappear - regular Monitor icon shown instead
await expect(
aliceRoom.voiceControls.locator('button:has(ng-icon[name="lucideMonitor"])').first()
).toBeVisible({ timeout: 10_000 });
});
// ── Voice audio still works after screen share ends ───────────
await test.step('Voice audio resumes normally after screen share stops', async () => {
const aliceAudio = await waitForAudioFlow(alice.page, 20_000);
const bobAudio = await waitForAudioFlow(bob.page, 20_000);
expectFlowing(aliceAudio, 'Alice voice after screen share');
expectFlowing(bobAudio, 'Bob voice after screen share');
});
});
test('multiple users screen share simultaneously', async ({ createClient }) => {
test.setTimeout(180_000);
const alice = await createClient();
const bob = await createClient();
await installWebRTCTracking(alice.page);
await installWebRTCTracking(bob.page);
alice.page.on('console', msg => console.log('[Alice]', msg.text()));
bob.page.on('console', msg => console.log('[Bob]', msg.text()));
await test.step('Setup server and voice channel', async () => {
await setupServerWithBothUsers(alice, bob);
await joinVoiceTogether(alice, bob);
});
// ── Both users start screen sharing ───────────────────────────
await test.step('Alice starts screen sharing', async () => {
const aliceRoom = new ChatRoomPage(alice.page);
await aliceRoom.startScreenShare();
await expect(aliceRoom.isScreenShareActive).toBeVisible({ timeout: 10_000 });
});
await test.step('Bob starts screen sharing', async () => {
const bobRoom = new ChatRoomPage(bob.page);
await bobRoom.startScreenShare();
await expect(bobRoom.isScreenShareActive).toBeVisible({ timeout: 10_000 });
});
// ── Verify video flows in both directions ─────────────────────
await test.step('Video flows bidirectionally with both screen shares active', async () => {
// Both sharing: each page sends and receives video
const aliceVideo = await waitForVideoFlow(alice.page, 30_000);
const bobVideo = await waitForVideoFlow(bob.page, 30_000);
expectFlowing(aliceVideo, 'Alice screen share video');
expectFlowing(bobVideo, 'Bob screen share video');
});
// ── Voice audio continues with dual screen shares ─────────────
await test.step('Voice audio continues with both users screen sharing', async () => {
const aliceAudio = await waitForAudioFlow(alice.page, 20_000);
const bobAudio = await waitForAudioFlow(bob.page, 20_000);
expectFlowing(aliceAudio, 'Alice voice during dual screen share');
expectFlowing(bobAudio, 'Bob voice during dual screen share');
});
// ── Both stop screen sharing ──────────────────────────────────
await test.step('Both users stop screen sharing', async () => {
const aliceRoom = new ChatRoomPage(alice.page);
const bobRoom = new ChatRoomPage(bob.page);
await aliceRoom.stopScreenShare();
// Inactive state: the regular Monitor icon replaces the active one.
await expect(
aliceRoom.voiceControls.locator('button:has(ng-icon[name="lucideMonitor"])').first()
).toBeVisible({ timeout: 10_000 });
await bobRoom.stopScreenShare();
await expect(
bobRoom.voiceControls.locator('button:has(ng-icon[name="lucideMonitor"])').first()
).toBeVisible({ timeout: 10_000 });
});
});
test('screen share connection stays stable for 10+ seconds', async ({ createClient }) => {
test.setTimeout(180_000);
const alice = await createClient();
const bob = await createClient();
await installWebRTCTracking(alice.page);
await installWebRTCTracking(bob.page);
alice.page.on('console', msg => console.log('[Alice]', msg.text()));
bob.page.on('console', msg => console.log('[Bob]', msg.text()));
await test.step('Setup server and voice channel', async () => {
await setupServerWithBothUsers(alice, bob);
await joinVoiceTogether(alice, bob);
});
await test.step('Alice starts screen sharing', async () => {
const aliceRoom = new ChatRoomPage(alice.page);
await aliceRoom.startScreenShare();
await expect(aliceRoom.isScreenShareActive).toBeVisible({ timeout: 10_000 });
// Wait for video pipeline to fully establish
await waitForOutboundVideoFlow(alice.page, 30_000);
await waitForInboundVideoFlow(bob.page, 30_000);
});
// ── Stability checkpoints at 0s, 5s, 10s ─────────────────────
await test.step('Connection stays stable for 10+ seconds during screen share', async () => {
// The waits are cumulative: checkpoints land at 0s, 5s and 10s.
for (const checkpoint of [
0,
5_000,
5_000
]) {
if (checkpoint > 0) {
await alice.page.waitForTimeout(checkpoint);
}
const aliceConnected = await isPeerStillConnected(alice.page);
const bobConnected = await isPeerStillConnected(bob.page);
expect(aliceConnected, 'Alice should still be connected').toBe(true);
expect(bobConnected, 'Bob should still be connected').toBe(true);
}
// After 10s - verify both video and audio still flowing
const aliceVideo = await waitForOutboundVideoFlow(alice.page, 15_000);
const bobVideo = await waitForInboundVideoFlow(bob.page, 15_000);
expect(
aliceVideo.outboundBytesDelta > 0,
'Alice still sending screen share video after 10s'
).toBe(true);
expect(
bobVideo.inboundBytesDelta > 0,
'Bob still receiving screen share video after 10s'
).toBe(true);
const aliceAudio = await waitForAudioFlow(alice.page, 15_000);
const bobAudio = await waitForAudioFlow(bob.page, 15_000);
expectFlowing(aliceAudio, 'Alice voice after 10s screen share');
expectFlowing(bobAudio, 'Bob voice after 10s screen share');
});
// ── Clean disconnect ──────────────────────────────────────────
await test.step('Alice stops screen share and disconnects', async () => {
const aliceRoom = new ChatRoomPage(alice.page);
await aliceRoom.stopScreenShare();
await aliceRoom.disconnectButton.click();
await expect(aliceRoom.disconnectButton).not.toBeVisible({ timeout: 10_000 });
});
});
});

View File

@@ -0,0 +1,260 @@
import { test, expect } from '../../fixtures/multi-client';
import {
installWebRTCTracking,
waitForPeerConnected,
isPeerStillConnected,
getAudioStatsDelta,
waitForAudioFlow,
waitForAudioStatsPresent,
dumpRtcDiagnostics
} from '../../helpers/webrtc-helpers';
import { RegisterPage } from '../../pages/register.page';
import { ServerSearchPage } from '../../pages/server-search.page';
import { ChatRoomPage } from '../../pages/chat-room.page';
/**
* Full user journey: register → create server → join → voice → verify audio
* for 10+ seconds of stable connectivity.
*
* Uses two independent browser contexts (Alice & Bob) to simulate real
* multi-user WebRTC voice chat.
*/
// Unique-per-run credentials and server name so repeated runs against a
// persistent backend never collide on usernames or server names.
const ALICE = { username: `alice_${Date.now()}`, displayName: 'Alice', password: 'TestPass123!' };
const BOB = { username: `bob_${Date.now()}`, displayName: 'Bob', password: 'TestPass123!' };
const SERVER_NAME = `E2E Test Server ${Date.now()}`;
// Voice channel created (if needed) and joined by both users.
const VOICE_CHANNEL = 'General';
// Single long-form scenario covering the complete happy path; each phase is
// a named test.step so failures pinpoint the exact stage in the report.
test.describe('Full user journey: register → server → voice chat', () => {
test('two users register, create server, join voice, and stay connected 10+ seconds with audio', async ({ createClient }) => {
test.setTimeout(180_000); // 3 min - covers registration, server creation, voice establishment, and 10s stability check
const alice = await createClient();
const bob = await createClient();
// Install WebRTC tracking before any navigation
await installWebRTCTracking(alice.page);
await installWebRTCTracking(bob.page);
// Forward browser console for debugging
alice.page.on('console', msg => console.log('[Alice]', msg.text()));
bob.page.on('console', msg => console.log('[Bob]', msg.text()));
// ── Step 1: Register both users ──────────────────────────────────
await test.step('Alice registers an account', async () => {
const registerPage = new RegisterPage(alice.page);
await registerPage.goto();
await expect(registerPage.submitButton).toBeVisible();
await registerPage.register(ALICE.username, ALICE.displayName, ALICE.password);
// After registration, app should navigate to /search
await expect(alice.page).toHaveURL(/\/search/, { timeout: 15_000 });
});
await test.step('Bob registers an account', async () => {
const registerPage = new RegisterPage(bob.page);
await registerPage.goto();
await expect(registerPage.submitButton).toBeVisible();
await registerPage.register(BOB.username, BOB.displayName, BOB.password);
await expect(bob.page).toHaveURL(/\/search/, { timeout: 15_000 });
});
// ── Step 2: Alice creates a server ───────────────────────────────
await test.step('Alice creates a new server', async () => {
const searchPage = new ServerSearchPage(alice.page);
await searchPage.createServer(SERVER_NAME, {
description: 'E2E test server for voice testing'
});
// After server creation, app navigates to the room
await expect(alice.page).toHaveURL(/\/room\//, { timeout: 15_000 });
});
// ── Step 3: Bob joins the server ─────────────────────────────────
await test.step('Bob finds and joins the server', async () => {
const searchPage = new ServerSearchPage(bob.page);
// Search for the server
await searchPage.searchInput.fill(SERVER_NAME);
// Wait for search results and click the server
const serverCard = bob.page.locator('button', { hasText: SERVER_NAME }).first();
await expect(serverCard).toBeVisible({ timeout: 10_000 });
await serverCard.click();
// Bob should be in the room now
await expect(bob.page).toHaveURL(/\/room\//, { timeout: 15_000 });
});
// ── Step 4: Create a voice channel (if one doesn't exist) ────────
await test.step('Alice ensures a voice channel is available', async () => {
const chatRoom = new ChatRoomPage(alice.page);
const existingVoiceChannel = alice.page.locator('app-rooms-side-panel')
.getByRole('button', { name: VOICE_CHANNEL, exact: true });
// NOTE(review): count() does not auto-wait; assumes the side panel rendered.
const voiceChannelExists = await existingVoiceChannel.count() > 0;
if (!voiceChannelExists) {
// Click "Create Voice Channel" plus button
await chatRoom.openCreateVoiceChannelDialog();
await chatRoom.createChannel(VOICE_CHANNEL);
// Wait for the channel to appear
await expect(existingVoiceChannel).toBeVisible({ timeout: 10_000 });
}
});
// ── Step 5: Both users join the voice channel ────────────────────
await test.step('Alice joins the voice channel', async () => {
const chatRoom = new ChatRoomPage(alice.page);
await chatRoom.joinVoiceChannel(VOICE_CHANNEL);
// Voice controls should appear (indicates voice is connected)
await expect(alice.page.locator('app-voice-controls')).toBeVisible({ timeout: 15_000 });
});
await test.step('Bob joins the voice channel', async () => {
const chatRoom = new ChatRoomPage(bob.page);
await chatRoom.joinVoiceChannel(VOICE_CHANNEL);
await expect(bob.page.locator('app-voice-controls')).toBeVisible({ timeout: 15_000 });
});
// ── Step 6: Verify WebRTC connection establishes ─────────────────
await test.step('WebRTC peer connection reaches "connected" state', async () => {
await waitForPeerConnected(alice.page, 30_000);
await waitForPeerConnected(bob.page, 30_000);
// Wait for audio RTP pipeline to appear before measuring deltas -
// renegotiation after initial connect can temporarily remove stats.
await waitForAudioStatsPresent(alice.page, 20_000);
await waitForAudioStatsPresent(bob.page, 20_000);
});
// ── Step 7: Verify audio is flowing in both directions ───────────
await test.step('Audio packets are flowing between Alice and Bob', async () => {
const aliceDelta = await waitForAudioFlow(alice.page, 30_000);
const bobDelta = await waitForAudioFlow(bob.page, 30_000);
// Dump diagnostics before asserting so CI logs explain a stalled pipeline.
if (aliceDelta.outboundBytesDelta === 0 || aliceDelta.inboundBytesDelta === 0
|| bobDelta.outboundBytesDelta === 0 || bobDelta.inboundBytesDelta === 0) {
console.log('[Alice RTC Diagnostics]\n' + await dumpRtcDiagnostics(alice.page));
console.log('[Bob RTC Diagnostics]\n' + await dumpRtcDiagnostics(bob.page));
}
expectAudioFlow(aliceDelta, 'Alice');
expectAudioFlow(bobDelta, 'Bob');
});
// ── Step 8: Verify UI states are correct ─────────────────────────
await test.step('Voice UI shows correct state for both users', async () => {
const aliceRoom = new ChatRoomPage(alice.page);
const bobRoom = new ChatRoomPage(bob.page);
// Both should see voice controls with "Connected" status
await expect(alice.page.locator('app-voice-controls')).toBeVisible();
await expect(bob.page.locator('app-voice-controls')).toBeVisible();
// Both should see the voice workspace or at least voice users listed
// Check that both users appear in the voice channel user list
const aliceSeesBob = aliceRoom.channelsSidePanel.getByText(BOB.displayName).first();
const bobSeesAlice = bobRoom.channelsSidePanel.getByText(ALICE.displayName).first();
await expect(aliceSeesBob).toBeVisible({ timeout: 10_000 });
await expect(bobSeesAlice).toBeVisible({ timeout: 10_000 });
});
// ── Step 9: Stay connected for 10+ seconds, verify stability ─────
await test.step('Connection remains stable for 10+ seconds', async () => {
// Check connectivity at 0s, 5s, and 10s intervals
// (the waits are cumulative: 0, +5s, +5s)
for (const checkpoint of [
0,
5_000,
5_000
]) {
if (checkpoint > 0) {
await alice.page.waitForTimeout(checkpoint);
}
const aliceConnected = await isPeerStillConnected(alice.page);
const bobConnected = await isPeerStillConnected(bob.page);
expect(aliceConnected, 'Alice should still be connected').toBe(true);
expect(bobConnected, 'Bob should still be connected').toBe(true);
}
// After 10s total, verify audio is still flowing
const aliceDelta = await waitForAudioFlow(alice.page, 15_000);
const bobDelta = await waitForAudioFlow(bob.page, 15_000);
expectAudioFlow(aliceDelta, 'Alice after 10s');
expectAudioFlow(bobDelta, 'Bob after 10s');
});
// ── Step 10: Verify mute/unmute works correctly ──────────────────
await test.step('Mute toggle works correctly', async () => {
const aliceRoom = new ChatRoomPage(alice.page);
// Alice mutes - click the first button in voice controls (mute button)
await aliceRoom.muteButton.click();
// After muting, Alice's outbound audio should stop increasing
// When muted, bytesSent may still show small comfort noise or zero growth
// The key assertion is that Bob's inbound for Alice's stream stops or reduces
// NOTE(review): the delta below is sampled but never asserted - confirm
// whether the muted-state assertion is intentionally omitted for flake resistance.
await getAudioStatsDelta(alice.page, 2_000);
// Alice unmutes
await aliceRoom.muteButton.click();
// After unmuting, outbound should resume
const unmutedDelta = await waitForAudioFlow(alice.page, 15_000);
expectAudioFlow(unmutedDelta, 'Alice after unmuting');
});
// ── Step 11: Clean disconnect ────────────────────────────────────
await test.step('Alice disconnects from voice', async () => {
const aliceRoom = new ChatRoomPage(alice.page);
// Click the disconnect/hang-up button
await aliceRoom.disconnectButton.click();
// Connected controls should collapse for Alice after disconnect
await expect(aliceRoom.disconnectButton).not.toBeVisible({ timeout: 10_000 });
});
});
});
/**
 * Asserts bidirectional audio flow: the stats delta must show growth on at
 * least one outbound counter and at least one inbound counter.
 */
function expectAudioFlow(delta: {
  outboundBytesDelta: number;
  inboundBytesDelta: number;
  outboundPacketsDelta: number;
  inboundPacketsDelta: number;
}, label: string): void {
  const isSending = delta.outboundBytesDelta > 0 || delta.outboundPacketsDelta > 0;
  expect(isSending, `${label} should be sending audio`).toBe(true);
  const isReceiving = delta.inboundBytesDelta > 0 || delta.inboundPacketsDelta > 0;
  expect(isReceiving, `${label} should be receiving audio`).toBe(true);
}