Move toju-app into own its folder
This commit is contained in:
113
toju-app/src/app/infrastructure/persistence/README.md
Normal file
113
toju-app/src/app/infrastructure/persistence/README.md
Normal file
@@ -0,0 +1,113 @@
|
||||
# Persistence Infrastructure
|
||||
|
||||
Offline-first storage layer that keeps messages, users, rooms, reactions, bans, and attachments on the client. The rest of the app only ever talks to `DatabaseService`, which picks the right backend for the current platform at runtime.
|
||||
|
||||
## Files
|
||||
|
||||
```
|
||||
persistence/
|
||||
├── index.ts Barrel (exports DatabaseService)
|
||||
├── database.service.ts Platform-agnostic facade
|
||||
├── browser-database.service.ts IndexedDB backend (web)
|
||||
└── electron-database.service.ts IPC/SQLite backend (desktop)
|
||||
```
|
||||
|
||||
## Platform routing
|
||||
|
||||
```mermaid
|
||||
graph TD
|
||||
Consumer[Store effects / facades / components]
|
||||
Consumer --> Facade[DatabaseService<br/>facade]
|
||||
Facade -->|isBrowser?| Decision{Platform}
|
||||
Decision -- Browser --> IDB[BrowserDatabaseService<br/>IndexedDB]
|
||||
Decision -- Electron --> IPC[ElectronDatabaseService<br/>IPC to main process]
|
||||
IPC --> Main[Electron main process<br/>TypeORM + SQLite]
|
||||
|
||||
click Facade "database.service.ts" "DatabaseService - platform-agnostic facade" _blank
|
||||
click IDB "browser-database.service.ts" "IndexedDB backend for web" _blank
|
||||
click IPC "electron-database.service.ts" "IPC client for Electron" _blank
|
||||
```
|
||||
|
||||
`DatabaseService` is an `@Injectable({ providedIn: 'root' })` that injects both backends and delegates every call to whichever one matches the current platform. Consumers never import a backend directly.
|
||||
|
||||
## Object stores / tables
|
||||
|
||||
Both backends store the same entity types:
|
||||
|
||||
| Store | Key | Indexes | Description |
|
||||
|---|---|---|---|
|
||||
| `messages` | `id` | `roomId`, `timestamp` | Chat messages, sorted by timestamp |
|
||||
| `users` | `id` | | User profiles |
|
||||
| `rooms` | `id` | `timestamp` | Server/room metadata |
|
||||
| `reactions` | `id` | `messageId`, `userId` | Emoji reactions, deduplicated per user |
|
||||
| `bans` | `oderId` | `roomId`, `expiresAt` | Active bans per room |
|
||||
| `attachments` | `id` | `messageId` | File/image metadata tied to messages |
|
||||
| `meta` | `key` | | Key-value pairs (e.g. `currentUserId`) |
|
||||
|
||||
The IndexedDB schema is at version 2.
|
||||
|
||||
## How the two backends differ
|
||||
|
||||
### Browser (IndexedDB)
|
||||
|
||||
All operations run inside IndexedDB transactions in the renderer thread. Queries like `getMessages` pull all messages for a room via the `roomId` index, sort them by timestamp in JS, then apply limit/offset. Deleted messages are normalised on read (reactions are stripped from any message whose content equals the deletion sentinel).
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant Eff as NgRx Effect
|
||||
participant DB as DatabaseService
|
||||
participant BDB as BrowserDatabaseService
|
||||
participant IDB as IndexedDB
|
||||
|
||||
Eff->>DB: getMessages(roomId, 50)
|
||||
DB->>BDB: getMessages(roomId, 50)
|
||||
BDB->>IDB: tx.objectStore("messages")<br/>.index("roomId").getAll(roomId)
|
||||
IDB-->>BDB: Message[]
|
||||
Note over BDB: Sort by timestamp, slice, normalise
|
||||
BDB-->>DB: Message[]
|
||||
DB-->>Eff: Message[]
|
||||
```
|
||||
|
||||
### Electron (SQLite via IPC)
|
||||
|
||||
The renderer sends structured command/query objects through the Electron preload bridge. The main process handles them with TypeORM against a local SQLite file. No database logic runs in the renderer.
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant Eff as NgRx Effect
|
||||
participant DB as DatabaseService
|
||||
participant EDB as ElectronDatabaseService
|
||||
participant IPC as Preload Bridge
|
||||
participant Main as Main Process<br/>TypeORM + SQLite
|
||||
|
||||
Eff->>DB: saveMessage(msg)
|
||||
DB->>EDB: saveMessage(msg)
|
||||
EDB->>IPC: api.command({type: "save-message", payload: {message}})
|
||||
IPC->>Main: ipcRenderer.invoke
|
||||
Main-->>IPC: void
|
||||
IPC-->>EDB: Promise resolves
|
||||
EDB-->>DB: void
|
||||
DB-->>Eff: void
|
||||
```
|
||||
|
||||
The Electron backend's `initialize()` is a no-op because the main process creates the database before the renderer window opens.
|
||||
|
||||
## API surface
|
||||
|
||||
Every method on `DatabaseService` maps 1:1 to both backends:
|
||||
|
||||
**Messages**: `saveMessage`, `getMessages`, `getMessageById`, `deleteMessage`, `updateMessage`, `clearRoomMessages`
|
||||
|
||||
**Reactions**: `saveReaction`, `removeReaction`, `getReactionsForMessage`
|
||||
|
||||
**Users**: `saveUser`, `getUser`, `getCurrentUser`, `setCurrentUserId`, `getUsersByRoom`, `updateUser`
|
||||
|
||||
**Rooms**: `saveRoom`, `getRoom`, `getAllRooms`, `deleteRoom`, `updateRoom`
|
||||
|
||||
**Bans**: `saveBan`, `removeBan`, `getBansForRoom`, `isUserBanned`
|
||||
|
||||
**Attachments**: `saveAttachment`, `getAttachmentsForMessage`, `getAllAttachments`, `deleteAttachmentsForMessage`
|
||||
|
||||
**Lifecycle**: `initialize`, `clearAllData`
|
||||
|
||||
The facade also exposes an `isReady` signal that flips to `true` after `initialize()` completes, so components can gate rendering until the DB is available.
|
||||
@@ -0,0 +1,441 @@
|
||||
/* eslint-disable @typescript-eslint/no-non-null-assertion */
|
||||
import { Injectable } from '@angular/core';
|
||||
import {
|
||||
DELETED_MESSAGE_CONTENT,
|
||||
Message,
|
||||
User,
|
||||
Room,
|
||||
Reaction,
|
||||
BanEntry
|
||||
} from '../../shared-kernel';
|
||||
import type { ChatAttachmentMeta } from '../../shared-kernel';
|
||||
|
||||
/** IndexedDB database name for the MetoYou application. */
const DATABASE_NAME = 'metoyou';
/** IndexedDB schema version - bump when adding/changing object stores. */
const DATABASE_VERSION = 2;
/** Names of every object store used by the application. */
const STORE_MESSAGES = 'messages';
const STORE_USERS = 'users';
const STORE_ROOMS = 'rooms';
const STORE_REACTIONS = 'reactions';
const STORE_BANS = 'bans';
const STORE_META = 'meta';
const STORE_ATTACHMENTS = 'attachments';
/** All object store names, used when clearing the entire database. */
const ALL_STORE_NAMES: string[] = [
  STORE_MESSAGES,
  STORE_USERS,
  STORE_ROOMS,
  STORE_REACTIONS,
  STORE_BANS,
  STORE_ATTACHMENTS,
  STORE_META
];
|
||||
|
||||
/**
|
||||
* IndexedDB-backed database service used when the app runs in a
|
||||
* plain browser (i.e. without Electron).
|
||||
*
|
||||
* Every public method mirrors the {@link DatabaseService} API so the
|
||||
* facade can delegate transparently.
|
||||
*/
|
||||
@Injectable({ providedIn: 'root' })
|
||||
export class BrowserDatabaseService {
|
||||
/** Handle to the opened IndexedDB database, or `null` before {@link initialize}. */
|
||||
private database: IDBDatabase | null = null;
|
||||
|
||||
/** Open (or create) the IndexedDB database. Safe to call multiple times. */
|
||||
async initialize(): Promise<void> {
|
||||
if (this.database)
|
||||
return;
|
||||
|
||||
this.database = await this.openDatabase();
|
||||
}
|
||||
|
||||
/** Persist a single message. */
|
||||
async saveMessage(message: Message): Promise<void> {
|
||||
await this.put(STORE_MESSAGES, message);
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve messages for a room, sorted oldest-first.
|
||||
* @param roomId - Target room.
|
||||
* @param limit - Maximum number of messages to return.
|
||||
* @param offset - Number of messages to skip (for pagination).
|
||||
*/
|
||||
async getMessages(roomId: string, limit = 100, offset = 0): Promise<Message[]> {
|
||||
const allRoomMessages = await this.getAllFromIndex<Message>(
|
||||
STORE_MESSAGES, 'roomId', roomId
|
||||
);
|
||||
|
||||
return allRoomMessages
|
||||
.sort((first, second) => first.timestamp - second.timestamp)
|
||||
.slice(offset, offset + limit)
|
||||
.map((message) => this.normaliseMessage(message));
|
||||
}
|
||||
|
||||
/** Delete a message by its ID. */
|
||||
async deleteMessage(messageId: string): Promise<void> {
|
||||
await this.deleteRecord(STORE_MESSAGES, messageId);
|
||||
}
|
||||
|
||||
/** Apply partial updates to an existing message. */
|
||||
async updateMessage(messageId: string, updates: Partial<Message>): Promise<void> {
|
||||
const existing = await this.get<Message>(STORE_MESSAGES, messageId);
|
||||
|
||||
if (existing) {
|
||||
await this.put(STORE_MESSAGES, { ...existing,
|
||||
...updates });
|
||||
}
|
||||
}
|
||||
|
||||
/** Retrieve a single message by ID, or `null` if not found. */
|
||||
async getMessageById(messageId: string): Promise<Message | null> {
|
||||
const message = await this.get<Message>(STORE_MESSAGES, messageId);
|
||||
|
||||
return message ? this.normaliseMessage(message) : null;
|
||||
}
|
||||
|
||||
/** Remove every message belonging to a room. */
|
||||
async clearRoomMessages(roomId: string): Promise<void> {
|
||||
const messages = await this.getAllFromIndex<Message>(
|
||||
STORE_MESSAGES, 'roomId', roomId
|
||||
);
|
||||
const transaction = this.createTransaction(STORE_MESSAGES, 'readwrite');
|
||||
|
||||
for (const message of messages) {
|
||||
transaction.objectStore(STORE_MESSAGES).delete(message.id);
|
||||
}
|
||||
|
||||
await this.awaitTransaction(transaction);
|
||||
}
|
||||
|
||||
/**
|
||||
* Persist a reaction, ignoring duplicates (same user + same emoji on
|
||||
* the same message).
|
||||
*/
|
||||
async saveReaction(reaction: Reaction): Promise<void> {
|
||||
const existing = await this.getAllFromIndex<Reaction>(
|
||||
STORE_REACTIONS, 'messageId', reaction.messageId
|
||||
);
|
||||
const isDuplicate = existing.some(
|
||||
(entry) => entry.userId === reaction.userId && entry.emoji === reaction.emoji
|
||||
);
|
||||
|
||||
if (!isDuplicate) {
|
||||
await this.put(STORE_REACTIONS, reaction);
|
||||
}
|
||||
}
|
||||
|
||||
/** Remove a specific reaction (identified by user + emoji + message). */
|
||||
async removeReaction(messageId: string, userId: string, emoji: string): Promise<void> {
|
||||
const reactions = await this.getAllFromIndex<Reaction>(
|
||||
STORE_REACTIONS, 'messageId', messageId
|
||||
);
|
||||
const target = reactions.find(
|
||||
(entry) => entry.userId === userId && entry.emoji === emoji
|
||||
);
|
||||
|
||||
if (target) {
|
||||
await this.deleteRecord(STORE_REACTIONS, target.id);
|
||||
}
|
||||
}
|
||||
|
||||
/** Return all reactions for a given message. */
|
||||
async getReactionsForMessage(messageId: string): Promise<Reaction[]> {
|
||||
return this.getAllFromIndex<Reaction>(STORE_REACTIONS, 'messageId', messageId);
|
||||
}
|
||||
|
||||
/** Persist a user record. */
|
||||
async saveUser(user: User): Promise<void> {
|
||||
await this.put(STORE_USERS, user);
|
||||
}
|
||||
|
||||
/** Retrieve a user by ID, or `null` if not found. */
|
||||
async getUser(userId: string): Promise<User | null> {
|
||||
return (await this.get<User>(STORE_USERS, userId)) ?? null;
|
||||
}
|
||||
|
||||
/** Retrieve the last-authenticated ("current") user, or `null`. */
|
||||
async getCurrentUser(): Promise<User | null> {
|
||||
const meta = await this.get<{ id: string; value: string }>(
|
||||
STORE_META, 'currentUserId'
|
||||
);
|
||||
|
||||
if (!meta)
|
||||
return null;
|
||||
|
||||
return this.getUser(meta.value);
|
||||
}
|
||||
|
||||
/** Store which user ID is considered "current" (logged-in). */
|
||||
async setCurrentUserId(userId: string): Promise<void> {
|
||||
await this.put(STORE_META, { id: 'currentUserId',
|
||||
value: userId });
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve all known users.
|
||||
* @param _roomId - Accepted for API parity but currently unused.
|
||||
*/
|
||||
async getUsersByRoom(_roomId: string): Promise<User[]> {
|
||||
return this.getAll<User>(STORE_USERS);
|
||||
}
|
||||
|
||||
/** Apply partial updates to an existing user. */
|
||||
async updateUser(userId: string, updates: Partial<User>): Promise<void> {
|
||||
const existing = await this.get<User>(STORE_USERS, userId);
|
||||
|
||||
if (existing) {
|
||||
await this.put(STORE_USERS, { ...existing,
|
||||
...updates });
|
||||
}
|
||||
}
|
||||
|
||||
/** Persist a room record. */
|
||||
async saveRoom(room: Room): Promise<void> {
|
||||
await this.put(STORE_ROOMS, room);
|
||||
}
|
||||
|
||||
/** Retrieve a room by ID, or `null` if not found. */
|
||||
async getRoom(roomId: string): Promise<Room | null> {
|
||||
return (await this.get<Room>(STORE_ROOMS, roomId)) ?? null;
|
||||
}
|
||||
|
||||
/** Return every persisted room. */
|
||||
async getAllRooms(): Promise<Room[]> {
|
||||
return this.getAll<Room>(STORE_ROOMS);
|
||||
}
|
||||
|
||||
/** Delete a room and all of its messages. */
|
||||
async deleteRoom(roomId: string): Promise<void> {
|
||||
await this.deleteRecord(STORE_ROOMS, roomId);
|
||||
await this.clearRoomMessages(roomId);
|
||||
}
|
||||
|
||||
/** Apply partial updates to an existing room. */
|
||||
async updateRoom(roomId: string, updates: Partial<Room>): Promise<void> {
|
||||
const existing = await this.get<Room>(STORE_ROOMS, roomId);
|
||||
|
||||
if (existing) {
|
||||
await this.put(STORE_ROOMS, { ...existing,
|
||||
...updates });
|
||||
}
|
||||
}
|
||||
|
||||
/** Persist a ban entry. */
|
||||
async saveBan(ban: BanEntry): Promise<void> {
|
||||
await this.put(STORE_BANS, ban);
|
||||
}
|
||||
|
||||
/** Remove a ban by the banned user's `oderId`. */
|
||||
async removeBan(oderId: string): Promise<void> {
|
||||
const allBans = await this.getAll<BanEntry>(STORE_BANS);
|
||||
const match = allBans.find((ban) => ban.oderId === oderId);
|
||||
|
||||
if (match) {
|
||||
await this.deleteRecord(STORE_BANS, match.oderId);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Return active (non-expired) bans for a room.
|
||||
*
|
||||
* @param roomId - Room to query.
|
||||
*/
|
||||
async getBansForRoom(roomId: string): Promise<BanEntry[]> {
|
||||
const allBans = await this.getAllFromIndex<BanEntry>(
|
||||
STORE_BANS, 'roomId', roomId
|
||||
);
|
||||
const now = Date.now();
|
||||
|
||||
return allBans.filter(
|
||||
(ban) => !ban.expiresAt || ban.expiresAt > now
|
||||
);
|
||||
}
|
||||
|
||||
/** Check whether a specific user is currently banned from a room. */
|
||||
async isUserBanned(userId: string, roomId: string): Promise<boolean> {
|
||||
const activeBans = await this.getBansForRoom(roomId);
|
||||
|
||||
return activeBans.some((ban) => ban.oderId === userId);
|
||||
}
|
||||
|
||||
/** Persist attachment metadata associated with a chat message. */
|
||||
async saveAttachment(attachment: ChatAttachmentMeta): Promise<void> {
|
||||
await this.put(STORE_ATTACHMENTS, attachment);
|
||||
}
|
||||
|
||||
/** Return all attachment records associated with a message. */
|
||||
async getAttachmentsForMessage(messageId: string): Promise<ChatAttachmentMeta[]> {
|
||||
return this.getAllFromIndex<ChatAttachmentMeta>(STORE_ATTACHMENTS, 'messageId', messageId);
|
||||
}
|
||||
|
||||
/** Return every persisted attachment record. */
|
||||
async getAllAttachments(): Promise<ChatAttachmentMeta[]> {
|
||||
return this.getAll<ChatAttachmentMeta>(STORE_ATTACHMENTS);
|
||||
}
|
||||
|
||||
/** Delete every attachment record for a specific message. */
|
||||
async deleteAttachmentsForMessage(messageId: string): Promise<void> {
|
||||
const attachments = await this.getAllFromIndex<ChatAttachmentMeta>(
|
||||
STORE_ATTACHMENTS, 'messageId', messageId
|
||||
);
|
||||
const transaction = this.createTransaction(STORE_ATTACHMENTS, 'readwrite');
|
||||
|
||||
for (const attachment of attachments) {
|
||||
transaction.objectStore(STORE_ATTACHMENTS).delete(attachment.id);
|
||||
}
|
||||
|
||||
await this.awaitTransaction(transaction);
|
||||
}
|
||||
|
||||
/** Wipe all persisted data in every object store. */
|
||||
async clearAllData(): Promise<void> {
|
||||
const transaction = this.createTransaction(ALL_STORE_NAMES, 'readwrite');
|
||||
|
||||
for (const storeName of ALL_STORE_NAMES) {
|
||||
transaction.objectStore(storeName).clear();
|
||||
}
|
||||
|
||||
await this.awaitTransaction(transaction);
|
||||
}
|
||||
|
||||
private openDatabase(): Promise<IDBDatabase> {
|
||||
return new Promise((resolve, reject) => {
|
||||
const request = indexedDB.open(DATABASE_NAME, DATABASE_VERSION);
|
||||
|
||||
request.onerror = () => reject(request.error);
|
||||
request.onupgradeneeded = () => this.setupSchema(request.result);
|
||||
request.onsuccess = () => resolve(request.result);
|
||||
});
|
||||
}
|
||||
|
||||
private setupSchema(database: IDBDatabase): void {
|
||||
const messagesStore = this.ensureStore(database, STORE_MESSAGES, { keyPath: 'id' });
|
||||
|
||||
this.ensureIndex(messagesStore, 'roomId', 'roomId');
|
||||
this.ensureIndex(messagesStore, 'timestamp', 'timestamp');
|
||||
|
||||
this.ensureStore(database, STORE_USERS, { keyPath: 'id' });
|
||||
|
||||
const roomsStore = this.ensureStore(database, STORE_ROOMS, { keyPath: 'id' });
|
||||
|
||||
this.ensureIndex(roomsStore, 'timestamp', 'timestamp');
|
||||
|
||||
const reactionsStore = this.ensureStore(database, STORE_REACTIONS, { keyPath: 'id' });
|
||||
|
||||
this.ensureIndex(reactionsStore, 'messageId', 'messageId');
|
||||
this.ensureIndex(reactionsStore, 'userId', 'userId');
|
||||
|
||||
const bansStore = this.ensureStore(database, STORE_BANS, { keyPath: 'oderId' });
|
||||
|
||||
this.ensureIndex(bansStore, 'roomId', 'roomId');
|
||||
this.ensureIndex(bansStore, 'expiresAt', 'expiresAt');
|
||||
|
||||
this.ensureStore(database, STORE_META, { keyPath: 'id' });
|
||||
|
||||
const attachmentsStore = this.ensureStore(database, STORE_ATTACHMENTS, { keyPath: 'id' });
|
||||
|
||||
this.ensureIndex(attachmentsStore, 'messageId', 'messageId');
|
||||
}
|
||||
|
||||
private ensureStore(
|
||||
database: IDBDatabase,
|
||||
name: string,
|
||||
options?: IDBObjectStoreParameters
|
||||
): IDBObjectStore {
|
||||
if (database.objectStoreNames.contains(name)) {
|
||||
return (database.transaction(name, 'readonly') as IDBTransaction).objectStore(name);
|
||||
}
|
||||
|
||||
return database.createObjectStore(name, options);
|
||||
}
|
||||
|
||||
private ensureIndex(store: IDBObjectStore, name: string, keyPath: string): void {
|
||||
if (!store.indexNames.contains(name)) {
|
||||
store.createIndex(name, keyPath, { unique: false });
|
||||
}
|
||||
}
|
||||
|
||||
private createTransaction(
|
||||
storeNames: string | string[],
|
||||
mode: IDBTransactionMode
|
||||
): IDBTransaction {
|
||||
if (!this.database) {
|
||||
throw new Error('Database has not been initialized');
|
||||
}
|
||||
|
||||
return this.database.transaction(storeNames, mode);
|
||||
}
|
||||
|
||||
private awaitTransaction(transaction: IDBTransaction): Promise<void> {
|
||||
return new Promise((resolve, reject) => {
|
||||
transaction.oncomplete = () => resolve();
|
||||
transaction.onerror = () => reject(transaction.error);
|
||||
transaction.onabort = () => reject(transaction.error);
|
||||
});
|
||||
}
|
||||
|
||||
private async put(storeName: string, value: unknown): Promise<void> {
|
||||
const transaction = this.createTransaction(storeName, 'readwrite');
|
||||
|
||||
transaction.objectStore(storeName).put(value);
|
||||
|
||||
await this.awaitTransaction(transaction);
|
||||
}
|
||||
|
||||
private async get<T>(storeName: string, key: IDBValidKey): Promise<T | undefined> {
|
||||
const transaction = this.createTransaction(storeName, 'readonly');
|
||||
const request = transaction.objectStore(storeName).get(key);
|
||||
|
||||
return new Promise<T | undefined>((resolve, reject) => {
|
||||
request.onsuccess = () => resolve(request.result as T | undefined);
|
||||
request.onerror = () => reject(request.error);
|
||||
});
|
||||
}
|
||||
|
||||
private async getAll<T>(storeName: string): Promise<T[]> {
|
||||
const transaction = this.createTransaction(storeName, 'readonly');
|
||||
const request = transaction.objectStore(storeName).getAll();
|
||||
|
||||
return new Promise<T[]>((resolve, reject) => {
|
||||
request.onsuccess = () => resolve((request.result as T[]) ?? []);
|
||||
request.onerror = () => reject(request.error);
|
||||
});
|
||||
}
|
||||
|
||||
private async getAllFromIndex<T>(
|
||||
storeName: string,
|
||||
indexName: string,
|
||||
query: IDBValidKey | IDBKeyRange
|
||||
): Promise<T[]> {
|
||||
const transaction = this.createTransaction(storeName, 'readonly');
|
||||
const request = transaction.objectStore(storeName)
|
||||
.index(indexName)
|
||||
.getAll(query);
|
||||
|
||||
return new Promise<T[]>((resolve, reject) => {
|
||||
request.onsuccess = () => resolve((request.result as T[]) ?? []);
|
||||
request.onerror = () => reject(request.error);
|
||||
});
|
||||
}
|
||||
|
||||
private async deleteRecord(storeName: string, key: IDBValidKey): Promise<void> {
|
||||
const transaction = this.createTransaction(storeName, 'readwrite');
|
||||
|
||||
transaction.objectStore(storeName).delete(key);
|
||||
|
||||
await this.awaitTransaction(transaction);
|
||||
}
|
||||
|
||||
private normaliseMessage(message: Message): Message {
|
||||
if (message.content === DELETED_MESSAGE_CONTENT) {
|
||||
return { ...message,
|
||||
reactions: [] };
|
||||
}
|
||||
|
||||
return message;
|
||||
}
|
||||
}
|
||||
135
toju-app/src/app/infrastructure/persistence/database.service.ts
Normal file
135
toju-app/src/app/infrastructure/persistence/database.service.ts
Normal file
@@ -0,0 +1,135 @@
|
||||
/* eslint-disable @typescript-eslint/member-ordering */
|
||||
import {
|
||||
inject,
|
||||
Injectable,
|
||||
signal
|
||||
} from '@angular/core';
|
||||
import {
|
||||
Message,
|
||||
User,
|
||||
Room,
|
||||
Reaction,
|
||||
BanEntry
|
||||
} from '../../shared-kernel';
|
||||
import type { ChatAttachmentMeta } from '../../shared-kernel';
|
||||
import { PlatformService } from '../../core/platform';
|
||||
import { BrowserDatabaseService } from './browser-database.service';
|
||||
import { ElectronDatabaseService } from './electron-database.service';
|
||||
|
||||
/**
|
||||
* Facade database service that transparently delegates to the correct
|
||||
* storage backend based on the runtime platform.
|
||||
*
|
||||
* - Electron -> SQLite via {@link ElectronDatabaseService} (IPC to main process).
|
||||
* - Browser -> IndexedDB via {@link BrowserDatabaseService}.
|
||||
*
|
||||
* All consumers inject `DatabaseService`; the underlying storage engine
|
||||
* is selected automatically.
|
||||
*/
|
||||
@Injectable({ providedIn: 'root' })
|
||||
export class DatabaseService {
|
||||
private readonly platform = inject(PlatformService);
|
||||
private readonly browserDb = inject(BrowserDatabaseService);
|
||||
private readonly electronDb = inject(ElectronDatabaseService);
|
||||
|
||||
/** Reactive flag: `true` once {@link initialize} has completed. */
|
||||
isReady = signal(false);
|
||||
|
||||
/** The active storage backend for the current platform. */
|
||||
private get backend() {
|
||||
return this.platform.isBrowser ? this.browserDb : this.electronDb;
|
||||
}
|
||||
|
||||
/** Initialise the platform-specific database. */
|
||||
async initialize(): Promise<void> {
|
||||
await this.backend.initialize();
|
||||
this.isReady.set(true);
|
||||
}
|
||||
|
||||
/** Persist a single chat message. */
|
||||
saveMessage(message: Message) { return this.backend.saveMessage(message); }
|
||||
|
||||
/** Retrieve messages for a room with optional pagination. */
|
||||
getMessages(roomId: string, limit = 100, offset = 0) { return this.backend.getMessages(roomId, limit, offset); }
|
||||
|
||||
/** Permanently delete a message by ID. */
|
||||
deleteMessage(messageId: string) { return this.backend.deleteMessage(messageId); }
|
||||
|
||||
/** Apply partial updates to an existing message. */
|
||||
updateMessage(messageId: string, updates: Partial<Message>) { return this.backend.updateMessage(messageId, updates); }
|
||||
|
||||
/** Retrieve a single message by ID. */
|
||||
getMessageById(messageId: string) { return this.backend.getMessageById(messageId); }
|
||||
|
||||
/** Remove every message belonging to a room. */
|
||||
clearRoomMessages(roomId: string) { return this.backend.clearRoomMessages(roomId); }
|
||||
|
||||
/** Persist a reaction. */
|
||||
saveReaction(reaction: Reaction) { return this.backend.saveReaction(reaction); }
|
||||
|
||||
/** Remove a specific reaction (user + emoji + message). */
|
||||
removeReaction(messageId: string, userId: string, emoji: string) { return this.backend.removeReaction(messageId, userId, emoji); }
|
||||
|
||||
/** Return all reactions for a given message. */
|
||||
getReactionsForMessage(messageId: string) { return this.backend.getReactionsForMessage(messageId); }
|
||||
|
||||
/** Persist a user record. */
|
||||
saveUser(user: User) { return this.backend.saveUser(user); }
|
||||
|
||||
/** Retrieve a user by ID. */
|
||||
getUser(userId: string) { return this.backend.getUser(userId); }
|
||||
|
||||
/** Retrieve the current (logged-in) user. */
|
||||
getCurrentUser() { return this.backend.getCurrentUser(); }
|
||||
|
||||
/** Store the current user ID. */
|
||||
setCurrentUserId(userId: string) { return this.backend.setCurrentUserId(userId); }
|
||||
|
||||
/** Retrieve users in a room. */
|
||||
getUsersByRoom(roomId: string) { return this.backend.getUsersByRoom(roomId); }
|
||||
|
||||
/** Apply partial updates to an existing user. */
|
||||
updateUser(userId: string, updates: Partial<User>) { return this.backend.updateUser(userId, updates); }
|
||||
|
||||
/** Persist a room record. */
|
||||
saveRoom(room: Room) { return this.backend.saveRoom(room); }
|
||||
|
||||
/** Retrieve a room by ID. */
|
||||
getRoom(roomId: string) { return this.backend.getRoom(roomId); }
|
||||
|
||||
/** Return every persisted room. */
|
||||
getAllRooms() { return this.backend.getAllRooms(); }
|
||||
|
||||
/** Delete a room and its associated messages. */
|
||||
deleteRoom(roomId: string) { return this.backend.deleteRoom(roomId); }
|
||||
|
||||
/** Apply partial updates to an existing room. */
|
||||
updateRoom(roomId: string, updates: Partial<Room>) { return this.backend.updateRoom(roomId, updates); }
|
||||
|
||||
/** Persist a ban entry. */
|
||||
saveBan(ban: BanEntry) { return this.backend.saveBan(ban); }
|
||||
|
||||
/** Remove a ban by oderId. */
|
||||
removeBan(oderId: string) { return this.backend.removeBan(oderId); }
|
||||
|
||||
/** Return active bans for a room. */
|
||||
getBansForRoom(roomId: string) { return this.backend.getBansForRoom(roomId); }
|
||||
|
||||
/** Check whether a user is currently banned from a room. */
|
||||
isUserBanned(userId: string, roomId: string) { return this.backend.isUserBanned(userId, roomId); }
|
||||
|
||||
/** Persist attachment metadata. */
|
||||
saveAttachment(attachment: ChatAttachmentMeta) { return this.backend.saveAttachment(attachment); }
|
||||
|
||||
/** Return all attachment records for a message. */
|
||||
getAttachmentsForMessage(messageId: string) { return this.backend.getAttachmentsForMessage(messageId); }
|
||||
|
||||
/** Return every persisted attachment record. */
|
||||
getAllAttachments() { return this.backend.getAllAttachments(); }
|
||||
|
||||
/** Delete all attachment records for a message. */
|
||||
deleteAttachmentsForMessage(messageId: string) { return this.backend.deleteAttachmentsForMessage(messageId); }
|
||||
|
||||
/** Wipe all persisted data. */
|
||||
clearAllData() { return this.backend.clearAllData(); }
|
||||
}
|
||||
@@ -0,0 +1,189 @@
|
||||
import { Injectable, inject } from '@angular/core';
import {
  Message,
  User,
  Room,
  Reaction,
  BanEntry
} from '../../shared-kernel';
import type { ChatAttachmentMeta } from '../../shared-kernel';
import type { ElectronApi } from '../../core/platform/electron/electron-api.models';
import { ElectronBridgeService } from '../../core/platform/electron/electron-bridge.service';
|
||||
|
||||
/**
|
||||
* Database service for the Electron (desktop) runtime.
|
||||
*
|
||||
* The SQLite database is managed by TypeORM in the Electron main process.
|
||||
* This service is a thin CQRS IPC client that dispatches structured
|
||||
* command/query objects through the unified preload channels.
|
||||
*/
|
||||
@Injectable({ providedIn: 'root' })
|
||||
export class ElectronDatabaseService {
|
||||
private readonly electronBridge = inject(ElectronBridgeService);
|
||||
|
||||
/** Shorthand accessor for the preload-exposed CQRS API. */
|
||||
private get api(): ElectronApi {
|
||||
return this.electronBridge.requireApi();
|
||||
}
|
||||
|
||||
/**
|
||||
* No-op: the database is initialised in the main process before the
|
||||
* renderer window opens and requires no explicit bootstrap call here.
|
||||
*/
|
||||
async initialize(): Promise<void> { /* no-op */ }
|
||||
|
||||
/** Persist a single chat message. */
|
||||
saveMessage(message: Message): Promise<void> {
|
||||
return this.api.command({ type: 'save-message', payload: { message } });
|
||||
}
|
||||
|
||||
/**
|
||||
* Retrieve messages for a room, sorted oldest-first.
|
||||
*
|
||||
* @param roomId - Target room.
|
||||
* @param limit - Maximum number of messages to return.
|
||||
* @param offset - Number of messages to skip (for pagination).
|
||||
*/
|
||||
getMessages(roomId: string, limit = 100, offset = 0): Promise<Message[]> {
|
||||
return this.api.query<Message[]>({ type: 'get-messages', payload: { roomId, limit, offset } });
|
||||
}
|
||||
|
||||
/** Permanently delete a message by ID. */
|
||||
deleteMessage(messageId: string): Promise<void> {
|
||||
return this.api.command({ type: 'delete-message', payload: { messageId } });
|
||||
}
|
||||
|
||||
/** Apply partial updates to an existing message. */
|
||||
updateMessage(messageId: string, updates: Partial<Message>): Promise<void> {
|
||||
return this.api.command({ type: 'update-message', payload: { messageId, updates } });
|
||||
}
|
||||
|
||||
/** Retrieve a single message by ID, or `null` if not found. */
|
||||
getMessageById(messageId: string): Promise<Message | null> {
|
||||
return this.api.query<Message | null>({ type: 'get-message-by-id', payload: { messageId } });
|
||||
}
|
||||
|
||||
/** Remove every message belonging to a room. */
|
||||
clearRoomMessages(roomId: string): Promise<void> {
|
||||
return this.api.command({ type: 'clear-room-messages', payload: { roomId } });
|
||||
}
|
||||
|
||||
/** Persist a reaction (deduplication is handled main-process side). */
|
||||
saveReaction(reaction: Reaction): Promise<void> {
|
||||
return this.api.command({ type: 'save-reaction', payload: { reaction } });
|
||||
}
|
||||
|
||||
/** Remove a specific reaction (user + emoji + message). */
|
||||
removeReaction(messageId: string, userId: string, emoji: string): Promise<void> {
|
||||
return this.api.command({ type: 'remove-reaction', payload: { messageId, userId, emoji } });
|
||||
}
|
||||
|
||||
/** Return all reactions for a given message. */
|
||||
getReactionsForMessage(messageId: string): Promise<Reaction[]> {
|
||||
return this.api.query<Reaction[]>({ type: 'get-reactions-for-message', payload: { messageId } });
|
||||
}
|
||||
|
||||
/** Persist a user record. */
|
||||
saveUser(user: User): Promise<void> {
|
||||
return this.api.command({ type: 'save-user', payload: { user } });
|
||||
}
|
||||
|
||||
/** Retrieve a user by ID, or `null` if not found. */
|
||||
getUser(userId: string): Promise<User | null> {
|
||||
return this.api.query<User | null>({ type: 'get-user', payload: { userId } });
|
||||
}
|
||||
|
||||
/** Retrieve the last-authenticated ("current") user, or `null`. */
|
||||
getCurrentUser(): Promise<User | null> {
|
||||
return this.api.query<User | null>({ type: 'get-current-user', payload: {} });
|
||||
}
|
||||
|
||||
/** Store which user ID is considered "current" (logged-in). */
|
||||
setCurrentUserId(userId: string): Promise<void> {
|
||||
return this.api.command({ type: 'set-current-user-id', payload: { userId } });
|
||||
}
|
||||
|
||||
/** Retrieve users associated with a room. */
|
||||
getUsersByRoom(roomId: string): Promise<User[]> {
|
||||
return this.api.query<User[]>({ type: 'get-users-by-room', payload: { roomId } });
|
||||
}
|
||||
|
||||
/** Apply partial updates to an existing user. */
|
||||
updateUser(userId: string, updates: Partial<User>): Promise<void> {
|
||||
return this.api.command({ type: 'update-user', payload: { userId, updates } });
|
||||
}
|
||||
|
||||
/** Persist a room record. */
|
||||
saveRoom(room: Room): Promise<void> {
|
||||
return this.api.command({ type: 'save-room', payload: { room } });
|
||||
}
|
||||
|
||||
/** Retrieve a room by ID, or `null` if not found. */
|
||||
getRoom(roomId: string): Promise<Room | null> {
|
||||
return this.api.query<Room | null>({ type: 'get-room', payload: { roomId } });
|
||||
}
|
||||
|
||||
/** Return every persisted room. */
|
||||
getAllRooms(): Promise<Room[]> {
|
||||
return this.api.query<Room[]>({ type: 'get-all-rooms', payload: {} });
|
||||
}
|
||||
|
||||
/** Delete a room by ID (also removes its messages). */
|
||||
deleteRoom(roomId: string): Promise<void> {
|
||||
return this.api.command({ type: 'delete-room', payload: { roomId } });
|
||||
}
|
||||
|
||||
/** Apply partial updates to an existing room. */
|
||||
updateRoom(roomId: string, updates: Partial<Room>): Promise<void> {
|
||||
return this.api.command({ type: 'update-room', payload: { roomId, updates } });
|
||||
}
|
||||
|
||||
/** Persist a ban entry. */
|
||||
saveBan(ban: BanEntry): Promise<void> {
|
||||
return this.api.command({ type: 'save-ban', payload: { ban } });
|
||||
}
|
||||
|
||||
/** Remove a ban by the banned user's `oderId`. */
|
||||
removeBan(oderId: string): Promise<void> {
|
||||
return this.api.command({ type: 'remove-ban', payload: { oderId } });
|
||||
}
|
||||
|
||||
/** Return active bans for a room. */
|
||||
getBansForRoom(roomId: string): Promise<BanEntry[]> {
|
||||
return this.api.query<BanEntry[]>({ type: 'get-bans-for-room', payload: { roomId } });
|
||||
}
|
||||
|
||||
/** Check whether a user is currently banned from a room. */
|
||||
isUserBanned(userId: string, roomId: string): Promise<boolean> {
|
||||
return this.api.query<boolean>({ type: 'is-user-banned', payload: { userId, roomId } });
|
||||
}
|
||||
|
||||
/** Persist attachment metadata. */
|
||||
// eslint-disable-next-line
|
||||
saveAttachment(attachment: any): Promise<void> {
|
||||
return this.api.command({ type: 'save-attachment', payload: { attachment } });
|
||||
}
|
||||
|
||||
/** Return all attachment records for a message. */
|
||||
// eslint-disable-next-line
|
||||
getAttachmentsForMessage(messageId: string): Promise<any[]> {
|
||||
// eslint-disable-next-line
|
||||
return this.api.query<any[]>({ type: 'get-attachments-for-message', payload: { messageId } });
|
||||
}
|
||||
|
||||
/** Return every persisted attachment record. */
|
||||
// eslint-disable-next-line
|
||||
getAllAttachments(): Promise<any[]> {
|
||||
// eslint-disable-next-line
|
||||
return this.api.query<any[]>({ type: 'get-all-attachments', payload: {} });
|
||||
}
|
||||
|
||||
/** Delete all attachment records for a message. */
|
||||
deleteAttachmentsForMessage(messageId: string): Promise<void> {
|
||||
return this.api.command({ type: 'delete-attachments-for-message', payload: { messageId } });
|
||||
}
|
||||
|
||||
/** Wipe every table, removing all persisted data. */
|
||||
clearAllData(): Promise<void> {
|
||||
return this.api.command({ type: 'clear-all-data', payload: {} });
|
||||
}
|
||||
}
|
||||
1
toju-app/src/app/infrastructure/persistence/index.ts
Normal file
1
toju-app/src/app/infrastructure/persistence/index.ts
Normal file
@@ -0,0 +1 @@
|
||||
// Barrel for the persistence layer — consumers import DatabaseService from here
// and never touch the platform-specific backends directly.
export * from './database.service';
|
||||
322
toju-app/src/app/infrastructure/realtime/README.md
Normal file
322
toju-app/src/app/infrastructure/realtime/README.md
Normal file
@@ -0,0 +1,322 @@
|
||||
# Realtime Infrastructure
|
||||
|
||||
Low-level WebRTC and WebSocket plumbing that the rest of the app sits on top of. Nothing in here knows about Angular components, NgRx, or domain logic. It exposes observables, signals, and callbacks that higher layers (facades, effects, components) consume.
|
||||
|
||||
## Module map
|
||||
|
||||
```
|
||||
realtime/
|
||||
├── realtime-session.service.ts Composition root (WebRTCService)
|
||||
├── realtime.types.ts PeerData, credentials, tracker types
|
||||
├── realtime.constants.ts ICE servers, signal types, bitrates, intervals
|
||||
│
|
||||
├── signaling/ WebSocket layer
|
||||
│ ├── signaling.manager.ts One WebSocket per signaling URL
|
||||
│ ├── signaling-transport-handler.ts Routes messages to the right socket
|
||||
│ ├── server-signaling-coordinator.ts Maps peers/servers to signaling URLs
|
||||
│ ├── signaling-message-handler.ts Dispatches incoming signaling messages
|
||||
│ └── server-membership-signaling-handler.ts Join / leave / switch protocol
|
||||
│
|
||||
├── peer-connection-manager/ WebRTC peer connections
|
||||
│ ├── peer-connection.manager.ts Owns all RTCPeerConnection instances
|
||||
│ ├── shared.ts PeerData type + state factory
|
||||
│ ├── connection/
|
||||
│ │ ├── create-peer-connection.ts RTCPeerConnection factory (ICE, transceivers)
|
||||
│ │ └── negotiation.ts Offer/answer/ICE with collision handling
|
||||
│ ├── messaging/
|
||||
│ │ ├── data-channel.ts Ordered data channel for chat + control
|
||||
│ │ └── ping.ts Latency measurement (PING/PONG every 5s)
|
||||
│ ├── recovery/
|
||||
│ │ └── peer-recovery.ts Disconnect grace period + reconnect loop
|
||||
│ └── streams/
|
||||
│ └── remote-streams.ts Classifies incoming tracks (voice vs screen)
|
||||
│
|
||||
├── media/ Local capture and processing
|
||||
│ ├── media.manager.ts getUserMedia, mute, deafen, gain pipeline
|
||||
│ ├── noise-reduction.manager.ts RNNoise AudioWorklet graph
|
||||
│ ├── voice-session-controller.ts Higher-level wrapper over MediaManager
|
||||
│ ├── screen-share.manager.ts Screen capture + per-peer track distribution
|
||||
│ └── screen-share-platforms/
|
||||
│ ├── shared.ts Electron desktopCapturer types
|
||||
│ ├── browser-screen-share.capture.ts Standard getDisplayMedia
|
||||
│ ├── desktop-electron-screen-share.capture.ts Electron source picker (Windows)
|
||||
│ └── linux-electron-screen-share.capture.ts PulseAudio/PipeWire routing (Linux)
|
||||
│
|
||||
├── streams/ Stream facades
|
||||
│ ├── peer-media-facade.ts Unified API over peers, media, screen share
|
||||
│ └── remote-screen-share-request-controller.ts On-demand screen share delivery
|
||||
│
|
||||
├── state/
|
||||
│ └── webrtc-state-controller.ts Angular Signals for all connection state
|
||||
│
|
||||
└── logging/
|
||||
├── webrtc-logger.ts Conditional [WebRTC] prefixed logging
|
||||
└── debug-network-metrics.ts Per-peer stats (drops, latency, throughput)
|
||||
```
|
||||
|
||||
## How it all fits together
|
||||
|
||||
`WebRTCService` is the composition root. It instantiates every other manager, then wires their callbacks together after construction (to avoid circular references). No manager imports another manager directly.
|
||||
|
||||
```mermaid
|
||||
graph TD
|
||||
WS[WebRTCService<br/>composition root]
|
||||
|
||||
WS --> SC[SignalingTransportHandler]
|
||||
WS --> PCM[PeerConnectionManager]
|
||||
WS --> MM[MediaManager]
|
||||
WS --> SSM[ScreenShareManager]
|
||||
WS --> State[WebRtcStateController<br/>Angular Signals]
|
||||
WS --> VSC[VoiceSessionController]
|
||||
WS --> PMF[PeerMediaFacade]
|
||||
WS --> RSSRC[RemoteScreenShareRequestController]
|
||||
|
||||
SC --> SM1[SignalingManager<br/>socket A]
|
||||
SC --> SM2[SignalingManager<br/>socket B]
|
||||
SC --> Coord[ServerSignalingCoordinator]
|
||||
|
||||
PCM --> Conn[create-peer-connection]
|
||||
PCM --> Neg[negotiation]
|
||||
PCM --> DC[data-channel]
|
||||
PCM --> Ping[ping]
|
||||
PCM --> Rec[peer-recovery]
|
||||
PCM --> RS[remote-streams]
|
||||
|
||||
MM --> NR[NoiseReductionManager<br/>RNNoise worklet]
|
||||
SSM --> BrowserCap[Browser capture]
|
||||
SSM --> ElectronCap[Electron capture]
|
||||
SSM --> LinuxCap[Linux audio routing]
|
||||
|
||||
click WS "realtime-session.service.ts" "WebRTCService - composition root" _blank
|
||||
click SC "signaling/signaling-transport-handler.ts" "Routes messages to the right WebSocket" _blank
|
||||
click PCM "peer-connection-manager/peer-connection.manager.ts" "Owns all RTCPeerConnection instances" _blank
|
||||
click MM "media/media.manager.ts" "getUserMedia, mute, deafen, gain pipeline" _blank
|
||||
click SSM "media/screen-share.manager.ts" "Screen capture and per-peer distribution" _blank
|
||||
click State "state/webrtc-state-controller.ts" "Angular Signals for connection state" _blank
|
||||
click VSC "media/voice-session-controller.ts" "Higher-level voice session wrapper" _blank
|
||||
click PMF "streams/peer-media-facade.ts" "Unified API over peers, media, screen share" _blank
|
||||
click RSSRC "streams/remote-screen-share-request-controller.ts" "On-demand screen share delivery" _blank
|
||||
click SM1 "signaling/signaling.manager.ts" "One WebSocket per signaling URL" _blank
|
||||
click SM2 "signaling/signaling.manager.ts" "One WebSocket per signaling URL" _blank
|
||||
click Coord "signaling/server-signaling-coordinator.ts" "Maps peers/servers to signaling URLs" _blank
|
||||
click Conn "peer-connection-manager/connection/create-peer-connection.ts" "RTCPeerConnection factory" _blank
|
||||
click Neg "peer-connection-manager/connection/negotiation.ts" "Offer/answer/ICE with collision handling" _blank
|
||||
click DC "peer-connection-manager/messaging/data-channel.ts" "Ordered data channel for chat + control" _blank
|
||||
click Ping "peer-connection-manager/messaging/ping.ts" "Latency measurement via PING/PONG" _blank
|
||||
click Rec "peer-connection-manager/recovery/peer-recovery.ts" "Disconnect grace period + reconnect loop" _blank
|
||||
click RS "peer-connection-manager/streams/remote-streams.ts" "Classifies incoming tracks" _blank
|
||||
click NR "media/noise-reduction.manager.ts" "RNNoise AudioWorklet graph" _blank
|
||||
click BrowserCap "media/screen-share-platforms/browser-screen-share.capture.ts" "Standard getDisplayMedia" _blank
|
||||
click ElectronCap "media/screen-share-platforms/desktop-electron-screen-share.capture.ts" "Electron source picker" _blank
|
||||
click LinuxCap "media/screen-share-platforms/linux-electron-screen-share.capture.ts" "PulseAudio/PipeWire routing" _blank
|
||||
```
|
||||
|
||||
## Signaling (WebSocket)
|
||||
|
||||
The signaling layer's only job is getting two peers to exchange SDP offers/answers and ICE candidates so they can establish a direct WebRTC connection. Once the peer connection is up, signaling is only used for presence (user joined/left) and reconnection.
|
||||
|
||||
Each signaling URL gets its own `SignalingManager` (one WebSocket each). `SignalingTransportHandler` picks the right socket based on which server the message is for. `ServerSignalingCoordinator` tracks which peers belong to which servers and which signaling URLs, so we know when it is safe to tear down a peer connection after leaving a server.
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant UI as App
|
||||
participant STH as SignalingTransportHandler
|
||||
participant SM as SignalingManager
|
||||
participant WS as WebSocket
|
||||
participant Srv as Signaling Server
|
||||
|
||||
UI->>STH: identify(credentials)
|
||||
STH->>SM: send(identify message)
|
||||
SM->>WS: ws.send(JSON)
|
||||
WS->>Srv: identify
|
||||
|
||||
UI->>STH: joinServer(serverId)
|
||||
STH->>SM: send(join_server)
|
||||
SM->>WS: ws.send(JSON)
|
||||
|
||||
Srv-->>WS: server_users [peerA, peerB]
|
||||
WS-->>SM: onmessage
|
||||
SM-->>STH: messageReceived$
|
||||
STH-->>UI: routes to SignalingMessageHandler
|
||||
```
|
||||
|
||||
### Reconnection
|
||||
|
||||
When the WebSocket drops, `SignalingManager` schedules reconnection with exponential backoff (1s, 2s, 4s, ... up to 30s). On reconnect it replays the cached `identify` and `join_server` messages so presence is restored without the UI doing anything.
|
||||
|
||||
## Peer connection lifecycle
|
||||
|
||||
Peers connect to each other directly with `RTCPeerConnection`. The "initiator" (whoever was already in the room) creates the data channel and audio/video transceivers, then sends an offer. The other side creates an answer.
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant A as Peer A (initiator)
|
||||
participant Sig as Signaling Server
|
||||
participant B as Peer B
|
||||
|
||||
Note over A: createPeerConnection(B, initiator=true)
|
||||
Note over A: Creates data channel + transceivers
|
||||
|
||||
A->>Sig: offer (SDP)
|
||||
Sig->>B: offer (SDP)
|
||||
|
||||
Note over B: createPeerConnection(A, initiator=false)
|
||||
Note over B: setRemoteDescription(offer)
|
||||
Note over B: Attach local audio tracks
|
||||
B->>Sig: answer (SDP)
|
||||
Sig->>A: answer (SDP)
|
||||
Note over A: setRemoteDescription(answer)
|
||||
|
||||
A->>Sig: ICE candidates
|
||||
Sig->>B: ICE candidates
|
||||
B->>Sig: ICE candidates
|
||||
Sig->>A: ICE candidates
|
||||
|
||||
Note over A,B: RTCPeerConnection state -> "connected"
|
||||
Note over A,B: Data channel opens, voice flows
|
||||
```
|
||||
|
||||
### Offer collision
|
||||
|
||||
Both peers might send offers at the same time ("glare"). The negotiation module implements the "polite peer" pattern: one side is designated polite (the non-initiator) and will roll back its local offer if it detects a collision, then accept the remote offer instead. The impolite side ignores the incoming offer.
|
||||
|
||||
### Disconnect recovery
|
||||
|
||||
```mermaid
|
||||
stateDiagram-v2
|
||||
[*] --> Connected
|
||||
Connected --> Disconnected: connectionState = "disconnected"
|
||||
Disconnected --> Connected: recovers within 10s
|
||||
Disconnected --> Failed: grace period expires
|
||||
Failed --> Reconnecting: schedule reconnect (every 5s)
|
||||
Reconnecting --> Connected: new offer accepted
|
||||
Reconnecting --> GaveUp: 12 attempts failed
|
||||
Connected --> Closed: leave / cleanup
|
||||
GaveUp --> [*]
|
||||
Closed --> [*]
|
||||
```
|
||||
|
||||
When a peer connection enters `disconnected`, a 10-second grace period starts. If it recovers on its own (network blip), nothing happens. If it reaches `failed`, the connection is torn down and a reconnect loop starts: a fresh `RTCPeerConnection` is created and a new offer is sent every 5 seconds, up to 12 attempts.
|
||||
|
||||
## Data channel
|
||||
|
||||
A single ordered data channel carries all peer-to-peer messages: chat events, voice/screen state broadcasts, state requests, pings, and screen share control.
|
||||
|
||||
Back-pressure is handled with a high-water mark (4 MB) and low-water mark (1 MB). `sendToPeerBuffered()` waits for the buffer to drain before sending, which matters during file transfers.
|
||||
|
||||
Every 5 seconds a PING message is sent to each peer. The peer responds with PONG carrying the original timestamp, and the round-trip latency is stored in a signal.
|
||||
|
||||
## Media pipeline
|
||||
|
||||
### Voice
|
||||
|
||||
```mermaid
|
||||
graph LR
|
||||
Mic[getUserMedia] --> Raw[Raw mic stream]
|
||||
Raw --> RNN{RNNoise<br/>enabled?}
|
||||
RNN -- yes --> Worklet[AudioWorklet<br/>NoiseSuppressor]
|
||||
RNN -- no --> Gain
|
||||
Worklet --> Gain{Input gain<br/>adjusted?}
|
||||
Gain -- yes --> GainNode[GainNode pipeline]
|
||||
Gain -- no --> Out[Local media stream]
|
||||
GainNode --> Out
|
||||
Out --> Peers[replaceTrack on<br/>all peer audio senders]
|
||||
|
||||
click Mic "media/media.manager.ts" "MediaManager.enableVoice()" _blank
|
||||
click Worklet "media/noise-reduction.manager.ts" "NoiseReductionManager.enable()" _blank
|
||||
click GainNode "media/media.manager.ts" "MediaManager.applyInputGainToCurrentStream()" _blank
|
||||
click Out "media/media.manager.ts" "MediaManager.localMediaStream" _blank
|
||||
click Peers "media/media.manager.ts" "MediaManager.bindLocalTracksToAllPeers()" _blank
|
||||
```
|
||||
|
||||
`MediaManager` grabs the mic with `getUserMedia`, optionally pipes it through the RNNoise AudioWorklet for noise reduction (48 kHz, loaded from `rnnoise-worklet.js`), optionally runs it through a `GainNode` for input volume control, and then pushes the resulting stream to every connected peer via `replaceTrack`.
|
||||
|
||||
Mute just disables the audio track (`track.enabled = false`), the connection stays up. Deafen suppresses incoming audio playback on the local side.
|
||||
|
||||
### Screen share
|
||||
|
||||
Screen capture uses a platform-specific strategy:
|
||||
|
||||
| Platform | Capture method |
|
||||
|---|---|
|
||||
| Browser | `getDisplayMedia` with quality presets |
|
||||
| Windows (Electron) | Electron `desktopCapturer.getSources()` with a source picker UI |
|
||||
| Linux (Electron) | `getDisplayMedia` for video + PulseAudio/PipeWire routing for system audio, keeping voice playback out of the capture |
|
||||
|
||||
Screen share tracks are distributed on-demand. A peer sends a `SCREEN_SHARE_REQUEST` message over the data channel, and only then does the sharer attach screen tracks to that peer's connection and renegotiate.
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant V as Viewer
|
||||
participant S as Sharer
|
||||
|
||||
V->>S: SCREEN_SHARE_REQUEST (data channel)
|
||||
Note over S: Add viewer to requestedViewerPeerIds
|
||||
Note over S: Attach screen video + audio senders
|
||||
S->>V: renegotiate (new offer with screen tracks)
|
||||
V->>S: answer
|
||||
Note over V: ontrack fires with screen video
|
||||
Note over V: Classified as screen share stream
|
||||
Note over V: UI renders video
|
||||
|
||||
V->>S: SCREEN_SHARE_STOP (data channel)
|
||||
Note over S: Remove screen senders
|
||||
S->>V: renegotiate (offer without screen tracks)
|
||||
```
|
||||
|
||||
## State
|
||||
|
||||
`WebRtcStateController` holds all connection state as Angular Signals: `isConnected`, `isMuted`, `isDeafened`, `isScreenSharing`, `connectedPeers`, `peerLatencies`, etc. Managers call update methods on the controller after state changes. Components and facades read these signals reactively.
|
||||
|
||||
## Logging
|
||||
|
||||
`WebRTCLogger` wraps `console.*` with a `[WebRTC]` prefix and a debug flag so logging can be toggled at runtime. `DebugNetworkMetrics` tracks per-peer stats (connection drops, handshake counts, message counts, download rates) for the debug console UI.
|
||||
|
||||
## ICE and STUN
|
||||
|
||||
WebRTC connections require a way for two peers to discover how to reach each other across different networks (NATs, firewalls, etc.). This is handled by ICE, with help from STUN.
|
||||
|
||||
### ICE (Interactive Connectivity Establishment)
|
||||
|
||||
ICE is the mechanism WebRTC uses to establish a connection between peers. Instead of relying on a single network path, it:
|
||||
|
||||
- Gathers multiple possible connection candidates (IP address + port pairs)
|
||||
- Exchanges those candidates via the signaling layer
|
||||
- Attempts connectivity checks between all candidate pairs
|
||||
- Selects the first working path
|
||||
|
||||
Typical candidate types include:
|
||||
|
||||
- **Host candidates** - local network interfaces (e.g. LAN IPs)
|
||||
- **Server reflexive candidates** - public-facing address discovered via STUN
|
||||
- **Relay candidates** - provided by TURN servers (fallback)
|
||||
|
||||
ICE runs automatically as part of `RTCPeerConnection`. As candidates are discovered, they are emitted via `onicecandidate` and must be forwarded to the remote peer through signaling.
|
||||
|
||||
Connection state transitions (e.g. `checking` → `connected` → `failed`) reflect ICE progress.
|
||||
|
||||
### STUN (Session Traversal Utilities for NAT)
|
||||
|
||||
STUN is used to determine a peer's public-facing IP address and port when behind a NAT.
|
||||
|
||||
A STUN server responds with the external address it observes for a request. This allows a peer to generate a **server reflexive candidate**, which can be used by other peers to attempt a direct connection.
|
||||
|
||||
Without STUN, only local (host) candidates would be available, which typically do not work across different networks.
|
||||
|
||||
### TURN
|
||||
|
||||
TURN (Traversal Using Relays around NAT) is a fallback mechanism used in some WebRTC systems when direct peer-to-peer connectivity cannot be established.
|
||||
|
||||
Instead of connecting peers directly:
|
||||
|
||||
- Each peer establishes a connection to a TURN server
|
||||
- The TURN server relays all media and data between peers
|
||||
|
||||
This approach is more reliable in restrictive network environments but introduces additional latency and bandwidth overhead, since all traffic flows through the relay instead of directly between peers.
|
||||
|
||||
Toju/Zoracord does not use TURN and does not have code written to support it.
|
||||
|
||||
### Summary
|
||||
|
||||
- **ICE** coordinates connection establishment by trying multiple network paths
|
||||
- **STUN** provides public-facing address discovery for NAT traversal
- **TURN** would relay traffic when no direct path exists (not used by Toju/Zoracord)
|
||||
18
toju-app/src/app/infrastructure/realtime/index.ts
Normal file
18
toju-app/src/app/infrastructure/realtime/index.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
// Barrel for the realtime (WebRTC/WebSocket) infrastructure layer.
export { WebRTCService } from './realtime-session.service';
export * from './realtime.constants';
export * from './realtime.types';
// NOTE(review): './screen-share.config' is not listed in the README module map — confirm it exists.
export * from './screen-share.config';
export * from './logging/webrtc-logger';
export * from './media/media.manager';
export * from './media/noise-reduction.manager';
export * from './media/screen-share.manager';
export * from './media/voice-session-controller';
export * from './signaling/server-signaling-coordinator';
export * from './signaling/signaling-message-handler';
export * from './signaling/server-membership-signaling-handler';
export * from './signaling/signaling.manager';
export * from './signaling/signaling-transport-handler';
export * from './streams/peer-media-facade';
export * from './streams/remote-screen-share-request-controller';
export * from './state/webrtc-state-controller';
// NOTE(review): README places this file at peer-connection-manager/peer-connection.manager.ts —
// verify this path resolves (possibly missing the subfolder segment).
export * from './peer-connection.manager';
|
||||
@@ -0,0 +1,386 @@
|
||||
// Direction of traffic relative to the local client.
type DebugNetworkMetricDirection = 'inbound' | 'outbound';
// Signaling message kinds that count toward handshake metrics.
type DebugNetworkHandshakeType = 'answer' | 'ice_candidate' | 'offer';

// Sliding window (ms) over which file-transfer throughput is averaged.
const FILE_RATE_WINDOW_MS = 6_000;

/** Per-peer counters for SDP offer/answer and ICE candidate exchange. */
export interface DebugNetworkMetricHandshakeCounts {
  answersReceived: number;
  answersSent: number;
  iceReceived: number;
  iceSent: number;
  offersReceived: number;
  offersSent: number;
}

/** Counts of tracked text messages exchanged over the data channel. */
export interface DebugNetworkMetricTextCounts {
  received: number;
  sent: number;
}

/** Number of live audio/video streams observed for a peer. */
export interface DebugNetworkMetricStreamCounts {
  audio: number;
  video: number;
}

/** Most recent throughput estimates in Mbps (null = no data yet). */
export interface DebugNetworkMetricDownloadRates {
  audioMbps: number | null;
  fileMbps: number | null;
  // Epoch ms of the last audio/video rate update.
  updatedAt: number | null;
  videoMbps: number | null;
}

/** Read-only view of a peer's metrics handed to the debug UI. */
export interface DebugNetworkMetricSnapshot {
  connectionDrops: number;
  downloads: DebugNetworkMetricDownloadRates;
  handshake: DebugNetworkMetricHandshakeCounts;
  lastConnectionState: string | null;
  pingMs: number | null;
  streams: DebugNetworkMetricStreamCounts;
  textMessages: DebugNetworkMetricTextCounts;
}

/** One observed file-transfer chunk: size in bytes at a timestamp (epoch ms). */
interface DebugNetworkFileSample {
  bytes: number;
  timestamp: number;
}

/** Internal mutable state: the snapshot fields plus raw file-chunk samples. */
interface InternalDebugNetworkMetricState extends DebugNetworkMetricSnapshot {
  fileSamples: DebugNetworkFileSample[];
}
|
||||
|
||||
function createHandshakeCounts(): DebugNetworkMetricHandshakeCounts {
|
||||
return {
|
||||
answersReceived: 0,
|
||||
answersSent: 0,
|
||||
iceReceived: 0,
|
||||
iceSent: 0,
|
||||
offersReceived: 0,
|
||||
offersSent: 0
|
||||
};
|
||||
}
|
||||
|
||||
function createTextCounts(): DebugNetworkMetricTextCounts {
|
||||
return {
|
||||
received: 0,
|
||||
sent: 0
|
||||
};
|
||||
}
|
||||
|
||||
function createStreamCounts(): DebugNetworkMetricStreamCounts {
|
||||
return {
|
||||
audio: 0,
|
||||
video: 0
|
||||
};
|
||||
}
|
||||
|
||||
function createDownloadRates(): DebugNetworkMetricDownloadRates {
|
||||
return {
|
||||
audioMbps: null,
|
||||
fileMbps: null,
|
||||
updatedAt: null,
|
||||
videoMbps: null
|
||||
};
|
||||
}
|
||||
|
||||
function getRecord(value: unknown): Record<string, unknown> | null {
|
||||
if (!value || typeof value !== 'object' || Array.isArray(value))
|
||||
return null;
|
||||
|
||||
return value as Record<string, unknown>;
|
||||
}
|
||||
|
||||
function getString(record: Record<string, unknown>, key: string): string | null {
|
||||
const value = record[key];
|
||||
|
||||
return typeof value === 'string' ? value : null;
|
||||
}
|
||||
|
||||
function isTrackedTextMessageType(type: string | null): boolean {
|
||||
return type === 'chat-message' || type === 'message';
|
||||
}
|
||||
|
||||
/**
 * Accumulates per-peer debugging metrics — connection drops, handshake and
 * text-message counters, stream counts, download rates, and ping — keyed by
 * peer ID. State is held only in an in-memory Map; nothing is persisted.
 */
class DebugNetworkMetricsStore {
  // One mutable record per peer ID, created lazily by ensure().
  private readonly metrics = new Map<string, InternalDebugNetworkMetricState>();

  /**
   * Track a connection-state transition for a peer. Counts a "drop" when
   * entering 'disconnected' or 'failed' from a different state, and zeroes the
   * stream counters on closed/failed/disconnected.
   * NOTE(review): a disconnected -> failed transition increments
   * connectionDrops twice (both states pass the check) — confirm intended.
   */
  recordConnectionState(peerId: string, state: string): void {
    if (!peerId || !state)
      return;

    const metric = this.ensure(peerId);

    if ((state === 'disconnected' || state === 'failed') && metric.lastConnectionState !== state)
      metric.connectionDrops += 1;

    metric.lastConnectionState = state;

    if (state === 'closed' || state === 'failed' || state === 'disconnected') {
      metric.streams.audio = 0;
      metric.streams.video = 0;
    }
  }

  /** Store the latest round-trip latency, clamped to >= 0 and rounded to ms. */
  recordPing(peerId: string, pingMs: number): void {
    if (!peerId || !Number.isFinite(pingMs))
      return;

    this.ensure(peerId).pingMs = Math.max(0, Math.round(pingMs));
  }

  /** Count a data-channel payload as a text message when its type is tracked. */
  recordDataChannelPayload(
    peerId: string,
    payload: Record<string, unknown>,
    direction: DebugNetworkMetricDirection
  ): void {
    if (!peerId)
      return;

    const type = getString(payload, 'type');

    if (isTrackedTextMessageType(type))
      this.incrementText(peerId, direction);
  }

  /**
   * Count a signaling message toward handshake metrics. Only offer / answer /
   * ice_candidate payloads are tracked; the peer ID is read from targetUserId
   * for outbound messages and fromUserId for inbound ones.
   */
  recordSignalingPayload(payload: unknown, direction: DebugNetworkMetricDirection): void {
    const record = getRecord(payload);

    if (!record)
      return;

    const type = getString(record, 'type');

    if (type !== 'offer' && type !== 'answer' && type !== 'ice_candidate')
      return;

    const peerId = direction === 'outbound'
      ? getString(record, 'targetUserId')
      : getString(record, 'fromUserId');

    if (!peerId)
      return;

    this.incrementHandshake(peerId, type, direction);
  }

  /** Update live stream counts; only finite numeric fields are applied. */
  recordStreams(peerId: string, streams: Partial<DebugNetworkMetricStreamCounts>): void {
    if (!peerId)
      return;

    const metric = this.ensure(peerId);

    if (typeof streams.audio === 'number' && Number.isFinite(streams.audio))
      metric.streams.audio = Math.max(0, Math.round(streams.audio));

    if (typeof streams.video === 'number' && Number.isFinite(streams.video))
      metric.streams.video = Math.max(0, Math.round(streams.video));
  }

  /**
   * Update audio/video download rates (Mbps). `undefined` fields are left
   * untouched; `updatedAt` is always stamped with `timestamp`.
   */
  recordDownloadRates(
    peerId: string,
    rates: { audioMbps?: number | null; videoMbps?: number | null },
    timestamp: number = Date.now()
  ): void {
    if (!peerId)
      return;

    const metric = this.ensure(peerId);

    if (rates.audioMbps !== undefined)
      metric.downloads.audioMbps = this.sanitizeRate(rates.audioMbps);

    if (rates.videoMbps !== undefined)
      metric.downloads.videoMbps = this.sanitizeRate(rates.videoMbps);

    metric.downloads.updatedAt = timestamp;
  }

  /**
   * Record one received file chunk and recompute the file throughput over the
   * FILE_RATE_WINDOW_MS sliding window (older samples are discarded).
   */
  recordFileChunk(peerId: string, bytes: number, timestamp: number = Date.now()): void {
    if (!peerId || !Number.isFinite(bytes) || bytes <= 0)
      return;

    const metric = this.ensure(peerId);

    metric.fileSamples.push({
      bytes,
      timestamp
    });

    // Keep only samples inside the sliding window.
    metric.fileSamples = metric.fileSamples.filter(
      (sample) => timestamp - sample.timestamp <= FILE_RATE_WINDOW_MS
    );

    metric.downloads.fileMbps = this.calculateFileMbps(metric.fileSamples, timestamp);
  }

  /**
   * Return a defensive copy of a peer's metrics, or null if the peer is
   * unknown. Side effect: expires stale file samples and refreshes fileMbps
   * before the snapshot is taken.
   */
  getSnapshot(peerId: string): DebugNetworkMetricSnapshot | null {
    const metric = this.metrics.get(peerId);

    if (!metric)
      return null;

    const now = Date.now();

    metric.fileSamples = metric.fileSamples.filter(
      (sample) => now - sample.timestamp <= FILE_RATE_WINDOW_MS
    );

    metric.downloads.fileMbps = this.calculateFileMbps(metric.fileSamples, now);

    // Shallow-copy each nested record so callers cannot mutate internal state.
    return {
      connectionDrops: metric.connectionDrops,
      downloads: { ...metric.downloads },
      handshake: { ...metric.handshake },
      lastConnectionState: metric.lastConnectionState,
      pingMs: metric.pingMs,
      streams: { ...metric.streams },
      textMessages: { ...metric.textMessages }
    };
  }

  /** Get the peer's metric record, creating an empty one on first use. */
  private ensure(peerId: string): InternalDebugNetworkMetricState {
    const existing = this.metrics.get(peerId);

    if (existing)
      return existing;

    const created: InternalDebugNetworkMetricState = {
      connectionDrops: 0,
      downloads: createDownloadRates(),
      fileSamples: [],
      handshake: createHandshakeCounts(),
      lastConnectionState: null,
      pingMs: null,
      streams: createStreamCounts(),
      textMessages: createTextCounts()
    };

    this.metrics.set(peerId, created);

    return created;
  }

  /** Bump the sent/received counter matching the handshake message type. */
  private incrementHandshake(
    peerId: string,
    type: DebugNetworkHandshakeType,
    direction: DebugNetworkMetricDirection,
    count = 1
  ): void {
    if (!peerId || count <= 0)
      return;

    const metric = this.ensure(peerId);

    switch (type) {
      case 'offer':
        if (direction === 'outbound')
          metric.handshake.offersSent += count;
        else
          metric.handshake.offersReceived += count;

        return;

      case 'answer':
        if (direction === 'outbound')
          metric.handshake.answersSent += count;
        else
          metric.handshake.answersReceived += count;

        return;

      case 'ice_candidate':
        if (direction === 'outbound')
          metric.handshake.iceSent += count;
        else
          metric.handshake.iceReceived += count;

        return;
    }
  }

  /** Bump the sent or received text-message counter. */
  private incrementText(
    peerId: string,
    direction: DebugNetworkMetricDirection,
    count = 1
  ): void {
    if (!peerId || count <= 0)
      return;

    const metric = this.ensure(peerId);

    if (direction === 'outbound') {
      metric.textMessages.sent += count;
      return;
    }

    metric.textMessages.received += count;
  }

  /**
   * Average throughput of the sampled chunks in Mbps:
   * bytes * 8 / durationMs / 1000 == bits / (durationMs * 1000) == Mbps.
   * Duration is floored at 1s so a short burst doesn't inflate the rate.
   */
  private calculateFileMbps(samples: DebugNetworkFileSample[], now: number): number | null {
    if (samples.length === 0)
      return null;

    const totalBytes = samples.reduce((sum, sample) => sum + sample.bytes, 0);
    const earliestTimestamp = samples[0]?.timestamp ?? now;
    const durationMs = Math.max(1_000, now - earliestTimestamp);

    return this.sanitizeRate(totalBytes * 8 / durationMs / 1000);
  }

  /** Reject non-finite/negative rates as null; round to 3 decimal places. */
  private sanitizeRate(value: number | null | undefined): number | null {
    if (typeof value !== 'number' || !Number.isFinite(value) || value < 0)
      return null;

    return Math.round(value * 1000) / 1000;
  }
}
|
||||
|
||||
// Module-level singleton backing the exported helpers below.
const debugNetworkMetricsStore = new DebugNetworkMetricsStore();

/** Record a connection-state transition for a peer. */
export function recordDebugNetworkConnectionState(peerId: string, state: string): void {
  debugNetworkMetricsStore.recordConnectionState(peerId, state);
}

/** Record the latest round-trip latency for a peer (ms). */
export function recordDebugNetworkPing(peerId: string, pingMs: number): void {
  debugNetworkMetricsStore.recordPing(peerId, pingMs);
}

/** Count a data-channel payload toward the peer's text-message metrics. */
export function recordDebugNetworkDataChannelPayload(
  peerId: string,
  payload: Record<string, unknown>,
  direction: DebugNetworkMetricDirection
): void {
  debugNetworkMetricsStore.recordDataChannelPayload(peerId, payload, direction);
}

/** Count a signaling message (offer/answer/ICE) toward handshake metrics. */
export function recordDebugNetworkSignalingPayload(
  payload: unknown,
  direction: DebugNetworkMetricDirection
): void {
  debugNetworkMetricsStore.recordSignalingPayload(payload, direction);
}

/** Update the live audio/video stream counts for a peer. */
export function recordDebugNetworkStreams(
  peerId: string,
  streams: Partial<DebugNetworkMetricStreamCounts>
): void {
  debugNetworkMetricsStore.recordStreams(peerId, streams);
}

/** Update a peer's audio/video download-rate estimates (Mbps). */
export function recordDebugNetworkDownloadRates(
  peerId: string,
  rates: { audioMbps?: number | null; videoMbps?: number | null },
  timestamp?: number
): void {
  debugNetworkMetricsStore.recordDownloadRates(peerId, rates, timestamp);
}

/** Record one received file chunk for throughput estimation. */
export function recordDebugNetworkFileChunk(
  peerId: string,
  bytes: number,
  timestamp?: number
): void {
  debugNetworkMetricsStore.recordFileChunk(peerId, bytes, timestamp);
}

/** Read a defensive copy of a peer's metrics, or null if unknown. */
export function getDebugNetworkMetricSnapshot(peerId: string): DebugNetworkMetricSnapshot | null {
  return debugNetworkMetricsStore.getSnapshot(peerId);
}
|
||||
@@ -0,0 +1,129 @@
|
||||
/* eslint-disable max-statements-per-line */
|
||||
/**
|
||||
* Lightweight logging utility for the WebRTC subsystem.
|
||||
* All log lines are prefixed with `[WebRTC]`.
|
||||
*/
|
||||
export interface WebRTCTrafficDetails {
  /** Arbitrary additional fields are allowed alongside the known ones. */
  [key: string]: unknown;
  /** Payload size in bytes, when known. */
  bytes?: number;
  /** Data channel's `bufferedAmount` at log time, when relevant. */
  bufferedAmount?: number;
  /** Label of the RTCDataChannel involved, when relevant. */
  channelLabel?: string;
  /** Application-level message identifier, when relevant. */
  messageId?: string;
  /** Truncated / summarized view of the payload for logging. */
  payloadPreview?: unknown;
  /** Peer this traffic belongs to, when relevant. */
  peerId?: string;
  /** Channel or socket ready state at log time (numeric or string form). */
  readyState?: number | string | null;
  /** Room the traffic relates to, when relevant. */
  roomId?: string;
  /** Addressed peer for directed messages, when relevant. */
  targetPeerId?: string;
  /** Message/event type discriminator, when relevant. */
  type?: string;
  /** Endpoint URL involved (e.g. for signaling), when relevant. */
  url?: string | null;
}
|
||||
|
||||
export class WebRTCLogger {
|
||||
constructor(private readonly isEnabled: boolean | (() => boolean) = true) {}
|
||||
|
||||
/** Informational log (only when debug is enabled). */
|
||||
info(prefix: string, ...args: unknown[]): void {
|
||||
if (!this.isDebugEnabled())
|
||||
return;
|
||||
|
||||
try { console.log(`[WebRTC] ${prefix}`, ...args); } catch { /* swallow */ }
|
||||
}
|
||||
|
||||
/** Warning log (only when debug is enabled). */
|
||||
warn(prefix: string, ...args: unknown[]): void {
|
||||
if (!this.isDebugEnabled())
|
||||
return;
|
||||
|
||||
try { console.warn(`[WebRTC] ${prefix}`, ...args); } catch { /* swallow */ }
|
||||
}
|
||||
|
||||
/** Structured network-traffic log for signaling and data-channel activity. */
|
||||
traffic(scope: 'data-channel' | 'signaling', direction: 'inbound' | 'outbound', details: WebRTCTrafficDetails): void {
|
||||
this.info(`[${scope}] ${direction}`, details);
|
||||
}
|
||||
|
||||
/** Error log (always emitted regardless of debug flag). */
|
||||
error(prefix: string, err: unknown, extra?: Record<string, unknown>): void {
|
||||
const errorDetails = this.extractErrorDetails(err);
|
||||
const payload = {
|
||||
name: errorDetails.name,
|
||||
message: errorDetails.message,
|
||||
stack: errorDetails.stack,
|
||||
...extra
|
||||
};
|
||||
|
||||
try { console.error(`[WebRTC] ${prefix}`, payload); } catch { /* swallow */ }
|
||||
}
|
||||
|
||||
/** Attach lifecycle event listeners to a track for debugging. */
|
||||
attachTrackDiagnostics(track: MediaStreamTrack, label: string): void {
|
||||
const settings = typeof track.getSettings === 'function' ? track.getSettings() : {} as MediaTrackSettings;
|
||||
|
||||
this.info(`Track attached: ${label}`, {
|
||||
id: track.id,
|
||||
kind: track.kind,
|
||||
readyState: track.readyState,
|
||||
contentHint: track.contentHint,
|
||||
settings
|
||||
});
|
||||
|
||||
track.addEventListener('ended', () => this.warn(`Track ended: ${label}`, { id: track.id,
|
||||
kind: track.kind }));
|
||||
|
||||
track.addEventListener('mute', () => this.warn(`Track muted: ${label}`, { id: track.id,
|
||||
kind: track.kind }));
|
||||
|
||||
track.addEventListener('unmute', () => this.info(`Track unmuted: ${label}`, { id: track.id,
|
||||
kind: track.kind }));
|
||||
}
|
||||
|
||||
/** Log a MediaStream summary and attach diagnostics to every track. */
|
||||
logStream(label: string, stream: MediaStream | null): void {
|
||||
if (!stream) {
|
||||
this.warn(`Stream missing: ${label}`);
|
||||
return;
|
||||
}
|
||||
|
||||
const audioTracks = stream.getAudioTracks();
|
||||
const videoTracks = stream.getVideoTracks();
|
||||
|
||||
this.info(`Stream ready: ${label}`, {
|
||||
id: stream.id,
|
||||
audioTrackCount: audioTracks.length,
|
||||
videoTrackCount: videoTracks.length,
|
||||
allTrackIds: stream.getTracks().map(streamTrack => ({ id: streamTrack.id,
|
||||
kind: streamTrack.kind }))
|
||||
});
|
||||
|
||||
audioTracks.forEach((audioTrack, index) => this.attachTrackDiagnostics(audioTrack, `${label}:audio#${index}`));
|
||||
videoTracks.forEach((videoTrack, index) => this.attachTrackDiagnostics(videoTrack, `${label}:video#${index}`));
|
||||
}
|
||||
|
||||
private isDebugEnabled(): boolean {
|
||||
return typeof this.isEnabled === 'function'
|
||||
? this.isEnabled()
|
||||
: this.isEnabled;
|
||||
}
|
||||
|
||||
private extractErrorDetails(err: unknown): {
|
||||
name?: unknown;
|
||||
message?: unknown;
|
||||
stack?: unknown;
|
||||
} {
|
||||
if (typeof err !== 'object' || err === null) {
|
||||
return {};
|
||||
}
|
||||
|
||||
const candidate = err as {
|
||||
name?: unknown;
|
||||
message?: unknown;
|
||||
stack?: unknown;
|
||||
};
|
||||
|
||||
return {
|
||||
name: candidate.name,
|
||||
message: candidate.message,
|
||||
stack: candidate.stack
|
||||
};
|
||||
}
|
||||
}
|
||||
697
toju-app/src/app/infrastructure/realtime/media/media.manager.ts
Normal file
697
toju-app/src/app/infrastructure/realtime/media/media.manager.ts
Normal file
@@ -0,0 +1,697 @@
|
||||
/* eslint-disable @typescript-eslint/member-ordering, @typescript-eslint/no-unused-vars, id-length */
|
||||
/**
|
||||
* Manages local voice media: getUserMedia, mute, deafen,
|
||||
* attaching/detaching audio tracks to peer connections, bitrate tuning,
|
||||
* and optional RNNoise-based noise reduction.
|
||||
*/
|
||||
import { Subject } from 'rxjs';
|
||||
import { ChatEvent } from '../../../shared-kernel';
|
||||
import { LatencyProfile } from '../realtime.constants';
|
||||
import { PeerData } from '../realtime.types';
|
||||
import { WebRTCLogger } from '../logging/webrtc-logger';
|
||||
import { NoiseReductionManager } from './noise-reduction.manager';
|
||||
import {
|
||||
TRACK_KIND_AUDIO,
|
||||
TRACK_KIND_VIDEO,
|
||||
TRANSCEIVER_SEND_RECV,
|
||||
TRANSCEIVER_RECV_ONLY,
|
||||
TRANSCEIVER_INACTIVE,
|
||||
AUDIO_BITRATE_MIN_BPS,
|
||||
AUDIO_BITRATE_MAX_BPS,
|
||||
KBPS_TO_BPS,
|
||||
LATENCY_PROFILE_BITRATES,
|
||||
VOLUME_MIN,
|
||||
VOLUME_MAX,
|
||||
VOICE_HEARTBEAT_INTERVAL_MS,
|
||||
DEFAULT_DISPLAY_NAME,
|
||||
P2P_TYPE_VOICE_STATE
|
||||
} from '../realtime.constants';
|
||||
|
||||
/**
|
||||
* Callbacks the MediaManager needs from the owning service / peer manager.
|
||||
*/
|
||||
export interface MediaManagerCallbacks {
  /** All active peer connections (for attaching tracks). */
  getActivePeers(): Map<string, PeerData>;
  /** Trigger SDP renegotiation for a specific peer. */
  renegotiate(peerId: string): Promise<void>;
  /** Broadcast a message to all peers. */
  broadcastMessage(event: ChatEvent): void;
  /** Identity ID included in broadcast voice-state events. */
  getIdentifyOderId(): string;
  /** Display name included in broadcast voice-state events. */
  getIdentifyDisplayName(): string;
}
|
||||
|
||||
export class MediaManager {
|
||||
/** The stream sent to peers (may be raw or denoised). */
|
||||
private localMediaStream: MediaStream | null = null;
|
||||
|
||||
/**
|
||||
* The raw microphone stream from `getUserMedia`.
|
||||
* Kept separately so noise reduction can be toggled
|
||||
* without re-acquiring the mic.
|
||||
*/
|
||||
private rawMicStream: MediaStream | null = null;
|
||||
|
||||
/** Remote audio output volume (0-1). */
|
||||
private remoteAudioVolume = VOLUME_MAX;
|
||||
|
||||
// -- Input gain pipeline (mic volume) --
|
||||
/** The stream BEFORE gain is applied (for identity checks). */
|
||||
private preGainStream: MediaStream | null = null;
|
||||
private inputGainCtx: AudioContext | null = null;
|
||||
private inputGainSourceNode: MediaStreamAudioSourceNode | null = null;
|
||||
private inputGainNode: GainNode | null = null;
|
||||
private inputGainDest: MediaStreamAudioDestinationNode | null = null;
|
||||
/** Normalised 0-1 input gain (1 = 100%). */
|
||||
private inputGainVolume = 1.0;
|
||||
|
||||
/** Voice-presence heartbeat timer. */
|
||||
private voicePresenceTimer: ReturnType<typeof setInterval> | null = null;
|
||||
|
||||
/** Emitted when voice is successfully connected. */
|
||||
readonly voiceConnected$ = new Subject<void>();
|
||||
|
||||
/** RNNoise noise-reduction processor. */
|
||||
private readonly noiseReduction: NoiseReductionManager;
|
||||
|
||||
/**
|
||||
* Tracks the user's *desired* noise-reduction state, independent of
|
||||
* whether the worklet is actually running. This lets us honour the
|
||||
* preference even when it is set before the mic stream is acquired.
|
||||
*/
|
||||
private _noiseReductionDesired = true;
|
||||
|
||||
// State tracked locally (the service exposes these via signals)
|
||||
private isVoiceActive = false;
|
||||
private isMicMuted = false;
|
||||
private isSelfDeafened = false;
|
||||
|
||||
/** Current voice channel room ID (set when joining voice). */
|
||||
private currentVoiceRoomId: string | undefined;
|
||||
/** Current voice channel server ID (set when joining voice). */
|
||||
private currentVoiceServerId: string | undefined;
|
||||
|
||||
constructor(
|
||||
private readonly logger: WebRTCLogger,
|
||||
private callbacks: MediaManagerCallbacks
|
||||
) {
|
||||
this.noiseReduction = new NoiseReductionManager(logger);
|
||||
}
|
||||
|
||||
/**
|
||||
* Replace the callback set at runtime.
|
||||
* Needed because of circular initialisation between managers.
|
||||
*
|
||||
* @param nextCallbacks - The new callback interface to wire into this manager.
|
||||
*/
|
||||
setCallbacks(nextCallbacks: MediaManagerCallbacks): void {
|
||||
this.callbacks = nextCallbacks;
|
||||
}
|
||||
|
||||
/** Returns the current local media stream, or `null` if voice is disabled. */
|
||||
getLocalStream(): MediaStream | null {
|
||||
return this.localMediaStream;
|
||||
}
|
||||
/** Returns the raw microphone stream before processing, if available. */
|
||||
getRawMicStream(): MediaStream | null {
|
||||
return this.rawMicStream;
|
||||
}
|
||||
/** Whether voice is currently active (mic captured). */
|
||||
getIsVoiceActive(): boolean {
|
||||
return this.isVoiceActive;
|
||||
}
|
||||
/** Whether the local microphone is muted. */
|
||||
getIsMicMuted(): boolean {
|
||||
return this.isMicMuted;
|
||||
}
|
||||
/** Whether the user has self-deafened. */
|
||||
getIsSelfDeafened(): boolean {
|
||||
return this.isSelfDeafened;
|
||||
}
|
||||
/** Current remote audio output volume (normalised 0-1). */
|
||||
getRemoteAudioVolume(): number {
|
||||
return this.remoteAudioVolume;
|
||||
}
|
||||
/** The voice channel room ID, if currently in voice. */
|
||||
getCurrentVoiceRoomId(): string | undefined {
|
||||
return this.currentVoiceRoomId;
|
||||
}
|
||||
/** The voice channel server ID, if currently in voice. */
|
||||
getCurrentVoiceServerId(): string | undefined {
|
||||
return this.currentVoiceServerId;
|
||||
}
|
||||
/** Whether the user wants noise reduction (may or may not be running yet). */
|
||||
getIsNoiseReductionEnabled(): boolean {
|
||||
return this._noiseReductionDesired;
|
||||
}
|
||||
|
||||
/**
|
||||
* Request microphone access via `getUserMedia` and bind the resulting
|
||||
* audio track to every active peer connection.
|
||||
*
|
||||
* If a local stream already exists it is stopped first.
|
||||
*
|
||||
* @returns The captured {@link MediaStream}.
|
||||
* @throws If `getUserMedia` is unavailable (non-secure context) or the user denies access.
|
||||
*/
|
||||
async enableVoice(): Promise<MediaStream> {
|
||||
try {
|
||||
// Stop any existing stream first
|
||||
if (this.localMediaStream) {
|
||||
this.logger.info('Stopping existing local stream before enabling voice');
|
||||
this.localMediaStream.getTracks().forEach((track) => track.stop());
|
||||
this.localMediaStream = null;
|
||||
}
|
||||
|
||||
const mediaConstraints: MediaStreamConstraints = {
|
||||
audio: {
|
||||
echoCancellation: true,
|
||||
noiseSuppression: !this._noiseReductionDesired,
|
||||
autoGainControl: true
|
||||
},
|
||||
video: false
|
||||
};
|
||||
|
||||
this.logger.info('getUserMedia constraints', mediaConstraints);
|
||||
|
||||
if (!navigator.mediaDevices?.getUserMedia) {
|
||||
throw new Error(
|
||||
'navigator.mediaDevices is not available. ' +
|
||||
'This requires a secure context (HTTPS or localhost). ' +
|
||||
'If accessing from an external device, use HTTPS.'
|
||||
);
|
||||
}
|
||||
|
||||
const stream = await navigator.mediaDevices.getUserMedia(mediaConstraints);
|
||||
|
||||
this.rawMicStream = stream;
|
||||
|
||||
// If the user wants noise reduction, pipe through the denoiser
|
||||
this.localMediaStream = this._noiseReductionDesired
|
||||
? await this.noiseReduction.enable(stream)
|
||||
: stream;
|
||||
|
||||
// Apply input gain (mic volume) before sending to peers
|
||||
this.applyInputGainToCurrentStream();
|
||||
|
||||
this.logger.logStream('localVoice', this.localMediaStream);
|
||||
|
||||
this.bindLocalTracksToAllPeers();
|
||||
|
||||
this.isVoiceActive = true;
|
||||
this.voiceConnected$.next();
|
||||
return this.localMediaStream;
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to getUserMedia', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop all local media tracks and remove audio senders from peers.
|
||||
* The peer connections themselves are kept alive.
|
||||
*/
|
||||
disableVoice(): void {
|
||||
this.noiseReduction.disable();
|
||||
this.teardownInputGain();
|
||||
|
||||
// Stop the raw mic tracks (the denoised stream's tracks are
|
||||
// derived nodes and will stop once their source is gone).
|
||||
if (this.rawMicStream) {
|
||||
this.rawMicStream.getTracks().forEach((track) => track.stop());
|
||||
this.rawMicStream = null;
|
||||
}
|
||||
|
||||
this.localMediaStream = null;
|
||||
|
||||
// Remove audio senders but keep connections alive
|
||||
this.callbacks.getActivePeers().forEach((peerData) => {
|
||||
const senders = peerData.connection.getSenders();
|
||||
|
||||
senders.forEach((sender) => {
|
||||
if (sender.track?.kind === TRACK_KIND_AUDIO) {
|
||||
peerData.connection.removeTrack(sender);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
this.isVoiceActive = false;
|
||||
this.currentVoiceRoomId = undefined;
|
||||
this.currentVoiceServerId = undefined;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the local stream from an external source (e.g. voice-controls component).
|
||||
*
|
||||
* The raw stream is saved so noise reduction can be toggled on/off later.
|
||||
* If noise reduction is already enabled the stream is piped through the
|
||||
* denoiser before being sent to peers.
|
||||
*/
|
||||
async setLocalStream(stream: MediaStream): Promise<void> {
|
||||
this.rawMicStream = stream;
|
||||
this.logger.info('setLocalStream - noiseReductionDesired =', this._noiseReductionDesired);
|
||||
|
||||
// Pipe through the denoiser when the user wants noise reduction
|
||||
if (this._noiseReductionDesired) {
|
||||
this.logger.info('Piping new stream through noise reduction');
|
||||
this.localMediaStream = await this.noiseReduction.enable(stream);
|
||||
} else {
|
||||
this.localMediaStream = stream;
|
||||
}
|
||||
|
||||
// Apply input gain (mic volume) before sending to peers
|
||||
this.applyInputGainToCurrentStream();
|
||||
|
||||
this.bindLocalTracksToAllPeers();
|
||||
this.isVoiceActive = true;
|
||||
this.voiceConnected$.next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Toggle the local microphone mute state.
|
||||
*
|
||||
* @param muted - Explicit state; if omitted, the current state is toggled.
|
||||
*/
|
||||
toggleMute(muted?: boolean): void {
|
||||
const newMutedState = muted !== undefined ? muted : !this.isMicMuted;
|
||||
|
||||
this.isMicMuted = newMutedState;
|
||||
this.applyCurrentMuteState();
|
||||
}
|
||||
|
||||
/**
|
||||
* Toggle self-deafen (suppress all incoming audio playback).
|
||||
*
|
||||
* @param deafened - Explicit state; if omitted, the current state is toggled.
|
||||
*/
|
||||
toggleDeafen(deafened?: boolean): void {
|
||||
this.isSelfDeafened = deafened !== undefined ? deafened : !this.isSelfDeafened;
|
||||
}
|
||||
|
||||
/**
|
||||
* Toggle RNNoise noise reduction on the local microphone.
|
||||
*
|
||||
* When enabled the raw mic stream is routed through the RNNoise
|
||||
* AudioWorklet and peer senders are updated with the denoised track.
|
||||
* When disabled the original raw mic track is restored.
|
||||
*
|
||||
* @param enabled - Explicit state; if omitted, the current state is toggled.
|
||||
*/
|
||||
async toggleNoiseReduction(enabled?: boolean): Promise<void> {
|
||||
const shouldEnable = enabled !== undefined ? enabled : !this._noiseReductionDesired;
|
||||
|
||||
// Always persist the preference
|
||||
this._noiseReductionDesired = shouldEnable;
|
||||
this.logger.info(
|
||||
'Noise reduction desired =',
|
||||
shouldEnable,
|
||||
'| worklet active =',
|
||||
this.noiseReduction.isEnabled
|
||||
);
|
||||
|
||||
// Do not update the browser's built-in noiseSuppression constraint on the
|
||||
// live mic track here. Chromium may share the underlying capture source,
|
||||
// which can leak the constraint change into other active streams. We only
|
||||
// apply the browser constraint when the microphone stream is acquired.
|
||||
|
||||
if (shouldEnable === this.noiseReduction.isEnabled)
|
||||
return;
|
||||
|
||||
if (shouldEnable) {
|
||||
if (!this.rawMicStream) {
|
||||
this.logger.warn(
|
||||
'Cannot enable noise reduction - no mic stream yet (will apply on connect)'
|
||||
);
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.info('Enabling noise reduction on raw mic stream');
|
||||
const cleanStream = await this.noiseReduction.enable(this.rawMicStream);
|
||||
|
||||
this.localMediaStream = cleanStream;
|
||||
} else {
|
||||
this.noiseReduction.disable();
|
||||
|
||||
if (this.rawMicStream) {
|
||||
this.localMediaStream = this.rawMicStream;
|
||||
}
|
||||
}
|
||||
|
||||
// Re-apply input gain to the (possibly new) stream
|
||||
this.applyInputGainToCurrentStream();
|
||||
|
||||
// Propagate the new audio track to every peer connection
|
||||
this.bindLocalTracksToAllPeers();
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the output volume for remote audio.
|
||||
*
|
||||
* @param volume - Normalised value: 0 = silent, 1 = 100%, up to 2 = 200%.
|
||||
*/
|
||||
setOutputVolume(volume: number): void {
|
||||
this.remoteAudioVolume = Math.max(VOLUME_MIN, Math.min(2, volume));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the input (microphone) volume.
|
||||
*
|
||||
* If a local stream is active the gain node is updated in real time.
|
||||
* If no stream exists yet the value is stored and applied on connect.
|
||||
*
|
||||
* @param volume - Normalised 0-1 (0 = silent, 1 = 100%).
|
||||
*/
|
||||
setInputVolume(volume: number): void {
|
||||
this.inputGainVolume = Math.max(0, Math.min(1, volume));
|
||||
|
||||
if (this.inputGainNode) {
|
||||
// Pipeline already exists - just update the gain value
|
||||
this.inputGainNode.gain.value = this.inputGainVolume;
|
||||
} else if (this.localMediaStream) {
|
||||
// Stream is active but gain pipeline hasn't been created yet
|
||||
this.applyInputGainToCurrentStream();
|
||||
this.bindLocalTracksToAllPeers();
|
||||
}
|
||||
}
|
||||
|
||||
/** Get current input gain value (0-1). */
|
||||
getInputVolume(): number {
|
||||
return this.inputGainVolume;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the maximum audio bitrate on every active peer's audio sender.
|
||||
*
|
||||
* The value is clamped between {@link AUDIO_BITRATE_MIN_BPS} and
|
||||
* {@link AUDIO_BITRATE_MAX_BPS}.
|
||||
*
|
||||
* @param kbps - Target bitrate in kilobits per second.
|
||||
*/
|
||||
async setAudioBitrate(kbps: number): Promise<void> {
|
||||
const targetBps = Math.max(
|
||||
AUDIO_BITRATE_MIN_BPS,
|
||||
Math.min(AUDIO_BITRATE_MAX_BPS, Math.floor(kbps * KBPS_TO_BPS))
|
||||
);
|
||||
|
||||
this.callbacks.getActivePeers().forEach(async (peerData) => {
|
||||
const sender =
|
||||
peerData.audioSender ||
|
||||
peerData.connection.getSenders().find((s) => s.track?.kind === TRACK_KIND_AUDIO);
|
||||
|
||||
if (!sender?.track)
|
||||
return;
|
||||
|
||||
if (peerData.connection.signalingState !== 'stable')
|
||||
return;
|
||||
|
||||
let params: RTCRtpSendParameters;
|
||||
|
||||
try {
|
||||
params = sender.getParameters();
|
||||
} catch (error) {
|
||||
this.logger.warn('getParameters failed; skipping bitrate apply', error);
|
||||
return;
|
||||
}
|
||||
|
||||
params.encodings = params.encodings || [{}];
|
||||
params.encodings[0].maxBitrate = targetBps;
|
||||
|
||||
try {
|
||||
await sender.setParameters(params);
|
||||
this.logger.info('Applied audio bitrate', { targetBps });
|
||||
} catch (error) {
|
||||
this.logger.warn('Failed to set audio bitrate', error);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply a named latency profile that maps to a predefined bitrate.
|
||||
*
|
||||
* @param profile - One of `'low'`, `'balanced'`, or `'high'`.
|
||||
*/
|
||||
async setLatencyProfile(profile: LatencyProfile): Promise<void> {
|
||||
await this.setAudioBitrate(LATENCY_PROFILE_BITRATES[profile]);
|
||||
}
|
||||
|
||||
/**
|
||||
* Start periodically broadcasting voice presence to all peers.
|
||||
*
|
||||
* Optionally records the voice room/server so heartbeats include them.
|
||||
*
|
||||
* @param roomId - The voice channel room ID.
|
||||
* @param serverId - The voice channel server ID.
|
||||
*/
|
||||
startVoiceHeartbeat(roomId?: string, serverId?: string): void {
|
||||
this.stopVoiceHeartbeat();
|
||||
|
||||
// Persist voice channel context so heartbeats and state snapshots include it
|
||||
if (roomId !== undefined)
|
||||
this.currentVoiceRoomId = roomId;
|
||||
|
||||
if (serverId !== undefined)
|
||||
this.currentVoiceServerId = serverId;
|
||||
|
||||
this.voicePresenceTimer = setInterval(() => {
|
||||
if (this.isVoiceActive) {
|
||||
this.broadcastVoicePresence();
|
||||
}
|
||||
}, VOICE_HEARTBEAT_INTERVAL_MS);
|
||||
|
||||
// Also send an immediate heartbeat
|
||||
if (this.isVoiceActive) {
|
||||
this.broadcastVoicePresence();
|
||||
}
|
||||
}
|
||||
|
||||
/** Stop the voice-presence heartbeat timer. */
|
||||
stopVoiceHeartbeat(): void {
|
||||
if (this.voicePresenceTimer) {
|
||||
clearInterval(this.voicePresenceTimer);
|
||||
this.voicePresenceTimer = null;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Bind local audio/video tracks to all existing peer transceivers.
|
||||
* Restores transceiver direction to sendrecv if previously set to recvonly
|
||||
* (which happens when disableVoice calls removeTrack).
|
||||
*/
|
||||
private bindLocalTracksToAllPeers(): void {
|
||||
const peers = this.callbacks.getActivePeers();
|
||||
|
||||
if (!this.localMediaStream)
|
||||
return;
|
||||
|
||||
const localStream = this.localMediaStream;
|
||||
const localAudioTrack = localStream.getAudioTracks()[0] || null;
|
||||
const localVideoTrack = localStream.getVideoTracks()[0] || null;
|
||||
|
||||
peers.forEach((peerData, peerId) => {
|
||||
if (localAudioTrack) {
|
||||
const audioTransceiver = this.getOrCreateReusableTransceiver(peerData, TRACK_KIND_AUDIO, {
|
||||
preferredSender: peerData.audioSender,
|
||||
excludedSenders: [peerData.screenAudioSender]
|
||||
});
|
||||
const audioSender = audioTransceiver.sender;
|
||||
|
||||
peerData.audioSender = audioSender;
|
||||
|
||||
// Restore direction after removeTrack (which sets it to recvonly)
|
||||
if (
|
||||
audioTransceiver &&
|
||||
(audioTransceiver.direction === TRANSCEIVER_RECV_ONLY ||
|
||||
audioTransceiver.direction === TRANSCEIVER_INACTIVE)
|
||||
) {
|
||||
audioTransceiver.direction = TRANSCEIVER_SEND_RECV;
|
||||
}
|
||||
|
||||
if (typeof audioSender.setStreams === 'function') {
|
||||
audioSender.setStreams(localStream);
|
||||
}
|
||||
|
||||
audioSender
|
||||
.replaceTrack(localAudioTrack)
|
||||
.then(() => this.logger.info('audio replaceTrack ok', { peerId }))
|
||||
.catch((error) => this.logger.error('audio replaceTrack failed', error));
|
||||
}
|
||||
|
||||
if (localVideoTrack) {
|
||||
const videoTransceiver = this.getOrCreateReusableTransceiver(peerData, TRACK_KIND_VIDEO, {
|
||||
preferredSender: peerData.videoSender,
|
||||
excludedSenders: [peerData.screenVideoSender]
|
||||
});
|
||||
const videoSender = videoTransceiver.sender;
|
||||
|
||||
peerData.videoSender = videoSender;
|
||||
|
||||
if (
|
||||
videoTransceiver &&
|
||||
(videoTransceiver.direction === TRANSCEIVER_RECV_ONLY ||
|
||||
videoTransceiver.direction === TRANSCEIVER_INACTIVE)
|
||||
) {
|
||||
videoTransceiver.direction = TRANSCEIVER_SEND_RECV;
|
||||
}
|
||||
|
||||
if (typeof videoSender.setStreams === 'function') {
|
||||
videoSender.setStreams(localStream);
|
||||
}
|
||||
|
||||
videoSender
|
||||
.replaceTrack(localVideoTrack)
|
||||
.then(() => this.logger.info('video replaceTrack ok', { peerId }))
|
||||
.catch((error) => this.logger.error('video replaceTrack failed', error));
|
||||
}
|
||||
|
||||
this.callbacks.renegotiate(peerId);
|
||||
});
|
||||
}
|
||||
|
||||
private getOrCreateReusableTransceiver(
|
||||
peerData: PeerData,
|
||||
kind: typeof TRACK_KIND_AUDIO | typeof TRACK_KIND_VIDEO,
|
||||
options: {
|
||||
preferredSender?: RTCRtpSender;
|
||||
excludedSenders?: (RTCRtpSender | undefined)[];
|
||||
}
|
||||
): RTCRtpTransceiver {
|
||||
const excludedSenders = new Set(
|
||||
(options.excludedSenders ?? []).filter((sender): sender is RTCRtpSender => !!sender)
|
||||
);
|
||||
const existingTransceivers = peerData.connection.getTransceivers();
|
||||
const preferredTransceiver = options.preferredSender
|
||||
? existingTransceivers.find((transceiver) => transceiver.sender === options.preferredSender)
|
||||
: null;
|
||||
|
||||
if (preferredTransceiver) {
|
||||
return preferredTransceiver;
|
||||
}
|
||||
|
||||
const attachedSenderTransceiver = existingTransceivers.find((transceiver) =>
|
||||
!excludedSenders.has(transceiver.sender)
|
||||
&& transceiver.sender.track?.kind === kind
|
||||
);
|
||||
|
||||
if (attachedSenderTransceiver) {
|
||||
return attachedSenderTransceiver;
|
||||
}
|
||||
|
||||
const reusableReceiverTransceiver = existingTransceivers.find((transceiver) =>
|
||||
!excludedSenders.has(transceiver.sender)
|
||||
&& !transceiver.sender.track
|
||||
&& transceiver.receiver.track?.kind === kind
|
||||
);
|
||||
|
||||
if (reusableReceiverTransceiver) {
|
||||
return reusableReceiverTransceiver;
|
||||
}
|
||||
|
||||
return peerData.connection.addTransceiver(kind, {
|
||||
direction: TRANSCEIVER_SEND_RECV
|
||||
});
|
||||
}
|
||||
|
||||
/** Broadcast a voice-presence state event to all connected peers. */
|
||||
private broadcastVoicePresence(): void {
|
||||
const oderId = this.callbacks.getIdentifyOderId();
|
||||
const displayName = this.callbacks.getIdentifyDisplayName();
|
||||
|
||||
this.callbacks.broadcastMessage({
|
||||
type: P2P_TYPE_VOICE_STATE,
|
||||
oderId,
|
||||
displayName,
|
||||
voiceState: {
|
||||
isConnected: this.isVoiceActive,
|
||||
isMuted: this.isMicMuted,
|
||||
isDeafened: this.isSelfDeafened,
|
||||
roomId: this.currentVoiceRoomId,
|
||||
serverId: this.currentVoiceServerId
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
// -- Input gain helpers --
|
||||
|
||||
/**
|
||||
* Route the current `localMediaStream` through a Web Audio GainNode so
|
||||
* the microphone level can be adjusted without renegotiating peers.
|
||||
*
|
||||
* If a gain pipeline already exists for the same source stream the gain
|
||||
* value is simply updated. Otherwise a new pipeline is created.
|
||||
*/
|
||||
private applyInputGainToCurrentStream(): void {
|
||||
const stream = this.localMediaStream;
|
||||
|
||||
if (!stream)
|
||||
return;
|
||||
|
||||
// If the source stream hasn't changed, just update gain
|
||||
if (this.preGainStream === stream && this.inputGainNode && this.inputGainCtx) {
|
||||
this.inputGainNode.gain.value = this.inputGainVolume;
|
||||
return;
|
||||
}
|
||||
|
||||
// Tear down the old pipeline (if any)
|
||||
this.teardownInputGain();
|
||||
|
||||
// Build new pipeline: source → gain → destination
|
||||
this.preGainStream = stream;
|
||||
this.inputGainCtx = new AudioContext();
|
||||
this.inputGainSourceNode = this.inputGainCtx.createMediaStreamSource(stream);
|
||||
this.inputGainNode = this.inputGainCtx.createGain();
|
||||
this.inputGainNode.gain.value = this.inputGainVolume;
|
||||
this.inputGainDest = this.inputGainCtx.createMediaStreamDestination();
|
||||
|
||||
this.inputGainSourceNode.connect(this.inputGainNode);
|
||||
this.inputGainNode.connect(this.inputGainDest);
|
||||
|
||||
// Replace localMediaStream with the gained stream
|
||||
this.localMediaStream = this.inputGainDest.stream;
|
||||
this.applyCurrentMuteState();
|
||||
}
|
||||
|
||||
/** Keep the active outbound track aligned with the stored mute state. */
|
||||
private applyCurrentMuteState(): void {
|
||||
if (!this.localMediaStream)
|
||||
return;
|
||||
|
||||
const enabled = !this.isMicMuted;
|
||||
|
||||
this.localMediaStream.getAudioTracks().forEach((track) => {
|
||||
track.enabled = enabled;
|
||||
});
|
||||
}
|
||||
|
||||
/** Disconnect and close the input-gain AudioContext. */
|
||||
private teardownInputGain(): void {
|
||||
try {
|
||||
this.inputGainSourceNode?.disconnect();
|
||||
this.inputGainNode?.disconnect();
|
||||
} catch (error) {
|
||||
this.logger.warn('Input gain nodes were already disconnected during teardown', error);
|
||||
}
|
||||
|
||||
if (this.inputGainCtx && this.inputGainCtx.state !== 'closed') {
|
||||
this.inputGainCtx.close().catch((error) => {
|
||||
this.logger.warn('Failed to close input gain audio context', error);
|
||||
});
|
||||
}
|
||||
|
||||
this.inputGainCtx = null;
|
||||
this.inputGainSourceNode = null;
|
||||
this.inputGainNode = null;
|
||||
this.inputGainDest = null;
|
||||
this.preGainStream = null;
|
||||
}
|
||||
|
||||
/** Clean up all resources. */
|
||||
destroy(): void {
|
||||
this.teardownInputGain();
|
||||
this.disableVoice();
|
||||
this.stopVoiceHeartbeat();
|
||||
this.noiseReduction.destroy();
|
||||
this.voiceConnected$.complete();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,204 @@
|
||||
/* eslint-disable @typescript-eslint/no-non-null-assertion */
|
||||
/**
|
||||
* Manages RNNoise-based noise reduction for microphone audio.
|
||||
*
|
||||
* Uses the `@timephy/rnnoise-wasm` AudioWorklet to process the raw
|
||||
* microphone stream through a neural-network noise gate, producing
|
||||
* a clean output stream that can be sent to peers instead.
|
||||
*
|
||||
* Architecture:
|
||||
* raw mic → AudioContext.createMediaStreamSource
|
||||
* → NoiseSuppressorWorklet (AudioWorkletNode)
|
||||
* → MediaStreamDestination → clean MediaStream
|
||||
*
|
||||
* The manager is intentionally stateless w.r.t. Angular signals;
|
||||
* the owning MediaManager / WebRTCService drives signals.
|
||||
*/
|
||||
import { WebRTCLogger } from '../logging/webrtc-logger';
|
||||
|
||||
/** Name used to register / instantiate the AudioWorklet processor. */
const WORKLET_PROCESSOR_NAME = 'NoiseSuppressorWorklet';
/** RNNoise is trained on 48 kHz audio - the AudioContext must match. */
const RNNOISE_SAMPLE_RATE = 48_000;
/**
 * Relative path (from the served application root) to the **bundled**
 * worklet script placed in `public/` and served as a static asset.
 * NOTE(review): presumably loaded via `audioWorklet.addModule` by the
 * manager below — confirm against the graph-building code.
 */
const WORKLET_MODULE_PATH = 'rnnoise-worklet.js';
|
||||
|
||||
/**
 * Applies RNNoise-based noise suppression to a microphone stream using an
 * AudioWorklet.
 *
 * Graph: rawStream → MediaStreamAudioSourceNode → AudioWorkletNode
 * (NoiseSuppressorWorklet) → MediaStreamAudioDestinationNode, whose
 * `.stream` is handed to the caller in place of the raw microphone.
 */
export class NoiseReductionManager {
  /** The AudioContext used for the noise-reduction graph. */
  private audioContext: AudioContext | null = null;

  /** Source node wrapping the raw microphone stream. */
  private sourceNode: MediaStreamAudioSourceNode | null = null;

  /** The RNNoise AudioWorklet node. */
  private workletNode: AudioWorkletNode | null = null;

  /** Destination node that exposes the cleaned stream. */
  private destinationNode: MediaStreamAudioDestinationNode | null = null;

  /** Whether the worklet module has been loaded into the AudioContext. */
  private workletLoaded = false;

  /** Whether noise reduction is currently active. */
  private _isEnabled = false;

  constructor(private readonly logger: WebRTCLogger) {}

  /** Whether noise reduction is currently active. */
  get isEnabled(): boolean {
    return this._isEnabled;
  }

  /**
   * Enable noise reduction on a raw microphone stream.
   *
   * Builds the AudioWorklet processing graph and returns a new
   * {@link MediaStream} whose audio has been denoised.
   *
   * If the worklet cannot be loaded (e.g. unsupported browser),
   * the original stream is returned unchanged and an error is logged.
   *
   * @param rawStream - The raw `getUserMedia` microphone stream.
   * @returns A denoised {@link MediaStream}, or the original if setup fails.
   */
  async enable(rawStream: MediaStream): Promise<MediaStream> {
    // NOTE(review): when already enabled, the new rawStream argument is
    // ignored — device switches should go through replaceInputStream().
    if (this._isEnabled && this.destinationNode) {
      this.logger.info('Noise reduction already enabled, returning existing clean stream');
      return this.destinationNode.stream;
    }

    try {
      await this.buildProcessingGraph(rawStream);
      this._isEnabled = true;
      this.logger.info('Noise reduction enabled');
      // destinationNode is always assigned by buildProcessingGraph on success.
      return this.destinationNode!.stream;
    } catch (err) {
      this.logger.error('Failed to enable noise reduction, returning raw stream', err);
      this.teardownGraph();
      return rawStream;
    }
  }

  /**
   * Disable noise reduction and tear down the processing graph.
   *
   * After calling this, the original raw microphone stream should be
   * used again (the caller is responsible for re-binding tracks).
   */
  disable(): void {
    if (!this._isEnabled)
      return;

    this.teardownGraph();
    this._isEnabled = false;
    this.logger.info('Noise reduction disabled');
  }

  /**
   * Re-pipe a new raw stream through the existing noise-reduction graph.
   *
   * Useful when the microphone device changes but noise reduction
   * should stay active.
   *
   * @param rawStream - The new raw microphone stream.
   * @returns The denoised stream, or the raw stream on failure.
   */
  async replaceInputStream(rawStream: MediaStream): Promise<MediaStream> {
    if (!this._isEnabled)
      return rawStream;

    try {
      // Disconnect old source but keep the rest of the graph alive
      this.sourceNode?.disconnect();

      if (!this.audioContext || !this.workletNode || !this.destinationNode) {
        throw new Error('Processing graph not initialised');
      }

      this.sourceNode = this.audioContext.createMediaStreamSource(rawStream);
      this.sourceNode.connect(this.workletNode);

      this.logger.info('Noise reduction input stream replaced');
      return this.destinationNode.stream;
    } catch (err) {
      this.logger.error('Failed to replace noise reduction input', err);
      return rawStream;
    }
  }

  /** Clean up all resources. Safe to call multiple times. */
  destroy(): void {
    // disable() only tears down when enabled; the explicit resets below
    // keep destroy() safe even if enable() was never called.
    this.disable();
    this.audioContext = null;
    this.workletLoaded = false;
  }

  /**
   * Build the AudioWorklet processing graph:
   * rawStream → source → workletNode → destination
   */
  private async buildProcessingGraph(rawStream: MediaStream): Promise<void> {
    // Reuse or create the AudioContext (must be 48 kHz for RNNoise).
    // In practice teardownGraph() nulls the context afterwards, so this
    // branch usually creates a fresh one.
    if (!this.audioContext || this.audioContext.state === 'closed') {
      this.audioContext = new AudioContext({ sampleRate: RNNOISE_SAMPLE_RATE });
      this.workletLoaded = false;
    }

    // Resume if suspended (browsers auto-suspend until user gesture)
    if (this.audioContext.state === 'suspended') {
      await this.audioContext.resume();
    }

    // Load the worklet module once per AudioContext lifetime
    if (!this.workletLoaded) {
      await this.audioContext.audioWorklet.addModule(WORKLET_MODULE_PATH);
      this.workletLoaded = true;
      this.logger.info('RNNoise worklet module loaded');
    }

    // Build the node graph
    this.sourceNode = this.audioContext.createMediaStreamSource(rawStream);
    this.workletNode = new AudioWorkletNode(this.audioContext, WORKLET_PROCESSOR_NAME);
    this.destinationNode = this.audioContext.createMediaStreamDestination();

    this.sourceNode.connect(this.workletNode).connect(this.destinationNode);
  }

  /** Disconnect and release all graph nodes. */
  private teardownGraph(): void {
    // Defensive: any disconnect() failure is logged and teardown continues,
    // so one failed node never leaks the others.
    try {
      this.sourceNode?.disconnect();
    } catch (error) {
      this.logger.warn('Noise reduction source node already disconnected', error);
    }

    try {
      this.workletNode?.disconnect();
    } catch (error) {
      this.logger.warn('Noise reduction worklet node already disconnected', error);
    }

    try {
      this.destinationNode?.disconnect();
    } catch (error) {
      this.logger.warn('Noise reduction destination node already disconnected', error);
    }

    this.sourceNode = null;
    this.workletNode = null;
    this.destinationNode = null;

    // Close the context to free hardware resources
    if (this.audioContext && this.audioContext.state !== 'closed') {
      this.audioContext.close().catch((error) => {
        this.logger.warn('Failed to close RNNoise audio context', error);
      });
    }

    this.audioContext = null;
    this.workletLoaded = false;
  }
}
|
||||
@@ -0,0 +1,92 @@
|
||||
import { ScreenShareQualityPreset, ScreenShareStartOptions } from '../../screen-share.config';
|
||||
import { WebRTCLogger } from '../../logging/webrtc-logger';
|
||||
import { getElectronApi } from '../../../../core/platform/electron/get-electron-api';
|
||||
|
||||
export class BrowserScreenShareCapture {
|
||||
constructor(private readonly logger: WebRTCLogger) {}
|
||||
|
||||
async startCapture(
|
||||
options: ScreenShareStartOptions,
|
||||
preset: ScreenShareQualityPreset
|
||||
): Promise<MediaStream> {
|
||||
const displayConstraints = this.buildDisplayMediaConstraints(options, preset);
|
||||
|
||||
this.logger.info('getDisplayMedia constraints', displayConstraints);
|
||||
|
||||
if (!navigator.mediaDevices?.getDisplayMedia) {
|
||||
throw new Error('navigator.mediaDevices.getDisplayMedia is not available.');
|
||||
}
|
||||
|
||||
const stream = await navigator.mediaDevices.getDisplayMedia(displayConstraints);
|
||||
|
||||
this.logAudioTrackSettings(stream);
|
||||
|
||||
return stream;
|
||||
}
|
||||
|
||||
private buildDisplayMediaConstraints(
|
||||
options: ScreenShareStartOptions,
|
||||
preset: ScreenShareQualityPreset
|
||||
): DisplayMediaStreamOptions {
|
||||
const supportedConstraints = navigator.mediaDevices?.getSupportedConstraints?.() as Record<string, boolean> | undefined;
|
||||
const isWindowsElectron = this.isWindowsElectron();
|
||||
const audioConstraints: Record<string, unknown> | false = options.includeSystemAudio
|
||||
? {
|
||||
echoCancellation: false,
|
||||
noiseSuppression: false,
|
||||
autoGainControl: false
|
||||
}
|
||||
: false;
|
||||
|
||||
if (audioConstraints && supportedConstraints?.['restrictOwnAudio']) {
|
||||
audioConstraints['restrictOwnAudio'] = true;
|
||||
}
|
||||
|
||||
if (audioConstraints && supportedConstraints?.['suppressLocalAudioPlayback']) {
|
||||
// Windows Electron should keep voice playback audible to the sharer.
|
||||
// Use own-audio restriction to keep the app's playback out of the
|
||||
// captured stream instead of muting local playback.
|
||||
audioConstraints['suppressLocalAudioPlayback'] = !isWindowsElectron;
|
||||
}
|
||||
|
||||
return {
|
||||
video: {
|
||||
width: { ideal: preset.width, max: preset.width },
|
||||
height: { ideal: preset.height, max: preset.height },
|
||||
frameRate: { ideal: preset.frameRate, max: preset.frameRate }
|
||||
},
|
||||
audio: audioConstraints,
|
||||
monitorTypeSurfaces: 'include',
|
||||
selfBrowserSurface: 'exclude',
|
||||
surfaceSwitching: 'include',
|
||||
systemAudio: options.includeSystemAudio ? 'include' : 'exclude'
|
||||
} as DisplayMediaStreamOptions;
|
||||
}
|
||||
|
||||
private logAudioTrackSettings(stream: MediaStream): void {
|
||||
const audioTrack = stream.getAudioTracks()[0];
|
||||
|
||||
if (!audioTrack || typeof audioTrack.getSettings !== 'function') {
|
||||
return;
|
||||
}
|
||||
|
||||
const settings = audioTrack.getSettings() as MediaTrackSettings & {
|
||||
restrictOwnAudio?: boolean;
|
||||
suppressLocalAudioPlayback?: boolean;
|
||||
};
|
||||
|
||||
this.logger.info('getDisplayMedia audio track settings', {
|
||||
restrictOwnAudio: settings.restrictOwnAudio ?? null,
|
||||
suppressLocalAudioPlayback: settings.suppressLocalAudioPlayback ?? null
|
||||
});
|
||||
}
|
||||
|
||||
private isWindowsElectron(): boolean {
|
||||
if (typeof window === 'undefined' || typeof navigator === 'undefined') {
|
||||
return false;
|
||||
}
|
||||
|
||||
return !!getElectronApi()
|
||||
&& /win/i.test(`${navigator.userAgent} ${navigator.platform}`);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,169 @@
|
||||
import { ScreenShareQualityPreset, ScreenShareStartOptions } from '../../screen-share.config';
|
||||
import { ELECTRON_ENTIRE_SCREEN_SOURCE_NAME } from '../../realtime.constants';
|
||||
import { WebRTCLogger } from '../../logging/webrtc-logger';
|
||||
import {
|
||||
DesktopSource,
|
||||
ElectronDesktopCaptureResult,
|
||||
ElectronDesktopMediaStreamConstraints,
|
||||
ElectronDesktopSourceSelection,
|
||||
ScreenShareElectronApi
|
||||
} from './shared';
|
||||
|
||||
/** Injected collaborators for {@link DesktopElectronScreenShareCapture}. */
interface DesktopElectronScreenShareCaptureDependencies {
  /** Returns the Electron screen-share API, or null when not in Electron. */
  getElectronApi(): ScreenShareElectronApi | null;
  /**
   * Optional UI hook letting the user pick one of several desktop
   * sources. Undefined when no picker is wired up, in which case a
   * default source is chosen automatically.
   */
  getSelectDesktopSource(): ((
    sources: readonly DesktopSource[],
    options: { includeSystemAudio: boolean }
  ) => Promise<ElectronDesktopSourceSelection>) | undefined;
}
|
||||
|
||||
export class DesktopElectronScreenShareCapture {
|
||||
constructor(
|
||||
private readonly logger: WebRTCLogger,
|
||||
private readonly dependencies: DesktopElectronScreenShareCaptureDependencies
|
||||
) {}
|
||||
|
||||
isAvailable(): boolean {
|
||||
return !!this.dependencies.getElectronApi()?.getSources && !this.isLinuxElectron();
|
||||
}
|
||||
|
||||
shouldSuppressRemotePlaybackDuringShare(
|
||||
includeSystemAudio: boolean,
|
||||
usingElectronDesktopCapture: boolean
|
||||
): boolean {
|
||||
// Chromium display-media capture can use own-audio suppression on modern
|
||||
// builds. The Electron desktop-capturer fallback cannot, so keep the old
|
||||
// Windows mute behavior only for that fallback path.
|
||||
return includeSystemAudio && usingElectronDesktopCapture && this.isWindowsElectron();
|
||||
}
|
||||
|
||||
async startCapture(
|
||||
options: ScreenShareStartOptions,
|
||||
preset: ScreenShareQualityPreset
|
||||
): Promise<ElectronDesktopCaptureResult> {
|
||||
const electronApi = this.dependencies.getElectronApi();
|
||||
|
||||
if (!electronApi?.getSources) {
|
||||
throw new Error('Electron desktop capture is unavailable.');
|
||||
}
|
||||
|
||||
const sources = await electronApi.getSources();
|
||||
const selection = await this.resolveSourceSelection(sources, options.includeSystemAudio);
|
||||
const captureOptions = {
|
||||
...options,
|
||||
includeSystemAudio: selection.includeSystemAudio
|
||||
};
|
||||
|
||||
if (!selection.source) {
|
||||
throw new Error('No desktop capture sources were available.');
|
||||
}
|
||||
|
||||
this.logger.info('Selected Electron desktop source', {
|
||||
includeSystemAudio: selection.includeSystemAudio,
|
||||
sourceId: selection.source.id,
|
||||
sourceName: selection.source.name
|
||||
});
|
||||
|
||||
const constraints = this.buildConstraints(selection.source.id, captureOptions, preset);
|
||||
|
||||
this.logger.info('desktopCapturer constraints', constraints);
|
||||
|
||||
if (!navigator.mediaDevices?.getUserMedia) {
|
||||
throw new Error('navigator.mediaDevices.getUserMedia is not available (requires HTTPS or localhost).');
|
||||
}
|
||||
|
||||
return {
|
||||
includeSystemAudio: selection.includeSystemAudio,
|
||||
stream: await navigator.mediaDevices.getUserMedia(constraints)
|
||||
};
|
||||
}
|
||||
|
||||
private async resolveSourceSelection(
|
||||
sources: DesktopSource[],
|
||||
includeSystemAudio: boolean
|
||||
): Promise<ElectronDesktopSourceSelection> {
|
||||
const orderedSources = this.sortSources(sources);
|
||||
const defaultSource = orderedSources.find((source) => source.name === ELECTRON_ENTIRE_SCREEN_SOURCE_NAME)
|
||||
?? orderedSources[0];
|
||||
|
||||
if (orderedSources.length === 0) {
|
||||
throw new Error('No desktop capture sources were available.');
|
||||
}
|
||||
|
||||
const selectDesktopSource = this.dependencies.getSelectDesktopSource();
|
||||
|
||||
if (!this.isWindowsElectron() || orderedSources.length < 2 || !selectDesktopSource) {
|
||||
return {
|
||||
includeSystemAudio,
|
||||
source: defaultSource
|
||||
};
|
||||
}
|
||||
|
||||
return await selectDesktopSource(orderedSources, { includeSystemAudio });
|
||||
}
|
||||
|
||||
private sortSources(sources: DesktopSource[]): DesktopSource[] {
|
||||
return [...sources].sort((left, right) => {
|
||||
const weightDiff = this.getSourceWeight(left) - this.getSourceWeight(right);
|
||||
|
||||
if (weightDiff !== 0) {
|
||||
return weightDiff;
|
||||
}
|
||||
|
||||
return left.name.localeCompare(right.name);
|
||||
});
|
||||
}
|
||||
|
||||
private getSourceWeight(source: DesktopSource): number {
|
||||
return source.name === ELECTRON_ENTIRE_SCREEN_SOURCE_NAME || source.id.startsWith('screen')
|
||||
? 0
|
||||
: 1;
|
||||
}
|
||||
|
||||
private buildConstraints(
|
||||
sourceId: string,
|
||||
options: ScreenShareStartOptions,
|
||||
preset: ScreenShareQualityPreset
|
||||
): ElectronDesktopMediaStreamConstraints {
|
||||
const constraints: ElectronDesktopMediaStreamConstraints = {
|
||||
video: {
|
||||
mandatory: {
|
||||
chromeMediaSource: 'desktop',
|
||||
chromeMediaSourceId: sourceId,
|
||||
maxWidth: preset.width,
|
||||
maxHeight: preset.height,
|
||||
maxFrameRate: preset.frameRate
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
if (options.includeSystemAudio) {
|
||||
constraints.audio = {
|
||||
mandatory: {
|
||||
chromeMediaSource: 'desktop',
|
||||
chromeMediaSourceId: sourceId
|
||||
}
|
||||
};
|
||||
} else {
|
||||
constraints.audio = false;
|
||||
}
|
||||
|
||||
return constraints;
|
||||
}
|
||||
|
||||
private isLinuxElectron(): boolean {
|
||||
if (!this.dependencies.getElectronApi() || typeof navigator === 'undefined') {
|
||||
return false;
|
||||
}
|
||||
|
||||
return /linux/i.test(`${navigator.userAgent} ${navigator.platform}`);
|
||||
}
|
||||
|
||||
private isWindowsElectron(): boolean {
|
||||
if (!this.isAvailable() || typeof navigator === 'undefined') {
|
||||
return false;
|
||||
}
|
||||
|
||||
return /win/i.test(`${navigator.userAgent} ${navigator.platform}`);
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,439 @@
|
||||
import { ScreenShareQualityPreset, ScreenShareStartOptions } from '../../screen-share.config';
|
||||
import { WebRTCLogger } from '../../logging/webrtc-logger';
|
||||
import {
|
||||
LinuxScreenShareAudioRoutingInfo,
|
||||
LinuxScreenShareMonitorAudioChunkPayload,
|
||||
LinuxScreenShareMonitorAudioEndedPayload,
|
||||
LinuxScreenShareMonitorCaptureInfo,
|
||||
ScreenShareElectronApi
|
||||
} from './shared';
|
||||
|
||||
/**
 * Renderer-side state for one active monitor-audio capture: the Web Audio
 * objects that turn raw PCM chunks from the main process into a
 * MediaStreamTrack, plus the IPC unsubscribe callbacks.
 */
interface LinuxScreenShareMonitorAudioPipeline {
  audioContext: AudioContext;
  audioTrack: MediaStreamTrack;
  // PCM sample size; only 16-bit is handled by handleMonitorAudioChunk.
  bitsPerSample: number;
  // Id of the main-process capture this pipeline belongs to.
  captureId: string;
  channelCount: number;
  mediaDestination: MediaStreamAudioDestinationNode;
  // AudioContext time at which the next decoded buffer is scheduled.
  nextStartTime: number;
  // Trailing partial frame carried over between incoming chunks.
  pendingBytes: Uint8Array;
  sampleRate: number;
  unsubscribeChunk: () => void;
  unsubscribeEnded: () => void;
}

/** Injected collaborators for {@link LinuxElectronScreenShareCapture}. */
interface LinuxElectronScreenShareCaptureDependencies {
  /** Returns the Electron screen-share API, or null when not in Electron. */
  getElectronApi(): ScreenShareElectronApi | null;
  /** Invoked when the monitor capture ends so the share can be stopped. */
  onCaptureEnded(): void;
  /** Starts the display-media capture used for the video track. */
  startDisplayMedia(options: ScreenShareStartOptions, preset: ScreenShareQualityPreset): Promise<MediaStream>;
}
|
||||
|
||||
/**
 * Linux-specific Electron screen-share capture.
 *
 * Video comes from a plain display-media capture; system audio is routed
 * by the Electron main process (PulseAudio-style sink/monitor routing —
 * names suggest this; confirm against the main-process implementation)
 * and streamed to the renderer as raw 16-bit PCM chunks over IPC. This
 * class reassembles those chunks into an AudioBuffer schedule feeding a
 * MediaStreamAudioDestinationNode, whose track replaces the capture's
 * own audio track.
 */
export class LinuxElectronScreenShareCapture {
  // True once startCapture has fully activated main-process audio routing.
  private audioRoutingActive = false;
  // In-flight teardown, used to serialize scheduleReset/awaitPendingReset.
  private audioRoutingResetPromise: Promise<void> | null = null;
  // Active renderer-side audio pipeline, or null when not capturing.
  private monitorAudioPipeline: LinuxScreenShareMonitorAudioPipeline | null = null;

  constructor(
    private readonly logger: WebRTCLogger,
    private readonly dependencies: LinuxElectronScreenShareCaptureDependencies
  ) {}

  /**
   * Whether this capture path can run: requires the full set of Linux
   * audio-routing IPC methods on the Electron API and a Linux platform.
   */
  isSupported(): boolean {
    if (typeof window === 'undefined' || typeof navigator === 'undefined') {
      return false;
    }

    const electronApi = this.dependencies.getElectronApi();
    const platformHint = `${navigator.userAgent} ${navigator.platform}`;

    return !!electronApi?.prepareLinuxScreenShareAudioRouting
      && !!electronApi?.activateLinuxScreenShareAudioRouting
      && !!electronApi?.deactivateLinuxScreenShareAudioRouting
      && !!electronApi?.startLinuxScreenShareMonitorCapture
      && !!electronApi?.stopLinuxScreenShareMonitorCapture
      && !!electronApi?.onLinuxScreenShareMonitorAudioChunk
      && !!electronApi?.onLinuxScreenShareMonitorAudioEnded
      && /linux/i.test(platformHint);
  }

  /** Wait for a teardown scheduled by scheduleReset(), if one is running. */
  async awaitPendingReset(): Promise<void> {
    if (!this.audioRoutingResetPromise) {
      return;
    }

    await this.audioRoutingResetPromise;
  }

  /**
   * Fire-and-forget teardown of audio routing. No-op when routing is not
   * active or a reset is already in flight.
   */
  scheduleReset(): void {
    if (!this.audioRoutingActive || this.audioRoutingResetPromise) {
      return;
    }

    this.audioRoutingResetPromise = this.resetAudioRouting()
      .catch((error) => {
        this.logger.warn('Failed to reset Linux Electron audio routing', error);
      })
      .finally(() => {
        this.audioRoutingResetPromise = null;
      });
  }

  /**
   * Start the Linux capture: prepare and activate main-process audio
   * routing, capture video via display media (with system audio disabled
   * — the monitor pipeline supplies audio instead), then combine the
   * video track with the monitor audio track.
   *
   * On any failure the partial capture is stopped and routing is reset
   * before the error is rethrown.
   */
  async startCapture(
    options: ScreenShareStartOptions,
    preset: ScreenShareQualityPreset
  ): Promise<MediaStream> {
    const electronApi = this.getRequiredElectronApi();
    const routingInfo = await electronApi.prepareLinuxScreenShareAudioRouting();

    this.assertAudioRoutingReady(routingInfo, 'Linux Electron audio routing is unavailable.');

    let desktopStream: MediaStream | null = null;

    try {
      const activation = await electronApi.activateLinuxScreenShareAudioRouting();

      this.assertAudioRoutingReady(activation, 'Failed to activate Linux Electron audio routing.');

      if (!activation.active) {
        throw new Error(activation.reason || 'Failed to activate Linux Electron audio routing.');
      }

      // Audio comes from the monitor pipeline, not from display media.
      desktopStream = await this.dependencies.startDisplayMedia({
        ...options,
        includeSystemAudio: false
      }, preset);

      const { audioTrack, captureInfo } = await this.startMonitorTrack();
      const stream = new MediaStream([...desktopStream.getVideoTracks(), audioTrack]);

      // Drop any audio track display media produced anyway.
      desktopStream.getAudioTracks().forEach((track) => track.stop());

      this.audioRoutingActive = true;
      this.logger.info('Linux Electron screen-share audio routing enabled', {
        screenShareMonitorSourceName: captureInfo.sourceName,
        voiceSinkName: activation.voiceSinkName
      });

      return stream;
    } catch (error) {
      desktopStream?.getTracks().forEach((track) => track.stop());
      await this.resetAudioRouting();
      throw error;
    }
  }

  /**
   * Narrow the optional Electron API to the required routing methods,
   * throwing if any is missing.
   */
  private getRequiredElectronApi(): Required<Pick<
    ScreenShareElectronApi,
    | 'prepareLinuxScreenShareAudioRouting'
    | 'activateLinuxScreenShareAudioRouting'
    | 'deactivateLinuxScreenShareAudioRouting'
    | 'startLinuxScreenShareMonitorCapture'
    | 'stopLinuxScreenShareMonitorCapture'
    | 'onLinuxScreenShareMonitorAudioChunk'
    | 'onLinuxScreenShareMonitorAudioEnded'
  >> {
    const electronApi = this.dependencies.getElectronApi();

    if (!electronApi?.prepareLinuxScreenShareAudioRouting
      || !electronApi.activateLinuxScreenShareAudioRouting
      || !electronApi.deactivateLinuxScreenShareAudioRouting
      || !electronApi.startLinuxScreenShareMonitorCapture
      || !electronApi.stopLinuxScreenShareMonitorCapture
      || !electronApi.onLinuxScreenShareMonitorAudioChunk
      || !electronApi.onLinuxScreenShareMonitorAudioEnded) {
      throw new Error('Linux Electron audio routing is unavailable.');
    }

    return {
      prepareLinuxScreenShareAudioRouting: electronApi.prepareLinuxScreenShareAudioRouting,
      activateLinuxScreenShareAudioRouting: electronApi.activateLinuxScreenShareAudioRouting,
      deactivateLinuxScreenShareAudioRouting: electronApi.deactivateLinuxScreenShareAudioRouting,
      startLinuxScreenShareMonitorCapture: electronApi.startLinuxScreenShareMonitorCapture,
      stopLinuxScreenShareMonitorCapture: electronApi.stopLinuxScreenShareMonitorCapture,
      onLinuxScreenShareMonitorAudioChunk: electronApi.onLinuxScreenShareMonitorAudioChunk,
      onLinuxScreenShareMonitorAudioEnded: electronApi.onLinuxScreenShareMonitorAudioEnded
    };
  }

  /**
   * Throw unless the main process reports routing as available and
   * monitor capture as supported.
   */
  private assertAudioRoutingReady(
    routingInfo: LinuxScreenShareAudioRoutingInfo,
    unavailableReason: string
  ): void {
    if (!routingInfo.available) {
      throw new Error(routingInfo.reason || unavailableReason);
    }

    if (!routingInfo.monitorCaptureSupported) {
      throw new Error('Linux screen-share monitor capture requires restarting the desktop app so the new Electron main process can load.');
    }
  }

  /**
   * Tear down the renderer pipeline, then ask the main process to stop
   * the monitor capture and deactivate routing. Each step is best-effort
   * so one failure never blocks the rest of the cleanup.
   */
  private async resetAudioRouting(): Promise<void> {
    const electronApi = this.dependencies.getElectronApi();
    // Capture the id before disposal nulls the pipeline.
    const captureId = this.monitorAudioPipeline?.captureId;

    this.audioRoutingActive = false;

    this.disposeMonitorAudioPipeline();

    try {
      if (captureId && electronApi?.stopLinuxScreenShareMonitorCapture) {
        await electronApi.stopLinuxScreenShareMonitorCapture(captureId);
      }
    } catch (error) {
      this.logger.warn('Failed to stop Linux screen-share monitor capture', error);
    }

    try {
      if (electronApi?.deactivateLinuxScreenShareAudioRouting) {
        await electronApi.deactivateLinuxScreenShareAudioRouting();
      }
    } catch (error) {
      this.logger.warn('Failed to deactivate Linux Electron audio routing', error);
    }
  }

  /**
   * Subscribe to the monitor-audio IPC events, start the capture in the
   * main process, and build the renderer audio pipeline.
   *
   * Subscriptions are installed BEFORE the capture starts, so chunks (or
   * an early "ended" event) that arrive while the pipeline is still being
   * built are queued by captureId and replayed once it exists.
   */
  private async startMonitorTrack(): Promise<{
    audioTrack: MediaStreamTrack;
    captureInfo: LinuxScreenShareMonitorCaptureInfo;
  }> {
    const electronApi = this.dependencies.getElectronApi();

    if (!electronApi?.startLinuxScreenShareMonitorCapture
      || !electronApi?.stopLinuxScreenShareMonitorCapture
      || !electronApi?.onLinuxScreenShareMonitorAudioChunk
      || !electronApi?.onLinuxScreenShareMonitorAudioEnded) {
      throw new Error('Linux screen-share monitor capture is unavailable.');
    }

    const queuedChunksByCaptureId = new Map<string, Uint8Array[]>();
    const queuedEndedReasons = new Map<string, string | undefined>();

    let pipeline: LinuxScreenShareMonitorAudioPipeline | null = null;
    let captureInfo: LinuxScreenShareMonitorCaptureInfo | null = null;

    const queueChunk = (captureId: string, chunk: Uint8Array): void => {
      const queuedChunks = queuedChunksByCaptureId.get(captureId) || [];

      // Copy: the IPC buffer may be reused by the sender.
      queuedChunks.push(this.copyBytes(chunk));
      queuedChunksByCaptureId.set(captureId, queuedChunks);
    };
    const onChunk = (payload: LinuxScreenShareMonitorAudioChunkPayload): void => {
      if (!pipeline || payload.captureId !== pipeline.captureId) {
        queueChunk(payload.captureId, payload.chunk);
        return;
      }

      this.handleMonitorAudioChunk(pipeline, payload.chunk);
    };
    const onEnded = (payload: LinuxScreenShareMonitorAudioEndedPayload): void => {
      if (!pipeline || payload.captureId !== pipeline.captureId) {
        queuedEndedReasons.set(payload.captureId, payload.reason);
        return;
      }

      this.logger.warn('Linux screen-share monitor capture ended', payload);
      this.dependencies.onCaptureEnded();
    };
    const unsubscribeChunk = electronApi.onLinuxScreenShareMonitorAudioChunk(onChunk) as () => void;
    const unsubscribeEnded = electronApi.onLinuxScreenShareMonitorAudioEnded(onEnded) as () => void;

    try {
      captureInfo = await electronApi.startLinuxScreenShareMonitorCapture() as LinuxScreenShareMonitorCaptureInfo;

      const audioContext = new AudioContext({ sampleRate: captureInfo.sampleRate });
      const mediaDestination = audioContext.createMediaStreamDestination();

      await audioContext.resume();

      const audioTrack = mediaDestination.stream.getAudioTracks()[0];

      if (!audioTrack) {
        throw new Error('Renderer audio pipeline did not produce a screen-share monitor track.');
      }

      pipeline = {
        audioContext,
        audioTrack,
        bitsPerSample: captureInfo.bitsPerSample,
        captureId: captureInfo.captureId,
        channelCount: captureInfo.channelCount,
        mediaDestination,
        // Small lead time so the first scheduled buffer is in the future.
        nextStartTime: audioContext.currentTime + 0.05,
        pendingBytes: new Uint8Array(0),
        sampleRate: captureInfo.sampleRate,
        unsubscribeChunk,
        unsubscribeEnded
      };

      this.monitorAudioPipeline = pipeline;
      const activeCaptureId = captureInfo.captureId;

      audioTrack.addEventListener('ended', () => {
        // Ignore 'ended' from a pipeline that has already been replaced.
        if (this.monitorAudioPipeline?.captureId === activeCaptureId) {
          this.dependencies.onCaptureEnded();
        }
      }, { once: true });

      // Replay chunks that arrived before the pipeline existed.
      const queuedChunks = queuedChunksByCaptureId.get(captureInfo.captureId) || [];
      const activePipeline = pipeline;

      queuedChunks.forEach((chunk) => {
        this.handleMonitorAudioChunk(activePipeline, chunk);
      });

      queuedChunksByCaptureId.delete(captureInfo.captureId);

      // If the capture ended while we were setting up, fail the start.
      if (queuedEndedReasons.has(captureInfo.captureId)) {
        throw new Error(queuedEndedReasons.get(captureInfo.captureId)
          || 'Linux screen-share monitor capture ended before audio initialisation completed.');
      }

      return {
        audioTrack,
        captureInfo
      };
    } catch (error) {
      if (pipeline) {
        this.disposeMonitorAudioPipeline(pipeline.captureId);
      } else {
        // Pipeline never existed, so disposal won't run the unsubscribes.
        unsubscribeChunk();
        unsubscribeEnded();
      }

      try {
        await electronApi.stopLinuxScreenShareMonitorCapture(captureInfo?.captureId);
      } catch (stopError) {
        this.logger.warn('Failed to stop Linux screen-share monitor capture after startup failure', stopError);
      }

      throw error;
    }
  }

  /**
   * Tear down the renderer audio pipeline: unsubscribe IPC, stop the
   * track, drop buffered bytes, and close the AudioContext.
   *
   * @param captureId - When given, only dispose if it matches the active
   *   pipeline (protects against disposing a newer capture).
   */
  private disposeMonitorAudioPipeline(captureId?: string): void {
    if (!this.monitorAudioPipeline) {
      return;
    }

    if (captureId && captureId !== this.monitorAudioPipeline.captureId) {
      return;
    }

    const pipeline = this.monitorAudioPipeline;

    // Null the field first so re-entrant callbacks see no active pipeline.
    this.monitorAudioPipeline = null;
    pipeline.unsubscribeChunk();
    pipeline.unsubscribeEnded();
    pipeline.audioTrack.stop();
    pipeline.pendingBytes = new Uint8Array(0);

    void pipeline.audioContext.close().catch((error) => {
      this.logger.warn('Failed to close Linux screen-share monitor audio context', error);
    });
  }

  /**
   * Append a raw PCM chunk to the pipeline: buffer partial frames, decode
   * complete frames into an AudioBuffer, and schedule it back-to-back
   * with the previously scheduled audio.
   */
  private handleMonitorAudioChunk(
    pipeline: LinuxScreenShareMonitorAudioPipeline,
    chunk: Uint8Array
  ): void {
    // Only signed 16-bit PCM is decoded below (getInt16).
    if (pipeline.bitsPerSample !== 16) {
      this.logger.warn('Unsupported Linux screen-share monitor capture sample size', {
        bitsPerSample: pipeline.bitsPerSample,
        captureId: pipeline.captureId
      });

      return;
    }

    const bytesPerSample = pipeline.bitsPerSample / 8;
    const bytesPerFrame = bytesPerSample * pipeline.channelCount;

    if (!Number.isFinite(bytesPerFrame) || bytesPerFrame <= 0) {
      return;
    }

    // Prepend whatever partial frame was left over from the last chunk.
    const combinedBytes = this.concatBytes(pipeline.pendingBytes, chunk);
    const completeByteLength = combinedBytes.byteLength - (combinedBytes.byteLength % bytesPerFrame);

    if (completeByteLength <= 0) {
      pipeline.pendingBytes = combinedBytes;
      return;
    }

    const completeBytes = combinedBytes.subarray(0, completeByteLength);

    pipeline.pendingBytes = this.copyBytes(combinedBytes.subarray(completeByteLength));

    if (pipeline.audioContext.state !== 'running') {
      void pipeline.audioContext.resume().catch((error) => {
        this.logger.warn('Failed to resume Linux screen-share monitor audio context', error);
      });
    }

    const frameCount = completeByteLength / bytesPerFrame;
    const audioBuffer = this.createAudioBuffer(pipeline, completeBytes, frameCount);
    const source = pipeline.audioContext.createBufferSource();

    source.buffer = audioBuffer;
    source.connect(pipeline.mediaDestination);

    source.onended = () => {
      source.disconnect();
    };

    // Schedule contiguously after the previous buffer; never in the past
    // (20 ms safety margin if we fell behind).
    const now = pipeline.audioContext.currentTime;
    const startTime = Math.max(pipeline.nextStartTime, now + 0.02);

    source.start(startTime);
    pipeline.nextStartTime = startTime + audioBuffer.duration;
  }

  /**
   * Decode interleaved little-endian 16-bit PCM bytes into a planar
   * AudioBuffer (samples scaled to [-1, 1)).
   */
  private createAudioBuffer(
    pipeline: LinuxScreenShareMonitorAudioPipeline,
    bytes: Uint8Array,
    frameCount: number
  ): AudioBuffer {
    const audioBuffer = pipeline.audioContext.createBuffer(pipeline.channelCount, frameCount, pipeline.sampleRate);
    const sampleData = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
    const channelData = Array.from(
      { length: pipeline.channelCount },
      (_, channelIndex) => audioBuffer.getChannelData(channelIndex)
    );
    const bytesPerSample = pipeline.bitsPerSample / 8;
    const bytesPerFrame = bytesPerSample * pipeline.channelCount;

    for (let frameIndex = 0; frameIndex < frameCount; frameIndex += 1) {
      const frameOffset = frameIndex * bytesPerFrame;

      for (let channelIndex = 0; channelIndex < pipeline.channelCount; channelIndex += 1) {
        const sampleOffset = frameOffset + (channelIndex * bytesPerSample);

        channelData[channelIndex][frameIndex] = sampleData.getInt16(sampleOffset, true) / 32768;
      }
    }

    return audioBuffer;
  }

  /** Concatenate two byte arrays, avoiding a copy when one is empty. */
  private concatBytes(first: Uint8Array, second: Uint8Array): Uint8Array {
    if (first.byteLength === 0) {
      return this.copyBytes(second);
    }

    if (second.byteLength === 0) {
      return this.copyBytes(first);
    }

    const combined = new Uint8Array(first.byteLength + second.byteLength);

    combined.set(first, 0);
    combined.set(second, first.byteLength);

    return combined;
  }

  /** Return an owned copy of the bytes (detached from any shared buffer). */
  private copyBytes(bytes: Uint8Array): Uint8Array {
    return bytes.byteLength > 0 ? new Uint8Array(bytes) : new Uint8Array(0);
  }
}
|
||||
@@ -0,0 +1,67 @@
|
||||
import type {
|
||||
ElectronApi,
|
||||
ElectronWindow,
|
||||
LinuxScreenShareAudioRoutingInfo,
|
||||
LinuxScreenShareMonitorAudioChunkPayload,
|
||||
LinuxScreenShareMonitorAudioEndedPayload,
|
||||
LinuxScreenShareMonitorCaptureInfo
|
||||
} from '../../../../core/platform/electron/electron-api.models';
|
||||
|
||||
/** A capturable desktop source as reported by Electron's desktopCapturer. */
export interface DesktopSource {
  id: string;
  name: string;
  // Preview image for the source picker — presumably a data URL; confirm
  // against the main-process getSources implementation.
  thumbnail: string;
}

/** Result of choosing which source to share (via picker or default policy). */
export interface ElectronDesktopSourceSelection {
  includeSystemAudio: boolean;
  source: DesktopSource;
}

/** A started Electron desktop capture plus the audio decision actually applied. */
export interface ElectronDesktopCaptureResult {
  includeSystemAudio: boolean;
  stream: MediaStream;
}

// Re-exported so capture implementations can import everything from './shared'.
export type {
  LinuxScreenShareAudioRoutingInfo,
  LinuxScreenShareMonitorAudioChunkPayload,
  LinuxScreenShareMonitorAudioEndedPayload,
  LinuxScreenShareMonitorCaptureInfo
};

/**
 * The subset of the Electron preload API that screen sharing needs.
 * All members are optional — callers must feature-check before use.
 */
export type ScreenShareElectronApi = Partial<Pick<
  ElectronApi,
  | 'getSources'
  | 'prepareLinuxScreenShareAudioRouting'
  | 'activateLinuxScreenShareAudioRouting'
  | 'deactivateLinuxScreenShareAudioRouting'
  | 'startLinuxScreenShareMonitorCapture'
  | 'stopLinuxScreenShareMonitorCapture'
  | 'onLinuxScreenShareMonitorAudioChunk'
  | 'onLinuxScreenShareMonitorAudioEnded'
>>;

/** Chromium's non-standard desktopCapturer video constraint shape. */
export type ElectronDesktopVideoConstraint = MediaTrackConstraints & {
  mandatory: {
    chromeMediaSource: 'desktop';
    chromeMediaSourceId: string;
    maxWidth: number;
    maxHeight: number;
    maxFrameRate: number;
  };
};

/** Chromium's non-standard desktopCapturer audio constraint shape. */
export type ElectronDesktopAudioConstraint = MediaTrackConstraints & {
  mandatory: {
    chromeMediaSource: 'desktop';
    chromeMediaSourceId: string;
  };
};

/** getUserMedia constraints for an Electron desktop capture. */
export interface ElectronDesktopMediaStreamConstraints extends MediaStreamConstraints {
  video: ElectronDesktopVideoConstraint;
  audio?: false | ElectronDesktopAudioConstraint;
}

/** Alias kept for screen-share code that needs the Electron-augmented window. */
export type ScreenShareWindow = ElectronWindow;
|
||||
@@ -0,0 +1,575 @@
|
||||
/* eslint-disable @typescript-eslint/no-non-null-assertion, @typescript-eslint/member-ordering, id-denylist */
|
||||
/**
|
||||
* Manages screen sharing: getDisplayMedia / Electron desktop capturer,
|
||||
* system-audio capture, and attaching screen tracks to peers.
|
||||
*/
|
||||
import { WebRTCLogger } from '../logging/webrtc-logger';
|
||||
import { ScreenShareQualityPreset, ScreenShareStartOptions } from '../screen-share.config';
|
||||
import { PeerData } from '../realtime.types';
|
||||
import {
|
||||
TRACK_KIND_AUDIO,
|
||||
TRACK_KIND_VIDEO,
|
||||
TRANSCEIVER_SEND_RECV,
|
||||
TRANSCEIVER_RECV_ONLY
|
||||
} from '../realtime.constants';
|
||||
import { DEFAULT_SCREEN_SHARE_START_OPTIONS, SCREEN_SHARE_QUALITY_PRESETS } from '../screen-share.config';
|
||||
import { BrowserScreenShareCapture } from './screen-share-platforms/browser-screen-share.capture';
|
||||
import { DesktopElectronScreenShareCapture } from './screen-share-platforms/desktop-electron-screen-share.capture';
|
||||
import { LinuxElectronScreenShareCapture } from './screen-share-platforms/linux-electron-screen-share.capture';
|
||||
import { getElectronApi } from '../../../core/platform/electron/get-electron-api';
|
||||
import { ScreenShareElectronApi } from './screen-share-platforms/shared';
|
||||
|
||||
/**
 * Callbacks the ScreenShareManager needs from the owning service.
 */
export interface ScreenShareCallbacks {
  /** All currently active peer connections, keyed by peer id. */
  getActivePeers(): Map<string, PeerData>;
  /** The local microphone/camera stream, or `null` when voice is inactive. */
  getLocalMediaStream(): MediaStream | null;
  /** Trigger SDP renegotiation with the given peer after senders change. */
  renegotiate(peerId: string): Promise<void>;
  /** Broadcast the local voice/screen state to all peers. */
  broadcastCurrentStates(): void;
  /**
   * Optional UI hook: ask the user to pick one of the capturable desktop
   * sources. Expected to reject when the user cancels the picker.
   */
  selectDesktopSource?(
    sources: readonly { id: string; name: string; thumbnail: string }[],
    options: { includeSystemAudio: boolean }
  ): Promise<{
    includeSystemAudio: boolean;
    source: { id: string; name: string; thumbnail: string };
  }>;
  /** Optional hook: publish the local screen-share state to app-level stores. */
  updateLocalScreenShareState?(state: LocalScreenShareState): void;
}

/** Which capture path produced the active screen stream. */
type ScreenShareCaptureMethod = 'display-media' | 'electron-desktop' | 'linux-electron';

/** Snapshot of the local screen-share session published to the owning service. */
export interface LocalScreenShareState {
  /** Whether a share is currently running. */
  active: boolean;
  /** Capture path in use, or `null` when inactive. */
  captureMethod: ScreenShareCaptureMethod | null;
  /** Whether system audio is being sent alongside the screen video. */
  includeSystemAudio: boolean;
  /** The live capture stream, or `null` when inactive. */
  stream: MediaStream | null;
  /** When true, remote voice playback should be muted locally during the share. */
  suppressRemotePlayback: boolean;
  /** When true, remote playback should be forced to the default output device. */
  forceDefaultRemotePlaybackOutput: boolean;
}
|
||||
|
||||
export class ScreenShareManager {
|
||||
/** The active screen-capture stream. */
|
||||
private activeScreenStream: MediaStream | null = null;
|
||||
|
||||
/** Optional system-audio stream captured alongside the screen. */
|
||||
private screenAudioStream: MediaStream | null = null;
|
||||
|
||||
/** The quality preset currently applied to the active share. */
|
||||
private activeScreenPreset: ScreenShareQualityPreset | null = null;
|
||||
|
||||
/** Remote peers that explicitly requested screen-share video. */
|
||||
private readonly requestedViewerPeerIds = new Set<string>();
|
||||
|
||||
/** Browser `getDisplayMedia` capture path. */
|
||||
private readonly browserScreenShareCapture: BrowserScreenShareCapture;
|
||||
|
||||
/** Desktop Electron capture path for non-Linux desktop builds. */
|
||||
private readonly desktopElectronScreenShareCapture: DesktopElectronScreenShareCapture;
|
||||
|
||||
/** Linux Electron screen/audio capture path with isolated audio routing. */
|
||||
private readonly linuxElectronScreenShareCapture: LinuxElectronScreenShareCapture;
|
||||
|
||||
/** Whether screen sharing is currently active. */
|
||||
private isScreenActive = false;
|
||||
|
||||
constructor(
|
||||
private readonly logger: WebRTCLogger,
|
||||
private callbacks: ScreenShareCallbacks
|
||||
) {
|
||||
this.browserScreenShareCapture = new BrowserScreenShareCapture(this.logger);
|
||||
this.desktopElectronScreenShareCapture = new DesktopElectronScreenShareCapture(this.logger, {
|
||||
getElectronApi: () => this.getElectronApi(),
|
||||
getSelectDesktopSource: () => this.callbacks.selectDesktopSource
|
||||
});
|
||||
|
||||
this.linuxElectronScreenShareCapture = new LinuxElectronScreenShareCapture(this.logger, {
|
||||
getElectronApi: () => this.getElectronApi(),
|
||||
onCaptureEnded: () => {
|
||||
if (this.isScreenActive) {
|
||||
this.stopScreenShare();
|
||||
}
|
||||
},
|
||||
startDisplayMedia: async (options, preset) =>
|
||||
await this.browserScreenShareCapture.startCapture(options, preset)
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Replace the callback set at runtime.
|
||||
* Needed because of circular initialisation between managers.
|
||||
*
|
||||
* @param nextCallbacks - The new callback interface to wire into this manager.
|
||||
*/
|
||||
setCallbacks(nextCallbacks: ScreenShareCallbacks): void {
|
||||
this.callbacks = nextCallbacks;
|
||||
}
|
||||
|
||||
/** Returns the current screen-capture stream, or `null` if inactive. */
|
||||
getScreenStream(): MediaStream | null { return this.activeScreenStream; }
|
||||
/** Whether screen sharing is currently active. */
|
||||
getIsScreenActive(): boolean { return this.isScreenActive; }
|
||||
|
||||
/**
|
||||
* Begin screen sharing.
|
||||
*
|
||||
* On Linux Electron builds, prefers a dedicated PulseAudio/PipeWire routing
|
||||
* path so remote voice playback is kept out of captured system audio.
|
||||
* On Windows Electron builds, prefers `getDisplayMedia` with system audio
|
||||
* so the separate mic `getUserMedia` stream is not disrupted; falls back to
|
||||
* Electron desktop capture only when `getDisplayMedia` fails entirely.
|
||||
* In browser contexts, uses `getDisplayMedia`.
|
||||
*
|
||||
* @param options - Screen-share capture options.
|
||||
* @returns The captured screen {@link MediaStream}.
|
||||
* @throws If both Electron and browser screen capture fail.
|
||||
*/
|
||||
async startScreenShare(options: ScreenShareStartOptions = DEFAULT_SCREEN_SHARE_START_OPTIONS): Promise<MediaStream> {
|
||||
const shareOptions = {
|
||||
...DEFAULT_SCREEN_SHARE_START_OPTIONS,
|
||||
...options
|
||||
};
|
||||
const preset = SCREEN_SHARE_QUALITY_PRESETS[shareOptions.quality];
|
||||
const electronDesktopCaptureAvailable = this.desktopElectronScreenShareCapture.isAvailable();
|
||||
|
||||
let captureMethod: ScreenShareCaptureMethod | null = null;
|
||||
|
||||
try {
|
||||
this.logger.info('startScreenShare invoked', shareOptions);
|
||||
|
||||
if (this.activeScreenStream) {
|
||||
this.stopScreenShare();
|
||||
}
|
||||
|
||||
await this.linuxElectronScreenShareCapture.awaitPendingReset();
|
||||
|
||||
this.activeScreenStream = null;
|
||||
|
||||
if (shareOptions.includeSystemAudio && this.linuxElectronScreenShareCapture.isSupported()) {
|
||||
try {
|
||||
this.activeScreenStream = await this.linuxElectronScreenShareCapture.startCapture(shareOptions, preset);
|
||||
captureMethod = 'linux-electron';
|
||||
} catch (error) {
|
||||
this.rethrowIfScreenShareAborted(error);
|
||||
this.logger.warn('Linux Electron audio routing failed; falling back to standard capture', error);
|
||||
}
|
||||
}
|
||||
|
||||
if (!this.activeScreenStream && shareOptions.includeSystemAudio) {
|
||||
try {
|
||||
this.activeScreenStream = await this.browserScreenShareCapture.startCapture(shareOptions, preset);
|
||||
captureMethod = 'display-media';
|
||||
|
||||
if (this.activeScreenStream.getAudioTracks().length === 0) {
|
||||
if (electronDesktopCaptureAvailable) {
|
||||
// On Windows Electron, keep the getDisplayMedia stream for video
|
||||
// rather than falling through to getUserMedia desktop audio which
|
||||
// can replace or kill the active mic stream.
|
||||
this.logger.warn(
|
||||
'getDisplayMedia did not provide system audio; '
|
||||
+ 'continuing without system audio to preserve mic stream'
|
||||
);
|
||||
|
||||
shareOptions.includeSystemAudio = false;
|
||||
} else {
|
||||
this.logger.warn('getDisplayMedia did not provide system audio; trying next capture method');
|
||||
this.activeScreenStream.getTracks().forEach((track) => track.stop());
|
||||
this.activeScreenStream = null;
|
||||
captureMethod = null;
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
this.rethrowIfScreenShareAborted(error);
|
||||
this.logger.warn('getDisplayMedia with system audio failed; falling back to Electron desktop capture', error);
|
||||
}
|
||||
}
|
||||
|
||||
if (!this.activeScreenStream && electronDesktopCaptureAvailable) {
|
||||
try {
|
||||
const electronCapture = await this.desktopElectronScreenShareCapture.startCapture(shareOptions, preset);
|
||||
|
||||
this.activeScreenStream = electronCapture.stream;
|
||||
shareOptions.includeSystemAudio = electronCapture.includeSystemAudio;
|
||||
captureMethod = 'electron-desktop';
|
||||
} catch (error) {
|
||||
this.rethrowIfScreenShareAborted(error);
|
||||
this.logger.warn('Electron desktop capture failed; falling back to getDisplayMedia', error);
|
||||
}
|
||||
}
|
||||
|
||||
if (!this.activeScreenStream) {
|
||||
this.activeScreenStream = await this.browserScreenShareCapture.startCapture(shareOptions, preset);
|
||||
captureMethod = 'display-media';
|
||||
}
|
||||
|
||||
this.configureScreenStream(preset);
|
||||
this.prepareScreenAudio(shareOptions.includeSystemAudio);
|
||||
this.activeScreenPreset = preset;
|
||||
this.attachScreenTracksToPeers(preset);
|
||||
|
||||
this.isScreenActive = true;
|
||||
this.publishLocalScreenShareState(shareOptions.includeSystemAudio, captureMethod);
|
||||
this.callbacks.broadcastCurrentStates();
|
||||
|
||||
const activeScreenStream = this.activeScreenStream;
|
||||
|
||||
if (!activeScreenStream) {
|
||||
throw new Error('Screen sharing did not produce an active stream.');
|
||||
}
|
||||
|
||||
const screenVideoTrack = activeScreenStream.getVideoTracks()[0];
|
||||
|
||||
if (screenVideoTrack) {
|
||||
screenVideoTrack.onended = () => {
|
||||
this.logger.warn('Screen video track ended');
|
||||
this.stopScreenShare();
|
||||
};
|
||||
}
|
||||
|
||||
return activeScreenStream;
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to start screen share', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Stop screen sharing and remove screen-share tracks on all peers.
|
||||
*
|
||||
* Stops all screen-capture tracks, resets screen transceivers to receive-only,
|
||||
* and triggers renegotiation.
|
||||
*/
|
||||
stopScreenShare(): void {
|
||||
if (this.activeScreenStream) {
|
||||
this.activeScreenStream.getTracks().forEach((track) => track.stop());
|
||||
this.activeScreenStream = null;
|
||||
}
|
||||
|
||||
this.linuxElectronScreenShareCapture.scheduleReset();
|
||||
|
||||
this.screenAudioStream = null;
|
||||
this.activeScreenPreset = null;
|
||||
this.isScreenActive = false;
|
||||
this.publishLocalScreenShareState(false, null);
|
||||
this.callbacks.broadcastCurrentStates();
|
||||
|
||||
this.callbacks.getActivePeers().forEach((peerData, peerId) => {
|
||||
this.detachScreenTracksFromPeer(peerData, peerId);
|
||||
});
|
||||
}
|
||||
|
||||
requestScreenShareForPeer(peerId: string): void {
|
||||
this.requestedViewerPeerIds.add(peerId);
|
||||
|
||||
if (!this.isScreenActive || !this.activeScreenPreset) {
|
||||
return;
|
||||
}
|
||||
|
||||
const peerData = this.callbacks.getActivePeers().get(peerId);
|
||||
|
||||
if (!peerData) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.attachScreenTracksToPeer(peerData, peerId, this.activeScreenPreset);
|
||||
}
|
||||
|
||||
stopScreenShareForPeer(peerId: string): void {
|
||||
this.requestedViewerPeerIds.delete(peerId);
|
||||
|
||||
const peerData = this.callbacks.getActivePeers().get(peerId);
|
||||
|
||||
if (!peerData) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.detachScreenTracksFromPeer(peerData, peerId);
|
||||
}
|
||||
|
||||
clearScreenShareRequest(peerId: string): void {
|
||||
this.requestedViewerPeerIds.delete(peerId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Attach the current screen-share tracks to a newly-connected peer.
|
||||
*
|
||||
* This is needed when a peer connects after screen sharing already started,
|
||||
* because `startScreenShare()` only pushes tracks to peers that existed at
|
||||
* the time sharing began.
|
||||
*/
|
||||
syncScreenShareToPeer(peerId: string): void {
|
||||
if (
|
||||
!this.requestedViewerPeerIds.has(peerId)
|
||||
|| !this.isScreenActive
|
||||
|| !this.activeScreenStream
|
||||
|| !this.activeScreenPreset
|
||||
) {
|
||||
return;
|
||||
}
|
||||
|
||||
const peerData = this.callbacks.getActivePeers().get(peerId);
|
||||
|
||||
if (!peerData) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.attachScreenTracksToPeer(peerData, peerId, this.activeScreenPreset);
|
||||
}
|
||||
|
||||
/** Clean up all resources. */
|
||||
destroy(): void {
|
||||
this.stopScreenShare();
|
||||
}
|
||||
|
||||
private getElectronApi(): ScreenShareElectronApi | null {
|
||||
return getElectronApi();
|
||||
}
|
||||
|
||||
private publishLocalScreenShareState(
|
||||
includeSystemAudio: boolean,
|
||||
captureMethod: ScreenShareCaptureMethod | null
|
||||
): void {
|
||||
this.callbacks.updateLocalScreenShareState?.({
|
||||
active: this.isScreenActive,
|
||||
captureMethod: this.isScreenActive ? captureMethod : null,
|
||||
includeSystemAudio: this.isScreenActive ? includeSystemAudio : false,
|
||||
stream: this.isScreenActive ? this.activeScreenStream : null,
|
||||
suppressRemotePlayback: this.isScreenActive
|
||||
&& this.desktopElectronScreenShareCapture.shouldSuppressRemotePlaybackDuringShare(
|
||||
includeSystemAudio,
|
||||
captureMethod === 'electron-desktop'
|
||||
),
|
||||
forceDefaultRemotePlaybackOutput: this.isScreenActive
|
||||
&& includeSystemAudio
|
||||
&& captureMethod === 'linux-electron'
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a dedicated stream for system audio captured alongside the screen.
|
||||
*
|
||||
* @param includeSystemAudio - Whether system audio should be sent.
|
||||
*/
|
||||
private prepareScreenAudio(includeSystemAudio: boolean): void {
|
||||
const screenAudioTrack = includeSystemAudio ? (this.activeScreenStream?.getAudioTracks()[0] || null) : null;
|
||||
|
||||
if (!screenAudioTrack) {
|
||||
if (includeSystemAudio) {
|
||||
this.logger.warn('System audio was requested, but no screen audio track was captured');
|
||||
}
|
||||
|
||||
this.screenAudioStream = null;
|
||||
return;
|
||||
}
|
||||
|
||||
this.screenAudioStream = new MediaStream([screenAudioTrack]);
|
||||
this.logger.attachTrackDiagnostics(screenAudioTrack, 'screenAudio');
|
||||
this.logger.logStream('screenAudio', this.screenAudioStream);
|
||||
}
|
||||
|
||||
/**
|
||||
* Attach screen video and optional system-audio tracks to all
|
||||
* active peer connections, then trigger SDP renegotiation.
|
||||
*
|
||||
* @param options - Screen-share capture options.
|
||||
* @param preset - Selected quality preset for sender tuning.
|
||||
*/
|
||||
private attachScreenTracksToPeers(
|
||||
preset: ScreenShareQualityPreset
|
||||
): void {
|
||||
this.callbacks.getActivePeers().forEach((peerData, peerId) => {
|
||||
if (!this.requestedViewerPeerIds.has(peerId)) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.attachScreenTracksToPeer(peerData, peerId, preset);
|
||||
});
|
||||
}
|
||||
|
||||
private attachScreenTracksToPeer(
|
||||
peerData: PeerData,
|
||||
peerId: string,
|
||||
preset: ScreenShareQualityPreset
|
||||
): void {
|
||||
if (!this.activeScreenStream) {
|
||||
return;
|
||||
}
|
||||
|
||||
const screenVideoTrack = this.activeScreenStream.getVideoTracks()[0];
|
||||
|
||||
if (!screenVideoTrack) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.logger.attachTrackDiagnostics(screenVideoTrack, `screenVideo:${peerId}`);
|
||||
|
||||
let videoSender = peerData.videoSender || peerData.connection.getSenders().find((sender) => sender.track?.kind === TRACK_KIND_VIDEO);
|
||||
|
||||
if (!videoSender) {
|
||||
const videoTransceiver = peerData.connection.addTransceiver(TRACK_KIND_VIDEO, {
|
||||
direction: TRANSCEIVER_SEND_RECV
|
||||
});
|
||||
|
||||
videoSender = videoTransceiver.sender;
|
||||
peerData.videoSender = videoSender;
|
||||
} else {
|
||||
const videoTransceiver = peerData.connection.getTransceivers().find(
|
||||
(transceiver) => transceiver.sender === videoSender
|
||||
);
|
||||
|
||||
if (videoTransceiver?.direction === TRANSCEIVER_RECV_ONLY) {
|
||||
videoTransceiver.direction = TRANSCEIVER_SEND_RECV;
|
||||
}
|
||||
}
|
||||
|
||||
peerData.screenVideoSender = videoSender;
|
||||
|
||||
if (typeof videoSender.setStreams === 'function') {
|
||||
videoSender.setStreams(this.activeScreenStream);
|
||||
}
|
||||
|
||||
videoSender.replaceTrack(screenVideoTrack)
|
||||
.then(() => {
|
||||
this.logger.info('screen video replaceTrack ok', { peerId });
|
||||
void this.applyScreenShareVideoParameters(videoSender, preset, peerId);
|
||||
})
|
||||
.catch((error) => this.logger.error('screen video replaceTrack failed', error));
|
||||
|
||||
const screenAudioTrack = this.screenAudioStream?.getAudioTracks()[0] || null;
|
||||
|
||||
if (screenAudioTrack) {
|
||||
this.logger.attachTrackDiagnostics(screenAudioTrack, `screenAudio:${peerId}`);
|
||||
let screenAudioSender = peerData.screenAudioSender;
|
||||
|
||||
if (!screenAudioSender) {
|
||||
const screenAudioTransceiver = peerData.connection.addTransceiver(TRACK_KIND_AUDIO, {
|
||||
direction: TRANSCEIVER_SEND_RECV
|
||||
});
|
||||
|
||||
screenAudioSender = screenAudioTransceiver.sender;
|
||||
} else {
|
||||
const screenAudioTransceiver = peerData.connection.getTransceivers().find(
|
||||
(transceiver) => transceiver.sender === screenAudioSender
|
||||
);
|
||||
|
||||
if (screenAudioTransceiver?.direction === TRANSCEIVER_RECV_ONLY) {
|
||||
screenAudioTransceiver.direction = TRANSCEIVER_SEND_RECV;
|
||||
}
|
||||
}
|
||||
|
||||
peerData.screenAudioSender = screenAudioSender;
|
||||
|
||||
if (typeof screenAudioSender.setStreams === 'function') {
|
||||
screenAudioSender.setStreams(this.activeScreenStream);
|
||||
}
|
||||
|
||||
screenAudioSender.replaceTrack(screenAudioTrack)
|
||||
.then(() => this.logger.info('screen audio replaceTrack ok', { peerId }))
|
||||
.catch((error) => this.logger.error('screen audio replaceTrack failed', error));
|
||||
}
|
||||
|
||||
this.callbacks.renegotiate(peerId);
|
||||
}
|
||||
|
||||
private detachScreenTracksFromPeer(peerData: PeerData, peerId: string): void {
|
||||
const transceivers = peerData.connection.getTransceivers();
|
||||
const videoTransceiver = transceivers.find(
|
||||
(transceiver) => transceiver.sender === peerData.videoSender || transceiver.sender === peerData.screenVideoSender
|
||||
);
|
||||
const screenAudioTransceiver = transceivers.find(
|
||||
(transceiver) => transceiver.sender === peerData.screenAudioSender
|
||||
);
|
||||
|
||||
if (videoTransceiver) {
|
||||
videoTransceiver.sender.replaceTrack(null).catch((error) => {
|
||||
this.logger.error('Failed to clear screen video sender track', error, { peerId });
|
||||
});
|
||||
|
||||
if (videoTransceiver.direction === TRANSCEIVER_SEND_RECV) {
|
||||
videoTransceiver.direction = TRANSCEIVER_RECV_ONLY;
|
||||
}
|
||||
}
|
||||
|
||||
if (screenAudioTransceiver) {
|
||||
screenAudioTransceiver.sender.replaceTrack(null).catch((error) => {
|
||||
this.logger.error('Failed to clear screen audio sender track', error, { peerId });
|
||||
});
|
||||
|
||||
if (screenAudioTransceiver.direction === TRANSCEIVER_SEND_RECV) {
|
||||
screenAudioTransceiver.direction = TRANSCEIVER_RECV_ONLY;
|
||||
}
|
||||
}
|
||||
|
||||
peerData.screenVideoSender = undefined;
|
||||
peerData.screenAudioSender = undefined;
|
||||
|
||||
this.callbacks.renegotiate(peerId);
|
||||
}
|
||||
|
||||
private isScreenShareSelectionAborted(error: unknown): boolean {
|
||||
return error instanceof Error
|
||||
&& (error.name === 'AbortError' || error.name === 'NotAllowedError');
|
||||
}
|
||||
|
||||
private rethrowIfScreenShareAborted(error: unknown): void {
|
||||
if (this.isScreenShareSelectionAborted(error)) {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
private configureScreenStream(preset: ScreenShareQualityPreset): void {
|
||||
const screenVideoTrack = this.activeScreenStream?.getVideoTracks()[0];
|
||||
|
||||
if (!screenVideoTrack) {
|
||||
throw new Error('Screen capture returned no video track.');
|
||||
}
|
||||
|
||||
if ('contentHint' in screenVideoTrack) {
|
||||
screenVideoTrack.contentHint = preset.contentHint;
|
||||
}
|
||||
|
||||
this.logger.attachTrackDiagnostics(screenVideoTrack, 'screenVideo');
|
||||
this.logger.logStream('screen', this.activeScreenStream);
|
||||
|
||||
if (typeof screenVideoTrack.applyConstraints === 'function') {
|
||||
screenVideoTrack.applyConstraints({
|
||||
width: { ideal: preset.width, max: preset.width },
|
||||
height: { ideal: preset.height, max: preset.height },
|
||||
frameRate: { ideal: preset.frameRate, max: preset.frameRate }
|
||||
}).catch((error) => {
|
||||
this.logger.warn('Failed to re-apply screen video constraints', error);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private async applyScreenShareVideoParameters(
|
||||
sender: RTCRtpSender,
|
||||
preset: ScreenShareQualityPreset,
|
||||
peerId: string
|
||||
): Promise<void> {
|
||||
try {
|
||||
const params = sender.getParameters();
|
||||
const encodings = params.encodings?.length ? params.encodings : [{} as RTCRtpEncodingParameters];
|
||||
|
||||
params.encodings = encodings.map((encoding, index) => index === 0
|
||||
? {
|
||||
...encoding,
|
||||
maxBitrate: preset.maxBitrateBps,
|
||||
maxFramerate: preset.frameRate,
|
||||
scaleResolutionDownBy: preset.scaleResolutionDownBy ?? encoding.scaleResolutionDownBy ?? 1
|
||||
}
|
||||
: encoding);
|
||||
|
||||
(params as RTCRtpSendParameters & { degradationPreference?: string }).degradationPreference = preset.degradationPreference;
|
||||
|
||||
await sender.setParameters(params);
|
||||
this.logger.info('Applied screen-share sender parameters', {
|
||||
peerId,
|
||||
maxBitrate: preset.maxBitrateBps,
|
||||
maxFramerate: preset.frameRate
|
||||
});
|
||||
} catch (error) {
|
||||
this.logger.warn('Failed to apply screen-share sender parameters', error, { peerId });
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,117 @@
|
||||
import { MediaManager } from './media.manager';
|
||||
import { LatencyProfile } from '../realtime.constants';
|
||||
import { VoiceStateSnapshot } from '../realtime.types';
|
||||
|
||||
/** Collaborators and state setters the voice-session controller delegates to. */
interface VoiceSessionControllerDependencies {
  /** Owns the actual media streams, mute/deafen flags, and heartbeat. */
  mediaManager: MediaManager;
  /** Whether a screen share is currently running (owned elsewhere). */
  getIsScreenSharing(): boolean;
  /** Push the voice-connected flag to app state. */
  setVoiceConnected(connected: boolean): void;
  /** Push the mic-muted flag to app state. */
  setMuted(muted: boolean): void;
  /** Push the self-deafened flag to app state. */
  setDeafened(deafened: boolean): void;
  /** Push the noise-reduction flag to app state. */
  setNoiseReductionEnabled(enabled: boolean): void;
}
|
||||
|
||||
/**
 * Thin controller over {@link MediaManager} for the voice session:
 * delegates every operation and mirrors the resulting media flags
 * (connected/muted/deafened/noise-reduction) into app state via the
 * dependency setters. Also remembers which server the voice session
 * belongs to (`voiceServerId`).
 */
export class VoiceSessionController {
  // Server id captured when the voice heartbeat starts; cleared on disable/reset.
  private voiceServerId: string | null = null;

  constructor(
    private readonly dependencies: VoiceSessionControllerDependencies
  ) {}

  /** The server id the current voice session belongs to, or `null`. */
  getVoiceServerId(): string | null {
    return this.voiceServerId;
  }

  /**
   * The voice session's server id when set, otherwise the given active server.
   * NOTE(review): uses `||`, so an empty-string voiceServerId would also fall
   * back to `activeServerId` — presumably ids are never empty strings; confirm.
   */
  getEffectiveServerId(activeServerId: string | null): string | null {
    return this.voiceServerId || activeServerId;
  }

  /** Mark the session as connected in app state. */
  handleVoiceConnected(): void {
    this.dependencies.setVoiceConnected(true);
  }

  /** Snapshot of the current voice state, read live from the media manager. */
  getCurrentVoiceState(): VoiceStateSnapshot {
    return {
      isConnected: this.dependencies.mediaManager.getIsVoiceActive(),
      isMuted: this.dependencies.mediaManager.getIsMicMuted(),
      isDeafened: this.dependencies.mediaManager.getIsSelfDeafened(),
      isScreenSharing: this.dependencies.getIsScreenSharing(),
      roomId: this.dependencies.mediaManager.getCurrentVoiceRoomId(),
      serverId: this.dependencies.mediaManager.getCurrentVoiceServerId()
    };
  }

  /**
   * Enable the microphone via the media manager and sync the resulting
   * connected/muted/deafened flags into app state.
   *
   * @returns The local voice {@link MediaStream}.
   */
  async enableVoice(): Promise<MediaStream> {
    const stream = await this.dependencies.mediaManager.enableVoice();

    this.syncMediaSignals();
    return stream;
  }

  /** Tear down voice: forget the server id, stop media, mark disconnected. */
  disableVoice(): void {
    this.voiceServerId = null;
    this.dependencies.mediaManager.disableVoice();
    this.dependencies.setVoiceConnected(false);
  }

  /** Adopt an externally created local stream, then sync flags to app state. */
  async setLocalStream(stream: MediaStream): Promise<void> {
    await this.dependencies.mediaManager.setLocalStream(stream);
    this.syncMediaSignals();
  }

  /** Toggle (or explicitly set) mic mute and mirror the new value to app state. */
  toggleMute(muted?: boolean): void {
    this.dependencies.mediaManager.toggleMute(muted);
    this.dependencies.setMuted(this.dependencies.mediaManager.getIsMicMuted());
  }

  /** Toggle (or explicitly set) self-deafen and mirror the new value to app state. */
  toggleDeafen(deafened?: boolean): void {
    this.dependencies.mediaManager.toggleDeafen(deafened);
    this.dependencies.setDeafened(this.dependencies.mediaManager.getIsSelfDeafened());
  }

  /** Toggle (or explicitly set) noise reduction and mirror the new value to app state. */
  async toggleNoiseReduction(enabled?: boolean): Promise<void> {
    await this.dependencies.mediaManager.toggleNoiseReduction(enabled);
    this.dependencies.setNoiseReductionEnabled(this.dependencies.mediaManager.getIsNoiseReductionEnabled());
  }

  /** Delegate: set playback volume. */
  setOutputVolume(volume: number): void {
    this.dependencies.mediaManager.setOutputVolume(volume);
  }

  /** Delegate: set microphone input volume. */
  setInputVolume(volume: number): void {
    this.dependencies.mediaManager.setInputVolume(volume);
  }

  /** Delegate: set the target audio bitrate in kbps. */
  async setAudioBitrate(kbps: number): Promise<void> {
    return await this.dependencies.mediaManager.setAudioBitrate(kbps);
  }

  /** Delegate: switch the audio latency profile. */
  async setLatencyProfile(profile: LatencyProfile): Promise<void> {
    return await this.dependencies.mediaManager.setLatencyProfile(profile);
  }

  /**
   * Start the voice heartbeat and, when a server id is provided, remember it
   * as the session's owning server.
   */
  startVoiceHeartbeat(roomId?: string, serverId?: string): void {
    if (serverId) {
      this.voiceServerId = serverId;
    }

    this.dependencies.mediaManager.startVoiceHeartbeat(roomId, serverId);
  }

  /** Delegate: stop the voice heartbeat. */
  stopVoiceHeartbeat(): void {
    this.dependencies.mediaManager.stopVoiceHeartbeat();
  }

  /** Full teardown: heartbeat off, media off, server id forgotten, state disconnected. */
  resetVoiceSession(): void {
    this.voiceServerId = null;
    this.dependencies.mediaManager.stopVoiceHeartbeat();
    this.dependencies.mediaManager.disableVoice();
    this.dependencies.setVoiceConnected(false);
  }

  // Mirror the media manager's live flags into app state after a change.
  private syncMediaSignals(): void {
    this.dependencies.setVoiceConnected(this.dependencies.mediaManager.getIsVoiceActive());
    this.dependencies.setMuted(this.dependencies.mediaManager.getIsMicMuted());
    this.dependencies.setDeafened(this.dependencies.mediaManager.getIsSelfDeafened());
  }
}
|
||||
@@ -0,0 +1,190 @@
|
||||
import {
|
||||
CONNECTION_STATE_CLOSED,
|
||||
CONNECTION_STATE_CONNECTED,
|
||||
CONNECTION_STATE_DISCONNECTED,
|
||||
CONNECTION_STATE_FAILED,
|
||||
DATA_CHANNEL_LABEL,
|
||||
ICE_SERVERS,
|
||||
SIGNALING_TYPE_ICE_CANDIDATE,
|
||||
TRACK_KIND_AUDIO,
|
||||
TRACK_KIND_VIDEO,
|
||||
TRANSCEIVER_RECV_ONLY,
|
||||
TRANSCEIVER_SEND_RECV
|
||||
} from '../../realtime.constants';
|
||||
import { recordDebugNetworkConnectionState } from '../../logging/debug-network-metrics';
|
||||
import { PeerData } from '../../realtime.types';
|
||||
import { ConnectionLifecycleHandlers, PeerConnectionManagerContext } from '../shared';
|
||||
|
||||
/**
|
||||
* Create and configure a new RTCPeerConnection for a remote peer.
|
||||
*/
|
||||
export function createPeerConnection(
|
||||
context: PeerConnectionManagerContext,
|
||||
remotePeerId: string,
|
||||
isInitiator: boolean,
|
||||
handlers: ConnectionLifecycleHandlers
|
||||
): PeerData {
|
||||
const { callbacks, logger, state } = context;
|
||||
|
||||
logger.info('Creating peer connection', { remotePeerId, isInitiator });
|
||||
|
||||
const connection = new RTCPeerConnection({ iceServers: ICE_SERVERS });
|
||||
|
||||
let dataChannel: RTCDataChannel | null = null;
|
||||
|
||||
connection.onicecandidate = (event) => {
|
||||
if (event.candidate) {
|
||||
logger.info('ICE candidate gathered', {
|
||||
remotePeerId,
|
||||
candidateType: (event.candidate as RTCIceCandidate & { type?: string }).type
|
||||
});
|
||||
|
||||
callbacks.sendRawMessage({
|
||||
type: SIGNALING_TYPE_ICE_CANDIDATE,
|
||||
targetUserId: remotePeerId,
|
||||
payload: { candidate: event.candidate }
|
||||
});
|
||||
}
|
||||
};
|
||||
|
||||
connection.onconnectionstatechange = () => {
|
||||
logger.info('connectionstatechange', {
|
||||
remotePeerId,
|
||||
state: connection.connectionState
|
||||
});
|
||||
|
||||
recordDebugNetworkConnectionState(remotePeerId, connection.connectionState);
|
||||
|
||||
switch (connection.connectionState) {
|
||||
case CONNECTION_STATE_CONNECTED:
|
||||
handlers.clearPeerDisconnectGraceTimer(remotePeerId);
|
||||
handlers.addToConnectedPeers(remotePeerId);
|
||||
state.peerConnected$.next(remotePeerId);
|
||||
handlers.clearPeerReconnectTimer(remotePeerId);
|
||||
state.disconnectedPeerTracker.delete(remotePeerId);
|
||||
handlers.requestVoiceStateFromPeer(remotePeerId);
|
||||
break;
|
||||
|
||||
case CONNECTION_STATE_DISCONNECTED:
|
||||
handlers.schedulePeerDisconnectRecovery(remotePeerId);
|
||||
break;
|
||||
|
||||
case CONNECTION_STATE_FAILED:
|
||||
handlers.trackDisconnectedPeer(remotePeerId);
|
||||
handlers.removePeer(remotePeerId, { preserveReconnectState: true });
|
||||
handlers.schedulePeerReconnect(remotePeerId);
|
||||
break;
|
||||
|
||||
case CONNECTION_STATE_CLOSED:
|
||||
handlers.removePeer(remotePeerId);
|
||||
break;
|
||||
}
|
||||
};
|
||||
|
||||
connection.oniceconnectionstatechange = () => {
|
||||
logger.info('iceconnectionstatechange', {
|
||||
remotePeerId,
|
||||
state: connection.iceConnectionState
|
||||
});
|
||||
};
|
||||
|
||||
connection.onsignalingstatechange = () => {
|
||||
logger.info('signalingstatechange', {
|
||||
remotePeerId,
|
||||
state: connection.signalingState
|
||||
});
|
||||
};
|
||||
|
||||
connection.onnegotiationneeded = () => {
|
||||
logger.info('negotiationneeded', { remotePeerId });
|
||||
};
|
||||
|
||||
connection.ontrack = (event) => {
|
||||
handlers.handleRemoteTrack(event, remotePeerId);
|
||||
};
|
||||
|
||||
if (isInitiator) {
|
||||
dataChannel = connection.createDataChannel(DATA_CHANNEL_LABEL, { ordered: true });
|
||||
handlers.setupDataChannel(dataChannel, remotePeerId);
|
||||
} else {
|
||||
connection.ondatachannel = (event) => {
|
||||
logger.info('Received data channel', { remotePeerId });
|
||||
dataChannel = event.channel;
|
||||
|
||||
const existing = state.activePeerConnections.get(remotePeerId);
|
||||
|
||||
if (existing) {
|
||||
existing.dataChannel = dataChannel;
|
||||
}
|
||||
|
||||
handlers.setupDataChannel(dataChannel, remotePeerId);
|
||||
};
|
||||
}
|
||||
|
||||
const peerData: PeerData = {
|
||||
connection,
|
||||
dataChannel,
|
||||
isInitiator,
|
||||
pendingIceCandidates: [],
|
||||
audioSender: undefined,
|
||||
videoSender: undefined,
|
||||
remoteVoiceStreamIds: new Set<string>(),
|
||||
remoteScreenShareStreamIds: new Set<string>()
|
||||
};
|
||||
|
||||
if (isInitiator) {
|
||||
const audioTransceiver = connection.addTransceiver(TRACK_KIND_AUDIO, {
|
||||
direction: TRANSCEIVER_SEND_RECV
|
||||
});
|
||||
const videoTransceiver = connection.addTransceiver(TRACK_KIND_VIDEO, {
|
||||
direction: TRANSCEIVER_RECV_ONLY
|
||||
});
|
||||
|
||||
peerData.audioSender = audioTransceiver.sender;
|
||||
peerData.videoSender = videoTransceiver.sender;
|
||||
}
|
||||
|
||||
state.activePeerConnections.set(remotePeerId, peerData);
|
||||
|
||||
const localStream = callbacks.getLocalMediaStream();
|
||||
|
||||
if (localStream && isInitiator) {
|
||||
logger.logStream(`localStream->${remotePeerId}`, localStream);
|
||||
|
||||
localStream.getTracks().forEach((track) => {
|
||||
if (track.kind === TRACK_KIND_AUDIO && peerData.audioSender) {
|
||||
if (typeof peerData.audioSender.setStreams === 'function') {
|
||||
peerData.audioSender.setStreams(localStream);
|
||||
}
|
||||
|
||||
peerData.audioSender
|
||||
.replaceTrack(track)
|
||||
.then(() => logger.info('audio replaceTrack (init) ok', { remotePeerId }))
|
||||
.catch((error) =>
|
||||
logger.error('audio replaceTrack failed at createPeerConnection', error)
|
||||
);
|
||||
} else if (track.kind === TRACK_KIND_VIDEO && peerData.videoSender) {
|
||||
if (typeof peerData.videoSender.setStreams === 'function') {
|
||||
peerData.videoSender.setStreams(localStream);
|
||||
}
|
||||
|
||||
peerData.videoSender
|
||||
.replaceTrack(track)
|
||||
.then(() => logger.info('video replaceTrack (init) ok', { remotePeerId }))
|
||||
.catch((error) =>
|
||||
logger.error('video replaceTrack failed at createPeerConnection', error)
|
||||
);
|
||||
} else {
|
||||
const sender = connection.addTrack(track, localStream);
|
||||
|
||||
if (track.kind === TRACK_KIND_AUDIO)
|
||||
peerData.audioSender = sender;
|
||||
|
||||
if (track.kind === TRACK_KIND_VIDEO)
|
||||
peerData.videoSender = sender;
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return peerData;
|
||||
}
|
||||
@@ -0,0 +1,270 @@
|
||||
/* eslint-disable complexity */
|
||||
import {
|
||||
SIGNALING_TYPE_ANSWER,
|
||||
SIGNALING_TYPE_OFFER,
|
||||
TRACK_KIND_AUDIO,
|
||||
TRACK_KIND_VIDEO,
|
||||
TRANSCEIVER_SEND_RECV
|
||||
} from '../../realtime.constants';
|
||||
import {
|
||||
NegotiationHandlers,
|
||||
PeerConnectionManagerContext,
|
||||
PeerConnectionManagerState
|
||||
} from '../shared';
|
||||
|
||||
/**
|
||||
* Queue a negotiation task so SDP operations for a single peer never overlap.
|
||||
*/
|
||||
export function enqueueNegotiation(
|
||||
state: PeerConnectionManagerState,
|
||||
peerId: string,
|
||||
task: () => Promise<void>
|
||||
): void {
|
||||
const previousTask = state.peerNegotiationQueue.get(peerId) ?? Promise.resolve();
|
||||
const nextTask = previousTask.then(task, task);
|
||||
|
||||
state.peerNegotiationQueue.set(peerId, nextTask);
|
||||
}
|
||||
|
||||
export async function doCreateAndSendOffer(
|
||||
context: PeerConnectionManagerContext,
|
||||
remotePeerId: string
|
||||
): Promise<void> {
|
||||
const { callbacks, logger, state } = context;
|
||||
const peerData = state.activePeerConnections.get(remotePeerId);
|
||||
|
||||
if (!peerData)
|
||||
return;
|
||||
|
||||
try {
|
||||
const offer = await peerData.connection.createOffer();
|
||||
|
||||
await peerData.connection.setLocalDescription(offer);
|
||||
logger.info('Sending offer', {
|
||||
remotePeerId,
|
||||
type: offer.type,
|
||||
sdpLength: offer.sdp?.length
|
||||
});
|
||||
|
||||
callbacks.sendRawMessage({
|
||||
type: SIGNALING_TYPE_OFFER,
|
||||
targetUserId: remotePeerId,
|
||||
payload: { sdp: offer }
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to create offer', error, {
|
||||
localDescriptionType: peerData.connection.localDescription?.type ?? null,
|
||||
remotePeerId,
|
||||
signalingState: peerData.connection.signalingState
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Apply a remote SDP offer and reply with an answer.
 *
 * Implements the "polite peer" half of perfect negotiation for glare
 * (both sides sent offers at once): the side with the lexicographically
 * larger ID rolls back its own pending offer; the other side ignores the
 * incoming one. Also adopts implicitly created transceivers, attaches local
 * tracks, and flushes ICE candidates that arrived before the remote
 * description existed.
 */
export async function doHandleOffer(
  context: PeerConnectionManagerContext,
  fromUserId: string,
  sdp: RTCSessionDescriptionInit,
  handlers: NegotiationHandlers
): Promise<void> {
  const { callbacks, logger, state } = context;

  logger.info('Handling offer', { fromUserId });

  let peerData = state.activePeerConnections.get(fromUserId);

  // An offer can arrive before we ever initiated contact - lazily create the
  // non-initiator (answerer) side of the connection.
  if (!peerData) {
    peerData = handlers.createPeerConnection(fromUserId, false);
  }

  try {
    const signalingState = peerData.connection.signalingState;
    // Glare: we already have a local offer/pranswer pending when a remote
    // offer comes in.
    const hasCollision =
      signalingState === 'have-local-offer' || signalingState === 'have-local-pranswer';

    if (hasCollision) {
      const localId =
        callbacks.getIdentifyCredentials()?.oderId || callbacks.getLocalPeerId();
      // Deterministic tie-break: the larger ID is the polite side and yields.
      const isPolite = localId > fromUserId;

      if (!isPolite) {
        logger.info('Ignoring colliding offer (impolite side)', { fromUserId, localId });
        return;
      }

      logger.info('Rolling back local offer (polite side)', { fromUserId, localId });

      // Discard our pending local offer so the remote one can be applied.
      await peerData.connection.setLocalDescription({
        type: 'rollback'
      } as RTCSessionDescriptionInit);
    }

    await peerData.connection.setRemoteDescription(new RTCSessionDescription(sdp));

    const transceivers = peerData.connection.getTransceivers();

    // setRemoteDescription can create transceivers on our behalf; adopt their
    // senders so local tracks can be attached below, and make sure audio is
    // bidirectional.
    for (const transceiver of transceivers) {
      const receiverKind = transceiver.receiver.track?.kind;

      if (receiverKind === TRACK_KIND_AUDIO) {
        if (!peerData.audioSender) {
          peerData.audioSender = transceiver.sender;
        }

        transceiver.direction = TRANSCEIVER_SEND_RECV;
      } else if (receiverKind === TRACK_KIND_VIDEO && !peerData.videoSender) {
        peerData.videoSender = transceiver.sender;
      }
    }

    const localStream = callbacks.getLocalMediaStream();

    // As the answerer, place our local tracks on the adopted senders.
    if (localStream) {
      logger.logStream(`localStream->${fromUserId} (answerer)`, localStream);

      for (const track of localStream.getTracks()) {
        if (track.kind === TRACK_KIND_AUDIO && peerData.audioSender) {
          await peerData.audioSender.replaceTrack(track);
          logger.info('audio replaceTrack (answerer) ok', { fromUserId });
        } else if (track.kind === TRACK_KIND_VIDEO && peerData.videoSender) {
          await peerData.videoSender.replaceTrack(track);
          logger.info('video replaceTrack (answerer) ok', { fromUserId });
        }
      }
    }

    // Candidates queued while we had no remote description are now valid.
    for (const candidate of peerData.pendingIceCandidates) {
      await peerData.connection.addIceCandidate(new RTCIceCandidate(candidate));
    }

    peerData.pendingIceCandidates = [];

    const answer = await peerData.connection.createAnswer();

    await peerData.connection.setLocalDescription(answer);

    logger.info('Sending answer', {
      to: fromUserId,
      type: answer.type,
      sdpLength: answer.sdp?.length
    });

    callbacks.sendRawMessage({
      type: SIGNALING_TYPE_ANSWER,
      targetUserId: fromUserId,
      payload: { sdp: answer }
    });
  } catch (error) {
    logger.error('Failed to handle offer', error, {
      fromUserId,
      pendingIceCandidates: peerData.pendingIceCandidates.length,
      sdpLength: sdp.sdp?.length,
      signalingState: peerData.connection.signalingState
    });
  }
}
|
||||
|
||||
export async function doHandleAnswer(
|
||||
context: PeerConnectionManagerContext,
|
||||
fromUserId: string,
|
||||
sdp: RTCSessionDescriptionInit
|
||||
): Promise<void> {
|
||||
const { logger, state } = context;
|
||||
|
||||
logger.info('Handling answer', { fromUserId });
|
||||
|
||||
const peerData = state.activePeerConnections.get(fromUserId);
|
||||
|
||||
if (!peerData) {
|
||||
logger.error('No peer for answer', new Error('Missing peer'), { fromUserId });
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
if (peerData.connection.signalingState === 'have-local-offer') {
|
||||
await peerData.connection.setRemoteDescription(new RTCSessionDescription(sdp));
|
||||
|
||||
for (const candidate of peerData.pendingIceCandidates) {
|
||||
await peerData.connection.addIceCandidate(new RTCIceCandidate(candidate));
|
||||
}
|
||||
|
||||
peerData.pendingIceCandidates = [];
|
||||
} else {
|
||||
logger.warn('Ignoring answer - wrong signaling state', {
|
||||
state: peerData.connection.signalingState
|
||||
});
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Failed to handle answer', error, {
|
||||
fromUserId,
|
||||
pendingIceCandidates: peerData.pendingIceCandidates.length,
|
||||
sdpLength: sdp.sdp?.length,
|
||||
signalingState: peerData.connection.signalingState
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function doHandleIceCandidate(
|
||||
context: PeerConnectionManagerContext,
|
||||
fromUserId: string,
|
||||
candidate: RTCIceCandidateInit,
|
||||
handlers: NegotiationHandlers
|
||||
): Promise<void> {
|
||||
const { logger, state } = context;
|
||||
|
||||
let peerData = state.activePeerConnections.get(fromUserId);
|
||||
|
||||
if (!peerData) {
|
||||
logger.info('Creating peer for early ICE', { fromUserId });
|
||||
peerData = handlers.createPeerConnection(fromUserId, false);
|
||||
}
|
||||
|
||||
try {
|
||||
if (peerData.connection.remoteDescription) {
|
||||
await peerData.connection.addIceCandidate(new RTCIceCandidate(candidate));
|
||||
} else {
|
||||
logger.info('Queuing ICE candidate', { fromUserId });
|
||||
peerData.pendingIceCandidates.push(candidate);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('Failed to add ICE candidate', error, {
|
||||
candidateMid: candidate.sdpMid ?? null,
|
||||
candidateMLineIndex: candidate.sdpMLineIndex ?? null,
|
||||
fromUserId,
|
||||
hasRemoteDescription: !!peerData.connection.remoteDescription,
|
||||
pendingIceCandidates: peerData.pendingIceCandidates.length
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
export async function doRenegotiate(
|
||||
context: PeerConnectionManagerContext,
|
||||
peerId: string
|
||||
): Promise<void> {
|
||||
const { callbacks, logger, state } = context;
|
||||
const peerData = state.activePeerConnections.get(peerId);
|
||||
|
||||
if (!peerData)
|
||||
return;
|
||||
|
||||
try {
|
||||
const offer = await peerData.connection.createOffer();
|
||||
|
||||
await peerData.connection.setLocalDescription(offer);
|
||||
logger.info('Renegotiate offer', {
|
||||
peerId,
|
||||
type: offer.type,
|
||||
sdpLength: offer.sdp?.length
|
||||
});
|
||||
|
||||
callbacks.sendRawMessage({
|
||||
type: SIGNALING_TYPE_OFFER,
|
||||
targetUserId: peerId,
|
||||
payload: { sdp: offer }
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('Failed to renegotiate', error, {
|
||||
peerId,
|
||||
signalingState: peerData.connection.signalingState
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,2 @@
|
||||
export * from './peer-connection.manager';
|
||||
export * from './shared';
|
||||
@@ -0,0 +1,487 @@
|
||||
import { ChatEvent } from '../../../../shared-kernel';
|
||||
import {
|
||||
DATA_CHANNEL_HIGH_WATER_BYTES,
|
||||
DATA_CHANNEL_LOW_WATER_BYTES,
|
||||
DATA_CHANNEL_STATE_OPEN,
|
||||
DEFAULT_DISPLAY_NAME,
|
||||
P2P_TYPE_PING,
|
||||
P2P_TYPE_PONG,
|
||||
P2P_TYPE_SCREEN_STATE,
|
||||
P2P_TYPE_STATE_REQUEST,
|
||||
P2P_TYPE_VOICE_STATE,
|
||||
P2P_TYPE_VOICE_STATE_REQUEST
|
||||
} from '../../realtime.constants';
|
||||
import { recordDebugNetworkDataChannelPayload, recordDebugNetworkPing } from '../../logging/debug-network-metrics';
|
||||
import { PeerConnectionManagerContext } from '../shared';
|
||||
import { startPingInterval } from './ping';
|
||||
|
||||
/**
 * Loosely-typed wire message exchanged over the data channel: any JSON
 * object, with an optional `type` discriminator and an optional `ts`
 * timestamp (echoed by the ping/pong latency probes).
 */
type PeerMessage = Record<string, unknown> & {
  type?: string;
  ts?: number;
};
|
||||
|
||||
/**
 * Wire open/close/error/message handlers onto a data channel.
 *
 * On open: announces our current voice/screen state, requests the remote
 * peer's state, and starts the latency ping loop. Inbound frames are
 * JSON-parsed and routed through handlePeerMessage; unparseable frames are
 * logged with a size-bounded preview.
 */
export function setupDataChannel(
  context: PeerConnectionManagerContext,
  channel: RTCDataChannel,
  remotePeerId: string
): void {
  const { logger } = context;

  channel.onopen = () => {
    logger.info('[data-channel] Data channel open', {
      channelLabel: channel.label,
      negotiated: channel.negotiated,
      ordered: channel.ordered,
      peerId: remotePeerId,
      protocol: channel.protocol || null
    });

    // Announce our own voice/screen-share state as soon as the channel opens.
    sendCurrentStatesToChannel(context, channel, remotePeerId);

    // Then ask the remote side for its state; a send failure here is logged
    // but does not prevent the ping loop from starting below.
    try {
      const stateRequest = { type: P2P_TYPE_STATE_REQUEST };
      const rawPayload = JSON.stringify(stateRequest);

      channel.send(rawPayload);
      logDataChannelTraffic(context, channel, remotePeerId, 'outbound', rawPayload, stateRequest);
    } catch (error) {
      logger.error('[data-channel] Failed to request peer state on open', error, {
        bufferedAmount: channel.bufferedAmount,
        channelLabel: channel.label,
        peerId: remotePeerId,
        readyState: channel.readyState,
        type: P2P_TYPE_STATE_REQUEST
      });
    }

    // Begin periodic round-trip latency probing for this peer.
    startPingInterval(context.state, logger, remotePeerId);
  };

  channel.onclose = () => {
    logger.info('[data-channel] Data channel closed', {
      bufferedAmount: channel.bufferedAmount,
      channelLabel: channel.label,
      peerId: remotePeerId,
      readyState: channel.readyState
    });
  };

  channel.onerror = (error) => {
    logger.error('[data-channel] Data channel error', error, {
      bufferedAmount: channel.bufferedAmount,
      channelLabel: channel.label,
      peerId: remotePeerId,
      readyState: channel.readyState
    });
  };

  channel.onmessage = (event) => {
    // Non-string frames are coerced for the parse attempt; anything that is
    // not valid JSON text falls through to the error log below.
    const rawPayload = typeof event.data === 'string'
      ? event.data
      : String(event.data ?? '');

    try {
      const message = JSON.parse(rawPayload) as PeerMessage;

      logDataChannelTraffic(context, channel, remotePeerId, 'inbound', rawPayload, message);

      handlePeerMessage(context, remotePeerId, message);
    } catch (error) {
      logger.error('[data-channel] Failed to parse peer message', error, {
        bytes: measurePayloadBytes(rawPayload),
        channelLabel: channel.label,
        peerId: remotePeerId,
        rawPreview: getRawPreview(rawPayload)
      });
    }
  };
}
|
||||
|
||||
/**
|
||||
* Route an incoming peer-to-peer message.
|
||||
*/
|
||||
export function handlePeerMessage(
|
||||
context: PeerConnectionManagerContext,
|
||||
peerId: string,
|
||||
message: PeerMessage
|
||||
): void {
|
||||
const { logger, state } = context;
|
||||
|
||||
logger.info('[data-channel] Received P2P message', summarizePeerMessage(message, { peerId }));
|
||||
recordDebugNetworkDataChannelPayload(peerId, message, 'inbound');
|
||||
|
||||
if (message.type === P2P_TYPE_STATE_REQUEST || message.type === P2P_TYPE_VOICE_STATE_REQUEST) {
|
||||
sendCurrentStatesToPeer(context, peerId);
|
||||
return;
|
||||
}
|
||||
|
||||
if (message.type === P2P_TYPE_PING) {
|
||||
sendToPeer(context, peerId, {
|
||||
type: P2P_TYPE_PONG,
|
||||
ts: message.ts
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (message.type === P2P_TYPE_PONG) {
|
||||
const sentAt = state.pendingPings.get(peerId);
|
||||
|
||||
if (sentAt && typeof message.ts === 'number' && message.ts === sentAt) {
|
||||
const latencyMs = Math.round(performance.now() - sentAt);
|
||||
|
||||
state.peerLatencies.set(peerId, latencyMs);
|
||||
state.peerLatencyChanged$.next({ peerId, latencyMs });
|
||||
recordDebugNetworkPing(peerId, latencyMs);
|
||||
logger.info('[data-channel] Peer latency updated', { latencyMs, peerId });
|
||||
}
|
||||
|
||||
state.pendingPings.delete(peerId);
|
||||
return;
|
||||
}
|
||||
|
||||
const enrichedMessage = {
|
||||
...message,
|
||||
fromPeerId: peerId
|
||||
} as ChatEvent;
|
||||
|
||||
state.messageReceived$.next(enrichedMessage);
|
||||
}
|
||||
|
||||
/** Broadcast a ChatEvent to every peer with an open data channel. */
|
||||
export function broadcastMessage(
|
||||
context: PeerConnectionManagerContext,
|
||||
event: object
|
||||
): void {
|
||||
const { logger, state } = context;
|
||||
|
||||
let data = '';
|
||||
|
||||
try {
|
||||
data = JSON.stringify(event);
|
||||
} catch (error) {
|
||||
logger.error('[data-channel] Failed to serialize broadcast payload', error, {
|
||||
payloadPreview: summarizePeerMessage(event as PeerMessage)
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
state.activePeerConnections.forEach((peerData, peerId) => {
|
||||
try {
|
||||
if (peerData.dataChannel?.readyState === DATA_CHANNEL_STATE_OPEN) {
|
||||
peerData.dataChannel.send(data);
|
||||
recordDebugNetworkDataChannelPayload(peerId, event as PeerMessage, 'outbound');
|
||||
|
||||
logDataChannelTraffic(context, peerData.dataChannel, peerId, 'outbound', data, event as PeerMessage);
|
||||
}
|
||||
} catch (error) {
|
||||
logger.error('[data-channel] Failed to broadcast message to peer', error, {
|
||||
bufferedAmount: peerData.dataChannel?.bufferedAmount,
|
||||
channelLabel: peerData.dataChannel?.label,
|
||||
payloadPreview: summarizePeerMessage(event as PeerMessage),
|
||||
peerId,
|
||||
readyState: peerData.dataChannel?.readyState ?? null
|
||||
});
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a ChatEvent to a specific peer's data channel.
|
||||
*/
|
||||
export function sendToPeer(
|
||||
context: PeerConnectionManagerContext,
|
||||
peerId: string,
|
||||
event: object
|
||||
): void {
|
||||
const { logger, state } = context;
|
||||
const peerData = state.activePeerConnections.get(peerId);
|
||||
|
||||
if (!peerData?.dataChannel || peerData.dataChannel.readyState !== DATA_CHANNEL_STATE_OPEN) {
|
||||
logger.warn('Peer not connected - cannot send', { peerId });
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
const rawPayload = JSON.stringify(event);
|
||||
|
||||
peerData.dataChannel.send(rawPayload);
|
||||
recordDebugNetworkDataChannelPayload(peerId, event as PeerMessage, 'outbound');
|
||||
|
||||
logDataChannelTraffic(context, peerData.dataChannel, peerId, 'outbound', rawPayload, event as PeerMessage);
|
||||
} catch (error) {
|
||||
logger.error('[data-channel] Failed to send message to peer', error, {
|
||||
bufferedAmount: peerData.dataChannel.bufferedAmount,
|
||||
channelLabel: peerData.dataChannel.label,
|
||||
payloadPreview: summarizePeerMessage(event as PeerMessage),
|
||||
peerId,
|
||||
readyState: peerData.dataChannel.readyState
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a ChatEvent with back-pressure awareness.
|
||||
*/
|
||||
export async function sendToPeerBuffered(
|
||||
context: PeerConnectionManagerContext,
|
||||
peerId: string,
|
||||
event: object
|
||||
): Promise<void> {
|
||||
const { logger, state } = context;
|
||||
const peerData = state.activePeerConnections.get(peerId);
|
||||
|
||||
if (!peerData?.dataChannel || peerData.dataChannel.readyState !== DATA_CHANNEL_STATE_OPEN) {
|
||||
logger.warn('Peer not connected - cannot send buffered', { peerId });
|
||||
return;
|
||||
}
|
||||
|
||||
const channel = peerData.dataChannel;
|
||||
const data = JSON.stringify(event);
|
||||
|
||||
if (typeof channel.bufferedAmountLowThreshold === 'number') {
|
||||
channel.bufferedAmountLowThreshold = DATA_CHANNEL_LOW_WATER_BYTES;
|
||||
}
|
||||
|
||||
if (channel.bufferedAmount > DATA_CHANNEL_HIGH_WATER_BYTES) {
|
||||
logger.warn('[data-channel] Waiting for buffered amount to drain', {
|
||||
bufferedAmount: channel.bufferedAmount,
|
||||
channelLabel: channel.label,
|
||||
highWaterMark: DATA_CHANNEL_HIGH_WATER_BYTES,
|
||||
lowWaterMark: DATA_CHANNEL_LOW_WATER_BYTES,
|
||||
peerId
|
||||
});
|
||||
|
||||
await new Promise<void>((resolve) => {
|
||||
const handleBufferedAmountLow = () => {
|
||||
if (channel.bufferedAmount <= DATA_CHANNEL_LOW_WATER_BYTES) {
|
||||
channel.removeEventListener('bufferedamountlow', handleBufferedAmountLow);
|
||||
resolve();
|
||||
}
|
||||
};
|
||||
|
||||
channel.addEventListener('bufferedamountlow', handleBufferedAmountLow, { once: true });
|
||||
});
|
||||
}
|
||||
|
||||
try {
|
||||
channel.send(data);
|
||||
recordDebugNetworkDataChannelPayload(peerId, event as PeerMessage, 'outbound');
|
||||
|
||||
logDataChannelTraffic(context, channel, peerId, 'outbound', data, event as PeerMessage);
|
||||
} catch (error) {
|
||||
logger.error('[data-channel] Failed to send buffered message', error, {
|
||||
bufferedAmount: channel.bufferedAmount,
|
||||
channelLabel: channel.label,
|
||||
payloadPreview: summarizePeerMessage(event as PeerMessage),
|
||||
peerId,
|
||||
readyState: channel.readyState
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Send the current voice and screen-share states to a single peer.
|
||||
*/
|
||||
export function sendCurrentStatesToPeer(
|
||||
context: PeerConnectionManagerContext,
|
||||
peerId: string
|
||||
): void {
|
||||
const { callbacks } = context;
|
||||
const credentials = callbacks.getIdentifyCredentials();
|
||||
const oderId = credentials?.oderId || callbacks.getLocalPeerId();
|
||||
const displayName = credentials?.displayName || DEFAULT_DISPLAY_NAME;
|
||||
const voiceState = callbacks.getVoiceStateSnapshot();
|
||||
|
||||
sendToPeer(context, peerId, {
|
||||
type: P2P_TYPE_VOICE_STATE,
|
||||
oderId,
|
||||
displayName,
|
||||
voiceState
|
||||
});
|
||||
|
||||
sendToPeer(context, peerId, {
|
||||
type: P2P_TYPE_SCREEN_STATE,
|
||||
oderId,
|
||||
displayName,
|
||||
isScreenSharing: callbacks.isScreenSharingActive()
|
||||
});
|
||||
}
|
||||
|
||||
export function sendCurrentStatesToChannel(
|
||||
context: PeerConnectionManagerContext,
|
||||
channel: RTCDataChannel,
|
||||
remotePeerId: string
|
||||
): void {
|
||||
const { callbacks, logger } = context;
|
||||
|
||||
if (channel.readyState !== DATA_CHANNEL_STATE_OPEN) {
|
||||
logger.warn('Cannot send states - channel not open', {
|
||||
remotePeerId,
|
||||
state: channel.readyState
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
const credentials = callbacks.getIdentifyCredentials();
|
||||
const oderId = credentials?.oderId || callbacks.getLocalPeerId();
|
||||
const displayName = credentials?.displayName || DEFAULT_DISPLAY_NAME;
|
||||
const voiceState = callbacks.getVoiceStateSnapshot();
|
||||
|
||||
try {
|
||||
const voiceStatePayload = {
|
||||
type: P2P_TYPE_VOICE_STATE,
|
||||
oderId,
|
||||
displayName,
|
||||
voiceState
|
||||
};
|
||||
const screenStatePayload = {
|
||||
type: P2P_TYPE_SCREEN_STATE,
|
||||
oderId,
|
||||
displayName,
|
||||
isScreenSharing: callbacks.isScreenSharingActive()
|
||||
};
|
||||
const voiceStateRaw = JSON.stringify(voiceStatePayload);
|
||||
const screenStateRaw = JSON.stringify(screenStatePayload);
|
||||
|
||||
channel.send(voiceStateRaw);
|
||||
logDataChannelTraffic(context, channel, remotePeerId, 'outbound', voiceStateRaw, voiceStatePayload);
|
||||
channel.send(screenStateRaw);
|
||||
logDataChannelTraffic(context, channel, remotePeerId, 'outbound', screenStateRaw, screenStatePayload);
|
||||
|
||||
logger.info('[data-channel] Sent initial states to channel', { remotePeerId, voiceState });
|
||||
} catch (error) {
|
||||
logger.error('[data-channel] Failed to send initial states to channel', error, {
|
||||
bufferedAmount: channel.bufferedAmount,
|
||||
channelLabel: channel.label,
|
||||
peerId: remotePeerId,
|
||||
readyState: channel.readyState,
|
||||
voiceState
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/** Broadcast the current voice and screen-share states to all connected peers. */
|
||||
export function broadcastCurrentStates(context: PeerConnectionManagerContext): void {
|
||||
const { callbacks } = context;
|
||||
const credentials = callbacks.getIdentifyCredentials();
|
||||
const oderId = credentials?.oderId || callbacks.getLocalPeerId();
|
||||
const displayName = credentials?.displayName || DEFAULT_DISPLAY_NAME;
|
||||
const voiceState = callbacks.getVoiceStateSnapshot();
|
||||
|
||||
broadcastMessage(context, {
|
||||
type: P2P_TYPE_VOICE_STATE,
|
||||
oderId,
|
||||
displayName,
|
||||
voiceState
|
||||
});
|
||||
|
||||
broadcastMessage(context, {
|
||||
type: P2P_TYPE_SCREEN_STATE,
|
||||
oderId,
|
||||
displayName,
|
||||
isScreenSharing: callbacks.isScreenSharingActive()
|
||||
});
|
||||
}
|
||||
|
||||
function logDataChannelTraffic(
|
||||
context: PeerConnectionManagerContext,
|
||||
channel: RTCDataChannel,
|
||||
peerId: string,
|
||||
direction: 'inbound' | 'outbound',
|
||||
rawPayload: string,
|
||||
payload: PeerMessage
|
||||
): void {
|
||||
context.logger.traffic('data-channel', direction, {
|
||||
...summarizePeerMessage(payload, { peerId }),
|
||||
bufferedAmount: channel.bufferedAmount,
|
||||
bytes: measurePayloadBytes(rawPayload),
|
||||
channelLabel: channel.label,
|
||||
readyState: channel.readyState
|
||||
});
|
||||
}
|
||||
|
||||
function summarizePeerMessage(payload: PeerMessage, base?: Record<string, unknown>): Record<string, unknown> {
|
||||
const summary: Record<string, unknown> = {
|
||||
...base,
|
||||
keys: Object.keys(payload).slice(0, 10),
|
||||
type: typeof payload.type === 'string' ? payload.type : 'unknown'
|
||||
};
|
||||
const payloadMessage = asObject(payload['message']);
|
||||
const voiceState = asObject(payload['voiceState']);
|
||||
|
||||
if (typeof payload['oderId'] === 'string')
|
||||
summary['oderId'] = payload['oderId'];
|
||||
|
||||
if (typeof payload['displayName'] === 'string')
|
||||
summary['displayName'] = payload['displayName'];
|
||||
|
||||
if (typeof payload['roomId'] === 'string')
|
||||
summary['roomId'] = payload['roomId'];
|
||||
|
||||
if (typeof payload['serverId'] === 'string')
|
||||
summary['serverId'] = payload['serverId'];
|
||||
|
||||
if (typeof payload['messageId'] === 'string')
|
||||
summary['messageId'] = payload['messageId'];
|
||||
|
||||
if (typeof payload['isScreenSharing'] === 'boolean')
|
||||
summary['isScreenSharing'] = payload['isScreenSharing'];
|
||||
|
||||
if (typeof payload['content'] === 'string')
|
||||
summary['contentLength'] = payload['content'].length;
|
||||
|
||||
if (Array.isArray(payload['ids']))
|
||||
summary['idsCount'] = payload['ids'].length;
|
||||
|
||||
if (Array.isArray(payload['items']))
|
||||
summary['itemsCount'] = payload['items'].length;
|
||||
|
||||
if (Array.isArray(payload['messages']))
|
||||
summary['messagesCount'] = payload['messages'].length;
|
||||
|
||||
if (payloadMessage) {
|
||||
if (typeof payloadMessage['id'] === 'string')
|
||||
summary['messageId'] = payloadMessage['id'];
|
||||
|
||||
if (typeof payloadMessage['roomId'] === 'string')
|
||||
summary['roomId'] = payloadMessage['roomId'];
|
||||
|
||||
if (typeof payloadMessage['content'] === 'string')
|
||||
summary['contentLength'] = payloadMessage['content'].length;
|
||||
}
|
||||
|
||||
if (voiceState) {
|
||||
summary['voiceState'] = {
|
||||
isConnected: voiceState['isConnected'] === true,
|
||||
isMuted: voiceState['isMuted'] === true,
|
||||
isDeafened: voiceState['isDeafened'] === true,
|
||||
isSpeaking: voiceState['isSpeaking'] === true,
|
||||
roomId: typeof voiceState['roomId'] === 'string' ? voiceState['roomId'] : undefined,
|
||||
serverId: typeof voiceState['serverId'] === 'string' ? voiceState['serverId'] : undefined,
|
||||
volume: typeof voiceState['volume'] === 'number' ? voiceState['volume'] : undefined
|
||||
};
|
||||
}
|
||||
|
||||
return summary;
|
||||
}
|
||||
|
||||
function asObject(value: unknown): Record<string, unknown> | null {
|
||||
if (!value || typeof value !== 'object' || Array.isArray(value))
|
||||
return null;
|
||||
|
||||
return value as Record<string, unknown>;
|
||||
}
|
||||
|
||||
function measurePayloadBytes(payload: string): number {
|
||||
return new TextEncoder().encode(payload).length;
|
||||
}
|
||||
|
||||
function getRawPreview(payload: string): string {
|
||||
return payload.replace(/\s+/g, ' ').slice(0, 240);
|
||||
}
|
||||
@@ -0,0 +1,71 @@
|
||||
import {
|
||||
DATA_CHANNEL_STATE_OPEN,
|
||||
P2P_TYPE_PING,
|
||||
PEER_PING_INTERVAL_MS
|
||||
} from '../../realtime.constants';
|
||||
import { WebRTCLogger } from '../../logging/webrtc-logger';
|
||||
import { PeerConnectionManagerState } from '../shared';
|
||||
|
||||
/** Start periodic pings to a peer to measure round-trip latency. */
|
||||
export function startPingInterval(state: PeerConnectionManagerState, logger: WebRTCLogger, peerId: string): void {
|
||||
stopPingInterval(state, peerId);
|
||||
sendPing(state, logger, peerId);
|
||||
|
||||
const timer = setInterval(() => sendPing(state, logger, peerId), PEER_PING_INTERVAL_MS);
|
||||
|
||||
state.peerPingTimers.set(peerId, timer);
|
||||
}
|
||||
|
||||
/** Stop the periodic ping for a specific peer. */
|
||||
export function stopPingInterval(state: PeerConnectionManagerState, peerId: string): void {
|
||||
const timer = state.peerPingTimers.get(peerId);
|
||||
|
||||
if (timer) {
|
||||
clearInterval(timer);
|
||||
state.peerPingTimers.delete(peerId);
|
||||
}
|
||||
}
|
||||
|
||||
/** Cancel all active ping timers. */
|
||||
export function clearAllPingTimers(state: PeerConnectionManagerState): void {
|
||||
state.peerPingTimers.forEach((timer) => clearInterval(timer));
|
||||
state.peerPingTimers.clear();
|
||||
}
|
||||
|
||||
/** Send a single ping to a peer. */
|
||||
export function sendPing(state: PeerConnectionManagerState, logger: WebRTCLogger, peerId: string): void {
|
||||
const peerData = state.activePeerConnections.get(peerId);
|
||||
|
||||
if (!peerData?.dataChannel || peerData.dataChannel.readyState !== DATA_CHANNEL_STATE_OPEN)
|
||||
return;
|
||||
|
||||
const timestamp = performance.now();
|
||||
|
||||
state.pendingPings.set(peerId, timestamp);
|
||||
|
||||
try {
|
||||
const payload = JSON.stringify({
|
||||
type: P2P_TYPE_PING,
|
||||
ts: timestamp
|
||||
});
|
||||
|
||||
peerData.dataChannel.send(payload);
|
||||
logger.traffic('data-channel', 'outbound', {
|
||||
bufferedAmount: peerData.dataChannel.bufferedAmount,
|
||||
bytes: new TextEncoder().encode(payload).length,
|
||||
channelLabel: peerData.dataChannel.label,
|
||||
peerId,
|
||||
readyState: peerData.dataChannel.readyState,
|
||||
type: P2P_TYPE_PING
|
||||
});
|
||||
} catch (error) {
|
||||
logger.error('[data-channel] Failed to send ping', error, {
|
||||
bufferedAmount: peerData.dataChannel.bufferedAmount,
|
||||
channelLabel: peerData.dataChannel.label,
|
||||
peerId,
|
||||
readyState: peerData.dataChannel.readyState,
|
||||
ts: timestamp,
|
||||
type: P2P_TYPE_PING
|
||||
});
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,463 @@
|
||||
/* eslint-disable @typescript-eslint/member-ordering */
|
||||
import { ChatEvent } from '../../../shared-kernel';
|
||||
import { recordDebugNetworkDownloadRates } from '../logging/debug-network-metrics';
|
||||
import { WebRTCLogger } from '../logging/webrtc-logger';
|
||||
import { PeerData } from '../realtime.types';
|
||||
import { createPeerConnection as createManagedPeerConnection } from './connection/create-peer-connection';
|
||||
import {
|
||||
doCreateAndSendOffer,
|
||||
doHandleAnswer,
|
||||
doHandleIceCandidate,
|
||||
doHandleOffer,
|
||||
doRenegotiate,
|
||||
enqueueNegotiation
|
||||
} from './connection/negotiation';
|
||||
import {
|
||||
broadcastCurrentStates,
|
||||
broadcastMessage,
|
||||
sendCurrentStatesToPeer,
|
||||
sendToPeer,
|
||||
sendToPeerBuffered,
|
||||
setupDataChannel
|
||||
} from './messaging/data-channel';
|
||||
import {
|
||||
addToConnectedPeers,
|
||||
clearAllPeerReconnectTimers,
|
||||
clearPeerDisconnectGraceTimer,
|
||||
clearPeerReconnectTimer,
|
||||
closeAllPeers as closeManagedPeers,
|
||||
getConnectedPeerIds,
|
||||
removePeer as removeManagedPeer,
|
||||
requestVoiceStateFromPeer,
|
||||
resetConnectedPeers,
|
||||
schedulePeerDisconnectRecovery,
|
||||
schedulePeerReconnect,
|
||||
trackDisconnectedPeer
|
||||
} from './recovery/peer-recovery';
|
||||
import { clearRemoteScreenShareStream as clearManagedRemoteScreenShareStream, handleRemoteTrack } from './streams/remote-streams';
|
||||
import {
|
||||
ConnectionLifecycleHandlers,
|
||||
createPeerConnectionManagerState,
|
||||
NegotiationHandlers,
|
||||
PeerConnectionCallbacks,
|
||||
PeerConnectionManagerContext,
|
||||
RecoveryHandlers,
|
||||
RemovePeerOptions
|
||||
} from './shared';
|
||||
|
||||
// How often (ms) the manager polls WebRTC transport stats for every active peer.
const PEER_STATS_POLL_INTERVAL_MS = 2_000;
// Minimum elapsed ms between two snapshots before a download-rate sample is computed.
const PEER_STATS_SAMPLE_MIN_INTERVAL_MS = 500;
|
||||
|
||||
/**
 * Cumulative inbound byte counters for one peer, captured at a single
 * point in time. Two consecutive snapshots are diffed to derive
 * per-peer download rates.
 */
interface PeerInboundByteSnapshot {
  // Total audio bytes received (cumulative, from inbound-rtp stats).
  audioBytesReceived: number;
  // Epoch ms (Date.now()) when this snapshot was collected.
  collectedAt: number;
  // Total video bytes received (cumulative, from inbound-rtp stats).
  videoBytesReceived: number;
}
|
||||
|
||||
/**
|
||||
* Creates and manages RTCPeerConnections, data channels,
|
||||
* offer/answer negotiation, ICE candidates, and P2P reconnection.
|
||||
*/
|
||||
export class PeerConnectionManager {
  // Shared mutable state (maps, subjects, timers) handed to the extracted helper modules.
  private readonly state = createPeerConnectionManagerState();
  // Last cumulative inbound byte counters per peer; diffed to compute download rates.
  private readonly lastInboundByteSnapshots = new Map<string, PeerInboundByteSnapshot>();
  // Handle for the periodic transport-stats poll; null when polling is stopped.
  private statsPollTimer: ReturnType<typeof setInterval> | null = null;
  // Re-entrancy guard so a slow getStats() round never overlaps the next poll tick.
  private transportStatsPollInFlight = false;

  /** Active peer connections keyed by remote peer ID. */
  readonly activePeerConnections = this.state.activePeerConnections;

  /** Remote composite streams keyed by remote peer ID. */
  readonly remotePeerStreams = this.state.remotePeerStreams;

  /** Remote voice-only streams keyed by remote peer ID. */
  readonly remotePeerVoiceStreams = this.state.remotePeerVoiceStreams;

  /** Remote screen-share streams keyed by remote peer ID. */
  readonly remotePeerScreenShareStreams = this.state.remotePeerScreenShareStreams;

  /** Last measured latency (ms) per peer. */
  readonly peerLatencies = this.state.peerLatencies;

  /** Emitted whenever a peer latency value changes. */
  readonly peerLatencyChanged$ = this.state.peerLatencyChanged$;

  /** Emitted when a peer connection is established. */
  readonly peerConnected$ = this.state.peerConnected$;
  /** Emitted when a peer connection is removed. */
  readonly peerDisconnected$ = this.state.peerDisconnected$;
  /** Emitted when a remote media stream is added or updated. */
  readonly remoteStream$ = this.state.remoteStream$;
  /** Emitted for every ChatEvent received over a data channel. */
  readonly messageReceived$ = this.state.messageReceived$;

  /** Emitted whenever the connected peer list changes. */
  readonly connectedPeersChanged$ = this.state.connectedPeersChanged$;

  constructor(
    private readonly logger: WebRTCLogger,
    private callbacks: PeerConnectionCallbacks
  ) {
    // Stats polling runs for the manager's whole lifetime (stopped in destroy()).
    this.startTransportStatsPolling();
  }

  /**
   * Replace the callback set at runtime.
   * Needed because of circular initialisation between managers.
   */
  setCallbacks(callbacks: PeerConnectionCallbacks): void {
    this.callbacks = callbacks;
  }

  /**
   * Create a new RTCPeerConnection to a remote peer.
   */
  createPeerConnection(remotePeerId: string, isInitiator: boolean): PeerData {
    return createManagedPeerConnection(
      this.context,
      remotePeerId,
      isInitiator,
      this.connectionHandlers
    );
  }

  /**
   * Create an SDP offer and send it to the remote peer via the signaling server.
   * Resolves once the queued negotiation step for this peer has run.
   */
  async createAndSendOffer(remotePeerId: string): Promise<void> {
    return new Promise<void>((resolve) => {
      // Negotiation steps are serialized per peer via the queue.
      enqueueNegotiation(this.state, remotePeerId, async () => {
        await doCreateAndSendOffer(this.context, remotePeerId);
        resolve();
      });
    });
  }

  /**
   * Handle an incoming SDP offer from a remote peer.
   */
  handleOffer(fromUserId: string, sdp: RTCSessionDescriptionInit): void {
    enqueueNegotiation(this.state, fromUserId, () =>
      doHandleOffer(this.context, fromUserId, sdp, this.negotiationHandlers)
    );
  }

  /**
   * Handle an incoming SDP answer from a remote peer.
   */
  handleAnswer(fromUserId: string, sdp: RTCSessionDescriptionInit): void {
    enqueueNegotiation(this.state, fromUserId, () =>
      doHandleAnswer(this.context, fromUserId, sdp)
    );
  }

  /**
   * Process an incoming ICE candidate from a remote peer.
   */
  handleIceCandidate(fromUserId: string, candidate: RTCIceCandidateInit): void {
    enqueueNegotiation(this.state, fromUserId, () =>
      doHandleIceCandidate(this.context, fromUserId, candidate, this.negotiationHandlers)
    );
  }

  /**
   * Re-negotiate (create offer) to push track changes to remote.
   */
  async renegotiate(peerId: string): Promise<void> {
    return new Promise<void>((resolve) => {
      enqueueNegotiation(this.state, peerId, async () => {
        await doRenegotiate(this.context, peerId);
        resolve();
      });
    });
  }

  /** Broadcast a ChatEvent to every peer with an open data channel. */
  broadcastMessage(event: ChatEvent): void {
    broadcastMessage(this.context, event);
  }

  /**
   * Send a ChatEvent to a specific peer's data channel.
   */
  sendToPeer(peerId: string, event: ChatEvent): void {
    sendToPeer(this.context, peerId, event);
  }

  /**
   * Send a ChatEvent with back-pressure awareness.
   */
  async sendToPeerBuffered(peerId: string, event: ChatEvent): Promise<void> {
    await sendToPeerBuffered(this.context, peerId, event);
  }

  /**
   * Send the current voice and screen-share states to a single peer.
   */
  sendCurrentStatesToPeer(peerId: string): void {
    sendCurrentStatesToPeer(this.context, peerId);
  }

  /** Broadcast the current voice and screen-share states to all connected peers. */
  broadcastCurrentStates(): void {
    broadcastCurrentStates(this.context);
  }

  /**
   * Close and remove a peer connection, data channel, and emit a disconnect event.
   */
  removePeer(peerId: string, options?: RemovePeerOptions): void {
    removeManagedPeer(this.context, peerId, options);
    // Drop the peer's stats baseline so a future reconnect starts fresh.
    this.clearPeerTransportStats(peerId);
  }

  /** Close every active peer connection and clear internal state. */
  closeAllPeers(): void {
    closeManagedPeers(this.state);
    this.lastInboundByteSnapshots.clear();
  }

  /** Cancel all pending peer reconnect timers and clear the tracker. */
  clearAllPeerReconnectTimers(): void {
    clearAllPeerReconnectTimers(this.state);
  }

  /** Return a snapshot copy of the currently-connected peer IDs. */
  getConnectedPeerIds(): string[] {
    return getConnectedPeerIds(this.state);
  }

  /** Remove any cached remote screen-share tracks for a peer. */
  clearRemoteScreenShareStream(peerId: string): void {
    clearManagedRemoteScreenShareStream(this.context, peerId);
  }

  /** Reset the connected peers list to empty and notify subscribers. */
  resetConnectedPeers(): void {
    resetConnectedPeers(this.state);
  }

  /** Clean up all resources. Completes every subject; the manager is unusable afterwards. */
  destroy(): void {
    this.stopTransportStatsPolling();
    this.lastInboundByteSnapshots.clear();
    this.closeAllPeers();
    this.peerConnected$.complete();
    this.peerDisconnected$.complete();
    this.remoteStream$.complete();
    this.messageReceived$.complete();
    this.connectedPeersChanged$.complete();
    this.peerLatencyChanged$.complete();
  }

  // Fresh context object per call so helpers always see the latest callbacks.
  private get context(): PeerConnectionManagerContext {
    return {
      logger: this.logger,
      callbacks: this.callbacks,
      state: this.state
    };
  }

  // Lifecycle hooks passed into createManagedPeerConnection.
  private get connectionHandlers(): ConnectionLifecycleHandlers {
    return {
      clearPeerDisconnectGraceTimer: (peerId: string) => this.clearPeerDisconnectGraceTimer(peerId),
      addToConnectedPeers: (peerId: string) => this.addToConnectedPeers(peerId),
      clearPeerReconnectTimer: (peerId: string) => this.clearPeerReconnectTimer(peerId),
      requestVoiceStateFromPeer: (peerId: string) => this.requestVoiceStateFromPeer(peerId),
      schedulePeerDisconnectRecovery: (peerId: string) =>
        this.schedulePeerDisconnectRecovery(peerId),
      trackDisconnectedPeer: (peerId: string) => this.trackDisconnectedPeer(peerId),
      removePeer: (peerId: string, options?: RemovePeerOptions) => this.removePeer(peerId, options),
      schedulePeerReconnect: (peerId: string) => this.schedulePeerReconnect(peerId),
      handleRemoteTrack: (event: RTCTrackEvent, peerId: string) =>
        this.handleRemoteTrack(event, peerId),
      setupDataChannel: (channel: RTCDataChannel, peerId: string) =>
        this.setupDataChannel(channel, peerId)
    };
  }

  // Hooks used by the offer/answer/ICE negotiation helpers.
  private get negotiationHandlers(): NegotiationHandlers {
    return {
      createPeerConnection: (remotePeerId: string, isInitiator: boolean) =>
        this.createPeerConnection(remotePeerId, isInitiator)
    };
  }

  // Hooks used by the disconnect-recovery helpers.
  private get recoveryHandlers(): RecoveryHandlers {
    return {
      removePeer: (peerId: string, options?: RemovePeerOptions) => this.removePeer(peerId, options),
      createPeerConnection: (peerId: string, isInitiator: boolean) =>
        this.createPeerConnection(peerId, isInitiator),
      createAndSendOffer: (peerId: string) => this.createAndSendOffer(peerId)
    };
  }

  private setupDataChannel(channel: RTCDataChannel, remotePeerId: string): void {
    setupDataChannel(this.context, channel, remotePeerId);
  }

  private handleRemoteTrack(event: RTCTrackEvent, remotePeerId: string): void {
    handleRemoteTrack(this.context, event, remotePeerId);
  }

  private trackDisconnectedPeer(peerId: string): void {
    trackDisconnectedPeer(this.state, peerId);
  }

  private clearPeerReconnectTimer(peerId: string): void {
    clearPeerReconnectTimer(this.state, peerId);
  }

  private clearPeerDisconnectGraceTimer(peerId: string): void {
    clearPeerDisconnectGraceTimer(this.state, peerId);
  }

  private schedulePeerDisconnectRecovery(peerId: string): void {
    schedulePeerDisconnectRecovery(this.context, peerId, this.recoveryHandlers);
  }

  private schedulePeerReconnect(peerId: string): void {
    schedulePeerReconnect(this.context, peerId, this.recoveryHandlers);
  }

  private requestVoiceStateFromPeer(peerId: string): void {
    requestVoiceStateFromPeer(this.state, this.logger, peerId);
  }

  private addToConnectedPeers(peerId: string): void {
    addToConnectedPeers(this.state, peerId);
  }

  // Start the periodic transport-stats poll; idempotent.
  private startTransportStatsPolling(): void {
    if (this.statsPollTimer)
      return;

    this.statsPollTimer = setInterval(() => {
      // Fire-and-forget; pollTransportStats guards against overlap itself.
      void this.pollTransportStats();
    }, PEER_STATS_POLL_INTERVAL_MS);
  }

  // Stop the periodic transport-stats poll; idempotent.
  private stopTransportStatsPolling(): void {
    if (!this.statsPollTimer)
      return;

    clearInterval(this.statsPollTimer);
    this.statsPollTimer = null;
  }

  // Forget the rate-calculation baseline for one peer.
  private clearPeerTransportStats(peerId: string): void {
    this.lastInboundByteSnapshots.delete(peerId);
  }

  // Poll every active peer's stats sequentially, skipping if a poll is still running.
  private async pollTransportStats(): Promise<void> {
    if (this.transportStatsPollInFlight || this.state.activePeerConnections.size === 0)
      return;

    this.transportStatsPollInFlight = true;

    try {
      // Array.from guards against the map mutating while we await.
      for (const [peerId, peerData] of Array.from(this.state.activePeerConnections.entries())) {
        await this.pollPeerTransportStats(peerId, peerData);
      }
    } finally {
      this.transportStatsPollInFlight = false;
    }
  }

  // Collect inbound-rtp byte counts for one peer and log/record download rates.
  private async pollPeerTransportStats(peerId: string, peerData: PeerData): Promise<void> {
    const connectionState = peerData.connection.connectionState;

    if (connectionState === 'closed' || connectionState === 'failed') {
      this.clearPeerTransportStats(peerId);
      return;
    }

    try {
      const stats = await peerData.connection.getStats();

      let audioBytesReceived = 0;
      let videoBytesReceived = 0;

      stats.forEach((report) => {
        const summary = this.getInboundRtpSummary(report);

        if (!summary)
          return;

        if (summary.kind === 'audio')
          audioBytesReceived += summary.bytesReceived;

        if (summary.kind === 'video')
          videoBytesReceived += summary.bytesReceived;
      });

      const collectedAt = Date.now();
      const previous = this.lastInboundByteSnapshots.get(peerId);

      this.lastInboundByteSnapshots.set(peerId, {
        audioBytesReceived,
        collectedAt,
        videoBytesReceived
      });

      // First sample only establishes the baseline; no rate can be derived yet.
      if (!previous)
        return;

      const elapsedMs = collectedAt - previous.collectedAt;

      // Too-short windows produce noisy rates; wait for the next poll.
      if (elapsedMs < PEER_STATS_SAMPLE_MIN_INTERVAL_MS)
        return;

      const audioDownloadMbps = this.calculateMbps(audioBytesReceived - previous.audioBytesReceived, elapsedMs);
      const videoDownloadMbps = this.calculateMbps(videoBytesReceived - previous.videoBytesReceived, elapsedMs);

      recordDebugNetworkDownloadRates(peerId, {
        audioMbps: this.roundMetric(audioDownloadMbps),
        videoMbps: this.roundMetric(videoDownloadMbps)
      }, collectedAt);

      this.logger.info('Peer transport stats', {
        audioDownloadMbps: this.roundMetric(audioDownloadMbps),
        connectionState,
        remotePeerId: peerId,
        totalDownloadMbps: this.roundMetric(audioDownloadMbps + videoDownloadMbps),
        videoDownloadMbps: this.roundMetric(videoDownloadMbps)
      });
    } catch (error) {
      // Stats collection is best-effort diagnostics; never let it break the connection.
      this.logger.warn('Failed to collect peer transport stats', {
        connectionState,
        error: (error as Error)?.message ?? String(error),
        peerId
      });
    }
  }

  /**
   * Extract bytesReceived + media kind from an inbound-rtp stats report.
   * Returns null for non-inbound-rtp reports, remote reports, or reports
   * without a numeric byte count / recognised media kind.
   */
  private getInboundRtpSummary(report: RTCStats): { bytesReceived: number; kind: 'audio' | 'video' } | null {
    const summary = report as unknown as Record<string, unknown>;

    if (summary['type'] !== 'inbound-rtp' || summary['isRemote'] === true)
      return null;

    const bytesReceived = typeof summary['bytesReceived'] === 'number'
      ? summary['bytesReceived']
      : null;
    // Older implementations report 'mediaType' instead of 'kind'.
    const mediaKind = typeof summary['kind'] === 'string'
      ? summary['kind']
      : (typeof summary['mediaType'] === 'string' ? summary['mediaType'] : null);

    if (bytesReceived === null || (mediaKind !== 'audio' && mediaKind !== 'video'))
      return null;

    return {
      bytesReceived,
      kind: mediaKind
    };
  }

  // Convert a byte delta over a ms window to megabits/second:
  // bytes * 8 / (ms * 1000) == Mbit/s. Negative deltas (counter reset) clamp to 0.
  private calculateMbps(deltaBytes: number, elapsedMs: number): number {
    if (elapsedMs <= 0)
      return 0;

    return Math.max(0, deltaBytes) * 8 / elapsedMs / 1000;
  }

  // Round to 3 decimal places for compact logging.
  private roundMetric(value: number): number {
    return Math.round(value * 1000) / 1000;
  }
}
|
||||
@@ -0,0 +1,268 @@
|
||||
import {
|
||||
CONNECTION_STATE_CONNECTED,
|
||||
DATA_CHANNEL_STATE_OPEN,
|
||||
P2P_TYPE_VOICE_STATE_REQUEST,
|
||||
PEER_DISCONNECT_GRACE_MS,
|
||||
PEER_RECONNECT_INTERVAL_MS,
|
||||
PEER_RECONNECT_MAX_ATTEMPTS
|
||||
} from '../../realtime.constants';
|
||||
import {
|
||||
PeerConnectionManagerContext,
|
||||
PeerConnectionManagerState,
|
||||
RecoveryHandlers,
|
||||
RemovePeerOptions
|
||||
} from '../shared';
|
||||
import { clearAllPingTimers, stopPingInterval } from '../messaging/ping';
|
||||
|
||||
/**
|
||||
* Close and remove a peer connection, data channel, and emit a disconnect event.
|
||||
*/
|
||||
export function removePeer(
|
||||
context: PeerConnectionManagerContext,
|
||||
peerId: string,
|
||||
options?: RemovePeerOptions
|
||||
): void {
|
||||
const { state } = context;
|
||||
const peerData = state.activePeerConnections.get(peerId);
|
||||
const preserveReconnectState = options?.preserveReconnectState === true;
|
||||
|
||||
clearPeerDisconnectGraceTimer(state, peerId);
|
||||
|
||||
if (!preserveReconnectState) {
|
||||
clearPeerReconnectTimer(state, peerId);
|
||||
state.disconnectedPeerTracker.delete(peerId);
|
||||
}
|
||||
|
||||
state.remotePeerStreams.delete(peerId);
|
||||
state.remotePeerVoiceStreams.delete(peerId);
|
||||
state.remotePeerScreenShareStreams.delete(peerId);
|
||||
|
||||
if (peerData) {
|
||||
if (peerData.dataChannel)
|
||||
peerData.dataChannel.close();
|
||||
|
||||
peerData.connection.close();
|
||||
state.activePeerConnections.delete(peerId);
|
||||
state.peerNegotiationQueue.delete(peerId);
|
||||
removeFromConnectedPeers(state, peerId);
|
||||
stopPingInterval(state, peerId);
|
||||
state.peerLatencies.delete(peerId);
|
||||
state.pendingPings.delete(peerId);
|
||||
state.peerDisconnected$.next(peerId);
|
||||
}
|
||||
}
|
||||
|
||||
/** Close every active peer connection and clear internal state. */
|
||||
export function closeAllPeers(state: PeerConnectionManagerState): void {
|
||||
clearAllPeerReconnectTimers(state);
|
||||
clearAllPeerDisconnectGraceTimers(state);
|
||||
clearAllPingTimers(state);
|
||||
|
||||
state.activePeerConnections.forEach((peerData) => {
|
||||
if (peerData.dataChannel)
|
||||
peerData.dataChannel.close();
|
||||
|
||||
peerData.connection.close();
|
||||
});
|
||||
|
||||
state.activePeerConnections.clear();
|
||||
state.remotePeerStreams.clear();
|
||||
state.remotePeerVoiceStreams.clear();
|
||||
state.remotePeerScreenShareStreams.clear();
|
||||
state.peerNegotiationQueue.clear();
|
||||
state.peerLatencies.clear();
|
||||
state.pendingPings.clear();
|
||||
state.connectedPeersChanged$.next([]);
|
||||
}
|
||||
|
||||
export function trackDisconnectedPeer(state: PeerConnectionManagerState, peerId: string): void {
|
||||
state.disconnectedPeerTracker.set(peerId, {
|
||||
lastSeenTimestamp: Date.now(),
|
||||
reconnectAttempts: 0
|
||||
});
|
||||
}
|
||||
|
||||
export function clearPeerReconnectTimer(
|
||||
state: PeerConnectionManagerState,
|
||||
peerId: string
|
||||
): void {
|
||||
const timer = state.peerReconnectTimers.get(peerId);
|
||||
|
||||
if (timer) {
|
||||
clearInterval(timer);
|
||||
state.peerReconnectTimers.delete(peerId);
|
||||
}
|
||||
}
|
||||
|
||||
export function clearPeerDisconnectGraceTimer(
|
||||
state: PeerConnectionManagerState,
|
||||
peerId: string
|
||||
): void {
|
||||
const timer = state.peerDisconnectGraceTimers.get(peerId);
|
||||
|
||||
if (timer) {
|
||||
clearTimeout(timer);
|
||||
state.peerDisconnectGraceTimers.delete(peerId);
|
||||
}
|
||||
}
|
||||
|
||||
/** Cancel all pending peer reconnect timers and clear the tracker. */
|
||||
export function clearAllPeerReconnectTimers(state: PeerConnectionManagerState): void {
|
||||
state.peerReconnectTimers.forEach((timer) => clearInterval(timer));
|
||||
state.peerReconnectTimers.clear();
|
||||
state.disconnectedPeerTracker.clear();
|
||||
}
|
||||
|
||||
export function clearAllPeerDisconnectGraceTimers(state: PeerConnectionManagerState): void {
|
||||
state.peerDisconnectGraceTimers.forEach((timer) => clearTimeout(timer));
|
||||
state.peerDisconnectGraceTimers.clear();
|
||||
}
|
||||
|
||||
/**
 * Give a transiently-disconnected peer a grace period before tearing the
 * connection down and starting reconnect attempts. No-op if a grace timer
 * is already pending for this peer.
 */
export function schedulePeerDisconnectRecovery(
  context: PeerConnectionManagerContext,
  peerId: string,
  handlers: RecoveryHandlers
): void {
  const { logger, state } = context;

  // Only one grace timer per peer at a time.
  if (state.peerDisconnectGraceTimers.has(peerId))
    return;

  logger.warn('Peer temporarily disconnected; waiting before reconnect', { peerId });

  const timer = setTimeout(() => {
    state.peerDisconnectGraceTimers.delete(peerId);

    const peerData = state.activePeerConnections.get(peerId);

    // Peer was fully removed while we waited - nothing left to recover.
    if (!peerData)
      return;

    const connectionState = peerData.connection.connectionState;

    // Peer came back (or is coming back) on its own; leave it alone.
    if (connectionState === CONNECTION_STATE_CONNECTED || connectionState === 'connecting') {
      logger.info('Peer recovered before disconnect grace expired', {
        peerId,
        state: connectionState
      });

      return;
    }

    logger.warn('Peer still disconnected after grace period; recreating connection', {
      peerId,
      state: connectionState
    });

    // Reset reconnect bookkeeping, drop the stale connection while keeping
    // the tracker entry, then begin periodic reconnect attempts.
    trackDisconnectedPeer(state, peerId);
    handlers.removePeer(peerId, { preserveReconnectState: true });
    schedulePeerReconnect(context, peerId, handlers);
  }, PEER_DISCONNECT_GRACE_MS);

  state.peerDisconnectGraceTimers.set(peerId, timer);
}
|
||||
|
||||
/**
 * Start periodic reconnect attempts for a tracked disconnected peer.
 * No-op if a reconnect interval is already running for this peer. The
 * interval self-cancels when the tracker entry disappears or the attempt
 * budget is exhausted.
 */
export function schedulePeerReconnect(
  context: PeerConnectionManagerContext,
  peerId: string,
  handlers: RecoveryHandlers
): void {
  const { callbacks, logger, state } = context;

  // Only one reconnect loop per peer at a time.
  if (state.peerReconnectTimers.has(peerId))
    return;

  logger.info('Scheduling P2P reconnect', { peerId });

  const timer = setInterval(() => {
    const info = state.disconnectedPeerTracker.get(peerId);

    // Tracking was cleared elsewhere (e.g. clean removal) - stop the loop.
    if (!info) {
      clearPeerReconnectTimer(state, peerId);
      return;
    }

    info.reconnectAttempts++;
    logger.info('P2P reconnect attempt', {
      peerId,
      attempt: info.reconnectAttempts
    });

    // Give up after the configured number of attempts.
    if (info.reconnectAttempts >= PEER_RECONNECT_MAX_ATTEMPTS) {
      logger.info('P2P reconnect max attempts reached', { peerId });
      clearPeerReconnectTimer(state, peerId);
      state.disconnectedPeerTracker.delete(peerId);
      return;
    }

    // Without signaling we cannot negotiate; skip this tick (attempt still counted).
    if (!callbacks.isSignalingConnected()) {
      logger.info('Skipping P2P reconnect - no signaling connection', { peerId });
      return;
    }

    attemptPeerReconnect(state, peerId, handlers);
  }, PEER_RECONNECT_INTERVAL_MS);

  state.peerReconnectTimers.set(peerId, timer);
}
|
||||
|
||||
export function attemptPeerReconnect(
|
||||
state: PeerConnectionManagerState,
|
||||
peerId: string,
|
||||
handlers: RecoveryHandlers
|
||||
): void {
|
||||
if (state.activePeerConnections.has(peerId)) {
|
||||
handlers.removePeer(peerId, { preserveReconnectState: true });
|
||||
}
|
||||
|
||||
handlers.createPeerConnection(peerId, true);
|
||||
void handlers.createAndSendOffer(peerId);
|
||||
}
|
||||
|
||||
export function requestVoiceStateFromPeer(
|
||||
state: PeerConnectionManagerState,
|
||||
logger: PeerConnectionManagerContext['logger'],
|
||||
peerId: string
|
||||
): void {
|
||||
const peerData = state.activePeerConnections.get(peerId);
|
||||
|
||||
if (peerData?.dataChannel?.readyState === DATA_CHANNEL_STATE_OPEN) {
|
||||
try {
|
||||
peerData.dataChannel.send(JSON.stringify({ type: P2P_TYPE_VOICE_STATE_REQUEST }));
|
||||
} catch (error) {
|
||||
logger.warn('Failed to request voice state', error);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/** Return a snapshot copy of the currently-connected peer IDs. */
|
||||
export function getConnectedPeerIds(state: PeerConnectionManagerState): string[] {
|
||||
return [...state.connectedPeersList];
|
||||
}
|
||||
|
||||
export function addToConnectedPeers(state: PeerConnectionManagerState, peerId: string): void {
|
||||
if (!state.connectedPeersList.includes(peerId)) {
|
||||
state.connectedPeersList = [...state.connectedPeersList, peerId];
|
||||
state.connectedPeersChanged$.next(state.connectedPeersList);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove a peer from the connected list and notify subscribers.
|
||||
*/
|
||||
export function removeFromConnectedPeers(
|
||||
state: PeerConnectionManagerState,
|
||||
peerId: string
|
||||
): void {
|
||||
state.connectedPeersList = state.connectedPeersList.filter(
|
||||
(connectedId) => connectedId !== peerId
|
||||
);
|
||||
|
||||
state.connectedPeersChanged$.next(state.connectedPeersList);
|
||||
}
|
||||
|
||||
/** Reset the connected peers list to empty and notify subscribers. */
|
||||
export function resetConnectedPeers(state: PeerConnectionManagerState): void {
|
||||
state.connectedPeersList = [];
|
||||
state.connectedPeersChanged$.next([]);
|
||||
}
|
||||
@@ -0,0 +1,107 @@
|
||||
import { Subject } from 'rxjs';
|
||||
import { ChatEvent } from '../../../shared-kernel';
|
||||
import { WebRTCLogger } from '../logging/webrtc-logger';
|
||||
import {
|
||||
DisconnectedPeerEntry,
|
||||
IdentifyCredentials,
|
||||
PeerData,
|
||||
VoiceStateSnapshot
|
||||
} from '../realtime.types';
|
||||
|
||||
/**
 * Callbacks the PeerConnectionManager needs from the owning service.
 * This keeps the manager decoupled from Angular DI / signals.
 */
export interface PeerConnectionCallbacks {
  /** Send a raw JSON message via the signaling server. */
  sendRawMessage(msg: Record<string, unknown>): void;
  /** Get the current local media stream (mic audio). */
  getLocalMediaStream(): MediaStream | null;
  /** Whether signaling is currently connected. */
  isSignalingConnected(): boolean;
  /** Returns the current voice/screen state snapshot for broadcasting. */
  getVoiceStateSnapshot(): VoiceStateSnapshot;
  /** Returns the identify credentials (ID + display name; see IdentifyCredentials), or null if not identified yet. */
  getIdentifyCredentials(): IdentifyCredentials | null;
  /** Returns the local peer ID. */
  getLocalPeerId(): string;
  /** Whether screen sharing is active. */
  isScreenSharingActive(): boolean;
}
|
||||
|
||||
/** Mutable state shared between the PeerConnectionManager and its helper modules. */
export interface PeerConnectionManagerState {
  /** Active peer connections keyed by remote peer ID. */
  activePeerConnections: Map<string, PeerData>;
  /** Composite remote streams keyed by peer ID. */
  remotePeerStreams: Map<string, MediaStream>;
  /** Voice-only remote streams keyed by peer ID. */
  remotePeerVoiceStreams: Map<string, MediaStream>;
  /** Screen-share remote streams keyed by peer ID. */
  remotePeerScreenShareStreams: Map<string, MediaStream>;
  /** Reconnect bookkeeping for peers currently considered disconnected. */
  disconnectedPeerTracker: Map<string, DisconnectedPeerEntry>;
  /** Periodic reconnect-attempt timers keyed by peer ID. */
  peerReconnectTimers: Map<string, ReturnType<typeof setInterval>>;
  /** One-shot grace timers that delay teardown after a transient disconnect. */
  peerDisconnectGraceTimers: Map<string, ReturnType<typeof setTimeout>>;
  /** Send timestamps of pings still awaiting a pong, keyed by peer ID. */
  pendingPings: Map<string, number>;
  /** Periodic ping timers keyed by peer ID. */
  peerPingTimers: Map<string, ReturnType<typeof setInterval>>;
  /** Last measured round-trip latency (ms) per peer. */
  peerLatencies: Map<string, number>;
  /** Emits whenever a peer latency value changes. */
  peerLatencyChanged$: Subject<{ peerId: string; latencyMs: number }>;
  /** Pending negotiation promise per peer; serializes offer/answer/ICE handling. */
  peerNegotiationQueue: Map<string, Promise<void>>;
  /** Emits a peer ID when its connection is established. */
  peerConnected$: Subject<string>;
  /** Emits a peer ID when its connection is removed. */
  peerDisconnected$: Subject<string>;
  /** Emits remote stream additions/updates. */
  remoteStream$: Subject<{ peerId: string; stream: MediaStream }>;
  /** Emits every ChatEvent received over a data channel. */
  messageReceived$: Subject<ChatEvent>;
  /** Emits a snapshot of connected peer IDs on every change. */
  connectedPeersChanged$: Subject<string[]>;
  /** Currently-connected peer IDs; replaced (not mutated) on every change. */
  connectedPeersList: string[];
}
|
||||
|
||||
/** Read-only bundle (logger + callbacks + shared state) passed to the extracted helper functions. */
export interface PeerConnectionManagerContext {
  readonly logger: WebRTCLogger;
  readonly callbacks: PeerConnectionCallbacks;
  readonly state: PeerConnectionManagerState;
}
|
||||
|
||||
/** Options for removePeer. */
export interface RemovePeerOptions {
  /** Keep reconnect timers and the disconnect tracker entry so recovery can continue. */
  preserveReconnectState?: boolean;
}
|
||||
|
||||
/** Hooks invoked by the connection-creation helper at peer lifecycle transitions. */
export interface ConnectionLifecycleHandlers {
  clearPeerDisconnectGraceTimer(peerId: string): void;
  addToConnectedPeers(peerId: string): void;
  clearPeerReconnectTimer(peerId: string): void;
  requestVoiceStateFromPeer(peerId: string): void;
  schedulePeerDisconnectRecovery(peerId: string): void;
  trackDisconnectedPeer(peerId: string): void;
  removePeer(peerId: string, options?: RemovePeerOptions): void;
  schedulePeerReconnect(peerId: string): void;
  handleRemoteTrack(event: RTCTrackEvent, remotePeerId: string): void;
  setupDataChannel(channel: RTCDataChannel, remotePeerId: string): void;
}
|
||||
|
||||
/** Hooks the SDP negotiation helpers need from the manager. */
export interface NegotiationHandlers {
  createPeerConnection(remotePeerId: string, isInitiator: boolean): PeerData;
}
|
||||
|
||||
/** Hooks the disconnect-recovery helpers need to tear down and re-dial a peer. */
export interface RecoveryHandlers {
  removePeer(peerId: string, options?: RemovePeerOptions): void;
  createPeerConnection(peerId: string, isInitiator: boolean): PeerData;
  createAndSendOffer(peerId: string): Promise<void>;
}
|
||||
|
||||
export function createPeerConnectionManagerState(): PeerConnectionManagerState {
|
||||
return {
|
||||
activePeerConnections: new Map<string, PeerData>(),
|
||||
remotePeerStreams: new Map<string, MediaStream>(),
|
||||
remotePeerVoiceStreams: new Map<string, MediaStream>(),
|
||||
remotePeerScreenShareStreams: new Map<string, MediaStream>(),
|
||||
disconnectedPeerTracker: new Map<string, DisconnectedPeerEntry>(),
|
||||
peerReconnectTimers: new Map<string, ReturnType<typeof setInterval>>(),
|
||||
peerDisconnectGraceTimers: new Map<string, ReturnType<typeof setTimeout>>(),
|
||||
pendingPings: new Map<string, number>(),
|
||||
peerPingTimers: new Map<string, ReturnType<typeof setInterval>>(),
|
||||
peerLatencies: new Map<string, number>(),
|
||||
peerLatencyChanged$: new Subject<{ peerId: string; latencyMs: number }>(),
|
||||
peerNegotiationQueue: new Map<string, Promise<void>>(),
|
||||
peerConnected$: new Subject<string>(),
|
||||
peerDisconnected$: new Subject<string>(),
|
||||
remoteStream$: new Subject<{ peerId: string; stream: MediaStream }>(),
|
||||
messageReceived$: new Subject<ChatEvent>(),
|
||||
connectedPeersChanged$: new Subject<string[]>(),
|
||||
connectedPeersList: []
|
||||
};
|
||||
}
|
||||
@@ -0,0 +1,356 @@
|
||||
import { TRACK_KIND_AUDIO, TRACK_KIND_VIDEO } from '../../realtime.constants';
|
||||
import { recordDebugNetworkStreams } from '../../logging/debug-network-metrics';
|
||||
import { PeerConnectionManagerContext } from '../shared';
|
||||
|
||||
/**
 * Process a remote RTCTrackEvent for a peer: log/diagnose the track, skip
 * inactive video tracks, rebuild the peer's composite / voice / screen-share
 * stream caches, and publish the updated composite stream.
 */
export function handleRemoteTrack(
  context: PeerConnectionManagerContext,
  event: RTCTrackEvent,
  remotePeerId: string
): void {
  const { logger, state } = context;
  const track = event.track;
  // NOTE(review): classification of screen-share audio is delegated to
  // isScreenShareAudioTrack (defined elsewhere in this module).
  const isScreenAudio = isScreenShareAudioTrack(context, event, remotePeerId);
  // getSettings is not implemented in every environment; fall back to an empty object.
  const settings =
    typeof track.getSettings === 'function' ? track.getSettings() : ({} as MediaTrackSettings);

  logger.info('Remote track', {
    remotePeerId,
    kind: track.kind,
    id: track.id,
    enabled: track.enabled,
    readyState: track.readyState,
    settings
  });

  logger.attachTrackDiagnostics(track, `remote:${remotePeerId}:${track.kind}`);

  // A disabled or non-live video track carries no renderable media; ignore it.
  if (track.kind === TRACK_KIND_VIDEO && (!track.enabled || track.readyState !== 'live')) {
    logger.info('Skipping inactive video track', {
      remotePeerId,
      enabled: track.enabled,
      readyState: track.readyState
    });

    return;
  }

  // Rebuild the per-peer stream views this track belongs to.
  const compositeStream = buildCompositeRemoteStream(state, remotePeerId, track);
  const voiceStream = isVoiceAudioTrack(track, isScreenAudio)
    ? buildAudioOnlyStream(state.remotePeerVoiceStreams.get(remotePeerId), track)
    : null;
  const screenShareStream = isScreenShareTrack(track, isScreenAudio)
    ? buildScreenShareStream(state.remotePeerScreenShareStreams.get(remotePeerId), track)
    : null;

  // When the sender stops the track, prune it from the cached streams.
  track.addEventListener('ended', () => removeRemoteTrack(context, remotePeerId, track.id));

  state.remotePeerStreams.set(remotePeerId, compositeStream);

  if (voiceStream) {
    state.remotePeerVoiceStreams.set(remotePeerId, voiceStream);
  }

  if (screenShareStream) {
    state.remotePeerScreenShareStreams.set(remotePeerId, screenShareStream);
  }

  // Record which incoming MediaStream IDs map to which role for later lookups.
  rememberIncomingStreamIds(state, event, remotePeerId, {
    isScreenAudio,
    isVoiceAudio: !!voiceStream,
    isScreenTrack: !!screenShareStream
  });

  publishRemoteStreamUpdate(context, remotePeerId, compositeStream);
}
|
||||
|
||||
export function clearRemoteScreenShareStream(
|
||||
context: PeerConnectionManagerContext,
|
||||
remotePeerId: string
|
||||
): void {
|
||||
const { state } = context;
|
||||
const peerData = state.activePeerConnections.get(remotePeerId);
|
||||
const screenShareStream = state.remotePeerScreenShareStreams.get(remotePeerId);
|
||||
|
||||
if (!screenShareStream) {
|
||||
return;
|
||||
}
|
||||
|
||||
const screenShareTrackIds = new Set(
|
||||
screenShareStream.getTracks().map((track) => track.id)
|
||||
);
|
||||
const compositeStream = removeTracksFromStreamMap(
|
||||
state.remotePeerStreams,
|
||||
remotePeerId,
|
||||
screenShareTrackIds
|
||||
);
|
||||
|
||||
removeTracksFromStreamMap(state.remotePeerVoiceStreams, remotePeerId, screenShareTrackIds);
|
||||
state.remotePeerScreenShareStreams.delete(remotePeerId);
|
||||
|
||||
peerData?.remoteScreenShareStreamIds.clear();
|
||||
|
||||
publishRemoteStreamUpdate(context, remotePeerId, compositeStream);
|
||||
}
|
||||
|
||||
function buildCompositeRemoteStream(
|
||||
state: PeerConnectionManagerContext['state'],
|
||||
remotePeerId: string,
|
||||
incomingTrack: MediaStreamTrack
|
||||
): MediaStream {
|
||||
return buildMergedStream(state.remotePeerStreams.get(remotePeerId), incomingTrack, {
|
||||
replaceVideoTrack: true
|
||||
});
|
||||
}
|
||||
|
||||
function buildAudioOnlyStream(
|
||||
existingStream: MediaStream | undefined,
|
||||
incomingTrack: MediaStreamTrack
|
||||
): MediaStream {
|
||||
return buildMergedStream(existingStream, incomingTrack, {
|
||||
allowedKinds: [TRACK_KIND_AUDIO],
|
||||
replaceVideoTrack: false
|
||||
});
|
||||
}
|
||||
|
||||
function buildScreenShareStream(
|
||||
existingStream: MediaStream | undefined,
|
||||
incomingTrack: MediaStreamTrack
|
||||
): MediaStream {
|
||||
return buildMergedStream(existingStream, incomingTrack, {
|
||||
replaceVideoTrack: true
|
||||
});
|
||||
}
|
||||
|
||||
function buildMergedStream(
|
||||
existingStream: MediaStream | undefined,
|
||||
incomingTrack: MediaStreamTrack,
|
||||
options: {
|
||||
allowedKinds?: string[];
|
||||
replaceVideoTrack: boolean;
|
||||
}
|
||||
): MediaStream {
|
||||
const allowedKinds = options.allowedKinds ?? [TRACK_KIND_AUDIO, TRACK_KIND_VIDEO];
|
||||
|
||||
let preservedTracks: MediaStreamTrack[] = [];
|
||||
|
||||
if (existingStream) {
|
||||
preservedTracks = existingStream.getTracks().filter(
|
||||
(existingTrack) => {
|
||||
if (existingTrack.readyState !== 'live') {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (!allowedKinds.includes(existingTrack.kind)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (options.replaceVideoTrack && incomingTrack.kind === TRACK_KIND_VIDEO) {
|
||||
return existingTrack.kind !== TRACK_KIND_VIDEO;
|
||||
}
|
||||
|
||||
return existingTrack.id !== incomingTrack.id;
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
return new MediaStream([...preservedTracks, incomingTrack]);
|
||||
}
|
||||
|
||||
/**
 * Removes a single remote track (identified by `trackId`) from all of the
 * peer's stream maps — typically invoked from the track's `ended` listener —
 * and publishes the resulting composite stream.
 *
 * @param context - Shared peer-connection manager context.
 * @param remotePeerId - Peer that owned the track.
 * @param trackId - ID of the track to remove.
 */
function removeRemoteTrack(
  context: PeerConnectionManagerContext,
  remotePeerId: string,
  trackId: string
): void {
  const { state } = context;
  const peerData = state.activePeerConnections.get(remotePeerId);
  const compositeStream = removeTrackFromStreamMap(state.remotePeerStreams, remotePeerId, trackId);

  removeTrackFromStreamMap(state.remotePeerVoiceStreams, remotePeerId, trackId);
  removeTrackFromStreamMap(state.remotePeerScreenShareStreams, remotePeerId, trackId);

  // When the last voice / screen-share track is gone the remembered stream-ID
  // classification is stale; clear it so future tracks are re-classified.
  if (!state.remotePeerVoiceStreams.has(remotePeerId)) {
    peerData?.remoteVoiceStreamIds.clear();
  }

  if (!state.remotePeerScreenShareStreams.has(remotePeerId)) {
    peerData?.remoteScreenShareStreamIds.clear();
  }

  publishRemoteStreamUpdate(context, remotePeerId, compositeStream);
}
|
||||
|
||||
function removeTrackFromStreamMap(
|
||||
streamMap: Map<string, MediaStream>,
|
||||
remotePeerId: string,
|
||||
trackId: string
|
||||
): MediaStream | null {
|
||||
return removeTracksFromStreamMap(streamMap, remotePeerId, new Set([trackId]));
|
||||
}
|
||||
|
||||
function removeTracksFromStreamMap(
|
||||
streamMap: Map<string, MediaStream>,
|
||||
remotePeerId: string,
|
||||
trackIds: ReadonlySet<string>
|
||||
): MediaStream | null {
|
||||
const currentStream = streamMap.get(remotePeerId);
|
||||
|
||||
if (!currentStream) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const remainingTracks = currentStream
|
||||
.getTracks()
|
||||
.filter((existingTrack) => !trackIds.has(existingTrack.id) && existingTrack.readyState === 'live');
|
||||
|
||||
if (remainingTracks.length === currentStream.getTracks().length) {
|
||||
return currentStream;
|
||||
}
|
||||
|
||||
if (remainingTracks.length === 0) {
|
||||
streamMap.delete(remotePeerId);
|
||||
return null;
|
||||
}
|
||||
|
||||
const nextStream = new MediaStream(remainingTracks);
|
||||
|
||||
streamMap.set(remotePeerId, nextStream);
|
||||
return nextStream;
|
||||
}
|
||||
|
||||
function publishRemoteStreamUpdate(
|
||||
context: PeerConnectionManagerContext,
|
||||
remotePeerId: string,
|
||||
compositeStream: MediaStream | null
|
||||
): void {
|
||||
const { logger, state } = context;
|
||||
const stream = compositeStream ?? new MediaStream();
|
||||
|
||||
state.remoteStream$.next({
|
||||
peerId: remotePeerId,
|
||||
stream
|
||||
});
|
||||
|
||||
recordDebugNetworkStreams(remotePeerId, {
|
||||
audio: stream.getAudioTracks().length,
|
||||
video: stream.getVideoTracks().length
|
||||
});
|
||||
|
||||
logger.info('Remote stream updated', {
|
||||
audioTrackCount: stream.getAudioTracks().length,
|
||||
remotePeerId,
|
||||
trackCount: stream.getTracks().length,
|
||||
videoTrackCount: stream.getVideoTracks().length
|
||||
});
|
||||
}
|
||||
|
||||
function isVoiceAudioTrack(track: MediaStreamTrack, isScreenAudio: boolean): boolean {
|
||||
return track.kind === TRACK_KIND_AUDIO && !isScreenAudio;
|
||||
}
|
||||
|
||||
function isScreenShareTrack(track: MediaStreamTrack, isScreenAudio: boolean): boolean {
|
||||
return track.kind === TRACK_KIND_VIDEO || isScreenAudio;
|
||||
}
|
||||
|
||||
/**
 * Decides whether an incoming audio track belongs to the remote peer's screen
 * share rather than their voice chat.
 *
 * Applies a cascade of checks, strongest evidence first:
 *   1. previously remembered stream-ID classification,
 *   2. the track arriving bundled with a live video track,
 *   3. transceiver matching against the known screen-audio / voice senders,
 *   4. a positional fallback among the connection's audio transceivers.
 *
 * @returns `true` when the track is classified as screen-share audio.
 */
function isScreenShareAudioTrack(
  context: PeerConnectionManagerContext,
  event: RTCTrackEvent,
  remotePeerId: string
): boolean {
  // Only audio tracks can be screen-share audio.
  if (event.track.kind !== TRACK_KIND_AUDIO) {
    return false;
  }

  const peerData = context.state.activePeerConnections.get(remotePeerId);

  // Without connection bookkeeping we cannot classify; default to voice.
  if (!peerData) {
    return false;
  }

  const incomingStreamIds = getIncomingStreamIds(event);

  // Cached classification from earlier tracks on the same MediaStream wins.
  if (incomingStreamIds.some((streamId) => peerData.remoteScreenShareStreamIds.has(streamId))) {
    return true;
  }

  if (incomingStreamIds.some((streamId) => peerData.remoteVoiceStreamIds.has(streamId))) {
    return false;
  }

  // Audio bundled in a stream that also carries live video is treated as
  // screen-share audio (voice streams here are audio-only).
  if (event.streams.some((stream) => stream.getVideoTracks().some((track) => track.readyState === 'live'))) {
    return true;
  }

  // NOTE(review): the following matches the receiving transceiver against the
  // transceivers that carry OUR screen-audio / voice senders — presumably
  // relying on symmetric transceiver mids on both ends; confirm against the
  // offer/answer logic in the peer-connection manager.
  const screenAudioTransceiver = peerData.connection.getTransceivers().find(
    (transceiver) => transceiver.sender === peerData.screenAudioSender
  );

  if (screenAudioTransceiver && matchesTransceiver(event.transceiver, screenAudioTransceiver)) {
    return true;
  }

  const voiceAudioTransceiver = peerData.connection.getTransceivers().find(
    (transceiver) => transceiver.sender === peerData.audioSender
  );

  // If a voice transceiver is known, anything that is not it counts as screen audio.
  if (voiceAudioTransceiver) {
    return !matchesTransceiver(event.transceiver, voiceAudioTransceiver);
  }

  // Fallback: order audio transceivers; the first is assumed to be voice, so a
  // later position implies screen-share audio.
  const audioTransceivers = peerData.connection.getTransceivers().filter((transceiver) =>
    transceiver.receiver.track?.kind === TRACK_KIND_AUDIO || transceiver === event.transceiver
  );
  const transceiverIndex = audioTransceivers.findIndex((transceiver) =>
    transceiver === event.transceiver || (!!transceiver.mid && transceiver.mid === event.transceiver.mid)
  );

  return transceiverIndex > 0;
}
|
||||
|
||||
function rememberIncomingStreamIds(
|
||||
state: PeerConnectionManagerContext['state'],
|
||||
event: RTCTrackEvent,
|
||||
remotePeerId: string,
|
||||
options: {
|
||||
isScreenAudio: boolean;
|
||||
isVoiceAudio: boolean;
|
||||
isScreenTrack: boolean;
|
||||
}
|
||||
): void {
|
||||
const peerData = state.activePeerConnections.get(remotePeerId);
|
||||
|
||||
if (!peerData) {
|
||||
return;
|
||||
}
|
||||
|
||||
const incomingStreamIds = getIncomingStreamIds(event);
|
||||
|
||||
if (incomingStreamIds.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (event.track.kind === TRACK_KIND_VIDEO || options.isScreenAudio || options.isScreenTrack) {
|
||||
incomingStreamIds.forEach((streamId) => {
|
||||
peerData.remoteScreenShareStreamIds.add(streamId);
|
||||
peerData.remoteVoiceStreamIds.delete(streamId);
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (options.isVoiceAudio) {
|
||||
incomingStreamIds.forEach((streamId) => {
|
||||
peerData.remoteVoiceStreamIds.add(streamId);
|
||||
peerData.remoteScreenShareStreamIds.delete(streamId);
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
function getIncomingStreamIds(event: RTCTrackEvent): string[] {
|
||||
return event.streams
|
||||
.map((stream) => stream.id)
|
||||
.filter((streamId): streamId is string => !!streamId);
|
||||
}
|
||||
|
||||
function matchesTransceiver(left: RTCRtpTransceiver, right: RTCRtpTransceiver): boolean {
|
||||
return left === right || (!!left.mid && !!right.mid && left.mid === right.mid);
|
||||
}
|
||||
@@ -0,0 +1 @@
|
||||
export * from './peer-connection-manager';
|
||||
@@ -0,0 +1,623 @@
|
||||
/**
|
||||
* WebRTCService - thin Angular service that composes specialised managers.
|
||||
*
|
||||
* Each concern lives in its own file under `./`:
|
||||
* • SignalingManager - WebSocket lifecycle & reconnection
|
||||
* • PeerConnectionManager - RTCPeerConnection, offers/answers, ICE, data channels
|
||||
* • MediaManager - mic voice, mute, deafen, bitrate
|
||||
* • ScreenShareManager - screen capture & mixed audio
|
||||
* • WebRTCLogger - debug / diagnostic logging
|
||||
*
|
||||
* This file wires them together and exposes a public API that is
|
||||
* identical to the old monolithic service so consumers don't change.
|
||||
*/
|
||||
/* eslint-disable @typescript-eslint/member-ordering, @typescript-eslint/no-non-null-assertion */
|
||||
import {
|
||||
Injectable,
|
||||
inject,
|
||||
OnDestroy
|
||||
} from '@angular/core';
|
||||
import { Observable, Subject } from 'rxjs';
|
||||
import { ChatEvent } from '../../shared-kernel';
|
||||
import type { SignalingMessage } from '../../shared-kernel';
|
||||
import { TimeSyncService } from '../../core/services/time-sync.service';
|
||||
import { DebuggingService } from '../../core/services/debugging';
|
||||
import { ScreenShareSourcePickerService } from '../../domains/screen-share';
|
||||
import { MediaManager } from './media/media.manager';
|
||||
import { ScreenShareManager } from './media/screen-share.manager';
|
||||
import { VoiceSessionController } from './media/voice-session-controller';
|
||||
import type { PeerData, VoiceStateSnapshot } from './realtime.types';
|
||||
import { LatencyProfile } from './realtime.constants';
|
||||
import { ScreenShareStartOptions } from './screen-share.config';
|
||||
import { WebRTCLogger } from './logging/webrtc-logger';
|
||||
import { PeerConnectionManager } from './peer-connection-manager/peer-connection.manager';
|
||||
import { PeerMediaFacade } from './streams/peer-media-facade';
|
||||
import { RemoteScreenShareRequestController } from './streams/remote-screen-share-request-controller';
|
||||
import { IncomingSignalingMessage, IncomingSignalingMessageHandler } from './signaling/signaling-message-handler';
|
||||
import { ServerMembershipSignalingHandler } from './signaling/server-membership-signaling-handler';
|
||||
import { ServerSignalingCoordinator } from './signaling/server-signaling-coordinator';
|
||||
import { SignalingManager } from './signaling/signaling.manager';
|
||||
import { SignalingTransportHandler } from './signaling/signaling-transport-handler';
|
||||
import { WebRtcStateController } from './state/webrtc-state-controller';
|
||||
|
||||
@Injectable({
|
||||
providedIn: 'root'
|
||||
})
|
||||
export class WebRTCService implements OnDestroy {
|
||||
private readonly timeSync = inject(TimeSyncService);
|
||||
private readonly debugging = inject(DebuggingService);
|
||||
private readonly screenShareSourcePicker = inject(ScreenShareSourcePickerService);
|
||||
|
||||
private readonly logger = new WebRTCLogger(() => this.debugging.enabled());
|
||||
private readonly state = new WebRtcStateController();
|
||||
|
||||
readonly peerId = this.state.peerId;
|
||||
readonly isConnected = this.state.isConnected;
|
||||
readonly hasEverConnected = this.state.hasEverConnected;
|
||||
readonly isVoiceConnected = this.state.isVoiceConnected;
|
||||
readonly connectedPeers = this.state.connectedPeers;
|
||||
readonly isMuted = this.state.isMuted;
|
||||
readonly isDeafened = this.state.isDeafened;
|
||||
readonly isScreenSharing = this.state.isScreenSharing;
|
||||
readonly isNoiseReductionEnabled = this.state.isNoiseReductionEnabled;
|
||||
readonly screenStream = this.state.screenStream;
|
||||
readonly isScreenShareRemotePlaybackSuppressed = this.state.isScreenShareRemotePlaybackSuppressed;
|
||||
readonly forceDefaultRemotePlaybackOutput = this.state.forceDefaultRemotePlaybackOutput;
|
||||
readonly hasConnectionError = this.state.hasConnectionError;
|
||||
readonly connectionErrorMessage = this.state.connectionErrorMessage;
|
||||
readonly shouldShowConnectionError = this.state.shouldShowConnectionError;
|
||||
readonly peerLatencies = this.state.peerLatencies;
|
||||
|
||||
private readonly signalingMessage$ = new Subject<IncomingSignalingMessage>();
|
||||
readonly onSignalingMessage = this.signalingMessage$.asObservable();
|
||||
|
||||
// Delegates to managers
|
||||
get onMessageReceived(): Observable<ChatEvent> {
|
||||
return this.peerMediaFacade.onMessageReceived;
|
||||
}
|
||||
get onPeerConnected(): Observable<string> {
|
||||
return this.peerMediaFacade.onPeerConnected;
|
||||
}
|
||||
get onPeerDisconnected(): Observable<string> {
|
||||
return this.peerMediaFacade.onPeerDisconnected;
|
||||
}
|
||||
get onRemoteStream(): Observable<{ peerId: string; stream: MediaStream }> {
|
||||
return this.peerMediaFacade.onRemoteStream;
|
||||
}
|
||||
get onVoiceConnected(): Observable<void> {
|
||||
return this.peerMediaFacade.onVoiceConnected;
|
||||
}
|
||||
|
||||
private readonly peerManager: PeerConnectionManager;
|
||||
private readonly mediaManager: MediaManager;
|
||||
private readonly screenShareManager: ScreenShareManager;
|
||||
private readonly peerMediaFacade: PeerMediaFacade;
|
||||
private readonly voiceSessionController: VoiceSessionController;
|
||||
private readonly signalingCoordinator: ServerSignalingCoordinator<IncomingSignalingMessage>;
|
||||
private readonly signalingTransportHandler: SignalingTransportHandler<IncomingSignalingMessage>;
|
||||
private readonly signalingMessageHandler: IncomingSignalingMessageHandler;
|
||||
private readonly serverMembershipSignalingHandler: ServerMembershipSignalingHandler<IncomingSignalingMessage>;
|
||||
private readonly remoteScreenShareRequestController: RemoteScreenShareRequestController;
|
||||
|
||||
constructor() {
|
||||
// Create managers with null callbacks first to break circular initialization
|
||||
this.peerManager = new PeerConnectionManager(this.logger, null!);
|
||||
|
||||
this.mediaManager = new MediaManager(this.logger, null!);
|
||||
|
||||
this.screenShareManager = new ScreenShareManager(this.logger, null!);
|
||||
|
||||
this.peerMediaFacade = new PeerMediaFacade({
|
||||
peerManager: this.peerManager,
|
||||
mediaManager: this.mediaManager,
|
||||
screenShareManager: this.screenShareManager
|
||||
});
|
||||
|
||||
this.voiceSessionController = new VoiceSessionController({
|
||||
mediaManager: this.mediaManager,
|
||||
getIsScreenSharing: () => this.state.isScreenSharingActive(),
|
||||
setVoiceConnected: (connected) => this.state.setVoiceConnected(connected),
|
||||
setMuted: (muted) => this.state.setMuted(muted),
|
||||
setDeafened: (deafened) => this.state.setDeafened(deafened),
|
||||
setNoiseReductionEnabled: (enabled) => this.state.setNoiseReductionEnabled(enabled)
|
||||
});
|
||||
|
||||
this.signalingCoordinator = new ServerSignalingCoordinator({
|
||||
createManager: (_signalUrl, getLastJoinedServer, getMemberServerIds) => new SignalingManager(
|
||||
this.logger,
|
||||
() => this.signalingTransportHandler.getIdentifyCredentials(),
|
||||
getLastJoinedServer,
|
||||
getMemberServerIds
|
||||
),
|
||||
handleConnectionStatus: (_signalUrl, connected, errorMessage) =>
|
||||
this.handleSignalingConnectionStatus(connected, errorMessage),
|
||||
handleHeartbeatTick: () => this.peerMediaFacade.broadcastCurrentStates(),
|
||||
handleMessage: (message, signalUrl) => this.handleSignalingMessage(message, signalUrl)
|
||||
});
|
||||
|
||||
this.signalingTransportHandler = new SignalingTransportHandler({
|
||||
signalingCoordinator: this.signalingCoordinator,
|
||||
logger: this.logger,
|
||||
getLocalPeerId: () => this.state.getLocalPeerId()
|
||||
});
|
||||
|
||||
// Now wire up cross-references (all managers are instantiated)
|
||||
this.peerManager.setCallbacks({
|
||||
sendRawMessage: (msg: Record<string, unknown>) => this.signalingTransportHandler.sendRawMessage(msg),
|
||||
getLocalMediaStream: (): MediaStream | null => this.peerMediaFacade.getLocalStream(),
|
||||
isSignalingConnected: (): boolean => this.state.isSignalingConnected(),
|
||||
getVoiceStateSnapshot: (): VoiceStateSnapshot => this.voiceSessionController.getCurrentVoiceState(),
|
||||
getIdentifyCredentials: () => this.signalingTransportHandler.getIdentifyCredentials(),
|
||||
getLocalPeerId: (): string => this.state.getLocalPeerId(),
|
||||
isScreenSharingActive: (): boolean => this.state.isScreenSharingActive()
|
||||
});
|
||||
|
||||
this.mediaManager.setCallbacks({
|
||||
getActivePeers: (): Map<string, PeerData> => this.peerMediaFacade.getActivePeers(),
|
||||
renegotiate: (peerId: string): Promise<void> => this.peerMediaFacade.renegotiate(peerId),
|
||||
broadcastMessage: (event: ChatEvent): void => this.peerMediaFacade.broadcastMessage(event),
|
||||
getIdentifyOderId: (): string => this.signalingTransportHandler.getIdentifyOderId(),
|
||||
getIdentifyDisplayName: (): string => this.signalingTransportHandler.getIdentifyDisplayName()
|
||||
});
|
||||
|
||||
this.screenShareManager.setCallbacks({
|
||||
getActivePeers: (): Map<string, PeerData> => this.peerMediaFacade.getActivePeers(),
|
||||
getLocalMediaStream: (): MediaStream | null => this.peerMediaFacade.getLocalStream(),
|
||||
renegotiate: (peerId: string): Promise<void> => this.peerMediaFacade.renegotiate(peerId),
|
||||
broadcastCurrentStates: (): void => this.peerMediaFacade.broadcastCurrentStates(),
|
||||
selectDesktopSource: async (sources, options) => await this.screenShareSourcePicker.open(
|
||||
sources,
|
||||
options.includeSystemAudio
|
||||
),
|
||||
updateLocalScreenShareState: (state): void => this.state.applyLocalScreenShareState(state)
|
||||
});
|
||||
|
||||
this.signalingMessageHandler = new IncomingSignalingMessageHandler({
|
||||
getEffectiveServerId: () => this.voiceSessionController.getEffectiveServerId(this.state.currentServerId),
|
||||
peerManager: this.peerManager,
|
||||
setServerTime: (serverTime) => this.timeSync.setFromServerTime(serverTime),
|
||||
signalingCoordinator: this.signalingCoordinator,
|
||||
logger: this.logger
|
||||
});
|
||||
|
||||
this.serverMembershipSignalingHandler = new ServerMembershipSignalingHandler({
|
||||
signalingCoordinator: this.signalingCoordinator,
|
||||
signalingTransport: this.signalingTransportHandler,
|
||||
logger: this.logger,
|
||||
getActiveServerId: () => this.state.currentServerId,
|
||||
isVoiceConnected: () => this.state.isVoiceConnectedActive(),
|
||||
runFullCleanup: () => this.fullCleanup()
|
||||
});
|
||||
|
||||
this.remoteScreenShareRequestController = new RemoteScreenShareRequestController({
|
||||
getConnectedPeerIds: () => this.peerMediaFacade.getConnectedPeerIds(),
|
||||
sendToPeer: (peerId, event) => this.peerMediaFacade.sendToPeer(peerId, event),
|
||||
clearRemoteScreenShareStream: (peerId) => this.peerMediaFacade.clearRemoteScreenShareStream(peerId),
|
||||
requestScreenShareForPeer: (peerId) => this.peerMediaFacade.requestScreenShareForPeer(peerId),
|
||||
stopScreenShareForPeer: (peerId) => this.peerMediaFacade.stopScreenShareForPeer(peerId),
|
||||
clearScreenShareRequest: (peerId) => this.peerMediaFacade.clearScreenShareRequest(peerId)
|
||||
});
|
||||
|
||||
this.wireManagerEvents();
|
||||
}
|
||||
|
||||
private wireManagerEvents(): void {
|
||||
// Internal control-plane messages for on-demand screen-share delivery.
|
||||
this.peerManager.messageReceived$.subscribe((event) =>
|
||||
this.remoteScreenShareRequestController.handlePeerControlMessage(event)
|
||||
);
|
||||
|
||||
// Peer manager → connected peers signal
|
||||
this.peerManager.connectedPeersChanged$.subscribe((peers: string[]) =>
|
||||
this.state.setConnectedPeers(peers)
|
||||
);
|
||||
|
||||
// If we are already sharing when a new peer connection finishes, push the
|
||||
// current screen-share tracks to that peer and renegotiate.
|
||||
this.peerManager.peerConnected$.subscribe((peerId) => {
|
||||
if (this.peerMediaFacade.isScreenShareActive()) {
|
||||
this.peerMediaFacade.syncScreenShareToPeer(peerId);
|
||||
}
|
||||
|
||||
this.remoteScreenShareRequestController.handlePeerConnected(peerId);
|
||||
});
|
||||
|
||||
this.peerManager.peerDisconnected$.subscribe((peerId) => {
|
||||
this.remoteScreenShareRequestController.handlePeerDisconnected(peerId);
|
||||
this.signalingCoordinator.deletePeerTracking(peerId);
|
||||
});
|
||||
|
||||
// Media manager → voice connected signal
|
||||
this.mediaManager.voiceConnected$.subscribe(() => {
|
||||
this.voiceSessionController.handleVoiceConnected();
|
||||
});
|
||||
|
||||
// Peer manager → latency updates
|
||||
this.peerManager.peerLatencyChanged$.subscribe(() =>
|
||||
this.state.syncPeerLatencies(this.peerManager.peerLatencies)
|
||||
);
|
||||
}
|
||||
|
||||
private handleSignalingConnectionStatus(connected: boolean, errorMessage?: string): void {
|
||||
this.state.updateSignalingConnectionStatus(
|
||||
this.signalingCoordinator.isAnySignalingConnected(),
|
||||
connected,
|
||||
errorMessage
|
||||
);
|
||||
}
|
||||
|
||||
private handleSignalingMessage(message: IncomingSignalingMessage, signalUrl: string): void {
|
||||
this.signalingMessage$.next(message);
|
||||
this.signalingMessageHandler.handleMessage(message, signalUrl);
|
||||
}
|
||||
|
||||
// PUBLIC API - matches the old monolithic service's interface
|
||||
|
||||
/**
|
||||
* Connect to a signaling server via WebSocket.
|
||||
*
|
||||
* @param serverUrl - The WebSocket URL of the signaling server.
|
||||
* @returns An observable that emits `true` once connected.
|
||||
*/
|
||||
connectToSignalingServer(serverUrl: string): Observable<boolean> {
|
||||
return this.signalingTransportHandler.connectToSignalingServer(serverUrl);
|
||||
}
|
||||
|
||||
/** Returns true when the signaling socket for a given URL is currently open. */
|
||||
isSignalingConnectedTo(serverUrl: string): boolean {
|
||||
return this.signalingTransportHandler.isSignalingConnectedTo(serverUrl);
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure the signaling WebSocket is connected, reconnecting if needed.
|
||||
*
|
||||
* @param timeoutMs - Maximum time (ms) to wait for the connection.
|
||||
* @returns `true` if connected within the timeout.
|
||||
*/
|
||||
async ensureSignalingConnected(timeoutMs?: number): Promise<boolean> {
|
||||
return await this.signalingTransportHandler.ensureSignalingConnected(timeoutMs);
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a signaling-level message (with `from` and `timestamp` auto-populated).
|
||||
*
|
||||
* @param message - The signaling message payload (excluding `from` / `timestamp`).
|
||||
*/
|
||||
sendSignalingMessage(message: Omit<SignalingMessage, 'from' | 'timestamp'>): void {
|
||||
this.signalingTransportHandler.sendSignalingMessage(message);
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a raw JSON payload through the signaling WebSocket.
|
||||
*
|
||||
* @param message - Arbitrary JSON message.
|
||||
*/
|
||||
sendRawMessage(message: Record<string, unknown>): void {
|
||||
this.signalingTransportHandler.sendRawMessage(message);
|
||||
}
|
||||
|
||||
/**
|
||||
* Track the currently-active server ID (for server-scoped operations).
|
||||
*
|
||||
* @param serverId - The server to mark as active.
|
||||
*/
|
||||
setCurrentServer(serverId: string): void {
|
||||
this.state.setCurrentServer(serverId);
|
||||
}
|
||||
|
||||
/** The server ID currently being viewed / active, or `null`. */
|
||||
get currentServerId(): string | null {
|
||||
return this.state.currentServerId;
|
||||
}
|
||||
|
||||
/** The last signaling URL used by the client, if any. */
|
||||
getCurrentSignalingUrl(): string | null {
|
||||
return this.signalingTransportHandler.getCurrentSignalingUrl(this.state.currentServerId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Send an identify message to the signaling server.
|
||||
*
|
||||
* The credentials are cached so they can be replayed after a reconnect.
|
||||
*
|
||||
* @param oderId - The user's unique order/peer ID.
|
||||
* @param displayName - The user's display name.
|
||||
*/
|
||||
identify(oderId: string, displayName: string, signalUrl?: string): void {
|
||||
this.signalingTransportHandler.identify(oderId, displayName, signalUrl);
|
||||
}
|
||||
|
||||
/**
|
||||
* Join a server (room) on the signaling server.
|
||||
*
|
||||
* @param roomId - The server / room ID to join.
|
||||
* @param userId - The local user ID.
|
||||
*/
|
||||
joinRoom(roomId: string, userId: string, signalUrl?: string): void {
|
||||
this.serverMembershipSignalingHandler.joinRoom(roomId, userId, signalUrl);
|
||||
}
|
||||
|
||||
/**
|
||||
* Switch to a different server. If already a member, sends a view event;
|
||||
* otherwise joins the server.
|
||||
*
|
||||
* @param serverId - The target server ID.
|
||||
* @param userId - The local user ID.
|
||||
*/
|
||||
switchServer(serverId: string, userId: string, signalUrl?: string): void {
|
||||
this.serverMembershipSignalingHandler.switchServer(serverId, userId, signalUrl);
|
||||
}
|
||||
|
||||
/**
|
||||
* Leave one or all servers.
|
||||
*
|
||||
* If `serverId` is provided, leaves only that server.
|
||||
* Otherwise leaves every joined server and performs a full cleanup.
|
||||
*
|
||||
* @param serverId - Optional server to leave; omit to leave all.
|
||||
*/
|
||||
leaveRoom(serverId?: string): void {
|
||||
this.serverMembershipSignalingHandler.leaveRoom(serverId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether the local client has joined a given server.
|
||||
*
|
||||
* @param serverId - The server to check.
|
||||
*/
|
||||
hasJoinedServer(serverId: string): boolean {
|
||||
return this.signalingCoordinator.hasJoinedServer(serverId);
|
||||
}
|
||||
|
||||
/** Returns a read-only set of all currently-joined server IDs. */
|
||||
getJoinedServerIds(): ReadonlySet<string> {
|
||||
return this.signalingCoordinator.getJoinedServerIds();
|
||||
}
|
||||
|
||||
/**
|
||||
* Broadcast a {@link ChatEvent} to every connected peer.
|
||||
*
|
||||
* @param event - The chat event to send.
|
||||
*/
|
||||
broadcastMessage(event: ChatEvent): void {
|
||||
this.peerMediaFacade.broadcastMessage(event);
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a {@link ChatEvent} to a specific peer.
|
||||
*
|
||||
* @param peerId - The target peer ID.
|
||||
* @param event - The chat event to send.
|
||||
*/
|
||||
sendToPeer(peerId: string, event: ChatEvent): void {
|
||||
this.peerMediaFacade.sendToPeer(peerId, event);
|
||||
}
|
||||
|
||||
syncRemoteScreenShareRequests(peerIds: string[], enabled: boolean): void {
|
||||
this.remoteScreenShareRequestController.syncRemoteScreenShareRequests(peerIds, enabled);
|
||||
}
|
||||
|
||||
/**
|
||||
* Send a {@link ChatEvent} to a peer with back-pressure awareness.
|
||||
*
|
||||
* @param peerId - The target peer ID.
|
||||
* @param event - The chat event to send.
|
||||
*/
|
||||
async sendToPeerBuffered(peerId: string, event: ChatEvent): Promise<void> {
|
||||
return await this.peerMediaFacade.sendToPeerBuffered(peerId, event);
|
||||
}
|
||||
|
||||
/** Returns an array of currently-connected peer IDs. */
|
||||
getConnectedPeers(): string[] {
|
||||
return this.peerMediaFacade.getConnectedPeerIds();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the composite remote {@link MediaStream} for a connected peer.
|
||||
*
|
||||
* @param peerId - The remote peer whose stream to retrieve.
|
||||
* @returns The stream, or `null` if the peer has no active stream.
|
||||
*/
|
||||
getRemoteStream(peerId: string): MediaStream | null {
|
||||
return this.peerMediaFacade.getRemoteStream(peerId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the remote voice-only stream for a connected peer.
|
||||
*
|
||||
* @param peerId - The remote peer whose voice stream to retrieve.
|
||||
* @returns The stream, or `null` if the peer has no active voice audio.
|
||||
*/
|
||||
getRemoteVoiceStream(peerId: string): MediaStream | null {
|
||||
return this.peerMediaFacade.getRemoteVoiceStream(peerId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the remote screen-share stream for a connected peer.
|
||||
*
|
||||
* This contains the screen video track and any audio track that belongs to
|
||||
* the screen share itself, not the peer's normal voice-chat audio.
|
||||
*
|
||||
* @param peerId - The remote peer whose screen-share stream to retrieve.
|
||||
* @returns The stream, or `null` if the peer has no active screen share.
|
||||
*/
|
||||
getRemoteScreenShareStream(peerId: string): MediaStream | null {
|
||||
return this.peerMediaFacade.getRemoteScreenShareStream(peerId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current local media stream (microphone audio).
|
||||
*
|
||||
* @returns The local {@link MediaStream}, or `null` if voice is not active.
|
||||
*/
|
||||
getLocalStream(): MediaStream | null {
|
||||
return this.peerMediaFacade.getLocalStream();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the raw local microphone stream before gain / RNNoise processing.
|
||||
*
|
||||
* @returns The raw microphone {@link MediaStream}, or `null` if voice is not active.
|
||||
*/
|
||||
getRawMicStream(): MediaStream | null {
|
||||
return this.peerMediaFacade.getRawMicStream();
|
||||
}
|
||||
|
||||
/**
|
||||
* Request microphone access and start sending audio to all peers.
|
||||
*
|
||||
* @returns The captured local {@link MediaStream}.
|
||||
*/
|
||||
async enableVoice(): Promise<MediaStream> {
|
||||
return await this.voiceSessionController.enableVoice();
|
||||
}
|
||||
|
||||
/** Stop local voice capture and remove audio senders from peers. */
|
||||
disableVoice(): void {
|
||||
this.voiceSessionController.disableVoice();
|
||||
}
|
||||
|
||||
/**
|
||||
* Inject an externally-obtained media stream as the local voice source.
|
||||
*
|
||||
* @param stream - The media stream to use.
|
||||
*/
|
||||
async setLocalStream(stream: MediaStream): Promise<void> {
|
||||
await this.voiceSessionController.setLocalStream(stream);
|
||||
}
|
||||
|
||||
/**
|
||||
* Toggle the local microphone mute state.
|
||||
*
|
||||
* @param muted - Explicit state; if omitted, the current state is toggled.
|
||||
*/
|
||||
toggleMute(muted?: boolean): void {
|
||||
this.voiceSessionController.toggleMute(muted);
|
||||
}
|
||||
|
||||
/**
|
||||
* Toggle self-deafen (suppress incoming audio playback).
|
||||
*
|
||||
* @param deafened - Explicit state; if omitted, the current state is toggled.
|
||||
*/
|
||||
toggleDeafen(deafened?: boolean): void {
|
||||
this.voiceSessionController.toggleDeafen(deafened);
|
||||
}
|
||||
|
||||
/**
|
||||
* Toggle RNNoise noise reduction on the local microphone.
|
||||
*
|
||||
* When enabled, the raw mic audio is routed through an AudioWorklet
|
||||
* that applies neural-network noise suppression before being sent
|
||||
* to peers.
|
||||
*
|
||||
* @param enabled - Explicit state; if omitted, the current state is toggled.
|
||||
*/
|
||||
async toggleNoiseReduction(enabled?: boolean): Promise<void> {
|
||||
await this.voiceSessionController.toggleNoiseReduction(enabled);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the output volume for remote audio playback.
|
||||
*
|
||||
* @param volume - Normalised volume (0-1).
|
||||
*/
|
||||
setOutputVolume(volume: number): void {
|
||||
this.voiceSessionController.setOutputVolume(volume);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the input (microphone) volume.
|
||||
*
|
||||
* Adjusts a Web Audio GainNode on the local mic stream so the level
|
||||
* sent to peers changes in real time without renegotiation.
|
||||
*
|
||||
* @param volume - Normalised volume (0-1).
|
||||
*/
|
||||
setInputVolume(volume: number): void {
|
||||
this.voiceSessionController.setInputVolume(volume);
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the maximum audio bitrate for all peer connections.
|
||||
*
|
||||
* @param kbps - Target bitrate in kilobits per second.
|
||||
*/
|
||||
async setAudioBitrate(kbps: number): Promise<void> {
|
||||
return await this.voiceSessionController.setAudioBitrate(kbps);
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply a predefined latency profile that maps to a specific bitrate.
|
||||
*
|
||||
* @param profile - One of `'low'`, `'balanced'`, or `'high'`.
|
||||
*/
|
||||
async setLatencyProfile(profile: LatencyProfile): Promise<void> {
|
||||
return await this.voiceSessionController.setLatencyProfile(profile);
|
||||
}
|
||||
|
||||
/**
|
||||
* Start broadcasting voice-presence heartbeats to all peers.
|
||||
*
|
||||
* Also marks the given server as the active voice server and closes
|
||||
* any peer connections that belong to other servers so that audio
|
||||
* is isolated to the correct voice channel.
|
||||
*
|
||||
* @param roomId - The voice channel room ID.
|
||||
* @param serverId - The voice channel server ID.
|
||||
*/
|
||||
startVoiceHeartbeat(roomId?: string, serverId?: string): void {
|
||||
this.voiceSessionController.startVoiceHeartbeat(roomId, serverId);
|
||||
}
|
||||
|
||||
/** Stop the voice-presence heartbeat. */
|
||||
stopVoiceHeartbeat(): void {
|
||||
this.voiceSessionController.stopVoiceHeartbeat();
|
||||
}
|
||||
|
||||
/**
|
||||
* Start sharing the screen (or a window) with all connected peers.
|
||||
*
|
||||
* @param options - Screen-share capture options.
|
||||
* @returns The screen-capture {@link MediaStream}.
|
||||
*/
|
||||
async startScreenShare(options: ScreenShareStartOptions): Promise<MediaStream> {
|
||||
return await this.peerMediaFacade.startScreenShare(options);
|
||||
}
|
||||
|
||||
/** Stop screen sharing and restore microphone audio on all peers. */
|
||||
stopScreenShare(): void {
|
||||
this.peerMediaFacade.stopScreenShare();
|
||||
}
|
||||
|
||||
/** Disconnect from the signaling server and clean up all state. */
|
||||
disconnect(): void {
|
||||
this.leaveRoom();
|
||||
this.destroyAllSignalingManagers();
|
||||
this.state.resetConnectionState();
|
||||
}
|
||||
|
||||
/** Alias for {@link disconnect}. */
|
||||
disconnectAll(): void {
|
||||
this.disconnect();
|
||||
}
|
||||
|
||||
private fullCleanup(): void {
|
||||
this.signalingCoordinator.clearPeerTracking();
|
||||
this.remoteScreenShareRequestController.clear();
|
||||
this.peerMediaFacade.closeAllPeers();
|
||||
this.state.clearPeerViewState();
|
||||
this.voiceSessionController.resetVoiceSession();
|
||||
this.peerMediaFacade.stopScreenShare();
|
||||
this.state.clearScreenShareState();
|
||||
}
|
||||
|
||||
private destroyAllSignalingManagers(): void {
|
||||
this.signalingCoordinator.destroy();
|
||||
}
|
||||
|
||||
ngOnDestroy(): void {
|
||||
this.disconnect();
|
||||
this.peerMediaFacade.destroy();
|
||||
}
|
||||
}
|
||||
105
toju-app/src/app/infrastructure/realtime/realtime.constants.ts
Normal file
105
toju-app/src/app/infrastructure/realtime/realtime.constants.ts
Normal file
@@ -0,0 +1,105 @@
|
||||
import type { LatencyProfile } from '../../shared-kernel';
|
||||
|
||||
/**
|
||||
* All magic numbers and strings used across the WebRTC subsystem.
|
||||
* Centralised here so nothing is hard-coded inline.
|
||||
*/
|
||||
|
||||
export const ICE_SERVERS: RTCIceServer[] = [
|
||||
{ urls: 'stun:stun.l.google.com:19302' },
|
||||
{ urls: 'stun:stun1.l.google.com:19302' },
|
||||
{ urls: 'stun:stun2.l.google.com:19302' },
|
||||
{ urls: 'stun:stun3.l.google.com:19302' },
|
||||
{ urls: 'stun:stun4.l.google.com:19302' }
|
||||
];
|
||||
|
||||
/** Base delay (ms) for exponential backoff on signaling reconnect */
|
||||
export const SIGNALING_RECONNECT_BASE_DELAY_MS = 1_000;
|
||||
/** Maximum delay (ms) between signaling reconnect attempts */
|
||||
export const SIGNALING_RECONNECT_MAX_DELAY_MS = 30_000;
|
||||
/** Default timeout (ms) for `ensureSignalingConnected` */
|
||||
export const SIGNALING_CONNECT_TIMEOUT_MS = 5_000;
|
||||
|
||||
/** Maximum P2P reconnect attempts before giving up */
|
||||
export const PEER_RECONNECT_MAX_ATTEMPTS = 12;
|
||||
/** Interval (ms) between P2P reconnect attempts */
|
||||
export const PEER_RECONNECT_INTERVAL_MS = 5_000;
|
||||
/** How long to wait before treating a transient disconnect as fatal */
|
||||
export const PEER_DISCONNECT_GRACE_MS = 10_000;
|
||||
|
||||
/** Interval (ms) for broadcasting state heartbeats */
|
||||
export const STATE_HEARTBEAT_INTERVAL_MS = 5_000;
|
||||
/** Interval (ms) for broadcasting voice presence */
|
||||
export const VOICE_HEARTBEAT_INTERVAL_MS = 5_000;
|
||||
|
||||
/** Data channel name used for P2P chat */
|
||||
export const DATA_CHANNEL_LABEL = 'chat';
|
||||
/** High-water mark (bytes) - pause sending when buffered amount exceeds this */
|
||||
export const DATA_CHANNEL_HIGH_WATER_BYTES = 4 * 1024 * 1024; // 4 MB
|
||||
/** Low-water mark (bytes) - resume sending once buffered amount drops below this */
|
||||
export const DATA_CHANNEL_LOW_WATER_BYTES = 1 * 1024 * 1024; // 1 MB
|
||||
|
||||
export const SCREEN_SHARE_IDEAL_WIDTH = 1920;
|
||||
export const SCREEN_SHARE_IDEAL_HEIGHT = 1080;
|
||||
export const SCREEN_SHARE_IDEAL_FRAME_RATE = 30;
|
||||
/** Electron source name to prefer for whole-screen capture */
|
||||
export { ELECTRON_ENTIRE_SCREEN_SOURCE_NAME } from '../../shared-kernel';
|
||||
|
||||
/** Minimum audio bitrate (bps) */
|
||||
export const AUDIO_BITRATE_MIN_BPS = 16_000;
|
||||
/** Maximum audio bitrate (bps) */
|
||||
export const AUDIO_BITRATE_MAX_BPS = 256_000;
|
||||
/** Multiplier to convert kbps → bps */
|
||||
export const KBPS_TO_BPS = 1_000;
|
||||
/** Pre-defined latency-to-bitrate mappings (bps) */
|
||||
export const LATENCY_PROFILE_BITRATES: Record<LatencyProfile, number> = {
|
||||
low: 64_000,
|
||||
balanced: 96_000,
|
||||
high: 128_000
|
||||
};
|
||||
|
||||
export type { LatencyProfile } from '../../shared-kernel';
|
||||
|
||||
export const TRANSCEIVER_SEND_RECV: RTCRtpTransceiverDirection = 'sendrecv';
|
||||
export const TRANSCEIVER_RECV_ONLY: RTCRtpTransceiverDirection = 'recvonly';
|
||||
export const TRANSCEIVER_INACTIVE: RTCRtpTransceiverDirection = 'inactive';
|
||||
|
||||
export const CONNECTION_STATE_CONNECTED = 'connected';
|
||||
export const CONNECTION_STATE_DISCONNECTED = 'disconnected';
|
||||
export const CONNECTION_STATE_FAILED = 'failed';
|
||||
export const CONNECTION_STATE_CLOSED = 'closed';
|
||||
export const DATA_CHANNEL_STATE_OPEN = 'open';
|
||||
|
||||
export const TRACK_KIND_AUDIO = 'audio';
|
||||
export const TRACK_KIND_VIDEO = 'video';
|
||||
|
||||
export const SIGNALING_TYPE_IDENTIFY = 'identify';
|
||||
export const SIGNALING_TYPE_JOIN_SERVER = 'join_server';
|
||||
export const SIGNALING_TYPE_VIEW_SERVER = 'view_server';
|
||||
export const SIGNALING_TYPE_LEAVE_SERVER = 'leave_server';
|
||||
export const SIGNALING_TYPE_OFFER = 'offer';
|
||||
export const SIGNALING_TYPE_ANSWER = 'answer';
|
||||
export const SIGNALING_TYPE_ICE_CANDIDATE = 'ice_candidate';
|
||||
export const SIGNALING_TYPE_CONNECTED = 'connected';
|
||||
export const SIGNALING_TYPE_SERVER_USERS = 'server_users';
|
||||
export const SIGNALING_TYPE_USER_JOINED = 'user_joined';
|
||||
export const SIGNALING_TYPE_USER_LEFT = 'user_left';
|
||||
|
||||
export const P2P_TYPE_STATE_REQUEST = 'state-request';
|
||||
export const P2P_TYPE_VOICE_STATE_REQUEST = 'voice-state-request';
|
||||
export const P2P_TYPE_VOICE_STATE = 'voice-state';
|
||||
export const P2P_TYPE_SCREEN_STATE = 'screen-state';
|
||||
export const P2P_TYPE_SCREEN_SHARE_REQUEST = 'screen-share-request';
|
||||
export const P2P_TYPE_SCREEN_SHARE_STOP = 'screen-share-stop';
|
||||
export const P2P_TYPE_PING = 'ping';
|
||||
export const P2P_TYPE_PONG = 'pong';
|
||||
|
||||
/** Interval (ms) between peer latency pings */
|
||||
export const PEER_PING_INTERVAL_MS = 5_000;
|
||||
|
||||
/** Default display name fallback */
|
||||
export const DEFAULT_DISPLAY_NAME = 'User';
|
||||
/** Minimum volume (normalised 0-1) */
|
||||
export const VOLUME_MIN = 0;
|
||||
/** Maximum volume (normalised 0-1) */
|
||||
export const VOLUME_MAX = 1;
|
||||
67
toju-app/src/app/infrastructure/realtime/realtime.types.ts
Normal file
67
toju-app/src/app/infrastructure/realtime/realtime.types.ts
Normal file
@@ -0,0 +1,67 @@
|
||||
/**
|
||||
* Shared type definitions for the WebRTC subsystem.
|
||||
*/
|
||||
|
||||
/** Tracks a single peer's connection, data channel, and RTP senders. */
|
||||
export interface PeerData {
|
||||
/** The underlying RTCPeerConnection instance. */
|
||||
connection: RTCPeerConnection;
|
||||
/** The negotiated data channel, or `null` before the channel is established. */
|
||||
dataChannel: RTCDataChannel | null;
|
||||
/** `true` when this side created the offer (and data channel). */
|
||||
isInitiator: boolean;
|
||||
/** ICE candidates received before the remote description was set. */
|
||||
pendingIceCandidates: RTCIceCandidateInit[];
|
||||
/** The RTP sender carrying the local audio track. */
|
||||
audioSender?: RTCRtpSender;
|
||||
/** The RTP sender carrying the local video (camera) track. */
|
||||
videoSender?: RTCRtpSender;
|
||||
/** The RTP sender carrying the screen-share video track. */
|
||||
screenVideoSender?: RTCRtpSender;
|
||||
/** The RTP sender carrying the screen-share audio track. */
|
||||
screenAudioSender?: RTCRtpSender;
|
||||
/** Known remote stream ids that carry the peer's voice audio. */
|
||||
remoteVoiceStreamIds: Set<string>;
|
||||
/** Known remote stream ids that carry the peer's screen-share audio/video. */
|
||||
remoteScreenShareStreamIds: Set<string>;
|
||||
}
|
||||
|
||||
/** Credentials cached for automatic re-identification after reconnect. */
|
||||
export interface IdentifyCredentials {
|
||||
/** The user's unique order / peer identifier. */
|
||||
oderId: string;
|
||||
/** The user's display name shown to other peers. */
|
||||
displayName: string;
|
||||
}
|
||||
|
||||
/** Last-joined server info, used for reconnection. */
|
||||
export interface JoinedServerInfo {
|
||||
/** The server (room) that was last joined. */
|
||||
serverId: string;
|
||||
/** The local user ID at the time of joining. */
|
||||
userId: string;
|
||||
}
|
||||
|
||||
/** Entry in the disconnected-peer tracker for P2P reconnect scheduling. */
|
||||
export interface DisconnectedPeerEntry {
|
||||
/** Timestamp (ms since epoch) when the peer was last seen connected. */
|
||||
lastSeenTimestamp: number;
|
||||
/** Number of reconnect attempts made so far. */
|
||||
reconnectAttempts: number;
|
||||
}
|
||||
|
||||
/** Snapshot of current voice / screen state (broadcast to peers). */
|
||||
export interface VoiceStateSnapshot {
|
||||
/** Whether the user's voice is currently active. */
|
||||
isConnected: boolean;
|
||||
/** Whether the user's microphone is muted. */
|
||||
isMuted: boolean;
|
||||
/** Whether the user has self-deafened. */
|
||||
isDeafened: boolean;
|
||||
/** Whether the user is sharing their screen. */
|
||||
isScreenSharing: boolean;
|
||||
/** The voice channel room ID, if applicable. */
|
||||
roomId?: string;
|
||||
/** The voice channel server ID, if applicable. */
|
||||
serverId?: string;
|
||||
}
|
||||
@@ -0,0 +1,10 @@
|
||||
export {
|
||||
DEFAULT_SCREEN_SHARE_QUALITY,
|
||||
DEFAULT_SCREEN_SHARE_START_OPTIONS,
|
||||
ELECTRON_ENTIRE_SCREEN_SOURCE_NAME,
|
||||
SCREEN_SHARE_QUALITY_OPTIONS,
|
||||
SCREEN_SHARE_QUALITY_PRESETS,
|
||||
type ScreenShareQuality,
|
||||
type ScreenShareQualityPreset,
|
||||
type ScreenShareStartOptions
|
||||
} from '../../shared-kernel';
|
||||
@@ -0,0 +1,146 @@
|
||||
import {
|
||||
SIGNALING_TYPE_JOIN_SERVER,
|
||||
SIGNALING_TYPE_LEAVE_SERVER,
|
||||
SIGNALING_TYPE_VIEW_SERVER
|
||||
} from '../realtime.constants';
|
||||
import { ServerSignalingCoordinator } from './server-signaling-coordinator';
|
||||
import { SignalingTransportHandler } from './signaling-transport-handler';
|
||||
import { WebRTCLogger } from '../logging/webrtc-logger';
|
||||
|
||||
interface ServerMembershipSignalingHandlerDependencies<TMessage> {
|
||||
signalingCoordinator: ServerSignalingCoordinator<TMessage>;
|
||||
signalingTransport: SignalingTransportHandler<TMessage>;
|
||||
logger: WebRTCLogger;
|
||||
getActiveServerId(): string | null;
|
||||
isVoiceConnected(): boolean;
|
||||
runFullCleanup(): void;
|
||||
}
|
||||
|
||||
export class ServerMembershipSignalingHandler<TMessage> {
|
||||
constructor(
|
||||
private readonly dependencies: ServerMembershipSignalingHandlerDependencies<TMessage>
|
||||
) {}
|
||||
|
||||
getCurrentSignalingUrl(): string | null {
|
||||
return this.dependencies.signalingTransport.getCurrentSignalingUrl(this.dependencies.getActiveServerId());
|
||||
}
|
||||
|
||||
joinRoom(roomId: string, userId: string, signalUrl?: string): void {
|
||||
const resolvedSignalUrl = this.resolveSignalUrl(roomId, signalUrl);
|
||||
|
||||
if (!resolvedSignalUrl) {
|
||||
this.dependencies.logger.warn('[signaling] Cannot join room without a signaling URL', { roomId });
|
||||
return;
|
||||
}
|
||||
|
||||
this.dependencies.signalingCoordinator.setServerSignalUrl(roomId, resolvedSignalUrl);
|
||||
this.dependencies.signalingCoordinator.setLastJoinedServer(resolvedSignalUrl, {
|
||||
serverId: roomId,
|
||||
userId
|
||||
});
|
||||
|
||||
this.dependencies.signalingCoordinator.addJoinedServer(resolvedSignalUrl, roomId);
|
||||
|
||||
this.dependencies.signalingTransport.sendRawMessageToSignalUrl(resolvedSignalUrl, {
|
||||
type: SIGNALING_TYPE_JOIN_SERVER,
|
||||
serverId: roomId
|
||||
});
|
||||
}
|
||||
|
||||
switchServer(serverId: string, userId: string, signalUrl?: string): void {
|
||||
const resolvedSignalUrl = this.resolveSignalUrl(serverId, signalUrl);
|
||||
|
||||
if (!resolvedSignalUrl) {
|
||||
this.dependencies.logger.warn('[signaling] Cannot switch server without a signaling URL', { serverId });
|
||||
return;
|
||||
}
|
||||
|
||||
this.dependencies.signalingCoordinator.setServerSignalUrl(serverId, resolvedSignalUrl);
|
||||
this.dependencies.signalingCoordinator.setLastJoinedServer(resolvedSignalUrl, {
|
||||
serverId,
|
||||
userId
|
||||
});
|
||||
|
||||
const memberServerIds = this.dependencies.signalingCoordinator.getMemberServerIdsForSignalUrl(resolvedSignalUrl);
|
||||
|
||||
if (memberServerIds.has(serverId)) {
|
||||
this.dependencies.signalingTransport.sendRawMessageToSignalUrl(resolvedSignalUrl, {
|
||||
type: SIGNALING_TYPE_VIEW_SERVER,
|
||||
serverId
|
||||
});
|
||||
|
||||
this.dependencies.logger.info('Viewed server (already joined)', {
|
||||
serverId,
|
||||
signalUrl: resolvedSignalUrl,
|
||||
userId,
|
||||
voiceConnected: this.dependencies.isVoiceConnected()
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
this.dependencies.signalingCoordinator.addJoinedServer(resolvedSignalUrl, serverId);
|
||||
this.dependencies.signalingTransport.sendRawMessageToSignalUrl(resolvedSignalUrl, {
|
||||
type: SIGNALING_TYPE_JOIN_SERVER,
|
||||
serverId
|
||||
});
|
||||
|
||||
this.dependencies.logger.info('Joined new server via switch', {
|
||||
serverId,
|
||||
signalUrl: resolvedSignalUrl,
|
||||
userId,
|
||||
voiceConnected: this.dependencies.isVoiceConnected()
|
||||
});
|
||||
}
|
||||
|
||||
leaveRoom(serverId?: string): void {
|
||||
if (serverId) {
|
||||
this.leaveSingleRoom(serverId);
|
||||
return;
|
||||
}
|
||||
|
||||
for (const { signalUrl, serverIds } of this.dependencies.signalingCoordinator.getJoinedServerEntries()) {
|
||||
for (const joinedServerId of serverIds) {
|
||||
this.dependencies.signalingTransport.sendRawMessageToSignalUrl(signalUrl, {
|
||||
type: SIGNALING_TYPE_LEAVE_SERVER,
|
||||
serverId: joinedServerId
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
this.dependencies.signalingCoordinator.clearJoinedServers();
|
||||
this.dependencies.runFullCleanup();
|
||||
}
|
||||
|
||||
private leaveSingleRoom(serverId: string): void {
|
||||
const resolvedSignalUrl = this.dependencies.signalingCoordinator.getServerSignalUrl(serverId);
|
||||
|
||||
if (resolvedSignalUrl) {
|
||||
this.dependencies.signalingCoordinator.removeJoinedServer(resolvedSignalUrl, serverId);
|
||||
this.dependencies.signalingTransport.sendRawMessageToSignalUrl(resolvedSignalUrl, {
|
||||
type: SIGNALING_TYPE_LEAVE_SERVER,
|
||||
serverId
|
||||
});
|
||||
} else {
|
||||
this.dependencies.signalingTransport.sendRawMessage({
|
||||
type: SIGNALING_TYPE_LEAVE_SERVER,
|
||||
serverId
|
||||
});
|
||||
|
||||
this.dependencies.signalingCoordinator.removeJoinedServerEverywhere(serverId);
|
||||
}
|
||||
|
||||
this.dependencies.signalingCoordinator.deleteServerSignalUrl(serverId);
|
||||
this.dependencies.logger.info('Left server', { serverId });
|
||||
|
||||
if (this.dependencies.signalingCoordinator.getJoinedServerCount() === 0) {
|
||||
this.dependencies.runFullCleanup();
|
||||
}
|
||||
}
|
||||
|
||||
private resolveSignalUrl(serverId: string, signalUrl?: string): string | null {
|
||||
return signalUrl
|
||||
?? this.dependencies.signalingCoordinator.getServerSignalUrl(serverId)
|
||||
?? this.getCurrentSignalingUrl();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,295 @@
|
||||
import { Subscription } from 'rxjs';
|
||||
import { JoinedServerInfo } from '../realtime.types';
|
||||
import { SignalingManager } from './signaling.manager';
|
||||
|
||||
export interface ConnectedSignalingManager {
|
||||
signalUrl: string;
|
||||
manager: SignalingManager;
|
||||
}
|
||||
|
||||
export interface ServerSignalingCoordinatorCallbacks<TMessage> {
|
||||
createManager(
|
||||
signalUrl: string,
|
||||
getLastJoinedServer: () => JoinedServerInfo | null,
|
||||
getMemberServerIds: () => ReadonlySet<string>
|
||||
): SignalingManager;
|
||||
handleConnectionStatus(signalUrl: string, connected: boolean, errorMessage?: string): void;
|
||||
handleHeartbeatTick(): void;
|
||||
handleMessage(message: TMessage, signalUrl: string): void;
|
||||
}
|
||||
|
||||
export class ServerSignalingCoordinator<TMessage> {
|
||||
private readonly lastJoinedServerBySignalUrl = new Map<string, JoinedServerInfo>();
|
||||
private readonly memberServerIdsBySignalUrl = new Map<string, Set<string>>();
|
||||
private readonly serverSignalingUrlMap = new Map<string, string>();
|
||||
private readonly peerSignalingUrlMap = new Map<string, string>();
|
||||
private readonly signalingManagers = new Map<string, SignalingManager>();
|
||||
private readonly signalingSubscriptions = new Map<string, Subscription[]>();
|
||||
private readonly peerServerMap = new Map<string, Set<string>>();
|
||||
|
||||
constructor(
|
||||
private readonly callbacks: ServerSignalingCoordinatorCallbacks<TMessage>
|
||||
) {}
|
||||
|
||||
ensureSignalingManager(signalUrl: string): SignalingManager {
|
||||
const existingManager = this.signalingManagers.get(signalUrl);
|
||||
|
||||
if (existingManager) {
|
||||
return existingManager;
|
||||
}
|
||||
|
||||
const manager = this.callbacks.createManager(
|
||||
signalUrl,
|
||||
() => this.lastJoinedServerBySignalUrl.get(signalUrl) ?? null,
|
||||
() => this.getMemberServerIdsForSignalUrl(signalUrl)
|
||||
);
|
||||
const subscriptions: Subscription[] = [
|
||||
manager.connectionStatus$.subscribe(({ connected, errorMessage }) =>
|
||||
this.callbacks.handleConnectionStatus(signalUrl, connected, errorMessage)
|
||||
),
|
||||
manager.messageReceived$.subscribe((message) =>
|
||||
this.callbacks.handleMessage(message as TMessage, signalUrl)
|
||||
),
|
||||
manager.heartbeatTick$.subscribe(() => this.callbacks.handleHeartbeatTick())
|
||||
];
|
||||
|
||||
this.signalingManagers.set(signalUrl, manager);
|
||||
this.signalingSubscriptions.set(signalUrl, subscriptions);
|
||||
return manager;
|
||||
}
|
||||
|
||||
getSignalingManager(signalUrl: string): SignalingManager | undefined {
|
||||
return this.signalingManagers.get(signalUrl);
|
||||
}
|
||||
|
||||
isSignalingConnectedTo(signalUrl: string): boolean {
|
||||
return this.signalingManagers.get(signalUrl)?.isSocketOpen() ?? false;
|
||||
}
|
||||
|
||||
isAnySignalingConnected(): boolean {
|
||||
for (const manager of this.signalingManagers.values()) {
|
||||
if (manager.isSocketOpen()) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
getConnectedSignalingManagers(): ConnectedSignalingManager[] {
|
||||
const connectedManagers: ConnectedSignalingManager[] = [];
|
||||
|
||||
for (const [signalUrl, manager] of this.signalingManagers.entries()) {
|
||||
if (!manager.isSocketOpen()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
connectedManagers.push({ signalUrl,
|
||||
manager });
|
||||
}
|
||||
|
||||
return connectedManagers;
|
||||
}
|
||||
|
||||
async ensureAnySignalingConnected(timeoutMs?: number): Promise<boolean> {
|
||||
if (this.isAnySignalingConnected()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
for (const manager of this.signalingManagers.values()) {
|
||||
if (await manager.ensureConnected(timeoutMs)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
setLastJoinedServer(signalUrl: string, joinedServer: JoinedServerInfo): void {
|
||||
this.lastJoinedServerBySignalUrl.set(signalUrl, joinedServer);
|
||||
}
|
||||
|
||||
clearLastJoinedServers(): void {
|
||||
this.lastJoinedServerBySignalUrl.clear();
|
||||
}
|
||||
|
||||
setServerSignalUrl(serverId: string, signalUrl: string): void {
|
||||
this.serverSignalingUrlMap.set(serverId, signalUrl);
|
||||
}
|
||||
|
||||
getServerSignalUrl(serverId: string): string | undefined {
|
||||
return this.serverSignalingUrlMap.get(serverId);
|
||||
}
|
||||
|
||||
deleteServerSignalUrl(serverId: string): void {
|
||||
this.serverSignalingUrlMap.delete(serverId);
|
||||
}
|
||||
|
||||
setPeerSignalUrl(peerId: string, signalUrl: string): void {
|
||||
this.peerSignalingUrlMap.set(peerId, signalUrl);
|
||||
}
|
||||
|
||||
getPeerSignalUrl(peerId: string): string | undefined {
|
||||
return this.peerSignalingUrlMap.get(peerId);
|
||||
}
|
||||
|
||||
deletePeerSignalUrl(peerId: string): void {
|
||||
this.peerSignalingUrlMap.delete(peerId);
|
||||
}
|
||||
|
||||
addJoinedServer(signalUrl: string, serverId: string): void {
|
||||
this.getOrCreateMemberServerSet(signalUrl).add(serverId);
|
||||
}
|
||||
|
||||
removeJoinedServer(signalUrl: string, serverId: string): void {
|
||||
this.getOrCreateMemberServerSet(signalUrl).delete(serverId);
|
||||
}
|
||||
|
||||
removeJoinedServerEverywhere(serverId: string): void {
|
||||
for (const memberServerIds of this.memberServerIdsBySignalUrl.values()) {
|
||||
memberServerIds.delete(serverId);
|
||||
}
|
||||
}
|
||||
|
||||
getMemberServerIdsForSignalUrl(signalUrl: string): ReadonlySet<string> {
|
||||
return this.memberServerIdsBySignalUrl.get(signalUrl) ?? new Set<string>();
|
||||
}
|
||||
|
||||
getJoinedServerEntries(): { signalUrl: string; serverIds: ReadonlySet<string> }[] {
|
||||
return Array.from(this.memberServerIdsBySignalUrl.entries()).map(([signalUrl, serverIds]) => ({
|
||||
signalUrl,
|
||||
serverIds
|
||||
}));
|
||||
}
|
||||
|
||||
clearJoinedServers(): void {
|
||||
this.memberServerIdsBySignalUrl.clear();
|
||||
this.serverSignalingUrlMap.clear();
|
||||
}
|
||||
|
||||
hasJoinedServer(serverId: string): boolean {
|
||||
for (const memberServerIds of this.memberServerIdsBySignalUrl.values()) {
|
||||
if (memberServerIds.has(serverId)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
getJoinedServerCount(): number {
|
||||
let joinedServerCount = 0;
|
||||
|
||||
for (const memberServerIds of this.memberServerIdsBySignalUrl.values()) {
|
||||
joinedServerCount += memberServerIds.size;
|
||||
}
|
||||
|
||||
return joinedServerCount;
|
||||
}
|
||||
|
||||
getJoinedServerIds(): ReadonlySet<string> {
|
||||
const joinedServerIds = new Set<string>();
|
||||
|
||||
for (const memberServerIds of this.memberServerIdsBySignalUrl.values()) {
|
||||
memberServerIds.forEach((serverId) => joinedServerIds.add(serverId));
|
||||
}
|
||||
|
||||
return joinedServerIds;
|
||||
}
|
||||
|
||||
trackPeerInServer(peerId: string, serverId: string): void {
|
||||
if (!peerId || !serverId)
|
||||
return;
|
||||
|
||||
const trackedServers = this.peerServerMap.get(peerId) ?? new Set<string>();
|
||||
|
||||
trackedServers.add(serverId);
|
||||
this.peerServerMap.set(peerId, trackedServers);
|
||||
}
|
||||
|
||||
hasTrackedPeerServers(peerId: string): boolean {
|
||||
return this.peerServerMap.has(peerId);
|
||||
}
|
||||
|
||||
replacePeerSharedServers(peerId: string, serverIds: string[]): boolean {
|
||||
const sharedServerIds = serverIds.filter((serverId) => this.hasJoinedServer(serverId));
|
||||
|
||||
if (sharedServerIds.length === 0) {
|
||||
this.peerServerMap.delete(peerId);
|
||||
return false;
|
||||
}
|
||||
|
||||
this.peerServerMap.set(peerId, new Set(sharedServerIds));
|
||||
return true;
|
||||
}
|
||||
|
||||
untrackPeerFromServer(peerId: string, serverId: string): boolean {
|
||||
const trackedServers = this.peerServerMap.get(peerId);
|
||||
|
||||
if (!trackedServers)
|
||||
return false;
|
||||
|
||||
trackedServers.delete(serverId);
|
||||
|
||||
if (trackedServers.size === 0) {
|
||||
this.peerServerMap.delete(peerId);
|
||||
return false;
|
||||
}
|
||||
|
||||
this.peerServerMap.set(peerId, trackedServers);
|
||||
return true;
|
||||
}
|
||||
|
||||
deletePeerTracking(peerId: string): void {
|
||||
this.peerServerMap.delete(peerId);
|
||||
this.peerSignalingUrlMap.delete(peerId);
|
||||
}
|
||||
|
||||
clearPeerTracking(): void {
|
||||
this.peerServerMap.clear();
|
||||
this.peerSignalingUrlMap.clear();
|
||||
}
|
||||
|
||||
getPeersOutsideServer(serverId: string): string[] {
|
||||
const peersToClose: string[] = [];
|
||||
|
||||
this.peerServerMap.forEach((peerServerIds, peerId) => {
|
||||
if (!peerServerIds.has(serverId)) {
|
||||
peersToClose.push(peerId);
|
||||
}
|
||||
});
|
||||
|
||||
return peersToClose;
|
||||
}
|
||||
|
||||
destroy(): void {
|
||||
for (const subscriptions of this.signalingSubscriptions.values()) {
|
||||
for (const subscription of subscriptions) {
|
||||
subscription.unsubscribe();
|
||||
}
|
||||
}
|
||||
|
||||
for (const manager of this.signalingManagers.values()) {
|
||||
manager.destroy();
|
||||
}
|
||||
|
||||
this.signalingSubscriptions.clear();
|
||||
this.signalingManagers.clear();
|
||||
this.clearJoinedServers();
|
||||
this.clearLastJoinedServers();
|
||||
this.clearPeerTracking();
|
||||
}
|
||||
|
||||
private getOrCreateMemberServerSet(signalUrl: string): Set<string> {
|
||||
const existingSet = this.memberServerIdsBySignalUrl.get(signalUrl);
|
||||
|
||||
if (existingSet) {
|
||||
return existingSet;
|
||||
}
|
||||
|
||||
const createdSet = new Set<string>();
|
||||
|
||||
this.memberServerIdsBySignalUrl.set(signalUrl, createdSet);
|
||||
return createdSet;
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,256 @@
|
||||
import type { SignalingMessage } from '../../../shared-kernel';
|
||||
import { PeerData } from '../realtime.types';
|
||||
import {
|
||||
SIGNALING_TYPE_ANSWER,
|
||||
SIGNALING_TYPE_CONNECTED,
|
||||
SIGNALING_TYPE_ICE_CANDIDATE,
|
||||
SIGNALING_TYPE_OFFER,
|
||||
SIGNALING_TYPE_SERVER_USERS,
|
||||
SIGNALING_TYPE_USER_JOINED,
|
||||
SIGNALING_TYPE_USER_LEFT
|
||||
} from '../realtime.constants';
|
||||
import { PeerConnectionManager } from '../peer-connection-manager/peer-connection.manager';
|
||||
import { ServerSignalingCoordinator } from './server-signaling-coordinator';
|
||||
import { WebRTCLogger } from '../logging/webrtc-logger';
|
||||
|
||||
interface SignalingUserSummary {
|
||||
oderId: string;
|
||||
displayName: string;
|
||||
}
|
||||
|
||||
interface IncomingSignalingPayload {
|
||||
sdp?: RTCSessionDescriptionInit;
|
||||
candidate?: RTCIceCandidateInit;
|
||||
}
|
||||
|
||||
export type IncomingSignalingMessage = Omit<Partial<SignalingMessage>, 'type' | 'payload'> & {
|
||||
type: string;
|
||||
payload?: IncomingSignalingPayload;
|
||||
oderId?: string;
|
||||
serverTime?: number;
|
||||
serverId?: string;
|
||||
serverIds?: string[];
|
||||
users?: SignalingUserSummary[];
|
||||
displayName?: string;
|
||||
fromUserId?: string;
|
||||
};
|
||||
|
||||
interface IncomingSignalingMessageHandlerDependencies {
|
||||
peerManager: PeerConnectionManager;
|
||||
signalingCoordinator: ServerSignalingCoordinator<IncomingSignalingMessage>;
|
||||
logger: WebRTCLogger;
|
||||
getEffectiveServerId(): string | null;
|
||||
setServerTime(serverTime: number): void;
|
||||
}
|
||||
|
||||
export class IncomingSignalingMessageHandler {
|
||||
constructor(
|
||||
private readonly dependencies: IncomingSignalingMessageHandlerDependencies
|
||||
) {}
|
||||
|
||||
handleMessage(message: IncomingSignalingMessage, signalUrl: string): void {
|
||||
this.dependencies.logger.info('Signaling message', {
|
||||
signalUrl,
|
||||
type: message.type
|
||||
});
|
||||
|
||||
switch (message.type) {
|
||||
case SIGNALING_TYPE_CONNECTED:
|
||||
this.handleConnectedSignalingMessage(message, signalUrl);
|
||||
return;
|
||||
|
||||
case SIGNALING_TYPE_SERVER_USERS:
|
||||
this.handleServerUsersSignalingMessage(message, signalUrl);
|
||||
return;
|
||||
|
||||
case SIGNALING_TYPE_USER_JOINED:
|
||||
this.handleUserJoinedSignalingMessage(message, signalUrl);
|
||||
return;
|
||||
|
||||
case SIGNALING_TYPE_USER_LEFT:
|
||||
this.handleUserLeftSignalingMessage(message, signalUrl);
|
||||
return;
|
||||
|
||||
case SIGNALING_TYPE_OFFER:
|
||||
this.handleOfferSignalingMessage(message, signalUrl);
|
||||
return;
|
||||
|
||||
case SIGNALING_TYPE_ANSWER:
|
||||
this.handleAnswerSignalingMessage(message, signalUrl);
|
||||
return;
|
||||
|
||||
case SIGNALING_TYPE_ICE_CANDIDATE:
|
||||
this.handleIceCandidateSignalingMessage(message, signalUrl);
|
||||
return;
|
||||
|
||||
default:
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
private handleConnectedSignalingMessage(message: IncomingSignalingMessage, signalUrl: string): void {
|
||||
this.dependencies.logger.info('Server connected', {
|
||||
oderId: message.oderId,
|
||||
signalUrl
|
||||
});
|
||||
|
||||
if (message.serverId) {
|
||||
this.dependencies.signalingCoordinator.setServerSignalUrl(message.serverId, signalUrl);
|
||||
}
|
||||
|
||||
if (typeof message.serverTime === 'number') {
|
||||
this.dependencies.setServerTime(message.serverTime);
|
||||
}
|
||||
}
|
||||
|
||||
private handleServerUsersSignalingMessage(message: IncomingSignalingMessage, signalUrl: string): void {
|
||||
const users = Array.isArray(message.users) ? message.users : [];
|
||||
|
||||
this.dependencies.logger.info('Server users', {
|
||||
count: users.length,
|
||||
signalUrl,
|
||||
serverId: message.serverId
|
||||
});
|
||||
|
||||
if (message.serverId) {
|
||||
this.dependencies.signalingCoordinator.setServerSignalUrl(message.serverId, signalUrl);
|
||||
}
|
||||
|
||||
for (const user of users) {
|
||||
if (!user.oderId)
|
||||
continue;
|
||||
|
||||
this.dependencies.signalingCoordinator.setPeerSignalUrl(user.oderId, signalUrl);
|
||||
|
||||
if (message.serverId) {
|
||||
this.dependencies.signalingCoordinator.trackPeerInServer(user.oderId, message.serverId);
|
||||
}
|
||||
|
||||
const existing = this.dependencies.peerManager.activePeerConnections.get(user.oderId);
|
||||
|
||||
if (this.canReusePeerConnection(existing)) {
|
||||
this.dependencies.logger.info('Reusing active peer connection', {
|
||||
connectionState: existing?.connection.connectionState ?? 'unknown',
|
||||
dataChannelState: existing?.dataChannel?.readyState ?? 'missing',
|
||||
oderId: user.oderId,
|
||||
serverId: message.serverId,
|
||||
signalUrl
|
||||
});
|
||||
|
||||
continue;
|
||||
}
|
||||
|
||||
if (existing) {
|
||||
this.dependencies.logger.info('Removing failed peer before recreate', {
|
||||
connectionState: existing.connection.connectionState,
|
||||
dataChannelState: existing.dataChannel?.readyState ?? 'missing',
|
||||
oderId: user.oderId,
|
||||
serverId: message.serverId,
|
||||
signalUrl
|
||||
});
|
||||
|
||||
this.dependencies.peerManager.removePeer(user.oderId);
|
||||
}
|
||||
|
||||
this.dependencies.logger.info('Create peer connection to existing user', {
|
||||
oderId: user.oderId,
|
||||
serverId: message.serverId,
|
||||
signalUrl
|
||||
});
|
||||
|
||||
this.dependencies.peerManager.createPeerConnection(user.oderId, true);
|
||||
void this.dependencies.peerManager.createAndSendOffer(user.oderId);
|
||||
}
|
||||
}
|
||||
|
||||
private handleUserJoinedSignalingMessage(message: IncomingSignalingMessage, signalUrl: string): void {
|
||||
this.dependencies.logger.info('User joined', {
|
||||
displayName: message.displayName,
|
||||
oderId: message.oderId,
|
||||
signalUrl
|
||||
});
|
||||
|
||||
if (message.serverId) {
|
||||
this.dependencies.signalingCoordinator.setServerSignalUrl(message.serverId, signalUrl);
|
||||
}
|
||||
|
||||
if (message.oderId) {
|
||||
this.dependencies.signalingCoordinator.setPeerSignalUrl(message.oderId, signalUrl);
|
||||
}
|
||||
|
||||
if (message.oderId && message.serverId) {
|
||||
this.dependencies.signalingCoordinator.trackPeerInServer(message.oderId, message.serverId);
|
||||
}
|
||||
}
|
||||
|
||||
private handleUserLeftSignalingMessage(message: IncomingSignalingMessage, signalUrl: string): void {
|
||||
this.dependencies.logger.info('User left', {
|
||||
displayName: message.displayName,
|
||||
oderId: message.oderId,
|
||||
signalUrl,
|
||||
serverId: message.serverId
|
||||
});
|
||||
|
||||
if (message.oderId) {
|
||||
const hasRemainingSharedServers = Array.isArray(message.serverIds)
|
||||
? this.dependencies.signalingCoordinator.replacePeerSharedServers(message.oderId, message.serverIds)
|
||||
: (message.serverId
|
||||
? this.dependencies.signalingCoordinator.untrackPeerFromServer(message.oderId, message.serverId)
|
||||
: false);
|
||||
|
||||
if (!hasRemainingSharedServers) {
|
||||
this.dependencies.peerManager.removePeer(message.oderId);
|
||||
this.dependencies.signalingCoordinator.deletePeerTracking(message.oderId);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private handleOfferSignalingMessage(message: IncomingSignalingMessage, signalUrl: string): void {
|
||||
const fromUserId = message.fromUserId;
|
||||
const sdp = message.payload?.sdp;
|
||||
|
||||
if (!fromUserId || !sdp)
|
||||
return;
|
||||
|
||||
this.dependencies.signalingCoordinator.setPeerSignalUrl(fromUserId, signalUrl);
|
||||
|
||||
const effectiveServerId = this.dependencies.getEffectiveServerId();
|
||||
|
||||
if (effectiveServerId && !this.dependencies.signalingCoordinator.hasTrackedPeerServers(fromUserId)) {
|
||||
this.dependencies.signalingCoordinator.trackPeerInServer(fromUserId, effectiveServerId);
|
||||
}
|
||||
|
||||
this.dependencies.peerManager.handleOffer(fromUserId, sdp);
|
||||
}
|
||||
|
||||
private handleAnswerSignalingMessage(message: IncomingSignalingMessage, signalUrl: string): void {
|
||||
const fromUserId = message.fromUserId;
|
||||
const sdp = message.payload?.sdp;
|
||||
|
||||
if (!fromUserId || !sdp)
|
||||
return;
|
||||
|
||||
this.dependencies.signalingCoordinator.setPeerSignalUrl(fromUserId, signalUrl);
|
||||
this.dependencies.peerManager.handleAnswer(fromUserId, sdp);
|
||||
}
|
||||
|
||||
private handleIceCandidateSignalingMessage(message: IncomingSignalingMessage, signalUrl: string): void {
|
||||
const fromUserId = message.fromUserId;
|
||||
const candidate = message.payload?.candidate;
|
||||
|
||||
if (!fromUserId || !candidate)
|
||||
return;
|
||||
|
||||
this.dependencies.signalingCoordinator.setPeerSignalUrl(fromUserId, signalUrl);
|
||||
this.dependencies.peerManager.handleIceCandidate(fromUserId, candidate);
|
||||
}
|
||||
|
||||
private canReusePeerConnection(peer: PeerData | undefined): boolean {
|
||||
if (!peer)
|
||||
return false;
|
||||
|
||||
const connectionState = peer.connection?.connectionState;
|
||||
|
||||
return connectionState !== 'closed' && connectionState !== 'failed';
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,172 @@
|
||||
import { Observable, of } from 'rxjs';
|
||||
import type { SignalingMessage } from '../../../shared-kernel';
|
||||
import { DEFAULT_DISPLAY_NAME, SIGNALING_TYPE_IDENTIFY } from '../realtime.constants';
|
||||
import { IdentifyCredentials } from '../realtime.types';
|
||||
import { ConnectedSignalingManager, ServerSignalingCoordinator } from './server-signaling-coordinator';
|
||||
import { WebRTCLogger } from '../logging/webrtc-logger';
|
||||
|
||||
/** Collaborators required by {@link SignalingTransportHandler}. */
interface SignalingTransportHandlerDependencies<TMessage> {
  // Owns the per-URL SignalingManager instances and the server/peer → URL maps.
  signalingCoordinator: ServerSignalingCoordinator<TMessage>;
  logger: WebRTCLogger;
  // Locally generated peer id, passed along as the sender of outbound messages.
  getLocalPeerId(): string;
}
|
||||
|
||||
export class SignalingTransportHandler<TMessage> {
|
||||
private lastIdentifyCredentials: IdentifyCredentials | null = null;
|
||||
|
||||
constructor(
|
||||
private readonly dependencies: SignalingTransportHandlerDependencies<TMessage>
|
||||
) {}
|
||||
|
||||
getIdentifyCredentials(): IdentifyCredentials | null {
|
||||
return this.lastIdentifyCredentials;
|
||||
}
|
||||
|
||||
getIdentifyOderId(): string {
|
||||
return this.lastIdentifyCredentials?.oderId || this.dependencies.getLocalPeerId();
|
||||
}
|
||||
|
||||
getIdentifyDisplayName(): string {
|
||||
return this.lastIdentifyCredentials?.displayName || DEFAULT_DISPLAY_NAME;
|
||||
}
|
||||
|
||||
getConnectedSignalingManagers(): ConnectedSignalingManager[] {
|
||||
return this.dependencies.signalingCoordinator.getConnectedSignalingManagers();
|
||||
}
|
||||
|
||||
getCurrentSignalingUrl(activeServerId: string | null): string | null {
|
||||
if (activeServerId) {
|
||||
const activeServerSignalUrl = this.dependencies.signalingCoordinator.getServerSignalUrl(activeServerId);
|
||||
|
||||
if (activeServerSignalUrl) {
|
||||
return activeServerSignalUrl;
|
||||
}
|
||||
}
|
||||
|
||||
return this.getConnectedSignalingManagers()[0]?.signalUrl ?? null;
|
||||
}
|
||||
|
||||
connectToSignalingServer(serverUrl: string): Observable<boolean> {
|
||||
const manager = this.dependencies.signalingCoordinator.ensureSignalingManager(serverUrl);
|
||||
|
||||
if (manager.isSocketOpen()) {
|
||||
return of(true);
|
||||
}
|
||||
|
||||
return manager.connect(serverUrl);
|
||||
}
|
||||
|
||||
isSignalingConnectedTo(serverUrl: string): boolean {
|
||||
return this.dependencies.signalingCoordinator.isSignalingConnectedTo(serverUrl);
|
||||
}
|
||||
|
||||
async ensureSignalingConnected(timeoutMs?: number): Promise<boolean> {
|
||||
return await this.dependencies.signalingCoordinator.ensureAnySignalingConnected(timeoutMs);
|
||||
}
|
||||
|
||||
sendSignalingMessage(message: Omit<SignalingMessage, 'from' | 'timestamp'>): void {
|
||||
const targetPeerId = message.to;
|
||||
|
||||
if (targetPeerId) {
|
||||
const targetSignalUrl = this.dependencies.signalingCoordinator.getPeerSignalUrl(targetPeerId);
|
||||
|
||||
if (targetSignalUrl) {
|
||||
const targetManager = this.dependencies.signalingCoordinator.ensureSignalingManager(targetSignalUrl);
|
||||
|
||||
targetManager.sendSignalingMessage(message, this.dependencies.getLocalPeerId());
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const connectedManagers = this.getConnectedSignalingManagers();
|
||||
|
||||
if (connectedManagers.length === 0) {
|
||||
this.dependencies.logger.error('[signaling] No active signaling connection for outbound message', new Error('No signaling manager available'), {
|
||||
type: message.type
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
for (const { manager } of connectedManagers) {
|
||||
manager.sendSignalingMessage(message, this.dependencies.getLocalPeerId());
|
||||
}
|
||||
}
|
||||
|
||||
sendRawMessage(message: Record<string, unknown>): void {
|
||||
const targetPeerId = typeof message['targetUserId'] === 'string' ? message['targetUserId'] : null;
|
||||
|
||||
if (targetPeerId) {
|
||||
const targetSignalUrl = this.dependencies.signalingCoordinator.getPeerSignalUrl(targetPeerId);
|
||||
|
||||
if (targetSignalUrl && this.sendRawMessageToSignalUrl(targetSignalUrl, message)) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const serverId = typeof message['serverId'] === 'string' ? message['serverId'] : null;
|
||||
|
||||
if (serverId) {
|
||||
const serverSignalUrl = this.dependencies.signalingCoordinator.getServerSignalUrl(serverId);
|
||||
|
||||
if (serverSignalUrl && this.sendRawMessageToSignalUrl(serverSignalUrl, message)) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
const connectedManagers = this.getConnectedSignalingManagers();
|
||||
|
||||
if (connectedManagers.length === 0) {
|
||||
this.dependencies.logger.error('[signaling] No active signaling connection for outbound message', new Error('No signaling manager available'), {
|
||||
type: typeof message['type'] === 'string' ? message['type'] : 'unknown'
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
for (const { manager } of connectedManagers) {
|
||||
manager.sendRawMessage(message);
|
||||
}
|
||||
}
|
||||
|
||||
sendRawMessageToSignalUrl(signalUrl: string, message: Record<string, unknown>): boolean {
|
||||
const manager = this.dependencies.signalingCoordinator.getSignalingManager(signalUrl);
|
||||
|
||||
if (!manager) {
|
||||
return false;
|
||||
}
|
||||
|
||||
manager.sendRawMessage(message);
|
||||
return true;
|
||||
}
|
||||
|
||||
identify(oderId: string, displayName: string, signalUrl?: string): void {
|
||||
const normalizedDisplayName = displayName.trim() || DEFAULT_DISPLAY_NAME;
|
||||
|
||||
this.lastIdentifyCredentials = {
|
||||
oderId,
|
||||
displayName: normalizedDisplayName
|
||||
};
|
||||
|
||||
const identifyMessage = {
|
||||
type: SIGNALING_TYPE_IDENTIFY,
|
||||
oderId,
|
||||
displayName: normalizedDisplayName
|
||||
};
|
||||
|
||||
if (signalUrl) {
|
||||
this.sendRawMessageToSignalUrl(signalUrl, identifyMessage);
|
||||
return;
|
||||
}
|
||||
|
||||
const connectedManagers = this.getConnectedSignalingManagers();
|
||||
|
||||
if (connectedManagers.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
for (const { manager } of connectedManagers) {
|
||||
manager.sendRawMessage(identifyMessage);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,479 @@
|
||||
/* eslint-disable @typescript-eslint/member-ordering, @typescript-eslint/no-non-null-assertion, max-statements-per-line */
|
||||
/**
|
||||
* Manages the WebSocket connection to the signaling server,
|
||||
* including automatic reconnection and heartbeats.
|
||||
*/
|
||||
import { Observable, Subject } from 'rxjs';
|
||||
import type { SignalingMessage } from '../../../shared-kernel';
|
||||
import { recordDebugNetworkSignalingPayload } from '../logging/debug-network-metrics';
|
||||
import { IdentifyCredentials, JoinedServerInfo } from '../realtime.types';
|
||||
import { WebRTCLogger } from '../logging/webrtc-logger';
|
||||
import {
|
||||
SIGNALING_RECONNECT_BASE_DELAY_MS,
|
||||
SIGNALING_RECONNECT_MAX_DELAY_MS,
|
||||
SIGNALING_CONNECT_TIMEOUT_MS,
|
||||
STATE_HEARTBEAT_INTERVAL_MS,
|
||||
SIGNALING_TYPE_IDENTIFY,
|
||||
SIGNALING_TYPE_JOIN_SERVER,
|
||||
SIGNALING_TYPE_VIEW_SERVER
|
||||
} from '../realtime.constants';
|
||||
|
||||
/** SDP / ICE payload carried by offer, answer, and ice-candidate messages. */
interface ParsedSignalingPayload {
  // Session description for offer/answer exchange.
  sdp?: RTCSessionDescriptionInit;
  // ICE candidate for connectivity establishment.
  candidate?: RTCIceCandidateInit;
}
|
||||
|
||||
/**
 * A signaling message as parsed straight off the socket: `type` is
 * guaranteed, everything else is optional, and unknown extra keys are kept
 * accessible through the string index signature.
 */
type ParsedSignalingMessage = Omit<Partial<SignalingMessage>, 'type' | 'payload'> &
  Record<string, unknown> & {
    type: string;
    payload?: ParsedSignalingPayload;
  };
|
||||
|
||||
/**
 * Manages one WebSocket connection to a signaling server: connect with
 * automatic exponential-backoff reconnection, a periodic heartbeat tick,
 * inbound message parsing/logging, and outbound payload serialization.
 * Consumers subscribe to the three Subjects below rather than touching
 * the socket directly.
 */
export class SignalingManager {
  // The live socket, or null when disconnected/destroyed.
  private signalingWebSocket: WebSocket | null = null;
  // Last URL connected to; reconnection reuses it.
  private lastSignalingUrl: string | null = null;
  // Consecutive reconnect attempts since the last successful open.
  private signalingReconnectAttempts = 0;
  // Pending backoff timer; non-null means a reconnect is scheduled.
  private signalingReconnectTimer: ReturnType<typeof setTimeout> | null = null;
  // Interval driving heartbeatTick$ while the socket is open.
  private stateHeartbeatTimer: ReturnType<typeof setInterval> | null = null;

  /** Fires every heartbeat tick - the main service hooks this to broadcast state. */
  readonly heartbeatTick$ = new Subject<void>();

  /** Fires whenever a raw signaling message arrives from the server. */
  readonly messageReceived$ = new Subject<ParsedSignalingMessage>();

  /** Fires when connection status changes (true = open, false = closed/error). */
  readonly connectionStatus$ = new Subject<{ connected: boolean; errorMessage?: string }>();

  /**
   * @param logger structured logger for traffic/error reporting
   * @param getLastIdentify supplies credentials to re-send after a reconnect
   * @param getLastJoinedServer supplies the server to re-join/re-view after a reconnect
   * @param getMemberServerIds supplies all member servers to re-join after a reconnect
   */
  constructor(
    private readonly logger: WebRTCLogger,
    private readonly getLastIdentify: () => IdentifyCredentials | null,
    private readonly getLastJoinedServer: () => JoinedServerInfo | null,
    private readonly getMemberServerIds: () => ReadonlySet<string>
  ) {}

  /**
   * Open (or re-open) a WebSocket to the signaling server.
   *
   * Emits true on open; errors the observable on socket/setup failure.
   * Note the observable never completes — the socket outlives the emission.
   */
  connect(serverUrl: string): Observable<boolean> {
    this.lastSignalingUrl = serverUrl;
    return new Observable<boolean>((observer) => {
      try {
        this.logger.info('[signaling] Connecting to signaling server', { serverUrl });

        // Drop any previous socket before opening a new one.
        if (this.signalingWebSocket) {
          this.signalingWebSocket.close();
        }

        this.lastSignalingUrl = serverUrl;
        this.signalingWebSocket = new WebSocket(serverUrl);

        this.signalingWebSocket.onopen = () => {
          this.logger.info('[signaling] Connected to signaling server', {
            serverUrl,
            readyState: this.getSocketReadyStateLabel()
          });

          // Reset backoff, start heartbeats, announce status, then restore
          // identity/server membership before signaling success.
          this.clearReconnect();
          this.startHeartbeat();
          this.connectionStatus$.next({ connected: true });
          this.reIdentifyAndRejoin();
          observer.next(true);
        };

        this.signalingWebSocket.onmessage = (event) => {
          const rawPayload = this.stringifySocketPayload(event.data);
          const payloadBytes = rawPayload ? this.measurePayloadBytes(rawPayload) : null;

          try {
            const message = JSON.parse(rawPayload) as ParsedSignalingMessage;
            const payloadPreview = this.buildPayloadPreview(message);

            recordDebugNetworkSignalingPayload(message, 'inbound');

            this.logger.traffic('signaling', 'inbound', {
              ...payloadPreview,
              bytes: payloadBytes ?? undefined,
              payloadPreview,
              readyState: this.getSocketReadyStateLabel(),
              type: typeof message.type === 'string' ? message.type : 'unknown',
              url: serverUrl
            });

            this.messageReceived$.next(message);
          } catch (error) {
            // Malformed JSON is logged (with a truncated preview) and dropped.
            this.logger.error('[signaling] Failed to parse signaling message', error, {
              bytes: payloadBytes ?? undefined,
              rawPreview: this.getPayloadPreview(rawPayload),
              readyState: this.getSocketReadyStateLabel(),
              url: serverUrl
            });
          }
        };

        this.signalingWebSocket.onerror = (error) => {
          this.logger.error('[signaling] Signaling socket error', error, {
            readyState: this.getSocketReadyStateLabel(),
            url: serverUrl
          });

          this.connectionStatus$.next({ connected: false,
            errorMessage: 'Connection to signaling server failed' });

          observer.error(error);
        };

        this.signalingWebSocket.onclose = (event) => {
          this.logger.warn('[signaling] Disconnected from signaling server', {
            attempts: this.signalingReconnectAttempts,
            code: event.code,
            reason: event.reason || null,
            url: serverUrl,
            wasClean: event.wasClean
          });

          this.stopHeartbeat();
          this.connectionStatus$.next({ connected: false,
            errorMessage: 'Disconnected from signaling server' });

          // Every close (clean or not) triggers a backoff reconnect attempt.
          this.scheduleReconnect();
        };
      } catch (error) {
        this.logger.error('[signaling] Failed to initialize signaling socket', error, {
          readyState: this.getSocketReadyStateLabel(),
          url: serverUrl
        });

        observer.error(error);
      }
    });
  }

  /**
   * Ensure signaling is connected; try reconnecting if not.
   *
   * Resolves false (never rejects) on timeout, connect failure, or when no
   * URL has been stored yet.
   */
  async ensureConnected(timeoutMs: number = SIGNALING_CONNECT_TIMEOUT_MS): Promise<boolean> {
    if (this.isSocketOpen())
      return true;

    if (!this.lastSignalingUrl)
      return false;

    return new Promise<boolean>((resolve) => {
      // 'settled' guards against the timeout and the connect result racing.
      let settled = false;

      const timeout = setTimeout(() => {
        if (!settled) { settled = true; resolve(false); }
      }, timeoutMs);

      this.connect(this.lastSignalingUrl!).subscribe({
        next: () => { if (!settled) { settled = true; clearTimeout(timeout); resolve(true); } },
        error: () => { if (!settled) { settled = true; clearTimeout(timeout); resolve(false); } }
      });
    });
  }

  /** Send a signaling message (with `from` / `timestamp` populated). */
  sendSignalingMessage(message: Omit<SignalingMessage, 'from' | 'timestamp'>, localPeerId: string): void {
    if (!this.isSocketOpen()) {
      this.logger.error('[signaling] Signaling socket not connected', new Error('Socket not open'), {
        readyState: this.getSocketReadyStateLabel(),
        type: message.type,
        url: this.lastSignalingUrl
      });

      return;
    }

    const fullMessage: SignalingMessage = { ...message,
      from: localPeerId,
      timestamp: Date.now() };

    this.sendSerializedPayload(fullMessage, {
      targetPeerId: message.to,
      type: message.type,
      url: this.lastSignalingUrl
    });
  }

  /** Send a raw JSON payload (for identify, join_server, etc.). */
  sendRawMessage(message: Record<string, unknown>): void {
    if (!this.isSocketOpen()) {
      this.logger.error('[signaling] Signaling socket not connected', new Error('Socket not open'), {
        readyState: this.getSocketReadyStateLabel(),
        type: typeof message['type'] === 'string' ? message['type'] : 'unknown',
        url: this.lastSignalingUrl
      });

      return;
    }

    this.sendSerializedPayload(message, {
      targetPeerId: typeof message['targetUserId'] === 'string' ? message['targetUserId'] : undefined,
      type: typeof message['type'] === 'string' ? message['type'] : 'unknown',
      url: this.lastSignalingUrl
    });
  }

  /** Gracefully close the WebSocket (stops heartbeat and pending reconnects). */
  close(): void {
    this.stopHeartbeat();
    this.clearReconnect();

    if (this.signalingWebSocket) {
      this.signalingWebSocket.close();
      this.signalingWebSocket = null;
    }
  }

  /** Whether the underlying WebSocket is currently open. */
  isSocketOpen(): boolean {
    return this.signalingWebSocket !== null && this.signalingWebSocket.readyState === WebSocket.OPEN;
  }

  /** The URL last used to connect (needed for reconnection). */
  getLastUrl(): string | null {
    return this.lastSignalingUrl;
  }

  /**
   * Re-identify and rejoin servers after a reconnect: re-send identify
   * credentials, re-join each member server, then re-view the last-joined
   * server. A last-joined server with no memberships is joined directly.
   */
  private reIdentifyAndRejoin(): void {
    const credentials = this.getLastIdentify();

    if (credentials) {
      this.sendRawMessage({ type: SIGNALING_TYPE_IDENTIFY,
        oderId: credentials.oderId,
        displayName: credentials.displayName });
    }

    const memberIds = this.getMemberServerIds();

    if (memberIds.size > 0) {
      memberIds.forEach((serverId) => {
        this.sendRawMessage({ type: SIGNALING_TYPE_JOIN_SERVER,
          serverId });
      });

      const lastJoined = this.getLastJoinedServer();

      if (lastJoined) {
        this.sendRawMessage({ type: SIGNALING_TYPE_VIEW_SERVER,
          serverId: lastJoined.serverId });
      }
    } else {
      const lastJoined = this.getLastJoinedServer();

      if (lastJoined) {
        this.sendRawMessage({ type: SIGNALING_TYPE_JOIN_SERVER,
          serverId: lastJoined.serverId });
      }
    }
  }

  /**
   * Schedule a reconnect attempt using exponential backoff.
   *
   * The delay doubles with each attempt up to {@link SIGNALING_RECONNECT_MAX_DELAY_MS}.
   * No-ops if a timer is already pending or no URL is stored.
   */
  private scheduleReconnect(): void {
    if (this.signalingReconnectTimer || !this.lastSignalingUrl)
      return;

    const delay = Math.min(
      SIGNALING_RECONNECT_MAX_DELAY_MS,
      SIGNALING_RECONNECT_BASE_DELAY_MS * Math.pow(2, this.signalingReconnectAttempts)
    );

    this.signalingReconnectTimer = setTimeout(() => {
      this.signalingReconnectTimer = null;
      this.signalingReconnectAttempts++;
      this.logger.info('[signaling] Attempting reconnect', {
        attempt: this.signalingReconnectAttempts,
        delay,
        url: this.lastSignalingUrl
      });

      this.connect(this.lastSignalingUrl!).subscribe({
        next: () => { this.signalingReconnectAttempts = 0; },
        error: () => { this.scheduleReconnect(); }
      });
    }, delay);
  }

  /** Cancel any pending reconnect timer and reset the attempt counter. */
  private clearReconnect(): void {
    if (this.signalingReconnectTimer) {
      clearTimeout(this.signalingReconnectTimer);
      this.signalingReconnectTimer = null;
    }

    this.signalingReconnectAttempts = 0;
  }

  /** Start the heartbeat interval that drives periodic state broadcasts. */
  private startHeartbeat(): void {
    this.stopHeartbeat();
    this.stateHeartbeatTimer = setInterval(() => this.heartbeatTick$.next(), STATE_HEARTBEAT_INTERVAL_MS);
  }

  /** Stop the heartbeat interval. */
  private stopHeartbeat(): void {
    if (this.stateHeartbeatTimer) {
      clearInterval(this.stateHeartbeatTimer);
      this.stateHeartbeatTimer = null;
    }
  }

  /** Clean up all resources (closes the socket, completes all Subjects). */
  destroy(): void {
    this.close();
    this.heartbeatTick$.complete();
    this.messageReceived$.complete();
    this.connectionStatus$.complete();
  }

  /**
   * Serialize and send an outbound payload with traffic logging.
   * Rethrows on serialization or send failure after logging the context.
   */
  private sendSerializedPayload(
    message: SignalingMessage | Record<string, unknown>,
    details: { targetPeerId?: string; type?: string; url?: string | null }
  ): void {
    let rawPayload = '';

    const payloadPreview = this.buildPayloadPreview(message);

    recordDebugNetworkSignalingPayload(message, 'outbound');

    try {
      rawPayload = JSON.stringify(message);
    } catch (error) {
      this.logger.error('[signaling] Failed to serialize signaling payload', error, {
        payloadPreview,
        type: details.type,
        url: details.url
      });

      throw error;
    }

    try {
      // Callers gate on isSocketOpen(), hence the non-null assertion here.
      this.signalingWebSocket!.send(rawPayload);
      this.logger.traffic('signaling', 'outbound', {
        ...payloadPreview,
        bytes: this.measurePayloadBytes(rawPayload),
        payloadPreview,
        readyState: this.getSocketReadyStateLabel(),
        targetPeerId: details.targetPeerId,
        type: details.type,
        url: details.url
      });
    } catch (error) {
      this.logger.error('[signaling] Failed to send signaling payload', error, {
        bytes: this.measurePayloadBytes(rawPayload),
        payloadPreview,
        readyState: this.getSocketReadyStateLabel(),
        targetPeerId: details.targetPeerId,
        type: details.type,
        url: details.url
      });

      throw error;
    }
  }

  /** Human-readable label for the socket's readyState (for log output). */
  private getSocketReadyStateLabel(): string {
    const readyState = this.signalingWebSocket?.readyState;

    switch (readyState) {
      case WebSocket.CONNECTING:
        return 'connecting';
      case WebSocket.OPEN:
        return 'open';
      case WebSocket.CLOSING:
        return 'closing';
      case WebSocket.CLOSED:
        return 'closed';
      default:
        return 'unavailable';
    }
  }

  /** Coerce a socket payload (string or ArrayBuffer) to a string. */
  private stringifySocketPayload(payload: unknown): string {
    if (typeof payload === 'string')
      return payload;

    if (payload instanceof ArrayBuffer)
      return new TextDecoder().decode(payload);

    return String(payload ?? '');
  }

  /** UTF-8 byte length of a payload string. */
  private measurePayloadBytes(payload: string): number {
    return new TextEncoder().encode(payload).length;
  }

  /** Whitespace-collapsed, 240-char preview of a raw payload for logs. */
  private getPayloadPreview(payload: string): string {
    return payload.replace(/\s+/g, ' ').slice(0, 240);
  }

  /** Compact, log-safe summary of a message's interesting fields. */
  private buildPayloadPreview(payload: SignalingMessage | Record<string, unknown>): Record<string, unknown> {
    const record = payload as Record<string, unknown>;
    const voiceState = this.summarizeVoiceState(record['voiceState']);
    const users = this.summarizeUsers(record['users']);

    return {
      displayName: typeof record['displayName'] === 'string' ? record['displayName'] : undefined,
      fromUserId: typeof record['fromUserId'] === 'string' ? record['fromUserId'] : undefined,
      isScreenSharing: typeof record['isScreenSharing'] === 'boolean' ? record['isScreenSharing'] : undefined,
      keys: Object.keys(record).slice(0, 10),
      oderId: typeof record['oderId'] === 'string' ? record['oderId'] : undefined,
      // NOTE(review): roomId is populated from record['serverId'], not
      // record['roomId'] — possibly a copy-paste slip; confirm intent.
      roomId: typeof record['serverId'] === 'string' ? record['serverId'] : undefined,
      serverId: typeof record['serverId'] === 'string' ? record['serverId'] : undefined,
      targetPeerId: typeof record['targetUserId'] === 'string' ? record['targetUserId'] : undefined,
      type: typeof record['type'] === 'string' ? record['type'] : 'unknown',
      userCount: Array.isArray(record['users']) ? record['users'].length : undefined,
      users,
      voiceState
    };
  }

  /** Log-safe summary of a voiceState object; undefined when absent. */
  private summarizeVoiceState(value: unknown): Record<string, unknown> | undefined {
    const voiceState = this.asRecord(value);

    if (!voiceState)
      return undefined;

    return {
      isConnected: voiceState['isConnected'] === true,
      isMuted: voiceState['isMuted'] === true,
      isDeafened: voiceState['isDeafened'] === true,
      isSpeaking: voiceState['isSpeaking'] === true,
      roomId: typeof voiceState['roomId'] === 'string' ? voiceState['roomId'] : undefined,
      serverId: typeof voiceState['serverId'] === 'string' ? voiceState['serverId'] : undefined,
      volume: typeof voiceState['volume'] === 'number' ? voiceState['volume'] : undefined
    };
  }

  /** Log-safe summary of at most 20 user entries; undefined when not a list. */
  private summarizeUsers(value: unknown): Record<string, unknown>[] | undefined {
    if (!Array.isArray(value))
      return undefined;

    const users: Record<string, unknown>[] = [];

    for (const userValue of value.slice(0, 20)) {
      const user = this.asRecord(userValue);

      if (!user)
        continue;

      users.push({
        displayName: typeof user['displayName'] === 'string' ? user['displayName'] : undefined,
        oderId: typeof user['oderId'] === 'string' ? user['oderId'] : undefined
      });
    }

    return users;
  }

  /** Narrow an unknown value to a plain (non-array) object, else null. */
  private asRecord(value: unknown): Record<string, unknown> | null {
    if (!value || typeof value !== 'object' || Array.isArray(value))
      return null;

    return value as Record<string, unknown>;
  }
}
|
||||
@@ -0,0 +1,155 @@
|
||||
import {
|
||||
Signal,
|
||||
computed,
|
||||
signal
|
||||
} from '@angular/core';
|
||||
import { v4 as uuidv4 } from 'uuid';
|
||||
import type { LocalScreenShareState } from '../media/screen-share.manager';
|
||||
|
||||
export class WebRtcStateController {
|
||||
readonly peerId: Signal<string>;
|
||||
readonly isConnected: Signal<boolean>;
|
||||
readonly hasEverConnected: Signal<boolean>;
|
||||
readonly isVoiceConnected: Signal<boolean>;
|
||||
readonly connectedPeers: Signal<string[]>;
|
||||
readonly isMuted: Signal<boolean>;
|
||||
readonly isDeafened: Signal<boolean>;
|
||||
readonly isScreenSharing: Signal<boolean>;
|
||||
readonly isNoiseReductionEnabled: Signal<boolean>;
|
||||
readonly screenStream: Signal<MediaStream | null>;
|
||||
readonly isScreenShareRemotePlaybackSuppressed: Signal<boolean>;
|
||||
readonly forceDefaultRemotePlaybackOutput: Signal<boolean>;
|
||||
readonly hasConnectionError: Signal<boolean>;
|
||||
readonly connectionErrorMessage: Signal<string | null>;
|
||||
readonly shouldShowConnectionError: Signal<boolean>;
|
||||
readonly peerLatencies: Signal<ReadonlyMap<string, number>>;
|
||||
|
||||
private activeServerId: string | null = null;
|
||||
private readonly _localPeerId = signal<string>(uuidv4());
|
||||
private readonly _isSignalingConnected = signal(false);
|
||||
private readonly _isVoiceConnected = signal(false);
|
||||
private readonly _connectedPeers = signal<string[]>([]);
|
||||
private readonly _isMuted = signal(false);
|
||||
private readonly _isDeafened = signal(false);
|
||||
private readonly _isScreenSharing = signal(false);
|
||||
private readonly _isNoiseReductionEnabled = signal(false);
|
||||
private readonly _screenStreamSignal = signal<MediaStream | null>(null);
|
||||
private readonly _isScreenShareRemotePlaybackSuppressed = signal(false);
|
||||
private readonly _forceDefaultRemotePlaybackOutput = signal(false);
|
||||
private readonly _hasConnectionError = signal(false);
|
||||
private readonly _connectionErrorMessage = signal<string | null>(null);
|
||||
private readonly _hasEverConnected = signal(false);
|
||||
private readonly _peerLatencies = signal<ReadonlyMap<string, number>>(new Map());
|
||||
|
||||
constructor() {
|
||||
this.peerId = computed(() => this._localPeerId());
|
||||
this.isConnected = computed(() => this._isSignalingConnected());
|
||||
this.hasEverConnected = computed(() => this._hasEverConnected());
|
||||
this.isVoiceConnected = computed(() => this._isVoiceConnected());
|
||||
this.connectedPeers = computed(() => this._connectedPeers());
|
||||
this.isMuted = computed(() => this._isMuted());
|
||||
this.isDeafened = computed(() => this._isDeafened());
|
||||
this.isScreenSharing = computed(() => this._isScreenSharing());
|
||||
this.isNoiseReductionEnabled = computed(() => this._isNoiseReductionEnabled());
|
||||
this.screenStream = computed(() => this._screenStreamSignal());
|
||||
this.isScreenShareRemotePlaybackSuppressed = computed(() => this._isScreenShareRemotePlaybackSuppressed());
|
||||
this.forceDefaultRemotePlaybackOutput = computed(() => this._forceDefaultRemotePlaybackOutput());
|
||||
this.hasConnectionError = computed(() => this._hasConnectionError());
|
||||
this.connectionErrorMessage = computed(() => this._connectionErrorMessage());
|
||||
this.shouldShowConnectionError = computed(() => {
|
||||
if (!this._hasConnectionError())
|
||||
return false;
|
||||
|
||||
if (this._isVoiceConnected() && this._connectedPeers().length > 0)
|
||||
return false;
|
||||
|
||||
return true;
|
||||
});
|
||||
|
||||
this.peerLatencies = computed(() => this._peerLatencies());
|
||||
}
|
||||
|
||||
get currentServerId(): string | null {
|
||||
return this.activeServerId;
|
||||
}
|
||||
|
||||
getLocalPeerId(): string {
|
||||
return this._localPeerId();
|
||||
}
|
||||
|
||||
isSignalingConnected(): boolean {
|
||||
return this._isSignalingConnected();
|
||||
}
|
||||
|
||||
isVoiceConnectedActive(): boolean {
|
||||
return this._isVoiceConnected();
|
||||
}
|
||||
|
||||
isScreenSharingActive(): boolean {
|
||||
return this._isScreenSharing();
|
||||
}
|
||||
|
||||
setCurrentServer(serverId: string): void {
|
||||
this.activeServerId = serverId;
|
||||
}
|
||||
|
||||
setVoiceConnected(connected: boolean): void {
|
||||
this._isVoiceConnected.set(connected);
|
||||
}
|
||||
|
||||
setMuted(muted: boolean): void {
|
||||
this._isMuted.set(muted);
|
||||
}
|
||||
|
||||
setDeafened(deafened: boolean): void {
|
||||
this._isDeafened.set(deafened);
|
||||
}
|
||||
|
||||
setNoiseReductionEnabled(enabled: boolean): void {
|
||||
this._isNoiseReductionEnabled.set(enabled);
|
||||
}
|
||||
|
||||
setConnectedPeers(peers: string[]): void {
|
||||
this._connectedPeers.set(peers);
|
||||
}
|
||||
|
||||
syncPeerLatencies(latencies: ReadonlyMap<string, number>): void {
|
||||
this._peerLatencies.set(new Map(latencies));
|
||||
}
|
||||
|
||||
applyLocalScreenShareState(state: LocalScreenShareState): void {
|
||||
this._isScreenSharing.set(state.active);
|
||||
this._screenStreamSignal.set(state.stream);
|
||||
this._isScreenShareRemotePlaybackSuppressed.set(state.suppressRemotePlayback);
|
||||
this._forceDefaultRemotePlaybackOutput.set(state.forceDefaultRemotePlaybackOutput);
|
||||
}
|
||||
|
||||
clearPeerViewState(): void {
|
||||
this._connectedPeers.set([]);
|
||||
this._peerLatencies.set(new Map());
|
||||
}
|
||||
|
||||
clearScreenShareState(): void {
|
||||
this._isScreenSharing.set(false);
|
||||
this._screenStreamSignal.set(null);
|
||||
this._isScreenShareRemotePlaybackSuppressed.set(false);
|
||||
this._forceDefaultRemotePlaybackOutput.set(false);
|
||||
}
|
||||
|
||||
resetConnectionState(): void {
|
||||
this._isSignalingConnected.set(false);
|
||||
this._hasEverConnected.set(false);
|
||||
this._hasConnectionError.set(false);
|
||||
this._connectionErrorMessage.set(null);
|
||||
}
|
||||
|
||||
updateSignalingConnectionStatus(anyConnected: boolean, markHasEverConnected: boolean, errorMessage?: string): void {
|
||||
if (markHasEverConnected) {
|
||||
this._hasEverConnected.set(true);
|
||||
}
|
||||
|
||||
this._isSignalingConnected.set(anyConnected);
|
||||
this._hasConnectionError.set(!anyConnected);
|
||||
this._connectionErrorMessage.set(anyConnected ? null : (errorMessage ?? 'Disconnected from signaling server'));
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,129 @@
|
||||
import { Observable } from 'rxjs';
|
||||
import { ChatEvent } from '../../../shared-kernel';
|
||||
import { ScreenShareStartOptions } from '../screen-share.config';
|
||||
import { PeerData } from '../realtime.types';
|
||||
import { MediaManager } from '../media/media.manager';
|
||||
import { PeerConnectionManager } from '../peer-connection-manager/peer-connection.manager';
|
||||
import { ScreenShareManager } from '../media/screen-share.manager';
|
||||
|
||||
interface PeerMediaFacadeDependencies {
|
||||
peerManager: PeerConnectionManager;
|
||||
mediaManager: MediaManager;
|
||||
screenShareManager: ScreenShareManager;
|
||||
}
|
||||
|
||||
export class PeerMediaFacade {
|
||||
constructor(
|
||||
private readonly dependencies: PeerMediaFacadeDependencies
|
||||
) {}
|
||||
|
||||
get onMessageReceived(): Observable<ChatEvent> {
|
||||
return this.dependencies.peerManager.messageReceived$.asObservable();
|
||||
}
|
||||
|
||||
get onPeerConnected(): Observable<string> {
|
||||
return this.dependencies.peerManager.peerConnected$.asObservable();
|
||||
}
|
||||
|
||||
get onPeerDisconnected(): Observable<string> {
|
||||
return this.dependencies.peerManager.peerDisconnected$.asObservable();
|
||||
}
|
||||
|
||||
get onRemoteStream(): Observable<{ peerId: string; stream: MediaStream }> {
|
||||
return this.dependencies.peerManager.remoteStream$.asObservable();
|
||||
}
|
||||
|
||||
get onVoiceConnected(): Observable<void> {
|
||||
return this.dependencies.mediaManager.voiceConnected$.asObservable();
|
||||
}
|
||||
|
||||
getActivePeers(): Map<string, PeerData> {
|
||||
return this.dependencies.peerManager.activePeerConnections;
|
||||
}
|
||||
|
||||
async renegotiate(peerId: string): Promise<void> {
|
||||
return await this.dependencies.peerManager.renegotiate(peerId);
|
||||
}
|
||||
|
||||
broadcastMessage(event: ChatEvent): void {
|
||||
this.dependencies.peerManager.broadcastMessage(event);
|
||||
}
|
||||
|
||||
sendToPeer(peerId: string, event: ChatEvent): void {
|
||||
this.dependencies.peerManager.sendToPeer(peerId, event);
|
||||
}
|
||||
|
||||
async sendToPeerBuffered(peerId: string, event: ChatEvent): Promise<void> {
|
||||
return await this.dependencies.peerManager.sendToPeerBuffered(peerId, event);
|
||||
}
|
||||
|
||||
broadcastCurrentStates(): void {
|
||||
this.dependencies.peerManager.broadcastCurrentStates();
|
||||
}
|
||||
|
||||
getConnectedPeerIds(): string[] {
|
||||
return this.dependencies.peerManager.getConnectedPeerIds();
|
||||
}
|
||||
|
||||
getRemoteStream(peerId: string): MediaStream | null {
|
||||
return this.dependencies.peerManager.remotePeerStreams.get(peerId) ?? null;
|
||||
}
|
||||
|
||||
getRemoteVoiceStream(peerId: string): MediaStream | null {
|
||||
return this.dependencies.peerManager.remotePeerVoiceStreams.get(peerId) ?? null;
|
||||
}
|
||||
|
||||
getRemoteScreenShareStream(peerId: string): MediaStream | null {
|
||||
return this.dependencies.peerManager.remotePeerScreenShareStreams.get(peerId) ?? null;
|
||||
}
|
||||
|
||||
clearRemoteScreenShareStream(peerId: string): void {
|
||||
this.dependencies.peerManager.clearRemoteScreenShareStream(peerId);
|
||||
}
|
||||
|
||||
closeAllPeers(): void {
|
||||
this.dependencies.peerManager.closeAllPeers();
|
||||
}
|
||||
|
||||
getLocalStream(): MediaStream | null {
|
||||
return this.dependencies.mediaManager.getLocalStream();
|
||||
}
|
||||
|
||||
getRawMicStream(): MediaStream | null {
|
||||
return this.dependencies.mediaManager.getRawMicStream();
|
||||
}
|
||||
|
||||
isScreenShareActive(): boolean {
|
||||
return this.dependencies.screenShareManager.getIsScreenActive();
|
||||
}
|
||||
|
||||
async startScreenShare(options: ScreenShareStartOptions): Promise<MediaStream> {
|
||||
return await this.dependencies.screenShareManager.startScreenShare(options);
|
||||
}
|
||||
|
||||
stopScreenShare(): void {
|
||||
this.dependencies.screenShareManager.stopScreenShare();
|
||||
}
|
||||
|
||||
requestScreenShareForPeer(peerId: string): void {
|
||||
this.dependencies.screenShareManager.requestScreenShareForPeer(peerId);
|
||||
}
|
||||
|
||||
stopScreenShareForPeer(peerId: string): void {
|
||||
this.dependencies.screenShareManager.stopScreenShareForPeer(peerId);
|
||||
}
|
||||
|
||||
clearScreenShareRequest(peerId: string): void {
|
||||
this.dependencies.screenShareManager.clearScreenShareRequest(peerId);
|
||||
}
|
||||
|
||||
syncScreenShareToPeer(peerId: string): void {
|
||||
this.dependencies.screenShareManager.syncScreenShareToPeer(peerId);
|
||||
}
|
||||
|
||||
destroy(): void {
|
||||
this.dependencies.peerManager.destroy();
|
||||
this.dependencies.mediaManager.destroy();
|
||||
this.dependencies.screenShareManager.destroy();
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,113 @@
|
||||
import { ChatEvent } from '../../../shared-kernel';
|
||||
import {
|
||||
P2P_TYPE_SCREEN_SHARE_REQUEST,
|
||||
P2P_TYPE_SCREEN_SHARE_STOP,
|
||||
P2P_TYPE_SCREEN_STATE
|
||||
} from '../realtime.constants';
|
||||
|
||||
interface RemoteScreenShareRequestControllerDependencies {
|
||||
getConnectedPeerIds(): string[];
|
||||
sendToPeer(peerId: string, event: ChatEvent): void;
|
||||
clearRemoteScreenShareStream(peerId: string): void;
|
||||
requestScreenShareForPeer(peerId: string): void;
|
||||
stopScreenShareForPeer(peerId: string): void;
|
||||
clearScreenShareRequest(peerId: string): void;
|
||||
}
|
||||
|
||||
export class RemoteScreenShareRequestController {
|
||||
private remoteScreenShareRequestsEnabled = false;
|
||||
private readonly desiredRemoteScreenSharePeers = new Set<string>();
|
||||
private readonly activeRemoteScreenSharePeers = new Set<string>();
|
||||
|
||||
constructor(
|
||||
private readonly dependencies: RemoteScreenShareRequestControllerDependencies
|
||||
) {}
|
||||
|
||||
handlePeerConnected(peerId: string): void {
|
||||
if (this.remoteScreenShareRequestsEnabled && this.desiredRemoteScreenSharePeers.has(peerId)) {
|
||||
this.requestRemoteScreenShares([peerId]);
|
||||
}
|
||||
}
|
||||
|
||||
handlePeerDisconnected(peerId: string): void {
|
||||
this.activeRemoteScreenSharePeers.delete(peerId);
|
||||
this.dependencies.clearScreenShareRequest(peerId);
|
||||
}
|
||||
|
||||
handlePeerControlMessage(event: ChatEvent): void {
|
||||
if (!event.fromPeerId) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (event.type === P2P_TYPE_SCREEN_STATE && event.isScreenSharing === false) {
|
||||
this.dependencies.clearRemoteScreenShareStream(event.fromPeerId);
|
||||
return;
|
||||
}
|
||||
|
||||
if (event.type === P2P_TYPE_SCREEN_SHARE_REQUEST) {
|
||||
this.dependencies.requestScreenShareForPeer(event.fromPeerId);
|
||||
return;
|
||||
}
|
||||
|
||||
if (event.type === P2P_TYPE_SCREEN_SHARE_STOP) {
|
||||
this.dependencies.stopScreenShareForPeer(event.fromPeerId);
|
||||
}
|
||||
}
|
||||
|
||||
syncRemoteScreenShareRequests(peerIds: string[], enabled: boolean): void {
|
||||
const nextDesiredPeers = new Set(
|
||||
peerIds.filter((peerId): peerId is string => !!peerId)
|
||||
);
|
||||
|
||||
if (!enabled) {
|
||||
this.remoteScreenShareRequestsEnabled = false;
|
||||
this.desiredRemoteScreenSharePeers.clear();
|
||||
this.stopRemoteScreenShares([...this.activeRemoteScreenSharePeers]);
|
||||
return;
|
||||
}
|
||||
|
||||
this.remoteScreenShareRequestsEnabled = true;
|
||||
|
||||
for (const activePeerId of [...this.activeRemoteScreenSharePeers]) {
|
||||
if (!nextDesiredPeers.has(activePeerId)) {
|
||||
this.stopRemoteScreenShares([activePeerId]);
|
||||
}
|
||||
}
|
||||
|
||||
this.desiredRemoteScreenSharePeers.clear();
|
||||
nextDesiredPeers.forEach((peerId) => this.desiredRemoteScreenSharePeers.add(peerId));
|
||||
this.requestRemoteScreenShares([...nextDesiredPeers]);
|
||||
}
|
||||
|
||||
clear(): void {
|
||||
this.remoteScreenShareRequestsEnabled = false;
|
||||
this.desiredRemoteScreenSharePeers.clear();
|
||||
this.activeRemoteScreenSharePeers.clear();
|
||||
}
|
||||
|
||||
private requestRemoteScreenShares(peerIds: string[]): void {
|
||||
const connectedPeerIds = new Set(this.dependencies.getConnectedPeerIds());
|
||||
|
||||
for (const peerId of peerIds) {
|
||||
if (!connectedPeerIds.has(peerId) || this.activeRemoteScreenSharePeers.has(peerId)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
this.dependencies.sendToPeer(peerId, { type: P2P_TYPE_SCREEN_SHARE_REQUEST });
|
||||
this.activeRemoteScreenSharePeers.add(peerId);
|
||||
}
|
||||
}
|
||||
|
||||
private stopRemoteScreenShares(peerIds: string[]): void {
|
||||
const connectedPeerIds = new Set(this.dependencies.getConnectedPeerIds());
|
||||
|
||||
for (const peerId of peerIds) {
|
||||
if (this.activeRemoteScreenSharePeers.has(peerId) && connectedPeerIds.has(peerId)) {
|
||||
this.dependencies.sendToPeer(peerId, { type: P2P_TYPE_SCREEN_SHARE_STOP });
|
||||
}
|
||||
|
||||
this.activeRemoteScreenSharePeers.delete(peerId);
|
||||
this.dependencies.clearRemoteScreenShareStream(peerId);
|
||||
}
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user