feat: Add webcam basic support
This commit is contained in:
@@ -433,7 +433,7 @@ class DebugNetworkSnapshotBuilder {
|
||||
}
|
||||
}
|
||||
|
||||
if (type === 'screen-state') {
|
||||
if (type === 'screen-state' || type === 'camera-state') {
|
||||
const subjectNode = direction === 'outbound'
|
||||
? this.ensureLocalNetworkNode(
|
||||
state,
|
||||
@@ -442,12 +442,14 @@ class DebugNetworkSnapshotBuilder {
|
||||
this.getPayloadString(payload, 'displayName')
|
||||
)
|
||||
: peerNode;
|
||||
const isScreenSharing = this.getPayloadBoolean(payload, 'isScreenSharing');
|
||||
const isStreaming = type === 'screen-state'
|
||||
? this.getPayloadBoolean(payload, 'isScreenSharing')
|
||||
: this.getPayloadBoolean(payload, 'isCameraEnabled');
|
||||
|
||||
if (isScreenSharing !== null) {
|
||||
subjectNode.isStreaming = isScreenSharing;
|
||||
if (isStreaming !== null) {
|
||||
subjectNode.isStreaming = isStreaming;
|
||||
|
||||
if (!isScreenSharing)
|
||||
if (!isStreaming)
|
||||
subjectNode.streams.video = 0;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -13,7 +13,7 @@ infrastructure adapters and UI.
|
||||
| **chat** | Messaging rules, sync logic, GIF/Klipy integration, chat UI | `KlipyService`, `canEditMessage()`, `ChatMessagesComponent` |
|
||||
| **screen-share** | Source picker, quality presets | `ScreenShareFacade` |
|
||||
| **server-directory** | Multi-server endpoint management, health checks, invites, server search UI | `ServerDirectoryFacade` |
|
||||
| **voice-connection** | Voice activity detection, bitrate profiles | `VoiceConnectionFacade` |
|
||||
| **voice-connection** | Voice activity detection, bitrate profiles, in-channel camera transport | `VoiceConnectionFacade` |
|
||||
| **voice-session** | Join/leave orchestration, voice settings persistence | `VoiceSessionFacade` |
|
||||
|
||||
## Folder convention
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
# Screen Share Domain
|
||||
|
||||
Manages screen sharing sessions, source selection (Electron), quality presets, and the viewer/workspace UI. Like `voice-connection`, the actual WebRTC track distribution lives in `infrastructure/realtime`; this domain provides the application-facing API and UI components.
|
||||
Manages screen sharing sessions, source selection (Electron), quality presets, and screen-share-specific UI. Like `voice-connection`, the actual WebRTC track distribution lives in `infrastructure/realtime`; this domain provides the application-facing API for display-media capture and playback.
|
||||
|
||||
The mixed live-stream workspace is intentionally not part of this domain. It lives in `features/room/voice-workspace` because it composes screen share, voice-session, voice-connection, and camera state in one shell.
|
||||
|
||||
## Module map
|
||||
|
||||
@@ -14,12 +16,9 @@ screen-share/
|
||||
│ └── screen-share.config.ts Quality presets and types (re-exported from shared-kernel)
|
||||
│
|
||||
├── feature/
|
||||
│ ├── screen-share-viewer/ Single-stream video player with fullscreen + volume
|
||||
│ └── screen-share-workspace/ Multi-stream grid workspace
|
||||
│ ├── screen-share-workspace.component.ts Grid layout, featured/thumbnail streams, mini-window mode
|
||||
│ ├── screen-share-stream-tile.component.ts Individual stream tile with fullscreen/volume controls
|
||||
│ ├── screen-share-playback.service.ts Per-user mute/volume state for screen share audio
|
||||
│ └── screen-share-workspace.models.ts ScreenShareWorkspaceStreamItem
|
||||
│ ├── screen-share-quality-dialog/ Quality preset picker before capture
|
||||
│ ├── screen-share-source-picker/ Electron source selection dialog
|
||||
│ └── screen-share-viewer/ Single-stream video player with fullscreen + volume
|
||||
│
|
||||
└── index.ts Barrel exports
|
||||
```
|
||||
@@ -33,24 +32,18 @@ graph TD
|
||||
RSF[RealtimeSessionFacade]
|
||||
Config[screen-share.config]
|
||||
Viewer[ScreenShareViewerComponent]
|
||||
Workspace[ScreenShareWorkspaceComponent]
|
||||
Tile[ScreenShareStreamTileComponent]
|
||||
Playback[ScreenSharePlaybackService]
|
||||
Workspace[VoiceWorkspaceComponent]
|
||||
|
||||
SSF --> RSF
|
||||
Viewer --> SSF
|
||||
Workspace --> SSF
|
||||
Workspace --> Playback
|
||||
Workspace --> Tile
|
||||
Picker --> Config
|
||||
|
||||
click SSF "application/screen-share.facade.ts" "Proxy to RealtimeSessionFacade" _blank
|
||||
click Picker "application/screen-share-source-picker.service.ts" "Electron source picker" _blank
|
||||
click RSF "../../infrastructure/realtime/realtime-session.service.ts" "Low-level WebRTC composition root" _blank
|
||||
click Viewer "feature/screen-share-viewer/screen-share-viewer.component.ts" "Single-stream player" _blank
|
||||
click Workspace "feature/screen-share-workspace/screen-share-workspace.component.ts" "Multi-stream workspace" _blank
|
||||
click Tile "feature/screen-share-workspace/screen-share-stream-tile.component.ts" "Stream tile" _blank
|
||||
click Playback "feature/screen-share-workspace/screen-share-playback.service.ts" "Per-user volume state" _blank
|
||||
click Workspace "../../features/room/voice-workspace/voice-workspace.component.ts" "Room-level live stream workspace" _blank
|
||||
click Config "domain/screen-share.config.ts" "Quality presets" _blank
|
||||
```
|
||||
|
||||
@@ -110,28 +103,6 @@ The quality dialog can be shown before each share (`askScreenShareQuality` setti
|
||||
- Focus events from other components via a `viewer:focus` custom DOM event
|
||||
- Auto-stop when the watched user stops sharing or the stream's video tracks end
|
||||
|
||||
## Workspace component
|
||||
## Voice workspace integration
|
||||
|
||||
`ScreenShareWorkspaceComponent` is the multi-stream grid view inside the voice workspace panel. It handles:
|
||||
|
||||
- Listing all active screen shares (local + remote) sorted with remote first
|
||||
- Featured/widescreen mode for a single focused stream with thumbnail sidebar
|
||||
- Mini-window mode (draggable, position-clamped to viewport)
|
||||
- Auto-hide header chrome in widescreen mode (2.2 s timeout, revealed on pointer move)
|
||||
- On-demand remote stream requests via `syncRemoteScreenShareRequests`
|
||||
- Per-stream volume and mute via `ScreenSharePlaybackService`
|
||||
- Voice controls (mute, deafen, disconnect, share toggle) integrated into the workspace header
|
||||
|
||||
```mermaid
|
||||
stateDiagram-v2
|
||||
[*] --> Hidden
|
||||
Hidden --> Expanded: open()
|
||||
Expanded --> GridView: multiple shares, no focus
|
||||
Expanded --> WidescreenView: single share or focused stream
|
||||
WidescreenView --> GridView: showAllStreams()
|
||||
GridView --> WidescreenView: focusShare(peerKey)
|
||||
Expanded --> Minimized: minimize()
|
||||
Minimized --> Expanded: restore()
|
||||
Expanded --> Hidden: close()
|
||||
Minimized --> Hidden: close()
|
||||
```
|
||||
`VoiceWorkspaceComponent` in `features/room/voice-workspace` is the multi-stream grid view inside the room shell. It consumes `ScreenShareFacade` for display-media capture and on-demand remote screen-share requests, but it is not part of this domain because it also owns camera presentation and voice-session controls.
|
||||
|
||||
@@ -1,9 +0,0 @@
|
||||
import { User } from '../../../../shared-kernel';
|
||||
|
||||
export interface ScreenShareWorkspaceStreamItem {
|
||||
id: string;
|
||||
peerKey: string;
|
||||
user: User;
|
||||
stream: MediaStream;
|
||||
isLocal: boolean;
|
||||
}
|
||||
@@ -4,5 +4,3 @@ export * from './domain/screen-share.config';
|
||||
|
||||
// Feature components
|
||||
export { ScreenShareViewerComponent } from './feature/screen-share-viewer/screen-share-viewer.component';
|
||||
export { ScreenShareWorkspaceComponent } from './feature/screen-share-workspace/screen-share-workspace.component';
|
||||
export { ScreenShareStreamTileComponent } from './feature/screen-share-workspace/screen-share-stream-tile.component';
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
# Voice Connection Domain
|
||||
|
||||
Bridges the application layer to the low-level realtime infrastructure for voice calls. Provides speaking detection via Web Audio analysis and per-peer volume control for playback. The actual WebRTC plumbing lives in `infrastructure/realtime`; this domain wraps it with a clean facade.
|
||||
Bridges the application layer to the low-level realtime infrastructure for voice calls and in-channel camera transport. Provides speaking detection via Web Audio analysis and per-peer volume control for playback. The actual WebRTC plumbing lives in `infrastructure/realtime`; this domain wraps it with a clean facade.
|
||||
|
||||
## Module map
|
||||
|
||||
```
|
||||
voice-connection/
|
||||
├── application/
|
||||
│ ├── voice-connection.facade.ts Proxy to RealtimeSessionFacade for voice signals and methods
|
||||
│ ├── voice-connection.facade.ts Proxy to RealtimeSessionFacade for voice and camera signals/methods
|
||||
│ ├── voice-activity.service.ts RMS-based speaking detection via AnalyserNode (per-user signals)
|
||||
│ └── voice-playback.service.ts Per-peer GainNode chain, 0-200% volume, deafen support
|
||||
│
|
||||
@@ -42,13 +42,17 @@ graph TD
|
||||
|
||||
`VoiceConnectionFacade` exposes signals and methods from `RealtimeSessionFacade` without leaking infrastructure details into feature components. It covers:
|
||||
|
||||
- Connection state: `isVoiceConnected`, `isMuted`, `isDeafened`, `hasConnectionError`
|
||||
- Stream access: `getRemoteVoiceStream`, `getLocalStream`, `getRawMicStream`
|
||||
- Controls: `enableVoice`, `disableVoice`, `toggleMute`, `toggleDeafen`, `toggleNoiseReduction`
|
||||
- Connection state: `isVoiceConnected`, `isMuted`, `isDeafened`, `isCameraEnabled`, `hasConnectionError`
|
||||
- Stream access: `getRemoteVoiceStream`, `getRemoteCameraStream`, `getLocalStream`, `getLocalCameraStream`, `getRawMicStream`
|
||||
- Controls: `enableVoice`, `disableVoice`, `enableCamera`, `disableCamera`, `toggleMute`, `toggleDeafen`, `toggleNoiseReduction`
|
||||
- Audio tuning: `setOutputVolume`, `setInputVolume`, `setAudioBitrate`, `setLatencyProfile`
|
||||
- Peer events: `onRemoteStream`, `onPeerConnected`, `onPeerDisconnected`
|
||||
- Heartbeat: `startVoiceHeartbeat`, `stopVoiceHeartbeat`
|
||||
|
||||
## Camera transport
|
||||
|
||||
Camera capture is treated as voice-adjacent transport, not screen share. The underlying realtime layer routes webcam video only to peers in the same active voice channel, exposes remote camera streams through `getRemoteCameraStream(peerId)`, and keeps webcam senders separate from screen-share senders so both features can run at the same time.
|
||||
|
||||
## Speaking detection
|
||||
|
||||
`VoiceActivityService` monitors audio levels for local and remote streams using the Web Audio API. Each tracked stream gets its own `AudioContext` with an `AnalyserNode`. A single `requestAnimationFrame` loop polls all analysers.
|
||||
|
||||
@@ -8,6 +8,7 @@ export class VoiceConnectionFacade {
|
||||
readonly isVoiceConnected = inject(RealtimeSessionFacade).isVoiceConnected;
|
||||
readonly isMuted = inject(RealtimeSessionFacade).isMuted;
|
||||
readonly isDeafened = inject(RealtimeSessionFacade).isDeafened;
|
||||
readonly isCameraEnabled = inject(RealtimeSessionFacade).isCameraEnabled;
|
||||
readonly isNoiseReductionEnabled = inject(RealtimeSessionFacade).isNoiseReductionEnabled;
|
||||
readonly hasConnectionError = inject(RealtimeSessionFacade).hasConnectionError;
|
||||
readonly connectionErrorMessage = inject(RealtimeSessionFacade).connectionErrorMessage;
|
||||
@@ -36,10 +37,18 @@ export class VoiceConnectionFacade {
|
||||
return this.realtime.getRemoteVoiceStream(peerId);
|
||||
}
|
||||
|
||||
/** Remote webcam stream for the given peer, or null when that peer has no active camera. */
getRemoteCameraStream(peerId: string): MediaStream | null {
  // Pure delegation to the realtime layer; no caching at this level.
  const cameraStream = this.realtime.getRemoteCameraStream(peerId);

  return cameraStream;
}
|
||||
|
||||
/** Local (processed) audio stream as held by the realtime layer, or null when unavailable. */
getLocalStream(): MediaStream | null {
  const localStream = this.realtime.getLocalStream();

  return localStream;
}
|
||||
|
||||
/** Local webcam stream, or null when the camera is not currently enabled. */
getLocalCameraStream(): MediaStream | null {
  const localCamera = this.realtime.getLocalCameraStream();

  return localCamera;
}
|
||||
|
||||
/** Unprocessed microphone capture stream from the realtime layer, or null when none exists. */
getRawMicStream(): MediaStream | null {
  const rawMic = this.realtime.getRawMicStream();

  return rawMic;
}
|
||||
@@ -52,6 +61,14 @@ export class VoiceConnectionFacade {
|
||||
this.realtime.disableVoice();
|
||||
}
|
||||
|
||||
async enableCamera(): Promise<MediaStream> {
|
||||
return await this.realtime.enableCamera();
|
||||
}
|
||||
|
||||
/** Turns the local webcam off. Synchronous fire-and-forget delegation to the realtime layer. */
disableCamera(): void {
  this.realtime.disableCamera();
}
|
||||
|
||||
async setLocalStream(stream: MediaStream): Promise<void> {
|
||||
await this.realtime.setLocalStream(stream);
|
||||
}
|
||||
|
||||
@@ -2,6 +2,8 @@
|
||||
|
||||
Tracks voice session metadata across client-side navigation and manages the voice workspace UI state (expanded, minimized, hidden). This domain does not touch WebRTC directly; actual connections live in `voice-connection` and `infrastructure/realtime`.
|
||||
|
||||
The actual mixed live-stream workspace UI lives in `features/room/voice-workspace` and consumes `VoiceWorkspaceService` from this domain.
|
||||
|
||||
## Module map
|
||||
|
||||
```
|
||||
@@ -18,7 +20,7 @@ voice-session/
|
||||
│ └── voice-settings.storage.ts Persists audio device IDs, volumes, bitrate, latency, noise reduction to localStorage
|
||||
│
|
||||
├── feature/
|
||||
│ ├── voice-controls/ Full voice control panel (mic, deafen, devices, screen share, settings)
|
||||
│ ├── voice-controls/ Full voice control panel (mic, camera, deafen, devices, screen share, settings)
|
||||
│ └── floating-voice-controls/ Minimal overlay when user navigates away from the voice server
|
||||
│
|
||||
└── index.ts Barrel exports
|
||||
@@ -93,7 +95,7 @@ stateDiagram-v2
|
||||
Minimized --> Hidden: voice session ends
|
||||
```
|
||||
|
||||
The minimized mode renders a draggable mini-window. Its position is tracked in `miniWindowPosition` and clamped to viewport bounds on resize. `focusedStreamId` controls which screen-share stream gets the widescreen treatment in expanded mode.
|
||||
The minimized mode renders a draggable mini-window. Its position is tracked in `miniWindowPosition` and clamped to viewport bounds on resize. `focusedStreamId` controls which live stream gets the widescreen treatment in expanded mode, using feature-level stream IDs such as `screen:<peerKey>` or `camera:<peerKey>`.
|
||||
|
||||
## Voice settings
|
||||
|
||||
|
||||
@@ -86,6 +86,25 @@
|
||||
/>
|
||||
</button>
|
||||
|
||||
<!-- Camera Toggle -->
|
||||
<button
|
||||
type="button"
|
||||
(click)="toggleCamera()"
|
||||
[class]="getCameraButtonClass()"
|
||||
>
|
||||
@if (isCameraEnabled()) {
|
||||
<ng-icon
|
||||
name="lucideVideoOff"
|
||||
class="w-5 h-5"
|
||||
/>
|
||||
} @else {
|
||||
<ng-icon
|
||||
name="lucideVideo"
|
||||
class="w-5 h-5"
|
||||
/>
|
||||
}
|
||||
</button>
|
||||
|
||||
<!-- Screen Share Toggle -->
|
||||
<button
|
||||
type="button"
|
||||
|
||||
@@ -84,6 +84,7 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
|
||||
connectionErrorMessage = computed(() => this.webrtcService.connectionErrorMessage());
|
||||
isMuted = signal(false);
|
||||
isDeafened = signal(false);
|
||||
isCameraEnabled = computed(() => this.webrtcService.isCameraEnabled());
|
||||
isScreenSharing = this.screenShareService.isScreenSharing;
|
||||
showSettings = signal(false);
|
||||
|
||||
@@ -281,6 +282,12 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
|
||||
}
|
||||
})
|
||||
);
|
||||
this.store.dispatch(
|
||||
UsersActions.updateCameraState({
|
||||
userId: user.id,
|
||||
cameraState: { isEnabled: false }
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
// End voice session for floating controls
|
||||
@@ -364,6 +371,42 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
|
||||
}
|
||||
}
|
||||
|
||||
async toggleCamera(): Promise<void> {
|
||||
if (!this.isConnected()) {
|
||||
return;
|
||||
}
|
||||
|
||||
const user = this.currentUser();
|
||||
|
||||
if (this.isCameraEnabled()) {
|
||||
this.webrtcService.disableCamera();
|
||||
|
||||
if (user?.id) {
|
||||
this.store.dispatch(
|
||||
UsersActions.updateCameraState({
|
||||
userId: user.id,
|
||||
cameraState: { isEnabled: false }
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
try {
|
||||
await this.webrtcService.enableCamera();
|
||||
|
||||
if (user?.id) {
|
||||
this.store.dispatch(
|
||||
UsersActions.updateCameraState({
|
||||
userId: user.id,
|
||||
cameraState: { isEnabled: true }
|
||||
})
|
||||
);
|
||||
}
|
||||
} catch (_error) {}
|
||||
}
|
||||
|
||||
async toggleScreenShare(): Promise<void> {
|
||||
if (this.isScreenSharing()) {
|
||||
this.screenShareService.stopScreenShare();
|
||||
@@ -562,6 +605,17 @@ export class VoiceControlsComponent implements OnInit, OnDestroy {
|
||||
return `${base} bg-secondary text-foreground hover:bg-secondary/80`;
|
||||
}
|
||||
|
||||
/** CSS classes for the camera toggle button; primary-tinted while the camera is enabled. */
getCameraButtonClass(): string {
  const base =
    'w-10 h-10 inline-flex items-center justify-center rounded-full transition-colors disabled:opacity-50 disabled:cursor-not-allowed';
  const stateClasses = this.isCameraEnabled()
    ? 'bg-primary/20 text-primary hover:bg-primary/30'
    : 'bg-secondary text-foreground hover:bg-secondary/80';

  return `${base} ${stateClasses}`;
}
|
||||
|
||||
getScreenShareButtonClass(): string {
|
||||
const base =
|
||||
'w-10 h-10 inline-flex items-center justify-center rounded-full transition-colors disabled:opacity-50 disabled:cursor-not-allowed';
|
||||
|
||||
@@ -42,7 +42,7 @@
|
||||
}
|
||||
}
|
||||
|
||||
<app-screen-share-workspace />
|
||||
<app-voice-workspace />
|
||||
</main>
|
||||
|
||||
<!-- Sidebar always visible -->
|
||||
|
||||
@@ -19,8 +19,8 @@ import {
|
||||
} from '@ng-icons/lucide';
|
||||
|
||||
import { ChatMessagesComponent } from '../../../domains/chat/feature/chat-messages/chat-messages.component';
|
||||
import { ScreenShareWorkspaceComponent } from '../../../domains/screen-share/feature/screen-share-workspace/screen-share-workspace.component';
|
||||
import { RoomsSidePanelComponent } from '../rooms-side-panel/rooms-side-panel.component';
|
||||
import { VoiceWorkspaceComponent } from '../voice-workspace/voice-workspace.component';
|
||||
|
||||
import {
|
||||
selectCurrentRoom,
|
||||
@@ -39,7 +39,7 @@ import { VoiceWorkspaceService } from '../../../domains/voice-session';
|
||||
CommonModule,
|
||||
NgIcon,
|
||||
ChatMessagesComponent,
|
||||
ScreenShareWorkspaceComponent,
|
||||
VoiceWorkspaceComponent,
|
||||
RoomsSidePanelComponent
|
||||
],
|
||||
viewProviders: [
|
||||
|
||||
@@ -201,11 +201,15 @@
|
||||
[title]="getPeerLatency(u) !== null ? getPeerLatency(u) + ' ms' : 'Measuring...'"
|
||||
></span>
|
||||
}
|
||||
@if (u.screenShareState?.isSharing || isUserSharing(u.id)) {
|
||||
@if (isUserStreaming(u.oderId || u.id)) {
|
||||
<button
|
||||
(click)="viewStream(u.oderId || u.id); $event.stopPropagation()"
|
||||
class="px-1.5 py-0.5 text-[10px] font-bold bg-red-500 text-white rounded animate-pulse hover:bg-red-600 transition-colors"
|
||||
class="inline-flex items-center gap-1 px-1.5 py-0.5 text-[10px] font-bold bg-red-500 text-white rounded animate-pulse hover:bg-red-600 transition-colors"
|
||||
>
|
||||
<ng-icon
|
||||
[name]="getUserLiveIconName(u.oderId || u.id)"
|
||||
class="w-2.5 h-2.5"
|
||||
/>
|
||||
LIVE
|
||||
</button>
|
||||
}
|
||||
@@ -261,13 +265,13 @@
|
||||
In voice
|
||||
</p>
|
||||
}
|
||||
@if (currentUser()?.screenShareState?.isSharing || (currentUser()?.id && isUserSharing(currentUser()!.id))) {
|
||||
@if (currentUser() && isUserStreaming(currentUser()!.oderId || currentUser()!.id)) {
|
||||
<button
|
||||
class="text-[10px] bg-red-500 text-white px-1.5 py-0.5 rounded-sm font-medium flex items-center gap-1 animate-pulse hover:bg-red-600 transition-colors"
|
||||
(click)="viewStream(currentUser()!.oderId || currentUser()!.id); $event.stopPropagation()"
|
||||
>
|
||||
<ng-icon
|
||||
name="lucideMonitor"
|
||||
[name]="getUserLiveIconName(currentUser()!.oderId || currentUser()!.id)"
|
||||
class="w-2.5 h-2.5"
|
||||
/>
|
||||
LIVE
|
||||
@@ -318,13 +322,13 @@
|
||||
In voice
|
||||
</p>
|
||||
}
|
||||
@if (user.screenShareState?.isSharing || isUserSharing(user.id)) {
|
||||
@if (isUserStreaming(user.oderId || user.id)) {
|
||||
<button
|
||||
(click)="viewStream(user.oderId || user.id); $event.stopPropagation()"
|
||||
class="text-[10px] bg-red-500 text-white px-1.5 py-0.5 rounded-sm font-medium hover:bg-red-600 transition-colors flex items-center gap-1 animate-pulse"
|
||||
>
|
||||
<ng-icon
|
||||
name="lucideMonitor"
|
||||
[name]="getUserLiveIconName(user.oderId || user.id)"
|
||||
class="w-2.5 h-2.5"
|
||||
/>
|
||||
LIVE
|
||||
|
||||
@@ -15,6 +15,7 @@ import {
|
||||
lucideMicOff,
|
||||
lucideChevronLeft,
|
||||
lucideMonitor,
|
||||
lucideVideo,
|
||||
lucideHash,
|
||||
lucideUsers,
|
||||
lucidePlus,
|
||||
@@ -40,10 +41,7 @@ import { VoiceActivityService, VoiceConnectionFacade } from '../../../domains/vo
|
||||
import { VoiceSessionFacade, VoiceWorkspaceService } from '../../../domains/voice-session';
|
||||
import { VoicePlaybackService } from '../../../domains/voice-connection/application/voice-playback.service';
|
||||
import { VoiceControlsComponent } from '../../../domains/voice-session/feature/voice-controls/voice-controls.component';
|
||||
import {
|
||||
isChannelNameTaken,
|
||||
normalizeChannelName
|
||||
} from '../../../store/rooms/room-channels.rules';
|
||||
import { isChannelNameTaken, normalizeChannelName } from '../../../store/rooms/room-channels.rules';
|
||||
import {
|
||||
ContextMenuComponent,
|
||||
UserAvatarComponent,
|
||||
@@ -81,6 +79,7 @@ type TabView = 'channels' | 'users';
|
||||
lucideMicOff,
|
||||
lucideChevronLeft,
|
||||
lucideMonitor,
|
||||
lucideVideo,
|
||||
lucideHash,
|
||||
lucideUsers,
|
||||
lucidePlus,
|
||||
@@ -274,6 +273,7 @@ export class RoomsSidePanelComponent {
|
||||
input.focus();
|
||||
input.select();
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -334,7 +334,6 @@ export class RoomsSidePanelComponent {
|
||||
|
||||
confirmCreateChannel() {
|
||||
const name = normalizeChannelName(this.newChannelName);
|
||||
|
||||
const validationError = this.getChannelNameError(name);
|
||||
|
||||
if (validationError) {
|
||||
@@ -597,6 +596,13 @@ export class RoomsSidePanelComponent {
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
this.store.dispatch(
|
||||
UsersActions.updateCameraState({
|
||||
userId: current.id,
|
||||
cameraState: { isEnabled: false }
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
this.voiceConnection.broadcastMessage({
|
||||
@@ -620,11 +626,15 @@ export class RoomsSidePanelComponent {
|
||||
}
|
||||
|
||||
viewShare(userId: string) {
|
||||
this.voiceWorkspace.focusStream(userId, { connectRemoteShares: true });
|
||||
this.voiceWorkspace.focusStream(`screen:${userId}`, { connectRemoteShares: true });
|
||||
}
|
||||
|
||||
viewStream(userId: string) {
|
||||
this.voiceWorkspace.focusStream(userId, { connectRemoteShares: true });
|
||||
const focusTarget = this.isUserSharing(userId)
|
||||
? `screen:${userId}`
|
||||
: `camera:${userId}`;
|
||||
|
||||
this.voiceWorkspace.focusStream(focusTarget, { connectRemoteShares: true });
|
||||
}
|
||||
|
||||
canMoveVoiceUsers(): boolean {
|
||||
@@ -740,31 +750,65 @@ export class RoomsSidePanelComponent {
|
||||
return this.voicePlayback.isUserMuted(peerId);
|
||||
}
|
||||
|
||||
isUserSharing(userId: string): boolean {
|
||||
const me = this.currentUser();
|
||||
isUserOnCamera(userId: string): boolean {
|
||||
const user = this.findKnownUser(userId);
|
||||
|
||||
if (me?.id === userId) {
|
||||
if (!this.isUserInCurrentVoiceRoom(userId, user)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const current = this.currentUser();
|
||||
|
||||
if (current && (current.id === userId || current.oderId === userId)) {
|
||||
return this.voiceConnection.isCameraEnabled();
|
||||
}
|
||||
|
||||
if (user?.cameraState?.isEnabled === true) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (user?.cameraState?.isEnabled === false) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return this.getPeerKeysForUser(user, userId)
|
||||
.some((peerKey) => this.hasActiveVideoStream(this.voiceConnection.getRemoteCameraStream(peerKey)));
|
||||
}
|
||||
|
||||
isUserSharing(userId: string): boolean {
|
||||
const user = this.findKnownUser(userId);
|
||||
|
||||
if (!this.isUserInCurrentVoiceRoom(userId, user)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const current = this.currentUser();
|
||||
|
||||
if (current && (current.id === userId || current.oderId === userId)) {
|
||||
return this.screenShare.isScreenSharing();
|
||||
}
|
||||
|
||||
const user = this.onlineUsers().find((onlineUser) => onlineUser.id === userId || onlineUser.oderId === userId);
|
||||
if (user?.screenShareState?.isSharing === true) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (user?.screenShareState?.isSharing === false) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const peerKeys = [
|
||||
user?.oderId,
|
||||
user?.id,
|
||||
userId
|
||||
].filter(
|
||||
(candidate): candidate is string => !!candidate
|
||||
);
|
||||
const stream = peerKeys
|
||||
const stream = this.getPeerKeysForUser(user, userId)
|
||||
.map((peerKey) => this.screenShare.getRemoteScreenShareStream(peerKey))
|
||||
.find((candidate) => !!candidate && candidate.getVideoTracks().length > 0) || null;
|
||||
.find((candidate) => this.hasActiveVideoStream(candidate)) || null;
|
||||
|
||||
return !!stream && stream.getVideoTracks().length > 0;
|
||||
return this.hasActiveVideoStream(stream);
|
||||
}
|
||||
|
||||
/** True when the user is live in any form: screen share or webcam. */
isUserStreaming(userId: string): boolean {
  if (this.isUserSharing(userId)) {
    return true;
  }

  return this.isUserOnCamera(userId);
}
|
||||
|
||||
/** Icon for the LIVE badge: monitor when screen sharing, video camera otherwise. */
getUserLiveIconName(userId: string): string {
  if (this.isUserSharing(userId)) {
    return 'lucideMonitor';
  }

  return 'lucideVideo';
}
|
||||
|
||||
voiceUsersInRoom(roomId: string) {
|
||||
@@ -829,4 +873,45 @@ export class RoomsSidePanelComponent {
|
||||
|
||||
return 'bg-red-500';
|
||||
}
|
||||
|
||||
/**
 * Resolves a user by either their user id or their peer id (`oderId`),
 * preferring the current user before scanning the online-users list.
 * Returns null when no match is found.
 */
private findKnownUser(userId: string): User | null {
  const matchesId = (candidate: User): boolean =>
    candidate.id === userId || candidate.oderId === userId;

  const self = this.currentUser();

  if (self && matchesId(self)) {
    return self;
  }

  return this.onlineUsers().find(matchesId) ?? null;
}
|
||||
|
||||
/**
 * True when the given user shares the current user's active voice room.
 * Returns false outright when the current user is not in a fully-identified
 * voice room (connected + roomId + serverId). The current user always counts
 * as being in their own room.
 */
private isUserInCurrentVoiceRoom(userId: string, user: User | null): boolean {
  const current = this.currentUser();
  const myVoiceState = current?.voiceState;

  if (!myVoiceState?.isConnected || !myVoiceState.roomId || !myVoiceState.serverId) {
    return false;
  }

  if (current && (current.id === userId || current.oderId === userId)) {
    return true;
  }

  if (!user?.voiceState?.isConnected) {
    return false;
  }

  return user.voiceState.roomId === myVoiceState.roomId
    && user.voiceState.serverId === myVoiceState.serverId;
}
|
||||
|
||||
/**
 * Candidate peer keys for stream lookups, in priority order:
 * the user's peer id (`oderId`), their user id, then the raw argument.
 * Falsy candidates are dropped; duplicates are intentionally kept
 * (lookups short-circuit on the first hit).
 */
private getPeerKeysForUser(user: User | null, userId: string): string[] {
  const peerKeys: string[] = [];

  for (const candidate of [user?.oderId, user?.id, userId]) {
    if (candidate) {
      peerKeys.push(candidate);
    }
  }

  return peerKeys;
}
|
||||
|
||||
/** True when the stream exists and at least one of its video tracks is still 'live'. */
private hasActiveVideoStream(stream: MediaStream | null): boolean {
  if (!stream) {
    return false;
  }

  const isLive = (track: MediaStreamTrack): boolean => track.readyState === 'live';

  return stream.getVideoTracks().some(isLive);
}
|
||||
}
|
||||
|
||||
29
toju-app/src/app/features/room/voice-workspace/README.md
Normal file
29
toju-app/src/app/features/room/voice-workspace/README.md
Normal file
@@ -0,0 +1,29 @@
|
||||
# Voice Workspace Feature
|
||||
|
||||
Room-level composition shell for live voice-channel streams. This feature owns the mixed workspace UI that can show:
|
||||
|
||||
- screen shares
|
||||
- webcam streams
|
||||
- voice-session workspace state such as expanded, minimized, focused, and mini-window position
|
||||
|
||||
It intentionally lives under `features/room/` instead of any single domain because it composes multiple domains together:
|
||||
|
||||
- `VoiceWorkspaceService` from `domains/voice-session` for panel state
|
||||
- `VoiceConnectionFacade` from `domains/voice-connection` for voice and camera streams
|
||||
- `ScreenShareFacade` from `domains/screen-share` for screen-share capture and remote share requests
|
||||
|
||||
## Files
|
||||
|
||||
```
|
||||
voice-workspace/
|
||||
├── voice-workspace.component.ts Live stream workspace shell
|
||||
├── voice-workspace.component.html Grid, widescreen, and mini-window layout
|
||||
├── voice-workspace-stream-tile.component.ts Per-stream tile UI and fullscreen controls
|
||||
├── voice-workspace-stream-tile.component.html
|
||||
├── voice-workspace-playback.service.ts Per-peer playback mute/volume state for stream audio
|
||||
└── voice-workspace.models.ts Stream item contracts (`screen` or `camera`)
|
||||
```
|
||||
|
||||
## Boundary
|
||||
|
||||
This feature is the right home for mixed live-stream presentation. Screen-share-specific behavior stays in `domains/screen-share`, and voice/camera transport stays in `domains/voice-connection` plus `infrastructure/realtime`.
|
||||
@@ -1,20 +1,20 @@
|
||||
import { Injectable, signal } from '@angular/core';
|
||||
|
||||
interface ScreenSharePlaybackSettings {
|
||||
interface VoiceWorkspacePlaybackSettings {
|
||||
muted: boolean;
|
||||
volume: number;
|
||||
}
|
||||
|
||||
const DEFAULT_SETTINGS: ScreenSharePlaybackSettings = {
|
||||
const DEFAULT_SETTINGS: VoiceWorkspacePlaybackSettings = {
|
||||
muted: false,
|
||||
volume: 100
|
||||
};
|
||||
|
||||
@Injectable({ providedIn: 'root' })
|
||||
export class ScreenSharePlaybackService {
|
||||
private readonly _settings = signal<ReadonlyMap<string, ScreenSharePlaybackSettings>>(new Map());
|
||||
export class VoiceWorkspacePlaybackService {
|
||||
private readonly _settings = signal<ReadonlyMap<string, VoiceWorkspacePlaybackSettings>>(new Map());
|
||||
|
||||
settings(): ReadonlyMap<string, ScreenSharePlaybackSettings> {
|
||||
settings(): ReadonlyMap<string, VoiceWorkspacePlaybackSettings> {
|
||||
return this._settings();
|
||||
}
|
||||
|
||||
@@ -72,6 +72,6 @@ export class ScreenSharePlaybackService {
|
||||
}
|
||||
|
||||
teardownAll(): void {
|
||||
// Screen-share audio is played directly by the video element.
|
||||
// Stream audio is played directly by the video element.
|
||||
}
|
||||
}
|
||||
@@ -3,7 +3,9 @@
|
||||
class="group relative flex h-full min-h-0 flex-col overflow-hidden bg-black/85 transition duration-200"
|
||||
tabindex="0"
|
||||
role="button"
|
||||
[attr.aria-label]="mini() ? 'Focus ' + displayName() + ' stream' : 'Open ' + displayName() + ' stream in widescreen mode'"
|
||||
[attr.aria-label]="
|
||||
mini() ? 'Focus ' + displayName() + ' ' + streamBadgeLabel() : 'Open ' + displayName() + ' ' + streamBadgeLabel() + ' in widescreen mode'
|
||||
"
|
||||
[attr.title]="canToggleFullscreen() ? (isFullscreen() ? 'Double-click to exit fullscreen' : 'Double-click for fullscreen') : null"
|
||||
[ngClass]="{
|
||||
'ring-2 ring-primary/70': focused() && !immersive() && !mini() && !isFullscreen(),
|
||||
@@ -23,7 +25,9 @@
|
||||
#streamVideo
|
||||
autoplay
|
||||
playsinline
|
||||
class="absolute inset-0 h-full w-full bg-black object-contain"
|
||||
class="absolute inset-0 h-full w-full bg-black"
|
||||
[class.object-contain]="item().kind === 'screen'"
|
||||
[class.object-cover]="item().kind === 'camera'"
|
||||
></video>
|
||||
|
||||
<div class="pointer-events-none absolute inset-0 bg-gradient-to-b from-black/70 via-black/10 to-black/80"></div>
|
||||
@@ -48,16 +52,18 @@
|
||||
<div class="min-w-0 flex-1">
|
||||
<div class="flex flex-wrap items-center gap-2">
|
||||
<p class="truncate text-sm font-semibold text-white sm:text-base">{{ displayName() }}</p>
|
||||
<span class="rounded-full bg-primary/10 px-2 py-0.5 text-[10px] font-semibold uppercase tracking-[0.18em] text-primary"> Live </span>
|
||||
<span class="rounded-full bg-primary/10 px-2 py-0.5 text-[10px] font-semibold uppercase tracking-[0.18em] text-primary">
|
||||
{{ streamBadgeLabel() }}
|
||||
</span>
|
||||
</div>
|
||||
|
||||
<p class="mt-1 text-xs text-white/60">
|
||||
{{ item().isLocal ? 'Local preview in fullscreen' : 'Fullscreen stream view' }}
|
||||
{{ fullscreenDescription() }}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@if (!item().isLocal) {
|
||||
@if (!item().isLocal && item().hasAudio) {
|
||||
<button
|
||||
type="button"
|
||||
class="inline-flex h-9 w-9 items-center justify-center rounded-full border border-white/10 bg-black/45 text-white/75 transition hover:bg-black/60 hover:text-white"
|
||||
@@ -97,7 +103,7 @@
|
||||
/>
|
||||
<div class="min-w-0 flex-1">
|
||||
<p class="truncate text-xs font-semibold text-white">{{ displayName() }}</p>
|
||||
<p class="text-[10px] uppercase tracking-[0.16em] text-white/60">Live stream</p>
|
||||
<p class="text-[10px] uppercase tracking-[0.16em] text-white/60">{{ streamBadgeLabel() }}</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -128,10 +134,10 @@
|
||||
[class.tracking-[0.24em]]="!compact()"
|
||||
>
|
||||
<ng-icon
|
||||
name="lucideMonitor"
|
||||
[name]="streamIconName()"
|
||||
class="h-3 w-3"
|
||||
/>
|
||||
Live
|
||||
{{ streamBadgeLabel() }}
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
@@ -156,7 +162,7 @@
|
||||
/>
|
||||
</button>
|
||||
|
||||
@if (!item().isLocal) {
|
||||
@if (!item().isLocal && item().hasAudio) {
|
||||
<button
|
||||
type="button"
|
||||
class="inline-flex items-center justify-center rounded-full border border-white/15 bg-black/45 text-white/90 backdrop-blur-md transition hover:bg-black/65"
|
||||
@@ -182,10 +188,10 @@
|
||||
@if (item().isLocal) {
|
||||
@if (!compact()) {
|
||||
<div class="rounded-2xl bg-black/50 px-4 py-3 text-xs text-white/75 backdrop-blur-md">
|
||||
Your preview stays muted locally to avoid audio feedback.
|
||||
{{ localPreviewDescription() }}
|
||||
</div>
|
||||
}
|
||||
} @else {
|
||||
} @else if (item().hasAudio) {
|
||||
@if (compact()) {
|
||||
<div class="rounded-xl bg-black/50 px-3 py-2 text-[11px] text-white/80 backdrop-blur-md">
|
||||
{{ muted() ? 'Muted' : volume() + '% audio' }}
|
||||
@@ -17,16 +17,17 @@ import {
|
||||
lucideMaximize,
|
||||
lucideMinimize,
|
||||
lucideMonitor,
|
||||
lucideVideo,
|
||||
lucideVolume2,
|
||||
lucideVolumeX
|
||||
} from '@ng-icons/lucide';
|
||||
|
||||
import { UserAvatarComponent } from '../../../../shared';
|
||||
import { ScreenSharePlaybackService } from './screen-share-playback.service';
|
||||
import { ScreenShareWorkspaceStreamItem } from './screen-share-workspace.models';
|
||||
import { UserAvatarComponent } from '../../../shared';
|
||||
import { VoiceWorkspacePlaybackService } from './voice-workspace-playback.service';
|
||||
import { VoiceWorkspaceStreamItem } from './voice-workspace.models';
|
||||
|
||||
@Component({
|
||||
selector: 'app-screen-share-stream-tile',
|
||||
selector: 'app-voice-workspace-stream-tile',
|
||||
standalone: true,
|
||||
imports: [
|
||||
CommonModule,
|
||||
@@ -38,20 +39,21 @@ import { ScreenShareWorkspaceStreamItem } from './screen-share-workspace.models'
|
||||
lucideMaximize,
|
||||
lucideMinimize,
|
||||
lucideMonitor,
|
||||
lucideVideo,
|
||||
lucideVolume2,
|
||||
lucideVolumeX
|
||||
})
|
||||
],
|
||||
templateUrl: './screen-share-stream-tile.component.html',
|
||||
templateUrl: './voice-workspace-stream-tile.component.html',
|
||||
host: {
|
||||
class: 'block h-full'
|
||||
}
|
||||
})
|
||||
export class ScreenShareStreamTileComponent implements OnDestroy {
|
||||
private readonly screenSharePlayback = inject(ScreenSharePlaybackService);
|
||||
export class VoiceWorkspaceStreamTileComponent implements OnDestroy {
|
||||
private readonly workspacePlayback = inject(VoiceWorkspacePlaybackService);
|
||||
private fullscreenHeaderHideTimeoutId: ReturnType<typeof setTimeout> | null = null;
|
||||
|
||||
readonly item = input.required<ScreenShareWorkspaceStreamItem>();
|
||||
readonly item = input.required<VoiceWorkspaceStreamItem>();
|
||||
readonly focused = input(false);
|
||||
readonly featured = input(false);
|
||||
readonly compact = input(false);
|
||||
@@ -86,18 +88,18 @@ export class ScreenShareStreamTileComponent implements OnDestroy {
|
||||
|
||||
effect(
|
||||
() => {
|
||||
this.screenSharePlayback.settings();
|
||||
this.workspacePlayback.settings();
|
||||
|
||||
const item = this.item();
|
||||
|
||||
if (item.isLocal) {
|
||||
if (item.isLocal || !item.hasAudio) {
|
||||
this.volume.set(0);
|
||||
this.muted.set(false);
|
||||
return;
|
||||
}
|
||||
|
||||
this.volume.set(this.screenSharePlayback.getUserVolume(item.peerKey));
|
||||
this.muted.set(this.screenSharePlayback.isUserMuted(item.peerKey));
|
||||
this.volume.set(this.workspacePlayback.getUserVolume(item.peerKey));
|
||||
this.muted.set(this.workspacePlayback.isUserMuted(item.peerKey));
|
||||
},
|
||||
{ allowSignalWrites: true }
|
||||
);
|
||||
@@ -114,7 +116,7 @@ export class ScreenShareStreamTileComponent implements OnDestroy {
|
||||
|
||||
const video = ref.nativeElement;
|
||||
|
||||
if (item.isLocal) {
|
||||
if (item.isLocal || !item.hasAudio) {
|
||||
video.muted = true;
|
||||
video.volume = 0;
|
||||
return;
|
||||
@@ -199,26 +201,26 @@ export class ScreenShareStreamTileComponent implements OnDestroy {
|
||||
}
|
||||
|
||||
requestFocus(): void {
|
||||
this.focusRequested.emit(this.item().peerKey);
|
||||
this.focusRequested.emit(this.item().id);
|
||||
}
|
||||
|
||||
toggleMuted(): void {
|
||||
const item = this.item();
|
||||
|
||||
if (item.isLocal) {
|
||||
if (item.isLocal || !item.hasAudio) {
|
||||
return;
|
||||
}
|
||||
|
||||
const nextMuted = !this.muted();
|
||||
|
||||
this.muted.set(nextMuted);
|
||||
this.screenSharePlayback.setUserMuted(item.peerKey, nextMuted);
|
||||
this.workspacePlayback.setUserMuted(item.peerKey, nextMuted);
|
||||
}
|
||||
|
||||
updateVolume(event: Event): void {
|
||||
const item = this.item();
|
||||
|
||||
if (item.isLocal) {
|
||||
if (item.isLocal || !item.hasAudio) {
|
||||
return;
|
||||
}
|
||||
|
||||
@@ -226,11 +228,11 @@ export class ScreenShareStreamTileComponent implements OnDestroy {
|
||||
const nextVolume = Math.max(0, Math.min(100, parseInt(input.value, 10) || 0));
|
||||
|
||||
this.volume.set(nextVolume);
|
||||
this.screenSharePlayback.setUserVolume(item.peerKey, nextVolume);
|
||||
this.workspacePlayback.setUserVolume(item.peerKey, nextVolume);
|
||||
|
||||
if (nextVolume > 0 && this.muted()) {
|
||||
this.muted.set(false);
|
||||
this.screenSharePlayback.setUserMuted(item.peerKey, false);
|
||||
this.workspacePlayback.setUserMuted(item.peerKey, false);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -238,6 +240,32 @@ export class ScreenShareStreamTileComponent implements OnDestroy {
|
||||
return this.item().isLocal ? 'You' : this.item().user.displayName;
|
||||
}
|
||||
|
||||
streamIconName(): string {
|
||||
return this.item().kind === 'camera' ? 'lucideVideo' : 'lucideMonitor';
|
||||
}
|
||||
|
||||
streamBadgeLabel(): string {
|
||||
return this.item().kind === 'camera' ? 'Camera live' : 'Screen share live';
|
||||
}
|
||||
|
||||
fullscreenDescription(): string {
|
||||
if (this.item().isLocal) {
|
||||
return this.item().kind === 'camera'
|
||||
? 'Local camera preview in fullscreen'
|
||||
: 'Local preview in fullscreen';
|
||||
}
|
||||
|
||||
return this.item().kind === 'camera'
|
||||
? 'Fullscreen camera view'
|
||||
: 'Fullscreen stream view';
|
||||
}
|
||||
|
||||
localPreviewDescription(): string {
|
||||
return this.item().kind === 'camera'
|
||||
? 'Your camera preview never captures audio.'
|
||||
: 'Your preview stays muted locally to avoid audio feedback.';
|
||||
}
|
||||
|
||||
private scheduleFullscreenHeaderHide(): void {
|
||||
this.clearFullscreenHeaderHideTimeout();
|
||||
|
||||
@@ -171,7 +171,7 @@
|
||||
<div class="flex gap-2 overflow-x-auto pb-1">
|
||||
@for (share of thumbnailShares(); track trackShare($index, share)) {
|
||||
<div class="h-[5.25rem] w-[9.5rem] shrink-0 sm:h-[5.5rem] sm:w-[10rem]">
|
||||
<app-screen-share-stream-tile
|
||||
<app-voice-workspace-stream-tile
|
||||
[item]="share"
|
||||
[mini]="true"
|
||||
[focused]="false"
|
||||
@@ -191,7 +191,7 @@
|
||||
@if (activeShares().length > 0) {
|
||||
@if (isWidescreenMode() && widescreenShare()) {
|
||||
<div class="h-full min-h-0">
|
||||
<app-screen-share-stream-tile
|
||||
<app-voice-workspace-stream-tile
|
||||
[item]="widescreenShare()!"
|
||||
[featured]="true"
|
||||
[focused]="true"
|
||||
@@ -206,7 +206,7 @@
|
||||
>
|
||||
@for (share of activeShares(); track trackShare($index, share)) {
|
||||
<div class="min-h-[15rem]">
|
||||
<app-screen-share-stream-tile
|
||||
<app-voice-workspace-stream-tile
|
||||
[item]="share"
|
||||
[focused]="false"
|
||||
(focusRequested)="focusShare($event)"
|
||||
@@ -225,9 +225,9 @@
|
||||
/>
|
||||
</div>
|
||||
|
||||
<h2 class="text-2xl font-semibold text-foreground">No live screen shares yet</h2>
|
||||
<h2 class="text-2xl font-semibold text-foreground">No live streams yet</h2>
|
||||
<p class="mx-auto mt-3 max-w-2xl text-sm leading-6 text-muted-foreground">
|
||||
Click Screen Share below to start streaming, or wait for someone in {{ connectedVoiceChannelName() }} to go live.
|
||||
Turn on your camera, click Screen Share below, or wait for someone in {{ connectedVoiceChannelName() }} to go live.
|
||||
</p>
|
||||
|
||||
@if (connectedVoiceUsers().length > 0) {
|
||||
@@ -29,34 +29,36 @@ import {
|
||||
lucideX
|
||||
} from '@ng-icons/lucide';
|
||||
|
||||
import { User } from '../../../../shared-kernel';
|
||||
import { User } from '../../../shared-kernel';
|
||||
import {
|
||||
loadVoiceSettingsFromStorage,
|
||||
saveVoiceSettingsToStorage,
|
||||
VoiceSessionFacade,
|
||||
VoiceWorkspacePosition,
|
||||
VoiceWorkspaceService
|
||||
} from '../../../../domains/voice-session';
|
||||
import { VoiceConnectionFacade } from '../../../../domains/voice-connection';
|
||||
import { VoicePlaybackService } from '../../../../domains/voice-connection/application/voice-playback.service';
|
||||
import { ScreenShareFacade } from '../../application/screen-share.facade';
|
||||
import { ScreenShareQuality, ScreenShareStartOptions } from '../../domain/screen-share.config';
|
||||
import { selectCurrentRoom } from '../../../../store/rooms/rooms.selectors';
|
||||
import { UsersActions } from '../../../../store/users/users.actions';
|
||||
import { selectCurrentUser, selectOnlineUsers } from '../../../../store/users/users.selectors';
|
||||
import { ScreenShareQualityDialogComponent, UserAvatarComponent } from '../../../../shared';
|
||||
import { ScreenSharePlaybackService } from './screen-share-playback.service';
|
||||
import { ScreenShareStreamTileComponent } from './screen-share-stream-tile.component';
|
||||
import { ScreenShareWorkspaceStreamItem } from './screen-share-workspace.models';
|
||||
} from '../../../domains/voice-session';
|
||||
import { VoiceConnectionFacade, VoicePlaybackService } from '../../../domains/voice-connection';
|
||||
import {
|
||||
ScreenShareFacade,
|
||||
ScreenShareQuality,
|
||||
ScreenShareStartOptions
|
||||
} from '../../../domains/screen-share';
|
||||
import { selectCurrentRoom } from '../../../store/rooms/rooms.selectors';
|
||||
import { UsersActions } from '../../../store/users/users.actions';
|
||||
import { selectCurrentUser, selectOnlineUsers } from '../../../store/users/users.selectors';
|
||||
import { ScreenShareQualityDialogComponent, UserAvatarComponent } from '../../../shared';
|
||||
import { VoiceWorkspacePlaybackService } from './voice-workspace-playback.service';
|
||||
import { VoiceWorkspaceStreamTileComponent } from './voice-workspace-stream-tile.component';
|
||||
import { VoiceWorkspaceStreamItem } from './voice-workspace.models';
|
||||
|
||||
@Component({
|
||||
selector: 'app-screen-share-workspace',
|
||||
selector: 'app-voice-workspace',
|
||||
standalone: true,
|
||||
imports: [
|
||||
CommonModule,
|
||||
NgIcon,
|
||||
ScreenShareQualityDialogComponent,
|
||||
ScreenShareStreamTileComponent,
|
||||
VoiceWorkspaceStreamTileComponent,
|
||||
UserAvatarComponent
|
||||
],
|
||||
viewProviders: [
|
||||
@@ -75,19 +77,19 @@ import { ScreenShareWorkspaceStreamItem } from './screen-share-workspace.models'
|
||||
lucideX
|
||||
})
|
||||
],
|
||||
templateUrl: './screen-share-workspace.component.html',
|
||||
templateUrl: './voice-workspace.component.html',
|
||||
host: {
|
||||
class: 'pointer-events-none absolute inset-0 z-20 block'
|
||||
}
|
||||
})
|
||||
export class ScreenShareWorkspaceComponent {
|
||||
export class VoiceWorkspaceComponent {
|
||||
private readonly destroyRef = inject(DestroyRef);
|
||||
private readonly elementRef = inject<ElementRef<HTMLElement>>(ElementRef);
|
||||
private readonly store = inject(Store);
|
||||
private readonly webrtc = inject(VoiceConnectionFacade);
|
||||
private readonly screenShare = inject(ScreenShareFacade);
|
||||
private readonly voicePlayback = inject(VoicePlaybackService);
|
||||
private readonly screenSharePlayback = inject(ScreenSharePlaybackService);
|
||||
private readonly workspacePlayback = inject(VoiceWorkspacePlaybackService);
|
||||
private readonly voiceSession = inject(VoiceSessionFacade);
|
||||
private readonly voiceWorkspace = inject(VoiceWorkspaceService);
|
||||
|
||||
@@ -160,7 +162,7 @@ export class ScreenShareWorkspaceComponent {
|
||||
return voiceUsers;
|
||||
});
|
||||
|
||||
readonly activeShares = computed<ScreenShareWorkspaceStreamItem[]>(() => {
|
||||
readonly activeShares = computed<VoiceWorkspaceStreamItem[]>(() => {
|
||||
this.remoteStreamRevision();
|
||||
|
||||
const room = this.currentRoom();
|
||||
@@ -172,17 +174,34 @@ export class ScreenShareWorkspaceComponent {
|
||||
return [];
|
||||
}
|
||||
|
||||
const shares: ScreenShareWorkspaceStreamItem[] = [];
|
||||
const localStream = this.screenShare.screenStream();
|
||||
const shares: VoiceWorkspaceStreamItem[] = [];
|
||||
const localScreenStream = this.screenShare.screenStream();
|
||||
const localCameraStream = this.webrtc.isCameraEnabled()
|
||||
? this.webrtc.getLocalCameraStream()
|
||||
: null;
|
||||
const localPeerKey = this.getUserPeerKey(me);
|
||||
|
||||
if (localStream && localPeerKey) {
|
||||
if (localScreenStream && localPeerKey) {
|
||||
shares.push({
|
||||
id: localPeerKey,
|
||||
id: this.buildStreamId(localPeerKey, 'screen'),
|
||||
peerKey: localPeerKey,
|
||||
user: me,
|
||||
stream: localStream,
|
||||
isLocal: true
|
||||
stream: localScreenStream,
|
||||
isLocal: true,
|
||||
kind: 'screen',
|
||||
hasAudio: this.hasActiveAudio(localScreenStream)
|
||||
});
|
||||
}
|
||||
|
||||
if (localCameraStream && localPeerKey) {
|
||||
shares.push({
|
||||
id: this.buildStreamId(localPeerKey, 'camera'),
|
||||
peerKey: localPeerKey,
|
||||
user: me,
|
||||
stream: localCameraStream,
|
||||
isLocal: true,
|
||||
kind: 'camera',
|
||||
hasAudio: false
|
||||
});
|
||||
}
|
||||
|
||||
@@ -201,23 +220,37 @@ export class ScreenShareWorkspaceComponent {
|
||||
continue;
|
||||
}
|
||||
|
||||
if (user.screenShareState?.isSharing === false) {
|
||||
continue;
|
||||
const remoteShare = user.screenShareState?.isSharing === false
|
||||
? null
|
||||
: this.getRemoteScreenShareStream(user);
|
||||
|
||||
if (remoteShare) {
|
||||
shares.push({
|
||||
id: this.buildStreamId(remoteShare.peerKey, 'screen'),
|
||||
peerKey: remoteShare.peerKey,
|
||||
user,
|
||||
stream: remoteShare.stream,
|
||||
isLocal: false,
|
||||
kind: 'screen',
|
||||
hasAudio: this.hasActiveAudio(remoteShare.stream)
|
||||
});
|
||||
}
|
||||
|
||||
const remoteShare = this.getRemoteShareStream(user);
|
||||
const remoteCamera = user.cameraState?.isEnabled === false
|
||||
? null
|
||||
: this.getRemoteCameraStream(user);
|
||||
|
||||
if (!remoteShare) {
|
||||
continue;
|
||||
if (remoteCamera) {
|
||||
shares.push({
|
||||
id: this.buildStreamId(remoteCamera.peerKey, 'camera'),
|
||||
peerKey: remoteCamera.peerKey,
|
||||
user,
|
||||
stream: remoteCamera.stream,
|
||||
isLocal: false,
|
||||
kind: 'camera',
|
||||
hasAudio: false
|
||||
});
|
||||
}
|
||||
|
||||
shares.push({
|
||||
id: remoteShare.peerKey,
|
||||
peerKey: remoteShare.peerKey,
|
||||
user,
|
||||
stream: remoteShare.stream,
|
||||
isLocal: false
|
||||
});
|
||||
}
|
||||
|
||||
return shares.sort((shareA, shareB) => {
|
||||
@@ -225,6 +258,10 @@ export class ScreenShareWorkspaceComponent {
|
||||
return shareA.isLocal ? 1 : -1;
|
||||
}
|
||||
|
||||
if (shareA.kind !== shareB.kind) {
|
||||
return shareA.kind === 'screen' ? -1 : 1;
|
||||
}
|
||||
|
||||
return shareA.user.displayName.localeCompare(shareB.user.displayName);
|
||||
});
|
||||
});
|
||||
@@ -233,12 +270,12 @@ export class ScreenShareWorkspaceComponent {
|
||||
const requested = this.voiceWorkspace.focusedStreamId();
|
||||
const activeShares = this.activeShares();
|
||||
|
||||
if (requested && activeShares.some((share) => share.peerKey === requested)) {
|
||||
if (requested && activeShares.some((share) => share.id === requested)) {
|
||||
return requested;
|
||||
}
|
||||
|
||||
if (activeShares.length === 1) {
|
||||
return activeShares[0].peerKey;
|
||||
return activeShares[0].id;
|
||||
}
|
||||
|
||||
return null;
|
||||
@@ -250,12 +287,12 @@ export class ScreenShareWorkspaceComponent {
|
||||
);
|
||||
readonly hasMultipleShares = computed(() => this.activeShares().length > 1);
|
||||
readonly widescreenShare = computed(
|
||||
() => this.activeShares().find((share) => share.peerKey === this.widescreenShareId()) ?? null
|
||||
() => this.activeShares().find((share) => share.id === this.widescreenShareId()) ?? null
|
||||
);
|
||||
readonly focusedAudioShare = computed(() => {
|
||||
const share = this.widescreenShare();
|
||||
|
||||
return share && !share.isLocal ? share : null;
|
||||
return share && !share.isLocal && share.hasAudio ? share : null;
|
||||
});
|
||||
readonly focusedShareTitle = computed(() => {
|
||||
const share = this.widescreenShare();
|
||||
@@ -264,16 +301,20 @@ export class ScreenShareWorkspaceComponent {
|
||||
return 'Focused stream';
|
||||
}
|
||||
|
||||
return share.isLocal ? 'Your stream' : share.user.displayName;
|
||||
if (!share.isLocal) {
|
||||
return share.user.displayName;
|
||||
}
|
||||
|
||||
return share.kind === 'camera' ? 'Your camera' : 'Your screen';
|
||||
});
|
||||
readonly thumbnailShares = computed(() => {
|
||||
const widescreenShareId = this.widescreenShareId();
|
||||
|
||||
if (!widescreenShareId) {
|
||||
return [] as ScreenShareWorkspaceStreamItem[];
|
||||
return [] as VoiceWorkspaceStreamItem[];
|
||||
}
|
||||
|
||||
return this.activeShares().filter((share) => share.peerKey !== widescreenShareId);
|
||||
return this.activeShares().filter((share) => share.id !== widescreenShareId);
|
||||
});
|
||||
readonly miniPreviewShare = computed(
|
||||
() => this.widescreenShare() ?? this.activeShares()[0] ?? null
|
||||
@@ -285,7 +326,11 @@ export class ScreenShareWorkspaceComponent {
|
||||
return 'Voice workspace';
|
||||
}
|
||||
|
||||
return previewShare.isLocal ? 'Your stream' : previewShare.user.displayName;
|
||||
if (!previewShare.isLocal) {
|
||||
return previewShare.user.displayName;
|
||||
}
|
||||
|
||||
return previewShare.kind === 'camera' ? 'Your camera' : 'Your screen';
|
||||
});
|
||||
readonly liveShareCount = computed(() => this.activeShares().length);
|
||||
readonly connectedVoiceChannelName = computed(() => {
|
||||
@@ -313,7 +358,7 @@ export class ScreenShareWorkspaceComponent {
|
||||
this.clearHeaderHideTimeout();
|
||||
this.cleanupObservedRemoteStreams();
|
||||
this.screenShare.syncRemoteScreenShareRequests([], false);
|
||||
this.screenSharePlayback.teardownAll();
|
||||
this.workspacePlayback.teardownAll();
|
||||
});
|
||||
|
||||
this.screenShare.onRemoteStream
|
||||
@@ -372,7 +417,7 @@ export class ScreenShareWorkspaceComponent {
|
||||
this.screenShare.syncRemoteScreenShareRequests(peerKeys, shouldConnectRemoteShares);
|
||||
|
||||
if (!shouldConnectRemoteShares) {
|
||||
this.screenSharePlayback.teardownAll();
|
||||
this.workspacePlayback.teardownAll();
|
||||
}
|
||||
});
|
||||
|
||||
@@ -486,7 +531,7 @@ export class ScreenShareWorkspaceComponent {
|
||||
return this.getUserPeerKey(user) || `${index}`;
|
||||
}
|
||||
|
||||
trackShare(index: number, share: ScreenShareWorkspaceStreamItem): string {
|
||||
trackShare(index: number, share: VoiceWorkspaceStreamItem): string {
|
||||
return share.id || `${index}`;
|
||||
}
|
||||
|
||||
@@ -523,7 +568,7 @@ export class ScreenShareWorkspaceComponent {
|
||||
return 100;
|
||||
}
|
||||
|
||||
return this.screenSharePlayback.getUserVolume(share.peerKey);
|
||||
return this.workspacePlayback.getUserVolume(share.peerKey);
|
||||
}
|
||||
|
||||
focusedShareMuted(): boolean {
|
||||
@@ -533,7 +578,7 @@ export class ScreenShareWorkspaceComponent {
|
||||
return false;
|
||||
}
|
||||
|
||||
return this.screenSharePlayback.isUserMuted(share.peerKey);
|
||||
return this.workspacePlayback.isUserMuted(share.peerKey);
|
||||
}
|
||||
|
||||
toggleFocusedShareMuted(): void {
|
||||
@@ -543,9 +588,9 @@ export class ScreenShareWorkspaceComponent {
|
||||
return;
|
||||
}
|
||||
|
||||
this.screenSharePlayback.setUserMuted(
|
||||
this.workspacePlayback.setUserMuted(
|
||||
share.peerKey,
|
||||
!this.screenSharePlayback.isUserMuted(share.peerKey)
|
||||
!this.workspacePlayback.isUserMuted(share.peerKey)
|
||||
);
|
||||
}
|
||||
|
||||
@@ -559,10 +604,10 @@ export class ScreenShareWorkspaceComponent {
|
||||
const input = event.target as HTMLInputElement;
|
||||
const nextVolume = Math.max(0, Math.min(100, parseInt(input.value, 10) || 0));
|
||||
|
||||
this.screenSharePlayback.setUserVolume(share.peerKey, nextVolume);
|
||||
this.workspacePlayback.setUserVolume(share.peerKey, nextVolume);
|
||||
|
||||
if (nextVolume > 0 && this.screenSharePlayback.isUserMuted(share.peerKey)) {
|
||||
this.screenSharePlayback.setUserMuted(share.peerKey, false);
|
||||
if (nextVolume > 0 && this.workspacePlayback.isUserMuted(share.peerKey)) {
|
||||
this.workspacePlayback.setUserMuted(share.peerKey, false);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -684,6 +729,13 @@ export class ScreenShareWorkspaceComponent {
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
this.store.dispatch(
|
||||
UsersActions.updateCameraState({
|
||||
userId: user.id,
|
||||
cameraState: { isEnabled: false }
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
this.voiceSession.endSession();
|
||||
@@ -791,7 +843,11 @@ export class ScreenShareWorkspaceComponent {
|
||||
return user?.oderId || user?.id || null;
|
||||
}
|
||||
|
||||
private getRemoteShareStream(user: User): { peerKey: string; stream: MediaStream } | null {
|
||||
private buildStreamId(peerKey: string, kind: VoiceWorkspaceStreamItem['kind']): string {
|
||||
return `${kind}:${peerKey}`;
|
||||
}
|
||||
|
||||
private getRemoteScreenShareStream(user: User): { peerKey: string; stream: MediaStream } | null {
|
||||
const peerKeys = [user.oderId, user.id].filter(
|
||||
(candidate): candidate is string => !!candidate
|
||||
);
|
||||
@@ -807,10 +863,30 @@ export class ScreenShareWorkspaceComponent {
|
||||
return null;
|
||||
}
|
||||
|
||||
private getRemoteCameraStream(user: User): { peerKey: string; stream: MediaStream } | null {
|
||||
const peerKeys = [user.oderId, user.id].filter(
|
||||
(candidate): candidate is string => !!candidate
|
||||
);
|
||||
|
||||
for (const peerKey of peerKeys) {
|
||||
const stream = this.webrtc.getRemoteCameraStream(peerKey);
|
||||
|
||||
if (stream && this.hasActiveVideo(stream)) {
|
||||
return { peerKey, stream };
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
private hasActiveVideo(stream: MediaStream): boolean {
|
||||
return stream.getVideoTracks().some((track) => track.readyState === 'live');
|
||||
}
|
||||
|
||||
private hasActiveAudio(stream: MediaStream): boolean {
|
||||
return stream.getAudioTracks().some((track) => track.readyState === 'live');
|
||||
}
|
||||
|
||||
private ensureMiniWindowPosition(): void {
|
||||
const bounds = this.getWorkspaceBounds();
|
||||
|
||||
@@ -0,0 +1,13 @@
|
||||
import { User } from '../../../shared-kernel';
|
||||
|
||||
export type VoiceWorkspaceStreamKind = 'camera' | 'screen';
|
||||
|
||||
export interface VoiceWorkspaceStreamItem {
|
||||
id: string;
|
||||
peerKey: string;
|
||||
user: User;
|
||||
stream: MediaStream;
|
||||
isLocal: boolean;
|
||||
kind: VoiceWorkspaceStreamKind;
|
||||
hasAudio: boolean;
|
||||
}
|
||||
@@ -29,10 +29,10 @@ realtime/
|
||||
│ ├── recovery/
|
||||
│ │ └── peer-recovery.ts Disconnect grace period + reconnect loop
|
||||
│ └── streams/
|
||||
│ └── remote-streams.ts Classifies incoming tracks (voice vs screen)
|
||||
│ └── remote-streams.ts Classifies incoming tracks (voice vs camera vs screen)
|
||||
│
|
||||
├── media/ Local capture and processing
|
||||
│ ├── media.manager.ts getUserMedia, mute, deafen, gain pipeline
|
||||
│ ├── media.manager.ts getUserMedia, mute, deafen, camera capture, same-room routing, gain pipeline
|
||||
│ ├── noise-reduction.manager.ts RNNoise AudioWorklet graph
|
||||
│ ├── voice-session-controller.ts Higher-level wrapper over MediaManager
|
||||
│ ├── screen-share.manager.ts Screen capture + per-peer track distribution
|
||||
@@ -229,12 +229,44 @@ graph LR
|
||||
click Peers "media/media.manager.ts" "MediaManager.bindLocalTracksToAllPeers()" _blank
|
||||
```
|
||||
|
||||
`MediaManager` grabs the mic with `getUserMedia`, optionally pipes it through the RNNoise AudioWorklet for noise reduction (48 kHz, loaded from `rnnoise-worklet.js`), optionally runs it through a `GainNode` for input volume control, and then routes the resulting audio track only to peers that currently belong to the same active voice channel.
|
||||
`MediaManager` grabs the mic with `getUserMedia`, optionally pipes it through the RNNoise AudioWorklet for noise reduction (48 kHz, loaded from `rnnoise-worklet.js`), optionally runs it through a `GainNode` for input volume control, and then routes the resulting audio track only to peers that currently belong to the same active voice channel. The same manager also owns camera capture as a separate video-only stream, attaches it to its own video transceiver, and applies the same voice-channel routing rules so webcam video only reaches peers in the active voice room.
|
||||
|
||||
Mute just disables the audio track (`track.enabled = false`), the connection stays up. Deafen suppresses incoming audio playback on the local side.
|
||||
|
||||
Because peers stay connected across the server for shared state and chat, voice-channel isolation is enforced in both transport and playback: outgoing mic audio is only attached to peers whose voice membership matches the local user's current channel, and remote voice audio plus join/leave cues are only active when the remote peer's announced `voiceState.roomId` and `voiceState.serverId` match the local user's current voice channel.
|
||||
|
||||
### Camera
|
||||
|
||||
```mermaid
|
||||
sequenceDiagram
|
||||
participant UI as VoiceControls/UI
|
||||
participant MM as MediaManager
|
||||
participant Peer as PeerConnectionManager
|
||||
participant Remote as Remote peer
|
||||
participant RS as remote-streams.ts
|
||||
participant Shell as VoiceWorkspaceComponent
|
||||
|
||||
UI->>MM: enableCamera()
|
||||
Note over MM: getUserMedia({ video: true, audio: false })
|
||||
Note over MM: Store localCameraStream
|
||||
MM->>MM: syncCameraRouting()
|
||||
Note over MM: Attach video track only to same-room peers
|
||||
MM->>Peer: renegotiate(peerId)
|
||||
MM->>Remote: broadcast camera-state
|
||||
Peer->>Remote: offer/answer with camera video transceiver
|
||||
Remote->>RS: ontrack(video)
|
||||
Note over RS: Classify as camera, not screen share
|
||||
RS->>Shell: getRemoteCameraStream(peerId)
|
||||
Shell->>Shell: Render camera tile in voice workspace
|
||||
|
||||
UI->>MM: disableCamera()
|
||||
MM->>MM: stopLocalCameraStream()
|
||||
MM->>MM: detach camera sender from peers
|
||||
MM->>Remote: broadcast camera-state(false)
|
||||
```
|
||||
|
||||
Camera capture is video-only, uses a dedicated camera sender, and follows the same same-room peer filter as outgoing voice audio. Incoming camera video is classified separately from screen-share tracks so the workspace can show both at the same time.
|
||||
|
||||
### Screen share
|
||||
|
||||
Screen capture uses a platform-specific strategy:
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
/* eslint-disable @typescript-eslint/member-ordering, @typescript-eslint/no-unused-vars,, id-length */
|
||||
/* eslint-disable @typescript-eslint/member-ordering, @typescript-eslint/no-unused-vars, */
|
||||
/**
|
||||
* Manages local voice media: getUserMedia, mute, deafen,
|
||||
* attaching/detaching audio tracks to peer connections, bitrate tuning,
|
||||
* Manages local voice and camera media: getUserMedia, mute, deafen,
|
||||
* attaching/detaching tracks to peer connections, bitrate tuning,
|
||||
* and optional RNNoise-based noise reduction.
|
||||
*/
|
||||
import { Subject } from 'rxjs';
|
||||
@@ -24,6 +24,7 @@ import {
|
||||
VOLUME_MAX,
|
||||
VOICE_HEARTBEAT_INTERVAL_MS,
|
||||
DEFAULT_DISPLAY_NAME,
|
||||
P2P_TYPE_CAMERA_STATE,
|
||||
P2P_TYPE_VOICE_STATE
|
||||
} from '../realtime.constants';
|
||||
|
||||
@@ -40,6 +41,8 @@ export interface MediaManagerCallbacks {
|
||||
/** Get identify credentials (for broadcasting). */
|
||||
getIdentifyOderId(): string;
|
||||
getIdentifyDisplayName(): string;
|
||||
/** Push the current local camera state back into service-level signals. */
|
||||
setCameraEnabled?(enabled: boolean): void;
|
||||
}
|
||||
|
||||
export class MediaManager {
|
||||
@@ -53,6 +56,9 @@ export class MediaManager {
|
||||
*/
|
||||
private rawMicStream: MediaStream | null = null;
|
||||
|
||||
/** The dedicated local camera stream, always captured without audio. */
|
||||
private localCameraStream: MediaStream | null = null;
|
||||
|
||||
/** Remote audio output volume (0-1). */
|
||||
private remoteAudioVolume = VOLUME_MAX;
|
||||
|
||||
@@ -86,6 +92,7 @@ export class MediaManager {
|
||||
private isVoiceActive = false;
|
||||
private isMicMuted = false;
|
||||
private isSelfDeafened = false;
|
||||
private isCameraActive = false;
|
||||
|
||||
/** Current voice channel room ID (set when joining voice). */
|
||||
private currentVoiceRoomId: string | undefined;
|
||||
@@ -118,6 +125,10 @@ export class MediaManager {
|
||||
getRawMicStream(): MediaStream | null {
|
||||
return this.rawMicStream;
|
||||
}
|
||||
/** Returns the current local camera stream, or `null` if the camera is disabled. */
|
||||
getLocalCameraStream(): MediaStream | null {
|
||||
return this.localCameraStream;
|
||||
}
|
||||
/** Whether voice is currently active (mic captured). */
|
||||
getIsVoiceActive(): boolean {
|
||||
return this.isVoiceActive;
|
||||
@@ -130,6 +141,10 @@ export class MediaManager {
|
||||
getIsSelfDeafened(): boolean {
|
||||
return this.isSelfDeafened;
|
||||
}
|
||||
/** Whether the local camera is currently active. */
|
||||
getIsCameraActive(): boolean {
|
||||
return this.isCameraActive;
|
||||
}
|
||||
/** Current remote audio output volume (normalised 0-1). */
|
||||
getRemoteAudioVolume(): number {
|
||||
return this.remoteAudioVolume;
|
||||
@@ -156,10 +171,12 @@ export class MediaManager {
|
||||
|
||||
this.allowedVoicePeerIds = nextAllowed;
|
||||
this.syncVoiceRouting();
|
||||
this.syncCameraRouting();
|
||||
}
|
||||
|
||||
refreshVoiceRouting(): void {
|
||||
this.syncVoiceRouting();
|
||||
this.syncCameraRouting();
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -229,6 +246,7 @@ export class MediaManager {
|
||||
* The peer connections themselves are kept alive.
|
||||
*/
|
||||
disableVoice(): void {
|
||||
this.disableCamera();
|
||||
this.noiseReduction.disable();
|
||||
this.teardownInputGain();
|
||||
|
||||
@@ -285,6 +303,78 @@ export class MediaManager {
|
||||
this.voiceConnected$.next();
|
||||
}
|
||||
|
||||
/**
|
||||
* Request camera access and bind the resulting video track to peers in the
|
||||
* active voice channel. Audio is explicitly disabled for this capture.
|
||||
*/
|
||||
async enableCamera(): Promise<MediaStream> {
|
||||
if (!this.isVoiceActive) {
|
||||
throw new Error('Voice must be active before enabling the camera.');
|
||||
}
|
||||
|
||||
try {
|
||||
this.stopLocalCameraStream();
|
||||
|
||||
const mediaConstraints: MediaStreamConstraints = {
|
||||
audio: false,
|
||||
video: true
|
||||
};
|
||||
|
||||
this.logger.info('getUserMedia camera constraints', mediaConstraints);
|
||||
|
||||
if (!navigator.mediaDevices?.getUserMedia) {
|
||||
throw new Error(
|
||||
'navigator.mediaDevices is not available. '
|
||||
+ 'This requires a secure context (HTTPS or localhost). '
|
||||
+ 'If accessing from an external device, use HTTPS.'
|
||||
);
|
||||
}
|
||||
|
||||
const stream = await navigator.mediaDevices.getUserMedia(mediaConstraints);
|
||||
const cameraTrack = stream.getVideoTracks()[0];
|
||||
|
||||
if (!cameraTrack) {
|
||||
stream.getTracks().forEach((track) => track.stop());
|
||||
throw new Error('Camera capture did not return a video track.');
|
||||
}
|
||||
|
||||
cameraTrack.onended = () => {
|
||||
if (this.isCameraActive) {
|
||||
this.disableCamera();
|
||||
}
|
||||
};
|
||||
|
||||
this.localCameraStream = stream;
|
||||
this.isCameraActive = true;
|
||||
this.callbacks.setCameraEnabled?.(true);
|
||||
|
||||
this.logger.attachTrackDiagnostics(cameraTrack, 'localCamera');
|
||||
this.logger.logStream('localCamera', stream);
|
||||
|
||||
this.syncCameraRouting();
|
||||
this.broadcastCameraState();
|
||||
|
||||
return stream;
|
||||
} catch (error) {
|
||||
this.logger.error('Failed to get camera media', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
/** Stop camera capture and remove camera senders from every peer. */
|
||||
disableCamera(): void {
|
||||
if (!this.localCameraStream && !this.isCameraActive) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.stopLocalCameraStream();
|
||||
this.isCameraActive = false;
|
||||
this.callbacks.setCameraEnabled?.(false);
|
||||
|
||||
this.syncCameraRouting();
|
||||
this.broadcastCameraState();
|
||||
}
|
||||
|
||||
/**
|
||||
* Toggle the local microphone mute state.
|
||||
*
|
||||
@@ -366,43 +456,41 @@ export class MediaManager {
|
||||
/**
|
||||
* Set the output volume for remote audio.
|
||||
*
|
||||
* @param volume - Normalised value: 0 = silent, 1 = 100%, up to 2 = 200%.
|
||||
* @param volume - Normalized value: 0 = silent, 1 = 100%, up to 2 = 200%.
|
||||
*/
|
||||
setOutputVolume(volume: number): void {
|
||||
this.remoteAudioVolume = Math.max(VOLUME_MIN, Math.min(2, volume));
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the input (microphone) volume.
|
||||
* Set the input microphone gain.
|
||||
*
|
||||
* If a local stream is active the gain node is updated in real time.
|
||||
* If no stream exists yet the value is stored and applied on connect.
|
||||
* If a local stream is already active the gain node is updated immediately.
|
||||
* Otherwise the value is stored and applied the next time voice starts.
|
||||
*
|
||||
* @param volume - Normalised 0-1 (0 = silent, 1 = 100%).
|
||||
* @param volume - Normalized 0-1 value.
|
||||
*/
|
||||
setInputVolume(volume: number): void {
|
||||
this.inputGainVolume = Math.max(0, Math.min(1, volume));
|
||||
|
||||
if (this.inputGainNode) {
|
||||
// Pipeline already exists - just update the gain value
|
||||
this.inputGainNode.gain.value = this.inputGainVolume;
|
||||
} else if (this.localMediaStream) {
|
||||
// Stream is active but gain pipeline hasn't been created yet
|
||||
return;
|
||||
}
|
||||
|
||||
if (this.localMediaStream) {
|
||||
this.applyInputGainToCurrentStream();
|
||||
this.bindLocalTracksToAllPeers();
|
||||
}
|
||||
}
|
||||
|
||||
/** Get current input gain value (0-1). */
|
||||
/** Return the current input gain value. */
|
||||
getInputVolume(): number {
|
||||
return this.inputGainVolume;
|
||||
}
|
||||
|
||||
/**
|
||||
* Set the maximum audio bitrate on every active peer's audio sender.
|
||||
*
|
||||
* The value is clamped between {@link AUDIO_BITRATE_MIN_BPS} and
|
||||
* {@link AUDIO_BITRATE_MAX_BPS}.
|
||||
* Set the maximum audio bitrate on every active peer audio sender.
|
||||
*
|
||||
* @param kbps - Target bitrate in kilobits per second.
|
||||
*/
|
||||
@@ -413,15 +501,16 @@ export class MediaManager {
|
||||
);
|
||||
|
||||
this.callbacks.getActivePeers().forEach(async (peerData) => {
|
||||
const sender =
|
||||
peerData.audioSender ||
|
||||
peerData.connection.getSenders().find((s) => s.track?.kind === TRACK_KIND_AUDIO);
|
||||
const sender = peerData.audioSender
|
||||
|| peerData.connection.getSenders().find((candidate) => candidate.track?.kind === TRACK_KIND_AUDIO);
|
||||
|
||||
if (!sender?.track)
|
||||
if (!sender?.track) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (peerData.connection.signalingState !== 'stable')
|
||||
if (peerData.connection.signalingState !== 'stable') {
|
||||
return;
|
||||
}
|
||||
|
||||
let params: RTCRtpSendParameters;
|
||||
|
||||
@@ -447,7 +536,7 @@ export class MediaManager {
|
||||
/**
|
||||
* Apply a named latency profile that maps to a predefined bitrate.
|
||||
*
|
||||
* @param profile - One of `'low'`, `'balanced'`, or `'high'`.
|
||||
* @param profile - One of `low`, `balanced`, or `high`.
|
||||
*/
|
||||
async setLatencyProfile(profile: LatencyProfile): Promise<void> {
|
||||
await this.setAudioBitrate(LATENCY_PROFILE_BITRATES[profile]);
|
||||
@@ -491,59 +580,10 @@ export class MediaManager {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Bind local audio/video tracks to all existing peer transceivers.
|
||||
* Restores transceiver direction to sendrecv if previously set to recvonly
|
||||
* (which happens when disableVoice calls removeTrack).
|
||||
*/
|
||||
/** Bind any active local mic/camera tracks to the current peer set. */
|
||||
private bindLocalTracksToAllPeers(): void {
|
||||
const peers = this.callbacks.getActivePeers();
|
||||
|
||||
if (!this.localMediaStream)
|
||||
return;
|
||||
|
||||
const localStream = this.localMediaStream;
|
||||
const localAudioTrack = localStream.getAudioTracks()[0] || null;
|
||||
const localVideoTrack = localStream.getVideoTracks()[0] || null;
|
||||
|
||||
peers.forEach((peerData, peerId) => {
|
||||
if (localAudioTrack) {
|
||||
if (this.allowedVoicePeerIds.has(peerId)) {
|
||||
this.attachVoiceTrackToPeer(peerId, peerData, localStream, localAudioTrack);
|
||||
} else {
|
||||
this.detachVoiceTrackFromPeer(peerData);
|
||||
}
|
||||
}
|
||||
|
||||
if (localVideoTrack) {
|
||||
const videoTransceiver = this.getOrCreateReusableTransceiver(peerData, TRACK_KIND_VIDEO, {
|
||||
preferredSender: peerData.videoSender,
|
||||
excludedSenders: [peerData.screenVideoSender]
|
||||
});
|
||||
const videoSender = videoTransceiver.sender;
|
||||
|
||||
peerData.videoSender = videoSender;
|
||||
|
||||
if (
|
||||
videoTransceiver &&
|
||||
(videoTransceiver.direction === TRANSCEIVER_RECV_ONLY ||
|
||||
videoTransceiver.direction === TRANSCEIVER_INACTIVE)
|
||||
) {
|
||||
videoTransceiver.direction = TRANSCEIVER_SEND_RECV;
|
||||
}
|
||||
|
||||
if (typeof videoSender.setStreams === 'function') {
|
||||
videoSender.setStreams(localStream);
|
||||
}
|
||||
|
||||
videoSender
|
||||
.replaceTrack(localVideoTrack)
|
||||
.then(() => this.logger.info('video replaceTrack ok', { peerId }))
|
||||
.catch((error) => this.logger.error('video replaceTrack failed', error));
|
||||
}
|
||||
|
||||
this.callbacks.renegotiate(peerId);
|
||||
});
|
||||
this.syncVoiceRouting();
|
||||
this.syncCameraRouting();
|
||||
}
|
||||
|
||||
private syncVoiceRouting(): void {
|
||||
@@ -562,6 +602,22 @@ export class MediaManager {
|
||||
});
|
||||
}
|
||||
|
||||
private syncCameraRouting(): void {
|
||||
const peers = this.callbacks.getActivePeers();
|
||||
const localCameraStream = this.localCameraStream;
|
||||
const localCameraTrack = localCameraStream?.getVideoTracks()[0] || null;
|
||||
|
||||
peers.forEach((peerData, peerId) => {
|
||||
const didChange = localCameraStream && localCameraTrack && this.allowedVoicePeerIds.has(peerId)
|
||||
? this.attachCameraTrackToPeer(peerId, peerData, localCameraStream, localCameraTrack)
|
||||
: this.detachCameraTrackFromPeer(peerData, peerId);
|
||||
|
||||
if (didChange) {
|
||||
void this.callbacks.renegotiate(peerId);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
private attachVoiceTrackToPeer(
|
||||
peerId: string,
|
||||
peerData: PeerData,
|
||||
@@ -613,6 +669,78 @@ export class MediaManager {
|
||||
return true;
|
||||
}
|
||||
|
||||
private attachCameraTrackToPeer(
|
||||
peerId: string,
|
||||
peerData: PeerData,
|
||||
localStream: MediaStream,
|
||||
localCameraTrack: MediaStreamTrack
|
||||
): boolean {
|
||||
const videoTransceiver = this.getOrCreateReusableTransceiver(peerData, TRACK_KIND_VIDEO, {
|
||||
preferredSender: peerData.videoSender,
|
||||
excludedSenders: [peerData.screenVideoSender]
|
||||
});
|
||||
const videoSender = videoTransceiver.sender;
|
||||
const needsDirectionRestore = videoTransceiver.direction === TRANSCEIVER_RECV_ONLY
|
||||
|| videoTransceiver.direction === TRANSCEIVER_INACTIVE;
|
||||
const needsTrackReplace = videoSender.track !== localCameraTrack;
|
||||
|
||||
peerData.videoSender = videoSender;
|
||||
|
||||
if (!needsDirectionRestore && !needsTrackReplace) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (needsDirectionRestore) {
|
||||
videoTransceiver.direction = TRANSCEIVER_SEND_RECV;
|
||||
}
|
||||
|
||||
if (typeof videoSender.setStreams === 'function') {
|
||||
videoSender.setStreams(localStream);
|
||||
}
|
||||
|
||||
if (needsTrackReplace) {
|
||||
videoSender
|
||||
.replaceTrack(localCameraTrack)
|
||||
.then(() => this.logger.info('camera replaceTrack ok', { peerId }))
|
||||
.catch((error) => this.logger.error('camera replaceTrack failed', error));
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private detachCameraTrackFromPeer(peerData: PeerData, peerId: string): boolean {
|
||||
const videoSender = peerData.videoSender
|
||||
?? peerData.connection.getSenders().find((sender) => sender !== peerData.screenVideoSender && sender.track?.kind === TRACK_KIND_VIDEO);
|
||||
const videoTransceiver = videoSender
|
||||
? peerData.connection.getTransceivers().find((transceiver) => transceiver.sender === videoSender)
|
||||
: undefined;
|
||||
|
||||
if (!videoTransceiver) {
|
||||
return false;
|
||||
}
|
||||
|
||||
peerData.videoSender = videoTransceiver.sender;
|
||||
|
||||
const hasTrack = !!videoTransceiver.sender.track;
|
||||
const needsDirectionReset = videoTransceiver.direction === TRANSCEIVER_SEND_RECV;
|
||||
|
||||
if (!hasTrack && !needsDirectionReset) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (hasTrack) {
|
||||
videoTransceiver.sender.replaceTrack(null)
|
||||
.then(() => this.logger.info('camera replaceTrack cleared', { peerId }))
|
||||
.catch((error) => this.logger.error('Failed to clear camera sender track', error, { peerId }));
|
||||
}
|
||||
|
||||
if (needsDirectionReset) {
|
||||
videoTransceiver.direction = TRANSCEIVER_RECV_ONLY;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
private areSetsEqual(left: Set<string>, right: Set<string>): boolean {
|
||||
if (left.size !== right.size) {
|
||||
return false;
|
||||
@@ -690,6 +818,19 @@ export class MediaManager {
|
||||
});
|
||||
}
|
||||
|
||||
/** Broadcast the local camera state to all connected peers. */
|
||||
private broadcastCameraState(): void {
|
||||
const oderId = this.callbacks.getIdentifyOderId();
|
||||
const displayName = this.callbacks.getIdentifyDisplayName();
|
||||
|
||||
this.callbacks.broadcastMessage({
|
||||
type: P2P_TYPE_CAMERA_STATE,
|
||||
oderId,
|
||||
displayName,
|
||||
isCameraEnabled: this.isCameraActive
|
||||
});
|
||||
}
|
||||
|
||||
// -- Input gain helpers --
|
||||
|
||||
/**
|
||||
@@ -764,6 +905,22 @@ export class MediaManager {
|
||||
this.preGainStream = null;
|
||||
}
|
||||
|
||||
private stopLocalCameraStream(): void {
|
||||
if (!this.localCameraStream) {
|
||||
return;
|
||||
}
|
||||
|
||||
this.localCameraStream.getTracks().forEach((track) => {
|
||||
if (track.kind === TRACK_KIND_VIDEO) {
|
||||
track.onended = null;
|
||||
}
|
||||
|
||||
track.stop();
|
||||
});
|
||||
|
||||
this.localCameraStream = null;
|
||||
}
|
||||
|
||||
/** Clean up all resources. */
|
||||
destroy(): void {
|
||||
this.teardownInputGain();
|
||||
|
||||
@@ -403,18 +403,17 @@ export class ScreenShareManager {
|
||||
|
||||
this.logger.attachTrackDiagnostics(screenVideoTrack, `screenVideo:${peerId}`);
|
||||
|
||||
let videoSender = peerData.videoSender || peerData.connection.getSenders().find((sender) => sender.track?.kind === TRACK_KIND_VIDEO);
|
||||
let screenVideoSender = peerData.screenVideoSender;
|
||||
|
||||
if (!videoSender) {
|
||||
if (!screenVideoSender) {
|
||||
const videoTransceiver = peerData.connection.addTransceiver(TRACK_KIND_VIDEO, {
|
||||
direction: TRANSCEIVER_SEND_RECV
|
||||
});
|
||||
|
||||
videoSender = videoTransceiver.sender;
|
||||
peerData.videoSender = videoSender;
|
||||
screenVideoSender = videoTransceiver.sender;
|
||||
} else {
|
||||
const videoTransceiver = peerData.connection.getTransceivers().find(
|
||||
(transceiver) => transceiver.sender === videoSender
|
||||
(transceiver) => transceiver.sender === screenVideoSender
|
||||
);
|
||||
|
||||
if (videoTransceiver?.direction === TRANSCEIVER_RECV_ONLY) {
|
||||
@@ -422,16 +421,16 @@ export class ScreenShareManager {
|
||||
}
|
||||
}
|
||||
|
||||
peerData.screenVideoSender = videoSender;
|
||||
peerData.screenVideoSender = screenVideoSender;
|
||||
|
||||
if (typeof videoSender.setStreams === 'function') {
|
||||
videoSender.setStreams(this.activeScreenStream);
|
||||
if (typeof screenVideoSender.setStreams === 'function') {
|
||||
screenVideoSender.setStreams(this.activeScreenStream);
|
||||
}
|
||||
|
||||
videoSender.replaceTrack(screenVideoTrack)
|
||||
screenVideoSender.replaceTrack(screenVideoTrack)
|
||||
.then(() => {
|
||||
this.logger.info('screen video replaceTrack ok', { peerId });
|
||||
void this.applyScreenShareVideoParameters(videoSender, preset, peerId);
|
||||
void this.applyScreenShareVideoParameters(screenVideoSender, preset, peerId);
|
||||
})
|
||||
.catch((error) => this.logger.error('screen video replaceTrack failed', error));
|
||||
|
||||
@@ -474,7 +473,7 @@ export class ScreenShareManager {
|
||||
private detachScreenTracksFromPeer(peerData: PeerData, peerId: string): void {
|
||||
const transceivers = peerData.connection.getTransceivers();
|
||||
const videoTransceiver = transceivers.find(
|
||||
(transceiver) => transceiver.sender === peerData.videoSender || transceiver.sender === peerData.screenVideoSender
|
||||
(transceiver) => transceiver.sender === peerData.screenVideoSender
|
||||
);
|
||||
const screenAudioTransceiver = transceivers.find(
|
||||
(transceiver) => transceiver.sender === peerData.screenAudioSender
|
||||
|
||||
@@ -129,6 +129,7 @@ export function createPeerConnection(
|
||||
audioSender: undefined,
|
||||
videoSender: undefined,
|
||||
remoteVoiceStreamIds: new Set<string>(),
|
||||
remoteCameraStreamIds: new Set<string>(),
|
||||
remoteScreenShareStreamIds: new Set<string>()
|
||||
};
|
||||
|
||||
|
||||
@@ -4,6 +4,7 @@ import {
|
||||
DATA_CHANNEL_LOW_WATER_BYTES,
|
||||
DATA_CHANNEL_STATE_OPEN,
|
||||
DEFAULT_DISPLAY_NAME,
|
||||
P2P_TYPE_CAMERA_STATE,
|
||||
P2P_TYPE_PING,
|
||||
P2P_TYPE_PONG,
|
||||
P2P_TYPE_SCREEN_STATE,
|
||||
@@ -285,7 +286,7 @@ export async function sendToPeerBuffered(
|
||||
}
|
||||
|
||||
/**
|
||||
* Send the current voice and screen-share states to a single peer.
|
||||
* Send the current voice, camera, and screen-share states to a single peer.
|
||||
*/
|
||||
export function sendCurrentStatesToPeer(
|
||||
context: PeerConnectionManagerContext,
|
||||
@@ -310,6 +311,13 @@ export function sendCurrentStatesToPeer(
|
||||
displayName,
|
||||
isScreenSharing: callbacks.isScreenSharingActive()
|
||||
});
|
||||
|
||||
sendToPeer(context, peerId, {
|
||||
type: P2P_TYPE_CAMERA_STATE,
|
||||
oderId,
|
||||
displayName,
|
||||
isCameraEnabled: callbacks.isCameraEnabled()
|
||||
});
|
||||
}
|
||||
|
||||
export function sendCurrentStatesToChannel(
|
||||
@@ -346,13 +354,22 @@ export function sendCurrentStatesToChannel(
|
||||
displayName,
|
||||
isScreenSharing: callbacks.isScreenSharingActive()
|
||||
};
|
||||
const cameraStatePayload = {
|
||||
type: P2P_TYPE_CAMERA_STATE,
|
||||
oderId,
|
||||
displayName,
|
||||
isCameraEnabled: callbacks.isCameraEnabled()
|
||||
};
|
||||
const voiceStateRaw = JSON.stringify(voiceStatePayload);
|
||||
const screenStateRaw = JSON.stringify(screenStatePayload);
|
||||
const cameraStateRaw = JSON.stringify(cameraStatePayload);
|
||||
|
||||
channel.send(voiceStateRaw);
|
||||
logDataChannelTraffic(context, channel, remotePeerId, 'outbound', voiceStateRaw, voiceStatePayload);
|
||||
channel.send(screenStateRaw);
|
||||
logDataChannelTraffic(context, channel, remotePeerId, 'outbound', screenStateRaw, screenStatePayload);
|
||||
channel.send(cameraStateRaw);
|
||||
logDataChannelTraffic(context, channel, remotePeerId, 'outbound', cameraStateRaw, cameraStatePayload);
|
||||
|
||||
logger.info('[data-channel] Sent initial states to channel', { remotePeerId, voiceState });
|
||||
} catch (error) {
|
||||
@@ -366,7 +383,7 @@ export function sendCurrentStatesToChannel(
|
||||
}
|
||||
}
|
||||
|
||||
/** Broadcast the current voice and screen-share states to all connected peers. */
|
||||
/** Broadcast the current voice, camera, and screen-share states to all connected peers. */
|
||||
export function broadcastCurrentStates(context: PeerConnectionManagerContext): void {
|
||||
const { callbacks } = context;
|
||||
const credentials = callbacks.getIdentifyCredentials();
|
||||
@@ -387,6 +404,13 @@ export function broadcastCurrentStates(context: PeerConnectionManagerContext): v
|
||||
displayName,
|
||||
isScreenSharing: callbacks.isScreenSharingActive()
|
||||
});
|
||||
|
||||
broadcastMessage(context, {
|
||||
type: P2P_TYPE_CAMERA_STATE,
|
||||
oderId,
|
||||
displayName,
|
||||
isCameraEnabled: callbacks.isCameraEnabled()
|
||||
});
|
||||
}
|
||||
|
||||
function logDataChannelTraffic(
|
||||
@@ -433,6 +457,9 @@ function summarizePeerMessage(payload: PeerMessage, base?: Record<string, unknow
|
||||
if (typeof payload['isScreenSharing'] === 'boolean')
|
||||
summary['isScreenSharing'] = payload['isScreenSharing'];
|
||||
|
||||
if (typeof payload['isCameraEnabled'] === 'boolean')
|
||||
summary['isCameraEnabled'] = payload['isCameraEnabled'];
|
||||
|
||||
if (typeof payload['content'] === 'string')
|
||||
summary['contentLength'] = payload['content'].length;
|
||||
|
||||
|
||||
@@ -73,6 +73,9 @@ export class PeerConnectionManager {
|
||||
/** Remote voice-only streams keyed by remote peer ID. */
|
||||
readonly remotePeerVoiceStreams = this.state.remotePeerVoiceStreams;
|
||||
|
||||
/** Remote camera streams keyed by remote peer ID. */
|
||||
readonly remotePeerCameraStreams = this.state.remotePeerCameraStreams;
|
||||
|
||||
/** Remote screen-share streams keyed by remote peer ID. */
|
||||
readonly remotePeerScreenShareStreams = this.state.remotePeerScreenShareStreams;
|
||||
|
||||
|
||||
@@ -27,12 +27,15 @@ export interface PeerConnectionCallbacks {
|
||||
getLocalPeerId(): string;
|
||||
/** Whether screen sharing is active. */
|
||||
isScreenSharingActive(): boolean;
|
||||
/** Whether the local camera is active. */
|
||||
isCameraEnabled(): boolean;
|
||||
}
|
||||
|
||||
export interface PeerConnectionManagerState {
|
||||
activePeerConnections: Map<string, PeerData>;
|
||||
remotePeerStreams: Map<string, MediaStream>;
|
||||
remotePeerVoiceStreams: Map<string, MediaStream>;
|
||||
remotePeerCameraStreams: Map<string, MediaStream>;
|
||||
remotePeerScreenShareStreams: Map<string, MediaStream>;
|
||||
disconnectedPeerTracker: Map<string, DisconnectedPeerEntry>;
|
||||
peerReconnectTimers: Map<string, ReturnType<typeof setInterval>>;
|
||||
@@ -88,6 +91,7 @@ export function createPeerConnectionManagerState(): PeerConnectionManagerState {
|
||||
activePeerConnections: new Map<string, PeerData>(),
|
||||
remotePeerStreams: new Map<string, MediaStream>(),
|
||||
remotePeerVoiceStreams: new Map<string, MediaStream>(),
|
||||
remotePeerCameraStreams: new Map<string, MediaStream>(),
|
||||
remotePeerScreenShareStreams: new Map<string, MediaStream>(),
|
||||
disconnectedPeerTracker: new Map<string, DisconnectedPeerEntry>(),
|
||||
peerReconnectTimers: new Map<string, ReturnType<typeof setInterval>>(),
|
||||
|
||||
@@ -10,6 +10,7 @@ export function handleRemoteTrack(
|
||||
const { logger, state } = context;
|
||||
const track = event.track;
|
||||
const isScreenAudio = isScreenShareAudioTrack(context, event, remotePeerId);
|
||||
const isScreenVideo = isScreenShareVideoTrack(context, event, remotePeerId);
|
||||
const settings =
|
||||
typeof track.getSettings === 'function' ? track.getSettings() : ({} as MediaTrackSettings);
|
||||
|
||||
@@ -38,7 +39,11 @@ export function handleRemoteTrack(
|
||||
const voiceStream = isVoiceAudioTrack(track, isScreenAudio)
|
||||
? buildAudioOnlyStream(state.remotePeerVoiceStreams.get(remotePeerId), track)
|
||||
: null;
|
||||
const cameraStream = isCameraTrack(track, isScreenAudio, isScreenVideo)
|
||||
? buildCameraStream(state.remotePeerCameraStreams.get(remotePeerId), track)
|
||||
: null;
|
||||
const screenShareStream = isScreenShareTrack(track, isScreenAudio)
|
||||
|| isScreenVideo
|
||||
? buildScreenShareStream(state.remotePeerScreenShareStreams.get(remotePeerId), track)
|
||||
: null;
|
||||
|
||||
@@ -50,6 +55,10 @@ export function handleRemoteTrack(
|
||||
state.remotePeerVoiceStreams.set(remotePeerId, voiceStream);
|
||||
}
|
||||
|
||||
if (cameraStream) {
|
||||
state.remotePeerCameraStreams.set(remotePeerId, cameraStream);
|
||||
}
|
||||
|
||||
if (screenShareStream) {
|
||||
state.remotePeerScreenShareStreams.set(remotePeerId, screenShareStream);
|
||||
}
|
||||
@@ -57,6 +66,7 @@ export function handleRemoteTrack(
|
||||
rememberIncomingStreamIds(state, event, remotePeerId, {
|
||||
isScreenAudio,
|
||||
isVoiceAudio: !!voiceStream,
|
||||
isCameraTrack: !!cameraStream,
|
||||
isScreenTrack: !!screenShareStream
|
||||
});
|
||||
|
||||
@@ -98,7 +108,7 @@ function buildCompositeRemoteStream(
|
||||
incomingTrack: MediaStreamTrack
|
||||
): MediaStream {
|
||||
return buildMergedStream(state.remotePeerStreams.get(remotePeerId), incomingTrack, {
|
||||
replaceVideoTrack: true
|
||||
replaceVideoTrack: false
|
||||
});
|
||||
}
|
||||
|
||||
@@ -121,6 +131,16 @@ function buildScreenShareStream(
|
||||
});
|
||||
}
|
||||
|
||||
function buildCameraStream(
|
||||
existingStream: MediaStream | undefined,
|
||||
incomingTrack: MediaStreamTrack
|
||||
): MediaStream {
|
||||
return buildMergedStream(existingStream, incomingTrack, {
|
||||
allowedKinds: [TRACK_KIND_VIDEO],
|
||||
replaceVideoTrack: true
|
||||
});
|
||||
}
|
||||
|
||||
function buildMergedStream(
|
||||
existingStream: MediaStream | undefined,
|
||||
incomingTrack: MediaStreamTrack,
|
||||
@@ -166,12 +186,17 @@ function removeRemoteTrack(
|
||||
const compositeStream = removeTrackFromStreamMap(state.remotePeerStreams, remotePeerId, trackId);
|
||||
|
||||
removeTrackFromStreamMap(state.remotePeerVoiceStreams, remotePeerId, trackId);
|
||||
removeTrackFromStreamMap(state.remotePeerCameraStreams, remotePeerId, trackId);
|
||||
removeTrackFromStreamMap(state.remotePeerScreenShareStreams, remotePeerId, trackId);
|
||||
|
||||
if (!state.remotePeerVoiceStreams.has(remotePeerId)) {
|
||||
peerData?.remoteVoiceStreamIds.clear();
|
||||
}
|
||||
|
||||
if (!state.remotePeerCameraStreams.has(remotePeerId)) {
|
||||
peerData?.remoteCameraStreamIds.clear();
|
||||
}
|
||||
|
||||
if (!state.remotePeerScreenShareStreams.has(remotePeerId)) {
|
||||
peerData?.remoteScreenShareStreamIds.clear();
|
||||
}
|
||||
@@ -247,8 +272,16 @@ function isVoiceAudioTrack(track: MediaStreamTrack, isScreenAudio: boolean): boo
|
||||
return track.kind === TRACK_KIND_AUDIO && !isScreenAudio;
|
||||
}
|
||||
|
||||
function isCameraTrack(
|
||||
track: MediaStreamTrack,
|
||||
isScreenAudio: boolean,
|
||||
isScreenVideo: boolean
|
||||
): boolean {
|
||||
return track.kind === TRACK_KIND_VIDEO && !isScreenAudio && !isScreenVideo;
|
||||
}
|
||||
|
||||
function isScreenShareTrack(track: MediaStreamTrack, isScreenAudio: boolean): boolean {
|
||||
return track.kind === TRACK_KIND_VIDEO || isScreenAudio;
|
||||
return track.kind === TRACK_KIND_AUDIO && isScreenAudio;
|
||||
}
|
||||
|
||||
function isScreenShareAudioTrack(
|
||||
@@ -306,6 +339,57 @@ function isScreenShareAudioTrack(
|
||||
return transceiverIndex > 0;
|
||||
}
|
||||
|
||||
function isScreenShareVideoTrack(
|
||||
context: PeerConnectionManagerContext,
|
||||
event: RTCTrackEvent,
|
||||
remotePeerId: string
|
||||
): boolean {
|
||||
if (event.track.kind !== TRACK_KIND_VIDEO) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const peerData = context.state.activePeerConnections.get(remotePeerId);
|
||||
|
||||
if (!peerData) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const incomingStreamIds = getIncomingStreamIds(event);
|
||||
|
||||
if (incomingStreamIds.some((streamId) => peerData.remoteScreenShareStreamIds.has(streamId))) {
|
||||
return true;
|
||||
}
|
||||
|
||||
if (incomingStreamIds.some((streamId) => peerData.remoteCameraStreamIds.has(streamId))) {
|
||||
return false;
|
||||
}
|
||||
|
||||
const screenVideoTransceiver = peerData.connection.getTransceivers().find(
|
||||
(transceiver) => transceiver.sender === peerData.screenVideoSender
|
||||
);
|
||||
|
||||
if (screenVideoTransceiver && matchesTransceiver(event.transceiver, screenVideoTransceiver)) {
|
||||
return true;
|
||||
}
|
||||
|
||||
const cameraVideoTransceiver = peerData.connection.getTransceivers().find(
|
||||
(transceiver) => transceiver.sender === peerData.videoSender
|
||||
);
|
||||
|
||||
if (cameraVideoTransceiver) {
|
||||
return !matchesTransceiver(event.transceiver, cameraVideoTransceiver);
|
||||
}
|
||||
|
||||
const videoTransceivers = peerData.connection.getTransceivers().filter((transceiver) =>
|
||||
transceiver.receiver.track?.kind === TRACK_KIND_VIDEO || transceiver === event.transceiver
|
||||
);
|
||||
const transceiverIndex = videoTransceivers.findIndex((transceiver) =>
|
||||
transceiver === event.transceiver || (!!transceiver.mid && transceiver.mid === event.transceiver.mid)
|
||||
);
|
||||
|
||||
return transceiverIndex > 0;
|
||||
}
|
||||
|
||||
function rememberIncomingStreamIds(
|
||||
state: PeerConnectionManagerContext['state'],
|
||||
event: RTCTrackEvent,
|
||||
@@ -313,6 +397,7 @@ function rememberIncomingStreamIds(
|
||||
options: {
|
||||
isScreenAudio: boolean;
|
||||
isVoiceAudio: boolean;
|
||||
isCameraTrack: boolean;
|
||||
isScreenTrack: boolean;
|
||||
}
|
||||
): void {
|
||||
@@ -328,10 +413,21 @@ function rememberIncomingStreamIds(
|
||||
return;
|
||||
}
|
||||
|
||||
if (event.track.kind === TRACK_KIND_VIDEO || options.isScreenAudio || options.isScreenTrack) {
|
||||
if (options.isScreenAudio || options.isScreenTrack) {
|
||||
incomingStreamIds.forEach((streamId) => {
|
||||
peerData.remoteScreenShareStreamIds.add(streamId);
|
||||
peerData.remoteVoiceStreamIds.delete(streamId);
|
||||
peerData.remoteCameraStreamIds.delete(streamId);
|
||||
});
|
||||
|
||||
return;
|
||||
}
|
||||
|
||||
if (options.isCameraTrack) {
|
||||
incomingStreamIds.forEach((streamId) => {
|
||||
peerData.remoteCameraStreamIds.add(streamId);
|
||||
peerData.remoteVoiceStreamIds.delete(streamId);
|
||||
peerData.remoteScreenShareStreamIds.delete(streamId);
|
||||
});
|
||||
|
||||
return;
|
||||
@@ -340,6 +436,7 @@ function rememberIncomingStreamIds(
|
||||
if (options.isVoiceAudio) {
|
||||
incomingStreamIds.forEach((streamId) => {
|
||||
peerData.remoteVoiceStreamIds.add(streamId);
|
||||
peerData.remoteCameraStreamIds.delete(streamId);
|
||||
peerData.remoteScreenShareStreamIds.delete(streamId);
|
||||
});
|
||||
}
|
||||
|
||||
@@ -58,6 +58,7 @@ export class WebRTCService implements OnDestroy {
|
||||
readonly connectedPeers = this.state.connectedPeers;
|
||||
readonly isMuted = this.state.isMuted;
|
||||
readonly isDeafened = this.state.isDeafened;
|
||||
readonly isCameraEnabled = this.state.isCameraEnabled;
|
||||
readonly isScreenSharing = this.state.isScreenSharing;
|
||||
readonly isNoiseReductionEnabled = this.state.isNoiseReductionEnabled;
|
||||
readonly screenStream = this.state.screenStream;
|
||||
@@ -149,7 +150,8 @@ export class WebRTCService implements OnDestroy {
|
||||
getVoiceStateSnapshot: (): VoiceStateSnapshot => this.voiceSessionController.getCurrentVoiceState(),
|
||||
getIdentifyCredentials: () => this.signalingTransportHandler.getIdentifyCredentials(),
|
||||
getLocalPeerId: (): string => this.state.getLocalPeerId(),
|
||||
isScreenSharingActive: (): boolean => this.state.isScreenSharingActive()
|
||||
isScreenSharingActive: (): boolean => this.state.isScreenSharingActive(),
|
||||
isCameraEnabled: (): boolean => this.state.isCameraEnabledActive()
|
||||
});
|
||||
|
||||
this.mediaManager.setCallbacks({
|
||||
@@ -157,7 +159,8 @@ export class WebRTCService implements OnDestroy {
|
||||
renegotiate: (peerId: string): Promise<void> => this.peerMediaFacade.renegotiate(peerId),
|
||||
broadcastMessage: (event: ChatEvent): void => this.peerMediaFacade.broadcastMessage(event),
|
||||
getIdentifyOderId: (): string => this.signalingTransportHandler.getIdentifyOderId(),
|
||||
getIdentifyDisplayName: (): string => this.signalingTransportHandler.getIdentifyDisplayName()
|
||||
getIdentifyDisplayName: (): string => this.signalingTransportHandler.getIdentifyDisplayName(),
|
||||
setCameraEnabled: (enabled: boolean): void => this.state.setCameraEnabled(enabled)
|
||||
});
|
||||
|
||||
this.screenShareManager.setCallbacks({
|
||||
@@ -434,6 +437,16 @@ export class WebRTCService implements OnDestroy {
|
||||
return this.peerMediaFacade.getRemoteVoiceStream(peerId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the remote camera stream for a connected peer.
|
||||
*
|
||||
* @param peerId - The remote peer whose camera stream to retrieve.
|
||||
* @returns The stream, or `null` if the peer has no active camera video.
|
||||
*/
|
||||
getRemoteCameraStream(peerId: string): MediaStream | null {
|
||||
return this.peerMediaFacade.getRemoteCameraStream(peerId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the remote screen-share stream for a connected peer.
|
||||
*
|
||||
@@ -456,6 +469,15 @@ export class WebRTCService implements OnDestroy {
|
||||
return this.peerMediaFacade.getLocalStream();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the current local camera stream.
|
||||
*
|
||||
* @returns The local camera {@link MediaStream}, or `null` if the camera is disabled.
|
||||
*/
|
||||
getLocalCameraStream(): MediaStream | null {
|
||||
return this.peerMediaFacade.getLocalCameraStream();
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the raw local microphone stream before gain / RNNoise processing.
|
||||
*
|
||||
@@ -477,6 +499,25 @@ export class WebRTCService implements OnDestroy {
|
||||
/** Stop local voice capture and remove audio senders from peers. */
|
||||
disableVoice(): void {
|
||||
this.voiceSessionController.disableVoice();
|
||||
this.state.setCameraEnabled(false);
|
||||
}
|
||||
|
||||
/**
|
||||
* Start sharing the local camera video with peers in the active voice channel.
|
||||
*
|
||||
* @returns The camera {@link MediaStream}.
|
||||
*/
|
||||
async enableCamera(): Promise<MediaStream> {
|
||||
const stream = await this.mediaManager.enableCamera();
|
||||
|
||||
this.state.setCameraEnabled(this.mediaManager.getIsCameraActive());
|
||||
return stream;
|
||||
}
|
||||
|
||||
/** Stop local camera capture and remove camera tracks from peers. */
|
||||
disableCamera(): void {
|
||||
this.mediaManager.disableCamera();
|
||||
this.state.setCameraEnabled(this.mediaManager.getIsCameraActive());
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -614,6 +655,7 @@ export class WebRTCService implements OnDestroy {
|
||||
this.peerMediaFacade.closeAllPeers();
|
||||
this.state.clearPeerViewState();
|
||||
this.voiceSessionController.resetVoiceSession();
|
||||
this.state.setCameraEnabled(false);
|
||||
this.peerMediaFacade.stopScreenShare();
|
||||
this.state.clearScreenShareState();
|
||||
}
|
||||
|
||||
@@ -89,6 +89,7 @@ export const P2P_TYPE_STATE_REQUEST = 'state-request';
|
||||
export const P2P_TYPE_VOICE_STATE_REQUEST = 'voice-state-request';
|
||||
export const P2P_TYPE_VOICE_STATE = 'voice-state';
|
||||
export const P2P_TYPE_SCREEN_STATE = 'screen-state';
|
||||
export const P2P_TYPE_CAMERA_STATE = 'camera-state';
|
||||
export const P2P_TYPE_SCREEN_SHARE_REQUEST = 'screen-share-request';
|
||||
export const P2P_TYPE_SCREEN_SHARE_STOP = 'screen-share-stop';
|
||||
export const P2P_TYPE_PING = 'ping';
|
||||
|
||||
@@ -22,6 +22,8 @@ export interface PeerData {
|
||||
screenAudioSender?: RTCRtpSender;
|
||||
/** Known remote stream ids that carry the peer's voice audio. */
|
||||
remoteVoiceStreamIds: Set<string>;
|
||||
/** Known remote stream ids that carry the peer's camera video. */
|
||||
remoteCameraStreamIds: Set<string>;
|
||||
/** Known remote stream ids that carry the peer's screen-share audio/video. */
|
||||
remoteScreenShareStreamIds: Set<string>;
|
||||
}
|
||||
|
||||
@@ -14,6 +14,7 @@ export class WebRtcStateController {
|
||||
readonly connectedPeers: Signal<string[]>;
|
||||
readonly isMuted: Signal<boolean>;
|
||||
readonly isDeafened: Signal<boolean>;
|
||||
readonly isCameraEnabled: Signal<boolean>;
|
||||
readonly isScreenSharing: Signal<boolean>;
|
||||
readonly isNoiseReductionEnabled: Signal<boolean>;
|
||||
readonly screenStream: Signal<MediaStream | null>;
|
||||
@@ -31,6 +32,7 @@ export class WebRtcStateController {
|
||||
private readonly _connectedPeers = signal<string[]>([]);
|
||||
private readonly _isMuted = signal(false);
|
||||
private readonly _isDeafened = signal(false);
|
||||
private readonly _isCameraEnabled = signal(false);
|
||||
private readonly _isScreenSharing = signal(false);
|
||||
private readonly _isNoiseReductionEnabled = signal(false);
|
||||
private readonly _screenStreamSignal = signal<MediaStream | null>(null);
|
||||
@@ -49,6 +51,7 @@ export class WebRtcStateController {
|
||||
this.connectedPeers = computed(() => this._connectedPeers());
|
||||
this.isMuted = computed(() => this._isMuted());
|
||||
this.isDeafened = computed(() => this._isDeafened());
|
||||
this.isCameraEnabled = computed(() => this._isCameraEnabled());
|
||||
this.isScreenSharing = computed(() => this._isScreenSharing());
|
||||
this.isNoiseReductionEnabled = computed(() => this._isNoiseReductionEnabled());
|
||||
this.screenStream = computed(() => this._screenStreamSignal());
|
||||
@@ -89,6 +92,10 @@ export class WebRtcStateController {
|
||||
return this._isScreenSharing();
|
||||
}
|
||||
|
||||
isCameraEnabledActive(): boolean {
|
||||
return this._isCameraEnabled();
|
||||
}
|
||||
|
||||
setCurrentServer(serverId: string): void {
|
||||
this.activeServerId = serverId;
|
||||
}
|
||||
@@ -105,6 +112,10 @@ export class WebRtcStateController {
|
||||
this._isDeafened.set(deafened);
|
||||
}
|
||||
|
||||
setCameraEnabled(enabled: boolean): void {
|
||||
this._isCameraEnabled.set(enabled);
|
||||
}
|
||||
|
||||
setNoiseReductionEnabled(enabled: boolean): void {
|
||||
this._isNoiseReductionEnabled.set(enabled);
|
||||
}
|
||||
|
||||
@@ -73,6 +73,10 @@ export class PeerMediaFacade {
|
||||
return this.dependencies.peerManager.remotePeerVoiceStreams.get(peerId) ?? null;
|
||||
}
|
||||
|
||||
getRemoteCameraStream(peerId: string): MediaStream | null {
|
||||
return this.dependencies.peerManager.remotePeerCameraStreams.get(peerId) ?? null;
|
||||
}
|
||||
|
||||
getRemoteScreenShareStream(peerId: string): MediaStream | null {
|
||||
return this.dependencies.peerManager.remotePeerScreenShareStreams.get(peerId) ?? null;
|
||||
}
|
||||
@@ -89,6 +93,10 @@ export class PeerMediaFacade {
|
||||
return this.dependencies.mediaManager.getLocalStream();
|
||||
}
|
||||
|
||||
getLocalCameraStream(): MediaStream | null {
|
||||
return this.dependencies.mediaManager.getLocalCameraStream();
|
||||
}
|
||||
|
||||
getRawMicStream(): MediaStream | null {
|
||||
return this.dependencies.mediaManager.getRawMicStream();
|
||||
}
|
||||
|
||||
@@ -59,6 +59,7 @@ export interface ChatEventBase {
|
||||
permissions?: Partial<RoomPermissions>;
|
||||
voiceState?: Partial<VoiceState>;
|
||||
isScreenSharing?: boolean;
|
||||
isCameraEnabled?: boolean;
|
||||
icon?: string;
|
||||
iconUpdatedAt?: number;
|
||||
role?: UserRole;
|
||||
@@ -216,6 +217,11 @@ export interface ScreenStateEvent extends ChatEventBase {
|
||||
isScreenSharing: boolean;
|
||||
}
|
||||
|
||||
export interface CameraStateEvent extends ChatEventBase {
|
||||
type: 'camera-state';
|
||||
isCameraEnabled: boolean;
|
||||
}
|
||||
|
||||
export interface VoiceStateRequestEvent extends ChatEventBase {
|
||||
type: 'voice-state-request';
|
||||
}
|
||||
@@ -332,6 +338,7 @@ export type ChatEvent =
|
||||
| VoiceStateEvent
|
||||
| VoiceChannelMoveEvent
|
||||
| ScreenStateEvent
|
||||
| CameraStateEvent
|
||||
| VoiceStateRequestEvent
|
||||
| StateRequestEvent
|
||||
| ScreenShareRequestEvent
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import type { VoiceState, ScreenShareState } from './voice-state.models';
|
||||
import type { CameraState, VoiceState, ScreenShareState } from './voice-state.models';
|
||||
|
||||
export type UserStatus = 'online' | 'away' | 'busy' | 'offline';
|
||||
|
||||
@@ -19,6 +19,7 @@ export interface User {
|
||||
isRoomOwner?: boolean;
|
||||
voiceState?: VoiceState;
|
||||
screenShareState?: ScreenShareState;
|
||||
cameraState?: CameraState;
|
||||
}
|
||||
|
||||
export interface RoomMember {
|
||||
|
||||
@@ -15,3 +15,7 @@ export interface ScreenShareState {
|
||||
sourceId?: string;
|
||||
sourceName?: string;
|
||||
}
|
||||
|
||||
export interface CameraState {
|
||||
isEnabled: boolean;
|
||||
}
|
||||
|
||||
@@ -1067,6 +1067,8 @@ export class RoomsEffects {
|
||||
return this.handleVoiceChannelMove(event, currentRoom, savedRooms, currentUser ?? null);
|
||||
case 'screen-state':
|
||||
return currentRoom ? this.handleVoiceOrScreenState(event, allUsers, currentUser ?? null, 'screen') : EMPTY;
|
||||
case 'camera-state':
|
||||
return currentRoom ? this.handleVoiceOrScreenState(event, allUsers, currentUser ?? null, 'camera') : EMPTY;
|
||||
case 'server-state-request':
|
||||
return this.handleServerStateRequest(event, currentRoom, savedRooms);
|
||||
case 'server-state-full':
|
||||
@@ -1091,7 +1093,12 @@ export class RoomsEffects {
|
||||
)
|
||||
);
|
||||
|
||||
private handleVoiceOrScreenState(event: ChatEvent, allUsers: User[], currentUser: User | null, kind: 'voice' | 'screen') {
|
||||
private handleVoiceOrScreenState(
|
||||
event: ChatEvent,
|
||||
allUsers: User[],
|
||||
currentUser: User | null,
|
||||
kind: 'voice' | 'screen' | 'camera'
|
||||
) {
|
||||
const userId: string | undefined = event.fromPeerId ?? event.oderId;
|
||||
|
||||
if (!userId)
|
||||
@@ -1157,10 +1164,35 @@ export class RoomsEffects {
|
||||
voiceState: vs }));
|
||||
}
|
||||
|
||||
// screen-state
|
||||
const isSharing = event.isScreenSharing as boolean | undefined;
|
||||
if (kind === 'screen') {
|
||||
const isSharing = event.isScreenSharing as boolean | undefined;
|
||||
|
||||
if (isSharing === undefined)
|
||||
if (isSharing === undefined)
|
||||
return EMPTY;
|
||||
|
||||
if (!userExists) {
|
||||
return of(
|
||||
UsersActions.userJoined({
|
||||
user: buildSignalingUser(
|
||||
{ oderId: userId,
|
||||
displayName: event.displayName || 'User' },
|
||||
{ screenShareState: { isSharing } }
|
||||
)
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
return of(
|
||||
UsersActions.updateScreenShareState({
|
||||
userId,
|
||||
screenShareState: { isSharing }
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
const isCameraEnabled = event.isCameraEnabled as boolean | undefined;
|
||||
|
||||
if (isCameraEnabled === undefined)
|
||||
return EMPTY;
|
||||
|
||||
if (!userExists) {
|
||||
@@ -1169,16 +1201,16 @@ export class RoomsEffects {
|
||||
user: buildSignalingUser(
|
||||
{ oderId: userId,
|
||||
displayName: event.displayName || 'User' },
|
||||
{ screenShareState: { isSharing } }
|
||||
{ cameraState: { isEnabled: isCameraEnabled } }
|
||||
)
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
return of(
|
||||
UsersActions.updateScreenShareState({
|
||||
UsersActions.updateCameraState({
|
||||
userId,
|
||||
screenShareState: { isSharing }
|
||||
cameraState: { isEnabled: isCameraEnabled }
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
@@ -10,7 +10,8 @@ import {
|
||||
User,
|
||||
BanEntry,
|
||||
VoiceState,
|
||||
ScreenShareState
|
||||
ScreenShareState,
|
||||
CameraState
|
||||
} from '../../shared-kernel';
|
||||
|
||||
export const UsersActions = createActionGroup({
|
||||
@@ -52,6 +53,7 @@ export const UsersActions = createActionGroup({
|
||||
'Update Host': props<{ userId: string }>(),
|
||||
|
||||
'Update Voice State': props<{ userId: string; voiceState: Partial<VoiceState> }>(),
|
||||
'Update Screen Share State': props<{ userId: string; screenShareState: Partial<ScreenShareState> }>()
|
||||
'Update Screen Share State': props<{ userId: string; screenShareState: Partial<ScreenShareState> }>(),
|
||||
'Update Camera State': props<{ userId: string; cameraState: Partial<CameraState> }>()
|
||||
}
|
||||
});
|
||||
|
||||
@@ -212,6 +212,23 @@ export const usersReducer = createReducer(
|
||||
state
|
||||
);
|
||||
}),
|
||||
on(UsersActions.updateCameraState, (state, { userId, cameraState }) => {
|
||||
const prev = state.entities[userId]?.cameraState || {
|
||||
isEnabled: false
|
||||
};
|
||||
|
||||
return usersAdapter.updateOne(
|
||||
{
|
||||
id: userId,
|
||||
changes: {
|
||||
cameraState: {
|
||||
isEnabled: cameraState.isEnabled ?? prev.isEnabled
|
||||
}
|
||||
}
|
||||
},
|
||||
state
|
||||
);
|
||||
}),
|
||||
on(UsersActions.syncUsers, (state, { users }) =>
|
||||
usersAdapter.upsertMany(users, state)
|
||||
),
|
||||
|
||||
Reference in New Issue
Block a user