Add notification sounds

This commit is contained in:
2026-03-03 04:07:33 +01:00
parent 8315df42fc
commit 94f9a9f2ed
12 changed files with 283 additions and 45 deletions

View File

@@ -50,6 +50,11 @@
{
"glob": "**/*",
"input": "public"
},
{
"glob": "**/*",
"input": "src/assets",
"output": "assets"
}
],
"styles": [

View File

@@ -1,3 +1,4 @@
export * from './notification-audio.service';
export * from './platform.service';
export * from './browser-database.service';
export * from './electron-database.service';

View File

@@ -0,0 +1,106 @@
import { Injectable, signal } from '@angular/core';
/**
 * All known sound effects shipped with the application.
 *
 * Each key maps to a file in `src/assets/audio/`.
 */
export enum AppSound {
  Joining = 'joining',
  Leave = 'leave',
  Notification = 'notification',
}
/** Path prefix for audio assets (served from the `assets/audio/` folder). */
const AUDIO_BASE = '/assets/audio';
/** File extension used for all sound-effect assets. */
const AUDIO_EXT = 'wav';
/** localStorage key for persisting notification volume. */
const STORAGE_KEY_NOTIFICATION_VOLUME = 'metoyou_notification_volume';
/** Default notification volume (0–1). */
const DEFAULT_VOLUME = 0.2;
/** Clamp a volume value into the valid [0, 1] range. */
function clampVolume(volume: number): number {
  return Math.max(0, Math.min(1, volume));
}
/**
 * A lightweight audio playback service that pre-loads the shipped
 * sound-effect files and lets any component / service trigger them
 * by name.
 *
 * Usage:
 * ```ts
 * audioService.play(AppSound.Joining);
 * ```
 */
@Injectable({ providedIn: 'root' })
export class NotificationAudioService {
  /** Pre-loaded audio buffers keyed by {@link AppSound}. */
  private readonly cache = new Map<AppSound, HTMLAudioElement>();
  /** Reactive notification volume (0–1), persisted to localStorage. */
  readonly notificationVolume = signal(this.loadVolume());
  constructor() {
    this.preload();
  }
  /** Eagerly create (and start loading) an {@link HTMLAudioElement} for every known sound. */
  private preload(): void {
    for (const sound of Object.values(AppSound)) {
      const audio = new Audio(`${AUDIO_BASE}/${sound}.${AUDIO_EXT}`);
      audio.preload = 'auto';
      this.cache.set(sound, audio);
    }
  }
  /** Read persisted volume from localStorage, falling back to the default. */
  private loadVolume(): number {
    try {
      const raw = localStorage.getItem(STORAGE_KEY_NOTIFICATION_VOLUME);
      if (raw !== null) {
        // Guard against corrupted / non-numeric stored values.
        // Number.isNaN avoids the implicit coercion of the global isNaN.
        const parsed = Number.parseFloat(raw);
        if (!Number.isNaN(parsed)) return clampVolume(parsed);
      }
    } catch {
      // localStorage may be unavailable (private browsing, storage disabled);
      // treat as "nothing persisted" and use the default below.
    }
    return DEFAULT_VOLUME;
  }
  /**
   * Update the notification volume and persist it.
   *
   * @param volume - A value between 0 (silent) and 1 (full). Out-of-range
   *   values are clamped.
   */
  setNotificationVolume(volume: number): void {
    const clamped = clampVolume(volume);
    this.notificationVolume.set(clamped);
    try {
      localStorage.setItem(STORAGE_KEY_NOTIFICATION_VOLUME, String(clamped));
    } catch {
      // Persistence is best-effort; the in-memory signal is still updated.
    }
  }
  /**
   * Play a sound effect at the current notification volume.
   *
   * If playback fails (e.g. browser autoplay policy) the error is
   * silently swallowed — sound effects are non-critical.
   *
   * @param sound - The {@link AppSound} to play.
   * @param volumeOverride - Optional explicit volume (0–1). When omitted
   *   the persisted {@link notificationVolume} is used.
   */
  play(sound: AppSound, volumeOverride?: number): void {
    const cached = this.cache.get(sound);
    if (!cached) return;
    const vol = volumeOverride ?? this.notificationVolume();
    if (vol === 0) return; // skip playback when muted
    // Clone so overlapping plays don't cut each other off.
    const clone = cached.cloneNode(true) as HTMLAudioElement;
    clone.volume = clampVolume(vol);
    clone.play().catch(() => {
      /* swallow autoplay errors */
    });
  }
}

View File

@@ -108,11 +108,21 @@ export class WebRTCService implements OnDestroy {
readonly onSignalingMessage = this.signalingMessage$.asObservable();
// Delegates to managers
get onMessageReceived(): Observable<ChatEvent> { return this.peerManager.messageReceived$.asObservable(); }
get onPeerConnected(): Observable<string> { return this.peerManager.peerConnected$.asObservable(); }
get onPeerDisconnected(): Observable<string> { return this.peerManager.peerDisconnected$.asObservable(); }
get onRemoteStream(): Observable<{ peerId: string; stream: MediaStream }> { return this.peerManager.remoteStream$.asObservable(); }
get onVoiceConnected(): Observable<void> { return this.mediaManager.voiceConnected$.asObservable(); }
get onMessageReceived(): Observable<ChatEvent> {
return this.peerManager.messageReceived$.asObservable();
}
get onPeerConnected(): Observable<string> {
return this.peerManager.peerConnected$.asObservable();
}
get onPeerDisconnected(): Observable<string> {
return this.peerManager.peerDisconnected$.asObservable();
}
get onRemoteStream(): Observable<{ peerId: string; stream: MediaStream }> {
return this.peerManager.remoteStream$.asObservable();
}
get onVoiceConnected(): Observable<void> {
return this.mediaManager.voiceConnected$.asObservable();
}
private readonly signalingManager: SignalingManager;
private readonly peerManager: PeerConnectionManager;
@@ -128,20 +138,11 @@ export class WebRTCService implements OnDestroy {
() => this.memberServerIds,
);
this.peerManager = new PeerConnectionManager(
this.logger,
null!,
);
this.peerManager = new PeerConnectionManager(this.logger, null!);
this.mediaManager = new MediaManager(
this.logger,
null!,
);
this.mediaManager = new MediaManager(this.logger, null!);
this.screenShareManager = new ScreenShareManager(
this.logger,
null!,
);
this.screenShareManager = new ScreenShareManager(this.logger, null!);
// Now wire up cross-references (all managers are instantiated)
this.peerManager.setCallbacks({
@@ -155,15 +156,18 @@ export class WebRTCService implements OnDestroy {
});
this.mediaManager.setCallbacks({
getActivePeers: (): Map<string, import('./webrtc').PeerData> => this.peerManager.activePeerConnections,
getActivePeers: (): Map<string, import('./webrtc').PeerData> =>
this.peerManager.activePeerConnections,
renegotiate: (peerId: string): Promise<void> => this.peerManager.renegotiate(peerId),
broadcastMessage: (event: any): void => this.peerManager.broadcastMessage(event),
getIdentifyOderId: (): string => this.lastIdentifyCredentials?.oderId || this._localPeerId(),
getIdentifyDisplayName: (): string => this.lastIdentifyCredentials?.displayName || DEFAULT_DISPLAY_NAME,
getIdentifyDisplayName: (): string =>
this.lastIdentifyCredentials?.displayName || DEFAULT_DISPLAY_NAME,
});
this.screenShareManager.setCallbacks({
getActivePeers: (): Map<string, import('./webrtc').PeerData> => this.peerManager.activePeerConnections,
getActivePeers: (): Map<string, import('./webrtc').PeerData> =>
this.peerManager.activePeerConnections,
getLocalMediaStream: (): MediaStream | null => this.mediaManager.getLocalStream(),
renegotiate: (peerId: string): Promise<void> => this.peerManager.renegotiate(peerId),
broadcastCurrentStates: (): void => this.peerManager.broadcastCurrentStates(),
@@ -188,7 +192,9 @@ export class WebRTCService implements OnDestroy {
this.signalingManager.heartbeatTick$.subscribe(() => this.peerManager.broadcastCurrentStates());
// Peer manager → connected peers signal
this.peerManager.connectedPeersChanged$.subscribe((peers: string[]) => this._connectedPeers.set(peers));
this.peerManager.connectedPeersChanged$.subscribe((peers: string[]) =>
this._connectedPeers.set(peers),
);
// Media manager → voice connected signal
this.mediaManager.voiceConnected$.subscribe(() => {
@@ -215,7 +221,10 @@ export class WebRTCService implements OnDestroy {
break;
case SIGNALING_TYPE_SERVER_USERS: {
this.logger.info('Server users', { count: Array.isArray(message.users) ? message.users.length : 0, serverId: message.serverId });
this.logger.info('Server users', {
count: Array.isArray(message.users) ? message.users.length : 0,
serverId: message.serverId,
});
if (message.users && Array.isArray(message.users)) {
message.users.forEach((user: { oderId: string; displayName: string }) => {
@@ -229,7 +238,10 @@ export class WebRTCService implements OnDestroy {
}
if (!healthy) {
this.logger.info('Create peer connection to existing user', { oderId: user.oderId, serverId: message.serverId });
this.logger.info('Create peer connection to existing user', {
oderId: user.oderId,
serverId: message.serverId,
});
this.peerManager.createPeerConnection(user.oderId, true);
this.peerManager.createAndSendOffer(user.oderId);
if (message.serverId) {
@@ -242,11 +254,18 @@ export class WebRTCService implements OnDestroy {
}
case SIGNALING_TYPE_USER_JOINED:
this.logger.info('User joined', { displayName: message.displayName, oderId: message.oderId });
this.logger.info('User joined', {
displayName: message.displayName,
oderId: message.oderId,
});
break;
case SIGNALING_TYPE_USER_LEFT:
this.logger.info('User left', { displayName: message.displayName, oderId: message.oderId, serverId: message.serverId });
this.logger.info('User left', {
displayName: message.displayName,
oderId: message.oderId,
serverId: message.serverId,
});
if (message.oderId) {
this.peerManager.removePeer(message.oderId);
this.peerServerMap.delete(message.oderId);
@@ -399,11 +418,19 @@ export class WebRTCService implements OnDestroy {
if (this.memberServerIds.has(serverId)) {
this.sendRawMessage({ type: SIGNALING_TYPE_VIEW_SERVER, serverId });
this.logger.info('Viewed server (already joined)', { serverId, userId, voiceConnected: this._isVoiceConnected() });
this.logger.info('Viewed server (already joined)', {
serverId,
userId,
voiceConnected: this._isVoiceConnected(),
});
} else {
this.memberServerIds.add(serverId);
this.sendRawMessage({ type: SIGNALING_TYPE_JOIN_SERVER, serverId });
this.logger.info('Joined new server via switch', { serverId, userId, voiceConnected: this._isVoiceConnected() });
this.logger.info('Joined new server via switch', {
serverId,
userId,
voiceConnected: this._isVoiceConnected(),
});
}
}
@@ -420,7 +447,9 @@ export class WebRTCService implements OnDestroy {
this.memberServerIds.delete(serverId);
this.sendRawMessage({ type: SIGNALING_TYPE_LEAVE_SERVER, serverId });
this.logger.info('Left server', { serverId });
if (this.memberServerIds.size === 0) { this.fullCleanup(); }
if (this.memberServerIds.size === 0) {
this.fullCleanup();
}
return;
}

View File

@@ -74,6 +74,32 @@
class="w-full h-1.5 bg-secondary rounded-lg appearance-none cursor-pointer accent-primary"
/>
</div>
<div>
<label class="block text-xs font-medium text-muted-foreground mb-1">
Notification Volume: {{ audioService.notificationVolume() * 100 | number: '1.0-0' }}%
</label>
<div class="flex items-center gap-2">
<input
type="range"
[value]="audioService.notificationVolume()"
(input)="onNotificationVolumeChange($event)"
min="0"
max="1"
step="0.01"
class="flex-1 h-1.5 bg-secondary rounded-lg appearance-none cursor-pointer accent-primary"
/>
<button
(click)="previewNotificationSound()"
class="px-2.5 py-1 text-xs bg-secondary text-foreground rounded-lg hover:bg-secondary/80 transition-colors flex-shrink-0"
title="Preview notification sound"
>
Test
</button>
</div>
<p class="text-[10px] text-muted-foreground/60 mt-1">
Controls join, leave &amp; notification sounds
</p>
</div>
</div>
</section>
@@ -157,9 +183,7 @@
<div class="flex items-center justify-between">
<div>
<p class="text-sm font-medium text-foreground">Voice Leveling</p>
<p class="text-xs text-muted-foreground">
Automatically equalise volume across speakers
</p>
<p class="text-xs text-muted-foreground">Automatically equalise volume across speakers</p>
</div>
<label class="relative inline-flex items-center cursor-pointer">
<input
@@ -248,9 +272,7 @@
<option value="slow" [selected]="voiceLeveling.speed() === 'slow'">
Slow (natural)
</option>
<option value="medium" [selected]="voiceLeveling.speed() === 'medium'">
Medium
</option>
<option value="medium" [selected]="voiceLeveling.speed() === 'medium'">Medium</option>
<option value="fast" [selected]="voiceLeveling.speed() === 'fast'">
Fast (aggressive)
</option>

View File

@@ -6,6 +6,10 @@ import { lucideMic, lucideHeadphones, lucideAudioLines, lucideActivity } from '@
import { WebRTCService } from '../../../../core/services/webrtc.service';
import { VoiceLevelingService } from '../../../../core/services/voice-leveling.service';
import {
NotificationAudioService,
AppSound,
} from '../../../../core/services/notification-audio.service';
import { STORAGE_KEY_VOICE_SETTINGS } from '../../../../core/constants';
interface AudioDevice {
@@ -30,6 +34,7 @@ interface AudioDevice {
export class VoiceSettingsComponent {
private webrtcService = inject(WebRTCService);
readonly voiceLeveling = inject(VoiceLevelingService);
readonly audioService = inject(NotificationAudioService);
inputDevices = signal<AudioDevice[]>([]);
outputDevices = signal<AudioDevice[]>([]);
@@ -184,4 +189,13 @@ export class VoiceSettingsComponent {
/** Flip the noise-gate setting to the opposite of its current value. */
onNoiseGateToggle(): void {
this.voiceLeveling.setNoiseGate(!this.voiceLeveling.noiseGate());
}
/** Handle input events from the notification-volume range slider (value 0–1). */
onNotificationVolumeChange(event: Event): void {
const input = event.target as HTMLInputElement;
this.audioService.setNotificationVolume(parseFloat(input.value));
}
/** Play the notification sound at the current volume so the user can preview it. */
previewNotificationSound(): void {
this.audioService.play(AppSound.Notification);
}
}

View File

@@ -190,6 +190,39 @@
</div>
<div class="space-y-4">
<!-- Notification Volume -->
<div>
<div class="flex items-center justify-between mb-2">
<div>
<p class="font-medium text-foreground">Notification volume</p>
<p class="text-sm text-muted-foreground">
Volume for join, leave, and notification sounds
</p>
</div>
<span class="text-sm font-medium text-muted-foreground tabular-nums w-10 text-right">
{{ audioService.notificationVolume() * 100 | number: '1.0-0' }}%
</span>
</div>
<div class="flex items-center gap-3">
<input
type="range"
min="0"
max="1"
step="0.01"
[ngModel]="audioService.notificationVolume()"
(ngModelChange)="onNotificationVolumeChange($event)"
class="flex-1 h-2 rounded-full appearance-none bg-secondary accent-primary cursor-pointer"
/>
<button
(click)="previewNotificationSound()"
class="px-3 py-1.5 text-sm bg-secondary text-foreground rounded-lg hover:bg-secondary/80 transition-colors"
title="Preview sound"
>
Test
</button>
</div>
</div>
<div class="flex items-center justify-between">
<div>
<p class="font-medium text-foreground">Noise reduction</p>

View File

@@ -18,6 +18,7 @@ import {
import { ServerDirectoryService } from '../../core/services/server-directory.service';
import { WebRTCService } from '../../core/services/webrtc.service';
import { NotificationAudioService, AppSound } from '../../core/services/notification-audio.service';
import { STORAGE_KEY_CONNECTION_SETTINGS, STORAGE_KEY_VOICE_SETTINGS } from '../../core/constants';
@Component({
@@ -47,6 +48,7 @@ export class SettingsComponent implements OnInit {
private serverDirectory = inject(ServerDirectoryService);
private webrtcService = inject(WebRTCService);
private router = inject(Router);
audioService = inject(NotificationAudioService);
servers = this.serverDirectory.servers;
isTesting = signal(false);
@@ -157,6 +159,16 @@ export class SettingsComponent implements OnInit {
}
}
/** Called when the notification volume slider changes. @param value - New volume (0–1). */
onNotificationVolumeChange(value: number): void {
this.audioService.setNotificationVolume(value);
}
/** Play a preview of the notification sound at the currently persisted volume. */
previewNotificationSound(): void {
this.audioService.play(AppSound.Notification);
}
/** Persist noise reduction preference (merged into existing voice settings) and apply immediately. */
async saveVoiceSettings(): Promise<void> {
// Merge into existing voice settings so we don't overwrite device/volume prefs

View File

@@ -23,6 +23,7 @@ import { DatabaseService } from '../../core/services/database.service';
import { WebRTCService } from '../../core/services/webrtc.service';
import { ServerDirectoryService } from '../../core/services/server-directory.service';
import { Room, RoomSettings, RoomPermissions, VoiceState } from '../../core/models';
import { NotificationAudioService, AppSound } from '../../core/services/notification-audio.service';
/** Build a minimal User object from signaling payload. */
function buildSignalingUser(
@@ -58,6 +59,7 @@ export class RoomsEffects {
private db = inject(DatabaseService);
private webrtc = inject(WebRTCService);
private serverDirectory = inject(ServerDirectoryService);
private audioService = inject(NotificationAudioService);
/** Loads all saved rooms from the local database. */
loadRooms$ = createEffect(() =>
@@ -546,6 +548,20 @@ export class RoomsEffects {
const vs = event.voiceState as Partial<VoiceState> | undefined;
if (!vs) return EMPTY;
// Detect voice-connection transitions to play join/leave sounds.
const weAreInVoice = this.webrtc.isVoiceConnected();
if (weAreInVoice) {
const existingUser = allUsers.find((u) => u.id === userId || u.oderId === userId) as any;
const wasConnected = existingUser?.voiceState?.isConnected ?? false;
const nowConnected = vs.isConnected ?? false;
if (!wasConnected && nowConnected) {
this.audioService.play(AppSound.Joining);
} else if (wasConnected && !nowConnected) {
this.audioService.play(AppSound.Leave);
}
}
if (!userExists) {
return of(
UsersActions.userJoined({

Binary file not shown.

BIN
src/assets/audio/leave.wav Normal file

Binary file not shown.

Binary file not shown.