move prefetch sounds to fix hot reload

This commit is contained in:
Half-Shot
2024-12-09 09:49:24 +00:00
parent ee7ef941f5
commit 7c3cba74ba
7 changed files with 81 additions and 66 deletions

View File

@@ -40,8 +40,9 @@ import {
CallEventAudioRenderer,
MAX_PARTICIPANT_COUNT_FOR_SOUND,
} from "./CallEventAudioRenderer";
import { prefetchSounds, useAudioContext } from "../useAudioContext";
import { useAudioContext } from "../useAudioContext";
import { TestReactionsWrapper } from "../utils/testReactions";
import { prefetchSounds } from "../soundUtils";
const localRtcMember = mockRtcMembership("@carol:example.org", "CCCC");
const local = mockMatrixRoomMember(localRtcMember);
@@ -53,6 +54,7 @@ const aliceId = `${alice.userId}:${aliceRtcMember.deviceId}`;
const aliceParticipant = mockRemoteParticipant({ identity: aliceId });
vitest.mock("../useAudioContext");
vitest.mock("../soundUtils");
afterEach(() => {
vitest.resetAllMocks();

View File

@@ -15,7 +15,8 @@ import leftCallSoundMp3 from "../sound/left_call.mp3";
import leftCallSoundOgg from "../sound/left_call.ogg";
import handSoundOgg from "../sound/raise_hand.ogg?url";
import handSoundMp3 from "../sound/raise_hand.mp3?url";
import { prefetchSounds, useAudioContext } from "../useAudioContext";
import { useAudioContext } from "../useAudioContext";
import { prefetchSounds } from "../soundUtils";
import { useReactions } from "../useReactions";
import { useLatest } from "../useLatest";

View File

@@ -29,8 +29,9 @@ import {
playReactionsSound,
soundEffectVolumeSetting,
} from "../settings/settings";
import { prefetchSounds, useAudioContext } from "../useAudioContext";
import { useAudioContext } from "../useAudioContext";
import { GenericReaction, ReactionSet } from "../reactions";
import { prefetchSounds } from "../soundUtils";
const memberUserIdAlice = "@alice:example.org";
const memberUserIdBob = "@bob:example.org";
@@ -60,6 +61,7 @@ function TestComponent({
}
vitest.mock("../useAudioContext");
vitest.mock("../soundUtils");
afterEach(() => {
vitest.resetAllMocks();

View File

@@ -10,7 +10,8 @@ import { ReactNode, useDeferredValue, useEffect, useState } from "react";
import { useReactions } from "../useReactions";
import { playReactionsSound, useSetting } from "../settings/settings";
import { GenericReaction, ReactionSet } from "../reactions";
import { prefetchSounds, useAudioContext } from "../useAudioContext";
import { useAudioContext } from "../useAudioContext";
import { prefetchSounds } from "../soundUtils";
import { useLatest } from "../useLatest";
const soundMap = Object.fromEntries([
@@ -66,5 +67,5 @@ export function ReactionsAudioRenderer(): ReactNode {
}
}
}, [audioEngineRef, shouldPlay, oldReactions, reactions]);
return <></>;
return null;
}

56
src/soundUtils.ts Normal file
View File

@@ -0,0 +1,56 @@
import { logger } from "matrix-js-sdk/src/logger";
// Where to find a sound on disk: an ogg file is mandatory, with an
// optional mp3 fallback for browsers without ogg support.
type SoundDefinition = { mp3?: string; ogg: string };

// A promise resolving to the raw (still undecoded) audio bytes for each
// named sound, as produced by prefetchSounds in this module. Decoding
// into AudioBuffers happens later, once an AudioContext exists.
export type PrefetchedSounds<S extends string> = Promise<
Record<S, ArrayBuffer>
>;
/**
 * Determine the best format we can use to play our sounds
 * through. We prefer ogg support if possible, but will fall
 * back to MP3.
 * @returns "ogg" if the browser is likely to support it, or "mp3" otherwise.
 */
function getPreferredAudioFormat(): "ogg" | "mp3" {
  const a = document.createElement("audio");
  // canPlayType answers "", "maybe" or "probably". With a bare MIME type
  // (no codecs) browsers usually answer "maybe", but accept "probably"
  // too so we don't needlessly fall back to MP3 on browsers that are
  // confident about ogg support.
  if (a.canPlayType("audio/ogg") !== "") {
    return "ogg";
  }
  // Otherwise just assume MP3, as that has a chance of being more widely supported.
  return "mp3";
}

// Computed once at module load: the browser's codec support won't change
// for the lifetime of the page.
const preferredFormat = getPreferredAudioFormat();
/**
 * Prefetch sounds to be used by the AudioContext. This should
 * be called outside the scope of a component to ensure the
 * sounds load ahead of time.
 * @param sounds A set of sound files that may be played.
 * @returns A map of sound files to buffers.
 */
export async function prefetchSounds<S extends string>(
  sounds: Record<S, SoundDefinition>,
): PrefetchedSounds<S> {
  const fetched: Record<string, ArrayBuffer> = {};

  // Fetch a single sound and stash its raw bytes under its name.
  const fetchOne = async (
    name: string,
    file: SoundDefinition,
  ): Promise<void> => {
    // Honour the preferred format, but if no mp3 variant exists fall
    // back to the (always present) ogg file.
    const url = preferredFormat === "ogg" ? file.ogg : (file.mp3 ?? file.ogg);
    const response = await fetch(url);
    if (!response.ok) {
      // If the sound doesn't load, it's not the end of the world. We won't play
      // the sound when requested, but it's better than failing the whole application.
      logger.warn(`Could not load sound ${name}, response was not okay`);
      return;
    }
    // Store the undecoded bytes; decoding into an AudioBuffer happens
    // later, once an AudioContext is available.
    fetched[name] = await response.arrayBuffer();
  };

  // Load every sound in parallel.
  const entries = Object.entries(sounds) as [string, SoundDefinition][];
  await Promise.all(entries.map(([name, file]) => fetchOne(name, file)));
  return fetched as Record<S, ArrayBuffer>;
}

View File

@@ -15,11 +15,13 @@ import { deviceStub, MediaDevicesContext } from "./livekit/MediaDevicesContext";
import { useAudioContext } from "./useAudioContext";
import { soundEffectVolumeSetting } from "./settings/settings";
const staticSounds = Promise.resolve({
aSound: new ArrayBuffer(0),
});
const TestComponent: FC = () => {
const audioCtx = useAudioContext({
sounds: Promise.resolve({
aSound: new ArrayBuffer(0),
}),
sounds: staticSounds,
latencyHint: "balanced",
});
if (!audioCtx) {

View File

@@ -13,8 +13,7 @@ import {
useSetting,
} from "./settings/settings";
import { useMediaDevices } from "./livekit/MediaDevicesContext";
type SoundDefinition = { mp3?: string; ogg: string };
import { PrefetchedSounds } from "./soundUtils";
/**
* Play a sound though a given AudioContext. Will take
@@ -37,58 +36,12 @@ function playSound(
src.start();
}
/**
* Determine the best format we can use to play our sounds
* through. We prefer ogg support if possible, but will fall
* back to MP3.
* @returns "ogg" if the browser is likely to support it, or "mp3" otherwise.
*/
function getPreferredAudioFormat(): "ogg" | "mp3" {
const a = document.createElement("audio");
if (a.canPlayType("audio/ogg") === "maybe") {
return "ogg";
}
// Otherwise just assume MP3, as that has a chance of being more widely supported.
return "mp3";
}
const preferredFormat = getPreferredAudioFormat();
type PrefetchedSounds<S extends string> = Promise<Record<S, ArrayBuffer>>;
/**
* Prefetch sounds to be used by the AudioContext. This should
* be called outside the scope of a component to ensure the
* sounds load ahead of time.
* @param sounds A set of sound files that may be played.
* @returns A map of sound files to buffers.
*/
export async function prefetchSounds<S extends string>(
sounds: Record<S, SoundDefinition>,
): PrefetchedSounds<S> {
const buffers: Record<string, ArrayBuffer> = {};
await Promise.all(
Object.entries(sounds).map(async ([name, file]) => {
const { mp3, ogg } = file as SoundDefinition;
// Use preferred format, fallback to ogg if no mp3 is provided.
// Load an audio file
const response = await fetch(
preferredFormat === "ogg" ? ogg : (mp3 ?? ogg),
);
if (!response.ok) {
// If the sound doesn't load, it's not the end of the world. We won't play
// the sound when requested, but it's better than failing the whole application.
logger.warn(`Could not load sound ${name}, resposne was not okay`);
return;
}
// Decode it
buffers[name] = await response.arrayBuffer();
}),
);
return buffers as Record<S, ArrayBuffer>;
}
interface Props<S extends string> {
/**
* The sounds to play. If no sounds should be played then
* this can be set to null, which will prevent the audio
* context from being created.
*/
sounds: PrefetchedSounds<S> | null;
latencyHint: AudioContextLatencyCategory;
}
@@ -112,8 +65,8 @@ export function useAudioContext<S extends string>(
const [audioBuffers, setAudioBuffers] = useState<Record<S, AudioBuffer>>();
useEffect(() => {
const soundList = props.sounds;
if (!soundList) {
const sounds = props.sounds;
if (!sounds) {
return;
}
const ctx = new AudioContext({
@@ -126,9 +79,7 @@ export function useAudioContext<S extends string>(
// close during this process, so it's okay if it throws.
(async (): Promise<void> => {
const buffers: Record<string, AudioBuffer> = {};
for (const [name, buffer] of Object.entries<ArrayBuffer>(
await soundList,
)) {
for (const [name, buffer] of Object.entries<ArrayBuffer>(await sounds)) {
const audioBuffer = await ctx.decodeAudioData(buffer.slice(0));
buffers[name] = audioBuffer;
}