Merge branch 'livekit' into valere/fix_connections_leaks

This commit is contained in:
Valere
2025-05-14 14:18:32 +02:00
25 changed files with 704 additions and 1040 deletions

View File

@@ -2,11 +2,11 @@
# https://docs.gradle.org/current/userguide/platforms.html#sub::toml-dependencies-format
[versions]
android_gradle_plugin = "8.8.0"
android_gradle_plugin = "8.10.0"
[libraries]
android_gradle_plugin = { module = "com.android.tools.build:gradle", version.ref = "android_gradle_plugin" }
[plugins]
android_library = { id = "com.android.library", version.ref = "android_gradle_plugin" }
maven_publish = { id = "com.vanniktech.maven.publish", version = "0.30.0" }
maven_publish = { id = "com.vanniktech.maven.publish", version = "0.31.0" }

View File

@@ -1,6 +1,6 @@
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-8.10.2-all.zip
distributionUrl=https\://services.gradle.org/distributions/gradle-8.14-all.zip
networkTimeout=10000
validateDistributionUrl=true
zipStoreBase=GRADLE_USER_HOME

View File

@@ -86,8 +86,7 @@ done
# shellcheck disable=SC2034
APP_BASE_NAME=${0##*/}
# Discard cd standard output in case $CDPATH is set (https://github.com/gradle/gradle/issues/25036)
APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s
' "$PWD" ) || exit
APP_HOME=$( cd -P "${APP_HOME:-./}" > /dev/null && printf '%s\n' "$PWD" ) || exit
# Use the maximum available, or set MAX_FD != -1 to use that value.
MAX_FD=maximum
@@ -115,7 +114,7 @@ case "$( uname )" in #(
NONSTOP* ) nonstop=true ;;
esac
CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
CLASSPATH="\\\"\\\""
# Determine the Java command to use to start the JVM.
@@ -206,7 +205,7 @@ fi
DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
# Collect all arguments for the java command:
# * DEFAULT_JVM_OPTS, JAVA_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,
# * DEFAULT_JVM_OPTS, JAVA_OPTS, and optsEnvironmentVar are not allowed to contain shell fragments,
# and any embedded shellness will be escaped.
# * For example: A user cannot expect ${Hostname} to be expanded, as it is an environment variable and will be
# treated as '${Hostname}' itself on the command line.
@@ -214,7 +213,7 @@ DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
set -- \
"-Dorg.gradle.appname=$APP_BASE_NAME" \
-classpath "$CLASSPATH" \
org.gradle.wrapper.GradleWrapperMain \
-jar "$APP_HOME/gradle/wrapper/gradle-wrapper.jar" \
"$@"
# Stop when "xargs" is not available.

View File

@@ -70,11 +70,11 @@ goto fail
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
set CLASSPATH=
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" -jar "%APP_HOME%\gradle\wrapper\gradle-wrapper.jar" %*
:end
@rem End local scope for the variables with windows NT shell

View File

@@ -27,6 +27,10 @@ export default {
// then Knip will flag it as a false positive
// https://github.com/webpro-nl/knip/issues/766
"@vector-im/compound-web",
// We need this so that TypeScript is happy with @livekit/track-processors.
// This might be a bug in the LiveKit repo but for now we fix it on the
// Element Call side.
"@types/dom-mediacapture-transform",
"matrix-widget-api",
],
ignoreExportsUsedInFile: true,

View File

@@ -70,6 +70,7 @@
"livekit_server_info": "LiveKit Server Info",
"livekit_sfu": "LiveKit SFU: {{url}}",
"matrix_id": "Matrix ID: {{id}}",
"mute_all_audio": "Mute all audio (participants, reactions, join sounds)",
"show_connection_stats": "Show connection statistics",
"show_non_member_tiles": "Show tiles for non-member media",
"url_params": "URL parameters",

View File

@@ -39,7 +39,7 @@
"@formatjs/intl-segmenter": "^11.7.3",
"@livekit/components-core": "^0.12.0",
"@livekit/components-react": "^2.0.0",
"@livekit/protocol": "^1.33.0",
"@livekit/protocol": "^1.38.0",
"@livekit/track-processors": "^0.5.5",
"@mediapipe/tasks-vision": "^0.10.18",
"@opentelemetry/api": "^1.4.0",
@@ -62,6 +62,7 @@
"@testing-library/react": "^16.0.0",
"@testing-library/user-event": "^14.5.1",
"@types/content-type": "^1.1.5",
"@types/dom-mediacapture-transform": "^0.1.11",
"@types/grecaptcha": "^3.0.9",
"@types/jsdom": "^21.1.7",
"@types/lodash-es": "^4.17.12",

View File

@@ -1,146 +0,0 @@
/* eslint-disable */
// The contents of this file below the line are copied from
// @types/dom-mediacapture-transform, which is inlined here into Element Call so
// that we can apply the patch to @types/dom-webcodecs found in
// ./dom-webcodecs.d.ts, which it depends on.
// (https://github.com/DefinitelyTyped/DefinitelyTyped/pull/72625)
// Once that PR is merged and released, we can remove this file and return to
// depending on @types/dom-mediacapture-transform.
// -----------------------------------------------------------------------------
// This project is licensed under the MIT license.
// Copyrights are respective of each contributor listed at the beginning of each definition file.
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// In general, these types are only available behind a command line flag or an origin trial in
// Chrome 90+.
// This API depends on WebCodecs.
// Versioning:
// Until the above-mentioned spec is finalized, the major version number is 0. Although not
// necessary for version 0, consider incrementing the minor version number for breaking changes.
// The following modify existing DOM types to allow defining type-safe APIs on audio and video tracks.
/** Specialize MediaStreamTrack so that we can refer specifically to an audio track. */
interface MediaStreamAudioTrack extends MediaStreamTrack {
readonly kind: "audio";
clone(): MediaStreamAudioTrack;
}
/** Specialize MediaStreamTrack so that we can refer specifically to a video track. */
interface MediaStreamVideoTrack extends MediaStreamTrack {
readonly kind: "video";
clone(): MediaStreamVideoTrack;
}
/** Assert that getAudioTracks and getVideoTracks return the tracks with the appropriate kind. */
interface MediaStream {
getAudioTracks(): MediaStreamAudioTrack[];
getVideoTracks(): MediaStreamVideoTrack[];
}
// The following were originally generated from the spec using
// https://github.com/microsoft/TypeScript-DOM-lib-generator, then heavily modified.
/**
* A track sink that is capable of exposing the unencoded frames from the track to a
* ReadableStream, and exposes a control channel for signals going in the oppposite direction.
*/
interface MediaStreamTrackProcessor<T extends AudioData | VideoFrame> {
/**
* Allows reading the frames flowing through the MediaStreamTrack provided to the constructor.
*/
readonly readable: ReadableStream<T>;
/** Allows sending control signals to the MediaStreamTrack provided to the constructor. */
readonly writableControl: WritableStream<MediaStreamTrackSignal>;
}
declare var MediaStreamTrackProcessor: {
prototype: MediaStreamTrackProcessor<any>;
/** Constructor overrides based on the type of track. */
new (
init: MediaStreamTrackProcessorInit & { track: MediaStreamAudioTrack },
): MediaStreamTrackProcessor<AudioData>;
new (
init: MediaStreamTrackProcessorInit & { track: MediaStreamVideoTrack },
): MediaStreamTrackProcessor<VideoFrame>;
};
interface MediaStreamTrackProcessorInit {
track: MediaStreamTrack;
/**
* If media frames are not read from MediaStreamTrackProcessor.readable quickly enough, the
* MediaStreamTrackProcessor will internally buffer up to maxBufferSize of the frames produced
* by the track. If the internal buffer is full, each time the track produces a new frame, the
* oldest frame in the buffer will be dropped and the new frame will be added to the buffer.
*/
maxBufferSize?: number | undefined;
}
/**
* Takes video frames as input, and emits control signals that result from subsequent processing.
*/
interface MediaStreamTrackGenerator<T extends AudioData | VideoFrame>
extends MediaStreamTrack {
/**
* Allows writing media frames to the MediaStreamTrackGenerator, which is itself a
* MediaStreamTrack. When a frame is written to writable, the frames close() method is
* automatically invoked, so that its internal resources are no longer accessible from
* JavaScript.
*/
readonly writable: WritableStream<T>;
/**
* Allows reading control signals sent from any sinks connected to the
* MediaStreamTrackGenerator.
*/
readonly readableControl: ReadableStream<MediaStreamTrackSignal>;
}
type MediaStreamAudioTrackGenerator = MediaStreamTrackGenerator<AudioData> &
MediaStreamAudioTrack;
type MediaStreamVideoTrackGenerator = MediaStreamTrackGenerator<VideoFrame> &
MediaStreamVideoTrack;
declare var MediaStreamTrackGenerator: {
prototype: MediaStreamTrackGenerator<any>;
/** Constructor overrides based on the type of track. */
new (
init: MediaStreamTrackGeneratorInit & {
kind: "audio";
signalTarget?: MediaStreamAudioTrack | undefined;
},
): MediaStreamAudioTrackGenerator;
new (
init: MediaStreamTrackGeneratorInit & {
kind: "video";
signalTarget?: MediaStreamVideoTrack | undefined;
},
): MediaStreamVideoTrackGenerator;
};
interface MediaStreamTrackGeneratorInit {
kind: MediaStreamTrackGeneratorKind;
/**
* (Optional) track to which the MediaStreamTrackGenerator will automatically forward control
* signals. If signalTarget is provided and signalTarget.kind and kind do not match, the
* MediaStreamTrackGenerators constructor will raise an exception.
*/
signalTarget?: MediaStreamTrack | undefined;
}
type MediaStreamTrackGeneratorKind = "audio" | "video";
type MediaStreamTrackSignalType = "request-frame";
interface MediaStreamTrackSignal {
signalType: MediaStreamTrackSignalType;
}

View File

@@ -1,745 +0,0 @@
/* eslint-disable */
// The contents of this file below the line are copied from
// @types/dom-webcodecs, which is inlined here into Element Call so that we can
// apply the patch https://github.com/DefinitelyTyped/DefinitelyTyped/pull/72625
// which is needed for TypeScript 5.8 compatibility. Once that PR is merged and
// released, we can remove this file and return to depending on
// @types/dom-webcodecs.
// -----------------------------------------------------------------------------
// This project is licensed under the MIT license.
// Copyrights are respective of each contributor listed at the beginning of each definition file.
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// Versioning:
// Until the WebCodecs spec is finalized, the major version number is 0. I have chosen to use minor
// version 1 to denote the API as defined by the IDL files from the Chromium repo at
// https://chromium.googlesource.com/chromium/src/+/main/third_party/blink/renderer/modules/webcodecs.
// Please use a version number above 0.1 if using the spec at https://w3c.github.io/webcodecs/ as
// the source.
// The declarations in webcodecs.generated.d.ts have been generated using the code in
// https://github.com/yume-chan/webcodecs-lib-generator. See
// https://github.com/yume-chan/webcodecs-lib-generator/blob/main/README.md for more detail.
// The following declarations are copied from
// https://github.com/microsoft/TypeScript-DOM-lib-generator/blob/a75338e1ea8a958bf08a5745141d2ab8f14ba2ca/baselines/dom.generated.d.ts
// and modified to expand the types to include VideoFrame.
/** Shim for OffscreenCanvas, which was removed in TS 4.4 */
interface OffscreenCanvas extends EventTarget {}
/**
* Replaces CanvasImageSource; only applies if WebCodecs is available.
*/
type CanvasImageSourceWebCodecs =
| HTMLOrSVGImageElement
| HTMLVideoElement
| HTMLCanvasElement
| ImageBitmap
| OffscreenCanvas
| VideoFrame;
interface CanvasRenderingContext2D {
drawImage(image: CanvasImageSourceWebCodecs, dx: number, dy: number): void;
drawImage(
image: CanvasImageSourceWebCodecs,
dx: number,
dy: number,
dw: number,
dh: number,
): void;
drawImage(
image: CanvasImageSourceWebCodecs,
sx: number,
sy: number,
sw: number,
sh: number,
dx: number,
dy: number,
dw: number,
dh: number,
): void;
createPattern(
image: CanvasImageSourceWebCodecs,
repetition: string | null,
): CanvasPattern | null;
}
interface OffscreenCanvasRenderingContext2D {
drawImage(image: CanvasImageSourceWebCodecs, dx: number, dy: number): void;
drawImage(
image: CanvasImageSourceWebCodecs,
dx: number,
dy: number,
dw: number,
dh: number,
): void;
drawImage(
image: CanvasImageSourceWebCodecs,
sx: number,
sy: number,
sw: number,
sh: number,
dx: number,
dy: number,
dw: number,
dh: number,
): void;
createPattern(
image: CanvasImageSourceWebCodecs,
repetition: string | null,
): CanvasPattern | null;
}
/**
* Replaces ImageBitmapSource; only applies if WebCodecs is available.
*/
type ImageBitmapSourceWebCodecs = CanvasImageSourceWebCodecs | Blob | ImageData;
declare function createImageBitmap(
image: ImageBitmapSourceWebCodecs,
options?: ImageBitmapOptions,
): Promise<ImageBitmap>;
declare function createImageBitmap(
image: ImageBitmapSourceWebCodecs,
sx: number,
sy: number,
sw: number,
sh: number,
options?: ImageBitmapOptions,
): Promise<ImageBitmap>;
/**
* Replaces TexImageSource; only applies if WebCodecs is available.
*/
type TexImageSourceWebCodecs =
| ImageBitmap
| ImageData
| HTMLImageElement
| HTMLCanvasElement
| HTMLVideoElement
| OffscreenCanvas
| VideoFrame;
interface WebGLRenderingContextOverloads {
texImage2D(
target: GLenum,
level: GLint,
internalformat: GLint,
format: GLenum,
type: GLenum,
source: TexImageSourceWebCodecs,
): void;
texSubImage2D(
target: GLenum,
level: GLint,
xoffset: GLint,
yoffset: GLint,
format: GLenum,
type: GLenum,
source: TexImageSourceWebCodecs,
): void;
}
interface WebGL2RenderingContextBase {
texImage3D(
target: GLenum,
level: GLint,
internalformat: GLint,
width: GLsizei,
height: GLsizei,
depth: GLsizei,
border: GLint,
format: GLenum,
type: GLenum,
source: TexImageSourceWebCodecs,
): void;
texSubImage3D(
target: GLenum,
level: GLint,
xoffset: GLint,
yoffset: GLint,
zoffset: GLint,
width: GLsizei,
height: GLsizei,
depth: GLsizei,
format: GLenum,
type: GLenum,
source: TexImageSourceWebCodecs,
): void;
}
interface WebGL2RenderingContextOverloads {
texImage2D(
target: GLenum,
level: GLint,
internalformat: GLint,
format: GLenum,
type: GLenum,
source: TexImageSourceWebCodecs,
): void;
texImage2D(
target: GLenum,
level: GLint,
internalformat: GLint,
width: GLsizei,
height: GLsizei,
border: GLint,
format: GLenum,
type: GLenum,
source: TexImageSourceWebCodecs,
): void;
texSubImage2D(
target: GLenum,
level: GLint,
xoffset: GLint,
yoffset: GLint,
format: GLenum,
type: GLenum,
source: TexImageSourceWebCodecs,
): void;
texSubImage2D(
target: GLenum,
level: GLint,
xoffset: GLint,
yoffset: GLint,
width: GLsizei,
height: GLsizei,
format: GLenum,
type: GLenum,
source: TexImageSourceWebCodecs,
): void;
}
/////////////////////////////
/// webcodecs APIs
/////////////////////////////
interface AudioDataCopyToOptions {
format?: AudioSampleFormat | undefined;
frameCount?: number | undefined;
frameOffset?: number | undefined;
planeIndex: number;
}
interface AudioDataInit {
data: AllowSharedBufferSource;
format: AudioSampleFormat;
numberOfChannels: number;
numberOfFrames: number;
sampleRate: number;
timestamp: number;
}
interface AudioDecoderConfig {
codec: string;
description?: AllowSharedBufferSource | undefined;
numberOfChannels: number;
sampleRate: number;
}
interface AudioDecoderInit {
error: WebCodecsErrorCallback;
output: AudioDataOutputCallback;
}
interface AudioDecoderSupport {
config?: AudioDecoderConfig;
supported?: boolean;
}
interface AudioEncoderConfig {
bitrate?: number | undefined;
codec: string;
numberOfChannels: number;
sampleRate: number;
}
interface AudioEncoderInit {
error: WebCodecsErrorCallback;
output: EncodedAudioChunkOutputCallback;
}
interface AudioEncoderSupport {
config?: AudioEncoderConfig;
supported?: boolean;
}
interface AvcEncoderConfig {
format?: AvcBitstreamFormat | undefined;
}
interface EncodedAudioChunkInit {
data: AllowSharedBufferSource;
duration?: number | undefined;
timestamp: number;
type: EncodedAudioChunkType;
}
interface EncodedAudioChunkMetadata {
decoderConfig?: AudioDecoderConfig | undefined;
}
interface EncodedVideoChunkInit {
data: AllowSharedBufferSource;
duration?: number | undefined;
timestamp: number;
type: EncodedVideoChunkType;
}
interface EncodedVideoChunkMetadata {
decoderConfig?: VideoDecoderConfig | undefined;
temporalLayerId?: number | undefined;
}
interface ImageDecodeOptions {
completeFramesOnly?: boolean | undefined;
frameIndex?: number | undefined;
}
interface ImageDecodeResult {
complete: boolean;
image: VideoFrame;
}
interface ImageDecoderInit {
colorSpaceConversion?: ColorSpaceConversion | undefined;
data: ImageBufferSource;
desiredHeight?: number | undefined;
desiredWidth?: number | undefined;
preferAnimation?: boolean | undefined;
premultiplyAlpha?: PremultiplyAlpha | undefined;
type: string;
}
interface PlaneLayout {
offset: number;
stride: number;
}
interface VideoColorSpaceInit {
fullRange?: boolean | null | undefined;
matrix?: VideoMatrixCoefficients | null | undefined;
primaries?: VideoColorPrimaries | null | undefined;
transfer?: VideoTransferCharacteristics | null | undefined;
}
interface VideoDecoderConfig {
codec: string;
codedHeight?: number | undefined;
codedWidth?: number | undefined;
colorSpace?: VideoColorSpaceInit | undefined;
description?: AllowSharedBufferSource | undefined;
displayAspectHeight?: number | undefined;
displayAspectWidth?: number | undefined;
hardwareAcceleration?: HardwarePreference | undefined;
optimizeForLatency?: boolean | undefined;
}
interface VideoDecoderInit {
error: WebCodecsErrorCallback;
output: VideoFrameOutputCallback;
}
interface VideoDecoderSupport {
config?: VideoDecoderConfig;
supported?: boolean;
}
interface VideoEncoderConfig {
alpha?: AlphaOption | undefined;
avc?: AvcEncoderConfig | undefined;
bitrate?: number | undefined;
bitrateMode?: VideoEncoderBitrateMode | undefined;
codec: string;
displayHeight?: number | undefined;
displayWidth?: number | undefined;
framerate?: number | undefined;
hardwareAcceleration?: HardwarePreference | undefined;
height: number;
latencyMode?: LatencyMode | undefined;
scalabilityMode?: string | undefined;
width: number;
}
interface VideoEncoderEncodeOptions {
keyFrame?: boolean;
}
interface VideoEncoderInit {
error: WebCodecsErrorCallback;
output: EncodedVideoChunkOutputCallback;
}
interface VideoEncoderSupport {
config?: VideoEncoderConfig;
supported?: boolean;
}
interface VideoFrameBufferInit {
codedHeight: number;
codedWidth: number;
colorSpace?: VideoColorSpaceInit | undefined;
displayHeight?: number | undefined;
displayWidth?: number | undefined;
duration?: number | undefined;
format: VideoPixelFormat;
layout?: PlaneLayout[] | undefined;
timestamp: number;
visibleRect?: DOMRectInit | undefined;
}
interface VideoFrameCopyToOptions {
layout?: PlaneLayout[] | undefined;
rect?: DOMRectInit | undefined;
}
interface VideoFrameInit {
alpha?: AlphaOption | undefined;
displayHeight?: number | undefined;
displayWidth?: number | undefined;
duration?: number | undefined;
timestamp?: number | undefined;
visibleRect?: DOMRectInit | undefined;
}
interface AudioData {
readonly duration: number;
readonly format: AudioSampleFormat | null;
readonly numberOfChannels: number;
readonly numberOfFrames: number;
readonly sampleRate: number;
readonly timestamp: number;
allocationSize(options: AudioDataCopyToOptions): number;
clone(): AudioData;
close(): void;
copyTo(
destination: AllowSharedBufferSource,
options: AudioDataCopyToOptions,
): void;
}
declare var AudioData: {
prototype: AudioData;
new (init: AudioDataInit): AudioData;
};
interface AudioDecoderEventMap {
dequeue: Event;
}
/** Available only in secure contexts. */
interface AudioDecoder {
readonly decodeQueueSize: number;
readonly state: CodecState;
ondequeue: ((this: AudioDecoder, ev: Event) => any) | null;
close(): void;
configure(config: AudioDecoderConfig): void;
decode(chunk: EncodedAudioChunk): void;
flush(): Promise<void>;
reset(): void;
addEventListener<K extends keyof AudioDecoderEventMap>(
type: K,
listener: (this: AudioDecoder, ev: AudioDecoderEventMap[K]) => any,
options?: boolean | AddEventListenerOptions,
): void;
addEventListener(
type: string,
listener: EventListenerOrEventListenerObject,
options?: boolean | AddEventListenerOptions,
): void;
removeEventListener<K extends keyof AudioDecoderEventMap>(
type: K,
listener: (this: AudioDecoder, ev: AudioDecoderEventMap[K]) => any,
options?: boolean | EventListenerOptions,
): void;
removeEventListener(
type: string,
listener: EventListenerOrEventListenerObject,
options?: boolean | EventListenerOptions,
): void;
}
declare var AudioDecoder: {
prototype: AudioDecoder;
new (init: AudioDecoderInit): AudioDecoder;
isConfigSupported(config: AudioDecoderConfig): Promise<AudioDecoderSupport>;
};
interface AudioEncoderEventMap {
dequeue: Event;
}
/** Available only in secure contexts. */
interface AudioEncoder {
readonly encodeQueueSize: number;
readonly state: CodecState;
ondequeue: ((this: AudioEncoder, ev: Event) => any) | null;
close(): void;
configure(config: AudioEncoderConfig): void;
encode(data: AudioData): void;
flush(): Promise<void>;
reset(): void;
addEventListener<K extends keyof AudioEncoderEventMap>(
type: K,
listener: (this: AudioEncoder, ev: AudioEncoderEventMap[K]) => any,
options?: boolean | AddEventListenerOptions,
): void;
addEventListener(
type: string,
listener: EventListenerOrEventListenerObject,
options?: boolean | AddEventListenerOptions,
): void;
removeEventListener<K extends keyof AudioEncoderEventMap>(
type: K,
listener: (this: AudioEncoder, ev: AudioEncoderEventMap[K]) => any,
options?: boolean | EventListenerOptions,
): void;
removeEventListener(
type: string,
listener: EventListenerOrEventListenerObject,
options?: boolean | EventListenerOptions,
): void;
}
declare var AudioEncoder: {
prototype: AudioEncoder;
new (init: AudioEncoderInit): AudioEncoder;
isConfigSupported(config: AudioEncoderConfig): Promise<AudioEncoderSupport>;
};
interface EncodedAudioChunk {
readonly byteLength: number;
readonly duration: number | null;
readonly timestamp: number;
readonly type: EncodedAudioChunkType;
copyTo(destination: AllowSharedBufferSource): void;
}
declare var EncodedAudioChunk: {
prototype: EncodedAudioChunk;
new (init: EncodedAudioChunkInit): EncodedAudioChunk;
};
interface EncodedVideoChunk {
readonly byteLength: number;
readonly duration: number | null;
readonly timestamp: number;
readonly type: EncodedVideoChunkType;
copyTo(destination: AllowSharedBufferSource): void;
}
declare var EncodedVideoChunk: {
prototype: EncodedVideoChunk;
new (init: EncodedVideoChunkInit): EncodedVideoChunk;
};
/** Available only in secure contexts. */
interface ImageDecoder {
readonly complete: boolean;
readonly completed: Promise<void>;
readonly tracks: ImageTrackList;
readonly type: string;
close(): void;
decode(options?: ImageDecodeOptions): Promise<ImageDecodeResult>;
reset(): void;
}
// declare var ImageDecoder: {
// prototype: ImageDecoder;
// new(init: ImageDecoderInit): ImageDecoder;
// isTypeSupported(type: string): Promise<boolean>;
// };
// interface ImageTrack {
// readonly animated: boolean;
// readonly frameCount: number;
// readonly repetitionCount: number;
// selected: boolean;
// }
// declare var ImageTrack: {
// prototype: ImageTrack;
// new(): ImageTrack;
// };
// interface ImageTrackList {
// readonly length: number;
// readonly ready: Promise<void>;
// readonly selectedIndex: number;
// readonly selectedTrack: ImageTrack | null;
// [index: number]: ImageTrack;
// }
// declare var ImageTrackList: {
// prototype: ImageTrackList;
// new(): ImageTrackList;
// };
interface VideoColorSpace {
readonly fullRange: boolean | null;
readonly matrix: VideoMatrixCoefficients | null;
readonly primaries: VideoColorPrimaries | null;
readonly transfer: VideoTransferCharacteristics | null;
toJSON(): VideoColorSpaceInit;
}
declare var VideoColorSpace: {
prototype: VideoColorSpace;
new (init?: VideoColorSpaceInit): VideoColorSpace;
};
interface VideoDecoderEventMap {
dequeue: Event;
}
/** Available only in secure contexts. */
interface VideoDecoder {
readonly decodeQueueSize: number;
readonly state: CodecState;
ondequeue: ((this: VideoDecoder, ev: Event) => any) | null;
close(): void;
configure(config: VideoDecoderConfig): void;
decode(chunk: EncodedVideoChunk): void;
flush(): Promise<void>;
reset(): void;
addEventListener<K extends keyof VideoDecoderEventMap>(
type: K,
listener: (this: VideoDecoder, ev: VideoDecoderEventMap[K]) => any,
options?: boolean | AddEventListenerOptions,
): void;
addEventListener(
type: string,
listener: EventListenerOrEventListenerObject,
options?: boolean | AddEventListenerOptions,
): void;
removeEventListener<K extends keyof VideoDecoderEventMap>(
type: K,
listener: (this: VideoDecoder, ev: VideoDecoderEventMap[K]) => any,
options?: boolean | EventListenerOptions,
): void;
removeEventListener(
type: string,
listener: EventListenerOrEventListenerObject,
options?: boolean | EventListenerOptions,
): void;
}
declare var VideoDecoder: {
prototype: VideoDecoder;
new (init: VideoDecoderInit): VideoDecoder;
isConfigSupported(config: VideoDecoderConfig): Promise<VideoDecoderSupport>;
};
interface VideoEncoderEventMap {
dequeue: Event;
}
/** Available only in secure contexts. */
interface VideoEncoder {
readonly encodeQueueSize: number;
readonly state: CodecState;
close(): void;
ondequeue: ((this: VideoEncoder, ev: Event) => any) | null;
configure(config: VideoEncoderConfig): void;
encode(frame: VideoFrame, options?: VideoEncoderEncodeOptions): void;
flush(): Promise<void>;
reset(): void;
addEventListener<K extends keyof VideoEncoderEventMap>(
type: K,
listener: (this: VideoEncoder, ev: VideoEncoderEventMap[K]) => any,
options?: boolean | AddEventListenerOptions,
): void;
addEventListener(
type: string,
listener: EventListenerOrEventListenerObject,
options?: boolean | AddEventListenerOptions,
): void;
removeEventListener<K extends keyof VideoEncoderEventMap>(
type: K,
listener: (this: VideoEncoder, ev: VideoEncoderEventMap[K]) => any,
options?: boolean | EventListenerOptions,
): void;
removeEventListener(
type: string,
listener: EventListenerOrEventListenerObject,
options?: boolean | EventListenerOptions,
): void;
}
declare var VideoEncoder: {
prototype: VideoEncoder;
new (init: VideoEncoderInit): VideoEncoder;
isConfigSupported(config: VideoEncoderConfig): Promise<VideoEncoderSupport>;
};
interface VideoFrame {
readonly codedHeight: number;
readonly codedRect: DOMRectReadOnly | null;
readonly codedWidth: number;
readonly colorSpace: VideoColorSpace;
readonly displayHeight: number;
readonly displayWidth: number;
readonly duration: number | null;
readonly format: VideoPixelFormat | null;
readonly timestamp: number;
readonly visibleRect: DOMRectReadOnly | null;
allocationSize(options?: VideoFrameCopyToOptions): number;
clone(): VideoFrame;
close(): void;
copyTo(
destination: AllowSharedBufferSource,
options?: VideoFrameCopyToOptions,
): Promise<PlaneLayout[]>;
}
declare var VideoFrame: {
prototype: VideoFrame;
new (source: CanvasImageSource, init?: VideoFrameInit): VideoFrame;
new (data: AllowSharedBufferSource, init: VideoFrameBufferInit): VideoFrame;
};
interface AudioDataOutputCallback {
(output: AudioData): void;
}
interface EncodedAudioChunkOutputCallback {
(output: EncodedAudioChunk, metadata: EncodedAudioChunkMetadata): void;
}
interface EncodedVideoChunkOutputCallback {
(chunk: EncodedVideoChunk, metadata: EncodedVideoChunkMetadata): void;
}
interface VideoFrameOutputCallback {
(output: VideoFrame): void;
}
interface WebCodecsErrorCallback {
(error: DOMException): void;
}
// type AllowSharedBufferSource = ArrayBuffer | ArrayBufferView;
// type BitrateMode = "constant" | "variable";
// type ImageBufferSource = ArrayBuffer | ArrayBufferView | ReadableStream;
// type AlphaOption = "discard" | "keep";
// type AudioSampleFormat = "f32" | "f32-planar" | "s16" | "s16-planar" | "s32" | "s32-planar" | "u8" | "u8-planar";
// type AvcBitstreamFormat = "annexb" | "avc";
// type CodecState = "closed" | "configured" | "unconfigured";
// type EncodedAudioChunkType = "delta" | "key";
// type EncodedVideoChunkType = "delta" | "key";
type HardwarePreference =
| "no-preference"
| "prefer-hardware"
| "prefer-software";
// type LatencyMode = "quality" | "realtime";
// type VideoColorPrimaries = "bt470bg" | "bt709" | "smpte170m";
// type VideoMatrixCoefficients = "bt470bg" | "bt709" | "rgb" | "smpte170m";
// type VideoPixelFormat = "BGRA" | "BGRX" | "I420" | "I420A" | "I422" | "I444" | "NV12" | "RGBA" | "RGBX";
// type VideoTransferCharacteristics = "bt709" | "iec61966-2-1" | "smpte170m";

View File

@@ -48,7 +48,7 @@ interface UseLivekitResult {
connState: ECConnectionState;
}
export function useLiveKit(
export function useLivekit(
rtcSession: MatrixRTCSession,
muteStates: MuteStates,
sfuConfig: SFUConfig | undefined,

View File

@@ -47,12 +47,15 @@ export const callEventAudioSounds = prefetchSounds({
export function CallEventAudioRenderer({
vm,
muted,
}: {
vm: CallViewModel;
muted?: boolean;
}): ReactNode {
const audioEngineCtx = useAudioContext({
sounds: callEventAudioSounds,
latencyHint: "interactive",
muted,
});
const audioEngineRef = useLatest(audioEngineCtx);

View File

@@ -62,8 +62,9 @@ import {
} from "../utils/errors.ts";
import { GroupCallErrorBoundary } from "./GroupCallErrorBoundary.tsx";
import {
useExperimentalToDeviceTransportSetting,
useNewMembershipManagerSetting as useNewMembershipManagerSetting,
useNewMembershipManager as useNewMembershipManagerSetting,
useExperimentalToDeviceTransport as useExperimentalToDeviceTransportSetting,
muteAllAudio as muteAllAudioSetting,
useSetting,
} from "../settings/settings";
import { useTypedEventEmitter } from "../useEvents";
@@ -104,11 +105,13 @@ export const GroupCallView: FC<Props> = ({
null,
);
const [muteAllAudio] = useSetting(muteAllAudioSetting);
const memberships = useMatrixRTCSessionMemberships(rtcSession);
const leaveSoundContext = useLatest(
useAudioContext({
sounds: callEventAudioSounds,
latencyHint: "interactive",
muted: muteAllAudio,
}),
);
// This should use `useEffectEvent` (only available in experimental versions)

View File

@@ -0,0 +1,266 @@
/*
Copyright 2025 New Vector Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import {
beforeEach,
describe,
expect,
it,
type MockedFunction,
vi,
} from "vitest";
import { act, render, type RenderResult } from "@testing-library/react";
import { type MatrixClient, JoinRule, type RoomState } from "matrix-js-sdk";
import { type MatrixRTCSession } from "matrix-js-sdk/lib/matrixrtc";
import { type RelationsContainer } from "matrix-js-sdk/lib/models/relations-container";
import { ConnectionState, type LocalParticipant } from "livekit-client";
import { of } from "rxjs";
import { BrowserRouter } from "react-router-dom";
import { TooltipProvider } from "@vector-im/compound-web";
import {
RoomAudioRenderer,
RoomContext,
useLocalParticipant,
} from "@livekit/components-react";
import { RoomAndToDeviceEvents } from "matrix-js-sdk/lib/matrixrtc/RoomAndToDeviceKeyTransport";
import { type MuteStates } from "./MuteStates";
import { InCallView } from "./InCallView";
import {
mockLivekitRoom,
mockLocalParticipant,
mockMatrixRoom,
mockMatrixRoomMember,
mockRemoteParticipant,
mockRtcMembership,
type MockRTCSession,
} from "../utils/test";
import { E2eeType } from "../e2ee/e2eeType";
import { getBasicCallViewModelEnvironment } from "../utils/test-viewmodel";
import { alice, local } from "../utils/test-fixtures";
import {
developerMode as developerModeSetting,
useExperimentalToDeviceTransport as useExperimentalToDeviceTransportSetting,
} from "../settings/settings";
import { ReactionsSenderProvider } from "../reactions/useReactionsSender";
import { useRoomEncryptionSystem } from "../e2ee/sharedKeyManagement";
// vi.hoisted(() => {
// localStorage = {} as unknown as Storage;
// });
vi.hoisted(
() =>
(global.ImageData = class MockImageData {
public data: number[] = [];
} as unknown as typeof ImageData),
);
vi.mock("../soundUtils");
vi.mock("../useAudioContext");
vi.mock("../tile/GridTile");
vi.mock("../tile/SpotlightTile");
vi.mock("@livekit/components-react");
vi.mock("../e2ee/sharedKeyManagement");
vi.mock("react-use-measure", () => ({
default: (): [() => void, object] => [(): void => {}, {}],
}));
// Shared fixtures for every test in this file.
// The local user is Carol with RTC device id "CCCC".
const localRtcMember = mockRtcMembership("@carol:example.org", "CCCC");
// LiveKit participant identities are "<userId>:<deviceId>" strings.
const localParticipant = mockLocalParticipant({
identity: "@local:example.org:AAAAAA",
});
const remoteParticipant = mockRemoteParticipant({
identity: "@alice:example.org:AAAAAA",
});
const carol = mockMatrixRoomMember(localRtcMember);
// Keyed by Matrix user id; consumed by the mock room's getMember below.
const roomMembers = new Map([carol].map((p) => [p.userId, p]));
const roomId = "!foo:bar";
// Re-assigned in beforeEach; lets each test control the room's encryption system.
let useRoomEncryptionSystemMock: MockedFunction<typeof useRoomEncryptionSystem>;
// Reset all mocks and install fresh default implementations before each test.
beforeEach(() => {
vi.clearAllMocks();
// RoomAudioRenderer is tested separately.
(
RoomAudioRenderer as MockedFunction<typeof RoomAudioRenderer>
).mockImplementation((_props) => {
return <div>mocked: RoomAudioRenderer</div>;
});
// Stub the LiveKit hook so InCallView sees a non-screensharing local
// participant without needing a real LiveKit connection.
(
useLocalParticipant as MockedFunction<typeof useLocalParticipant>
).mockImplementation(
() =>
({
isScreenShareEnabled: false,
localParticipant: localRtcMember as unknown as LocalParticipant,
}) as unknown as ReturnType<typeof useLocalParticipant>,
);
// Default to an unencrypted room; individual tests override the return
// value to exercise the per-participant-encryption code paths.
useRoomEncryptionSystemMock =
useRoomEncryptionSystem as typeof useRoomEncryptionSystemMock;
useRoomEncryptionSystemMock.mockReturnValue({ kind: E2eeType.NONE });
});
/**
 * Render an InCallView wired up with mocked Matrix client, Matrix room,
 * LiveKit room and call view model.
 *
 * Returns the Testing Library RenderResult plus the MockRTCSession so
 * tests can emit session events (e.g. transport changes) after render.
 */
function createInCallView(): RenderResult & {
rtcSession: MockRTCSession;
} {
// Minimal MatrixClient: only the methods InCallView actually calls.
const client = {
getUser: () => null,
getUserId: () => localRtcMember.sender,
getDeviceId: () => localRtcMember.deviceId,
getRoom: (rId) => (rId === roomId ? room : null),
} as Partial<MatrixClient> as MatrixClient;
const room = mockMatrixRoom({
// Reactions/relations lookup returns no child events.
relations: {
getChildEventsForEvent: () =>
vi.mocked({
getRelations: () => [],
}),
} as unknown as RelationsContainer,
client,
roomId,
getMember: (userId) => roomMembers.get(userId) ?? null,
getMxcAvatarUrl: () => null,
// The room reports itself as encrypted; the *key transport* behavior is
// driven by useRoomEncryptionSystemMock instead.
hasEncryptionStateEvent: vi.fn().mockReturnValue(true),
getCanonicalAlias: () => null,
currentState: {
getJoinRule: () => JoinRule.Invite,
} as Partial<RoomState> as RoomState,
});
// Both audio and video start muted.
const muteState = {
audio: { enabled: false },
video: { enabled: false },
} as MuteStates;
const livekitRoom = mockLivekitRoom(
{
localParticipant,
},
{
remoteParticipants$: of([remoteParticipant]),
},
);
const { vm, rtcSession } = getBasicCallViewModelEnvironment([local, alice]);
rtcSession.joined = true;
// The provider stack mirrors what the app supplies around InCallView:
// router, reactions sender, tooltips, and the LiveKit room context.
const renderResult = render(
<BrowserRouter>
<ReactionsSenderProvider
vm={vm}
rtcSession={rtcSession as unknown as MatrixRTCSession}
>
<TooltipProvider>
<RoomContext.Provider value={livekitRoom}>
<InCallView
client={client}
hideHeader={true}
rtcSession={rtcSession as unknown as MatrixRTCSession}
muteStates={muteState}
vm={vm}
matrixInfo={{
userId: "",
displayName: "",
avatarUrl: "",
roomId: "",
roomName: "",
roomAlias: null,
roomAvatar: null,
e2eeSystem: {
kind: E2eeType.NONE,
},
}}
livekitRoom={livekitRoom}
participantCount={0}
onLeave={function (): void {
throw new Error("Function not implemented.");
}}
connState={ConnectionState.Connected}
onShareClick={null}
/>
</RoomContext.Provider>
</TooltipProvider>
</ReactionsSenderProvider>
</BrowserRouter>,
);
return {
...renderResult,
rtcSession,
};
}
describe("InCallView", () => {
describe("rendering", () => {
it("renders", () => {
const { container } = createInCallView();
expect(container).toMatchSnapshot();
});
});
// The "using to Device key transport" label is a developer-mode debug
// indicator. It should appear only when ALL of the following hold:
// developer mode on, experimental to-device transport on, room uses
// per-participant encryption, and no fallback to room keys has occurred.
describe("toDevice label", () => {
it("is shown if setting activated and room encrypted", () => {
useRoomEncryptionSystemMock.mockReturnValue({
kind: E2eeType.PER_PARTICIPANT,
});
useExperimentalToDeviceTransportSetting.setValue(true);
developerModeSetting.setValue(true);
const { getByText } = createInCallView();
expect(getByText("using to Device key transport")).toBeInTheDocument();
});
// Fixed typo in the test title: "unenecrypted" -> "unencrypted".
it("is not shown in unencrypted room", () => {
useRoomEncryptionSystemMock.mockReturnValue({
kind: E2eeType.NONE,
});
useExperimentalToDeviceTransportSetting.setValue(true);
developerModeSetting.setValue(true);
const { queryByText } = createInCallView();
expect(
queryByText("using to Device key transport"),
).not.toBeInTheDocument();
});
it("is hidden once fallback was triggered", async () => {
useRoomEncryptionSystemMock.mockReturnValue({
kind: E2eeType.PER_PARTICIPANT,
});
useExperimentalToDeviceTransportSetting.setValue(true);
developerModeSetting.setValue(true);
const { rtcSession, queryByText } = createInCallView();
expect(queryByText("using to Device key transport")).toBeInTheDocument();
expect(rtcSession).toBeDefined();
// Emitting EnabledTransportsChanged with room=true simulates the
// session falling back to room-key transport; the label must go away.
await act(() =>
rtcSession.emit(RoomAndToDeviceEvents.EnabledTransportsChanged, {
toDevice: true,
room: true,
}),
);
expect(
queryByText("using to Device key transport"),
).not.toBeInTheDocument();
});
it("is not shown if setting is disabled", () => {
useExperimentalToDeviceTransportSetting.setValue(false);
developerModeSetting.setValue(true);
useRoomEncryptionSystemMock.mockReturnValue({
kind: E2eeType.PER_PARTICIPANT,
});
const { queryByText } = createInCallView();
expect(
queryByText("using to Device key transport"),
).not.toBeInTheDocument();
});
it("is not shown if developer mode is disabled", () => {
useExperimentalToDeviceTransportSetting.setValue(true);
developerModeSetting.setValue(false);
useRoomEncryptionSystemMock.mockReturnValue({
kind: E2eeType.PER_PARTICIPANT,
});
const { queryByText } = createInCallView();
expect(
queryByText("using to Device key transport"),
).not.toBeInTheDocument();
});
});
});

View File

@@ -56,7 +56,7 @@ import { type OTelGroupCallMembership } from "../otel/OTelGroupCallMembership";
import { SettingsModal, defaultSettingsTab } from "../settings/SettingsModal";
import { useRageshakeRequestModal } from "../settings/submit-rageshake";
import { RageshakeRequestModal } from "./RageshakeRequestModal";
import { useLiveKit } from "../livekit/useLiveKit";
import { useLivekit } from "../livekit/useLivekit.ts";
import { useWakeLock } from "../useWakeLock";
import { useMergedRefs } from "../useMergedRefs";
import { type MuteStates } from "./MuteStates";
@@ -73,7 +73,10 @@ import {
import { Grid, type TileProps } from "../grid/Grid";
import { useInitial } from "../useInitial";
import { SpotlightTile } from "../tile/SpotlightTile";
import { type EncryptionSystem } from "../e2ee/sharedKeyManagement";
import {
useRoomEncryptionSystem,
type EncryptionSystem,
} from "../e2ee/sharedKeyManagement";
import { E2eeType } from "../e2ee/e2eeType";
import { makeGridLayout } from "../grid/GridLayout";
import {
@@ -96,7 +99,9 @@ import { ReactionsOverlay } from "./ReactionsOverlay";
import { CallEventAudioRenderer } from "./CallEventAudioRenderer";
import {
debugTileLayout as debugTileLayoutSetting,
useExperimentalToDeviceTransportSetting,
useExperimentalToDeviceTransport as useExperimentalToDeviceTransportSetting,
muteAllAudio as muteAllAudioSetting,
developerMode as developerModeSetting,
useSetting,
} from "../settings/settings";
import { ReactionsReader } from "../reactions/ReactionsReader";
@@ -114,7 +119,7 @@ export interface ActiveCallProps
export const ActiveCall: FC<ActiveCallProps> = (props) => {
const sfuConfig = useOpenIDSFU(props.client, props.rtcSession);
const { livekitRoom, connState } = useLiveKit(
const { livekitRoom, connState } = useLivekit(
props.rtcSession,
props.muteStates,
sfuConfig,
@@ -233,19 +238,34 @@ export const InCallView: FC<InCallViewProps> = ({
room: livekitRoom,
});
const [toDeviceEncryptionSetting] = useSetting(
useExperimentalToDeviceTransportSetting,
);
const [showToDeviceEncryption, setShowToDeviceEncryption] = useState(
() => toDeviceEncryptionSetting,
);
useEffect(() => {
setShowToDeviceEncryption(toDeviceEncryptionSetting);
}, [toDeviceEncryptionSetting]);
const [muteAllAudio] = useSetting(muteAllAudioSetting);
// This seems like it might be enough logic to use move it into the call view model?
const [didFallbackToRoomKey, setDidFallbackToRoomKey] = useState(false);
useTypedEventEmitter(
rtcSession,
RoomAndToDeviceEvents.EnabledTransportsChanged,
(enabled) => setShowToDeviceEncryption(enabled.to_device),
(enabled) => setDidFallbackToRoomKey(enabled.room),
);
const [developerMode] = useSetting(developerModeSetting);
const [useExperimentalToDeviceTransport] = useSetting(
useExperimentalToDeviceTransportSetting,
);
const encryptionSystem = useRoomEncryptionSystem(rtcSession.room.roomId);
const showToDeviceEncryption = useMemo(
() =>
developerMode &&
useExperimentalToDeviceTransport &&
encryptionSystem.kind === E2eeType.PER_PARTICIPANT &&
!didFallbackToRoomKey,
[
developerMode,
useExperimentalToDeviceTransport,
encryptionSystem.kind,
didFallbackToRoomKey,
],
);
const toggleMicrophone = useCallback(
@@ -706,10 +726,10 @@ export const InCallView: FC<InCallViewProps> = ({
</Text>
)
}
<RoomAudioRenderer />
<RoomAudioRenderer muted={muteAllAudio} />
{renderContent()}
<CallEventAudioRenderer vm={vm} />
<ReactionsAudioRenderer vm={vm} />
<CallEventAudioRenderer vm={vm} muted={muteAllAudio} />
<ReactionsAudioRenderer vm={vm} muted={muteAllAudio} />
<ReactionsOverlay vm={vm} />
{footer}
{layout.type !== "pip" && (

View File

@@ -21,8 +21,8 @@ import { act, type ReactNode } from "react";
import { ReactionsAudioRenderer } from "./ReactionAudioRenderer";
import {
playReactionsSound,
soundEffectVolumeSetting,
playReactionsSound as playReactionsSoundSetting,
soundEffectVolume as soundEffectVolumeSetting,
} from "../settings/settings";
import { useAudioContext } from "../useAudioContext";
import { GenericReaction, ReactionSet } from "../reactions";
@@ -50,7 +50,7 @@ vitest.mock("../soundUtils");
afterEach(() => {
vitest.resetAllMocks();
playReactionsSound.setValue(playReactionsSound.defaultValue);
playReactionsSoundSetting.setValue(playReactionsSoundSetting.defaultValue);
soundEffectVolumeSetting.setValue(soundEffectVolumeSetting.defaultValue);
});
@@ -74,7 +74,7 @@ beforeEach(() => {
test("preloads all audio elements", () => {
const { vm } = getBasicCallViewModelEnvironment([local, alice]);
playReactionsSound.setValue(true);
playReactionsSoundSetting.setValue(true);
render(<TestComponent vm={vm} />);
expect(prefetchSounds).toHaveBeenCalledOnce();
});
@@ -84,7 +84,7 @@ test("will play an audio sound when there is a reaction", () => {
local,
alice,
]);
playReactionsSound.setValue(true);
playReactionsSoundSetting.setValue(true);
render(<TestComponent vm={vm} />);
// Find the first reaction with a sound effect
@@ -110,7 +110,7 @@ test("will play the generic audio sound when there is soundless reaction", () =>
local,
alice,
]);
playReactionsSound.setValue(true);
playReactionsSoundSetting.setValue(true);
render(<TestComponent vm={vm} />);
// Find the first reaction with a sound effect
@@ -136,7 +136,7 @@ test("will play multiple audio sounds when there are multiple different reaction
local,
alice,
]);
playReactionsSound.setValue(true);
playReactionsSoundSetting.setValue(true);
render(<TestComponent vm={vm} />);
// Find the first reaction with a sound effect

View File

@@ -24,8 +24,10 @@ const soundMap = Object.fromEntries([
export function ReactionsAudioRenderer({
vm,
muted,
}: {
vm: CallViewModel;
muted?: boolean;
}): ReactNode {
const [shouldPlay] = useSetting(playReactionsSound);
const [soundCache, setSoundCache] = useState<ReturnType<
@@ -34,6 +36,7 @@ export function ReactionsAudioRenderer({
const audioEngineCtx = useAudioContext({
sounds: soundCache,
latencyHint: "interactive",
muted,
});
const audioEngineRef = useLatest(audioEngineCtx);

View File

@@ -0,0 +1,181 @@
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
exports[`InCallView > rendering > renders 1`] = `
<div>
<div
class="inRoom"
>
<div
class="header filler"
/>
<div>
mocked: RoomAudioRenderer
</div>
<div
class="scrollingGrid grid"
>
<div
class="layer"
>
<div
class="container slot"
data-id="1"
>
<div
class="slot local slot"
data-block-alignment="start"
data-id="0"
data-inline-alignment="end"
/>
</div>
</div>
</div>
<div
class="fixedGrid grid"
>
<div />
</div>
<div
class="container"
/>
<div
class="footer"
>
<div
class="buttons"
>
<button
aria-disabled="false"
aria-labelledby=":r0:"
class="_button_i91xf_17 _has-icon_i91xf_66 _icon-only_i91xf_59"
data-kind="primary"
data-size="lg"
data-testid="incall_mute"
role="button"
tabindex="0"
>
<svg
aria-hidden="true"
fill="currentColor"
height="24"
viewBox="0 0 24 24"
width="24"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M8 8v-.006l6.831 6.832-.002.002 1.414 1.415.003-.003 1.414 1.414-.003.003L20.5 20.5a1 1 0 0 1-1.414 1.414l-3.022-3.022A7.949 7.949 0 0 1 13 19.938V21a1 1 0 0 1-2 0v-1.062A8.001 8.001 0 0 1 4 12a1 1 0 1 1 2 0 6 6 0 0 0 8.587 5.415l-1.55-1.55A4.005 4.005 0 0 1 8 12v-1.172L2.086 4.914A1 1 0 0 1 3.5 3.5L8 8Zm9.417 6.583 1.478 1.477A7.963 7.963 0 0 0 20 12a1 1 0 0 0-2 0c0 .925-.21 1.8-.583 2.583ZM8.073 5.238l7.793 7.793c.087-.329.134-.674.134-1.031V6a4 4 0 0 0-7.927-.762Z"
/>
</svg>
</button>
<button
aria-disabled="false"
aria-labelledby=":r5:"
class="_button_i91xf_17 _has-icon_i91xf_66 _icon-only_i91xf_59"
data-kind="primary"
data-size="lg"
data-testid="incall_videomute"
role="button"
tabindex="0"
>
<svg
aria-hidden="true"
fill="currentColor"
height="24"
viewBox="0 0 24 24"
width="24"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M2.747 2.753 4.35 4.355l.007-.003L18 17.994v.012l3.247 3.247a1 1 0 0 1-1.414 1.414l-2.898-2.898A1.992 1.992 0 0 1 16 20H6a4 4 0 0 1-4-4V8c0-.892.292-1.715.785-2.38L1.333 4.166a1 1 0 0 1 1.414-1.414ZM18 15.166 6.834 4H16a2 2 0 0 1 2 2v4.286l3.35-2.871a1 1 0 0 1 1.65.76v7.65a1 1 0 0 1-1.65.76L18 13.715v1.45Z"
/>
</svg>
</button>
<button
aria-labelledby=":ra:"
class="_button_i91xf_17 _has-icon_i91xf_66 _icon-only_i91xf_59"
data-kind="secondary"
data-size="lg"
role="button"
tabindex="0"
>
<svg
aria-hidden="true"
fill="currentColor"
height="24"
viewBox="0 0 24 24"
width="24"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M12.731 2C13.432 2 14 2.568 14 3.269c0 .578.396 1.074.935 1.286.085.034.17.07.253.106.531.23 1.162.16 1.572-.25a1.269 1.269 0 0 1 1.794 0l1.034 1.035a1.269 1.269 0 0 1 0 1.794c-.41.41-.48 1.04-.248 1.572.036.084.07.168.105.253.212.539.708.935 1.286.935.701 0 1.269.568 1.269 1.269v1.462c0 .701-.568 1.269-1.269 1.269-.578 0-1.074.396-1.287.935-.033.085-.068.17-.104.253-.232.531-.161 1.162.248 1.572a1.269 1.269 0 0 1 0 1.794l-1.034 1.034a1.269 1.269 0 0 1-1.794 0c-.41-.41-1.04-.48-1.572-.248a7.935 7.935 0 0 1-.253.105c-.539.212-.935.708-.935 1.286 0 .701-.568 1.269-1.269 1.269H11.27c-.702 0-1.27-.568-1.27-1.269 0-.578-.396-1.074-.935-1.287a7.975 7.975 0 0 1-.253-.104c-.531-.232-1.162-.161-1.572.248a1.269 1.269 0 0 1-1.794 0l-1.034-1.034a1.269 1.269 0 0 1 0-1.794c.41-.41.48-1.04.249-1.572a7.89 7.89 0 0 1-.106-.253C4.343 14.396 3.847 14 3.27 14 2.568 14 2 13.432 2 12.731V11.27c0-.702.568-1.27 1.269-1.27.578 0 1.074-.396 1.286-.935.034-.085.07-.17.106-.253.23-.531.16-1.162-.25-1.572a1.269 1.269 0 0 1 0-1.794l1.035-1.034a1.269 1.269 0 0 1 1.794 0c.41.41 1.04.48 1.572.249a7.93 7.93 0 0 1 .253-.106c.539-.212.935-.708.935-1.286C10 2.568 10.568 2 11.269 2h1.462ZM12 16a4 4 0 1 0 0-8 4 4 0 0 0 0 8Z"
/>
</svg>
</button>
<button
aria-labelledby=":rf:"
class="_button_i91xf_17 endCall _has-icon_i91xf_66 _icon-only_i91xf_59 _destructive_i91xf_116"
data-kind="primary"
data-size="lg"
data-testid="incall_leave"
role="button"
tabindex="0"
>
<svg
aria-hidden="true"
fill="currentColor"
height="24"
viewBox="0 0 24 24"
width="24"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="m2.765 16.02-2.47-2.416A1.018 1.018 0 0 1 0 12.852c0-.304.098-.555.295-.751a15.64 15.64 0 0 1 5.316-3.786A15.89 15.89 0 0 1 12 7c2.237 0 4.367.443 6.39 1.329a15.977 15.977 0 0 1 5.315 3.772c.197.196.295.447.295.751 0 .305-.098.555-.295.752l-2.47 2.416a1.047 1.047 0 0 1-1.396.108l-3.114-2.363a1.067 1.067 0 0 1-.322-.376 1.066 1.066 0 0 1-.108-.483v-2.27a13.593 13.593 0 0 0-2.12-.524C13.459 9.996 12 9.937 12 9.937s-1.459.059-2.175.175c-.715.116-1.422.29-2.12.523v2.271c0 .179-.036.34-.108.483a1.066 1.066 0 0 1-.322.376l-3.114 2.363a1.047 1.047 0 0 1-1.396-.107Z"
/>
</svg>
</button>
</div>
<div
class="toggle layout"
>
<input
aria-labelledby=":rk:"
name="layout"
type="radio"
value="spotlight"
/>
<svg
aria-hidden="true"
fill="currentColor"
height="24"
viewBox="0 0 24 24"
width="24"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M5 5h14v8h-5a1 1 0 0 0-1 1v5H5V5Zm10 14v-4h4v4h-4ZM5 21h14a2 2 0 0 0 2-2V5a2 2 0 0 0-2-2H5a2 2 0 0 0-2 2v14a2 2 0 0 0 2 2Z"
/>
</svg>
<input
aria-labelledby=":rp:"
checked=""
name="layout"
type="radio"
value="grid"
/>
<svg
aria-hidden="true"
fill="currentColor"
height="24"
viewBox="0 0 24 24"
width="24"
xmlns="http://www.w3.org/2000/svg"
>
<path
d="M4 11a.967.967 0 0 1-.712-.287A.968.968 0 0 1 3 10V4c0-.283.096-.52.288-.712A.968.968 0 0 1 4 3h6a.97.97 0 0 1 .713.288A.968.968 0 0 1 11 4v6c0 .283-.096.52-.287.713A.968.968 0 0 1 10 11H4Zm5-2V5H5v4h4Zm5 12a.968.968 0 0 1-.713-.288A.968.968 0 0 1 13 20v-6c0-.283.096-.52.287-.713A.968.968 0 0 1 14 13h6a.97.97 0 0 1 .712.287c.192.192.288.43.288.713v6c0 .283-.096.52-.288.712A.968.968 0 0 1 20 21h-6Zm5-2v-4h-4v4h4ZM4 21a.967.967 0 0 1-.712-.288A.968.968 0 0 1 3 20v-6a.97.97 0 0 1 .288-.713A.967.967 0 0 1 4 13h6c.283 0 .52.096.713.287.191.192.287.43.287.713v6a.97.97 0 0 1-.287.712A.968.968 0 0 1 10 21H4Zm5-2v-4H5v4h4Zm5-8a.968.968 0 0 1-.713-.287A.968.968 0 0 1 13 10V4a.97.97 0 0 1 .287-.712A.968.968 0 0 1 14 3h6c.283 0 .52.096.712.288A.965.965 0 0 1 21 4v6a.97.97 0 0 1-.288.713A.968.968 0 0 1 20 11h-6Zm5-2V5h-4v4h4Z"
/>
</svg>
</div>
</div>
</div>
</div>
`;

View File

@@ -15,8 +15,9 @@ import {
debugTileLayout as debugTileLayoutSetting,
showNonMemberTiles as showNonMemberTilesSetting,
showConnectionStats as showConnectionStatsSetting,
useNewMembershipManagerSetting,
useExperimentalToDeviceTransportSetting,
useNewMembershipManager as useNewMembershipManagerSetting,
useExperimentalToDeviceTransport as useExperimentalToDeviceTransportSetting,
muteAllAudio as muteAllAudioSetting,
} from "./settings";
import type { MatrixClient } from "matrix-js-sdk";
import type { Room as LivekitRoom } from "livekit-client";
@@ -49,6 +50,9 @@ export const DeveloperSettingsTab: FC<Props> = ({ client, livekitRoom }) => {
useExperimentalToDeviceTransport,
setUseExperimentalToDeviceTransport,
] = useSetting(useExperimentalToDeviceTransportSetting);
const [muteAllAudio, setMuteAllAudio] = useSetting(muteAllAudioSetting);
const urlParams = useUrlParams();
const sfuUrl = useMemo((): URL | null => {
@@ -175,6 +179,20 @@ export const DeveloperSettingsTab: FC<Props> = ({ client, livekitRoom }) => {
)}
/>
</FieldRow>
<FieldRow>
<InputField
id="muteAllAudio"
type="checkbox"
label={t("developer_mode.mute_all_audio")}
checked={muteAllAudio}
onChange={useCallback(
(event: ChangeEvent<HTMLInputElement>): void => {
setMuteAllAudio(event.target.checked);
},
[setMuteAllAudio],
)}
/>
</FieldRow>
{livekitRoom ? (
<>
<p>

View File

@@ -23,7 +23,7 @@ import {
import { widget } from "../widget";
import {
useSetting,
soundEffectVolumeSetting,
soundEffectVolume as soundEffectVolumeSetting,
backgroundBlur as backgroundBlurSetting,
developerMode,
} from "./settings";

View File

@@ -110,19 +110,21 @@ export const playReactionsSound = new Setting<boolean>(
true,
);
export const soundEffectVolumeSetting = new Setting<number>(
export const soundEffectVolume = new Setting<number>(
"sound-effect-volume",
0.5,
);
export const useNewMembershipManagerSetting = new Setting<boolean>(
export const useNewMembershipManager = new Setting<boolean>(
"new-membership-manager",
true,
);
export const useExperimentalToDeviceTransportSetting = new Setting<boolean>(
export const useExperimentalToDeviceTransport = new Setting<boolean>(
"experimental-to-device-transport",
true,
);
export const muteAllAudio = new Setting<boolean>("mute-all-audio", false);
export const alwaysShowSelf = new Setting<boolean>("always-show-self", true);

View File

@@ -12,7 +12,7 @@ import userEvent from "@testing-library/user-event";
import { deviceStub, MediaDevicesContext } from "./livekit/MediaDevicesContext";
import { useAudioContext } from "./useAudioContext";
import { soundEffectVolumeSetting } from "./settings/settings";
import { soundEffectVolume as soundEffectVolumeSetting } from "./settings/settings";
const staticSounds = Promise.resolve({
aSound: new ArrayBuffer(0),

View File

@@ -9,7 +9,7 @@ import { logger } from "matrix-js-sdk/lib/logger";
import { useState, useEffect } from "react";
import {
soundEffectVolumeSetting as effectSoundVolumeSetting,
soundEffectVolume as soundEffectVolumeSetting,
useSetting,
} from "./settings/settings";
import { useMediaDevices } from "./livekit/MediaDevicesContext";
@@ -47,6 +47,7 @@ interface Props<S extends string> {
*/
sounds: PrefetchedSounds<S> | null;
latencyHint: AudioContextLatencyCategory;
muted?: boolean;
}
interface UseAudioContext<S> {
@@ -62,7 +63,7 @@ interface UseAudioContext<S> {
export function useAudioContext<S extends string>(
props: Props<S>,
): UseAudioContext<S> | null {
const [effectSoundVolume] = useSetting(effectSoundVolumeSetting);
const [effectSoundVolume] = useSetting(soundEffectVolumeSetting);
const devices = useMediaDevices();
const [audioContext, setAudioContext] = useState<AudioContext>();
const [audioBuffers, setAudioBuffers] = useState<Record<S, AudioBuffer>>();
@@ -112,7 +113,7 @@ export function useAudioContext<S extends string>(
}, [audioContext, devices]);
// Don't return a function until we're ready.
if (!audioContext || !audioBuffers) {
if (!audioContext || !audioBuffers || props.muted) {
return null;
}
return {

View File

@@ -29,6 +29,10 @@ import {
type Room as LivekitRoom,
} from "livekit-client";
import { randomUUID } from "crypto";
import {
type RoomAndToDeviceEvents,
type RoomAndToDeviceEventsHandlerMap,
} from "matrix-js-sdk/lib/matrixrtc/RoomAndToDeviceKeyTransport";
import {
LocalUserMediaViewModel,
@@ -269,8 +273,8 @@ export function mockConfig(config: Partial<ResolvedConfigOptions> = {}): void {
}
export class MockRTCSession extends TypedEventEmitter<
MatrixRTCSessionEvent,
MatrixRTCSessionEventHandlerMap
MatrixRTCSessionEvent | RoomAndToDeviceEvents,
MatrixRTCSessionEventHandlerMap & RoomAndToDeviceEventsHandlerMap
> {
public readonly statistics = {
counters: {},

247
yarn.lock
View File

@@ -2527,12 +2527,12 @@ __metadata:
languageName: node
linkType: hard
"@livekit/protocol@npm:1.36.1, @livekit/protocol@npm:^1.33.0":
version: 1.36.1
resolution: "@livekit/protocol@npm:1.36.1"
"@livekit/protocol@npm:1.38.0, @livekit/protocol@npm:^1.38.0":
version: 1.38.0
resolution: "@livekit/protocol@npm:1.38.0"
dependencies:
"@bufbuild/protobuf": "npm:^1.10.0"
checksum: 10c0/bb2e56785c542446bef3e2f2fd20b33d01db43b786be87ccb834feee8a664fd32c8231e249b4e1915d7a8eda13af0d59eea479fa710327079a1a370daf05c42e
checksum: 10c0/ca64d4f984853054ff60574730b08a761afcd3bdc084e5218663e54b0e7f395aa2022d9d15d982fa094bbc0179cb19ef6a96ec74b1aa3265d118a85d1a4fde33
languageName: node
linkType: hard
@@ -4007,142 +4007,142 @@ __metadata:
languageName: node
linkType: hard
"@rollup/rollup-android-arm-eabi@npm:4.37.0":
version: 4.37.0
resolution: "@rollup/rollup-android-arm-eabi@npm:4.37.0"
"@rollup/rollup-android-arm-eabi@npm:4.40.2":
version: 4.40.2
resolution: "@rollup/rollup-android-arm-eabi@npm:4.40.2"
conditions: os=android & cpu=arm
languageName: node
linkType: hard
"@rollup/rollup-android-arm64@npm:4.37.0":
version: 4.37.0
resolution: "@rollup/rollup-android-arm64@npm:4.37.0"
"@rollup/rollup-android-arm64@npm:4.40.2":
version: 4.40.2
resolution: "@rollup/rollup-android-arm64@npm:4.40.2"
conditions: os=android & cpu=arm64
languageName: node
linkType: hard
"@rollup/rollup-darwin-arm64@npm:4.37.0":
version: 4.37.0
resolution: "@rollup/rollup-darwin-arm64@npm:4.37.0"
"@rollup/rollup-darwin-arm64@npm:4.40.2":
version: 4.40.2
resolution: "@rollup/rollup-darwin-arm64@npm:4.40.2"
conditions: os=darwin & cpu=arm64
languageName: node
linkType: hard
"@rollup/rollup-darwin-x64@npm:4.37.0":
version: 4.37.0
resolution: "@rollup/rollup-darwin-x64@npm:4.37.0"
"@rollup/rollup-darwin-x64@npm:4.40.2":
version: 4.40.2
resolution: "@rollup/rollup-darwin-x64@npm:4.40.2"
conditions: os=darwin & cpu=x64
languageName: node
linkType: hard
"@rollup/rollup-freebsd-arm64@npm:4.37.0":
version: 4.37.0
resolution: "@rollup/rollup-freebsd-arm64@npm:4.37.0"
"@rollup/rollup-freebsd-arm64@npm:4.40.2":
version: 4.40.2
resolution: "@rollup/rollup-freebsd-arm64@npm:4.40.2"
conditions: os=freebsd & cpu=arm64
languageName: node
linkType: hard
"@rollup/rollup-freebsd-x64@npm:4.37.0":
version: 4.37.0
resolution: "@rollup/rollup-freebsd-x64@npm:4.37.0"
"@rollup/rollup-freebsd-x64@npm:4.40.2":
version: 4.40.2
resolution: "@rollup/rollup-freebsd-x64@npm:4.40.2"
conditions: os=freebsd & cpu=x64
languageName: node
linkType: hard
"@rollup/rollup-linux-arm-gnueabihf@npm:4.37.0":
version: 4.37.0
resolution: "@rollup/rollup-linux-arm-gnueabihf@npm:4.37.0"
"@rollup/rollup-linux-arm-gnueabihf@npm:4.40.2":
version: 4.40.2
resolution: "@rollup/rollup-linux-arm-gnueabihf@npm:4.40.2"
conditions: os=linux & cpu=arm & libc=glibc
languageName: node
linkType: hard
"@rollup/rollup-linux-arm-musleabihf@npm:4.37.0":
version: 4.37.0
resolution: "@rollup/rollup-linux-arm-musleabihf@npm:4.37.0"
"@rollup/rollup-linux-arm-musleabihf@npm:4.40.2":
version: 4.40.2
resolution: "@rollup/rollup-linux-arm-musleabihf@npm:4.40.2"
conditions: os=linux & cpu=arm & libc=musl
languageName: node
linkType: hard
"@rollup/rollup-linux-arm64-gnu@npm:4.37.0":
version: 4.37.0
resolution: "@rollup/rollup-linux-arm64-gnu@npm:4.37.0"
"@rollup/rollup-linux-arm64-gnu@npm:4.40.2":
version: 4.40.2
resolution: "@rollup/rollup-linux-arm64-gnu@npm:4.40.2"
conditions: os=linux & cpu=arm64 & libc=glibc
languageName: node
linkType: hard
"@rollup/rollup-linux-arm64-musl@npm:4.37.0":
version: 4.37.0
resolution: "@rollup/rollup-linux-arm64-musl@npm:4.37.0"
"@rollup/rollup-linux-arm64-musl@npm:4.40.2":
version: 4.40.2
resolution: "@rollup/rollup-linux-arm64-musl@npm:4.40.2"
conditions: os=linux & cpu=arm64 & libc=musl
languageName: node
linkType: hard
"@rollup/rollup-linux-loongarch64-gnu@npm:4.37.0":
version: 4.37.0
resolution: "@rollup/rollup-linux-loongarch64-gnu@npm:4.37.0"
"@rollup/rollup-linux-loongarch64-gnu@npm:4.40.2":
version: 4.40.2
resolution: "@rollup/rollup-linux-loongarch64-gnu@npm:4.40.2"
conditions: os=linux & cpu=loong64 & libc=glibc
languageName: node
linkType: hard
"@rollup/rollup-linux-powerpc64le-gnu@npm:4.37.0":
version: 4.37.0
resolution: "@rollup/rollup-linux-powerpc64le-gnu@npm:4.37.0"
"@rollup/rollup-linux-powerpc64le-gnu@npm:4.40.2":
version: 4.40.2
resolution: "@rollup/rollup-linux-powerpc64le-gnu@npm:4.40.2"
conditions: os=linux & cpu=ppc64 & libc=glibc
languageName: node
linkType: hard
"@rollup/rollup-linux-riscv64-gnu@npm:4.37.0":
version: 4.37.0
resolution: "@rollup/rollup-linux-riscv64-gnu@npm:4.37.0"
"@rollup/rollup-linux-riscv64-gnu@npm:4.40.2":
version: 4.40.2
resolution: "@rollup/rollup-linux-riscv64-gnu@npm:4.40.2"
conditions: os=linux & cpu=riscv64 & libc=glibc
languageName: node
linkType: hard
"@rollup/rollup-linux-riscv64-musl@npm:4.37.0":
version: 4.37.0
resolution: "@rollup/rollup-linux-riscv64-musl@npm:4.37.0"
"@rollup/rollup-linux-riscv64-musl@npm:4.40.2":
version: 4.40.2
resolution: "@rollup/rollup-linux-riscv64-musl@npm:4.40.2"
conditions: os=linux & cpu=riscv64 & libc=musl
languageName: node
linkType: hard
"@rollup/rollup-linux-s390x-gnu@npm:4.37.0":
version: 4.37.0
resolution: "@rollup/rollup-linux-s390x-gnu@npm:4.37.0"
"@rollup/rollup-linux-s390x-gnu@npm:4.40.2":
version: 4.40.2
resolution: "@rollup/rollup-linux-s390x-gnu@npm:4.40.2"
conditions: os=linux & cpu=s390x & libc=glibc
languageName: node
linkType: hard
"@rollup/rollup-linux-x64-gnu@npm:4.37.0":
version: 4.37.0
resolution: "@rollup/rollup-linux-x64-gnu@npm:4.37.0"
"@rollup/rollup-linux-x64-gnu@npm:4.40.2":
version: 4.40.2
resolution: "@rollup/rollup-linux-x64-gnu@npm:4.40.2"
conditions: os=linux & cpu=x64 & libc=glibc
languageName: node
linkType: hard
"@rollup/rollup-linux-x64-musl@npm:4.37.0":
version: 4.37.0
resolution: "@rollup/rollup-linux-x64-musl@npm:4.37.0"
"@rollup/rollup-linux-x64-musl@npm:4.40.2":
version: 4.40.2
resolution: "@rollup/rollup-linux-x64-musl@npm:4.40.2"
conditions: os=linux & cpu=x64 & libc=musl
languageName: node
linkType: hard
"@rollup/rollup-win32-arm64-msvc@npm:4.37.0":
version: 4.37.0
resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.37.0"
"@rollup/rollup-win32-arm64-msvc@npm:4.40.2":
version: 4.40.2
resolution: "@rollup/rollup-win32-arm64-msvc@npm:4.40.2"
conditions: os=win32 & cpu=arm64
languageName: node
linkType: hard
"@rollup/rollup-win32-ia32-msvc@npm:4.37.0":
version: 4.37.0
resolution: "@rollup/rollup-win32-ia32-msvc@npm:4.37.0"
"@rollup/rollup-win32-ia32-msvc@npm:4.40.2":
version: 4.40.2
resolution: "@rollup/rollup-win32-ia32-msvc@npm:4.40.2"
conditions: os=win32 & cpu=ia32
languageName: node
linkType: hard
"@rollup/rollup-win32-x64-msvc@npm:4.37.0":
version: 4.37.0
resolution: "@rollup/rollup-win32-x64-msvc@npm:4.37.0"
"@rollup/rollup-win32-x64-msvc@npm:4.40.2":
version: 4.40.2
resolution: "@rollup/rollup-win32-x64-msvc@npm:4.40.2"
conditions: os=win32 & cpu=x64
languageName: node
linkType: hard
@@ -4621,7 +4621,30 @@ __metadata:
languageName: node
linkType: hard
"@types/estree@npm:1.0.6, @types/estree@npm:^1.0.0":
"@types/dom-mediacapture-transform@npm:^0.1.11":
version: 0.1.11
resolution: "@types/dom-mediacapture-transform@npm:0.1.11"
dependencies:
"@types/dom-webcodecs": "npm:*"
checksum: 10c0/19c76d54cf31aa2a925011fc5f973dff9a10bdecfdf2285e5e568e61850a0fa2b8c9f1807a1462cbefd57ec26d32eeaa9c359117aca9d9fe7f0d6f2fff33f51e
languageName: node
linkType: hard
"@types/dom-webcodecs@npm:*":
version: 0.1.15
resolution: "@types/dom-webcodecs@npm:0.1.15"
checksum: 10c0/1407f0352156c99c9b4378fb4c0c799b061520d031903a7f359ad09a6f706cc1fd56bafb272bb1a3decffcb32e54a51d2f07442eb72622464a950cff7f9e8862
languageName: node
linkType: hard
"@types/estree@npm:1.0.7":
version: 1.0.7
resolution: "@types/estree@npm:1.0.7"
checksum: 10c0/be815254316882f7c40847336cd484c3bc1c3e34f710d197160d455dc9d6d050ffbf4c3bc76585dba86f737f020ab20bdb137ebe0e9116b0c86c7c0342221b8c
languageName: node
linkType: hard
"@types/estree@npm:^1.0.0":
version: 1.0.6
resolution: "@types/estree@npm:1.0.6"
checksum: 10c0/cdfd751f6f9065442cd40957c07fd80361c962869aa853c1c2fd03e101af8b9389d8ff4955a43a6fcfa223dd387a089937f95be0f3eec21ca527039fd2d9859a
@@ -6921,7 +6944,7 @@ __metadata:
"@formatjs/intl-segmenter": "npm:^11.7.3"
"@livekit/components-core": "npm:^0.12.0"
"@livekit/components-react": "npm:^2.0.0"
"@livekit/protocol": "npm:^1.33.0"
"@livekit/protocol": "npm:^1.38.0"
"@livekit/track-processors": "npm:^0.5.5"
"@mediapipe/tasks-vision": "npm:^0.10.18"
"@opentelemetry/api": "npm:^1.4.0"
@@ -6944,6 +6967,7 @@ __metadata:
"@testing-library/react": "npm:^16.0.0"
"@testing-library/user-event": "npm:^14.5.1"
"@types/content-type": "npm:^1.1.5"
"@types/dom-mediacapture-transform": "npm:^0.1.11"
"@types/grecaptcha": "npm:^3.0.9"
"@types/jsdom": "npm:^21.1.7"
"@types/lodash-es": "npm:^4.17.12"
@@ -7832,6 +7856,18 @@ __metadata:
languageName: node
linkType: hard
"fdir@npm:^6.4.4":
version: 6.4.4
resolution: "fdir@npm:6.4.4"
peerDependencies:
picomatch: ^3 || ^4
peerDependenciesMeta:
picomatch:
optional: true
checksum: 10c0/6ccc33be16945ee7bc841e1b4178c0b4cf18d3804894cb482aa514651c962a162f96da7ffc6ebfaf0df311689fb70091b04dd6caffe28d56b9ebdc0e7ccadfdd
languageName: node
linkType: hard
"fflate@npm:^0.4.8":
version: 0.4.8
resolution: "fflate@npm:0.4.8"
@@ -9351,11 +9387,11 @@ __metadata:
linkType: hard
"livekit-client@npm:^2.11.3":
version: 2.11.3
resolution: "livekit-client@npm:2.11.3"
version: 2.12.0
resolution: "livekit-client@npm:2.12.0"
dependencies:
"@livekit/mutex": "npm:1.1.1"
"@livekit/protocol": "npm:1.36.1"
"@livekit/protocol": "npm:1.38.0"
events: "npm:^3.3.0"
loglevel: "npm:^1.9.2"
sdp-transform: "npm:^2.15.0"
@@ -9363,7 +9399,7 @@ __metadata:
tslib: "npm:2.8.1"
typed-emitter: "npm:^2.1.0"
webrtc-adapter: "npm:^9.0.1"
checksum: 10c0/d56444f31c107b46ccd5532038ac77bd21038042910619008267c17894f1d3f054262ae2354d89df6fe0ba325aba01909b0612ad4c290906487c40d91641f6e4
checksum: 10c0/8a4657aa6c0f0bc5d1fe77c2cd9603a3b07d4acefa634f1c5151190eed69711e7e599dd09c07915939a418dc8770d87e3529ecf1b029f2a9af7f2172d83acb1c
languageName: node
linkType: hard
@@ -11550,31 +11586,31 @@ __metadata:
languageName: node
linkType: hard
"rollup@npm:^4.30.1":
version: 4.37.0
resolution: "rollup@npm:4.37.0"
"rollup@npm:^4.34.9":
version: 4.40.2
resolution: "rollup@npm:4.40.2"
dependencies:
"@rollup/rollup-android-arm-eabi": "npm:4.37.0"
"@rollup/rollup-android-arm64": "npm:4.37.0"
"@rollup/rollup-darwin-arm64": "npm:4.37.0"
"@rollup/rollup-darwin-x64": "npm:4.37.0"
"@rollup/rollup-freebsd-arm64": "npm:4.37.0"
"@rollup/rollup-freebsd-x64": "npm:4.37.0"
"@rollup/rollup-linux-arm-gnueabihf": "npm:4.37.0"
"@rollup/rollup-linux-arm-musleabihf": "npm:4.37.0"
"@rollup/rollup-linux-arm64-gnu": "npm:4.37.0"
"@rollup/rollup-linux-arm64-musl": "npm:4.37.0"
"@rollup/rollup-linux-loongarch64-gnu": "npm:4.37.0"
"@rollup/rollup-linux-powerpc64le-gnu": "npm:4.37.0"
"@rollup/rollup-linux-riscv64-gnu": "npm:4.37.0"
"@rollup/rollup-linux-riscv64-musl": "npm:4.37.0"
"@rollup/rollup-linux-s390x-gnu": "npm:4.37.0"
"@rollup/rollup-linux-x64-gnu": "npm:4.37.0"
"@rollup/rollup-linux-x64-musl": "npm:4.37.0"
"@rollup/rollup-win32-arm64-msvc": "npm:4.37.0"
"@rollup/rollup-win32-ia32-msvc": "npm:4.37.0"
"@rollup/rollup-win32-x64-msvc": "npm:4.37.0"
"@types/estree": "npm:1.0.6"
"@rollup/rollup-android-arm-eabi": "npm:4.40.2"
"@rollup/rollup-android-arm64": "npm:4.40.2"
"@rollup/rollup-darwin-arm64": "npm:4.40.2"
"@rollup/rollup-darwin-x64": "npm:4.40.2"
"@rollup/rollup-freebsd-arm64": "npm:4.40.2"
"@rollup/rollup-freebsd-x64": "npm:4.40.2"
"@rollup/rollup-linux-arm-gnueabihf": "npm:4.40.2"
"@rollup/rollup-linux-arm-musleabihf": "npm:4.40.2"
"@rollup/rollup-linux-arm64-gnu": "npm:4.40.2"
"@rollup/rollup-linux-arm64-musl": "npm:4.40.2"
"@rollup/rollup-linux-loongarch64-gnu": "npm:4.40.2"
"@rollup/rollup-linux-powerpc64le-gnu": "npm:4.40.2"
"@rollup/rollup-linux-riscv64-gnu": "npm:4.40.2"
"@rollup/rollup-linux-riscv64-musl": "npm:4.40.2"
"@rollup/rollup-linux-s390x-gnu": "npm:4.40.2"
"@rollup/rollup-linux-x64-gnu": "npm:4.40.2"
"@rollup/rollup-linux-x64-musl": "npm:4.40.2"
"@rollup/rollup-win32-arm64-msvc": "npm:4.40.2"
"@rollup/rollup-win32-ia32-msvc": "npm:4.40.2"
"@rollup/rollup-win32-x64-msvc": "npm:4.40.2"
"@types/estree": "npm:1.0.7"
fsevents: "npm:~2.3.2"
dependenciesMeta:
"@rollup/rollup-android-arm-eabi":
@@ -11621,7 +11657,7 @@ __metadata:
optional: true
bin:
rollup: dist/bin/rollup
checksum: 10c0/2e00382e08938636edfe0a7547ea2eaa027205dc0b6ff85d8b82be0fbe55a4ef88a1995fee2a5059e33dbccf12d1376c236825353afb89c96298cc95c5160a46
checksum: 10c0/cbe9b766891da74fbf7c3b50420bb75102e5c59afc0ea45751f7e43a581d2cd93367763f521f820b72e341cf1f6b9951fbdcd3be67a1b0aa774b754525a8b9c7
languageName: node
linkType: hard
@@ -12454,6 +12490,16 @@ __metadata:
languageName: node
linkType: hard
"tinyglobby@npm:^0.2.13":
version: 0.2.13
resolution: "tinyglobby@npm:0.2.13"
dependencies:
fdir: "npm:^6.4.4"
picomatch: "npm:^4.0.2"
checksum: 10c0/ef07dfaa7b26936601d3f6d999f7928a4d1c6234c5eb36896bb88681947c0d459b7ebe797022400e555fe4b894db06e922b95d0ce60cb05fd827a0a66326b18c
languageName: node
linkType: hard
"tinypool@npm:^1.0.2":
version: 1.0.2
resolution: "tinypool@npm:1.0.2"
@@ -13162,13 +13208,16 @@ __metadata:
linkType: hard
"vite@npm:^5.0.0 || ^6.0.0, vite@npm:^6.0.0":
version: 6.2.6
resolution: "vite@npm:6.2.6"
version: 6.3.5
resolution: "vite@npm:6.3.5"
dependencies:
esbuild: "npm:^0.25.0"
fdir: "npm:^6.4.4"
fsevents: "npm:~2.3.3"
picomatch: "npm:^4.0.2"
postcss: "npm:^8.5.3"
rollup: "npm:^4.30.1"
rollup: "npm:^4.34.9"
tinyglobby: "npm:^0.2.13"
peerDependencies:
"@types/node": ^18.0.0 || ^20.0.0 || >=22.0.0
jiti: ">=1.21.0"
@@ -13209,7 +13258,7 @@ __metadata:
optional: true
bin:
vite: bin/vite.js
checksum: 10c0/68a2ed3e61bdd654c59b817b4f3203065241c66d1739faa707499130f3007bc3a666c7a8320a4198e275e62b5e4d34d9b78a6533f69e321d366e76f5093b2071
checksum: 10c0/df70201659085133abffc6b88dcdb8a57ef35f742a01311fc56a4cfcda6a404202860729cc65a2c401a724f6e25f9ab40ce4339ed4946f550541531ced6fe41c
languageName: node
linkType: hard