Merge branch 'livekit' into fkwp/setup_zimor

This commit is contained in:
fkwp
2026-03-11 13:09:59 +01:00
26 changed files with 1035 additions and 220 deletions

39
.github/PULL_REQUEST_TEMPLATE.md vendored Normal file
View File

@@ -0,0 +1,39 @@
<!-- Thanks for submitting a PR! Please ensure the following requirements are met in order for us to review your PR -->
## Content
<!-- Describe shortly what has been changed -->
## Motivation and context
<!-- Provide link to the corresponding issue if applicable or explain the context -->
## Screenshots / GIFs
<!--
You can use a table like this to show screenshots comparison.
Uncomment this markdown table below and edit the last line `|||`:
|copy screenshot of before here|copy screenshot of after here|
|Before|After|
|-|-|
|||
-->
## Tests
<!-- Explain how you tested your development -->
- Step 1
- Step 2
- Step ...
-
## Checklist
- [ ] I have read through [CONTRIBUTING.md](https://github.com/element-hq/element-call/blob/livekit/CONTRIBUTING.md).
- [ ] Pull request includes screenshots or videos if containing UI changes
- [ ] Tests written for new code (and old code if feasible).
- [ ] Linter and other CI checks pass.
- [ ] I have licensed the changes to Element by completing the [Contributor License Agreement (CLA)](https://cla-assistant.io/element-hq/element-call)

View File

@@ -51,6 +51,7 @@ jobs:
packages: write
id-token: write
uses: ./.github/workflows/build-and-publish-docker.yaml
secrets: inherit
with:
artifact_run_id: ${{ github.run_id }}
docker_tags: |

View File

@@ -62,6 +62,7 @@ jobs:
packages: write
id-token: write
uses: ./.github/workflows/build-and-publish-docker.yaml
secrets: inherit
with:
artifact_run_id: ${{ github.event.workflow_run.id || github.run_id }}
docker_tags: |

View File

@@ -57,6 +57,7 @@ jobs:
packages: write
id-token: write
uses: ./.github/workflows/build-and-publish-docker.yaml
secrets: inherit
with:
artifact_run_id: ${{ github.event.workflow_run.id || github.run_id }}
docker_tags: |

View File

@@ -34,6 +34,12 @@ export default {
// then Knip will flag it as a false positive
// https://github.com/webpro-nl/knip/issues/766
"@vector-im/compound-web",
// Yarn plugins are allowed to depend on packages provided by the Yarn
// runtime. These shouldn't be listed in package.json, because plugins
// should work before Yarn even installs dependencies for the first time.
// https://yarnpkg.com/advanced/plugin-tutorial#what-does-a-plugin-look-like
"@yarnpkg/core",
"@yarnpkg/parsers",
"matrix-widget-api",
],
ignoreExportsUsedInFile: true,

View File

@@ -250,11 +250,11 @@
"video_tile": {
"always_show": "Always show",
"camera_starting": "Video loading...",
"change_fit_contain": "Fit to frame",
"collapse": "Collapse",
"expand": "Expand",
"mute_for_me": "Mute for me",
"muted_for_me": "Muted for me",
"screen_share_volume": "Screen share volume",
"volume": "Volume",
"waiting_for_media": "Waiting for media..."
}

View File

@@ -48,7 +48,7 @@
"@livekit/components-core": "^0.12.0",
"@livekit/components-react": "^2.0.0",
"@livekit/protocol": "^1.42.2",
"@livekit/track-processors": "^0.6.0 || ^0.7.1",
"@livekit/track-processors": "^0.7.1",
"@mediapipe/tasks-vision": "^0.10.18",
"@playwright/test": "^1.57.0",
"@radix-ui/react-dialog": "^1.0.4",
@@ -101,7 +101,7 @@
"i18next-browser-languagedetector": "^8.0.0",
"i18next-parser": "^9.1.0",
"jsdom": "^26.0.0",
"knip": "5.82.1",
"knip": "^5.86.0",
"livekit-client": "^2.13.0",
"lodash-es": "^4.17.21",
"loglevel": "^1.9.1",

View File

@@ -60,7 +60,7 @@ widgetTest("Create and join a group call", async ({ addUser, browserName }) => {
// The only way to know if it is muted or not is to look at the data-kind attribute..
const videoButton = frame.getByTestId("incall_videomute");
await expect(videoButton).toBeVisible();
// video should be off by default in a voice call
// video should be on
await expect(videoButton).toHaveAttribute("aria-label", /^Stop video$/);
}

View File

@@ -0,0 +1,74 @@
/*
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { expect, test } from "@playwright/test";
import { widgetTest } from "../fixtures/widget-user.ts";
import { HOST1, TestHelpers } from "./test-helpers.ts";
widgetTest("Put call in PIP", async ({ addUser, browserName }) => {
test.skip(
browserName === "firefox",
"The is test is not working on firefox CI environment. No mic/audio device inputs so cam/mic are disabled",
);
test.slow();
const valere = await addUser("Valere", HOST1);
const timo = await addUser("Timo", HOST1);
const callRoom = "TeamRoom";
await TestHelpers.createRoom(callRoom, valere.page, [timo.mxId]);
await TestHelpers.createRoom("DoubleTask", valere.page);
await TestHelpers.acceptRoomInvite(callRoom, timo.page);
await TestHelpers.switchToRoomNamed(valere.page, callRoom);
// Start the call as Valere
await TestHelpers.startCallInCurrentRoom(valere.page, false);
await expect(
valere.page.locator('iframe[title="Element Call"]'),
).toBeVisible();
await TestHelpers.joinCallFromLobby(valere.page);
await TestHelpers.joinCallInCurrentRoom(timo.page);
{
const frame = timo.page
.locator('iframe[title="Element Call"]')
.contentFrame();
const videoButton = frame.getByTestId("incall_videomute");
await expect(videoButton).toBeVisible();
// check that the video is on
await expect(videoButton).toHaveAttribute("aria-label", /^Stop video$/);
}
// Switch to the other room, the call should go to PIP
await TestHelpers.switchToRoomNamed(valere.page, "DoubleTask");
// We should see the PIP overlay
await expect(valere.page.locator(".mx_WidgetPip_overlay")).toBeVisible();
{
// wait a bit so that the PIP has rendered the video
await valere.page.waitForTimeout(600);
// Check for a bug where the video had the wrong fit in PIP
const frame = valere.page
.locator('iframe[title="Element Call"]')
.contentFrame();
const videoElements = await frame.locator("video").all();
expect(videoElements.length).toBe(1);
const pipVideo = videoElements[0];
await expect(pipVideo).toHaveCSS("object-fit", "cover");
}
});

View File

@@ -276,4 +276,16 @@ export class TestHelpers {
});
}
}
/**
* Switches to a room in the room list by its name.
* @param page - The EW page
* @param roomName - The name of the room to switch to
*/
public static async switchToRoomNamed(
page: Page,
roomName: string,
): Promise<void> {
await page.getByRole("option", { name: `Open room ${roomName}` }).click();
}
}

View File

@@ -27,7 +27,13 @@ interface Props<M, R extends HTMLElement> {
state: Parameters<Handler<"drag", EventTypes["drag"]>>[0],
) => void
> | null;
/**
* The width this tile will have once its animations have settled.
*/
targetWidth: number;
/**
 * The height this tile will have once its animations have settled.
 */
targetHeight: number;
model: M;
Tile: ComponentType<TileProps<M, R>>;

View File

@@ -65,6 +65,7 @@ Please see LICENSE in the repository root for full details.
.footer.overlay.hidden {
display: grid;
opacity: 0;
pointer-events: none;
}
.footer.overlay:has(:focus-visible) {

View File

@@ -9,8 +9,8 @@ import { IconButton, Text, Tooltip } from "@vector-im/compound-web";
import { type MatrixClient, type Room as MatrixRoom } from "matrix-js-sdk";
import {
type FC,
type PointerEvent,
type TouchEvent,
type MouseEvent as ReactMouseEvent,
type PointerEvent as ReactPointerEvent,
useCallback,
useEffect,
useMemo,
@@ -110,8 +110,6 @@ import { ObservableScope } from "../state/ObservableScope.ts";
const logger = rootLogger.getChild("[InCallView]");
const maxTapDurationMs = 400;
export interface ActiveCallProps extends Omit<
InCallViewProps,
"vm" | "livekitRoom" | "connState"
@@ -334,40 +332,20 @@ export const InCallView: FC<InCallViewProps> = ({
) : null;
}, [ringOverlay]);
// Ideally we could detect taps by listening for click events and checking
// that the pointerType of the event is "touch", but this isn't yet supported
// in Safari: https://developer.mozilla.org/en-US/docs/Web/API/Element/click_event#browser_compatibility
// Instead we have to watch for sufficiently fast touch events.
const touchStart = useRef<number | null>(null);
const onTouchStart = useCallback(() => (touchStart.current = Date.now()), []);
const onTouchEnd = useCallback(() => {
const start = touchStart.current;
if (start !== null && Date.now() - start <= maxTapDurationMs)
vm.tapScreen();
touchStart.current = null;
}, [vm]);
const onTouchCancel = useCallback(() => (touchStart.current = null), []);
// We also need to tell the footer controls to prevent touch events from
// bubbling up, or else the footer will be dismissed before a click/change
// event can be registered on the control
const onControlsTouchEnd = useCallback(
(e: TouchEvent) => {
// Somehow applying pointer-events: none to the controls when the footer
// is hidden is not enough to stop clicks from happening as the footer
// becomes visible, so we check manually whether the footer is shown
if (showFooter) {
e.stopPropagation();
vm.tapControls();
} else {
e.preventDefault();
}
const onViewClick = useCallback(
(e: ReactMouseEvent) => {
if (
(e.nativeEvent as PointerEvent).pointerType === "touch" &&
// If an interactive element was tapped, don't count this as a tap on the screen
(e.target as Element).closest?.("button, input") === null
)
vm.tapScreen();
},
[vm, showFooter],
[vm],
);
const onPointerMove = useCallback(
(e: PointerEvent) => {
(e: ReactPointerEvent) => {
if (e.pointerType === "mouse") vm.hoverScreen();
},
[vm],
@@ -606,8 +584,8 @@ export const InCallView: FC<InCallViewProps> = ({
vm={layout.spotlight}
expanded
onToggleExpanded={null}
targetWidth={gridBounds.height}
targetHeight={gridBounds.width}
targetWidth={gridBounds.width}
targetHeight={gridBounds.height}
showIndicators={false}
focusable={!contentObscured}
aria-hidden={contentObscured}
@@ -667,7 +645,6 @@ export const InCallView: FC<InCallViewProps> = ({
key="audio"
muted={!audioEnabled}
onClick={toggleAudio ?? undefined}
onTouchEnd={onControlsTouchEnd}
disabled={toggleAudio === null}
data-testid="incall_mute"
/>,
@@ -675,7 +652,6 @@ export const InCallView: FC<InCallViewProps> = ({
key="video"
muted={!videoEnabled}
onClick={toggleVideo ?? undefined}
onTouchEnd={onControlsTouchEnd}
disabled={toggleVideo === null}
data-testid="incall_videomute"
/>,
@@ -687,7 +663,6 @@ export const InCallView: FC<InCallViewProps> = ({
className={styles.shareScreen}
enabled={sharingScreen}
onClick={vm.toggleScreenSharing}
onTouchEnd={onControlsTouchEnd}
data-testid="incall_screenshare"
/>,
);
@@ -699,18 +674,11 @@ export const InCallView: FC<InCallViewProps> = ({
key="raise_hand"
className={styles.raiseHand}
identifier={`${client.getUserId()}:${client.getDeviceId()}`}
onTouchEnd={onControlsTouchEnd}
/>,
);
}
if (layout.type !== "pip")
buttons.push(
<SettingsButton
key="settings"
onClick={openSettings}
onTouchEnd={onControlsTouchEnd}
/>,
);
buttons.push(<SettingsButton key="settings" onClick={openSettings} />);
buttons.push(
<EndCallButton
@@ -718,7 +686,6 @@ export const InCallView: FC<InCallViewProps> = ({
onClick={function (): void {
vm.hangup();
}}
onTouchEnd={onControlsTouchEnd}
data-testid="incall_leave"
/>,
);
@@ -751,7 +718,6 @@ export const InCallView: FC<InCallViewProps> = ({
className={styles.layout}
layout={gridMode}
setLayout={setGridMode}
onTouchEnd={onControlsTouchEnd}
/>
)}
</div>
@@ -760,12 +726,13 @@ export const InCallView: FC<InCallViewProps> = ({
const allConnections = useBehavior(vm.allConnections$);
return (
// The onClick handler here exists to control the visibility of the footer,
// and the footer is also viewable by moving focus into it, so this is fine.
// eslint-disable-next-line jsx-a11y/no-static-element-interactions, jsx-a11y/click-events-have-key-events
<div
className={styles.inRoom}
ref={containerRef}
onTouchStart={onTouchStart}
onTouchEnd={onTouchEnd}
onTouchCancel={onTouchCancel}
onClick={onViewClick}
onPointerMove={onPointerMove}
onPointerOut={onPointerOut}
>

View File

@@ -5,7 +5,7 @@ SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { type ChangeEvent, type FC, type TouchEvent, useCallback } from "react";
import { type ChangeEvent, type FC, useCallback } from "react";
import { useTranslation } from "react-i18next";
import { Tooltip } from "@vector-im/compound-web";
import {
@@ -22,15 +22,9 @@ interface Props {
layout: Layout;
setLayout: (layout: Layout) => void;
className?: string;
onTouchEnd?: (e: TouchEvent) => void;
}
export const LayoutToggle: FC<Props> = ({
layout,
setLayout,
className,
onTouchEnd,
}) => {
export const LayoutToggle: FC<Props> = ({ layout, setLayout, className }) => {
const { t } = useTranslation();
const onChange = useCallback(
@@ -47,7 +41,6 @@ export const LayoutToggle: FC<Props> = ({
value="spotlight"
checked={layout === "spotlight"}
onChange={onChange}
onTouchEnd={onTouchEnd}
/>
</Tooltip>
<SpotlightIcon aria-hidden width={24} height={24} />
@@ -58,7 +51,6 @@ export const LayoutToggle: FC<Props> = ({
value="grid"
checked={layout === "grid"}
onChange={onChange}
onTouchEnd={onTouchEnd}
/>
</Tooltip>
<GridIcon aria-hidden width={24} height={24} />

View File

@@ -9,6 +9,7 @@ import { expect, onTestFinished, test, vi } from "vitest";
import {
type LocalTrackPublication,
LocalVideoTrack,
Track,
TrackEvent,
} from "livekit-client";
import { waitFor } from "@testing-library/dom";
@@ -21,6 +22,7 @@ import {
mockRemoteMedia,
withTestScheduler,
mockRemoteParticipant,
mockRemoteScreenShare,
} from "../../utils/test";
import { constant } from "../Behavior";
@@ -91,17 +93,69 @@ test("control a participant's volume", () => {
});
});
test("toggle fit/contain for a participant's video", () => {
const vm = mockRemoteMedia(rtcMembership, {}, mockRemoteParticipant({}));
test("control a participant's screen share volume", () => {
const setVolumeSpy = vi.fn();
const vm = mockRemoteScreenShare(
rtcMembership,
{},
mockRemoteParticipant({ setVolume: setVolumeSpy }),
);
withTestScheduler(({ expectObservable, schedule }) => {
schedule("-ab|", {
a: () => vm.toggleCropVideo(),
b: () => vm.toggleCropVideo(),
schedule("-ab---c---d|", {
a() {
// Try muting by toggling
vm.togglePlaybackMuted();
expect(setVolumeSpy).toHaveBeenLastCalledWith(
0,
Track.Source.ScreenShareAudio,
);
},
b() {
// Try unmuting by dragging the slider back up
vm.adjustPlaybackVolume(0.6);
vm.adjustPlaybackVolume(0.8);
vm.commitPlaybackVolume();
expect(setVolumeSpy).toHaveBeenCalledWith(
0.6,
Track.Source.ScreenShareAudio,
);
expect(setVolumeSpy).toHaveBeenLastCalledWith(
0.8,
Track.Source.ScreenShareAudio,
);
},
c() {
// Try muting by dragging the slider back down
vm.adjustPlaybackVolume(0.2);
vm.adjustPlaybackVolume(0);
vm.commitPlaybackVolume();
expect(setVolumeSpy).toHaveBeenCalledWith(
0.2,
Track.Source.ScreenShareAudio,
);
expect(setVolumeSpy).toHaveBeenLastCalledWith(
0,
Track.Source.ScreenShareAudio,
);
},
d() {
// Try unmuting by toggling
vm.togglePlaybackMuted();
// The volume should return to the last non-zero committed volume
expect(setVolumeSpy).toHaveBeenLastCalledWith(
0.8,
Track.Source.ScreenShareAudio,
);
},
});
expectObservable(vm.cropVideo$).toBe("abc", {
a: true,
b: false,
c: true,
expectObservable(vm.playbackVolume$).toBe("ab(cd)(ef)g", {
a: 1,
b: 0,
c: 0.6,
d: 0.8,
e: 0.2,
f: 0,
g: 0.8,
});
});
});

View File

@@ -6,8 +6,8 @@ SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { type RemoteParticipant } from "livekit-client";
import { map } from "rxjs";
import { Track, type RemoteParticipant } from "livekit-client";
import { map, of, switchMap } from "rxjs";
import { type Behavior } from "../Behavior";
import {
@@ -16,13 +16,20 @@ import {
createBaseScreenShare,
} from "./ScreenShareViewModel";
import { type ObservableScope } from "../ObservableScope";
import { createVolumeControls, type VolumeControls } from "../VolumeControls";
import { observeTrackReference$ } from "../observeTrackReference";
export interface RemoteScreenShareViewModel extends BaseScreenShareViewModel {
export interface RemoteScreenShareViewModel
extends BaseScreenShareViewModel, VolumeControls {
local: false;
/**
* Whether this screen share's video should be displayed.
*/
videoEnabled$: Behavior<boolean>;
/**
* Whether this screen share should be considered to have an audio track.
*/
audioEnabled$: Behavior<boolean>;
}
export interface RemoteScreenShareInputs extends BaseScreenShareInputs {
@@ -36,9 +43,30 @@ export function createRemoteScreenShare(
): RemoteScreenShareViewModel {
return {
...createBaseScreenShare(scope, inputs),
...createVolumeControls(scope, {
pretendToBeDisconnected$,
sink$: scope.behavior(
inputs.participant$.pipe(
map(
(p) => (volume) =>
p?.setVolume(volume, Track.Source.ScreenShareAudio),
),
),
),
}),
local: false,
videoEnabled$: scope.behavior(
pretendToBeDisconnected$.pipe(map((disconnected) => !disconnected)),
),
audioEnabled$: scope.behavior(
inputs.participant$.pipe(
switchMap((p) =>
p
? observeTrackReference$(p, Track.Source.ScreenShareAudio)
: of(null),
),
map(Boolean),
),
),
};
}

View File

@@ -7,6 +7,7 @@ Please see LICENSE in the repository root for full details.
*/
import {
BehaviorSubject,
combineLatest,
map,
type Observable,
@@ -30,9 +31,9 @@ import {
} from "./MemberMediaViewModel";
import { type RemoteUserMediaViewModel } from "./RemoteUserMediaViewModel";
import { type ObservableScope } from "../ObservableScope";
import { createToggle$ } from "../../utils/observable";
import { showConnectionStats } from "../../settings/settings";
import { observeRtpStreamStats$ } from "./observeRtpStreamStats";
import { videoFit$, videoSizeFromParticipant$ } from "../../utils/videoFit.ts";
/**
* A participant's user media (i.e. their microphone and camera feed).
@@ -46,7 +47,7 @@ export interface BaseUserMediaViewModel extends MemberMediaViewModel {
speaking$: Behavior<boolean>;
audioEnabled$: Behavior<boolean>;
videoEnabled$: Behavior<boolean>;
cropVideo$: Behavior<boolean>;
videoFit$: Behavior<"cover" | "contain">;
toggleCropVideo: () => void;
/**
* The expected identity of the LiveKit participant. Exposed for debugging.
@@ -60,6 +61,13 @@ export interface BaseUserMediaViewModel extends MemberMediaViewModel {
videoStreamStats$: Observable<
RTCInboundRtpStreamStats | RTCOutboundRtpStreamStats | undefined
>;
/**
* Set the target dimensions of the HTML element (its final size once animations have settled).
* This can be used to determine the best video fit (fit to frame / keep ratio).
* @param targetWidth - The target width of the HTML element displaying the video.
* @param targetHeight - The target height of the HTML element displaying the video.
*/
setTargetDimensions: (targetWidth: number, targetHeight: number) => void;
}
export interface BaseUserMediaInputs extends Omit<
@@ -90,6 +98,12 @@ export function createBaseUserMedia(
);
const toggleCropVideo$ = new Subject<void>();
// The target size of the video element, used to determine the best video fit.
// The target size is the final size of the HTML element after any animations have completed.
const targetSize$ = new BehaviorSubject<
{ width: number; height: number } | undefined
>(undefined);
return {
...createMemberMedia(scope, {
...inputs,
@@ -115,7 +129,11 @@ export function createBaseUserMedia(
videoEnabled$: scope.behavior(
media$.pipe(map((m) => m?.cameraTrack?.isMuted === false)),
),
cropVideo$: createToggle$(scope, true, toggleCropVideo$),
videoFit$: videoFit$(
scope,
videoSizeFromParticipant$(participant$),
targetSize$,
),
toggleCropVideo: () => toggleCropVideo$.next(),
rtcBackendIdentity,
handRaised$,
@@ -139,5 +157,8 @@ export function createBaseUserMedia(
return observeRtpStreamStats$(p, Track.Source.Camera, statsType);
}),
),
setTargetDimensions: (targetWidth: number, targetHeight: number): void => {
targetSize$.next({ width: targetWidth, height: targetHeight });
},
};
}

View File

@@ -67,3 +67,12 @@ export function observeInboundRtpStreamStats$(
map((x) => x as RTCInboundRtpStreamStats | undefined),
);
}
/**
 * Observes the outbound-rtp stream stats for a participant's track of the
 * given source.
 */
export function observeOutboundRtpStreamStats$(
  participant: Participant,
  source: Track.Source,
): Observable<RTCOutboundRtpStreamStats | undefined> {
  const stats$ = observeRtpStreamStats$(participant, source, "outbound-rtp");
  return stats$.pipe(
    map((stats) => stats as RTCOutboundRtpStreamStats | undefined),
  );
}

View File

@@ -11,6 +11,7 @@ import {
type ReactNode,
type Ref,
useCallback,
useEffect,
useRef,
useState,
} from "react";
@@ -26,7 +27,6 @@ import {
VolumeOffIcon,
VisibilityOnIcon,
UserProfileIcon,
ExpandIcon,
VolumeOffSolidIcon,
SwitchCameraSolidIcon,
} from "@vector-im/compound-design-tokens/assets/web/icons";
@@ -87,6 +87,8 @@ const UserMediaTile: FC<UserMediaTileProps> = ({
displayName,
mxcAvatarUrl,
focusable,
targetWidth,
targetHeight,
...props
}) => {
const { toggleRaisedHand } = useReactionsSender();
@@ -103,18 +105,19 @@ const UserMediaTile: FC<UserMediaTileProps> = ({
const audioEnabled = useBehavior(vm.audioEnabled$);
const videoEnabled = useBehavior(vm.videoEnabled$);
const speaking = useBehavior(vm.speaking$);
const cropVideo = useBehavior(vm.cropVideo$);
const onSelectFitContain = useCallback(
(e: Event) => {
e.preventDefault();
vm.toggleCropVideo();
},
[vm],
);
const videoFit = useBehavior(vm.videoFit$);
const rtcBackendIdentity = vm.rtcBackendIdentity;
const handRaised = useBehavior(vm.handRaised$);
const reaction = useBehavior(vm.reaction$);
// Whenever bounds change, inform the viewModel
useEffect(() => {
if (targetWidth > 0 && targetHeight > 0) {
vm.setTargetDimensions(targetWidth, targetHeight);
}
}, [targetWidth, targetHeight, vm]);
const AudioIcon = playbackMuted
? VolumeOffSolidIcon
: audioEnabled
@@ -130,12 +133,10 @@ const UserMediaTile: FC<UserMediaTileProps> = ({
const menu = (
<>
{menuStart}
<ToggleMenuItem
Icon={ExpandIcon}
label={t("video_tile.change_fit_contain")}
checked={cropVideo}
onSelect={onSelectFitContain}
/>
{/*
No additional menu item (this used to be the manual fit-to-frame toggle).
Placeholder for future menu items that should be placed here.
*/}
{menuEnd}
</>
);
@@ -154,7 +155,7 @@ const UserMediaTile: FC<UserMediaTileProps> = ({
unencryptedWarning={unencryptedWarning}
encryptionStatus={encryptionStatus}
videoEnabled={videoEnabled}
videoFit={cropVideo ? "cover" : "contain"}
videoFit={videoFit}
className={classNames(className, styles.tile, {
[styles.speaking]: showSpeaking,
[styles.handRaised]: !showSpeaking && handRaised,
@@ -200,6 +201,8 @@ const UserMediaTile: FC<UserMediaTileProps> = ({
audioStreamStats={audioStreamStats}
videoStreamStats={videoStreamStats}
rtcBackendIdentity={rtcBackendIdentity}
targetWidth={targetWidth}
targetHeight={targetHeight}
{...props}
/>
);

View File

@@ -84,7 +84,6 @@ Please see LICENSE in the repository root for full details.
.expand {
appearance: none;
cursor: pointer;
opacity: 0;
padding: var(--cpd-space-2x);
border: none;
border-radius: var(--cpd-radius-pill-effect);
@@ -108,6 +107,35 @@ Please see LICENSE in the repository root for full details.
z-index: 1;
}
.volumeSlider {
width: 100%;
min-width: 172px;
}
/* Disable the hover effect for the screen share volume menu button */
.volumeMenuItem:hover {
background: transparent;
cursor: default;
}
.volumeMenuItem {
gap: var(--cpd-space-3x);
}
.menuMuteButton {
appearance: none;
background: none;
border: none;
padding: 0;
cursor: pointer;
display: flex;
}
/* Make icons change color with the theme */
.menuMuteButton > svg {
color: var(--cpd-color-icon-primary);
}
.expand > svg {
display: block;
color: var(--cpd-color-icon-primary);
@@ -119,17 +147,22 @@ Please see LICENSE in the repository root for full details.
}
}
.expand:active {
.expand:active,
.expand[data-state="open"] {
background: var(--cpd-color-gray-100);
}
@media (hover) {
.tile > div > button {
opacity: 0;
}
.tile:hover > div > button {
opacity: 1;
}
}
.tile:has(:focus-visible) > div > button {
.tile:has(:focus-visible) > div > button,
.tile > div:has([data-state="open"]) > button {
opacity: 1;
}

View File

@@ -9,6 +9,7 @@ import { test, expect, vi } from "vitest";
import { isInaccessible, render, screen } from "@testing-library/react";
import { axe } from "vitest-axe";
import userEvent from "@testing-library/user-event";
import { TooltipProvider } from "@vector-im/compound-web";
import { SpotlightTile } from "./SpotlightTile";
import {
@@ -18,6 +19,7 @@ import {
mockLocalMedia,
mockRemoteMedia,
mockRemoteParticipant,
mockRemoteScreenShare,
} from "../utils/test";
import { SpotlightTileViewModel } from "../state/TileViewModel";
import { constant } from "../state/Behavior";
@@ -78,3 +80,63 @@ test("SpotlightTile is accessible", async () => {
await user.click(screen.getByRole("button", { name: "Expand" }));
expect(toggleExpanded).toHaveBeenCalled();
});
test("Screen share volume UI is shown when screen share has audio", async () => {
const vm = mockRemoteScreenShare(
mockRtcMembership("@alice:example.org", "AAAA"),
{},
mockRemoteParticipant({}),
);
vi.spyOn(vm, "audioEnabled$", "get").mockReturnValue(constant(true));
const toggleExpanded = vi.fn();
const { container } = render(
<TooltipProvider>
<SpotlightTile
vm={new SpotlightTileViewModel(constant([vm]), constant(false))}
targetWidth={300}
targetHeight={200}
expanded={false}
onToggleExpanded={toggleExpanded}
showIndicators
focusable
/>
</TooltipProvider>,
);
expect(await axe(container)).toHaveNoViolations();
// Volume menu button should exist
expect(screen.queryByRole("button", { name: /volume/i })).toBeInTheDocument();
});
test("Screen share volume UI is hidden when screen share has no audio", async () => {
const vm = mockRemoteScreenShare(
mockRtcMembership("@alice:example.org", "AAAA"),
{},
mockRemoteParticipant({}),
);
vi.spyOn(vm, "audioEnabled$", "get").mockReturnValue(constant(false));
const toggleExpanded = vi.fn();
const { container } = render(
<SpotlightTile
vm={new SpotlightTileViewModel(constant([vm]), constant(false))}
targetWidth={300}
targetHeight={200}
expanded={false}
onToggleExpanded={toggleExpanded}
showIndicators
focusable
/>,
);
expect(await axe(container)).toHaveNoViolations();
// Volume menu button should not exist
expect(
screen.queryByRole("button", { name: /volume/i }),
).not.toBeInTheDocument();
});

View File

@@ -20,6 +20,10 @@ import {
CollapseIcon,
ChevronLeftIcon,
ChevronRightIcon,
VolumeOffIcon,
VolumeOnIcon,
VolumeOffSolidIcon,
VolumeOnSolidIcon,
} from "@vector-im/compound-design-tokens/assets/web/icons";
import { animated } from "@react-spring/web";
import { type Observable, map } from "rxjs";
@@ -27,6 +31,7 @@ import { useObservableRef } from "observable-hooks";
import { useTranslation } from "react-i18next";
import classNames from "classnames";
import { type TrackReferenceOrPlaceholder } from "@livekit/components-core";
import { Menu, MenuItem } from "@vector-im/compound-web";
import FullScreenMaximiseIcon from "../icons/FullScreenMaximise.svg?react";
import FullScreenMinimiseIcon from "../icons/FullScreenMinimise.svg?react";
@@ -45,6 +50,8 @@ import { type UserMediaViewModel } from "../state/media/UserMediaViewModel";
import { type ScreenShareViewModel } from "../state/media/ScreenShareViewModel";
import { type RemoteScreenShareViewModel } from "../state/media/RemoteScreenShareViewModel";
import { type MediaViewModel } from "../state/media/MediaViewModel";
import { Slider } from "../Slider";
import { platform } from "../Platform";
interface SpotlightItemBaseProps {
ref?: Ref<HTMLDivElement>;
@@ -104,12 +111,12 @@ const SpotlightUserMediaItem: FC<SpotlightUserMediaItemProps> = ({
vm,
...props
}) => {
const cropVideo = useBehavior(vm.cropVideo$);
const videoFit = useBehavior(vm.videoFit$);
const videoEnabled = useBehavior(vm.videoEnabled$);
const baseProps: SpotlightUserMediaItemBaseProps &
RefAttributes<HTMLDivElement> = {
videoFit: cropVideo ? "cover" : "contain",
videoFit,
videoEnabled,
...props,
};
@@ -151,7 +158,13 @@ const SpotlightRemoteScreenShareItem: FC<
interface SpotlightItemProps {
ref?: Ref<HTMLDivElement>;
vm: MediaViewModel;
/**
* The width this tile will have once its animations have settled.
*/
targetWidth: number;
/**
* The height this tile will have once its animations have settled.
*/
targetHeight: number;
focusable: boolean;
intersectionObserver$: Observable<IntersectionObserver>;
@@ -173,6 +186,16 @@ const SpotlightItem: FC<SpotlightItemProps> = ({
"aria-hidden": ariaHidden,
}) => {
const ourRef = useRef<HTMLDivElement | null>(null);
// Whenever target bounds change, inform the viewModel
useEffect(() => {
if (targetWidth > 0 && targetHeight > 0) {
if (vm.type != "screen share") {
vm.setTargetDimensions(targetWidth, targetHeight);
}
}
}, [targetWidth, targetHeight, vm]);
const ref = useMergedRefs(ourRef, theirRef);
const focusUrl = useBehavior(vm.focusUrl$);
const displayName = useBehavior(vm.displayName$);
@@ -224,6 +247,73 @@ const SpotlightItem: FC<SpotlightItemProps> = ({
SpotlightItem.displayName = "SpotlightItem";
interface ScreenShareVolumeButtonProps {
  vm: RemoteScreenShareViewModel;
}

/**
 * A corner button on a spotlight tile that opens a small menu for muting and
 * adjusting the playback volume of a remote screen share's audio.
 * Renders nothing while the screen share has no audio track.
 */
const ScreenShareVolumeButton: FC<ScreenShareVolumeButtonProps> = ({ vm }) => {
  const { t } = useTranslation();
  const audioEnabled = useBehavior(vm.audioEnabled$);
  const playbackMuted = useBehavior(vm.playbackMuted$);
  const playbackVolume = useBehavior(vm.playbackVolume$);
  // Outlined icon inside the menu, solid icon on the trigger button
  const VolumeIcon = playbackMuted ? VolumeOffIcon : VolumeOnIcon;
  const VolumeSolidIcon = playbackMuted
    ? VolumeOffSolidIcon
    : VolumeOnSolidIcon;
  const [volumeMenuOpen, setVolumeMenuOpen] = useState(false);
  const onMuteButtonClick = useCallback(() => vm.togglePlaybackMuted(), [vm]);
  const onVolumeChange = useCallback(
    (v: number) => vm.adjustPlaybackVolume(v),
    [vm],
  );
  // Commit persists the dragged volume as the value to restore on unmute
  const onVolumeCommit = useCallback(() => vm.commitPlaybackVolume(), [vm]);

  return (
    audioEnabled && (
      <Menu
        open={volumeMenuOpen}
        onOpenChange={setVolumeMenuOpen}
        title={t("video_tile.screen_share_volume")}
        side="top"
        align="end"
        trigger={
          <button
            // Explicit type so the button never acts as a form submit button
            type="button"
            className={styles.expand}
            aria-label={t("video_tile.screen_share_volume")}
          >
            <VolumeSolidIcon aria-hidden width={20} height={20} />
          </button>
        }
      >
        <MenuItem
          as="div"
          className={styles.volumeMenuItem}
          onSelect={null}
          label={null}
          hideChevron={true}
        >
          <button
            type="button"
            className={styles.menuMuteButton}
            onClick={onMuteButtonClick}
            // The icon is aria-hidden, so the button needs its own accessible
            // name; reuse the existing mute strings from the tile menu
            aria-label={t(
              playbackMuted ? "video_tile.muted_for_me" : "video_tile.mute_for_me",
            )}
          >
            <VolumeIcon aria-hidden width={24} height={24} />
          </button>
          <Slider
            className={styles.volumeSlider}
            label={t("video_tile.volume")}
            value={playbackVolume}
            min={0}
            max={1}
            step={0.01}
            onValueChange={onVolumeChange}
            onValueCommit={onVolumeCommit}
          />
        </MenuItem>
      </Menu>
    )
  );
};
interface Props {
ref?: Ref<HTMLDivElement>;
vm: SpotlightTileViewModel;
@@ -258,6 +348,7 @@ export const SpotlightTile: FC<Props> = ({
const latestMedia = useLatest(media);
const latestVisibleId = useLatest(visibleId);
const visibleIndex = media.findIndex((vm) => vm.id === visibleId);
const visibleMedia = media.at(visibleIndex);
const canGoBack = visibleIndex > 0;
const canGoToNext = visibleIndex !== -1 && visibleIndex < media.length - 1;
@@ -365,16 +456,21 @@ export const SpotlightTile: FC<Props> = ({
/>
))}
</div>
<div className={styles.bottomRightButtons}>
<button
className={classNames(styles.expand)}
aria-label={"maximise"}
onClick={onToggleFullscreen}
tabIndex={focusable ? undefined : -1}
>
<FullScreenIcon aria-hidden width={20} height={20} />
</button>
<div className={styles.bottomRightButtons}>
{visibleMedia?.type === "screen share" && !visibleMedia.local && (
<ScreenShareVolumeButton vm={visibleMedia} />
)}
{platform === "desktop" && (
<button
className={classNames(styles.expand)}
aria-label={"maximise"}
onClick={onToggleFullscreen}
tabIndex={focusable ? undefined : -1}
>
<FullScreenIcon aria-hidden width={20} height={20} />
</button>
)}
{onToggleExpanded && (
<button
className={classNames(styles.expand)}

View File

@@ -70,6 +70,10 @@ import {
createRemoteUserMedia,
type RemoteUserMediaViewModel,
} from "../state/media/RemoteUserMediaViewModel";
import {
createRemoteScreenShare,
type RemoteScreenShareViewModel,
} from "../state/media/RemoteScreenShareViewModel";
export function withFakeTimers(continuation: () => void): void {
vi.useFakeTimers();
@@ -393,6 +397,31 @@ export function mockRemoteMedia(
});
}
/**
 * Builds a `RemoteScreenShareViewModel` backed by mocked Matrix and LiveKit
 * state, for use in tests.
 *
 * @param rtcMember - the RTC call membership the mocked room member is derived from.
 * @param roomMember - partial Matrix room member data to merge into the mock.
 * @param participant - the remote LiveKit participant backing the screen share,
 *   or null to simulate a share with no connected participant.
 * @param livekitRoom - the LiveKit room to attach to; defaults to a mock room
 *   whose remote participant list contains `participant` (when non-null).
 */
export function mockRemoteScreenShare(
  rtcMember: CallMembership,
  roomMember: Partial<RoomMember>,
  participant: RemoteParticipant | null,
  livekitRoom: LivekitRoom | undefined = mockLivekitRoom(
    {},
    {
      remoteParticipants$: of(participant ? [participant] : []),
    },
  ),
): RemoteScreenShareViewModel {
  const matrixMember = mockMatrixRoomMember(rtcMember, roomMember);
  // Fall back to a placeholder when the mocked member carries no display name.
  const displayName = matrixMember.rawDisplayName ?? "nodisplayname";
  return createRemoteScreenShare(testScope(), {
    id: "screenshare",
    userId: matrixMember.userId,
    participant$: constant(participant),
    encryptionSystem: { kind: E2eeType.PER_PARTICIPANT },
    livekitRoom$: constant(livekitRoom),
    focusUrl$: constant("https://rtc-example.org"),
    pretendToBeDisconnected$: constant(false),
    displayName$: constant(displayName),
    mxcAvatarUrl$: constant(matrixMember.getMxcAvatarUrl()),
  });
}
export function mockConfig(
config: Partial<ResolvedConfigOptions> = {},
): MockInstance<() => ResolvedConfigOptions> {

263
src/utils/videoFit.test.ts Normal file
View File

@@ -0,0 +1,263 @@
/*
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { describe, expect, test, vi } from "vitest";
import {
LocalTrack,
type LocalTrackPublication,
type RemoteTrackPublication,
Track,
} from "livekit-client";
import { ObservableScope } from "../state/ObservableScope";
import { videoFit$, videoSizeFromParticipant$ } from "./videoFit";
import { constant } from "../state/Behavior";
import {
flushPromises,
mockLocalParticipant,
mockRemoteParticipant,
} from "./test";
// When either size is still unknown (undefined), videoFit$ must not
// letterbox: "cover" is the safe default until real measurements arrive.
describe("videoFit$ defaults", () => {
  test.each([
    // Known video size, unmeasured tile (landscape and portrait video).
    {
      videoSize: { width: 1920, height: 1080 },
      tileSize: undefined,
    },
    {
      videoSize: { width: 1080, height: 1920 },
      tileSize: undefined,
    },
    // Measured tile, video size not yet reported (landscape and portrait tile).
    {
      videoSize: undefined,
      tileSize: { width: 1920, height: 1080 },
    },
    {
      videoSize: undefined,
      tileSize: { width: 1080, height: 1920 },
    },
  ])(
    "videoFit$ returns `cover` when videoSize is $videoSize and tileSize is $tileSize",
    ({ videoSize, tileSize }) => {
      const scope = new ObservableScope();
      const videoSize$ = constant(videoSize);
      const tileSize$ = constant(tileSize);
      const fit = videoFit$(scope, videoSize$, tileSize$);
      expect(fit.value).toBe("cover");
    },
  );
});
// Standard video resolutions used as fixtures; suffix convention:
// _L = landscape orientation, _P = portrait orientation.
const VIDEO_480_L = { width: 640, height: 480 };
const VIDEO_720_L = { width: 1280, height: 720 };
const VIDEO_1080_L = { width: 1920, height: 1080 };
// Some sizes from real world testing, which don't match the standard video sizes exactly
const TILE_SIZE_1_L = { width: 180, height: 135 };
const TILE_SIZE_3_P = { width: 379, height: 542 };
const TILE_SIZE_4_L = { width: 957, height: 542 };
// This is the size of an iPhone Xr in portrait mode
const TILE_SIZE_5_P = { width: 414, height: 896 };
/**
 * Swaps the width and height of a size, e.g. to turn a landscape test
 * resolution into its portrait counterpart.
 */
export function invertSize(size: { width: number; height: number }): {
  width: number;
  height: number;
} {
  const { width, height } = size;
  return { width: height, height: width };
}
// Table-driven orientation cases: matching orientations expect "cover",
// mismatched orientations expect "contain", and degenerate (zero) sizes
// fall back to "cover".
test.each([
  {
    videoSize: VIDEO_480_L,
    tileSize: TILE_SIZE_1_L,
    expected: "cover",
  },
  {
    videoSize: invertSize(VIDEO_480_L),
    tileSize: TILE_SIZE_1_L,
    expected: "contain",
  },
  {
    videoSize: VIDEO_720_L,
    tileSize: TILE_SIZE_4_L,
    expected: "cover",
  },
  {
    videoSize: invertSize(VIDEO_720_L),
    tileSize: TILE_SIZE_4_L,
    expected: "contain",
  },
  {
    videoSize: invertSize(VIDEO_1080_L),
    tileSize: TILE_SIZE_3_P,
    expected: "cover",
  },
  {
    videoSize: VIDEO_1080_L,
    tileSize: TILE_SIZE_5_P,
    expected: "contain",
  },
  {
    videoSize: invertSize(VIDEO_1080_L),
    tileSize: TILE_SIZE_5_P,
    expected: "cover",
  },
  {
    // square video (aspect ratio exactly 1 counts as portrait) in a
    // landscape tile, so orientations differ
    videoSize: { width: 400, height: 400 },
    tileSize: VIDEO_480_L,
    expected: "contain",
  },
  {
    // Should default to cover if the initial size is 0:0.
    // Or else it will cause a flash of "contain" mode until the real size is loaded, which can be jarring.
    videoSize: VIDEO_480_L,
    tileSize: { width: 0, height: 0 },
    expected: "cover",
  },
  {
    videoSize: { width: 0, height: 0 },
    tileSize: VIDEO_480_L,
    expected: "cover",
  },
])(
  "videoFit$ returns $expected when videoSize is $videoSize and tileSize is $tileSize",
  ({ videoSize, tileSize, expected }) => {
    const scope = new ObservableScope();
    const videoSize$ = constant(videoSize);
    const tileSize$ = constant(tileSize);
    const fit = videoFit$(scope, videoSize$, tileSize$);
    expect(fit.value).toBe(expected);
  },
);
describe("extracting video size from participant stats", () => {
  // Builds a minimal WebRTC RTP stream stats object of the requested
  // direction; callers supply frameWidth/frameHeight etc. via `props`.
  function createMockRtpStats(
    isInbound: boolean,
    props: Partial<RTCInboundRtpStreamStats | RTCOutboundRtpStreamStats> = {},
  ): RTCInboundRtpStreamStats | RTCOutboundRtpStreamStats {
    const baseStats = {
      id: "mock-stats-id",
      timestamp: Date.now(),
      type: isInbound ? "inbound-rtp" : "outbound-rtp",
      kind: "video",
      ...props,
    };
    return baseStats as RTCInboundRtpStreamStats | RTCOutboundRtpStreamStats;
  }

  test("get stats for local user", async () => {
    const localParticipant = mockLocalParticipant({
      identity: "@local:example.org:AAAAAA",
    });
    // Local participants publish video, so the size is read from an
    // outbound-rtp stats entry.
    const mockReport: RTCStatsReport = new Map([
      [
        "OT01V639885149",
        createMockRtpStats(false, {
          frameWidth: 1280,
          frameHeight: 720,
        }),
      ],
    ]);
    const track = {
      source: Track.Source.Camera,
      getRTCStatsReport: vi
        .fn()
        .mockImplementation(async () => Promise.resolve(mockReport)),
    } as Partial<LocalTrack> as LocalTrack;
    // Set up the prototype chain (there is an instanceof check in getRTCStatsReport)
    Object.setPrototypeOf(track, LocalTrack.prototype);
    // Only the camera publication exists in this scenario.
    localParticipant.getTrackPublication = vi
      .fn()
      .mockImplementation((source: Track.Source) => {
        if (source === Track.Source.Camera) {
          return {
            track,
          } as unknown as LocalTrackPublication;
        } else {
          return undefined;
        }
      });
    const videoDimensions$ = videoSizeFromParticipant$(
      constant(localParticipant),
    );
    // Collect every defined emission; the most recent one should carry the
    // dimensions from the mocked stats report.
    const publishedDimensions: { width: number; height: number }[] = [];
    videoDimensions$.subscribe((dimensions) => {
      if (dimensions) publishedDimensions.push(dimensions);
    });
    await flushPromises();
    const dimension = publishedDimensions.pop();
    expect(dimension).toEqual({ width: 1280, height: 720 });
  });

  test("get stats for remote user", async () => {
    const remoteParticipant = mockRemoteParticipant({
      identity: "@bob:example.org:AAAAAA",
    });
    // Remote participants are received, so the size is read from an
    // inbound-rtp stats entry (note the portrait 480x640 dimensions).
    const mockReport: RTCStatsReport = new Map([
      [
        "OT01V639885149",
        createMockRtpStats(true, {
          frameWidth: 480,
          frameHeight: 640,
        }),
      ],
    ]);
    const track = {
      source: Track.Source.Camera,
      getRTCStatsReport: vi
        .fn()
        .mockImplementation(async () => Promise.resolve(mockReport)),
    } as Partial<LocalTrack> as LocalTrack;
    // Set up the prototype chain (there is an instanceof check in getRTCStatsReport)
    Object.setPrototypeOf(track, LocalTrack.prototype);
    remoteParticipant.getTrackPublication = vi
      .fn()
      .mockImplementation((source: Track.Source) => {
        if (source === Track.Source.Camera) {
          return {
            track,
          } as unknown as RemoteTrackPublication;
        } else {
          return undefined;
        }
      });
    const videoDimensions$ = videoSizeFromParticipant$(
      constant(remoteParticipant),
    );
    const publishedDimensions: { width: number; height: number }[] = [];
    videoDimensions$.subscribe((dimensions) => {
      if (dimensions) publishedDimensions.push(dimensions);
    });
    await flushPromises();
    const dimension = publishedDimensions.pop();
    expect(dimension).toEqual({ width: 480, height: 640 });
  });
});

111
src/utils/videoFit.ts Normal file
View File

@@ -0,0 +1,111 @@
/*
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { combineLatest, map, type Observable, of, switchMap } from "rxjs";
import {
type LocalParticipant,
type RemoteParticipant,
Track,
} from "livekit-client";
import { type ObservableScope } from "../state/ObservableScope.ts";
import { type Behavior } from "../state/Behavior.ts";
import {
observeInboundRtpStreamStats$,
observeOutboundRtpStreamStats$,
} from "../state/media/observeRtpStreamStats";
/** A two-dimensional size (width and height) in pixels. */
type Size = {
  width: number;
  height: number;
};
/**
* Computes the appropriate video fit mode ("cover" or "contain") based on the aspect ratios of the video and the tile.
* - If the video and tile have the same orientation (both landscape or both portrait), we use "cover" to fill the tile, even if it means cropping.
* - If the video and tile have different orientations, we use "contain" to ensure the entire video is visible, even if it means letterboxing (black bars).
* @param scope - the ObservableScope to create the Behavior in
* @param videoSize$ - an Observable of the video size (width and height) or undefined if the size is not yet known (no data yet received).
* @param tileSize$ - an Observable of the tile size (width and height) or undefined if the size is not yet known (not yet rendered).
*/
export function videoFit$(
scope: ObservableScope,
videoSize$: Observable<Size | undefined>,
tileSize$: Observable<Size | undefined>,
): Behavior<"cover" | "contain"> {
const fit$ = combineLatest([videoSize$, tileSize$]).pipe(
map(([videoSize, tileSize]) => {
if (!videoSize || !tileSize) {
// If we don't have the sizes, default to cover to avoid black bars.
// This is a reasonable default as it will ensure the video fills the tile, even if it means cropping.
return "cover";
}
if (
videoSize.width === 0 ||
videoSize.height === 0 ||
tileSize.width === 0 ||
tileSize.height === 0
) {
// If we have invalid sizes (e.g. width or height is 0), default to cover to avoid black bars.
return "cover";
}
const videoAspectRatio = videoSize.width / videoSize.height;
const tileAspectRatio = tileSize.width / tileSize.height;
// If video is landscape (ratio > 1) and tile is portrait (ratio < 1) or vice versa,
// we want to use "contain" (fit) mode to avoid excessive cropping
const videoIsLandscape = videoAspectRatio > 1;
const tileIsLandscape = tileAspectRatio > 1;
// If the orientations are the same, use the cover mode (Preserves the aspect ratio, and the image fills the container.)
// If they're not the same orientation, use the contain mode (Preserves the aspect ratio, but the image is letterboxed - black bars- to fit within the container.)
return videoIsLandscape === tileIsLandscape ? "cover" : "contain";
}),
);
return scope.behavior(fit$, "cover");
}
/**
* Helper function to get the video size from a participant.
* It observes the participant's video track stats and extracts the frame width and height.
* @param participant$ - an Observable of a LocalParticipant or RemoteParticipant, or null if no participant is selected.
* @returns an Observable of the video size (width and height) or undefined if the size cannot be determined.
*/
export function videoSizeFromParticipant$(
participant$: Observable<LocalParticipant | RemoteParticipant | null>,
): Observable<{ width: number; height: number } | undefined> {
return participant$
.pipe(
// If we have a participant, observe their video track stats. If not, return undefined.
switchMap((p) => {
if (!p) return of(undefined);
if (p.isLocal) {
return observeOutboundRtpStreamStats$(p, Track.Source.Camera);
} else {
return observeInboundRtpStreamStats$(p, Track.Source.Camera);
}
}),
)
.pipe(
// Extract the frame width and height from the stats. If we don't have valid stats, return undefined.
map((stats) => {
if (!stats) return undefined;
if (
// For video tracks, frameWidth and frameHeight should be numbers. If they're not, we can't determine the size.
typeof stats.frameWidth !== "number" ||
typeof stats.frameHeight !== "number"
) {
return undefined;
}
return {
width: stats.frameWidth,
height: stats.frameHeight,
};
}),
);
}

224
yarn.lock
View File

@@ -3345,15 +3345,15 @@ __metadata:
languageName: node
linkType: hard
"@livekit/track-processors@npm:^0.6.0 || ^0.7.1":
version: 0.6.1
resolution: "@livekit/track-processors@npm:0.6.1"
"@livekit/track-processors@npm:^0.7.1":
version: 0.7.2
resolution: "@livekit/track-processors@npm:0.7.2"
dependencies:
"@mediapipe/tasks-vision": "npm:0.10.14"
peerDependencies:
"@types/dom-mediacapture-transform": ^0.1.9
livekit-client: ^1.12.0 || ^2.1.0
checksum: 10c0/80f54663c7e13de299de9e2565b6cbd2ba74ea0a4a8adf8a366e8cfd0e19dedfb9d699899137f1a6133414f28779877eeb3200074c03893bc63aeb0d8c912a91
checksum: 10c0/d5638942205ea05a507254f61157696881332b866ff538d1e93bd2267c31ce80c9a81dfc4f8c4b7c96910452b13511ee06be1ae7ab30c299b73fe04baf80a673
languageName: node
linkType: hard
@@ -3371,7 +3371,7 @@ __metadata:
languageName: node
linkType: hard
"@napi-rs/wasm-runtime@npm:^1.1.0":
"@napi-rs/wasm-runtime@npm:^1.1.1":
version: 1.1.1
resolution: "@napi-rs/wasm-runtime@npm:1.1.1"
dependencies:
@@ -3551,144 +3551,144 @@ __metadata:
languageName: node
linkType: hard
"@oxc-resolver/binding-android-arm-eabi@npm:11.16.2":
version: 11.16.2
resolution: "@oxc-resolver/binding-android-arm-eabi@npm:11.16.2"
"@oxc-resolver/binding-android-arm-eabi@npm:11.19.1":
version: 11.19.1
resolution: "@oxc-resolver/binding-android-arm-eabi@npm:11.19.1"
conditions: os=android & cpu=arm
languageName: node
linkType: hard
"@oxc-resolver/binding-android-arm64@npm:11.16.2":
version: 11.16.2
resolution: "@oxc-resolver/binding-android-arm64@npm:11.16.2"
"@oxc-resolver/binding-android-arm64@npm:11.19.1":
version: 11.19.1
resolution: "@oxc-resolver/binding-android-arm64@npm:11.19.1"
conditions: os=android & cpu=arm64
languageName: node
linkType: hard
"@oxc-resolver/binding-darwin-arm64@npm:11.16.2":
version: 11.16.2
resolution: "@oxc-resolver/binding-darwin-arm64@npm:11.16.2"
"@oxc-resolver/binding-darwin-arm64@npm:11.19.1":
version: 11.19.1
resolution: "@oxc-resolver/binding-darwin-arm64@npm:11.19.1"
conditions: os=darwin & cpu=arm64
languageName: node
linkType: hard
"@oxc-resolver/binding-darwin-x64@npm:11.16.2":
version: 11.16.2
resolution: "@oxc-resolver/binding-darwin-x64@npm:11.16.2"
"@oxc-resolver/binding-darwin-x64@npm:11.19.1":
version: 11.19.1
resolution: "@oxc-resolver/binding-darwin-x64@npm:11.19.1"
conditions: os=darwin & cpu=x64
languageName: node
linkType: hard
"@oxc-resolver/binding-freebsd-x64@npm:11.16.2":
version: 11.16.2
resolution: "@oxc-resolver/binding-freebsd-x64@npm:11.16.2"
"@oxc-resolver/binding-freebsd-x64@npm:11.19.1":
version: 11.19.1
resolution: "@oxc-resolver/binding-freebsd-x64@npm:11.19.1"
conditions: os=freebsd & cpu=x64
languageName: node
linkType: hard
"@oxc-resolver/binding-linux-arm-gnueabihf@npm:11.16.2":
version: 11.16.2
resolution: "@oxc-resolver/binding-linux-arm-gnueabihf@npm:11.16.2"
"@oxc-resolver/binding-linux-arm-gnueabihf@npm:11.19.1":
version: 11.19.1
resolution: "@oxc-resolver/binding-linux-arm-gnueabihf@npm:11.19.1"
conditions: os=linux & cpu=arm
languageName: node
linkType: hard
"@oxc-resolver/binding-linux-arm-musleabihf@npm:11.16.2":
version: 11.16.2
resolution: "@oxc-resolver/binding-linux-arm-musleabihf@npm:11.16.2"
"@oxc-resolver/binding-linux-arm-musleabihf@npm:11.19.1":
version: 11.19.1
resolution: "@oxc-resolver/binding-linux-arm-musleabihf@npm:11.19.1"
conditions: os=linux & cpu=arm
languageName: node
linkType: hard
"@oxc-resolver/binding-linux-arm64-gnu@npm:11.16.2":
version: 11.16.2
resolution: "@oxc-resolver/binding-linux-arm64-gnu@npm:11.16.2"
"@oxc-resolver/binding-linux-arm64-gnu@npm:11.19.1":
version: 11.19.1
resolution: "@oxc-resolver/binding-linux-arm64-gnu@npm:11.19.1"
conditions: os=linux & cpu=arm64 & libc=glibc
languageName: node
linkType: hard
"@oxc-resolver/binding-linux-arm64-musl@npm:11.16.2":
version: 11.16.2
resolution: "@oxc-resolver/binding-linux-arm64-musl@npm:11.16.2"
"@oxc-resolver/binding-linux-arm64-musl@npm:11.19.1":
version: 11.19.1
resolution: "@oxc-resolver/binding-linux-arm64-musl@npm:11.19.1"
conditions: os=linux & cpu=arm64 & libc=musl
languageName: node
linkType: hard
"@oxc-resolver/binding-linux-ppc64-gnu@npm:11.16.2":
version: 11.16.2
resolution: "@oxc-resolver/binding-linux-ppc64-gnu@npm:11.16.2"
"@oxc-resolver/binding-linux-ppc64-gnu@npm:11.19.1":
version: 11.19.1
resolution: "@oxc-resolver/binding-linux-ppc64-gnu@npm:11.19.1"
conditions: os=linux & cpu=ppc64 & libc=glibc
languageName: node
linkType: hard
"@oxc-resolver/binding-linux-riscv64-gnu@npm:11.16.2":
version: 11.16.2
resolution: "@oxc-resolver/binding-linux-riscv64-gnu@npm:11.16.2"
"@oxc-resolver/binding-linux-riscv64-gnu@npm:11.19.1":
version: 11.19.1
resolution: "@oxc-resolver/binding-linux-riscv64-gnu@npm:11.19.1"
conditions: os=linux & cpu=riscv64 & libc=glibc
languageName: node
linkType: hard
"@oxc-resolver/binding-linux-riscv64-musl@npm:11.16.2":
version: 11.16.2
resolution: "@oxc-resolver/binding-linux-riscv64-musl@npm:11.16.2"
"@oxc-resolver/binding-linux-riscv64-musl@npm:11.19.1":
version: 11.19.1
resolution: "@oxc-resolver/binding-linux-riscv64-musl@npm:11.19.1"
conditions: os=linux & cpu=riscv64 & libc=musl
languageName: node
linkType: hard
"@oxc-resolver/binding-linux-s390x-gnu@npm:11.16.2":
version: 11.16.2
resolution: "@oxc-resolver/binding-linux-s390x-gnu@npm:11.16.2"
"@oxc-resolver/binding-linux-s390x-gnu@npm:11.19.1":
version: 11.19.1
resolution: "@oxc-resolver/binding-linux-s390x-gnu@npm:11.19.1"
conditions: os=linux & cpu=s390x & libc=glibc
languageName: node
linkType: hard
"@oxc-resolver/binding-linux-x64-gnu@npm:11.16.2":
version: 11.16.2
resolution: "@oxc-resolver/binding-linux-x64-gnu@npm:11.16.2"
"@oxc-resolver/binding-linux-x64-gnu@npm:11.19.1":
version: 11.19.1
resolution: "@oxc-resolver/binding-linux-x64-gnu@npm:11.19.1"
conditions: os=linux & cpu=x64 & libc=glibc
languageName: node
linkType: hard
"@oxc-resolver/binding-linux-x64-musl@npm:11.16.2":
version: 11.16.2
resolution: "@oxc-resolver/binding-linux-x64-musl@npm:11.16.2"
"@oxc-resolver/binding-linux-x64-musl@npm:11.19.1":
version: 11.19.1
resolution: "@oxc-resolver/binding-linux-x64-musl@npm:11.19.1"
conditions: os=linux & cpu=x64 & libc=musl
languageName: node
linkType: hard
"@oxc-resolver/binding-openharmony-arm64@npm:11.16.2":
version: 11.16.2
resolution: "@oxc-resolver/binding-openharmony-arm64@npm:11.16.2"
"@oxc-resolver/binding-openharmony-arm64@npm:11.19.1":
version: 11.19.1
resolution: "@oxc-resolver/binding-openharmony-arm64@npm:11.19.1"
conditions: os=openharmony & cpu=arm64
languageName: node
linkType: hard
"@oxc-resolver/binding-wasm32-wasi@npm:11.16.2":
version: 11.16.2
resolution: "@oxc-resolver/binding-wasm32-wasi@npm:11.16.2"
"@oxc-resolver/binding-wasm32-wasi@npm:11.19.1":
version: 11.19.1
resolution: "@oxc-resolver/binding-wasm32-wasi@npm:11.19.1"
dependencies:
"@napi-rs/wasm-runtime": "npm:^1.1.0"
"@napi-rs/wasm-runtime": "npm:^1.1.1"
conditions: cpu=wasm32
languageName: node
linkType: hard
"@oxc-resolver/binding-win32-arm64-msvc@npm:11.16.2":
version: 11.16.2
resolution: "@oxc-resolver/binding-win32-arm64-msvc@npm:11.16.2"
"@oxc-resolver/binding-win32-arm64-msvc@npm:11.19.1":
version: 11.19.1
resolution: "@oxc-resolver/binding-win32-arm64-msvc@npm:11.19.1"
conditions: os=win32 & cpu=arm64
languageName: node
linkType: hard
"@oxc-resolver/binding-win32-ia32-msvc@npm:11.16.2":
version: 11.16.2
resolution: "@oxc-resolver/binding-win32-ia32-msvc@npm:11.16.2"
"@oxc-resolver/binding-win32-ia32-msvc@npm:11.19.1":
version: 11.19.1
resolution: "@oxc-resolver/binding-win32-ia32-msvc@npm:11.19.1"
conditions: os=win32 & cpu=ia32
languageName: node
linkType: hard
"@oxc-resolver/binding-win32-x64-msvc@npm:11.16.2":
version: 11.16.2
resolution: "@oxc-resolver/binding-win32-x64-msvc@npm:11.16.2"
"@oxc-resolver/binding-win32-x64-msvc@npm:11.19.1":
version: 11.19.1
resolution: "@oxc-resolver/binding-win32-x64-msvc@npm:11.19.1"
conditions: os=win32 & cpu=x64
languageName: node
linkType: hard
@@ -8380,7 +8380,7 @@ __metadata:
"@livekit/components-core": "npm:^0.12.0"
"@livekit/components-react": "npm:^2.0.0"
"@livekit/protocol": "npm:^1.42.2"
"@livekit/track-processors": "npm:^0.6.0 || ^0.7.1"
"@livekit/track-processors": "npm:^0.7.1"
"@mediapipe/tasks-vision": "npm:^0.10.18"
"@playwright/test": "npm:^1.57.0"
"@radix-ui/react-dialog": "npm:^1.0.4"
@@ -8433,7 +8433,7 @@ __metadata:
i18next-browser-languagedetector: "npm:^8.0.0"
i18next-parser: "npm:^9.1.0"
jsdom: "npm:^26.0.0"
knip: "npm:5.82.1"
knip: "npm:^5.86.0"
livekit-client: "npm:^2.13.0"
lodash-es: "npm:^4.17.21"
loglevel: "npm:^1.9.1"
@@ -11039,17 +11039,6 @@ __metadata:
languageName: node
linkType: hard
"js-yaml@npm:^4.1.1":
version: 4.1.1
resolution: "js-yaml@npm:4.1.1"
dependencies:
argparse: "npm:^2.0.1"
bin:
js-yaml: bin/js-yaml.js
checksum: 10c0/561c7d7088c40a9bb53cc75becbfb1df6ae49b34b5e6e5a81744b14ae8667ec564ad2527709d1a6e7d5e5fa6d483aa0f373a50ad98d42fde368ec4a190d4fae7
languageName: node
linkType: hard
"jsbn@npm:1.1.0":
version: 1.1.0
resolution: "jsbn@npm:1.1.0"
@@ -11232,21 +11221,22 @@ __metadata:
languageName: node
linkType: hard
"knip@npm:5.82.1":
version: 5.82.1
resolution: "knip@npm:5.82.1"
"knip@npm:^5.86.0":
version: 5.86.0
resolution: "knip@npm:5.86.0"
dependencies:
"@nodelib/fs.walk": "npm:^1.2.3"
fast-glob: "npm:^3.3.3"
formatly: "npm:^0.3.0"
jiti: "npm:^2.6.0"
js-yaml: "npm:^4.1.1"
minimist: "npm:^1.2.8"
oxc-resolver: "npm:^11.15.0"
oxc-resolver: "npm:^11.19.1"
picocolors: "npm:^1.1.1"
picomatch: "npm:^4.0.1"
smol-toml: "npm:^1.5.2"
strip-json-comments: "npm:5.0.3"
unbash: "npm:^2.2.0"
yaml: "npm:^2.8.2"
zod: "npm:^4.1.11"
peerDependencies:
"@types/node": ">=18"
@@ -11254,7 +11244,7 @@ __metadata:
bin:
knip: bin/knip.js
knip-bun: bin/knip-bun.js
checksum: 10c0/c3bfe898fe3103bb6a59ee2ba4297f05ea4d2db474571db89ae199ebbd74eafa5061d05b3bc2c75e4ec2322ba7ffee44493c76132d3d8991fae66ba742b9ccb4
checksum: 10c0/6905c3c2bd21b1f5d51bf83568d1eff67d9d74dd9547c428f810b0dbc3624225a0c41b8e8caccbb111df2db175933aa853345798a05f91f9344ce3aca26898ff
languageName: node
linkType: hard
@@ -12173,30 +12163,30 @@ __metadata:
languageName: node
linkType: hard
"oxc-resolver@npm:^11.15.0":
version: 11.16.2
resolution: "oxc-resolver@npm:11.16.2"
"oxc-resolver@npm:^11.19.1":
version: 11.19.1
resolution: "oxc-resolver@npm:11.19.1"
dependencies:
"@oxc-resolver/binding-android-arm-eabi": "npm:11.16.2"
"@oxc-resolver/binding-android-arm64": "npm:11.16.2"
"@oxc-resolver/binding-darwin-arm64": "npm:11.16.2"
"@oxc-resolver/binding-darwin-x64": "npm:11.16.2"
"@oxc-resolver/binding-freebsd-x64": "npm:11.16.2"
"@oxc-resolver/binding-linux-arm-gnueabihf": "npm:11.16.2"
"@oxc-resolver/binding-linux-arm-musleabihf": "npm:11.16.2"
"@oxc-resolver/binding-linux-arm64-gnu": "npm:11.16.2"
"@oxc-resolver/binding-linux-arm64-musl": "npm:11.16.2"
"@oxc-resolver/binding-linux-ppc64-gnu": "npm:11.16.2"
"@oxc-resolver/binding-linux-riscv64-gnu": "npm:11.16.2"
"@oxc-resolver/binding-linux-riscv64-musl": "npm:11.16.2"
"@oxc-resolver/binding-linux-s390x-gnu": "npm:11.16.2"
"@oxc-resolver/binding-linux-x64-gnu": "npm:11.16.2"
"@oxc-resolver/binding-linux-x64-musl": "npm:11.16.2"
"@oxc-resolver/binding-openharmony-arm64": "npm:11.16.2"
"@oxc-resolver/binding-wasm32-wasi": "npm:11.16.2"
"@oxc-resolver/binding-win32-arm64-msvc": "npm:11.16.2"
"@oxc-resolver/binding-win32-ia32-msvc": "npm:11.16.2"
"@oxc-resolver/binding-win32-x64-msvc": "npm:11.16.2"
"@oxc-resolver/binding-android-arm-eabi": "npm:11.19.1"
"@oxc-resolver/binding-android-arm64": "npm:11.19.1"
"@oxc-resolver/binding-darwin-arm64": "npm:11.19.1"
"@oxc-resolver/binding-darwin-x64": "npm:11.19.1"
"@oxc-resolver/binding-freebsd-x64": "npm:11.19.1"
"@oxc-resolver/binding-linux-arm-gnueabihf": "npm:11.19.1"
"@oxc-resolver/binding-linux-arm-musleabihf": "npm:11.19.1"
"@oxc-resolver/binding-linux-arm64-gnu": "npm:11.19.1"
"@oxc-resolver/binding-linux-arm64-musl": "npm:11.19.1"
"@oxc-resolver/binding-linux-ppc64-gnu": "npm:11.19.1"
"@oxc-resolver/binding-linux-riscv64-gnu": "npm:11.19.1"
"@oxc-resolver/binding-linux-riscv64-musl": "npm:11.19.1"
"@oxc-resolver/binding-linux-s390x-gnu": "npm:11.19.1"
"@oxc-resolver/binding-linux-x64-gnu": "npm:11.19.1"
"@oxc-resolver/binding-linux-x64-musl": "npm:11.19.1"
"@oxc-resolver/binding-openharmony-arm64": "npm:11.19.1"
"@oxc-resolver/binding-wasm32-wasi": "npm:11.19.1"
"@oxc-resolver/binding-win32-arm64-msvc": "npm:11.19.1"
"@oxc-resolver/binding-win32-ia32-msvc": "npm:11.19.1"
"@oxc-resolver/binding-win32-x64-msvc": "npm:11.19.1"
dependenciesMeta:
"@oxc-resolver/binding-android-arm-eabi":
optional: true
@@ -12238,7 +12228,7 @@ __metadata:
optional: true
"@oxc-resolver/binding-win32-x64-msvc":
optional: true
checksum: 10c0/b20a0fea18fdf31dbaee51354ce7b987ba8f3e780c6c1de9034628033a69d0b3085f9596d9925797d9340bdf4b98cd72a258b0728d0d5e5de2b1748154921b42
checksum: 10c0/8ac4eaffa9c0bcbb9f4f4a2b43786457ec5a68684d8776cb78b5a15ce3d1a79d3e67262aa3c635f98a0c1cd6cd56a31fcb05bffb9a286100056e4ab06b928833
languageName: node
linkType: hard
@@ -15153,6 +15143,13 @@ __metadata:
languageName: node
linkType: hard
"unbash@npm:^2.2.0":
version: 2.2.0
resolution: "unbash@npm:2.2.0"
checksum: 10c0/f218a30e2b65147dba16fcea5d9cbfe5af9d9518e98083b9790b9884959c82c5c8f85e7feeea717430e2ea6b352a1d57ad98e90fe488638606de12c9254cbf35
languageName: node
linkType: hard
"unbox-primitive@npm:^1.1.0":
version: 1.1.0
resolution: "unbox-primitive@npm:1.1.0"
@@ -16105,6 +16102,15 @@ __metadata:
languageName: node
linkType: hard
"yaml@npm:^2.8.2":
version: 2.8.2
resolution: "yaml@npm:2.8.2"
bin:
yaml: bin.mjs
checksum: 10c0/703e4dc1e34b324aa66876d63618dcacb9ed49f7e7fe9b70f1e703645be8d640f68ab84f12b86df8ac960bac37acf5513e115de7c970940617ce0343c8c9cd96
languageName: node
linkType: hard
"yargs-parser@npm:^18.1.2":
version: 18.1.3
resolution: "yargs-parser@npm:18.1.3"