diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md new file mode 100644 index 00000000..787ddc73 --- /dev/null +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -0,0 +1,39 @@ + + +## Content + + + +## Motivation and context + + + +## Screenshots / GIFs + + + +## Tests + + + +- Step 1 +- Step 2 +- Step ... +- + +## Checklist + +- [ ] I have read through [CONTRIBUTING.md](https://github.com/element-hq/element-call/blob/livekit/CONTRIBUTING.md). +- [ ] Pull request includes screenshots or videos if containing UI changes +- [ ] Tests written for new code (and old code if feasible). +- [ ] Linter and other CI checks pass. +- [ ] I have licensed the changes to Element by completing the [Contributor License Agreement (CLA)](https://cla-assistant.io/element-hq/element-call) diff --git a/.github/workflows/build.yaml b/.github/workflows/build.yaml index 4f9e80f2..32ce25c9 100644 --- a/.github/workflows/build.yaml +++ b/.github/workflows/build.yaml @@ -51,6 +51,7 @@ jobs: packages: write id-token: write uses: ./.github/workflows/build-and-publish-docker.yaml + secrets: inherit with: artifact_run_id: ${{ github.run_id }} docker_tags: | diff --git a/.github/workflows/pr-deploy.yaml b/.github/workflows/pr-deploy.yaml index 62b37aca..138ab2b5 100644 --- a/.github/workflows/pr-deploy.yaml +++ b/.github/workflows/pr-deploy.yaml @@ -62,6 +62,7 @@ jobs: packages: write id-token: write uses: ./.github/workflows/build-and-publish-docker.yaml + secrets: inherit with: artifact_run_id: ${{ github.event.workflow_run.id || github.run_id }} docker_tags: | diff --git a/.github/workflows/publish.yaml b/.github/workflows/publish.yaml index 5af8a06c..0675b1b1 100644 --- a/.github/workflows/publish.yaml +++ b/.github/workflows/publish.yaml @@ -57,6 +57,7 @@ jobs: packages: write id-token: write uses: ./.github/workflows/build-and-publish-docker.yaml + secrets: inherit with: artifact_run_id: ${{ github.event.workflow_run.id || github.run_id }} docker_tags: | diff --git 
a/knip.ts b/knip.ts index d23d42fe..3be3e653 100644 --- a/knip.ts +++ b/knip.ts @@ -34,6 +34,12 @@ export default { // then Knip will flag it as a false positive // https://github.com/webpro-nl/knip/issues/766 "@vector-im/compound-web", + // Yarn plugins are allowed to depend on packages provided by the Yarn + // runtime. These shouldn't be listed in package.json, because plugins + // should work before Yarn even installs dependencies for the first time. + // https://yarnpkg.com/advanced/plugin-tutorial#what-does-a-plugin-look-like + "@yarnpkg/core", + "@yarnpkg/parsers", "matrix-widget-api", ], ignoreExportsUsedInFile: true, diff --git a/locales/en/app.json b/locales/en/app.json index 0b0ac7b4..9b1a5675 100644 --- a/locales/en/app.json +++ b/locales/en/app.json @@ -250,11 +250,11 @@ "video_tile": { "always_show": "Always show", "camera_starting": "Video loading...", - "change_fit_contain": "Fit to frame", "collapse": "Collapse", "expand": "Expand", "mute_for_me": "Mute for me", "muted_for_me": "Muted for me", + "screen_share_volume": "Screen share volume", "volume": "Volume", "waiting_for_media": "Waiting for media..." 
} diff --git a/package.json b/package.json index 705b0f10..9ee0ad26 100644 --- a/package.json +++ b/package.json @@ -48,7 +48,7 @@ "@livekit/components-core": "^0.12.0", "@livekit/components-react": "^2.0.0", "@livekit/protocol": "^1.42.2", - "@livekit/track-processors": "^0.6.0 || ^0.7.1", + "@livekit/track-processors": "^0.7.1", "@mediapipe/tasks-vision": "^0.10.18", "@playwright/test": "^1.57.0", "@radix-ui/react-dialog": "^1.0.4", @@ -101,7 +101,7 @@ "i18next-browser-languagedetector": "^8.0.0", "i18next-parser": "^9.1.0", "jsdom": "^26.0.0", - "knip": "5.82.1", + "knip": "^5.86.0", "livekit-client": "^2.13.0", "lodash-es": "^4.17.21", "loglevel": "^1.9.1", diff --git a/playwright/widget/huddle-call.test.ts b/playwright/widget/huddle-call.test.ts index b42c0ab2..d4ba0006 100644 --- a/playwright/widget/huddle-call.test.ts +++ b/playwright/widget/huddle-call.test.ts @@ -60,7 +60,7 @@ widgetTest("Create and join a group call", async ({ addUser, browserName }) => { // The only way to know if it is muted or not is to look at the data-kind attribute.. const videoButton = frame.getByTestId("incall_videomute"); await expect(videoButton).toBeVisible(); - // video should be off by default in a voice call + // video should be on await expect(videoButton).toHaveAttribute("aria-label", /^Stop video$/); } diff --git a/playwright/widget/pip-call.test.ts b/playwright/widget/pip-call.test.ts new file mode 100644 index 00000000..49ebec52 --- /dev/null +++ b/playwright/widget/pip-call.test.ts @@ -0,0 +1,74 @@ +/* +Copyright 2026 Element Creations Ltd. + +SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial +Please see LICENSE in the repository root for full details. 
+*/ + +import { expect, test } from "@playwright/test"; + +import { widgetTest } from "../fixtures/widget-user.ts"; +import { HOST1, TestHelpers } from "./test-helpers.ts"; + +widgetTest("Put call in PIP", async ({ addUser, browserName }) => { + test.skip( + browserName === "firefox", + "The is test is not working on firefox CI environment. No mic/audio device inputs so cam/mic are disabled", + ); + + test.slow(); + + const valere = await addUser("Valere", HOST1); + const timo = await addUser("Timo", HOST1); + + const callRoom = "TeamRoom"; + await TestHelpers.createRoom(callRoom, valere.page, [timo.mxId]); + + await TestHelpers.createRoom("DoubleTask", valere.page); + + await TestHelpers.acceptRoomInvite(callRoom, timo.page); + + await TestHelpers.switchToRoomNamed(valere.page, callRoom); + + // Start the call as Valere + await TestHelpers.startCallInCurrentRoom(valere.page, false); + await expect( + valere.page.locator('iframe[title="Element Call"]'), + ).toBeVisible(); + + await TestHelpers.joinCallFromLobby(valere.page); + + await TestHelpers.joinCallInCurrentRoom(timo.page); + + { + const frame = timo.page + .locator('iframe[title="Element Call"]') + .contentFrame(); + + const videoButton = frame.getByTestId("incall_videomute"); + await expect(videoButton).toBeVisible(); + // check that the video is on + await expect(videoButton).toHaveAttribute("aria-label", /^Stop video$/); + } + + // Switch to the other room, the call should go to PIP + await TestHelpers.switchToRoomNamed(valere.page, "DoubleTask"); + + // We should see the PIP overlay + await expect(valere.page.locator(".mx_WidgetPip_overlay")).toBeVisible(); + + { + // wait a bit so that the PIP has rendered the video + await valere.page.waitForTimeout(600); + + // Check for a bug where the video had the wrong fit in PIP + const frame = valere.page + .locator('iframe[title="Element Call"]') + .contentFrame(); + const videoElements = await frame.locator("video").all(); + 
expect(videoElements.length).toBe(1); + + const pipVideo = videoElements[0]; + await expect(pipVideo).toHaveCSS("object-fit", "cover"); + } +}); diff --git a/playwright/widget/test-helpers.ts b/playwright/widget/test-helpers.ts index 6fe4479b..4562ba5a 100644 --- a/playwright/widget/test-helpers.ts +++ b/playwright/widget/test-helpers.ts @@ -276,4 +276,16 @@ export class TestHelpers { }); } } + + /** + * Switches to a room in the room list by its name. + * @param page - The EW page + * @param roomName - The name of the room to switch to + */ + public static async switchToRoomNamed( + page: Page, + roomName: string, + ): Promise { + await page.getByRole("option", { name: `Open room ${roomName}` }).click(); + } } diff --git a/src/grid/TileWrapper.tsx b/src/grid/TileWrapper.tsx index 1bed08da..00689a78 100644 --- a/src/grid/TileWrapper.tsx +++ b/src/grid/TileWrapper.tsx @@ -27,7 +27,13 @@ interface Props { state: Parameters>[0], ) => void > | null; + /** + * The width this tile will have once its animations have settled. + */ targetWidth: number; + /** + * The height this tile will have once its animations have settled. + */ targetHeight: number; model: M; Tile: ComponentType>; diff --git a/src/room/InCallView.module.css b/src/room/InCallView.module.css index 96b8a368..55724932 100644 --- a/src/room/InCallView.module.css +++ b/src/room/InCallView.module.css @@ -65,6 +65,7 @@ Please see LICENSE in the repository root for full details. 
.footer.overlay.hidden { display: grid; opacity: 0; + pointer-events: none; } .footer.overlay:has(:focus-visible) { diff --git a/src/room/InCallView.tsx b/src/room/InCallView.tsx index 135745eb..d8803b22 100644 --- a/src/room/InCallView.tsx +++ b/src/room/InCallView.tsx @@ -9,8 +9,8 @@ import { IconButton, Text, Tooltip } from "@vector-im/compound-web"; import { type MatrixClient, type Room as MatrixRoom } from "matrix-js-sdk"; import { type FC, - type PointerEvent, - type TouchEvent, + type MouseEvent as ReactMouseEvent, + type PointerEvent as ReactPointerEvent, useCallback, useEffect, useMemo, @@ -110,8 +110,6 @@ import { ObservableScope } from "../state/ObservableScope.ts"; const logger = rootLogger.getChild("[InCallView]"); -const maxTapDurationMs = 400; - export interface ActiveCallProps extends Omit< InCallViewProps, "vm" | "livekitRoom" | "connState" @@ -334,40 +332,20 @@ export const InCallView: FC = ({ ) : null; }, [ringOverlay]); - // Ideally we could detect taps by listening for click events and checking - // that the pointerType of the event is "touch", but this isn't yet supported - // in Safari: https://developer.mozilla.org/en-US/docs/Web/API/Element/click_event#browser_compatibility - // Instead we have to watch for sufficiently fast touch events. 
- const touchStart = useRef(null); - const onTouchStart = useCallback(() => (touchStart.current = Date.now()), []); - const onTouchEnd = useCallback(() => { - const start = touchStart.current; - if (start !== null && Date.now() - start <= maxTapDurationMs) - vm.tapScreen(); - touchStart.current = null; - }, [vm]); - const onTouchCancel = useCallback(() => (touchStart.current = null), []); - - // We also need to tell the footer controls to prevent touch events from - // bubbling up, or else the footer will be dismissed before a click/change - // event can be registered on the control - const onControlsTouchEnd = useCallback( - (e: TouchEvent) => { - // Somehow applying pointer-events: none to the controls when the footer - // is hidden is not enough to stop clicks from happening as the footer - // becomes visible, so we check manually whether the footer is shown - if (showFooter) { - e.stopPropagation(); - vm.tapControls(); - } else { - e.preventDefault(); - } + const onViewClick = useCallback( + (e: ReactMouseEvent) => { + if ( + (e.nativeEvent as PointerEvent).pointerType === "touch" && + // If an interactive element was tapped, don't count this as a tap on the screen + (e.target as Element).closest?.("button, input") === null + ) + vm.tapScreen(); }, - [vm, showFooter], + [vm], ); const onPointerMove = useCallback( - (e: PointerEvent) => { + (e: ReactPointerEvent) => { if (e.pointerType === "mouse") vm.hoverScreen(); }, [vm], @@ -606,8 +584,8 @@ export const InCallView: FC = ({ vm={layout.spotlight} expanded onToggleExpanded={null} - targetWidth={gridBounds.height} - targetHeight={gridBounds.width} + targetWidth={gridBounds.width} + targetHeight={gridBounds.height} showIndicators={false} focusable={!contentObscured} aria-hidden={contentObscured} @@ -667,7 +645,6 @@ export const InCallView: FC = ({ key="audio" muted={!audioEnabled} onClick={toggleAudio ?? 
undefined} - onTouchEnd={onControlsTouchEnd} disabled={toggleAudio === null} data-testid="incall_mute" />, @@ -675,7 +652,6 @@ export const InCallView: FC = ({ key="video" muted={!videoEnabled} onClick={toggleVideo ?? undefined} - onTouchEnd={onControlsTouchEnd} disabled={toggleVideo === null} data-testid="incall_videomute" />, @@ -687,7 +663,6 @@ export const InCallView: FC = ({ className={styles.shareScreen} enabled={sharingScreen} onClick={vm.toggleScreenSharing} - onTouchEnd={onControlsTouchEnd} data-testid="incall_screenshare" />, ); @@ -699,18 +674,11 @@ export const InCallView: FC = ({ key="raise_hand" className={styles.raiseHand} identifier={`${client.getUserId()}:${client.getDeviceId()}`} - onTouchEnd={onControlsTouchEnd} />, ); } if (layout.type !== "pip") - buttons.push( - , - ); + buttons.push(); buttons.push( = ({ onClick={function (): void { vm.hangup(); }} - onTouchEnd={onControlsTouchEnd} data-testid="incall_leave" />, ); @@ -751,7 +718,6 @@ export const InCallView: FC = ({ className={styles.layout} layout={gridMode} setLayout={setGridMode} - onTouchEnd={onControlsTouchEnd} /> )} @@ -760,12 +726,13 @@ export const InCallView: FC = ({ const allConnections = useBehavior(vm.allConnections$); return ( + // The onClick handler here exists to control the visibility of the footer, + // and the footer is also viewable by moving focus into it, so this is fine. + // eslint-disable-next-line jsx-a11y/no-static-element-interactions, jsx-a11y/click-events-have-key-events
diff --git a/src/room/LayoutToggle.tsx b/src/room/LayoutToggle.tsx index 6cddc95f..ca6aa467 100644 --- a/src/room/LayoutToggle.tsx +++ b/src/room/LayoutToggle.tsx @@ -5,7 +5,7 @@ SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial Please see LICENSE in the repository root for full details. */ -import { type ChangeEvent, type FC, type TouchEvent, useCallback } from "react"; +import { type ChangeEvent, type FC, useCallback } from "react"; import { useTranslation } from "react-i18next"; import { Tooltip } from "@vector-im/compound-web"; import { @@ -22,15 +22,9 @@ interface Props { layout: Layout; setLayout: (layout: Layout) => void; className?: string; - onTouchEnd?: (e: TouchEvent) => void; } -export const LayoutToggle: FC = ({ - layout, - setLayout, - className, - onTouchEnd, -}) => { +export const LayoutToggle: FC = ({ layout, setLayout, className }) => { const { t } = useTranslation(); const onChange = useCallback( @@ -47,7 +41,6 @@ export const LayoutToggle: FC = ({ value="spotlight" checked={layout === "spotlight"} onChange={onChange} - onTouchEnd={onTouchEnd} /> @@ -58,7 +51,6 @@ export const LayoutToggle: FC = ({ value="grid" checked={layout === "grid"} onChange={onChange} - onTouchEnd={onTouchEnd} /> diff --git a/src/state/media/MediaViewModel.test.ts b/src/state/media/MediaViewModel.test.ts index 71475b8c..9d873ccb 100644 --- a/src/state/media/MediaViewModel.test.ts +++ b/src/state/media/MediaViewModel.test.ts @@ -9,6 +9,7 @@ import { expect, onTestFinished, test, vi } from "vitest"; import { type LocalTrackPublication, LocalVideoTrack, + Track, TrackEvent, } from "livekit-client"; import { waitFor } from "@testing-library/dom"; @@ -21,6 +22,7 @@ import { mockRemoteMedia, withTestScheduler, mockRemoteParticipant, + mockRemoteScreenShare, } from "../../utils/test"; import { constant } from "../Behavior"; @@ -91,17 +93,69 @@ test("control a participant's volume", () => { }); }); -test("toggle fit/contain for a participant's video", () => { 
- const vm = mockRemoteMedia(rtcMembership, {}, mockRemoteParticipant({})); +test("control a participant's screen share volume", () => { + const setVolumeSpy = vi.fn(); + const vm = mockRemoteScreenShare( + rtcMembership, + {}, + mockRemoteParticipant({ setVolume: setVolumeSpy }), + ); withTestScheduler(({ expectObservable, schedule }) => { - schedule("-ab|", { - a: () => vm.toggleCropVideo(), - b: () => vm.toggleCropVideo(), + schedule("-ab---c---d|", { + a() { + // Try muting by toggling + vm.togglePlaybackMuted(); + expect(setVolumeSpy).toHaveBeenLastCalledWith( + 0, + Track.Source.ScreenShareAudio, + ); + }, + b() { + // Try unmuting by dragging the slider back up + vm.adjustPlaybackVolume(0.6); + vm.adjustPlaybackVolume(0.8); + vm.commitPlaybackVolume(); + expect(setVolumeSpy).toHaveBeenCalledWith( + 0.6, + Track.Source.ScreenShareAudio, + ); + expect(setVolumeSpy).toHaveBeenLastCalledWith( + 0.8, + Track.Source.ScreenShareAudio, + ); + }, + c() { + // Try muting by dragging the slider back down + vm.adjustPlaybackVolume(0.2); + vm.adjustPlaybackVolume(0); + vm.commitPlaybackVolume(); + expect(setVolumeSpy).toHaveBeenCalledWith( + 0.2, + Track.Source.ScreenShareAudio, + ); + expect(setVolumeSpy).toHaveBeenLastCalledWith( + 0, + Track.Source.ScreenShareAudio, + ); + }, + d() { + // Try unmuting by toggling + vm.togglePlaybackMuted(); + // The volume should return to the last non-zero committed volume + expect(setVolumeSpy).toHaveBeenLastCalledWith( + 0.8, + Track.Source.ScreenShareAudio, + ); + }, }); - expectObservable(vm.cropVideo$).toBe("abc", { - a: true, - b: false, - c: true, + expectObservable(vm.playbackVolume$).toBe("ab(cd)(ef)g", { + a: 1, + b: 0, + c: 0.6, + d: 0.8, + e: 0.2, + f: 0, + g: 0.8, }); }); }); diff --git a/src/state/media/RemoteScreenShareViewModel.ts b/src/state/media/RemoteScreenShareViewModel.ts index eff6d9c1..8c46aeb3 100644 --- a/src/state/media/RemoteScreenShareViewModel.ts +++ b/src/state/media/RemoteScreenShareViewModel.ts @@ 
-6,8 +6,8 @@ SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial Please see LICENSE in the repository root for full details. */ -import { type RemoteParticipant } from "livekit-client"; -import { map } from "rxjs"; +import { Track, type RemoteParticipant } from "livekit-client"; +import { map, of, switchMap } from "rxjs"; import { type Behavior } from "../Behavior"; import { @@ -16,13 +16,20 @@ import { createBaseScreenShare, } from "./ScreenShareViewModel"; import { type ObservableScope } from "../ObservableScope"; +import { createVolumeControls, type VolumeControls } from "../VolumeControls"; +import { observeTrackReference$ } from "../observeTrackReference"; -export interface RemoteScreenShareViewModel extends BaseScreenShareViewModel { +export interface RemoteScreenShareViewModel + extends BaseScreenShareViewModel, VolumeControls { local: false; /** * Whether this screen share's video should be displayed. */ videoEnabled$: Behavior; + /** + * Whether this screen share should be considered to have an audio track. + */ + audioEnabled$: Behavior; } export interface RemoteScreenShareInputs extends BaseScreenShareInputs { @@ -36,9 +43,30 @@ export function createRemoteScreenShare( ): RemoteScreenShareViewModel { return { ...createBaseScreenShare(scope, inputs), + ...createVolumeControls(scope, { + pretendToBeDisconnected$, + sink$: scope.behavior( + inputs.participant$.pipe( + map( + (p) => (volume) => + p?.setVolume(volume, Track.Source.ScreenShareAudio), + ), + ), + ), + }), local: false, videoEnabled$: scope.behavior( pretendToBeDisconnected$.pipe(map((disconnected) => !disconnected)), ), + audioEnabled$: scope.behavior( + inputs.participant$.pipe( + switchMap((p) => + p + ? 
observeTrackReference$(p, Track.Source.ScreenShareAudio) + : of(null), + ), + map(Boolean), + ), + ), }; } diff --git a/src/state/media/UserMediaViewModel.ts b/src/state/media/UserMediaViewModel.ts index 8da5e63a..16af7f26 100644 --- a/src/state/media/UserMediaViewModel.ts +++ b/src/state/media/UserMediaViewModel.ts @@ -7,6 +7,7 @@ Please see LICENSE in the repository root for full details. */ import { + BehaviorSubject, combineLatest, map, type Observable, @@ -30,9 +31,9 @@ import { } from "./MemberMediaViewModel"; import { type RemoteUserMediaViewModel } from "./RemoteUserMediaViewModel"; import { type ObservableScope } from "../ObservableScope"; -import { createToggle$ } from "../../utils/observable"; import { showConnectionStats } from "../../settings/settings"; import { observeRtpStreamStats$ } from "./observeRtpStreamStats"; +import { videoFit$, videoSizeFromParticipant$ } from "../../utils/videoFit.ts"; /** * A participant's user media (i.e. their microphone and camera feed). @@ -46,7 +47,7 @@ export interface BaseUserMediaViewModel extends MemberMediaViewModel { speaking$: Behavior; audioEnabled$: Behavior; videoEnabled$: Behavior; - cropVideo$: Behavior; + videoFit$: Behavior<"cover" | "contain">; toggleCropVideo: () => void; /** * The expected identity of the LiveKit participant. Exposed for debugging. @@ -60,6 +61,13 @@ export interface BaseUserMediaViewModel extends MemberMediaViewModel { videoStreamStats$: Observable< RTCInboundRtpStreamStats | RTCOutboundRtpStreamStats | undefined >; + /** + * Set the target dimensions of the HTML element (final dimension after anim). + * This can be used to determine the best video fit (fit to frame / keep ratio). + * @param targetWidth - The target width of the HTML element displaying the video. + * @param targetHeight - The target height of the HTML element displaying the video. 
+ */ + setTargetDimensions: (targetWidth: number, targetHeight: number) => void; } export interface BaseUserMediaInputs extends Omit< @@ -90,6 +98,12 @@ export function createBaseUserMedia( ); const toggleCropVideo$ = new Subject(); + // The target size of the video element, used to determine the best video fit. + // The target size is the final size of the HTML element after any animations have completed. + const targetSize$ = new BehaviorSubject< + { width: number; height: number } | undefined + >(undefined); + return { ...createMemberMedia(scope, { ...inputs, @@ -115,7 +129,11 @@ export function createBaseUserMedia( videoEnabled$: scope.behavior( media$.pipe(map((m) => m?.cameraTrack?.isMuted === false)), ), - cropVideo$: createToggle$(scope, true, toggleCropVideo$), + videoFit$: videoFit$( + scope, + videoSizeFromParticipant$(participant$), + targetSize$, + ), toggleCropVideo: () => toggleCropVideo$.next(), rtcBackendIdentity, handRaised$, @@ -139,5 +157,8 @@ export function createBaseUserMedia( return observeRtpStreamStats$(p, Track.Source.Camera, statsType); }), ), + setTargetDimensions: (targetWidth: number, targetHeight: number): void => { + targetSize$.next({ width: targetWidth, height: targetHeight }); + }, }; } diff --git a/src/state/media/observeRtpStreamStats.ts b/src/state/media/observeRtpStreamStats.ts index d1640382..63fb1a1b 100644 --- a/src/state/media/observeRtpStreamStats.ts +++ b/src/state/media/observeRtpStreamStats.ts @@ -67,3 +67,12 @@ export function observeInboundRtpStreamStats$( map((x) => x as RTCInboundRtpStreamStats | undefined), ); } + +export function observeOutboundRtpStreamStats$( + participant: Participant, + source: Track.Source, +): Observable { + return observeRtpStreamStats$(participant, source, "outbound-rtp").pipe( + map((x) => x as RTCOutboundRtpStreamStats | undefined), + ); +} diff --git a/src/tile/GridTile.tsx b/src/tile/GridTile.tsx index 9c3adea7..c8052a65 100644 --- a/src/tile/GridTile.tsx +++ b/src/tile/GridTile.tsx 
@@ -11,6 +11,7 @@ import { type ReactNode, type Ref, useCallback, + useEffect, useRef, useState, } from "react"; @@ -26,7 +27,6 @@ import { VolumeOffIcon, VisibilityOnIcon, UserProfileIcon, - ExpandIcon, VolumeOffSolidIcon, SwitchCameraSolidIcon, } from "@vector-im/compound-design-tokens/assets/web/icons"; @@ -87,6 +87,8 @@ const UserMediaTile: FC = ({ displayName, mxcAvatarUrl, focusable, + targetWidth, + targetHeight, ...props }) => { const { toggleRaisedHand } = useReactionsSender(); @@ -103,18 +105,19 @@ const UserMediaTile: FC = ({ const audioEnabled = useBehavior(vm.audioEnabled$); const videoEnabled = useBehavior(vm.videoEnabled$); const speaking = useBehavior(vm.speaking$); - const cropVideo = useBehavior(vm.cropVideo$); - const onSelectFitContain = useCallback( - (e: Event) => { - e.preventDefault(); - vm.toggleCropVideo(); - }, - [vm], - ); + const videoFit = useBehavior(vm.videoFit$); + const rtcBackendIdentity = vm.rtcBackendIdentity; const handRaised = useBehavior(vm.handRaised$); const reaction = useBehavior(vm.reaction$); + // Whenever bounds change, inform the viewModel + useEffect(() => { + if (targetWidth > 0 && targetHeight > 0) { + vm.setTargetDimensions(targetWidth, targetHeight); + } + }, [targetWidth, targetHeight, vm]); + const AudioIcon = playbackMuted ? VolumeOffSolidIcon : audioEnabled @@ -130,12 +133,10 @@ const UserMediaTile: FC = ({ const menu = ( <> {menuStart} - + {/* + No additional menu item (this used to be the manual "fit to frame" option). + Placeholder for future menu items that should be placed here. + */} {menuEnd} ); @@ -154,7 +155,7 @@ const UserMediaTile: FC = ({ unencryptedWarning={unencryptedWarning} encryptionStatus={encryptionStatus} videoEnabled={videoEnabled} - videoFit={cropVideo ? 
"cover" : "contain"} + videoFit={videoFit} className={classNames(className, styles.tile, { [styles.speaking]: showSpeaking, [styles.handRaised]: !showSpeaking && handRaised, @@ -200,6 +201,8 @@ const UserMediaTile: FC = ({ audioStreamStats={audioStreamStats} videoStreamStats={videoStreamStats} rtcBackendIdentity={rtcBackendIdentity} + targetWidth={targetWidth} + targetHeight={targetHeight} {...props} /> ); diff --git a/src/tile/SpotlightTile.module.css b/src/tile/SpotlightTile.module.css index 622496d2..af0e0add 100644 --- a/src/tile/SpotlightTile.module.css +++ b/src/tile/SpotlightTile.module.css @@ -84,7 +84,6 @@ Please see LICENSE in the repository root for full details. .expand { appearance: none; cursor: pointer; - opacity: 0; padding: var(--cpd-space-2x); border: none; border-radius: var(--cpd-radius-pill-effect); @@ -108,6 +107,35 @@ Please see LICENSE in the repository root for full details. z-index: 1; } +.volumeSlider { + width: 100%; + min-width: 172px; +} + +/* Disable the hover effect for the screen share volume menu button */ +.volumeMenuItem:hover { + background: transparent; + cursor: default; +} + +.volumeMenuItem { + gap: var(--cpd-space-3x); +} + +.menuMuteButton { + appearance: none; + background: none; + border: none; + padding: 0; + cursor: pointer; + display: flex; +} + +/* Make icons change color with the theme */ +.menuMuteButton > svg { + color: var(--cpd-color-icon-primary); +} + .expand > svg { display: block; color: var(--cpd-color-icon-primary); @@ -119,17 +147,22 @@ Please see LICENSE in the repository root for full details. 
} } -.expand:active { +.expand:active, +.expand[data-state="open"] { background: var(--cpd-color-gray-100); } @media (hover) { + .tile > div > button { + opacity: 0; + } .tile:hover > div > button { opacity: 1; } } -.tile:has(:focus-visible) > div > button { +.tile:has(:focus-visible) > div > button, +.tile > div:has([data-state="open"]) > button { opacity: 1; } diff --git a/src/tile/SpotlightTile.test.tsx b/src/tile/SpotlightTile.test.tsx index a5332194..aac81b9c 100644 --- a/src/tile/SpotlightTile.test.tsx +++ b/src/tile/SpotlightTile.test.tsx @@ -9,6 +9,7 @@ import { test, expect, vi } from "vitest"; import { isInaccessible, render, screen } from "@testing-library/react"; import { axe } from "vitest-axe"; import userEvent from "@testing-library/user-event"; +import { TooltipProvider } from "@vector-im/compound-web"; import { SpotlightTile } from "./SpotlightTile"; import { @@ -18,6 +19,7 @@ import { mockLocalMedia, mockRemoteMedia, mockRemoteParticipant, + mockRemoteScreenShare, } from "../utils/test"; import { SpotlightTileViewModel } from "../state/TileViewModel"; import { constant } from "../state/Behavior"; @@ -78,3 +80,63 @@ test("SpotlightTile is accessible", async () => { await user.click(screen.getByRole("button", { name: "Expand" })); expect(toggleExpanded).toHaveBeenCalled(); }); + +test("Screen share volume UI is shown when screen share has audio", async () => { + const vm = mockRemoteScreenShare( + mockRtcMembership("@alice:example.org", "AAAA"), + {}, + mockRemoteParticipant({}), + ); + + vi.spyOn(vm, "audioEnabled$", "get").mockReturnValue(constant(true)); + + const toggleExpanded = vi.fn(); + const { container } = render( + + + , + ); + + expect(await axe(container)).toHaveNoViolations(); + + // Volume menu button should exist + expect(screen.queryByRole("button", { name: /volume/i })).toBeInTheDocument(); +}); + +test("Screen share volume UI is hidden when screen share has no audio", async () => { + const vm = mockRemoteScreenShare( + 
mockRtcMembership("@alice:example.org", "AAAA"), + {}, + mockRemoteParticipant({}), + ); + + vi.spyOn(vm, "audioEnabled$", "get").mockReturnValue(constant(false)); + + const toggleExpanded = vi.fn(); + const { container } = render( + , + ); + + expect(await axe(container)).toHaveNoViolations(); + + // Volume menu button should not exist + expect( + screen.queryByRole("button", { name: /volume/i }), + ).not.toBeInTheDocument(); +}); diff --git a/src/tile/SpotlightTile.tsx b/src/tile/SpotlightTile.tsx index 75c69479..aa66d6b6 100644 --- a/src/tile/SpotlightTile.tsx +++ b/src/tile/SpotlightTile.tsx @@ -20,6 +20,10 @@ import { CollapseIcon, ChevronLeftIcon, ChevronRightIcon, + VolumeOffIcon, + VolumeOnIcon, + VolumeOffSolidIcon, + VolumeOnSolidIcon, } from "@vector-im/compound-design-tokens/assets/web/icons"; import { animated } from "@react-spring/web"; import { type Observable, map } from "rxjs"; @@ -27,6 +31,7 @@ import { useObservableRef } from "observable-hooks"; import { useTranslation } from "react-i18next"; import classNames from "classnames"; import { type TrackReferenceOrPlaceholder } from "@livekit/components-core"; +import { Menu, MenuItem } from "@vector-im/compound-web"; import FullScreenMaximiseIcon from "../icons/FullScreenMaximise.svg?react"; import FullScreenMinimiseIcon from "../icons/FullScreenMinimise.svg?react"; @@ -45,6 +50,8 @@ import { type UserMediaViewModel } from "../state/media/UserMediaViewModel"; import { type ScreenShareViewModel } from "../state/media/ScreenShareViewModel"; import { type RemoteScreenShareViewModel } from "../state/media/RemoteScreenShareViewModel"; import { type MediaViewModel } from "../state/media/MediaViewModel"; +import { Slider } from "../Slider"; +import { platform } from "../Platform"; interface SpotlightItemBaseProps { ref?: Ref; @@ -104,12 +111,12 @@ const SpotlightUserMediaItem: FC = ({ vm, ...props }) => { - const cropVideo = useBehavior(vm.cropVideo$); + const videoFit = useBehavior(vm.videoFit$); const 
videoEnabled = useBehavior(vm.videoEnabled$); const baseProps: SpotlightUserMediaItemBaseProps & RefAttributes = { - videoFit: cropVideo ? "cover" : "contain", + videoFit, videoEnabled, ...props, }; @@ -151,7 +158,13 @@ const SpotlightRemoteScreenShareItem: FC< interface SpotlightItemProps { ref?: Ref; vm: MediaViewModel; + /** + * The width this tile will have once its animations have settled. + */ targetWidth: number; + /** + * The height this tile will have once its animations have settled. + */ targetHeight: number; focusable: boolean; intersectionObserver$: Observable; @@ -173,6 +186,16 @@ const SpotlightItem: FC = ({ "aria-hidden": ariaHidden, }) => { const ourRef = useRef(null); + + // Whenever target bounds change, inform the viewModel + useEffect(() => { + if (targetWidth > 0 && targetHeight > 0) { + if (vm.type != "screen share") { + vm.setTargetDimensions(targetWidth, targetHeight); + } + } + }, [targetWidth, targetHeight, vm]); + const ref = useMergedRefs(ourRef, theirRef); const focusUrl = useBehavior(vm.focusUrl$); const displayName = useBehavior(vm.displayName$); @@ -224,6 +247,73 @@ const SpotlightItem: FC = ({ SpotlightItem.displayName = "SpotlightItem"; +interface ScreenShareVolumeButtonProps { + vm: RemoteScreenShareViewModel; +} + +const ScreenShareVolumeButton: FC = ({ vm }) => { + const { t } = useTranslation(); + + const audioEnabled = useBehavior(vm.audioEnabled$); + const playbackMuted = useBehavior(vm.playbackMuted$); + const playbackVolume = useBehavior(vm.playbackVolume$); + + const VolumeIcon = playbackMuted ? VolumeOffIcon : VolumeOnIcon; + const VolumeSolidIcon = playbackMuted + ? 
VolumeOffSolidIcon + : VolumeOnSolidIcon; + + const [volumeMenuOpen, setVolumeMenuOpen] = useState(false); + const onMuteButtonClick = useCallback(() => vm.togglePlaybackMuted(), [vm]); + const onVolumeChange = useCallback( + (v: number) => vm.adjustPlaybackVolume(v), + [vm], + ); + const onVolumeCommit = useCallback(() => vm.commitPlaybackVolume(), [vm]); + + return ( + audioEnabled && ( + + + + } + > + + + + + + ) + ); +}; + interface Props { ref?: Ref; vm: SpotlightTileViewModel; @@ -258,6 +348,7 @@ export const SpotlightTile: FC = ({ const latestMedia = useLatest(media); const latestVisibleId = useLatest(visibleId); const visibleIndex = media.findIndex((vm) => vm.id === visibleId); + const visibleMedia = media.at(visibleIndex); const canGoBack = visibleIndex > 0; const canGoToNext = visibleIndex !== -1 && visibleIndex < media.length - 1; @@ -365,16 +456,21 @@ export const SpotlightTile: FC = ({ /> ))}
-
- +
+ {visibleMedia?.type === "screen share" && !visibleMedia.local && ( + + )} + {platform === "desktop" && ( + + )} {onToggleExpanded && (