Merge pull request #3755 from element-hq/robin/ringing

Convert media view model classes to interfaces
This commit is contained in:
Robin
2026-02-27 17:27:58 +01:00
committed by GitHub
31 changed files with 1444 additions and 1310 deletions

View File

@@ -51,14 +51,7 @@ import { v4 as uuidv4 } from "uuid";
import { type IMembershipManager } from "matrix-js-sdk/lib/matrixrtc/IMembershipManager";
import {
LocalUserMediaViewModel,
type MediaViewModel,
type RemoteUserMediaViewModel,
ScreenShareViewModel,
type UserMediaViewModel,
} from "../MediaViewModel";
import {
accumulate,
createToggle$,
filterBehavior,
generateItem,
generateItems,
@@ -92,8 +85,6 @@ import { type MuteStates } from "../MuteStates";
import { getUrlParams } from "../../UrlParams";
import { type ProcessorState } from "../../livekit/TrackProcessorContext";
import { ElementWidgetActions, widget } from "../../widget";
import { UserMedia } from "../UserMedia.ts";
import { ScreenShare } from "../ScreenShare.ts";
import {
type GridLayoutMedia,
type Layout,
@@ -144,6 +135,14 @@ import {
import { Publisher } from "./localMember/Publisher.ts";
import { type Connection } from "./remoteMembers/Connection.ts";
import { createLayoutModeSwitch } from "./LayoutSwitch.ts";
import {
createWrappedUserMedia,
type MediaItem,
type WrappedUserMediaViewModel,
} from "../media/MediaItem.ts";
import { type ScreenShareViewModel } from "../media/ScreenShareViewModel.ts";
import { type UserMediaViewModel } from "../media/UserMediaViewModel.ts";
import { type MediaViewModel } from "../media/MediaViewModel.ts";
const logger = rootLogger.getChild("[CallViewModel]");
//TODO
@@ -193,7 +192,6 @@ interface LayoutScanState {
tiles: TileStore;
}
type MediaItem = UserMedia | ScreenShare;
export type LivekitRoomItem = {
livekitRoom: LivekitRoom;
participants: string[];
@@ -283,7 +281,6 @@ export interface CallViewModel {
allConnections$: Behavior<ConnectionManagerData>;
/** Participants sorted by livekit room so they can be used in the audio rendering */
livekitRoomItems$: Behavior<LivekitRoomItem[]>;
userMedia$: Behavior<UserMedia[]>;
/** use the layout instead, this is just for the sdk export. */
matrixLivekitMembers$: Behavior<RemoteMatrixLivekitMember[]>;
localMatrixLivekitMember$: Behavior<LocalMatrixLivekitMember | null>;
@@ -334,10 +331,6 @@ export interface CallViewModel {
gridMode$: Behavior<GridMode>;
setGridMode: (value: GridMode) => void;
// media view models and layout
grid$: Behavior<UserMediaViewModel[]>;
spotlight$: Behavior<MediaViewModel[]>;
pip$: Behavior<UserMediaViewModel | null>;
/**
* The layout of tiles in the call interface.
*/
@@ -721,7 +714,7 @@ export function createCallViewModel$(
/**
* List of user media (camera feeds) that we want tiles for.
*/
const userMedia$ = scope.behavior<UserMedia[]>(
const userMedia$ = scope.behavior<WrappedUserMediaViewModel[]>(
combineLatest([
localMatrixLivekitMember$,
matrixLivekitMembers$,
@@ -767,36 +760,35 @@ export function createCallViewModel$(
}
}
},
(scope, _, dup, mediaId, userId, participant, connection$, rtcId) => {
const livekitRoom$ = scope.behavior(
connection$.pipe(map((c) => c?.livekitRoom)),
);
const focusUrl$ = scope.behavior(
connection$.pipe(map((c) => c?.transport.livekit_service_url)),
);
const displayName$ = scope.behavior(
matrixMemberMetadataStore
.createDisplayNameBehavior$(userId)
.pipe(map((name) => name ?? userId)),
);
return new UserMedia(
scope,
`${mediaId}:${dup}`,
(scope, _, dup, mediaId, userId, participant, connection$, rtcId) =>
createWrappedUserMedia(scope, {
id: `${mediaId}:${dup}`,
userId,
rtcId,
rtcBackendIdentity: rtcId,
participant,
options.encryptionSystem,
livekitRoom$,
focusUrl$,
encryptionSystem: options.encryptionSystem,
livekitRoom$: scope.behavior(
connection$.pipe(map((c) => c?.livekitRoom)),
),
focusUrl$: scope.behavior(
connection$.pipe(map((c) => c?.transport.livekit_service_url)),
),
mediaDevices,
localMembership.reconnecting$,
displayName$,
matrixMemberMetadataStore.createAvatarUrlBehavior$(userId),
handsRaised$.pipe(map((v) => v[mediaId]?.time ?? null)),
reactions$.pipe(map((v) => v[mediaId] ?? undefined)),
);
},
pretendToBeDisconnected$: localMembership.reconnecting$,
displayName$: scope.behavior(
matrixMemberMetadataStore
.createDisplayNameBehavior$(userId)
.pipe(map((name) => name ?? userId)),
),
mxcAvatarUrl$:
matrixMemberMetadataStore.createAvatarUrlBehavior$(userId),
handRaised$: scope.behavior(
handsRaised$.pipe(map((v) => v[mediaId]?.time ?? null)),
),
reaction$: scope.behavior(
reactions$.pipe(map((v) => v[mediaId] ?? undefined)),
),
}),
),
),
);
@@ -821,11 +813,9 @@ export function createCallViewModel$(
/**
* List of MediaItems that we want to display, that are of type ScreenShare
*/
const screenShares$ = scope.behavior<ScreenShare[]>(
const screenShares$ = scope.behavior<ScreenShareViewModel[]>(
mediaItems$.pipe(
map((mediaItems) =>
mediaItems.filter((m): m is ScreenShare => m instanceof ScreenShare),
),
map((mediaItems) => mediaItems.filter((m) => m.type === "screen share")),
),
);
@@ -888,39 +878,39 @@ export function createCallViewModel$(
merge(userHangup$, widgetHangup$).pipe(map(() => "user" as const)),
).pipe(scope.share);
const spotlightSpeaker$ = scope.behavior<UserMediaViewModel | null>(
const spotlightSpeaker$ = scope.behavior<UserMediaViewModel | undefined>(
userMedia$.pipe(
switchMap((mediaItems) =>
mediaItems.length === 0
? of([])
: combineLatest(
mediaItems.map((m) =>
m.vm.speaking$.pipe(map((s) => [m, s] as const)),
m.speaking$.pipe(map((s) => [m, s] as const)),
),
),
),
scan<(readonly [UserMedia, boolean])[], UserMedia | undefined, null>(
(prev, mediaItems) => {
// Only remote users that are still in the call should be sticky
const [stickyMedia, stickySpeaking] =
(!prev?.vm.local && mediaItems.find(([m]) => m === prev)) || [];
// Decide who to spotlight:
// If the previous speaker is still speaking, stick with them rather
// than switching eagerly to someone else
return stickySpeaking
? stickyMedia!
: // Otherwise, select any remote user who is speaking
(mediaItems.find(([m, s]) => !m.vm.local && s)?.[0] ??
// Otherwise, stick with the person who was last speaking
stickyMedia ??
// Otherwise, spotlight an arbitrary remote user
mediaItems.find(([m]) => !m.vm.local)?.[0] ??
// Otherwise, spotlight the local user
mediaItems.find(([m]) => m.vm.local)?.[0]);
},
null,
),
map((speaker) => speaker?.vm ?? null),
scan<
(readonly [UserMediaViewModel, boolean])[],
UserMediaViewModel | undefined,
undefined
>((prev, mediaItems) => {
// Only remote users that are still in the call should be sticky
const [stickyMedia, stickySpeaking] =
(!prev?.local && mediaItems.find(([m]) => m === prev)) || [];
// Decide who to spotlight:
// If the previous speaker is still speaking, stick with them rather
// than switching eagerly to someone else
return stickySpeaking
? stickyMedia!
: // Otherwise, select any remote user who is speaking
(mediaItems.find(([m, s]) => !m.local && s)?.[0] ??
// Otherwise, stick with the person who was last speaking
stickyMedia ??
// Otherwise, spotlight an arbitrary remote user
mediaItems.find(([m]) => !m.local)?.[0] ??
// Otherwise, spotlight the local user
mediaItems.find(([m]) => m.local)?.[0]);
}, undefined),
),
);
@@ -934,7 +924,7 @@ export function createCallViewModel$(
return bins.length === 0
? of([])
: combineLatest(bins, (...bins) =>
bins.sort(([, bin1], [, bin2]) => bin1 - bin2).map(([m]) => m.vm),
bins.sort(([, bin1], [, bin2]) => bin1 - bin2).map(([m]) => m),
);
}),
distinctUntilChanged(shallowEquals),
@@ -944,9 +934,7 @@ export function createCallViewModel$(
const spotlight$ = scope.behavior<MediaViewModel[]>(
screenShares$.pipe(
switchMap((screenShares) => {
if (screenShares.length > 0) {
return of(screenShares.map((m) => m.vm));
}
if (screenShares.length > 0) return of(screenShares);
return spotlightSpeaker$.pipe(
map((speaker) => (speaker ? [speaker] : [])),
@@ -956,7 +944,7 @@ export function createCallViewModel$(
),
);
const pip$ = scope.behavior<UserMediaViewModel | null>(
const pip$ = scope.behavior<UserMediaViewModel | undefined>(
combineLatest([
// TODO This also needs epoch logic to dedupe the screenshares and mediaItems emits
screenShares$,
@@ -968,28 +956,17 @@ export function createCallViewModel$(
return spotlightSpeaker$;
}
if (!spotlight || spotlight.local) {
return of(null);
return of(undefined);
}
const localUserMedia = mediaItems.find(
(m) => m.vm instanceof LocalUserMediaViewModel,
) as UserMedia | undefined;
const localUserMediaViewModel = localUserMedia?.vm as
| LocalUserMediaViewModel
| undefined;
if (!localUserMediaViewModel) {
return of(null);
(m) => m.type === "user" && m.local,
);
if (!localUserMedia) {
return of(undefined);
}
return localUserMediaViewModel.alwaysShow$.pipe(
map((alwaysShow) => {
if (alwaysShow) {
return localUserMediaViewModel;
}
return null;
}),
return localUserMedia.alwaysShow$.pipe(
map((alwaysShow) => (alwaysShow ? localUserMedia : undefined)),
);
}),
),
@@ -998,7 +975,7 @@ export function createCallViewModel$(
const hasRemoteScreenShares$ = scope.behavior<boolean>(
spotlight$.pipe(
map((spotlight) =>
spotlight.some((vm) => !vm.local && vm instanceof ScreenShareViewModel),
spotlight.some((vm) => vm.type === "screen share" && !vm.local),
),
),
);
@@ -1039,8 +1016,10 @@ export function createCallViewModel$(
);
const spotlightExpandedToggle$ = new Subject<void>();
const spotlightExpanded$ = scope.behavior<boolean>(
spotlightExpandedToggle$.pipe(accumulate(false, (expanded) => !expanded)),
const spotlightExpanded$ = createToggle$(
scope,
false,
spotlightExpandedToggle$,
);
const { setGridMode, gridMode$ } = createLayoutModeSwitch(
@@ -1053,7 +1032,7 @@ export function createCallViewModel$(
[grid$, spotlight$],
(grid, spotlight) => ({
type: "grid",
spotlight: spotlight.some((vm) => vm instanceof ScreenShareViewModel)
spotlight: spotlight.some((vm) => vm.type === "screen share")
? spotlight
: undefined,
grid,
@@ -1085,12 +1064,8 @@ export function createCallViewModel$(
mediaItems$.pipe(
map((mediaItems) => {
if (mediaItems.length !== 2) return null;
const local = mediaItems.find((vm) => vm.vm.local)?.vm as
| LocalUserMediaViewModel
| undefined;
const remote = mediaItems.find((vm) => !vm.vm.local)?.vm as
| RemoteUserMediaViewModel
| undefined;
const local = mediaItems.find((vm) => vm.type === "user" && vm.local);
const remote = mediaItems.find((vm) => vm.type === "user" && !vm.local);
// There might not be a remote tile if there are screen shares, or if
// only the local user is in the call and they're using the duplicate
// tiles option
@@ -1138,7 +1113,7 @@ export function createCallViewModel$(
oneOnOne === null
? combineLatest([grid$, spotlight$], (grid, spotlight) =>
grid.length > smallMobileCallThreshold ||
spotlight.some((vm) => vm instanceof ScreenShareViewModel)
spotlight.some((vm) => vm.type === "screen share")
? spotlightPortraitLayoutMedia$
: gridLayoutMedia$,
).pipe(switchAll())
@@ -1245,7 +1220,7 @@ export function createCallViewModel$(
// screen sharing feeds are in the spotlight we still need them.
return l.spotlight.media$.pipe(
map((models: MediaViewModel[]) =>
models.some((m) => m instanceof ScreenShareViewModel),
models.some((m) => m.type === "screen share"),
),
);
// In expanded spotlight layout, the active speaker is always shown in
@@ -1552,11 +1527,7 @@ export function createCallViewModel$(
toggleSpotlightExpanded$: toggleSpotlightExpanded$,
gridMode$: gridMode$,
setGridMode: setGridMode,
grid$: grid$,
spotlight$: spotlight$,
pip$: pip$,
layout$: layout$,
userMedia$,
localMatrixLivekitMember$,
matrixLivekitMembers$: scope.behavior(
matrixLivekitMembers$.pipe(

View File

@@ -30,7 +30,7 @@ import {
trackProcessorSync,
} from "../../../livekit/TrackProcessorContext.tsx";
import { getUrlParams } from "../../../UrlParams.ts";
import { observeTrackReference$ } from "../../MediaViewModel.ts";
import { observeTrackReference$ } from "../../observeTrackReference";
import { type Connection } from "../remoteMembers/Connection.ts";
import { ObservableScope } from "../../ObservableScope.ts";

View File

@@ -1,806 +0,0 @@
/*
Copyright 2023, 2024 New Vector Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import {
type AudioSource,
type VideoSource,
type TrackReference,
observeParticipantEvents,
observeParticipantMedia,
roomEventSelector,
} from "@livekit/components-core";
import {
type LocalParticipant,
LocalTrack,
LocalVideoTrack,
type Participant,
ParticipantEvent,
type RemoteParticipant,
Track,
TrackEvent,
facingModeFromLocalTrack,
type Room as LivekitRoom,
RoomEvent as LivekitRoomEvent,
RemoteTrack,
} from "livekit-client";
import { logger } from "matrix-js-sdk/lib/logger";
import {
BehaviorSubject,
type Observable,
Subject,
combineLatest,
filter,
fromEvent,
interval,
map,
merge,
of,
startWith,
switchMap,
throttleTime,
distinctUntilChanged,
} from "rxjs";
import { alwaysShowSelf } from "../settings/settings";
import { showConnectionStats } from "../settings/settings";
import { accumulate } from "../utils/observable";
import { type EncryptionSystem } from "../e2ee/sharedKeyManagement";
import { E2eeType } from "../e2ee/e2eeType";
import { type ReactionOption } from "../reactions";
import { platform } from "../Platform";
import { type MediaDevices } from "./MediaDevices";
import { type Behavior } from "./Behavior";
import { type ObservableScope } from "./ObservableScope";
/**
 * Watches a participant's published track for the given source.
 *
 * Emits the current {@link TrackReference} whenever the participant's media
 * changes, or undefined while no publication exists for that source.
 */
export function observeTrackReference$(
  participant: Participant,
  source: Track.Source,
): Observable<TrackReference | undefined> {
  const publication$ = observeParticipantMedia(participant).pipe(
    map(() => participant.getTrackPublication(source)),
    distinctUntilChanged(),
  );
  return publication$.pipe(
    map((publication) =>
      publication === undefined
        ? undefined
        : { participant, publication, source },
    ),
  );
}
/**
 * Polls the RTP stream stats of a participant's track once per second.
 *
 * Emits undefined while there is no local/remote track to sample, or when the
 * stats report contains no entry of the requested type.
 */
export function observeRtpStreamStats$(
  participant: Participant,
  source: Track.Source,
  type: "inbound-rtp" | "outbound-rtp",
): Observable<
  RTCInboundRtpStreamStats | RTCOutboundRtpStreamStats | undefined
> {
  // Re-sample on every tick as well as whenever the track reference changes.
  const tick$ = interval(1000).pipe(startWith(0));
  return combineLatest([observeTrackReference$(participant, source), tick$]).pipe(
    switchMap(async ([trackRef]) => {
      const track = trackRef?.publication?.track;
      // Only LocalTrack/RemoteTrack expose getRTCStatsReport.
      if (!(track instanceof RemoteTrack || track instanceof LocalTrack))
        return undefined;
      const report = await track.getRTCStatsReport();
      if (!report) return undefined;
      // Pick the first stats entry of the requested type, if any.
      for (const stats of report.values()) {
        if (stats.type === type) return stats;
      }
      return undefined;
    }),
    startWith(undefined),
  );
}
/**
 * Polls the inbound RTP stream stats of a participant's track.
 */
export function observeInboundRtpStreamStats$(
  participant: Participant,
  source: Track.Source,
): Observable<RTCInboundRtpStreamStats | undefined> {
  const stats$ = observeRtpStreamStats$(participant, source, "inbound-rtp");
  return stats$.pipe(
    map((stats) => stats as RTCInboundRtpStreamStats | undefined),
  );
}
/**
 * Polls the outbound RTP stream stats of a participant's track.
 */
export function observeOutboundRtpStreamStats$(
  participant: Participant,
  source: Track.Source,
): Observable<RTCOutboundRtpStreamStats | undefined> {
  const stats$ = observeRtpStreamStats$(participant, source, "outbound-rtp");
  return stats$.pipe(
    map((stats) => stats as RTCOutboundRtpStreamStats | undefined),
  );
}
/**
 * Heuristic for whether we are successfully receiving a remote participant's
 * track: compares successive inbound RTP stats samples and, when any frames
 * were received since the last sample, reports whether any were also decoded.
 *
 * Emits undefined while there is no conclusive evidence either way.
 *
 * NOTE(review): lastStats lives in the closure, outside the pipe, so if the
 * returned observable were subscribed more than once the subscriptions would
 * share (and corrupt) each other's baseline sample — confirm each call's
 * result is only subscribed once before reusing it.
 */
function observeRemoteTrackReceivingOkay$(
  participant: Participant,
  source: Track.Source,
): Observable<boolean | undefined> {
  // Previous sample, used to compute per-interval deltas below.
  let lastStats: {
    framesDecoded: number | undefined;
    framesDropped: number | undefined;
    framesReceived: number | undefined;
  } = {
    framesDecoded: undefined,
    framesDropped: undefined,
    framesReceived: undefined,
  };
  return observeInboundRtpStreamStats$(participant, source).pipe(
    // Project each stats report down to just the frame counters we compare.
    map((stats) => {
      if (!stats) return undefined;
      const { framesDecoded, framesDropped, framesReceived } = stats;
      return {
        framesDecoded,
        framesDropped,
        framesReceived,
      };
    }),
    filter((newStats) => !!newStats),
    map((newStats): boolean | undefined => {
      const oldStats = lastStats;
      lastStats = newStats;
      // Deltas are only meaningful once we have two numeric samples.
      if (
        typeof newStats.framesReceived === "number" &&
        typeof oldStats.framesReceived === "number" &&
        typeof newStats.framesDecoded === "number" &&
        typeof oldStats.framesDecoded === "number"
      ) {
        const framesReceivedDelta =
          newStats.framesReceived - oldStats.framesReceived;
        const framesDecodedDelta =
          newStats.framesDecoded - oldStats.framesDecoded;
        // if we received >0 frames and managed to decode >0 frames then we treat that as success
        if (framesReceivedDelta > 0) {
          return framesDecodedDelta > 0;
        }
      }
      // no change
      return undefined;
    }),
    // Drop the inconclusive samples so only genuine verdicts pass through.
    filter((x) => typeof x === "boolean"),
    startWith(undefined),
  );
}
/**
 * Watches a LiveKit room for encryption errors matching the given criteria
 * string and attributable to the given participant.
 *
 * Emits false while there is no room, and throttles bursts of errors so the
 * UI is not spammed.
 */
function encryptionErrorObservable$(
  room$: Behavior<LivekitRoom | undefined>,
  participant: Participant,
  encryptionSystem: EncryptionSystem,
  criteria: string,
): Observable<boolean> {
  return room$.pipe(
    switchMap((room) =>
      room === undefined
        ? of(false)
        : roomEventSelector(room, LivekitRoomEvent.EncryptionError).pipe(
            map((event) => {
              const [err] = event;
              switch (encryptionSystem.kind) {
                case E2eeType.PER_PARTICIPANT:
                  // Ideally we would pull the participant identity from the
                  // field on the error. However, it gets lost in the
                  // serialization process between workers. So, instead we do
                  // a string match.
                  return (
                    (err?.message.includes(participant.identity) &&
                      err?.message.includes(criteria)) ??
                    false
                  );
                case E2eeType.SHARED_KEY:
                  return !!err?.message.includes(criteria);
                default:
                  return false;
              }
            }),
          ),
    ),
    distinctUntilChanged(),
    throttleTime(1000), // Throttle to avoid spamming the UI
    startWith(false),
  );
}
/** The end-to-end encryption state of some participant's media. */
export enum EncryptionStatus {
  /** No participant yet; we are still waiting for them to connect. */
  Connecting,
  /** Media is flowing and (where applicable) decrypting correctly. */
  Okay,
  /** Per-participant mode: we have not received this participant's key. */
  KeyMissing,
  /** Per-participant mode: the key we have does not decrypt their media. */
  KeyInvalid,
  /** Shared-key mode: the key appears invalid (e.g. wrong password). */
  PasswordInvalid,
}
/**
 * Base class for media view models: owns the video track, the unencrypted
 * warning, the encryption status, and the identity/metadata behaviors shared
 * by user media and screen shares.
 */
abstract class BaseMediaViewModel {
  /**
   * The LiveKit video track for this media.
   */
  public readonly video$: Behavior<TrackReference | undefined>;
  /**
   * Whether there should be a warning that this media is unencrypted.
   */
  public readonly unencryptedWarning$: Behavior<boolean>;
  /** The end-to-end encryption status of this media (see EncryptionStatus). */
  public readonly encryptionStatus$: Behavior<EncryptionStatus>;
  /**
   * Whether this media corresponds to the local participant.
   */
  public abstract readonly local: boolean;
  // Tracks the given source on whichever participant is currently present,
  // emitting undefined while there is no participant.
  private observeTrackReference$(
    source: Track.Source,
  ): Behavior<TrackReference | undefined> {
    return this.scope.behavior(
      this.participant$.pipe(
        switchMap((p) =>
          !p ? of(undefined) : observeTrackReference$(p, source),
        ),
      ),
    );
  }
  /**
   * @param scope Scope that owns the lifetimes of the derived behaviors.
   * @param audioSource LiveKit source to treat as this media's audio track.
   * @param videoSource LiveKit source to treat as this media's video track.
   */
  public constructor(
    protected readonly scope: ObservableScope,
    /**
     * An opaque identifier for this media.
     */
    public readonly id: string,
    /**
     * The Matrix user to which this media belongs.
     */
    public readonly userId: string,
    // We don't necessarily have a participant if a user connects via MatrixRTC but not (yet) through
    // livekit.
    protected readonly participant$: Behavior<
      LocalParticipant | RemoteParticipant | null
    >,
    encryptionSystem: EncryptionSystem,
    audioSource: AudioSource,
    videoSource: VideoSource,
    protected readonly livekitRoom$: Behavior<LivekitRoom | undefined>,
    public readonly focusUrl$: Behavior<string | undefined>,
    public readonly displayName$: Behavior<string>,
    public readonly mxcAvatarUrl$: Behavior<string | undefined>,
  ) {
    const audio$ = this.observeTrackReference$(audioSource);
    this.video$ = this.observeTrackReference$(videoSource);
    // Warn if the call is supposed to be encrypted but either published track
    // arrives unencrypted.
    this.unencryptedWarning$ = this.scope.behavior(
      combineLatest(
        [audio$, this.video$],
        (a, v) =>
          encryptionSystem.kind !== E2eeType.NONE &&
          (a?.publication.isEncrypted === false ||
            v?.publication.isEncrypted === false),
      ),
    );
    this.encryptionStatus$ = this.scope.behavior(
      this.participant$.pipe(
        switchMap((participant): Observable<EncryptionStatus> => {
          if (!participant) {
            // No LiveKit participant yet: still connecting.
            return of(EncryptionStatus.Connecting);
          } else if (
            participant.isLocal ||
            encryptionSystem.kind === E2eeType.NONE
          ) {
            // Local media and unencrypted calls never show key errors.
            return of(EncryptionStatus.Okay);
          } else if (encryptionSystem.kind === E2eeType.PER_PARTICIPANT) {
            // Per-participant keys: distinguish missing vs. invalid keys via
            // room error events, and fall back to "Okay" once frames decode.
            return combineLatest([
              encryptionErrorObservable$(
                livekitRoom$,
                participant,
                encryptionSystem,
                "MissingKey",
              ),
              encryptionErrorObservable$(
                livekitRoom$,
                participant,
                encryptionSystem,
                "InvalidKey",
              ),
              observeRemoteTrackReceivingOkay$(participant, audioSource),
              observeRemoteTrackReceivingOkay$(participant, videoSource),
            ]).pipe(
              map(([keyMissing, keyInvalid, audioOkay, videoOkay]) => {
                if (keyMissing) return EncryptionStatus.KeyMissing;
                if (keyInvalid) return EncryptionStatus.KeyInvalid;
                if (audioOkay || videoOkay) return EncryptionStatus.Okay;
                return undefined; // no change
              }),
              filter((x) => !!x),
              startWith(EncryptionStatus.Connecting),
            );
          } else {
            // Shared key: an invalid key means the password is wrong.
            return combineLatest([
              encryptionErrorObservable$(
                livekitRoom$,
                participant,
                encryptionSystem,
                "InvalidKey",
              ),
              observeRemoteTrackReceivingOkay$(participant, audioSource),
              observeRemoteTrackReceivingOkay$(participant, videoSource),
            ]).pipe(
              map(
                ([keyInvalid, audioOkay, videoOkay]):
                  | EncryptionStatus
                  | undefined => {
                  if (keyInvalid) return EncryptionStatus.PasswordInvalid;
                  if (audioOkay || videoOkay) return EncryptionStatus.Okay;
                  return undefined; // no change
                },
              ),
              filter((x) => !!x),
              startWith(EncryptionStatus.Connecting),
            );
          }
        }),
      ),
    );
  }
}
/**
 * Some participant's media.
 */
export type MediaViewModel = UserMediaViewModel | ScreenShareViewModel;
/**
 * Some participant's camera/microphone media, local or remote.
 */
export type UserMediaViewModel =
  | LocalUserMediaViewModel
  | RemoteUserMediaViewModel;
/**
 * Some participant's user media (camera and microphone); shared base for the
 * local and remote variants.
 */
abstract class BaseUserMediaViewModel extends BaseMediaViewModel {
  // Whether the LiveKit participant is detected as speaking; false while
  // there is no participant.
  private readonly _speaking$ = this.scope.behavior(
    this.participant$.pipe(
      switchMap((p) =>
        p
          ? observeParticipantEvents(
              p,
              ParticipantEvent.IsSpeakingChanged,
            ).pipe(map((p) => p.isSpeaking))
          : of(false),
      ),
    ),
  );
  /**
   * Whether the participant is speaking.
   */
  // Getter backed by a private field so that subclasses can override it
  public get speaking$(): Behavior<boolean> {
    return this._speaking$;
  }
  /**
   * Whether this participant is sending audio (i.e. is unmuted on their side).
   */
  public readonly audioEnabled$: Behavior<boolean>;
  private readonly _videoEnabled$: Behavior<boolean>;
  /**
   * Whether this participant is sending video.
   */
  // Getter backed by a private field so that subclasses can override it
  public get videoEnabled$(): Behavior<boolean> {
    return this._videoEnabled$;
  }
  // Defaults to cropping the video to fill the tile.
  private readonly _cropVideo$ = new BehaviorSubject(true);
  /**
   * Whether the tile video should be contained inside the tile or be cropped to fit.
   */
  public readonly cropVideo$: Behavior<boolean> = this._cropVideo$;
  /**
   * @param scope Scope that owns the lifetimes of the derived behaviors.
   * @param id An opaque identifier for this media.
   * @param userId The Matrix user to which this media belongs.
   * @param handRaised$ When (if at all) this user raised their hand.
   * @param reaction$ The reaction this user is currently showing, if any.
   */
  public constructor(
    scope: ObservableScope,
    id: string,
    userId: string,
    /**
     * The expected identity of the LiveKit participant. Exposed for debugging.
     */
    public readonly rtcBackendIdentity: string,
    participant$: Behavior<LocalParticipant | RemoteParticipant | null>,
    encryptionSystem: EncryptionSystem,
    livekitRoom$: Behavior<LivekitRoom | undefined>,
    focusUrl$: Behavior<string | undefined>,
    displayName$: Behavior<string>,
    mxcAvatarUrl$: Behavior<string | undefined>,
    public readonly handRaised$: Behavior<Date | null>,
    public readonly reaction$: Behavior<ReactionOption | null>,
  ) {
    super(
      scope,
      id,
      userId,
      participant$,
      encryptionSystem,
      Track.Source.Microphone,
      Track.Source.Camera,
      livekitRoom$,
      focusUrl$,
      displayName$,
      mxcAvatarUrl$,
    );
    // Mute state is derived from the participant's published tracks.
    const media$ = this.scope.behavior(
      participant$.pipe(
        switchMap((p) => (p && observeParticipantMedia(p)) ?? of(undefined)),
      ),
    );
    this.audioEnabled$ = this.scope.behavior(
      media$.pipe(map((m) => m?.microphoneTrack?.isMuted === false)),
    );
    this._videoEnabled$ = this.scope.behavior(
      media$.pipe(map((m) => m?.cameraTrack?.isMuted === false)),
    );
  }
  /** Toggles between cropping the video and containing it within the tile. */
  public toggleFitContain(): void {
    this._cropVideo$.next(!this._cropVideo$.value);
  }
  // Local media is exactly that modeled by LocalUserMediaViewModel.
  public get local(): boolean {
    return this instanceof LocalUserMediaViewModel;
  }
  // RTP stream stats: outbound for local media, inbound for remote media
  // (implemented by the respective subclasses).
  public abstract get audioStreamStats$(): Observable<
    RTCInboundRtpStreamStats | RTCOutboundRtpStreamStats | undefined
  >;
  public abstract get videoStreamStats$(): Observable<
    RTCInboundRtpStreamStats | RTCOutboundRtpStreamStats | undefined
  >;
}
/**
 * The local participant's user media.
 */
export class LocalUserMediaViewModel extends BaseUserMediaViewModel {
  /**
   * The local video track as an observable that emits whenever the track
   * changes, the camera is switched, or the track is muted.
   */
  private readonly videoTrack$: Observable<LocalVideoTrack | null> =
    this.video$.pipe(
      switchMap((v) => {
        const track = v?.publication.track;
        if (!(track instanceof LocalVideoTrack)) return of(null);
        return merge(
          // Watch for track restarts because they indicate a camera switch.
          // This event is also emitted when unmuting the track object.
          fromEvent(track, TrackEvent.Restarted).pipe(
            startWith(null),
            map(() => track),
          ),
          // When the track object is muted, reset it to null.
          fromEvent(track, TrackEvent.Muted).pipe(map(() => null)),
        );
      }),
    );
  /**
   * Whether the video should be mirrored.
   */
  public readonly mirror$ = this.scope.behavior(
    this.videoTrack$.pipe(
      // Mirror only front-facing cameras (those that face the user)
      map(
        (track) =>
          track !== null &&
          facingModeFromLocalTrack(track).facingMode === "user",
      ),
    ),
  );
  /**
   * Whether to show this tile in a highly visible location near the start of
   * the grid.
   */
  // Backed directly by the persisted "always show self" setting.
  public readonly alwaysShow$ = alwaysShowSelf.value$;
  public readonly setAlwaysShow = alwaysShowSelf.setValue;
  /**
   * Callback for switching between the front and back cameras.
   */
  // null on desktop, and whenever the current camera has no opposite to
  // switch to.
  public readonly switchCamera$: Behavior<(() => void) | null> =
    this.scope.behavior(
      platform === "desktop"
        ? of(null)
        : this.videoTrack$.pipe(
            map((track) => {
              if (track === null) return null;
              const facingMode = facingModeFromLocalTrack(track).facingMode;
              // If the camera isn't front or back-facing, don't provide a switch
              // camera shortcut at all
              if (facingMode !== "user" && facingMode !== "environment")
                return null;
              // Restart the track with a camera facing the opposite direction
              return (): void =>
                void track
                  .restartTrack({
                    facingMode: facingMode === "user" ? "environment" : "user",
                  })
                  .then(() => {
                    // Inform the MediaDevices which camera was chosen
                    const deviceId =
                      track.mediaStreamTrack.getSettings().deviceId;
                    if (deviceId !== undefined)
                      this.mediaDevices.videoInput.select(deviceId);
                  })
                  .catch((e) =>
                    logger.error("Failed to switch camera", facingMode, e),
                  );
            }),
          ),
    );
  /**
   * @param mediaDevices Used to record which camera was chosen after a
   *   front/back camera switch.
   */
  public constructor(
    scope: ObservableScope,
    id: string,
    userId: string,
    rtcBackendIdentity: string,
    participant$: Behavior<LocalParticipant | null>,
    encryptionSystem: EncryptionSystem,
    livekitRoom$: Behavior<LivekitRoom | undefined>,
    focusUrl$: Behavior<string | undefined>,
    private readonly mediaDevices: MediaDevices,
    displayName$: Behavior<string>,
    mxcAvatarUrl$: Behavior<string | undefined>,
    handRaised$: Behavior<Date | null>,
    reaction$: Behavior<ReactionOption | null>,
  ) {
    super(
      scope,
      id,
      userId,
      rtcBackendIdentity,
      participant$,
      encryptionSystem,
      livekitRoom$,
      focusUrl$,
      displayName$,
      mxcAvatarUrl$,
      handRaised$,
      reaction$,
    );
  }
  // Outbound RTP stats for the local microphone; only sampled while the
  // connection-stats setting is enabled.
  public audioStreamStats$ = combineLatest([
    this.participant$,
    showConnectionStats.value$,
  ]).pipe(
    switchMap(([p, showConnectionStats]) => {
      if (!p || !showConnectionStats) return of(undefined);
      return observeOutboundRtpStreamStats$(p, Track.Source.Microphone);
    }),
  );
  // Outbound RTP stats for the local camera; only sampled while the
  // connection-stats setting is enabled.
  public videoStreamStats$ = combineLatest([
    this.participant$,
    showConnectionStats.value$,
  ]).pipe(
    switchMap(([p, showConnectionStats]) => {
      if (!p || !showConnectionStats) return of(undefined);
      return observeOutboundRtpStreamStats$(p, Track.Source.Camera);
    }),
  );
}
/**
 * A remote participant's user media.
 */
export class RemoteUserMediaViewModel extends BaseUserMediaViewModel {
  /**
   * Whether we are waiting for this user's LiveKit participant to exist. This
   * could be because either we or the remote party are still connecting.
   */
  public readonly waitingForMedia$ = this.scope.behavior<boolean>(
    combineLatest(
      [this.livekitRoom$, this.participant$],
      (livekitRoom, participant) =>
        // If livekitRoom is undefined, the user is not attempting to publish on
        // any transport and so we shouldn't expect a participant. (They might
        // be a subscribe-only bot for example.)
        livekitRoom !== undefined && participant === null,
    ),
  );
  // This private field is used to override the value from the superclass
  private __speaking$: Behavior<boolean>;
  public get speaking$(): Behavior<boolean> {
    return this.__speaking$;
  }
  // UI events feeding the local volume state machine below.
  private readonly locallyMutedToggle$ = new Subject<void>();
  private readonly localVolumeAdjustment$ = new Subject<number>();
  private readonly localVolumeCommit$ = new Subject<void>();
  /**
   * The volume to which this participant's audio is set, as a scalar
   * multiplier.
   */
  public readonly localVolume$ = this.scope.behavior<number>(
    merge(
      this.locallyMutedToggle$.pipe(map(() => "toggle mute" as const)),
      this.localVolumeAdjustment$,
      this.localVolumeCommit$.pipe(map(() => "commit" as const)),
    ).pipe(
      // State machine over (current volume, last committed volume): toggling
      // mute flips between 0 and the committed volume; committing snapshots
      // the slider position.
      accumulate({ volume: 1, committedVolume: 1 }, (state, event) => {
        switch (event) {
          case "toggle mute":
            return {
              ...state,
              volume: state.volume === 0 ? state.committedVolume : 0,
            };
          case "commit":
            // Dragging the slider to zero should have the same effect as
            // muting: keep the original committed volume, as if it were never
            // dragged
            return {
              ...state,
              committedVolume:
                state.volume === 0 ? state.committedVolume : state.volume,
            };
          default:
            // Volume adjustment
            return { ...state, volume: event };
        }
      }),
      map(({ volume }) => volume),
    ),
  );
  // This private field is used to override the value from the superclass
  private __videoEnabled$: Behavior<boolean>;
  public get videoEnabled$(): Behavior<boolean> {
    return this.__videoEnabled$;
  }
  /**
   * Whether this participant's audio is disabled.
   */
  public readonly locallyMuted$ = this.scope.behavior<boolean>(
    this.localVolume$.pipe(map((volume) => volume === 0)),
  );
  /**
   * @param pretendToBeDisconnected$ When true, present this participant as
   *   silent/video-off (and mute their audio) without changing UI state.
   */
  public constructor(
    scope: ObservableScope,
    id: string,
    userId: string,
    rtcBackendIdentity: string,
    participant$: Behavior<RemoteParticipant | null>,
    encryptionSystem: EncryptionSystem,
    livekitRoom$: Behavior<LivekitRoom | undefined>,
    focusUrl$: Behavior<string | undefined>,
    private readonly pretendToBeDisconnected$: Behavior<boolean>,
    displayName$: Behavior<string>,
    mxcAvatarUrl$: Behavior<string | undefined>,
    handRaised$: Behavior<Date | null>,
    reaction$: Behavior<ReactionOption | null>,
  ) {
    super(
      scope,
      id,
      userId,
      rtcBackendIdentity,
      participant$,
      encryptionSystem,
      livekitRoom$,
      focusUrl$,
      displayName$,
      mxcAvatarUrl$,
      handRaised$,
      reaction$,
    );
    // While pretending to be disconnected, report not speaking / no video
    // regardless of what the superclass observes.
    this.__speaking$ = this.scope.behavior(
      pretendToBeDisconnected$.pipe(
        switchMap((disconnected) =>
          disconnected ? of(false) : super.speaking$,
        ),
      ),
    );
    this.__videoEnabled$ = this.scope.behavior(
      pretendToBeDisconnected$.pipe(
        switchMap((disconnected) =>
          disconnected ? of(false) : super.videoEnabled$,
        ),
      ),
    );
    // Sync the local volume with LiveKit
    // NOTE(review): only the inner volume observable is scope-bound
    // (this.scope.bind()); the outer subscription relies on that completing to
    // tear down — confirm this is the intended lifetime.
    combineLatest([
      participant$,
      // The local volume, taking into account whether we're supposed to pretend
      // that the audio stream is disconnected (since we don't necessarily want
      // that to modify the UI state).
      this.pretendToBeDisconnected$.pipe(
        switchMap((disconnected) => (disconnected ? of(0) : this.localVolume$)),
        this.scope.bind(),
      ),
    ]).subscribe(([p, volume]) => p?.setVolume(volume));
  }
  /** Toggles this participant's audio between muted and the committed volume. */
  public toggleLocallyMuted(): void {
    this.locallyMutedToggle$.next();
  }
  /** Sets the (uncommitted) local volume, e.g. while dragging a slider. */
  public setLocalVolume(value: number): void {
    this.localVolumeAdjustment$.next(value);
  }
  /** Commits the current slider position, e.g. on slider release. */
  public commitLocalVolume(): void {
    this.localVolumeCommit$.next();
  }
  // Inbound RTP stats for this participant's microphone; only sampled while
  // the connection-stats setting is enabled.
  public audioStreamStats$ = combineLatest([
    this.participant$,
    showConnectionStats.value$,
  ]).pipe(
    switchMap(([p, showConnectionStats]) => {
      if (!p || !showConnectionStats) return of(undefined);
      return observeInboundRtpStreamStats$(p, Track.Source.Microphone);
    }),
  );
  // Inbound RTP stats for this participant's camera; only sampled while the
  // connection-stats setting is enabled.
  public videoStreamStats$ = combineLatest([
    this.participant$,
    showConnectionStats.value$,
  ]).pipe(
    switchMap(([p, showConnectionStats]) => {
      if (!p || !showConnectionStats) return of(undefined);
      return observeInboundRtpStreamStats$(p, Track.Source.Camera);
    }),
  );
}
/**
 * Some participant's screen share media.
 */
export class ScreenShareViewModel extends BaseMediaViewModel {
  /**
   * Whether this screen share's video should be displayed.
   */
  // Hidden while we are pretending the participant is disconnected.
  public readonly videoEnabled$ = this.scope.behavior(
    this.pretendToBeDisconnected$.pipe(map((disconnected) => !disconnected)),
  );
  /**
   * @param pretendToBeDisconnected$ When true, hide the screen share video.
   */
  public constructor(
    scope: ObservableScope,
    id: string,
    userId: string,
    participant$: Behavior<LocalParticipant | RemoteParticipant>,
    encryptionSystem: EncryptionSystem,
    livekitRoom$: Behavior<LivekitRoom | undefined>,
    focusUrl$: Behavior<string | undefined>,
    private readonly pretendToBeDisconnected$: Behavior<boolean>,
    displayName$: Behavior<string>,
    mxcAvatarUrl$: Behavior<string | undefined>,
    /** Whether this screen share belongs to the local participant. */
    public readonly local: boolean,
  ) {
    super(
      scope,
      id,
      userId,
      participant$,
      encryptionSystem,
      Track.Source.ScreenShareAudio,
      Track.Source.ScreenShare,
      livekitRoom$,
      focusUrl$,
      displayName$,
      mxcAvatarUrl$,
    );
  }
}

View File

@@ -1,53 +0,0 @@
/*
Copyright 2025 New Vector Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import {
type LocalParticipant,
type RemoteParticipant,
type Room as LivekitRoom,
} from "livekit-client";
import { type ObservableScope } from "./ObservableScope.ts";
import { ScreenShareViewModel } from "./MediaViewModel.ts";
import type { EncryptionSystem } from "../e2ee/sharedKeyManagement.ts";
import { constant, type Behavior } from "./Behavior.ts";
/**
 * A screen share media item to be presented in a tile. This is a thin wrapper
 * around ScreenShareViewModel which essentially just establishes an
 * ObservableScope for behaviors that the view model depends on.
 */
export class ScreenShare {
  /** The wrapped view model presented by the tile. */
  public readonly vm: ScreenShareViewModel;

  /**
   * @param scope Scope bounding the lifetime of the view model's behaviors.
   * @param id An opaque identifier for this media.
   * @param userId The Matrix user to which this media belongs.
   * @param participant The LiveKit participant publishing the screen share.
   *   Wrapped in a constant behavior for the view model.
   * @param encryptionSystem The encryption system in use for the call.
   * @param livekitRoom$ The LiveKit room, if currently known.
   * @param focusUrl$ The URL of the LiveKit focus, if currently known.
   * @param pretendToBeDisconnected$ When true, the view model presents the
   *   video as disabled.
   * @param displayName$ The owning user's display name.
   * @param mxcAvatarUrl$ The owning user's avatar URL, if any.
   */
  public constructor(
    private readonly scope: ObservableScope,
    id: string,
    userId: string,
    participant: LocalParticipant | RemoteParticipant,
    encryptionSystem: EncryptionSystem,
    livekitRoom$: Behavior<LivekitRoom | undefined>,
    focusUrl$: Behavior<string | undefined>,
    pretendToBeDisconnected$: Behavior<boolean>,
    displayName$: Behavior<string>,
    mxcAvatarUrl$: Behavior<string | undefined>,
  ) {
    this.vm = new ScreenShareViewModel(
      this.scope,
      id,
      userId,
      constant(participant),
      encryptionSystem,
      livekitRoom$,
      focusUrl$,
      pretendToBeDisconnected$,
      displayName$,
      mxcAvatarUrl$,
      // Locality is derived from the participant itself
      participant.isLocal,
    );
  }
}

View File

@@ -8,10 +8,11 @@ Please see LICENSE in the repository root for full details.
import { BehaviorSubject } from "rxjs";
import { logger } from "matrix-js-sdk/lib/logger";
import { type MediaViewModel, type UserMediaViewModel } from "./MediaViewModel";
import { GridTileViewModel, SpotlightTileViewModel } from "./TileViewModel";
import { fillGaps } from "../utils/iter";
import { debugTileLayout } from "../settings/settings";
import { type MediaViewModel } from "./media/MediaViewModel";
import { type UserMediaViewModel } from "./media/UserMediaViewModel";
function debugEntries(entries: GridTileData[]): string[] {
return entries.map((e) => e.media.displayName$.value);

View File

@@ -5,8 +5,9 @@ SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { type MediaViewModel, type UserMediaViewModel } from "./MediaViewModel";
import { type Behavior } from "./Behavior";
import { type MediaViewModel } from "./media/MediaViewModel";
import { type UserMediaViewModel } from "./media/UserMediaViewModel";
let nextId = 0;
function createId(): string {

View File

@@ -1,209 +0,0 @@
/*
Copyright 2025 New Vector Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { combineLatest, map, type Observable, of, switchMap } from "rxjs";
import {
type LocalParticipant,
ParticipantEvent,
type RemoteParticipant,
type Room as LivekitRoom,
} from "livekit-client";
import { observeParticipantEvents } from "@livekit/components-core";
import { type ObservableScope } from "./ObservableScope.ts";
import {
LocalUserMediaViewModel,
RemoteUserMediaViewModel,
type UserMediaViewModel,
} from "./MediaViewModel.ts";
import type { Behavior } from "./Behavior.ts";
import type { EncryptionSystem } from "../e2ee/sharedKeyManagement.ts";
import type { MediaDevices } from "./MediaDevices.ts";
import type { ReactionOption } from "../reactions";
import { observeSpeaker$ } from "./observeSpeaker.ts";
import { generateItems } from "../utils/observable.ts";
import { ScreenShare } from "./ScreenShare.ts";
import { type TaggedParticipant } from "./CallViewModel/remoteMembers/MatrixLivekitMembers.ts";
/**
 * Sorting bins defining the order in which media tiles appear in the layout.
 *
 * This is a plain numeric enum, so members auto-increment from 0 in
 * declaration order.
 */
enum SortingBin {
  /**
   * Yourself, when the "always show self" option is on.
   */
  SelfAlwaysShown,
  /**
   * Participants that are sharing their screen.
   */
  Presenters,
  /**
   * Participants that have been speaking recently.
   */
  Speakers,
  /**
   * Participants that have their hand raised.
   */
  HandRaised,
  /**
   * Participants with video.
   */
  Video,
  /**
   * Participants not sharing any video.
   */
  NoVideo,
  /**
   * Yourself, when the "always show self" option is off.
   */
  SelfNotAlwaysShown,
}
/**
 * A user media item to be presented in a tile. This is a thin wrapper around
 * UserMediaViewModel which additionally determines the media item's sorting bin
 * for inclusion in the call layout and tracks associated screen shares.
 */
export class UserMedia {
  // NOTE(review): the field initializers below read constructor parameter
  // properties, which relies on parameter properties being assigned before
  // field initializers run (useDefineForClassFields=false) — confirm the
  // compiler settings.

  /**
   * The wrapped view model: a local or remote variant depending on the
   * participant's tag.
   */
  public readonly vm: UserMediaViewModel =
    this.participant.type === "local"
      ? new LocalUserMediaViewModel(
          this.scope,
          this.id,
          this.userId,
          this.rtcBackendIdentity,
          this.participant.value$,
          this.encryptionSystem,
          this.livekitRoom$,
          this.focusUrl$,
          this.mediaDevices,
          this.displayName$,
          this.mxcAvatarUrl$,
          this.scope.behavior(this.handRaised$),
          this.scope.behavior(this.reaction$),
        )
      : new RemoteUserMediaViewModel(
          this.scope,
          this.id,
          this.userId,
          this.rtcBackendIdentity,
          this.participant.value$,
          this.encryptionSystem,
          this.livekitRoom$,
          this.focusUrl$,
          this.pretendToBeDisconnected$,
          this.displayName$,
          this.mxcAvatarUrl$,
          this.scope.behavior(this.handRaised$),
          this.scope.behavior(this.reaction$),
        );

  // Whether this participant counts as a recent speaker, derived from the
  // view model's speaking state.
  private readonly speaker$ = this.scope.behavior(
    observeSpeaker$(this.vm.speaking$),
  );

  // TypeScript needs this widening of the type to happen in a separate statement
  private readonly participant$: Behavior<
    LocalParticipant | RemoteParticipant | null
  > = this.participant.value$;

  /**
   * All screen share media associated with this user media.
   */
  public readonly screenShares$ = this.scope.behavior(
    this.participant$.pipe(
      switchMap((p) =>
        p === null
          ? of([])
          : observeParticipantEvents(
              p,
              ParticipantEvent.TrackPublished,
              ParticipantEvent.TrackUnpublished,
              ParticipantEvent.LocalTrackPublished,
              ParticipantEvent.LocalTrackUnpublished,
            ).pipe(
              // Technically more than one screen share might be possible... our
              // MediaViewModels don't support it though since they look for a unique
              // track for the given source. So generateItems here is a bit overkill.
              generateItems(
                `${this.id} screenShares$`,
                function* (p) {
                  if (p.isScreenShareEnabled)
                    yield {
                      keys: ["screen-share"],
                      data: undefined,
                    };
                },
                (scope, _data$, key) =>
                  new ScreenShare(
                    scope,
                    // Derive a stable child identifier from this media's id
                    `${this.id}:${key}`,
                    this.userId,
                    p,
                    this.encryptionSystem,
                    this.livekitRoom$,
                    this.focusUrl$,
                    this.pretendToBeDisconnected$,
                    this.displayName$,
                    this.mxcAvatarUrl$,
                  ),
              ),
            ),
      ),
    ),
  );

  // Whether this participant is currently sharing at least one screen.
  private readonly presenter$ = this.scope.behavior(
    this.screenShares$.pipe(map((screenShares) => screenShares.length > 0)),
  );

  /**
   * Which sorting bin the media item should be placed in.
   */
  // This is exposed here rather than by UserMediaViewModel because it's only
  // relevant to the layout algorithms; the MediaView component should be
  // ignorant of this value.
  public readonly bin$ = combineLatest(
    [
      this.speaker$,
      this.presenter$,
      this.vm.videoEnabled$,
      this.vm.handRaised$,
      // Only local media has an "always show" preference
      this.vm instanceof LocalUserMediaViewModel
        ? this.vm.alwaysShow$
        : of(false),
    ],
    (speaker, presenter, video, handRaised, alwaysShow) => {
      // Priority order: self-placement first, then presenting, speaking,
      // raised hand, video, and finally no video.
      if (this.vm.local)
        return alwaysShow
          ? SortingBin.SelfAlwaysShown
          : SortingBin.SelfNotAlwaysShown;
      else if (presenter) return SortingBin.Presenters;
      else if (speaker) return SortingBin.Speakers;
      else if (handRaised) return SortingBin.HandRaised;
      else if (video) return SortingBin.Video;
      else return SortingBin.NoVideo;
    },
  );

  /**
   * @param scope Scope bounding the lifetime of this item's behaviors.
   * @param id An opaque identifier for this media.
   * @param userId The Matrix user to which this media belongs.
   * @param rtcBackendIdentity The participant's identity on the RTC backend.
   * @param participant The participant, tagged as local or remote.
   * @param encryptionSystem The encryption system in use for the call.
   * @param livekitRoom$ The LiveKit room, if currently known.
   * @param focusUrl$ The URL of the LiveKit focus, if currently known.
   * @param mediaDevices Media devices, used by the local view model.
   * @param pretendToBeDisconnected$ When true, remote media is presented as
   *   disconnected.
   * @param displayName$ The user's display name.
   * @param mxcAvatarUrl$ The user's avatar URL, if any.
   * @param handRaised$ When the user raised their hand, or null if lowered.
   * @param reaction$ The user's current reaction, if any.
   */
  public constructor(
    private readonly scope: ObservableScope,
    public readonly id: string,
    private readonly userId: string,
    private readonly rtcBackendIdentity: string,
    private readonly participant: TaggedParticipant,
    private readonly encryptionSystem: EncryptionSystem,
    private readonly livekitRoom$: Behavior<LivekitRoom | undefined>,
    private readonly focusUrl$: Behavior<string | undefined>,
    private readonly mediaDevices: MediaDevices,
    private readonly pretendToBeDisconnected$: Behavior<boolean>,
    private readonly displayName$: Behavior<string>,
    private readonly mxcAvatarUrl$: Behavior<string | undefined>,
    private readonly handRaised$: Observable<Date | null>,
    private readonly reaction$: Observable<ReactionOption | null>,
  ) {}
}

101
src/state/VolumeControls.ts Normal file
View File

@@ -0,0 +1,101 @@
/*
Copyright 2026 Element Software Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { combineLatest, map, merge, of, Subject, switchMap } from "rxjs";
import { type Behavior } from "./Behavior";
import { type ObservableScope } from "./ObservableScope";
import { accumulate } from "../utils/observable";
/**
 * Controls for audio playback volume.
 */
export interface VolumeControls {
  /**
   * The volume to which the audio is set, as a scalar multiplier.
   */
  playbackVolume$: Behavior<number>;
  /**
   * Whether playback of this audio is disabled (volume of exactly 0).
   */
  playbackMuted$: Behavior<boolean>;
  /** Toggles between muted and the last committed non-zero volume. */
  togglePlaybackMuted: () => void;
  /** Sets the volume provisionally, e.g. while a slider is being dragged. */
  adjustPlaybackVolume: (value: number) => void;
  /**
   * Commits the current volume as the value to restore when unmuting.
   * Committing a volume of 0 keeps the previously committed value.
   */
  commitPlaybackVolume: () => void;
}
/**
 * External dependencies of createVolumeControls.
 */
interface VolumeControlsInputs {
  /**
   * When true, a volume of 0 is reported to the sink while the UI-facing
   * volume state is left untouched.
   */
  pretendToBeDisconnected$: Behavior<boolean>;
  /**
   * The callback to run to notify the module performing audio playback of the
   * requested volume.
   */
  sink$: Behavior<(volume: number) => void>;
}
/**
 * Creates a set of controls for audio playback volume and syncs this with the
 * audio playback module for the duration of the scope.
 *
 * The volume state machine tracks two values: the live volume and the last
 * committed (slider-released) volume, which is what unmuting restores.
 */
export function createVolumeControls(
  scope: ObservableScope,
  { pretendToBeDisconnected$, sink$ }: VolumeControlsInputs,
): VolumeControls {
  // UI events feeding the volume state machine
  const muteToggles$ = new Subject<"toggle mute">();
  const sliderMoves$ = new Subject<number>();
  const sliderReleases$ = new Subject<"commit">();

  const playbackVolume$ = scope.behavior<number>(
    merge(muteToggles$, sliderMoves$, sliderReleases$).pipe(
      accumulate({ volume: 1, committedVolume: 1 }, (state, event) => {
        if (event === "toggle mute") {
          // Muting drops the volume to 0; unmuting restores the last
          // committed (non-zero) volume
          return {
            ...state,
            volume: state.volume === 0 ? state.committedVolume : 0,
          };
        }
        if (event === "commit") {
          // Dragging the slider to zero should have the same effect as
          // muting: keep the original committed volume, as if it were never
          // dragged
          return {
            ...state,
            committedVolume:
              state.volume === 0 ? state.committedVolume : state.volume,
          };
        }
        // Any other event is a numeric volume adjustment
        return { ...state, volume: event };
      }),
      map(({ volume }) => volume),
    ),
  );

  // Keep the audio playback module in sync with the requested volume
  combineLatest([
    sink$,
    // The playback volume, taking into account whether we're supposed to
    // pretend that the audio stream is disconnected (since we don't
    // necessarily want that to modify the UI state).
    pretendToBeDisconnected$.pipe(
      switchMap((disconnected) => (disconnected ? of(0) : playbackVolume$)),
    ),
  ])
    .pipe(scope.bind())
    .subscribe(([sink, volume]) => sink(volume));

  // Muted is simply "volume is exactly zero"
  const playbackMuted$ = scope.behavior<boolean>(
    playbackVolume$.pipe(map((volume) => volume === 0)),
  );

  return {
    playbackVolume$,
    playbackMuted$,
    togglePlaybackMuted: (): void => muteToggles$.next("toggle mute"),
    adjustPlaybackVolume: (value: number): void => sliderMoves$.next(value),
    commitPlaybackVolume: (): void => sliderReleases$.next("commit"),
  };
}

View File

@@ -5,16 +5,14 @@ SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { type LocalUserMediaViewModel } from "./media/LocalUserMediaViewModel.ts";
import { type MediaViewModel } from "./media/MediaViewModel.ts";
import { type RemoteUserMediaViewModel } from "./media/RemoteUserMediaViewModel.ts";
import { type UserMediaViewModel } from "./media/UserMediaViewModel.ts";
import {
type GridTileViewModel,
type SpotlightTileViewModel,
} from "./TileViewModel.ts";
import {
type LocalUserMediaViewModel,
type RemoteUserMediaViewModel,
type MediaViewModel,
type UserMediaViewModel,
} from "./MediaViewModel.ts";
export interface GridLayoutMedia {
type: "grid";

View File

@@ -0,0 +1,32 @@
/*
Copyright 2023, 2024 New Vector Ltd.
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { type LocalParticipant } from "livekit-client";
import { type Behavior } from "../Behavior";
import {
type BaseScreenShareInputs,
type BaseScreenShareViewModel,
createBaseScreenShare,
} from "./ScreenShareViewModel";
import { type ObservableScope } from "../ObservableScope";
/**
 * View model for a screen share published by the local user.
 */
export interface LocalScreenShareViewModel extends BaseScreenShareViewModel {
  // Discriminant distinguishing local from remote screen shares
  local: true;
}
/**
 * Inputs for createLocalScreenShare.
 */
export interface LocalScreenShareInputs extends BaseScreenShareInputs {
  /** The local LiveKit participant, or null if unavailable. */
  participant$: Behavior<LocalParticipant | null>;
}
/**
 * Creates the view model for a screen share published by the local user.
 * This is the base screen share view model tagged as local.
 */
export function createLocalScreenShare(
  scope: ObservableScope,
  inputs: LocalScreenShareInputs,
): LocalScreenShareViewModel {
  const base = createBaseScreenShare(scope, inputs);
  return { ...base, local: true };
}

View File

@@ -0,0 +1,137 @@
/*
Copyright 2023, 2024 New Vector Ltd.
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import {
facingModeFromLocalTrack,
type LocalParticipant,
LocalVideoTrack,
TrackEvent,
} from "livekit-client";
import {
fromEvent,
map,
merge,
type Observable,
of,
startWith,
switchMap,
} from "rxjs";
import { logger } from "matrix-js-sdk/lib/logger";
import { type Behavior } from "../Behavior";
import {
type BaseUserMediaInputs,
type BaseUserMediaViewModel,
createBaseUserMedia,
} from "./UserMediaViewModel";
import { type ObservableScope } from "../ObservableScope";
import { alwaysShowSelf } from "../../settings/settings";
import { platform } from "../../Platform";
import { type MediaDevices } from "../MediaDevices";
/**
 * View model for the local user's camera/microphone media.
 */
export interface LocalUserMediaViewModel extends BaseUserMediaViewModel {
  // Discriminant distinguishing local from remote user media
  local: true;
  /**
   * Whether the video should be mirrored.
   */
  mirror$: Behavior<boolean>;
  /**
   * Whether to show this tile in a highly visible location near the start of
   * the grid.
   */
  alwaysShow$: Behavior<boolean>;
  /** Persists the "always show self" preference. */
  setAlwaysShow: (value: boolean) => void;
  /**
   * A callback that switches between front and back-facing cameras, or null
   * when switching is unavailable (desktop platform, no video track, or a
   * camera that is neither front nor back-facing).
   */
  switchCamera$: Behavior<(() => void) | null>;
}
/**
 * Inputs for createLocalUserMedia. The stats type is fixed internally to
 * "outbound-rtp", so it is omitted from the base inputs.
 */
export interface LocalUserMediaInputs extends Omit<
  BaseUserMediaInputs,
  "statsType"
> {
  /** The local LiveKit participant, or null if unavailable. */
  participant$: Behavior<LocalParticipant | null>;
  /** Media devices, used to record which camera was selected on switch. */
  mediaDevices: MediaDevices;
}
/**
 * Creates the view model for the local user's camera/microphone media.
 *
 * Adds local-only concerns on top of the base user media: video mirroring,
 * the "always show self" preference, and a front/back camera switcher.
 */
export function createLocalUserMedia(
  scope: ObservableScope,
  { mediaDevices, ...inputs }: LocalUserMediaInputs,
): LocalUserMediaViewModel {
  // Local media reports outbound (publishing) RTP stats
  const baseUserMedia = createBaseUserMedia(scope, {
    ...inputs,
    statsType: "outbound-rtp",
  });

  /**
   * The local video track as an observable that emits whenever the track
   * changes, the camera is switched, or the track is muted.
   */
  const videoTrack$: Observable<LocalVideoTrack | null> =
    baseUserMedia.video$.pipe(
      switchMap((v) => {
        const track = v?.publication.track;
        if (!(track instanceof LocalVideoTrack)) return of(null);
        return merge(
          // Watch for track restarts because they indicate a camera switch.
          // This event is also emitted when unmuting the track object.
          fromEvent(track, TrackEvent.Restarted).pipe(
            startWith(null),
            map(() => track),
          ),
          // When the track object is muted, reset it to null.
          fromEvent(track, TrackEvent.Muted).pipe(map(() => null)),
        );
      }),
    );

  return {
    ...baseUserMedia,
    local: true,
    mirror$: scope.behavior(
      videoTrack$.pipe(
        // Mirror only front-facing cameras (those that face the user)
        map(
          (track) =>
            track !== null &&
            facingModeFromLocalTrack(track).facingMode === "user",
        ),
      ),
    ),
    alwaysShow$: alwaysShowSelf.value$,
    setAlwaysShow: alwaysShowSelf.setValue,
    switchCamera$: scope.behavior(
      // No camera switching on desktop — presumably there's no front/back
      // camera pair to swap; confirm if desktop devices should support this
      platform === "desktop"
        ? of(null)
        : videoTrack$.pipe(
            map((track) => {
              if (track === null) return null;
              const facingMode = facingModeFromLocalTrack(track).facingMode;
              // If the camera isn't front or back-facing, don't provide a switch
              // camera shortcut at all
              if (facingMode !== "user" && facingMode !== "environment")
                return null;
              // Restart the track with a camera facing the opposite direction
              return (): void =>
                void track
                  .restartTrack({
                    facingMode: facingMode === "user" ? "environment" : "user",
                  })
                  .then(() => {
                    // Inform the MediaDevices which camera was chosen
                    const deviceId =
                      track.mediaStreamTrack.getSettings().deviceId;
                    if (deviceId !== undefined)
                      mediaDevices.videoInput.select(deviceId);
                  })
                  .catch((e) =>
                    logger.error("Failed to switch camera", facingMode, e),
                  );
            }),
          ),
    ),
  };
}

View File

@@ -0,0 +1,198 @@
/*
Copyright 2025-2026 Element Software Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { combineLatest, map, of, switchMap } from "rxjs";
import {
type LocalParticipant,
ParticipantEvent,
type RemoteParticipant,
} from "livekit-client";
import { observeParticipantEvents } from "@livekit/components-core";
import { type ObservableScope } from "../ObservableScope.ts";
import type { Behavior } from "../Behavior.ts";
import type { MediaDevices } from "../MediaDevices.ts";
import { observeSpeaker$ } from "./observeSpeaker.ts";
import { generateItems } from "../../utils/observable.ts";
import { type TaggedParticipant } from "../CallViewModel/remoteMembers/MatrixLivekitMembers.ts";
import { type UserMediaViewModel } from "./UserMediaViewModel.ts";
import { type ScreenShareViewModel } from "./ScreenShareViewModel.ts";
import {
createLocalUserMedia,
type LocalUserMediaInputs,
} from "./LocalUserMediaViewModel.ts";
import {
createRemoteUserMedia,
type RemoteUserMediaInputs,
} from "./RemoteUserMediaViewModel.ts";
import { createLocalScreenShare } from "./LocalScreenShareViewModel.ts";
import { createRemoteScreenShare } from "./RemoteScreenShareViewModel.ts";
/**
 * Sorting bins defining the order in which media tiles appear in the layout.
 *
 * This is a plain numeric enum, so members auto-increment from 0 in
 * declaration order.
 */
enum SortingBin {
  /**
   * Yourself, when the "always show self" option is on.
   */
  SelfAlwaysShown,
  /**
   * Participants that are sharing their screen.
   */
  Presenters,
  /**
   * Participants that have been speaking recently.
   */
  Speakers,
  /**
   * Participants that have their hand raised.
   */
  HandRaised,
  /**
   * Participants with video.
   */
  Video,
  /**
   * Participants not sharing any video.
   */
  NoVideo,
  /**
   * Yourself, when the "always show self" option is off.
   */
  SelfNotAlwaysShown,
}
/**
 * A user media item to be presented in a tile. This is a thin wrapper around
 * UserMediaViewModel which additionally carries data relevant to the tile
 * layout algorithms (data which the MediaView component should be ignorant of).
 */
export type WrappedUserMediaViewModel = UserMediaViewModel & {
  /**
   * All screen share media associated with this user media.
   */
  screenShares$: Behavior<ScreenShareViewModel[]>;
  /**
   * Which sorting bin the media item should be placed in.
   */
  bin$: Behavior<SortingBin>;
};
/**
 * Inputs for createWrappedUserMedia: the union of the local and remote user
 * media inputs, with the participant carrying a local/remote tag instead of
 * a plain participant$ behavior.
 */
interface WrappedUserMediaInputs extends Omit<
  LocalUserMediaInputs & RemoteUserMediaInputs,
  "participant$"
> {
  /** The participant, tagged with whether it is local or remote. */
  participant: TaggedParticipant;
  /** Used by local user media to track camera selection. */
  mediaDevices: MediaDevices;
  /** Passed to remote media to fake a disconnected audio/video stream. */
  pretendToBeDisconnected$: Behavior<boolean>;
}
/**
 * Creates a user media view model wrapped with layout-relevant data: the
 * participant's associated screen shares and the sorting bin the tile should
 * be placed in.
 */
export function createWrappedUserMedia(
  scope: ObservableScope,
  {
    participant,
    mediaDevices,
    pretendToBeDisconnected$,
    ...inputs
  }: WrappedUserMediaInputs,
): WrappedUserMediaViewModel {
  // Build the local or remote variant depending on the participant's tag
  const userMedia =
    participant.type === "local"
      ? createLocalUserMedia(scope, {
          participant$: participant.value$,
          mediaDevices,
          ...inputs,
        })
      : createRemoteUserMedia(scope, {
          participant$: participant.value$,
          pretendToBeDisconnected$,
          ...inputs,
        });
  // TypeScript needs this widening of the type to happen in a separate statement
  const participant$: Behavior<LocalParticipant | RemoteParticipant | null> =
    participant.value$;
  // Watch the participant's track (un)publications for screen shares
  const screenShares$ = scope.behavior(
    participant$.pipe(
      switchMap((p) =>
        p === null
          ? of([])
          : observeParticipantEvents(
              p,
              ParticipantEvent.TrackPublished,
              ParticipantEvent.TrackUnpublished,
              ParticipantEvent.LocalTrackPublished,
              ParticipantEvent.LocalTrackUnpublished,
            ).pipe(
              // Technically more than one screen share might be possible... our
              // MediaViewModels don't support it though since they look for a unique
              // track for the given source. So generateItems here is a bit overkill.
              generateItems(
                `${inputs.id} screenShares$`,
                function* (p) {
                  if (p.isScreenShareEnabled)
                    yield {
                      keys: ["screen-share"],
                      data: undefined,
                    };
                },
                (scope, _data$, key) => {
                  // Derive a stable child identifier from this media's id
                  const id = `${inputs.id}:${key}`;
                  return participant.type === "local"
                    ? createLocalScreenShare(scope, {
                        ...inputs,
                        id,
                        participant$: participant.value$,
                      })
                    : createRemoteScreenShare(scope, {
                        ...inputs,
                        id,
                        participant$: participant.value$,
                        pretendToBeDisconnected$,
                      });
                },
              ),
            ),
      ),
    ),
  );
  // Whether this participant counts as a recent speaker
  const speaker$ = scope.behavior(observeSpeaker$(userMedia.speaking$));
  // Whether this participant is currently sharing at least one screen
  const presenter$ = scope.behavior(
    screenShares$.pipe(map((screenShares) => screenShares.length > 0)),
  );
  return {
    ...userMedia,
    screenShares$,
    bin$: scope.behavior(
      combineLatest(
        [
          speaker$,
          presenter$,
          userMedia.videoEnabled$,
          userMedia.handRaised$,
          // Only local media has an "always show" preference; null marks
          // remote media so the first branch below is skipped
          userMedia.local ? userMedia.alwaysShow$ : of<boolean | null>(null),
        ],
        (speaker, presenter, video, handRaised, alwaysShow) => {
          // Priority order: self-placement first, then presenting, speaking,
          // raised hand, video, and finally no video
          if (alwaysShow !== null)
            return alwaysShow
              ? SortingBin.SelfAlwaysShown
              : SortingBin.SelfNotAlwaysShown;
          else if (presenter) return SortingBin.Presenters;
          else if (speaker) return SortingBin.Speakers;
          else if (handRaised) return SortingBin.HandRaised;
          else if (video) return SortingBin.Video;
          else return SortingBin.NoVideo;
        },
      ),
    ),
  };
}
/** Any media item that can occupy a tile: user media or a screen share. */
export type MediaItem = WrappedUserMediaViewModel | ScreenShareViewModel;

View File

@@ -17,13 +17,12 @@ import {
mockLocalParticipant,
mockMediaDevices,
mockRtcMembership,
createLocalMedia,
createRemoteMedia,
mockLocalMedia,
mockRemoteMedia,
withTestScheduler,
mockRemoteParticipant,
} from "../utils/test";
import { getValue } from "../utils/observable";
import { constant } from "./Behavior";
} from "../../utils/test";
import { constant } from "../Behavior";
global.MediaStreamTrack = class {} as unknown as {
new (): MediaStreamTrack;
@@ -35,7 +34,7 @@ global.MediaStream = class {} as unknown as {
};
const platformMock = vi.hoisted(() => vi.fn(() => "desktop"));
vi.mock("../Platform", () => ({
vi.mock("../../Platform", () => ({
get platform(): string {
return platformMock();
},
@@ -45,7 +44,7 @@ const rtcMembership = mockRtcMembership("@alice:example.org", "AAAA");
test("control a participant's volume", () => {
const setVolumeSpy = vi.fn();
const vm = createRemoteMedia(
const vm = mockRemoteMedia(
rtcMembership,
{},
mockRemoteParticipant({ setVolume: setVolumeSpy }),
@@ -54,33 +53,33 @@ test("control a participant's volume", () => {
schedule("-ab---c---d|", {
a() {
// Try muting by toggling
vm.toggleLocallyMuted();
vm.togglePlaybackMuted();
expect(setVolumeSpy).toHaveBeenLastCalledWith(0);
},
b() {
// Try unmuting by dragging the slider back up
vm.setLocalVolume(0.6);
vm.setLocalVolume(0.8);
vm.commitLocalVolume();
vm.adjustPlaybackVolume(0.6);
vm.adjustPlaybackVolume(0.8);
vm.commitPlaybackVolume();
expect(setVolumeSpy).toHaveBeenCalledWith(0.6);
expect(setVolumeSpy).toHaveBeenLastCalledWith(0.8);
},
c() {
// Try muting by dragging the slider back down
vm.setLocalVolume(0.2);
vm.setLocalVolume(0);
vm.commitLocalVolume();
vm.adjustPlaybackVolume(0.2);
vm.adjustPlaybackVolume(0);
vm.commitPlaybackVolume();
expect(setVolumeSpy).toHaveBeenCalledWith(0.2);
expect(setVolumeSpy).toHaveBeenLastCalledWith(0);
},
d() {
// Try unmuting by toggling
vm.toggleLocallyMuted();
vm.togglePlaybackMuted();
// The volume should return to the last non-zero committed volume
expect(setVolumeSpy).toHaveBeenLastCalledWith(0.8);
},
});
expectObservable(vm.localVolume$).toBe("ab(cd)(ef)g", {
expectObservable(vm.playbackVolume$).toBe("ab(cd)(ef)g", {
a: 1,
b: 0,
c: 0.6,
@@ -93,11 +92,11 @@ test("control a participant's volume", () => {
});
test("toggle fit/contain for a participant's video", () => {
const vm = createRemoteMedia(rtcMembership, {}, mockRemoteParticipant({}));
const vm = mockRemoteMedia(rtcMembership, {}, mockRemoteParticipant({}));
withTestScheduler(({ expectObservable, schedule }) => {
schedule("-ab|", {
a: () => vm.toggleFitContain(),
b: () => vm.toggleFitContain(),
a: () => vm.toggleCropVideo(),
b: () => vm.toggleCropVideo(),
});
expectObservable(vm.cropVideo$).toBe("abc", {
a: true,
@@ -108,7 +107,7 @@ test("toggle fit/contain for a participant's video", () => {
});
test("local media remembers whether it should always be shown", () => {
const vm1 = createLocalMedia(
const vm1 = mockLocalMedia(
rtcMembership,
{},
mockLocalParticipant({}),
@@ -120,7 +119,7 @@ test("local media remembers whether it should always be shown", () => {
});
// Next local media should start out *not* always shown
const vm2 = createLocalMedia(
const vm2 = mockLocalMedia(
rtcMembership,
{},
mockLocalParticipant({}),
@@ -166,7 +165,7 @@ test("switch cameras", async () => {
const selectVideoInput = vi.fn();
const vm = createLocalMedia(
const vm = mockLocalMedia(
rtcMembership,
{},
mockLocalParticipant({
@@ -184,7 +183,7 @@ test("switch cameras", async () => {
);
// Switch to back camera
getValue(vm.switchCamera$)!();
vm.switchCamera$.value!();
expect(restartTrack).toHaveBeenCalledExactlyOnceWith({
facingMode: "environment",
});
@@ -195,7 +194,7 @@ test("switch cameras", async () => {
expect(deviceId).toBe("back camera");
// Switch to front camera
getValue(vm.switchCamera$)!();
vm.switchCamera$.value!();
expect(restartTrack).toHaveBeenCalledTimes(2);
expect(restartTrack).toHaveBeenLastCalledWith({ facingMode: "user" });
await waitFor(() => {
@@ -206,17 +205,17 @@ test("switch cameras", async () => {
});
test("remote media is in waiting state when participant has not yet connected", () => {
const vm = createRemoteMedia(rtcMembership, {}, null); // null participant
const vm = mockRemoteMedia(rtcMembership, {}, null); // null participant
expect(vm.waitingForMedia$.value).toBe(true);
});
test("remote media is not in waiting state when participant is connected", () => {
const vm = createRemoteMedia(rtcMembership, {}, mockRemoteParticipant({}));
const vm = mockRemoteMedia(rtcMembership, {}, mockRemoteParticipant({}));
expect(vm.waitingForMedia$.value).toBe(false);
});
test("remote media is not in waiting state when participant is connected with no publications", () => {
const vm = createRemoteMedia(
const vm = mockRemoteMedia(
rtcMembership,
{},
mockRemoteParticipant({
@@ -228,7 +227,7 @@ test("remote media is not in waiting state when participant is connected with no
});
test("remote media is not in waiting state when user does not intend to publish anywhere", () => {
const vm = createRemoteMedia(
const vm = mockRemoteMedia(
rtcMembership,
{},
mockRemoteParticipant({}),

View File

@@ -0,0 +1,44 @@
/*
Copyright 2023, 2024 New Vector Ltd.
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { type Behavior } from "../Behavior";
import { type ScreenShareViewModel } from "./ScreenShareViewModel";
import { type UserMediaViewModel } from "./UserMediaViewModel";
/**
 * A participant's media: either their user (camera/microphone) media or a
 * screen share.
 */
export type MediaViewModel = UserMediaViewModel | ScreenShareViewModel;
/**
 * Properties which are common to all MediaViewModels.
 */
export interface BaseMediaViewModel {
  /**
   * An opaque identifier for this media.
   */
  id: string;
  /**
   * The Matrix user to which this media belongs.
   */
  userId: string;
  /** The display name of the user owning this media. */
  displayName$: Behavior<string>;
  /** The avatar URL (MXC) of the user owning this media, if any. */
  mxcAvatarUrl$: Behavior<string | undefined>;
}

// The inputs are exactly the base view model's own fields
export type BaseMediaInputs = BaseMediaViewModel;
/**
 * Builds the common slice of media view model state. All this does is strip
 * out superfluous fields from the input object, returning a fresh object
 * containing only the base properties.
 */
export function createBaseMedia(inputs: BaseMediaInputs): BaseMediaViewModel {
  const { id, userId, displayName$, mxcAvatarUrl$ } = inputs;
  return { id, userId, displayName$, mxcAvatarUrl$ };
}

View File

@@ -0,0 +1,272 @@
/*
Copyright 2023, 2024 New Vector Ltd.
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import {
type Room as LivekitRoom,
RoomEvent as LivekitRoomEvent,
type Participant,
type Track,
} from "livekit-client";
import {
type AudioSource,
roomEventSelector,
type TrackReference,
type VideoSource,
} from "@livekit/components-core";
import { type LocalParticipant, type RemoteParticipant } from "livekit-client";
import {
combineLatest,
distinctUntilChanged,
filter,
map,
type Observable,
of,
startWith,
switchMap,
throttleTime,
} from "rxjs";
import { type Behavior } from "../Behavior";
import { type BaseMediaViewModel, createBaseMedia } from "./MediaViewModel";
import { type EncryptionSystem } from "../../e2ee/sharedKeyManagement";
import { type ObservableScope } from "../ObservableScope";
import { observeTrackReference$ } from "../observeTrackReference";
import { E2eeType } from "../../e2ee/e2eeType";
import { observeInboundRtpStreamStats$ } from "./observeRtpStreamStats";
// TODO: Encryption status is kinda broken and thus unused right now. Remove?
/**
 * The E2EE status of a member's media stream.
 *
 * Note that Connecting is the first member and thus has numeric value 0,
 * which is falsy — beware of truthiness filters on values of this enum.
 */
export enum EncryptionStatus {
  Connecting,
  Okay,
  KeyMissing,
  KeyInvalid,
  PasswordInvalid,
}
/**
 * Media belonging to an active member of the RTC session.
 */
export interface MemberMediaViewModel extends BaseMediaViewModel {
  /**
   * The LiveKit video track for this media.
   */
  video$: Behavior<TrackReference | undefined>;
  /**
   * The URL of the LiveKit focus on which this member should be publishing.
   * Exposed for debugging.
   */
  focusUrl$: Behavior<string | undefined>;
  /**
   * Whether there should be a warning that this media is unencrypted.
   */
  unencryptedWarning$: Behavior<boolean>;
  /** The E2EE status of this member's media stream. */
  encryptionStatus$: Behavior<EncryptionStatus>;
}
/**
 * Inputs for createMemberMedia: the base media fields plus the LiveKit
 * participant, room, track sources, and encryption configuration.
 */
export interface MemberMediaInputs extends BaseMediaViewModel {
  /** The LiveKit participant owning the media, or null if not connected. */
  participant$: Behavior<LocalParticipant | RemoteParticipant | null>;
  /** The LiveKit room, if currently known. */
  livekitRoom$: Behavior<LivekitRoom | undefined>;
  /** The track source to observe for audio (e.g. microphone). */
  audioSource: AudioSource;
  /** The track source to observe for video (e.g. camera). */
  videoSource: VideoSource;
  /** The URL of the LiveKit focus, exposed for debugging. */
  focusUrl$: Behavior<string | undefined>;
  /** The encryption system in use for the call. */
  encryptionSystem: EncryptionSystem;
}
/**
 * Creates the view model state common to media of active RTC session members:
 * track references, an unencrypted-media warning, and the encryption status.
 */
export function createMemberMedia(
  scope: ObservableScope,
  {
    participant$,
    livekitRoom$,
    audioSource,
    videoSource,
    focusUrl$,
    encryptionSystem,
    ...inputs
  }: MemberMediaInputs,
): MemberMediaViewModel {
  // Follows the participant's track reference for a given source, or
  // undefined while there is no participant
  const trackBehavior$ = (
    source: Track.Source,
  ): Behavior<TrackReference | undefined> =>
    scope.behavior(
      participant$.pipe(
        switchMap((p) =>
          !p ? of(undefined) : observeTrackReference$(p, source),
        ),
      ),
    );
  const audio$ = trackBehavior$(audioSource);
  const video$ = trackBehavior$(videoSource);
  return {
    ...createBaseMedia(inputs),
    video$,
    focusUrl$,
    // Warn when the call is supposed to be encrypted but either track's
    // publication reports itself as unencrypted
    unencryptedWarning$: scope.behavior(
      combineLatest(
        [audio$, video$],
        (a, v) =>
          encryptionSystem.kind !== E2eeType.NONE &&
          (a?.publication.isEncrypted === false ||
            v?.publication.isEncrypted === false),
      ),
    ),
    encryptionStatus$: scope.behavior(
      participant$.pipe(
        switchMap((participant): Observable<EncryptionStatus> => {
          if (!participant) {
            // No participant yet: still connecting
            return of(EncryptionStatus.Connecting);
          } else if (
            participant.isLocal ||
            encryptionSystem.kind === E2eeType.NONE
          ) {
            // Local media and unencrypted calls are always "okay"
            return of(EncryptionStatus.Okay);
          } else if (encryptionSystem.kind === E2eeType.PER_PARTICIPANT) {
            // Per-participant keys: check for missing/invalid key errors and
            // fall back to whether we're successfully decoding frames
            return combineLatest([
              encryptionErrorObservable$(
                livekitRoom$,
                participant,
                encryptionSystem,
                "MissingKey",
              ),
              encryptionErrorObservable$(
                livekitRoom$,
                participant,
                encryptionSystem,
                "InvalidKey",
              ),
              observeRemoteTrackReceivingOkay$(participant, audioSource),
              observeRemoteTrackReceivingOkay$(participant, videoSource),
            ]).pipe(
              map(([keyMissing, keyInvalid, audioOkay, videoOkay]) => {
                if (keyMissing) return EncryptionStatus.KeyMissing;
                if (keyInvalid) return EncryptionStatus.KeyInvalid;
                if (audioOkay || videoOkay) return EncryptionStatus.Okay;
                return undefined; // no change
              }),
              // NOTE(review): this drops undefined as intended, but would
              // also drop EncryptionStatus.Connecting (value 0, falsy) if the
              // map above ever produced it — fragile if the enum is reordered
              filter((x) => !!x),
              startWith(EncryptionStatus.Connecting),
            );
          } else {
            // Shared key (password): an InvalidKey error means the password
            // is wrong
            return combineLatest([
              encryptionErrorObservable$(
                livekitRoom$,
                participant,
                encryptionSystem,
                "InvalidKey",
              ),
              observeRemoteTrackReceivingOkay$(participant, audioSource),
              observeRemoteTrackReceivingOkay$(participant, videoSource),
            ]).pipe(
              map(
                ([keyInvalid, audioOkay, videoOkay]):
                  | EncryptionStatus
                  | undefined => {
                  if (keyInvalid) return EncryptionStatus.PasswordInvalid;
                  if (audioOkay || videoOkay) return EncryptionStatus.Okay;
                  return undefined; // no change
                },
              ),
              // NOTE(review): same truthiness-filter fragility as above
              filter((x) => !!x),
              startWith(EncryptionStatus.Connecting),
            );
          }
        }),
      ),
    ),
  };
}
/**
 * Watches the LiveKit room for encryption errors that concern the given
 * participant and match the given criteria string.
 *
 * @param room$ The LiveKit room, or undefined while not connected.
 * @param participant The participant whose errors we are interested in.
 * @param encryptionSystem The encryption system in use for this call.
 * @param criteria Substring to look for in the error message (e.g. "MissingKey").
 * @returns An Observable which emits true while a matching error is present.
 */
function encryptionErrorObservable$(
  room$: Behavior<LivekitRoom | undefined>,
  participant: Participant,
  encryptionSystem: EncryptionSystem,
  criteria: string,
): Observable<boolean> {
  return room$.pipe(
    switchMap((room) =>
      room === undefined
        ? of(false)
        : roomEventSelector(room, LivekitRoomEvent.EncryptionError).pipe(
            map(([error]) => {
              switch (encryptionSystem.kind) {
                case E2eeType.PER_PARTICIPANT:
                  // Ideally we would pull the participant identity from the
                  // field on the error. However, it gets lost in the
                  // serialization process between workers. So, instead we do
                  // a string match.
                  return (
                    (error?.message.includes(participant.identity) &&
                      error?.message.includes(criteria)) ??
                    false
                  );
                case E2eeType.SHARED_KEY:
                  return !!error?.message.includes(criteria);
                default:
                  return false;
              }
            }),
          ),
    ),
    distinctUntilChanged(),
    // Throttle to avoid spamming the UI.
    // NOTE(review): throttleTime defaults to leading-only emission, so a state
    // change arriving within the throttle window is dropped rather than
    // delivered late — confirm this is acceptable here.
    throttleTime(1000),
    startWith(false),
  );
}
/**
 * Determines whether media for a remote track is actually arriving and being
 * decoded, by comparing successive inbound RTP frame counters. Emits true or
 * false when a conclusion can be drawn, and stays silent otherwise.
 */
function observeRemoteTrackReceivingOkay$(
  participant: Participant,
  source: Track.Source,
): Observable<boolean | undefined> {
  // NOTE(review): this state lives outside the pipeline, so it would be shared
  // between subscribers if this Observable were subscribed more than once —
  // confirm it only ever has a single subscriber.
  let previous: {
    framesDecoded: number | undefined;
    framesDropped: number | undefined;
    framesReceived: number | undefined;
  } = {
    framesDecoded: undefined,
    framesDropped: undefined,
    framesReceived: undefined,
  };
  return observeInboundRtpStreamStats$(participant, source).pipe(
    // Project the stats down to just the frame counters we care about
    map((stats) =>
      stats
        ? {
            framesDecoded: stats.framesDecoded,
            framesDropped: stats.framesDropped,
            framesReceived: stats.framesReceived,
          }
        : undefined,
    ),
    filter((stats) => !!stats),
    map((current): boolean | undefined => {
      const prior = previous;
      previous = current;
      if (
        typeof current.framesReceived === "number" &&
        typeof prior.framesReceived === "number" &&
        typeof current.framesDecoded === "number" &&
        typeof prior.framesDecoded === "number"
      ) {
        const receivedDelta = current.framesReceived - prior.framesReceived;
        const decodedDelta = current.framesDecoded - prior.framesDecoded;
        // if we received >0 frames and managed to decode >0 frames then we
        // treat that as success
        if (receivedDelta > 0) return decodedDelta > 0;
      }
      return undefined; // no change
    }),
    filter((x) => typeof x === "boolean"),
    startWith(undefined),
  );
}

View File

@@ -0,0 +1,44 @@
/*
Copyright 2023, 2024 New Vector Ltd.
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { type RemoteParticipant } from "livekit-client";
import { map } from "rxjs";
import { type Behavior } from "../Behavior";
import {
type BaseScreenShareInputs,
type BaseScreenShareViewModel,
createBaseScreenShare,
} from "./ScreenShareViewModel";
import { type ObservableScope } from "../ObservableScope";
/**
 * View model for a remote participant's screen share.
 */
export interface RemoteScreenShareViewModel extends BaseScreenShareViewModel {
  /** Discriminant: this media belongs to a remote participant. */
  local: false;
  /**
   * Whether this screen share's video should be displayed.
   */
  videoEnabled$: Behavior<boolean>;
}
/**
 * Inputs for creating a RemoteScreenShareViewModel.
 */
export interface RemoteScreenShareInputs extends BaseScreenShareInputs {
  /** The remote LiveKit participant, or null if not (yet) present. */
  participant$: Behavior<RemoteParticipant | null>;
  /** When true, the video is hidden as if we were disconnected. */
  pretendToBeDisconnected$: Behavior<boolean>;
}
/**
 * Creates the view model for a remote participant's screen share. The video is
 * shown unless we are pretending to be disconnected.
 */
export function createRemoteScreenShare(
  scope: ObservableScope,
  { pretendToBeDisconnected$, ...inputs }: RemoteScreenShareInputs,
): RemoteScreenShareViewModel {
  // Hide the video while pretending to be disconnected
  const videoEnabled$ = scope.behavior(
    pretendToBeDisconnected$.pipe(map((disconnected) => !disconnected)),
  );
  const base = createBaseScreenShare(scope, inputs);
  return { ...base, local: false, videoEnabled$ };
}

View File

@@ -0,0 +1,82 @@
/*
Copyright 2023, 2024 New Vector Ltd.
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { type RemoteParticipant } from "livekit-client";
import { combineLatest, map, of, switchMap } from "rxjs";
import { type Behavior } from "../Behavior";
import { createVolumeControls, type VolumeControls } from "../VolumeControls";
import {
type BaseUserMediaInputs,
type BaseUserMediaViewModel,
createBaseUserMedia,
} from "./UserMediaViewModel";
import { type ObservableScope } from "../ObservableScope";
/**
 * View model for a remote participant's user media (microphone and camera),
 * including playback volume controls.
 */
export interface RemoteUserMediaViewModel
  extends BaseUserMediaViewModel,
    VolumeControls {
  /** Discriminant: this media belongs to a remote participant. */
  local: false;
  /**
   * Whether we are waiting for this user's LiveKit participant to exist. This
   * could be because either we or the remote party are still connecting.
   */
  waitingForMedia$: Behavior<boolean>;
}
/**
 * Inputs for creating a RemoteUserMediaViewModel.
 */
export interface RemoteUserMediaInputs
  extends Omit<BaseUserMediaInputs, "statsType"> {
  /** The remote LiveKit participant, or null if not (yet) present. */
  participant$: Behavior<RemoteParticipant | null>;
  /** When true, speaking/video indicators behave as if we were disconnected. */
  pretendToBeDisconnected$: Behavior<boolean>;
}
export function createRemoteUserMedia(
scope: ObservableScope,
{ pretendToBeDisconnected$, ...inputs }: RemoteUserMediaInputs,
): RemoteUserMediaViewModel {
const baseUserMedia = createBaseUserMedia(scope, {
...inputs,
statsType: "inbound-rtp",
});
return {
...baseUserMedia,
...createVolumeControls(scope, {
pretendToBeDisconnected$,
sink$: scope.behavior(
inputs.participant$.pipe(map((p) => (volume) => p?.setVolume(volume))),
),
}),
local: false,
speaking$: scope.behavior(
pretendToBeDisconnected$.pipe(
switchMap((disconnected) =>
disconnected ? of(false) : baseUserMedia.speaking$,
),
),
),
videoEnabled$: scope.behavior(
pretendToBeDisconnected$.pipe(
switchMap((disconnected) =>
disconnected ? of(false) : baseUserMedia.videoEnabled$,
),
),
),
waitingForMedia$: scope.behavior(
combineLatest(
[inputs.livekitRoom$, inputs.participant$],
(livekitRoom, participant) =>
// If livekitRoom is undefined, the user is not attempting to publish on
// any transport and so we shouldn't expect a participant. (They might
// be a subscribe-only bot for example.)
livekitRoom !== undefined && participant === null,
),
),
};
}

View File

@@ -0,0 +1,51 @@
/*
Copyright 2023, 2024 New Vector Ltd.
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { Track } from "livekit-client";
import { type ObservableScope } from "../ObservableScope";
import { type LocalScreenShareViewModel } from "./LocalScreenShareViewModel";
import {
createMemberMedia,
type MemberMediaInputs,
type MemberMediaViewModel,
} from "./MemberMediaViewModel";
import { type RemoteScreenShareViewModel } from "./RemoteScreenShareViewModel";
/**
 * A participant's screen share media.
 */
export type ScreenShareViewModel =
  | LocalScreenShareViewModel
  | RemoteScreenShareViewModel;
/**
 * Properties which are common to all ScreenShareViewModels.
 */
export interface BaseScreenShareViewModel extends MemberMediaViewModel {
  /** Discriminant distinguishing screen share media from user media. */
  type: "screen share";
}
/**
 * Inputs for creating the base screen share view model: the member media
 * inputs, minus the track sources (which are fixed to screen share tracks).
 */
export type BaseScreenShareInputs = Omit<
  MemberMediaInputs,
  "audioSource" | "videoSource"
>;
/**
 * Builds the properties shared by local and remote screen share view models:
 * member media wired to the screen share track sources, plus the
 * discriminating `type` tag.
 */
export function createBaseScreenShare(
  scope: ObservableScope,
  inputs: BaseScreenShareInputs,
): BaseScreenShareViewModel {
  const memberMedia = createMemberMedia(scope, {
    ...inputs,
    audioSource: Track.Source.ScreenShareAudio,
    videoSource: Track.Source.ScreenShare,
  });
  return { ...memberMedia, type: "screen share" };
}

View File

@@ -0,0 +1,143 @@
/*
Copyright 2023, 2024 New Vector Ltd.
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import {
combineLatest,
map,
type Observable,
of,
Subject,
switchMap,
} from "rxjs";
import {
observeParticipantEvents,
observeParticipantMedia,
} from "@livekit/components-core";
import { ParticipantEvent, Track } from "livekit-client";
import { type ReactionOption } from "../../reactions";
import { type Behavior } from "../Behavior";
import { type LocalUserMediaViewModel } from "./LocalUserMediaViewModel";
import {
createMemberMedia,
type MemberMediaInputs,
type MemberMediaViewModel,
} from "./MemberMediaViewModel";
import { type RemoteUserMediaViewModel } from "./RemoteUserMediaViewModel";
import { type ObservableScope } from "../ObservableScope";
import { createToggle$ } from "../../utils/observable";
import { showConnectionStats } from "../../settings/settings";
import { observeRtpStreamStats$ } from "./observeRtpStreamStats";
/**
 * A participant's user media (i.e. their microphone and camera feed).
 */
export type UserMediaViewModel =
  | LocalUserMediaViewModel
  | RemoteUserMediaViewModel;
/**
 * Properties which are common to all UserMediaViewModels.
 */
export interface BaseUserMediaViewModel extends MemberMediaViewModel {
  /** Discriminant distinguishing user media from screen share media. */
  type: "user";
  /** Whether the participant is currently speaking. */
  speaking$: Behavior<boolean>;
  /** Whether the participant's microphone track exists and is unmuted. */
  audioEnabled$: Behavior<boolean>;
  /** Whether the participant's camera track exists and is unmuted. */
  videoEnabled$: Behavior<boolean>;
  /** Whether the video should be cropped to fill its tile ("cover" fit). */
  cropVideo$: Behavior<boolean>;
  /** Flips the value of cropVideo$. */
  toggleCropVideo: () => void;
  /**
   * The expected identity of the LiveKit participant. Exposed for debugging.
   */
  rtcBackendIdentity: string;
  /** Raised-hand timestamp, or null when the hand is not raised. */
  handRaised$: Behavior<Date | null>;
  /** The participant's current reaction, or null if none. */
  reaction$: Behavior<ReactionOption | null>;
  /**
   * RTP stats of the microphone track; undefined when no participant exists
   * or connection stats are disabled in settings.
   */
  audioStreamStats$: Observable<
    RTCInboundRtpStreamStats | RTCOutboundRtpStreamStats | undefined
  >;
  /**
   * RTP stats of the camera track; undefined when no participant exists or
   * connection stats are disabled in settings.
   */
  videoStreamStats$: Observable<
    RTCInboundRtpStreamStats | RTCOutboundRtpStreamStats | undefined
  >;
}
/**
 * Inputs for creating the base user media view model.
 */
export interface BaseUserMediaInputs
  extends Omit<MemberMediaInputs, "audioSource" | "videoSource"> {
  /** The expected identity of the LiveKit participant. */
  rtcBackendIdentity: string;
  /** Raised-hand timestamp, or null when the hand is not raised. */
  handRaised$: Behavior<Date | null>;
  /** The participant's current reaction, or null if none. */
  reaction$: Behavior<ReactionOption | null>;
  /** Which direction of RTP stats to sample for this participant's tracks. */
  statsType: "inbound-rtp" | "outbound-rtp";
}
export function createBaseUserMedia(
scope: ObservableScope,
{
rtcBackendIdentity,
handRaised$,
reaction$,
statsType,
...inputs
}: BaseUserMediaInputs,
): BaseUserMediaViewModel {
const { participant$ } = inputs;
const media$ = scope.behavior(
participant$.pipe(
switchMap((p) => (p && observeParticipantMedia(p)) ?? of(undefined)),
),
);
const toggleCropVideo$ = new Subject<void>();
return {
...createMemberMedia(scope, {
...inputs,
audioSource: Track.Source.Microphone,
videoSource: Track.Source.Camera,
}),
type: "user",
speaking$: scope.behavior(
participant$.pipe(
switchMap((p) =>
p
? observeParticipantEvents(
p,
ParticipantEvent.IsSpeakingChanged,
).pipe(map((p) => p.isSpeaking))
: of(false),
),
),
),
audioEnabled$: scope.behavior(
media$.pipe(map((m) => m?.microphoneTrack?.isMuted === false)),
),
videoEnabled$: scope.behavior(
media$.pipe(map((m) => m?.cameraTrack?.isMuted === false)),
),
cropVideo$: createToggle$(scope, true, toggleCropVideo$),
toggleCropVideo: () => toggleCropVideo$.next(),
rtcBackendIdentity,
handRaised$,
reaction$,
audioStreamStats$: combineLatest([
participant$,
showConnectionStats.value$,
]).pipe(
switchMap(([p, showConnectionStats]) => {
//
if (!p || !showConnectionStats) return of(undefined);
return observeRtpStreamStats$(p, Track.Source.Microphone, statsType);
}),
),
videoStreamStats$: combineLatest([
participant$,
showConnectionStats.value$,
]).pipe(
switchMap(([p, showConnectionStats]) => {
if (!p || !showConnectionStats) return of(undefined);
return observeRtpStreamStats$(p, Track.Source.Camera, statsType);
}),
),
};
}

View File

@@ -0,0 +1,69 @@
/*
Copyright 2023, 2024 New Vector Ltd.
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import {
LocalTrack,
type Participant,
RemoteTrack,
type Track,
} from "livekit-client";
import {
combineLatest,
interval,
type Observable,
startWith,
switchMap,
map,
} from "rxjs";
import { observeTrackReference$ } from "../observeTrackReference";
/**
 * Samples the RTP stream stats of a participant's track roughly once per
 * second.
 *
 * @param participant The participant whose track should be inspected.
 * @param source The track source (e.g. microphone or camera).
 * @param type Whether to read inbound or outbound RTP stats.
 * @returns An Observable of the first stats entry of the requested type, or
 *   undefined while no suitable track or matching entry exists.
 */
export function observeRtpStreamStats$(
  participant: Participant,
  source: Track.Source,
  type: "inbound-rtp" | "outbound-rtp",
): Observable<
  RTCInboundRtpStreamStats | RTCOutboundRtpStreamStats | undefined
> {
  // Re-sample whenever the track reference changes and on a 1 s timer
  const ticker$ = interval(1000).pipe(startWith(0));
  return combineLatest([
    observeTrackReference$(participant, source),
    ticker$,
  ]).pipe(
    switchMap(async ([trackReference]) => {
      const track = trackReference?.publication?.track;
      // Stats reports are only available on concrete local/remote tracks
      if (track instanceof RemoteTrack || track instanceof LocalTrack) {
        const report = await track.getRTCStatsReport();
        for (const entry of report?.values() ?? []) {
          if (entry.type === type) return entry;
        }
      }
      return undefined;
    }),
    startWith(undefined),
  );
}
/**
 * Like {@link observeRtpStreamStats$}, but restricted to inbound-rtp stats so
 * that emissions can be typed as RTCInboundRtpStreamStats.
 */
export function observeInboundRtpStreamStats$(
  participant: Participant,
  source: Track.Source,
): Observable<RTCInboundRtpStreamStats | undefined> {
  const stats$ = observeRtpStreamStats$(participant, source, "inbound-rtp");
  // Requesting "inbound-rtp" means we never see outbound entries, hence the
  // narrowing cast
  return stats$.pipe(
    map((stats) => stats as RTCInboundRtpStreamStats | undefined),
  );
}

View File

@@ -7,7 +7,7 @@ Please see LICENSE in the repository root for full details.
import { describe, test } from "vitest";
import { withTestScheduler } from "../utils/test";
import { withTestScheduler } from "../../utils/test";
import { observeSpeaker$ } from "./observeSpeaker";
const yesNo = {

View File

@@ -0,0 +1,28 @@
/*
Copyright 2023, 2024 New Vector Ltd.
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import {
observeParticipantMedia,
type TrackReference,
} from "@livekit/components-core";
import { type Participant, type Track } from "livekit-client";
import { distinctUntilChanged, map, type Observable } from "rxjs";
/**
 * Reactively reads a participant's track reference for a given media source.
 * Emits undefined while the participant has no publication for that source.
 */
export function observeTrackReference$(
  participant: Participant,
  source: Track.Source,
): Observable<TrackReference | undefined> {
  return observeParticipantMedia(participant).pipe(
    // Look the publication up afresh on every media change
    map(() => participant.getTrackPublication(source)),
    distinctUntilChanged(),
    map((publication) =>
      publication ? { participant, publication, source } : undefined,
    ),
  );
}

View File

@@ -14,7 +14,7 @@ import { type MatrixRTCSession } from "matrix-js-sdk/lib/matrixrtc";
import { GridTile } from "./GridTile";
import {
mockRtcMembership,
createRemoteMedia,
mockRemoteMedia,
mockRemoteParticipant,
} from "../utils/test";
import { GridTileViewModel } from "../state/TileViewModel";
@@ -29,7 +29,7 @@ global.IntersectionObserver = class MockIntersectionObserver {
} as unknown as typeof IntersectionObserver;
test("GridTile is accessible", async () => {
const vm = createRemoteMedia(
const vm = mockRemoteMedia(
mockRtcMembership("@alice:example.org", "AAAA"),
{
rawDisplayName: "Alice",

View File

@@ -39,11 +39,6 @@ import {
import { useObservableEagerState } from "observable-hooks";
import styles from "./GridTile.module.css";
import {
type UserMediaViewModel,
LocalUserMediaViewModel,
type RemoteUserMediaViewModel,
} from "../state/MediaViewModel";
import { Slider } from "../Slider";
import { MediaView } from "./MediaView";
import { useLatest } from "../useLatest";
@@ -51,6 +46,9 @@ import { type GridTileViewModel } from "../state/TileViewModel";
import { useMergedRefs } from "../useMergedRefs";
import { useReactionsSender } from "../reactions/useReactionsSender";
import { useBehavior } from "../useBehavior";
import { type LocalUserMediaViewModel } from "../state/media/LocalUserMediaViewModel";
import { type RemoteUserMediaViewModel } from "../state/media/RemoteUserMediaViewModel";
import { type UserMediaViewModel } from "../state/media/UserMediaViewModel";
interface TileProps {
ref?: Ref<HTMLDivElement>;
@@ -68,7 +66,7 @@ interface TileProps {
interface UserMediaTileProps extends TileProps {
vm: UserMediaViewModel;
mirror: boolean;
locallyMuted: boolean;
playbackMuted: boolean;
waitingForMedia?: boolean;
primaryButton?: ReactNode;
menuStart?: ReactNode;
@@ -79,7 +77,7 @@ const UserMediaTile: FC<UserMediaTileProps> = ({
ref,
vm,
showSpeakingIndicators,
locallyMuted,
playbackMuted,
waitingForMedia,
primaryButton,
menuStart,
@@ -109,7 +107,7 @@ const UserMediaTile: FC<UserMediaTileProps> = ({
const onSelectFitContain = useCallback(
(e: Event) => {
e.preventDefault();
vm.toggleFitContain();
vm.toggleCropVideo();
},
[vm],
);
@@ -117,12 +115,12 @@ const UserMediaTile: FC<UserMediaTileProps> = ({
const handRaised = useBehavior(vm.handRaised$);
const reaction = useBehavior(vm.reaction$);
const AudioIcon = locallyMuted
const AudioIcon = playbackMuted
? VolumeOffSolidIcon
: audioEnabled
? MicOnSolidIcon
: MicOffSolidIcon;
const audioIconLabel = locallyMuted
const audioIconLabel = playbackMuted
? t("video_tile.muted_for_me")
: audioEnabled
? t("microphone_on")
@@ -166,7 +164,7 @@ const UserMediaTile: FC<UserMediaTileProps> = ({
width={20}
height={20}
aria-label={audioIconLabel}
data-muted={locallyMuted || !audioEnabled}
data-muted={playbackMuted || !audioEnabled}
className={styles.muteIcon}
/>
}
@@ -245,7 +243,7 @@ const LocalUserMediaTile: FC<LocalUserMediaTileProps> = ({
<UserMediaTile
ref={ref}
vm={vm}
locallyMuted={false}
playbackMuted={false}
mirror={mirror}
primaryButton={
switchCamera === null ? undefined : (
@@ -295,36 +293,31 @@ const RemoteUserMediaTile: FC<RemoteUserMediaTileProps> = ({
}) => {
const { t } = useTranslation();
const waitingForMedia = useBehavior(vm.waitingForMedia$);
const locallyMuted = useBehavior(vm.locallyMuted$);
const localVolume = useBehavior(vm.localVolume$);
const playbackMuted = useBehavior(vm.playbackMuted$);
const playbackVolume = useBehavior(vm.playbackVolume$);
const onSelectMute = useCallback(
(e: Event) => {
e.preventDefault();
vm.toggleLocallyMuted();
vm.togglePlaybackMuted();
},
[vm],
);
const onChangeLocalVolume = useCallback(
(v: number) => vm.setLocalVolume(v),
[vm],
);
const onCommitLocalVolume = useCallback(() => vm.commitLocalVolume(), [vm]);
const VolumeIcon = locallyMuted ? VolumeOffIcon : VolumeOnIcon;
const VolumeIcon = playbackMuted ? VolumeOffIcon : VolumeOnIcon;
return (
<UserMediaTile
ref={ref}
vm={vm}
waitingForMedia={waitingForMedia}
locallyMuted={locallyMuted}
playbackMuted={playbackMuted}
mirror={false}
menuStart={
<>
<ToggleMenuItem
Icon={MicOffIcon}
label={t("video_tile.mute_for_me")}
checked={locallyMuted}
checked={playbackMuted}
onSelect={onSelectMute}
/>
{/* TODO: Figure out how to make this slider keyboard accessible */}
@@ -332,9 +325,9 @@ const RemoteUserMediaTile: FC<RemoteUserMediaTileProps> = ({
<Slider
className={styles.volumeSlider}
label={t("video_tile.volume")}
value={localVolume}
onValueChange={onChangeLocalVolume}
onValueCommit={onCommitLocalVolume}
value={playbackVolume}
onValueChange={vm.adjustPlaybackVolume}
onValueCommit={vm.commitPlaybackVolume}
min={0}
max={1}
step={0.01}
@@ -374,7 +367,7 @@ export const GridTile: FC<GridTileProps> = ({
const displayName = useBehavior(media.displayName$);
const mxcAvatarUrl = useBehavior(media.mxcAvatarUrl$);
if (media instanceof LocalUserMediaViewModel) {
if (media.local) {
return (
<LocalUserMediaTile
ref={ref}

View File

@@ -18,7 +18,7 @@ import { TrackInfo } from "@livekit/protocol";
import { type ComponentProps } from "react";
import { MediaView } from "./MediaView";
import { EncryptionStatus } from "../state/MediaViewModel";
import { EncryptionStatus } from "../state/media/MemberMediaViewModel";
import { mockLocalParticipant } from "../utils/test";
describe("MediaView", () => {

View File

@@ -16,10 +16,10 @@ import { ErrorSolidIcon } from "@vector-im/compound-design-tokens/assets/web/ico
import styles from "./MediaView.module.css";
import { Avatar } from "../Avatar";
import { type EncryptionStatus } from "../state/MediaViewModel";
import { type EncryptionStatus } from "../state/media/MemberMediaViewModel";
import { RaisedHandIndicator } from "../reactions/RaisedHandIndicator";
import {
showConnectionStats,
showConnectionStats as showConnectionStatsSetting,
showHandRaisedTimer,
useSetting,
} from "../settings/settings";
@@ -85,7 +85,7 @@ export const MediaView: FC<Props> = ({
}) => {
const { t } = useTranslation();
const [handRaiseTimerVisible] = useSetting(showHandRaisedTimer);
const [showConnectioStats] = useSetting(showConnectionStats);
const [showConnectionStats] = useSetting(showConnectionStatsSetting);
const avatarSize = Math.round(Math.min(targetWidth, targetHeight) / 2);
@@ -139,10 +139,10 @@ export const MediaView: FC<Props> = ({
{waitingForMedia && (
<div className={styles.status}>
{t("video_tile.waiting_for_media")}
{showConnectioStats ? " " + rtcBackendIdentity : ""}
{showConnectionStats ? " " + rtcBackendIdentity : ""}
</div>
)}
{(audioStreamStats || videoStreamStats) && (
{showConnectionStats && (
<>
<RTCConnectionStats
audio={audioStreamStats}

View File

@@ -15,8 +15,8 @@ import {
mockLocalParticipant,
mockMediaDevices,
mockRtcMembership,
createLocalMedia,
createRemoteMedia,
mockLocalMedia,
mockRemoteMedia,
mockRemoteParticipant,
} from "../utils/test";
import { SpotlightTileViewModel } from "../state/TileViewModel";
@@ -28,7 +28,7 @@ global.IntersectionObserver = class MockIntersectionObserver {
} as unknown as typeof IntersectionObserver;
test("SpotlightTile is accessible", async () => {
const vm1 = createRemoteMedia(
const vm1 = mockRemoteMedia(
mockRtcMembership("@alice:example.org", "AAAA"),
{
rawDisplayName: "Alice",
@@ -37,7 +37,7 @@ test("SpotlightTile is accessible", async () => {
mockRemoteParticipant({}),
);
const vm2 = createLocalMedia(
const vm2 = mockLocalMedia(
mockRtcMembership("@bob:example.org", "BBBB"),
{
rawDisplayName: "Bob",

View File

@@ -32,20 +32,19 @@ import FullScreenMaximiseIcon from "../icons/FullScreenMaximise.svg?react";
import FullScreenMinimiseIcon from "../icons/FullScreenMinimise.svg?react";
import { MediaView } from "./MediaView";
import styles from "./SpotlightTile.module.css";
import {
type EncryptionStatus,
LocalUserMediaViewModel,
type MediaViewModel,
ScreenShareViewModel,
type UserMediaViewModel,
type RemoteUserMediaViewModel,
} from "../state/MediaViewModel";
import { useInitial } from "../useInitial";
import { useMergedRefs } from "../useMergedRefs";
import { useReactiveState } from "../useReactiveState";
import { useLatest } from "../useLatest";
import { type SpotlightTileViewModel } from "../state/TileViewModel";
import { useBehavior } from "../useBehavior";
import { type EncryptionStatus } from "../state/media/MemberMediaViewModel";
import { type LocalUserMediaViewModel } from "../state/media/LocalUserMediaViewModel";
import { type RemoteUserMediaViewModel } from "../state/media/RemoteUserMediaViewModel";
import { type UserMediaViewModel } from "../state/media/UserMediaViewModel";
import { type ScreenShareViewModel } from "../state/media/ScreenShareViewModel";
import { type RemoteScreenShareViewModel } from "../state/media/RemoteScreenShareViewModel";
import { type MediaViewModel } from "../state/media/MediaViewModel";
interface SpotlightItemBaseProps {
ref?: Ref<HTMLDivElement>;
@@ -54,7 +53,6 @@ interface SpotlightItemBaseProps {
targetWidth: number;
targetHeight: number;
video: TrackReferenceOrPlaceholder | undefined;
videoEnabled: boolean;
userId: string;
unencryptedWarning: boolean;
encryptionStatus: EncryptionStatus;
@@ -67,6 +65,7 @@ interface SpotlightItemBaseProps {
interface SpotlightUserMediaItemBaseProps extends SpotlightItemBaseProps {
videoFit: "contain" | "cover";
videoEnabled: boolean;
}
interface SpotlightLocalUserMediaItemProps extends SpotlightUserMediaItemBaseProps {
@@ -106,14 +105,16 @@ const SpotlightUserMediaItem: FC<SpotlightUserMediaItemProps> = ({
...props
}) => {
const cropVideo = useBehavior(vm.cropVideo$);
const videoEnabled = useBehavior(vm.videoEnabled$);
const baseProps: SpotlightUserMediaItemBaseProps &
RefAttributes<HTMLDivElement> = {
videoFit: cropVideo ? "cover" : "contain",
videoEnabled,
...props,
};
return vm instanceof LocalUserMediaViewModel ? (
return vm.local ? (
<SpotlightLocalUserMediaItem vm={vm} {...baseProps} />
) : (
<SpotlightRemoteUserMediaItem vm={vm} {...baseProps} />
@@ -122,6 +123,31 @@ const SpotlightUserMediaItem: FC<SpotlightUserMediaItemProps> = ({
SpotlightUserMediaItem.displayName = "SpotlightUserMediaItem";
interface SpotlightScreenShareItemProps extends SpotlightItemBaseProps {
vm: ScreenShareViewModel;
videoEnabled: boolean;
}
const SpotlightScreenShareItem: FC<SpotlightScreenShareItemProps> = ({
vm,
...props
}) => {
return <MediaView videoFit="contain" mirror={false} {...props} />;
};
interface SpotlightRemoteScreenShareItemProps extends SpotlightItemBaseProps {
vm: RemoteScreenShareViewModel;
}
const SpotlightRemoteScreenShareItem: FC<
SpotlightRemoteScreenShareItemProps
> = ({ vm, ...props }) => {
const videoEnabled = useBehavior(vm.videoEnabled$);
return (
<SpotlightScreenShareItem vm={vm} videoEnabled={videoEnabled} {...props} />
);
};
interface SpotlightItemProps {
ref?: Ref<HTMLDivElement>;
vm: MediaViewModel;
@@ -152,7 +178,6 @@ const SpotlightItem: FC<SpotlightItemProps> = ({
const displayName = useBehavior(vm.displayName$);
const mxcAvatarUrl = useBehavior(vm.mxcAvatarUrl$);
const video = useBehavior(vm.video$);
const videoEnabled = useBehavior(vm.videoEnabled$);
const unencryptedWarning = useBehavior(vm.unencryptedWarning$);
const encryptionStatus = useBehavior(vm.encryptionStatus$);
@@ -178,7 +203,6 @@ const SpotlightItem: FC<SpotlightItemProps> = ({
targetWidth,
targetHeight,
video: video ?? undefined,
videoEnabled,
userId: vm.userId,
unencryptedWarning,
focusUrl,
@@ -189,10 +213,12 @@ const SpotlightItem: FC<SpotlightItemProps> = ({
"aria-hidden": ariaHidden,
};
return vm instanceof ScreenShareViewModel ? (
<MediaView videoFit="contain" mirror={false} {...baseProps} />
if (vm.type === "user")
return <SpotlightUserMediaItem vm={vm} {...baseProps} />;
return vm.local ? (
<SpotlightScreenShareItem vm={vm} videoEnabled {...baseProps} />
) : (
<SpotlightUserMediaItem vm={vm} {...baseProps} />
<SpotlightRemoteScreenShareItem vm={vm} {...baseProps} />
);
};

View File

@@ -61,6 +61,20 @@ export function accumulate<State, Event>(
events$.pipe(scan(update, initial), startWith(initial));
}
/**
 * Derives a boolean Behavior from a stream of toggle events: it starts at
 * `initialValue` and flips on every emission of `toggle$`.
 */
export function createToggle$(
  scope: ObservableScope,
  initialValue: boolean,
  toggle$: Observable<void>,
): Behavior<boolean> {
  const toggled$ = toggle$.pipe(accumulate(initialValue, (value) => !value));
  return scope.behavior(toggled$);
}
const switchSymbol = Symbol("switch");
/**

View File

@@ -52,10 +52,6 @@ import {
} from "matrix-js-sdk/lib/matrixrtc/IKeyTransport";
import { type CallMembershipIdentityParts } from "matrix-js-sdk/lib/matrixrtc/EncryptionManager";
import {
LocalUserMediaViewModel,
RemoteUserMediaViewModel,
} from "../state/MediaViewModel";
import { E2eeType } from "../e2ee/e2eeType";
import {
DEFAULT_CONFIG,
@@ -66,6 +62,14 @@ import { type MediaDevices } from "../state/MediaDevices";
import { type Behavior, constant } from "../state/Behavior";
import { ObservableScope } from "../state/ObservableScope";
import { MuteStates } from "../state/MuteStates";
import {
createLocalUserMedia,
type LocalUserMediaViewModel,
} from "../state/media/LocalUserMediaViewModel";
import {
createRemoteUserMedia,
type RemoteUserMediaViewModel,
} from "../state/media/RemoteUserMediaViewModel";
export function withFakeTimers(continuation: () => void): void {
vi.useFakeTimers();
@@ -323,30 +327,27 @@ export function mockLocalParticipant(
} as Partial<LocalParticipant> as LocalParticipant;
}
export function createLocalMedia(
export function mockLocalMedia(
rtcMember: CallMembership,
roomMember: Partial<RoomMember>,
localParticipant: LocalParticipant,
mediaDevices: MediaDevices,
): LocalUserMediaViewModel {
const member = mockMatrixRoomMember(rtcMember, roomMember);
return new LocalUserMediaViewModel(
testScope(),
"local",
member.userId,
rtcMember.rtcBackendIdentity,
constant(localParticipant),
{
kind: E2eeType.PER_PARTICIPANT,
},
constant(mockLivekitRoom({ localParticipant })),
constant("https://rtc-example.org"),
return createLocalUserMedia(testScope(), {
id: "local",
userId: member.userId,
rtcBackendIdentity: rtcMember.rtcBackendIdentity,
participant$: constant(localParticipant),
encryptionSystem: { kind: E2eeType.PER_PARTICIPANT },
livekitRoom$: constant(mockLivekitRoom({ localParticipant })),
focusUrl$: constant("https://rtc-example.org"),
mediaDevices,
constant(member.rawDisplayName ?? "nodisplayname"),
constant(member.getMxcAvatarUrl()),
constant(null),
constant(null),
);
displayName$: constant(member.rawDisplayName ?? "nodisplayname"),
mxcAvatarUrl$: constant(member.getMxcAvatarUrl()),
handRaised$: constant(null),
reaction$: constant(null),
});
}
export function mockRemoteParticipant(
@@ -364,7 +365,7 @@ export function mockRemoteParticipant(
} as RemoteParticipant;
}
export function createRemoteMedia(
export function mockRemoteMedia(
rtcMember: CallMembership,
roomMember: Partial<RoomMember>,
participant: RemoteParticipant | null,
@@ -376,23 +377,20 @@ export function createRemoteMedia(
),
): RemoteUserMediaViewModel {
const member = mockMatrixRoomMember(rtcMember, roomMember);
return new RemoteUserMediaViewModel(
testScope(),
"remote",
member.userId,
rtcMember.rtcBackendIdentity,
constant(participant),
{
kind: E2eeType.PER_PARTICIPANT,
},
constant(livekitRoom),
constant("https://rtc-example.org"),
constant(false),
constant(member.rawDisplayName ?? "nodisplayname"),
constant(member.getMxcAvatarUrl()),
constant(null),
constant(null),
);
return createRemoteUserMedia(testScope(), {
id: "remote",
userId: member.userId,
rtcBackendIdentity: rtcMember.rtcBackendIdentity,
participant$: constant(participant),
encryptionSystem: { kind: E2eeType.PER_PARTICIPANT },
livekitRoom$: constant(livekitRoom),
focusUrl$: constant("https://rtc-example.org"),
pretendToBeDisconnected$: constant(false),
displayName$: constant(member.rawDisplayName ?? "nodisplayname"),
mxcAvatarUrl$: constant(member.getMxcAvatarUrl()),
handRaised$: constant(null),
reaction$: constant(null),
});
}
export function mockConfig(