Split MediaViewModel into multiple files

This commit is contained in:
Robin
2026-02-25 22:34:07 +01:00
parent 6995388a29
commit 6b51b7dc58
25 changed files with 957 additions and 773 deletions

View File

@@ -50,11 +50,6 @@ import { type CallMembershipIdentityParts } from "matrix-js-sdk/lib/matrixrtc/En
import { v4 as uuidv4 } from "uuid";
import { type IMembershipManager } from "matrix-js-sdk/lib/matrixrtc/IMembershipManager";
import {
type MediaViewModel,
type ScreenShareViewModel,
type UserMediaViewModel,
} from "../MediaViewModel";
import {
createToggle$,
filterBehavior,
@@ -142,9 +137,12 @@ import { type Connection } from "./remoteMembers/Connection.ts";
import { createLayoutModeSwitch } from "./LayoutSwitch.ts";
import {
createWrappedUserMedia,
type WrappedUserMediaViewModel,
type MediaItem,
} from "../MediaItem.ts";
type WrappedUserMediaViewModel,
} from "../media/MediaItem.ts";
import { type ScreenShareViewModel } from "../media/ScreenShareViewModel.ts";
import { type UserMediaViewModel } from "../media/UserMediaViewModel.ts";
import { type MediaViewModel } from "../media/MediaViewModel.ts";
const logger = rootLogger.getChild("[CallViewModel]");
//TODO

View File

@@ -30,7 +30,7 @@ import {
trackProcessorSync,
} from "../../../livekit/TrackProcessorContext.tsx";
import { getUrlParams } from "../../../UrlParams.ts";
import { observeTrackReference$ } from "../../MediaViewModel.ts";
import { observeTrackReference$ } from "../../observeTrackReference";
import { type Connection } from "../remoteMembers/Connection.ts";
import { ObservableScope } from "../../ObservableScope.ts";

View File

@@ -1,713 +0,0 @@
/*
Copyright 2023, 2024 New Vector Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import {
type AudioSource,
type VideoSource,
type TrackReference,
observeParticipantEvents,
observeParticipantMedia,
roomEventSelector,
} from "@livekit/components-core";
import {
type LocalParticipant,
LocalTrack,
LocalVideoTrack,
type Participant,
ParticipantEvent,
type RemoteParticipant,
Track,
TrackEvent,
facingModeFromLocalTrack,
type Room as LivekitRoom,
RoomEvent as LivekitRoomEvent,
RemoteTrack,
} from "livekit-client";
import { logger } from "matrix-js-sdk/lib/logger";
import {
  type Observable,
  Subject,
  combineLatest,
  distinctUntilChanged,
  filter,
  fromEvent,
  interval,
  map,
  merge,
  of,
  pairwise,
  startWith,
  switchMap,
  throttleTime,
} from "rxjs";
import { alwaysShowSelf } from "../settings/settings";
import { showConnectionStats } from "../settings/settings";
import { createToggle$ } from "../utils/observable";
import { type EncryptionSystem } from "../e2ee/sharedKeyManagement";
import { E2eeType } from "../e2ee/e2eeType";
import { type ReactionOption } from "../reactions";
import { platform } from "../Platform";
import { type MediaDevices } from "./MediaDevices";
import { type Behavior } from "./Behavior";
import { type ObservableScope } from "./ObservableScope";
import { createVolumeControls, type VolumeControls } from "./VolumeControls";
/**
 * Watches a participant's track publication for the given source, emitting a
 * TrackReference whenever the publication changes, or undefined while the
 * participant has no publication for that source.
 */
export function observeTrackReference$(
  participant: Participant,
  source: Track.Source,
): Observable<TrackReference | undefined> {
  const publication$ = observeParticipantMedia(participant).pipe(
    map(() => participant.getTrackPublication(source)),
    distinctUntilChanged(),
  );
  return publication$.pipe(
    map((publication) =>
      publication === undefined
        ? undefined
        : { participant, publication, source },
    ),
  );
}
/**
 * Polls the WebRTC stats report of a participant's track once per second,
 * emitting the first stats entry matching the requested RTP stream type, or
 * undefined while no such entry is available.
 */
export function observeRtpStreamStats$(
  participant: Participant,
  source: Track.Source,
  type: "inbound-rtp" | "outbound-rtp",
): Observable<
  RTCInboundRtpStreamStats | RTCOutboundRtpStreamStats | undefined
> {
  const tick$ = interval(1000).pipe(startWith(0));
  return combineLatest([
    observeTrackReference$(participant, source),
    tick$,
  ]).pipe(
    switchMap(async ([trackReference]) => {
      const track = trackReference?.publication?.track;
      // Only LiveKit local/remote tracks expose an RTC stats report.
      if (!(track instanceof RemoteTrack) && !(track instanceof LocalTrack)) {
        return undefined;
      }
      const report = await track.getRTCStatsReport();
      if (!report) return undefined;
      for (const entry of report.values()) {
        if (entry.type === type) return entry;
      }
      return undefined;
    }),
    startWith(undefined),
  );
}
// Convenience wrapper around observeRtpStreamStats$, fixed to inbound streams.
function observeInboundRtpStreamStats$(
  participant: Participant,
  source: Track.Source,
): Observable<RTCInboundRtpStreamStats | undefined> {
  const stats$ = observeRtpStreamStats$(participant, source, "inbound-rtp");
  return stats$.pipe(
    map((stats) => stats as RTCInboundRtpStreamStats | undefined),
  );
}
function observeRemoteTrackReceivingOkay$(
participant: Participant,
source: Track.Source,
): Observable<boolean | undefined> {
let lastStats: {
framesDecoded: number | undefined;
framesDropped: number | undefined;
framesReceived: number | undefined;
} = {
framesDecoded: undefined,
framesDropped: undefined,
framesReceived: undefined,
};
return observeInboundRtpStreamStats$(participant, source).pipe(
map((stats) => {
if (!stats) return undefined;
const { framesDecoded, framesDropped, framesReceived } = stats;
return {
framesDecoded,
framesDropped,
framesReceived,
};
}),
filter((newStats) => !!newStats),
map((newStats): boolean | undefined => {
const oldStats = lastStats;
lastStats = newStats;
if (
typeof newStats.framesReceived === "number" &&
typeof oldStats.framesReceived === "number" &&
typeof newStats.framesDecoded === "number" &&
typeof oldStats.framesDecoded === "number"
) {
const framesReceivedDelta =
newStats.framesReceived - oldStats.framesReceived;
const framesDecodedDelta =
newStats.framesDecoded - oldStats.framesDecoded;
// if we received >0 frames and managed to decode >0 frames then we treat that as success
if (framesReceivedDelta > 0) {
return framesDecodedDelta > 0;
}
}
// no change
return undefined;
}),
filter((x) => typeof x === "boolean"),
startWith(undefined),
);
}
/**
 * Watches the LiveKit room for encryption errors concerning the given
 * participant.
 *
 * @param criteria A substring to match against the error message (callers
 *   pass e.g. "MissingKey" or "InvalidKey").
 * @returns Whether a matching error is currently being reported, throttled to
 *   at most one emission per second; starts at false, and stays false while
 *   the room is undefined.
 */
function encryptionErrorObservable$(
  room$: Behavior<LivekitRoom | undefined>,
  participant: Participant,
  encryptionSystem: EncryptionSystem,
  criteria: string,
): Observable<boolean> {
  return room$.pipe(
    switchMap((room) => {
      if (room === undefined) return of(false);
      return roomEventSelector(room, LivekitRoomEvent.EncryptionError).pipe(
        map((e) => {
          const [err] = e;
          if (encryptionSystem.kind === E2eeType.PER_PARTICIPANT) {
            return (
              // Ideally we would pull the participant identity from the field on the error.
              // However, it gets lost in the serialization process between workers.
              // So, instead we do a string match
              (err?.message.includes(participant.identity) &&
                err?.message.includes(criteria)) ??
              false
            );
          } else if (encryptionSystem.kind === E2eeType.SHARED_KEY) {
            return !!err?.message.includes(criteria);
          }
          return false;
        }),
      );
    }),
    distinctUntilChanged(),
    throttleTime(1000), // Throttle to avoid spamming the UI
    startWith(false),
  );
}
/**
 * The state of a member's media encryption, as far as we can determine it.
 */
export enum EncryptionStatus {
  Connecting,
  Okay,
  KeyMissing,
  KeyInvalid,
  PasswordInvalid,
}
/**
 * Properties common to every media view model.
 */
interface BaseMediaViewModel {
  /**
   * An opaque identifier for this media.
   */
  id: string;
  /**
   * The Matrix user to which this media belongs.
   */
  userId: string;
  /** The user's current display name. */
  displayName$: Behavior<string>;
  /** The user's avatar URL (an MXC URI, judging by the name), if any. */
  mxcAvatarUrl$: Behavior<string | undefined>;
}
type BaseMediaInputs = BaseMediaViewModel;
/**
 * Builds a BaseMediaViewModel, discarding any extra properties present on the
 * input object.
 */
function createBaseMedia(inputs: BaseMediaInputs): BaseMediaViewModel {
  const { id, userId, displayName$, mxcAvatarUrl$ } = inputs;
  return { id, userId, displayName$, mxcAvatarUrl$ };
}
/**
 * Media belonging to an active member of the call.
 */
interface MemberMediaViewModel extends BaseMediaViewModel {
  /**
   * The LiveKit video track for this media.
   */
  video$: Behavior<TrackReference | undefined>;
  /**
   * The URL of the LiveKit focus on which this member should be publishing.
   * Exposed for debugging.
   */
  focusUrl$: Behavior<string | undefined>;
  /**
   * Whether there should be a warning that this media is unencrypted.
   */
  unencryptedWarning$: Behavior<boolean>;
  /** The current encryption state of this member's media. */
  encryptionStatus$: Behavior<EncryptionStatus>;
}
interface MemberMediaInputs extends BaseMediaViewModel {
  /** The LiveKit participant owning this media, or null if not (yet) known. */
  participant$: Behavior<LocalParticipant | RemoteParticipant | null>;
  livekitRoom$: Behavior<LivekitRoom | undefined>;
  /** Which LiveKit sources to pull the audio and video tracks from. */
  audioSource: AudioSource;
  videoSource: VideoSource;
  focusUrl$: Behavior<string | undefined>;
  encryptionSystem: EncryptionSystem;
}
/**
 * Creates the view model state shared by all member media (user media and
 * screen shares): track references, the unencrypted-media warning, and the
 * encryption status.
 */
function createMemberMedia(
  scope: ObservableScope,
  {
    participant$,
    livekitRoom$,
    audioSource,
    videoSource,
    focusUrl$,
    encryptionSystem,
    ...inputs
  }: MemberMediaInputs,
): MemberMediaViewModel {
  // Follows the participant's track reference for a given source, yielding
  // undefined while the participant is absent.
  const trackBehavior$ = (
    source: Track.Source,
  ): Behavior<TrackReference | undefined> =>
    scope.behavior(
      participant$.pipe(
        switchMap((p) =>
          !p ? of(undefined) : observeTrackReference$(p, source),
        ),
      ),
    );
  const audio$ = trackBehavior$(audioSource);
  const video$ = trackBehavior$(videoSource);
  return {
    ...createBaseMedia(inputs),
    video$,
    focusUrl$,
    // Warn when the call is supposed to be encrypted but either published
    // track is not.
    unencryptedWarning$: scope.behavior(
      combineLatest(
        [audio$, video$],
        (a, v) =>
          encryptionSystem.kind !== E2eeType.NONE &&
          (a?.publication.isEncrypted === false ||
            v?.publication.isEncrypted === false),
      ),
    ),
    encryptionStatus$: scope.behavior(
      participant$.pipe(
        switchMap((participant): Observable<EncryptionStatus> => {
          if (!participant) {
            // No LiveKit participant yet: still connecting.
            return of(EncryptionStatus.Connecting);
          } else if (
            participant.isLocal ||
            encryptionSystem.kind === E2eeType.NONE
          ) {
            // Local media and unencrypted calls have nothing to report.
            return of(EncryptionStatus.Okay);
          } else if (encryptionSystem.kind === E2eeType.PER_PARTICIPANT) {
            // Per-participant keys: distinguish missing vs invalid keys via
            // encryption error events, and infer success from frames actually
            // being received and decoded.
            return combineLatest([
              encryptionErrorObservable$(
                livekitRoom$,
                participant,
                encryptionSystem,
                "MissingKey",
              ),
              encryptionErrorObservable$(
                livekitRoom$,
                participant,
                encryptionSystem,
                "InvalidKey",
              ),
              observeRemoteTrackReceivingOkay$(participant, audioSource),
              observeRemoteTrackReceivingOkay$(participant, videoSource),
            ]).pipe(
              map(([keyMissing, keyInvalid, audioOkay, videoOkay]) => {
                if (keyMissing) return EncryptionStatus.KeyMissing;
                if (keyInvalid) return EncryptionStatus.KeyInvalid;
                if (audioOkay || videoOkay) return EncryptionStatus.Okay;
                return undefined; // no change
              }),
              filter((x) => !!x),
              startWith(EncryptionStatus.Connecting),
            );
          } else {
            // Shared-key encryption: an invalid key means the password is
            // wrong.
            return combineLatest([
              encryptionErrorObservable$(
                livekitRoom$,
                participant,
                encryptionSystem,
                "InvalidKey",
              ),
              observeRemoteTrackReceivingOkay$(participant, audioSource),
              observeRemoteTrackReceivingOkay$(participant, videoSource),
            ]).pipe(
              map(
                ([keyInvalid, audioOkay, videoOkay]):
                  | EncryptionStatus
                  | undefined => {
                  if (keyInvalid) return EncryptionStatus.PasswordInvalid;
                  if (audioOkay || videoOkay) return EncryptionStatus.Okay;
                  return undefined; // no change
                },
              ),
              filter((x) => !!x),
              startWith(EncryptionStatus.Connecting),
            );
          }
        }),
      ),
    ),
  };
}
/**
 * View model state common to local and remote user media.
 */
interface BaseUserMediaViewModel extends MemberMediaViewModel {
  type: "user";
  /** Whether the participant is currently speaking. */
  speaking$: Behavior<boolean>;
  /** Whether the participant's microphone track is present and unmuted. */
  audioEnabled$: Behavior<boolean>;
  /** Whether the participant's camera track is present and unmuted. */
  videoEnabled$: Behavior<boolean>;
  /** Whether the video should be cropped (toggled via toggleCropVideo). */
  cropVideo$: Behavior<boolean>;
  toggleCropVideo: () => void;
  /**
   * The expected identity of the LiveKit participant. Exposed for debugging.
   */
  rtcBackendIdentity: string;
  /** null while the hand is not raised; the Date is presumably when it was
   * raised — confirm against the producer of this behavior. */
  handRaised$: Behavior<Date | null>;
  /** The participant's current reaction, if any. */
  reaction$: Behavior<ReactionOption | null>;
  /** RTP stats for the audio stream; undefined while unavailable/disabled. */
  audioStreamStats$: Observable<
    RTCInboundRtpStreamStats | RTCOutboundRtpStreamStats | undefined
  >;
  /** RTP stats for the video stream; undefined while unavailable/disabled. */
  videoStreamStats$: Observable<
    RTCInboundRtpStreamStats | RTCOutboundRtpStreamStats | undefined
  >;
}
interface BaseUserMediaInputs extends Omit<
  MemberMediaInputs,
  "audioSource" | "videoSource"
> {
  rtcBackendIdentity: string;
  handRaised$: Behavior<Date | null>;
  reaction$: Behavior<ReactionOption | null>;
  /** Whether to collect inbound (remote media) or outbound (local media)
   * RTP stats. */
  statsType: "inbound-rtp" | "outbound-rtp";
}
/**
 * Creates the view model state shared by local and remote user media, fixing
 * the LiveKit sources to the microphone and camera tracks.
 */
function createBaseUserMedia(
  scope: ObservableScope,
  {
    rtcBackendIdentity,
    handRaised$,
    reaction$,
    statsType,
    ...inputs
  }: BaseUserMediaInputs,
): BaseUserMediaViewModel {
  const { participant$ } = inputs;
  // The participant's current media/publication state, or undefined while the
  // participant is absent.
  const media$ = scope.behavior(
    participant$.pipe(
      switchMap((p) => (p && observeParticipantMedia(p)) ?? of(undefined)),
    ),
  );
  const toggleCropVideo$ = new Subject<void>();
  return {
    ...createMemberMedia(scope, {
      ...inputs,
      audioSource: Track.Source.Microphone,
      videoSource: Track.Source.Camera,
    }),
    type: "user",
    speaking$: scope.behavior(
      participant$.pipe(
        switchMap((p) =>
          p
            ? observeParticipantEvents(
                p,
                ParticipantEvent.IsSpeakingChanged,
              ).pipe(map((p) => p.isSpeaking))
            : of(false),
        ),
      ),
    ),
    audioEnabled$: scope.behavior(
      media$.pipe(map((m) => m?.microphoneTrack?.isMuted === false)),
    ),
    videoEnabled$: scope.behavior(
      media$.pipe(map((m) => m?.cameraTrack?.isMuted === false)),
    ),
    cropVideo$: createToggle$(scope, true, toggleCropVideo$),
    toggleCropVideo: () => toggleCropVideo$.next(),
    rtcBackendIdentity,
    handRaised$,
    reaction$,
    audioStreamStats$: combineLatest([
      participant$,
      showConnectionStats.value$,
    ]).pipe(
      switchMap(([p, showConnectionStats]) => {
        // Only poll stats while the stats display setting is enabled.
        if (!p || !showConnectionStats) return of(undefined);
        return observeRtpStreamStats$(p, Track.Source.Microphone, statsType);
      }),
    ),
    videoStreamStats$: combineLatest([
      participant$,
      showConnectionStats.value$,
    ]).pipe(
      switchMap(([p, showConnectionStats]) => {
        if (!p || !showConnectionStats) return of(undefined);
        return observeRtpStreamStats$(p, Track.Source.Camera, statsType);
      }),
    ),
  };
}
/**
 * The view model of the local user's own camera/microphone media.
 */
export interface LocalUserMediaViewModel extends BaseUserMediaViewModel {
  local: true;
  /**
   * Whether the video should be mirrored.
   */
  mirror$: Behavior<boolean>;
  /**
   * Whether to show this tile in a highly visible location near the start of
   * the grid.
   */
  alwaysShow$: Behavior<boolean>;
  setAlwaysShow: (value: boolean) => void;
  /**
   * A callback that switches between front and back cameras, or null when
   * switching is unavailable (desktop, or an unrecognized facing mode).
   */
  switchCamera$: Behavior<(() => void) | null>;
}
export interface LocalUserMediaInputs extends Omit<
  BaseUserMediaInputs,
  "statsType"
> {
  participant$: Behavior<LocalParticipant | null>;
  mediaDevices: MediaDevices;
}
/**
 * Creates the view model for the local user's camera/microphone media.
 */
export function createLocalUserMedia(
  scope: ObservableScope,
  { mediaDevices, ...inputs }: LocalUserMediaInputs,
): LocalUserMediaViewModel {
  const baseUserMedia = createBaseUserMedia(scope, {
    ...inputs,
    // Local media is published by us, so we collect outbound stats.
    statsType: "outbound-rtp",
  });
  /**
   * The local video track as an observable that emits whenever the track
   * changes, the camera is switched, or the track is muted.
   */
  const videoTrack$: Observable<LocalVideoTrack | null> =
    baseUserMedia.video$.pipe(
      switchMap((v) => {
        const track = v?.publication.track;
        if (!(track instanceof LocalVideoTrack)) return of(null);
        return merge(
          // Watch for track restarts because they indicate a camera switch.
          // This event is also emitted when unmuting the track object.
          fromEvent(track, TrackEvent.Restarted).pipe(
            startWith(null),
            map(() => track),
          ),
          // When the track object is muted, reset it to null.
          fromEvent(track, TrackEvent.Muted).pipe(map(() => null)),
        );
      }),
    );
  return {
    ...baseUserMedia,
    local: true,
    mirror$: scope.behavior(
      videoTrack$.pipe(
        // Mirror only front-facing cameras (those that face the user)
        map(
          (track) =>
            track !== null &&
            facingModeFromLocalTrack(track).facingMode === "user",
        ),
      ),
    ),
    alwaysShow$: alwaysShowSelf.value$,
    setAlwaysShow: alwaysShowSelf.setValue,
    switchCamera$: scope.behavior(
      // No camera switching on desktop.
      platform === "desktop"
        ? of(null)
        : videoTrack$.pipe(
            map((track) => {
              if (track === null) return null;
              const facingMode = facingModeFromLocalTrack(track).facingMode;
              // If the camera isn't front or back-facing, don't provide a switch
              // camera shortcut at all
              if (facingMode !== "user" && facingMode !== "environment")
                return null;
              // Restart the track with a camera facing the opposite direction
              return (): void =>
                void track
                  .restartTrack({
                    facingMode: facingMode === "user" ? "environment" : "user",
                  })
                  .then(() => {
                    // Inform the MediaDevices which camera was chosen
                    const deviceId =
                      track.mediaStreamTrack.getSettings().deviceId;
                    if (deviceId !== undefined)
                      mediaDevices.videoInput.select(deviceId);
                  })
                  .catch((e) =>
                    logger.error("Failed to switch camera", facingMode, e),
                  );
            }),
          ),
    ),
  };
}
/**
 * The view model of a remote user's camera/microphone media.
 */
export interface RemoteUserMediaViewModel
  extends BaseUserMediaViewModel, VolumeControls {
  local: false;
  /**
   * Whether we are waiting for this user's LiveKit participant to exist. This
   * could be because either we or the remote party are still connecting.
   */
  waitingForMedia$: Behavior<boolean>;
}
export interface RemoteUserMediaInputs extends Omit<
  BaseUserMediaInputs,
  "statsType"
> {
  participant$: Behavior<RemoteParticipant | null>;
  /** Whether the UI should behave as if this member were disconnected. */
  pretendToBeDisconnected$: Behavior<boolean>;
}
/**
 * Creates the view model for a remote user's camera/microphone media.
 */
export function createRemoteUserMedia(
  scope: ObservableScope,
  { pretendToBeDisconnected$, ...inputs }: RemoteUserMediaInputs,
): RemoteUserMediaViewModel {
  const baseUserMedia = createBaseUserMedia(scope, {
    ...inputs,
    // Remote media is received by us, so we collect inbound stats.
    statsType: "inbound-rtp",
  });
  return {
    ...baseUserMedia,
    ...createVolumeControls(scope, {
      pretendToBeDisconnected$,
      // Volume changes are forwarded to the LiveKit participant, once present.
      sink$: scope.behavior(
        inputs.participant$.pipe(map((p) => (volume) => p?.setVolume(volume))),
      ),
    }),
    local: false,
    // Override the base behaviors so that a simulated disconnect silences and
    // hides the participant.
    speaking$: scope.behavior(
      pretendToBeDisconnected$.pipe(
        switchMap((disconnected) =>
          disconnected ? of(false) : baseUserMedia.speaking$,
        ),
      ),
    ),
    videoEnabled$: scope.behavior(
      pretendToBeDisconnected$.pipe(
        switchMap((disconnected) =>
          disconnected ? of(false) : baseUserMedia.videoEnabled$,
        ),
      ),
    ),
    waitingForMedia$: scope.behavior(
      combineLatest(
        [inputs.livekitRoom$, inputs.participant$],
        (livekitRoom, participant) =>
          // If livekitRoom is undefined, the user is not attempting to publish on
          // any transport and so we shouldn't expect a participant. (They might
          // be a subscribe-only bot for example.)
          livekitRoom !== undefined && participant === null,
      ),
    ),
  };
}
interface BaseScreenShareViewModel extends MemberMediaViewModel {
  type: "screen share";
}
type BaseScreenShareInputs = Omit<
  MemberMediaInputs,
  "audioSource" | "videoSource"
>;
/**
 * Builds the view model state common to local and remote screen shares,
 * fixing the LiveKit sources to the screen-share audio/video tracks.
 */
function createBaseScreenShare(
  scope: ObservableScope,
  inputs: BaseScreenShareInputs,
): BaseScreenShareViewModel {
  const memberMedia = createMemberMedia(scope, {
    ...inputs,
    audioSource: Track.Source.ScreenShareAudio,
    videoSource: Track.Source.ScreenShare,
  });
  return { ...memberMedia, type: "screen share" };
}
export interface LocalScreenShareViewModel extends BaseScreenShareViewModel {
  local: true;
}
interface LocalScreenShareInputs extends BaseScreenShareInputs {
  participant$: Behavior<LocalParticipant | null>;
}
/** Creates the view model for the local user's own screen share. */
export function createLocalScreenShare(
  scope: ObservableScope,
  inputs: LocalScreenShareInputs,
): LocalScreenShareViewModel {
  const base = createBaseScreenShare(scope, inputs);
  return { ...base, local: true };
}
export interface RemoteScreenShareViewModel extends BaseScreenShareViewModel {
  local: false;
  /**
   * Whether this screen share's video should be displayed.
   */
  videoEnabled$: Behavior<boolean>;
}
interface RemoteScreenShareInputs extends BaseScreenShareInputs {
  participant$: Behavior<RemoteParticipant | null>;
  pretendToBeDisconnected$: Behavior<boolean>;
}
/** Creates the view model for another member's screen share. */
export function createRemoteScreenShare(
  scope: ObservableScope,
  { pretendToBeDisconnected$, ...inputs }: RemoteScreenShareInputs,
): RemoteScreenShareViewModel {
  // Hide the video while we're simulating a disconnect.
  const videoEnabled$ = scope.behavior(
    pretendToBeDisconnected$.pipe(map((disconnected) => !disconnected)),
  );
  return {
    ...createBaseScreenShare(scope, inputs),
    local: false,
    videoEnabled$,
  };
}
/**
 * Some participant's media (either their user media or a screen share).
 */
export type MediaViewModel = UserMediaViewModel | ScreenShareViewModel;
/**
 * Some participant's user media (i.e. their microphone and camera feed).
 */
export type UserMediaViewModel =
  | LocalUserMediaViewModel
  | RemoteUserMediaViewModel;
/**
 * Some participant's screen share media.
 */
export type ScreenShareViewModel =
  | LocalScreenShareViewModel
  | RemoteScreenShareViewModel;

View File

@@ -8,10 +8,11 @@ Please see LICENSE in the repository root for full details.
import { BehaviorSubject } from "rxjs";
import { logger } from "matrix-js-sdk/lib/logger";
import { type MediaViewModel, type UserMediaViewModel } from "./MediaViewModel";
import { GridTileViewModel, SpotlightTileViewModel } from "./TileViewModel";
import { fillGaps } from "../utils/iter";
import { debugTileLayout } from "../settings/settings";
import { type MediaViewModel } from "./media/MediaViewModel";
import { type UserMediaViewModel } from "./media/UserMediaViewModel";
function debugEntries(entries: GridTileData[]): string[] {
return entries.map((e) => e.media.displayName$.value);

View File

@@ -5,8 +5,9 @@ SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { type MediaViewModel, type UserMediaViewModel } from "./MediaViewModel";
import { type Behavior } from "./Behavior";
import { type MediaViewModel } from "./media/MediaViewModel";
import { type UserMediaViewModel } from "./media/UserMediaViewModel";
let nextId = 0;
function createId(): string {

View File

@@ -5,16 +5,14 @@ SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { type LocalUserMediaViewModel } from "./media/LocalUserMediaViewModel.ts";
import { type MediaViewModel } from "./media/MediaViewModel.ts";
import { type RemoteUserMediaViewModel } from "./media/RemoteUserMediaViewModel.ts";
import { type UserMediaViewModel } from "./media/UserMediaViewModel.ts";
import {
type GridTileViewModel,
type SpotlightTileViewModel,
} from "./TileViewModel.ts";
import {
type LocalUserMediaViewModel,
type RemoteUserMediaViewModel,
type MediaViewModel,
type UserMediaViewModel,
} from "./MediaViewModel.ts";
export interface GridLayoutMedia {
type: "grid";

View File

@@ -0,0 +1,32 @@
/*
Copyright 2023, 2024 New Vector Ltd.
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { type LocalParticipant } from "livekit-client";
import { type Behavior } from "../Behavior";
import {
type BaseScreenShareInputs,
type BaseScreenShareViewModel,
createBaseScreenShare,
} from "./ScreenShareViewModel";
import { type ObservableScope } from "../ObservableScope";
export interface LocalScreenShareViewModel extends BaseScreenShareViewModel {
  local: true;
}
export interface LocalScreenShareInputs extends BaseScreenShareInputs {
  participant$: Behavior<LocalParticipant | null>;
}
/** Creates the view model for the local user's own screen share. */
export function createLocalScreenShare(
  scope: ObservableScope,
  inputs: LocalScreenShareInputs,
): LocalScreenShareViewModel {
  const base = createBaseScreenShare(scope, inputs);
  return { ...base, local: true };
}

View File

@@ -0,0 +1,137 @@
/*
Copyright 2023, 2024 New Vector Ltd.
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import {
facingModeFromLocalTrack,
type LocalParticipant,
LocalVideoTrack,
TrackEvent,
} from "livekit-client";
import {
fromEvent,
map,
merge,
type Observable,
of,
startWith,
switchMap,
} from "rxjs";
import { logger } from "matrix-js-sdk/lib/logger";
import { type Behavior } from "../Behavior";
import {
type BaseUserMediaInputs,
type BaseUserMediaViewModel,
createBaseUserMedia,
} from "./UserMediaViewModel";
import { type ObservableScope } from "../ObservableScope";
import { alwaysShowSelf } from "../../settings/settings";
import { platform } from "../../Platform";
import { type MediaDevices } from "../MediaDevices";
/**
 * The view model of the local user's own camera/microphone media.
 */
export interface LocalUserMediaViewModel extends BaseUserMediaViewModel {
  local: true;
  /**
   * Whether the video should be mirrored.
   */
  mirror$: Behavior<boolean>;
  /**
   * Whether to show this tile in a highly visible location near the start of
   * the grid.
   */
  alwaysShow$: Behavior<boolean>;
  setAlwaysShow: (value: boolean) => void;
  /**
   * A callback that switches between front and back cameras, or null when
   * switching is unavailable (desktop, or an unrecognized facing mode).
   */
  switchCamera$: Behavior<(() => void) | null>;
}
export interface LocalUserMediaInputs extends Omit<
  BaseUserMediaInputs,
  "statsType"
> {
  participant$: Behavior<LocalParticipant | null>;
  mediaDevices: MediaDevices;
}
/**
 * Creates the view model for the local user's camera/microphone media.
 */
export function createLocalUserMedia(
  scope: ObservableScope,
  { mediaDevices, ...inputs }: LocalUserMediaInputs,
): LocalUserMediaViewModel {
  const baseUserMedia = createBaseUserMedia(scope, {
    ...inputs,
    // Local media is published by us, so we collect outbound stats.
    statsType: "outbound-rtp",
  });
  /**
   * The local video track as an observable that emits whenever the track
   * changes, the camera is switched, or the track is muted.
   */
  const videoTrack$: Observable<LocalVideoTrack | null> =
    baseUserMedia.video$.pipe(
      switchMap((v) => {
        const track = v?.publication.track;
        if (!(track instanceof LocalVideoTrack)) return of(null);
        return merge(
          // Watch for track restarts because they indicate a camera switch.
          // This event is also emitted when unmuting the track object.
          fromEvent(track, TrackEvent.Restarted).pipe(
            startWith(null),
            map(() => track),
          ),
          // When the track object is muted, reset it to null.
          fromEvent(track, TrackEvent.Muted).pipe(map(() => null)),
        );
      }),
    );
  return {
    ...baseUserMedia,
    local: true,
    mirror$: scope.behavior(
      videoTrack$.pipe(
        // Mirror only front-facing cameras (those that face the user)
        map(
          (track) =>
            track !== null &&
            facingModeFromLocalTrack(track).facingMode === "user",
        ),
      ),
    ),
    alwaysShow$: alwaysShowSelf.value$,
    setAlwaysShow: alwaysShowSelf.setValue,
    switchCamera$: scope.behavior(
      // No camera switching on desktop.
      platform === "desktop"
        ? of(null)
        : videoTrack$.pipe(
            map((track) => {
              if (track === null) return null;
              const facingMode = facingModeFromLocalTrack(track).facingMode;
              // If the camera isn't front or back-facing, don't provide a switch
              // camera shortcut at all
              if (facingMode !== "user" && facingMode !== "environment")
                return null;
              // Restart the track with a camera facing the opposite direction
              return (): void =>
                void track
                  .restartTrack({
                    facingMode: facingMode === "user" ? "environment" : "user",
                  })
                  .then(() => {
                    // Inform the MediaDevices which camera was chosen
                    const deviceId =
                      track.mediaStreamTrack.getSettings().deviceId;
                    if (deviceId !== undefined)
                      mediaDevices.videoInput.select(deviceId);
                  })
                  .catch((e) =>
                    logger.error("Failed to switch camera", facingMode, e),
                  );
            }),
          ),
    ),
  };
}

View File

@@ -13,22 +13,24 @@ import {
} from "livekit-client";
import { observeParticipantEvents } from "@livekit/components-core";
import { type ObservableScope } from "./ObservableScope.ts";
import {
createLocalScreenShare,
createLocalUserMedia,
createRemoteScreenShare,
createRemoteUserMedia,
type ScreenShareViewModel,
type UserMediaViewModel,
type LocalUserMediaInputs,
type RemoteUserMediaInputs,
} from "./MediaViewModel.ts";
import type { Behavior } from "./Behavior.ts";
import type { MediaDevices } from "./MediaDevices.ts";
import { type ObservableScope } from "../ObservableScope.ts";
import type { Behavior } from "../Behavior.ts";
import type { MediaDevices } from "../MediaDevices.ts";
import { observeSpeaker$ } from "./observeSpeaker.ts";
import { generateItems } from "../utils/observable.ts";
import { type TaggedParticipant } from "./CallViewModel/remoteMembers/MatrixLivekitMembers.ts";
import { generateItems } from "../../utils/observable.ts";
import { type TaggedParticipant } from "../CallViewModel/remoteMembers/MatrixLivekitMembers.ts";
import { type UserMediaViewModel } from "./UserMediaViewModel.ts";
import { type ScreenShareViewModel } from "./ScreenShareViewModel.ts";
import {
createLocalUserMedia,
type LocalUserMediaInputs,
} from "./LocalUserMediaViewModel.ts";
import {
createRemoteUserMedia,
type RemoteUserMediaInputs,
} from "./RemoteUserMediaViewModel.ts";
import { createLocalScreenShare } from "./LocalScreenShareViewModel.ts";
import { createRemoteScreenShare } from "./RemoteScreenShareViewModel.ts";
/**
* Sorting bins defining the order in which media tiles appear in the layout.

View File

@@ -21,9 +21,8 @@ import {
mockRemoteMedia,
withTestScheduler,
mockRemoteParticipant,
} from "../utils/test";
import { getValue } from "../utils/observable";
import { constant } from "./Behavior";
} from "../../utils/test";
import { constant } from "../Behavior";
global.MediaStreamTrack = class {} as unknown as {
new (): MediaStreamTrack;
@@ -35,7 +34,7 @@ global.MediaStream = class {} as unknown as {
};
const platformMock = vi.hoisted(() => vi.fn(() => "desktop"));
vi.mock("../Platform", () => ({
vi.mock("../../Platform", () => ({
get platform(): string {
return platformMock();
},
@@ -184,7 +183,7 @@ test("switch cameras", async () => {
);
// Switch to back camera
getValue(vm.switchCamera$)!();
vm.switchCamera$.value!();
expect(restartTrack).toHaveBeenCalledExactlyOnceWith({
facingMode: "environment",
});
@@ -195,7 +194,7 @@ test("switch cameras", async () => {
expect(deviceId).toBe("back camera");
// Switch to front camera
getValue(vm.switchCamera$)!();
vm.switchCamera$.value!();
expect(restartTrack).toHaveBeenCalledTimes(2);
expect(restartTrack).toHaveBeenLastCalledWith({ facingMode: "user" });
await waitFor(() => {

View File

@@ -0,0 +1,44 @@
/*
Copyright 2023, 2024 New Vector Ltd.
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { type Behavior } from "../Behavior";
import { type ScreenShareViewModel } from "./ScreenShareViewModel";
import { type UserMediaViewModel } from "./UserMediaViewModel";
/**
 * A participant's media (either their user media or a screen share).
 */
export type MediaViewModel = UserMediaViewModel | ScreenShareViewModel;
/**
 * Properties which are common to all MediaViewModels.
 */
export interface BaseMediaViewModel {
  /**
   * An opaque identifier for this media.
   */
  id: string;
  /**
   * The Matrix user to which this media belongs.
   */
  userId: string;
  /** The user's current display name. */
  displayName$: Behavior<string>;
  /** The user's avatar URL (an MXC URI, judging by the name), if any. */
  mxcAvatarUrl$: Behavior<string | undefined>;
}
export type BaseMediaInputs = BaseMediaViewModel;
/**
 * Builds a BaseMediaViewModel, discarding any extra properties present on the
 * input object.
 */
export function createBaseMedia(inputs: BaseMediaInputs): BaseMediaViewModel {
  const { id, userId, displayName$, mxcAvatarUrl$ } = inputs;
  return { id, userId, displayName$, mxcAvatarUrl$ };
}

View File

@@ -0,0 +1,280 @@
/*
Copyright 2023, 2024 New Vector Ltd.
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import {
type Room as LivekitRoom,
RoomEvent as LivekitRoomEvent,
type Participant,
type Track,
} from "livekit-client";
import {
type AudioSource,
roomEventSelector,
type TrackReference,
type VideoSource,
} from "@livekit/components-core";
import { type LocalParticipant, type RemoteParticipant } from "livekit-client";
import {
combineLatest,
distinctUntilChanged,
filter,
map,
type Observable,
of,
startWith,
switchMap,
throttleTime,
} from "rxjs";
import { type Behavior } from "../Behavior";
import { type BaseMediaViewModel, createBaseMedia } from "./MediaViewModel";
import { type EncryptionSystem } from "../../e2ee/sharedKeyManagement";
import { type ObservableScope } from "../ObservableScope";
import { observeTrackReference$ } from "../observeTrackReference";
import { E2eeType } from "../../e2ee/e2eeType";
import { observeRtpStreamStats$ } from "./observeRtpStreamStats";
/**
 * The state of a member's media encryption, as far as we can determine it.
 */
export enum EncryptionStatus {
  Connecting,
  Okay,
  KeyMissing,
  KeyInvalid,
  PasswordInvalid,
}
/**
 * Media belonging to an active member of the RTC session.
 */
export interface MemberMediaViewModel extends BaseMediaViewModel {
  /**
   * The LiveKit video track for this media.
   */
  video$: Behavior<TrackReference | undefined>;
  /**
   * The URL of the LiveKit focus on which this member should be publishing.
   * Exposed for debugging.
   */
  focusUrl$: Behavior<string | undefined>;
  /**
   * Whether there should be a warning that this media is unencrypted.
   */
  unencryptedWarning$: Behavior<boolean>;
  /** The current encryption state of this member's media. */
  encryptionStatus$: Behavior<EncryptionStatus>;
}
export interface MemberMediaInputs extends BaseMediaViewModel {
  /** The LiveKit participant owning this media, or null if not (yet) known. */
  participant$: Behavior<LocalParticipant | RemoteParticipant | null>;
  livekitRoom$: Behavior<LivekitRoom | undefined>;
  /** Which LiveKit sources to pull the audio and video tracks from. */
  audioSource: AudioSource;
  videoSource: VideoSource;
  focusUrl$: Behavior<string | undefined>;
  encryptionSystem: EncryptionSystem;
}
/**
 * Creates the view model state shared by all member media (user media and
 * screen shares): track references, the unencrypted-media warning, and the
 * encryption status.
 */
export function createMemberMedia(
  scope: ObservableScope,
  {
    participant$,
    livekitRoom$,
    audioSource,
    videoSource,
    focusUrl$,
    encryptionSystem,
    ...inputs
  }: MemberMediaInputs,
): MemberMediaViewModel {
  // Follows the participant's track reference for a given source, yielding
  // undefined while the participant is absent.
  const trackBehavior$ = (
    source: Track.Source,
  ): Behavior<TrackReference | undefined> =>
    scope.behavior(
      participant$.pipe(
        switchMap((p) =>
          !p ? of(undefined) : observeTrackReference$(p, source),
        ),
      ),
    );
  const audio$ = trackBehavior$(audioSource);
  const video$ = trackBehavior$(videoSource);
  return {
    ...createBaseMedia(inputs),
    video$,
    focusUrl$,
    // Warn when the call is supposed to be encrypted but either published
    // track is not.
    unencryptedWarning$: scope.behavior(
      combineLatest(
        [audio$, video$],
        (a, v) =>
          encryptionSystem.kind !== E2eeType.NONE &&
          (a?.publication.isEncrypted === false ||
            v?.publication.isEncrypted === false),
      ),
    ),
    encryptionStatus$: scope.behavior(
      participant$.pipe(
        switchMap((participant): Observable<EncryptionStatus> => {
          if (!participant) {
            // No LiveKit participant yet: still connecting.
            return of(EncryptionStatus.Connecting);
          } else if (
            participant.isLocal ||
            encryptionSystem.kind === E2eeType.NONE
          ) {
            // Local media and unencrypted calls have nothing to report.
            return of(EncryptionStatus.Okay);
          } else if (encryptionSystem.kind === E2eeType.PER_PARTICIPANT) {
            // Per-participant keys: distinguish missing vs invalid keys via
            // encryption error events, and infer success from frames actually
            // being received and decoded.
            return combineLatest([
              encryptionErrorObservable$(
                livekitRoom$,
                participant,
                encryptionSystem,
                "MissingKey",
              ),
              encryptionErrorObservable$(
                livekitRoom$,
                participant,
                encryptionSystem,
                "InvalidKey",
              ),
              observeRemoteTrackReceivingOkay$(participant, audioSource),
              observeRemoteTrackReceivingOkay$(participant, videoSource),
            ]).pipe(
              map(([keyMissing, keyInvalid, audioOkay, videoOkay]) => {
                if (keyMissing) return EncryptionStatus.KeyMissing;
                if (keyInvalid) return EncryptionStatus.KeyInvalid;
                if (audioOkay || videoOkay) return EncryptionStatus.Okay;
                return undefined; // no change
              }),
              filter((x) => !!x),
              startWith(EncryptionStatus.Connecting),
            );
          } else {
            // Shared-key encryption: an invalid key means the password is
            // wrong.
            return combineLatest([
              encryptionErrorObservable$(
                livekitRoom$,
                participant,
                encryptionSystem,
                "InvalidKey",
              ),
              observeRemoteTrackReceivingOkay$(participant, audioSource),
              observeRemoteTrackReceivingOkay$(participant, videoSource),
            ]).pipe(
              map(
                ([keyInvalid, audioOkay, videoOkay]):
                  | EncryptionStatus
                  | undefined => {
                  if (keyInvalid) return EncryptionStatus.PasswordInvalid;
                  if (audioOkay || videoOkay) return EncryptionStatus.Okay;
                  return undefined; // no change
                },
              ),
              filter((x) => !!x),
              startWith(EncryptionStatus.Connecting),
            );
          }
        }),
      ),
    ),
  };
}
/**
 * Observes the RTP stream stats for the given participant and source,
 * narrowed to the inbound-rtp variant.
 */
function observeInboundRtpStreamStats$(
  participant: Participant,
  source: Track.Source,
): Observable<RTCInboundRtpStreamStats | undefined> {
  const stats$ = observeRtpStreamStats$(participant, source, "inbound-rtp");
  // Requesting "inbound-rtp" guarantees the inbound stats shape; this map is
  // a type-level cast with no runtime effect.
  return stats$.pipe(
    map((stats) => stats as RTCInboundRtpStreamStats | undefined),
  );
}
/**
 * Watches the LiveKit room for encryption errors concerning the given
 * participant, emitting whether such an error is currently being reported.
 *
 * @param criteria A substring matched against the error message (e.g.
 *   "MissingKey" or "InvalidKey") selecting which kind of error to watch for.
 */
function encryptionErrorObservable$(
  room$: Behavior<LivekitRoom | undefined>,
  participant: Participant,
  encryptionSystem: EncryptionSystem,
  criteria: string,
): Observable<boolean> {
  return room$.pipe(
    switchMap((room) => {
      // No room means nothing is being published, so no errors to report.
      if (room === undefined) return of(false);
      return roomEventSelector(room, LivekitRoomEvent.EncryptionError).pipe(
        map((e) => {
          const [err] = e;
          if (encryptionSystem.kind === E2eeType.PER_PARTICIPANT) {
            return (
              // Ideally we would pull the participant identity from the field on the error.
              // However, it gets lost in the serialization process between workers.
              // So, instead we do a string match
              (err?.message.includes(participant.identity) &&
                err?.message.includes(criteria)) ??
              false
            );
          } else if (encryptionSystem.kind === E2eeType.SHARED_KEY) {
            return !!err?.message.includes(criteria);
          }
          return false;
        }),
      );
    }),
    distinctUntilChanged(),
    // NOTE(review): throttleTime drops trailing emissions by default, so a
    // state change arriving within the 1 s window is only observed once a
    // later event fires — confirm this is intended.
    throttleTime(1000), // Throttle to avoid spamming the UI
    startWith(false),
  );
}
function observeRemoteTrackReceivingOkay$(
participant: Participant,
source: Track.Source,
): Observable<boolean | undefined> {
let lastStats: {
framesDecoded: number | undefined;
framesDropped: number | undefined;
framesReceived: number | undefined;
} = {
framesDecoded: undefined,
framesDropped: undefined,
framesReceived: undefined,
};
return observeInboundRtpStreamStats$(participant, source).pipe(
map((stats) => {
if (!stats) return undefined;
const { framesDecoded, framesDropped, framesReceived } = stats;
return {
framesDecoded,
framesDropped,
framesReceived,
};
}),
filter((newStats) => !!newStats),
map((newStats): boolean | undefined => {
const oldStats = lastStats;
lastStats = newStats;
if (
typeof newStats.framesReceived === "number" &&
typeof oldStats.framesReceived === "number" &&
typeof newStats.framesDecoded === "number" &&
typeof oldStats.framesDecoded === "number"
) {
const framesReceivedDelta =
newStats.framesReceived - oldStats.framesReceived;
const framesDecodedDelta =
newStats.framesDecoded - oldStats.framesDecoded;
// if we received >0 frames and managed to decode >0 frames then we treat that as success
if (framesReceivedDelta > 0) {
return framesDecodedDelta > 0;
}
}
// no change
return undefined;
}),
filter((x) => typeof x === "boolean"),
startWith(undefined),
);
}

View File

@@ -0,0 +1,44 @@
/*
Copyright 2023, 2024 New Vector Ltd.
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { type RemoteParticipant } from "livekit-client";
import { map } from "rxjs";
import { type Behavior } from "../Behavior";
import {
type BaseScreenShareInputs,
type BaseScreenShareViewModel,
createBaseScreenShare,
} from "./ScreenShareViewModel";
import { type ObservableScope } from "../ObservableScope";
/** A remote member's screen share media. */
export interface RemoteScreenShareViewModel extends BaseScreenShareViewModel {
  // Discriminant distinguishing remote from local screen share view models.
  local: false;
  /**
   * Whether this screen share's video should be displayed.
   */
  videoEnabled$: Behavior<boolean>;
}
/** Inputs for {@link createRemoteScreenShare}. */
export interface RemoteScreenShareInputs extends BaseScreenShareInputs {
  /** The remote LiveKit participant, or null while none exists. */
  participant$: Behavior<RemoteParticipant | null>;
  // When true, present this member's media as if it were disconnected
  // (hides the screen share video).
  pretendToBeDisconnected$: Behavior<boolean>;
}
/**
 * Creates the view model for a remote member's screen share, hiding the
 * video while we are pretending the member is disconnected.
 */
export function createRemoteScreenShare(
  scope: ObservableScope,
  inputs: RemoteScreenShareInputs,
): RemoteScreenShareViewModel {
  const { pretendToBeDisconnected$, ...baseInputs } = inputs;
  const base = createBaseScreenShare(scope, baseInputs);
  // Show video exactly when we are not pretending to be disconnected.
  const videoEnabled$ = scope.behavior(
    pretendToBeDisconnected$.pipe(map((disconnected) => !disconnected)),
  );
  return { ...base, local: false, videoEnabled$ };
}

View File

@@ -0,0 +1,82 @@
/*
Copyright 2023, 2024 New Vector Ltd.
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { type RemoteParticipant } from "livekit-client";
import { combineLatest, map, of, switchMap } from "rxjs";
import { type Behavior } from "../Behavior";
import { createVolumeControls, type VolumeControls } from "../VolumeControls";
import {
type BaseUserMediaInputs,
type BaseUserMediaViewModel,
createBaseUserMedia,
} from "./UserMediaViewModel";
import { type ObservableScope } from "../ObservableScope";
/** A remote member's microphone/camera media, including volume controls. */
export interface RemoteUserMediaViewModel
  extends BaseUserMediaViewModel, VolumeControls {
  // Discriminant distinguishing remote from local user media view models.
  local: false;
  /**
   * Whether we are waiting for this user's LiveKit participant to exist. This
   * could be because either we or the remote party are still connecting.
   */
  waitingForMedia$: Behavior<boolean>;
}
// statsType is omitted because remote media always reads inbound RTP stats.
export interface RemoteUserMediaInputs extends Omit<
  BaseUserMediaInputs,
  "statsType"
> {
  /** The remote LiveKit participant, or null while none exists. */
  participant$: Behavior<RemoteParticipant | null>;
  // When true, present this member's media as if it were disconnected
  // (speaking and video indicators are forced off).
  pretendToBeDisconnected$: Behavior<boolean>;
}
/**
 * Creates the view model for a remote member's microphone/camera media,
 * layering volume controls and disconnection masking over the base user
 * media state.
 */
export function createRemoteUserMedia(
  scope: ObservableScope,
  { pretendToBeDisconnected$, ...inputs }: RemoteUserMediaInputs,
): RemoteUserMediaViewModel {
  // Remote media reads inbound RTP stats (we receive this member's streams).
  const baseUserMedia = createBaseUserMedia(scope, {
    ...inputs,
    statsType: "inbound-rtp",
  });
  return {
    ...baseUserMedia,
    // Volume changes are forwarded to the LiveKit participant, if present.
    ...createVolumeControls(scope, {
      pretendToBeDisconnected$,
      sink$: scope.behavior(
        inputs.participant$.pipe(map((p) => (volume) => p?.setVolume(volume))),
      ),
    }),
    local: false,
    // While pretending to be disconnected, mask speaking and video as off;
    // otherwise defer to the base user media state.
    speaking$: scope.behavior(
      pretendToBeDisconnected$.pipe(
        switchMap((disconnected) =>
          disconnected ? of(false) : baseUserMedia.speaking$,
        ),
      ),
    ),
    videoEnabled$: scope.behavior(
      pretendToBeDisconnected$.pipe(
        switchMap((disconnected) =>
          disconnected ? of(false) : baseUserMedia.videoEnabled$,
        ),
      ),
    ),
    waitingForMedia$: scope.behavior(
      combineLatest(
        [inputs.livekitRoom$, inputs.participant$],
        (livekitRoom, participant) =>
          // If livekitRoom is undefined, the user is not attempting to publish on
          // any transport and so we shouldn't expect a participant. (They might
          // be a subscribe-only bot for example.)
          livekitRoom !== undefined && participant === null,
      ),
    ),
  };
}

View File

@@ -0,0 +1,51 @@
/*
Copyright 2023, 2024 New Vector Ltd.
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { Track } from "livekit-client";
import { type ObservableScope } from "../ObservableScope";
import { type LocalScreenShareViewModel } from "./LocalScreenShareViewModel";
import {
createMemberMedia,
type MemberMediaInputs,
type MemberMediaViewModel,
} from "./MemberMediaViewModel";
import { type RemoteScreenShareViewModel } from "./RemoteScreenShareViewModel";
/**
 * A participant's screen share media.
 */
export type ScreenShareViewModel =
  | LocalScreenShareViewModel
  | RemoteScreenShareViewModel;
/**
 * Properties which are common to all ScreenShareViewModels.
 */
export interface BaseScreenShareViewModel extends MemberMediaViewModel {
  // Discriminant distinguishing screen shares from user media.
  type: "screen share";
}
// Screen share media always uses the fixed ScreenShare/ScreenShareAudio
// track sources, so callers don't supply them.
export type BaseScreenShareInputs = Omit<
  MemberMediaInputs,
  "audioSource" | "videoSource"
>;
/**
 * Builds the state common to all screen share view models by wiring member
 * media to the screen share track sources.
 */
export function createBaseScreenShare(
  scope: ObservableScope,
  inputs: BaseScreenShareInputs,
): BaseScreenShareViewModel {
  const memberMedia = createMemberMedia(scope, {
    ...inputs,
    audioSource: Track.Source.ScreenShareAudio,
    videoSource: Track.Source.ScreenShare,
  });
  return { ...memberMedia, type: "screen share" };
}

View File

@@ -0,0 +1,143 @@
/*
Copyright 2023, 2024 New Vector Ltd.
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import {
combineLatest,
map,
type Observable,
of,
Subject,
switchMap,
} from "rxjs";
import {
observeParticipantEvents,
observeParticipantMedia,
} from "@livekit/components-core";
import { ParticipantEvent, Track } from "livekit-client";
import { type ReactionOption } from "../../reactions";
import { type Behavior } from "../Behavior";
import { type LocalUserMediaViewModel } from "./LocalUserMediaViewModel";
import {
createMemberMedia,
type MemberMediaInputs,
type MemberMediaViewModel,
} from "./MemberMediaViewModel";
import { type RemoteUserMediaViewModel } from "./RemoteUserMediaViewModel";
import { type ObservableScope } from "../ObservableScope";
import { createToggle$ } from "../../utils/observable";
import { showConnectionStats } from "../../settings/settings";
import { observeRtpStreamStats$ } from "./observeRtpStreamStats";
/**
 * A participant's user media (i.e. their microphone and camera feed).
 */
export type UserMediaViewModel =
  | LocalUserMediaViewModel
  | RemoteUserMediaViewModel;
/** Properties which are common to all UserMediaViewModels. */
export interface BaseUserMediaViewModel extends MemberMediaViewModel {
  // Discriminant distinguishing user media from screen shares.
  type: "user";
  /** Whether the participant is currently speaking. */
  speaking$: Behavior<boolean>;
  /** Whether the microphone track exists and is unmuted. */
  audioEnabled$: Behavior<boolean>;
  /** Whether the camera track exists and is unmuted. */
  videoEnabled$: Behavior<boolean>;
  /** Whether the video should be cropped to fit its tile (defaults to true). */
  cropVideo$: Behavior<boolean>;
  /** Flips the cropVideo$ state. */
  toggleCropVideo: () => void;
  /**
   * The expected identity of the LiveKit participant. Exposed for debugging.
   */
  rtcBackendIdentity: string;
  // Presumably the time at which the member raised their hand, or null when
  // not raised — passed through from the inputs; confirm against the caller.
  handRaised$: Behavior<Date | null>;
  /** The member's current reaction, or null when there is none. */
  reaction$: Behavior<ReactionOption | null>;
  // RTP stats for the microphone track; emits undefined unless the
  // "show connection stats" setting is enabled.
  audioStreamStats$: Observable<
    RTCInboundRtpStreamStats | RTCOutboundRtpStreamStats | undefined
  >;
  // RTP stats for the camera track; emits undefined unless the
  // "show connection stats" setting is enabled.
  videoStreamStats$: Observable<
    RTCInboundRtpStreamStats | RTCOutboundRtpStreamStats | undefined
  >;
}
/** Inputs for {@link createBaseUserMedia}. */
export interface BaseUserMediaInputs extends Omit<
  MemberMediaInputs,
  "audioSource" | "videoSource"
> {
  rtcBackendIdentity: string;
  handRaised$: Behavior<Date | null>;
  reaction$: Behavior<ReactionOption | null>;
  /** Whether stream stats read the inbound (remote) or outbound (local) RTP side. */
  statsType: "inbound-rtp" | "outbound-rtp";
}
export function createBaseUserMedia(
scope: ObservableScope,
{
rtcBackendIdentity,
handRaised$,
reaction$,
statsType,
...inputs
}: BaseUserMediaInputs,
): BaseUserMediaViewModel {
const { participant$ } = inputs;
const media$ = scope.behavior(
participant$.pipe(
switchMap((p) => (p && observeParticipantMedia(p)) ?? of(undefined)),
),
);
const toggleCropVideo$ = new Subject<void>();
return {
...createMemberMedia(scope, {
...inputs,
audioSource: Track.Source.Microphone,
videoSource: Track.Source.Camera,
}),
type: "user",
speaking$: scope.behavior(
participant$.pipe(
switchMap((p) =>
p
? observeParticipantEvents(
p,
ParticipantEvent.IsSpeakingChanged,
).pipe(map((p) => p.isSpeaking))
: of(false),
),
),
),
audioEnabled$: scope.behavior(
media$.pipe(map((m) => m?.microphoneTrack?.isMuted === false)),
),
videoEnabled$: scope.behavior(
media$.pipe(map((m) => m?.cameraTrack?.isMuted === false)),
),
cropVideo$: createToggle$(scope, true, toggleCropVideo$),
toggleCropVideo: () => toggleCropVideo$.next(),
rtcBackendIdentity,
handRaised$,
reaction$,
audioStreamStats$: combineLatest([
participant$,
showConnectionStats.value$,
]).pipe(
switchMap(([p, showConnectionStats]) => {
//
if (!p || !showConnectionStats) return of(undefined);
return observeRtpStreamStats$(p, Track.Source.Microphone, statsType);
}),
),
videoStreamStats$: combineLatest([
participant$,
showConnectionStats.value$,
]).pipe(
switchMap(([p, showConnectionStats]) => {
if (!p || !showConnectionStats) return of(undefined);
return observeRtpStreamStats$(p, Track.Source.Camera, statsType);
}),
),
};
}

View File

@@ -0,0 +1,59 @@
/*
Copyright 2023, 2024 New Vector Ltd.
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import {
LocalTrack,
type Participant,
RemoteTrack,
type Track,
} from "livekit-client";
import {
combineLatest,
interval,
type Observable,
startWith,
switchMap,
} from "rxjs";
import { observeTrackReference$ } from "../observeTrackReference";
export function observeRtpStreamStats$(
participant: Participant,
source: Track.Source,
type: "inbound-rtp" | "outbound-rtp",
): Observable<
RTCInboundRtpStreamStats | RTCOutboundRtpStreamStats | undefined
> {
return combineLatest([
observeTrackReference$(participant, source),
interval(1000).pipe(startWith(0)),
]).pipe(
switchMap(async ([trackReference]) => {
const track = trackReference?.publication?.track;
if (
!track ||
!(track instanceof RemoteTrack || track instanceof LocalTrack)
) {
return undefined;
}
const report = await track.getRTCStatsReport();
if (!report) {
return undefined;
}
for (const v of report.values()) {
if (v.type === type) {
return v;
}
}
return undefined;
}),
startWith(undefined),
);
}

View File

@@ -7,7 +7,7 @@ Please see LICENSE in the repository root for full details.
import { describe, test } from "vitest";
import { withTestScheduler } from "../utils/test";
import { withTestScheduler } from "../../utils/test";
import { observeSpeaker$ } from "./observeSpeaker";
const yesNo = {

View File

@@ -0,0 +1,28 @@
/*
Copyright 2023, 2024 New Vector Ltd.
Copyright 2026 Element Creations Ltd.
SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import {
observeParticipantMedia,
type TrackReference,
} from "@livekit/components-core";
import { type Participant, type Track } from "livekit-client";
import { distinctUntilChanged, map, type Observable } from "rxjs";
/**
 * Reactively reads a participant's track reference for a given media source.
 *
 * Re-reads the publication whenever the participant's media changes, dedupes
 * on the publication object, and wraps it in a TrackReference (or undefined
 * when nothing is published for that source).
 */
export function observeTrackReference$(
  participant: Participant,
  source: Track.Source,
): Observable<TrackReference | undefined> {
  const media$ = observeParticipantMedia(participant);
  return media$.pipe(
    map(() => participant.getTrackPublication(source)),
    distinctUntilChanged(),
    map((publication) =>
      publication ? { participant, publication, source } : publication,
    ),
  );
}

View File

@@ -39,11 +39,6 @@ import {
import { useObservableEagerState } from "observable-hooks";
import styles from "./GridTile.module.css";
import {
type UserMediaViewModel,
type LocalUserMediaViewModel,
type RemoteUserMediaViewModel,
} from "../state/MediaViewModel";
import { Slider } from "../Slider";
import { MediaView } from "./MediaView";
import { useLatest } from "../useLatest";
@@ -51,6 +46,9 @@ import { type GridTileViewModel } from "../state/TileViewModel";
import { useMergedRefs } from "../useMergedRefs";
import { useReactionsSender } from "../reactions/useReactionsSender";
import { useBehavior } from "../useBehavior";
import { type LocalUserMediaViewModel } from "../state/media/LocalUserMediaViewModel";
import { type RemoteUserMediaViewModel } from "../state/media/RemoteUserMediaViewModel";
import { type UserMediaViewModel } from "../state/media/UserMediaViewModel";
interface TileProps {
ref?: Ref<HTMLDivElement>;

View File

@@ -18,7 +18,7 @@ import { TrackInfo } from "@livekit/protocol";
import { type ComponentProps } from "react";
import { MediaView } from "./MediaView";
import { EncryptionStatus } from "../state/MediaViewModel";
import { EncryptionStatus } from "../state/media/MemberMediaViewModel";
import { mockLocalParticipant } from "../utils/test";
describe("MediaView", () => {

View File

@@ -16,7 +16,7 @@ import { ErrorSolidIcon } from "@vector-im/compound-design-tokens/assets/web/ico
import styles from "./MediaView.module.css";
import { Avatar } from "../Avatar";
import { type EncryptionStatus } from "../state/MediaViewModel";
import { type EncryptionStatus } from "../state/media/MemberMediaViewModel";
import { RaisedHandIndicator } from "../reactions/RaisedHandIndicator";
import {
showConnectionStats as showConnectionStatsSetting,

View File

@@ -32,21 +32,19 @@ import FullScreenMaximiseIcon from "../icons/FullScreenMaximise.svg?react";
import FullScreenMinimiseIcon from "../icons/FullScreenMinimise.svg?react";
import { MediaView } from "./MediaView";
import styles from "./SpotlightTile.module.css";
import {
type EncryptionStatus,
type LocalUserMediaViewModel,
type MediaViewModel,
type UserMediaViewModel,
type RemoteUserMediaViewModel,
type ScreenShareViewModel,
type RemoteScreenShareViewModel,
} from "../state/MediaViewModel";
import { useInitial } from "../useInitial";
import { useMergedRefs } from "../useMergedRefs";
import { useReactiveState } from "../useReactiveState";
import { useLatest } from "../useLatest";
import { type SpotlightTileViewModel } from "../state/TileViewModel";
import { useBehavior } from "../useBehavior";
import { type EncryptionStatus } from "../state/media/MemberMediaViewModel";
import { type LocalUserMediaViewModel } from "../state/media/LocalUserMediaViewModel";
import { type RemoteUserMediaViewModel } from "../state/media/RemoteUserMediaViewModel";
import { type UserMediaViewModel } from "../state/media/UserMediaViewModel";
import { type ScreenShareViewModel } from "../state/media/ScreenShareViewModel";
import { type RemoteScreenShareViewModel } from "../state/media/RemoteScreenShareViewModel";
import { type MediaViewModel } from "../state/media/MediaViewModel";
interface SpotlightItemBaseProps {
ref?: Ref<HTMLDivElement>;

View File

@@ -52,12 +52,6 @@ import {
} from "matrix-js-sdk/lib/matrixrtc/IKeyTransport";
import { type CallMembershipIdentityParts } from "matrix-js-sdk/lib/matrixrtc/EncryptionManager";
import {
createLocalUserMedia,
createRemoteUserMedia,
type LocalUserMediaViewModel,
type RemoteUserMediaViewModel,
} from "../state/MediaViewModel";
import { E2eeType } from "../e2ee/e2eeType";
import {
DEFAULT_CONFIG,
@@ -68,6 +62,14 @@ import { type MediaDevices } from "../state/MediaDevices";
import { type Behavior, constant } from "../state/Behavior";
import { ObservableScope } from "../state/ObservableScope";
import { MuteStates } from "../state/MuteStates";
import {
createLocalUserMedia,
type LocalUserMediaViewModel,
} from "../state/media/LocalUserMediaViewModel";
import {
createRemoteUserMedia,
type RemoteUserMediaViewModel,
} from "../state/media/RemoteUserMediaViewModel";
export function withFakeTimers(continuation: () => void): void {
vi.useFakeTimers();