All my Friday work. Demoable!

Authored by Robin on 2025-08-29 18:46:24 +02:00, committed by Timo K
parent 386dc6c84d
commit e08f16f889
8 changed files with 220 additions and 298 deletions

View File

@@ -60,7 +60,7 @@ import { useRageshakeRequestModal } from "../settings/submit-rageshake";
import { RageshakeRequestModal } from "./RageshakeRequestModal";
import { useWakeLock } from "../useWakeLock";
import { useMergedRefs } from "../useMergedRefs";
import { type MuteStates } from "./MuteStates";
import { type MuteStates } from "../state/MuteStates";
import { type MatrixInfo } from "./VideoPreview";
import { InviteButton } from "../button/InviteButton";
import { LayoutToggle } from "./LayoutToggle";
@@ -143,6 +143,7 @@ export const ActiveCall: FC<ActiveCallProps> = (props) => {
props.rtcSession,
props.matrixRoom,
mediaDevices,
props.muteStates,
{
encryptionSystem: props.e2eeSystem,
autoLeaveWhenOthersLeft,
@@ -161,6 +162,7 @@ export const ActiveCall: FC<ActiveCallProps> = (props) => {
props.rtcSession,
props.matrixRoom,
mediaDevices,
props.muteStates,
props.e2eeSystem,
autoLeaveWhenOthersLeft,
sendNotificationType,
@@ -265,22 +267,19 @@ export const InCallView: FC<InCallViewProps> = ({
],
);
const toggleMicrophone = useCallback(
() => muteStates.audio.setEnabled?.((e) => !e),
[muteStates],
);
const toggleCamera = useCallback(
() => muteStates.video.setEnabled?.((e) => !e),
[muteStates],
);
const audioEnabled = useBehavior(muteStates.audio.enabled$);
const videoEnabled = useBehavior(muteStates.video.enabled$);
const toggleAudio = useBehavior(muteStates.audio.toggle$);
const toggleVideo = useBehavior(muteStates.video.toggle$);
const setAudioEnabled = useBehavior(muteStates.audio.setEnabled$);
// This function incorrectly assumes that there is a camera and microphone, which is not always the case.
// TODO: Make sure that this module is resilient when it comes to camera/microphone availability!
useCallViewKeyboardShortcuts(
containerRef1,
toggleMicrophone,
toggleCamera,
(muted) => muteStates.audio.setEnabled?.(!muted),
toggleAudio,
toggleVideo,
setAudioEnabled,
(reaction) => void sendReaction(reaction),
() => void toggleRaisedHand(),
);
@@ -764,18 +763,18 @@ export const InCallView: FC<InCallViewProps> = ({
buttons.push(
<MicButton
key="audio"
muted={!muteStates.audio.enabled}
onClick={toggleMicrophone}
muted={!audioEnabled}
onClick={toggleAudio ?? undefined}
onTouchEnd={onControlsTouchEnd}
disabled={muteStates.audio.setEnabled === null}
disabled={toggleAudio === null}
data-testid="incall_mute"
/>,
<VideoButton
key="video"
muted={!muteStates.video.enabled}
onClick={toggleCamera}
muted={!videoEnabled}
onClick={toggleVideo ?? undefined}
onTouchEnd={onControlsTouchEnd}
disabled={muteStates.video.setEnabled === null}
disabled={toggleVideo === null}
data-testid="incall_videomute"
/>,
);
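
The InCallView changes above replace direct reads of muteStates.*.enabled with reactive values read through useBehavior. A minimal sketch of such a hook, assuming Behavior<T> behaves like an RxJS BehaviorSubject with a synchronous current value (the actual hook in ../useBehavior may differ):

import { useEffect, useState } from "react";
import { type BehaviorSubject } from "rxjs";

// Sketch only: mirror the current value of a BehaviorSubject-like Behavior
// into React state. All names here are illustrative.
export function useBehaviorSketch<T>(behavior$: BehaviorSubject<T>): T {
  const [value, setValue] = useState(behavior$.value);
  useEffect(() => {
    // A BehaviorSubject re-emits its current value on subscription, so this
    // also resyncs the state when behavior$ changes between renders.
    const subscription = behavior$.subscribe(setValue);
    return (): void => subscription.unsubscribe();
  }, [behavior$]);
  return value;
}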

View File

@@ -31,7 +31,7 @@ import inCallStyles from "./InCallView.module.css";
import styles from "./LobbyView.module.css";
import { Header, LeftNav, RightNav, RoomHeaderInfo } from "../Header";
import { type MatrixInfo, VideoPreview } from "./VideoPreview";
import { type MuteStates } from "./MuteStates";
import { type MuteStates } from "../state/MuteStates";
import { InviteButton } from "../button/InviteButton";
import {
EndCallButton,
@@ -50,8 +50,8 @@ import {
useTrackProcessorSync,
} from "../livekit/TrackProcessorContext";
import { usePageTitle } from "../usePageTitle";
import { useLatest } from "../useLatest";
import { getValue } from "../utils/observable";
import { useBehavior } from "../useBehavior";
interface Props {
client: MatrixClient;
@@ -88,14 +88,10 @@ export const LobbyView: FC<Props> = ({
const { t } = useTranslation();
usePageTitle(matrixInfo.roomName);
const onAudioPress = useCallback(
() => muteStates.audio.setEnabled?.((e) => !e),
[muteStates],
);
const onVideoPress = useCallback(
() => muteStates.video.setEnabled?.((e) => !e),
[muteStates],
);
const audioEnabled = useBehavior(muteStates.audio.enabled$);
const videoEnabled = useBehavior(muteStates.video.enabled$);
const toggleAudio = useBehavior(muteStates.audio.toggle$);
const toggleVideo = useBehavior(muteStates.video.toggle$);
const [settingsModalOpen, setSettingsModalOpen] = useState(false);
const [settingsTab, setSettingsTab] = useState(defaultSettingsTab);
@@ -133,7 +129,7 @@ export const LobbyView: FC<Props> = ({
// re-open the devices when they change (see below).
const initialAudioOptions = useInitial(
() =>
muteStates.audio.enabled && {
audioEnabled && {
deviceId: getValue(devices.audioInput.selected$)?.id,
},
);
@@ -150,27 +146,21 @@ export const LobbyView: FC<Props> = ({
// We also pass in a clone because livekit mutates the object passed in,
// which would cause the devices to be re-opened on the next render.
audio: Object.assign({}, initialAudioOptions),
video: muteStates.video.enabled && {
video: videoEnabled && {
deviceId: videoInputId,
processor: initialProcessor,
},
}),
[
initialAudioOptions,
muteStates.video.enabled,
videoInputId,
initialProcessor,
],
[initialAudioOptions, videoEnabled, videoInputId, initialProcessor],
);
const latestMuteStates = useLatest(muteStates);
const onError = useCallback(
(error: Error) => {
logger.error("Error while creating preview Tracks:", error);
latestMuteStates.current.audio.setEnabled?.(false);
latestMuteStates.current.video.setEnabled?.(false);
muteStates.audio.setEnabled$.value?.(false);
muteStates.video.setEnabled$.value?.(false);
},
[latestMuteStates],
[muteStates],
);
const tracks = usePreviewTracks(localTrackOptions, onError);
@@ -217,7 +207,7 @@ export const LobbyView: FC<Props> = ({
<div className={styles.content}>
<VideoPreview
matrixInfo={matrixInfo}
muteStates={muteStates}
videoEnabled={videoEnabled}
videoTrack={videoTrack}
>
<Button
@@ -239,14 +229,14 @@ export const LobbyView: FC<Props> = ({
{recentsButtonInFooter && recentsButton}
<div className={inCallStyles.buttons}>
<MicButton
muted={!muteStates.audio.enabled}
onClick={onAudioPress}
disabled={muteStates.audio.setEnabled === null}
muted={!audioEnabled}
onClick={toggleAudio ?? undefined}
disabled={toggleAudio === null}
/>
<VideoButton
muted={!muteStates.video.enabled}
onClick={onVideoPress}
disabled={muteStates.video.setEnabled === null}
muted={!videoEnabled}
onClick={toggleVideo ?? undefined}
disabled={toggleVideo === null}
/>
<SettingsButton onClick={openSettings} />
{!confineToRoom && <EndCallButton onClick={onLeaveClick} />}
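
For reference, the MuteStates surface that LobbyView and InCallView rely on after this change, reconstructed from the call sites above. The authoritative definitions live in ../state/MuteStates and ../state/Behavior, so treat this as an approximation:

import { type Observable } from "rxjs";

// Assumed shape: a Behavior is an observable with a synchronous current value.
interface Behavior<T> extends Observable<T> {
  readonly value: T;
}

type Handler = (desired: boolean) => Promise<boolean>;

interface MuteState {
  // Confirmed enabled state, driven by user intent and the registered handler.
  enabled$: Behavior<boolean>;
  // null while no device is available; the buttons map null to `disabled`.
  toggle$: Behavior<(() => void) | null>;
  setEnabled$: Behavior<((enabled: boolean) => void) | null>;
  // A publishing connection registers a handler to drive the real device.
  setHandler(handler: Handler): void;
  unsetHandler(): void;
}

interface MuteStates {
  audio: MuteState;
  video: MuteState;
}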

View File

@@ -13,7 +13,6 @@ import { useTranslation } from "react-i18next";
import { TileAvatar } from "../tile/TileAvatar";
import styles from "./VideoPreview.module.css";
import { type MuteStates } from "./MuteStates";
import { type EncryptionSystem } from "../e2ee/sharedKeyManagement";
export type MatrixInfo = {
@@ -29,14 +28,14 @@ export type MatrixInfo = {
interface Props {
matrixInfo: MatrixInfo;
muteStates: MuteStates;
videoEnabled: boolean;
videoTrack: LocalVideoTrack | null;
children: ReactNode;
}
export const VideoPreview: FC<Props> = ({
matrixInfo,
muteStates,
videoEnabled,
videoTrack,
children,
}) => {
@@ -56,8 +55,8 @@ export const VideoPreview: FC<Props> = ({
}, [videoTrack]);
const cameraIsStarting = useMemo(
() => muteStates.video.enabled && !videoTrack,
[muteStates.video.enabled, videoTrack],
() => videoEnabled && !videoTrack,
[videoEnabled, videoTrack],
);
return (
@@ -76,7 +75,7 @@ export const VideoPreview: FC<Props> = ({
tabIndex={-1}
disablePictureInPicture
/>
{(!muteStates.video.enabled || cameraIsStarting) && (
{(!videoEnabled || cameraIsStarting) && (
<>
<div className={styles.avatarContainer}>
{cameraIsStarting && (

View File

@@ -34,6 +34,7 @@ import {
Subject,
combineLatest,
concat,
concatMap,
distinctUntilChanged,
endWith,
filter,
@@ -121,6 +122,7 @@ import { E2eeType } from "../e2ee/e2eeType";
import { MatrixKeyProvider } from "../e2ee/matrixKeyProvider";
import { type ECConnectionState } from "../livekit/useECConnectionState";
import { Connection, PublishConnection } from "./Connection";
import { type MuteStates } from "./MuteStates";
export interface CallViewModelOptions {
encryptionSystem: EncryptionSystem;
@@ -447,6 +449,7 @@ export class CallViewModel extends ViewModel {
this.scope,
this.membershipsAndFocusMap$,
this.mediaDevices,
this.muteStates,
this.livekitE2EERoomOptions,
),
);
@@ -536,6 +539,14 @@ export class CallViewModel extends ViewModel {
return { start, stop };
}),
this.scope.share,
);
private readonly startConnection$ = this.connectionInstructions$.pipe(
concatMap(({ start }) => start),
);
private readonly stopConnection$ = this.connectionInstructions$.pipe(
concatMap(({ stop }) => stop),
);
private readonly userId = this.matrixRoom.client.getUserId();
@@ -623,15 +634,15 @@ export class CallViewModel extends ViewModel {
),
);
private readonly participants$ = this.scope
.behavior<
{
participant: LocalParticipant | RemoteParticipant;
member: RoomMember;
livekitRoom: LivekitRoom;
}[]
>(
from(this.localConnection).pipe(
private readonly participants$ = this.scope.behavior<
{
participant: LocalParticipant | RemoteParticipant;
member: RoomMember;
livekitRoom: LivekitRoom;
}[]
>(
from(this.localConnection)
.pipe(
switchMap((localConnection) => {
const memberError = (): never => {
throw new Error("No room member for call membership");
@@ -645,7 +656,7 @@ export class CallViewModel extends ViewModel {
return this.remoteConnections$.pipe(
switchMap((connections) =>
combineLatest(
[...connections.values()].map((c) =>
[localConnection, ...connections.values()].map((c) =>
c.publishingParticipants$.pipe(
map((ps) =>
ps.map(({ participant, membership }) => ({
@@ -663,14 +674,14 @@ export class CallViewModel extends ViewModel {
),
),
map((remoteParticipants) => [
...remoteParticipants.flat(1),
localParticipant,
...remoteParticipants.flat(1),
]),
);
}),
),
)
.pipe(startWith([]), pauseWhen(this.pretendToBeDisconnected$));
)
.pipe(startWith([]), pauseWhen(this.pretendToBeDisconnected$)),
);
/**
* Displaynames for each member of the call. This will disambiguate
@@ -681,18 +692,23 @@ export class CallViewModel extends ViewModel {
// than on Chrome/Firefox). This means it is important that we multicast the result so that we
// don't do this work more times than we need to. This is achieved by converting to a behavior:
public readonly memberDisplaynames$ = this.scope.behavior(
// React to call memberships and also display name updates
// (calculateDisplayName implicitly depends on the room member data)
combineLatest(
[
this.memberships$,
fromEvent(this.matrixRoom, RoomStateEvent.Members).pipe(
startWith(null),
pauseWhen(this.pretendToBeDisconnected$),
),
],
(memberships, _members) => {
const displaynameMap = new Map<string, string>();
merge(
// Handle call membership changes.
fromEvent(
this.matrixRTCSession,
MatrixRTCSessionEvent.MembershipsChanged,
),
// Handle room membership changes (and displayname updates)
fromEvent(this.matrixRoom, RoomStateEvent.Members),
// TODO: do we still need pauseWhen(this.pretendToBeDisconnected$) here?
).pipe(
startWith(null),
map(() => {
const memberships = this.matrixRTCSession.memberships;
const displaynameMap = new Map<string, string>([
["local", this.matrixRoom.getMember(this.userId!)!.rawDisplayName],
]);
const room = this.matrixRoom;
// We only consider RTC members for disambiguation as they are the only visible members.
@@ -1753,6 +1769,7 @@ export class CallViewModel extends ViewModel {
private readonly matrixRTCSession: MatrixRTCSession,
private readonly matrixRoom: MatrixRoom,
private readonly mediaDevices: MediaDevices,
private readonly muteStates: MuteStates,
private readonly options: CallViewModelOptions,
private readonly handsRaisedSubject$: Observable<
Record<string, RaisedHandInfo>
@@ -1774,12 +1791,12 @@ export class CallViewModel extends ViewModel {
// eslint-disable-next-line no-console
.catch((e) => console.error("failed to start publishing", e)),
);
this.connectionInstructions$
this.startConnection$
.pipe(this.scope.bind())
.subscribe(({ start, stop }) => {
for (const connection of start) void connection.start();
for (const connection of stop) connection.stop();
});
.subscribe((c) => void c.start());
this.stopConnection$.pipe(this.scope.bind()).subscribe((c) => c.stop());
combineLatest([this.localFocus, this.join$])
.pipe(this.scope.bind())
.subscribe(([localFocus]) => {
@@ -1789,6 +1806,7 @@ export class CallViewModel extends ViewModel {
this.options.encryptionSystem.kind !== E2eeType.PER_PARTICIPANT,
);
});
this.join$.pipe(this.scope.bind()).subscribe(() => {
leaveRTCSession(
this.matrixRTCSession,
@@ -1861,6 +1879,7 @@ function getE2eeOptions(
e2eeSystem: EncryptionSystem,
rtcSession: MatrixRTCSession,
): E2EEOptions | undefined {
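// Note: this early return currently bypasses all the E2EE configuration below.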
return undefined;
if (e2eeSystem.kind === E2eeType.NONE) return undefined;
if (e2eeSystem.kind === E2eeType.PER_PARTICIPANT) {
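
The CallViewModel changes above split connectionInstructions$ into separate startConnection$ and stopConnection$ streams by letting concatMap flatten each instruction's start/stop arrays into one connection per emission. A self-contained sketch of that pattern, with illustrative names standing in for the real Connection type:

import { Subject, concatMap } from "rxjs";

interface ConnectionLike {
  start(): Promise<void>;
  stop(): void;
}

const instructions$ = new Subject<{
  start: ConnectionLike[];
  stop: ConnectionLike[];
}>();

// concatMap accepts arrays as ObservableInputs, so each instruction fans out
// into its individual connections in order.
const startConnection$ = instructions$.pipe(concatMap(({ start }) => start));
const stopConnection$ = instructions$.pipe(concatMap(({ stop }) => stop));

startConnection$.subscribe((c) => void c.start());
stopConnection$.subscribe((c) => c.stop());

In the view model itself, connectionInstructions$ is additionally multicast via this.scope.share, so both derived streams observe the same emissions.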

View File

@@ -6,13 +6,14 @@ SPDX-License-Identifier: AGPL-3.0-only OR LicenseRef-Element-Commercial
Please see LICENSE in the repository root for full details.
*/
import { connectedParticipantsObserver } from "@livekit/components-core";
import {
connectedParticipantsObserver,
connectionStateObserver,
} from "@livekit/components-core";
import {
ConnectionState,
Room as LivekitRoom,
type RoomOptions,
type E2EEOptions,
RoomEvent,
Track,
} from "livekit-client";
import { type MatrixClient } from "matrix-js-sdk";
@@ -21,10 +22,7 @@ import {
type CallMembership,
} from "matrix-js-sdk/lib/matrixrtc";
import {
BehaviorSubject,
combineLatest,
filter,
fromEvent,
map,
NEVER,
type Observable,
@@ -35,12 +33,12 @@ import { logger } from "matrix-js-sdk/lib/logger";
import { type SelectedDevice, type MediaDevices } from "./MediaDevices";
import { getSFUConfigWithOpenID } from "../livekit/openIDSFU";
import { constant, type Behavior } from "./Behavior";
import { type Behavior } from "./Behavior";
import { type ObservableScope } from "./ObservableScope";
import { defaultLiveKitOptions } from "../livekit/options";
import { getValue } from "../utils/observable";
import { getUrlParams } from "../UrlParams";
import { type MuteStates } from "../room/MuteStates";
import { type MuteStates } from "./MuteStates";
export class Connection {
protected stopped = false;
@@ -64,7 +62,7 @@ export class Connection {
public readonly participantsIncludingSubscribers$;
public readonly publishingParticipants$;
public livekitRoom: LivekitRoom;
public readonly livekitRoom: LivekitRoom;
public connectionState$: Behavior<ConnectionState>;
public constructor(
@@ -76,11 +74,14 @@ export class Connection {
{ membership: CallMembership; focus: LivekitFocus }[]
>,
e2eeLivekitOptions: E2EEOptions | undefined,
livekitRoom: LivekitRoom | undefined = undefined,
) {
this.livekitRoom = new LivekitRoom({
...defaultLiveKitOptions,
e2ee: e2eeLivekitOptions,
});
this.livekitRoom =
livekitRoom ??
new LivekitRoom({
...defaultLiveKitOptions,
e2ee: e2eeLivekitOptions,
});
this.participantsIncludingSubscribers$ = this.scope.behavior(
connectedParticipantsObserver(this.livekitRoom),
[],
@@ -112,10 +113,7 @@ export class Connection {
[],
);
this.connectionState$ = this.scope.behavior<ConnectionState>(
fromEvent<ConnectionState>(
this.livekitRoom,
RoomEvent.ConnectionStateChanged,
),
connectionStateObserver(this.livekitRoom),
);
}
}
@@ -128,8 +126,8 @@ export class PublishConnection extends Connection {
if (!this.stopped) {
const tracks = await this.livekitRoom.localParticipant.createTracks({
audio: true,
video: true,
audio: this.muteStates.audio.enabled$.value,
video: this.muteStates.video.enabled$.value,
});
for (const track of tracks) {
await this.livekitRoom.localParticipant.publishTrack(track);
@@ -142,53 +140,32 @@ export class PublishConnection extends Connection {
this.stopped = true;
}
public readonly participantsIncludingSubscribers$ = this.scope.behavior(
connectedParticipantsObserver(this.livekitRoom),
[],
);
private readonly muteStates$: Behavior<MuteStates>;
private updatingMuteStates$ = new BehaviorSubject(false);
public constructor(
protected readonly focus: LivekitFocus,
protected readonly livekitAlias: string,
protected readonly client: MatrixClient,
protected readonly scope: ObservableScope,
protected readonly membershipsFocusMap$: Behavior<
focus: LivekitFocus,
livekitAlias: string,
client: MatrixClient,
scope: ObservableScope,
membershipsFocusMap$: Behavior<
{ membership: CallMembership; focus: LivekitFocus }[]
>,
protected readonly devices: MediaDevices,
devices: MediaDevices,
private readonly muteStates: MuteStates,
e2eeLivekitOptions: E2EEOptions | undefined,
) {
super(
focus,
livekitAlias,
client,
scope,
membershipsFocusMap$,
e2eeLivekitOptions,
);
// TODO-MULTI-SFU use actual mute states
this.muteStates$ = constant({
audio: { enabled: true, setEnabled: (enabled) => {} },
video: { enabled: true, setEnabled: (enabled) => {} },
});
logger.info("[LivekitRoom] Create LiveKit room");
const { controlledAudioDevices } = getUrlParams();
const roomOptions: RoomOptions = {
const room = new LivekitRoom({
...defaultLiveKitOptions,
videoCaptureDefaults: {
...defaultLiveKitOptions.videoCaptureDefaults,
deviceId: getValue(this.devices.videoInput.selected$)?.id,
deviceId: devices.videoInput.selected$.value?.id,
// TODO-MULTI-SFU add processor support back
// processor,
},
audioCaptureDefaults: {
...defaultLiveKitOptions.audioCaptureDefaults,
deviceId: getValue(devices.audioInput.selected$)?.id,
deviceId: devices.audioInput.selected$.value?.id,
},
audioOutput: {
// When using controlled audio devices, we don't want to set the
@@ -199,150 +176,38 @@ export class PublishConnection extends Connection {
: getValue(devices.audioOutput.selected$)?.id,
},
e2ee: e2eeLivekitOptions,
};
// We have to create the room manually here due to a bug inside
// @livekit/components-react. JSON.stringify() is used in deps of a
// useEffect() with an argument that references itself, if E2EE is enabled
const room = new LivekitRoom(roomOptions);
});
room.setE2EEEnabled(e2eeLivekitOptions !== undefined).catch((e) => {
logger.error("Failed to set E2EE enabled on room", e);
});
this.livekitRoom = room;
// sync mute states TODO-MULTI_SFU This possibly can be simplified quite a bit.
combineLatest([
this.connectionState$,
this.muteStates$,
this.updatingMuteStates$,
])
.pipe(
filter(([_c, _m, updating]) => !updating),
this.scope.bind(),
)
.subscribe(([connectionState, muteStates, _]) => {
// Sync the requested mute states with LiveKit's mute states. We do it this
// way around rather than using LiveKit as the source of truth, so that the
// states can be consistent throughout the lobby and loading screens.
// It's important that we only do this in the connected state, because
// LiveKit's internal mute states aren't consistent during connection setup,
// and setting tracks to be enabled during this time causes errors.
if (
this.livekitRoom !== undefined &&
connectionState === ConnectionState.Connected
) {
const participant = this.livekitRoom.localParticipant;
super(
focus,
livekitAlias,
client,
scope,
membershipsFocusMap$,
e2eeLivekitOptions,
room,
);
enum MuteDevice {
Microphone,
Camera,
}
const syncMuteState = async (
iterCount: number,
type: MuteDevice,
): Promise<void> => {
// The approach for muting is to always bring the actual livekit state in sync with the button
// This allows for a very predictable and reactive behavior for the user.
// (the new state is the old state when pressing the button n times (where n is even))
// (the new state is different from the old state when pressing the button n times (where n is odd))
// In case there are issues with the device there might be situations where setMicrophoneEnabled/setCameraEnabled
// return immediately. This should be caught with the Error("track with new mute state could not be published").
// For now we are still using an iterCount to limit the recursion loop to 10.
// This could happen if the device just really does not want to turn on (hardware based issue)
// but the mute button is in unmute state.
// For now our fail mode is to just stay in this state.
// TODO: decide on a UX for how that fail mode should be treated (disable button, hide button, sync button back to muted without user input)
if (iterCount > 10) {
logger.error(
"Stop trying to sync the input device with current mute state after 10 failed tries",
);
return;
}
let devEnabled;
let btnEnabled;
switch (type) {
case MuteDevice.Microphone:
devEnabled = participant.isMicrophoneEnabled;
btnEnabled = muteStates.audio.enabled;
break;
case MuteDevice.Camera:
devEnabled = participant.isCameraEnabled;
btnEnabled = muteStates.video.enabled;
break;
}
if (devEnabled !== btnEnabled && !this.updatingMuteStates$.value) {
this.updatingMuteStates$.next(true);
try {
let trackPublication;
switch (type) {
case MuteDevice.Microphone:
trackPublication = await participant.setMicrophoneEnabled(
btnEnabled,
this.livekitRoom.options.audioCaptureDefaults,
);
break;
case MuteDevice.Camera:
trackPublication = await participant.setCameraEnabled(
btnEnabled,
this.livekitRoom.options.videoCaptureDefaults,
);
break;
}
if (trackPublication) {
// await participant.setMicrophoneEnabled can return immediately in some instances,
// so that participant.isMicrophoneEnabled !== buttonEnabled.current.audio still holds true.
// This happens if the device is still in a pending state
// "sleeping" here makes sure we let react do its thing so that participant.isMicrophoneEnabled is updated,
// so we do not end up in a recursion loop.
await new Promise((r) => setTimeout(r, 100));
// track got successfully changed to mute/unmute
// Run the check again after the change is done. Because the user
// can update the state (presses mute button) while the device is enabling
// itself, we might need to update the mute state right away.
// This async recursion makes sure that setCamera/MicrophoneEnabled is
// called as few times as possible.
await syncMuteState(iterCount + 1, type);
} else {
throw new Error(
"track with new mute state could not be published",
);
}
} catch (e) {
if ((e as DOMException).name === "NotAllowedError") {
logger.error(
"Fatal error while syncing mute state: resetting",
e,
);
if (type === MuteDevice.Microphone) {
muteStates.audio.setEnabled?.(false);
} else {
muteStates.video.setEnabled?.(false);
}
} else {
logger.error(
"Failed to sync audio mute state with LiveKit (will retry to sync in 1s):",
e,
);
setTimeout(() => {
this.updatingMuteStates$.next(false);
}, 1000);
}
}
}
};
syncMuteState(0, MuteDevice.Microphone).catch((e) => {
logger.error("Failed to sync audio mute state with LiveKit", e);
});
syncMuteState(0, MuteDevice.Camera).catch((e) => {
logger.error("Failed to sync video mute state with LiveKit", e);
});
}
});
this.muteStates.audio.setHandler(async (desired) => {
try {
await this.livekitRoom.localParticipant.setMicrophoneEnabled(desired);
} catch (e) {
logger.error("Failed to update LiveKit audio input mute state", e);
}
return this.livekitRoom.localParticipant.isMicrophoneEnabled;
});
this.muteStates.video.setHandler(async (desired) => {
try {
await this.livekitRoom.localParticipant.setCameraEnabled(desired);
} catch (e) {
logger.error("Failed to update LiveKit video input mute state", e);
}
return this.livekitRoom.localParticipant.isCameraEnabled;
});
// TODO-MULTI-SFU: Unset mute state handlers on destroy
const syncDevice = (
kind: MediaDeviceKind,

View File

@@ -8,12 +8,14 @@ Please see LICENSE in the repository root for full details.
import { type IWidgetApiRequest } from "matrix-widget-api";
import { logger } from "matrix-js-sdk/lib/logger";
import {
BehaviorSubject,
combineLatest,
distinctUntilChanged,
firstValueFrom,
fromEvent,
map,
merge,
type Observable,
Observable,
of,
Subject,
switchMap,
@@ -25,7 +27,6 @@ import { ElementWidgetActions, widget } from "../widget";
import { Config } from "../config/Config";
import { getUrlParams } from "../UrlParams";
import { type ObservableScope } from "./ObservableScope";
import { accumulate } from "../utils/observable";
import { type Behavior } from "./Behavior";
interface MuteStateData {
@@ -34,12 +35,25 @@ interface MuteStateData {
toggle: (() => void) | null;
}
export type Handler = (desired: boolean) => Promise<boolean>;
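// The default handler reports the desired state as already achieved, so
// enabled$ simply follows user input until a connection registers a handler.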
const defaultHandler: Handler = async (desired) => desired;
class MuteState<Label, Selected> {
private readonly enabledByDefault$ =
this.enabledByConfig && !getUrlParams().skipLobby
? this.joined$.pipe(map((isJoined) => !isJoined))
: of(false);
private readonly handler$ = new BehaviorSubject(defaultHandler);
public setHandler(handler: Handler): void {
if (this.handler$.value !== defaultHandler)
throw new Error("Multiple mute state handlers are not supported");
this.handler$.next(handler);
}
public unsetHandler(): void {
this.handler$.next(defaultHandler);
}
private readonly data$ = this.scope.behavior<MuteStateData>(
this.device.available$.pipe(
map((available) => available.size > 0),
@@ -50,20 +64,49 @@ class MuteState<Label, Selected> {
if (!devicesConnected)
return { enabled$: of(false), set: null, toggle: null };
// Assume the default value only once devices are actually connected
let enabled = enabledByDefault;
const set$ = new Subject<boolean>();
const toggle$ = new Subject<void>();
const desired$ = merge(set$, toggle$.pipe(map(() => !enabled)));
const enabled$ = new Observable<boolean>((subscriber) => {
subscriber.next(enabled);
let latestDesired = enabledByDefault;
let syncing = false;
const sync = async (): Promise<void> => {
if (enabled === latestDesired) syncing = false;
else {
const previouslyEnabled = enabled;
enabled = await firstValueFrom(
this.handler$.pipe(
switchMap(async (handler) => handler(latestDesired)),
),
);
if (enabled === previouslyEnabled) {
syncing = false;
} else {
subscriber.next(enabled);
syncing = true;
sync();
}
}
};
const s = desired$.subscribe((desired) => {
latestDesired = desired;
if (syncing === false) {
syncing = true;
sync();
}
});
return (): void => s.unsubscribe();
});
return {
set: (enabled: boolean): void => set$.next(enabled),
toggle: (): void => toggle$.next(),
// Assume the default value only once devices are actually connected
enabled$: merge(
set$,
toggle$.pipe(map(() => "toggle" as const)),
).pipe(
accumulate(enabledByDefault, (prev, update) =>
update === "toggle" ? !prev : update,
),
),
enabled$,
};
},
),
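
The Handler contract introduced above receives the desired enabled state and resolves with the state that was actually reached, which lets the mute state converge on reality when a device refuses to switch. An illustrative handler against a stand-in device (not part of the codebase):

type Handler = (desired: boolean) => Promise<boolean>;

const fakeCamera = { enabled: false, broken: true };

const cameraHandler: Handler = async (desired) => {
  if (desired && fakeCamera.broken) {
    // The device cannot start, so report the state we are actually in;
    // the UI then keeps showing the camera as muted.
    return fakeCamera.enabled;
  }
  fakeCamera.enabled = desired;
  return fakeCamera.enabled;
};

// A publishing connection would register it while it exists, e.g.
//   muteStates.video.setHandler(cameraHandler);
// and release it on teardown with muteStates.video.unsetHandler().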

View File

@@ -9,6 +9,7 @@ import {
BehaviorSubject,
distinctUntilChanged,
type Observable,
share,
Subject,
takeUntil,
} from "rxjs";
@@ -35,6 +36,12 @@ export class ObservableScope {
return this.bindImpl;
}
private readonly shareImpl: MonoTypeOperator = share({
  resetOnError: false,
  resetOnComplete: false,
  resetOnRefCountZero: false,
});
/**
* Shares (multicasts) the Observable as a hot Observable.
*/
public readonly share: MonoTypeOperator = (input$) =>
  input$.pipe(this.bindImpl, this.shareImpl);
/**
* Converts an Observable to a Behavior. If no initial value is specified, the
* Observable must synchronously emit an initial value.
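
The new scope.share operator above composes the scope's bind() teardown with an RxJS share() configured to never reset, so every subscriber attaches to a single long-lived underlying subscription. A minimal sketch of the non-resetting share behaviour on its own (scope handling omitted):

import { Observable, share } from "rxjs";

let subscriptions = 0;
const expensive$ = new Observable<number>((subscriber) => {
  subscriptions++; // runs once per underlying subscription
  subscriber.next(subscriptions);
});

const shared$ = expensive$.pipe(
  share({
    resetOnError: false,
    resetOnComplete: false,
    resetOnRefCountZero: false,
  }),
);

shared$.subscribe();
shared$.subscribe();
console.log(subscriptions); // 1: both subscribers share a single subscription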

View File

@@ -29,9 +29,9 @@ const KeyToReactionMap: Record<string, ReactionOption> = Object.fromEntries(
export function useCallViewKeyboardShortcuts(
focusElement: RefObject<HTMLElement | null>,
toggleMicrophoneMuted: () => void,
toggleLocalVideoMuted: () => void,
setMicrophoneMuted: (muted: boolean) => void,
toggleAudio: (() => void) | null,
toggleVideo: (() => void) | null,
setAudioEnabled: ((enabled: boolean) => void) | null,
sendReaction: (reaction: ReactionOption) => void,
toggleHandRaised: () => void,
): void {
@@ -52,15 +52,15 @@ export function useCallViewKeyboardShortcuts(
if (event.key === "m") {
event.preventDefault();
toggleMicrophoneMuted();
} else if (event.key == "v") {
toggleAudio?.();
} else if (event.key === "v") {
event.preventDefault();
toggleLocalVideoMuted();
toggleVideo?.();
} else if (event.key === " ") {
event.preventDefault();
if (!spacebarHeld.current) {
spacebarHeld.current = true;
setMicrophoneMuted(false);
setAudioEnabled?.(true);
}
} else if (event.key === "h") {
event.preventDefault();
@@ -72,9 +72,9 @@ export function useCallViewKeyboardShortcuts(
},
[
focusElement,
toggleLocalVideoMuted,
toggleMicrophoneMuted,
setMicrophoneMuted,
toggleVideo,
toggleAudio,
setAudioEnabled,
sendReaction,
toggleHandRaised,
],
@@ -95,10 +95,10 @@ export function useCallViewKeyboardShortcuts(
if (event.key === " ") {
spacebarHeld.current = false;
setMicrophoneMuted(true);
setAudioEnabled?.(false);
}
},
[focusElement, setMicrophoneMuted],
[focusElement, setAudioEnabled],
),
);
@@ -108,8 +108,8 @@ export function useCallViewKeyboardShortcuts(
useCallback(() => {
if (spacebarHeld.current) {
spacebarHeld.current = false;
setMicrophoneMuted(true);
setAudioEnabled?.(false);
}
}, [setMicrophoneMuted, spacebarHeld]),
}, [setAudioEnabled, spacebarHeld]),
);
}