Mirror of https://github.com/vector-im/element-call.git
refactor
- still will not work without fixing upstream LK first: https://github.com/livekit/components-js/pull/1042 https://github.com/livekit/components-js/pull/1043
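The upstream PRs appear to be needed so that the preview-track path (usePreviewTracks from @livekit/components-react) honours a processor passed in via the track-creation options, which is the shape this refactor moves towards: the processor travels with the capture options instead of being applied to an existing track with setProcessor(). A minimal sketch of that intended pattern, assuming the `processor` field of the video capture options is respected; the helper name is illustrative and this is not code from the commit:

import { createLocalTracks } from "livekit-client";
import { BackgroundBlur } from "@livekit/track-processors";

// Sketch only: attach the blur processor at track-creation time rather than
// calling setProcessor() on the track afterwards.
async function createBlurredCameraTrack() {
  const blur = BackgroundBlur(15, { delegate: "GPU" });
  const [videoTrack] = await createLocalTracks({
    video: { processor: blur },
  });
  return videoTrack;
}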
src/App.tsx (54 changed lines)
@@ -28,6 +28,7 @@ import { Initializer } from "./initializer";
 import { MediaDevicesProvider } from "./livekit/MediaDevicesContext";
 import { widget } from "./widget";
 import { useTheme } from "./useTheme";
+import { ProcessorProvider } from "./livekit/TrackProcessorContext";

 const SentryRoute = Sentry.withSentryRouting(Route);

@@ -82,27 +83,25 @@ export const App: FC<AppProps> = ({ history }) => {
         <TooltipProvider>
           {loaded ? (
             <Suspense fallback={null}>
-              <ClientProvider>
-                <MediaDevicesProvider>
-                  <Sentry.ErrorBoundary fallback={errorPage}>
-                    <DisconnectedBanner />
-                    <Switch>
-                      <SentryRoute exact path="/">
-                        <HomePage />
-                      </SentryRoute>
-                      <SentryRoute exact path="/login">
-                        <LoginPage />
-                      </SentryRoute>
-                      <SentryRoute exact path="/register">
-                        <RegisterPage />
-                      </SentryRoute>
-                      <SentryRoute path="*">
-                        <RoomPage />
-                      </SentryRoute>
-                    </Switch>
-                  </Sentry.ErrorBoundary>
-                </MediaDevicesProvider>
-              </ClientProvider>
+              <Providers>
+                <Sentry.ErrorBoundary fallback={errorPage}>
+                  <DisconnectedBanner />
+                  <Switch>
+                    <SentryRoute exact path="/">
+                      <HomePage />
+                    </SentryRoute>
+                    <SentryRoute exact path="/login">
+                      <LoginPage />
+                    </SentryRoute>
+                    <SentryRoute exact path="/register">
+                      <RegisterPage />
+                    </SentryRoute>
+                    <SentryRoute path="*">
+                      <RoomPage />
+                    </SentryRoute>
+                  </Switch>
+                </Sentry.ErrorBoundary>
+              </Providers>
             </Suspense>
           ) : (
             <LoadingView />
@@ -113,3 +112,16 @@ export const App: FC<AppProps> = ({ history }) => {
     </Router>
   );
 };
+
+const Providers: FC<{
+  children: JSX.Element;
+}> = ({ children }) => {
+  // We use this to stack all used providers so the App component does not get too verbose
+  return (
+    <ClientProvider>
+      <MediaDevicesProvider>
+        <ProcessorProvider>{children}</ProcessorProvider>
+      </MediaDevicesProvider>
+    </ClientProvider>
+  );
+};

src/livekit/TrackProcessorContext.tsx (new file, 111 lines)
@@ -0,0 +1,111 @@
+/*
+Copyright 2024 New Vector Ltd.
+
+SPDX-License-Identifier: AGPL-3.0-only
+Please see LICENSE in the repository root for full details.
+*/
+
+import {
+  BackgroundBlur as backgroundBlur,
+  BackgroundOptions,
+  ProcessorWrapper,
+} from "@livekit/track-processors";
+import {
+  createContext,
+  FC,
+  useCallback,
+  useContext,
+  useEffect,
+  useRef,
+  useState,
+} from "react";
+import { logger } from "matrix-js-sdk/src/logger";
+import { LocalVideoTrack } from "livekit-client";
+
+import {
+  backgroundBlur as backgroundBlurSettings,
+  useSetting,
+} from "../settings/settings";
+
+type ProcessorState = {
+  supported: boolean | undefined;
+  processor: undefined | ProcessorWrapper<BackgroundOptions>;
+  /**
+   * Call this method to try to initialize a processor.
+   * This only needs to happen if supported is undefined.
+   * If the backgroundBlur setting is set to true this does not need to be called
+   * and the processorState.supported will update automatically to the correct value.
+   */
+  checkSupported: () => void;
+};
+const ProcessorContext = createContext<ProcessorState | undefined>(undefined);
+
+export const useTrackProcessor = (): ProcessorState | undefined =>
+  useContext(ProcessorContext);
+
+export const useTrackProcessorSync = (
+  videoTrack: LocalVideoTrack | null,
+): void => {
+  const { processor } = useTrackProcessor() || {};
+  useEffect(() => {
+    if (processor && !videoTrack?.getProcessor()) {
+      void videoTrack?.setProcessor(processor);
+    }
+    if (!processor && videoTrack?.getProcessor()) {
+      void videoTrack?.stopProcessor();
+    }
+  }, [processor, videoTrack]);
+};
+
+interface Props {
+  children: JSX.Element;
+}
+export const ProcessorProvider: FC<Props> = ({ children }) => {
+  // The setting the user wants to have
+  const [blurActivated] = useSetting(backgroundBlurSettings);
+
+  // If `ProcessorState.supported` is undefined the user can activate that we want
+  // to have it at least checked (this is useful to show the settings menu properly)
+  // We don't want to try initializing the blur if the user is not even looking at the setting
+  const [shouldCheckSupport, setShouldCheckSupport] = useState(blurActivated);
+
+  // Cache the processor so we only need to initialize it once.
+  const blur = useRef<ProcessorWrapper<BackgroundOptions> | undefined>(
+    undefined,
+  );
+
+  const checkSupported = useCallback(() => {
+    setShouldCheckSupport(true);
+  }, []);
+  // This is the actual state exposed through the context
+  const [processorState, setProcessorState] = useState<ProcessorState>(() => ({
+    supported: false,
+    processor: undefined,
+    checkSupported,
+  }));
+
+  useEffect(() => {
+    if (!shouldCheckSupport) return;
+    try {
+      if (!blur.current) blur.current = backgroundBlur(15, { delegate: "GPU" });
+      setProcessorState({
+        checkSupported,
+        supported: true,
+        processor: blurActivated ? blur.current : undefined,
+      });
+    } catch (e) {
+      setProcessorState({
+        checkSupported,
+        supported: false,
+        processor: undefined,
+      });
+      logger.error("disable background blur", e);
+    }
+  }, [blurActivated, checkSupported, shouldCheckSupport]);
+
+  return (
+    <ProcessorContext.Provider value={processorState}>
+      {children}
+    </ProcessorContext.Provider>
+  );
+};
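In miniature, the consumers changed further down (useLiveKit, the lobby, and the settings modal) all use this context the same way: read the shared processor state, let useTrackProcessorSync attach or detach it on whatever local video track they own, and call checkSupported() when they need the support probe to run. A rough, hypothetical consumer; the component name and markup are illustrative, not part of the commit:

import { FC, useEffect } from "react";
import { LocalVideoTrack } from "livekit-client";

import {
  useTrackProcessor,
  useTrackProcessorSync,
} from "./TrackProcessorContext";

// Hypothetical consumer: the provider owns the blur processor,
// useTrackProcessorSync keeps this component's track in sync with it, and
// checkSupported() forces the support probe even while blur is switched off.
const CameraPreviewBlur: FC<{ videoTrack: LocalVideoTrack | null }> = ({
  videoTrack,
}) => {
  const { supported, checkSupported } = useTrackProcessor() || {};
  useEffect(() => checkSupported?.(), [checkSupported]);
  useTrackProcessorSync(videoTrack);
  return supported ? null : <p>Background blur is not supported here.</p>;
};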
src/livekit/useLiveKit.ts

@@ -9,9 +9,8 @@ import {
   ConnectionState,
   E2EEOptions,
   ExternalE2EEKeyProvider,
-  LocalTrackPublication,
+  LocalVideoTrack,
   Room,
-  RoomEvent,
   RoomOptions,
   Track,
 } from "livekit-client";
@@ -19,7 +18,6 @@ import { useEffect, useMemo, useRef } from "react";
 import E2EEWorker from "livekit-client/e2ee-worker?worker";
 import { logger } from "matrix-js-sdk/src/logger";
 import { MatrixRTCSession } from "matrix-js-sdk/src/matrixrtc/MatrixRTCSession";
-import { BackgroundBlur as backgroundBlur } from "@livekit/track-processors";

 import { defaultLiveKitOptions } from "./options";
 import { SFUConfig } from "./openIDSFU";
@@ -29,7 +27,6 @@ import {
   MediaDevices,
   useMediaDevices,
 } from "./MediaDevicesContext";
-import { backgroundBlur as backgroundBlurSettings } from "../settings/settings";
 import {
   ECConnectionState,
   useECConnectionState,
@@ -37,7 +34,11 @@ import {
 import { MatrixKeyProvider } from "../e2ee/matrixKeyProvider";
 import { E2eeType } from "../e2ee/e2eeType";
 import { EncryptionSystem } from "../e2ee/sharedKeyManagement";
-import { useSetting } from "../settings/settings";
+import {
+  useTrackProcessor,
+  useTrackProcessorSync,
+} from "./TrackProcessorContext";
+import { useInitial } from "../useInitial";

 interface UseLivekitResult {
   livekitRoom?: Room;
@@ -83,22 +84,16 @@ export function useLiveKit(
   const initialMuteStates = useRef<MuteStates>(muteStates);
   const devices = useMediaDevices();
   const initialDevices = useRef<MediaDevices>(devices);
-  const blur = useMemo(() => {
-    let b = undefined;
-    try {
-      b = backgroundBlur(15, { delegate: "GPU" });
-    } catch (e) {
-      logger.error("disable background blur", e);
-    }
-    return b;
-  }, []);
+
+  const { processor } = useTrackProcessor() || {};
+  const initialProcessor = useInitial(() => processor);
   const roomOptions = useMemo(
     (): RoomOptions => ({
       ...defaultLiveKitOptions,
       videoCaptureDefaults: {
         ...defaultLiveKitOptions.videoCaptureDefaults,
         deviceId: initialDevices.current.videoInput.selectedId,
-        processor: blur,
+        processor: initialProcessor,
       },
       audioCaptureDefaults: {
         ...defaultLiveKitOptions.audioCaptureDefaults,
@@ -109,7 +104,7 @@ export function useLiveKit(
       },
       e2ee: e2eeOptions,
     }),
-    [blur, e2eeOptions],
+    [e2eeOptions, initialProcessor],
   );

   // Store if audio/video are currently updating. If to prohibit unnecessary calls
@@ -134,6 +129,15 @@ export function useLiveKit(
     return r;
   }, [roomOptions, e2eeSystem]);

+  const videoTrack = useMemo(
+    () =>
+      Array.from(room.localParticipant.videoTrackPublications.values()).find(
+        (v) => v.source === Track.Source.Camera,
+      )?.track as LocalVideoTrack | null,
+    [room.localParticipant.videoTrackPublications],
+  );
+  useTrackProcessorSync(videoTrack);
+
   const connectionState = useECConnectionState(
     {
       deviceId: initialDevices.current.audioInput.selectedId,
@@ -143,58 +147,6 @@ export function useLiveKit(
     sfuConfig,
   );

-  const [showBackgroundBlur] = useSetting(backgroundBlurSettings);
-  const videoTrackPromise = useRef<
-    undefined | Promise<LocalTrackPublication | undefined>
-  >(undefined);
-
-  useEffect(() => {
-    // Don't even try if we cannot blur on this platform
-    if (!blur) return;
-    if (!room || videoTrackPromise.current) return;
-    const update = async (): Promise<void> => {
-      let publishCallback: undefined | ((track: LocalTrackPublication) => void);
-      videoTrackPromise.current = new Promise<
-        LocalTrackPublication | undefined
-      >((resolve) => {
-        const videoTrack = Array.from(
-          room.localParticipant.videoTrackPublications.values(),
-        ).find((v) => v.source === Track.Source.Camera);
-        if (videoTrack) {
-          resolve(videoTrack);
-        }
-        publishCallback = (videoTrack: LocalTrackPublication): void => {
-          if (videoTrack.source === Track.Source.Camera) {
-            resolve(videoTrack);
-          }
-        };
-        room.on(RoomEvent.LocalTrackPublished, publishCallback);
-      });
-
-      const videoTrack = await videoTrackPromise.current;
-
-      if (publishCallback)
-        room.off(RoomEvent.LocalTrackPublished, publishCallback);
-
-      if (videoTrack !== undefined) {
-        if (
-          showBackgroundBlur &&
-          videoTrack.track?.getProcessor()?.name !== "background-blur"
-        ) {
-          logger.info("Blur: set blur");
-
-          void videoTrack.track?.setProcessor(blur);
-        } else if (
-          videoTrack.track?.getProcessor()?.name === "background-blur"
-        ) {
-          void videoTrack.track?.stopProcessor();
-        }
-      }
-      videoTrackPromise.current = undefined;
-    };
-    void update();
-  }, [blur, room, showBackgroundBlur]);
-
   useEffect(() => {
     // Sync the requested mute states with LiveKit's mute states. We do it this
     // way around rather than using LiveKit as the source of truth, so that the
@@ -261,6 +213,7 @@ export function useLiveKit(
             audioMuteUpdating.current = true;
             trackPublication = await participant.setMicrophoneEnabled(
               buttonEnabled.current.audio,
+              room.options.audioCaptureDefaults,
             );
             audioMuteUpdating.current = false;
             break;
@@ -268,6 +221,7 @@ export function useLiveKit(
             videoMuteUpdating.current = true;
             trackPublication = await participant.setCameraEnabled(
               buttonEnabled.current.video,
+              room.options.videoCaptureDefaults,
            );
             videoMuteUpdating.current = false;
             break;

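The two hunks just above also start passing the room's configured capture defaults into the mute toggles. Presumably this is so that a track recreated when unmuting is captured with the same deviceId and processor as the initial track; a sketch of that call shape (the helper name is illustrative, not from the commit):

import { Room } from "livekit-client";

// Sketch: re-enable the camera with the room's capture defaults so the
// recreated track picks up the configured deviceId and processor again.
async function unmuteCamera(room: Room): Promise<void> {
  await room.localParticipant.setCameraEnabled(
    true,
    room.options.videoCaptureDefaults,
  );
}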
src/room/LobbyView.tsx

@@ -13,10 +13,13 @@ import classNames from "classnames";
 import { useHistory } from "react-router-dom";
 import { logger } from "matrix-js-sdk/src/logger";
 import { usePreviewTracks } from "@livekit/components-react";
-import { LocalVideoTrack, Track } from "livekit-client";
+import {
+  CreateLocalTracksOptions,
+  LocalVideoTrack,
+  Track,
+} from "livekit-client";
 import { useObservable } from "observable-hooks";
 import { map } from "rxjs";
-import { BackgroundBlur as backgroundBlur } from "@livekit/track-processors";

 import inCallStyles from "./InCallView.module.css";
 import styles from "./LobbyView.module.css";
@@ -33,14 +36,16 @@ import {
   VideoButton,
 } from "../button/Button";
 import { SettingsModal, defaultSettingsTab } from "../settings/SettingsModal";
-import { backgroundBlur as backgroundBlurSettings } from "../settings/settings";
 import { useMediaQuery } from "../useMediaQuery";
 import { E2eeType } from "../e2ee/e2eeType";
 import { Link } from "../button/Link";
 import { useMediaDevices } from "../livekit/MediaDevicesContext";
 import { useInitial } from "../useInitial";
 import { useSwitchCamera as useShowSwitchCamera } from "./useSwitchCamera";
-import { useSetting } from "../settings/settings";
+import {
+  useTrackProcessor,
+  useTrackProcessorSync,
+} from "../livekit/TrackProcessorContext";

 interface Props {
   client: MatrixClient;
@@ -111,20 +116,10 @@ export const LobbyView: FC<Props> = ({
     muteStates.audio.enabled && { deviceId: devices.audioInput.selectedId },
   );

-  const blur = useMemo(() => {
-    let b = undefined;
-    try {
-      b = backgroundBlur(15, { delegate: "GPU" });
-    } catch (e) {
-      logger.error(
-        "disable background blur because its not supported by the platform.",
-        e,
-      );
-    }
-    return b;
-  }, []);
+  const { processor } = useTrackProcessor() || {};

-  const localTrackOptions = useMemo(
+  const initialProcessor = useInitial(() => processor);
+  const localTrackOptions = useMemo<CreateLocalTracksOptions>(
     () => ({
       // The only reason we request audio here is to get the audio permission
       // request over with at the same time. But changing the audio settings
@@ -135,14 +130,14 @@ export const LobbyView: FC<Props> = ({
       audio: Object.assign({}, initialAudioOptions),
       video: muteStates.video.enabled && {
         deviceId: devices.videoInput.selectedId,
-        // It should be possible to set a processor here:
-        // processor: blur,
+        processor: initialProcessor,
       },
     }),
     [
       initialAudioOptions,
       muteStates.video.enabled,
       devices.videoInput.selectedId,
+      initialProcessor,
     ],
   );

@@ -157,28 +152,11 @@ export const LobbyView: FC<Props> = ({

   const tracks = usePreviewTracks(localTrackOptions, onError);

-  const videoTrack = useMemo(
-    () =>
-      (tracks?.find((t) => t.kind === Track.Kind.Video) ??
-        null) as LocalVideoTrack | null,
-    [tracks],
-  );
-
-  const [showBackgroundBlur] = useSetting(backgroundBlurSettings);
-
-  useEffect(() => {
-    // Fon't even try if we cannot blur on this platform
-    if (!blur) return;
-    const updateBlur = async (showBlur: boolean): Promise<void> => {
-      if (showBlur && !videoTrack?.getProcessor()) {
-        await videoTrack?.setProcessor(blur);
-      } else {
-        await videoTrack?.stopProcessor();
-      }
-    };
-    if (videoTrack) void updateBlur(showBackgroundBlur);
-  }, [videoTrack, showBackgroundBlur, blur]);
-
+  const videoTrack = useMemo(() => {
+    const track = tracks?.find((t) => t.kind === Track.Kind.Video);
+    return track as LocalVideoTrack | null;
+  }, [tracks]);
+  useTrackProcessorSync(videoTrack);
   const showSwitchCamera = useShowSwitchCamera(
     useObservable(
       (inputs) => inputs.pipe(map(([video]) => video)),

src/settings/SettingsModal.tsx

@@ -5,12 +5,10 @@ SPDX-License-Identifier: AGPL-3.0-only
 Please see LICENSE in the repository root for full details.
 */

-import { ChangeEvent, FC, ReactNode, useCallback, useState } from "react";
+import { ChangeEvent, FC, ReactNode, useCallback, useEffect, useState } from "react";
 import { Trans, useTranslation } from "react-i18next";
 import { MatrixClient } from "matrix-js-sdk/src/matrix";
 import { Root as Form, Separator, Text } from "@vector-im/compound-web";
-import { BackgroundBlur as backgroundBlur } from "@livekit/track-processors";
-import { logger } from "matrix-js-sdk/src/logger";

 import { Modal } from "../Modal";
 import styles from "./SettingsModal.module.css";
@@ -36,6 +34,7 @@ import { isFirefox } from "../Platform";
 import { PreferencesSettingsTab } from "./PreferencesSettingsTab";
 import { Slider } from "../Slider";
 import { DeviceSelection } from "./DeviceSelection";
+import { useTrackProcessor } from "../livekit/TrackProcessorContext";

 type SettingsTab =
   | "audio"
@@ -75,18 +74,11 @@ export const SettingsModal: FC<Props> = ({

   // Generate a `Checkbox` input to turn blur on or off.
   const BlurCheckbox: React.FC = (): ReactNode => {
-    const [blur, setBlur] = useSetting(backgroundBlurSetting);
-    let canBlur = true;
-    try {
-      backgroundBlur(15);
-    } catch (e) {
-      logger.debug(
-        "Cannot blur, so we do not show the option in settings. error: ",
-        e,
-      );
-      canBlur = false;
-      setBlur(false);
-    }
+    const { supported, checkSupported } = useTrackProcessor() || {};
+    useEffect(() => checkSupported?.(), [checkSupported]);
+
+    const [blurActive, setBlurActive] = useSetting(backgroundBlurSetting);
+
     return (
       <>
         <h4>{t("settings.background_blur_header")}</h4>
@@ -96,12 +88,12 @@ export const SettingsModal: FC<Props> = ({
             id="activateBackgroundBlur"
             label={t("settings.background_blur_label")}
             description={
-              canBlur ? "" : t("settings.blur_not_supported_by_browser")
+              supported ? "" : t("settings.blur_not_supported_by_browser")
             }
             type="checkbox"
-            checked={blur}
-            onChange={(b): void => setBlur(b.target.checked)}
-            disabled={!canBlur}
+            checked={!!blurActive}
+            onChange={(b): void => setBlurActive(b.target.checked)}
+            disabled={!supported}
           />
         </FieldRow>
       </>