Get shared components into parity with current version of prebuilt
parent 9a39dc2410
commit 838948bf93
@@ -22,9 +22,7 @@ export const NetworkAside = () => {
   }, [callObject]);
 
   useEffect(() => {
-    if (!callObject) {
-      return;
-    }
+    if (!callObject) return;
 
     updateStats();
 
@@ -38,7 +36,7 @@ export const NetworkAside = () => {
       Math.round(
         (networkStats?.stats?.latest?.videoRecvBitsPerSecond ?? 0) / 1000
       ),
-    [networkStats]
+    [networkStats?.stats?.latest?.videoRecvBitsPerSecond]
   );
 
   const uploadKbs = useMemo(
@@ -46,7 +44,7 @@ export const NetworkAside = () => {
       Math.round(
         (networkStats?.stats?.latest?.videoSendBitsPerSecond ?? 0) / 1000
       ),
-    [networkStats]
+    [networkStats?.stats?.latest?.videoSendBitsPerSecond]
   );
 
   if (!showAside || showAside !== NETWORK_ASIDE) {
@@ -8,22 +8,30 @@
  * into into a single audio node using the CombinedAudioTrack component
  */
 import React, { useEffect, useMemo } from 'react';
+import { useCallState } from '@custom/shared/contexts/CallProvider';
 import { useTracks } from '@custom/shared/contexts/TracksProvider';
+import { useUIState } from '@custom/shared/contexts/UIStateProvider';
+import { isScreenId } from '@custom/shared/contexts/participantsState';
 import Bowser from 'bowser';
 import { Portal } from 'react-portal';
 import AudioTrack from './AudioTrack';
 import CombinedAudioTrack from './CombinedAudioTrack';
 
 
 export const Audio = () => {
+  const { disableAudio } = useCallState();
   const { audioTracks } = useTracks();
+  const { setShowAutoplayFailedModal } = useUIState();
 
   const renderedTracks = useMemo(
     () =>
-      Object.entries(audioTracks).reduce(
-        (tracks, [id, track]) => ({ ...tracks, [id]: track }),
-        {}
-      ),
-    [audioTracks]
+      Object.entries(audioTracks).reduce((tracks, [id, track]) => {
+        if (!disableAudio || isScreenId(id)) {
+          tracks[id] = track;
+        }
+        return tracks;
+      }, {}),
+    [audioTracks, disableAudio]
   );
 
   // On iOS safari, when headphones are disconnected, all audio elements are paused.
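
Note: the reduce above keeps screen-share audio even when `disableAudio` is set. A minimal sketch of that rule as a standalone function (the name `filterAudioTracks` is illustrative; the component inlines this inside `useMemo`):

const filterAudioTracks = (audioTracks, disableAudio, isScreenId) =>
  Object.entries(audioTracks).reduce((tracks, [id, track]) => {
    // keep every track when audio is enabled; otherwise keep only screen audio
    if (!disableAudio || isScreenId(id)) {
      tracks[id] = track;
    }
    return tracks;
  }, {});
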
@@ -32,13 +40,15 @@ export const Audio = () => {
   // To fix that, we call `play` on each audio track on all devicechange events.
   useEffect(() => {
     const playTracks = () => {
-      document.querySelectorAll('.audioTracks audio').forEach(async (audio) => {
+      document
+        .querySelectorAll('.audioTracks audio')
+        .forEach(async (audio) => {
           try {
             if (audio.paused && audio.readyState === audio.HAVE_ENOUGH_DATA) {
               await audio?.play();
             }
           } catch (e) {
-            // Auto play failed
+            setShowAutoplayFailedModal(true);
           }
         });
     };
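
Note: this is the iOS Safari workaround described by the comments above: when headphones are disconnected, paused audio elements are nudged back into playback on every devicechange. A self-contained sketch under the assumption that the handler is registered the same way the `removeEventListener` in the next hunk implies; `installDevicechangeResume` and its callback are illustrative names, not part of the commit:

const installDevicechangeResume = (onAutoplayFailed) => {
  const playTracks = () => {
    document.querySelectorAll('.audioTracks audio').forEach(async (audio) => {
      try {
        // only nudge elements that stalled but have buffered enough to play
        if (audio.paused && audio.readyState === audio.HAVE_ENOUGH_DATA) {
          await audio.play();
        }
      } catch (e) {
        onAutoplayFailed?.(e);
      }
    });
  };
  navigator.mediaDevices.addEventListener('devicechange', playTracks);
  return () =>
    navigator.mediaDevices.removeEventListener('devicechange', playTracks);
};
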
@@ -46,11 +56,15 @@ export const Audio = () => {
     return () => {
       navigator.mediaDevices.removeEventListener('devicechange', playTracks);
     };
-  }, []);
+  }, [setShowAutoplayFailedModal]);
 
   const tracksComponent = useMemo(() => {
-    const { browser } = Bowser.parse(navigator.userAgent);
-    if (browser.name === 'Chrome' && parseInt(browser.version, 10) >= 92) {
+    const { browser, platform, os } = Bowser.parse(navigator.userAgent);
+    if (
+      browser.name === 'Chrome' &&
+      parseInt(browser.version, 10) >= 92 &&
+      (platform.type === 'desktop' || os.name === 'Android')
+    ) {
       return <CombinedAudioTrack tracks={renderedTracks} />;
     }
     return Object.entries(renderedTracks).map(([id, track]) => (
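
Note: the widened condition now also requires a desktop platform or Android before a single combined audio node is used. The same check pulled out as a pure helper for clarity (a sketch only; `shouldCombineAudioTracks` is not part of the commit, which reads `navigator.userAgent` inline):

import Bowser from 'bowser';

const shouldCombineAudioTracks = (userAgent) => {
  const { browser, platform, os } = Bowser.parse(userAgent);
  return (
    browser.name === 'Chrome' &&
    parseInt(browser.version, 10) >= 92 &&
    (platform.type === 'desktop' || os.name === 'Android')
  );
};
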
@@ -1,38 +1,35 @@
 import React, { useRef, useEffect } from 'react';
+import { useUIState } from '@custom/shared/contexts/UIStateProvider';
 import PropTypes from 'prop-types';
 
-const AudioTrack = ({ track }) => {
+export const AudioTrack = ({ track }) => {
   const audioRef = useRef(null);
+  const { setShowAutoplayFailedModal } = useUIState();
 
   useEffect(() => {
-    if (!audioRef.current) return false;
+    const audioTag = audioRef.current;
+    if (!audioTag) return false;
     let playTimeout;
 
     const handleCanPlay = () => {
       playTimeout = setTimeout(() => {
-        console.log('Unable to autoplay audio element');
+        setShowAutoplayFailedModal(true);
       }, 1500);
     };
     const handlePlay = () => {
       clearTimeout(playTimeout);
     };
-    audioRef.current.addEventListener('canplay', handleCanPlay);
-    audioRef.current.addEventListener('play', handlePlay);
-    audioRef.current.srcObject = new MediaStream([track]);
+    audioTag.addEventListener('canplay', handleCanPlay);
+    audioTag.addEventListener('play', handlePlay);
+    audioTag.srcObject = new MediaStream([track]);
 
-    const audioEl = audioRef.current;
-
     return () => {
-      audioEl?.removeEventListener('canplay', handleCanPlay);
-      audioEl?.removeEventListener('play', handlePlay);
+      audioTag?.removeEventListener('canplay', handleCanPlay);
+      audioTag?.removeEventListener('play', handlePlay);
     };
-  }, [track]);
+  }, [setShowAutoplayFailedModal, track]);
 
-  return track ? (
-    <audio autoPlay playsInline ref={audioRef}>
-      <track kind="captions" />
-    </audio>
-  ) : null;
+  return track ? <audio autoPlay playsInline ref={audioRef} /> : null;
 };
 
 AudioTrack.propTypes = {
@@ -2,7 +2,7 @@ import React, { useEffect, useRef } from 'react';
 import PropTypes from 'prop-types';
 import { useDeepCompareEffect, useDeepCompareMemo } from 'use-deep-compare';
 
-const CombinedAudioTrack = ({ tracks }) => {
+export const CombinedAudioTrack = ({ tracks }) => {
   const audioEl = useRef(null);
 
   useEffect(() => {
@@ -25,12 +25,21 @@ const CombinedAudioTrack = ({ tracks }) => {
     allTracks.forEach((track) => {
       const persistentTrack = track?.persistentTrack;
       if (persistentTrack) {
+        switch (persistentTrack.readyState) {
+          case 'ended':
+            stream.removeTrack(persistentTrack);
+            break;
+          case 'live':
         persistentTrack.addEventListener(
           'ended',
-          (ev) => stream.removeTrack(ev.target),
+          (ev) => {
+            stream.removeTrack(ev.target);
+          },
           { once: true }
         );
         stream.addTrack(persistentTrack);
+            break;
+        }
       }
     });
 
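
Note: the switch above only attaches tracks that are still `live` and drops ones that already `ended`. The same logic as a standalone sketch (the helper name is illustrative):

const syncTrackWithStream = (stream, persistentTrack) => {
  if (!persistentTrack) return;
  switch (persistentTrack.readyState) {
    case 'ended':
      // removing a track that was never added is a harmless no-op
      stream.removeTrack(persistentTrack);
      break;
    case 'live':
      persistentTrack.addEventListener(
        'ended',
        (ev) => stream.removeTrack(ev.target),
        { once: true }
      );
      stream.addTrack(persistentTrack);
      break;
  }
};
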
@@ -53,11 +62,7 @@ const CombinedAudioTrack = ({ tracks }) => {
     playAudio();
   }, [tracks, trackIds]);
 
-  return (
-    <audio autoPlay playsInline ref={audioEl}>
-      <track kind="captions" />
-    </audio>
-  );
+  return <audio autoPlay playsInline ref={audioEl} />;
 };
 
 CombinedAudioTrack.propTypes = {
@@ -6,6 +6,7 @@ export const Capsule = ({ children, variant }) => (
   <span className={classNames('capsule', variant)}>
     {children}
     <style jsx>{`
+      .capsule {
       display: inline-flex;
       padding: 4px 6px;
       margin: 0 6px;
@@ -17,7 +18,7 @@ export const Capsule = ({ children, variant }) => (
       font-weight: var(--weight-bold);
       text-transform: uppercase;
       letter-spacing: 1px;
-
+      }
       .capsule.success {
         background-color: var(--green-default);
         color: #ffffff;
@@ -50,7 +50,9 @@ export const CardBody = ({ children }) => (
   <div className="card-body">
     {children}
     <style jsx>{`
+      .card-body {
       color: var(--text-mid);
+      }
 
       & + :global(.card-footer) {
         margin-top: var(--spacing-md);
@@ -105,17 +105,12 @@ export const HairCheck = () => {
   ]);
 
   const hasError = useMemo(() => {
-    if (
-      !deviceState ||
+    return !(!deviceState ||
       [
         DEVICE_STATE_LOADING,
         DEVICE_STATE_PENDING,
         DEVICE_STATE_GRANTED,
-      ].includes(deviceState)
-    ) {
-      return false;
-    }
-    return true;
+      ].includes(deviceState));
   }, [deviceState]);
 
   const camErrorVerbose = useMemo(() => {
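
Note: the rewritten memo is a De Morgan-style collapse of the old early returns; `hasError` is true only when a device state exists and is not loading, pending, or granted. The equivalent predicate as a sketch, with the accepted states passed in rather than imported:

const hasDeviceError = (deviceState, okStates) =>
  Boolean(deviceState) && !okStates.includes(deviceState);
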
@@ -1,10 +1,36 @@
-import React, { useMemo, forwardRef, memo, useEffect } from 'react';
+import React, { useMemo, forwardRef, memo, useEffect, useState } from 'react';
+import { useCallState } from '@custom/shared/contexts/CallProvider';
+import { useUIState } from '@custom/shared/contexts/UIStateProvider';
+import { isScreenId } from '@custom/shared/contexts/participantsState';
 import Bowser from 'bowser';
+import classNames from 'classnames';
 import PropTypes from 'prop-types';
 import { shallowEqualObjects } from 'shallow-equal';
+import { useDeepCompareMemo } from 'use-deep-compare';
 
 export const Video = memo(
-  forwardRef(({ participantId, videoTrack, ...rest }, videoEl) => {
+  forwardRef(({ fit = 'contain', participantId, videoTrack, ...rest }, videoEl) => {
+    const { callObject } = useCallState();
+    const { isMobile } = useUIState();
+
+    const isLocalCam = useMemo(() => {
+      const localParticipant = callObject.participants()?.local;
+      return participantId === localParticipant.session_id && !isScreenId(participantId);
+    }, [callObject, participantId]);
+
+    const [isMirrored, setIsMirrored] = useState(isLocalCam);
+
+    /**
+     * Considered as playable video:
+     * - local cam feed
+     * - any screen share
+     * - remote cam feed that is subscribed and reported as playable
+     */
+    const isPlayable = useDeepCompareMemo(
+      () => isLocalCam || isScreenId(participantId),
+      [isLocalCam, isScreenId(participantId)]
+    );
+
     /**
      * Memo: Chrome >= 92?
      * See: https://bugs.chromium.org/p/chromium/issues/detail?id=1232649
@@ -19,33 +45,114 @@ export const Video = memo(
     }, []);
 
     /**
-     * Effect: Umount
-     * Note: nullify src to ensure media object is not counted
+     * Determine if video needs to be mirrored.
+     */
+    useEffect(() => {
+      if (!videoTrack) return;
+
+      const videoTrackSettings = videoTrack.getSettings();
+      const isUsersFrontCamera =
+        'facingMode' in videoTrackSettings
+          ? isLocalCam && videoTrackSettings.facingMode === 'user'
+          : isLocalCam;
+      // only apply mirror effect to user facing camera
+      if (isMirrored !== isUsersFrontCamera) {
+        setIsMirrored(isUsersFrontCamera);
+      }
+    }, [isMirrored, isLocalCam, videoTrack]);
+
+    /**
+     * Handle canplay & picture-in-picture events.
      */
     useEffect(() => {
       const video = videoEl.current;
-      if (!video) return false;
-      // clean up when video renders for different participant
-      video.srcObject = null;
-      if (isChrome92) video.load();
-      return () => {
-        // clean up when unmounted
-        video.srcObject = null;
-        if (isChrome92) video.load();
+      if (!video) return;
+      const handleCanPlay = () => {
+        if (!video.paused) return;
+        video.play();
       };
-    }, [videoEl, isChrome92, participantId]);
+      const handleEnterPIP = () => {
+        video.style.transform = 'scale(1)';
+      };
+      const handleLeavePIP = () => {
+        video.style.transform = '';
+        setTimeout(() => {
+          if (video.paused) video.play();
+        }, 100);
+      };
+      video.addEventListener('canplay', handleCanPlay);
+      video.addEventListener('enterpictureinpicture', handleEnterPIP);
+      video.addEventListener('leavepictureinpicture', handleLeavePIP);
+      return () => {
+        video.removeEventListener('canplay', handleCanPlay);
+        video.removeEventListener('enterpictureinpicture', handleEnterPIP);
+        video.removeEventListener('leavepictureinpicture', handleLeavePIP);
+      };
+    }, [isChrome92, videoEl]);
 
     /**
-     * Effect: mount source (and force load on Chrome)
+     * Update srcObject.
      */
     useEffect(() => {
       const video = videoEl.current;
       if (!video || !videoTrack) return;
       video.srcObject = new MediaStream([videoTrack]);
       if (isChrome92) video.load();
-    }, [videoEl, isChrome92, videoTrack]);
+      return () => {
+        // clean up when unmounted
+        video.srcObject = null;
+        if (isChrome92) video.load();
+      };
+    }, [isChrome92, participantId, videoEl, videoTrack, videoTrack?.id]);
 
-    return <video autoPlay muted playsInline ref={videoEl} {...rest} />;
+    return (
+      <>
+        <video
+          className={classNames(fit, {
+            isMirrored,
+            isMobile,
+            playable: isPlayable && videoTrack?.enabled,
+          })}
+          autoPlay
+          muted
+          playsInline
+          ref={videoEl}
+          {...props}
+        />
+        <style jsx>{`
+          video {
+            opacity: 0;
+          }
+          video.playable {
+            opacity: 1;
+          }
+          video.isMirrored {
+            transform: scale(-1, 1);
+          }
+          video.isMobile {
+            border-radius: 4px;
+            display: block;
+            height: 100%;
+            position: relative;
+            width: 100%;
+          }
+          video:not(.isMobile) {
+            height: calc(100% + 4px);
+            left: -2px;
+            object-position: center;
+            position: absolute;
+            top: -2px;
+            width: calc(100% + 4px);
+          }
+          video.contain {
+            object-fit: contain;
+          }
+          video.cover {
+            object-fit: cover;
+          }
+        `}</style>
+      </>
+    );
   }),
   (p, n) => shallowEqualObjects(p, n)
 );
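
Note: mirroring is now derived from the track's reported `facingMode`, so only the local, user-facing camera gets flipped. The rule as a small sketch (`shouldMirror` is an illustrative name, not part of the commit):

const shouldMirror = (videoTrack, isLocalCam) => {
  if (!videoTrack) return false;
  const settings = videoTrack.getSettings();
  // mirror only a local camera that reports itself as user-facing
  return 'facingMode' in settings
    ? isLocalCam && settings.facingMode === 'user'
    : isLocalCam;
};
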
@@ -20,6 +20,7 @@ export const UIStateProvider = ({
   customTrayComponent,
   children,
 }) => {
+  const [isMobile, setIsMobile] = useState(false);
   const [pinnedId, setPinnedId] = useState(null);
   const [preferredViewMode, setPreferredViewMode] = useState(VIEW_MODE_SPEAKER);
   const [viewMode, setViewMode] = useState(preferredViewMode);
@@ -28,6 +29,7 @@ export const UIStateProvider = ({
   const [showAside, setShowAside] = useState();
   const [activeModals, setActiveModals] = useState({});
   const [customCapsule, setCustomCapsule] = useState();
+  const [showAutoplayFailedModal, setShowAutoplayFailedModal] = useState(false);
 
   const openModal = useCallback((modalName) => {
     setActiveModals((prevState) => ({
@@ -87,6 +89,10 @@ export const UIStateProvider = ({
         setShowParticipantsBar,
         customCapsule,
         setCustomCapsule,
+        showAutoplayFailedModal,
+        setShowAutoplayFailedModal,
+        isMobile,
+        setIsMobile,
       }}
     >
       {children}
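
Note: with `showAutoplayFailedModal` and `isMobile` exposed on the context value, any descendant of the provider can read them. A hedged usage sketch; the `AutoplayNotice` component and its markup are illustrative, not part of the commit:

import React from 'react';
import { useUIState } from '@custom/shared/contexts/UIStateProvider';

const AutoplayNotice = () => {
  const { showAutoplayFailedModal, setShowAutoplayFailedModal } = useUIState();
  if (!showAutoplayFailedModal) return null;
  return (
    <button onClick={() => setShowAutoplayFailedModal(false)}>
      Tap to enable audio
    </button>
  );
};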