Get shared components into parity with current version of prebuilt

harshithpabbati 2021-12-17 16:20:09 +05:30
parent 9a39dc2410
commit 838948bf93
9 changed files with 210 additions and 85 deletions


@@ -22,9 +22,7 @@ export const NetworkAside = () => {
   }, [callObject]);
   useEffect(() => {
-    if (!callObject) {
-      return;
-    }
+    if (!callObject) return;
     updateStats();
@@ -38,7 +36,7 @@ export const NetworkAside = () => {
       Math.round(
         (networkStats?.stats?.latest?.videoRecvBitsPerSecond ?? 0) / 1000
       ),
-    [networkStats]
+    [networkStats?.stats?.latest?.videoRecvBitsPerSecond]
   );
   const uploadKbs = useMemo(
@@ -46,7 +44,7 @@ export const NetworkAside = () => {
       Math.round(
         (networkStats?.stats?.latest?.videoSendBitsPerSecond ?? 0) / 1000
       ),
-    [networkStats]
+    [networkStats?.stats?.latest?.videoSendBitsPerSecond]
   );
   if (!showAside || showAside !== NETWORK_ASIDE) {
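Both memos now depend on the primitive leaf value rather than the whole `networkStats` object, so the derived kbps figures only recompute when the underlying bitrate number actually changes. A minimal standalone sketch of the pattern (the stats shape is assumed from the hunks above):

```jsx
import { useMemo } from 'react';

// Hypothetical hook illustrating the narrowed-dependency pattern above.
const useDownloadKbs = (networkStats) =>
  useMemo(
    () =>
      Math.round(
        (networkStats?.stats?.latest?.videoRecvBitsPerSecond ?? 0) / 1000
      ),
    // Depending on the primitive value means a new `networkStats` object
    // identity with the same bitrate does not trigger a recompute.
    [networkStats?.stats?.latest?.videoRecvBitsPerSecond]
  );
```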


@@ -8,22 +8,30 @@
  * into a single audio node using the CombinedAudioTrack component
  */
 import React, { useEffect, useMemo } from 'react';
+import { useCallState } from '@custom/shared/contexts/CallProvider';
 import { useTracks } from '@custom/shared/contexts/TracksProvider';
+import { useUIState } from '@custom/shared/contexts/UIStateProvider';
+import { isScreenId } from '@custom/shared/contexts/participantsState';
 import Bowser from 'bowser';
-import { Portal } from 'react-portal';
 import AudioTrack from './AudioTrack';
 import CombinedAudioTrack from './CombinedAudioTrack';
 export const Audio = () => {
+  const { disableAudio } = useCallState();
   const { audioTracks } = useTracks();
+  const { setShowAutoplayFailedModal } = useUIState();
   const renderedTracks = useMemo(
     () =>
-      Object.entries(audioTracks).reduce(
-        (tracks, [id, track]) => ({ ...tracks, [id]: track }),
-        {}
-      ),
-    [audioTracks]
+      Object.entries(audioTracks).reduce((tracks, [id, track]) => {
+        if (!disableAudio || isScreenId(id)) {
+          tracks[id] = track;
+        }
+        return tracks;
+      }, {}),
+    [audioTracks, disableAudio]
   );
   // On iOS safari, when headphones are disconnected, all audio elements are paused.
@@ -32,13 +40,15 @@ export const Audio = () => {
   // To fix that, we call `play` on each audio track on all devicechange events.
   useEffect(() => {
     const playTracks = () => {
-      document.querySelectorAll('.audioTracks audio').forEach(async (audio) => {
+      document
+        .querySelectorAll('.audioTracks audio')
+        .forEach(async (audio) => {
           try {
             if (audio.paused && audio.readyState === audio.HAVE_ENOUGH_DATA) {
               await audio?.play();
             }
           } catch (e) {
-            // Auto play failed
+            setShowAutoplayFailedModal(true);
           }
         });
     };
@@ -46,11 +56,15 @@ export const Audio = () => {
     return () => {
       navigator.mediaDevices.removeEventListener('devicechange', playTracks);
     };
-  }, []);
+  }, [setShowAutoplayFailedModal]);
   const tracksComponent = useMemo(() => {
-    const { browser } = Bowser.parse(navigator.userAgent);
-    if (browser.name === 'Chrome' && parseInt(browser.version, 10) >= 92) {
+    const { browser, platform, os } = Bowser.parse(navigator.userAgent);
+    if (
+      browser.name === 'Chrome' &&
+      parseInt(browser.version, 10) >= 92 &&
+      (platform.type === 'desktop' || os.name === 'Android')
+    ) {
       return <CombinedAudioTrack tracks={renderedTracks} />;
     }
     return Object.entries(renderedTracks).map(([id, track]) => (
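Two behaviors change here: audio tracks are filtered so a call configured with `disableAudio` still plays screen-share audio, and the single combined audio element is only used where Chrome >= 92 actually benefits from it (desktop and Android; Chrome on iOS runs on WebKit). A standalone sketch of the filter, assuming the track map shape provided by TracksProvider:

```jsx
import { isScreenId } from '@custom/shared/contexts/participantsState';

// audioTracks: { [participantOrScreenId]: trackState }
const filterAudibleTracks = (audioTracks, disableAudio) =>
  Object.entries(audioTracks).reduce((tracks, [id, track]) => {
    // With call-wide audio disabled, only screen-share audio is kept,
    // so shared tabs and windows stay audible.
    if (!disableAudio || isScreenId(id)) {
      tracks[id] = track;
    }
    return tracks;
  }, {});
```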


@@ -1,38 +1,35 @@
 import React, { useRef, useEffect } from 'react';
+import { useUIState } from '@custom/shared/contexts/UIStateProvider';
 import PropTypes from 'prop-types';
-const AudioTrack = ({ track }) => {
+export const AudioTrack = ({ track }) => {
   const audioRef = useRef(null);
+  const { setShowAutoplayFailedModal } = useUIState();
   useEffect(() => {
-    if (!audioRef.current) return false;
+    const audioTag = audioRef.current;
+    if (!audioTag) return false;
     let playTimeout;
     const handleCanPlay = () => {
       playTimeout = setTimeout(() => {
-        console.log('Unable to autoplay audio element');
+        setShowAutoplayFailedModal(true);
       }, 1500);
     };
     const handlePlay = () => {
       clearTimeout(playTimeout);
     };
-    audioRef.current.addEventListener('canplay', handleCanPlay);
-    audioRef.current.addEventListener('play', handlePlay);
-    audioRef.current.srcObject = new MediaStream([track]);
-    const audioEl = audioRef.current;
+    audioTag.addEventListener('canplay', handleCanPlay);
+    audioTag.addEventListener('play', handlePlay);
+    audioTag.srcObject = new MediaStream([track]);
     return () => {
-      audioEl?.removeEventListener('canplay', handleCanPlay);
-      audioEl?.removeEventListener('play', handlePlay);
+      audioTag?.removeEventListener('canplay', handleCanPlay);
+      audioTag?.removeEventListener('play', handlePlay);
     };
-  }, [track]);
+  }, [setShowAutoplayFailedModal, track]);
-  return track ? (
-    <audio autoPlay playsInline ref={audioRef}>
-      <track kind="captions" />
-    </audio>
-  ) : null;
+  return track ? <audio autoPlay playsInline ref={audioRef} /> : null;
 };
 AudioTrack.propTypes = {
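The timeout-based detection is the core pattern here: once the element signals `canplay`, a `play` event should follow almost immediately; if it has not within 1.5 s, the browser most likely blocked autoplay and the UI flags it. A framework-free sketch of the same watchdog (element and callback names are illustrative):

```jsx
// audioEl: an <audio autoplay> element whose srcObject is already set.
const watchAutoplay = (audioEl, onAutoplayFailed) => {
  let playTimeout;
  const handleCanPlay = () => {
    // Media is ready; if no 'play' event follows shortly, autoplay
    // was almost certainly blocked (no prior user gesture).
    playTimeout = setTimeout(onAutoplayFailed, 1500);
  };
  const handlePlay = () => clearTimeout(playTimeout);
  audioEl.addEventListener('canplay', handleCanPlay);
  audioEl.addEventListener('play', handlePlay);
  // Returns a cleanup function, mirroring the effect above.
  return () => {
    clearTimeout(playTimeout);
    audioEl.removeEventListener('canplay', handleCanPlay);
    audioEl.removeEventListener('play', handlePlay);
  };
};
```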


@@ -2,7 +2,7 @@ import React, { useEffect, useRef } from 'react';
 import PropTypes from 'prop-types';
 import { useDeepCompareEffect, useDeepCompareMemo } from 'use-deep-compare';
-const CombinedAudioTrack = ({ tracks }) => {
+export const CombinedAudioTrack = ({ tracks }) => {
   const audioEl = useRef(null);
   useEffect(() => {
@@ -25,12 +25,21 @@ const CombinedAudioTrack = ({ tracks }) => {
     allTracks.forEach((track) => {
       const persistentTrack = track?.persistentTrack;
       if (persistentTrack) {
+        switch (persistentTrack.readyState) {
+          case 'ended':
+            stream.removeTrack(persistentTrack);
+            break;
+          case 'live':
             persistentTrack.addEventListener(
               'ended',
-              (ev) => stream.removeTrack(ev.target),
+              (ev) => {
+                stream.removeTrack(ev.target);
+              },
               { once: true }
             );
             stream.addTrack(persistentTrack);
+            break;
+        }
       }
     });
@@ -53,11 +62,7 @@ const CombinedAudioTrack = ({ tracks }) => {
     playAudio();
   }, [tracks, trackIds]);
-  return (
-    <audio autoPlay playsInline ref={audioEl}>
-      <track kind="captions" />
-    </audio>
-  );
+  return <audio autoPlay playsInline ref={audioEl} />;
 };
 CombinedAudioTrack.propTypes = {
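The switch handles a subtle lifecycle gap: a `persistentTrack` can already be in the `'ended'` state by the time the effect re-runs, and an `'ended'` listener attached at that point will never fire, so the track must be removed eagerly. A standalone sketch of the same stream bookkeeping:

```jsx
// Keep a MediaStream in sync with a MediaStreamTrack's lifecycle.
const syncTrackWithStream = (stream, track) => {
  switch (track.readyState) {
    case 'ended':
      // An already-ended track will not emit 'ended' again.
      stream.removeTrack(track);
      break;
    case 'live':
      track.addEventListener(
        'ended',
        (ev) => stream.removeTrack(ev.target),
        { once: true }
      );
      stream.addTrack(track);
      break;
  }
};
```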


@@ -6,6 +6,7 @@ export const Capsule = ({ children, variant }) => (
   <span className={classNames('capsule', variant)}>
     {children}
     <style jsx>{`
+      .capsule {
         display: inline-flex;
         padding: 4px 6px;
         margin: 0 6px;
@@ -17,7 +18,7 @@ export const Capsule = ({ children, variant }) => (
         font-weight: var(--weight-bold);
         text-transform: uppercase;
         letter-spacing: 1px;
       }
       .capsule.success {
         background-color: var(--green-default);
         color: #ffffff;


@@ -50,7 +50,9 @@ export const CardBody = ({ children }) => (
   <div className="card-body">
     {children}
     <style jsx>{`
+      .card-body {
         color: var(--text-mid);
+      }
       & + :global(.card-footer) {
         margin-top: var(--spacing-md);


@@ -105,17 +105,12 @@ export const HairCheck = () => {
   ]);
   const hasError = useMemo(() => {
-    if (
-      !deviceState ||
+    return !(!deviceState ||
       [
         DEVICE_STATE_LOADING,
         DEVICE_STATE_PENDING,
         DEVICE_STATE_GRANTED,
-      ].includes(deviceState)
-    ) {
-      return false;
-    }
-    return true;
+      ].includes(deviceState));
   }, [deviceState]);
   const camErrorVerbose = useMemo(
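The rewrite is a straight De Morgan simplification: instead of returning `false` for the loading/pending/granted states and `true` otherwise, the memo returns the negated condition directly. The equivalent positive form makes the intent explicit (the device-state constants come from this file's existing imports):

```jsx
// hasError is true only when a device state exists and is not one of
// the in-progress or success states, i.e. it is an actual error state.
const hasDeviceError = (deviceState) =>
  Boolean(deviceState) &&
  ![DEVICE_STATE_LOADING, DEVICE_STATE_PENDING, DEVICE_STATE_GRANTED].includes(
    deviceState
  );
```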


@@ -1,10 +1,36 @@
-import React, { useMemo, forwardRef, memo, useEffect } from 'react';
+import React, { useMemo, forwardRef, memo, useEffect, useState } from 'react';
+import { useCallState } from '@custom/shared/contexts/CallProvider';
+import { useUIState } from '@custom/shared/contexts/UIStateProvider';
+import { isScreenId } from '@custom/shared/contexts/participantsState';
 import Bowser from 'bowser';
+import classNames from 'classnames';
 import PropTypes from 'prop-types';
 import { shallowEqualObjects } from 'shallow-equal';
+import { useDeepCompareMemo } from 'use-deep-compare';
 export const Video = memo(
-  forwardRef(({ participantId, videoTrack, ...rest }, videoEl) => {
+  forwardRef(({ fit = 'contain', participantId, videoTrack, ...rest }, videoEl) => {
+    const { callObject } = useCallState();
+    const { isMobile } = useUIState();
+    const isLocalCam = useMemo(() => {
+      const localParticipant = callObject.participants()?.local;
+      return participantId === localParticipant.session_id && !isScreenId(participantId);
+    }, [callObject, participantId]);
+    const [isMirrored, setIsMirrored] = useState(isLocalCam);
+    /**
+     * Considered as playable video:
+     * - local cam feed
+     * - any screen share
+     * - remote cam feed that is subscribed and reported as playable
+     */
+    const isPlayable = useDeepCompareMemo(
+      () => isLocalCam || isScreenId(participantId),
+      [isLocalCam, isScreenId(participantId)]
+    );
     /**
      * Memo: Chrome >= 92?
      * See: https://bugs.chromium.org/p/chromium/issues/detail?id=1232649
@@ -19,33 +45,114 @@ export const Video = memo(
     }, []);
     /**
-     * Effect: Umount
-     * Note: nullify src to ensure media object is not counted
+     * Determine if video needs to be mirrored.
      */
+    useEffect(() => {
+      if (!videoTrack) return;
+      const videoTrackSettings = videoTrack.getSettings();
+      const isUsersFrontCamera =
+        'facingMode' in videoTrackSettings
+          ? isLocalCam && videoTrackSettings.facingMode === 'user'
+          : isLocalCam;
+      // only apply mirror effect to user facing camera
+      if (isMirrored !== isUsersFrontCamera) {
+        setIsMirrored(isUsersFrontCamera);
+      }
+    }, [isMirrored, isLocalCam, videoTrack]);
+    /**
+     * Handle canplay & picture-in-picture events.
+     */
     useEffect(() => {
       const video = videoEl.current;
-      if (!video) return false;
-      // clean up when video renders for different participant
-      video.srcObject = null;
-      if (isChrome92) video.load();
-      return () => {
-        // clean up when unmounted
-        video.srcObject = null;
-        if (isChrome92) video.load();
-      };
-    }, [videoEl, isChrome92, participantId]);
+      if (!video) return;
+      const handleCanPlay = () => {
+        if (!video.paused) return;
+        video.play();
+      };
+      const handleEnterPIP = () => {
+        video.style.transform = 'scale(1)';
+      };
+      const handleLeavePIP = () => {
+        video.style.transform = '';
+        setTimeout(() => {
+          if (video.paused) video.play();
+        }, 100);
+      };
+      video.addEventListener('canplay', handleCanPlay);
+      video.addEventListener('enterpictureinpicture', handleEnterPIP);
+      video.addEventListener('leavepictureinpicture', handleLeavePIP);
+      return () => {
+        video.removeEventListener('canplay', handleCanPlay);
+        video.removeEventListener('enterpictureinpicture', handleEnterPIP);
+        video.removeEventListener('leavepictureinpicture', handleLeavePIP);
+      };
+    }, [isChrome92, videoEl]);
     /**
-     * Effect: mount source (and force load on Chrome)
+     * Update srcObject.
      */
     useEffect(() => {
       const video = videoEl.current;
       if (!video || !videoTrack) return;
       video.srcObject = new MediaStream([videoTrack]);
       if (isChrome92) video.load();
-    }, [videoEl, isChrome92, videoTrack]);
+      return () => {
+        // clean up when unmounted
+        video.srcObject = null;
+        if (isChrome92) video.load();
+      };
+    }, [isChrome92, participantId, videoEl, videoTrack, videoTrack?.id]);
-    return <video autoPlay muted playsInline ref={videoEl} {...rest} />;
+    return (
+      <>
+        <video
+          className={classNames(fit, {
+            isMirrored,
+            isMobile,
+            playable: isPlayable && videoTrack?.enabled,
+          })}
+          autoPlay
+          muted
+          playsInline
+          ref={videoEl}
+          {...rest}
+        />
+        <style jsx>{`
+          video {
+            opacity: 0;
+          }
+          video.playable {
+            opacity: 1;
+          }
+          video.isMirrored {
+            transform: scale(-1, 1);
+          }
+          video.isMobile {
+            border-radius: 4px;
+            display: block;
+            height: 100%;
+            position: relative;
+            width: 100%;
+          }
+          video:not(.isMobile) {
+            height: calc(100% + 4px);
+            left: -2px;
+            object-position: center;
+            position: absolute;
+            top: -2px;
+            width: calc(100% + 4px);
+          }
+          video.contain {
+            object-fit: contain;
+          }
+          video.cover {
+            object-fit: cover;
+          }
+        `}</style>
+      </>
+    );
   }),
   (p, n) => shallowEqualObjects(p, n)
 );
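Worth noting in the new mirror effect: `MediaTrackSettings.facingMode` is typically only populated on devices that can report a camera orientation (mostly mobile), so the logic falls back to mirroring any local camera when it is absent; the mirror itself is just `transform: scale(-1, 1)` in the styles above. A standalone sketch of the decision (the function name is illustrative):

```jsx
// Decide whether a video element should be mirrored. Only the user's
// own front-facing camera is mirrored; rear cameras and remote feeds
// render unmirrored.
const shouldMirror = (videoTrack, isLocalCam) => {
  if (!videoTrack) return isLocalCam;
  const settings = videoTrack.getSettings();
  return 'facingMode' in settings
    ? isLocalCam && settings.facingMode === 'user'
    : isLocalCam;
};
```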


@@ -20,6 +20,7 @@ export const UIStateProvider = ({
   customTrayComponent,
   children,
 }) => {
+  const [isMobile, setIsMobile] = useState(false);
   const [pinnedId, setPinnedId] = useState(null);
   const [preferredViewMode, setPreferredViewMode] = useState(VIEW_MODE_SPEAKER);
   const [viewMode, setViewMode] = useState(preferredViewMode);
@@ -28,6 +29,7 @@ export const UIStateProvider = ({
   const [showAside, setShowAside] = useState();
   const [activeModals, setActiveModals] = useState({});
   const [customCapsule, setCustomCapsule] = useState();
+  const [showAutoplayFailedModal, setShowAutoplayFailedModal] = useState(false);
   const openModal = useCallback((modalName) => {
     setActiveModals((prevState) => ({
@@ -87,6 +89,10 @@ export const UIStateProvider = ({
       setShowParticipantsBar,
       customCapsule,
       setCustomCapsule,
+      showAutoplayFailedModal,
+      setShowAutoplayFailedModal,
+      isMobile,
+      setIsMobile,
     }}
   >
     {children}
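These two pieces of state back the changes in the other files: `isMobile` drives the Video component's mobile styling, and `showAutoplayFailedModal` is what Audio and AudioTrack set when autoplay is blocked. A hypothetical consumer of the new context values (the markup is illustrative, not the app's actual modal):

```jsx
import React from 'react';
import { useUIState } from '@custom/shared/contexts/UIStateProvider';

export const AutoplayFailedNotice = () => {
  const { showAutoplayFailedModal, setShowAutoplayFailedModal } = useUIState();
  if (!showAutoplayFailedModal) return null;
  return (
    <div role="dialog" aria-label="Autoplay blocked">
      <p>Your browser blocked audio autoplay. Click to enable sound.</p>
      {/* Any user gesture is enough to unlock audio playback afterwards. */}
      <button onClick={() => setShowAutoplayFailedModal(false)}>
        Enable sound
      </button>
    </div>
  );
};
```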