Merge Chrome 92 changes

Jon 2021-07-28 16:06:40 +01:00
commit 73dabf2758
8 changed files with 268 additions and 122 deletions

View File

@@ -37,7 +37,6 @@ export const SpeakerTile = ({ participant, screenRef }) => {
const { height, finalRatio, videoFit } = useMemo(
() =>
// Avoid cropping mobile videos, which have the nativeAspectRatio set
({
height: (nativeAspectRatio ?? ratio) >= MIN_RATIO ? '100%' : null,
finalRatio:

View File

@@ -1,79 +1,76 @@
/**
* Audio
* ---
* Renders audio tags for each audible participant / screen share in the call
* Note: it's very important to minimise DOM mutations for audio components
* as iOS / Safari do a lot of browser 'magic' that may result in muted
* tracks. We heavily memoize this component to avoid unnecessary re-renders.
* When working with audio elements it's very important to avoid mutating
* the DOM elements as much as possible to avoid audio pops and crackles.
* This component addresses two known browser quirks: Safari autoplay
* and Chrome's maximum media elements. On Chrome we add all audio tracks
* into a single audio node using the CombinedAudioTrack component
*/
import React, { useRef, useEffect } from 'react';
import { useParticipants } from '@dailyjs/shared/contexts/ParticipantsProvider';
import useAudioTrack from '@dailyjs/shared/hooks/useAudioTrack';
import PropTypes from 'prop-types';
import React, { useEffect, useMemo } from 'react';
import { useTracks } from '@dailyjs/shared/contexts/TracksProvider';
import Bowser from 'bowser';
import { Portal } from 'react-portal';
import AudioTrack from './AudioTrack';
import CombinedAudioTrack from './CombinedAudioTrack';
const AudioItem = React.memo(
({ participant }) => {
const audioRef = useRef(null);
const audioTrack = useAudioTrack(participant);
export const Audio = () => {
const { audioTracks } = useTracks();
useEffect(() => {
if (!audioTrack || !audioRef.current) return;
const renderedTracks = useMemo(
() =>
Object.entries(audioTracks).reduce(
(tracks, [id, track]) => ({ ...tracks, [id]: track }),
{}
),
[audioTracks]
);
// quick sanity check to make sure this is an audio track...
if (audioTrack.kind !== 'audio') return;
// On iOS Safari, when headphones are disconnected, all audio elements are paused.
// This means that when a user disconnects their headphones, that user will not
// be able to hear any other users until they mute/unmute their mics.
// To fix that, we call `play` on each audio track on all devicechange events.
useEffect(() => {
const playTracks = () => {
document.querySelectorAll('.audioTracks audio').forEach(async (audio) => {
try {
if (audio.paused && audio.readyState === audio.HAVE_ENOUGH_DATA) {
await audio?.play();
}
} catch (e) {
// Auto play failed
}
});
};
navigator.mediaDevices.addEventListener('devicechange', playTracks);
return () => {
navigator.mediaDevices.removeEventListener('devicechange', playTracks);
};
}, []);
audioRef.current.srcObject = new MediaStream([audioTrack]);
}, [audioTrack]);
useEffect(() => {
// On iOS Safari, when headphones are disconnected, all audio elements are paused.
// This means that when a user disconnects their headphones, that user will not
// be able to hear any other users until they mute/unmute their mics.
// To fix that, we call `play` on each audio track on all devicechange events.
if (!audioRef.current) {
return false;
}
const startPlayingTrack = () => {
audioRef.current?.play();
};
navigator.mediaDevices.addEventListener(
'devicechange',
startPlayingTrack
);
return () =>
navigator.mediaDevices.removeEventListener(
'devicechange',
startPlayingTrack
);
}, [audioRef]);
return (
<>
<audio autoPlay playsInline ref={audioRef}>
<track kind="captions" />
</audio>
</>
);
},
() => true
);
AudioItem.propTypes = {
participant: PropTypes.object,
};
export const Audio = React.memo(() => {
const { allParticipants } = useParticipants();
const tracksComponent = useMemo(() => {
const { browser } = Bowser.parse(navigator.userAgent);
if (browser.name === 'Chrome' && parseInt(browser.version, 10) >= 92) {
return <CombinedAudioTrack tracks={renderedTracks} />;
}
return Object.entries(renderedTracks).map(([id, track]) => (
<AudioTrack key={id} track={track.persistentTrack} />
));
}, [renderedTracks]);
return (
<>
{allParticipants.map(
(p) => !p.isLocal && <AudioItem participant={p} key={p.id} />
)}
</>
<Portal key="AudioTracks">
<div className="audioTracks">
{tracksComponent}
<style jsx>{`
.audioTracks {
position: absolute;
visibility: hidden;
}
`}</style>
</div>
</Portal>
);
});
};
export default Audio;
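
For context (not part of this commit), a minimal usage sketch: mount <Audio /> once inside the tracks context so the portalled audio elements are created a single time and are never re-mounted by layout changes. The call-layout component name and import paths below are assumptions.

import React from 'react';
import { TracksProvider } from '@dailyjs/shared/contexts/TracksProvider';
import { Audio } from '../Audio';

// Hypothetical call layout: <Audio /> sits alongside the grid, not inside it,
// so the hidden audio elements survive re-renders of the rest of the UI.
export const CallLayout = ({ children }) => (
  <TracksProvider>
    {children /* video grid, tray, etc. */}
    <Audio />
  </TracksProvider>
);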

View File

@@ -0,0 +1,45 @@
import React, { useRef, useEffect } from 'react';
import PropTypes from 'prop-types';
const AudioTrack = React.memo(
({ track }) => {
const audioRef = useRef(null);
useEffect(() => {
if (!audioRef.current) return false;
let playTimeout;
const handleCanPlay = () => {
playTimeout = setTimeout(() => {
console.log('Unable to autoplay audio element');
}, 1500);
};
const handlePlay = () => {
clearTimeout(playTimeout);
};
audioRef.current.addEventListener('canplay', handleCanPlay);
audioRef.current.addEventListener('play', handlePlay);
audioRef.current.srcObject = new MediaStream([track]);
const audioEl = audioRef.current;
return () => {
audioEl?.removeEventListener('canplay', handleCanPlay);
audioEl?.removeEventListener('play', handlePlay);
};
}, [track]);
return track ? (
<audio autoPlay playsInline ref={audioRef}>
<track kind="captions" />
</audio>
) : null;
},
() => true
);
AudioTrack.propTypes = {
track: PropTypes.object,
};
export default AudioTrack;

View File

@@ -0,0 +1,60 @@
import React, { useEffect, useRef } from 'react';
import PropTypes from 'prop-types';
import { useDeepCompareEffect, useDeepCompareMemo } from 'use-deep-compare';
const CombinedAudioTrack = ({ tracks }) => {
const audioEl = useRef(null);
useEffect(() => {
if (!audioEl.current) return;
audioEl.current.srcObject = new MediaStream();
}, []);
const trackIds = useDeepCompareMemo(
() => Object.values(tracks).map((t) => t?.persistentTrack?.id),
[tracks]
);
useDeepCompareEffect(() => {
const audio = audioEl.current;
if (!audio || !audio.srcObject) return;
const stream = audio.srcObject;
const allTracks = Object.values(tracks);
allTracks.forEach((track) => {
const persistentTrack = track?.persistentTrack;
if (persistentTrack) {
persistentTrack.addEventListener(
'ended',
(ev) => stream.removeTrack(ev.target),
{ once: true }
);
stream.addTrack(persistentTrack);
}
});
audio.load();
if (
stream
.getAudioTracks()
.some((t) => t.enabled && t.readyState === 'live') &&
audio.paused
) {
audio.play();
}
}, [tracks, trackIds]);
return (
<audio autoPlay playsInline ref={audioEl}>
<track kind="captions" />
</audio>
);
};
CombinedAudioTrack.propTypes = {
tracks: PropTypes.object,
};
export default CombinedAudioTrack;
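
For reference, a self-contained sketch of how CombinedAudioTrack might be driven: each entry in the tracks prop is keyed by id and carries a persistentTrack, and every live track ends up in one MediaStream on a single audio element. The harness component, the made-up id and the use of getUserMedia are assumptions, not part of this commit.

import React, { useEffect, useState } from 'react';
import CombinedAudioTrack from './CombinedAudioTrack';

// Hypothetical harness: use a local mic track to stand in for a remote
// participant's persistentTrack.
const CombinedAudioSmokeTest = () => {
  const [tracks, setTracks] = useState({});

  useEffect(() => {
    navigator.mediaDevices.getUserMedia({ audio: true }).then((stream) => {
      const [track] = stream.getAudioTracks();
      // Same shape as the renderedTracks object passed down from Audio above
      setTracks({ 'local-test': { persistentTrack: track } });
    });
  }, []);

  return <CombinedAudioTrack tracks={tracks} />;
};

export default CombinedAudioSmokeTest;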

View File

@@ -1,4 +1,4 @@
import React, { useState, useEffect, useRef } from 'react';
import React, { memo, useEffect, useState, useRef } from 'react';
import useVideoTrack from '@dailyjs/shared/hooks/useVideoTrack';
import { ReactComponent as IconMicMute } from '@dailyjs/shared/icons/mic-off-sm.svg';
import classNames from 'classnames';
@@ -7,30 +7,33 @@ import { DEFAULT_ASPECT_RATIO } from '../../constants';
import { Video } from './Video';
import { ReactComponent as Avatar } from './avatar.svg';
export const Tile = React.memo(
const SM_TILE_MAX_WIDTH = 300;
export const Tile = memo(
({
participant,
mirrored = true,
showName = true,
showAvatar = true,
showActiveSpeaker = true,
videoFit = 'contain',
aspectRatio = DEFAULT_ASPECT_RATIO,
onVideoResize,
videoFit = 'contain',
...props
}) => {
const videoTrack = useVideoTrack(participant);
const videoEl = useRef(null);
const [tileAspectRatio, setTileAspectRatio] = useState(aspectRatio);
const [layer, setLayer] = useState();
const videoRef = useRef(null);
const tileRef = useRef(null);
const [tileWidth, setTileWidth] = useState(0);
/**
* Effect: Resize
*
* Add optional event listener for resize event so the parent component
* can know the video's native aspect ratio.
*/
useEffect(() => {
const video = videoEl.current;
const video = videoRef.current;
if (!onVideoResize || !video) return false;
const handleResize = () => {
@@ -44,50 +47,61 @@ export const Tile = React.memo(
};
handleResize();
video?.addEventListener('resize', handleResize);
return () => video?.removeEventListener('resize', handleResize);
}, [onVideoResize, videoEl, participant]);
}, [onVideoResize, videoRef, participant]);
/**
* Effect: Resize Observer
*
* Adjust size of text overlay based on tile size
*/
useEffect(() => {
if (aspectRatio === tileAspectRatio) return;
setTileAspectRatio(aspectRatio);
}, [aspectRatio, tileAspectRatio]);
useEffect(() => {
if (
typeof rtcpeers === 'undefined' ||
rtcpeers?.getCurrentType() !== 'sfu'
)
return false;
const i = setInterval(() => {
setLayer(
rtcpeers.sfu.consumers[`${participant.id}/cam-video`]?._preferredLayer
);
}, 1500);
return () => clearInterval(i);
}, [participant]);
const tile = tileRef.current;
if (!tile || typeof ResizeObserver === 'undefined') return false;
let frame;
const resizeObserver = new ResizeObserver(() => {
if (frame) cancelAnimationFrame(frame);
frame = requestAnimationFrame(() => {
if (!tile) return;
const dimensions = tile?.getBoundingClientRect();
const { width } = dimensions;
setTileWidth(width);
});
});
resizeObserver.observe(tile);
return () => {
if (frame) cancelAnimationFrame(frame);
resizeObserver.disconnect();
};
}, [tileRef]);
const cx = classNames('tile', videoFit, {
mirrored,
avatar: showAvatar && !videoTrack,
screenShare: participant.isScreenShare,
active: showActiveSpeaker && participant.isActiveSpeaker,
small: tileWidth < SM_TILE_MAX_WIDTH,
});
return (
<div className={cx} {...props}>
<div ref={tileRef} className={cx} {...props}>
<div className="content">
{showName && (
<div className="name">
{participant.isMicMuted && <IconMicMute />}
{participant.name} - {layer}
{participant.isMicMuted && !participant.isScreenShare && (
<IconMicMute />
)}
{participant.name}
</div>
)}
{videoTrack ? (
<Video ref={videoEl} videoTrack={videoTrack} />
<Video
ref={videoRef}
participantId={participant?.id}
videoTrack={videoTrack}
/>
) : (
showAvatar && (
<div className="avatar">
@@ -98,7 +112,7 @@ export const Tile = React.memo(
</div>
<style jsx>{`
.tile .content {
padding-bottom: ${100 / tileAspectRatio}%;
padding-bottom: ${100 / aspectRatio}%;
}
`}</style>
<style jsx>{`
@@ -144,6 +158,10 @@ export const Tile = React.memo(
color: var(--red-default);
}
.tile.small .name {
font-size: 12px;
}
.tile :global(video) {
height: calc(100% + 4px);
left: -2px;
@@ -187,8 +205,8 @@ Tile.propTypes = {
showAvatar: PropTypes.bool,
aspectRatio: PropTypes.number,
onVideoResize: PropTypes.func,
videoFit: PropTypes.string,
showActiveSpeaker: PropTypes.bool,
videoFit: PropTypes.string,
};
export default Tile;

View File

@@ -1,31 +1,49 @@
import React, { forwardRef, memo, useEffect } from 'react';
import React, { useMemo, forwardRef, memo, useEffect } from 'react';
import Bowser from 'bowser';
import PropTypes from 'prop-types';
import { shallowEqualObjects } from 'shallow-equal';
export const Video = memo(
forwardRef(({ videoTrack, ...rest }, videoEl) => {
forwardRef(({ participantId, videoTrack, ...rest }, videoEl) => {
/**
* Effect: mount source
* Memo: Chrome >= 92?
* See: https://bugs.chromium.org/p/chromium/issues/detail?id=1232649
*/
useEffect(() => {
if (!videoEl?.current) return;
// eslint-disable-next-line no-param-reassign
videoEl.current.srcObject = new MediaStream([videoTrack]);
}, [videoEl, videoTrack]);
const isChrome92 = useMemo(() => {
const { browser, platform, os } = Bowser.parse(navigator.userAgent);
return (
browser.name === 'Chrome' &&
parseInt(browser.version, 10) >= 92 &&
(platform.type === 'desktop' || os.name === 'Android')
);
}, []);
/**
* Effect: unmount
* Effect: Unmount
* Note: nullify src to ensure the media object is not counted towards Chrome's media element limit
*/
useEffect(
() => () => {
if (videoEl?.current?.srcObject) {
videoEl.current.srcObject.getVideoTracks().forEach((t) => t.stop());
// eslint-disable-next-line no-param-reassign
videoEl.current.srcObject = null;
}
},
[videoEl]
);
useEffect(() => {
const video = videoEl.current;
if (!video) return false;
// clean up when the video element renders for a different participant
video.srcObject = null;
if (isChrome92) video.load();
return () => {
// clean up when unmounted
video.srcObject = null;
if (isChrome92) video.load();
};
}, [videoEl, isChrome92, participantId]);
/**
* Effect: mount source (and force load on Chrome)
*/
useEffect(() => {
const video = videoEl.current;
if (!video || !videoTrack) return;
video.srcObject = new MediaStream([videoTrack]);
if (isChrome92) video.load();
}, [videoEl, isChrome92, videoTrack]);
return <video autoPlay muted playsInline ref={videoEl} {...rest} />;
}),
@@ -35,6 +53,7 @@ export const Video = memo(
Video.propTypes = {
videoTrack: PropTypes.any,
mirrored: PropTypes.bool,
participantId: PropTypes.string,
};
export default Video;
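
Stripped of React, the Chrome >= 92 workaround above boils down to the sketch below; the helper name is made up, and the behaviour of load() is as used by this commit, per the Chromium issue linked above.

// Hypothetical helper, not part of this commit: attach or detach a track the
// way the Video component does.
function setMediaTrack(el, track, isChrome92) {
  el.srcObject = track ? new MediaStream([track]) : null;
  // Chrome 92 caps the number of WebMediaPlayers per page (crbug.com/1232649),
  // so after changing srcObject we call load() to make Chrome release the old
  // player and pick up the new source.
  if (isChrome92) el.load();
}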

View File

@@ -14,6 +14,7 @@
"prop-types": "^15.7.2",
"react": "^17.0.2",
"react-dom": "^17.0.2",
"react-portal": "^4.2.1",
"shallow-equal": "^1.2.1",
"use-deep-compare": "^1.1.0"
}

View File

@@ -2807,7 +2807,7 @@ progress@^2.0.0:
resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8"
integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==
prop-types@15.7.2, prop-types@^15.7.2:
prop-types@15.7.2, prop-types@^15.5.8, prop-types@^15.7.2:
version "15.7.2"
resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.7.2.tgz#52c41e75b8c87e72b9d9360e0206b99dcbffa6c5"
integrity sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==
@@ -2914,6 +2914,13 @@ react-is@^16.8.1:
resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4"
integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==
react-portal@^4.2.1:
version "4.2.1"
resolved "https://registry.yarnpkg.com/react-portal/-/react-portal-4.2.1.tgz#12c1599238c06fb08a9800f3070bea2a3f78b1a6"
integrity sha512-fE9kOBagwmTXZ3YGRYb4gcMy+kSA+yLO0xnPankjRlfBv4uCpFXqKPfkpsGQQR15wkZ9EssnvTOl1yMzbkxhPQ==
dependencies:
prop-types "^15.5.8"
react-refresh@0.8.3:
version "0.8.3"
resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.8.3.tgz#721d4657672d400c5e3c75d063c4a85fb2d5d68f"