From ef545d04807aa754b835c43e5927a317499a5969 Mon Sep 17 00:00:00 2001 From: Jon Date: Wed, 28 Jul 2021 12:20:58 +0100 Subject: [PATCH 1/2] audio merge for > chrome 92 --- dailyjs/shared/components/Audio/Audio.js | 125 ++++++++---------- dailyjs/shared/components/Audio/AudioTrack.js | 45 +++++++ .../components/Audio/CombinedAudioTrack.js | 60 +++++++++ dailyjs/shared/package.json | 1 + yarn.lock | 9 +- 5 files changed, 172 insertions(+), 68 deletions(-) create mode 100644 dailyjs/shared/components/Audio/AudioTrack.js create mode 100644 dailyjs/shared/components/Audio/CombinedAudioTrack.js diff --git a/dailyjs/shared/components/Audio/Audio.js b/dailyjs/shared/components/Audio/Audio.js index a0e1497..07271c5 100644 --- a/dailyjs/shared/components/Audio/Audio.js +++ b/dailyjs/shared/components/Audio/Audio.js @@ -1,79 +1,70 @@ /** * Audio - * --- - * Renders audio tags for each audible participant / screen share in the call - * Note: it's very important to minimise DOM mutates for audio components - * as iOS / Safari do a lot of browser 'magic' that may result in muted - * tracks. We heavily memoize this component to avoid unnecassary re-renders. 
*/ -import React, { useRef, useEffect } from 'react'; -import { useParticipants } from '@dailyjs/shared/contexts/ParticipantsProvider'; -import useAudioTrack from '@dailyjs/shared/hooks/useAudioTrack'; -import PropTypes from 'prop-types'; +import React, { useEffect, useMemo } from 'react'; +import { useTracks } from '@dailyjs/shared/contexts/TracksProvider'; +import Bowser from 'bowser'; +import { Portal } from 'react-portal'; +import AudioTrack from './AudioTrack'; +import CombinedAudioTrack from './CombinedAudioTrack'; -const AudioItem = React.memo( - ({ participant }) => { - const audioRef = useRef(null); - const audioTrack = useAudioTrack(participant); +export const Audio = () => { + const { audioTracks } = useTracks(); - useEffect(() => { - if (!audioTrack || !audioRef.current) return; + const renderedTracks = useMemo( + () => + Object.entries(audioTracks).reduce( + (tracks, [id, track]) => ({ ...tracks, [id]: track }), + {} + ), + [audioTracks] + ); - // quick sanity to check to make sure this is an audio track... - if (audioTrack.kind !== 'audio') return; + // On iOS safari, when headphones are disconnected, all audio elements are paused. + // This means that when a user disconnects their headphones, that user will not + // be able to hear any other users until they mute/unmute their mics. + // To fix that, we call `play` on each audio track on all devicechange events. 
+ useEffect(() => { + const playTracks = () => { + document.querySelectorAll('.audioTracks audio').forEach(async (audio) => { + try { + if (audio.paused && audio.readyState === audio.HAVE_ENOUGH_DATA) { + await audio?.play(); + } + } catch (e) { + // Auto play failed + } + }); + }; + navigator.mediaDevices.addEventListener('devicechange', playTracks); + return () => { + navigator.mediaDevices.removeEventListener('devicechange', playTracks); + }; + }, []); - audioRef.current.srcObject = new MediaStream([audioTrack]); - }, [audioTrack]); - - useEffect(() => { - // On iOS safari, when headphones are disconnected, all audio elements are paused. - // This means that when a user disconnects their headphones, that user will not - // be able to hear any other users until they mute/unmute their mics. - // To fix that, we call `play` on each audio track on all devicechange events. - if (audioRef.currenet) { - return false; - } - const startPlayingTrack = () => { - audioRef.current?.play(); - }; - - navigator.mediaDevices.addEventListener( - 'devicechange', - startPlayingTrack - ); - - return () => - navigator.mediaDevices.removeEventListener( - 'devicechange', - startPlayingTrack - ); - }, [audioRef]); - - return ( - <> - - - ); - }, - () => true -); - -AudioItem.propTypes = { - participant: PropTypes.object, -}; - -export const Audio = React.memo(() => { - const { allParticipants } = useParticipants(); + const tracksComponent = useMemo(() => { + const { browser } = Bowser.parse(navigator.userAgent); + if (browser.name === 'Chrome' && parseInt(browser.version, 10) >= 92) { + return ; + } + return Object.entries(renderedTracks).map(([id, track]) => ( + + )); + }, [renderedTracks]); return ( - <> - {allParticipants.map( - (p) => !p.isLocal && - )} - + +
+ {tracksComponent} + +
+
); -}); +}; export default Audio; diff --git a/dailyjs/shared/components/Audio/AudioTrack.js b/dailyjs/shared/components/Audio/AudioTrack.js new file mode 100644 index 0000000..e8e53ce --- /dev/null +++ b/dailyjs/shared/components/Audio/AudioTrack.js @@ -0,0 +1,45 @@ +import React, { useRef, useEffect } from 'react'; +import PropTypes from 'prop-types'; + +const AudioTrack = React.memo( + ({ track }) => { + const audioRef = useRef(null); + + useEffect(() => { + if (!audioRef.current) return false; + let playTimeout; + + const handleCanPlay = () => { + playTimeout = setTimeout(() => { + console.log('Unable to autoplay audio element'); + }, 1500); + }; + const handlePlay = () => { + clearTimeout(playTimeout); + }; + audioRef.current.addEventListener('canplay', handleCanPlay); + audioRef.current.addEventListener('play', handlePlay); + audioRef.current.srcObject = new MediaStream([track]); + + const audioEl = audioRef.current; + + return () => { + audioEl?.removeEventListener('canplay', handleCanPlay); + audioEl?.removeEventListener('play', handlePlay); + }; + }, [track]); + + return track ? 
( + + ) : null; + }, + () => true +); + +AudioTrack.propTypes = { + track: PropTypes.object, +}; + +export default AudioTrack; diff --git a/dailyjs/shared/components/Audio/CombinedAudioTrack.js b/dailyjs/shared/components/Audio/CombinedAudioTrack.js new file mode 100644 index 0000000..87a1f26 --- /dev/null +++ b/dailyjs/shared/components/Audio/CombinedAudioTrack.js @@ -0,0 +1,60 @@ +import React, { useEffect, useRef } from 'react'; +import PropTypes from 'prop-types'; +import { useDeepCompareEffect, useDeepCompareMemo } from 'use-deep-compare'; + +const CombinedAudioTrack = ({ tracks }) => { + const audioEl = useRef(null); + + useEffect(() => { + if (!audioEl) return; + audioEl.current.srcObject = new MediaStream(); + }, []); + + const trackIds = useDeepCompareMemo( + () => Object.values(tracks).map((t) => t?.persistentTrack?.id), + [tracks] + ); + + useDeepCompareEffect(() => { + const audio = audioEl.current; + if (!audio || !audio.srcObject) return; + + const stream = audio.srcObject; + const allTracks = Object.values(tracks); + + allTracks.forEach((track) => { + const persistentTrack = track?.persistentTrack; + if (persistentTrack) { + persistentTrack.addEventListener( + 'ended', + (ev) => stream.removeTrack(ev.target), + { once: true } + ); + stream.addTrack(persistentTrack); + } + }); + + audio.load(); + + if ( + stream + .getAudioTracks() + .some((t) => t.enabled && t.readyState === 'live') && + audio.paused + ) { + audio.play(); + } + }, [tracks, trackIds]); + + return ( + + ); +}; + +CombinedAudioTrack.propTypes = { + tracks: PropTypes.object, +}; + +export default CombinedAudioTrack; diff --git a/dailyjs/shared/package.json b/dailyjs/shared/package.json index ed9c59a..975474a 100644 --- a/dailyjs/shared/package.json +++ b/dailyjs/shared/package.json @@ -13,6 +13,7 @@ "prop-types": "^15.7.2", "react": "^17.0.2", "react-dom": "^17.0.2", + "react-portal": "^4.2.1", "shallow-equal": "^1.2.1", "use-deep-compare": "^1.1.0" } diff --git a/yarn.lock b/yarn.lock 
index 61dd260..b92f1b2 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2800,7 +2800,7 @@ progress@^2.0.0: resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== -prop-types@15.7.2, prop-types@^15.7.2: +prop-types@15.7.2, prop-types@^15.5.8, prop-types@^15.7.2: version "15.7.2" resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.7.2.tgz#52c41e75b8c87e72b9d9360e0206b99dcbffa6c5" integrity sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ== @@ -2907,6 +2907,13 @@ react-is@^16.8.1: resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== +react-portal@^4.2.1: + version "4.2.1" + resolved "https://registry.yarnpkg.com/react-portal/-/react-portal-4.2.1.tgz#12c1599238c06fb08a9800f3070bea2a3f78b1a6" + integrity sha512-fE9kOBagwmTXZ3YGRYb4gcMy+kSA+yLO0xnPankjRlfBv4uCpFXqKPfkpsGQQR15wkZ9EssnvTOl1yMzbkxhPQ== + dependencies: + prop-types "^15.5.8" + react-refresh@0.8.3: version "0.8.3" resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.8.3.tgz#721d4657672d400c5e3c75d063c4a85fb2d5d68f" From c9596f14c95f20daea130075a2e9c890ea962b3a Mon Sep 17 00:00:00 2001 From: Jon Date: Wed, 28 Jul 2021 15:35:01 +0100 Subject: [PATCH 2/2] updated Tile & Video to nullify src object --- dailyjs/shared/components/Audio/Audio.js | 6 ++ dailyjs/shared/components/Tile/Tile.js | 34 ++++++++++- dailyjs/shared/components/Tile/Video/Video.js | 56 ++++++++++++------- 3 files changed, 74 insertions(+), 22 deletions(-) diff --git a/dailyjs/shared/components/Audio/Audio.js b/dailyjs/shared/components/Audio/Audio.js index 07271c5..931ee56 100644 --- a/dailyjs/shared/components/Audio/Audio.js +++ 
b/dailyjs/shared/components/Audio/Audio.js @@ -1,5 +1,11 @@ /** * Audio + * --- + * When working with audio elements it's very important to avoid mutating + * the DOM elements as much as possible to avoid audio pops and crackles. + * This component addresses two known browser quirks; Safari autoplay + * and Chrome's maximum media elements. On Chrome we add all audio tracks + * into a single audio node using the CombinedAudioTrack component */ import React, { useEffect, useMemo } from 'react'; import { useTracks } from '@dailyjs/shared/contexts/TracksProvider'; diff --git a/dailyjs/shared/components/Tile/Tile.js b/dailyjs/shared/components/Tile/Tile.js index 53afe11..efaeb3c 100644 --- a/dailyjs/shared/components/Tile/Tile.js +++ b/dailyjs/shared/components/Tile/Tile.js @@ -1,4 +1,4 @@ -import React, { useRef } from 'react'; +import React, { useEffect, useRef } from 'react'; import useVideoTrack from '@dailyjs/shared/hooks/useVideoTrack'; import { ReactComponent as IconMicMute } from '@dailyjs/shared/icons/mic-off-sm.svg'; import classNames from 'classnames'; @@ -14,11 +14,36 @@ export const Tile = React.memo( showName = true, showAvatar = true, aspectRatio = DEFAULT_ASPECT_RATIO, + onVideoResize, ...props }) => { const videoTrack = useVideoTrack(participant); const videoEl = useRef(null); + /** + * Add optional event listener for resize event so the parent component + * can know the video's native aspect ratio.
+ */ + useEffect(() => { + const video = videoEl.current; + if (!onVideoResize || !video) return false; + + const handleResize = () => { + if (!video) return; + const width = video?.videoWidth; + const height = video?.videoHeight; + if (width && height) { + // Return the video's aspect ratio to the parent's handler + onVideoResize(width / height); + } + }; + + handleResize(); + video?.addEventListener('resize', handleResize); + + return () => video?.removeEventListener('resize', handleResize); + }, [onVideoResize, videoEl, participant]); + const cx = classNames('tile', { mirrored, avatar: showAvatar && !videoTrack, @@ -35,7 +60,11 @@ export const Tile = React.memo( )} {videoTrack ? ( -