diff --git a/dailyjs/shared/components/Audio/Audio.js b/dailyjs/shared/components/Audio/Audio.js index a0e1497..931ee56 100644 --- a/dailyjs/shared/components/Audio/Audio.js +++ b/dailyjs/shared/components/Audio/Audio.js @@ -1,79 +1,76 @@ /** * Audio * --- - * Renders audio tags for each audible participant / screen share in the call - * Note: it's very important to minimise DOM mutates for audio components - * as iOS / Safari do a lot of browser 'magic' that may result in muted - * tracks. We heavily memoize this component to avoid unnecassary re-renders. + * When working with audio elements it's very important to avoid mutating + * the DOM elements as much as possible to avoid audio pops and crackles. + * This component addresses two known browser quirks; Safari autoplay + * and Chrome's maximum media elements. On Chrome we add all audio tracks + * into a single audio node using the CombinedAudioTrack component */ -import React, { useRef, useEffect } from 'react'; -import { useParticipants } from '@dailyjs/shared/contexts/ParticipantsProvider'; -import useAudioTrack from '@dailyjs/shared/hooks/useAudioTrack'; -import PropTypes from 'prop-types'; +import React, { useEffect, useMemo } from 'react'; +import { useTracks } from '@dailyjs/shared/contexts/TracksProvider'; +import Bowser from 'bowser'; +import { Portal } from 'react-portal'; +import AudioTrack from './AudioTrack'; +import CombinedAudioTrack from './CombinedAudioTrack'; -const AudioItem = React.memo( - ({ participant }) => { - const audioRef = useRef(null); - const audioTrack = useAudioTrack(participant); +export const Audio = () => { + const { audioTracks } = useTracks(); - useEffect(() => { - if (!audioTrack || !audioRef.current) return; + const renderedTracks = useMemo( + () => + Object.entries(audioTracks).reduce( + (tracks, [id, track]) => ({ ...tracks, [id]: track }), + {} + ), + [audioTracks] + ); - // quick sanity to check to make sure this is an audio track... 
- if (audioTrack.kind !== 'audio') return; + // On iOS safari, when headphones are disconnected, all audio elements are paused. + // This means that when a user disconnects their headphones, that user will not + // be able to hear any other users until they mute/unmute their mics. + // To fix that, we call `play` on each audio track on all devicechange events. + useEffect(() => { + const playTracks = () => { + document.querySelectorAll('.audioTracks audio').forEach(async (audio) => { + try { + if (audio.paused && audio.readyState === audio.HAVE_ENOUGH_DATA) { + await audio?.play(); + } + } catch (e) { + // Auto play failed + } + }); + }; + navigator.mediaDevices.addEventListener('devicechange', playTracks); + return () => { + navigator.mediaDevices.removeEventListener('devicechange', playTracks); + }; + }, []); - audioRef.current.srcObject = new MediaStream([audioTrack]); - }, [audioTrack]); - - useEffect(() => { - // On iOS safari, when headphones are disconnected, all audio elements are paused. - // This means that when a user disconnects their headphones, that user will not - // be able to hear any other users until they mute/unmute their mics. - // To fix that, we call `play` on each audio track on all devicechange events. 
- if (audioRef.currenet) { - return false; - } - const startPlayingTrack = () => { - audioRef.current?.play(); - }; - - navigator.mediaDevices.addEventListener( - 'devicechange', - startPlayingTrack - ); - - return () => - navigator.mediaDevices.removeEventListener( - 'devicechange', - startPlayingTrack - ); - }, [audioRef]); - - return ( - <> - - - ); - }, - () => true -); - -AudioItem.propTypes = { - participant: PropTypes.object, -}; - -export const Audio = React.memo(() => { - const { allParticipants } = useParticipants(); + const tracksComponent = useMemo(() => { + const { browser } = Bowser.parse(navigator.userAgent); + if (browser.name === 'Chrome' && parseInt(browser.version, 10) >= 92) { + return ; + } + return Object.entries(renderedTracks).map(([id, track]) => ( + + )); + }, [renderedTracks]); return ( - <> - {allParticipants.map( - (p) => !p.isLocal && - )} - + +
+ {tracksComponent} + +
+
); -}); +}; export default Audio; diff --git a/dailyjs/shared/components/Audio/AudioTrack.js b/dailyjs/shared/components/Audio/AudioTrack.js new file mode 100644 index 0000000..e8e53ce --- /dev/null +++ b/dailyjs/shared/components/Audio/AudioTrack.js @@ -0,0 +1,45 @@ +import React, { useRef, useEffect } from 'react'; +import PropTypes from 'prop-types'; + +const AudioTrack = React.memo( + ({ track }) => { + const audioRef = useRef(null); + + useEffect(() => { + if (!audioRef.current) return false; + let playTimeout; + + const handleCanPlay = () => { + playTimeout = setTimeout(() => { + console.log('Unable to autoplay audio element'); + }, 1500); + }; + const handlePlay = () => { + clearTimeout(playTimeout); + }; + audioRef.current.addEventListener('canplay', handleCanPlay); + audioRef.current.addEventListener('play', handlePlay); + audioRef.current.srcObject = new MediaStream([track]); + + const audioEl = audioRef.current; + + return () => { + audioEl?.removeEventListener('canplay', handleCanPlay); + audioEl?.removeEventListener('play', handlePlay); + }; + }, [track]); + + return track ? 
( + + ) : null; + }, + () => true +); + +AudioTrack.propTypes = { + track: PropTypes.object, +}; + +export default AudioTrack; diff --git a/dailyjs/shared/components/Audio/CombinedAudioTrack.js b/dailyjs/shared/components/Audio/CombinedAudioTrack.js new file mode 100644 index 0000000..87a1f26 --- /dev/null +++ b/dailyjs/shared/components/Audio/CombinedAudioTrack.js @@ -0,0 +1,60 @@ +import React, { useEffect, useRef } from 'react'; +import PropTypes from 'prop-types'; +import { useDeepCompareEffect, useDeepCompareMemo } from 'use-deep-compare'; + +const CombinedAudioTrack = ({ tracks }) => { + const audioEl = useRef(null); + + useEffect(() => { + if (!audioEl) return; + audioEl.current.srcObject = new MediaStream(); + }, []); + + const trackIds = useDeepCompareMemo( + () => Object.values(tracks).map((t) => t?.persistentTrack?.id), + [tracks] + ); + + useDeepCompareEffect(() => { + const audio = audioEl.current; + if (!audio || !audio.srcObject) return; + + const stream = audio.srcObject; + const allTracks = Object.values(tracks); + + allTracks.forEach((track) => { + const persistentTrack = track?.persistentTrack; + if (persistentTrack) { + persistentTrack.addEventListener( + 'ended', + (ev) => stream.removeTrack(ev.target), + { once: true } + ); + stream.addTrack(persistentTrack); + } + }); + + audio.load(); + + if ( + stream + .getAudioTracks() + .some((t) => t.enabled && t.readyState === 'live') && + audio.paused + ) { + audio.play(); + } + }, [tracks, trackIds]); + + return ( + + ); +}; + +CombinedAudioTrack.propTypes = { + tracks: PropTypes.object, +}; + +export default CombinedAudioTrack; diff --git a/dailyjs/shared/components/Tile/Tile.js b/dailyjs/shared/components/Tile/Tile.js index 53afe11..efaeb3c 100644 --- a/dailyjs/shared/components/Tile/Tile.js +++ b/dailyjs/shared/components/Tile/Tile.js @@ -1,4 +1,4 @@ -import React, { useRef } from 'react'; +import React, { useEffect, useRef } from 'react'; import useVideoTrack from 
'@dailyjs/shared/hooks/useVideoTrack'; import { ReactComponent as IconMicMute } from '@dailyjs/shared/icons/mic-off-sm.svg'; import classNames from 'classnames'; @@ -14,11 +14,36 @@ export const Tile = React.memo( showName = true, showAvatar = true, aspectRatio = DEFAULT_ASPECT_RATIO, + onVideoResize, ...props }) => { const videoTrack = useVideoTrack(participant); const videoEl = useRef(null); + /** + * Add optional event listener for resize event so the parent component + * can know the video's native aspect ratio. + */ + useEffect(() => { + const video = videoEl.current; + if (!onVideoResize || !video) return false; + + const handleResize = () => { + if (!video) return; + const width = video?.videoWidth; + const height = video?.videoHeight; + if (width && height) { + // Return the video's aspect ratio to the parent's handler + onVideoResize(width / height); + } + }; + + handleResize(); + video?.addEventListener('resize', handleResize); + + return () => video?.removeEventListener('resize', handleResize); + }, [onVideoResize, videoEl, participant]); + const cx = classNames('tile', { mirrored, avatar: showAvatar && !videoTrack, @@ -35,7 +60,11 @@ export const Tile = React.memo( )} {videoTrack ? ( -