diff --git a/dailyjs/active-speaker/components/SpeakerView/SpeakerTile/SpeakerTile.js b/dailyjs/active-speaker/components/SpeakerView/SpeakerTile/SpeakerTile.js
index 88f8dc9..f75163e 100644
--- a/dailyjs/active-speaker/components/SpeakerView/SpeakerTile/SpeakerTile.js
+++ b/dailyjs/active-speaker/components/SpeakerView/SpeakerTile/SpeakerTile.js
@@ -37,7 +37,6 @@ export const SpeakerTile = ({ participant, screenRef }) => {
   const { height, finalRatio, videoFit } = useMemo(
     () =>
       // Avoid cropping mobile videos, which have the nativeAspectRatio set
-
       ({
         height: (nativeAspectRatio ?? ratio) >= MIN_RATIO ? '100%' : null,
         finalRatio:
diff --git a/dailyjs/shared/components/Audio/Audio.js b/dailyjs/shared/components/Audio/Audio.js
index a0e1497..931ee56 100644
--- a/dailyjs/shared/components/Audio/Audio.js
+++ b/dailyjs/shared/components/Audio/Audio.js
@@ -1,79 +1,76 @@
 /**
  * Audio
  * ---
- * Renders audio tags for each audible participant / screen share in the call
- * Note: it's very important to minimise DOM mutates for audio components
- * as iOS / Safari do a lot of browser 'magic' that may result in muted
- * tracks. We heavily memoize this component to avoid unnecassary re-renders.
+ * When working with audio elements it's very important to mutate the DOM
+ * elements as little as possible to avoid audio pops and crackles.
+ * This component addresses two known browser quirks: Safari autoplay
+ * and Chrome's maximum media elements. On Chrome we add all audio tracks
+ * into a single audio node using the CombinedAudioTrack component.
  */
-import React, { useRef, useEffect } from 'react';
-import { useParticipants } from '@dailyjs/shared/contexts/ParticipantsProvider';
-import useAudioTrack from '@dailyjs/shared/hooks/useAudioTrack';
-import PropTypes from 'prop-types';
+import React, { useEffect, useMemo } from 'react';
+import { useTracks } from '@dailyjs/shared/contexts/TracksProvider';
+import Bowser from 'bowser';
+import { Portal } from 'react-portal';
+import AudioTrack from './AudioTrack';
+import CombinedAudioTrack from './CombinedAudioTrack';

-const AudioItem = React.memo(
-  ({ participant }) => {
-    const audioRef = useRef(null);
-    const audioTrack = useAudioTrack(participant);
+export const Audio = () => {
+  const { audioTracks } = useTracks();

-    useEffect(() => {
-      if (!audioTrack || !audioRef.current) return;
+  const renderedTracks = useMemo(
+    () =>
+      Object.entries(audioTracks).reduce(
+        (tracks, [id, track]) => ({ ...tracks, [id]: track }),
+        {}
+      ),
+    [audioTracks]
+  );

-      // quick sanity to check to make sure this is an audio track...
-      if (audioTrack.kind !== 'audio') return;
+  // On iOS safari, when headphones are disconnected, all audio elements are paused.
+  // This means that when a user disconnects their headphones, that user will not
+  // be able to hear any other users until they mute/unmute their mics.
+  // To fix that, we call `play` on each audio track on all devicechange events.
+  useEffect(() => {
+    const playTracks = () => {
+      document.querySelectorAll('.audioTracks audio').forEach(async (audio) => {
+        try {
+          if (audio.paused && audio.readyState === audio.HAVE_ENOUGH_DATA) {
+            await audio?.play();
+          }
+        } catch (e) {
+          // Auto play failed
+        }
+      });
+    };
+    navigator.mediaDevices.addEventListener('devicechange', playTracks);
+    return () => {
+      navigator.mediaDevices.removeEventListener('devicechange', playTracks);
+    };
+  }, []);

-      audioRef.current.srcObject = new MediaStream([audioTrack]);
-    }, [audioTrack]);
-
-    useEffect(() => {
-      // On iOS safari, when headphones are disconnected, all audio elements are paused.
-      // This means that when a user disconnects their headphones, that user will not
-      // be able to hear any other users until they mute/unmute their mics.
-      // To fix that, we call `play` on each audio track on all devicechange events.
-      if (audioRef.currenet) {
-        return false;
-      }
-      const startPlayingTrack = () => {
-        audioRef.current?.play();
-      };
-
-      navigator.mediaDevices.addEventListener(
-        'devicechange',
-        startPlayingTrack
-      );
-
-      return () =>
-        navigator.mediaDevices.removeEventListener(
-          'devicechange',
-          startPlayingTrack
-        );
-    }, [audioRef]);
-
-    return (
-      <>
-        <audio autoPlay playsInline ref={audioRef} />
-      </>
-    );
-  },
-  () => true
-);
-
-AudioItem.propTypes = {
-  participant: PropTypes.object,
-};
-
-export const Audio = React.memo(() => {
-  const { allParticipants } = useParticipants();
+  const tracksComponent = useMemo(() => {
+    const { browser } = Bowser.parse(navigator.userAgent);
+    if (browser.name === 'Chrome' && parseInt(browser.version, 10) >= 92) {
+      return <CombinedAudioTrack tracks={renderedTracks} />;
+    }
+    return Object.entries(renderedTracks).map(([id, track]) => (
+      <AudioTrack key={id} track={track.persistentTrack} />
+    ));
+  }, [renderedTracks]);

   return (
-    <>
-      {allParticipants.map(
-        (p) => !p.isLocal && <AudioItem participant={p} key={p.id} />
-      )}
-    </>
+    <Portal>
+      <div className="audioTracks">
+        {tracksComponent}
+      </div>
+    </Portal>
   );
-});
+};

 export default Audio;
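Note on the Chrome branch above: `Bowser.parse(navigator.userAgent)` returns a `browser` object with `name` and `version`, and Audio.js only switches to the combined element on Chrome 92 and newer, the versions the header comment refers to as having a maximum number of media elements. A minimal sketch of that check in isolation (the `shouldCombineAudioTracks` helper name is made up for illustration):

```js
import Bowser from 'bowser';

// Hypothetical helper mirroring the check in Audio.js: on Chrome >= 92,
// render one combined <audio> element instead of one element per track.
function shouldCombineAudioTracks(userAgent = navigator.userAgent) {
  const { browser } = Bowser.parse(userAgent);
  return browser.name === 'Chrome' && parseInt(browser.version, 10) >= 92;
}
```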
diff --git a/dailyjs/shared/components/Audio/AudioTrack.js b/dailyjs/shared/components/Audio/AudioTrack.js
new file mode 100644
index 0000000..e8e53ce
--- /dev/null
+++ b/dailyjs/shared/components/Audio/AudioTrack.js
@@ -0,0 +1,45 @@
+import React, { useRef, useEffect } from 'react';
+import PropTypes from 'prop-types';
+
+const AudioTrack = React.memo(
+  ({ track }) => {
+    const audioRef = useRef(null);
+
+    useEffect(() => {
+      if (!audioRef.current) return false;
+      let playTimeout;
+
+      const handleCanPlay = () => {
+        playTimeout = setTimeout(() => {
+          console.log('Unable to autoplay audio element');
+        }, 1500);
+      };
+      const handlePlay = () => {
+        clearTimeout(playTimeout);
+      };
+      audioRef.current.addEventListener('canplay', handleCanPlay);
+      audioRef.current.addEventListener('play', handlePlay);
+      audioRef.current.srcObject = new MediaStream([track]);
+
+      const audioEl = audioRef.current;
+
+      return () => {
+        audioEl?.removeEventListener('canplay', handleCanPlay);
+        audioEl?.removeEventListener('play', handlePlay);
+      };
+    }, [track]);
+
+    return track ? (
+      <audio autoPlay playsInline ref={audioRef} />
+    ) : null;
+  },
+  () => true
+);
+
+AudioTrack.propTypes = {
+  track: PropTypes.object,
+};
+
+export default AudioTrack;
diff --git a/dailyjs/shared/components/Audio/CombinedAudioTrack.js b/dailyjs/shared/components/Audio/CombinedAudioTrack.js
new file mode 100644
index 0000000..87a1f26
--- /dev/null
+++ b/dailyjs/shared/components/Audio/CombinedAudioTrack.js
@@ -0,0 +1,60 @@
+import React, { useEffect, useRef } from 'react';
+import PropTypes from 'prop-types';
+import { useDeepCompareEffect, useDeepCompareMemo } from 'use-deep-compare';
+
+const CombinedAudioTrack = ({ tracks }) => {
+  const audioEl = useRef(null);
+
+  useEffect(() => {
+    if (!audioEl) return;
+    audioEl.current.srcObject = new MediaStream();
+  }, []);
+
+  const trackIds = useDeepCompareMemo(
+    () => Object.values(tracks).map((t) => t?.persistentTrack?.id),
+    [tracks]
+  );
+
+  useDeepCompareEffect(() => {
+    const audio = audioEl.current;
+    if (!audio || !audio.srcObject) return;
+
+    const stream = audio.srcObject;
+    const allTracks = Object.values(tracks);
+
+    allTracks.forEach((track) => {
+      const persistentTrack = track?.persistentTrack;
+      if (persistentTrack) {
+        persistentTrack.addEventListener(
+          'ended',
+          (ev) => stream.removeTrack(ev.target),
+          { once: true }
+        );
+        stream.addTrack(persistentTrack);
+      }
+    });
+
+    audio.load();
+
+    if (
+      stream
+        .getAudioTracks()
+        .some((t) => t.enabled && t.readyState === 'live') &&
+      audio.paused
+    ) {
+      audio.play();
+    }
+  }, [tracks, trackIds]);
+
+  return (
+    <audio autoPlay playsInline ref={audioEl} />
+  );
+};
+
+CombinedAudioTrack.propTypes = {
+  tracks: PropTypes.object,
+};
+
+export default CombinedAudioTrack;
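CombinedAudioTrack keeps a single long-lived `<audio>` element whose `srcObject` is one MediaStream, then adds and removes MediaStreamTracks on that stream as remote tracks start and end, so the DOM element itself is never torn down. A rough framework-free sketch of the same pattern, assuming a pre-existing element (the element id and function name are hypothetical):

```js
// One persistent element and one persistent stream; tracks come and go.
const audioEl = document.querySelector('#combined-audio'); // hypothetical element
const stream = new MediaStream();
audioEl.srcObject = stream;

function addRemoteAudioTrack(track) {
  // Remove the track from the stream once the sender stops it.
  track.addEventListener('ended', (ev) => stream.removeTrack(ev.target), {
    once: true,
  });
  stream.addTrack(track);

  // Reload the element and resume playback if it was left paused.
  audioEl.load();
  if (audioEl.paused) {
    audioEl.play().catch(() => {
      // Autoplay may still be blocked until the user interacts with the page.
    });
  }
}
```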
diff --git a/dailyjs/shared/components/Tile/Tile.js b/dailyjs/shared/components/Tile/Tile.js
index 5014940..31c3924 100644
--- a/dailyjs/shared/components/Tile/Tile.js
+++ b/dailyjs/shared/components/Tile/Tile.js
@@ -1,4 +1,4 @@
-import React, { useState, useEffect, useRef } from 'react';
+import React, { memo, useEffect, useState, useRef } from 'react';
 import useVideoTrack from '@dailyjs/shared/hooks/useVideoTrack';
 import { ReactComponent as IconMicMute } from '@dailyjs/shared/icons/mic-off-sm.svg';
 import classNames from 'classnames';
@@ -7,30 +7,33 @@ import { DEFAULT_ASPECT_RATIO } from '../../constants';
 import { Video } from './Video';
 import { ReactComponent as Avatar } from './avatar.svg';

-export const Tile = React.memo(
+const SM_TILE_MAX_WIDTH = 300;
+
+export const Tile = memo(
   ({
     participant,
     mirrored = true,
     showName = true,
     showAvatar = true,
     showActiveSpeaker = true,
+    videoFit = 'contain',
     aspectRatio = DEFAULT_ASPECT_RATIO,
     onVideoResize,
-    videoFit = 'contain',
     ...props
   }) => {
     const videoTrack = useVideoTrack(participant);
-    const videoEl = useRef(null);
-    const [tileAspectRatio, setTileAspectRatio] = useState(aspectRatio);
-
-    const [layer, setLayer] = useState();
+    const videoRef = useRef(null);
+    const tileRef = useRef(null);
+    const [tileWidth, setTileWidth] = useState(0);

     /**
+     * Effect: Resize
+     *
      * Add optional event listener for resize event so the parent component
      * can know the video's native aspect ratio.
      */
     useEffect(() => {
-      const video = videoEl.current;
+      const video = videoRef.current;
       if (!onVideoResize || !video) return false;

       const handleResize = () => {
@@ -44,50 +47,61 @@ export const Tile = React.memo(
       };

       handleResize();
-
       video?.addEventListener('resize', handleResize);

       return () => video?.removeEventListener('resize', handleResize);
-    }, [onVideoResize, videoEl, participant]);
+    }, [onVideoResize, videoRef, participant]);

+    /**
+     * Effect: Resize Observer
+     *
+     * Adjust size of text overlay based on tile size
+     */
     useEffect(() => {
-      if (aspectRatio === tileAspectRatio) return;
-      setTileAspectRatio(aspectRatio);
-    }, [aspectRatio, tileAspectRatio]);
-
-    useEffect(() => {
-      if (
-        typeof rtcpeers === 'undefined' ||
-        rtcpeers?.getCurrentType() !== 'sfu'
-      )
-        return false;
-
-      const i = setInterval(() => {
-        setLayer(
-          rtcpeers.sfu.consumers[`${participant.id}/cam-video`]?._preferredLayer
-        );
-      }, 1500);
-
-      return () => clearInterval(i);
-    }, [participant]);
+      const tile = tileRef.current;
+      if (!tile || typeof ResizeObserver === 'undefined') return false;
+      let frame;
+      const resizeObserver = new ResizeObserver(() => {
+        if (frame) cancelAnimationFrame(frame);
+        frame = requestAnimationFrame(() => {
+          if (!tile) return;
+          const dimensions = tile?.getBoundingClientRect();
+          const { width } = dimensions;
+          setTileWidth(width);
+        });
+      });
+      resizeObserver.observe(tile);
+      return () => {
+        if (frame) cancelAnimationFrame(frame);
+        resizeObserver.disconnect();
+      };
+    }, [tileRef]);

     const cx = classNames('tile', videoFit, {
       mirrored,
       avatar: showAvatar && !videoTrack,
+      screenShare: participant.isScreenShare,
       active: showActiveSpeaker && participant.isActiveSpeaker,
+      small: tileWidth < SM_TILE_MAX_WIDTH,
     });

     return (
-      <div className={cx} {...props}>
+      <div ref={tileRef} className={cx} {...props}>
         {showName && (
           <div className="name">
-            {participant.isMicMuted && <IconMicMute />}
-            {participant.name}
-            {layer}
+            {participant.isMicMuted && !participant.isScreenShare && (
+              <IconMicMute />
+            )}
+            {participant.name}
           </div>
         )}
         {videoTrack ? (
-
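The new Resize Observer effect in Tile.js is what drives the `small` class: the tile's width is measured inside a ResizeObserver callback, throttled to at most one measurement per animation frame, and compared against `SM_TILE_MAX_WIDTH`. A stripped-down sketch of that pattern, with a generic `onWidth` callback standing in for `setTileWidth`:

```js
// Observe an element and report its width at most once per animation frame.
function observeTileWidth(el, onWidth) {
  let frame;
  const observer = new ResizeObserver(() => {
    if (frame) cancelAnimationFrame(frame);
    frame = requestAnimationFrame(() => {
      onWidth(el.getBoundingClientRect().width);
    });
  });
  observer.observe(el);
  return () => {
    if (frame) cancelAnimationFrame(frame);
    observer.disconnect();
  };
}
```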