diff --git a/dailyjs/active-speaker/components/SpeakerView/SpeakerTile/SpeakerTile.js b/dailyjs/active-speaker/components/SpeakerView/SpeakerTile/SpeakerTile.js
index 88f8dc9..f75163e 100644
--- a/dailyjs/active-speaker/components/SpeakerView/SpeakerTile/SpeakerTile.js
+++ b/dailyjs/active-speaker/components/SpeakerView/SpeakerTile/SpeakerTile.js
@@ -37,7 +37,6 @@ export const SpeakerTile = ({ participant, screenRef }) => {
const { height, finalRatio, videoFit } = useMemo(
() =>
// Avoid cropping mobile videos, which have the nativeAspectRatio set
-
({
height: (nativeAspectRatio ?? ratio) >= MIN_RATIO ? '100%' : null,
finalRatio:
diff --git a/dailyjs/shared/components/Audio/Audio.js b/dailyjs/shared/components/Audio/Audio.js
index a0e1497..931ee56 100644
--- a/dailyjs/shared/components/Audio/Audio.js
+++ b/dailyjs/shared/components/Audio/Audio.js
@@ -1,79 +1,76 @@
/**
* Audio
* ---
- * Renders audio tags for each audible participant / screen share in the call
- * Note: it's very important to minimise DOM mutates for audio components
- * as iOS / Safari do a lot of browser 'magic' that may result in muted
- * tracks. We heavily memoize this component to avoid unnecassary re-renders.
+ * When working with audio elements it's very important to avoid mutating
+ * the DOM elements as much as possible to avoid audio pops and crackles.
+ * This component addresses two known browser quirks: Safari autoplay
+ * and Chrome's maximum media element limit. On Chrome we add all audio
+ * tracks into a single audio node using the CombinedAudioTrack component.
*/
-import React, { useRef, useEffect } from 'react';
-import { useParticipants } from '@dailyjs/shared/contexts/ParticipantsProvider';
-import useAudioTrack from '@dailyjs/shared/hooks/useAudioTrack';
-import PropTypes from 'prop-types';
+import React, { useEffect, useMemo } from 'react';
+import { useTracks } from '@dailyjs/shared/contexts/TracksProvider';
+import Bowser from 'bowser';
+import { Portal } from 'react-portal';
+import AudioTrack from './AudioTrack';
+import CombinedAudioTrack from './CombinedAudioTrack';
-const AudioItem = React.memo(
- ({ participant }) => {
- const audioRef = useRef(null);
- const audioTrack = useAudioTrack(participant);
+export const Audio = () => {
+ const { audioTracks } = useTracks();
- useEffect(() => {
- if (!audioTrack || !audioRef.current) return;
+ const renderedTracks = useMemo(
+ () =>
+ Object.entries(audioTracks).reduce(
+ (tracks, [id, track]) => ({ ...tracks, [id]: track }),
+ {}
+ ),
+ [audioTracks]
+ );
- // quick sanity to check to make sure this is an audio track...
- if (audioTrack.kind !== 'audio') return;
+  // On iOS Safari, when headphones are disconnected, all audio elements are paused.
+ // This means that when a user disconnects their headphones, that user will not
+ // be able to hear any other users until they mute/unmute their mics.
+ // To fix that, we call `play` on each audio track on all devicechange events.
+ useEffect(() => {
+ const playTracks = () => {
+ document.querySelectorAll('.audioTracks audio').forEach(async (audio) => {
+ try {
+ if (audio.paused && audio.readyState === audio.HAVE_ENOUGH_DATA) {
+ await audio?.play();
+ }
+ } catch (e) {
+ // Auto play failed
+ }
+ });
+ };
+ navigator.mediaDevices.addEventListener('devicechange', playTracks);
+ return () => {
+ navigator.mediaDevices.removeEventListener('devicechange', playTracks);
+ };
+ }, []);
- audioRef.current.srcObject = new MediaStream([audioTrack]);
- }, [audioTrack]);
-
- useEffect(() => {
- // On iOS safari, when headphones are disconnected, all audio elements are paused.
- // This means that when a user disconnects their headphones, that user will not
- // be able to hear any other users until they mute/unmute their mics.
- // To fix that, we call `play` on each audio track on all devicechange events.
- if (audioRef.currenet) {
- return false;
- }
- const startPlayingTrack = () => {
- audioRef.current?.play();
- };
-
- navigator.mediaDevices.addEventListener(
- 'devicechange',
- startPlayingTrack
- );
-
- return () =>
- navigator.mediaDevices.removeEventListener(
- 'devicechange',
- startPlayingTrack
- );
- }, [audioRef]);
-
- return (
-      <>
-        <audio autoPlay playsInline ref={audioRef} />
-      </>
- );
- },
- () => true
-);
-
-AudioItem.propTypes = {
- participant: PropTypes.object,
-};
-
-export const Audio = React.memo(() => {
- const { allParticipants } = useParticipants();
+ const tracksComponent = useMemo(() => {
+ const { browser } = Bowser.parse(navigator.userAgent);
+ if (browser.name === 'Chrome' && parseInt(browser.version, 10) >= 92) {
+      return <CombinedAudioTrack tracks={renderedTracks} />;
+ }
+ return Object.entries(renderedTracks).map(([id, track]) => (
+      <AudioTrack key={id} track={track} />
+ ));
+ }, [renderedTracks]);
return (
- <>
- {allParticipants.map(
-        (p) => !p.isLocal && <AudioItem participant={p} key={p.id} />
- )}
-    </>
+    <Portal>
+      <div className="audioTracks">{tracksComponent}</div>
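---

Reviewer note: CombinedAudioTrack's implementation isn't part of this diff, but the new header comment describes its job: fold every remote audio track into one audio node so Chrome (92+) never exceeds its cap on concurrent media elements. Below is a minimal sketch of that idea, assuming each entry in `tracks` holds a MediaStreamTrack under a `track` property (the real component's prop shape may differ):

    // Hypothetical sketch -- not the component shipped in this diff.
    import React, { useEffect, useRef } from 'react';
    import PropTypes from 'prop-types';

    const CombinedAudioTrack = ({ tracks }) => {
      const audioEl = useRef(null);

      useEffect(() => {
        const el = audioEl.current;
        if (!el) return;
        // Create the MediaStream once and keep it for the life of the
        // element, so we never re-assign srcObject (the kind of DOM
        // mutation the header comment warns about).
        if (!el.srcObject) el.srcObject = new MediaStream();
        const stream = el.srcObject;

        // Assumption: each entry exposes a MediaStreamTrack at `track`.
        const next = Object.values(tracks)
          .map((t) => t.track)
          .filter(Boolean);

        // Drop tracks that left the call...
        stream.getTracks().forEach((t) => {
          if (!next.includes(t)) stream.removeTrack(t);
        });
        // ...and add any newcomers.
        next.forEach((t) => {
          if (!stream.getTracks().includes(t)) stream.addTrack(t);
        });

        // Defensively resume playback; autoplay policy may have paused us.
        el.play().catch(() => {});
      }, [tracks]);

      return <audio autoPlay playsInline ref={audioEl} />;
    };

    CombinedAudioTrack.propTypes = {
      tracks: PropTypes.object,
    };

    export default CombinedAudioTrack;

Mutating one long-lived stream with addTrack/removeTrack, rather than building a fresh MediaStream per update, keeps the audio element's srcObject stable, which matches the "minimise DOM mutates" rationale in the comment this diff removes.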