audio merge for > chrome 92

This commit is contained in:
Jon 2021-07-28 12:20:58 +01:00
parent be1bfde45d
commit ef545d0480
5 changed files with 172 additions and 68 deletions

View File

@ -1,79 +1,70 @@
/**
* Audio
* ---
* Renders audio tags for each audible participant / screen share in the call
* Note: it's very important to minimise DOM mutates for audio components
* as iOS / Safari do a lot of browser 'magic' that may result in muted
* tracks. We heavily memoize this component to avoid unnecessary re-renders.
*/
import React, { useRef, useEffect } from 'react';
import { useParticipants } from '@dailyjs/shared/contexts/ParticipantsProvider';
import useAudioTrack from '@dailyjs/shared/hooks/useAudioTrack';
import PropTypes from 'prop-types';
import React, { useEffect, useMemo } from 'react';
import { useTracks } from '@dailyjs/shared/contexts/TracksProvider';
import Bowser from 'bowser';
import { Portal } from 'react-portal';
import AudioTrack from './AudioTrack';
import CombinedAudioTrack from './CombinedAudioTrack';
const AudioItem = React.memo(
({ participant }) => {
const audioRef = useRef(null);
const audioTrack = useAudioTrack(participant);
export const Audio = () => {
const { audioTracks } = useTracks();
useEffect(() => {
if (!audioTrack || !audioRef.current) return;
const renderedTracks = useMemo(
() =>
Object.entries(audioTracks).reduce(
(tracks, [id, track]) => ({ ...tracks, [id]: track }),
{}
),
[audioTracks]
);
// quick sanity check to make sure this is an audio track...
if (audioTrack.kind !== 'audio') return;
// On iOS safari, when headphones are disconnected, all audio elements are paused.
// This means that when a user disconnects their headphones, that user will not
// be able to hear any other users until they mute/unmute their mics.
// To fix that, we call `play` on each audio track on all devicechange events.
useEffect(() => {
const playTracks = () => {
document.querySelectorAll('.audioTracks audio').forEach(async (audio) => {
try {
if (audio.paused && audio.readyState === audio.HAVE_ENOUGH_DATA) {
await audio?.play();
}
} catch (e) {
// Auto play failed
}
});
};
navigator.mediaDevices.addEventListener('devicechange', playTracks);
return () => {
navigator.mediaDevices.removeEventListener('devicechange', playTracks);
};
}, []);
audioRef.current.srcObject = new MediaStream([audioTrack]);
}, [audioTrack]);
useEffect(() => {
// On iOS safari, when headphones are disconnected, all audio elements are paused.
// This means that when a user disconnects their headphones, that user will not
// be able to hear any other users until they mute/unmute their mics.
// To fix that, we call `play` on each audio track on all devicechange events.
if (audioRef.currenet) {
return false;
}
const startPlayingTrack = () => {
audioRef.current?.play();
};
navigator.mediaDevices.addEventListener(
'devicechange',
startPlayingTrack
);
return () =>
navigator.mediaDevices.removeEventListener(
'devicechange',
startPlayingTrack
);
}, [audioRef]);
return (
<>
<audio autoPlay playsInline ref={audioRef}>
<track kind="captions" />
</audio>
</>
);
},
() => true
);
AudioItem.propTypes = {
participant: PropTypes.object,
};
export const Audio = React.memo(() => {
const { allParticipants } = useParticipants();
const tracksComponent = useMemo(() => {
const { browser } = Bowser.parse(navigator.userAgent);
if (browser.name === 'Chrome' && parseInt(browser.version, 10) >= 92) {
return <CombinedAudioTrack tracks={renderedTracks} />;
}
return Object.entries(renderedTracks).map(([id, track]) => (
<AudioTrack key={id} track={track.persistentTrack} />
));
}, [renderedTracks]);
return (
<>
{allParticipants.map(
(p) => !p.isLocal && <AudioItem participant={p} key={p.id} />
)}
</>
<Portal key="AudioTracks">
<div className="audioTracks">
{tracksComponent}
<style jsx>{`
.audioTracks {
position: absolute;
visibility: hidden;
}
`}</style>
</div>
</Portal>
);
});
};
export default Audio;

View File

@ -0,0 +1,45 @@
import React, { useRef, useEffect } from 'react';
import PropTypes from 'prop-types';
const AudioTrack = React.memo(
({ track }) => {
const audioRef = useRef(null);
useEffect(() => {
if (!audioRef.current) return false;
let playTimeout;
const handleCanPlay = () => {
playTimeout = setTimeout(() => {
console.log('Unable to autoplay audio element');
}, 1500);
};
const handlePlay = () => {
clearTimeout(playTimeout);
};
audioRef.current.addEventListener('canplay', handleCanPlay);
audioRef.current.addEventListener('play', handlePlay);
audioRef.current.srcObject = new MediaStream([track]);
const audioEl = audioRef.current;
return () => {
audioEl?.removeEventListener('canplay', handleCanPlay);
audioEl?.removeEventListener('play', handlePlay);
};
}, [track]);
return track ? (
<audio autoPlay playsInline ref={audioRef}>
<track kind="captions" />
</audio>
) : null;
},
() => true
);
AudioTrack.propTypes = {
track: PropTypes.object,
};
export default AudioTrack;

View File

@ -0,0 +1,60 @@
import React, { useEffect, useRef } from 'react';
import PropTypes from 'prop-types';
import { useDeepCompareEffect, useDeepCompareMemo } from 'use-deep-compare';
const CombinedAudioTrack = ({ tracks }) => {
const audioEl = useRef(null);
useEffect(() => {
if (!audioEl) return;
audioEl.current.srcObject = new MediaStream();
}, []);
const trackIds = useDeepCompareMemo(
() => Object.values(tracks).map((t) => t?.persistentTrack?.id),
[tracks]
);
useDeepCompareEffect(() => {
const audio = audioEl.current;
if (!audio || !audio.srcObject) return;
const stream = audio.srcObject;
const allTracks = Object.values(tracks);
allTracks.forEach((track) => {
const persistentTrack = track?.persistentTrack;
if (persistentTrack) {
persistentTrack.addEventListener(
'ended',
(ev) => stream.removeTrack(ev.target),
{ once: true }
);
stream.addTrack(persistentTrack);
}
});
audio.load();
if (
stream
.getAudioTracks()
.some((t) => t.enabled && t.readyState === 'live') &&
audio.paused
) {
audio.play();
}
}, [tracks, trackIds]);
return (
<audio autoPlay playsInline ref={audioEl}>
<track kind="captions" />
</audio>
);
};
CombinedAudioTrack.propTypes = {
tracks: PropTypes.object,
};
export default CombinedAudioTrack;

View File

@ -13,6 +13,7 @@
"prop-types": "^15.7.2",
"react": "^17.0.2",
"react-dom": "^17.0.2",
"react-portal": "^4.2.1",
"shallow-equal": "^1.2.1",
"use-deep-compare": "^1.1.0"
}

View File

@ -2800,7 +2800,7 @@ progress@^2.0.0:
resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8"
integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==
prop-types@15.7.2, prop-types@^15.7.2:
prop-types@15.7.2, prop-types@^15.5.8, prop-types@^15.7.2:
version "15.7.2"
resolved "https://registry.yarnpkg.com/prop-types/-/prop-types-15.7.2.tgz#52c41e75b8c87e72b9d9360e0206b99dcbffa6c5"
integrity sha512-8QQikdH7//R2vurIJSutZ1smHYTcLpRWEOlHnzcWHmBYrOGUysKwSsrC89BCiFj3CbrfJ/nXFdJepOVrY1GCHQ==
@ -2907,6 +2907,13 @@ react-is@^16.8.1:
resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4"
integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==
react-portal@^4.2.1:
version "4.2.1"
resolved "https://registry.yarnpkg.com/react-portal/-/react-portal-4.2.1.tgz#12c1599238c06fb08a9800f3070bea2a3f78b1a6"
integrity sha512-fE9kOBagwmTXZ3YGRYb4gcMy+kSA+yLO0xnPankjRlfBv4uCpFXqKPfkpsGQQR15wkZ9EssnvTOl1yMzbkxhPQ==
dependencies:
prop-types "^15.5.8"
react-refresh@0.8.3:
version "0.8.3"
resolved "https://registry.yarnpkg.com/react-refresh/-/react-refresh-0.8.3.tgz#721d4657672d400c5e3c75d063c4a85fb2d5d68f"