change the way we treat the audio

Reinaldo Neto 2023-08-16 16:52:56 -03:00
parent 7900fad9ac
commit 927225c0bb
6 changed files with 147 additions and 58 deletions

View File

@@ -5,12 +5,20 @@ import styles from './styles';
 import { useTheme } from '../../../../theme';
 import Touchable from '../../Touchable';
 
-const AudioRate = ({ onChange, loaded = false }: { onChange: (value: number) => void; loaded: boolean }) => {
-	const [rate, setRate] = useState(1.0);
+const AudioRate = ({
+	onChange,
+	loaded = false,
+	rate: rateProps = 1
+}: {
+	onChange: (value: number) => void;
+	loaded: boolean;
+	rate: number;
+}) => {
+	const [rate, setRate] = useState(rateProps);
 	const { colors } = useTheme();
 
 	const onPress = () => {
-		const nextRate = rate === 2.0 ? 0.5 : rate + 0.5;
+		const nextRate = rate === 2 ? 0.5 : rate + 0.5;
 		setRate(nextRate);
 		onChange(nextRate);
 	};

View File

@@ -1,8 +1,7 @@
 import React, { useContext, useEffect, useRef, useState } from 'react';
 import { StyleProp, TextStyle, View } from 'react-native';
-import { AVPlaybackStatus, Audio, InterruptionModeAndroid, InterruptionModeIOS } from 'expo-av';
+import { AVPlaybackStatus } from 'expo-av';
 import { activateKeepAwakeAsync, deactivateKeepAwake } from 'expo-keep-awake';
-import { Sound } from 'expo-av/build/Audio/Sound';
 import { useSharedValue } from 'react-native-reanimated';
 
 import Markdown from '../../../markdown';
@@ -11,13 +10,12 @@ import { TGetCustomEmoji } from '../../../../definitions/IEmoji';
 import { IAttachment, IUserMessage } from '../../../../definitions';
 import { useTheme } from '../../../../theme';
 import { downloadMediaFile, getMediaCache } from '../../../../lib/methods/handleMediaDownload';
-import EventEmitter from '../../../../lib/methods/helpers/events';
-import { PAUSE_AUDIO } from '../../constants';
 import { fetchAutoDownloadEnabled } from '../../../../lib/methods/autoDownloadPreference';
 import styles from './styles';
 import Slider from './Slider';
 import AudioRate from './AudioRate';
 import PlayButton from './PlayButton';
+import handleAudioMedia from '../../../../lib/methods/handleAudioMedia';
 
 interface IMessageAudioProps {
 	file: IAttachment;
@@ -27,16 +25,6 @@ interface IMessageAudioProps {
 	author?: IUserMessage;
 }
 
-const mode = {
-	allowsRecordingIOS: false,
-	playsInSilentModeIOS: true,
-	staysActiveInBackground: true,
-	shouldDuckAndroid: true,
-	playThroughEarpieceAndroid: false,
-	interruptionModeIOS: InterruptionModeIOS.DoNotMix,
-	interruptionModeAndroid: InterruptionModeAndroid.DoNotMix
-};
-
 const MessageAudio = ({ file, getCustomEmoji, author, isReply, style }: IMessageAudioProps) => {
 	const [loading, setLoading] = useState(true);
 	const [paused, setPaused] = useState(true);
@@ -48,10 +36,12 @@ const MessageAudio = ({ file, getCustomEmoji, author, isReply, style }: IMessage
 	const { baseUrl, user } = useContext(MessageContext);
 	const { colors } = useTheme();
 
-	const sound = useRef<Sound | null>(null);
+	const audioUri = useRef<string>('');
+	const rate = useRef<number>(1);
 
 	const onPlaybackStatusUpdate = (status: AVPlaybackStatus) => {
 		if (status) {
+			onPlaying(status);
 			onLoad(status);
 			onProgress(status);
 			onEnd(status);
@@ -59,15 +49,24 @@ const MessageAudio = ({ file, getCustomEmoji, author, isReply, style }: IMessage
 	};
 
 	const loadAudio = async (audio: string) => {
-		const { sound: soundLoaded } = await Audio.Sound.createAsync({ uri: audio });
-		sound.current = soundLoaded;
-		sound.current.setOnPlaybackStatusUpdate(onPlaybackStatusUpdate);
+		await handleAudioMedia.loadAudio(audio);
+		audioUri.current = audio;
+		handleAudioMedia.setOnPlaybackStatusUpdate(audio, onPlaybackStatusUpdate);
+	};
+
+	const onPlaying = (data: AVPlaybackStatus) => {
+		if (data.isLoaded && data.isPlaying) {
+			setPaused(false);
+		} else {
+			setPaused(true);
+		}
 	};
 
 	const onLoad = (data: AVPlaybackStatus) => {
 		if (data.isLoaded && data.durationMillis) {
 			const durationSeconds = data.durationMillis / 1000;
 			duration.value = durationSeconds > 0 ? durationSeconds : 0;
+			rate.current = data.rate;
 		}
 	};
 
@@ -80,13 +79,11 @@ const MessageAudio = ({ file, getCustomEmoji, author, isReply, style }: IMessage
 		}
 	};
 
-	const onEnd = async (data: AVPlaybackStatus) => {
+	const onEnd = (data: AVPlaybackStatus) => {
 		if (data.isLoaded) {
 			if (data.didJustFinish) {
 				try {
-					await sound.current?.stopAsync();
 					setPaused(true);
-					EventEmitter.removeListener(PAUSE_AUDIO, pauseSound.current);
 					currentTime.value = 0;
 				} catch {
 					// do nothing
@@ -96,15 +93,9 @@ const MessageAudio = ({ file, getCustomEmoji, author, isReply, style }: IMessage
 	};
 
 	const setPosition = async (time: number) => {
-		await sound.current?.setPositionAsync(time);
+		await handleAudioMedia.setPositionAsync(audioUri.current, time);
 	};
 
-	const pauseSound = useRef(() => {
-		EventEmitter.removeListener(PAUSE_AUDIO, pauseSound.current);
-		setPaused(true);
-		playPause(true);
-	});
-
 	const getUrl = () => {
 		let url = file.audio_url;
 		if (url && !url.startsWith('http')) {
@@ -113,21 +104,12 @@ const MessageAudio = ({ file, getCustomEmoji, author, isReply, style }: IMessage
 		return url;
 	};
 
-	const togglePlayPause = () => {
-		setPaused(!paused);
-		playPause(!paused);
-	};
-
-	const playPause = async (isPaused: boolean) => {
+	const togglePlayPause = async () => {
 		try {
-			if (isPaused) {
-				await sound.current?.pauseAsync();
-				EventEmitter.removeListener(PAUSE_AUDIO, pauseSound.current);
+			if (!paused) {
+				await handleAudioMedia.pauseAudio(audioUri.current);
 			} else {
-				EventEmitter.emit(PAUSE_AUDIO);
-				EventEmitter.addEventListener(PAUSE_AUDIO, pauseSound.current);
-				await Audio.setAudioModeAsync(mode);
-				await sound.current?.playAsync();
+				await handleAudioMedia.playAudio(audioUri.current);
 			}
 		} catch {
 			// Do nothing
@@ -135,7 +117,7 @@ const MessageAudio = ({ file, getCustomEmoji, author, isReply, style }: IMessage
 	};
 
 	const setRate = async (value = 1.0) => {
-		await sound.current?.setRateAsync(value, true);
+		await handleAudioMedia.setRateAsync(audioUri.current, value);
 	};
 
 	const handleDownload = async () => {
@@ -207,18 +189,6 @@ const MessageAudio = ({ file, getCustomEmoji, author, isReply, style }: IMessage
 			await handleAutoDownload();
 		};
 		handleCache();
-
-		return () => {
-			EventEmitter.removeListener(PAUSE_AUDIO, pauseSound.current);
-			const unloadAsync = async () => {
-				try {
-					await sound.current?.unloadAsync();
-				} catch {
-					// Do nothing
-				}
-			};
-			unloadAsync();
-		};
 	}, []);
 
 	useEffect(() => {
@@ -243,7 +213,7 @@ const MessageAudio = ({ file, getCustomEmoji, author, isReply, style }: IMessage
 			>
 				<PlayButton disabled={isReply} loading={loading} paused={paused} cached={cached} onPress={onPress} />
 				<Slider currentTime={currentTime} duration={duration} loaded={!isReply && cached} onChangeTime={setPosition} />
-				<AudioRate onChange={setRate} loaded={!isReply && cached} />
+				<AudioRate onChange={setRate} loaded={!isReply && cached} rate={rate.current} />
 			</View>
 		</>
 	);

View File

@@ -0,0 +1,103 @@
+import { AVPlaybackStatus, Audio, InterruptionModeAndroid, InterruptionModeIOS } from 'expo-av';
+import { Sound } from 'expo-av/build/Audio/Sound';
+
+const mode = {
+	allowsRecordingIOS: false,
+	playsInSilentModeIOS: true,
+	staysActiveInBackground: true,
+	shouldDuckAndroid: true,
+	playThroughEarpieceAndroid: false,
+	interruptionModeIOS: InterruptionModeIOS.DoNotMix,
+	interruptionModeAndroid: InterruptionModeAndroid.DoNotMix
+};
+
+class HandleAudioMedia {
+	private audioQueue: { [uri: string]: Sound };
+	private audioPlaying: string;
+
+	constructor() {
+		this.audioQueue = {};
+		this.audioPlaying = '';
+	}
+
+	async loadAudio(uri: string): Promise<Sound> {
+		if (this.audioQueue[uri]) {
+			return this.audioQueue[uri];
+		}
+		const { sound } = await Audio.Sound.createAsync({ uri });
+		this.audioQueue[uri] = sound;
+		return sound;
+	}
+
+	onPlaybackStatusUpdate(uri: string, status: AVPlaybackStatus, callback: (status: AVPlaybackStatus) => void) {
+		if (status) {
+			callback(status);
+			this.onEnd(uri, status);
+		}
+	}
+
+	async onEnd(uri: string, status: AVPlaybackStatus) {
+		if (status.isLoaded) {
+			if (status.didJustFinish) {
+				try {
+					await this.audioQueue[uri]?.stopAsync();
+					this.audioPlaying = '';
+				} catch {
+					// do nothing
+				}
+			}
+		}
+	}
+
+	setOnPlaybackStatusUpdate(uri: string, callback: (status: AVPlaybackStatus) => void): void {
+		return this.audioQueue[uri]?.setOnPlaybackStatusUpdate(status => this.onPlaybackStatusUpdate(uri, status, callback));
+	}
+
+	async playAudio(uri: string) {
+		if (this.audioPlaying) {
+			await this.pauseAudio(this.audioPlaying);
+		}
+		await Audio.setAudioModeAsync(mode);
+		await this.audioQueue[uri]?.playAsync();
+		this.audioPlaying = uri;
+	}
+
+	async pauseAudio(uri: string) {
+		await this.audioQueue[uri]?.pauseAsync();
+		this.audioPlaying = '';
+	}
+
+	async setPositionAsync(uri: string, time: number) {
+		try {
+			await this.audioQueue[uri]?.setPositionAsync(time);
+		} catch {
+			// Do nothing
+			// It's returning an error with the code E_AV_SEEKING, however it's working as expected
+		}
+	}
+
+	async setRateAsync(uri: string, value = 1.0) {
+		await this.audioQueue[uri].setRateAsync(value, true);
+	}
+
+	async unloadAudio(uri: string) {
+		await this.audioQueue[uri]?.stopAsync();
+		await this.audioQueue[uri]?.unloadAsync();
+		this.audioPlaying = '';
+	}
+
+	async unloadAllAudios() {
+		const audiosLoaded = Object.values(this.audioQueue);
+		await Promise.allSettled(
+			audiosLoaded.map(async audio => {
+				await audio?.stopAsync();
+				await audio?.unloadAsync();
+			})
+		);
+		this.audioPlaying = '';
+		this.audioQueue = {};
+	}
+}
+
+const handleAudioMedia = new HandleAudioMedia();
+export default handleAudioMedia;
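For reference, this is roughly how a caller drives the new singleton (a minimal usage sketch, not part of the commit; the uri value is hypothetical and the import path depends on where the caller lives, see the imports added in this commit):

import handleAudioMedia from '../../lib/methods/handleAudioMedia';

const uri = 'file:///path/to/audio-message.mp3'; // hypothetical local file

const play = async () => {
	// load (or reuse) the Sound for this uri and subscribe to status updates
	await handleAudioMedia.loadAudio(uri);
	handleAudioMedia.setOnPlaybackStatusUpdate(uri, status => {
		// react to status.isLoaded / isPlaying / didJustFinish here
	});
	// playAudio pauses whatever else is playing before starting this uri
	await handleAudioMedia.playAudio(uri);
};

const stop = async () => {
	await handleAudioMedia.unloadAudio(uri); // stopAsync + unloadAsync
};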

View File

@@ -37,3 +37,4 @@ export * from './crashReport';
 export * from './parseSettings';
 export * from './subscribeRooms';
 export * from './serializeAsciiUrl';
+export * from './handleAudioMedia';

View File

@@ -93,6 +93,7 @@ import {
 import { Services } from '../../lib/services';
 import { withActionSheet, IActionSheetProvider } from '../../containers/ActionSheet';
 import { goRoom, TGoRoomItem } from '../../lib/methods/helpers/goRoom';
+import handleAudioMedia from '../../lib/methods/handleAudioMedia';
 
 type TStateAttrsUpdate = keyof IRoomViewState;
@@ -411,6 +412,7 @@ class RoomView extends React.Component<IRoomViewProps, IRoomViewState> {
 		const { editing, room } = this.state;
 		const db = database.active;
 		this.mounted = false;
+		await handleAudioMedia.unloadAllAudios();
 		if (!editing && this.messagebox && this.messagebox.current) {
 			const { text } = this.messagebox.current;
 			let obj: TSubscriptionModel | TThreadModel | null = null;

View File

@@ -69,3 +69,8 @@ jest.mock('react-native-math-view', () => {
 		MathText: react.View // {...} Named export
 	};
 });
+
+jest.mock('expo-av', () => ({
+	InterruptionModeIOS: { DoNotMix: 1 },
+	InterruptionModeAndroid: { DoNotMix: 1 }
+}));
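Presumably the mock only covers the InterruptionMode enums because those are the only expo-av values handleAudioMedia dereferences at module load. If a test ever exercised playback itself, the same factory could be extended along these lines (a sketch under that assumption, not part of the commit):

jest.mock('expo-av', () => ({
	InterruptionModeIOS: { DoNotMix: 1 },
	InterruptionModeAndroid: { DoNotMix: 1 },
	Audio: {
		setAudioModeAsync: jest.fn(),
		Sound: {
			// resolve createAsync with a stubbed Sound-like object
			createAsync: jest.fn().mockResolvedValue({
				sound: {
					playAsync: jest.fn(),
					pauseAsync: jest.fn(),
					stopAsync: jest.fn(),
					unloadAsync: jest.fn(),
					setPositionAsync: jest.fn(),
					setRateAsync: jest.fn(),
					setOnPlaybackStatusUpdate: jest.fn()
				}
			})
		}
	}
}));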