feat: play audio in sequence (#5526)

Co-authored-by: Sathurshan <jsathu07@gmail.com>
Co-authored-by: Diego Mello <diegolmello@gmail.com>

parent 37e936e1d6
commit 316f771a8f
@@ -10,10 +10,10 @@ import styles from './styles';
 import Seek from './Seek';
 import PlaybackSpeed from './PlaybackSpeed';
 import PlayButton from './PlayButton';
-import EventEmitter from '../../lib/methods/helpers/events';
-import audioPlayer, { AUDIO_FOCUSED } from '../../lib/methods/audioPlayer';
+import AudioManager from '../../lib/methods/AudioManager';
 import { AUDIO_PLAYBACK_SPEED, AVAILABLE_SPEEDS } from './constants';
 import { TDownloadState } from '../../lib/methods/handleMediaDownload';
+import { emitter } from '../../lib/methods/helpers';
 import { TAudioState } from './types';
 import { useUserPreferences } from '../../lib/methods';
@@ -86,15 +86,15 @@ const AudioPlayer = ({
 	};

 	const setPosition = async (time: number) => {
-		await audioPlayer.setPositionAsync(audioUri.current, time);
+		await AudioManager.setPositionAsync(audioUri.current, time);
 	};

 	const togglePlayPause = async () => {
 		try {
 			if (!paused) {
-				await audioPlayer.pauseAudio(audioUri.current);
+				await AudioManager.pauseAudio();
 			} else {
-				await audioPlayer.playAudio(audioUri.current);
+				await AudioManager.playAudio(audioUri.current);
 			}
 		} catch {
 			// Do nothing
@@ -102,7 +102,7 @@ const AudioPlayer = ({
 	};

 	useEffect(() => {
-		audioPlayer.setRateAsync(audioUri.current, playbackSpeed);
+		AudioManager.setRateAsync(audioUri.current, playbackSpeed);
 	}, [playbackSpeed]);

 	const onPress = () => {
@@ -116,11 +116,13 @@ const AudioPlayer = ({
 	};

 	useEffect(() => {
+		if (fileUri) {
 		InteractionManager.runAfterInteractions(async () => {
-			audioUri.current = await audioPlayer.loadAudio({ msgId, rid, uri: fileUri });
-			audioPlayer.setOnPlaybackStatusUpdate(audioUri.current, onPlaybackStatusUpdate);
-			audioPlayer.setRateAsync(audioUri.current, playbackSpeed);
+			audioUri.current = await AudioManager.loadAudio({ msgId, rid, uri: fileUri });
+			AudioManager.setOnPlaybackStatusUpdate(audioUri.current, onPlaybackStatusUpdate);
+			AudioManager.setRateAsync(audioUri.current, playbackSpeed);
 		});
+		}
 	}, [fileUri]);

 	useEffect(() => {
@@ -133,20 +135,26 @@ const AudioPlayer = ({

 	useEffect(() => {
 		const unsubscribeFocus = navigation.addListener('focus', () => {
-			audioPlayer.setOnPlaybackStatusUpdate(audioUri.current, onPlaybackStatusUpdate);
+			AudioManager.setOnPlaybackStatusUpdate(audioUri.current, onPlaybackStatusUpdate);
+			AudioManager.addAudioRendered(audioUri.current);
+		});
+		const unsubscribeBlur = navigation.addListener('blur', () => {
+			AudioManager.removeAudioRendered(audioUri.current);
 		});

 		return () => {
 			unsubscribeFocus();
+			unsubscribeBlur();
 		};
 	}, [navigation]);

 	useEffect(() => {
-		const listener = EventEmitter.addEventListener(AUDIO_FOCUSED, ({ audioFocused }: { audioFocused: string }) => {
-			setFocused(audioFocused === audioUri.current);
-		});
+		const audioFocusedEventHandler = (audioFocused: string) => {
+			setFocused(!!audioFocused && audioFocused === audioUri.current);
+		};
+		emitter.on('audioFocused', audioFocusedEventHandler);
 		return () => {
-			EventEmitter.removeListener(AUDIO_FOCUSED, listener);
+			emitter.off('audioFocused', audioFocusedEventHandler);
 		};
 	}, []);
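With this change the AudioPlayer component tracks focus through the lightweight `emitter` helper ('audioFocused' carries the focused audio key, or an empty string when playback ends) instead of the legacy EventEmitter and the AUDIO_FOCUSED constant. Below is a minimal sketch of that contract, assuming a mitt-style typed emitter; the app's real `emitter` and its event map live in lib/methods/helpers, and the key value here is hypothetical.

import mitt from 'mitt';

// Assumed shape: the real TEmitterEvents map gains `audioFocused: string` in this commit.
type TEmitterEvents = {
	audioFocused: string;
};

const emitter = mitt<TEmitterEvents>();

// What each rendered AudioPlayer does: mark itself focused only when its own key is emitted.
const myAudioKey = 'msgId-rid-file:///audio.mp3'; // hypothetical key, mirrors AudioManager's `${msgId}-${rid}-${uri}`
const audioFocusedEventHandler = (audioFocused: string) => {
	const focused = !!audioFocused && audioFocused === myAudioKey;
	console.log(focused ? 'focused: show playing UI' : 'unfocused');
};
emitter.on('audioFocused', audioFocusedEventHandler);

// What AudioManager emits when playback starts and when a track finishes:
emitter.emit('audioFocused', myAudioKey); // focus this player
emitter.emit('audioFocused', '');         // clear focus

// Teardown, mirroring the component's useEffect cleanup:
emitter.off('audioFocused', audioFocusedEventHandler);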
@@ -12,13 +12,13 @@ import sharedStyles from '../../../../views/Styles';
 import { ReviewButton } from './ReviewButton';
 import { useMessageComposerApi } from '../../context';
 import { sendFileMessage } from '../../../../lib/methods';
-import { IUpload } from '../../../../definitions';
-import log from '../../../../lib/methods/helpers/log';
-import { useRoomContext } from '../../../../views/RoomView/context';
+import { RECORDING_EXTENSION, RECORDING_MODE, RECORDING_SETTINGS } from '../../../../lib/constants';
 import { useAppSelector } from '../../../../lib/hooks';
+import log from '../../../../lib/methods/helpers/log';
+import { IUpload } from '../../../../definitions';
+import { useRoomContext } from '../../../../views/RoomView/context';
 import { useCanUploadFile } from '../../hooks';
 import { Duration, IDurationRef } from './Duration';
-import { RECORDING_EXTENSION, RECORDING_MODE, RECORDING_SETTINGS } from './constants';
 import AudioPlayer from '../../../AudioPlayer';
 import { CancelButton } from './CancelButton';
 import i18n from '../../../../i18n';
@@ -14,7 +14,8 @@ import {
 } from '../../lib/methods/handleMediaDownload';
 import { fetchAutoDownloadEnabled } from '../../lib/methods/autoDownloadPreference';
 import AudioPlayer from '../AudioPlayer';
-import { useAppSelector } from '../../lib/hooks';
+import { useAudioUrl } from './hooks/useAudioUrl';
+import { getAudioUrlToCache } from '../../lib/methods/getAudioUrl';

 interface IMessageAudioProps {
 	file: IAttachment;
@@ -30,17 +31,8 @@ const MessageAudio = ({ file, getCustomEmoji, author, isReply, style, msg }: IMe
 	const [downloadState, setDownloadState] = useState<TDownloadState>('loading');
 	const [fileUri, setFileUri] = useState('');
 	const { baseUrl, user, id, rid } = useContext(MessageContext);
-	const { cdnPrefix } = useAppSelector(state => ({
-		cdnPrefix: state.settings.CDN_PREFIX as string
-	}));
-
-	const getUrl = () => {
-		let url = file.audio_url;
-		if (url && !url.startsWith('http')) {
-			url = `${cdnPrefix || baseUrl}${file.audio_url}`;
-		}
-		return url;
-	};
+	const audioUrl = useAudioUrl({ audioUrl: file.audio_url });

 	const onPlayButtonPress = async () => {
 		if (downloadState === 'to-download') {
@@ -55,10 +47,9 @@ const MessageAudio = ({ file, getCustomEmoji, author, isReply, style, msg }: IMe
 	const handleDownload = async () => {
 		setDownloadState('loading');
 		try {
-			const url = getUrl();
-			if (url) {
+			if (audioUrl) {
 				const audio = await downloadMediaFile({
-					downloadUrl: `${url}?rc_uid=${user.id}&rc_token=${user.token}`,
+					downloadUrl: getAudioUrlToCache({ token: user.token, userId: user.id, url: audioUrl }),
 					type: 'audio',
 					mimeType: file.audio_type
 				});
@@ -71,9 +62,8 @@ const MessageAudio = ({ file, getCustomEmoji, author, isReply, style, msg }: IMe
 	};

 	const handleAutoDownload = async () => {
-		const url = getUrl();
 		try {
-			if (url) {
+			if (audioUrl) {
 				const isCurrentUserAuthor = author?._id === user.id;
 				const isAutoDownloadEnabled = fetchAutoDownloadEnabled('audioPreferenceDownload');
 				if (isAutoDownloadEnabled || isCurrentUserAuthor) {
@@ -91,7 +81,7 @@ const MessageAudio = ({ file, getCustomEmoji, author, isReply, style, msg }: IMe
 			const cachedAudioResult = await getMediaCache({
 				type: 'audio',
 				mimeType: file.audio_type,
-				urlToCache: getUrl()
+				urlToCache: audioUrl
 			});
 			if (cachedAudioResult?.exists) {
 				setFileUri(cachedAudioResult.uri);
@@ -103,10 +93,9 @@ const MessageAudio = ({ file, getCustomEmoji, author, isReply, style, msg }: IMe
 	const handleResumeDownload = async () => {
 		try {
 			setDownloadState('loading');
-			const url = getUrl();
-			if (url) {
+			if (audioUrl) {
 				const videoUri = await resumeMediaFile({
-					downloadUrl: url
+					downloadUrl: audioUrl
 				});
 				setFileUri(videoUri);
 				setDownloadState('downloaded');
@@ -122,19 +111,21 @@ const MessageAudio = ({ file, getCustomEmoji, author, isReply, style, msg }: IMe
 			if (isAudioCached) {
 				return;
 			}
-			const audioUrl = getUrl();
 			if (audioUrl && isDownloadActive(audioUrl)) {
 				handleResumeDownload();
 				return;
 			}
 			await handleAutoDownload();
 		};
+		if (audioUrl) {
 			handleCache();
-	}, []);
+		}
+	}, [audioUrl]);

 	if (!baseUrl) {
 		return null;
 	}

 	return (
 		<>
 			<Markdown msg={msg} style={[isReply && style]} username={user.username} getCustomEmoji={getCustomEmoji} />
@@ -0,0 +1,25 @@
+import { useState, useEffect } from 'react';
+
+import { useAppSelector } from '../../../lib/hooks';
+import { getAudioUrl } from '../../../lib/methods/getAudioUrl';
+
+export const useAudioUrl = ({ audioUrl }: { audioUrl?: string }): string => {
+	const [filePath, setFilePath] = useState<string>('');
+
+	const { cdnPrefix, baseUrl } = useAppSelector(state => ({
+		cdnPrefix: state.settings.CDN_PREFIX as string,
+		baseUrl: state.server.server
+	}));
+
+	useEffect(() => {
+		if (!audioUrl) {
+			return;
+		}
+		const url = getAudioUrl({ baseUrl, cdnPrefix, audioUrl });
+		if (url) {
+			setFilePath(url);
+		}
+	}, [audioUrl, baseUrl, cdnPrefix]);
+
+	return filePath;
+};
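The hook returns an empty string until the store-backed URL has been resolved, so consumers guard on the value before using it, as the message Audio component above does. A small consumer sketch follows; the component and prop names are illustrative, not the app's real ones.

import React, { useEffect } from 'react';

import { useAudioUrl } from './useAudioUrl';

const AudioAttachment = ({ attachmentUrl }: { attachmentUrl?: string }): React.ReactElement | null => {
	const audioUrl = useAudioUrl({ audioUrl: attachmentUrl });

	useEffect(() => {
		if (audioUrl) {
			// safe point to check the media cache or start a download
		}
	}, [audioUrl]);

	return null; // render the AudioPlayer once a local file URI exists
};

export default AudioAttachment;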
@@ -36,3 +36,13 @@ export const RECORDING_MODE: AudioMode = {
 	interruptionModeIOS: InterruptionModeIOS.DoNotMix,
 	interruptionModeAndroid: InterruptionModeAndroid.DoNotMix
 };
+
+export const AUDIO_MODE: AudioMode = {
+	allowsRecordingIOS: false,
+	playsInSilentModeIOS: true,
+	staysActiveInBackground: true,
+	shouldDuckAndroid: true,
+	playThroughEarpieceAndroid: false,
+	interruptionModeIOS: InterruptionModeIOS.DoNotMix,
+	interruptionModeAndroid: InterruptionModeAndroid.DoNotMix
+};
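AUDIO_MODE is the expo-av playback configuration that the new AudioManager applies right before starting a sound (see `Audio.setAudioModeAsync(AUDIO_MODE)` further down in this diff). A minimal sketch of that call, assuming the constants import path used elsewhere in the commit; the file URI is made up.

import { Audio } from 'expo-av';

import { AUDIO_MODE } from '../constants'; // the export added above

const playOnce = async (uri: string): Promise<void> => {
	await Audio.setAudioModeAsync(AUDIO_MODE); // DoNotMix + keep playing in silent mode / background
	const { sound } = await Audio.Sound.createAsync({ uri });
	await sound.playAsync();
};

playOnce('file:///path/to/cached-audio.mp3');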
@@ -1,3 +1,4 @@
+export * from './audio';
 export * from './colors';
 export * from './constantDisplayMode';
 export * from './emojis';
@@ -0,0 +1,179 @@
+import { AVPlaybackStatus, Audio } from 'expo-av';
+import { Q } from '@nozbe/watermelondb';
+import moment from 'moment';
+
+import { getMessageById } from '../database/services/Message';
+import database from '../database';
+import { getFilePathAudio } from './getFilePathAudio';
+import { TMessageModel } from '../../definitions';
+import { AUDIO_MODE } from '../constants';
+import { emitter } from './helpers';
+
+const getAudioKey = ({ msgId, rid, uri }: { msgId?: string; rid: string; uri: string }) => `${msgId}-${rid}-${uri}`;
+
+class AudioManagerClass {
+	private audioQueue: { [audioKey: string]: Audio.Sound };
+	private audioPlaying: string;
+	private audiosRendered: Set<string>;
+
+	constructor() {
+		this.audioQueue = {};
+		this.audioPlaying = '';
+		this.audiosRendered = new Set<string>();
+	}
+
+	addAudioRendered = (audioKey: string) => {
+		this.audiosRendered.add(audioKey);
+	};
+
+	removeAudioRendered = (audioKey: string) => {
+		this.audiosRendered.delete(audioKey);
+	};
+
+	async loadAudio({ msgId, rid, uri }: { rid: string; msgId?: string; uri: string }): Promise<string> {
+		const audioKey = getAudioKey({ msgId, rid, uri });
+		this.addAudioRendered(audioKey);
+		if (this.audioQueue[audioKey]) {
+			return audioKey;
+		}
+		const { sound } = await Audio.Sound.createAsync({ uri }, { androidImplementation: 'MediaPlayer' });
+		this.audioQueue[audioKey] = sound;
+		return audioKey;
+	}
+
+	async playAudio(audioKey: string) {
+		if (this.audioPlaying) {
+			await this.pauseAudio();
+		}
+		await Audio.setAudioModeAsync(AUDIO_MODE);
+		await this.audioQueue[audioKey]?.playAsync();
+		this.audioPlaying = audioKey;
+		emitter.emit('audioFocused', audioKey);
+	}
+
+	async pauseAudio() {
+		if (this.audioPlaying) {
+			await this.audioQueue[this.audioPlaying]?.pauseAsync();
+			this.audioPlaying = '';
+		}
+	}
+
+	async setPositionAsync(audioKey: string, time: number) {
+		try {
+			await this.audioQueue[audioKey]?.setPositionAsync(time);
+		} catch {
+			// Do nothing
+		}
+	}
+
+	async setRateAsync(audioKey: string, value = 1.0) {
+		try {
+			await this.audioQueue[audioKey].setRateAsync(value, true);
+		} catch {
+			// Do nothing
+		}
+	}
+
+	onPlaybackStatusUpdate(audioKey: string, status: AVPlaybackStatus, callback: (status: AVPlaybackStatus) => void) {
+		if (status) {
+			callback(status);
+			this.onEnd(audioKey, status);
+		}
+	}
+
+	setOnPlaybackStatusUpdate(audioKey: string, callback: (status: AVPlaybackStatus) => void): void {
+		return this.audioQueue[audioKey]?.setOnPlaybackStatusUpdate(status =>
+			this.onPlaybackStatusUpdate(audioKey, status, callback)
+		);
+	}
+
+	async onEnd(audioKey: string, status: AVPlaybackStatus) {
+		if (status.isLoaded && status.didJustFinish) {
+			try {
+				await this.audioQueue[audioKey]?.stopAsync();
+				this.audioPlaying = '';
+				emitter.emit('audioFocused', '');
+				await this.playNextAudioInSequence(audioKey);
+			} catch {
+				// do nothing
+			}
+		}
+	}
+
+	getNextAudioKey = ({ message, rid }: { message: TMessageModel; rid: string }) => {
+		if (!message.attachments) {
+			return;
+		}
+		const { audio_url: audioUrl, audio_type: audioType } = message.attachments[0];
+		const uri = getFilePathAudio({ audioUrl, audioType });
+		if (!uri) {
+			return;
+		}
+		return getAudioKey({
+			msgId: message.id,
+			rid,
+			uri
+		});
+	};
+
+	async getNextAudioMessage(msgId: string, rid: string) {
+		const msg = await getMessageById(msgId);
+		if (msg) {
+			const db = database.active;
+			const whereClause: Q.Clause[] = [
+				Q.experimentalSortBy('ts', Q.asc),
+				Q.where('ts', Q.gt(moment(msg.ts).valueOf())),
+				Q.experimentalTake(1)
+			];
+
+			if (msg.tmid) {
+				whereClause.push(Q.where('tmid', msg.tmid || msg.id));
+			} else {
+				whereClause.push(Q.where('rid', rid), Q.where('tmid', null));
+			}
+
+			const [message] = await db
+				.get('messages')
+				.query(...whereClause)
+				.fetch();
+			return message;
+		}
+
+		return null;
+	}
+
+	async playNextAudioInSequence(previousAudioKey: string) {
+		const [msgId, rid] = previousAudioKey.split('-');
+		const nextMessage = await this.getNextAudioMessage(msgId, rid);
+		if (nextMessage && nextMessage.attachments) {
+			const nextAudioInSeqKey = this.getNextAudioKey({ message: nextMessage, rid });
+			if (nextAudioInSeqKey && this.audioQueue?.[nextAudioInSeqKey] && this.audiosRendered.has(nextAudioInSeqKey)) {
+				await this.playAudio(nextAudioInSeqKey);
+			}
+		}
+	}
+
+	async unloadRoomAudios(rid?: string) {
+		if (!rid) {
+			return;
+		}
+		const regExp = new RegExp(rid);
+		const roomAudioKeysLoaded = Object.keys(this.audioQueue).filter(audioKey => regExp.test(audioKey));
+		const roomAudiosLoaded = roomAudioKeysLoaded.map(key => this.audioQueue[key]);
+		try {
+			await Promise.all(
+				roomAudiosLoaded.map(async audio => {
+					await audio?.stopAsync();
+					await audio?.unloadAsync();
+				})
+			);
+		} catch {
+			// Do nothing
+		}
+		roomAudioKeysLoaded.forEach(key => delete this.audioQueue[key]);
+		this.audioPlaying = '';
+	}
+}
+
+const AudioManager = new AudioManagerClass();
+export default AudioManager;
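A consumer sketch of the AudioManager singleton defined above; ids and paths are hypothetical. Sequencing is handled internally: when a sound reports `didJustFinish`, `onEnd` looks up the next newer audio message in the same room or thread and plays it, provided it is already loaded and currently rendered.

import { AVPlaybackStatus } from 'expo-av';

import AudioManager from './AudioManager';

const rid = 'GENERAL';
const msgId = 'abc123';
const uri = 'file:///path/to/cached-audio.mp3';

const onStatus = (status: AVPlaybackStatus) => {
	if (status.isLoaded) {
		console.log(`position ${status.positionMillis}ms, finished: ${status.didJustFinish}`);
	}
};

const play = async (): Promise<void> => {
	// loadAudio registers the key as rendered and returns `${msgId}-${rid}-${uri}`
	const audioKey = await AudioManager.loadAudio({ msgId, rid, uri });
	AudioManager.setOnPlaybackStatusUpdate(audioKey, onStatus);
	await AudioManager.setRateAsync(audioKey, 1.5); // optional playback speed
	await AudioManager.playAudio(audioKey); // pauses whatever was playing, then emits 'audioFocused'
};

// Leaving the room stops and unloads every sound whose key contains the rid:
const leaveRoom = (): Promise<void> => AudioManager.unloadRoomAudios(rid);

play().then(() => setTimeout(leaveRoom, 60_000));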
@@ -1,152 +0,0 @@
-import { AVPlaybackStatus, Audio, InterruptionModeAndroid, InterruptionModeIOS } from 'expo-av';
-
-import EventEmitter from './helpers/events';
-
-export const AUDIO_FOCUSED = 'AUDIO_FOCUSED';
-
-const AUDIO_MODE = {
-	allowsRecordingIOS: false,
-	playsInSilentModeIOS: true,
-	staysActiveInBackground: true,
-	shouldDuckAndroid: true,
-	playThroughEarpieceAndroid: false,
-	interruptionModeIOS: InterruptionModeIOS.DoNotMix,
-	interruptionModeAndroid: InterruptionModeAndroid.DoNotMix
-};
-
-class AudioPlayer {
-	private audioQueue: { [audioKey: string]: Audio.Sound };
-	private audioPlaying: string;
-
-	constructor() {
-		this.audioQueue = {};
-		this.audioPlaying = '';
-	}
-
-	async loadAudio({ msgId, rid, uri }: { rid: string; msgId?: string; uri: string }): Promise<string> {
-		const audioKey = `${msgId}-${rid}-${uri}`;
-		if (this.audioQueue[audioKey]) {
-			return audioKey;
-		}
-		const { sound } = await Audio.Sound.createAsync({ uri }, { androidImplementation: 'MediaPlayer' });
-		this.audioQueue[audioKey] = sound;
-		return audioKey;
-	}
-
-	onPlaybackStatusUpdate(audioKey: string, status: AVPlaybackStatus, callback: (status: AVPlaybackStatus) => void) {
-		if (status) {
-			callback(status);
-			this.onEnd(audioKey, status);
-		}
-	}
-
-	async onEnd(audioKey: string, status: AVPlaybackStatus) {
-		if (status.isLoaded) {
-			if (status.didJustFinish) {
-				try {
-					await this.audioQueue[audioKey]?.stopAsync();
-					this.audioPlaying = '';
-					EventEmitter.emit(AUDIO_FOCUSED, { audioFocused: '' });
-				} catch {
-					// do nothing
-				}
-			}
-		}
-	}
-
-	setOnPlaybackStatusUpdate(audioKey: string, callback: (status: AVPlaybackStatus) => void): void {
-		return this.audioQueue[audioKey]?.setOnPlaybackStatusUpdate(status =>
-			this.onPlaybackStatusUpdate(audioKey, status, callback)
-		);
-	}
-
-	async playAudio(audioKey: string) {
-		if (this.audioPlaying) {
-			await this.pauseAudio(this.audioPlaying);
-		}
-		await Audio.setAudioModeAsync(AUDIO_MODE);
-		await this.audioQueue[audioKey]?.playAsync();
-		this.audioPlaying = audioKey;
-		EventEmitter.emit(AUDIO_FOCUSED, { audioFocused: audioKey });
-	}
-
-	async pauseAudio(audioKey: string) {
-		await this.audioQueue[audioKey]?.pauseAsync();
-		this.audioPlaying = '';
-	}
-
-	async pauseCurrentAudio() {
-		if (this.audioPlaying) {
-			await this.pauseAudio(this.audioPlaying);
-		}
-	}
-
-	async setPositionAsync(audioKey: string, time: number) {
-		try {
-			await this.audioQueue[audioKey]?.setPositionAsync(time);
-		} catch {
-			// Do nothing
-		}
-	}
-
-	async setRateAsync(audioKey: string, value = 1.0) {
-		try {
-			await this.audioQueue[audioKey].setRateAsync(value, true);
-		} catch {
-			// Do nothing
-		}
-	}
-
-	async unloadAudio(audioKey: string) {
-		await this.audioQueue[audioKey]?.stopAsync();
-		await this.audioQueue[audioKey]?.unloadAsync();
-		delete this.audioQueue[audioKey];
-		this.audioPlaying = '';
-	}
-
-	async unloadCurrentAudio() {
-		if (this.audioPlaying) {
-			await this.unloadAudio(this.audioPlaying);
-		}
-	}
-
-	async unloadRoomAudios(rid?: string) {
-		if (!rid) {
-			return;
-		}
-		const regExp = new RegExp(rid);
-		const roomAudioKeysLoaded = Object.keys(this.audioQueue).filter(audioKey => regExp.test(audioKey));
-		const roomAudiosLoaded = roomAudioKeysLoaded.map(key => this.audioQueue[key]);
-		try {
-			await Promise.all(
-				roomAudiosLoaded.map(async audio => {
-					await audio?.stopAsync();
-					await audio?.unloadAsync();
-				})
-			);
-		} catch {
-			// Do nothing
-		}
-		roomAudioKeysLoaded.forEach(key => delete this.audioQueue[key]);
-		this.audioPlaying = '';
-	}
-
-	async unloadAllAudios() {
-		const audiosLoaded = Object.values(this.audioQueue);
-		try {
-			await Promise.all(
-				audiosLoaded.map(async audio => {
-					await audio?.stopAsync();
-					await audio?.unloadAsync();
-				})
-			);
-		} catch {
-			// Do nothing
-		}
-		this.audioPlaying = '';
-		this.audioQueue = {};
-	}
-}
-
-const audioPlayer = new AudioPlayer();
-export default audioPlayer;
@@ -0,0 +1,9 @@
+export const getAudioUrl = ({ audioUrl, baseUrl, cdnPrefix }: { audioUrl?: string; baseUrl: string; cdnPrefix: string }) => {
+	if (audioUrl && !audioUrl.startsWith('http')) {
+		audioUrl = `${cdnPrefix || baseUrl}${audioUrl}`;
+	}
+	return audioUrl;
+};
+
+export const getAudioUrlToCache = ({ token, userId, url }: { url?: string; userId: string; token: string }) =>
+	`${url}?rc_uid=${userId}&rc_token=${token}`;
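A quick illustration of the two helpers above with made-up values:

import { getAudioUrl, getAudioUrlToCache } from './getAudioUrl';

// Relative attachment paths are prefixed with the CDN (or the server URL as a fallback):
getAudioUrl({ audioUrl: '/file-upload/abc/voice.mp3', cdnPrefix: 'https://cdn.example.com', baseUrl: 'https://open.rocket.chat' });
// -> 'https://cdn.example.com/file-upload/abc/voice.mp3'

// Absolute URLs pass through untouched:
getAudioUrl({ audioUrl: 'https://other.host/voice.mp3', cdnPrefix: '', baseUrl: 'https://open.rocket.chat' });
// -> 'https://other.host/voice.mp3'

// The cache/download URL appends the auth query string:
getAudioUrlToCache({ url: 'https://cdn.example.com/file-upload/abc/voice.mp3', userId: 'user-id', token: 'auth-token' });
// -> 'https://cdn.example.com/file-upload/abc/voice.mp3?rc_uid=user-id&rc_token=auth-token'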
@@ -0,0 +1,16 @@
+import { getAudioUrl, getAudioUrlToCache } from './getAudioUrl';
+import { store } from '../store/auxStore';
+import { getFilePath } from './handleMediaDownload';
+import { getUserSelector } from '../../selectors/login';
+
+export const getFilePathAudio = ({ audioUrl, audioType }: { audioUrl?: string; audioType?: string }): string | null => {
+	const baseUrl = store.getState().server.server;
+	const cdnPrefix = store.getState().settings.CDN_PREFIX as string;
+	const { id: userId, token } = getUserSelector(store.getState());
+	const url = getAudioUrl({ baseUrl, cdnPrefix, audioUrl });
+	return getFilePath({
+		urlToCache: getAudioUrlToCache({ token, userId, url }),
+		type: 'audio',
+		mimeType: audioType
+	});
+};
@@ -108,9 +108,17 @@ const ensureDirAsync = async (dir: string, intermediates = true): Promise<void>
 	return ensureDirAsync(dir, intermediates);
 };

-const getFilePath = ({ type, mimeType, urlToCache }: { type: MediaTypes; mimeType?: string; urlToCache?: string }) => {
+export const getFilePath = ({
+	type,
+	mimeType,
+	urlToCache
+}: {
+	type: MediaTypes;
+	mimeType?: string;
+	urlToCache?: string;
+}): string | null => {
 	if (!urlToCache) {
-		return;
+		return null;
 	}
 	const folderPath = getFolderPath(urlToCache);
 	const urlWithoutQueryString = urlToCache.split('?')[0];
@@ -11,6 +11,7 @@ export type TEmitterEvents = {
 	setKeyboardHeightThread: number;
 	setComposerHeight: number;
 	setComposerHeightThread: number;
+	audioFocused: string;
 };

 export type TKeyEmitterEvent = keyof TEmitterEvents;
@@ -12,8 +12,7 @@ type TEventEmitterEmmitArgs =
 	| { visible: boolean; onCancel?: null | Function }
 	| { cancel: () => void }
 	| { submit: (param: string) => void }
-	| IEmitUserInteraction
-	| { audioFocused: string };
+	| IEmitUserInteraction;

 class EventEmitter {
 	private events: { [key: string]: any };
@@ -37,7 +37,7 @@ export * from './crashReport';
 export * from './parseSettings';
 export * from './subscribeRooms';
 export * from './serializeAsciiUrl';
-export * from './audioPlayer';
+export * from './AudioManager';
 export * from './isRoomFederated';
 export * from './checkSupportedVersions';
 export * from './getServerInfo';
@@ -34,7 +34,7 @@ import {
 } from '../../definitions';
 import { Services } from '../../lib/services';
 import { TNavigation } from '../../stacks/stackType';
-import audioPlayer from '../../lib/methods/audioPlayer';
+import AudioManager from '../../lib/methods/AudioManager';

 interface IMessagesViewProps {
 	user: {
@@ -102,7 +102,7 @@ class MessagesView extends React.Component<IMessagesViewProps, IMessagesViewStat
 	}

 	componentWillUnmount(): void {
-		audioPlayer.pauseCurrentAudio();
+		AudioManager.pauseAudio();
 	}

 	shouldComponentUpdate(nextProps: IMessagesViewProps, nextState: IMessagesViewState) {
@@ -94,7 +94,7 @@ import { withActionSheet } from '../../containers/ActionSheet';
 import { goRoom, TGoRoomItem } from '../../lib/methods/helpers/goRoom';
 import { IMessageComposerRef, MessageComposerContainer } from '../../containers/MessageComposer';
 import { RoomContext } from './context';
-import audioPlayer from '../../lib/methods/audioPlayer';
+import AudioManager from '../../lib/methods/AudioManager';
 import { IListContainerRef, TListRef } from './List/definitions';
 import { getMessageById } from '../../lib/database/services/Message';
 import { getThreadById } from '../../lib/database/services/Thread';
@@ -236,9 +236,8 @@ class RoomView extends React.Component<IRoomViewProps, IRoomViewState> {
 			}
 		});
 		EventEmitter.addEventListener('ROOM_REMOVED', this.handleRoomRemoved);
-		// TODO: Refactor when audio becomes global
 		this.unsubscribeBlur = navigation.addListener('blur', () => {
-			audioPlayer.pauseCurrentAudio();
+			AudioManager.pauseAudio();
 		});
 	}

@@ -342,8 +341,7 @@ class RoomView extends React.Component<IRoomViewProps, IRoomViewState> {
 		EventEmitter.removeListener('connected', this.handleConnected);
 		EventEmitter.removeListener('ROOM_REMOVED', this.handleRoomRemoved);
 		if (!this.tmid) {
-			// TODO: Refactor when audio becomes global
-			await audioPlayer.unloadRoomAudios(this.rid);
+			await AudioManager.unloadRoomAudios(this.rid);
 		}
 	}