feat: play audio in sequence (#5526)

Co-authored-by: Sathurshan <jsathu07@gmail.com>
Co-authored-by: Diego Mello <diegolmello@gmail.com>
Reinaldo Neto 2024-02-20 17:17:46 -03:00 committed by GitHub
parent 37e936e1d6
commit 316f771a8f
16 changed files with 300 additions and 207 deletions

View File

@@ -10,10 +10,10 @@ import styles from './styles';
import Seek from './Seek';
import PlaybackSpeed from './PlaybackSpeed';
import PlayButton from './PlayButton';
import EventEmitter from '../../lib/methods/helpers/events';
import audioPlayer, { AUDIO_FOCUSED } from '../../lib/methods/audioPlayer';
import AudioManager from '../../lib/methods/AudioManager';
import { AUDIO_PLAYBACK_SPEED, AVAILABLE_SPEEDS } from './constants';
import { TDownloadState } from '../../lib/methods/handleMediaDownload';
import { emitter } from '../../lib/methods/helpers';
import { TAudioState } from './types';
import { useUserPreferences } from '../../lib/methods';
@@ -86,15 +86,15 @@ const AudioPlayer = ({
};
const setPosition = async (time: number) => {
await audioPlayer.setPositionAsync(audioUri.current, time);
await AudioManager.setPositionAsync(audioUri.current, time);
};
const togglePlayPause = async () => {
try {
if (!paused) {
await audioPlayer.pauseAudio(audioUri.current);
await AudioManager.pauseAudio();
} else {
await audioPlayer.playAudio(audioUri.current);
await AudioManager.playAudio(audioUri.current);
}
} catch {
// Do nothing
@@ -102,7 +102,7 @@ const AudioPlayer = ({
};
useEffect(() => {
audioPlayer.setRateAsync(audioUri.current, playbackSpeed);
AudioManager.setRateAsync(audioUri.current, playbackSpeed);
}, [playbackSpeed]);
const onPress = () => {
@@ -116,11 +116,13 @@ const AudioPlayer = ({
};
useEffect(() => {
InteractionManager.runAfterInteractions(async () => {
audioUri.current = await audioPlayer.loadAudio({ msgId, rid, uri: fileUri });
audioPlayer.setOnPlaybackStatusUpdate(audioUri.current, onPlaybackStatusUpdate);
audioPlayer.setRateAsync(audioUri.current, playbackSpeed);
});
if (fileUri) {
InteractionManager.runAfterInteractions(async () => {
audioUri.current = await AudioManager.loadAudio({ msgId, rid, uri: fileUri });
AudioManager.setOnPlaybackStatusUpdate(audioUri.current, onPlaybackStatusUpdate);
AudioManager.setRateAsync(audioUri.current, playbackSpeed);
});
}
}, [fileUri]);
useEffect(() => {
@@ -133,20 +135,26 @@ const AudioPlayer = ({
useEffect(() => {
const unsubscribeFocus = navigation.addListener('focus', () => {
audioPlayer.setOnPlaybackStatusUpdate(audioUri.current, onPlaybackStatusUpdate);
AudioManager.setOnPlaybackStatusUpdate(audioUri.current, onPlaybackStatusUpdate);
AudioManager.addAudioRendered(audioUri.current);
});
const unsubscribeBlur = navigation.addListener('blur', () => {
AudioManager.removeAudioRendered(audioUri.current);
});
return () => {
unsubscribeFocus();
unsubscribeBlur();
};
}, [navigation]);
useEffect(() => {
const listener = EventEmitter.addEventListener(AUDIO_FOCUSED, ({ audioFocused }: { audioFocused: string }) => {
setFocused(audioFocused === audioUri.current);
});
const audioFocusedEventHandler = (audioFocused: string) => {
setFocused(!!audioFocused && audioFocused === audioUri.current);
};
emitter.on('audioFocused', audioFocusedEventHandler);
return () => {
EventEmitter.removeListener(AUDIO_FOCUSED, listener);
emitter.off('audioFocused', audioFocusedEventHandler);
};
}, []);
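
The focus/blur listeners above are what gate the new sequential playback: only audios currently registered as rendered are eligible to auto-play next. A minimal sketch of that registration pattern in isolation, assuming the import paths used above and an audioKey already returned by loadAudio (the useAudioRenderedRegistration hook is hypothetical, not part of this change):

import { useEffect } from 'react';
import { useNavigation } from '@react-navigation/native';

import AudioManager from '../../lib/methods/AudioManager';

// Hypothetical hook: keeps an already-loaded audio registered as rendered only while
// its screen is focused, so sequential playback can pick it up as the next audio.
export const useAudioRenderedRegistration = (audioKey: string) => {
	const navigation = useNavigation();

	useEffect(() => {
		const unsubscribeFocus = navigation.addListener('focus', () => AudioManager.addAudioRendered(audioKey));
		const unsubscribeBlur = navigation.addListener('blur', () => AudioManager.removeAudioRendered(audioKey));
		return () => {
			unsubscribeFocus();
			unsubscribeBlur();
		};
	}, [navigation, audioKey]);
};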

View File

@@ -12,13 +12,13 @@ import sharedStyles from '../../../../views/Styles';
import { ReviewButton } from './ReviewButton';
import { useMessageComposerApi } from '../../context';
import { sendFileMessage } from '../../../../lib/methods';
import { IUpload } from '../../../../definitions';
import log from '../../../../lib/methods/helpers/log';
import { useRoomContext } from '../../../../views/RoomView/context';
import { RECORDING_EXTENSION, RECORDING_MODE, RECORDING_SETTINGS } from '../../../../lib/constants';
import { useAppSelector } from '../../../../lib/hooks';
import log from '../../../../lib/methods/helpers/log';
import { IUpload } from '../../../../definitions';
import { useRoomContext } from '../../../../views/RoomView/context';
import { useCanUploadFile } from '../../hooks';
import { Duration, IDurationRef } from './Duration';
import { RECORDING_EXTENSION, RECORDING_MODE, RECORDING_SETTINGS } from './constants';
import AudioPlayer from '../../../AudioPlayer';
import { CancelButton } from './CancelButton';
import i18n from '../../../../i18n';

View File

@@ -14,7 +14,8 @@ import {
} from '../../lib/methods/handleMediaDownload';
import { fetchAutoDownloadEnabled } from '../../lib/methods/autoDownloadPreference';
import AudioPlayer from '../AudioPlayer';
import { useAppSelector } from '../../lib/hooks';
import { useAudioUrl } from './hooks/useAudioUrl';
import { getAudioUrlToCache } from '../../lib/methods/getAudioUrl';
interface IMessageAudioProps {
file: IAttachment;
@@ -30,17 +31,8 @@ const MessageAudio = ({ file, getCustomEmoji, author, isReply, style, msg }: IMe
const [downloadState, setDownloadState] = useState<TDownloadState>('loading');
const [fileUri, setFileUri] = useState('');
const { baseUrl, user, id, rid } = useContext(MessageContext);
const { cdnPrefix } = useAppSelector(state => ({
cdnPrefix: state.settings.CDN_PREFIX as string
}));
const getUrl = () => {
let url = file.audio_url;
if (url && !url.startsWith('http')) {
url = `${cdnPrefix || baseUrl}${file.audio_url}`;
}
return url;
};
const audioUrl = useAudioUrl({ audioUrl: file.audio_url });
const onPlayButtonPress = async () => {
if (downloadState === 'to-download') {
@@ -55,10 +47,9 @@ const MessageAudio = ({ file, getCustomEmoji, author, isReply, style, msg }: IMe
const handleDownload = async () => {
setDownloadState('loading');
try {
const url = getUrl();
if (url) {
if (audioUrl) {
const audio = await downloadMediaFile({
downloadUrl: `${url}?rc_uid=${user.id}&rc_token=${user.token}`,
downloadUrl: getAudioUrlToCache({ token: user.token, userId: user.id, url: audioUrl }),
type: 'audio',
mimeType: file.audio_type
});
@@ -71,9 +62,8 @@ const MessageAudio = ({ file, getCustomEmoji, author, isReply, style, msg }: IMe
};
const handleAutoDownload = async () => {
const url = getUrl();
try {
if (url) {
if (audioUrl) {
const isCurrentUserAuthor = author?._id === user.id;
const isAutoDownloadEnabled = fetchAutoDownloadEnabled('audioPreferenceDownload');
if (isAutoDownloadEnabled || isCurrentUserAuthor) {
@@ -91,7 +81,7 @@ const MessageAudio = ({ file, getCustomEmoji, author, isReply, style, msg }: IMe
const cachedAudioResult = await getMediaCache({
type: 'audio',
mimeType: file.audio_type,
urlToCache: getUrl()
urlToCache: audioUrl
});
if (cachedAudioResult?.exists) {
setFileUri(cachedAudioResult.uri);
@@ -103,10 +93,9 @@ const MessageAudio = ({ file, getCustomEmoji, author, isReply, style, msg }: IMe
const handleResumeDownload = async () => {
try {
setDownloadState('loading');
const url = getUrl();
if (url) {
if (audioUrl) {
const videoUri = await resumeMediaFile({
downloadUrl: url
downloadUrl: audioUrl
});
setFileUri(videoUri);
setDownloadState('downloaded');
@@ -122,19 +111,21 @@ const MessageAudio = ({ file, getCustomEmoji, author, isReply, style, msg }: IMe
if (isAudioCached) {
return;
}
const audioUrl = getUrl();
if (audioUrl && isDownloadActive(audioUrl)) {
handleResumeDownload();
return;
}
await handleAutoDownload();
};
handleCache();
}, []);
if (audioUrl) {
handleCache();
}
}, [audioUrl]);
if (!baseUrl) {
return null;
}
return (
<>
<Markdown msg={msg} style={[isReply && style]} username={user.username} getCustomEmoji={getCustomEmoji} />

View File

@@ -0,0 +1,25 @@
import { useState, useEffect } from 'react';
import { useAppSelector } from '../../../lib/hooks';
import { getAudioUrl } from '../../../lib/methods/getAudioUrl';
export const useAudioUrl = ({ audioUrl }: { audioUrl?: string }): string => {
const [filePath, setFilePath] = useState<string>('');
const { cdnPrefix, baseUrl } = useAppSelector(state => ({
cdnPrefix: state.settings.CDN_PREFIX as string,
baseUrl: state.server.server
}));
useEffect(() => {
if (!audioUrl) {
return;
}
const url = getAudioUrl({ baseUrl, cdnPrefix, audioUrl });
if (url) {
setFilePath(url);
}
}, [audioUrl, baseUrl, cdnPrefix]);
return filePath;
};
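
Usage mirrors the MessageAudio change above; a minimal sketch of a consumer component, assuming the same relative import path and an attachment with an optional audio_url (the AudioUrlPreview component is illustrative only):

import React from 'react';
import { Text } from 'react-native';

import { useAudioUrl } from './hooks/useAudioUrl';

// Illustrative component: renders the resolved URL once the store provides
// CDN_PREFIX (or falls back to the server URL); the hook returns '' until then.
const AudioUrlPreview = ({ audioUrl }: { audioUrl?: string }) => {
	const resolvedUrl = useAudioUrl({ audioUrl });
	if (!resolvedUrl) {
		return null;
	}
	return <Text>{resolvedUrl}</Text>;
};

export default AudioUrlPreview;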

View File

@@ -36,3 +36,13 @@ export const RECORDING_MODE: AudioMode = {
interruptionModeIOS: InterruptionModeIOS.DoNotMix,
interruptionModeAndroid: InterruptionModeAndroid.DoNotMix
};
export const AUDIO_MODE: AudioMode = {
allowsRecordingIOS: false,
playsInSilentModeIOS: true,
staysActiveInBackground: true,
shouldDuckAndroid: true,
playThroughEarpieceAndroid: false,
interruptionModeIOS: InterruptionModeIOS.DoNotMix,
interruptionModeAndroid: InterruptionModeAndroid.DoNotMix
};

View File

@@ -1,3 +1,4 @@
export * from './audio';
export * from './colors';
export * from './constantDisplayMode';
export * from './emojis';

View File

@@ -0,0 +1,179 @@
import { AVPlaybackStatus, Audio } from 'expo-av';
import { Q } from '@nozbe/watermelondb';
import moment from 'moment';
import { getMessageById } from '../database/services/Message';
import database from '../database';
import { getFilePathAudio } from './getFilePathAudio';
import { TMessageModel } from '../../definitions';
import { AUDIO_MODE } from '../constants';
import { emitter } from './helpers';
const getAudioKey = ({ msgId, rid, uri }: { msgId?: string; rid: string; uri: string }) => `${msgId}-${rid}-${uri}`;
class AudioManagerClass {
private audioQueue: { [audioKey: string]: Audio.Sound };
private audioPlaying: string;
private audiosRendered: Set<string>;
constructor() {
this.audioQueue = {};
this.audioPlaying = '';
this.audiosRendered = new Set<string>();
}
addAudioRendered = (audioKey: string) => {
this.audiosRendered.add(audioKey);
};
removeAudioRendered = (audioKey: string) => {
this.audiosRendered.delete(audioKey);
};
async loadAudio({ msgId, rid, uri }: { rid: string; msgId?: string; uri: string }): Promise<string> {
const audioKey = getAudioKey({ msgId, rid, uri });
this.addAudioRendered(audioKey);
if (this.audioQueue[audioKey]) {
return audioKey;
}
const { sound } = await Audio.Sound.createAsync({ uri }, { androidImplementation: 'MediaPlayer' });
this.audioQueue[audioKey] = sound;
return audioKey;
}
async playAudio(audioKey: string) {
if (this.audioPlaying) {
await this.pauseAudio();
}
await Audio.setAudioModeAsync(AUDIO_MODE);
await this.audioQueue[audioKey]?.playAsync();
this.audioPlaying = audioKey;
emitter.emit('audioFocused', audioKey);
}
async pauseAudio() {
if (this.audioPlaying) {
await this.audioQueue[this.audioPlaying]?.pauseAsync();
this.audioPlaying = '';
}
}
async setPositionAsync(audioKey: string, time: number) {
try {
await this.audioQueue[audioKey]?.setPositionAsync(time);
} catch {
// Do nothing
}
}
async setRateAsync(audioKey: string, value = 1.0) {
try {
await this.audioQueue[audioKey].setRateAsync(value, true);
} catch {
// Do nothing
}
}
onPlaybackStatusUpdate(audioKey: string, status: AVPlaybackStatus, callback: (status: AVPlaybackStatus) => void) {
if (status) {
callback(status);
this.onEnd(audioKey, status);
}
}
setOnPlaybackStatusUpdate(audioKey: string, callback: (status: AVPlaybackStatus) => void): void {
return this.audioQueue[audioKey]?.setOnPlaybackStatusUpdate(status =>
this.onPlaybackStatusUpdate(audioKey, status, callback)
);
}
async onEnd(audioKey: string, status: AVPlaybackStatus) {
if (status.isLoaded && status.didJustFinish) {
try {
await this.audioQueue[audioKey]?.stopAsync();
this.audioPlaying = '';
emitter.emit('audioFocused', '');
await this.playNextAudioInSequence(audioKey);
} catch {
// do nothing
}
}
}
getNextAudioKey = ({ message, rid }: { message: TMessageModel; rid: string }) => {
if (!message.attachments) {
return;
}
const { audio_url: audioUrl, audio_type: audioType } = message.attachments[0];
const uri = getFilePathAudio({ audioUrl, audioType });
if (!uri) {
return;
}
return getAudioKey({
msgId: message.id,
rid,
uri
});
};
async getNextAudioMessage(msgId: string, rid: string) {
const msg = await getMessageById(msgId);
if (msg) {
const db = database.active;
const whereClause: Q.Clause[] = [
Q.experimentalSortBy('ts', Q.asc),
Q.where('ts', Q.gt(moment(msg.ts).valueOf())),
Q.experimentalTake(1)
];
if (msg.tmid) {
whereClause.push(Q.where('tmid', msg.tmid || msg.id));
} else {
whereClause.push(Q.where('rid', rid), Q.where('tmid', null));
}
const [message] = await db
.get('messages')
.query(...whereClause)
.fetch();
return message;
}
return null;
}
async playNextAudioInSequence(previousAudioKey: string) {
const [msgId, rid] = previousAudioKey.split('-');
const nextMessage = await this.getNextAudioMessage(msgId, rid);
if (nextMessage && nextMessage.attachments) {
const nextAudioInSeqKey = this.getNextAudioKey({ message: nextMessage, rid });
if (nextAudioInSeqKey && this.audioQueue?.[nextAudioInSeqKey] && this.audiosRendered.has(nextAudioInSeqKey)) {
await this.playAudio(nextAudioInSeqKey);
}
}
}
async unloadRoomAudios(rid?: string) {
if (!rid) {
return;
}
const regExp = new RegExp(rid);
const roomAudioKeysLoaded = Object.keys(this.audioQueue).filter(audioKey => regExp.test(audioKey));
const roomAudiosLoaded = roomAudioKeysLoaded.map(key => this.audioQueue[key]);
try {
await Promise.all(
roomAudiosLoaded.map(async audio => {
await audio?.stopAsync();
await audio?.unloadAsync();
})
);
} catch {
// Do nothing
}
roomAudioKeysLoaded.forEach(key => delete this.audioQueue[key]);
this.audioPlaying = '';
}
}
const AudioManager = new AudioManagerClass();
export default AudioManager;
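
Taken together with the AudioPlayer changes above, the expected call sequence looks roughly like the following non-authoritative sketch (the identifiers and file URI are made up for illustration):

import AudioManager from '../../lib/methods/AudioManager';

const rid = 'room-id';
const msgId = 'message-id';
const uri = 'file:///path/to/cached-audio.mp3';

const playMessageAudio = async () => {
	// loadAudio marks the audio as rendered and returns its key (`${msgId}-${rid}-${uri}`).
	const audioKey = await AudioManager.loadAudio({ msgId, rid, uri });

	AudioManager.setOnPlaybackStatusUpdate(audioKey, status => {
		// Derive paused/position UI state from the expo-av status here.
	});

	// playAudio pauses whatever was playing, applies AUDIO_MODE and emits 'audioFocused'.
	await AudioManager.playAudio(audioKey);

	// When playback finishes, onEnd clears the focus and playNextAudioInSequence looks up
	// the next audio message in the room (or thread); if that audio is loaded and still
	// rendered on screen, it starts automatically.
};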

View File

@@ -1,152 +0,0 @@
import { AVPlaybackStatus, Audio, InterruptionModeAndroid, InterruptionModeIOS } from 'expo-av';
import EventEmitter from './helpers/events';
export const AUDIO_FOCUSED = 'AUDIO_FOCUSED';
const AUDIO_MODE = {
allowsRecordingIOS: false,
playsInSilentModeIOS: true,
staysActiveInBackground: true,
shouldDuckAndroid: true,
playThroughEarpieceAndroid: false,
interruptionModeIOS: InterruptionModeIOS.DoNotMix,
interruptionModeAndroid: InterruptionModeAndroid.DoNotMix
};
class AudioPlayer {
private audioQueue: { [audioKey: string]: Audio.Sound };
private audioPlaying: string;
constructor() {
this.audioQueue = {};
this.audioPlaying = '';
}
async loadAudio({ msgId, rid, uri }: { rid: string; msgId?: string; uri: string }): Promise<string> {
const audioKey = `${msgId}-${rid}-${uri}`;
if (this.audioQueue[audioKey]) {
return audioKey;
}
const { sound } = await Audio.Sound.createAsync({ uri }, { androidImplementation: 'MediaPlayer' });
this.audioQueue[audioKey] = sound;
return audioKey;
}
onPlaybackStatusUpdate(audioKey: string, status: AVPlaybackStatus, callback: (status: AVPlaybackStatus) => void) {
if (status) {
callback(status);
this.onEnd(audioKey, status);
}
}
async onEnd(audioKey: string, status: AVPlaybackStatus) {
if (status.isLoaded) {
if (status.didJustFinish) {
try {
await this.audioQueue[audioKey]?.stopAsync();
this.audioPlaying = '';
EventEmitter.emit(AUDIO_FOCUSED, { audioFocused: '' });
} catch {
// do nothing
}
}
}
}
setOnPlaybackStatusUpdate(audioKey: string, callback: (status: AVPlaybackStatus) => void): void {
return this.audioQueue[audioKey]?.setOnPlaybackStatusUpdate(status =>
this.onPlaybackStatusUpdate(audioKey, status, callback)
);
}
async playAudio(audioKey: string) {
if (this.audioPlaying) {
await this.pauseAudio(this.audioPlaying);
}
await Audio.setAudioModeAsync(AUDIO_MODE);
await this.audioQueue[audioKey]?.playAsync();
this.audioPlaying = audioKey;
EventEmitter.emit(AUDIO_FOCUSED, { audioFocused: audioKey });
}
async pauseAudio(audioKey: string) {
await this.audioQueue[audioKey]?.pauseAsync();
this.audioPlaying = '';
}
async pauseCurrentAudio() {
if (this.audioPlaying) {
await this.pauseAudio(this.audioPlaying);
}
}
async setPositionAsync(audioKey: string, time: number) {
try {
await this.audioQueue[audioKey]?.setPositionAsync(time);
} catch {
// Do nothing
}
}
async setRateAsync(audioKey: string, value = 1.0) {
try {
await this.audioQueue[audioKey].setRateAsync(value, true);
} catch {
// Do nothing
}
}
async unloadAudio(audioKey: string) {
await this.audioQueue[audioKey]?.stopAsync();
await this.audioQueue[audioKey]?.unloadAsync();
delete this.audioQueue[audioKey];
this.audioPlaying = '';
}
async unloadCurrentAudio() {
if (this.audioPlaying) {
await this.unloadAudio(this.audioPlaying);
}
}
async unloadRoomAudios(rid?: string) {
if (!rid) {
return;
}
const regExp = new RegExp(rid);
const roomAudioKeysLoaded = Object.keys(this.audioQueue).filter(audioKey => regExp.test(audioKey));
const roomAudiosLoaded = roomAudioKeysLoaded.map(key => this.audioQueue[key]);
try {
await Promise.all(
roomAudiosLoaded.map(async audio => {
await audio?.stopAsync();
await audio?.unloadAsync();
})
);
} catch {
// Do nothing
}
roomAudioKeysLoaded.forEach(key => delete this.audioQueue[key]);
this.audioPlaying = '';
}
async unloadAllAudios() {
const audiosLoaded = Object.values(this.audioQueue);
try {
await Promise.all(
audiosLoaded.map(async audio => {
await audio?.stopAsync();
await audio?.unloadAsync();
})
);
} catch {
// Do nothing
}
this.audioPlaying = '';
this.audioQueue = {};
}
}
const audioPlayer = new AudioPlayer();
export default audioPlayer;

View File

@@ -0,0 +1,9 @@
export const getAudioUrl = ({ audioUrl, baseUrl, cdnPrefix }: { audioUrl?: string; baseUrl: string; cdnPrefix: string }) => {
if (audioUrl && !audioUrl.startsWith('http')) {
audioUrl = `${cdnPrefix || baseUrl}${audioUrl}`;
}
return audioUrl;
};
export const getAudioUrlToCache = ({ token, userId, url }: { url?: string; userId: string; token: string }) =>
`${url}?rc_uid=${userId}&rc_token=${token}`;
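
A minimal sketch of how these two helpers compose (the server, user id and token values are made up):

import { getAudioUrl, getAudioUrlToCache } from './getAudioUrl';

// Relative attachment paths get prefixed with CDN_PREFIX, falling back to the server URL.
const url = getAudioUrl({
	audioUrl: '/file-upload/abc123/audio.mp3',
	baseUrl: 'https://example.rocket.chat',
	cdnPrefix: ''
});
// -> 'https://example.rocket.chat/file-upload/abc123/audio.mp3'

// The cache/download URL appends the user credentials as query parameters.
const downloadUrl = getAudioUrlToCache({ url, userId: 'user-id', token: 'auth-token' });
// -> 'https://example.rocket.chat/file-upload/abc123/audio.mp3?rc_uid=user-id&rc_token=auth-token'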

View File

@@ -0,0 +1,16 @@
import { getAudioUrl, getAudioUrlToCache } from './getAudioUrl';
import { store } from '../store/auxStore';
import { getFilePath } from './handleMediaDownload';
import { getUserSelector } from '../../selectors/login';
export const getFilePathAudio = ({ audioUrl, audioType }: { audioUrl?: string; audioType?: string }): string | null => {
const baseUrl = store.getState().server.server;
const cdnPrefix = store.getState().settings.CDN_PREFIX as string;
const { id: userId, token } = getUserSelector(store.getState());
const url = getAudioUrl({ baseUrl, cdnPrefix, audioUrl });
return getFilePath({
urlToCache: getAudioUrlToCache({ token, userId, url }),
type: 'audio',
mimeType: audioType
});
};

View File

@@ -108,9 +108,17 @@ const ensureDirAsync = async (dir: string, intermediates = true): Promise<void>
return ensureDirAsync(dir, intermediates);
};
const getFilePath = ({ type, mimeType, urlToCache }: { type: MediaTypes; mimeType?: string; urlToCache?: string }) => {
export const getFilePath = ({
type,
mimeType,
urlToCache
}: {
type: MediaTypes;
mimeType?: string;
urlToCache?: string;
}): string | null => {
if (!urlToCache) {
return;
return null;
}
const folderPath = getFolderPath(urlToCache);
const urlWithoutQueryString = urlToCache.split('?')[0];

View File

@@ -11,6 +11,7 @@ export type TEmitterEvents = {
setKeyboardHeightThread: number;
setComposerHeight: number;
setComposerHeightThread: number;
audioFocused: string;
};
export type TKeyEmitterEvent = keyof TEmitterEvents;
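
With 'audioFocused' declared in TEmitterEvents, emit/on calls for this event are typed with a string payload; a minimal sketch, assuming the same emitter import path used in AudioPlayer above:

import { emitter } from '../../lib/methods/helpers';

// The payload type comes from TEmitterEvents['audioFocused'] (a string audio key).
const onAudioFocused = (audioKey: string) => {
	console.log('focused audio:', audioKey);
};

emitter.on('audioFocused', onAudioFocused);
emitter.emit('audioFocused', 'message-id-room-id-file:///audio.mp3');
emitter.off('audioFocused', onAudioFocused);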

View File

@@ -12,8 +12,7 @@ type TEventEmitterEmmitArgs =
| { visible: boolean; onCancel?: null | Function }
| { cancel: () => void }
| { submit: (param: string) => void }
| IEmitUserInteraction
| { audioFocused: string };
| IEmitUserInteraction;
class EventEmitter {
private events: { [key: string]: any };

View File

@@ -37,7 +37,7 @@ export * from './crashReport';
export * from './parseSettings';
export * from './subscribeRooms';
export * from './serializeAsciiUrl';
export * from './audioPlayer';
export * from './AudioManager';
export * from './isRoomFederated';
export * from './checkSupportedVersions';
export * from './getServerInfo';

View File

@@ -34,7 +34,7 @@ import {
} from '../../definitions';
import { Services } from '../../lib/services';
import { TNavigation } from '../../stacks/stackType';
import audioPlayer from '../../lib/methods/audioPlayer';
import AudioManager from '../../lib/methods/AudioManager';
interface IMessagesViewProps {
user: {
@@ -102,7 +102,7 @@ class MessagesView extends React.Component<IMessagesViewProps, IMessagesViewStat
}
componentWillUnmount(): void {
audioPlayer.pauseCurrentAudio();
AudioManager.pauseAudio();
}
shouldComponentUpdate(nextProps: IMessagesViewProps, nextState: IMessagesViewState) {

View File

@@ -94,7 +94,7 @@ import { withActionSheet } from '../../containers/ActionSheet';
import { goRoom, TGoRoomItem } from '../../lib/methods/helpers/goRoom';
import { IMessageComposerRef, MessageComposerContainer } from '../../containers/MessageComposer';
import { RoomContext } from './context';
import audioPlayer from '../../lib/methods/audioPlayer';
import AudioManager from '../../lib/methods/AudioManager';
import { IListContainerRef, TListRef } from './List/definitions';
import { getMessageById } from '../../lib/database/services/Message';
import { getThreadById } from '../../lib/database/services/Thread';
@@ -236,9 +236,8 @@ class RoomView extends React.Component<IRoomViewProps, IRoomViewState> {
}
});
EventEmitter.addEventListener('ROOM_REMOVED', this.handleRoomRemoved);
// TODO: Refactor when audio becomes global
this.unsubscribeBlur = navigation.addListener('blur', () => {
audioPlayer.pauseCurrentAudio();
AudioManager.pauseAudio();
});
}
@@ -342,8 +341,7 @@ class RoomView extends React.Component<IRoomViewProps, IRoomViewState> {
EventEmitter.removeListener('connected', this.handleConnected);
EventEmitter.removeListener('ROOM_REMOVED', this.handleRoomRemoved);
if (!this.tmid) {
// TODO: Refactor when audio becomes global
await audioPlayer.unloadRoomAudios(this.rid);
await AudioManager.unloadRoomAudios(this.rid);
}
}