Skip to content
2 changes: 1 addition & 1 deletion apps/common-app/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -41,6 +41,6 @@
"react": "19.1.1",
"react-native": "0.82.0",
"react-test-renderer": "19.1.1",
"typescript": "5.8.3"
"typescript": "~5.8.3"
}
}
10 changes: 5 additions & 5 deletions apps/common-app/src/examples/AudioVisualizer/AudioVisualizer.tsx
Original file line number Diff line number Diff line change
@@ -1,15 +1,15 @@
import React, { useState, useEffect, useRef } from 'react';
import React, { useEffect, useRef, useState } from 'react';
import { ActivityIndicator, View } from 'react-native';
import {
AudioContext,
AnalyserNode,
AudioBuffer,
AudioBufferSourceNode,
AudioContext,
} from 'react-native-audio-api';
import { ActivityIndicator, View } from 'react-native';

import FreqTimeChart from './FreqTimeChart';
import { Container, Button } from '../../components';
import { Button, Container } from '../../components';
import { layout } from '../../styles';
import FreqTimeChart from './FreqTimeChart';

// FFT window size in samples (power of two).
// NOTE(review): presumably assigned to the AnalyserNode's fftSize below the
// visible hunk — confirm against the rest of AudioVisualizer.tsx.
const FFT_SIZE = 512;

Expand Down
322 changes: 200 additions & 122 deletions apps/common-app/src/examples/Record/Record.tsx
Original file line number Diff line number Diff line change
@@ -1,147 +1,91 @@
import React, { useRef, FC, useEffect } from 'react';
import React, { FC, useMemo, useState } from 'react';
import {
AndroidFormat,
AudioContext,
AudioManager,
AudioRecorder,
RecorderAdapterNode,
AudioBufferSourceNode,
AudioBuffer,
BitDepth,
FileDirectory,
IOSAudioQuality,
IOSFormat,
} from 'react-native-audio-api';

import { Container, Button } from '../../components';
import { View, Text } from 'react-native';
import { Text, View } from 'react-native';
import { Button, Container } from '../../components';
import { colors } from '../../styles';

// Nominal recording sample rate in Hz; rendered in the UI ("Sample rate: …").
// NOTE(review): the recorder's enableFileOutput call hard-codes 16000 rather
// than referencing this constant — consider unifying to avoid drift.
const SAMPLE_RATE = 16000;

// Configure the iOS audio session so the app can play and record at once,
// routing to the speaker by default and allowing Bluetooth A2DP output.
// NOTE(review): this runs at module import time, not inside a component
// lifecycle hook — confirm that is intentional.
AudioManager.setAudioSessionOptions({
iosCategory: 'playAndRecord',
iosMode: 'default',
iosOptions: ['defaultToSpeaker', 'allowBluetoothA2DP'],
});

const Record: FC = () => {
const recorderRef = useRef<AudioRecorder | null>(null);
const aCtxRef = useRef<AudioContext | null>(null);
const recorderAdapterRef = useRef<RecorderAdapterNode | null>(null);
const audioBuffersRef = useRef<AudioBuffer[]>([]);
const sourcesRef = useRef<AudioBufferSourceNode[]>([]);

useEffect(() => {
const setup = async () => {
await AudioManager.requestRecordingPermissions();
recorderRef.current = new AudioRecorder({
sampleRate: SAMPLE_RATE,
bufferLengthInSamples: SAMPLE_RATE,
});
};
const [isRecording, setIsRecording] = useState(false);
const [lastOutput, setLastOutput] = useState<string | null>(null);

setup();
return () => {
aCtxRef.current?.close();
stopRecorder();
};
const audioContext = useMemo(() => {
return new AudioContext({ initSuspended: true });
}, []);

const setupRecording = () => {
AudioManager.setAudioSessionOptions({
iosCategory: 'playAndRecord',
iosMode: 'spokenAudio',
iosOptions: ['defaultToSpeaker', 'allowBluetoothA2DP'],
const recorder = useMemo(() => {
const rec = new AudioRecorder();

rec.enableFileOutput({
sampleRate: 16000,
channels: 1,
bitRate: 32000,
bitDepth: BitDepth.Bit24,
directory: FileDirectory.Document,
ios: {
format: IOSFormat.M4A,
quality: IOSAudioQuality.Medium,
},
android: {},
});
};

const stopRecorder = () => {
if (recorderRef.current) {
recorderRef.current.stop();
console.log('Recording stopped');
// advised, but not required
AudioManager.setAudioSessionOptions({
iosCategory: 'playback',
iosMode: 'default',
});
} else {
console.error('AudioRecorder is not initialized');
}
};
return rec;
}, []);

const startEcho = () => {
if (!recorderRef.current) {
console.error('AudioContext or AudioRecorder is not initialized');
return;
}
setupRecording();

aCtxRef.current = new AudioContext({ sampleRate: SAMPLE_RATE });
recorderAdapterRef.current = aCtxRef.current.createRecorderAdapter();
recorderAdapterRef.current.connect(aCtxRef.current.destination);
recorderRef.current.connect(recorderAdapterRef.current);

recorderRef.current.start();
console.log('Recording started');
console.log('Audio context state:', aCtxRef.current.state);
if (aCtxRef.current.state === 'suspended') {
console.log('Resuming audio context');
aCtxRef.current.resume();
}
};
const onStartRecording = async () => {
await AudioManager.setAudioSessionActivity(true);

/// This stops only the recording, not the audio context
const stopEcho = () => {
stopRecorder();
aCtxRef.current = null;
recorderAdapterRef.current = null;
recorder.start();
setIsRecording(true);
};

const startRecordReplay = () => {
if (!recorderRef.current) {
console.error('AudioRecorder is not initialized');
return;
}
setupRecording();
audioBuffersRef.current = [];

recorderRef.current.onAudioReady((event) => {
const { buffer, numFrames } = event;

console.log('Audio recorder buffer ready:', buffer.duration, numFrames);
audioBuffersRef.current.push(buffer);
});
const onStopRecording = async () => {
setIsRecording(false);
const output = recorder.stop();

recorderRef.current.start();
await AudioManager.setAudioSessionActivity(false);

setTimeout(() => {
stopRecorder();
}, 5000);
setLastOutput(typeof output === 'string' ? output : null);
};

const stopRecordReplay = () => {
const aCtx = new AudioContext({ sampleRate: SAMPLE_RATE });
aCtxRef.current = aCtx;

if (aCtx.state === 'suspended') {
aCtx.resume();
const onPlayOutput = async () => {
if (!lastOutput) {
return;
}

const tNow = aCtx.currentTime;
let nextStartAt = tNow + 1;
const buffers = audioBuffersRef.current;
await AudioManager.setAudioSessionActivity(true);

console.log(tNow, nextStartAt, buffers.length);
const buffer = await audioContext.decodeAudioData(lastOutput);
const source = audioContext.createBufferSource();
source.buffer = buffer;
source.connect(audioContext.destination);
source.start();

for (let i = 0; i < buffers.length; i++) {
const source = aCtx.createBufferSource();
source.buffer = buffers[i];

source.connect(aCtx.destination);
sourcesRef.current.push(source);
source.onEnded = async () => {
await audioContext.suspend();
await AudioManager.setAudioSessionActivity(false);
};

source.start(nextStartAt);
nextStartAt += buffers[i].duration;
if (audioContext.state === 'suspended') {
await audioContext.resume();
}

setTimeout(
() => {
console.log('clearing data');
audioBuffersRef.current = [];
sourcesRef.current = [];
},
(nextStartAt - tNow) * 1000
);
};

return (
Expand All @@ -150,19 +94,153 @@ const Record: FC = () => {
Sample rate: {SAMPLE_RATE}
</Text>
<View style={{ alignItems: 'center', gap: 10, paddingTop: 20 }}>
<Text style={{ color: colors.white, fontSize: 16 }}>Echo</Text>
<Button title="Start Recording" onPress={startEcho} />
<Button title="Stop Recording" onPress={stopEcho} />
</View>
<View style={{ alignItems: 'center', gap: 10, paddingTop: 40 }}>
<Text style={{ color: colors.white, fontSize: 16 }}>
Record & replay
{isRecording ? '🔴 Recording...' : '🎙️ Tap to Record'}
</Text>
<Button title="Record for Replay" onPress={startRecordReplay} />
<Button title="Replay" onPress={stopRecordReplay} />
<View style={{ height: 10 }} />
{isRecording ? (
<Button title="Stop Recording" onPress={onStopRecording} />
) : (
<Button title="Start Recording" onPress={onStartRecording} />
)}
<View style={{ height: 20 }} />
<Button
title="Play Last Recording"
onPress={onPlayOutput}
disabled={!lastOutput}
/>
</View>
</Container>
);
};

// const Record: FC = () => {
// // const recorderRef = useRef<AudioRecorder | null>(null);
// // const aCtxRef = useRef<AudioContext | null>(null);
// // const recorderAdapterRef = useRef<RecorderAdapterNode | null>(null);
// // const audioBuffersRef = useRef<AudioBuffer[]>([]);
// // const sourcesRef = useRef<AudioBufferSourceNode[]>([]);
// // useEffect(() => {
// // const setup = async () => {
// // await AudioManager.requestRecordingPermissions();
// // recorderRef.current = new AudioRecorder({
// // sampleRate: SAMPLE_RATE,
// // bufferLengthInSamples: SAMPLE_RATE,
// // });
// // };
// // setup();
// // return () => {
// // aCtxRef.current?.close();
// // stopRecorder();
// // };
// // }, []);
// // const setupRecording = () => {
// // AudioManager.setAudioSessionOptions({
// // iosCategory: 'playAndRecord',
// // iosMode: 'spokenAudio',
// // iosOptions: ['defaultToSpeaker', 'allowBluetoothA2DP'],
// // });
// // };
// // const stopRecorder = () => {
// // if (recorderRef.current) {
// // recorderRef.current.stop();
// // console.log('Recording stopped');
// // // advised, but not required
// // AudioManager.setAudioSessionOptions({
// // iosCategory: 'playback',
// // iosMode: 'default',
// // });
// // } else {
// // console.error('AudioRecorder is not initialized');
// // }
// // };
// // const startEcho = () => {
// // if (!recorderRef.current) {
// // console.error('AudioContext or AudioRecorder is not initialized');
// // return;
// // }
// // setupRecording();
// // aCtxRef.current = new AudioContext({ sampleRate: SAMPLE_RATE });
// // recorderAdapterRef.current = aCtxRef.current.createRecorderAdapter();
// // recorderAdapterRef.current.connect(aCtxRef.current.destination);
// // recorderRef.current.connect(recorderAdapterRef.current);
// // recorderRef.current.start();
// // console.log('Recording started');
// // console.log('Audio context state:', aCtxRef.current.state);
// // if (aCtxRef.current.state === 'suspended') {
// // console.log('Resuming audio context');
// // aCtxRef.current.resume();
// // }
// // };
// // /// This stops only the recording, not the audio context
// // const stopEcho = () => {
// // stopRecorder();
// // aCtxRef.current = null;
// // recorderAdapterRef.current = null;
// // };
// // const startRecordReplay = () => {
// // if (!recorderRef.current) {
// // console.error('AudioRecorder is not initialized');
// // return;
// // }
// // setupRecording();
// // audioBuffersRef.current = [];
// // recorderRef.current.onAudioReady((event) => {
// // const { buffer, numFrames } = event;
// // console.log('Audio recorder buffer ready:', buffer.duration, numFrames);
// // audioBuffersRef.current.push(buffer);
// // });
// // recorderRef.current.start();
// // setTimeout(() => {
// // stopRecorder();
// // }, 5000);
// // };
// // const stopRecordReplay = () => {
// // const aCtx = new AudioContext({ sampleRate: SAMPLE_RATE });
// // aCtxRef.current = aCtx;
// // if (aCtx.state === 'suspended') {
// // aCtx.resume();
// // }
// // const tNow = aCtx.currentTime;
// // let nextStartAt = tNow + 1;
// // const buffers = audioBuffersRef.current;
// // console.log(tNow, nextStartAt, buffers.length);
// // for (let i = 0; i < buffers.length; i++) {
// // const source = aCtx.createBufferSource();
// // source.buffer = buffers[i];
// // source.connect(aCtx.destination);
// // sourcesRef.current.push(source);
// // source.start(nextStartAt);
// // nextStartAt += buffers[i].duration;
// // }
// // setTimeout(
// // () => {
// // console.log('clearing data');
// // audioBuffersRef.current = [];
// // sourcesRef.current = [];
// // },
// // (nextStartAt - tNow) * 1000
// // );
// // };
// // return (
// // <Container style={{ gap: 40 }}>
// <Text style={{ color: colors.gray, fontSize: 18, textAlign: 'center' }}>
// Sample rate: {SAMPLE_RATE}
// </Text>
// // <View style={{ alignItems: 'center', gap: 10, paddingTop: 20 }}>
// // <Text style={{ color: colors.white, fontSize: 16 }}>Echo</Text>
// // <Button title="Start Recording" onPress={startEcho} />
// // <Button title="Stop Recording" onPress={stopEcho} />
// // </View>
// // <View style={{ alignItems: 'center', gap: 10, paddingTop: 40 }}>
// // <Text style={{ color: colors.white, fontSize: 16 }}>
// // Record & replay
// // </Text>
// // <Button title="Record for Replay" onPress={startRecordReplay} />
// // <Button title="Replay" onPress={stopRecordReplay} />
// // </View>
// // </Container>
// // );
// };

export default Record;
Loading