Skip to content

Instantly share code, notes, and snippets.

@imbharat420
Last active March 18, 2023 13:52
Show Gist options
  • Save imbharat420/fc359ccd6209eb40425491d61c7b0943 to your computer and use it in GitHub Desktop.
Record React Native Audio
import React, {useState, useEffect} from 'react';
import {Text, View, StyleSheet, Button, PermissionsAndroid} from 'react-native';
import AudioRecord from 'react-native-audio-record';
import {Buffer} from 'buffer';
import Sound from 'react-native-sound';
import Permissions from 'react-native-permissions';
const Listen = () => {
const [sound, setSound] = useState<any>(null);
const [isRecording, setIsRecording] = useState(false);
const [state, setState] = useState({
audioFile: '',
recording: false,
loaded: false,
paused: true,
});
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const [chunk, setChunk] = useState<Buffer | null>(null);
useEffect(() => {
const loadConfig = async () => {
await checkPermission();
const options = {
sampleRate: 16000, // default 44100
channels: 1, // 1 or 2, default 1
bitsPerSample: 16, // 8 or 16, default 16
audioSource: 6, // android only (see below)
wavFile: 'test.wav', // default 'audio.wav'
};
AudioRecord.init(options);
AudioRecord.on('data', data => {
// base64-encoded audio data chunks
const buffer = Buffer.from(data, 'base64');
console.log('chunk size', buffer.length);
setChunk(buffer);
});
};
loadConfig();
}, []);
const checkPermission = async () => {
try {
const granted = await PermissionsAndroid.request(
PermissionsAndroid.PERMISSIONS.RECORD_AUDIO,
{
title: 'Audio Permission',
message: 'App needs access to your audio',
buttonNeutral: 'Ask Me Later',
buttonNegative: 'Cancel',
buttonPositive: 'OK',
},
);
const p = await Permissions.check('microphone');
console.log('p', p);
if (p === 'authorized') {
console.log('*_* You can use the audio ');
return requestPermission();
} else {
console.log('*_* Audio permission denied');
}
if (granted === PermissionsAndroid.RESULTS.GRANTED) {
console.log('*_* You can use the audio');
} else {
console.log('*_* Audio permission denied');
}
} catch (err) {
console.warn(err);
}
};
const requestPermission = async () => {
try {
const granted = await Permissions.request('microphone');
console.log('granted', granted);
if (granted === 'authorized') {
console.log('You can use the audio');
} else {
console.log('Audio permission denied');
}
} catch (err) {
console.warn(err);
}
};
async function startRecording() {
console.log('start recording');
setState(prevData => ({
...prevData,
audioFile: '',
recording: true,
loaded: false,
paused: true,
}));
AudioRecord.start();
}
async function stopRecording() {
if (!state.recording) {
console.log("audio isn't recording");
return;
}
// save to file
let audioFile = await AudioRecord.stop();
console.log('audioFile', audioFile);
setState(prevData => ({
...prevData,
audioFile,
recording: false,
}));
}
async function load() {
return new Promise(async (resolve, reject) => {
if (!state.audioFile) {
console.log('*_* file path is empty');
return reject('file path is empty');
}
console.log('Loading Audio');
let music = new Sound(state.audioFile, '', error => {
if (error) {
console.log('failed load', error);
return reject(error);
}
console.log('duration', music.getDuration());
setState(prevData => ({
...prevData,
loaded: true,
}));
});
console.log('music', music);
uploadFile();
setSound(music);
return resolve(music);
});
}
async function play() {
if (!state.loaded) {
try {
await load();
console.log('loaded', state.audioFile, sound);
console.log('play', state.audioFile, sound);
setState(prevData => ({
...prevData,
paused: false,
}));
console.log('loaded', {
audioFile: state.audioFile,
recording: false,
loaded: true,
paused: false,
sound: sound,
});
Sound.setCategory('Playback');
sound.setVolume(1);
sound.play((success: any) => {
if (success) {
console.log('*_* successfully finished playing');
} else {
console.log('*_* playback failed due to audio decoding errors');
}
setState((prevState: any) => ({
...prevState,
audioFile: prevState.audioFile,
recording: false,
paused: true,
}));
});
setState(prevData => ({
...prevData,
paused: false,
}));
} catch (err) {
console.log('*_* failed load', err);
}
}
}
const uploadFile = async () => {
const formData = new FormData();
formData.append('file', {
uri: state.audioFile,
type: 'audio/wav',
name: 'test.wav',
});
formData.append('sample_size', '10234');
const config = {
headers: {
'content-type': 'multipart/form-data',
},
};
try {
const res = await fetch({
method: 'POST',
url: 'http:://localhost:3000/upload',
data: formData,
config,
});
console.log('res', res);
} catch (err) {
console.log('err', err);
}
};
return (
<View style={styles.container}>
<View style={styles.p5}>
<Text style={styles.heading}>Heading</Text>
<Text>
Play some music and click the button to recognize songs now.
</Text>
<View>
<Button title="Start" onPress={startRecording}>
<Text>Start Recording</Text>
</Button>
<Button title="stop" onPress={stopRecording}>
<Text>Stop Recording</Text>
</Button>
<Button title="Play" onPress={play}>
<Text>Play Recording</Text>
</Button>
</View>
</View>
</View>
);
};
// Root screen: wraps the Listen recorder in the shared page chrome.
const Home = () => (
  <View style={styles.container}>
    <View style={styles.p5}>
      <Listen />
    </View>
  </View>
);
// Shared styles for the Home/Listen screens.
const styles = StyleSheet.create({
  // Full-screen white backdrop.
  container: {
    backgroundColor: '#fff',
    height: '100%',
    width: '100%',
  },
  // Generic padded section wrapper.
  p5: {
    padding: 20,
  },
  // Large coral page title.
  heading: {
    color: 'rgba(246, 85, 100,1)',
    fontSize: 30,
    fontWeight: 'bold',
    marginBottom: 20,
  },
});
// Default export consumed by the app's navigator / root registry.
export default Home;
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment