How to use the Audio.Recording class in expo-av

To help you get started, we’ve selected a few expo-av examples based on popular ways Audio.Recording is used in public projects.
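Before the examples, here is a minimal sketch of the typical flow for starting a recording: request microphone permission, enable recording in the audio mode, then prepare and start an Audio.Recording instance. It assumes the same expo-av API surface used in the examples below (e.g. Audio.RECORDING_OPTIONS_PRESET_HIGH_QUALITY); adapt the names if your SDK version differs.

import { Audio } from 'expo-av';

// Sketch: request permission, configure the audio mode, then prepare and
// start a recording with the built-in high-quality preset.
async function startRecording() {
  const { status } = await Audio.requestPermissionsAsync();
  if (status !== 'granted') {
    throw new Error('Microphone permission was not granted');
  }

  // Recording on iOS requires allowsRecordingIOS to be enabled first.
  await Audio.setAudioModeAsync({
    allowsRecordingIOS: true,
    playsInSilentModeIOS: true,
  });

  const recording = new Audio.Recording();
  await recording.prepareToRecordAsync(Audio.RECORDING_OPTIONS_PRESET_HIGH_QUALITY);
  await recording.startAsync();
  return recording;
}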


GitHub: acrcloud / webapi_example / identify protocol 1 (recommended) / reactnative_ios.js
async _findSong() {
    // Ask for microphone permission before recording.
    const { status } = await Audio.requestPermissionsAsync();
    console.log('Current Status ' + status);
    const recording = new Audio.Recording();
    try {
      // allowsRecordingIOS must be enabled before preparing the recording.
      await Audio.setAudioModeAsync({
        playsInSilentModeIOS: true,
        allowsRecordingIOS: true,
      });
      const recordOptions = {
        android: {
          extension: '.m4a',
          outputFormat: Audio.RECORDING_OPTION_ANDROID_OUTPUT_FORMAT_MPEG_4,
          audioEncoder: Audio.RECORDING_OPTION_ANDROID_AUDIO_ENCODER_AAC,
          sampleRate: 44100,
          numberOfChannels: 2,
          bitRate: 128000,
        },
        ios: {
          extension: '.wav',
GitHub: expo / audio-recording-example / App.js
}
    await Audio.setAudioModeAsync({
      allowsRecordingIOS: true,
      interruptionModeIOS: Audio.INTERRUPTION_MODE_IOS_DO_NOT_MIX,
      playsInSilentModeIOS: true,
      shouldDuckAndroid: true,
      interruptionModeAndroid: Audio.INTERRUPTION_MODE_ANDROID_DO_NOT_MIX,
      playThroughEarpieceAndroid: false,
      staysActiveInBackground: true,
    });
    if (this.recording !== null) {
      this.recording.setOnRecordingStatusUpdate(null);
      this.recording = null;
    }

    const recording = new Audio.Recording();
    await recording.prepareToRecordAsync(this.recordingSettings);
    recording.setOnRecordingStatusUpdate(this._updateScreenForRecordingStatus);

    this.recording = recording;
    await this.recording.startAsync(); // Will call this._updateScreenForRecordingStatus to update the screen.
    this.setState({
      isLoading: false,
    });
  }
GitHub: SCasarotto / casarotto-chat / src / pages / Main / Main.js
.then((response) => {
				const { status, expires, permissions } = response
				if (status === 'granted') {
					const newRecording = new Audio.Recording()
					newRecording
						.prepareToRecordAsync(Audio.RECORDING_OPTIONS_PRESET_HIGH_QUALITY)
						.then((response) => {
							return newRecording.startAsync()
						})
						.then((response) => {
							this.setState({
								recordingActive: true,
								recording: newRecording,
								recordAudioVisible: true,
							})
						})
						.catch((error) => console.log(error))
				}
			})
			.catch((error) => {
GitHub: serlo / serlo-abc / src / components / helpers / PlaySounds.tsx
Permissions.askAsync(Permissions.AUDIO_RECORDING).then(() => {
        const recording = new Audio.Recording();

        playAll(this.props.sounds, this.props.delay)
          .then(() =>
            Audio.setAudioModeAsync({
              staysActiveInBackground: true,
              allowsRecordingIOS: true,
              interruptionModeIOS: Audio.INTERRUPTION_MODE_IOS_DO_NOT_MIX,
              playsInSilentModeIOS: true,
              interruptionModeAndroid:
                Audio.INTERRUPTION_MODE_ANDROID_DO_NOT_MIX,
              shouldDuckAndroid: true,
              playThroughEarpieceAndroid: false
            })
          )
          .then(() =>
            recording.prepareToRecordAsync(
GitHub: NervJS / taro / packages / taro-rn / src / api / media / record.ts
static getInstance () {
    if (!RecorderManager.instance) {
      RecorderManager.recordInstance = new Audio.Recording()
      RecorderManager.instance = new RecorderManager()
    }
    return RecorderManager.instance
  }
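None of the snippets above show how a recording is finished. As a minimal sketch under the same API assumptions, stopping is done with stopAndUnloadAsync() and the recorded file's location comes from getURI(). Note that an Audio.Recording instance can only be prepared and unloaded once, so each new recording needs a fresh instance.

// Sketch: stop an in-progress recording and read back its file URI.
async function stopRecording(recording) {
  await recording.stopAndUnloadAsync();
  const uri = recording.getURI(); // local file URI of the recorded audio
  console.log('Recording stored at', uri);
  return uri;
}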