How to play a recording through the speaker in React Native expo-av

iyfamqjs posted on 2023-10-23 in React

I'm trying to build a voice-memo component for my app using expo-av. I've figured out how to create a recording, but when I play it back, it only comes out of the earpiece speaker. Is there a way to play the recording through the phone's main speaker? I haven't tried testing on Android yet, but on iPhone the audio only plays through the earpiece. Thanks.

import React from 'react';
import { Button, StyleSheet, Text, View, TouchableOpacity } from 'react-native';
import { Audio } from 'expo-av';
import * as Sharing from 'expo-sharing';
import { MaterialCommunityIcons } from '@expo/vector-icons';
import AppText from './AppText';
import Screen from './Screen';

export default function AppVoice() {
  const [recording, setRecording] = React.useState();
  const [recordings, setRecordings] = React.useState([]);
  const [message, setMessage] = React.useState("");

  async function startRecording() {
    try {
      const permission = await Audio.requestPermissionsAsync();

      if (permission.status === "granted") {
        await Audio.setAudioModeAsync({
          allowsRecordingIOS: true,
          playsInSilentModeIOS: true,
        });

        const { recording } = await Audio.Recording.createAsync(
          Audio.RECORDING_OPTIONS_PRESET_HIGH_QUALITY
        );

        setRecording(recording);
      } else {
        setMessage("Please grant permission to app to access microphone");
      }
    } catch (err) {
      console.error('Failed to start recording', err);
    }
  }

  async function stopRecording() {
    setRecording(undefined);
    await recording.stopAndUnloadAsync();

    let updatedRecordings = [...recordings];
    const { sound, status } = await recording.createNewLoadedSoundAsync();
    updatedRecordings.push({
      sound: sound,
      duration: getDurationFormatted(status.durationMillis),
      file: recording.getURI()
    });

    setRecordings(updatedRecordings);
  }

  function getDurationFormatted(millis) {
    const minutes = millis / 1000 / 60;
    const minutesDisplay = Math.floor(minutes);
    const seconds = Math.round((minutes - minutesDisplay) * 60);
    const secondsDisplay = seconds < 10 ? `0${seconds}` : seconds;
    return `${minutesDisplay}:${secondsDisplay}`;
  }

  function getRecordingLines() {
    return recordings.map((recordingLine, index) => {
      return (
        <View key={index} style={styles.row}>
          <Text style={styles.fill}>Recording {index + 1} - {recordingLine.duration}</Text>
          <Button style={styles.button} onPress={() => recordingLine.sound.replayAsync()} title="Play"></Button>
          <Button style={styles.button}  onPress={() => Sharing.shareAsync(recordingLine.file)} title="Share"></Button>
        </View>
      );
    });
  }

  return (
    <Screen style={{flex:1, backgroundColor:'black'}}>
        <View style={styles.container}>
            <View style={styles.recorder}>
                <TouchableOpacity  style={{position:'absolute', left:10}}>
                    <MaterialCommunityIcons  name="microphone" size={24} color="black" />
                </TouchableOpacity>
                <AppText style={{color:'black', textAlign:'center'}}>Voice Memo</AppText>
                <TouchableOpacity onPress={recording ? stopRecording : startRecording} style={{position:'absolute', right:10}}>
                    {recording ? <MaterialCommunityIcons  name="pause" size={28} color="black" /> : <MaterialCommunityIcons  name="record-circle-outline" size={28} color="red" />}
                </TouchableOpacity>
            </View>
            <View style={{flex:1}}>
                {getRecordingLines()}
            </View>
        </View>
    </Screen>
  );
}

const styles = StyleSheet.create({
  recorder: {
    width:300,
    backgroundColor:'white',
    height:50,
    borderRadius: 100,
    justifyContent:'center'
  },
  container:{
    flex:1,
  },
  row: {
    flexDirection: 'row',
    alignItems: 'center',
    justifyContent: 'center',
  },
  fill: {
    flex: 1,
    margin: 16,
    color:'white'
  },
  button: {
    margin: 16
  }
});

4dc9hkyq1#

I'm not sure exactly how I figured this out, but what you need to do is keep "allowsRecordingIOS" set to true while recording, then set it back to false when you stop recording. Here is the updated code; only stopRecording changes, the rest of the component is the same as in the question:

  async function stopRecording() {
    setRecording(undefined);
    await recording.stopAndUnloadAsync();
    // Leaving record mode routes playback back to the main speaker on iOS.
    await Audio.setAudioModeAsync({
      allowsRecordingIOS: false,
      playsInSilentModeIOS: true,
    });

    let updatedRecordings = [...recordings];
    const { sound, status } = await recording.createNewLoadedSoundAsync();
    updatedRecordings.push({
      sound: sound,
      duration: getDurationFormatted(status.durationMillis),
      file: recording.getURI()
    });

    setRecordings(updatedRecordings);
  }
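This works because iOS routes audio to the earpiece receiver while the session is in record mode; once allowsRecordingIOS is switched off, playback goes to the main speaker. If you would rather not change stopRecording, the same mode switch can be done right before playback. A minimal sketch (the playRecording helper below is illustrative, not part of the original answer):

  // Illustrative helper (not from the original answer): switch the
  // session out of record mode right before playing a memo.
  async function playRecording(sound) {
    await Audio.setAudioModeAsync({
      allowsRecordingIOS: false,  // record mode off, so iOS uses the main speaker
      playsInSilentModeIOS: true,
    });
    await sound.replayAsync();
  }

  // Usage inside getRecordingLines():
  // <Button title="Play" onPress={() => playRecording(recordingLine.sound)} />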

7rfyedvj2#

Sure, here is the correct code:

import React, { useState, useEffect } from "react";
import {
  View,
  Text,
  StyleSheet,
  TouchableOpacity,
  ActivityIndicator,
  Platform,
} from "react-native";
import { Audio, AVPlaybackStatus } from "expo-av";
import { height, width } from "./TextInputconponent";
import { ResetNavigationStack } from "../screens/AudioRecoder_main";

export default function AudioplayerComponent(props: any) {
  const [sound, setSound] = useState<Audio.Sound | null>(null);
  const [isPlaying, setIsPlaying] = useState(false);
  const [positionMillis, setPositionMillis] = useState(0);
  const [durationMillis, setDurationMillis] = useState(0);
  const [isLoading, setIsLoading] = useState(true);
  const [isLoaded, setIsLoaded] = useState(false);

  const sliderValue = durationMillis > 0 ? positionMillis / durationMillis : 0;
  const Thumbimage = require("../assets/images/BlackDotthumb.png");

  const onPlaybackStatusUpdate = (status: AVPlaybackStatus) => {
    if (status.isLoaded) {
      setIsPlaying(status.isPlaying);
      setPositionMillis(status.positionMillis);
      setDurationMillis(status.durationMillis ?? 0);

      if (status.didJustFinish) {
        // Reset the audio to the beginning
        console.log("Audio playback completed. Resetting.");
        // await seekTo(0);
        loadAudio();
        // Reset position and set not playing
        setPositionMillis(0);
        setIsPlaying(false);
      }

      setIsLoading(false); // Set isLoading to false once the sound is loaded
    }
  };

  useEffect(() => {
    loadAudio();
  }, []);

  useEffect(() => {
    if (props.onBackPressed === true) {
      pauseSound();
      // props.navigation.navigate("AudioListScreen");
      ResetNavigationStack({
        navigation: props.navigation,
        ScreenName: "AudioListScreen",
      });
    }
  }, [props.counter]);

  const audioFile = props?.audioFile?.filePathUrl ?? props?.audioFile;

  const loadAudio = async () => {
    try {
      // Load the audio file
      const { sound: audioSound } = await Audio.Sound.createAsync(
        {
          uri: audioFile,
        },
        { shouldPlay: false }
      );

      // Set the audio mode to play through the speaker
      await Audio.setAudioModeAsync({
        allowsRecordingIOS: false,
        playsInSilentModeIOS: true,
        playThroughEarpieceAndroid: false,
      });

      // Set the sound's playback status update callback
      audioSound.setOnPlaybackStatusUpdate(onPlaybackStatusUpdate);

      // Set the sound
      setSound(audioSound);

      // setIsLoaded to true once the sound is loaded
      setIsLoaded(true);
    } catch (error) {
      console.error("Error loading audio:", error);
    }
  };

  const playSound = async () => {
    // If the sound has not been loaded yet, set the audio mode and load it.
    if (!sound) {
      await Audio.setAudioModeAsync({
        allowsRecordingIOS: false,
        playsInSilentModeIOS: true,
      });
      await loadAudio();
    }

    try {
      // Note: if loadAudio() just ran, the `sound` state may not be
      // committed yet in this closure, so this can be a no-op on the
      // very first press.
      await sound?.playAsync();
    } catch (error) {
      console.error("Error playing sound:", error);
    }
  };

  const pauseSound = async () => {
    if (sound) {
      try {
        const status: any = await sound.getStatusAsync();
        if (status.isLoaded && status.isPlaying) {
          try {
            await sound.pauseAsync();
          } catch (error) {
            console.error("Error pausing sound:", error);
          }
        }
      } catch (error) {
        console.error("Error reading sound status:", error);
      }
    }
  };

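A note on Android: the setAudioModeAsync call in the second answer already includes the relevant option, playThroughEarpieceAndroid. Setting it to false keeps playback on the main speaker; setting it to true routes it to the earpiece. A minimal sketch combining both platforms, assuming the same expo-av API:

  // Route playback to the main speaker on both platforms.
  await Audio.setAudioModeAsync({
    allowsRecordingIOS: false,         // iOS: leave record mode
    playsInSilentModeIOS: true,
    playThroughEarpieceAndroid: false, // Android: false = main speaker
  });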