
Audio Capturer

HarmonyOS
2024-05-20 21:08:28
伊普洛先生

Using AudioCapturer to capture audio data

Core API used

@ohos.multimedia.audio (audio management)

The audio management module provides basic capabilities for managing audio, including management of audio volume and audio devices, as well as capture and rendering of audio data.

This module provides the following commonly used audio features:

  • AudioManager: audio management.
  • AudioRenderer: audio rendering, used to play PCM (Pulse Code Modulation) audio data.
  • AudioCapturer: audio capture, used to record PCM audio data.
  • TonePlayer: used to manage and play DTMF (Dual Tone Multi Frequency) tones, such as dial tones and ringback tones.

Import the module before use:

import audio from '@ohos.multimedia.audio';
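
The snippet below also references this.audioCapturerOptions, this.audioRendererOptions, the fs module, the BusinessError type and an Options interface that are not shown in the answer. A minimal sketch of what they might look like (assumed here for completeness, not taken from the original sample):

import fs from '@ohos.file.fs';
import { BusinessError } from '@ohos.base';

// Shared PCM stream parameters for both the capturer and the renderer
let audioStreamInfo: audio.AudioStreamInfo = {
  samplingRate: audio.AudioSamplingRate.SAMPLE_RATE_44100,
  channels: audio.AudioChannel.CHANNEL_2,
  sampleFormat: audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE,
  encodingType: audio.AudioEncodingType.ENCODING_TYPE_RAW
};

// Options used when creating the AudioCapturer (records from the microphone)
let audioCapturerOptions: audio.AudioCapturerOptions = {
  streamInfo: audioStreamInfo,
  capturerInfo: {
    source: audio.SourceType.SOURCE_TYPE_MIC,
    capturerFlags: 0
  }
};

// Options used when creating the AudioRenderer (plays back the recorded PCM)
let audioRendererOptions: audio.AudioRendererOptions = {
  streamInfo: audioStreamInfo,
  rendererInfo: {
    usage: audio.StreamUsage.STREAM_USAGE_MUSIC,
    rendererFlags: 0
  }
};

// Hypothetical shape of the Options object passed to fs.read below
interface Options {
  offset?: number;
  length?: number;
}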

The core code is as follows:

@Builder TabBuilder(index: number, btnId: string) { 
    Column() { 
      Text(index === 0 ? $r('app.string.NORMAL_CAPTURER') : $r('app.string.PARALLEL_CAPTURER')) 
        .fontColor(this.currentIndex === index ? this.selectedFontColor : this.fontColor) 
        .opacity(this.currentIndex === index ? 1 : 0.6) 
        .fontSize(16) 
        .fontWeight(this.currentIndex === index ? 500 : 400) 
        .lineHeight(22) 
        .margin({ top: 17, bottom: 7 }); 
      Divider() 
        .strokeWidth(2) 
        .color('#007DFF') 
        .opacity(this.currentIndex === index ? 1 : 0); 
    }.width(78).id('btn_' + btnId); 
  } 
  
  async aboutToAppear(): Promise<void> { 
    console.log('NormalCapturer aboutToAppear'); 
    await this.initResource(); 
  } 
  
  async initResource(): Promise<void> { 
    console.log('initResource 0'); 
    try { 
      console.log('initResource 1'); 
      this.audioCapturer = await audio.createAudioCapturer(this.audioCapturerOptions); 
      console.log('initResource 2'); 
      this.bufferSize = await this.audioCapturer.getBufferSize(); 
      this.recordState = 'init'; 
      this.title = `${this.getDate(2)}_${Math.floor(Math.random() * RANDOM_NUM)}`; 
      this.path = `/data/storage/el2/base/haps/entry/files/normal_capturer_${this.title}.pcm`; 
      this.date = this.getDate(1); 
      console.log('initResource 3'); 
      await this.openFile(this.path); 
    } catch (err) { 
      let error = err as BusinessError; 
      console.log(`NormalCapturer:createAudioCapturer err=${JSON.stringify(error)}`); 
    } 
  } 
  
  async releaseResource(): Promise<void> { 
    if (this.fd > 0) { 
      this.closeFile(); 
      this.fd = 0; 
    } 
    if (this.interval) { 
      clearInterval(this.interval); 
    } 
    if (this.audioCapturer) { 
      console.log('NormalCapturer,audioCapturer released'); 
      await this.audioCapturer.release(); 
      this.audioCapturer = undefined; 
      this.recordState = 'init'; 
      clearInterval(this.interval); 
    } 
    if (this.audioRenderer) { 
      console.log('NormalCapturer,audioRenderer released'); 
      await this.audioRenderer.release(); 
      this.audioRenderer = undefined; 
    } 
  } 
  
  async aboutToDisappear(): Promise<void> { 
    console.log('NormalCapturer,aboutToDisappear is called'); 
    await this.releaseResource(); 
  } 
  
  async openFile(path: string): Promise<void> { 
    console.log(path); 
    try { 
      // 0o100 (CREATE): create the file if it does not already exist 
      await fs.open(path, 0o100); 
      console.log('file created success'); 
    } catch (err) { 
      let error = err as BusinessError; 
      console.log('file created err:' + JSON.stringify(error)); 
      return; 
    } 
  
    try { 
      // 0o2 (READ_WRITE): reopen the file for reading and writing, and keep its fd 
      let file = await fs.open(path, 0o2); 
      this.fd = file.fd; 
      console.log(`file open success for read and write mode,fd=${file.fd}`); 
    } catch (err) { 
      let error = err as BusinessError; 
      console.log('file open err:' + JSON.stringify(error)); 
      return; 
    } 
  } 
  
  async closeFile(): Promise<void> { 
    try { 
      await fs.close(this.fd); 
      console.log('file close success'); 
    } catch (err) { 
      let error = err as BusinessError; 
      console.log('file close err:' + JSON.stringify(error)); 
      return; 
    } 
  } 
  
  async capturerStart(): Promise<void> { 
    if (!this.audioCapturer) { 
      console.log(`NormalCapturer,capturerStart:audioCapturer is null`); 
      return; 
    } 
  
    try { 
      await this.audioCapturer.start(); 
      // reset the elapsed-time counter each time recording starts 
      this.recordSec = 0; 
      this.recordState = 'started'; 
      console.log('audioCapturer start ok'); 
      clearInterval(this.interval); 
      this.interval = setInterval(async () => { 
        if (this.recordSec >= TOTAL_SECOND) { 
          // the recording has reached TOTAL_SECOND; stop it automatically 
          clearInterval(this.interval); 
          if (this.audioCapturer && this.audioCapturer.state === audio.AudioState.STATE_RUNNING) { 
            await this.capturerStop(); 
          } 
          return; 
        } 
        this.recordSec++; 
        this.showTime = this.getTimesBySecond(this.recordSec); 
  
      }, INTERVAL_TIME); 
      setTimeout(async () => { 
        await this.readCapturer(); 
      }, READ_TIME_OUT); 
    } catch (err) { 
      let error = err as BusinessError; 
      console.log(`NormalCapturer:audioCapturer start err=${JSON.stringify(error)}`); 
    } 
  } 
  
  async renderCreate(): Promise<void> { 
    try { 
      this.audioRenderer = await audio.createAudioRenderer(this.audioRendererOptions); 
      this.renderState = this.audioRenderer.state; 
      this.audioRenderer.on('stateChange', (state) => { 
        this.renderState = state; 
      }); 
    } catch (err) { 
      let error = err as BusinessError; 
      console.log(`createAudioRenderer err=${JSON.stringify(error)}`); 
    } 
  } 
  
  async renderStart(): Promise<void> { 
    if (!this.audioRenderer) { 
      return; 
    } 
    let bufferSize = 0; 
    try { 
      bufferSize = await this.audioRenderer.getBufferSize(); 
      await this.audioRenderer.start(); 
    } catch (err) { 
      let error = err as BusinessError; 
      console.log(`start err:${JSON.stringify(error)}`); 
    } 
  
    try { 
      let stat = await fs.stat(this.path); 
      let buf = new ArrayBuffer(bufferSize); 
      console.log(`audioRenderer write start..........`); 
      let startOffset = this.start; 
      while (startOffset <= stat.size) { 
        if (this.audioRenderer.state === audio.AudioState.STATE_PAUSED) { 
          break; 
        } 
        // the renderer was stopped externally; exit the write loop 
        if (this.audioRenderer.state === audio.AudioState.STATE_STOPPED) { 
          break; 
        } 
        if (this.audioRenderer.state === audio.AudioState.STATE_RELEASED) { 
          return; 
        } 
        let options: Options = { 
          offset: startOffset, 
          length: bufferSize 
        }; 
        console.log('renderStart,options=' + JSON.stringify(options)); 
  
        await fs.read(this.fd, buf, options); 
        await this.audioRenderer.write(buf); 
        this.playSec = Math.round(startOffset / stat.size * this.recordSec); 
        startOffset = startOffset + bufferSize; 
        this.start = startOffset; 
      } 
      console.log(`audioRenderer write end..........`); 
      if (this.audioRenderer.state === audio.AudioState.STATE_RUNNING) { 
        this.start = 0; 
        await this.renderStop(); 
      } 
    } catch (err) { 
      let error = err as BusinessError; 
      console.log(`write err:${JSON.stringify(error)}`); 
    } 
  } 
  
  async renderPause(): Promise<void> { 
    if (!this.audioRenderer) { 
      return; 
    } 
    try { 
      await this.audioRenderer.pause(); 
    } catch (err) { 
      let error = err as BusinessError; 
      console.log(`pause err:${JSON.stringify(error)}`); 
    } 
  } 
  
  async renderStop(): Promise<void> { 
    if (!this.audioRenderer) { 
      return; 
    } 
    try { 
      await this.audioRenderer.stop(); 
      this.start = 0; 
    } catch (err) { 
      let error = err as BusinessError; 
      console.log(`stop err:${JSON.stringify(error)}`); 
    } 
  }
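
The methods above also call readCapturer(), capturerStop(), getDate() and getTimesBySecond(), which the answer does not include. The two audio-related ones could be implemented roughly as follows; this is a sketch built on the documented AudioCapturer.read()/stop() and fs.write() APIs, not the original sample code:

  // Continuously read PCM frames from the capturer and append them to the opened .pcm file 
  async readCapturer(): Promise<void> { 
    if (!this.audioCapturer) { 
      return; 
    } 
    try { 
      while (this.audioCapturer.state === audio.AudioState.STATE_RUNNING) { 
        // blocking read of one buffer of captured PCM data 
        let buffer = await this.audioCapturer.read(this.bufferSize, true); 
        // fs.write advances the file offset, so successive writes append to the file 
        await fs.write(this.fd, buffer); 
      } 
    } catch (err) { 
      let error = err as BusinessError; 
      console.log(`readCapturer err=${JSON.stringify(error)}`); 
    } 
  } 

  async capturerStop(): Promise<void> { 
    if (!this.audioCapturer) { 
      return; 
    } 
    try { 
      await this.audioCapturer.stop(); 
      this.recordState = 'stopped'; 
      console.log('audioCapturer stop ok'); 
    } catch (err) { 
      let error = err as BusinessError; 
      console.log(`capturerStop err=${JSON.stringify(error)}`); 
    } 
  } 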

Result

Supported versions

1. This sample uses the Stage model and supports API version 10.

2. This sample must be compiled and run with DevEco Studio 3.1 Release or later.
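
In addition, recording from the microphone requires the ohos.permission.MICROPHONE permission, which must be declared in the module's module.json5 and granted by the user at runtime. The answer does not show this part; a minimal runtime request (an assumption about how the sample could handle it) might look like:

import abilityAccessCtrl, { Permissions } from '@ohos.abilityAccessCtrl';
import common from '@ohos.app.ability.common';

// Ask the user for microphone access before creating the AudioCapturer.
// context is the UIAbilityContext of the calling page/ability.
async function requestMicPermission(context: common.UIAbilityContext): Promise<boolean> {
  const permissions: Array<Permissions> = ['ohos.permission.MICROPHONE'];
  let atManager = abilityAccessCtrl.createAtManager();
  let result = await atManager.requestPermissionsFromUser(context, permissions);
  // authResults holds 0 for every permission the user granted
  return result.authResults.every((status: number) => status === 0);
}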
