音频采集器,音频管理提供管理音频的一些基础能力,包括对音频音量、音频设备的管理,以及对音频数据的采集

音频采集器

HarmonyOS
2024-05-20 21:08:28
浏览
收藏 0
回答 1
待解决
回答 1
按赞同
/
按时间
HearMe_cn

使用AudioCapturer采集音频数据

使用的核心API

@ohos.multimedia.audio (音频管理)

音频管理提供管理音频的一些基础能力,包括对音频音量、音频设备的管理,以及对音频数据的采集和渲染等。

该模块提供以下音频相关的常用功能:

  • AudioManager:音频管理。
  • AudioRenderer:音频渲染,用于播放PCM(Pulse Code Modulation)音频数据。
  • AudioCapturer:音频采集,用于录制PCM音频数据。
  • TonePlayer:用于管理和播放DTMF(Dual Tone Multi Frequency,双音多频)音调,如拨号音、通话回铃音等。

使用时导入模块

import audio from '@ohos.multimedia.audio';

核心代码如下:

// Builds one tab-bar entry: a label plus a blue underline that is only
// visible when this tab is the currently selected one.
// index: position of this tab (0 = normal capturer, 1 = parallel capturer);
// btnId: suffix used to form the component id for UI testing.
@Builder TabBuilder(index: number, btnId: string) { 
    Column() { 
      // Label text comes from string resources; selected tab is rendered
      // with the highlighted color, full opacity and a heavier font weight.
      Text(index === 0 ? $r('app.string.NORMAL_CAPTURER') : $r('app.string.PARALLEL_CAPTURER')) 
        .fontColor(this.currentIndex === index ? this.selectedFontColor : this.fontColor) 
        .opacity(this.currentIndex === index ? 1 : 0.6) 
        .fontSize(16) 
        .fontWeight(this.currentIndex === index ? 500 : 400) 
        .lineHeight(22) 
        .margin({ top: 17, bottom: 7 }); 
      // Underline indicator: fully transparent for unselected tabs.
      Divider() 
        .strokeWidth(2) 
        .color('#007DFF') 
        .opacity(this.currentIndex === index ? 1 : 0); 
    }.width(78).id('btn_' + btnId); 
  } 
  
  // ArkUI lifecycle hook: runs once before the component is first rendered.
  // Delegates all setup (capturer creation, file open) to initResource().
  async aboutToAppear(): Promise<void> { 
    console.log('NomalCapturer aboutToAppear'); 
    await this.initResource(); 
  } 
  
  // Creates the AudioCapturer and prepares the output PCM file.
  // Order matters: the capturer must exist before getBufferSize() is
  // queried, and the target path must be built before openFile().
  // Any failure is logged and leaves the page in its pre-init state.
  async initResource(): Promise<void> { 
    console.log('initResource 0'); 
    try { 
      console.log('initResource 1'); 
      this.audioCapturer = await audio.createAudioCapturer(this.audioCapturerOptions); 
      console.log('initResource 2'); 
      // Chunk size used later when reading captured PCM data.
      this.bufferSize = await this.audioCapturer.getBufferSize(); 
      this.recordState = 'init'; 
      // Unique file name: timestamp plus a random suffix to avoid clashes.
      this.title = `${this.getDate(2)}_${Math.floor(Math.random() * RANDOM_NUM)}`; 
      this.path = `/data/storage/el2/base/haps/entry/files/normal_capturer_${this.title}.pcm`; 
      this.date = this.getDate(1); 
      console.log('initResource 3'); 
      await this.openFile(this.path); 
    } catch (err) { 
      let error = err as BusinessError; 
      console.log(`NormalCapturer:createAudioCapturer err=${JSON.stringify(error)}`); 
    } 
  } 
  
  // Releases every resource this page owns: the recording file descriptor,
  // the 1-second progress timer, the capturer and the renderer. Each
  // resource is guarded and reset afterwards, so repeated calls are safe.
  // NOTE(review): the name is a typo for "releaseResource", but it is kept
  // because callers (e.g. aboutToDisappear) use this spelling.
  async releseResource(): Promise<void> { 
    if (this.fd > 0) { 
      // BUG FIX: closeFile() is async and was previously fired without
      // awaiting, so this.fd could be zeroed (and potentially reused)
      // before the close actually completed.
      await this.closeFile(); 
      this.fd = 0; 
    } 
    if (this.interval) { 
      clearInterval(this.interval); 
    } 
    if (this.audioCapturer) { 
      console.log('NomalCapturer,audioCapturer released'); 
      await this.audioCapturer.release(); 
      this.audioCapturer = undefined; 
      this.recordState = 'init'; 
      // (removed a second, redundant clearInterval — the timer is already
      // cleared by the guarded block above) 
    } 
    if (this.audioRenderer) { 
      console.log('NomalCapturer,audioRenderer released'); 
      await this.audioRenderer.release(); 
      this.audioRenderer = undefined; 
    } 
  } 
  
  // ArkUI lifecycle hook: runs when the component is destroyed.
  // Frees the capturer/renderer/file/timer via releseResource() so the
  // device microphone and fd are not leaked when the page is left.
  async aboutToDisappear(): Promise<void> { 
    console.log('NomalCapturer,aboutToDisappear is called'); 
    await this.releseResource(); 
  } 
  
  // Opens (creating if necessary) the PCM output file for read/write and
  // stores its descriptor in this.fd for later fs.read/fs.write calls.
  // Failures are logged and swallowed; this.fd stays unset on error.
  async openFile(path: string): Promise<void> { 
    console.log(path); 
    try { 
      // BUG FIX: the original opened the file twice — once with 0o100
      // (O_CREAT) whose descriptor was never closed (fd leak), then again
      // with 0o2 (O_RDWR). A single open with 0o102 (O_CREAT | O_RDWR)
      // creates the file if missing and opens it read/write in one step.
      let file = await fs.open(path, 0o102); 
      this.fd = file.fd; 
      console.log(`file open success for read and write mode,fd=${file.fd}`); 
    } catch (err) { 
      let error = err as BusinessError; 
      console.log('file open err:' + JSON.stringify(error)); 
      return; 
    } 
  } 
  
  // Closes the PCM file descriptor stored in this.fd.
  // A failed close is logged and swallowed — callers do not need to
  // handle it.
  async closeFile(): Promise<void> { 
    try { 
      await fs.close(this.fd); 
      console.log('file close success'); 
    } catch (err) { 
      const e = err as BusinessError; 
      console.log('file close err:' + JSON.stringify(e)); 
    } 
  } 
  
  // Starts PCM capture. Resets the elapsed-seconds counter, starts a
  // 1-second UI timer that auto-stops at TOTAL_SECOND, and schedules the
  // buffer-reading loop (readCapturer) shortly after start.
  async capturerStart(): Promise<void> { 
    if (!this.audioCapturer) { 
      console.log(`NormalCapturer,capturerStart:audioCapturer is null`); 
      return; 
    } 
  
    try { 
      await this.audioCapturer.start(); 
      // when start,init recordSec 
      this.recordSec = 0; 
      this.recordState = 'started'; 
      console.log('audioCapturer start ok'); 
      // Clear any previous timer before installing a new one, so only a
      // single interval ever runs.
      clearInterval(this.interval); 
      this.interval = setInterval(async () => { 
        if (this.recordSec >= TOTAL_SECOND) { 
          // over TOTAL_SECOND,need to stop auto 
          clearInterval(this.interval); 
          // Only stop if the capturer is still actually running.
          if (this.audioCapturer && this.audioCapturer.state === audio.AudioState.STATE_RUNNING) { 
            await this.capturerStop(); 
          } 
          return; 
        } 
        this.recordSec++; 
        // Refresh the mm:ss display shown in the UI.
        this.showTime = this.getTimesBySecond(this.recordSec); 
  
      }, INTERVAL_TIME); 
      // Defer the first read slightly so the capturer has data available.
      setTimeout(async () => { 
        await this.readCapturer(); 
      }, READ_TIME_OUT); 
    } catch (err) { 
      let error = err as BusinessError; 
      console.log(`NormalCapturer:audioCapturer start err=${JSON.stringify(error)}`); 
    } 
  } 
  
  // Creates the audio renderer and mirrors its lifecycle state into
  // this.renderState, keeping it current via the 'stateChange' event.
  // Creation failures are logged and leave this.audioRenderer unset.
  async renderCreate(): Promise<void> { 
    try { 
      const renderer = await audio.createAudioRenderer(this.audioRendererOptions); 
      this.audioRenderer = renderer; 
      this.renderState = renderer.state; 
      renderer.on('stateChange', (state) => { 
        this.renderState = state; 
      }); 
    } catch (err) { 
      const e = err as BusinessError; 
      console.log(`createAudioRenderer err=${JSON.stringify(e)}`); 
    } 
  } 
  
  // Plays back the recorded PCM file through the renderer, feeding it in
  // bufferSize chunks starting at this.start (so playback resumes from
  // where a previous pause left off). Pause/stop/release of the renderer
  // breaks (or aborts) the write loop.
  async renderStart(): Promise<void> { 
    if (!this.audioRenderer) { 
      return; 
    } 
    let bufferSize = 0; 
    try { 
      bufferSize = await this.audioRenderer.getBufferSize(); 
      await this.audioRenderer.start(); 
    } catch (err) { 
      let error = err as BusinessError; 
      console.log(`start err:${JSON.stringify(error)}`); 
      // BUG FIX: on failure bufferSize may still be 0; falling through to
      // the read loop would then never advance startOffset (infinite
      // loop). Abort playback instead.
      return; 
    } 
  
    try { 
      let stat = await fs.stat(this.path); 
      let buf = new ArrayBuffer(bufferSize); 
      console.log(`audioRenderer write start..........`); 
      let startOffset = this.start; 
      // BUG FIX: use '<' rather than '<=' — at startOffset === stat.size
      // there is nothing left to read, and the old loop wrote one extra
      // buffer of stale data to the renderer.
      while (startOffset < stat.size) { 
        if (this.audioRenderer.state === audio.AudioState.STATE_PAUSED) { 
          break; 
        } 
        // change tag,to stop 
        if (this.audioRenderer.state === audio.AudioState.STATE_STOPPED) { 
          break; 
        } 
        if (this.audioRenderer.state === audio.AudioState.STATE_RELEASED) { 
          return; 
        } 
        let options: Options = { 
          offset: startOffset, 
          length: bufferSize 
        }; 
        console.log('renderStart,options=' + JSON.stringify(options)); 
  
        await fs.read(this.fd, buf, options); 
        await this.audioRenderer.write(buf); 
        // Progress indicator: scale byte offset to recorded seconds.
        this.playSec = Math.round(startOffset / stat.size * this.recordSec); 
        startOffset = startOffset + bufferSize; 
        // Persist position so a later renderStart() resumes from here.
        this.start = startOffset; 
      } 
      console.log(`audioRenderer write end..........`) 
      if (this.audioRenderer.state === audio.AudioState.STATE_RUNNING) { 
        this.start = 0; 
        await this.renderStop(); 
      } 
    } catch (err) { 
      let error = err as BusinessError; 
      console.log(`write err:${JSON.stringify(error)}`); 
    } 
  } 
  
  async renderPause(): Promise<void> { 
    if (!this.audioRenderer) { 
      return; 
    } 
    try { 
      await this.audioRenderer.pause(); 
    } catch (err) { 
      let error = err as BusinessError; 
      console.log(`pause err:${JSON.stringify(error)}`); 
    } 
  } 
  
  async renderStop(): Promise<void> { 
    if (!this.audioRenderer) { 
      return; 
    } 
    try { 
      await this.audioRenderer.stop(); 
      this.start = 0; 
    } catch (err) { 
      let error = err as BusinessError; 
      console.log(`stop err:${JSON.stringify(error)}`); 
    } 
  }

实现效果

适配版本信息

1. 本示例为Stage模型,支持API version 10。

2. 本示例需要使用DevEco Studio 3.1 Release以上版本进行编译运行。

分享
微博
QQ
微信
回复
2024-05-21 16:53:48
相关问题
音频音频焦点请求和释放
439浏览 • 1回复 待解决
鸿蒙JS开发音频管理,导入media错误
2517浏览 • 1回复 已解决
音频发声设备切换与查询
444浏览 • 1回复 待解决
AudioCapturer录音+AudioRenderer播放音频
593浏览 • 1回复 待解决
音频播放长时任务不生效
440浏览 • 1回复 待解决
鸿蒙 如何使用 player 播放网络音频
5869浏览 • 1回复 已解决
使用AudioCapturer开发音频录制功能
534浏览 • 1回复 待解决
使用AudioRenderer开发音频播放功能
424浏览 • 1回复 待解决
音频录制开发相关问题
4299浏览 • 1回复 待解决
SoundPool实现音频播放功能
544浏览 • 1回复 待解决
如何用websocket发送音频流?
2243浏览 • 1回复 待解决
OH _Audio播放音频问题
485浏览 • 1回复 待解决
AVplayer开发音频播放功能
441浏览 • 1回复 待解决
鸿蒙-如何实现播放音频
9617浏览 • 2回复 待解决
如何判断音频焦点获取和丢失?
4472浏览 • 1回复 待解决
SoundPool播放音频是否支持WMV格式
659浏览 • 1回复 待解决