How to implement dual-channel preview + recording

HarmonyOS
2024-05-20 21:38:39
makeer

The implementation can be referenced below:

Page code: SluPage.ets

import image from '@ohos.multimedia.image'; 
import { BusinessError } from '@ohos.base'; 
import camera from '@ohos.multimedia.camera'; 
import MediaUtils from '../util/MediaUtils'; 
import mediaLibrary from '@ohos.multimedia.mediaLibrary'; 
import media from '@ohos.multimedia.media'; 
import common from '@ohos.app.ability.common'; 
import hilog from '@ohos.hilog'; 
 
 
// ImageReceiver used as the second preview channel (640x480, capacity 8)
let receiver: image.ImageReceiver = image.createImageReceiver(640, 480, 4, 8); 
let instance = MediaUtils.getInstance(getContext(this) as common.Context) 
let videoRecorder: media.AVRecorder | undefined = undefined; 
let videoConfig: media.AVRecorderConfig = { 
  audioSourceType: media.AudioSourceType.AUDIO_SOURCE_TYPE_MIC, 
  videoSourceType: media.VideoSourceType.VIDEO_SOURCE_TYPE_SURFACE_YUV, 
  profile: { 
    audioBitrate: 48000, 
    audioChannels: 2, 
    audioCodec: media.CodecMimeType.AUDIO_AAC, 
    audioSampleRate: 48000, 
    fileFormat: media.ContainerFormatType.CFT_MPEG_4, 
    videoBitrate: 512000, 
    videoCodec: media.CodecMimeType.VIDEO_AVC, 
    videoFrameWidth: 640, 
    videoFrameHeight: 480, 
    videoFrameRate: 30 
  }, 
  url: '', 
  rotation: 0 
}; 
let videoProfileObj: camera.VideoProfile = { 
  format: 1003, // 1003 corresponds to camera.CameraFormat.CAMERA_FORMAT_YUV_420_SP 
  size: { 
    width: 1920, 
    height: 1080 
  }, 
  frameRateRange: { 
    min: 30, 
    max: 30 
  } 
}; 
const TAG: string = 'testTag' 
 
@Entry 
@Component 
struct SluPage { 
  @State message: string = 'SluPage' 
 
  build() { 
    Row() { 
      Column() { 
        XComponentPage() 
      } 
      .width('100%') 
    } 
    .height('100%') 
  } 
} 
 
@Component 
struct XComponentPage { 
  mXComponentController: XComponentController = new XComponentController; 
 
  build() { 
    Flex() { 
      XComponent({ 
        id: '', 
        type: 'surface', 
        libraryname: '', 
        controller: this.mXComponentController 
      }) 
        .onLoad(() => { 
          // Set the surface size (1920x1080); pick the preview size from the resolutions supported by the current device (previewProfilesArray, see the query sketch after this listing) 
          this.mXComponentController.setXComponentSurfaceSize({ surfaceWidth: 1920, surfaceHeight: 1080 }); 
          // Get the surface ID 
          let surfaceId: string = this.mXComponentController.getXComponentSurfaceId(); 
          onImageArrival(receiver) 
          createDualChannelPreview(camera.getCameraManager(getContext(this)), surfaceId, receiver) 
        }) 
        .width('1920px') 
        .height('1080px') 
    } 
  } 
} 
 
/** 
 * Initialize the recording file URL (fd) 
 */ 
async function initUrl(): Promise<void> { 
  let fileAsset = await instance.createAndGetUri(mediaLibrary.MediaType.VIDEO); 
  let fd = await instance.getFdPath(fileAsset); 
  videoConfig.url = `fd://${fd.toString()}`; 
} 
 
/** 
 * Apply the video recording configuration 
 */ 
function vConfig(videoConfig: media.AVRecorderConfig) { 
  videoConfig.videoSourceType = media.VideoSourceType.VIDEO_SOURCE_TYPE_SURFACE_YUV; 
  videoConfig.profile.videoCodec = media.CodecMimeType.VIDEO_MPEG4; 
  videoConfig.rotation = 90; 
  videoConfig.profile.videoFrameWidth = videoProfileObj.size.width; 
  videoConfig.profile.videoFrameHeight = videoProfileObj.size.height; 
  videoConfig.profile.videoFrameRate = videoProfileObj.frameRateRange.min; 
} 
 
/** 
 * Handle frames arriving on the second preview channel (ImageReceiver) 
 */ 
function onImageArrival(receiver: image.ImageReceiver): void { 
  receiver.on('imageArrival', () => { 
    receiver.readNextImage((err: BusinessError, nextImage: image.Image) => { 
      if (err || nextImage === undefined) { 
        return; 
      } 
      nextImage.getComponent(image.ComponentType.JPEG, (err: BusinessError, imgComponent: image.Component) => { 
        if (err || imgComponent === undefined) { 
          return; 
        } 
        let buffer: ArrayBuffer; 
        if (imgComponent.byteBuffer) { 
          buffer = imgComponent.byteBuffer; 
        } else { 
          nextImage.release(); 
          return; 
        } 
        // do something with buffer... 
        nextImage.release(); 
      }) 
    }) 
  }) 
} 
 
async function createDualChannelPreview(cameraManager: camera.CameraManager, XComponentSurfaceId: string, receiver: image.ImageReceiver): Promise<void> { 
  let camerasDevices: Array<camera.CameraDevice> = cameraManager.getSupportedCameras(); // Get the supported camera devices 
  let profile: camera.Profile = { 
    format: 1003, // 1003 corresponds to camera.CameraFormat.CAMERA_FORMAT_YUV_420_SP 
    size: { 
      width: 1920, 
      height: 1080 
    } 
  }; 
  let previewProfilesObj: camera.Profile = profile 
  let previewProfilesObj2: camera.Profile = profile 
  // Create the output objects for preview streams 1 & 2 
  let previewOutput: camera.PreviewOutput = cameraManager.createPreviewOutput(previewProfilesObj, XComponentSurfaceId); 
  let imageReceiverSurfaceId: string = await receiver.getReceivingSurfaceId(); 
  let previewOutput2: camera.PreviewOutput = cameraManager.createPreviewOutput(previewProfilesObj2, imageReceiverSurfaceId); 
  if (videoRecorder) { 
    await videoRecorder.release() 
  } 
  videoRecorder = await media.createAVRecorder(); 
  await initUrl(); 
  vConfig(videoConfig) 
  await videoRecorder.prepare(videoConfig).catch((err: BusinessError): void => { 
    hilog.error(0x00000, TAG, 'err' + JSON.stringify(err)); 
  }); 
  let videoId = await videoRecorder.getInputSurface(); 
  let videoOutput = cameraManager.createVideoOutput(videoProfileObj, videoId); 
  // Create the CameraInput object 
  let cameraInput: camera.CameraInput = cameraManager.createCameraInput(camerasDevices[0]); 
  // Open the camera 
  await cameraInput.open(); 
  // Session flow 
  let captureSession: camera.CaptureSession = cameraManager.createCaptureSession(); 
  // Start configuring the session 
  captureSession.beginConfig(); 
  captureSession.addInput(cameraInput); 
  // Add preview streams 1 & 2 and the video output to the session 
  captureSession.addOutput(previewOutput) 
  captureSession.addOutput(previewOutput2); 
  captureSession.addOutput(videoOutput); 
  // Commit the configuration 
  await captureSession.commitConfig(); 
  await captureSession.start(); 
  await videoOutput.start() 
  await videoRecorder.start() 
  // Stop recording after 10 seconds; full resource release is shown in the sketch at the end of this answer 
  setTimeout(() => { 
    if (videoRecorder && videoOutput) { 
      videoRecorder.stop() 
      videoOutput.stop() 
    } 
  }, 10000) 
}
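
The page code above hard-codes the preview/video profile (format 1003, 1920x1080). In practice the profiles should be taken from what the device reports. Below is a minimal sketch, assuming an API-10-style camera interface; the function name queryProfiles is illustrative, and getSupportedOutputCapability and its field names may differ slightly across API versions.

import camera from '@ohos.multimedia.camera'; 
import common from '@ohos.app.ability.common'; 

// Minimal sketch: query the preview/video profiles the device actually supports, 
// instead of hard-coding format 1003 and 1920x1080. 
function queryProfiles(context: common.Context): void { 
  let cameraManager: camera.CameraManager = camera.getCameraManager(context); 
  let cameraDevices: Array<camera.CameraDevice> = cameraManager.getSupportedCameras(); 
  if (cameraDevices.length === 0) { 
    return; 
  } 
  // Output capability of the first camera device 
  let capability: camera.CameraOutputCapability = cameraManager.getSupportedOutputCapability(cameraDevices[0]); 
  let previewProfilesArray: Array<camera.Profile> = capability.previewProfiles; 
  let videoProfilesArray: Array<camera.VideoProfile> = capability.videoProfiles; 
  // Pick entries from these arrays for the XComponent surface, the ImageReceiver 
  // preview stream and the VideoOutput, rather than constructing them by hand. 
  console.info(`preview profiles: ${JSON.stringify(previewProfilesArray)}`); 
  console.info(`video profiles: ${JSON.stringify(videoProfilesArray)}`); 
} 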

Utility class: MediaUtils.ets

import mediaLibrary from '@ohos.multimedia.mediaLibrary'; 
import DateTimeUtil from './DateTimeUtil'; 
import hilog from '@ohos.hilog'; 
import common from '@ohos.app.ability.common'; 
const TAG: string = 'testTag'; 
enum PhotoType {  PNG = 0,  JPG,  BMP,  WEBP,  JPEG} 
export default class MediaUtils { 
  private mediaTest: mediaLibrary.MediaLibrary = undefined; 
  constructor(context: common.Context) { 
    this.mediaTest = mediaLibrary.getMediaLibrary(context); 
  } 
  private static instance: MediaUtils = undefined; 
  private num: number = 0; 
  public static getInstance(context: common.Context): MediaUtils { 
    if (this.instance === undefined) { 
      this.instance = new MediaUtils(context); 
    } 
    return this.instance; 
  } 
  async createAndGetUri(mediaType: number): Promise<mediaLibrary.FileAsset> { 
    let info = this.getInfoFromType(mediaType); 
    let dateTimeUtil = new DateTimeUtil(); 
    let name = `${dateTimeUtil.getDate()}_${dateTimeUtil.getTime()}`; 
    let displayName = `${info.prefix}${name}${info.suffix}`; 
    hilog.info(0x0000, TAG, `createAndGetUri displayName = ${displayName}, mediaType = ${mediaType}`); 
    let publicPath = await this.mediaTest.getPublicDirectory(info.directory); 
    hilog.debug(0x0000, TAG, `createAndGetUri publicPath = ${publicPath}`); 
    try { 
      return await this.mediaTest.createAsset(mediaType, displayName, publicPath); 
    } catch { 
      this.num++; 
      displayName = `${info.prefix}${name}_${this.num}${info.suffix}`; 
      return await this.mediaTest.createAsset(mediaType, displayName, publicPath); 
    } 
  } 
  async queryFile(fileAsset: mediaLibrary.FileAsset): Promise<mediaLibrary.FileAsset> { 
    let fileKeyObj = mediaLibrary.FileKey; 
    if (fileAsset !== undefined) { 
      let args = fileAsset.id.toString(); 
      let fetchOp: mediaLibrary.MediaFetchOptions = { 
        selections: `${fileKeyObj.ID}=?`, 
        selectionArgs: [args], 
      }; 
      const fetchFileResult: mediaLibrary.FetchFileResult = await this.mediaTest.getFileAssets(fetchOp); 
      hilog.info(0x0000, TAG, `fetchFileResult.getCount() = ${fetchFileResult.getCount()}`); 
      const fileAssets: Array<mediaLibrary.FileAsset> = await fetchFileResult.getAllObject(); 
      if (fileAssets.length) { 
        return fileAssets[0]; 
      } 
    } 
    return undefined; 
  } 
  async getFdPath(fileAsset: mediaLibrary.FileAsset): Promise<number> { 
    let fd: number = await fileAsset.open('Rw'); 
    hilog.info(0x0000, TAG, `fd = ${fd}`); 
    return fd; 
  } 
  async createFile(mediaType: number): Promise<number> { 
    let fileAsset = await this.createAndGetUri(mediaType); 
    if (fileAsset) { 
      fileAsset = await this.queryFile(fileAsset); 
      if (fileAsset) { 
        let fd = await this.getFdPath(fileAsset); 
        return fd; 
      } 
    } 
    return undefined; 
  } 
  onDateChange(callback: () => void): void { 
    this.mediaTest.on('albumChange', () => { 
      hilog.info(0x0000, TAG, 'albumChange called'); 
      callback(); 
    }); 
    this.mediaTest.on('imageChange', () => { 
      hilog.info(0x0000, TAG, 'imageChange called'); 
      callback(); 
    }); 
    this.mediaTest.on('audioChange', () => { 
      hilog.info(0x0000, TAG, 'audioChange called'); 
      callback(); 
    }); 
    this.mediaTest.on('videoChange', () => { 
      hilog.info(0x0000, TAG, 'videoChange called'); 
      callback(); 
    }); 
    this.mediaTest.on('fileChange', () => { 
      hilog.info(0x0000, TAG, 'fileChange called'); 
      callback(); 
    }); 
  } 
  offDateChange(): void { 
    this.mediaTest.off('albumChange'); 
    this.mediaTest.off('imageChange'); 
    this.mediaTest.off('audioChange'); 
    this.mediaTest.off('videoChange'); 
    this.mediaTest.off('fileChange'); 
  } 
  onChangePhotoFormat(): string { 
    if (globalThis.settingDataObj.photoFormat === PhotoType.PNG) { 
      return 'png'; 
    } 
    if (globalThis.settingDataObj.photoFormat === PhotoType.JPG) { 
      return 'jpg'; 
    } 
    if (globalThis.settingDataObj.photoFormat === PhotoType.BMP) { 
      return 'bmp'; 
    } 
    if (globalThis.settingDataObj.photoFormat === PhotoType.WEBP) { 
      return 'webp'; 
    } 
    if (globalThis.settingDataObj.photoFormat === PhotoType.JPEG) { 
      return 'jpeg'; 
    } 
    return undefined; 
  } 
  getInfoFromType(mediaType: number): { 
    prefix: string, 
    suffix: string, 
    directory: number 
  } { 
    let result = { 
      prefix: '', 
      suffix: '', 
      directory: 0 
    }; 
    switch (mediaType) { 
      case mediaLibrary.MediaType.FILE: 
        result.prefix = 'FILE_'; 
        result.suffix = '.txt'; 
        result.directory = mediaLibrary.DirectoryType.DIR_DOCUMENTS; 
        break; 
      case mediaLibrary.MediaType.IMAGE: 
        result.prefix = 'IMG_'; 
        result.suffix = `.${this.onChangePhotoFormat()}`; 
        result.directory = mediaLibrary.DirectoryType.DIR_CAMERA; 
        break; 
      case mediaLibrary.MediaType.VIDEO: 
        result.prefix = 'VID_'; 
        result.suffix = '.mp4'; 
        result.directory = mediaLibrary.DirectoryType.DIR_VIDEO; 
        break; 
      case mediaLibrary.MediaType.AUDIO: 
        result.prefix = 'AUD_'; 
        result.suffix = '.wav'; 
        result.directory = mediaLibrary.DirectoryType.DIR_AUDIO; 
        break; 
    } 
    return result; 
  } 
} 
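
Both the camera/recorder pipeline in SluPage.ets and the mediaLibrary calls in MediaUtils.ets require runtime permissions. A minimal request sketch is shown below, assuming the same permissions are declared in module.json5; the function name requestPermissions is illustrative, and the exact permission set (e.g. READ_MEDIA/WRITE_MEDIA versus the newer photoAccessHelper permissions) depends on the API version.

import abilityAccessCtrl, { Permissions } from '@ohos.abilityAccessCtrl'; 
import common from '@ohos.app.ability.common'; 
import { BusinessError } from '@ohos.base'; 

// Minimal sketch: permissions needed for dual-channel preview + recording 
const PERMISSION_LIST: Array<Permissions> = [ 
  'ohos.permission.CAMERA', 
  'ohos.permission.MICROPHONE', 
  'ohos.permission.READ_MEDIA', 
  'ohos.permission.WRITE_MEDIA' 
]; 

function requestPermissions(context: common.UIAbilityContext): void { 
  let atManager = abilityAccessCtrl.createAtManager(); 
  atManager.requestPermissionsFromUser(context, PERMISSION_LIST) 
    .then((result) => { 
      // authResults: 0 means granted, -1 means denied 
      console.info(`grant results: ${JSON.stringify(result.authResults)}`); 
    }) 
    .catch((err: BusinessError) => { 
      console.error(`requestPermissionsFromUser failed: ${JSON.stringify(err)}`); 
    }); 
} 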
Utility class: DateTimeUtil.ts

/** 
 * @file Date/time utility 
 */ 
export default class DateTimeUtil { 
 
  /** 
   * Hours, minutes and seconds 
   */ 
  getTime(): string { 
    const DATETIME = new Date(); 
    return this.concatTime(DATETIME.getHours(), DATETIME.getMinutes(), DATETIME.getSeconds()); 
  } 
 
  /** 
   * Year, month and day 
   */ 
  getDate(): string { 
    const DATETIME = new Date(); 
    return this.concatDate(DATETIME.getFullYear(), DATETIME.getMonth() + 1, DATETIME.getDate()); 
  } 
 
  /** 
   * Pad a value to two digits with a leading zero 
   * @param value - the value to pad 
   */ 
  fill(value: number): string { 
    let maxNumber = 9; 
    return (value > maxNumber ? '' : '0') + value; 
  } 
  /** 
   * Format elapsed recording time for display 
   * @param millisecond - elapsed time in milliseconds 
   */ 
  getVideoTime(millisecond: number): string { 
    let millisecond2minute = 60000; 
    let millisecond2second = 1000; 
    let minute = Math.floor(millisecond / millisecond2minute); 
    let second = Math.floor((millisecond - minute * millisecond2minute) / millisecond2second); 
    return `${this.fill(minute)} : ${this.fill(second)}`; 
  } 
  /** 
   * Format year/month/day 
   * @param year 
   * @param month 
   * @param date 
   */ 
  concatDate(year: number, month: number, date: number): string { 
    return `${year}${this.fill(month)}${this.fill(date)}`; 
  } 
 
  /** 
   * Format hours/minutes/seconds 
   * @param hours 
   * @param minutes 
   * @param seconds 
   */ 
  concatTime(hours: number, minutes: number, seconds: number): string { 
    return `${this.fill(hours)}${this.fill(minutes)}${this.fill(seconds)}`; 
  } 
}
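
Finally, the page code above stops the recorder and the video output after 10 seconds but never releases the session, camera or recorder. A minimal cleanup sketch is shown below; the function name stopAndRelease and the parameter names are illustrative and assume the objects created in createDualChannelPreview are still accessible.

import camera from '@ohos.multimedia.camera'; 
import media from '@ohos.multimedia.media'; 

// Minimal sketch: stop and release everything once recording is finished 
async function stopAndRelease(captureSession: camera.CaptureSession, cameraInput: camera.CameraInput, 
  videoOutput: camera.VideoOutput, avRecorder: media.AVRecorder): Promise<void> { 
  await avRecorder.stop();         // stop writing to the output file first 
  await videoOutput.stop();        // then stop the camera video stream 
  await captureSession.stop();     // stop the capture session 
  await captureSession.release();  // release session resources 
  await cameraInput.close();       // close the camera device 
  await avRecorder.release();      // release the recorder; the fd opened in MediaUtils can be closed afterwards 
} 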