總共兩個類,放進代碼里就可以快速實現K歌(邊錄邊播)的效果;不過在應用層這樣做延遲比較高,這里只是做一個分享。
類代碼
import { audio } from '@kit.AudioKit';
import { BusinessError } from '@kit.BasicServicesKit';
import { AudioBufferFlow, AudioRingBuffer } from './AudioRingBuffer';
import { abilityAccessCtrl, PermissionRequestResult, Permissions } from '@kit.AbilityKit';
import { fileIo, WriteOptions } from '@kit.CoreFileKit';export class AudioRenderUtil {private readonly tag = "AudioRenderUtil";private audioRenderer?: audio.AudioRenderer;/**如果需要調試,存儲一份 pcm,可以把這里設置 true,拉文件出來,命令看官方文檔 */private readonly withWrite = false;private targetFile?: fileIo.File;private bufferSize = 0;/** RingBuffer 環緩沖區 */private ringBuffer: AudioRingBuffer;constructor(context: Context,streamInfo: audio.AudioStreamInfo,renderInfo: audio.AudioRendererInfo,) {this.ringBuffer = new AudioRingBuffer(streamInfo, 0.8, 0.2);const option: audio.AudioRendererOptions = {streamInfo: streamInfo,rendererInfo: renderInfo}LsLog.i(this.tag, `create by ${JSON.stringify(option)}`);if (this.withWrite) {try {this.targetFile = fileIo.openSync(context.cacheDir + `/renderer-test.pcm`,fileIo.OpenMode.READ_WRITE | fileIo.OpenMode.CREATE)LsLog.i(this.tag, `open file with path: ${this.targetFile.path}`);} catch (e) {LsLog.e(this.tag, `open file failed! -> ${(e as BusinessError).code}:${(e as BusinessError).message}`);}}audio.createAudioRenderer(option,(error, renderer) => {if (error) {LsLog.e(this.tag, `create audio renderer failed! 
-> ${error.code}:${error.message}`);} else {LsLog.i(this.tag, 'create audio renderer success');this.audioRenderer = renderer;if (renderer) {if (this.withWrite) {renderer.on('writeData', (buffer) => {this.ringBuffer.outFlow(buffer);if (this.targetFile) {const options: WriteOptions = {offset: this.bufferSize,length: buffer.byteLength,}renderer.setVolume(0.75);fileIo.writeSync(this.targetFile.fd, buffer, options);this.bufferSize += buffer.byteLength;}return audio.AudioDataCallbackResult.VALID;});} else {renderer.on('writeData', (buffer) => {this.ringBuffer.outFlow(buffer);return audio.AudioDataCallbackResult.VALID;});}}}});}/** 獲取輸入流入口 */get inFlow(): AudioBufferFlow {return this.ringBuffer.inFlow;}/** 開始播放 */start(): void {LsLog.i(this.tag, `do start, current state is [${this.audioRenderer?.state}]`);if (this.audioRenderer !== undefined) {let stateGroup = [audio.AudioState.STATE_PREPARED, audio.AudioState.STATE_PAUSED, audio.AudioState.STATE_STOPPED];if (stateGroup.indexOf(this.audioRenderer.state.valueOf()) === -1) {// 當且僅當狀態為prepared、paused和stopped之一時才能啟動渲染。LsLog.e(this.tag, 'start failed');return;}// 開始播放。this.audioRenderer.start((err: BusinessError) => {if (err) {LsLog.e(this.tag, `Renderer start failed. -> [${err.code}]:${err.message}`);} else {LsLog.i(this.tag, 'Renderer start success.');}this.ringBuffer.start();});}}/** 停止播放 */stop(): void {LsLog.i(this.tag, `do stop, current state is [${this.audioRenderer?.state}]`);if (this.audioRenderer !== undefined) {const notRunning = this.audioRenderer.state.valueOf() !== audio.AudioState.STATE_RUNNING;const notPaused = this.audioRenderer.state.valueOf() !== audio.AudioState.STATE_PAUSED;if (notRunning && notPaused) {// 只有渲染器狀態為running或paused的時候才可以停止。LsLog.i(this.tag, 'Renderer is not running or paused');return;}// 停止渲染。this.audioRenderer.stop((err: BusinessError) => {if (err) {LsLog.e(this.tag, `Renderer stop failed. 
-> [${err.code}]:${err.message}`);} else {LsLog.i(this.tag, 'Renderer stop success.');}this.ringBuffer.reset();});}}/** 釋放資源 */release(): void {if (this.audioRenderer) {this.audioRenderer.release((err: BusinessError) => {if (err) {LsLog.w(this.tag, `release failed! -> ${err.code}: ${err.message}`);} else {LsLog.i(this.tag, 'release success.')}})this.audioRenderer = undefined;}this.ringBuffer.reset();if (this.targetFile) {fileIo.close(this.targetFile.fd);this.targetFile = undefined;}}
}

/**
 * Recording side of the karaoke loop: requests the microphone permission,
 * creates an AudioCapturer and forwards every captured buffer into `flow`
 * (typically an AudioRingBuffer's inFlow).
 */
export class AudioCaptureUtil {
  private readonly tag = "AudioCaptureUtil";
  private audioCapturer?: audio.AudioCapturer;
  /** start() request that arrived before the capturer existed; replayed once prepare() completes. */
  private waitStartTask?: () => void;
  /** Set to true to also dump the captured PCM to a cache file for debugging. */
  private readonly withWrite = false;
  private targetFile?: fileIo.File;
  /** Byte offset of the next debug-file write. */
  private bufferSize = 0;

  /**
   * @param context ability context used for the permission request and debug file
   * @param options capturer stream/capturer info
   * @param flow sink that receives every captured PCM buffer
   */
  constructor(context: Context, options: audio.AudioCapturerOptions, flow: AudioBufferFlow) {
    let permissions: Array<Permissions> = ['ohos.permission.MICROPHONE'];
    let atManager = abilityAccessCtrl.createAtManager();
    try {
      atManager.requestPermissionsFromUser(context, permissions,
        async (err: BusinessError, data: PermissionRequestResult) => {
          if (err) {
            LsLog.e(this.tag, `Request permission failed: ${err.message}`);
          } else if (data.authResults.includes(-1) || data.authResults.includes(2)) {
            // NOTE(review): -1 / 2 are treated as "denied" — confirm against the
            // abilityAccessCtrl GrantStatus values for the target SDK.
            LsLog.e(this.tag, 'User denied permission');
          } else {
            // Only create the capturer after the user granted the microphone.
            this.prepare(options, flow);
          }
        });
    } catch (err) {
      // FIX: cast the untyped catch parameter like every other catch block in
      // this file instead of reading .message off an unknown value.
      LsLog.e(this.tag, `Request permission error: ${(err as BusinessError).message}`);
    }
    if (this.withWrite) {
      try {
        this.targetFile = fileIo.openSync(context.cacheDir + `/capturer-test.pcm`,
          fileIo.OpenMode.READ_WRITE | fileIo.OpenMode.CREATE)
        LsLog.i(this.tag, `open file with path: ${this.targetFile.path}`);
      } catch (e) {
        LsLog.e(this.tag, `open file failed! -> ${(e as BusinessError).code}:${(e as BusinessError).message}`);
      }
    }
  }

  /** Creates the capturer and wires its readData callback to `flow`. */
  private prepare(options: audio.AudioCapturerOptions, flow: AudioBufferFlow) {
    LsLog.i(this.tag, `create by ${JSON.stringify(options)}`);
    this.bufferSize = 0;
    audio.createAudioCapturer(options, (error, capture) => {
      if (error) {
        LsLog.e(this.tag, `create audio capture failed! 
-> ${error.code}:${error.message}`);
      } else {
        LsLog.i(this.tag, 'create audio capture success');
        this.audioCapturer = capture;
        if (capture) {
          // Single callback for both normal and debug modes (previously duplicated):
          // optionally dump, then always forward to the sink.
          capture.on('readData', (buffer) => {
            this.dumpIfNeeded(buffer);
            flow(buffer);
          });
          // Replay a start() that arrived while the permission flow was pending.
          if (this.waitStartTask) {
            this.start(this.waitStartTask);
          }
        }
      }
    })
  }

  /** Appends one captured buffer to the debug PCM file when dumping is enabled. */
  private dumpIfNeeded(buffer: ArrayBuffer): void {
    if (!this.withWrite || !this.targetFile) {
      return;
    }
    const options: WriteOptions = {
      offset: this.bufferSize,
      length: buffer.byteLength,
    }
    fileIo.writeSync(this.targetFile.fd, buffer, options);
    this.bufferSize += buffer.byteLength;
  }

  /**
   * Starts recording. If the capturer is not created yet (permission flow still
   * pending) the request is parked and replayed by prepare().
   * @param onStart invoked once the capturer actually started
   */
  start(onStart: () => void): void {
    LsLog.i(this.tag, `do start, current state is [${this.audioCapturer?.state}]`);
    if (this.audioCapturer !== undefined) {
      this.waitStartTask = undefined;
      let stateGroup = [audio.AudioState.STATE_PREPARED, audio.AudioState.STATE_PAUSED, audio.AudioState.STATE_STOPPED];
      if (stateGroup.indexOf(this.audioCapturer.state.valueOf()) === -1) {
        // Capture can only be started from prepared/paused/stopped.
        LsLog.e(this.tag, 'start failed');
        return;
      }
      this.audioCapturer.start((err: BusinessError) => {
        if (err) {
          LsLog.e(this.tag, `Capturer start failed. -> [${err.code}]:${err.message}`);
        } else {
          LsLog.i(this.tag, 'Capturer start success.');
          onStart();
        }
      });
    } else {
      this.waitStartTask = onStart;
    }
  }

  /** Stops recording; only legal from the running or paused state. */
  stop(): void {
    LsLog.i(this.tag, `do stop, current state is [${this.audioCapturer?.state}]`);
    this.waitStartTask = undefined;
    if (this.audioCapturer !== undefined) {
      // The capturer can only be stopped while running or paused.
      const notRunning = this.audioCapturer.state.valueOf() !== audio.AudioState.STATE_RUNNING;
      const notPaused = this.audioCapturer.state.valueOf() !== audio.AudioState.STATE_PAUSED;
      if (notRunning && notPaused) {
        LsLog.i(this.tag, 'Capturer is not running or paused');
        return;
      }
      this.audioCapturer.stop((err: BusinessError) => {
        if (err) {
          LsLog.e(this.tag, `Capturer stop failed. 
-> [${err.code}]:${err.message}`);
        } else {
          LsLog.i(this.tag, 'Capturer stop success.');
        }
      });
    }
  }

  /** Releases the capturer, drops any pending start and closes the debug file. */
  release(): void {
    if (this.audioCapturer) {
      this.audioCapturer.release((err: BusinessError) => {
        if (err) {
          LsLog.w(this.tag, `release failed! -> ${err.code}: ${err.message}`);
        } else {
          LsLog.i(this.tag, 'release success.')
        }
      })
      this.audioCapturer = undefined;
    }
    this.waitStartTask = undefined;
    if (this.targetFile) {
      fileIo.close(this.targetFile.fd);
      this.targetFile = undefined;
    }
  }
}
import { audio } from '@kit.AudioKit';

// Log tag for this module.
const tag = "AudioRingBuffer";

/** Audio buffer transfer stream: a sink that consumes one PCM ArrayBuffer. */
export type AudioBufferFlow = (buffer: ArrayBuffer) => void;

/** Copies one sample between DataViews; offsets are in sample units, not bytes. */
type DataViewCopy = (from: DataView, to: DataView, fromOffset: number, toOffset: number) => void;

/** Running state of the ring buffer (stored in the atomic state array). */
enum RunningState {
  /** Stopped */
  Stop = 0,
  /** Waiting for the buffer to fill up to the read threshold */
  WaitingBuffer = 1,
  /** Actively running */
  Running = 2,
}

/** Indices into the Int32Array state word, accessed via Atomics. */
enum StateIndex {
  RunningState = 0,
  ReadPos = 1,
  WritePos = 2,
}

/** Ring buffer for audio PCM data. */
export class AudioRingBuffer {/** 緩沖區存儲 */private buffer: SharedArrayBuffer;/** 緩沖區視圖(用于實際讀寫操作) */private bufferView: DataView;/** dataViewCopy 數據移動 */private dataViewCopy: DataViewCopy;/** 實際 DataView 可訪問的范圍 */private readonly bufferSize: number;/** 狀態、讀寫位置指針 */private state = new Int32Array([RunningState.Stop, 0, 1]);/** 音頻輸入流:將外部數據寫入環形緩沖區 */readonly inFlow: AudioBufferFlow = (inBuffer) => {this.workInRunning(() => this.writeToBuffer(inBuffer));};/** 音頻輸出流:從環形緩沖區讀取數據到外部 */readonly outFlow: AudioBufferFlow = (outBuffer) => {this.workInRunning(() => this.readFromBuffer(outBuffer));}/** 獲取 DataView 視圖的長度 */private dataViewLen: (dataView: DataView) => number;/** Buffer 發聲 threshold,buffer 到了此比例才會發聲 */private readonly readThreshold: number;/*** 構造音頻環形緩沖區* @param streamInfo 音頻格式* @param bufferDuration 緩沖時長(秒),建議0.1-1.0之間* @param readThreshold 首幀讀取閾值,增加這個值會增加延遲,降低有可能首幀斷音*/constructor(streamInfo: audio.AudioStreamInfo, bufferDuration: number = 0.5, readThreshold: number = 0.5) {if (bufferDuration <= 0 || bufferDuration > 1) {const def = 0.5;LsLog.w(tag, `unavalibale bufferDuration: ${bufferDuration}, use default => ${def}`);bufferDuration = def;}if (readThreshold <= 0 || readThreshold > 1) {const def = 0.5;LsLog.w(tag, `unavalibale readThreshold: ${readThreshold}, use default => ${def}`);readThreshold = def;}this.readThreshold = readThreshold;// 計算緩沖區大小:根據音頻參數動態計算// 每秒音頻數據量const bytesPerSample = this.calcBytesPerSample(streamInfo.sampleFormat);const bytesPerSecond = streamInfo.samplingRate * streamInfo.channels * bytesPerSample;let bufferSize = Math.ceil(bytesPerSecond * bufferDuration); // 緩沖時長對應的字節數// 確保緩沖區大小至少為1024字節,避免過小導致頻繁溢出bufferSize = Math.max(bufferSize, 1024);// 初始化緩沖區this.buffer = new SharedArrayBuffer(bufferSize);this.bufferView = new DataView(this.buffer);this.dataViewLen = (view) => Math.ceil(view.byteLength / bytesPerSample);this.bufferSize = this.dataViewLen(this.bufferView);// 初始化讀取器、寫入器、視圖生成器this.dataViewCopy = 
this.generateDataViewCopy(streamInfo.sampleFormat);LsLog.i(tag,`audio buffer init with ${bufferSize} bytes, duration: ${bufferDuration}s`);}/** 生成 buffer copy */private generateDataViewCopy(format: audio.AudioSampleFormat): DataViewCopy {switch (format) {case audio.AudioSampleFormat.SAMPLE_FORMAT_U8:return (from, to, fromOffset, toOffset) => to.setUint8(toOffset, from.getUint8(fromOffset));case audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE:return (from, to, fromOffset, toOffset) => to.setInt16(toOffset * 2, from.getInt16(fromOffset * 2, true), true);case audio.AudioSampleFormat.SAMPLE_FORMAT_S24LE:return (from, to, fromOffset, toOffset) => {const rawValue = from.getUint8(fromOffset * 4) |(from.getUint8(fromOffset * 4 + 1) << 8) |(from.getUint8(fromOffset * 4 + 2) << 16);// 處理符號擴展const sign = rawValue & 0x800000 ? -1 : 1;const adjustedValue = sign * (rawValue & 0x7FFFFF);to.setInt32(toOffset * 4, adjustedValue, true);}case audio.AudioSampleFormat.SAMPLE_FORMAT_S32LE:return (from, to, fromOffset, toOffset) => to.setInt32(toOffset * 4, from.getInt32(fromOffset * 4, true), true);default:return (from, to, fromOffset, toOffset) => to.setUint8(toOffset, from.getUint8(fromOffset));}}/** 計算每個采樣點的數據量 */private calcBytesPerSample(format: audio.AudioSampleFormat): number {switch (format) {case audio.AudioSampleFormat.SAMPLE_FORMAT_U8:return 1;case audio.AudioSampleFormat.SAMPLE_FORMAT_S16LE:return 2;case audio.AudioSampleFormat.SAMPLE_FORMAT_S24LE:return 4;case audio.AudioSampleFormat.SAMPLE_FORMAT_S32LE:return 4;default:return 1;}}/*** 在運行狀態下執行任務* @param task 要執行的任務函數*/private workInRunning(task: () => void) {try {if (Atomics.load(this.state, 0) !== RunningState.Stop) {task();}} catch (err) {LsLog.e(tag, `任務執行錯誤: ${err}`);}}/*** 計算當前可用空間大小* 實際可用空間 = 總容量 - 已使用空間 - 1(預留判斷位)*/private getAvailableSpace(): number {return this.bufferSize - 1 - this.getUsedSpace();}/*** 計算當前已使用空間大小*/private getUsedSpace(): number {return (this.getState(StateIndex.WritePos) - 
this.getState(StateIndex.ReadPos) + this.bufferSize) % this.bufferSize;}/*** 將數據寫入環形緩沖區* @param inBuffer 輸入數據緩沖區*/private writeToBuffer(inBuffer: ArrayBuffer): void {const inputData = new DataView(inBuffer);const inputLength = this.dataViewLen(inputData);if (inputLength <= 0) {return;}// 獲取可用空間并計算實際可寫入長度const availableSpace = this.getAvailableSpace();if (inputLength > availableSpace) {LsLog.w(tag,`buffer fulled! has use ${this.getUsedSpace()}, available: ${availableSpace}`);return;}// 處理寫入(分是否需要環繞兩種情況)const writePos = this.getState(StateIndex.WritePos);const contiguousSpace = this.bufferSize - writePos;if (inputLength <= contiguousSpace) {// 無需環繞,直接寫入for (let i = 0; i < inputLength; i++) {this.dataViewCopy(inputData, this.bufferView, i, writePos + i);}this.setState(StateIndex.WritePos, (writePos + inputLength) % this.bufferSize);} else {// 需要環繞,分兩部分寫入for (let i = 0; i < contiguousSpace; i++) {this.dataViewCopy(inputData, this.bufferView, i, writePos + i);}const remaining = inputLength - contiguousSpace;for (let i = 0; i < remaining; i++) {this.dataViewCopy(inputData, this.bufferView, contiguousSpace + i, i);}this.setState(StateIndex.WritePos, remaining);}}/*** 從環形緩沖區讀取數據* @param outBuffer 輸出數據緩沖區*/private readFromBuffer(outBuffer: ArrayBuffer): void {const outputData = new DataView(outBuffer);const outputLength = this.dataViewLen(outputData);if (outputLength <= 0) {return;}// 計算可讀取數據量const usedSpace = this.getUsedSpace();if (this.getState(StateIndex.RunningState) === RunningState.WaitingBuffer) {if (usedSpace / this.bufferSize < this.readThreshold) {for (let i = 0; i < outputLength; i++) {outputData.setInt8(i, 0);}return;}}this.setState(StateIndex.RunningState, RunningState.Running);const readLength = Math.min(outputLength, usedSpace);// 處理讀取(分是否需要環繞兩種情況)const readPos = this.getState(StateIndex.ReadPos);const contiguousData = this.bufferSize - readPos;if (readLength <= contiguousData) {for (let i = 0; i < readLength; i++) {this.dataViewCopy(this.bufferView, 
outputData, readPos + i, i);}this.setState(StateIndex.ReadPos, (readPos + readLength) % this.bufferSize);} else {for (let i = 0; i < contiguousData; i++) {this.dataViewCopy(this.bufferView, outputData, readPos + i, i);}const remaining = readLength - contiguousData;for (let i = 0; i < remaining; i++) {this.dataViewCopy(this.bufferView, outputData, i, contiguousData + i);}this.setState(StateIndex.ReadPos, remaining);}if (readLength < outputLength) {LsLog.w(tag, `read ${outputLength}, but real avalible just ${readLength}, others fill with 0`);for (let i = readLength; i < outputLength; i++) {outputData.setInt8(i, 0);}}}private getState(index: StateIndex): number {return Atomics.load(this.state, index);}private setState(index: StateIndex, value: number) {Atomics.store(this.state, index, value);}/*** 開始流傳輸*/start() {this.setState(StateIndex.RunningState, RunningState.WaitingBuffer);LsLog.i(tag, "buffer start running");}/*** 重置流(清空緩沖區并重置指針)*/reset() {this.setState(StateIndex.RunningState, RunningState.Stop);this.setState(StateIndex.ReadPos, 0);this.setState(StateIndex.WritePos, 1);LsLog.i(tag, "buffer has reset");}
}
調用
1. 初始化
// Create the playback side; its inFlow is the sink the capturer will feed.
// NOTE(review): `render.renderInfo` and `capture.captureInfo` read the very
// variable being constructed — presumably these come from a separate config
// object in the real code; verify before copying.
render = new AudioRenderUtil(context, streamInfo, render.renderInfo);
recordFlow = this.render.inFlow;
// Create the recording side, wired directly into the renderer's ring buffer.
capture = new AudioCaptureUtil(context, {streamInfo: streamInfo,capturerInfo: capture.captureInfo}, recordFlow);
2. 開始
/** Start capture/render */
private _startKaraoke() {
  this.capture?.start(() => {
    // Only start playback once recording has actually started successfully.
    this.render?.start();
  });
}
3. 停止
/** Stop capture/render */
private _stopKaraoke() {
  // Stop the recorder first so no new data flows into the renderer's buffer.
  this.capture?.stop();
  this.render?.stop();
}
4. 釋放
// Stop both ends, then release and drop each so they cannot be reused.
onRelease(): void {
  this._stopKaraoke();
  this.capture?.release();
  this.capture = undefined;
  this.render?.release();
  this.render = undefined;
}