Files
qingdao-employment-service/hook/useRealtimeRecorderOnce.js
2025-12-29 14:08:11 +08:00

657 lines
20 KiB
JavaScript
Raw Permalink Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

import {
ref,
onUnmounted
} from 'vue'
import config from '@/config'
/**
 * Composable for one-shot realtime voice recording with live volume
 * metering, a synthetic waveform for display, and server-side speech
 * recognition (ASR). Produces 16 kHz mono 16-bit PCM WAV on both the
 * H5 (Web Audio) and APP/mini-program (uni recorder) platforms.
 */
export function useRealtimeRecorderOnce() {
  // --- Reactive state ---
  const isRecording = ref(false)      // capture currently active
  const isProcessing = ref(false)     // WAV encode / ASR upload in progress
  const recordingDuration = ref(0)    // elapsed seconds of the current take
  const volumeLevel = ref(0) // 0-100
  const recognizedText = ref('')      // ASR result text
  const audioData = ref(null)         // final WAV Blob
  const audioDataForDisplay = ref([]) // 31 values in [0,1] for the wave widget
  // --- Internal timers ---
  let durationTimer = null
  // --- APP / mini-program recorder state ---
  let recorderManager = null;
  let appAudioChunks = [];            // raw Int16 PCM frame ArrayBuffers
  // --- H5 (Web Audio) recorder state ---
  let audioContext = null;
  // NOTE(review): mediaRecorder is never assigned anywhere in this file —
  // appears to be a leftover from a MediaRecorder-based implementation.
  let mediaRecorder = null;
  let h5Stream = null;
  let h5AudioChunks = [];             // Float32Array sample chunks
  let analyser = null;
  let dataArray = null;
  // --- Recorder configuration (passed to uni recorder .start()) ---
  const RECORD_CONFIG = {
    duration: 600000,                 // max recording length: 10 minutes
    sampleRate: 16000,
    numberOfChannels: 1,
    format: 'wav',
    encodeBitRate: 16000,
    // NOTE(review): uni-app documents frameSize in KB per onFrameRecorded
    // callback — confirm the intended chunking granularity.
    frameSize: 4096
  }
  // --- WAV encoding helpers ---
const encodeWAV = (samples, sampleRate = 16000, numChannels = 1, bitsPerSample = 16) => {
const bytesPerSample = bitsPerSample / 8;
const blockAlign = numChannels * bytesPerSample;
const byteRate = sampleRate * blockAlign;
const dataSize = samples.length * bytesPerSample;
const buffer = new ArrayBuffer(44 + dataSize);
const view = new DataView(buffer);
// RIFF chunk descriptor
writeString(view, 0, 'RIFF');
view.setUint32(4, 36 + dataSize, true);
writeString(view, 8, 'WAVE');
// fmt sub-chunk
writeString(view, 12, 'fmt ');
view.setUint32(16, 16, true); // Subchunk1Size (16 for PCM)
view.setUint16(20, 1, true); // AudioFormat (1 for PCM)
view.setUint16(22, numChannels, true);
view.setUint32(24, sampleRate, true);
view.setUint32(28, byteRate, true);
view.setUint16(32, blockAlign, true);
view.setUint16(34, bitsPerSample, true);
// data sub-chunk
writeString(view, 36, 'data');
view.setUint32(40, dataSize, true);
// Write audio samples
const volume = 1;
let offset = 44;
for (let i = 0; i < samples.length; i++) {
let sample = Math.max(-1, Math.min(1, samples[i]));
sample = sample * volume;
view.setInt16(offset, sample < 0 ? sample * 0x8000 : sample * 0x7FFF, true);
offset += 2;
}
return buffer;
}
const writeString = (view, offset, string) => {
for (let i = 0; i < string.length; i++) {
view.setUint8(offset + i, string.charCodeAt(i));
}
}
const floatTo16BitPCM = (output, offset, input) => {
for (let i = 0; i < input.length; i++, offset += 2) {
const s = Math.max(-1, Math.min(1, input[i]));
output.setInt16(offset, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
}
}
// --- 音量计算函数 ---
const calculateVolumeFromFloat32 = (float32Array) => {
if (!float32Array || float32Array.length === 0) return 0;
let sum = 0;
const length = float32Array.length;
// 计算RMS (均方根)
for (let i = 0; i < length; i++) {
// 绝对值方法更敏感
const absValue = Math.abs(float32Array[i]);
sum += absValue * absValue;
}
const rms = Math.sqrt(sum / length);
// 调试打印原始RMS值
// console.log('Float32 RMS:', rms);
// 转换为0-100的值
// 使用对数刻度,使小音量变化更明显
let volume = 0;
if (rms > 0) {
// 使用对数转换:-60dB到0dB映射到0-100
const db = 20 * Math.log10(rms);
// 静音阈值约-60dB
if (db > -60) {
volume = Math.min(100, Math.max(0, (db + 60) / 0.6));
}
}
// 如果没有计算到值使用旧方法作为fallback
if (volume === 0 && rms > 0) {
volume = Math.min(100, Math.floor(rms * 500));
}
// 确保最小值为0
return Math.max(0, volume);
}
const calculateVolumeFromInt16 = (int16Array) => {
if (!int16Array || int16Array.length === 0) return 0;
let sum = 0;
const length = int16Array.length;
// 计算RMS
for (let i = 0; i < length; i++) {
const normalized = int16Array[i] / 32768; // 归一化到[-1, 1]
const absValue = Math.abs(normalized);
sum += absValue * absValue;
}
const rms = Math.sqrt(sum / length);
// 调试打印原始RMS值
// console.log('Int16 RMS:', rms);
// 转换为0-100的值
// 使用对数刻度
let volume = 0;
if (rms > 0) {
const db = 20 * Math.log10(rms);
if (db > -60) {
volume = Math.min(100, Math.max(0, (db + 60) / 0.6));
}
}
// 如果没有计算到值使用旧方法作为fallback
if (volume === 0 && rms > 0) {
volume = Math.min(100, Math.floor(rms * 500));
}
// 确保最小值为0
return Math.max(0, volume);
}
/**
* 开始录音 (入口)
*/
const startRecording = async () => {
if (isRecording.value) return
try {
recognizedText.value = ''
volumeLevel.value = 0
audioData.value = null
audioDataForDisplay.value = []
appAudioChunks = []
h5AudioChunks = []
// #ifdef H5
if (location.protocol !== 'https:' && location.hostname !== 'localhost') {
uni.showToast({
title: 'H5录音需要HTTPS环境',
icon: 'none'
});
return;
}
// #endif
// #ifdef H5
await startH5Recording();
// #endif
// #ifndef H5
startAppRecording();
// #endif
isRecording.value = true;
recordingDuration.value = 0;
durationTimer = setInterval(() => recordingDuration.value++, 1000);
// 启动波形显示更新
updateAudioDataForDisplay();
} catch (err) {
console.error('启动失败:', err);
uni.showToast({
title: '启动失败: ' + (err.message || ''),
icon: 'none'
});
cleanup();
}
}
/**
* H5录音实现 - 手动构建WAV文件
*/
const startH5Recording = async () => {
try {
// 1. 获取麦克风流
const stream = await navigator.mediaDevices.getUserMedia({
audio: {
sampleRate: 16000,
channelCount: 1,
echoCancellation: true,
noiseSuppression: true,
autoGainControl: false
}
});
h5Stream = stream;
// 2. 创建 AudioContext 用于处理音频
const AudioContext = window.AudioContext || window.webkitAudioContext;
audioContext = new AudioContext({
sampleRate: 16000,
latencyHint: 'interactive'
});
// 创建音频处理节点
const source = audioContext.createMediaStreamSource(stream);
// 创建分析器用于音量计算
analyser = audioContext.createAnalyser();
analyser.fftSize = 256;
analyser.smoothingTimeConstant = 0.8;
dataArray = new Float32Array(analyser.frequencyBinCount);
source.connect(analyser);
// 创建脚本处理器用于收集音频数据
const processor = audioContext.createScriptProcessor(4096, 1, 1);
// 存储所有音频样本
let audioSamples = [];
processor.onaudioprocess = (e) => {
if (!isRecording.value) return;
// 获取输入数据
const inputData = e.inputBuffer.getChannelData(0);
// 计算音量
analyser.getFloatTimeDomainData(dataArray);
const volume = calculateVolumeFromFloat32(dataArray);
volumeLevel.value = volume;
// 收集音频样本
for (let i = 0; i < inputData.length; i++) {
audioSamples.push(inputData[i]);
}
// 存储当前音频数据块
const buffer = new Float32Array(inputData.length);
buffer.set(inputData);
h5AudioChunks.push(buffer);
};
source.connect(processor);
processor.connect(audioContext.destination);
// console.log('H5 16kHz WAV录音已启动');
} catch (err) {
console.error('H5录音启动失败:', err);
throw err;
}
}
/**
* 停止H5录音资源
*/
const stopH5Resources = () => {
// 断开所有连接
if (audioContext && audioContext.state !== 'closed') {
audioContext.close();
}
// 停止音轨
if (h5Stream) {
h5Stream.getTracks().forEach(track => track.stop());
}
audioContext = null;
analyser = null;
dataArray = null;
h5Stream = null;
}
/**
* APP/小程序录音实现
*/
const startAppRecording = () => {
recorderManager = uni.getRecorderManager();
recorderManager.onFrameRecorded((res) => {
const { frameBuffer } = res;
if (frameBuffer && frameBuffer.byteLength > 0) {
// 计算音量
const int16Data = new Int16Array(frameBuffer);
const volume = calculateVolumeFromInt16(int16Data);
volumeLevel.value = volume;
// 保存音频数据
appAudioChunks.push(frameBuffer);
}
});
recorderManager.onStart(() => {
// console.log('APP 16kHz WAV录音已开始');
});
recorderManager.onError((err) => {
console.error('APP录音报错:', err);
uni.showToast({
title: '录音失败: ' + err.errMsg,
icon: 'none'
});
cleanup();
});
recorderManager.start(RECORD_CONFIG);
}
/**
* 停止录音 (通用)
*/
const stopRecording = async () => {
if (!isRecording.value) return;
isRecording.value = false;
clearInterval(durationTimer);
// 停止硬件录音
stopHardwareResource();
// 处理录音数据
await processAudioData();
}
/**
* 取消录音
*/
const cancelRecording = () => {
if (!isRecording.value) return;
// console.log('取消录音 - 丢弃结果');
// 1. 停止硬件录音
stopHardwareResource();
// 2. 清理状态
recognizedText.value = '';
audioData.value = null;
audioDataForDisplay.value = [];
appAudioChunks = [];
h5AudioChunks = [];
// 3. 清理资源
cleanup();
}
/**
* 停止硬件资源
*/
const stopHardwareResource = () => {
// APP/小程序停止
if (recorderManager) {
recorderManager.stop();
}
// H5停止
// #ifdef H5
stopH5Resources();
// #endif
}
/**
* 更新音频数据显示
*/
const updateAudioDataForDisplay = () => {
const updateInterval = setInterval(() => {
if (!isRecording.value) {
clearInterval(updateInterval);
audioDataForDisplay.value = [];
return;
}
// 获取当前音量值
const currentVolume = volumeLevel.value;
// 调试:打印音量值
// console.log('Current Volume:', currentVolume);
// 生成适合 WaveDisplay 的数据
const data = [];
const center = 15; // 中心索引
const timeFactor = Date.now() / 150; // 更快的动画
// 根据音量动态调整波形强度
const volumeFactor = currentVolume / 100;
// 添加基础噪声,使波形在安静时也有轻微活动
const baseNoise = Math.random() * 0.1;
for (let i = 0; i < 31; i++) {
// 距离中心的位置
const distanceFromCenter = Math.abs(i - center) / center;
// 基础波形模式
const basePattern = 1 - Math.pow(distanceFromCenter, 1.2);
// 动态效果
const dynamicEffect = Math.sin(timeFactor + i * 0.3) * 0.3;
// 计算基础值
let value;
if (volumeFactor > 0.1) {
// 有音量时:音量因子占主导
value = volumeFactor * 0.8 * basePattern +
volumeFactor * 0.4 +
dynamicEffect * volumeFactor * 0.5;
} else {
// 安静时:使用动态效果和基础噪声
value = basePattern * 0.2 +
dynamicEffect * 0.1 +
baseNoise;
}
// 确保值在有效范围内
value = Math.max(0.15, Math.min(1, value));
// 随机微调
const randomVariance = volumeFactor > 0.1 ? 0.15 : 0.05;
value += (Math.random() - 0.5) * randomVariance;
value = Math.max(0.15, Math.min(1, value));
data.push(value);
}
audioDataForDisplay.value = data;
// 调试:检查生成的数据范围
// const min = Math.min(...data);
// const max = Math.max(...data);
// console.log(`Data range: ${min.toFixed(3)} - ${max.toFixed(3)}`);
}, 50);
}
/**
* 处理录音数据并生成WAV文件
*/
const processAudioData = async () => {
if (isProcessing.value) return;
isProcessing.value = true;
try {
let audioBlob = null;
// #ifdef H5
// H5端合并所有音频样本并生成WAV
if (h5AudioChunks.length > 0) {
// 合并所有Float32Array
const totalLength = h5AudioChunks.reduce((sum, chunk) => sum + chunk.length, 0);
const mergedSamples = new Float32Array(totalLength);
let offset = 0;
h5AudioChunks.forEach(chunk => {
mergedSamples.set(chunk, offset);
offset += chunk.length;
});
// 生成WAV文件
const wavBuffer = encodeWAV(mergedSamples, 16000, 1, 16);
audioBlob = new Blob([wavBuffer], { type: 'audio/wav' });
// console.log(`H5生成WAV文件: ${audioBlob.size} bytes, 时长: ${mergedSamples.length / 16000}秒`);
}
// #endif
// #ifndef H5
// APP/小程序端合并Int16数据并生成WAV
if (appAudioChunks.length > 0) {
// 合并所有Int16Array
const totalLength = appAudioChunks.reduce((sum, chunk) => sum + chunk.byteLength / 2, 0);
const mergedInt16 = new Int16Array(totalLength);
let offset = 0;
appAudioChunks.forEach(chunk => {
const int16Data = new Int16Array(chunk);
mergedInt16.set(int16Data, offset);
offset += int16Data.length;
});
// 转换为Float32用于生成WAV
const floatSamples = new Float32Array(mergedInt16.length);
for (let i = 0; i < mergedInt16.length; i++) {
floatSamples[i] = mergedInt16[i] / 32768;
}
// 生成WAV文件
const wavBuffer = encodeWAV(floatSamples, 16000, 1, 16);
audioBlob = new Blob([wavBuffer], { type: 'audio/wav' });
// console.log(`APP生成WAV文件: ${audioBlob.size} bytes, 时长: ${floatSamples.length / 16000}秒`);
}
// #endif
if (audioBlob && audioBlob.size > 44) { // 确保至少包含WAV头部
audioData.value = audioBlob;
// 保存文件用于调试(可选)
// debugSaveWavFile(audioBlob);
// 发送到服务器进行识别
await sendToASR(audioBlob);
} else {
throw new Error('录音数据为空或无效');
}
} catch (error) {
console.error('处理音频数据失败:', error);
uni.showToast({
title: '音频处理失败,请重试',
icon: 'none'
});
} finally {
isProcessing.value = false;
appAudioChunks = [];
h5AudioChunks = [];
}
}
/**
* 调试用保存WAV文件
*/
const debugSaveWavFile = (blob) => {
const url = URL.createObjectURL(blob);
const a = document.createElement('a');
a.href = url;
a.download = `recording_${Date.now()}.wav`;
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
URL.revokeObjectURL(url);
// console.log('WAV文件已保存用于调试');
}
/**
* 发送音频到ASR服务器
*/
const sendToASR = async (audioBlob) => {
try {
// 创建FormData
const formData = new FormData();
formData.append('file', audioBlob, 'recording.wav');
// 添加Token
const token = uni.getStorageSync('token') || '';
const asrUrl = `${config.baseUrl}/app/speech/asr`
const response = await fetch(asrUrl, {
method: 'POST',
headers: {
'Authorization': `Bearer ${token}`
},
body: formData
});
if (response.ok) {
const result = await response.json();
if(result.code == 200){
isProcessing.value = false
recognizedText.value = result.data || ''
}else{
$api.msg(result.msg || '识别失败')
}
} else {
const errorText = await response.text();
throw new Error(`ASR请求失败: ${response.status} - ${errorText}`);
}
} catch (error) {
console.error('ASR识别失败:', error);
}
}
/**
* 清理状态
*/
const cleanup = () => {
clearInterval(durationTimer);
isRecording.value = false;
isProcessing.value = false;
recordingDuration.value = 0;
volumeLevel.value = 0;
audioDataForDisplay.value = [];
if (recorderManager) {
recorderManager = null;
}
}
  // Stop any in-flight recording and release all resources when the
  // component using this composable unmounts.
  onUnmounted(() => {
  if (isRecording.value) {
  stopRecording();
  }
  cleanup();
  })
  // Public API of the composable.
  return {
  isRecording,
  isProcessing,
  recordingDuration,
  volumeLevel,
  recognizedText,
  audioData,
  audioDataForDisplay,
  startRecording,
  stopRecording,
  cancelRecording
  }
}