import {
	ref,
	onUnmounted
} from 'vue';

export function useAudioRecorder(wsUrl) {

	// State
	const isRecording = ref(false);
	const isStopping = ref(false);
	const isSocketConnected = ref(false);
	const recordingDuration = ref(0);
	const bufferPressure = ref(0); // Buffer pressure (0-100)
	const currentInterval = ref(0); // Current send interval

	// Audio
	const audioContext = ref(null);
	const mediaStream = ref(null);
	const workletNode = ref(null);

	// Network
	const socket = ref(null);
	const audioBuffer = ref([]);
	const bufferInterval = ref(null);

	// Configuration constants
	const SAMPLE_RATE = 16000;
	const BASE_INTERVAL_MS = 300; // Base send interval
	const MIN_INTERVAL_MS = 100; // Minimum send interval
	const MAX_BUFFER_SIZE = 20; // Maximum number of buffered chunks
	const PRESSURE_THRESHOLD = 0.7; // Threshold (70%) above which sending speeds up
	// AudioWorklet processor code
	const workletProcessorCode = `
	class AudioProcessor extends AudioWorkletProcessor {
		constructor(options) {
			super();
			this.sampleRate = options.processorOptions.sampleRate;
			this.samplesPerChunk = Math.floor(this.sampleRate * 0.1); // Samples per 100 ms
			this.buffer = new Int16Array(this.samplesPerChunk);
			this.index = 0;
		}

		process(inputs) {
			const input = inputs[0];
			if (input.length > 0) {
				const inputChannel = input[0];

				for (let i = 0; i < inputChannel.length; i++) {
					// Convert to 16-bit PCM
					this.buffer[this.index++] = Math.max(-32768, Math.min(32767, inputChannel[i] * 32767));

					// Send when the buffer is full
					if (this.index >= this.samplesPerChunk) {
						this.port.postMessage({
							audioData: this.buffer.buffer,
							timestamp: Date.now()
						}, [this.buffer.buffer]);

						// Create a new buffer
						this.buffer = new Int16Array(this.samplesPerChunk);
						this.index = 0;
					}
				}
			}
			return true;
		}
	}

	registerProcessor('audio-processor', AudioProcessor);
	`;
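
	// With SAMPLE_RATE = 16000, each message posted by the worklet above carries
	// Math.floor(16000 * 0.1) = 1600 samples of 16-bit PCM, i.e. 3200 bytes per ~100 ms.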
	// Initialize the WebSocket connection
	const initSocket = (wsUrl) => {
		return new Promise((resolve, reject) => {
			socket.value = new WebSocket(wsUrl);

			socket.value.onopen = () => {
				isSocketConnected.value = true;
				console.log('WebSocket connection established');
				resolve();
			};

			socket.value.onerror = (error) => {
				console.error('WebSocket connection error:', error);
				reject(error);
			};

			socket.value.onclose = (event) => {
				console.log(`WebSocket connection closed, code: ${event.code}, reason: ${event.reason}`);
				isSocketConnected.value = false;
			};
		});
	};
	// Compute the dynamic send interval
	const calculateDynamicInterval = () => {
		const pressureFactor = bufferPressure.value / 100;
		// The higher the pressure, the shorter the interval (faster sending)
		return Math.max(
			MIN_INTERVAL_MS,
			BASE_INTERVAL_MS - (pressureFactor * (BASE_INTERVAL_MS - MIN_INTERVAL_MS))
		);
	};
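
	// Possible wiring (sketch only, not used anywhere in this module): calculateDynamicInterval
	// is never called and bufferPressure is never written, so currentInterval stays at
	// BASE_INTERVAL_MS. One way to connect them, using only the refs defined above, could be:
	//
	// const updatePressureAndInterval = () => {
	// 	bufferPressure.value = Math.min(100, (audioBuffer.value.length / MAX_BUFFER_SIZE) * 100);
	// 	const next = calculateDynamicInterval();
	// 	if (bufferInterval.value && next !== currentInterval.value) {
	// 		clearInterval(bufferInterval.value);
	// 		currentInterval.value = next;
	// 		bufferInterval.value = setInterval(sendBufferedAudio, currentInterval.value);
	// 	}
	// };
	// // ...and call updatePressureAndInterval() from the worklet's onmessage handler.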
	// Send buffered audio data
	const sendBufferedAudio = () => {
		if (audioBuffer.value.length === 0 || !socket.value || socket.value.readyState !== WebSocket.OPEN) {
			return;
		}

		try {
			// Cap each send at 8000 bytes (below 8192)
			const MAX_CHUNK_SIZE = 8000 / 2; // 16-bit = 2 bytes per sample, so 4000 samples

			let samplesToSend = [];
			let totalSamples = 0;

			// Collect samples up to the limit
			while (audioBuffer.value.length > 0 && totalSamples < MAX_CHUNK_SIZE) {
				const buffer = audioBuffer.value[0];
				const samples = new Int16Array(buffer);
				const remainingSpace = MAX_CHUNK_SIZE - totalSamples;

				if (samples.length <= remainingSpace) {
					samplesToSend.push(samples);
					totalSamples += samples.length;
					audioBuffer.value.shift();
				} else {
					// Take only part of the samples; keep the rest at the head of the queue
					samplesToSend.push(samples.slice(0, remainingSpace));
					audioBuffer.value[0] = samples.slice(remainingSpace).buffer;
					totalSamples = MAX_CHUNK_SIZE;
				}
			}

			// Merge the collected samples and send them
			if (totalSamples > 0) {
				const combined = new Int16Array(totalSamples);
				let offset = 0;

				samplesToSend.forEach(chunk => {
					combined.set(chunk, offset);
					offset += chunk.length;
				});

				socket.value.send(combined.buffer);
			}

		} catch (error) {
			console.error('Error while sending audio data:', error);
		}
	};
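
	// Each send is therefore capped at 4000 samples (8000 bytes), i.e. 250 ms of 16 kHz audio;
	// any backlog drains across subsequent timer ticks or pressure-triggered calls.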
	// Start recording
	const startRecording = async () => {
		if (isRecording.value) return;

		try {
			// Reset state
			recordingDuration.value = 0;
			audioBuffer.value = [];
			bufferPressure.value = 0;
			currentInterval.value = BASE_INTERVAL_MS;

			console.log('Initializing WebSocket connection...');
			// Initialize the WebSocket
			await initSocket(wsUrl);

			console.log('Requesting microphone permission...');
			// Get the audio stream
			mediaStream.value = await navigator.mediaDevices.getUserMedia({
				audio: {
					sampleRate: SAMPLE_RATE,
					channelCount: 1,
					echoCancellation: false,
					noiseSuppression: false,
					autoGainControl: false
				},
				video: false
			});

			console.log('Initializing audio context...');
			// Create the audio context
			audioContext.value = new (window.AudioContext || window.webkitAudioContext)({
				sampleRate: SAMPLE_RATE,
				latencyHint: 'interactive'
			});

			// Register the AudioWorklet
			const blob = new Blob([workletProcessorCode], {
				type: 'application/javascript'
			});
			const workletUrl = URL.createObjectURL(blob);
			await audioContext.value.audioWorklet.addModule(workletUrl);
			URL.revokeObjectURL(workletUrl);

			// Create the AudioWorkletNode
			workletNode.value = new AudioWorkletNode(audioContext.value, 'audio-processor', {
				numberOfInputs: 1,
				numberOfOutputs: 1,
				outputChannelCount: [1],
				processorOptions: {
					sampleRate: SAMPLE_RATE
				}
			});

			// Handle audio data from the worklet
			workletNode.value.port.onmessage = (e) => {
				if (e.data.audioData instanceof ArrayBuffer) {
					audioBuffer.value.push(e.data.audioData);

					// When buffer pressure exceeds the threshold, try to send immediately
					if (audioBuffer.value.length / MAX_BUFFER_SIZE > PRESSURE_THRESHOLD) {
						sendBufferedAudio();
					}
				}
			};

			// Connect the audio nodes
			const source = audioContext.value.createMediaStreamSource(mediaStream.value);
			source.connect(workletNode.value);
			workletNode.value.connect(audioContext.value.destination);

			// Start the periodic send timer
			bufferInterval.value = setInterval(sendBufferedAudio, currentInterval.value);
			console.log('Recording initialized, starting capture');

			// Update state
			isRecording.value = true;
			console.log(`Recording started, sample rate: ${audioContext.value.sampleRate}Hz`);

		} catch (error) {
			console.error('Failed to start recording:', error);
			cleanup();
			throw error;
		}
	};
	// Stop recording and save
	const stopRecording = async () => {
		if (!isRecording.value || isStopping.value) return;

		isStopping.value = true;

		try {
			// Stop the timer
			if (bufferInterval.value) {
				clearInterval(bufferInterval.value);
				bufferInterval.value = null;
			}

			// Send any remaining audio data
			if (audioBuffer.value.length > 0) {
				console.log(`Sending remaining ${audioBuffer.value.length} audio chunks...`);
				sendBufferedAudio();
			}

			// Send the end marker
			if (socket.value?.readyState === WebSocket.OPEN) {
				console.log('Sending end marker...');
				socket.value.send(JSON.stringify({
					action: 'end',
					duration: recordingDuration.value
				}));

				// Wait until the queued data has been sent
				await new Promise((resolve) => {
					if (socket.value.bufferedAmount === 0) {
						resolve();
					} else {
						console.log(`Waiting for ${socket.value.bufferedAmount} bytes to be sent...`);
						const timer = setInterval(() => {
							if (socket.value.bufferedAmount === 0) {
								clearInterval(timer);
								resolve();
							}
						}, 50);
					}
				});

				// Close the connection
				console.log('Closing WebSocket connection...');
				socket.value.close();
			}

			cleanup();
			console.log('Recording stopped and saved');

		} catch (error) {
			console.error('Error while stopping recording:', error);
			throw error;
		} finally {
			isStopping.value = false;
		}
	};
	// Cancel recording
	const cancelRecording = async () => {
		if (!isRecording.value || isStopping.value) return;

		isStopping.value = true;

		try {
			// Stop the timer
			if (bufferInterval.value) {
				clearInterval(bufferInterval.value);
				bufferInterval.value = null;
			}

			// Send the cancel marker
			if (socket.value?.readyState === WebSocket.OPEN) {
				console.log('Sending cancel marker...');
				socket.value.send(JSON.stringify({
					action: 'cancel'
				}));
				socket.value.close();
			}

			console.log('Cleaning up resources...');
			cleanup();
			console.log('Recording cancelled');

		} catch (error) {
			console.error('Error while cancelling recording:', error);
			throw error;
		} finally {
			isStopping.value = false;
		}
	};
	// Clean up resources
	const cleanup = () => {
		// Clear the timer
		if (bufferInterval.value) {
			clearInterval(bufferInterval.value);
			bufferInterval.value = null;
		}

		// Stop the media stream
		if (mediaStream.value) {
			console.log('Stopping media stream...');
			mediaStream.value.getTracks().forEach(track => track.stop());
			mediaStream.value = null;
		}

		// Disconnect the audio node
		if (workletNode.value) {
			workletNode.value.disconnect();
			workletNode.value.port.onmessage = null;
			workletNode.value = null;
		}

		// Close the audio context
		if (audioContext.value) {
			if (audioContext.value.state !== 'closed') {
				audioContext.value.close().catch(e => {
					console.warn('Error closing AudioContext:', e);
				});
			}
			audioContext.value = null;
		}

		// Clear the buffer
		audioBuffer.value = [];
		bufferPressure.value = 0;

		// Reset state
		isRecording.value = false;
		isSocketConnected.value = false;
	};
	// Clean up automatically when the component is unmounted
	onUnmounted(() => {
		if (isRecording.value) {
			cancelRecording();
		}
	});

	return {
		isRecording,
		isStopping,
		isSocketConnected,
		recordingDuration,
		bufferPressure,
		currentInterval,
		startRecording,
		stopRecording,
		cancelRecording
	};
}
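
/*
 * Usage sketch: a consuming component might use the composable like this. The import path
 * and the WebSocket URL below are placeholders, not values defined by this module.
 *
 *   <script setup>
 *   import { useAudioRecorder } from '@/composables/useAudioRecorder';
 *
 *   const {
 *     isRecording,
 *     isStopping,
 *     startRecording,
 *     stopRecording,
 *     cancelRecording
 *   } = useAudioRecorder('ws://localhost:8000/asr');
 *
 *   const toggleRecording = async () => {
 *     if (isRecording.value) {
 *       await stopRecording();
 *     } else {
 *       await startRecording();
 *     }
 *   };
 *   </script>
 */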
// import {
// 	ref
// } from 'vue'
// export function useRealtimeRecorder(wsUrl) {
// 	const isRecording = ref(false)
// 	const mediaRecorder = ref(null)
// 	const socket = ref(null)
// 	const recognizedText = ref('')

// 	const startRecording = async () => {
// 		if (!navigator.mediaDevices?.getUserMedia) {
// 			uni.showToast({
// 				title: 'Recording is not supported in this environment',
// 				icon: 'none'
// 			})
// 			return
// 		}
// 		recognizedText.value = ''
// 		const stream = await navigator.mediaDevices.getUserMedia({
// 			audio: {
// 				sampleRate: 16000,
// 				channelCount: 1,
// 				echoCancellation: false,
// 				noiseSuppression: false,
// 				autoGainControl: false
// 			},
// 			video: false
// 		})
// 		socket.value = new WebSocket(wsUrl)

// 		socket.value.onopen = () => {
// 			console.log('[WebSocket] connection established')
// 		}

// 		socket.value.onmessage = (event) => {
// 			recognizedText.value = JSON.parse(event.data).text
// 		}

// 		const recorder = new MediaRecorder(stream, {
// 			mimeType: 'audio/webm;codecs=opus',
// 			audioBitsPerSecond: 16000,
// 		})

// 		recorder.ondataavailable = (e) => {
// 			if (e.data.size > 0 && socket.value?.readyState === WebSocket.OPEN) {
// 				socket.value.send(e.data)
// 			}
// 		}

// 		recorder.start(300) // send a chunk every 300 ms
// 		mediaRecorder.value = recorder
// 		isRecording.value = true
// 	}

// 	const stopRecording = () => {
// 		mediaRecorder.value?.stop()
// 		mediaRecorder.value = null
// 		isRecording.value = false

// 		if (socket.value?.readyState === WebSocket.OPEN) {
// 			socket.value.send('[end]')
// 			socket.value.close()
// 		}
// 	}

// 	const cancelRecording = () => {
// 		mediaRecorder.value?.stop()
// 		mediaRecorder.value = null
// 		isRecording.value = false
// 		recognizedText.value = ''

// 		if (socket.value?.readyState === WebSocket.OPEN) {
// 			socket.value.send('[cancel]')
// 			socket.value.close()
// 		}
// 	}

// 	return {
// 		isRecording,
// 		recognizedText,
// 		startRecording,
// 		stopRecording,
// 		cancelRecording
// 	}
// }