flat: save work in progress
@@ -1,129 +0,0 @@
// composables/useRealtimeRecorder.js
import { ref } from 'vue'

export function useRealtimeRecorder(wsUrl) {
  const isRecording = ref(false)
  const recognizedText = ref('')

  let audioContext = null
  let audioWorkletNode = null
  let sourceNode = null
  let socket = null

  const startRecording = async () => {
    const stream = await navigator.mediaDevices.getUserMedia({
      audio: true
    })

    audioContext = new (window.AudioContext || window.webkitAudioContext)()

    // AudioWorklet processor source, loaded below via a Blob URL.
    // It buffers ~100 ms of input, downsamples to 16 kHz and posts 16-bit PCM chunks.
    const processorCode = `
      class RecorderProcessor extends AudioWorkletProcessor {
        constructor() {
          super()
          this.buffer = []
          this.inputSampleRate = sampleRate // AudioWorkletGlobalScope global
          this.targetSampleRate = 16000
        }

        process(inputs) {
          const input = inputs[0][0]
          if (!input) return true
          this.buffer.push(...input)
          const requiredSamples = this.inputSampleRate / 10 // 100ms

          if (this.buffer.length >= requiredSamples) {
            const resampled = this.downsample(this.buffer, this.inputSampleRate, this.targetSampleRate)
            const int16Buffer = this.floatTo16BitPCM(resampled)
            this.port.postMessage(int16Buffer)
            this.buffer = []
          }
          return true
        }

        // Naive resampling: average each block of input samples that maps onto one output sample.
        downsample(buffer, inRate, outRate) {
          if (outRate === inRate) return buffer
          const ratio = inRate / outRate
          const len = Math.floor(buffer.length / ratio)
          const result = new Float32Array(len)
          for (let i = 0; i < len; i++) {
            const start = Math.floor(i * ratio)
            const end = Math.floor((i + 1) * ratio)
            let sum = 0
            for (let j = start; j < end && j < buffer.length; j++) sum += buffer[j]
            result[i] = sum / (end - start)
          }
          return result
        }

        // Convert Float32 samples in [-1, 1] to a 16-bit signed PCM ArrayBuffer.
        floatTo16BitPCM(input) {
          const output = new Int16Array(input.length)
          for (let i = 0; i < input.length; i++) {
            const s = Math.max(-1, Math.min(1, input[i]))
            output[i] = s < 0 ? s * 0x8000 : s * 0x7FFF
          }
          return output.buffer
        }
      }
      registerProcessor('recorder-processor', RecorderProcessor)
    `
    const blob = new Blob([processorCode], {
      type: 'application/javascript'
    })
    const blobUrl = URL.createObjectURL(blob)

    await audioContext.audioWorklet.addModule(blobUrl)

    // The server pushes recognized text back as plain-text messages.
    socket = new WebSocket(wsUrl)
    socket.onmessage = (e) => {
      recognizedText.value = e.data
    }

    sourceNode = audioContext.createMediaStreamSource(stream)
    audioWorkletNode = new AudioWorkletNode(audioContext, 'recorder-processor')

    // Forward each PCM chunk from the worklet to the WebSocket once it is open.
    audioWorkletNode.port.onmessage = (e) => {
      const audioData = e.data
      if (socket && socket.readyState === WebSocket.OPEN) {
        socket.send(audioData)
      }
    }

    sourceNode.connect(audioWorkletNode)
    audioWorkletNode.connect(audioContext.destination)

    isRecording.value = true
  }

  const stopRecording = () => {
    sourceNode?.disconnect()
    audioWorkletNode?.disconnect()
    audioContext?.close()

    if (socket?.readyState === WebSocket.OPEN) {
      socket.send('[end]') // end-of-utterance marker expected by the server
      socket.close()
    }

    audioContext = null
    sourceNode = null
    audioWorkletNode = null
    socket = null

    isRecording.value = false
  }

  const cancelRecording = () => {
    stopRecording()
    recognizedText.value = ''
  }

  return {
    isRecording,
    recognizedText,
    startRecording,
    stopRecording,
    cancelRecording
  }
}
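
For reference, here is a minimal usage sketch showing how a component could consume the composable that this commit removes. The import alias, the WebSocket URL and the button wiring are illustrative assumptions, not part of the original file.

<script setup>
// Hypothetical usage sketch — the endpoint URL and the '@/composables' alias are assumptions.
import { useRealtimeRecorder } from '@/composables/useRealtimeRecorder'

const {
  isRecording,
  recognizedText,
  startRecording,
  stopRecording,
  cancelRecording
} = useRealtimeRecorder('wss://example.com/asr')
</script>

<template>
  <button v-if="!isRecording" @click="startRecording">Start recording</button>
  <template v-else>
    <button @click="stopRecording">Stop</button>
    <button @click="cancelRecording">Cancel</button>
  </template>
  <p>{{ recognizedText }}</p>
</template>

startRecording requests microphone access and is best triggered from a user gesture (the click handler above), which also satisfies browser autoplay policies for starting an AudioContext; stopRecording sends the '[end]' marker and closes the socket, while cancelRecording additionally clears the recognized text.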
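
The composable implies a simple wire contract: binary frames of 16 kHz, 16-bit, mono PCM (roughly 100 ms each; with a 48 kHz input that is about 1600 samples, i.e. about 3.2 kB per frame), a text frame '[end]' marking the end of the utterance, and plain-text replies carrying the recognized text. A minimal Node sketch of that contract, using a recent version of the ws package, follows; the port, the ASR hook and everything else server-side are assumptions, not part of this commit.

// Hypothetical server-side counterpart sketch (assumes Node and a recent 'ws' package).
import { WebSocketServer } from 'ws'

const wss = new WebSocketServer({ port: 8080 }) // port is an assumption

wss.on('connection', (ws) => {
  ws.on('message', (data, isBinary) => {
    if (isBinary) {
      // data: Int16 PCM samples, 16 kHz mono, ~100 ms per frame.
      // Feed the chunk to a streaming ASR engine here (omitted) and
      // send partial transcripts back as text, e.g. ws.send(partialText).
    } else if (data.toString() === '[end]') {
      // Client signalled end of utterance; finalize and close.
      ws.close()
    }
  })
})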