flat: temporary stash (WIP)

史典卓
2025-03-29 20:46:23 +08:00
parent ca85430450
commit 2992cb200e
4 changed files with 81 additions and 193 deletions

View File

@@ -1,129 +0,0 @@
// composables/useRealtimeRecorder.js
import { ref } from 'vue'

export function useRealtimeRecorder(wsUrl) {
  const isRecording = ref(false)
  const recognizedText = ref('')
  let audioContext = null
  let audioWorkletNode = null
  let sourceNode = null
  let socket = null

  const startRecording = async () => {
    const stream = await navigator.mediaDevices.getUserMedia({
      audio: true
    })
    audioContext = new (window.AudioContext || window.webkitAudioContext)()
    const processorCode = `
      class RecorderProcessor extends AudioWorkletProcessor {
        constructor() {
          super()
          this.buffer = []
          this.inputSampleRate = sampleRate
          this.targetSampleRate = 16000
        }
        process(inputs) {
          const input = inputs[0][0]
          if (!input) return true
          this.buffer.push(...input)
          const requiredSamples = this.inputSampleRate / 10 // 100ms
          if (this.buffer.length >= requiredSamples) {
            const resampled = this.downsample(this.buffer, this.inputSampleRate, this.targetSampleRate)
            const int16Buffer = this.floatTo16BitPCM(resampled)
            this.port.postMessage(int16Buffer)
            this.buffer = []
          }
          return true
        }
        downsample(buffer, inRate, outRate) {
          if (outRate === inRate) return buffer
          const ratio = inRate / outRate
          const len = Math.floor(buffer.length / ratio)
          const result = new Float32Array(len)
          for (let i = 0; i < len; i++) {
            const start = Math.floor(i * ratio)
            const end = Math.floor((i + 1) * ratio)
            let sum = 0
            for (let j = start; j < end && j < buffer.length; j++) sum += buffer[j]
            result[i] = sum / (end - start)
          }
          return result
        }
        floatTo16BitPCM(input) {
          const output = new Int16Array(input.length)
          for (let i = 0; i < input.length; i++) {
            const s = Math.max(-1, Math.min(1, input[i]))
            output[i] = s < 0 ? s * 0x8000 : s * 0x7FFF
          }
          return output.buffer
        }
      }
      registerProcessor('recorder-processor', RecorderProcessor)
    `
    const blob = new Blob([processorCode], {
      type: 'application/javascript'
    })
    const blobUrl = URL.createObjectURL(blob)
    await audioContext.audioWorklet.addModule(blobUrl)

    socket = new WebSocket(wsUrl)
    socket.onmessage = (e) => {
      recognizedText.value = e.data
    }

    sourceNode = audioContext.createMediaStreamSource(stream)
    audioWorkletNode = new AudioWorkletNode(audioContext, 'recorder-processor')
    audioWorkletNode.port.onmessage = (e) => {
      const audioData = e.data
      if (socket && socket.readyState === WebSocket.OPEN) {
        socket.send(audioData)
      }
    }
    sourceNode.connect(audioWorkletNode)
    audioWorkletNode.connect(audioContext.destination)
    isRecording.value = true
  }

  const stopRecording = () => {
    sourceNode?.disconnect()
    audioWorkletNode?.disconnect()
    audioContext?.close()
    if (socket?.readyState === WebSocket.OPEN) {
      socket.send('[end]')
      socket.close()
    }
    audioContext = null
    sourceNode = null
    audioWorkletNode = null
    socket = null
    isRecording.value = false
  }

  const cancelRecording = () => {
    stopRecording()
    recognizedText.value = ''
  }

  return {
    isRecording,
    recognizedText,
    startRecording,
    stopRecording,
    cancelRecording
  }
}
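The composable above is deleted outright in this commit. For reference, a minimal consumer sketch is shown below; the page markup and the wss://example.com/asr endpoint are illustrative assumptions, not code from this repository.

<!-- Hypothetical consumer sketch; endpoint and markup are assumptions, not from this commit -->
<script setup>
import { useRealtimeRecorder } from '@/composables/useRealtimeRecorder'

// Assumed ASR WebSocket endpoint; substitute the project's real URL.
const {
  isRecording,
  recognizedText,
  startRecording,
  stopRecording,
  cancelRecording
} = useRealtimeRecorder('wss://example.com/asr')
</script>

<template>
  <view>
    <button @tap="isRecording ? stopRecording() : startRecording()">
      {{ isRecording ? 'Stop' : 'Start recording' }}
    </button>
    <button v-if="isRecording" @tap="cancelRecording">Cancel</button>
    <text>{{ recognizedText }}</text>
  </view>
</template>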

View File

@@ -172,9 +172,9 @@ function jobCollection() {
  opacity: 0 !important;
}
.container
  display flex
  flex-direction column
  background-color #f8f8f8
  display flex
  flex-direction column
  background-color #f8f8f8
  .job-header
    padding 20rpx 40rpx
    background-color #ffffff

View File

@@ -38,16 +38,15 @@
</view>
<view class="message" :class="{ messageNull: !msg.displayText }" v-else>
  <!-- {{ msg.displayText }} -->
  <md-render :content="msg.displayText"></md-render>
  <view class="message-markdown">
    <md-render :content="msg.displayText"></md-render>
    <view class="message-controll"></view>
  </view>
  <!-- guess -->
  <view
    class="guess"
    v-if="showGuess && !msg.self && messages.length - 1 === index && msg.displayText"
  >
    <view class="guess-top">
      <image class="guess-icon" src="/static/icon/tips2.png" mode=""></image>
      猜你所想
    </view>
    <view class="gulist">
      <view
        class="guess-list"
@@ -225,7 +224,6 @@ const sendMessage = () => {
useChatGroupDBStore()
  .getStearm(values, normalArr, scrollToBottom)
  .then(() => {
    console.log(messages);
    getGuess();
    scrollToBottom();
  });
@@ -506,25 +504,18 @@ defineExpose({ scrollToBottom, closeGuess, colseFile });
.msg-files:active
  background: #e9e9e9
.guess
  border-top: 2rpx solid #8c8c8c
  padding: 20rpx 0 10rpx 0
  .guess-top
    padding: 0 0 10rpx 0
    display: flex
    align-items: center
    color: rgba(255, 173, 71, 1)
    font-size: 28rpx
    .guess-icon
      width: 43rpx
      height: 43rpx
  padding: 5rpx 0 10rpx 0
  .guess-list
    border: 2rpx solid #8c8c8c
    padding: 6rpx 12rpx
    border-radius: 10rpx;
    width: fit-content
    margin: 0 10rpx 10rpx 0
    padding: 16rpx 24rpx
    margin-top: 28rpx
    font-size: 24rpx
    color: #8c8c8c
    width: 100%;
    border-radius: 20rpx 20rpx 20rpx 20rpx;
    border: 2rpx solid #E5E5E5;
    font-size: 28rpx;
    color: #333333;
    line-height: 33rpx;
  .gulist
    display: flex
    flex-wrap: wrap
@@ -599,19 +590,20 @@ image-margin-top = 40rpx
.chat-item.self {
  justify-content: flex-end;
}
.message {
.message
  margin-top: 40rpx
  padding: 20rpx 20rpx 0 20rpx;
  border-radius: 0 20rpx 20rpx 20rpx;
  background: #F6F6F6;
  // max-width: 80%;
  width: 100%;
  word-break: break-word;
  color: #333333;
  user-select: text;
  -webkit-user-select: text;
}
.message-markdown
  border-radius: 0 20rpx 20rpx 20rpx;
  padding: 20rpx 20rpx 0 20rpx;
  background: #F6F6F6;
.messageNull
  background: transparent;
  display: none
.msg-loading{
  background: transparent;
  font-size: 24rpx;
@@ -627,6 +619,7 @@ image-margin-top = 40rpx
  background: linear-gradient( 225deg, #DAE2FE 0%, #E9E3FF 100%);
  border-radius: 20rpx 0 20rpx 20rpx;
  padding: 20rpx;
  width: fit-content;
}
.input-area {
  padding: 32rpx 28rpx 24rpx 28rpx;

View File

@@ -114,69 +114,93 @@ const useChatGroupDBStore = defineStore("messageGroup", () => {
  return new Promise((resolve, reject) => {
    try {
      toggleTyping(true);
      const params = {
        data: text,
        sessionId: chatSessionID.value,
      };
      if (fileUrls && fileUrls.length) {
        params['fileUrl'] = fileUrls.map((item) => item.url)
      }
      const newMsg = {
        text: '', // holds the complete streamed response
        self: false,
        displayText: '' // rendered incrementally on the frontend
      };
      const index = messages.value.length;
      messages.value.push(newMsg); // push a placeholder first
      let fullText = ''; // accumulates the complete response
      if (fileUrls && fileUrls.length) {
        params['fileUrl'] = fileUrls.map((item) => item.url);
      }
      const newMsg = {
        text: [], // stores the raw structured parts
        self: false,
        displayText: '' // used for incremental stream rendering
      };
      const index = messages.value.length;
      messages.value.push(newMsg);
      const rawParts = Array.isArray(text) ? text : [text]; // normalize to an array
      // used to append streamed data for each part
      let partIndex = 0;
      function handleUnload() {
        newMsg.text = fullText
        newMsg.parentGroupId = chatSessionID.value
        newMsg.parentGroupId = chatSessionID.value;
        baseDB.db.add(massageName.value, newMsg);
      }
      // register the event listener
      window.addEventListener("unload", handleUnload);
      // render incoming data in real time
      function onDataReceived(data) {
        // const parsedData = safeParseJSON(data);
        fullText += data; // accumulate the complete content
        newMsg.displayText += data; // update the UI step by step
        messages.value[index] = {
          ...newMsg
        }; // trigger a view update
        progress && progress()
      function renderPart(part) {
        if (typeof part === 'string') {
          newMsg.displayText += part;
        } else if (typeof part === 'object' && part.type === 'highlight') {
          newMsg.displayText += `<span class="highlight">${part.content}</span>`;
        } else {
          newMsg.displayText += String(part); // fallback
        }
      }
      function onDataReceived(data) {
        let parsed;
        try {
          parsed = JSON.parse(data);
        } catch {
          parsed = data;
        }
        // supports appending multiple parts
        if (Array.isArray(parsed)) {
          parsed.forEach((part) => {
            newMsg.text.push(part); // store the structured part
            renderPart(part); // render it for display
          });
        } else {
          newMsg.text.push(parsed);
          renderPart(parsed);
        }
        messages.value[index] = {
          ...newMsg
        };
        progress && progress();
      }
      // error handling
      function onError(error) {
        console.error('请求异常:', error);
        msg('服务响应异常')
        msg('服务响应异常');
        reject(error);
      }
      // completion handling
      function onComplete() {
        newMsg.text = fullText; // save the complete response
        messages.value[index] = {
          ...newMsg
        };
        toggleTyping(false);
        window.removeEventListener("unload", handleUnload);
        handleUnload()
        resolve && resolve();
        handleUnload();
        resolve();
      }
      $api.streamRequest('/chat', params, onDataReceived,
        onError, onComplete)
      $api.streamRequest('/chat', params, onDataReceived, onError, onComplete);
    } catch (err) {
      console.log(err);
      reject(err);
    }
  })
  });
}
// state control
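The reworked onDataReceived now accepts structured chunks as well as plain strings. The sketch below walks one simulated chunk through the same parsing and renderPart logic; the sample payload and the standalone variables are illustrative assumptions, not part of the store.

// Illustrative only: simulates one streamed chunk to show how renderPart expands it.
const sampleChunk = JSON.stringify([
  "Based on your resume, ",
  { type: "highlight", content: "front-end development" },
  " roles look like a strong match."
]);

// Mirrors the store logic above, outside the Pinia store for demonstration.
let displayText = "";
const textParts = [];

function renderPart(part) {
  if (typeof part === "string") {
    displayText += part;
  } else if (typeof part === "object" && part.type === "highlight") {
    displayText += `<span class="highlight">${part.content}</span>`;
  } else {
    displayText += String(part);
  }
}

let parsed;
try {
  parsed = JSON.parse(sampleChunk);
} catch {
  parsed = sampleChunk;
}
(Array.isArray(parsed) ? parsed : [parsed]).forEach((part) => {
  textParts.push(part); // kept for persistence, like newMsg.text
  renderPart(part);     // appended to the rendered source, like newMsg.displayText
});

// displayText is now:
// Based on your resume, <span class="highlight">front-end development</span> roles look like a strong match.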