Compare commits
16 commits: 4cf75922da...shihezi

| SHA1 |
|---|
| 49af03f4bb |
| 4ae11e31f4 |
| bca0d997c6 |
| b43eb98a1c |
| 44c297aac2 |
| f64c9e5dae |
| 975835baa5 |
| 1ac524e1f1 |
| 7e8bef0cb9 |
| ce597b182d |
| fdd5577c85 |
| 4dfc7bdfd8 |
| 4befbb05cc |
| 74dc6debcd |
| 534dfd8126 |
| 4c29882f36 |

App.vue (16)
@@ -24,6 +24,7 @@ onLaunch((options) => {
getUserInfo();
useUserStore().changMiniProgramAppStatus(false);
useUserStore().changMachineEnv(false);
useLocationStore().getLocationLoop() // poll for location in a loop
return;
}
if (isY9MachineType()) {
@@ -32,7 +33,18 @@ onLaunch((options) => {
useUserStore().logOutApp();
useUserStore().changMiniProgramAppStatus(true);
useUserStore().changMachineEnv(true);
useLocationStore().getLocation();
(function loop() {
  console.log('📍一体机尝试获取定位')
  useLocationStore().getLocation().then(({ longitude, latitude }) => {
    console.log(`✅一体机获取定位成功:lng:${longitude},lat${latitude}`)
  })
  .catch(err => {
    console.log('❌一体机获取定位失败,30s后尝试重新获取')
    setTimeout(() => {
      loop()
    }, 3000);
  })
})()
uQRListen = new IncreaseRevie();
inactivityManager = new GlobalInactivityManager(handleInactivity, 60 * 1000);
inactivityManager.start();
@@ -40,6 +52,7 @@ onLaunch((options) => {
}
// remove this before going live
console.warn('浏览器环境');
useLocationStore().getLocationLoop() // poll for location in a loop
useUserStore().changMiniProgramAppStatus(true);
useUserStore().changMachineEnv(false);
useUserStore().initSeesionId(); // refresh
@@ -57,6 +70,7 @@ onLaunch((options) => {

onMounted(() => {});


onShow(() => {
  console.log('App Show');
});
@@ -630,6 +630,10 @@ export function sm4Encrypt(key, value, mode = "hex") {
|
||||
}
|
||||
}
|
||||
|
||||
export function reloadBrowser() {
|
||||
window.location.reload()
|
||||
}
|
||||
|
||||
|
||||
export const $api = {
|
||||
msg,
|
||||
@@ -679,5 +683,6 @@ export default {
|
||||
aes_Decrypt,
|
||||
sm2_Decrypt,
|
||||
sm2_Encrypt,
|
||||
safeReLaunch
|
||||
safeReLaunch,
|
||||
reloadBrowser
|
||||
}
|
||||
@@ -1,6 +1,6 @@
|
||||
export default {
|
||||
// baseUrl: 'https://fw.rc.qingdao.gov.cn/rgpp-api/api', // 内网
|
||||
baseUrl: 'https://qd.zhaopinzao8dian.com/api', // 测试
|
||||
baseUrl: 'http://36.105.163.21:30081/rgpp/api', // 内网
|
||||
// baseUrl: 'https://qd.zhaopinzao8dian.com/api', // 测试
|
||||
// baseUrl: 'http://192.168.3.29:8081',
|
||||
// baseUrl: 'http://10.213.6.207:19010/api',
|
||||
// 语音转文字
|
||||
@@ -8,8 +8,8 @@ export default {
|
||||
// vioceBaseURl: 'wss://fw.rc.qingdao.gov.cn/rgpp-api/api/app/asr/connect', // 内网
|
||||
// 语音合成
|
||||
// speechSynthesis: 'wss://qd.zhaopinzao8dian.com/api/speech-synthesis',
|
||||
speechSynthesis2: 'wss://resource.zhuoson.com/synthesis/', //直接替换即可
|
||||
// speechSynthesis2: 'http://39.98.44.136:19527', //直接替换即可
|
||||
// speechSynthesis2: 'wss://resource.zhuoson.com/synthesis/', //直接替换即可
|
||||
speechSynthesis2: 'http://39.98.44.136:19527', //直接替换即可
|
||||
// indexedDB
|
||||
DBversion: 3,
|
||||
// 只使用本地缓寸的数据
|
||||
|
||||
hook/piper-bundle.js (new file, 459 lines)

@@ -0,0 +1,459 @@
/**
 * PiperTTS Bundle (SDK + Worker + PCMPlayer)
 * Fix: Smart End Detection that supports Pause/Resume
 */
class PCMPlayer {
  constructor(options) {
    this.init(options);
  }

  init(options) {
    this.option = Object.assign({}, {
      inputCodec: 'Int16',
      channels: 1,
      sampleRate: 16000,
      flushTime: 50,
      fftSize: 2048,
    }, options);

    this.samples = new Float32Array();
    this.interval = setInterval(this.flush.bind(this), this.option.flushTime);
    this.convertValue = this.getConvertValue();
    this.typedArray = this.getTypedArray();

    this.initAudioContext();
    this.bindAudioContextEvent();
  }

  getConvertValue() {
    const map = {
      Int8: 128,
      Int16: 32768,
      Int32: 2147483648,
      Float32: 1
    };
    if (!map[this.option.inputCodec]) throw new Error('Codec Error');
    return map[this.option.inputCodec];
  }

  getTypedArray() {
    const map = {
      Int8: Int8Array,
      Int16: Int16Array,
      Int32: Int32Array,
      Float32: Float32Array
    };
    if (!map[this.option.inputCodec]) throw new Error('Codec Error');
    return map[this.option.inputCodec];
  }

  initAudioContext() {
    this.audioCtx = new (window.AudioContext || window.webkitAudioContext)();
    this.gainNode = this.audioCtx.createGain();
    this.gainNode.gain.value = 1.0;
    this.gainNode.connect(this.audioCtx.destination);
    this.startTime = this.audioCtx.currentTime;
    this.analyserNode = this.audioCtx.createAnalyser();
    this.analyserNode.fftSize = this.option.fftSize;
  }

  static isTypedArray(data) {
    return (data.byteLength && data.buffer && data.buffer.constructor == ArrayBuffer) || data.constructor == ArrayBuffer;
  }

  isSupported(data) {
    if (!PCMPlayer.isTypedArray(data)) throw new Error('Data must be ArrayBuffer or TypedArray');
    return true;
  }

  feed(data) {
    this.isSupported(data);
    data = this.getFormattedValue(data);
    const tmp = new Float32Array(this.samples.length + data.length);
    tmp.set(this.samples, 0);
    tmp.set(data, this.samples.length);
    this.samples = tmp;
  }

  getFormattedValue(data) {
    data = data.constructor == ArrayBuffer ? new this.typedArray(data) : new this.typedArray(data.buffer);
    let float32 = new Float32Array(data.length);
    for (let i = 0; i < data.length; i++) {
      float32[i] = data[i] / this.convertValue;
    }
    return float32;
  }

  volume(val) {
    this.gainNode.gain.value = val;
  }

  destroy() {
    if (this.interval) clearInterval(this.interval);
    this.samples = null;
    if (this.audioCtx) {
      this.audioCtx.close();
      this.audioCtx = null;
    }
  }

  flush() {
    if (!this.samples.length) return;

    const bufferSource = this.audioCtx.createBufferSource();
    if (typeof this.option.onended === 'function') {
      bufferSource.onended = (e) => this.option.onended(this, e);
    }

    const length = this.samples.length / this.option.channels;
    const audioBuffer = this.audioCtx.createBuffer(this.option.channels, length, this.option.sampleRate);

    for (let channel = 0; channel < this.option.channels; channel++) {
      const audioData = audioBuffer.getChannelData(channel);
      let offset = channel;
      let decrement = 50;
      for (let i = 0; i < length; i++) {
        audioData[i] = this.samples[offset];
        if (i < 50) audioData[i] = (audioData[i] * i) / 50;
        if (i >= length - 51) audioData[i] = (audioData[i] * decrement--) / 50;
        offset += this.option.channels;
      }
    }

    if (this.startTime < this.audioCtx.currentTime) {
      this.startTime = this.audioCtx.currentTime;
    }

    bufferSource.buffer = audioBuffer;
    bufferSource.connect(this.gainNode);
    bufferSource.connect(this.analyserNode);
    bufferSource.start(this.startTime);

    this.startTime += audioBuffer.duration;
    this.samples = new Float32Array();
  }

  async pause() {
    await this.audioCtx.suspend();
  }
  async continue() {
    await this.audioCtx.resume();
  }

  bindAudioContextEvent() {
    if (typeof this.option.onstatechange === 'function') {
      this.audioCtx.onstatechange = (e) => {
        this.option.onstatechange(this, e, this.audioCtx.state);
      };
    }
  }
}

// ==========================================
// Worker source
// ==========================================
const WORKER_SOURCE = `
let globalWs = null;

self.onmessage = function (e) {
  const { type, data } = e.data;
  switch (type) {
    case 'connect': connectWebSocket(data); break;
    case 'stop': closeWs(); break;
  }
};

function closeWs() {
  if (globalWs) {
    globalWs.onerror = null;
    globalWs.onclose = null;
    globalWs.onmessage = null;
    try { globalWs.close(1000, 'User stopped'); } catch (e) {}
    globalWs = null;
  }
}

function connectWebSocket(config) {
  closeWs();

  const { url, text, options } = config;
  self.postMessage({ type: 'status', data: 'ws_connecting' });

  try {
    const currentWs = new WebSocket(url);
    currentWs.binaryType = 'arraybuffer';
    globalWs = currentWs;

    currentWs.onopen = () => {
      if (globalWs !== currentWs) return;
      self.postMessage({ type: 'status', data: 'ws_connected' });
      currentWs.send(JSON.stringify({
        text: text,
        speaker_id: options.speakerId || 0,
        length_scale: options.lengthScale || 1.0,
        noise_scale: options.noiseScale || 0.667,
      }));
      self.postMessage({ type: 'status', data: 'generating' });
    };

    currentWs.onmessage = (event) => {
      if (globalWs !== currentWs) return;

      if (typeof event.data === 'string' && event.data === 'END') {
        const wsToClose = currentWs;
        globalWs = null;

        wsToClose.onmessage = null;
        wsToClose.onerror = null;
        wsToClose.onclose = null;

        try { wsToClose.close(1000, 'Done'); } catch(e) {}

        self.postMessage({ type: 'end' });
      } else {
        self.postMessage({ type: 'audio-data', buffer: event.data }, [event.data]);
      }
    };

    currentWs.onclose = (e) => {
      if (globalWs === currentWs) {
        self.postMessage({ type: 'end' });
        globalWs = null;
      }
    };

    currentWs.onerror = () => {
      if (globalWs === currentWs) {
        self.postMessage({ type: 'error', data: 'WebSocket error' });
      }
    };

  } catch (e) {
    self.postMessage({ type: 'error', data: e.message });
  }
}
`;

// ==========================================
// PiperTTS SDK
// ==========================================
class PiperTTS {
  constructor(config = {}) {
    this.baseUrl = config.baseUrl || 'http://localhost:5001';
    this.onStatus = config.onStatus || console.log;
    this.onStart = config.onStart || (() => {});
    this.onEnd = config.onEnd || (() => {});
    this.sampleRate = config.sampleRate || 16000;

    this.player = null;
    this.worker = null;
    this.recordedChunks = [];
    this.isRecording = false;

    // New: timer ID used to detect the end of playback
    this.endCheckInterval = null;

    this._initWorker();
  }

  _initWorker() {
    const blob = new Blob([WORKER_SOURCE], {
      type: 'application/javascript'
    });
    this.worker = new Worker(URL.createObjectURL(blob));

    this.worker.onmessage = (e) => {
      const { type, data, buffer } = e.data;
      switch (type) {
        case 'status':
          const map = {
            ws_connecting: '正在连接...',
            ws_connected: '已连接',
            generating: '流式接收中...'
          };
          this.onStatus(map[data] || data, 'processing');
          break;
        case 'error':
          if (this.recordedChunks.length > 0) {
            this.onStatus('数据接收完毕', 'success');
            this._triggerEndWithDelay();
          } else {
            this.onStatus(`错误: ${data}`, 'error');
            this.stop();
          }
          break;
        case 'audio-data':
          this._handleAudio(buffer);
          break;
        case 'end':
          this.onStatus('数据接收完毕', 'success');
          this._triggerEndWithDelay();
          break;
      }
    };
  }

  /**
   * [Core change] Smart polling check:
   * onEnd only fires once the AudioContext is running and the scheduled time has elapsed
   */
  _triggerEndWithDelay() {
    // Clear any previous timer first
    if (this.endCheckInterval) clearInterval(this.endCheckInterval);

    // Check every 200 ms
    this.endCheckInterval = setInterval(() => {
      // 1. If the player is gone, finish immediately
      if (!this.player || !this.player.audioCtx) {
        this._finishEndCheck();
        return;
      }

      // 2. If suspended (paused), do nothing and keep waiting
      if (this.player.audioCtx.state === 'suspended') {
        return;
      }

      // 3. Compute the remaining time
      // startTime is the absolute end time of the scheduled buffers, currentTime is now
      const remainingTime = this.player.startTime - this.player.audioCtx.currentTime;

      // 4. If less than 0.1 s remains (a little slack), playback has finished
      if (remainingTime <= 0.1) {
        this._finishEndCheck();
      }
    }, 200);
  }

  _finishEndCheck() {
    if (this.endCheckInterval) {
      clearInterval(this.endCheckInterval);
      this.endCheckInterval = null;
    }
    this.onEnd();
  }

  _initPlayer() {
    if (this.player) {
      this.player.destroy();
    }

    this.player = new PCMPlayer({
      inputCodec: 'Int16',
      channels: 1,
      sampleRate: this.sampleRate,
      flushTime: 50,
    });
  }

  async speak(text, options = {}) {
    if (!text) return;
    this.stop();

    this._initPlayer();
    if (this.player) {
      await this.player.continue();
    }

    this.recordedChunks = [];
    this.isRecording = true;
    this.onStart();

    const wsUrl = this.baseUrl.replace(/^http/, 'ws') + '/ws/synthesize';
    this.worker.postMessage({
      type: 'connect',
      data: {
        url: wsUrl,
        text,
        options
      },
    });
  }

  stop() {
    // The polling check must be cleared on stop
    if (this.endCheckInterval) {
      clearInterval(this.endCheckInterval);
      this.endCheckInterval = null;
    }

    this.worker.postMessage({
      type: 'stop'
    });
    if (this.player) {
      this.player.destroy();
      this.player = null;
    }
    this.onStatus('已停止', 'default');
  }

  _handleAudio(arrayBuffer) {
    if (this.isRecording) {
      this.recordedChunks.push(arrayBuffer);
    }
    if (this.player) {
      this.player.feed(arrayBuffer);
    }
  }

  getAnalyserNode() {
    return this.player ? this.player.analyserNode : null;
  }

  downloadAudio(filename = 'tts_output.wav') {
    if (this.recordedChunks.length === 0) return;
    let totalLen = 0;
    for (let chunk of this.recordedChunks) totalLen += chunk.byteLength;
    const tmp = new Uint8Array(totalLen);
    let offset = 0;
    for (let chunk of this.recordedChunks) {
      tmp.set(new Uint8Array(chunk), offset);
      offset += chunk.byteLength;
    }
    const wavBuffer = this._encodeWAV(new Int16Array(tmp.buffer), this.sampleRate);
    const blob = new Blob([wavBuffer], {
      type: 'audio/wav'
    });
    const url = URL.createObjectURL(blob);
    const a = document.createElement('a');
    a.style = 'display: none';
    a.href = url;
    a.download = filename;
    document.body.appendChild(a);
    a.click();
    window.URL.revokeObjectURL(url);
  }

  _encodeWAV(samples, sampleRate) {
    const buffer = new ArrayBuffer(44 + samples.length * 2);
    const view = new DataView(buffer);
    const writeString = (view, offset, string) => {
      for (let i = 0; i < string.length; i++) view.setUint8(offset + i, string.charCodeAt(i));
    };
    writeString(view, 0, 'RIFF');
    view.setUint32(4, 36 + samples.length * 2, true);
    writeString(view, 8, 'WAVE');
    writeString(view, 12, 'fmt ');
    view.setUint32(16, 16, true);
    view.setUint16(20, 1, true);
    view.setUint16(22, 1, true);
    view.setUint32(24, sampleRate, true);
    view.setUint32(28, sampleRate * 2, true);
    view.setUint16(32, 2, true);
    view.setUint16(34, 16, true);
    writeString(view, 36, 'data');
    view.setUint32(40, samples.length * 2, true);
    let offset = 44;
    for (let i = 0; i < samples.length; i++) {
      view.setInt16(offset, samples[i], true);
      offset += 2;
    }
    return view;
  }
}
export default PiperTTS;
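For reference, a minimal usage sketch of the PiperTTS class exported above (illustrative only, not part of the commit). The baseUrl and the spoken text are placeholders; in this project the real endpoint comes from config.speechSynthesis2, as the useTTSPlayer hook further down shows.

// Usage sketch (assumptions: placeholder server URL and text)
import PiperTTS from './piper-bundle.js'

async function demo() {
  const tts = new PiperTTS({
    baseUrl: 'http://localhost:5001',   // placeholder; the app reads config.speechSynthesis2
    sampleRate: 16000,
    onStatus: (msg, type) => console.log('[TTS]', type, msg),
    onStart: () => console.log('playback started'),
    onEnd: () => console.log('playback finished (smart end detection)'),
  })

  // speak() rewrites http(s):// to ws(s)://, appends /ws/synthesize, and streams the
  // Int16 PCM chunks it receives back into the internal PCMPlayer.
  await tts.speak('Hello from Piper', { speakerId: 0, lengthScale: 1.0, noiseScale: 0.667 })

  // Pause/resume act on the underlying AudioContext via PCMPlayer.
  await tts.player.pause()
  await tts.player.continue()

  // Everything received since speak() can be saved as a 16-bit mono WAV.
  tts.downloadAudio('tts_output.wav')
  tts.stop()
}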
@@ -3,255 +3,344 @@ import {
onUnmounted
} from 'vue'
import {
$api,

} from '../common/globalFunction';

$api
} from '../common/globalFunction'; // your request wrapper
import config from '@/config'

// Alibaba Cloud
// open source
export function useAudioRecorder() {
// --- State ---
const isRecording = ref(false)
const isStopping = ref(false)
const isSocketConnected = ref(false)
const recordingDuration = ref(0)

const audioDataForDisplay = ref(new Array(16).fill(0))
const volumeLevel = ref(0)

const volumeLevel = ref(0) // 0-100
const recognizedText = ref('')
const lastFinalText = ref('')

let audioStream = null
let audioContext = null
let audioInput = null
let scriptProcessor = null
let websocket = null
// --- Internal variables ---
let socketTask = null
let durationTimer = null

const generateUUID = () => {
return ([1e7] + -1e3 + -4e3 + -8e3 + -1e11)
.replace(/[018]/g, c =>
(c ^ crypto.getRandomValues(new Uint8Array(1))[0] & 15 >> c / 4).toString(16)
).replace(/-/g, '')
// --- APP / mini-program variables ---
let recorderManager = null;

// --- H5 variables ---
let audioContext = null;
let scriptProcessor = null;
let mediaStreamSource = null;
let h5Stream = null;

// --- Config ---
const RECORD_CONFIG = {
duration: 600000,
sampleRate: 16000,
numberOfChannels: 1,
format: 'pcm',
frameSize: 4096
}

const fetchWsUrl = async () => {
const res = await $api.createRequest('/app/speech/getToken')
if (res.code !== 200) throw new Error('无法获取语音识别 wsUrl')
const wsUrl = res.msg
return wsUrl
/**
 * Build the WebSocket URL (with token)
 */
const getWsUrl = async () => {
let wsUrl = config.vioceBaseURl

// Append the token
const token = uni.getStorageSync('token') || '';
if (token) {
const separator = wsUrl.includes('?') ? '&' : '?';
wsUrl = `${wsUrl}${separator}token=${encodeURIComponent(token)}`;
}
return wsUrl;
}

function extractWsParams(wsUrl) {
const url = new URL(wsUrl)
const appkey = url.searchParams.get('appkey')
const token = url.searchParams.get('token')
return {
appkey,
token
/**
 * Start recording (entry point)
 */
const startRecording = async () => {
if (isRecording.value) return

try {
recognizedText.value = ''
volumeLevel.value = 0

// #ifdef H5
if (location.protocol !== 'https:' && location.hostname !== 'localhost') {
uni.showToast({
title: 'H5录音需要HTTPS环境',
icon: 'none'
});
return;
}
// #endif

const url = await getWsUrl()
console.log('正在连接 ASR:', url)

await connectSocket(url);

} catch (err) {
console.error('启动失败:', err);
uni.showToast({
title: '启动失败: ' + (err.message || ''),
icon: 'none'
});
cleanup();
}
}


const connectWebSocket = async () => {
const wsUrl = await fetchWsUrl()
const {
appkey,
token
} = extractWsParams(wsUrl)
/**
 * Connect the WebSocket
 */
const connectSocket = (url) => {
return new Promise((resolve, reject) => {
websocket = new WebSocket(wsUrl)
websocket.binaryType = 'arraybuffer'
socketTask = uni.connectSocket({
url: url,
success: () => console.log('Socket 连接请求发送'),
fail: (err) => reject(err)
});

websocket.onopen = () => {
isSocketConnected.value = true
socketTask.onOpen((res) => {
console.log('WebSocket 已连接');
isSocketConnected.value = true;

// Send the StartTranscription message (see demo.html)
const startTranscriptionMessage = {
header: {
appkey: appkey, // not required; may be empty or carried in wsUrl
namespace: 'SpeechTranscriber',
name: 'StartTranscription',
task_id: generateUUID(),
message_id: generateUUID()
},
payload: {
format: 'pcm',
sample_rate: 16000,
enable_intermediate_result: true,
enable_punctuation_prediction: true,
enable_inverse_text_normalization: true
}
// #ifdef H5
startH5Recording().then(() => resolve()).catch(err => {
socketTask.close();
reject(err);
});
// #endif

// #ifndef H5
startAppRecording();
resolve();
// #endif
});

socketTask.onMessage((res) => {
// Receive text results
if (res.data) {
recognizedText.value = res.data;
}
websocket.send(JSON.stringify(startTranscriptionMessage))
resolve()
}
});

websocket.onerror = (e) => {
isSocketConnected.value = false
reject(e)
}
socketTask.onError((err) => {
console.error('Socket 错误:', err);
isSocketConnected.value = false;
stopRecording();
});

websocket.onclose = () => {
isSocketConnected.value = false
}

websocket.onmessage = (e) => {
const msg = JSON.parse(e.data)
const name = msg?.header?.name
const payload = msg?.payload

switch (name) {
case 'TranscriptionResultChanged': {
// Intermediate recognition text (optional: stash_result.unfixedText is more precise)
const text = payload?.unfixed_result || payload?.result || ''
lastFinalText.value = text
break
}
case 'SentenceBegin': {
// Optional: a new sentence starts, reset state
// console.log('开始新的句子识别')
break
}
case 'SentenceEnd': {
const text = payload?.result || ''
const confidence = payload?.confidence || 0
if (text && confidence > 0.5) {
recognizedText.value += text
lastFinalText.value = ''
// console.log('识别完成:', {
// text,
// confidence
// })
}
break
}
case 'TranscriptionStarted': {
// console.log('识别任务已开始')
break
}
case 'TranscriptionCompleted': {
lastFinalText.value = ''
// console.log('识别全部完成')
break
}
case 'TaskFailed': {
console.error('识别失败:', msg?.header?.status_text)
break
}
default:
console.log('未知消息类型:', name, msg)
break
}
}
socketTask.onClose(() => {
isSocketConnected.value = false;
console.log('Socket 已关闭');
});
})
}

const startRecording = async () => {
if (isRecording.value) return
const startH5Recording = async () => {
try {
recognizedText.value = ''
lastFinalText.value = ''
await connectWebSocket()

audioStream = await navigator.mediaDevices.getUserMedia({
// 1. Get the microphone stream
const stream = await navigator.mediaDevices.getUserMedia({
audio: true
})
audioContext = new (window.AudioContext || window.webkitAudioContext)({
});
h5Stream = stream;

// 2. Create the AudioContext
const AudioContext = window.AudioContext || window.webkitAudioContext;
audioContext = new AudioContext({
sampleRate: 16000
})
audioInput = audioContext.createMediaStreamSource(audioStream)
scriptProcessor = audioContext.createScriptProcessor(2048, 1, 1)
});

mediaStreamSource = audioContext.createMediaStreamSource(stream);
scriptProcessor = audioContext.createScriptProcessor(4096, 1, 1);

scriptProcessor.onaudioprocess = (event) => {
const input = event.inputBuffer.getChannelData(0)
const pcm = new Int16Array(input.length)
let sum = 0
for (let i = 0; i < input.length; ++i) {
const s = Math.max(-1, Math.min(1, input[i]))
pcm[i] = s * 0x7FFF
sum += s * s
if (!isSocketConnected.value || !socketTask) return;

const inputData = event.inputBuffer.getChannelData(0);

calculateVolume(inputData, true);

const buffer = new ArrayBuffer(inputData.length * 2);
const view = new DataView(buffer);
for (let i = 0; i < inputData.length; i++) {
let s = Math.max(-1, Math.min(1, inputData[i]));
view.setInt16(i * 2, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
}

volumeLevel.value = Math.sqrt(sum / input.length)
audioDataForDisplay.value = Array(16).fill(volumeLevel.value)
socketTask.send({
data: buffer,
fail: (e) => console.error('发送音频失败', e)
});
};

if (websocket?.readyState === WebSocket.OPEN) {
websocket.send(pcm.buffer)
}
}
mediaStreamSource.connect(scriptProcessor);
scriptProcessor.connect(audioContext.destination);

audioInput.connect(scriptProcessor)
scriptProcessor.connect(audioContext.destination)
isRecording.value = true;
recordingDuration.value = 0;
durationTimer = setInterval(() => recordingDuration.value++, 1000);

console.log('H5 录音已启动');

isRecording.value = true
recordingDuration.value = 0
durationTimer = setInterval(() => recordingDuration.value++, 1000)
} catch (err) {
console.error('启动失败:', err)
cleanup()
console.error('H5 录音启动失败:', err);
throw err;
}
}

const stopRecording = () => {
if (!isRecording.value || isStopping.value) return
isStopping.value = true
const stopH5Resources = () => {
if (scriptProcessor) scriptProcessor.disconnect();
if (mediaStreamSource) mediaStreamSource.disconnect();
if (audioContext) audioContext.close();
if (h5Stream) h5Stream.getTracks().forEach(track => track.stop());

if (websocket?.readyState === WebSocket.OPEN) {
websocket.send(JSON.stringify({
header: {
namespace: 'SpeechTranscriber',
name: 'StopTranscription',
message_id: generateUUID()
}
}))
websocket.close()
scriptProcessor = null;
mediaStreamSource = null;
audioContext = null;
h5Stream = null;
}

const startAppRecording = () => {
recorderManager = uni.getRecorderManager();

recorderManager.onFrameRecorded((res) => {
const {
frameBuffer
} = res;

calculateVolume(frameBuffer, false);

if (isSocketConnected.value && socketTask) {
socketTask.send({
data: frameBuffer
});
}
});

recorderManager.onStart(() => {
console.log('APP 录音已开始');
isRecording.value = true;
recordingDuration.value = 0;
durationTimer = setInterval(() => recordingDuration.value++, 1000);
});

recorderManager.onError((err) => {
console.error('APP 录音报错:', err);
cleanup();
});

recorderManager.start(RECORD_CONFIG);
}
const stopHardwareResource = () => {
// Stop APP / mini-program recording
if (recorderManager) {
recorderManager.stop();
}

cleanup()
isStopping.value = false
// Stop H5 recording
// #ifdef H5
if (scriptProcessor) scriptProcessor.disconnect();
if (mediaStreamSource) mediaStreamSource.disconnect();
if (audioContext) audioContext.close();
if (h5Stream) h5Stream.getTracks().forEach(track => track.stop());

scriptProcessor = null;
mediaStreamSource = null;
audioContext = null;
h5Stream = null;
// #endif
}

/**
 * Stop recording (shared)
 */
const stopRecording = () => {
// Stop APP recording
if (recorderManager) {
recorderManager.stop();
}

// Release H5 recording resources
// #ifdef H5
stopH5Resources();
// #endif

// Close the socket
if (socketTask) {
socketTask.close();
}

cleanup();
}

const cancelRecording = () => {
if (!isRecording.value || isStopping.value) return
isStopping.value = true
websocket?.close()
cleanup()
isStopping.value = false
if (!isRecording.value) return;

console.log('取消录音 - 丢弃结果');

// 1. Stop hardware recording
stopHardwareResource();

// 2. Force-close the socket
if (socketTask) {
socketTask.close();
}

// 3. Key step: clear the recognized text
recognizedText.value = '';

// 4. Clean up resources
cleanup();
}

/**
 * Reset state
 */
const cleanup = () => {
clearInterval(durationTimer)
clearInterval(durationTimer);
isRecording.value = false;
isSocketConnected.value = false;
socketTask = null;
recorderManager = null;
volumeLevel.value = 0;
}

scriptProcessor?.disconnect()
audioInput?.disconnect()
audioStream?.getTracks().forEach(track => track.stop())
audioContext?.close()
/**
 * Compute volume (handles Float32 and Int16/ArrayBuffer)
 */
const calculateVolume = (data, isFloat32) => {
let sum = 0;
let length = 0;

audioStream = null
audioContext = null
audioInput = null
scriptProcessor = null
websocket = null

isRecording.value = false
isSocketConnected.value = false
if (isFloat32) {
length = data.length;
for (let i = 0; i < length; i += 10) {
sum += Math.abs(data[i]);
}
volumeLevel.value = Math.min(100, Math.floor((sum / (length / 10)) * 100 * 3));
} else {
const int16Data = new Int16Array(data);
length = int16Data.length;
for (let i = 0; i < length; i += 10) {
sum += Math.abs(int16Data[i]);
}
const avg = sum / (length / 10);
volumeLevel.value = Math.min(100, Math.floor((avg / 10000) * 100));
}
}

onUnmounted(() => {
if (isRecording.value) stopRecording()
if (isRecording.value) {
stopRecording();
}
})

return {
isRecording,
isStopping,
isSocketConnected,
recordingDuration,
audioDataForDisplay,
volumeLevel,
recognizedText,
lastFinalText,
startRecording,
stopRecording,
cancelRecording
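For context, a minimal sketch of how the reworked composable above can be consumed from a uni-app page script (illustrative only, not part of the commit; the import path is assumed, since this diff does not show the hook's file name).

// Usage sketch (assumption: the hook lives at @/hook/useAudioRecorder)
import { useAudioRecorder } from '@/hook/useAudioRecorder'

const {
  isRecording,
  isSocketConnected,
  recordingDuration,
  volumeLevel,        // 0-100, driven by calculateVolume()
  recognizedText,     // filled from socketTask.onMessage
  startRecording,     // opens the ASR socket, then starts H5 or APP recording
  stopRecording,      // stops the recorder and H5 resources, closes the socket, cleanup()
  cancelRecording,    // like stop, but also discards recognizedText
} = useAudioRecorder()

async function onMicButton() {
  if (isRecording.value) {
    stopRecording()
  } else {
    await startRecording()   // on H5 this requires HTTPS (or localhost)
  }
}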
@@ -1,348 +0,0 @@
import {
  ref,
  onUnmounted
} from 'vue'
import {
  $api
} from '../common/globalFunction'; // your request wrapper
import config from '@/config'

// open source
export function useAudioRecorder() {
  // --- State ---
  const isRecording = ref(false)
  const isSocketConnected = ref(false)
  const recordingDuration = ref(0)
  const volumeLevel = ref(0) // 0-100
  const recognizedText = ref('')

  // --- Internal variables ---
  let socketTask = null
  let durationTimer = null

  // --- APP / mini-program variables ---
  let recorderManager = null;

  // --- H5 variables ---
  let audioContext = null;
  let scriptProcessor = null;
  let mediaStreamSource = null;
  let h5Stream = null;

  // --- Config ---
  const RECORD_CONFIG = {
    duration: 600000,
    sampleRate: 16000,
    numberOfChannels: 1,
    format: 'pcm',
    frameSize: 4096
  }

  /**
   * Build the WebSocket URL (with token)
   */
  const getWsUrl = async () => {
    let wsUrl = config.vioceBaseURl

    // Append the token
    const token = uni.getStorageSync('token') || '';
    if (token) {
      const separator = wsUrl.includes('?') ? '&' : '?';
      wsUrl = `${wsUrl}${separator}token=${encodeURIComponent(token)}`;
    }
    return wsUrl;
  }

  /**
   * Start recording (entry point)
   */
  const startRecording = async () => {
    if (isRecording.value) return

    try {
      recognizedText.value = ''
      volumeLevel.value = 0

      // #ifdef H5
      if (location.protocol !== 'https:' && location.hostname !== 'localhost') {
        uni.showToast({
          title: 'H5录音需要HTTPS环境',
          icon: 'none'
        });
        return;
      }
      // #endif

      const url = await getWsUrl()
      console.log('正在连接 ASR:', url)

      await connectSocket(url);

    } catch (err) {
      console.error('启动失败:', err);
      uni.showToast({
        title: '启动失败: ' + (err.message || ''),
        icon: 'none'
      });
      cleanup();
    }
  }

  /**
   * Connect the WebSocket
   */
  const connectSocket = (url) => {
    return new Promise((resolve, reject) => {
      socketTask = uni.connectSocket({
        url: url,
        success: () => console.log('Socket 连接请求发送'),
        fail: (err) => reject(err)
      });

      socketTask.onOpen((res) => {
        console.log('WebSocket 已连接');
        isSocketConnected.value = true;

        // #ifdef H5
        startH5Recording().then(() => resolve()).catch(err => {
          socketTask.close();
          reject(err);
        });
        // #endif

        // #ifndef H5
        startAppRecording();
        resolve();
        // #endif
      });

      socketTask.onMessage((res) => {
        // Receive text results
        if (res.data) {
          recognizedText.value = res.data;
        }
      });

      socketTask.onError((err) => {
        console.error('Socket 错误:', err);
        isSocketConnected.value = false;
        stopRecording();
      });

      socketTask.onClose(() => {
        isSocketConnected.value = false;
        console.log('Socket 已关闭');
      });
    })
  }

  const startH5Recording = async () => {
    try {
      // 1. Get the microphone stream
      const stream = await navigator.mediaDevices.getUserMedia({
        audio: true
      });
      h5Stream = stream;

      // 2. Create the AudioContext
      const AudioContext = window.AudioContext || window.webkitAudioContext;
      audioContext = new AudioContext({
        sampleRate: 16000
      });

      mediaStreamSource = audioContext.createMediaStreamSource(stream);
      scriptProcessor = audioContext.createScriptProcessor(4096, 1, 1);

      scriptProcessor.onaudioprocess = (event) => {
        if (!isSocketConnected.value || !socketTask) return;

        const inputData = event.inputBuffer.getChannelData(0);

        calculateVolume(inputData, true);

        const buffer = new ArrayBuffer(inputData.length * 2);
        const view = new DataView(buffer);
        for (let i = 0; i < inputData.length; i++) {
          let s = Math.max(-1, Math.min(1, inputData[i]));
          view.setInt16(i * 2, s < 0 ? s * 0x8000 : s * 0x7FFF, true);
        }

        socketTask.send({
          data: buffer,
          fail: (e) => console.error('发送音频失败', e)
        });
      };

      mediaStreamSource.connect(scriptProcessor);
      scriptProcessor.connect(audioContext.destination);

      isRecording.value = true;
      recordingDuration.value = 0;
      durationTimer = setInterval(() => recordingDuration.value++, 1000);

      console.log('H5 录音已启动');

    } catch (err) {
      console.error('H5 录音启动失败:', err);
      throw err;
    }
  }

  const stopH5Resources = () => {
    if (scriptProcessor) scriptProcessor.disconnect();
    if (mediaStreamSource) mediaStreamSource.disconnect();
    if (audioContext) audioContext.close();
    if (h5Stream) h5Stream.getTracks().forEach(track => track.stop());

    scriptProcessor = null;
    mediaStreamSource = null;
    audioContext = null;
    h5Stream = null;
  }

  const startAppRecording = () => {
    recorderManager = uni.getRecorderManager();

    recorderManager.onFrameRecorded((res) => {
      const {
        frameBuffer
      } = res;

      calculateVolume(frameBuffer, false);

      if (isSocketConnected.value && socketTask) {
        socketTask.send({
          data: frameBuffer
        });
      }
    });

    recorderManager.onStart(() => {
      console.log('APP 录音已开始');
      isRecording.value = true;
      recordingDuration.value = 0;
      durationTimer = setInterval(() => recordingDuration.value++, 1000);
    });

    recorderManager.onError((err) => {
      console.error('APP 录音报错:', err);
      cleanup();
    });

    recorderManager.start(RECORD_CONFIG);
  }
  const stopHardwareResource = () => {
    // Stop APP / mini-program recording
    if (recorderManager) {
      recorderManager.stop();
    }

    // Stop H5 recording
    // #ifdef H5
    if (scriptProcessor) scriptProcessor.disconnect();
    if (mediaStreamSource) mediaStreamSource.disconnect();
    if (audioContext) audioContext.close();
    if (h5Stream) h5Stream.getTracks().forEach(track => track.stop());

    scriptProcessor = null;
    mediaStreamSource = null;
    audioContext = null;
    h5Stream = null;
    // #endif
  }

  /**
   * Stop recording (shared)
   */
  const stopRecording = () => {
    // Stop APP recording
    if (recorderManager) {
      recorderManager.stop();
    }

    // Release H5 recording resources
    // #ifdef H5
    stopH5Resources();
    // #endif

    // Close the socket
    if (socketTask) {
      socketTask.close();
    }

    cleanup();
  }

  const cancelRecording = () => {
    if (!isRecording.value) return;

    console.log('取消录音 - 丢弃结果');

    // 1. Stop hardware recording
    stopHardwareResource();

    // 2. Force-close the socket
    if (socketTask) {
      socketTask.close();
    }

    // 3. Key step: clear the recognized text
    recognizedText.value = '';

    // 4. Clean up resources
    cleanup();
  }

  /**
   * Reset state
   */
  const cleanup = () => {
    clearInterval(durationTimer);
    isRecording.value = false;
    isSocketConnected.value = false;
    socketTask = null;
    recorderManager = null;
    volumeLevel.value = 0;
  }

  /**
   * Compute volume (handles Float32 and Int16/ArrayBuffer)
   */
  const calculateVolume = (data, isFloat32) => {
    let sum = 0;
    let length = 0;

    if (isFloat32) {
      length = data.length;
      for (let i = 0; i < length; i += 10) {
        sum += Math.abs(data[i]);
      }
      volumeLevel.value = Math.min(100, Math.floor((sum / (length / 10)) * 100 * 3));
    } else {
      const int16Data = new Int16Array(data);
      length = int16Data.length;
      for (let i = 0; i < length; i += 10) {
        sum += Math.abs(int16Data[i]);
      }
      const avg = sum / (length / 10);
      volumeLevel.value = Math.min(100, Math.floor((avg / 10000) * 100));
    }
  }

  onUnmounted(() => {
    if (isRecording.value) {
      stopRecording();
    }
  })

  return {
    isRecording,
    isSocketConnected,
    recordingDuration,
    volumeLevel,
    recognizedText,
    startRecording,
    stopRecording,
    cancelRecording
  }
}
@@ -1,158 +0,0 @@
import {
  ref,
  onUnmounted,
  readonly
} from 'vue';

const defaultExtractSpeechText = (text) => text;


export function useTTSPlayer() {
  const synth = window.speechSynthesis;
  const isSpeaking = ref(false);
  const isPaused = ref(false);
  const utteranceRef = ref(null);

  const cleanup = () => {
    isSpeaking.value = false;
    isPaused.value = false;
    utteranceRef.value = null;
  };

  /**
   * @param {string} text - The text to be spoken.
   * @param {object} [options] - Optional settings for the speech.
   * @param {string} [options.lang] - Language (e.g., 'en-US', 'es-ES').
   * @param {number} [options.rate] - Speed (0.1 to 10, default 1).
   * @param {number} [options.pitch] - Pitch (0 to 2, default 1).
   * @param {SpeechSynthesisVoice} [options.voice] - A specific voice object.
   * @param {function(string): string} [options.extractSpeechText] - A function to filter/clean the text before speaking.
   */
  const speak = (text, options = {}) => {
    if (!synth) {
      console.error('SpeechSynthesis API is not supported in this browser.');
      return;
    }

    if (isSpeaking.value) {
      synth.cancel();
    }

    const filteredText = extractSpeechText(text);

    if (!filteredText || typeof filteredText !== 'string' || filteredText.trim() === '') {
      console.warn('Text to speak is empty after filtering.');
      cleanup(); // Ensure state is clean
      return;
    }

    const newUtterance = new SpeechSynthesisUtterance(filteredText); // Use filtered text
    utteranceRef.value = newUtterance;

    newUtterance.lang = 'zh-CN';
    newUtterance.rate = options.rate || 1;
    newUtterance.pitch = options.pitch || 1;
    if (options.voice) {
      newUtterance.voice = options.voice;
    }

    newUtterance.onstart = () => {
      isSpeaking.value = true;
      isPaused.value = false;
    };

    newUtterance.onpause = () => {
      isPaused.value = true;
    };
    newUtterance.onresume = () => {
      isPaused.value = false;
    };
    newUtterance.onend = () => {
      cleanup();
    };
    newUtterance.onerror = (event) => {
      console.error('SpeechSynthesis Error:', event.error);
      cleanup();
    };

    synth.speak(newUtterance);
  };

  const pause = () => {
    if (synth && isSpeaking.value && !isPaused.value) {
      synth.pause();
    }
  };

  const resume = () => {
    if (synth && isPaused.value) {
      synth.resume();
    }
  };

  const cancelAudio = () => {
    if (synth) {
      synth.cancel();
    }
    cleanup();
  };

  onUnmounted(() => {
    cancelAudio();
  });

  return {
    speak,
    pause,
    resume,
    cancelAudio,
    isSpeaking: readonly(isSpeaking),
    isPaused: readonly(isPaused),
  };
}

function extractSpeechText(markdown) {
  const jobRegex = /``` job-json\s*({[\s\S]*?})\s*```/g;
  const jobs = [];
  let match;
  let lastJobEndIndex = 0;
  let firstJobStartIndex = -1;

  // Extract the job JSON blocks and their positions
  while ((match = jobRegex.exec(markdown)) !== null) {
    const jobStr = match[1];
    try {
      const job = JSON.parse(jobStr);
      jobs.push(job);
      if (firstJobStartIndex === -1) {
        firstJobStartIndex = match.index;
      }
      lastJobEndIndex = jobRegex.lastIndex;
    } catch (e) {
      console.warn('JSON 解析失败', e);
    }
  }

  // Extract the intro text (before the first job-json block)
  const guideText = firstJobStartIndex > 0 ?
    markdown.slice(0, firstJobStartIndex).trim() :
    '';

  // Extract the closing text (after the last job-json block)
  const endingText = lastJobEndIndex < markdown.length ?
    markdown.slice(lastJobEndIndex).trim() :
    '';

  // Format the job info into spoken text
  const jobTexts = jobs.map((job, index) => {
    return `第 ${index + 1} 个岗位,岗位名称是:${job.jobTitle},公司是:${job.companyName},薪资:${job.salary},地点:${job.location},学历要求:${job.education},经验要求:${job.experience}。`;
  });

  // Join the final speech content
  const finalTextParts = [];
  if (guideText) finalTextParts.push(guideText);
  finalTextParts.push(...jobTexts);
  if (endingText) finalTextParts.push(endingText);

  return finalTextParts.join('\n');
}
@@ -1,203 +0,0 @@
import {
  ref,
  readonly,
  onUnmounted
} from 'vue';

// Check API compatibility
const SpeechRecognition = window.SpeechRecognition || window.webkitSpeechRecognition;
const isApiSupported = !!SpeechRecognition && !!navigator.mediaDevices && !!window.AudioContext;

/**
 * @param {object} [options]
 * @param {string} [options.lang] - Language code (e.g., 'zh-CN', 'en-US')
 * @returns {object}
 */
export function useAudioRecorder(options = {}) {
  const lang = options.lang || 'zh-CN'; // default to Chinese

  const isRecording = ref(false);
  const recognizedText = ref(''); // full recognized text (including interim results)
  const lastFinalText = ref(''); // last finalized segment
  const volumeLevel = ref(0); // volume (0-100)
  const audioDataForDisplay = ref(new Uint8Array()); // waveform data

  let recognition = null;
  let audioContext = null;
  let analyser = null;
  let mediaStreamSource = null;
  let mediaStream = null;
  let dataArray = null; // used for volume and waveform
  let animationFrameId = null;

  if (!isApiSupported) {
    console.warn(
      '此浏览器不支持Web语音API或Web音频API。钩子无法正常工作。'
    );
    return {
      isRecording: readonly(isRecording),
      startRecording: () => console.error('Audio recording not supported.'),
      stopRecording: () => {},
      cancelRecording: () => {},
      audioDataForDisplay: readonly(audioDataForDisplay),
      volumeLevel: readonly(volumeLevel),
      recognizedText: readonly(recognizedText),
      lastFinalText: readonly(lastFinalText),
    };
  }

  const setupRecognition = () => {
    recognition = new SpeechRecognition();
    recognition.lang = lang;
    recognition.continuous = true; // continuous recognition
    recognition.interimResults = true; // return interim results

    recognition.onstart = () => {
      isRecording.value = true;
    };

    recognition.onend = () => {
      isRecording.value = false;
      stopAudioAnalysis(); // when recognition stops, stop audio analysis too
    };

    recognition.onerror = (event) => {
      console.error('SpeechRecognition Error:', event.error);
      isRecording.value = false;
      stopAudioAnalysis();
    };

    recognition.onresult = (event) => {
      let interim = '';
      let final = '';

      for (let i = 0; i < event.results.length; i++) {
        const transcript = event.results[i][0].transcript;
        if (event.results[i].isFinal) {
          final += transcript;
          lastFinalText.value = transcript; // store the last finalized segment
        } else {
          interim += transcript;
        }
      }
      recognizedText.value = final + interim; // combine into the full text
    };
  };

  const startAudioAnalysis = async () => {
    try {
      mediaStream = await navigator.mediaDevices.getUserMedia({
        audio: true
      });
      audioContext = new AudioContext();
      analyser = audioContext.createAnalyser();
      mediaStreamSource = audioContext.createMediaStreamSource(mediaStream);

      // Configure the analyser
      analyser.fftSize = 512; // must be a power of 2
      const bufferLength = analyser.frequencyBinCount;
      dataArray = new Uint8Array(bufferLength); // for the waveform

      // Connect the nodes
      mediaStreamSource.connect(analyser);

      // Start the analysis loop
      updateAudioData();
    } catch (err) {
      console.error('Failed to get media stream or setup AudioContext:', err);
      if (err.name === 'NotAllowedError' || err.name === 'PermissionDeniedError') {
        alert('麦克风权限被拒绝。请在浏览器设置中允许访问麦克风。');
      }
    }
  };

  const updateAudioData = () => {
    if (!isRecording.value) return; // exit the loop once stopped

    // Get time-domain data (waveform)
    analyser.getByteTimeDomainData(dataArray);
    audioDataForDisplay.value = new Uint8Array(dataArray); // copy the array to trigger reactivity

    // Compute volume (RMS)
    let sumSquares = 0.0;
    for (const amplitude of dataArray) {
      const normalized = (amplitude / 128.0) - 1.0; // normalize to -1.0..1.0
      sumSquares += normalized * normalized;
    }
    const rms = Math.sqrt(sumSquares / dataArray.length);
    volumeLevel.value = Math.min(100, Math.floor(rms * 250)); // scale RMS into the 0-100 range

    animationFrameId = requestAnimationFrame(updateAudioData);
  };

  const stopAudioAnalysis = () => {
    if (animationFrameId) {
      cancelAnimationFrame(animationFrameId);
      animationFrameId = null;
    }
    // Stop microphone tracks
    mediaStream?.getTracks().forEach((track) => track.stop());
    // Close the AudioContext
    audioContext?.close().catch((e) => console.error('Error closing AudioContext', e));

    mediaStream = null;
    audioContext = null;
    analyser = null;
    mediaStreamSource = null;
    volumeLevel.value = 0;
    audioDataForDisplay.value = new Uint8Array();
  };

  const startRecording = async () => {
    if (isRecording.value) return;

    // Reset state
    recognizedText.value = '';
    lastFinalText.value = '';

    try {
      // Audio analysis must start first so microphone permission is granted
      await startAudioAnalysis();

      // If audio started successfully (mediaStream exists), start speech recognition
      if (mediaStream) {
        setupRecognition();
        recognition.start();
      }
    } catch (error) {
      console.error("Error starting recording:", error);
    }
  };

  const stopRecording = () => {
    if (!isRecording.value || !recognition) return;
    recognition.stop(); // triggers onend, which stops audio analysis automatically
  };

  const cancelRecording = () => {
    if (!recognition) return;
    isRecording.value = false; // set the state immediately
    recognition.abort(); // also triggers onend
    recognizedText.value = '';
    lastFinalText.value = '';
  };

  onUnmounted(() => {
    if (recognition) {
      recognition.abort();
    }
    stopAudioAnalysis();
  });

  return {
    isRecording: readonly(isRecording),
    startRecording,
    stopRecording,
    cancelRecording,
    audioDataForDisplay: readonly(audioDataForDisplay),
    volumeLevel: readonly(volumeLevel),
    recognizedText: readonly(recognizedText),
    lastFinalText: readonly(lastFinalText),
    isApiSupported, // export the support flag
  };
}
@@ -1,217 +1,205 @@
import {
ref,
onUnmounted,
onBeforeUnmount,
onMounted
onUnmounted
} from 'vue'
import {
onHide,
onUnload
} from '@dcloudio/uni-app'
import WavDecoder from '@/lib/wav-decoder@1.3.0.js'
import config from '@/config'
import PiperTTS from './piper-bundle.js'

export function useTTSPlayer() {
const isSpeaking = ref(false)
const isPaused = ref(false)
const isComplete = ref(false)
// UI state
const isSpeaking = ref(false) // whether there is an active session (playing, paused, or loading)
const isPaused = ref(false) // whether playback is paused
const isLoading = ref(false) // whether loading/connecting

const audioContext = new (window.AudioContext || window.webkitAudioContext)()
let playTime = audioContext.currentTime
let sourceNodes = []
let socket = null
let sampleRate = 16000
let numChannels = 1
let isHeaderDecoded = false
let pendingText = null
// Singleton Piper instance
let piper = null

let currentPlayId = 0
let activePlayId = 0
/**
 * Get or create the SDK instance
 */
const getPiperInstance = () => {
if (!piper) {
let baseUrl = config.speechSynthesis2 || ''
baseUrl = baseUrl.replace(/\/$/, '')

const speak = (text) => {
currentPlayId++
const myPlayId = currentPlayId
reset()
pendingText = text
activePlayId = myPlayId
}

const pause = () => {
if (audioContext.state === 'running') {
audioContext.suspend()
isPaused.value = true
isSpeaking.value = false
}
}

const resume = () => {
if (audioContext.state === 'suspended') {
audioContext.resume()
isPaused.value = false
isSpeaking.value = true
}
}

const cancelAudio = () => {
stop()
}

const stop = () => {
isSpeaking.value = false
isPaused.value = false
isComplete.value = false
playTime = audioContext.currentTime

sourceNodes.forEach(node => {
try {
node.stop()
node.disconnect()
} catch (e) {}
})
sourceNodes = []

if (socket) {
socket.close()
socket = null
}

isHeaderDecoded = false
pendingText = null
}

const reset = () => {
stop()
isSpeaking.value = false
isPaused.value = false
isComplete.value = false
playTime = audioContext.currentTime
initWebSocket()
}

const initWebSocket = () => {
const thisPlayId = currentPlayId
socket = new WebSocket(config.speechSynthesis)
socket.binaryType = 'arraybuffer'

socket.onopen = () => {
if (pendingText && thisPlayId === activePlayId) {
const seepdText = extractSpeechText(pendingText)
console.log(seepdText)
socket.send(seepdText)
pendingText = null
}
}

socket.onmessage = async (e) => {
if (thisPlayId !== activePlayId) return // ignore messages from a stale playback

if (typeof e.data === 'string') {
try {
const msg = JSON.parse(e.data)
if (msg.status === 'complete') {
isComplete.value = true
setTimeout(() => {
if (thisPlayId === activePlayId) {
isSpeaking.value = false
}
}, (playTime - audioContext.currentTime) * 1000)
piper = new PiperTTS({
baseUrl: baseUrl,
sampleRate: 16000,
onStatus: (msg, type) => {
if (type === 'error') {
console.error('[TTS Error]', msg)
resetState()
}
} catch (e) {
console.log('[TTSPlayer] 文本消息:', e.data)
},
onStart: () => {
isLoading.value = false
isSpeaking.value = true
isPaused.value = false
},
onEnd: () => {
// Only a natural end (not a user pause) should reset all state;
// an interruption caused by a manual pause must not be treated as the natural end of playback
isSpeaking.value = false
isLoading.value = false
isPaused.value = false
}
} else if (e.data instanceof ArrayBuffer) {
if (!isHeaderDecoded) {
try {
const decoded = await WavDecoder.decode(e.data)
sampleRate = decoded.sampleRate
numChannels = decoded.channelData.length
decoded.channelData.forEach((channel, i) => {
const audioBuffer = audioContext.createBuffer(1, channel.length, sampleRate)
audioBuffer.copyToChannel(channel, 0)
playBuffer(audioBuffer)
})
isHeaderDecoded = true
} catch (err) {
console.error('WAV 解码失败:', err)
}
} else {
const pcm = new Int16Array(e.data)
const audioBuffer = pcmToAudioBuffer(pcm, sampleRate, numChannels)
playBuffer(audioBuffer)
}
}
})
}
return piper
}

const pcmToAudioBuffer = (pcm, sampleRate, numChannels) => {
const length = pcm.length / numChannels
const audioBuffer = audioContext.createBuffer(numChannels, length, sampleRate)
for (let ch = 0; ch < numChannels; ch++) {
const channelData = audioBuffer.getChannelData(ch)
for (let i = 0; i < length; i++) {
const sample = pcm[i * numChannels + ch]
channelData[i] = sample / 32768
}
}
return audioBuffer
}
/**
 * Core speak method
 */
const speak = async (text) => {
if (!text) return

const playBuffer = (audioBuffer) => {
if (!isSpeaking.value) {
playTime = audioContext.currentTime
}
const source = audioContext.createBufferSource()
source.buffer = audioBuffer
source.connect(audioContext.destination)
source.start(playTime)
sourceNodes.push(source)
playTime += audioBuffer.duration
const processedText = extractSpeechText(text)
if (!processedText) return

const instance = getPiperInstance()

// Reset state
isLoading.value = true
isPaused.value = false
isSpeaking.value = true

try {
// Call speak directly; the SDK handles init and stop internally
await instance.speak(processedText, {
speakerId: 0,
noiseScale: 0.667,
lengthScale: 1.0
})
} catch (e) {
console.error('TTS Speak Error:', e)
resetState()
}
}

onUnmounted(() => {
stop()
})
/**
 * Pause
 */
const pause = async () => {
// 1. Only pause while actually playing and not already paused
if (!isSpeaking.value || isPaused.value) return

// On page refresh/close
onMounted(() => {
if (typeof window !== 'undefined') {
window.addEventListener('beforeunload', cancelAudio)
// 2. Check that the player instance exists
if (piper && piper.player) {
try {
// Suspend the audio
await piper.player.pause()
// 3. Update the UI on success
isPaused.value = true
} catch (e) {
console.error("Pause failed:", e)
// Even on an error, if it is not fatal, the UI could still be force-updated
// isPaused.value = true
}
}
})
}

onBeforeUnmount(() => {
cancelAudio()
if (typeof window !== 'undefined') {
window.removeEventListener('beforeunload', cancelAudio)
/**
 * Resume (continue playback)
 */
const resume = async () => {
// 1. Only resume while paused
if (!isPaused.value) return

if (piper && piper.player) {
try {
await piper.player.continue()
// 2. Update the UI on success
isPaused.value = false
isSpeaking.value = true
} catch (e) {
console.error("Resume failed:", e)
}
}
})
}

onHide(cancelAudio)
onUnload(cancelAudio)
/**
 * Toggle play/pause (convenient for button binding)
 */
const togglePlay = () => {
if (isPaused.value) {
resume()
} else {
pause()
}
}

initWebSocket()
/**
 * Stop (interrupt)
 */
const stop = () => {
if (piper) {
piper.stop()
}
resetState()
}

/**
 * Destroy completely
 */
const destroy = () => {
if (piper) {
piper.stop()
piper = null
}
resetState()
}

const resetState = () => {
isSpeaking.value = false
isPaused.value = false
isLoading.value = false
}

// === Lifecycle management ===

onUnmounted(destroy)

if (typeof onHide === 'function') {
onHide(() => {
togglePlay()
// stop()
})
}

if (typeof onUnload === 'function') {
onUnload(destroy)
}

return {
speak,
pause,
resume,
cancelAudio,
togglePlay, // new: single-button toggle
stop,
cancelAudio: stop,
isSpeaking,
isPaused,
|
||||
isComplete
|
||||
isLoading
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* 文本提取工具函数 (保持原样)
|
||||
*/
|
||||
function extractSpeechText(markdown) {
|
||||
if (!markdown || markdown.indexOf('job-json') === -1) {
|
||||
return markdown;
|
||||
}
|
||||
const jobRegex = /``` job-json\s*({[\s\S]*?})\s*```/g;
|
||||
const jobs = [];
|
||||
let match;
|
||||
let lastJobEndIndex = 0;
|
||||
let firstJobStartIndex = -1;
|
||||
|
||||
// 提取岗位 json 数据及前后位置
|
||||
while ((match = jobRegex.exec(markdown)) !== null) {
|
||||
const jobStr = match[1];
|
||||
try {
|
||||
@@ -225,27 +213,16 @@ function extractSpeechText(markdown) {
|
||||
console.warn('JSON 解析失败', e);
|
||||
}
|
||||
}
|
||||
|
||||
// 提取引导语(第一个 job-json 之前的文字)
|
||||
const guideText = firstJobStartIndex > 0 ?
|
||||
markdown.slice(0, firstJobStartIndex).trim() :
|
||||
'';
|
||||
|
||||
// 提取结束语(最后一个 job-json 之后的文字)
|
||||
markdown.slice(0, firstJobStartIndex).trim() : '';
|
||||
const endingText = lastJobEndIndex < markdown.length ?
|
||||
markdown.slice(lastJobEndIndex).trim() :
|
||||
'';
|
||||
|
||||
// 岗位信息格式化为语音文本
|
||||
markdown.slice(lastJobEndIndex).trim() : '';
|
||||
const jobTexts = jobs.map((job, index) => {
|
||||
return `第 ${index + 1} 个岗位,岗位名称是:${job.jobTitle},公司是:${job.companyName},薪资:${job.salary},地点:${job.location},学历要求:${job.education},经验要求:${job.experience}。`;
|
||||
});
|
||||
|
||||
// 拼接总语音内容
|
||||
const finalTextParts = [];
|
||||
if (guideText) finalTextParts.push(guideText);
|
||||
finalTextParts.push(...jobTexts);
|
||||
if (endingText) finalTextParts.push(endingText);
|
||||
|
||||
return finalTextParts.join('\n');
|
||||
}
|
||||
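For reference, a minimal usage sketch of extractSpeechText (the reply text and job fields below are invented for illustration; only the function itself comes from this diff):

// Hypothetical assistant reply containing one job-json block.
const reply = [
  '为您找到以下岗位:',
  '``` job-json',
  '{"jobTitle":"前端工程师","companyName":"示例公司","salary":"10-15K","location":"青岛","education":"本科","experience":"3年"}',
  '```',
  '祝您求职顺利。'
].join('\n');

// Yields the guide text, then "第 1 个岗位,岗位名称是:前端工程师,…", then the ending text,
// joined with newlines and ready to hand to the synthesis call.
console.log(extractSpeechText(reply));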
@@ -1,216 +0,0 @@
import {
ref,
onUnmounted,
onMounted,
watch
} from 'vue'
import {
onHide,
onUnload
} from '@dcloudio/uni-app'
import config from '@/config'
// 请确保 piper-sdk.js 已经正确 export class PiperTTS
import {
PiperTTS
} from './piper-sdk.js'

export function useTTSPlayer() {
// UI 状态
const isSpeaking = ref(false)
const isPaused = ref(false)
const isLoading = ref(false)

// SDK 实例
let piper = null

/**
* 初始化 SDK 实例
* 每次 stop 后 piper 会被置空,这里会重新创建
*/
const initPiper = () => {
if (piper) return

let baseUrl = config.speechSynthesis2 || ''
baseUrl = baseUrl.replace(/\/$/, '')

piper = new PiperTTS({
baseUrl: baseUrl,
onStatus: (msg, type) => {
if (type === 'error') {
console.error('[TTS Error]', msg)
// 出错时不重置状态,交给用户手动处理或结束事件处理
resetState()
}
},
onStart: () => {
isLoading.value = false
isSpeaking.value = true
isPaused.value = false
},
onEnd: () => {
resetState()
}
})
}

/**
* 核心朗读方法
*/
const speak = async (text) => {
if (!text) return

const processedText = extractSpeechText(text)
if (!processedText) return

// 1. 【关键修改】先彻底停止并销毁旧实例
// 这会断开 socket 并且 close AudioContext,确保上一个声音立即消失
await stop()

// 2. 初始化新实例 (因为 stop() 把 piper 设为了 null)
initPiper()

// 3. 更新 UI 为加载中
isLoading.value = true
isPaused.value = false
isSpeaking.value = true // 预先设为 true,防止按钮闪烁

try {
// 4. 激活音频引擎 (移动端防静音关键)
await piper.init()

// 5. 发送请求
piper.speak(processedText, {
speakerId: 0,
noiseScale: 0.667,
lengthScale: 1.0
})
} catch (e) {
console.error('TTS Speak Error:', e)
resetState()
}
}

/**
* 暂停
*/
const pause = async () => {
if (piper && piper.audioCtx && piper.audioCtx.state === 'running') {
await piper.audioCtx.suspend()
isPaused.value = true
}
}

/**
* 恢复
*/
const resume = async () => {
if (piper && piper.audioCtx && piper.audioCtx.state === 'suspended') {
await piper.audioCtx.resume()
isPaused.value = false
isSpeaking.value = true
}
}

/**
* 停止并重置 (核打击模式)
*/
const stop = async () => {
if (piper) {
// 1. 断开 WebSocket
piper.stop()

// 2. 【关键】关闭 AudioContext
// Web Audio API 中,已经 schedule 的 buffer 很难单独取消
// 最直接的方法是关闭整个 Context
if (piper.audioCtx && piper.audioCtx.state !== 'closed') {
try {
await piper.audioCtx.close()
} catch (e) {
console.warn('AudioContext close failed', e)
}
}

// 3. 销毁实例引用
piper = null
}
resetState()
}

// UI 状态重置
const resetState = () => {
isSpeaking.value = false
isPaused.value = false
isLoading.value = false
}

// === 生命周期 ===
onMounted(() => {
// 预初始化可以不做,等到点击时再做,避免空闲占用 AudioContext 资源
// initPiper()
})

onUnmounted(() => {
stop()
})

// Uniapp 生命周期
if (typeof onHide === 'function') onHide(stop)
if (typeof onUnload === 'function') onUnload(stop)

return {
speak,
pause,
resume,
stop,
cancelAudio: stop,
isSpeaking,
isPaused,
isLoading
}
}

/**
* 提取文本逻辑 (保持不变)
*/
function extractSpeechText(markdown) {
if (!markdown || markdown.indexOf('job-json') === -1) {
return markdown;
}

const jobRegex = /``` job-json\s*({[\s\S]*?})\s*```/g;
const jobs = [];
let match;
let lastJobEndIndex = 0;
let firstJobStartIndex = -1;

while ((match = jobRegex.exec(markdown)) !== null) {
const jobStr = match[1];
try {
const job = JSON.parse(jobStr);
jobs.push(job);
if (firstJobStartIndex === -1) {
firstJobStartIndex = match.index;
}
lastJobEndIndex = jobRegex.lastIndex;
} catch (e) {
console.warn('JSON 解析失败', e);
}
}

const guideText = firstJobStartIndex > 0 ?
markdown.slice(0, firstJobStartIndex).trim() : '';

const endingText = lastJobEndIndex < markdown.length ?
markdown.slice(lastJobEndIndex).trim() : '';

const jobTexts = jobs.map((job, index) => {
return `第 ${index + 1} 个岗位,岗位名称是:${job.jobTitle},公司是:${job.companyName},薪资:${job.salary},地点:${job.location},学历要求:${job.education},经验要求:${job.experience}。`;
});

const finalTextParts = [];
if (guideText) finalTextParts.push(guideText);
finalTextParts.push(...jobTexts);
if (endingText) finalTextParts.push(endingText);

return finalTextParts.join('\n');
}
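For context, a sketch of how a page would consume this composable (the handler name is illustrative; speak, pause, resume, stop and the state refs are exactly what both the removed hook above and its replacement return):

// <script setup> of a chat page (illustrative wiring).
import { useTTSPlayer } from '@/hook/useTTSPlayer.js'

const { speak, pause, resume, stop, isSpeaking, isPaused, isLoading } = useTTSPlayer()

// Read a newly received assistant message aloud; extractSpeechText() runs inside speak().
function onAssistantMessage(markdown) {
  speak(markdown)
}

// pause()/resume() drive a play-pause button, and stop() cancels playback outright.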
@@ -1,6 +1,6 @@
{
"name" : "qingdao-employment-service",
"appid" : "__UNI__2496162",
"appid" : "__UNI__C939371",
"description" : "招聘",
"versionName" : "1.0.0",
"versionCode" : "100",
@@ -82,6 +82,7 @@ const { userInfo } = storeToRefs(useUserStore());
const { getUserResume } = useUserStore();
const { dictLabel, oneDictData } = useDictStore();
const openSelectPopup = inject('openSelectPopup');
import { FileValidator } from '@/utils/fileValidator.js'; //文件校验

const percent = ref('0%');
const state = reactive({
@@ -278,15 +279,26 @@ function selectAvatar() {
sizeType: ['original', 'compressed'],
sourceType: ['album', 'camera'],
count: 1,
success: ({ tempFilePaths, tempFiles }) => {
$api.uploadFile(tempFilePaths[0], true)
.then((res) => {
res = JSON.parse(res);
if (res.msg) fromValue.avatar = res.msg;
})
.catch((err) => {
$api.msg('上传失败');
});
success: async (res) => {
const tempFilePaths = res.tempFilePaths;
const file = res.tempFiles[0];

const imageValidator = new FileValidator();

try {
await imageValidator.validate(file);

$api.uploadFile(tempFilePaths[0], true)
.then((res) => {
res = JSON.parse(res);
if (res.msg) fromValue.avatar = res.msg;
})
.catch((err) => {
$api.msg('上传失败');
});
} catch (error) {
$api.msg(error);
}
},
fail: (error) => {},
});
@@ -392,4 +404,4 @@ function selectAvatar() {
color: #FFFFFF;
text-align: center;
line-height: 90rpx
</style>
</style>
@@ -210,7 +210,6 @@
<view class="uploadfiles-list">
<view
class="file-uploadsend"
:class="{ 'file-border': isImage(file.type) }"
v-for="(file, index) in filesList"
:key="index"
>
@@ -219,7 +218,7 @@
@click="preViewImage(file)"
v-if="isImage(file.type)"
:src="file.url"
mode="scaleToFill"
mode="heightFix"
></image>
<view class="file-doc" @click="jumpUrl(file)" v-else>
<FileIcon class="doc-icon" :type="file.type"></FileIcon>
@@ -273,10 +272,8 @@ import FileText from './fileText.vue';
import useScreenStore from '@/stores/useScreenStore'
const screenStore = useScreenStore();
// 系统功能hook和阿里云hook
import { useAudioRecorder } from '@/hook/useRealtimeRecorder2.js';
// import { useAudioRecorder } from '@/hook/useSystemSpeechReader.js';
import { useTTSPlayer } from '@/hook/useTTSPlayer2.js';
// import { useTTSPlayer } from '@/hook/useSystemPlayer.js';
import { useAudioRecorder } from '@/hook/useRealtimeRecorder.js';
import { useTTSPlayer } from '@/hook/useTTSPlayer.js';
// 全局
const { $api, navTo, throttle } = inject('globalFunction');
const emit = defineEmits(['onConfirm']);
@@ -284,6 +281,8 @@ const { messages, isTyping, textInput, chatSessionID } = storeToRefs(useChatGrou
import successIcon from '@/static/icon/success.png';
import useUserStore from '@/stores/useUserStore';
const { isMachineEnv } = storeToRefs(useUserStore());

import { FileValidator } from '@/utils/fileValidator.js'; //文件校验
// hook
// 语音识别
const {
@@ -539,22 +538,29 @@ function uploadCamera(type = 'camera') {
count: 1, //默认9
sizeType: ['original', 'compressed'], //可以指定是原图还是压缩图,默认二者都有
sourceType: [type], //从相册选择
success: function (res) {
success: async (res)=> {
const tempFilePaths = res.tempFilePaths;
const file = res.tempFiles[0];
// 继续上传
$api.uploadFile(tempFilePaths[0], true).then((resData) => {
resData = JSON.parse(resData);
console.log(file.type,'++')
if (isImage(file.type)) {
filesList.value.push({
url: resData.msg,
type: file.type,
name: file.name,
});
textInput.value = state.uploadFileTips;
}
});

const imageValidator = new FileValidator()
try {
await imageValidator.validate(file)

$api.uploadFile(tempFilePaths[0], true).then((resData) => {
resData = JSON.parse(resData);
console.log(file.type,'++')
if (isImage(file.type)) {
filesList.value.push({
url: resData.msg,
type: file.type,
name: file.name,
});
textInput.value = state.uploadFileTips;
}
});
} catch (error) {
$api.msg(error)
}
},
});
}
@@ -563,25 +569,30 @@ function getUploadFile(type = 'camera') {
if (VerifyNumberFiles()) return;
uni.chooseFile({
count: 1,
success: (res) => {
success: async(res) => {
const tempFilePaths = res.tempFilePaths;
const file = res.tempFiles[0];
const allowedTypes = config.allowedFileTypes || [];
const size = $api.formatFileSize(file.size);
if (!allowedTypes.includes(file.type)) {
return $api.msg('仅支持 txt md word pdf ppt csv excel 格式类型');
}
// 继续上传
$api.uploadFile(tempFilePaths[0], true).then((resData) => {
resData = JSON.parse(resData);
filesList.value.push({
url: resData.msg,
type: file.type,
name: file.name,
size: size,

const imageValidator = new FileValidator({allowedExtensions:config.allowedFileTypes})

try{
await imageValidator.validate(file)

$api.uploadFile(tempFilePaths[0], true).then((resData) => {
resData = JSON.parse(resData);
filesList.value.push({
url: resData.msg,
type: file.type,
name: file.name,
size: size,
});
textInput.value = state.uploadFileTips;
});
textInput.value = state.uploadFileTips;
});
}catch(error){
$api.msg(error)
}
},
});
}
@@ -1107,22 +1118,23 @@ image-margin-top = 40rpx
padding: 16rpx 20rpx 18rpx 20rpx
height: calc(100% - 40rpx)
.doc-icon
width: 60rpx
height: 76rpx
margin-right: 20rpx
width: 60rpx;
height: 76rpx;
margin-right: 20rpx;
.doc-con
flex: 1
width: 0
max-width:320rpx;
overflow :hidden;
padding-right:40rpx;
box-sizing:border-box;
.file-uploadsend
margin: 10rpx 18rpx 0 0;
height: 100%
font-size: 24rpx
position: relative
min-width: 460rpx;
height: 160rpx;
border-radius: 12rpx 12rpx 12rpx 12rpx;
border: 2rpx solid #E2E2E2;
overflow: hidden
flex-shrink: 0;
.file-del
position: absolute
right: 25rpx
@@ -1155,8 +1167,8 @@ image-margin-top = 40rpx
color: #7B7B7B;
max-width: 100%
.file-iconImg
// height: 100%
width: 100%
height: 100%
// width: 100%
.filerow
display: flex
align-items: center
@@ -1166,8 +1178,6 @@ image-margin-top = 40rpx
height: 20rpx
width: 2rpx
background: rgba(226, 226, 226, .9)
.file-border
width: 160rpx !important;

@keyframes ai-circle {
0% {
@@ -195,7 +195,7 @@ function makeQrcode() {
} else {
pathPrefix = '';
}
const htmlPath = `${protocol}//${host}${pathPrefix}/static/upload.html?sessionId=${uuid.value}&uploadApi=${config.baseUrl}/app/kiosk/upload`;
const htmlPath = `${protocol}//${host}${pathPrefix}/static/upload.html?sessionId=${uuid.value}&uploadApi=${config.baseUrl}/app/kiosk/upload&fileCount=${props.leaveFileCount}`;

// const htmlPath = `${window.location.host}/static/upload.html?sessionId=${uuid.value}&uploadApi=${
// config.baseUrl + '/app/kiosk/upload'
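For reference, with illustrative values (sessionId 'abc123', leaveFileCount 3, and placeholder hosts; none of these come from the real config) the template above produces a link of this shape:

// https://kiosk.example.com/h5/static/upload.html?sessionId=abc123&uploadApi=https://api.example.com/api/app/kiosk/upload&fileCount=3
// The new fileCount query parameter is presumably read by upload.html to cap how many files the phone may send back.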
@@ -11,7 +11,12 @@
<image class="bg-text" mode="widthFix" src="@/static/icon/index-text-bg.png"></image>
<view class="search-inner">
<view class="inner-left">
<image class="bg-text2" mode="widthFix" src="@/static/icon/index-text-bg2.png"></image>
<image
class="bg-text2"
mode="widthFix"
@click="reloadBrowser()"
src="@/static/icon/index-text-bg2.png"
></image>
<view class="search-input button-click" @click="navTo('/pages/search/search')">
<image class="icon" src="@/static/icon/index-search.png"></image>
<text class="inpute">请告诉我想找什么工作</text>
@@ -20,7 +25,7 @@
<image class="bg-robot button-click" mode="widthFix" src="@/static/icon/index-robot.png"></image>
</view>
</view>
<view v-if="!isMachineEnv" class="ai-card-out" >
<view v-if="!isMachineEnv" class="ai-card-out">
<view class="ai-card">
<image class="ai-card-bg" src="@/static/icon/ai-card-bg.png" />
<view class="ai-card-inner">
@@ -56,7 +61,7 @@
</view>
</view>
</view>
<view v-if="hasLogin" :class="{'match-move-top':isMachineEnv}" class="match-card-out">
<view v-if="hasLogin" :class="{ 'match-move-top': isMachineEnv }" class="match-card-out">
<view class="match-card">
<image class="match-card-bg" src="@/static/icon/match-card-bg.png" />
<view class="title">简历匹配职位</view>
@@ -65,7 +70,7 @@
</view>
</view>
</view>
<view :class="{'cards-move-top':isMachineEnv && !hasLogin}" class="cards">
<view :class="{ 'cards-move-top': isMachineEnv && !hasLogin }" class="cards">
<view class="card card1 press-button" @click="navTo('/pages/nearby/nearby')">
<view class="card-title">附近工作</view>
<view class="card-text">好岗职等你来</view>
@@ -254,11 +259,11 @@
import { reactive, inject, watch, ref, onMounted, watchEffect, nextTick, getCurrentInstance } from 'vue';
import img from '@/static/icon/filter.png';
import dictLabel from '@/components/dict-Label/dict-Label.vue';
const { $api, navTo, vacanciesTo, formatTotal, throttle } = inject('globalFunction');
const { $api, navTo, vacanciesTo, formatTotal, throttle, reloadBrowser } = inject('globalFunction');
import { onLoad, onShow } from '@dcloudio/uni-app';
import { storeToRefs } from 'pinia';
import useUserStore from '@/stores/useUserStore';
const { userInfo, hasLogin ,isMachineEnv} = storeToRefs(useUserStore());
const { userInfo, hasLogin, isMachineEnv } = storeToRefs(useUserStore());
import useDictStore from '@/stores/useDictStore';
const { getTransformChildren, oneDictData } = useDictStore();
import useLocationStore from '@/stores/useLocationStore';
@@ -271,7 +276,6 @@ const recommedIndexDb = useRecommedIndexedDBStore();
import config from '@/config';
import AIMatch from './AIMatch.vue';


const { proxy } = getCurrentInstance();

const maskFirstEntry = ref(true);
@@ -363,7 +367,7 @@ onMounted(() => {
let firstEntry = uni.getStorageSync('firstEntry') === false ? false : true; // 默认未读
maskFirstEntry.value = firstEntry;
getMatchTags();
console.log(isMachineEnv.value,'+++++++++')
// console.log(isMachineEnv.value, '+++++++++');
});

async function getMatchTags() {
@@ -466,7 +470,6 @@ const { columnCount, columnSpace } = useColumnCount(() => {
getJobRecommend('refresh');
nextTick(() => {
waterfallsFlowRef.value?.refresh?.();
useLocationStore().getLocation();
});
});

@@ -129,7 +129,6 @@ const { columnCount, columnSpace } = useColumnCount(() => {
pageSize.value = 10 * (columnCount.value - 1);
nextTick(() => {
waterfallsFlowRef.value?.refresh?.();
useLocationStore().getLocation();
});
});
@@ -2,13 +2,14 @@
<scroll-view :scroll-y="true" class="nearby-scroll" @scrolltolower="scrollBottom">
<view class="nearby-map" @touchmove.stop.prevent>
<map
style="width: 100%; height: 410rpx"
style="width: 100%; height: 690rpx"
:latitude="latitudeVal"
:longitude="longitudeVal"
:markers="mapCovers"
:circles="mapCircles"
:controls="mapControls"
@controltap="handleControl"
:scale="mapScale"
></map>
<view class="nearby-select">
<view class="select-view" @click="changeRangeShow">
@@ -106,16 +107,18 @@ const tMap = ref();
const progress = ref();
const mapCovers = ref([]);
const mapCircles = ref([]);
const mapScale = ref(14.5)
const mapControls = ref([
{
id: 1,
position: {
// 控件位置
left: customSystem.systemInfo.screenWidth - 48 - 14,
top: 320,
width: 48,
height: 48,
left: customSystem.systemInfo.screenWidth - uni.upx2px(75 + 30),
top: uni.upx2px(655 - 75 - 30),
width: uni.upx2px(75),
height: uni.upx2px(75),
},
width:100,
iconPath: LocationPng, // 控件图标
},
]);
@@ -148,6 +151,8 @@ function changeRangeShow() {

function changeRadius(item) {
console.log(item);
if(item > 1) mapScale.value = 14.5 - item * 0.3
else mapScale.value = 14.5
pageState.search.radius = item;
rangeShow.value = false;
progressChange(item);
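The scale expression added above zooms the map out as the selected search radius grows; a few sample values, worked out from 14.5 - item * 0.3 (item being the chosen radius option, units as used by this page):

// item = 1  -> mapScale = 14.5 (the item > 1 branch is not taken)
// item = 3  -> 14.5 - 0.9 = 13.6
// item = 5  -> 14.5 - 1.5 = 13.0
// item = 10 -> 14.5 - 3.0 = 11.5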
@@ -221,27 +226,23 @@ onMounted(() => {
});

function getInit() {
useLocationStore()
.getLocation()
.then((res) => {
mapCovers.value = [
{
latitude: res.latitude,
longitude: res.longitude,
iconPath: point2,
},
];
mapCircles.value = [
{
latitude: res.latitude,
longitude: res.longitude,
radius: 1000,
fillColor: '#1c52fa25',
color: '#256BFA',
},
];
getJobList('refresh');
});
mapCovers.value = [
{
latitude: latitudeVal.value,
longitude: longitudeVal.value,
iconPath: point2,
},
];
mapCircles.value = [
{
latitude: latitudeVal.value,
longitude:longitudeVal.value,
radius: 1000,
fillColor: '#1c52fa25',
color: '#256BFA',
},
];
getJobList('refresh');
}

function progressChange(value) {
@@ -363,7 +364,7 @@ defineExpose({ loadData, handleFilterConfirm });
height: 100%;
background: #f4f4f4;
.nearby-map
height: 400rpx;
height: 655rpx;
background: #e8e8e8;
overflow: hidden
.nearby-list

@@ -151,7 +151,6 @@ const { columnCount, columnSpace } = useColumnCount(() => {
pageSize.value = 10 * (columnCount.value - 1);
nextTick(() => {
waterfallsFlowRef.value?.refresh?.();
useLocationStore().getLocation();
});
});

170
static/js/fileValidator.js
Normal file
@@ -0,0 +1,170 @@
const KNOWN_SIGNATURES = {
png: '89504E470D0A1A0A',
jpg: 'FFD8FF',
jpeg: 'FFD8FF',
gif: '47494638',
webp: '52494646',
docx: '504B0304',
xlsx: '504B0304',
pptx: '504B0304',
doc: 'D0CF11E0',
xls: 'D0CF11E0',
ppt: 'D0CF11E0',
pdf: '25504446',
txt: 'TYPE_TEXT',
csv: 'TYPE_TEXT',
md: 'TYPE_TEXT',
json: 'TYPE_TEXT',
};
export class FileValidator {
version = '1.0.0';
signs = Object.keys(KNOWN_SIGNATURES);
constructor(options = {}) {
this.maxSizeMB = options.maxSizeMB || 10;
if (options.allowedExtensions && Array.isArray(options.allowedExtensions)) {
this.allowedConfig = {};
options.allowedExtensions.forEach((ext) => {
const key = ext.toLowerCase();
if (KNOWN_SIGNATURES[key]) {
this.allowedConfig[key] = KNOWN_SIGNATURES[key];
} else {
console.warn(`[FileValidator] 未知的文件类型: .${key},已忽略`);
}
});
} else {
this.allowedConfig = {
...KNOWN_SIGNATURES,
};
}
}
_isValidUTF8(buffer) {
try {
const decoder = new TextDecoder('utf-8', {
fatal: true,
});
decoder.decode(buffer);
return true;
} catch (e) {
return false;
}
}
_bufferToHex(buffer) {
return Array.prototype.map
.call(new Uint8Array(buffer), (x) => ('00' + x.toString(16)).slice(-2))
.join('')
.toUpperCase();
}
_countCSVRows(buffer) {
const decoder = new TextDecoder('utf-8');
const text = decoder.decode(buffer);
let rowCount = 0;
let inQuote = false;
let len = text.length;
for (let i = 0; i < len; i++) {
const char = text[i];
if (char === '"') {
inQuote = !inQuote;
} else if (char === '\n' && !inQuote) {
rowCount++;
}
}
if (len > 0 && text[len - 1] !== '\n') {
rowCount++;
}
return rowCount;
}
_validateTextContent(buffer, extension) {
let contentStr = '';
try {
const decoder = new TextDecoder('utf-8', {
fatal: true,
});
contentStr = decoder.decode(buffer);
} catch (e) {
console.warn('UTF-8 解码失败', e);
return false;
}
if (contentStr.includes('\0')) {
return false;
}
if (extension === 'json') {
try {
JSON.parse(contentStr);
} catch (e) {
console.warn('无效的 JSON 格式');
return false;
}
}
return true;
}
validate(file) {
return new Promise((resolve, reject) => {
if (!file || !file.name) return reject('无效的文件对象');
if (file.size > this.maxSizeMB * 1024 * 1024) {
return reject(`文件大小超出限制 (最大 ${this.maxSizeMB}MB)`);
}
const fileName = file.name.toLowerCase();
const extension = fileName.substring(fileName.lastIndexOf('.') + 1);
const expectedMagic = this.allowedConfig[extension];
if (!expectedMagic) {
return reject(`不支持的文件格式: .${extension}`);
}
const reader = new FileReader();
reader.onload = (e) => {
const buffer = e.target.result;
let isSafe = false;
if (expectedMagic === 'TYPE_TEXT') {
if (this._validateTextContent(buffer, extension)) {
isSafe = true;
} else {
if (extension === 'json') {
return reject(`文件异常:不是有效的 JSON 文件`);
}
return reject(`文件异常:.${extension} 包含非法二进制内容或编码错误`);
}
if (extension === 'csv' && this.csvMaxRows > 0) {
const rows = this._countCSVRows(buffer);
if (rows > this.csvMaxRows) {
return reject(`CSV 行数超出限制 (当前 ${rows} 行,最大允许 ${this.csvMaxRows} 行)`);
}
}
} else {
const fileHeader = this._bufferToHex(buffer.slice(0, 8));
if (fileHeader.startsWith(expectedMagic)) {
isSafe = true;
} else {
return reject(`文件可能已被篡改 (真实类型与 .${extension} 不符)`);
}
}
if (isSafe) resolve(true);
};
reader.onerror = () => reject('文件读取失败,无法校验');
if (expectedMagic === 'TYPE_TEXT' && extension === 'json') {
reader.readAsArrayBuffer(file);
} else {
reader.readAsArrayBuffer(file.slice(0, 2048));
}
});
}
}


// 【demo】
// 如果传入了 allowedExtensions,则只使用传入的;否则使用全部 KNOWN_SIGNATURES
// const imageValidator = new FileValidator({
// maxSizeMB: 5,
// allowedExtensions: ['png', 'jpg', 'jpeg'],
// });

// imageValidator
// .validate(file)
// .then(() => {
// statusDiv.textContent = `检测通过: ${file.name}`;
// statusDiv.style.color = 'green';
// console.log('图片校验通过,开始上传...');
// // upload(file)...
// })
// .catch((err) => {
// statusDiv.textContent = `检测失败: ${err}`;
// statusDiv.style.color = 'red';
// });
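Beyond the inline image demo above, the same class can gate the document uploads used elsewhere in this change set; a sketch assuming config.allowedFileTypes is a list of extensions such as ['pdf', 'docx', 'xlsx'] (the exact contents and shape of that config key are an assumption here):

import { FileValidator } from '@/utils/fileValidator.js';
import config from '@/config';

// Reject oversized files, unknown extensions, and files whose magic bytes
// do not match their extension, before any upload request is made.
const docValidator = new FileValidator({
  maxSizeMB: 10,
  allowedExtensions: config.allowedFileTypes, // assumed to hold extensions, not MIME types
});

export async function isUploadable(file) {
  try {
    await docValidator.validate(file); // rejects with a human-readable message
    return true;
  } catch (reason) {
    console.warn('file rejected:', reason);
    return false;
  }
}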
2636
static/upload.html
File diff suppressed because it is too large
Load Diff
@@ -9,10 +9,17 @@ import {
} from '@/common/globalFunction.js'
import config from '../config';

const defalutLongLat = {
longitude: 120.382665,
latitude: 36.066938,
}

const useLocationStore = defineStore("location", () => {
// 定义状态
const longitudeVal = ref(null) // 经度
const latitudeVal = ref(null) //纬度
const timer = ref(null)
const count = ref(0)

function getLocation() { // 获取经纬度两个平台
return new Promise((resole, reject) => {
@@ -25,49 +32,57 @@ const useLocationStore = defineStore("location", () => {
resole(data)
},
fail: function(data) {
longitudeVal.value = 120.382665
latitudeVal.value = 36.066938
resole({
longitude: 120.382665,
latitude: 36.066938
})
longitudeVal.value = defalutLongLat.longitude
latitudeVal.value = defalutLongLat.latitude
resole(defalutLongLat)
msg('用户位置获取失败')
console.log('失败3', data)
}
})
} else {
uni.getLocation({
type: 'gcj02',
highAccuracyExpireTime: 3000,
isHighAccuracy: true,
timeout: 2000,
// highAccuracyExpireTime: 3000,
// isHighAccuracy: true,
// timeout: 2000,
success: function(data) {
longitudeVal.value = Number(data.longitude)
latitudeVal.value = Number(data.latitude)
resole(data)
},
fail: function(data) {
longitudeVal.value = 120.382665
latitudeVal.value = 36.066938
resole({
longitude: 120.382665,
latitude: 36.066938
})
longitudeVal.value = defalutLongLat.longitude
latitudeVal.value = defalutLongLat.latitude
resole(defalutLongLat)
msg('用户位置获取失败')
console.log('失败2', data)
}
});
}
} catch (e) {
longitudeVal.value = 120.382665
latitudeVal.value = 36.066938
resole({
longitude: 120.382665,
latitude: 36.066938
})
longitudeVal.value = defalutLongLat.longitude
latitudeVal.value = defalutLongLat.latitude
resole(defalutLongLat)
msg('测试环境,使用模拟定位')
console.log('失败', data)
console.log('失败1', e)
}
})
}
function getLocationLoop(gap = 1000 * 60 * 2) {
console.log(`🔄开始循环获取定位,间隔:${Math.floor(gap/1000)}秒`)
const run = () => {
count.value++
console.log(`📍第${count.value}次获取定位`)
getLocation()
}
run()
timer.value = setInterval(run,gap);
}

function clearGetLocationLoop(params) {
clearInterval(timer.value)
timer.value = null
}

function longitude() {
return longitudeVal.value
@@ -80,11 +95,12 @@ const useLocationStore = defineStore("location", () => {
// 导入
return {
getLocation,
getLocationLoop,
clearGetLocationLoop,
longitudeVal,
latitudeVal

latitudeVal,
}
},{
}, {
unistorage: true,
})
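A sketch of how a page could pause the polling while the app is in the background (the onShow/onHide wiring is illustrative; only getLocationLoop and clearGetLocationLoop come from the store above):

import { onShow, onHide } from '@dcloudio/uni-app';
import useLocationStore from '@/stores/useLocationStore';

const locationStore = useLocationStore();

onShow(() => {
  locationStore.clearGetLocationLoop(); // avoid stacking a second interval
  locationStore.getLocationLoop();      // restart the 2-minute polling cycle
});

onHide(() => {
  locationStore.clearGetLocationLoop(); // stop polling while the app is hidden
});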
170
utils/fileValidator.js
Normal file
@@ -0,0 +1,170 @@
const KNOWN_SIGNATURES = {
png: '89504E470D0A1A0A',
jpg: 'FFD8FF',
jpeg: 'FFD8FF',
gif: '47494638',
webp: '52494646',
docx: '504B0304',
xlsx: '504B0304',
pptx: '504B0304',
doc: 'D0CF11E0',
xls: 'D0CF11E0',
ppt: 'D0CF11E0',
pdf: '25504446',
txt: 'TYPE_TEXT',
csv: 'TYPE_TEXT',
md: 'TYPE_TEXT',
json: 'TYPE_TEXT',
};
export class FileValidator {
version = '1.0.0';
signs = Object.keys(KNOWN_SIGNATURES);
constructor(options = {}) {
this.maxSizeMB = options.maxSizeMB || 10;
if (options.allowedExtensions && Array.isArray(options.allowedExtensions)) {
this.allowedConfig = {};
options.allowedExtensions.forEach((ext) => {
const key = ext.toLowerCase();
if (KNOWN_SIGNATURES[key]) {
this.allowedConfig[key] = KNOWN_SIGNATURES[key];
} else {
console.warn(`[FileValidator] 未知的文件类型: .${key},已忽略`);
}
});
} else {
this.allowedConfig = {
...KNOWN_SIGNATURES,
};
}
}
_isValidUTF8(buffer) {
try {
const decoder = new TextDecoder('utf-8', {
fatal: true,
});
decoder.decode(buffer);
return true;
} catch (e) {
return false;
}
}
_bufferToHex(buffer) {
return Array.prototype.map
.call(new Uint8Array(buffer), (x) => ('00' + x.toString(16)).slice(-2))
.join('')
.toUpperCase();
}
_countCSVRows(buffer) {
const decoder = new TextDecoder('utf-8');
const text = decoder.decode(buffer);
let rowCount = 0;
let inQuote = false;
let len = text.length;
for (let i = 0; i < len; i++) {
const char = text[i];
if (char === '"') {
inQuote = !inQuote;
} else if (char === '\n' && !inQuote) {
rowCount++;
}
}
if (len > 0 && text[len - 1] !== '\n') {
rowCount++;
}
return rowCount;
}
_validateTextContent(buffer, extension) {
let contentStr = '';
try {
const decoder = new TextDecoder('utf-8', {
fatal: true,
});
contentStr = decoder.decode(buffer);
} catch (e) {
console.warn('UTF-8 解码失败', e);
return false;
}
if (contentStr.includes('\0')) {
return false;
}
if (extension === 'json') {
try {
JSON.parse(contentStr);
} catch (e) {
console.warn('无效的 JSON 格式');
return false;
}
}
return true;
}
validate(file) {
return new Promise((resolve, reject) => {
if (!file || !file.name) return reject('无效的文件对象');
if (file.size > this.maxSizeMB * 1024 * 1024) {
return reject(`文件大小超出限制 (最大 ${this.maxSizeMB}MB)`);
}
const fileName = file.name.toLowerCase();
const extension = fileName.substring(fileName.lastIndexOf('.') + 1);
const expectedMagic = this.allowedConfig[extension];
if (!expectedMagic) {
return reject(`不支持的文件格式: .${extension}`);
}
const reader = new FileReader();
reader.onload = (e) => {
const buffer = e.target.result;
let isSafe = false;
if (expectedMagic === 'TYPE_TEXT') {
if (this._validateTextContent(buffer, extension)) {
isSafe = true;
} else {
if (extension === 'json') {
return reject(`文件异常:不是有效的 JSON 文件`);
}
return reject(`文件异常:.${extension} 包含非法二进制内容或编码错误`);
}
if (extension === 'csv' && this.csvMaxRows > 0) {
const rows = this._countCSVRows(buffer);
if (rows > this.csvMaxRows) {
return reject(`CSV 行数超出限制 (当前 ${rows} 行,最大允许 ${this.csvMaxRows} 行)`);
}
}
} else {
const fileHeader = this._bufferToHex(buffer.slice(0, 8));
if (fileHeader.startsWith(expectedMagic)) {
isSafe = true;
} else {
return reject(`文件可能已被篡改 (真实类型与 .${extension} 不符)`);
}
}
if (isSafe) resolve(true);
};
reader.onerror = () => reject('文件读取失败,无法校验');
if (expectedMagic === 'TYPE_TEXT' && extension === 'json') {
reader.readAsArrayBuffer(file);
} else {
reader.readAsArrayBuffer(file.slice(0, 2048));
}
});
}
}


// 【demo】
// 如果传入了 allowedExtensions,则只使用传入的;否则使用全部 KNOWN_SIGNATURES
// const imageValidator = new FileValidator({
// maxSizeMB: 5,
// allowedExtensions: ['png', 'jpg', 'jpeg'],
// });

// imageValidator
// .validate(file)
// .then(() => {
// statusDiv.textContent = `检测通过: ${file.name}`;
// statusDiv.style.color = 'green';
// console.log('图片校验通过,开始上传...');
// // upload(file)...
// })
// .catch((err) => {
// statusDiv.textContent = `检测失败: ${err}`;
// statusDiv.style.color = 'red';
// });
Block a user