flat: WIP stash

Author: Apcallover
Date: 2025-11-11 21:11:09 +08:00
parent 2f0dd5ee86
commit 59e04e53b1
13 changed files with 562 additions and 384 deletions

View File

@@ -3,6 +3,7 @@ import { electronApp, optimizer, is } from "@electron-toolkit/utils";
import icon from "../../resources/icon.png?asset";
import { setupLiveHandlers } from "./ipc/live";
import { setupPromptHandlers } from "./ipc/prompt";
import { setupWorkflowHandlers } from "./ipc/workflow";
import { preload, indexHtml, ELECTRON_RENDERER_URL } from "./config";
// Enable hot reload in development
@@ -75,6 +76,8 @@ app.whenReady().then(() => {
setupPromptHandlers();
setupWorkflowHandlers();
createWindow();
app.on("activate", function () {

View File

@@ -1,14 +1,8 @@
import { ipcMain, BrowserWindow } from "electron";
import { preload, indexHtml, ELECTRON_RENDERER_URL } from "../config";
import { showPrompt } from "../utils/tools";
import { OllamaClient } from "../utils/ollama-client";
let liveWindow: BrowserWindow | null = null;
const client = new OllamaClient({
baseUrl: "http://127.0.0.1:11434", // optional; this is the default
timeout: 30000, // optional; defaults to 30 seconds
});
// Live-streaming handlers for the main process
export function setupLiveHandlers() {
let LiveSessionId = null;
@@ -104,7 +98,7 @@ export function setupLiveHandlers() {
contextIsolation: false,
},
});
// liveWindow.webContents.openDevTools();
liveWindow.webContents.openDevTools();
liveWindow.on("closed", () => {
liveWindow = null;
});
@@ -128,24 +122,6 @@ export function setupLiveHandlers() {
return { success: false, error: error.message };
}
});
ipcMain.handle("ollama-test", async (_, jobInfo) => {
try {
const result = await client.generateText({
model: "qwen:7b",
prompt: `请根据提供的 json 数据:${jobInfo},直接生成一段用于吸引求职者投递的岗位介绍文案。文案需:
1、简洁、有力突出岗位核心价值和吸引力。
2、不包含任何多余的开头、结尾、解释或废话。
3、目标是立即抓住用户眼球并促使他们投递简历。
4、不含任何废话或与岗位无关的内容
**要求:**只输出生成的岗位介绍文案本身。`,
});
return { success: true, data: result };
} catch (error: any) {
console.error("Ollama error:", error);
return { success: false, error: error.message };
}
});
}
async function getSessionId(requestBody: object) {

View File

@@ -2,7 +2,7 @@ import { ipcMain, dialog } from "electron";
export function setupPromptHandlers() {
// Handle prompt/notification messages
ipcMain.handle("show-prompt", async (event) => {
ipcMain.handle("show-prompt", async () => {
dialog.showMessageBox({
type: "info",
title: "提示",

src/main/ipc/workflow.ts (new file, +329 lines)
View File

@@ -0,0 +1,329 @@
import { ipcMain, BrowserWindow } from "electron";
import { spawn } from "child_process";
import { showPrompt } from "../utils/tools";
import { preload, indexHtml, ELECTRON_RENDERER_URL } from "../config";
import os from "os";
import http from "http";
let InstallWindows: BrowserWindow | null = null;
export function setupWorkflowHandlers() {
let lastJobSummary = "这是我们今天介绍的第一个岗位。"; // must match the "first job" marker the prompt's special case checks for
// Open the install window
ipcMain.handle("open-install-window", async (_, args) => {
try {
if (InstallWindows) {
InstallWindows.focus();
showPrompt("下载已打开", "info");
return { success: true };
}
const { width, height, path } = args;
let installUrl = `${ELECTRON_RENDERER_URL}/#/${path}`;
console.log(installUrl);
InstallWindows = new BrowserWindow({
title: "模型下载",
width,
height,
minimizable: false, // whether the window can be minimized
maximizable: false, // whether the window can be maximized
closable: true, // whether the window can be closed
alwaysOnTop: false, // whether the window stays above all others
webPreferences: {
preload,
nodeIntegration: true,
contextIsolation: false,
},
});
// InstallWindows.webContents.openDevTools();
InstallWindows.on("closed", () => {
InstallWindows = null;
});
if (ELECTRON_RENDERER_URL) {
InstallWindows.loadURL(installUrl);
} else {
InstallWindows.loadFile(indexHtml, { hash: `/${path}` });
}
return { success: true };
} catch (error: any) {
return { success: false, error: error.message };
}
});
// Handle the 'install-ollama-and-model' request from the renderer
ipcMain.handle("install-ollama-and-model", async (event) => {
const webContents = event.sender; // the WebContents that sent the request
const platform = os.platform();
const modelToPull = "qwen3:8b";
const sendStatus = (status) => {
if (webContents && !webContents.isDestroyed()) {
webContents.send("install-progress", { status });
}
};
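// Messages sent over "install-progress" take one of two shapes (inferred
// from sendStatus above and streamCommand at the bottom of this file):
//   { status: string }                          - a high-level phase change
//   { type: "stdout" | "stderr", data: string } - raw child-process output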
try {
sendStatus("Checking Ollama installation...");
try {
await streamCommand(
"ollama",
["-v"],
webContents,
"install-progress",
);
sendStatus("Ollama is already installed.");
} catch (error) {
// Ollama is not installed; run the installer
sendStatus("Ollama not found. Starting installation...");
if (platform === "darwin" || platform === "linux") {
// macOS / Linux: use the official curl install script
const installCommand =
"curl -fsSL https://ollama.com/install.sh | sh";
await streamCommand(
"sh",
["-c", installCommand],
webContents,
"install-progress",
);
} else if (platform === "win32") {
// Windows: download and run the installer silently via PowerShell
const psScript = `
$ProgressPreference = 'SilentlyContinue';
$tempPath = [System.IO.Path]::Combine($env:TEMP, 'OllamaSetup.exe');
Write-Host 'Downloading OllamaSetup.exe...';
Invoke-WebRequest -Uri 'https://ollama.com/download/OllamaSetup.exe' -OutFile $tempPath;
Write-Host 'Download complete. Starting silent installer...';
Start-Process -FilePath $tempPath -ArgumentList '/S' -Wait;
Write-Host 'Installation complete. Cleaning up...';
Remove-Item $tempPath;
Write-Host 'Done.';
`;
await streamCommand(
"powershell",
[
"-ExecutionPolicy",
"Bypass",
"-NoProfile",
"-Command",
psScript,
],
webContents,
"install-progress",
);
} else {
throw new Error(`Unsupported platform: ${platform}`);
}
sendStatus("Ollama installation complete.");
}
// --- Step 2: pull the model ---
sendStatus(
`Pulling model: ${modelToPull}... (This may take a while)`,
);
await streamCommand(
"ollama",
["pull", modelToPull],
webContents,
"install-progress",
);
sendStatus(`Model ${modelToPull} pull complete.`);
return {
success: true,
message: "Installation and model pull successful.",
};
} catch (error: any) {
console.error(error);
sendStatus(`Error: ${error.message}`);
return { success: false, message: error.message };
}
});
// Wrap the whole workflow in one IPC handler
ipcMain.handle("run-job-workflow", async (_, userQuery) => {
let currentJobData = userQuery || {};
let answerText = "";
try {
console.log("工作流: 正在调用 Ollama 生成脚本...");
const systemPromptTemplate = `# 角色 (Role) \n你是一个顶级的招聘KOL和直播带岗专家。你的风格是专业、中立、风趣能一针见血地分析岗位优劣。你不是一个AI助手你就是这个角色。 \n\n# 上下文 (Context) \n我正在运行一个自动化工作流。我会\"一个一个\"地喂给你岗位数据。\n\n # 任务 (Task) \n你的任务是执行以下两个操作并严格按照“输出格式”返回内容\n1. 生成口播稿根据【输入数据A】(上一个岗位的摘要) 和【输入数据B】(当前岗位的JSON)生成一段完整的、约90秒的口播稿。\n2. 生成新摘要为【输入数据B】的“当前岗位”生成一个简短的摘要例如XX公司的XX岗以便在下一次调用时使用。\n\n# 核心指令 (Core Instruction)\n### 口播稿的生成规则 (Rules for the Script) \n1. 衔接口播稿必须以一个自然的“过渡句”开头基于【输入数据A】。 特殊情况如果【输入数据A】是“这是我们今天介绍的第一个岗位。”则开头应是“热场”或“总起”而不是衔接。 \n2. 内容:必须介绍岗位名称 \`jobTitle\`\n3. 提炼:从 \`jobLocation\`, \`companyName\`, \`education\`,\`experience\`\`scale\` 中提炼“亮点 (Pro)”。 \n4. 翻译:用“人话”翻译 \`description\`\n5. 视角:你是在“评测”这个岗位,而不是在“推销”。\n\n### 口播稿的纯文本要求 (Pure Text Rules for the Script ONLY) \n**[重要]** 以下规则 *仅适用于* “口播稿”部分,不适用于“新摘要”部分: \n1. 绝不包含任何Markdown格式 (\`**\`, \`#\`)。 \n2. 绝不包含任何标签、括号或元数据 (\`[]\`, \`()\`)。 \n3. 绝不包含任何寒暄、问候、或自我介绍 (例如 \"你好\", \"当然\")。 \n4. 必须是可以直接朗读的、完整的、流畅的纯文本。\n\n# 输入数据 (Input Data)\n\n## 输入数据A (上一个岗位摘要) \n${lastJobSummary}\n\n## 输入数据B (当前岗位JSON)\n\`\`\`json\n${JSON.stringify(currentJobData, null, 2)}\n\`\`\`\n\n# 输出格式 (Output Format)\n**[绝对严格的指令]** \n你必须严格按照下面这个“两部分”格式输出使用 \`---NEXT_SUMMARY---\` 作为唯一的分隔符。 绝不在分隔符之外添加任何多余的文字、解释或Markdown。 \n\n[这里是AI生成的、符合上述所有“纯文本要求”的完整口播稿] \n---NEXT_SUMMARY--- \n[这里是AI为“当前岗位”生成的简短新摘要]`;
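// (Summary of the template above: the model must return two parts separated
// by ---NEXT_SUMMARY--- - first a plain-text, roughly 90-second spoken script
// for the current job, then a one-line summary of it to seed the next call.)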
answerText = await runOllamaNonStream(
systemPromptTemplate,
"qwen3:8b",
);
if (!answerText) {
throw new Error("Ollama 返回为空");
}
} catch (e) {
// Return the { success, error } shape the renderer checks for; a bare
// string would slip past its `val.success === false` test.
return { success: false, error: "抱歉,AI 模型在生成脚本时出错。" };
}
try {
console.log("工作流: 正在解析 AI 输出...");
let script = "抱歉AI没有按预定格式返回脚本请稍后重试。";
let summary = "这是我们今天介绍的第一个岗位。"; // 这是一个安全的“重置”摘要
if (answerText && typeof answerText === "string") {
const parts = answerText.split("---NEXT_SUMMARY---");
if (parts[0] && parts[0].trim() !== "") {
script = parts[0].trim();
}
if (parts[1] && parts[1].trim() !== "") {
summary = parts[1].trim();
}
}
console.log("工作流: 正在更新状态...");
lastJobSummary = summary; // 关键:更新主进程中的状态
console.log("工作流: 完成,返回口播稿。");
return { success: true, data: script }; // 将最终的“口播稿”返回给渲染进程
} catch (e: any) {
console.error("Error while running code or updating state:", e);
return { success: false, error: "抱歉,处理 AI 响应时出错。" };
}
});
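// Return contract of "run-job-workflow" (with the error paths normalized):
//   { success: true,  data: string }  - the spoken script
//   { success: false, error: string } - a user-facing failure message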
// Check whether the Ollama server is running
ipcMain.handle("check-ollama-status", async () => {
return await checkOllamaServer();
});
// Handler: check the service; if it is not running, wake it with a lightweight command
ipcMain.handle("ensure-ollama-running", async () => {
let isRunning = await checkOllamaServer();
if (isRunning) {
return {
success: true,
message: "Ollama服务器已在运行.",
};
}
// The service is not running.
// Run 'ollama ps': the command talks to the service, and
// if the service is down, the Ollama CLI starts it automatically.
try {
await runCommand("ollama", ["ps"]);
// give the service a moment to start (e.g. 2 seconds)
await new Promise((resolve) => setTimeout(resolve, 2000));
// check again
isRunning = await checkOllamaServer();
if (isRunning) {
return {
success: true,
message: "Ollama 已在后台启动",
};
} else {
return {
success: false,
message: "服务启动失败",
};
}
} catch (error: any) {
console.error("错误:", error);
return {
success: false,
message: `错误: ${error.message}`,
};
}
});
}
// Helper: check whether the Ollama API is reachable
function checkOllamaServer() {
return new Promise((resolve) => {
// the default port is 11434
const req = http.get("http://127.0.0.1:11434/", (res) => {
// "Ollama is running" 的响应码是 200
resolve(res.statusCode === 200);
});
// a refused connection (ECONNREFUSED) means the service is not running
req.on("error", () => {
resolve(false);
});
});
}
// Helper: run a simple command and wait for it to finish
function runCommand(command, args) {
return new Promise((resolve, reject) => {
// `child` avoids shadowing Node's global `process`
const child = spawn(command, args, { shell: true });
child.on("close", (code) => {
if (code === 0) {
resolve(null);
} else {
reject(new Error(`Command failed with code ${code}`));
}
});
child.on("error", (err) => reject(err));
});
}
// Non-streaming Ollama helper
async function runOllamaNonStream(prompt, model = "qwen3:8b") {
try {
const response = await fetch("http://127.0.0.1:11434/api/chat", {
method: "POST",
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
model: model,
messages: [{ role: "user", content: prompt }],
stream: false, // key: disable streaming
}),
});
if (!response.ok) {
throw new Error(`Ollama API error: ${response.statusText}`);
}
const data = await response.json();
// data.message.content holds the complete AI reply
return data.message.content;
} catch (error) {
console.error("Ollama Chat Error:", error);
return null; // return null so callers can handle the failure
}
}
function streamCommand(command, args, webContents, eventName) {
return new Promise((resolve, reject) => {
const child = spawn(command, args, { shell: true });
const send = (channel, data) => {
if (webContents && !webContents.isDestroyed()) {
webContents.send(channel, data);
}
};
child.stdout.on("data", (data) => {
send(eventName, { type: "stdout", data: data.toString() });
});
child.stderr.on("data", (data) => {
send(eventName, { type: "stderr", data: data.toString() });
});
child.on("close", (code) => {
if (code === 0) {
resolve(null);
} else {
reject(new Error(`Process exited with code ${code}`));
}
});
child.on("error", (err) => {
reject(err);
});
});
}
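For reference, a minimal renderer-side sketch (assumed, not part of this commit) tying these handlers together: make sure the Ollama server is up, then request a script. `generateScriptFor` is a hypothetical helper name:

// Hypothetical renderer helper (TypeScript); assumes the handlers above
async function generateScriptFor(job: Record<string, unknown>): Promise<string> {
  const status = await window.electron.ipcRenderer.invoke("ensure-ollama-running");
  if (!status.success) throw new Error(status.message);
  const result = await window.electron.ipcRenderer.invoke("run-job-workflow", job);
  if (result.success === false) throw new Error(result.error);
  return result.data; // the spoken script
}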

View File

@@ -1,271 +0,0 @@
import { type ClientRequest, type IncomingMessage } from "http";
import * as http from "http";
interface OllamaOptions {
baseUrl?: string;
timeout?: number;
}
interface GenerateParams {
model: string;
prompt: string;
system?: string;
template?: string;
context?: number[];
stream?: boolean;
format?: "json";
options?: {
temperature?: number;
top_p?: number;
top_k?: number;
seed?: number;
num_predict?: number;
stop?: string[];
num_ctx?: number;
};
}
interface GenerateResponse {
model: string;
created_at: string;
response: string;
context: number[];
done: boolean;
total_duration: number;
load_duration: number;
prompt_eval_duration: number;
eval_duration: number;
prompt_eval_count: number;
eval_count: number;
}
interface ModelInfo {
name: string;
size: number;
digest: string;
details: {
format: string;
family: string;
families: string[];
parameter_size: string;
quantization_level: string;
};
}
interface ListModelsResponse {
models: ModelInfo[];
}
export class OllamaClient {
private readonly baseUrl: string;
private readonly timeout: number;
constructor(options: OllamaOptions = {}) {
this.baseUrl = options.baseUrl || "http://localhost:11434";
this.timeout = options.timeout || 30000;
}
private async request<T>(
path: string,
method = "GET",
body?: unknown,
): Promise<T> {
return new Promise((resolve, reject) => {
const url = new URL(path, this.baseUrl);
const options = {
method,
hostname: url.hostname,
port: url.port || "11434",
path: url.pathname,
headers: {
"Content-Type": "application/json",
},
timeout: this.timeout,
};
const req: ClientRequest = http.request(
options,
(res: IncomingMessage) => {
let data = "";
res.on("data", (chunk) => {
data += chunk;
});
res.on("end", () => {
if (
res.statusCode &&
res.statusCode >= 200 &&
res.statusCode < 300
) {
try {
resolve(JSON.parse(data));
} catch (e) {
reject(
new Error("Failed to parse response data"),
);
}
} else {
reject(
new Error(
`HTTP Error: ${res.statusCode} ${res.statusMessage}`,
),
);
}
});
},
);
req.on("error", reject);
req.on("timeout", () => {
req.destroy();
reject(new Error("Request timeout"));
});
if (body) {
req.write(JSON.stringify(body));
}
req.end();
});
}
async generate(params: GenerateParams): Promise<GenerateResponse> {
const result = await this.request<GenerateResponse>(
"/api/generate",
"POST",
{
...params,
stream: false,
},
);
// If the response was requested in JSON format, try to parse it
if (params.format === "json" && result.response) {
try {
const parsed = JSON.parse(result.response);
result.response =
parsed.content || parsed.text || result.response;
} catch (e) {
console.warn(
"Failed to parse JSON response, returning raw response",
);
}
}
return result;
}
/**
* Generate a text response and return the text content directly
*/
async generateText(
params: Omit<GenerateParams, "format">,
): Promise<string> {
const result = await this.generate({
...params,
format: undefined, // make sure JSON format is not used
});
return result.response;
}
async streamGenerate(
params: GenerateParams,
onResponse: (response: GenerateResponse) => void,
onError?: (error: Error) => void,
): Promise<void> {
const url = new URL("/api/generate", this.baseUrl);
const options = {
method: "POST",
hostname: url.hostname,
port: url.port || "11434",
path: url.pathname,
headers: {
"Content-Type": "application/json",
},
};
return new Promise<void>((resolve, reject) => {
const req = http.request(options, (res) => {
let buffer = "";
res.on("data", (chunk) => {
buffer += chunk;
const lines = buffer.split("\n");
buffer = lines.pop() || "";
for (const line of lines) {
if (line.trim()) {
try {
const response = JSON.parse(
line,
) as GenerateResponse;
onResponse(response);
} catch (e) {
const error = new Error(
"Failed to parse streaming response",
);
onError?.(error);
console.error(error, e);
}
}
}
});
res.on("end", () => {
if (buffer.trim()) {
try {
const response = JSON.parse(
buffer,
) as GenerateResponse;
onResponse(response);
} catch (e) {
const error = new Error(
"Failed to parse final streaming response",
);
onError?.(error);
console.error(error, e);
}
}
resolve();
});
});
req.on("error", (error) => {
onError?.(error);
reject(error);
});
req.write(JSON.stringify({ ...params, stream: true }));
req.end();
});
}
async listModels(): Promise<ListModelsResponse> {
return this.request<ListModelsResponse>("/api/tags");
}
async pullModel(modelName: string): Promise<void> {
await this.request("/api/pull", "POST", { name: modelName });
}
async deleteModel(modelName: string): Promise<void> {
await this.request("/api/delete", "DELETE", { name: modelName });
}
async copyModel(sourceModel: string, targetModel: string): Promise<void> {
await this.request("/api/copy", "POST", {
source: sourceModel,
destination: targetModel,
});
}
async ping(): Promise<boolean> {
try {
await this.request("/");
return true;
} catch {
return false;
}
}
}

View File

@@ -1,22 +1,30 @@
import { contextBridge } from 'electron'
import { electronAPI } from '@electron-toolkit/preload'
import { contextBridge, ipcRenderer } from "electron";
import { electronAPI } from "@electron-toolkit/preload";
// Custom APIs for renderer
const api = {}
const api = {
installOllama: () => ipcRenderer.invoke("install-ollama-and-model"),
// listen for install progress (send/on pattern)
onInstallProgress: (callback) => {
ipcRenderer.on("install-progress", (_event, value) => callback(value));
},
// remove the progress listeners
removeInstallProgressListeners: () => {
ipcRenderer.removeAllListeners("install-progress");
},
};
// Use `contextBridge` APIs to expose Electron APIs to
// renderer only if context isolation is enabled, otherwise
// just add to the DOM global.
if (process.contextIsolated) {
try {
contextBridge.exposeInMainWorld('electron', electronAPI)
contextBridge.exposeInMainWorld('api', api)
contextBridge.exposeInMainWorld("electron", electronAPI);
contextBridge.exposeInMainWorld("api", api);
} catch (error) {
console.error(error)
console.error(error);
}
} else {
// @ts-ignore (define in dts)
window.electron = electronAPI
window.electron = electronAPI;
// @ts-ignore (define in dts)
window.api = api
window.api = api;
}
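The commit does not show matching type declarations for the new `api` surface. A minimal sketch of what src/preload/index.d.ts could declare, assuming the API above (the InstallProgress shape is inferred from workflow.ts; not part of this commit):

import { ElectronAPI } from "@electron-toolkit/preload";

interface InstallProgress {
  status?: string;
  type?: "stdout" | "stderr";
  data?: string;
}

declare global {
  interface Window {
    electron: ElectronAPI;
    api: {
      installOllama: () => Promise<{ success: boolean; message: string }>;
      onInstallProgress: (callback: (progress: InstallProgress) => void) => void;
      removeInstallProgressListeners: () => void;
    };
  }
}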

View File

@@ -19,7 +19,12 @@ const routes: Array<RouteRecordRaw> = [
component: () => import("../views/Live/index.vue"),
meta: { no_login: true },
},
// Add more routes as needed
{
path: "/install",
name: "Install",
component: () => import("../views/Install/index.vue"),
meta: { no_login: true },
},
];
export default routes;

View File

@@ -74,7 +74,7 @@ export const useLiveStore = defineStore("live", () => {
try {
isExplaining.value = true;
// TODO: add the actual implementation for explaining a position here
// add the actual implementation for explaining a position here
// the AI interface can be called to do the explanation
const result = await window.electron.ipcRenderer.invoke(
"explain-position",
@@ -117,7 +117,7 @@ export const useLiveStore = defineStore("live", () => {
try {
const result = await window.electron.ipcRenderer.invoke(
"open-live-window",
{ path: "live", width: 375, height: 692, userId: "rs876543" },
{ path: "live", width: 375, height: 682 },
);
if (result.success) {
isLiveWindowOpen.value = true;
@@ -181,13 +181,11 @@ export const useLiveStore = defineStore("live", () => {
positions,
(newVal) => {
console.log("positions changed:", newVal);
// when new positions are added and nothing is currently being explained, automatically start explaining the first one
if (
newVal.length > 0 &&
!currentPosition.value &&
!isExplaining.value
) {
console.log("Starting next position from positions watch");
getNextPosition();
}
},
@@ -215,7 +213,11 @@ export const useLiveStore = defineStore("live", () => {
});
async function jobTransformAIobj(job: any) {
const val = await window.electron.ipcRenderer.invoke("ollama-test", job);
// const val = await window.electron.ipcRenderer.invoke("ollama-test", job);
const val = await window.electron.ipcRenderer.invoke(
"run-job-workflow",
job,
);
if (val.success === false) {
throw new Error(val.error);

View File

@@ -36,8 +36,9 @@
</div>
</section>
<section class="panel panel-controls">
<header class="panel-header">直播控制</header>
<header class="panel-header">直播控制</header>
<div class="controls-row">
<button @click="loadData">加载数据</button>
<button @click="handleOpenLiveWindow">打开直播窗口</button>
<button :class="{ 'primary': !isLiveOn }" @click="handleStartLive" :disabled="isLiveOn">
开始直播
@@ -47,12 +48,24 @@
</button>
</div>
<header class="panel-header small">内容控制</header>
<header class="panel-header ">内容控制</header>
<div class="controls-row">
<button @click="toggleCameraInsert">全屏插播</button>
<button @click="insertCameraVideo">窗口插播</button>
<button @click="insertVideoAudio">音频插入</button>
</div>
<header class="panel-header ">工具控制</header>
<div class="controls-row">
模型状态{{ modelStatus }}
</div>
<div class="controls-row">
<button @click="updateStatus">更新状态</button>
<button @click="InstallPlugins">安装插件</button>
<button @click="startModel">启动模型</button>
</div>
</section>
</div>
</div>
@@ -66,6 +79,7 @@ import { useUserStore } from '@renderer/stores/useUserStore';
import { useLiveStore } from '@renderer/stores/useLiveStore'
const { positions, currentPosition, seePosition, isLiveOn } = storeToRefs(useLiveStore())
const broadcastOrder = ref([]);
const modelStatus = ref('未知')
onMounted(async () => {
init();
@@ -73,6 +87,10 @@ onMounted(async () => {
})
async function init() {
// useLiveStore().fetchPositions()
}
function loadData() {
useLiveStore().fetchPositions()
}
@@ -114,6 +132,45 @@ const handleStopLive = async () => {
console.error('Failed to stop the live stream:', error)
}
}
const InstallPlugins = async () => {
try {
await window.electron.ipcRenderer.invoke(
"open-install-window",
{ path: "install", width: 375, height: 692, userId: "rs876543" },
);
} catch (error) {
console.error('Failed to install plugins:', error)
}
}
const updateStatus = async () => {
try {
const status = await window.electron.ipcRenderer.invoke("check-ollama-status");
if (status) {
modelStatus.value = '已安装'
} else {
modelStatus.value = '未安装'
}
} catch (error) {
console.error('Failed to update status:', error)
}
}
const startModel = async () => {
try {
const result = await window.electron.ipcRenderer.invoke("ensure-ollama-running");
console.log('result', result)
// both branches showed the same message; collapse into one call
alert(result.message)
} catch (error) {
console.error('Failed to start the model:', error)
}
}
function toggleCameraInsert() {
window.electron.ipcRenderer.send('toggle-camera-insert')
}

View File

@@ -0,0 +1,72 @@
<template>
<div>
<button @click="startInstallation" :disabled="isLoading">
{{ isLoading ? '安装中...' : '安装 Ollama & Qwen3 模型' }}
</button>
<h3>安装日志:</h3>
<div class="log-container">
<pre>{{ log }}</pre>
</div>
</div>
</template>
<script setup>
import { ref, onMounted, onUnmounted } from 'vue';
const isLoading = ref(false);
const log = ref('');
// Handle all progress messages from the main process
const handleProgress = (progress) => {
if (progress.status) {
log.value += `\n--- ${progress.status} ---\n`;
} else if (progress.type === 'stdout') {
log.value += progress.data;
} else if (progress.type === 'stderr') {
log.value += `[ERROR] ${progress.data}`;
}
};
// Start the installation
const startInstallation = async () => {
isLoading.value = true;
log.value = 'Starting installation process...\n';
try {
const result = await window.api.installOllama();
if (result.success) {
log.value += `\n--- SUCCESS: ${result.message} ---\n`;
} else {
log.value += `\n--- FAILED: ${result.message} ---\n`;
}
} catch (error) {
log.value += `\n--- FATAL ERROR: ${error.message} ---\n`;
}
isLoading.value = false;
};
// Register the progress listener on mount
onMounted(() => {
window.api.onInstallProgress(handleProgress);
});
// Clean up the listener on unmount
onUnmounted(() => {
window.api.removeInstallProgressListeners();
});
</script>
<style scoped>
.log-container {
background-color: #2b2b2b;
color: #f1f1f1;
padding: 15px;
border-radius: 8px;
max-height: 400px;
overflow-y: auto;
white-space: pre-wrap;
/* wrap long lines automatically */
word-wrap: break-word;
font-family: 'Courier New', Courier, monospace;
}
</style>

View File

@@ -23,11 +23,11 @@ const liveUrl = ref("");
const soundUrl = ref("https://dmdemo.hx.cn/sound/welcome.mp3");
const welcome = ref()
const live = ref()
let cameraStream = null // holds the camera media stream
let cameraStream = null
let wasWelcomePlaying = false
const cameraVideo = ref()
const liveIframe = ref(null) // the iframe in the template
const micAudio = ref(null) // hidden element for mic playback
const liveIframe = ref(null)
const micAudio = ref(null)
let micStream = null
let prevIframeVideoVolume = 1
let micPlaying = false
@@ -45,7 +45,6 @@ function welcomeEnd() {
}
}
} catch (e) {
// may be cross-origin; ignore
}
}
@@ -73,7 +72,6 @@ onBeforeUnmount(() => {
})
function startLive() {
// read userId from the URL
const paramsUserId = route.query.userId;
if (paramsUserId) {
userId.value = paramsUserId
@@ -83,10 +81,8 @@ function startLive() {
async function handleInsertCameraVideo() {
if (isCameraActive.value) {
// stop the camera
stopCamera()
} else {
// start the camera
await startCamera()
}
}

View File

@@ -2,7 +2,7 @@
"extends": "@electron-toolkit/tsconfig/tsconfig.web.json",
"include": [
"src/renderer/src/env.d.ts",
"src/renderer/src/**/*",
"src/renderer/src/**/*.ts",
"src/renderer/src/**/*.vue",
"src/preload/*.d.ts"
],
@@ -13,6 +13,7 @@
"@renderer/*": [
"src/renderer/src/*"
]
}
},
"allowJs": true
}
}