flat: stash work in progress
@@ -9,6 +9,9 @@ let InstallWindows: BrowserWindow | null = null;
export function setupWorkflowHandlers() {
  let lastJobSummary = "这是我们今天介绍的第一个岗位";

  // Map for storing user-confirmation callbacks
  const modelDownloadCallbacks = new Map<string, { confirm: Function, reject: Function }>();

  // Open the install window
  ipcMain.handle("open-install-window", async (_, args) => {
    try {
@@ -197,6 +200,259 @@ export function setupWorkflowHandlers() {
    return await checkOllamaServer();
  });

  // Check whether the specified model exists
  ipcMain.handle("check-model-exists", async (_, modelName = "qwen3:8b") => {
    try {
      const response = await fetch("http://127.0.0.1:11434/api/tags", {
        method: "GET",
        headers: { "Content-Type": "application/json" },
      });

      if (!response.ok) {
        throw new Error(`Ollama API error: ${response.statusText}`);
      }

      const data = await response.json();
      const models = data.models || [];

      // Check whether the model exists locally
      const modelExists = models.some((model: any) => model.name === modelName);

      return {
        success: true,
        exists: modelExists,
        models: models.map((m: any) => ({ name: m.name, size: m.size, modified_at: m.modified_at }))
      };
    } catch (error: any) {
      console.error("Check model error:", error);
      return {
        success: false,
        exists: false,
        error: error.message
      };
    }
  });

  // Load the model (check the Ollama status and download the model if it does not exist)
  ipcMain.handle("load-model", async (_, modelName = "qwen3:8b") => {
    const webContents = BrowserWindow.getFocusedWindow()?.webContents;

    const sendStatus = (status: string, type = "info") => {
      if (webContents && !webContents.isDestroyed()) {
        webContents.send("model-load-progress", {
          status,
          type,
          timestamp: new Date().toISOString()
        });
      }
    };

    // Send a message asking whether the model should be downloaded
    const askUserToDownload = () => {
      if (webContents && !webContents.isDestroyed()) {
        webContents.send("model-download-confirm", {
          modelName,
          message: `模型 ${modelName} 不存在,是否下载?`,
          timestamp: new Date().toISOString()
        });
      }
    };

    try {
      sendStatus("正在检查Ollama服务状态...", "info");

      // 1. Check whether Ollama is running
      const isOllamaRunning = await checkOllamaServer();
      if (!isOllamaRunning) {
        sendStatus("Ollama服务未运行,正在启动...", "warning");

        try {
          // Try to start the Ollama service
          await runCommand("ollama", ["ps"]);
          await new Promise(resolve => setTimeout(resolve, 3000));

          const isRunningNow = await checkOllamaServer();
          if (!isRunningNow) {
            throw new Error("无法启动Ollama服务,请手动启动");
          }
          sendStatus("Ollama服务启动成功", "success");
        } catch (error: any) {
          sendStatus(`启动Ollama服务失败: ${error.message}`, "error");
          return {
            success: false,
            message: `Ollama服务启动失败: ${error.message}`,
            downloaded: false
          };
        }
      } else {
        sendStatus("Ollama服务正在运行", "success");
      }

      // 2. Check whether the model exists
      sendStatus(`正在检查模型 ${modelName} 是否存在...`, "info");

      const modelCheckResult = await new Promise<{ exists: boolean, models: any[] }>((resolve, reject) => {
        fetch("http://127.0.0.1:11434/api/tags", {
          method: "GET",
          headers: { "Content-Type": "application/json" },
        })
          .then(response => response.json())
          .then(data => {
            const models = data.models || [];
            const modelExists = models.some((model: any) => model.name === modelName);
            resolve({ exists: modelExists, models });
          })
          .catch(reject);
      });

      if (modelCheckResult.exists) {
        sendStatus(`模型 ${modelName} 已存在,无需下载`, "success");
        return {
          success: true,
          message: `模型 ${modelName} 已就绪`,
          downloaded: false
        };
      }

      // 3. The model does not exist; ask the user whether to download it
      askUserToDownload();

      // Wait for the user's confirmation
      const userConfirmed = await new Promise<boolean>((resolve, reject) => {
        const timeout = setTimeout(() => {
          modelDownloadCallbacks.delete(modelName);
          resolve(false); // Auto-cancel after a 30-second timeout
        }, 30000);

        // Store the callback functions
        modelDownloadCallbacks.set(modelName, {
          confirm: () => {
            clearTimeout(timeout);
            resolve(true);
          },
          reject: (error: any) => {
            clearTimeout(timeout);
            reject(error);
          }
        });
      });

      if (!userConfirmed) {
        sendStatus("用户取消了模型下载", "info");
        return {
          success: false,
          message: `用户取消了 ${modelName} 模型的下载`,
          downloaded: false
        };
      }

      // 4. The user confirmed; start downloading the model
      sendStatus(`开始下载模型 ${modelName},这可能需要一些时间...`, "info");

      await new Promise<void>((resolve, reject) => {
        const process = spawn("ollama", ["pull", modelName], { shell: true });

        const sendProgress = (data: any) => {
          if (webContents && !webContents.isDestroyed()) {
            webContents.send("model-load-progress", {
              status: data.toString().trim(),
              type: "download",
              timestamp: new Date().toISOString()
            });
          }
        };

        process.stdout.on("data", sendProgress);
        process.stderr.on("data", sendProgress);

        process.on("close", (code) => {
          if (code === 0) {
            resolve();
          } else {
            reject(new Error(`模型下载失败,退出码: ${code}`));
          }
        });

        process.on("error", (err: any) => {
          reject(new Error(`启动下载进程失败: ${err.message}`));
        });
      });

      sendStatus(`模型 ${modelName} 下载完成!`, "success");

      return {
        success: true,
        message: `模型 ${modelName} 下载并加载成功`,
        downloaded: true
      };

    } catch (error: any) {
      console.error("Load model error:", error);
      sendStatus(`加载模型失败: ${error.message}`, "error");
      return {
        success: false,
        message: error.message,
        downloaded: false
      };
    }
  });

  // Handle the user's confirmation response for the model download
  ipcMain.on("model-download-confirm-response", (_event, data) => {
    const { modelName, confirmed } = data;
    const callback = modelDownloadCallbacks.get(modelName);

    if (callback) {
      modelDownloadCallbacks.delete(modelName);
      if (confirmed) {
        callback.confirm();
      } else {
        callback.reject(new Error("用户取消了模型下载"));
      }
    }
  });
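  // --- Illustrative sketch (not part of this commit) -------------------------
  // The renderer-side counterpart of the IPC protocol above might look roughly
  // like this, assuming the page can reach `ipcRenderer` through a preload
  // bridge. Only the channel names ("model-load-progress",
  // "model-download-confirm", "model-download-confirm-response") come from the
  // handlers above; everything else is an assumption.
  //
  //   ipcRenderer.on("model-load-progress", (_event, payload) => {
  //     console.log(`[${payload.type}] ${payload.status}`);
  //   });
  //
  //   ipcRenderer.on("model-download-confirm", (_event, payload) => {
  //     const confirmed = window.confirm(payload.message); // hypothetical UI prompt
  //     ipcRenderer.send("model-download-confirm-response", {
  //       modelName: payload.modelName,
  //       confirmed,
  //     });
  //   });
  // ----------------------------------------------------------------------------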
  // Handler for polishing text
  ipcMain.handle("polish-text", async (_, text) => {
    try {
      if (!text || typeof text !== 'string' || text.trim() === '') {
        return {
          success: false,
          error: "输入文本不能为空"
        };
      }

      const systemPrompt = `你是一个专业的文本润色专家。请将以下文本进行润色,使其更加流畅、自然、专业。要求:
1. 保持原意不变
2. 使语言更加流畅自然
3. 提升表达的准确性
4. 适合直播场合使用
5. 保持简洁明了

请直接返回润色后的文本,不要添加任何其他说明或解释。

原文:${text.trim()}`;

      const polishedText = await runOllamaNonStream(systemPrompt, "qwen3:8b");

      if (!polishedText) {
        throw new Error("AI模型返回为空");
      }

      return {
        success: true,
        data: polishedText.trim()
      };
    } catch (error: any) {
      console.error("润色文本失败:", error);
      return {
        success: false,
        error: error.message || "润色服务出现错误"
      };
    }
  });
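  // --- Illustrative sketch (not part of this commit) -------------------------
  // Invoking the "polish-text" handler from the renderer could look roughly
  // like this (again assuming `ipcRenderer.invoke` is exposed to the page; the
  // result shape mirrors the returns above, and the UI helpers are hypothetical):
  //
  //   const result = await ipcRenderer.invoke("polish-text", draftText);
  //   if (result.success) {
  //     showPolishedText(result.data);   // hypothetical UI helper
  //   } else {
  //     showPolishError(result.error);   // hypothetical UI helper
  //   }
  // ----------------------------------------------------------------------------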
  // Handler: check the service and, if it is not running, wake it with a lightweight command
  ipcMain.handle("ensure-ollama-running", async () => {
    let isRunning = await checkOllamaServer();