
Commit 890d0fc

feat: support openai whisper (#12)
1 parent: a42f25d


2 files changed: +43 −11 lines changed


main/background.ts

Lines changed: 8 additions & 2 deletions
@@ -12,6 +12,7 @@ import {
   deleteModel,
   downloadModelSync,
   getPath,
+  checkOpanAiWhisper
 } from "./helpers/whisper";
 import { extractAudio } from "./helpers/ffmpeg";
 import translate from "./helpers/translate";
@@ -115,12 +116,17 @@ ipcMain.on("handleTask", async (event, { files, formData }) => {
         "main.exe",
       );
     }
+    let runShell = `"${mainPath}" -m "${whisperPath}models/ggml-${whisperModel}.bin" -f "${audioFile}" -osrt -of "${srtFile}" -l ${sourceLanguage}`
+    const hasOpenAiWhiaper = await checkOpanAiWhisper();
+    if (hasOpenAiWhiaper) {
+      runShell = `whisper "${audioFile}" --model ${whisperModel} --device cuda --output_format srt --output_dir ${directory} --language ${sourceLanguage}`
+    }
     event.sender.send("taskStatusChange", file, "extractSubtitle", "loading");
-    exec(
-      `"${mainPath}" -m "${whisperPath}models/ggml-${whisperModel}.bin" -f "${audioFile}" -osrt -of "${srtFile}" -l ${sourceLanguage}`,
+    exec(runShell,
       async (error, stdout, stderr) => {
         if (error) {
           event.sender.send("message", error);
+          return;
         }
         event.sender.send(
           "taskStatusChange",

main/helpers/whisper.ts

Lines changed: 35 additions & 9 deletions
@@ -1,11 +1,12 @@
-import { exec } from "child_process";
+import { exec, spawn } from "child_process";
 import { app } from "electron";
 import path from "path";
 import fs from "fs";
 import git from "isomorphic-git";
 import http from "isomorphic-git/http/node";
 import replaceModelSource from "./model-source";
 import { isDarwin, isWin32 } from "./utils";
+import { log } from "console";

 export const getPath = (key?: string) => {
   const userDataPath = app.getPath("userData");
@@ -170,15 +171,40 @@ export const downloadModelSync = async (model, source) => {
     console.log("完成模型下载地址替换", model);
     console.log("正在安装 whisper.cpp 模型");
     return new Promise((resolve, reject) => {
-      exec(`${shell} "${downShellPath}" ${model}`, (err, stdout) => {
-        if (err) {
-          reject(err);
-        } else {
-          resolve('ok')
-        }
-      });
+      exec(`${shell} "${downShellPath}" ${model}`, (err, stdout) => {
+        if (err) {
+          reject(err);
+        } else {
+          resolve('ok')
+        }
+      });
     })
   } catch (error) {
-    console.log(error)
+    console.log(error)
   }
 };
+
+
+export async function checkOpanAiWhisper() {
+  return new Promise((resolve, reject) => {
+    let env = process.env;
+    env.PYTHONIOENCODING = 'UTF-8';
+    const childProcess = spawn('whisper', ['-h'], { env: env });
+    childProcess.on('error', (error: { code: string }) => {
+      if (error.code === 'ENOENT') {
+        resolve(false);
+      } else {
+        reject(error);
+      }
+    });
+    childProcess.on('exit', (code) => {
+      console.log('code: ', code);
+      if (code === 0) {
+        console.log('openai whisper ready')
+        resolve(true);
+      } else {
+        resolve(false);
+      }
+    });
+  });
+}
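
checkOpanAiWhisper probes for the CLI by spawning "whisper -h": an ENOENT error resolves to false (binary not on PATH), any other spawn error rejects, and a zero exit code resolves to true. Probing with -h keeps the check cheap, since printing help does not load a model. The same pattern generalizes to any external command; a small sketch in which commandExists is an illustrative name, not part of the commit:

import { spawn } from "child_process";

// Generic form of the detection used by checkOpanAiWhisper:
// resolve(false) when the binary is missing (ENOENT), resolve(true) on exit code 0.
function commandExists(cmd: string, args: string[] = ["-h"]): Promise<boolean> {
  return new Promise((resolve, reject) => {
    const child = spawn(cmd, args, {
      env: { ...process.env, PYTHONIOENCODING: "UTF-8" },
    });
    child.on("error", (error: NodeJS.ErrnoException) => {
      if (error.code === "ENOENT") {
        resolve(false); // command not found on PATH
      } else {
        reject(error);  // unexpected spawn failure
      }
    });
    child.on("exit", (code) => resolve(code === 0));
  });
}

// Usage mirroring the handler in main/background.ts:
// const hasOpenAiWhisper = await commandExists("whisper");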
