Commit 3f4666c

Merge pull request #32 from workfunction/main
2 parents dbf9d14 + 85ea7af · commit 3f4666c

File tree

3 files changed: +65 −60

global.d.ts
+1 −7

@@ -214,13 +214,7 @@ declare namespace Bob {
 
   // https://ripperhe.gitee.io/bob/#/plugin/api/option
   type Option = {
-    apiKeys: string;
-    apiUrl: string;
-    customSystemPrompt: string;
-    customUserPrompt: string;
-    deploymentName: string;
-    model: string;
-    polishingMode: "simplicity" | "detailed";
+    [propName: string]: string;
   };
 
 
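With the index signature, Bob.Option no longer has to enumerate every setting declared in src/info.json; any option value is simply a string. A minimal sketch of the effect, using hypothetical values and the Bob namespace declared in this file:

//@ts-check
// Minimal sketch with hypothetical values: because Option is now
// `{ [propName: string]: string }`, keys introduced later in src/info.json
// (apiVersion, customModel, ...) type-check without editing global.d.ts again.
/** @type {Bob.Option} */
var exampleOption = {
  apiKeys: "sk-xxxxxxxx",
  apiUrl: "https://api.openai.com",
  model: "custom",
  customModel: "gpt-3.5-turbo",
  apiVersion: "2023-03-15-preview",
};
var { customModel, apiVersion } = exampleOption; // both are typed as string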

src/info.json
+26 −9

@@ -25,11 +25,22 @@
       "identifier": "deploymentName",
       "type": "text",
       "title": "Dep. Name",
-      "desc": "可选项。此值为在部署模型时为部署选择的自定义名称,可在 Azure 门户中的 “资源管理>“部署下查看",
+      "desc": "可选项。此值为在部署 Azure 模型时为部署选择的自定义名称,可在 Azure 门户中的 “资源管理>“部署下查看",
       "textConfig": {
         "type": "visible"
       }
     },
+    {
+      "identifier": "apiVersion",
+      "type": "text",
+      "title": "API Version",
+      "defaultValue": "2023-03-15-preview",
+      "desc": "可选项。此值为在使用 Azure 模型时采用的 Chat completions API 版本,不支持 2023-03-15-preview 之前的版本",
+      "textConfig": {
+        "type": "visible",
+        "placeholderText": "2023-03-15-preview"
+      }
+    },
     {
       "identifier": "apiKeys",
       "type": "text",
@@ -47,6 +58,10 @@
       "title": "模型",
       "defaultValue": "gpt-3.5-turbo-0613",
       "menuValues": [
+        {
+          "title": "custom",
+          "value": "custom"
+        },
         {
           "title": "gpt-3.5-turbo-0613 (recommended)",
           "value": "gpt-3.5-turbo-0613"
@@ -86,17 +101,19 @@
         {
           "title": "gpt-4-32k-0613",
           "value": "gpt-4-32k-0613"
-        },
-        {
-          "title": "text-davinci-003",
-          "value": "text-davinci-003"
-        },
-        {
-          "title": "text-davinci-002",
-          "value": "text-davinci-002"
         }
       ]
     },
+    {
+      "identifier": "customModel",
+      "type": "text",
+      "title": "自定义模型",
+      "desc": "可选项。当 Model 选择为 custom 时,此项为必填项。请填写有效的模型名称",
+      "textConfig": {
+        "type": "visible",
+        "placeholderText": "gpt-3.5-turbo"
+      }
+    },
     {
       "identifier": "customSystemPrompt",
       "type": "text",

src/main.js
+38 −44

@@ -1,18 +1,6 @@
 //@ts-check
 
 var lang = require("./lang.js");
-var ChatGPTModels = [
-  "gpt-3.5-turbo",
-  "gpt-3.5-turbo-16k",
-  "gpt-3.5-turbo-0301",
-  "gpt-3.5-turbo-0613",
-  "gpt-4",
-  "gpt-4-0314",
-  "gpt-4-0613",
-  "gpt-4-32k",
-  "gpt-4-32k-0314",
-  "gpt-4-32k-0613",
-];
 var HttpErrorCodes = {
   "400": "Bad Request",
   "401": "Unauthorized",
@@ -85,7 +73,7 @@ function buildHeader(isAzureServiceProvider, apiKey) {
 
 /**
  * @param {string} basePrompt
- * @param {"simplicity" | "detailed"} polishingMode
+ * @param {string} polishingMode
  * @param {Bob.TranslateQuery} query
  * @returns {string}
  */
@@ -150,11 +138,10 @@ function replacePromptKeywords(prompt, query) {
 }
 
 /**
- * @param {typeof ChatGPTModels[number]} model
- * @param {boolean} isChatGPTModel
+ * @param {string} model
  * @param {Bob.TranslateQuery} query
  * @returns {{
- *  model: typeof ChatGPTModels[number];
+ *  model: string;
  *  temperature: number;
  *  max_tokens: number;
  *  top_p: number;
@@ -167,14 +154,14 @@ function replacePromptKeywords(prompt, query) {
  *  prompt?: string;
  * }}
  */
-function buildRequestBody(model, isChatGPTModel, query) {
+function buildRequestBody(model, query) {
   const { customSystemPrompt, customUserPrompt, polishingMode } = $option;
 
   const systemPrompt = generateSystemPrompt(replacePromptKeywords(customSystemPrompt, query), polishingMode, query);
   const userPrompt = customUserPrompt ? `${replacePromptKeywords(customUserPrompt, query)}:\n\n"${query.text}"` : query.text;
 
   const standardBody = {
-    model,
+    model: model,
     stream: true,
     temperature: 0.2,
     max_tokens: 1000,
@@ -183,24 +170,19 @@ function buildRequestBody(model, isChatGPTModel, query) {
     presence_penalty: 1,
   };
 
-  if (isChatGPTModel) {
-    return {
-      ...standardBody,
-      messages: [
-        {
-          role: "system",
-          content: systemPrompt,
-        },
-        {
-          role: "user",
-          content: userPrompt,
-        },
-      ],
-    };
-  }
   return {
     ...standardBody,
-    prompt: `${systemPrompt}\n\n${userPrompt}`,
+    model: model,
+    messages: [
+      {
+        role: "system",
+        content: systemPrompt,
+      },
+      {
+        role: "user",
+        content: userPrompt,
+      },
+    ],
   };
 }
 
@@ -223,12 +205,11 @@ function handleError(query, result) {
 
 /**
  * @param {Bob.TranslateQuery} query
- * @param {boolean} isChatGPTModel
  * @param {string} targetText
  * @param {string} textFromResponse
  * @returns {string}
 */
-function handleResponse(query, isChatGPTModel, targetText, textFromResponse) {
+function handleResponse(query, targetText, textFromResponse) {
   if (textFromResponse !== '[DONE]') {
     try {
       const dataObj = JSON.parse(textFromResponse);
@@ -244,7 +225,7 @@ function handleResponse(query, isChatGPTModel, targetText, textFromResponse) {
       return targetText;
     }
 
-    const content = isChatGPTModel ? choices[0].delta.content : choices[0].text;
+    const content = choices[0].delta.content;
     if (content !== undefined) {
       targetText += content;
       query.onStream({
@@ -282,7 +263,18 @@ function translate(query, completion) {
     });
   }
 
-  const { model, apiKeys, apiUrl, deploymentName } = $option;
+  const { model, customModel, apiKeys, apiVersion, apiUrl, deploymentName } = $option;
+
+  const isCustomModelRequired = model === "custom";
+  if (isCustomModelRequired && !customModel) {
+    query.onCompletion({
+      error: {
+        type: "param",
+        message: "配置错误 - 请确保您在插件配置中填入了正确的自定义模型名称",
+        addtion: "请在插件配置中填写自定义模型名称",
+      },
+    });
+  }
 
   if (!apiKeys) {
     completion({
@@ -293,20 +285,22 @@ function translate(query, completion) {
       },
     });
   }
+
+  const modelValue = isCustomModelRequired ? customModel : model;
+
   const trimmedApiKeys = apiKeys.endsWith(",") ? apiKeys.slice(0, -1) : apiKeys;
   const apiKeySelection = trimmedApiKeys.split(",").map(key => key.trim());
   const apiKey = apiKeySelection[Math.floor(Math.random() * apiKeySelection.length)];
 
   const modifiedApiUrl = ensureHttpsAndNoTrailingSlash(apiUrl || "https://api.openai.com");
 
-  const isChatGPTModel = ChatGPTModels.includes(model);
   const isAzureServiceProvider = modifiedApiUrl.includes("openai.azure.com");
-  let apiUrlPath = isChatGPTModel ? "/v1/chat/completions" : "/v1/completions";
+  let apiUrlPath = "/v1/chat/completions";
+  const apiVersionQuery = apiVersion ? `?api-version=${apiVersion}` : "?api-version=2023-03-15-preview";
 
   if (isAzureServiceProvider) {
     if (deploymentName) {
-      apiUrlPath = `/openai/deployments/${deploymentName}`;
-      apiUrlPath += isChatGPTModel ? "/chat/completions?api-version=2023-03-15-preview" : "/completions?api-version=2022-12-01";
+      apiUrlPath = `/openai/deployments/${deploymentName}/chat/completions${apiVersionQuery}`;
     } else {
       completion({
         error: {
@@ -319,7 +313,7 @@ function translate(query, completion) {
   }
 
   const header = buildHeader(isAzureServiceProvider, apiKey);
-  const body = buildRequestBody(model, isChatGPTModel, query);
+  const body = buildRequestBody(modelValue, query);
 
   let targetText = ""; // 初始化拼接结果变量
   let buffer = ""; // 新增 buffer 变量
@@ -348,7 +342,7 @@ function translate(query, completion) {
     if (match) {
       // 如果是一个完整的消息,处理它并从缓冲变量中移除
       const textFromResponse = match[1].trim();
-      targetText = handleResponse(query, isChatGPTModel, targetText, textFromResponse);
+      targetText = handleResponse(query, targetText, textFromResponse);
       buffer = buffer.slice(match[0].length);
     } else {
       // 如果没有完整的消息,等待更多的数据
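For Azure endpoints, the rewritten branch always targets the chat completions route and takes the API version from the new option. A small sketch of the URL this produces, with hypothetical resource and deployment names:

// Sketch with hypothetical names, reproducing the concatenation done in translate().
var modifiedApiUrl = "https://my-resource.openai.azure.com"; // hypothetical Azure endpoint
var deploymentName = "my-gpt35-deployment";                  // hypothetical deployment name
var apiVersionQuery = "?api-version=2023-03-15-preview";     // default used when apiVersion is unset

var apiUrlPath = `/openai/deployments/${deploymentName}/chat/completions${apiVersionQuery}`;
console.log(modifiedApiUrl + apiUrlPath);
// -> https://my-resource.openai.azure.com/openai/deployments/my-gpt35-deployment/chat/completions?api-version=2023-03-15-preview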
