Skip to content

Commit 204f00c

Browse files
Merge commit 204f00c — merged 2 parents: 2d65d89 + 9051ed8

File tree

2 files changed

+4
-2
lines changed

2 files changed

+4
-2
lines changed

packages/model/src/model/llm.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -196,7 +196,7 @@ export class LLMInstanceScope {
196196
}
197197

198198
async generate(request: GenerateTextRequest, cb: GenerateTextCallback) {
199-
const { conversation, maxTokens, assistantRoleName, stopTexts } = request;
199+
const { conversation, maxTokens, assistantRoleName, stopTexts, temperature, top_p } = request;
200200
const tokens = await this.getTokens(conversation, maxTokens);
201201
tokens.push(...(await this.tokenizer.encodeIds(`${assistantRoleName}:`)));
202202
console.log("debug: ", await this.tokenizer.decodeIds(tokens));
@@ -222,7 +222,7 @@ export class LLMInstanceScope {
222222
this.forward(input, this.kvCacheLength + inputTokenLength + step)
223223
);
224224
this.tvm.endScope();
225-
const nextToken = await this.sampleTokenFromLogits(logits);
225+
const nextToken = await this.sampleTokenFromLogits(logits,temperature, top_p);
226226
logits.dispose();
227227

228228
tokens.push(nextToken);

packages/model/src/types/modelApi.ts

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -47,6 +47,8 @@ export type GenerateTextRequest = {
4747
stopTexts: string[],
4848
maxTokens: number,
4949
assistantRoleName: string,
50+
temperature: number,
51+
top_p: number,
5052
}
5153

5254
export type GenerateTextResponse = {

0 commit comments

Comments (0)