
Commit 4a3af45

format and fix generative-model tests
1 parent 3557b15 commit 4a3af45

3 files changed: +33 / -30 lines changed

packages/ai/src/methods/chat-session.test.ts

Lines changed: 4 additions & 4 deletions
@@ -266,8 +266,8 @@ describe('ChatSession', () => {
       undefined,
       requestOptions
     );
-    await expect(chatSession.sendMessageStream('hello', singleRequestOptions)).to.be
-      .rejected;
+    await expect(chatSession.sendMessageStream('hello', singleRequestOptions))
+      .to.be.rejected;
     expect(generateContentStreamStub).to.be.calledWith(
       fakeApiSettings,
       'a-model',
@@ -297,8 +297,8 @@ describe('ChatSession', () => {
       undefined,
       requestOptions
     );
-    await expect(chatSession.sendMessageStream('hello', singleRequestOptions)).to.be
-      .rejected;
+    await expect(chatSession.sendMessageStream('hello', singleRequestOptions))
+      .to.be.rejected;
     expect(generateContentStreamStub).to.be.calledWith(
       fakeApiSettings,
       'a-model',
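Both hunks in this file are formatting only: the chai-as-promised assertion is re-wrapped so the `.to.be.rejected` chain sits together on the continuation line. A minimal standalone sketch of that assertion pattern, assuming a mocha-style runner and chai with the chai-as-promised plugin registered (the failing promise is just a stand-in for the stubbed sendMessageStream call):

```ts
import { expect, use } from 'chai';
import chaiAsPromised from 'chai-as-promised';

use(chaiAsPromised);

it('asserts that a promise rejects', async () => {
  // Stand-in for the stubbed sendMessageStream call in the real test.
  const failing = Promise.reject(new Error('stream failed'));

  // chai-as-promised turns `.rejected` into an assertion that the promise
  // rejects; awaiting the assertion reports any failure to the test runner.
  await expect(failing).to.be.rejected;
});
```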

packages/ai/src/models/generative-model.test.ts

Lines changed: 25 additions & 25 deletions
@@ -293,34 +293,34 @@ describe('GenerativeModel', () => {
         signal: singleRequestOptions.signal
       })
     );
-  it('passes base model params through to ChatSession when there are no startChatParams', async () => {
-    const genModel = new GenerativeModel(fakeAI, {
-      model: 'my-model',
-      generationConfig: {
-        topK: 1
-      }
-    });
-    const chatSession = genModel.startChat();
-    expect(chatSession.params?.generationConfig).to.deep.equal({
+  });
+  it('passes base model params through to ChatSession when there are no startChatParams', async () => {
+    const genModel = new GenerativeModel(fakeAI, {
+      model: 'my-model',
+      generationConfig: {
       topK: 1
-    });
-    restore();
+      }
   });
-  it('overrides base model params with startChatParams', () => {
-    const genModel = new GenerativeModel(fakeAI, {
-      model: 'my-model',
-      generationConfig: {
-        topK: 1
-      }
-    });
-    const chatSession = genModel.startChat({
-      generationConfig: {
-        topK: 2
-      }
-    });
-    expect(chatSession.params?.generationConfig).to.deep.equal({
+    const chatSession = genModel.startChat();
+    expect(chatSession.params?.generationConfig).to.deep.equal({
+      topK: 1
+    });
+    restore();
+  });
+  it('overrides base model params with startChatParams', () => {
+    const genModel = new GenerativeModel(fakeAI, {
+      model: 'my-model',
+      generationConfig: {
+        topK: 1
+      }
+    });
+    const chatSession = genModel.startChat({
+      generationConfig: {
       topK: 2
-    });
+      }
+    });
+    expect(chatSession.params?.generationConfig).to.deep.equal({
+      topK: 2
     });
   });
   it('passes params through to chat.sendMessage', async () => {
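The added `});` on the first `+` line appears to close a previously unterminated test block; the two `startChat` tests that follow are then re-added with corrected nesting, which is why 25 lines move without any behavioural change. As a rough sketch of the behaviour those two tests cover (base generationConfig flowing into startChat unless StartChatParams override it), assuming the public firebase/ai entry points rather than the fakeAI test double used here:

```ts
import { initializeApp } from 'firebase/app';
import { getAI, getGenerativeModel } from 'firebase/ai';

// Hypothetical app config; the real tests construct a fakeAI double instead.
const app = initializeApp({ projectId: 'demo', apiKey: 'fake-key', appId: 'fake-app-id' });
const ai = getAI(app);

const model = getGenerativeModel(ai, {
  model: 'my-model',
  generationConfig: { topK: 1 }
});

// No StartChatParams: the base generationConfig ({ topK: 1 }) is passed through.
const inherited = model.startChat();

// Explicit StartChatParams: the base config is overridden, so { topK: 2 } wins.
const overridden = model.startChat({ generationConfig: { topK: 2 } });
```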

packages/ai/src/requests/request.test.ts

Lines changed: 4 additions & 1 deletion
@@ -750,7 +750,10 @@ describe('request methods', () => {
       '{}'
     );
 
-    await expect(requestPromise).to.be.rejectedWith(AIError, /Network failure/);
+    await expect(requestPromise).to.be.rejectedWith(
+      AIError,
+      /Network failure/
+    );
     expect(removeSpy).to.have.been.calledOnce;
   });
 
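The last hunk is also formatting only: the two-argument rejectedWith call is wrapped across lines, presumably to satisfy the line-length limit. A small hedged sketch of that assertion form, again assuming chai-as-promised; FakeAIError here is a local stand-in for the SDK's AIError class:

```ts
import { expect, use } from 'chai';
import chaiAsPromised from 'chai-as-promised';

use(chaiAsPromised);

// Local stand-in for the SDK's AIError class.
class FakeAIError extends Error {}

it('matches both the error class and the message', async () => {
  const requestPromise = Promise.reject(new FakeAIError('Network failure: fetch failed'));

  // rejectedWith(constructor, matcher) asserts that the rejection value is an
  // instance of the constructor and that its message matches the pattern.
  await expect(requestPromise).to.be.rejectedWith(
    FakeAIError,
    /Network failure/
  );
});
```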