Skip to content

Commit

Permalink
Merge remote-tracking branch 'upstream/main'
Browse files Browse the repository at this point in the history
  • Loading branch information
bentwnghk committed Oct 29, 2024
2 parents 8612f48 + d5f029e commit 3961c67
Show file tree
Hide file tree
Showing 2 changed files with 37 additions and 6 deletions.
25 changes: 22 additions & 3 deletions src/libs/agent-runtime/zhipu/index.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -107,16 +107,35 @@ describe('LobeZhipuAI', () => {
{ content: [{ type: 'text', text: 'Hello again' }], role: 'user' },
],
model: 'glm-4',
temperature: 1.6,
top_p: 1,
});

const calledWithParams = spyOn.mock.calls[0][0];

expect(calledWithParams.messages[1].content).toEqual([{ type: 'text', text: 'Hello again' }]);
expect(calledWithParams.temperature).toBe(0.8); // temperature should be divided by two
expect(calledWithParams.top_p).toEqual(1);
});

it('should pass parameters correctly', async () => {
  const spyOn = vi.spyOn(instance['client'].chat.completions, 'create');

  await instance.chat({
    messages: [
      { content: 'Hello', role: 'user' },
      { content: [{ type: 'text', text: 'Hello again' }], role: 'user' },
    ],
    model: 'glm-4-alltools',
    temperature: 0,
    top_p: 1,
  });

  const calledWithParams = spyOn.mock.calls[0][0];

  expect(calledWithParams.messages[1].content).toEqual([{ type: 'text', text: 'Hello again' }]);
  // temperature === 0 turns sampling off via the do_sample flag in the payload
  expect((calledWithParams as any).do_sample).toBeTruthy();
  // glm-4-alltools halves the temperature then clamps into [0.01, 0.99]: 0 / 2 -> 0.01
  expect(calledWithParams.temperature).toBe(0.01);
  // glm-4-alltools clamps top_p into [0.01, 0.99]: 1 -> 0.99
  expect(calledWithParams.top_p).toEqual(0.99);
});

describe('Error', () => {
Expand Down
18 changes: 15 additions & 3 deletions src/libs/agent-runtime/zhipu/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,24 @@ import { LobeOpenAICompatibleFactory } from '../utils/openaiCompatibleFactory';
export const LobeZhipuAI = LobeOpenAICompatibleFactory({
baseURL: 'https://open.bigmodel.cn/api/paas/v4',
chatCompletion: {
  /**
   * Maps the generic ChatStreamPayload onto ZhipuAI's OpenAI-compatible params.
   *
   * - `do_sample` is set to false-style greedy decoding only when temperature === 0
   *   (ZhipuAI uses this flag rather than temperature alone).
   * - For `glm-4-alltools`, temperature is halved and both temperature and top_p
   *   are clamped into the model's accepted open interval (0, 1), i.e. [0.01, 0.99].
   * - For all other models, temperature is halved (mapping the 0–2 OpenAI range
   *   onto ZhipuAI's 0–1 range) and top_p is passed through unchanged.
   * - `undefined` temperature/top_p are forwarded as `undefined` so the provider
   *   applies its own defaults.
   */
  handlePayload: ({ model, temperature, top_p, ...payload }: ChatStreamPayload) =>
    ({
      ...payload,
      do_sample: temperature === 0,
      model,
      stream: true,
      ...(model === 'glm-4-alltools'
        ? {
            temperature:
              temperature !== undefined
                ? Math.max(0.01, Math.min(0.99, temperature / 2))
                : undefined,
            top_p: top_p !== undefined ? Math.max(0.01, Math.min(0.99, top_p)) : undefined,
          }
        : {
            temperature: temperature !== undefined ? temperature / 2 : undefined,
            top_p,
          }),
    }) as OpenAI.ChatCompletionCreateParamsStreaming,
},
debug: {
Expand Down

0 comments on commit 3961c67

Please sign in to comment.