
Commit

Merge pull request #605 from Portkey-AI/feat/openai-o1-model-support
feat: add support for max_completion_tokens param
VisargD authored Sep 12, 2024
2 parents 3f5ffdb + 6f42fc8 commit da42af3
Showing 1 changed file with 9 additions and 0 deletions.
src/providers/openai/chatComplete.ts: 9 additions & 0 deletions
@@ -90,6 +90,15 @@ export const OpenAIChatCompleteConfig: ProviderConfig = {
   stream_options: {
     param: 'stream_options',
   },
+  service_tier: {
+    param: 'service_tier',
+  },
+  parallel_tool_calls: {
+    param: 'parallel_tool_calls',
+  },
+  max_completion_tokens: {
+    param: 'max_completion_tokens',
+  },
 };
 
 export interface OpenAIChatCompleteResponse extends ChatCompletionResponse {
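For context, the new entries follow the same param-mapping convention as the rest of OpenAIChatCompleteConfig: each key names a field accepted on the incoming request, and param gives the name under which that field is forwarded to OpenAI. Below is a minimal, hypothetical sketch of how such a mapping table can be consumed. transformParams is an illustrative name and the ProviderConfig shape is simplified relative to the gateway's real definition.

// Hypothetical sketch: consuming a param-mapping config like the one above.
// Each incoming request field is looked up in the config and forwarded under
// the provider-side name given by `param`; unmapped fields are dropped.
type ProviderConfigEntry = { param: string; default?: unknown };
type ProviderConfig = Record<string, ProviderConfigEntry>;

const OpenAIChatCompleteConfig: ProviderConfig = {
  max_tokens: { param: 'max_tokens' },
  service_tier: { param: 'service_tier' },
  parallel_tool_calls: { param: 'parallel_tool_calls' },
  max_completion_tokens: { param: 'max_completion_tokens' },
};

function transformParams(
  config: ProviderConfig,
  request: Record<string, unknown>
): Record<string, unknown> {
  const out: Record<string, unknown> = {};
  for (const [key, value] of Object.entries(request)) {
    const entry = config[key];
    if (entry !== undefined) {
      out[entry.param] = value; // forward under the provider's parameter name
    }
  }
  return out;
}

// With this commit, a request aimed at an o1-style model can include
// max_completion_tokens and have it passed through to OpenAI unchanged:
console.log(
  transformParams(OpenAIChatCompleteConfig, {
    max_completion_tokens: 1024,
    parallel_tool_calls: false,
  })
);
// -> { max_completion_tokens: 1024, parallel_tool_calls: false }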
