diff --git a/src/api/providers/openai.ts b/src/api/providers/openai.ts
index f1c576b..71308ed 100644
--- a/src/api/providers/openai.ts
+++ b/src/api/providers/openai.ts
@@ -32,18 +32,24 @@ export class OpenAiHandler implements ApiHandler {
 		}
 	}
 
+	// Include stream_options for OpenAI Compatible providers if the checkbox is checked
 	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
 		const openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
 			{ role: "system", content: systemPrompt },
 			...convertToOpenAiMessages(messages),
 		]
-		const stream = await this.client.chat.completions.create({
+		const requestOptions: OpenAI.Chat.ChatCompletionCreateParams = {
 			model: this.options.openAiModelId ?? "",
 			messages: openAiMessages,
 			temperature: 0,
 			stream: true,
-			stream_options: { include_usage: true },
-		})
+		}
+
+		if (this.options.includeStreamOptions ?? true) {
+			requestOptions.stream_options = { include_usage: true }
+		}
+
+		const stream = await this.client.chat.completions.create(requestOptions)
 		for await (const chunk of stream) {
 			const delta = chunk.choices[0]?.delta
 			if (delta?.content) {
diff --git a/src/shared/api.ts b/src/shared/api.ts
index 87ff2db..4f9d466 100644
--- a/src/shared/api.ts
+++ b/src/shared/api.ts
@@ -34,6 +34,8 @@ export interface ApiHandlerOptions {
 	openAiNativeApiKey?: string
 	azureApiVersion?: string
 	openRouterUseMiddleOutTransform?: boolean
+	includeStreamOptions?: boolean
+	setAzureApiVersion?: boolean
 }
 
 export type ApiConfiguration = ApiHandlerOptions & {
diff --git a/webview-ui/src/components/settings/ApiOptions.tsx b/webview-ui/src/components/settings/ApiOptions.tsx
index 06839c5..1e21999 100644
--- a/webview-ui/src/components/settings/ApiOptions.tsx
+++ b/webview-ui/src/components/settings/ApiOptions.tsx
@@ -445,6 +445,24 @@ const ApiOptions = ({ showModelOptions, apiErrorMessage, modelIdErrorMessage }:
 						placeholder={"Enter Model ID..."}>
 						Model ID
+