From 0aa5f1fb854ebde19a46b785f85bbc5940c39c37 Mon Sep 17 00:00:00 2001
From: Matt Rubens
Date: Fri, 20 Dec 2024 09:45:30 -0500
Subject: [PATCH] Add the o1 model

---
 src/api/providers/openai-native.ts | 1 +
 src/shared/api.ts                  | 8 ++++++++
 2 files changed, 9 insertions(+)

diff --git a/src/api/providers/openai-native.ts b/src/api/providers/openai-native.ts
index 70d55b7..139b3a2 100644
--- a/src/api/providers/openai-native.ts
+++ b/src/api/providers/openai-native.ts
@@ -24,6 +24,7 @@ export class OpenAiNativeHandler implements ApiHandler {
 
 	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
 		switch (this.getModel().id) {
+			case "o1":
 			case "o1-preview":
 			case "o1-mini": {
 				// o1 doesnt support streaming, non-1 temp, or system prompt
diff --git a/src/shared/api.ts b/src/shared/api.ts
index b30c5ed..426e95b 100644
--- a/src/shared/api.ts
+++ b/src/shared/api.ts
@@ -481,6 +481,14 @@ export type OpenAiNativeModelId = keyof typeof openAiNativeModels
 export const openAiNativeDefaultModelId: OpenAiNativeModelId = "gpt-4o"
 export const openAiNativeModels = {
 	// don't support tool use yet
+	"o1": {
+		maxTokens: 100_000,
+		contextWindow: 200_000,
+		supportsImages: true,
+		supportsPromptCache: false,
+		inputPrice: 15,
+		outputPrice: 60,
+	},
 	"o1-preview": {
 		maxTokens: 32_768,
 		contextWindow: 128_000,
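
Note on the change: the new "o1" case is routed through the same branch as "o1-preview" and "o1-mini", which (per the in-code comment) must avoid streaming, a non-default temperature, and the system role. Below is a minimal, self-contained sketch of what that shared non-streaming path could look like, assuming the openai v4 SDK; the standalone function name, the message-conversion step, and the yielded chunk shape are illustrative assumptions, not taken from this patch.

import OpenAI from "openai"
import { Anthropic } from "@anthropic-ai/sdk"

// Sketch of the non-streaming path shared by the o1 family (assumed shape,
// not the patch's actual implementation).
async function* createO1Message(
	client: OpenAI,
	modelId: "o1" | "o1-preview" | "o1-mini",
	systemPrompt: string,
	messages: Anthropic.Messages.MessageParam[],
) {
	// o1 models reject streaming, non-default temperature, and the system role,
	// so the system prompt is folded in as a plain user message.
	const openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
		{ role: "user", content: systemPrompt },
		// Simplified conversion of Anthropic-shaped messages; only plain string
		// contents are handled here, block arrays are stringified.
		...messages.map((m) => ({
			role: m.role,
			content: typeof m.content === "string" ? m.content : JSON.stringify(m.content),
		})),
	]

	// Single non-streaming request; the whole completion is yielded as one chunk.
	const response = await client.chat.completions.create({
		model: modelId,
		messages: openAiMessages,
	})

	yield { type: "text" as const, text: response.choices[0]?.message.content ?? "" }
	yield {
		type: "usage" as const,
		inputTokens: response.usage?.prompt_tokens ?? 0,
		outputTokens: response.usage?.completion_tokens ?? 0,
	}
}

Reusing the existing o1-preview/o1-mini branch keeps the handler uniform: the only model-specific differences live in the openAiNativeModels metadata table (token limits, context window, image support, and pricing), which is what the second hunk adds for "o1".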