This commit is contained in:
Saoud Rizwan
2024-10-16 18:00:53 -04:00
parent fb5a8ea524
commit 90b0c8243c

View File

@@ -24,13 +24,16 @@ export class OpenAiNativeHandler implements ApiHandler {
 	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
 		let systemPromptMessage: OpenAI.Chat.ChatCompletionMessageParam
+		let temperature = 0
 		switch (this.getModel().id) {
 			case "o1-preview":
 			case "o1-mini":
 				systemPromptMessage = { role: "user", content: systemPrompt }
+				temperature = 1
 				break
 			default:
 				systemPromptMessage = { role: "system", content: systemPrompt }
+				temperature = 0
 		}
 		const openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
@@ -41,7 +44,7 @@ export class OpenAiNativeHandler implements ApiHandler {
 		const stream = await this.client.chat.completions.create({
 			model: this.getModel().id,
 			// max_completion_tokens: this.getModel().info.maxTokens,
-			temperature: 0,
+			temperature,
 			messages: openAiMessages,
 			stream: true,
 			stream_options: { include_usage: true },