Merge pull request #698 from RooVetGit/more_o3_mini_fixes

Make o3-mini work in glama
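o3-mini does not accept a temperature parameter, so the Glama handler now builds its request options without temperature and only adds temperature: 0 when the model supports it. A minimal sketch of the pattern, assuming an OpenAI-compatible client as used elsewhere in the handler (the buildRequestOptions helper here is illustrative; supportsTemperature and requestOptions come from the diff below):

import OpenAI from "openai"

// Sketch of the gating pattern: build the params without temperature,
// then add it only for models that accept it.
function buildRequestOptions(
	modelId: string,
	messages: OpenAI.Chat.ChatCompletionMessageParam[],
	maxTokens: number,
): OpenAI.Chat.ChatCompletionCreateParams {
	const requestOptions: OpenAI.Chat.ChatCompletionCreateParams = {
		model: modelId,
		max_tokens: maxTokens,
		messages,
		stream: true,
	}

	// o3-mini rejects the temperature parameter, so skip it for those models.
	if (!modelId.startsWith("openai/o3-mini")) {
		requestOptions.temperature = 0
	}

	return requestOptions
}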
Matt Rubens authored 2025-01-31 23:22:43 -05:00 · committed by GitHub


@@ -72,16 +72,19 @@ export class GlamaHandler implements ApiHandler, SingleCompletionHandler {
 			maxTokens = 8_192
 		}
 
-		const { data: completion, response } = await this.client.chat.completions
-			.create(
-				{
-					model: this.getModel().id,
-					max_tokens: maxTokens,
-					temperature: 0,
-					messages: openAiMessages,
-					stream: true,
-				},
-				{
+		const requestOptions: OpenAI.Chat.ChatCompletionCreateParams = {
+			model: this.getModel().id,
+			max_tokens: maxTokens,
+			messages: openAiMessages,
+			stream: true,
+		}
+
+		if (this.supportsTemperature()) {
+			requestOptions.temperature = 0
+		}
+
+		const { data: completion, response } = await this.client.chat.completions
+			.create(requestOptions, {
 				headers: {
 					"X-Glama-Metadata": JSON.stringify({
 						labels: [
@@ -92,8 +95,7 @@ export class GlamaHandler implements ApiHandler, SingleCompletionHandler {
 						],
 					}),
 				},
-			},
-		)
+			})
 			.withResponse()
 
 		const completionRequestId = response.headers.get("x-completion-request-id")
@@ -148,6 +150,10 @@ export class GlamaHandler implements ApiHandler, SingleCompletionHandler {
 		}
 	}
 
+	private supportsTemperature(): boolean {
+		return !this.getModel().id.startsWith("openai/o3-mini")
+	}
+
 	getModel(): { id: string; info: ModelInfo } {
 		const modelId = this.options.glamaModelId
 		const modelInfo = this.options.glamaModelInfo
@@ -164,7 +170,10 @@ export class GlamaHandler implements ApiHandler, SingleCompletionHandler {
 		const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
 			model: this.getModel().id,
 			messages: [{ role: "user", content: prompt }],
-			temperature: 0,
 		}
 
+		if (this.supportsTemperature()) {
+			requestOptions.temperature = 0
+		}
+
 		if (this.getModel().id.startsWith("anthropic/")) {
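A quick sanity check of the new gate, as a sketch (the model ids here are illustrative and not taken from the commit):

// Hypothetical check of the gating rule introduced above: temperature is
// only attached when the model id does not start with "openai/o3-mini".
const supportsTemperature = (modelId: string): boolean => !modelId.startsWith("openai/o3-mini")

console.assert(supportsTemperature("anthropic/claude-3.5-sonnet") === true)
console.assert(supportsTemperature("openai/o3-mini") === false)

With an o3-mini model id the request sent to Glama now omits temperature entirely; every other model keeps the previous temperature: 0 behaviour in both the streaming request and the non-streaming single-prompt request shown above.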