Mirror of https://github.com/pacnpal/Roo-Code.git (synced 2025-12-20 12:21:13 -05:00)
Merge pull request #698 from RooVetGit/more_o3_mini_fixes
Make o3-mini work in glama
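Context for the change: OpenAI's o3-mini reasoning model does not accept the `temperature` parameter, so the handler now builds its request options first and only sets `temperature` when a new `supportsTemperature()` check (which returns false for model IDs starting with `openai/o3-mini`) allows it. A minimal standalone sketch of the resulting pattern follows the diff below.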
@@ -72,28 +72,30 @@ export class GlamaHandler implements ApiHandler, SingleCompletionHandler {
             maxTokens = 8_192
         }
 
+        const requestOptions: OpenAI.Chat.ChatCompletionCreateParams = {
+            model: this.getModel().id,
+            max_tokens: maxTokens,
+            messages: openAiMessages,
+            stream: true,
+        }
+
+        if (this.supportsTemperature()) {
+            requestOptions.temperature = 0
+        }
+
         const { data: completion, response } = await this.client.chat.completions
-            .create(
-                {
-                    model: this.getModel().id,
-                    max_tokens: maxTokens,
-                    temperature: 0,
-                    messages: openAiMessages,
-                    stream: true,
-                },
-                {
-                    headers: {
-                        "X-Glama-Metadata": JSON.stringify({
-                            labels: [
-                                {
-                                    key: "app",
-                                    value: "vscode.rooveterinaryinc.roo-cline",
-                                },
-                            ],
-                        }),
-                    },
-                },
-            )
+            .create(requestOptions, {
+                headers: {
+                    "X-Glama-Metadata": JSON.stringify({
+                        labels: [
+                            {
+                                key: "app",
+                                value: "vscode.rooveterinaryinc.roo-cline",
+                            },
+                        ],
+                    }),
+                },
+            })
             .withResponse()
 
         const completionRequestId = response.headers.get("x-completion-request-id")
@@ -148,6 +150,10 @@ export class GlamaHandler implements ApiHandler, SingleCompletionHandler {
         }
     }
 
+    private supportsTemperature(): boolean {
+        return !this.getModel().id.startsWith("openai/o3-mini")
+    }
+
     getModel(): { id: string; info: ModelInfo } {
         const modelId = this.options.glamaModelId
         const modelInfo = this.options.glamaModelInfo
@@ -164,7 +170,10 @@ export class GlamaHandler implements ApiHandler, SingleCompletionHandler {
         const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
             model: this.getModel().id,
             messages: [{ role: "user", content: prompt }],
-            temperature: 0,
+        }
+
+        if (this.supportsTemperature()) {
+            requestOptions.temperature = 0
         }
 
         if (this.getModel().id.startsWith("anthropic/")) {
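For readers who want the change in isolation, here is a minimal, self-contained sketch (TypeScript, using the official `openai` package) of the pattern the diff introduces: build the request options first, then add `temperature` only when the model supports it. The function and parameter names below are illustrative, not part of Roo-Code; only the prefix check inside `supportsTemperature` mirrors the diff.

// Minimal sketch, not the actual GlamaHandler: the diff above applies this
// same pattern inside the handler's streaming and non-streaming paths.
import OpenAI from "openai"

// Mirrors the check added in the diff: o3-mini does not accept `temperature`.
function supportsTemperature(modelId: string): boolean {
    return !modelId.startsWith("openai/o3-mini")
}

async function complete(client: OpenAI, modelId: string, prompt: string): Promise<string> {
    // Build the base request without temperature.
    const requestOptions: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
        model: modelId,
        messages: [{ role: "user", content: prompt }],
    }

    // Only set temperature for models that accept it.
    if (supportsTemperature(modelId)) {
        requestOptions.temperature = 0
    }

    const completion = await client.chat.completions.create(requestOptions)
    return completion.choices[0]?.message?.content ?? ""
}

Gating the parameter this way keeps the existing behavior (temperature 0) for every other model while avoiding requests that would be rejected by models that do not accept the parameter.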