Mirror of https://github.com/pacnpal/Roo-Code.git, synced 2025-12-20 12:21:13 -05:00
Set temperature to 0.2
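This commit hard-codes a sampling temperature of 0.2 into the request parameters of every provider handler (Anthropic, AWS Bedrock, Gemini, Ollama, the native and generic OpenAI handlers, OpenRouter, and Vertex), trading output variety for more deterministic, repeatable completions. Every hunk below applies the same one-line change. A minimal sketch of the pattern, assuming the OpenAI SDK; the DEFAULT_TEMPERATURE constant and buildParams helper are illustrative names, not from this repository:

import OpenAI from "openai"

// Illustrative only: the commit repeats the literal 0.2 in each handler
// rather than introducing a shared constant like this one.
const DEFAULT_TEMPERATURE = 0.2

function buildParams(
	modelId: string,
	messages: OpenAI.Chat.Completions.ChatCompletionMessageParam[],
): OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming {
	return {
		model: modelId,
		messages,
		// A low temperature biases sampling toward high-probability tokens,
		// which keeps a tool-calling agent on more predictable paths.
		temperature: DEFAULT_TEMPERATURE,
	}
}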
@@ -37,6 +37,7 @@ export class AnthropicHandler implements ApiHandler {
 {
 	model: modelId,
 	max_tokens: this.getModel().info.maxTokens,
+	temperature: 0.2,
 	system: [{ text: systemPrompt, type: "text", cache_control: { type: "ephemeral" } }], // setting cache breakpoint for system prompt so new tasks can reuse it
 	messages: messages.map((message, index) => {
 		if (index === lastUserMsgIndex || index === secondLastMsgUserIndex) {
@@ -89,6 +90,7 @@ export class AnthropicHandler implements ApiHandler {
 const message = await this.client.messages.create({
 	model: modelId,
 	max_tokens: this.getModel().info.maxTokens,
+	temperature: 0.2,
 	system: [{ text: systemPrompt, type: "text" }],
 	messages,
 	tools,
@@ -31,6 +31,7 @@ export class AwsBedrockHandler implements ApiHandler {
 const message = await this.client.messages.create({
 	model: this.getModel().id,
 	max_tokens: this.getModel().info.maxTokens,
+	temperature: 0.2,
 	system: systemPrompt,
 	messages,
 	tools,
@@ -39,6 +39,7 @@ export class GeminiHandler implements ApiHandler {
 	contents: messages.map(convertAnthropicMessageToGemini),
 	generationConfig: {
 		maxOutputTokens: this.getModel().info.maxTokens,
+		temperature: 0.2,
 	},
 })
 const message = convertGeminiResponseToAnthropic(result.response)
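Note that the Gemini SDK, unlike the Anthropic-style clients above, takes sampling settings inside a nested generationConfig object rather than as top-level request fields, which is why this hunk adds the parameter next to maxOutputTokens instead of alongside the other create-call parameters.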
@@ -36,6 +36,7 @@ export class OllamaHandler implements ApiHandler {
 const createParams: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
 	model: this.options.ollamaModelId ?? "",
 	messages: openAiMessages,
+	temperature: 0.2,
 	tools: openAiTools,
 	tool_choice: "auto",
 }
@@ -48,6 +48,7 @@ export class OpenAiNativeHandler implements ApiHandler {
 createParams = {
 	model: this.getModel().id,
 	max_completion_tokens: this.getModel().info.maxTokens,
+	temperature: 0.2,
 	messages: convertToO1Messages(convertToOpenAiMessages(messages), systemPrompt),
 }
 break
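One caveat with this branch: convertToO1Messages indicates it targets OpenAI's o1-family models, and at the time of writing those models only accept the default temperature of 1, so a request carrying temperature: 0.2 may be rejected by the API. The matching o1 branch in OpenRouterHandler further down has the same exposure.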
@@ -55,6 +56,7 @@ export class OpenAiNativeHandler implements ApiHandler {
 createParams = {
 	model: this.getModel().id,
 	max_completion_tokens: this.getModel().info.maxTokens,
+	temperature: 0.2,
 	messages: openAiMessages,
 	tools: openAiTools,
 	tool_choice: "auto",
@@ -47,6 +47,7 @@ export class OpenAiHandler implements ApiHandler {
 const createParams: OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming = {
 	model: this.options.openAiModelId ?? "",
 	messages: openAiMessages,
+	temperature: 0.2,
 	tools: openAiTools,
 	tool_choice: "auto",
 }
@@ -95,6 +95,7 @@ export class OpenRouterHandler implements ApiHandler {
 createParams = {
 	model: this.getModel().id,
 	max_tokens: this.getModel().info.maxTokens,
+	temperature: 0.2,
 	messages: convertToO1Messages(convertToOpenAiMessages(messages), systemPrompt),
 }
 break
@@ -102,6 +103,7 @@ export class OpenRouterHandler implements ApiHandler {
 createParams = {
 	model: this.getModel().id,
 	max_tokens: this.getModel().info.maxTokens,
+	temperature: 0.2,
 	messages: openAiMessages,
 	tools: openAiTools,
 	tool_choice: "auto",
@@ -25,6 +25,7 @@ export class VertexHandler implements ApiHandler {
 const message = await this.client.messages.create({
 	model: this.getModel().id,
 	max_tokens: this.getModel().info.maxTokens,
+	temperature: 0.2,
 	system: systemPrompt,
 	messages,
 	tools,
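Taken together, every request issued by these handlers now samples at a fixed temperature of 0.2; the commit exposes no user-facing setting to override the value.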