Mirror of https://github.com/pacnpal/Roo-Code.git (synced 2025-12-20 04:11:10 -05:00)
Use 'user credits' header to update balance and show user under task header
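
In short, every handler's createMessage now returns a small wrapper instead of a bare Anthropic message, so the Kodu handler can also report the caller's remaining credits read from the "user-credits" response header. The wrapper, defined in the api index diff below, is:

export interface ApiHandlerMessageResponse {
	message: Anthropic.Messages.Message
	userCredits?: number
}

Only the Kodu handler populates userCredits; the Anthropic, Bedrock, and OpenRouter handlers return { message } alone.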
In the Anthropic handler:

@@ -1,5 +1,5 @@
 import { Anthropic } from "@anthropic-ai/sdk"
-import { ApiHandler, withoutImageData } from "."
+import { ApiHandler, ApiHandlerMessageResponse, withoutImageData } from "."
 import { anthropicDefaultModelId, AnthropicModelId, anthropicModels, ApiHandlerOptions, ModelInfo } from "../shared/api"
 
 export class AnthropicHandler implements ApiHandler {
@@ -15,12 +15,12 @@ export class AnthropicHandler implements ApiHandler {
 		systemPrompt: string,
 		messages: Anthropic.Messages.MessageParam[],
 		tools: Anthropic.Messages.Tool[]
-	): Promise<Anthropic.Messages.Message> {
+	): Promise<ApiHandlerMessageResponse> {
 		const modelId = this.getModel().id
 		switch (modelId) {
 			case "claude-3-5-sonnet-20240620":
 			case "claude-3-opus-20240229":
-			case "claude-3-haiku-20240307":
+			case "claude-3-haiku-20240307": {
 				/*
 				The latest message will be the new user message, one before will be the assistant message from a previous request, and the user message before that will be a previously cached user message. So we need to mark the latest user message as ephemeral to cache it for the next request, and mark the second to last user message as ephemeral to let the server know the last message to retrieve from the cache for the current request.
 				*/
@@ -30,7 +30,7 @@ export class AnthropicHandler implements ApiHandler {
 				)
 				const lastUserMsgIndex = userMsgIndices[userMsgIndices.length - 1] ?? -1
 				const secondLastMsgUserIndex = userMsgIndices[userMsgIndices.length - 2] ?? -1
-				return await this.client.beta.promptCaching.messages.create(
+				const message = await this.client.beta.promptCaching.messages.create(
 					{
 						model: modelId,
 						max_tokens: this.getModel().info.maxTokens,
@@ -80,8 +80,10 @@ export class AnthropicHandler implements ApiHandler {
 						}
 					})()
 				)
-			default:
-				return await this.client.messages.create({
+				return { message }
+			}
+			default: {
+				const message = await this.client.messages.create({
 					model: modelId,
 					max_tokens: this.getModel().info.maxTokens,
 					system: [{ text: systemPrompt, type: "text" }],
@@ -89,6 +91,8 @@ export class AnthropicHandler implements ApiHandler {
 					tools,
 					tool_choice: { type: "auto" },
 				})
+				return { message }
+			}
 		}
 	}
 
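The body of the prompt-caching branch is elided by the hunk above; the following is a minimal sketch of the idea the comment describes, assuming string message content and reusing the index variables from the hunk (the mapping itself is illustrative, not the file's actual code):

// Mark the last and second-to-last user messages as ephemeral cache breakpoints,
// so the server caches the newest turn and knows where the previous cache ends.
const markEphemeral = (msg: Anthropic.Messages.MessageParam) =>
	typeof msg.content === "string"
		? { ...msg, content: [{ type: "text", text: msg.content, cache_control: { type: "ephemeral" } }] }
		: msg
const cachedMessages = messages.map((msg, i) =>
	i === lastUserMsgIndex || i === secondLastMsgUserIndex ? markEphemeral(msg) : msg
)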
In the AWS Bedrock handler:

@@ -1,6 +1,6 @@
 import AnthropicBedrock from "@anthropic-ai/bedrock-sdk"
 import { Anthropic } from "@anthropic-ai/sdk"
-import { ApiHandler, withoutImageData } from "."
+import { ApiHandler, ApiHandlerMessageResponse, withoutImageData } from "."
 import { ApiHandlerOptions, bedrockDefaultModelId, BedrockModelId, bedrockModels, ModelInfo } from "../shared/api"
 
 // https://docs.anthropic.com/en/api/claude-on-amazon-bedrock
@@ -26,8 +26,8 @@ export class AwsBedrockHandler implements ApiHandler {
 		systemPrompt: string,
 		messages: Anthropic.Messages.MessageParam[],
 		tools: Anthropic.Messages.Tool[]
-	): Promise<Anthropic.Messages.Message> {
-		return await this.client.messages.create({
+	): Promise<ApiHandlerMessageResponse> {
+		const message = await this.client.messages.create({
 			model: this.getModel().id,
 			max_tokens: this.getModel().info.maxTokens,
 			system: systemPrompt,
@@ -35,6 +35,7 @@ export class AwsBedrockHandler implements ApiHandler {
 			tools,
 			tool_choice: { type: "auto" },
 		})
+		return { message }
 	}
 
 	createUserReadableRequest(
In the api index (ApiHandler interface):

@@ -5,12 +5,17 @@ import { AwsBedrockHandler } from "./bedrock"
 import { OpenRouterHandler } from "./openrouter"
 import { KoduHandler } from "./kodu"
 
+export interface ApiHandlerMessageResponse {
+	message: Anthropic.Messages.Message
+	userCredits?: number
+}
+
 export interface ApiHandler {
 	createMessage(
 		systemPrompt: string,
 		messages: Anthropic.Messages.MessageParam[],
 		tools: Anthropic.Messages.Tool[]
-	): Promise<Anthropic.Messages.Message>
+	): Promise<ApiHandlerMessageResponse>
 
 	createUserReadableRequest(
 		userContent: Array<
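A consumer of the widened interface might use it like this; api and updateCreditBalance are hypothetical names, included only to illustrate what the commit title means by updating the balance shown under the task header:

// Hypothetical caller (not part of this diff).
const { message, userCredits } = await api.createMessage(systemPrompt, messages, tools)
if (userCredits !== undefined) {
	updateCreditBalance(userCredits) // surfaced to the user under the task header
}
// message is still a plain Anthropic.Messages.Message for the existing handling code.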
In the Kodu handler:

@@ -1,6 +1,6 @@
 import { Anthropic } from "@anthropic-ai/sdk"
 import axios from "axios"
-import { ApiHandler, withoutImageData } from "."
+import { ApiHandler, ApiHandlerMessageResponse, withoutImageData } from "."
 import { ApiHandlerOptions, koduDefaultModelId, KoduModelId, koduModels, ModelInfo } from "../shared/api"
 import { getKoduCreditsUrl, getKoduInferenceUrl } from "../shared/kodu"
 
@@ -24,7 +24,7 @@ export class KoduHandler implements ApiHandler {
 		systemPrompt: string,
 		messages: Anthropic.Messages.MessageParam[],
 		tools: Anthropic.Messages.Tool[]
-	): Promise<Anthropic.Messages.Message> {
+	): Promise<ApiHandlerMessageResponse> {
 		const modelId = this.getModel().id
 		let requestBody: Anthropic.Beta.PromptCaching.Messages.MessageCreateParamsNonStreaming
 		switch (modelId) {
@@ -82,7 +82,9 @@ export class KoduHandler implements ApiHandler {
 				"x-api-key": this.options.koduApiKey,
 			},
 		})
-		return response.data
+		const message = response.data
+		const userCredits = response.headers["user-credits"]
+		return { message, userCredits: userCredits !== undefined ? parseFloat(userCredits) : undefined }
 	}
 
 	createUserReadableRequest(
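One detail on the Kodu change: axios normalizes response header names to lower case, so response.headers["user-credits"] is the right lookup, and parseFloat returns NaN if the header value is malformed. A defensive variant, shown only as an assumption (the committed code passes the parsed value through as-is):

const rawCredits = response.headers["user-credits"]
const parsedCredits = rawCredits !== undefined ? parseFloat(rawCredits) : NaN
const userCredits = Number.isNaN(parsedCredits) ? undefined : parsedCredits
return { message, userCredits }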
In the OpenRouter handler:

@@ -1,6 +1,6 @@
 import { Anthropic } from "@anthropic-ai/sdk"
 import OpenAI from "openai"
-import { ApiHandler, withoutImageData } from "."
+import { ApiHandler, ApiHandlerMessageResponse, withoutImageData } from "."
 import {
 	ApiHandlerOptions,
 	ModelInfo,
@@ -30,7 +30,7 @@ export class OpenRouterHandler implements ApiHandler {
 		systemPrompt: string,
 		messages: Anthropic.Messages.MessageParam[],
 		tools: Anthropic.Messages.Tool[]
-	): Promise<Anthropic.Messages.Message> {
+	): Promise<ApiHandlerMessageResponse> {
 		// Convert Anthropic messages to OpenAI format
 		const openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
 			{ role: "system", content: systemPrompt },
@@ -120,7 +120,7 @@ export class OpenRouterHandler implements ApiHandler {
 			)
 		}
 
-		return anthropicMessage
+		return { message: anthropicMessage }
 	}
 
 	/*