Merge pull request #281 from RooVetGit/glama_fixes

Glama fixes
Matt Rubens authored on 2025-01-05 20:42:06 -05:00, committed by GitHub
2 changed files with 5 additions and 4 deletions

View File

@@ -107,14 +107,15 @@ export class GlamaHandler implements ApiHandler {
 		if (completionRequest.tokenUsage) {
 			yield {
 				type: "usage",
+				cacheWriteTokens: completionRequest.tokenUsage.cacheCreationInputTokens,
+				cacheReadTokens: completionRequest.tokenUsage.cacheReadInputTokens,
 				inputTokens: completionRequest.tokenUsage.promptTokens,
 				outputTokens: completionRequest.tokenUsage.completionTokens,
-				totalCost: completionRequest.totalCostUsd,
+				totalCost: parseFloat(completionRequest.totalCostUsd),
 			}
 		}
 	} catch (error) {
-		// ignore if fails
-		console.error("Error fetching Glama generation details:", error)
+		console.error("Error fetching Glama completion details", error)
 	}
 }
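
For context, here is a minimal sketch of the usage record this hunk now yields, using the field names visible in the diff. GlamaCompletionRequest, GlamaUsageChunk, and toUsageChunk are hypothetical names for illustration only, not types or helpers from the extension. Glama appears to return totalCostUsd as a string (hence the new parseFloat call), and the cache token counts are now forwarded so prompt-cache reads and writes are reflected in the reported usage.

// Hypothetical local types, inferred from the diff above (illustration only).
interface GlamaUsageChunk {
	type: "usage"
	inputTokens: number
	outputTokens: number
	cacheWriteTokens?: number
	cacheReadTokens?: number
	totalCost?: number
}

interface GlamaCompletionRequest {
	tokenUsage?: {
		promptTokens: number
		completionTokens: number
		cacheCreationInputTokens?: number
		cacheReadInputTokens?: number
	}
	// Assumed to be a string, which is why the diff wraps it in parseFloat().
	totalCostUsd: string
}

// Build the usage chunk, converting the USD cost from string to number and
// passing the cache write/read token counts through unchanged.
function toUsageChunk(completionRequest: GlamaCompletionRequest): GlamaUsageChunk | undefined {
	if (!completionRequest.tokenUsage) {
		return undefined
	}
	return {
		type: "usage",
		cacheWriteTokens: completionRequest.tokenUsage.cacheCreationInputTokens,
		cacheReadTokens: completionRequest.tokenUsage.cacheReadInputTokens,
		inputTokens: completionRequest.tokenUsage.promptTokens,
		outputTokens: completionRequest.tokenUsage.completionTokens,
		totalCost: parseFloat(completionRequest.totalCostUsd),
	}
}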

View File

@@ -315,7 +315,7 @@ export const bedrockModels = {
 // Glama
 // https://glama.ai/models
-export const glamaDefaultModelId = "anthropic/claude-3-5-sonnet" // will always exist in openRouterModels
+export const glamaDefaultModelId = "anthropic/claude-3-5-sonnet"
 export const glamaDefaultModelInfo: ModelInfo = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
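
The second hunk only removes the stale "// will always exist in openRouterModels" comment, which referred to the OpenRouter model list rather than Glama's. As a rough sketch of how such defaults are typically consumed, the id and info can serve as a fallback when no Glama model is configured; getGlamaModel below is a hypothetical helper, not the extension's actual API, and it assumes ModelInfo, glamaDefaultModelId, and glamaDefaultModelInfo from the file shown above.

// Hypothetical fallback helper (illustration only): prefer the configured model
// id if it exists in the fetched model list, otherwise fall back to the defaults.
function getGlamaModel(
	configuredId: string | undefined,
	models: Record<string, ModelInfo>,
): { id: string; info: ModelInfo } {
	if (configuredId && models[configuredId]) {
		return { id: configuredId, info: models[configuredId] }
	}
	return { id: glamaDefaultModelId, info: glamaDefaultModelInfo }
}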