Mirror of https://github.com/pacnpal/Roo-Code.git (synced 2025-12-20 04:11:10 -05:00)
Add markdown rendering for model description; fixes
@@ -41,7 +41,7 @@ export class AnthropicHandler implements ApiHandler {
 				stream = await this.client.beta.promptCaching.messages.create(
 					{
 						model: modelId,
-						max_tokens: this.getModel().info.maxTokens,
+						max_tokens: this.getModel().info.maxTokens || 8192,
 						temperature: 0,
 						system: [{ text: systemPrompt, type: "text", cache_control: { type: "ephemeral" } }], // setting cache breakpoint for system prompt so new tasks can reuse it
 						messages: messages.map((message, index) => {
@@ -96,7 +96,7 @@ export class AnthropicHandler implements ApiHandler {
 			default: {
 				stream = (await this.client.messages.create({
 					model: modelId,
-					max_tokens: this.getModel().info.maxTokens,
+					max_tokens: this.getModel().info.maxTokens || 8192,
 					temperature: 0,
 					system: [{ text: systemPrompt, type: "text" }],
 					messages,
@@ -27,7 +27,7 @@ export class AwsBedrockHandler implements ApiHandler {
 	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
 		const stream = await this.client.messages.create({
 			model: this.getModel().id,
-			max_tokens: this.getModel().info.maxTokens,
+			max_tokens: this.getModel().info.maxTokens || 8192,
 			temperature: 0,
 			system: systemPrompt,
 			messages,
@@ -21,7 +21,7 @@ export class VertexHandler implements ApiHandler {
 	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
 		const stream = await this.client.messages.create({
 			model: this.getModel().id,
-			max_tokens: this.getModel().info.maxTokens,
+			max_tokens: this.getModel().info.maxTokens || 8192,
 			temperature: 0,
 			system: systemPrompt,
 			messages,
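Each hunk above applies the same one-line change: fall back to 8192 when the model info does not provide a max token count. A minimal TypeScript sketch of that fallback follows; the `ModelInfo` shape and `resolveMaxTokens` helper are assumptions for illustration, not types or functions from the repository.

```ts
// Minimal sketch of the fallback pattern applied in each handler above.
// The ModelInfo shape is assumed for illustration and may not match the
// repository's actual type definitions.
interface ModelInfo {
	maxTokens?: number
}

function resolveMaxTokens(info: ModelInfo): number {
	// `||` substitutes 8192 whenever maxTokens is undefined (it would also
	// replace an explicit 0, since 0 is falsy).
	return info.maxTokens || 8192
}

console.log(resolveMaxTokens({ maxTokens: 4096 })) // 4096
console.log(resolveMaxTokens({})) // 8192
```

With the fallback in place, a model whose info omits maxTokens no longer passes undefined as max_tokens to the Anthropic, Bedrock, or Vertex clients.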