From 3c324a5465049da1a829e2dda6ec2c9ded3e16e5 Mon Sep 17 00:00:00 2001
From: Peli de Halleux
Date: Mon, 9 Dec 2024 15:12:11 +0000
Subject: [PATCH] =?UTF-8?q?refactor:=20update=20message=20param=20types=20?=
 =?UTF-8?q?for=20beta=20caching=20=E2=9C=A8?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 packages/core/src/anthropic.ts | 14 +++++++-------
 1 file changed, 7 insertions(+), 7 deletions(-)

diff --git a/packages/core/src/anthropic.ts b/packages/core/src/anthropic.ts
index 97db9b73d1..d72fe3b59e 100644
--- a/packages/core/src/anthropic.ts
+++ b/packages/core/src/anthropic.ts
@@ -64,7 +64,7 @@ const adjustUsage = (
 
 const convertMessages = (
     messages: ChatCompletionMessageParam[]
-): Array<Anthropic.Messages.MessageParam> => {
+): Array<Anthropic.Beta.PromptCaching.PromptCachingBetaMessageParam> => {
     return messages.map(convertSingleMessage)
 }
 
@@ -93,7 +93,7 @@ const convertSingleMessage = (
 
 const convertToolCallMessage = (
     msg: ChatCompletionAssistantMessageParam
-): Anthropic.Messages.MessageParam => {
+): Anthropic.Beta.PromptCaching.PromptCachingBetaMessageParam => {
     return {
         role: "assistant",
         content: msg.tool_calls.map((tool) => ({
@@ -107,7 +107,7 @@ const convertToolCallMessage = (
 
 const convertToolResultMessage = (
     msg: ChatCompletionToolMessageParam
-): Anthropic.Messages.MessageParam => {
+): Anthropic.Beta.PromptCaching.PromptCachingBetaMessageParam => {
     return {
         role: "user",
         content: [
@@ -125,7 +125,7 @@ const convertStandardMessage = (
         | ChatCompletionSystemMessageParam
         | ChatCompletionAssistantMessageParam
         | ChatCompletionUserMessageParam
-): Anthropic.Messages.MessageParam => {
+): Anthropic.Beta.PromptCaching.PromptCachingBetaMessageParam => {
     const role = msg.role === "assistant" ? "assistant" : "user"
     if (Array.isArray(msg.content)) {
         return {
@@ -159,7 +159,7 @@ const convertStandardMessage = (
 
 const convertImageUrlBlock = (
     block: ChatCompletionContentPartImage
-): Anthropic.Messages.ImageBlockParam => {
+): Anthropic.Beta.PromptCaching.PromptCachingBetaImageBlockParam => {
     return {
         type: "image",
         source: {
@@ -219,14 +219,14 @@ export const AnthropicChatCompletion: ChatCompletionHandler = async (
     const toolCalls: ChatCompletionToolCall[] = []
 
     try {
-        const stream = anthropic.messages.stream({
+        const stream = anthropic.beta.promptCaching.messages.stream({
             model,
+            tools: convertTools(req.tools),
             messages,
             max_tokens: req.max_tokens || ANTHROPIC_MAX_TOKEN,
             temperature: req.temperature,
             top_p: req.top_p,
             stream: true,
-            tools: convertTools(req.tools),
             ...headers,
         })
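
For context, a minimal sketch (not part of this patch) of how the beta prompt-caching surface the handler now targets is typically called, assuming the `@anthropic-ai/sdk` version in use at the time of this change. The client setup, model id, and prompt text below are illustrative placeholders, not values taken from the repository.

```ts
import Anthropic from "@anthropic-ai/sdk"

// Illustrative client; reads ANTHROPIC_API_KEY from the environment.
const anthropic = new Anthropic()

// The beta param type this patch switches to; content blocks may carry a
// cache_control marker so large, reusable context can be cached server-side.
const messages: Anthropic.Beta.PromptCaching.PromptCachingBetaMessageParam[] = [
    {
        role: "user",
        content: [
            {
                type: "text",
                text: "Large, reusable context goes here...", // placeholder prompt
                cache_control: { type: "ephemeral" },
            },
        ],
    },
]

async function main() {
    // Same call shape as the refactored handler: the beta prompt-caching
    // streaming endpoint instead of anthropic.messages.stream.
    const stream = anthropic.beta.promptCaching.messages.stream({
        model: "claude-3-5-sonnet-20241022", // placeholder model id
        max_tokens: 1024,
        messages,
    })

    // Print streamed text deltas as they arrive.
    for await (const event of stream) {
        if (
            event.type === "content_block_delta" &&
            event.delta.type === "text_delta"
        )
            process.stdout.write(event.delta.text)
    }
}

main()
```

Note that moving `tools` ahead of `messages` in the stream call is cosmetic; object property order does not change the request semantics.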