From f48f5a6f14c98f89041d26c75dded71dccfbee8f Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Fri, 25 Apr 2025 14:30:47 -0500 Subject: [PATCH 01/51] initial refactor from o1_like to reasoning --- packages/proxy/schema/models.ts | 31 ++++++++++++++++--------------- packages/proxy/src/proxy.ts | 10 +++++----- 2 files changed, 21 insertions(+), 20 deletions(-) diff --git a/packages/proxy/schema/models.ts b/packages/proxy/schema/models.ts index 5a6eb69e..2c0b53fa 100644 --- a/packages/proxy/schema/models.ts +++ b/packages/proxy/schema/models.ts @@ -44,7 +44,7 @@ export const ModelSchema = z.object({ input_cost_per_mil_tokens: z.number().nullish(), output_cost_per_mil_tokens: z.number().nullish(), displayName: z.string().nullish(), - o1_like: z.boolean().nullish(), + reasoning: z.boolean().nullish(), experimental: z.boolean().nullish(), deprecated: z.boolean().nullish(), parent: z.string().nullish(), @@ -159,7 +159,7 @@ export const AvailableModels: { [name: string]: ModelSpec } = { multimodal: true, input_cost_per_mil_tokens: 1.1, output_cost_per_mil_tokens: 4.4, - o1_like: true, + reasoning: true, }, "o4-mini-2025-04-16": { format: "openai", @@ -167,7 +167,7 @@ export const AvailableModels: { [name: string]: ModelSpec } = { multimodal: true, input_cost_per_mil_tokens: 1.1, output_cost_per_mil_tokens: 4.4, - o1_like: true, + reasoning: true, parent: "o4-mini", }, "o3-mini": { @@ -176,7 +176,7 @@ export const AvailableModels: { [name: string]: ModelSpec } = { multimodal: true, input_cost_per_mil_tokens: 1.1, output_cost_per_mil_tokens: 4.4, - o1_like: true, + reasoning: true, }, "o3-mini-2025-01-31": { format: "openai", @@ -184,7 +184,7 @@ export const AvailableModels: { [name: string]: ModelSpec } = { multimodal: true, input_cost_per_mil_tokens: 1.1, output_cost_per_mil_tokens: 4.4, - o1_like: true, + reasoning: true, parent: "o3-mini", }, o3: { @@ -193,7 +193,7 @@ export const AvailableModels: { [name: string]: ModelSpec } = { multimodal: true, input_cost_per_mil_tokens: 10.0, output_cost_per_mil_tokens: 40, - o1_like: true, + reasoning: true, }, "o3-2025-04-16": { format: "openai", @@ -201,7 +201,7 @@ export const AvailableModels: { [name: string]: ModelSpec } = { multimodal: true, input_cost_per_mil_tokens: 10.0, output_cost_per_mil_tokens: 40, - o1_like: true, + reasoning: true, parent: "o3", }, o1: { @@ -210,7 +210,7 @@ export const AvailableModels: { [name: string]: ModelSpec } = { multimodal: true, input_cost_per_mil_tokens: 15.0, output_cost_per_mil_tokens: 60, - o1_like: true, + reasoning: true, }, "o1-2024-12-17": { format: "openai", @@ -218,7 +218,7 @@ export const AvailableModels: { [name: string]: ModelSpec } = { multimodal: true, input_cost_per_mil_tokens: 15.0, output_cost_per_mil_tokens: 60, - o1_like: true, + reasoning: true, parent: "o1", }, "o1-mini": { @@ -227,7 +227,7 @@ export const AvailableModels: { [name: string]: ModelSpec } = { multimodal: false, input_cost_per_mil_tokens: 3.0, output_cost_per_mil_tokens: 12.0, - o1_like: true, + reasoning: true, }, "o1-mini-2024-09-12": { format: "openai", @@ -235,7 +235,7 @@ export const AvailableModels: { [name: string]: ModelSpec } = { multimodal: false, input_cost_per_mil_tokens: 3.0, output_cost_per_mil_tokens: 12.0, - o1_like: true, + reasoning: true, parent: "o1-mini", }, "o1-pro": { @@ -244,7 +244,7 @@ export const AvailableModels: { [name: string]: ModelSpec } = { multimodal: true, input_cost_per_mil_tokens: 150, output_cost_per_mil_tokens: 600, - o1_like: true, + reasoning: true, }, "o1-pro-2025-03-19": { format: 
"openai", @@ -252,7 +252,7 @@ export const AvailableModels: { [name: string]: ModelSpec } = { multimodal: true, input_cost_per_mil_tokens: 150, output_cost_per_mil_tokens: 600, - o1_like: true, + reasoning: true, parent: "o1-pro", }, "chatgpt-4o-latest": { @@ -349,8 +349,8 @@ export const AvailableModels: { [name: string]: ModelSpec } = { multimodal: false, input_cost_per_mil_tokens: 15.0, output_cost_per_mil_tokens: 60, - o1_like: true, experimental: true, + reasoning: true, parent: "o1", }, "o1-preview-2024-09-12": { @@ -359,8 +359,8 @@ export const AvailableModels: { [name: string]: ModelSpec } = { multimodal: false, input_cost_per_mil_tokens: 15.0, output_cost_per_mil_tokens: 60.0, - o1_like: true, experimental: true, + reasoning: true, parent: "o1", }, "gpt-4o-search-preview": { @@ -547,6 +547,7 @@ export const AvailableModels: { [name: string]: ModelSpec } = { input_cost_per_mil_tokens: 3, output_cost_per_mil_tokens: 15, displayName: "Claude 3.7 Sonnet", + reasoning: true, }, "claude-3-7-sonnet-20250219": { format: "anthropic", diff --git a/packages/proxy/src/proxy.ts b/packages/proxy/src/proxy.ts index 12d80bfa..ae8a4173 100644 --- a/packages/proxy/src/proxy.ts +++ b/packages/proxy/src/proxy.ts @@ -1657,15 +1657,15 @@ async function fetchOpenAI( }); } - // TODO: Ideally this is encapsulated as some advanced per-model config - // or mapping, but for now, let's just map it manually. - const isO1Like = - bodyData.o1_like || + // TODO(ibolmo): move to is model reasoning registry + const isReasoningLike = + bodyData.reasoning || (typeof bodyData.model === "string" && (bodyData.model.startsWith("o1") || bodyData.model.startsWith("o3") || bodyData.model.startsWith("o4"))); - if (isO1Like) { + + if (isReasoningLike) { if (!isEmpty(bodyData.max_tokens)) { bodyData.max_completion_tokens = bodyData.max_tokens; delete bodyData.max_tokens; From dce6a698e41a9788255efb0c585985922c3d0d2d Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Fri, 25 Apr 2025 17:20:31 -0500 Subject: [PATCH 02/51] rough attempt at translating reasoning_effort (openai) to thinking (claude) --- packages/proxy/schema/index.ts | 77 +++++++++++++++++++++++++++++---- packages/proxy/schema/models.ts | 1 + packages/proxy/src/proxy.ts | 12 ++--- 3 files changed, 76 insertions(+), 14 deletions(-) diff --git a/packages/proxy/schema/index.ts b/packages/proxy/schema/index.ts index 4108039c..51a37405 100644 --- a/packages/proxy/schema/index.ts +++ b/packages/proxy/schema/index.ts @@ -53,6 +53,50 @@ export const modelParamToModelParam: { stop: null, }; +const effortToBudgetMultiplier = { + low: 0.2, + medium: 0.5, + high: 0.8, +} as const; + +const getBudgetMultiplier = (effort: keyof typeof effortToBudgetMultiplier) => { + return effortToBudgetMultiplier[effort] || effortToBudgetMultiplier.low; +}; + +export const modelParamMappers: { + [name in ModelFormat]?: { + [param: string]: (params: any) => Record; + }; +} = { + anthropic: { + reasoning_effort: ({ + reasoning_effort, + max_tokens, + max_completion_tokens, + ...params + }) => { + // TODO(ibolmo): help the user do the right thing, or should we raise an exception? + + // Max tokens are inclusive of budget. If the max tokens are too low (below 1024), then the API will raise an exception. 
+ const maxTokens = Math.max( + max_completion_tokens || max_tokens || 0, + 1024 / effortToBudgetMultiplier.low, + ); + + const budget = getBudgetMultiplier(reasoning_effort || "low") * maxTokens; + + return { + ...params, + max_tokens: maxTokens, + thinking: { + budget_tokens: budget, + enabled: true, + }, + }; + }, + }, +}; + export const sliderSpecs: { // min, max, step, required [name: string]: [number, number, number, boolean]; @@ -121,6 +165,17 @@ export const modelProviderHasTools: { converse: true, }; +export const modelProviderHasReasoning: { + [name in ModelFormat]?: RegExp; +} = { + openai: /^o[1-4]/i, + anthropic: /^claude-3\.7/i, + google: /gemini-2.0-flash$|gemini-2.5/i, + js: undefined, + window: undefined, + converse: undefined, +}; + export const DefaultEndpointTypes: { [name in ModelFormat]: ModelEndpointType[]; } = { @@ -427,23 +482,29 @@ export function translateParams( toProvider: ModelFormat, params: Record, ): Record { - const translatedParams: Record = {}; + let translatedParams: Record = {}; for (const [k, v] of Object.entries(params || {})) { const safeValue = v ?? undefined; // Don't propagate "null" along + const mapper = modelParamMappers[toProvider]?.[k]; + if (mapper) { + translatedParams = mapper(translatedParams); + continue; + } + const translatedKey = modelParamToModelParam[k as keyof ModelParams] as | keyof ModelParams | undefined | null; + if (translatedKey === null) { continue; - } else if ( - translatedKey !== undefined && - defaultModelParamSettings[toProvider][translatedKey] !== undefined - ) { - translatedParams[translatedKey] = safeValue; - } else { - translatedParams[k] = safeValue; } + + const hasDefaultParam = + translatedKey !== undefined && + defaultModelParamSettings[toProvider][translatedKey] !== undefined; + + translatedParams[hasDefaultParam ? translatedKey : k] = safeValue; } return translatedParams; diff --git a/packages/proxy/schema/models.ts b/packages/proxy/schema/models.ts index 2c0b53fa..8503ff40 100644 --- a/packages/proxy/schema/models.ts +++ b/packages/proxy/schema/models.ts @@ -44,6 +44,7 @@ export const ModelSchema = z.object({ input_cost_per_mil_tokens: z.number().nullish(), output_cost_per_mil_tokens: z.number().nullish(), displayName: z.string().nullish(), + o1_like: z.boolean().nullish().describe('DEPRECATED use "reasoning" instead'), reasoning: z.boolean().nullish(), experimental: z.boolean().nullish(), deprecated: z.boolean().nullish(), diff --git a/packages/proxy/src/proxy.ts b/packages/proxy/src/proxy.ts index ae8a4173..c028b714 100644 --- a/packages/proxy/src/proxy.ts +++ b/packages/proxy/src/proxy.ts @@ -15,6 +15,7 @@ import { ModelSpec, AzureEntraSecretSchema, DatabricksOAuthSecretSchema, + modelProviderHasReasoning, } from "@schema"; import { ModelResponse, @@ -1657,15 +1658,14 @@ async function fetchOpenAI( }); } - // TODO(ibolmo): move to is model reasoning registry - const isReasoningLike = + // TODO: perhaps convert reasoning.effort -> reasoning_effort? 
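+    // (OpenAI's Responses API nests this setting as `reasoning: { effort }`, while Chat
+    // Completions takes a flat `reasoning_effort` string, so a mapping step may be needed.)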
+ + const hasReasoning = bodyData.reasoning || (typeof bodyData.model === "string" && - (bodyData.model.startsWith("o1") || - bodyData.model.startsWith("o3") || - bodyData.model.startsWith("o4"))); + modelProviderHasReasoning.openai?.test(bodyData.model)); - if (isReasoningLike) { + if (hasReasoning) { if (!isEmpty(bodyData.max_tokens)) { bodyData.max_completion_tokens = bodyData.max_tokens; delete bodyData.max_tokens; From fbc05e8f39e35da641624898321b833f5cc5509a Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Mon, 28 Apr 2025 11:07:39 -0500 Subject: [PATCH 03/51] support anthropic thinking responses --- packages/proxy/schema/index.ts | 5 ++++- packages/proxy/src/providers/anthropic.ts | 15 +++++++++++++++ packages/proxy/src/proxy.ts | 2 ++ 3 files changed, 21 insertions(+), 1 deletion(-) diff --git a/packages/proxy/schema/index.ts b/packages/proxy/schema/index.ts index 51a37405..76f4477f 100644 --- a/packages/proxy/schema/index.ts +++ b/packages/proxy/schema/index.ts @@ -73,6 +73,7 @@ export const modelParamMappers: { reasoning_effort, max_tokens, max_completion_tokens, + temperature: _, ...params }) => { // TODO(ibolmo): help the user do the right thing, or should we raise an exception? @@ -88,9 +89,11 @@ export const modelParamMappers: { return { ...params, max_tokens: maxTokens, + // must be set when using thinking + temperature: 1, thinking: { budget_tokens: budget, - enabled: true, + type: "enabled", }, }; }, diff --git a/packages/proxy/src/providers/anthropic.ts b/packages/proxy/src/providers/anthropic.ts index 06f995ad..1d369d25 100644 --- a/packages/proxy/src/providers/anthropic.ts +++ b/packages/proxy/src/providers/anthropic.ts @@ -111,6 +111,11 @@ export const anthropicStreamEventSchema = z.discriminatedUnion("type", [ type: z.literal("text"), text: z.string(), }), + z.object({ + type: z.literal("thinking"), + thinking: z.string(), + signature: z.string(), + }), z.object({ type: z.literal("tool_use"), id: z.string(), @@ -166,6 +171,7 @@ export interface AnthropicCompletion { role: "assistant"; content: [ | { type: "text"; text: string } + | { type: "thinking"; thinking: string; signature: string } | { type: "tool_use"; id: string; @@ -264,6 +270,12 @@ export function anthropicEventToOpenAIEvent( event.delta.type === "text_delta" ) { content = idx === 0 ? event.delta.text.trimStart() : event.delta.text; + } else if ( + event.type === "content_block_delta" && + event.delta.type === "thinking_delta" + ) { + content = + idx === 0 ? 
event.delta.thinking.trimStart() : event.delta.thinking; } else if ( event.type === "content_block_delta" && event.delta.type === "input_json_delta" @@ -352,7 +364,9 @@ export function anthropicCompletionToOpenAICompletion( isStructuredOutput: boolean, ): ChatCompletion { const firstText = completion.content.find((c) => c.type === "text"); + const firstThinking = completion.content.find((c) => c.type === "thinking"); const firstTool = completion.content.find((c) => c.type === "tool_use"); + return { id: completion.id, choices: [ @@ -390,6 +404,7 @@ export function anthropicCompletionToOpenAICompletion( } : undefined, refusal: null, + ...(firstThinking && { reasoning: firstThinking.thinking }), }, }, ], diff --git a/packages/proxy/src/proxy.ts b/packages/proxy/src/proxy.ts index c028b714..50416402 100644 --- a/packages/proxy/src/proxy.ts +++ b/packages/proxy/src/proxy.ts @@ -199,6 +199,8 @@ export async function proxyV1({ const cacheMisses = meter.createCounter("results_cache_misses"); const cacheSkips = meter.createCounter("results_cache_skips"); + console.log("WORLD HELLO!"); + totalCalls.add(1); proxyHeaders = Object.fromEntries( From 19cae6410f47f68698919f51a07d9793757bb5fa Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Tue, 29 Apr 2025 16:08:51 -0700 Subject: [PATCH 04/51] handle reasoning oai-like messages --- packages/proxy/package.json | 2 +- packages/proxy/src/proxy.ts | 16 ++++++++++++---- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/packages/proxy/package.json b/packages/proxy/package.json index a2360efd..a03fb1e6 100644 --- a/packages/proxy/package.json +++ b/packages/proxy/package.json @@ -82,7 +82,7 @@ "@anthropic-ai/sdk": "^0.39.0", "@apidevtools/json-schema-ref-parser": "^11.9.1", "@aws-sdk/client-bedrock-runtime": "^3.738.0", - "@braintrust/core": "^0.0.85", + "@braintrust/core": "workspace:*", "@breezystack/lamejs": "^1.2.7", "@google/generative-ai": "^0.24.0", "@opentelemetry/api": "^1.7.0", diff --git a/packages/proxy/src/proxy.ts b/packages/proxy/src/proxy.ts index 50416402..15816877 100644 --- a/packages/proxy/src/proxy.ts +++ b/packages/proxy/src/proxy.ts @@ -199,8 +199,6 @@ export async function proxyV1({ const cacheMisses = meter.createCounter("results_cache_misses"); const cacheSkips = meter.createCounter("results_cache_skips"); - console.log("WORLD HELLO!"); - totalCalls.add(1); proxyHeaders = Object.fromEntries( @@ -2063,9 +2061,19 @@ async function fetchAnthropicChatCompletions({ } else if (m.role === "tool") { role = "user"; content = openAIToolMessageToAnthropicToolCall(m); - } else if (m.role === "assistant" && m.tool_calls) { + } else if (m.role === "assistant") { content = upgradeAnthropicContentMessage(content); - content.push(...openAIToolCallsToAnthropicToolUse(m.tool_calls)); + if (m.tool_calls) { + content.push(...openAIToolCallsToAnthropicToolUse(m.tool_calls)); + } + if (m.reasoning) { + content.unshift({ + type: "thinking", + thinking: m.reasoning, + // TODO: we are required to include the reasoning signature, but looks like this works for now. 
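+              // NOTE: Anthropic may validate this signature if the thinking block is
+              // replayed on a later turn, so the empty string is only a stopgap.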
+ signature: "", + }); + } } const translatedRole = MessageTypeToMessageType[role]; From c5b50e6cee1f78040749ef9d279c0dbe4e02b3e4 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Tue, 29 Apr 2025 16:25:29 -0700 Subject: [PATCH 05/51] assume a more generalized reasoning object --- packages/proxy/src/providers/anthropic.ts | 9 ++++++++- packages/proxy/src/proxy.ts | 13 +++++++------ 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/packages/proxy/src/providers/anthropic.ts b/packages/proxy/src/providers/anthropic.ts index 1d369d25..fb760eab 100644 --- a/packages/proxy/src/providers/anthropic.ts +++ b/packages/proxy/src/providers/anthropic.ts @@ -404,7 +404,14 @@ export function anthropicCompletionToOpenAICompletion( } : undefined, refusal: null, - ...(firstThinking && { reasoning: firstThinking.thinking }), + ...(firstThinking && { + reasoning: [ + { + id: firstThinking.signature, + content: firstThinking.thinking, + }, + ], + }), }, }, ], diff --git a/packages/proxy/src/proxy.ts b/packages/proxy/src/proxy.ts index 15816877..a5ed4c14 100644 --- a/packages/proxy/src/proxy.ts +++ b/packages/proxy/src/proxy.ts @@ -2067,12 +2067,13 @@ async function fetchAnthropicChatCompletions({ content.push(...openAIToolCallsToAnthropicToolUse(m.tool_calls)); } if (m.reasoning) { - content.unshift({ - type: "thinking", - thinking: m.reasoning, - // TODO: we are required to include the reasoning signature, but looks like this works for now. - signature: "", - }); + content.unshift( + ...m.reasoning.map((r) => ({ + type: "thinking", + thinking: r.content, + signature: r.id, + })), + ); } } From 342a4eaec85263521b9b787e373e7f1236bffc6b Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Wed, 30 Apr 2025 10:30:08 -0700 Subject: [PATCH 06/51] allow reasoning_effort to be defaulted --- packages/proxy/schema/index.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/packages/proxy/schema/index.ts b/packages/proxy/schema/index.ts index 76f4477f..237b5d2a 100644 --- a/packages/proxy/schema/index.ts +++ b/packages/proxy/schema/index.ts @@ -49,7 +49,7 @@ export const modelParamToModelParam: { stream_options: null, parallel_tool_calls: null, response_format: null, - reasoning_effort: null, + reasoning_effort: "reasoning_effort", stop: null, }; @@ -76,8 +76,6 @@ export const modelParamMappers: { temperature: _, ...params }) => { - // TODO(ibolmo): help the user do the right thing, or should we raise an exception? - // Max tokens are inclusive of budget. If the max tokens are too low (below 1024), then the API will raise an exception. 
const maxTokens = Math.max( max_completion_tokens || max_tokens || 0, @@ -129,6 +127,7 @@ export const defaultModelParamSettings: { response_format: null, stop: undefined, use_cache: true, + reasoning_effort: "medium", }, anthropic: { temperature: undefined, @@ -136,6 +135,7 @@ export const defaultModelParamSettings: { top_p: 0.7, top_k: 5, use_cache: true, + reasoning_effort: "medium", }, google: { temperature: undefined, From fdb87e9b980e095f521a8713b833669b39cf7d84 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Wed, 30 Apr 2025 16:42:39 -0700 Subject: [PATCH 07/51] add support for gemini --- packages/proxy/schema/index.ts | 24 ++++++++++++++++++++++++ packages/proxy/schema/models.ts | 3 +++ 2 files changed, 27 insertions(+) diff --git a/packages/proxy/schema/index.ts b/packages/proxy/schema/index.ts index 237b5d2a..5ad28d90 100644 --- a/packages/proxy/schema/index.ts +++ b/packages/proxy/schema/index.ts @@ -96,6 +96,30 @@ export const modelParamMappers: { }; }, }, + google: { + reasoning_effort: ({ + reasoning_effort, + max_tokens, + max_completion_tokens, + ...params + }) => { + const maxTokens = Math.max( + max_completion_tokens || max_tokens || 0, + 1024 / effortToBudgetMultiplier.low, + ); + + const budget = getBudgetMultiplier(reasoning_effort || "low") * maxTokens; + + return { + ...params, + thinkingConfig: { + thinkingBudget: budget, + includeThoughts: true, + }, + maxOutputTokens: maxTokens, + }; + }, + }, }; export const sliderSpecs: { diff --git a/packages/proxy/schema/models.ts b/packages/proxy/schema/models.ts index 8503ff40..af826300 100644 --- a/packages/proxy/schema/models.ts +++ b/packages/proxy/schema/models.ts @@ -1919,6 +1919,7 @@ export const AvailableModels: { [name: string]: ModelSpec } = { output_cost_per_mil_tokens: 0.6, multimodal: true, experimental: false, + reasoning: true, displayName: "Gemini 2.5 Flash Preview", }, "gemini-2.5-pro-preview-03-25": { @@ -1928,6 +1929,7 @@ export const AvailableModels: { [name: string]: ModelSpec } = { output_cost_per_mil_tokens: 10, multimodal: true, experimental: false, + reasoning: true, displayName: "Gemini 2.5 Pro Preview", }, "gemini-2.5-pro-exp-03-25": { @@ -1937,6 +1939,7 @@ export const AvailableModels: { [name: string]: ModelSpec } = { output_cost_per_mil_tokens: 0, multimodal: true, experimental: true, + reasoning: true, displayName: "Gemini 2.5 Pro Experimental", }, "gemini-2.0-flash-exp": { From f85073b1ee6ef0d60ed23d76f1f670f7fe355496 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Fri, 2 May 2025 17:46:45 -0700 Subject: [PATCH 08/51] should include the reasoning delta --- packages/proxy/src/providers/anthropic.ts | 41 +++++++++++++++++++++-- 1 file changed, 39 insertions(+), 2 deletions(-) diff --git a/packages/proxy/src/providers/anthropic.ts b/packages/proxy/src/providers/anthropic.ts index fb760eab..fd62aad5 100644 --- a/packages/proxy/src/providers/anthropic.ts +++ b/packages/proxy/src/providers/anthropic.ts @@ -23,6 +23,7 @@ import { Base64ImageSource, } from "@anthropic-ai/sdk/resources/messages"; import { ChatCompletionCreateParamsBase } from "openai/resources/chat/completions"; +import { type Reasoning } from "@braintrust/core/typespecs"; /* Example events: @@ -223,6 +224,8 @@ export function anthropicEventToOpenAIEvent( let tool_calls: ChatCompletionChunk.Choice.Delta.ToolCall[] | undefined = undefined; + let reasoning: Reasoning | undefined = undefined; + if (event.type === "message_start") { if (event.message.usage) { updateUsage(event.message.usage, usage); @@ -270,12 +273,28 @@ export 
function anthropicEventToOpenAIEvent( event.delta.type === "text_delta" ) { content = idx === 0 ? event.delta.text.trimStart() : event.delta.text; + } else if ( + event.type === "content_block_start" && + event.content_block.type === "thinking" + ) { + reasoning = { + id: event.content_block.signature, + content: event.content_block.thinking, + }; } else if ( event.type === "content_block_delta" && event.delta.type === "thinking_delta" ) { - content = - idx === 0 ? event.delta.thinking.trimStart() : event.delta.thinking; + reasoning = { + content: event.delta.thinking, + }; + } else if ( + event.type === "content_block_delta" && + event.delta.type === "signature_delta" + ) { + reasoning = { + id: event.delta.signature, + }; } else if ( event.type === "content_block_delta" && event.delta.type === "input_json_delta" @@ -326,6 +345,11 @@ export function anthropicEventToOpenAIEvent( }, finished: true, }; + } else if (event.type === "ping" || event.type === "content_block_stop") { + return { + event: null, + finished: false, + }; } else { console.warn( `Skipping unhandled Anthropic stream event: ${JSON.stringify(eventU)}`, @@ -345,6 +369,7 @@ export function anthropicEventToOpenAIEvent( content, tool_calls: isStructuredOutput ? undefined : tool_calls, role: "assistant", + reasoning, }, finish_reason: null, // Anthropic places this in a separate stream event. index: 0, @@ -358,12 +383,24 @@ export function anthropicEventToOpenAIEvent( }; } +// TODO: should this live here? +declare module "openai/resources/chat/completions" { + namespace ChatCompletionChunk { + namespace Choice { + interface Delta { + reasoning?: Reasoning; + } + } + } +} + export function anthropicCompletionToOpenAICompletion( completion: AnthropicCompletion, isFunction: boolean, isStructuredOutput: boolean, ): ChatCompletion { const firstText = completion.content.find((c) => c.type === "text"); + // TODO(ibolmo): we now support multiple thinking blocks const firstThinking = completion.content.find((c) => c.type === "thinking"); const firstTool = completion.content.find((c) => c.type === "tool_use"); From 3c7bc81c697c9893727dd3398b044d8c77822960 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Fri, 2 May 2025 17:47:01 -0700 Subject: [PATCH 09/51] include the reasoning type to chunk delta --- packages/proxy/src/proxy.ts | 1 + packages/proxy/utils/openai.ts | 3 +++ 2 files changed, 4 insertions(+) diff --git a/packages/proxy/src/proxy.ts b/packages/proxy/src/proxy.ts index a5ed4c14..1af1e6d7 100644 --- a/packages/proxy/src/proxy.ts +++ b/packages/proxy/src/proxy.ts @@ -1324,6 +1324,7 @@ function chatCompletionMessageFromResponseOutput( }; } +// TODO: should return the reasoning function chatCompletionFromResponse(response: OpenAIResponse): ChatCompletion { return { choices: [ diff --git a/packages/proxy/utils/openai.ts b/packages/proxy/utils/openai.ts index 667b9289..f5d7f027 100644 --- a/packages/proxy/utils/openai.ts +++ b/packages/proxy/utils/openai.ts @@ -1,6 +1,7 @@ // This is copied from the Vercel AI SDK commit bfa1182c7f5379d7a3d81878ea00ec84682cb046 // We just need the OpenAI parser, but not the streaming code. 
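+// The local ChoiceDelta mirror below gains an optional `reasoning` field so that thinking
+// deltas (e.g. from Anthropic) can be represented on parsed stream chunks.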
+import { Reasoning } from "@braintrust/core/typespecs"; import { CompletionUsage, FunctionCall, trimStartOfStreamHelper } from "ai"; // https://github.com/openai/openai-node/blob/07b3504e1c40fd929f4aae1651b83afc19e3baf8/src/resources/chat/completions.ts#L28-L40 @@ -47,6 +48,8 @@ interface ChoiceDelta { role?: "system" | "user" | "assistant" | "tool"; tool_calls?: Array; + + reasoning?: Reasoning; } // From https://github.com/openai/openai-node/blob/master/src/resources/chat/completions.ts From bebacfd86ec985f70afa5d9e59ccd6e29478bc8e Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Fri, 2 May 2025 21:57:48 -0700 Subject: [PATCH 10/51] include reasoning when we're done with the stream --- packages/proxy/src/proxy.ts | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/packages/proxy/src/proxy.ts b/packages/proxy/src/proxy.ts index 1af1e6d7..a11f2b18 100644 --- a/packages/proxy/src/proxy.ts +++ b/packages/proxy/src/proxy.ts @@ -51,6 +51,7 @@ import { import { Message, MessageRole, + Reasoning, responseFormatSchema, } from "@braintrust/core/typespecs"; import { _urljoin, isArray } from "@braintrust/core"; @@ -616,6 +617,7 @@ export async function proxyV1({ const allChunks: Uint8Array[] = []; // These parameters are for the streaming case + let reasoning: Reasoning[] | undefined = undefined; let role: string | undefined = undefined; let content: string | undefined = undefined; let tool_calls: ChatCompletionChunk.Choice.Delta.ToolCall[] | undefined = @@ -668,6 +670,22 @@ export async function proxyV1({ content = (content || "") + delta.content; } + if (delta.reasoning) { + if (!reasoning) { + reasoning = [ + { + id: delta.reasoning.id || "", + content: delta.reasoning.content || "", + }, + ]; + } else { + // TODO: could be multiple + reasoning[0].id = reasoning[0].id || delta.reasoning.id; + reasoning[0].content = + reasoning[0].content + (delta.reasoning.content || ""); + } + } + if (delta.tool_calls) { if (!tool_calls) { tool_calls = [ @@ -679,6 +697,7 @@ export async function proxyV1({ }, ]; } else if (tool_calls[0].function) { + // TODO: what about parallel calls? tool_calls[0].function.arguments = (tool_calls[0].function.arguments ?? "") + (delta.tool_calls[0].function?.arguments ?? 
""); @@ -724,6 +743,7 @@ export async function proxyV1({ role, content, tool_calls, + reasoning, }, logprobs: null, finish_reason, From d43314b8f954743d00afadff71a2f802d9e13a37 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Fri, 2 May 2025 21:58:09 -0700 Subject: [PATCH 11/51] temp: use local dir for proxy to appease build errors --- pnpm-lock.yaml | 202 ++++++++++++++++++++++++++++++++++++++----------- 1 file changed, 158 insertions(+), 44 deletions(-) diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 86aef631..c97a3545 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -19,10 +19,10 @@ importers: version: 2.3.3 vite-tsconfig-paths: specifier: ^4.3.2 - version: 4.3.2(typescript@5.5.4) + version: 4.3.2 vitest: specifier: ^2.1.9 - version: 2.1.9(@types/node@20.10.5) + version: 2.1.9 apis/cloudflare: dependencies: @@ -77,7 +77,7 @@ importers: version: 2.32.0 ai: specifier: 2.2.22 - version: 2.2.22(react@18.3.1)(solid-js@1.9.4)(svelte@4.2.19)(vue@3.5.13) + version: 2.2.22(react@18.3.1)(solid-js@1.9.5)(svelte@4.2.19)(vue@3.5.13) aws-lambda: specifier: ^1.0.7 version: 1.0.7 @@ -206,9 +206,6 @@ importers: '@aws-sdk/client-bedrock-runtime': specifier: ^3.738.0 version: 3.738.0 - '@braintrust/core': - specifier: ^0.0.85 - version: 0.0.85 '@breezystack/lamejs': specifier: ^1.2.7 version: 1.2.7 @@ -230,6 +227,9 @@ importers: ai: specifier: 2.2.37 version: 2.2.37(react@18.3.1)(solid-js@1.9.5)(svelte@4.2.19)(vue@3.5.13) + braintrust: + specifier: link:../../../sdk/js + version: link:../../../sdk/js cache-control-parser: specifier: ^2.0.6 version: 2.0.6 @@ -3783,7 +3783,7 @@ packages: '@vitest/spy': 2.1.9 estree-walker: 3.0.3 magic-string: 0.30.17 - vite: 5.4.10(@types/node@20.10.5) + vite: 5.4.10 dev: true /@vitest/pretty-format@2.1.9: @@ -3980,7 +3980,7 @@ packages: humanize-ms: 1.2.1 dev: false - /ai@2.2.22(react@18.3.1)(solid-js@1.9.4)(svelte@4.2.19)(vue@3.5.13): + /ai@2.2.22(react@18.3.1)(solid-js@1.9.5)(svelte@4.2.19)(vue@3.5.13): resolution: {integrity: sha512-H1TXjX3uGYU4bb8/GUTaY7BJ6YiMJDpp8WpmqwdVLmAh0+HufB7r27vCX0R4XXzJhdRaYp0ex6s9QvqkfvVA2A==} engines: {node: '>=14.6'} peerDependencies: @@ -4001,8 +4001,8 @@ packages: eventsource-parser: 1.0.0 nanoid: 3.3.6 react: 18.3.1 - solid-js: 1.9.4 - solid-swr-store: 0.10.7(solid-js@1.9.4)(swr-store@0.10.6) + solid-js: 1.9.5 + solid-swr-store: 0.10.7(solid-js@1.9.5)(swr-store@0.10.6) sswr: 2.0.0(svelte@4.2.19) svelte: 4.2.19 swr: 2.2.0(react@18.3.1) @@ -4797,17 +4797,6 @@ packages: ms: 2.0.0 dev: false - /debug@3.2.7: - resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true - dependencies: - ms: 2.1.3 - dev: true - /debug@3.2.7(supports-color@5.5.0): resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==} peerDependencies: @@ -5343,7 +5332,7 @@ packages: /eslint-import-resolver-node@0.3.9: resolution: {integrity: sha512-WFj2isz22JahUv+B788TlO3N6zL3nNJGU8CcZbPZvVEkBPaJdCV4vy5wyghty5ROFbCRnm132v8BScu5/1BQ8g==} dependencies: - debug: 3.2.7 + debug: 3.2.7(supports-color@5.5.0) is-core-module: 2.13.1 resolve: 1.22.8 transitivePeerDependencies: @@ -5395,7 +5384,7 @@ packages: optional: true dependencies: '@typescript-eslint/parser': 6.14.0(eslint@8.56.0)(typescript@4.7.4) - debug: 3.2.7 + debug: 3.2.7(supports-color@5.5.0) eslint: 8.56.0 eslint-import-resolver-node: 0.3.9 eslint-import-resolver-typescript: 
3.6.1(@typescript-eslint/parser@6.14.0)(eslint-import-resolver-node@0.3.9)(eslint-plugin-import@2.29.1)(eslint@8.56.0) @@ -5418,7 +5407,7 @@ packages: array.prototype.findlastindex: 1.2.3 array.prototype.flat: 1.3.2 array.prototype.flatmap: 1.3.2 - debug: 3.2.7 + debug: 3.2.7(supports-color@5.5.0) doctrine: 2.1.0 eslint: 8.56.0 eslint-import-resolver-node: 0.3.9 @@ -7980,14 +7969,6 @@ packages: engines: {node: '>=8.0.0'} dev: false - /solid-js@1.9.4: - resolution: {integrity: sha512-ipQl8FJ31bFUoBNScDQTG3BjN6+9Rg+Q+f10bUbnO6EOTTf5NGerJeHc7wyu5I4RMHEl/WwZwUmy/PTRgxxZ8g==} - dependencies: - csstype: 3.1.3 - seroval: 1.2.1 - seroval-plugins: 1.2.1(seroval@1.2.1) - dev: false - /solid-js@1.9.5: resolution: {integrity: sha512-ogI3DaFcyn6UhYhrgcyRAMbu/buBJitYQASZz5WzfQVPP10RD2AbCoRZ517psnezrasyCbWzIxZ6kVqet768xw==} dependencies: @@ -7996,17 +7977,6 @@ packages: seroval-plugins: 1.2.1(seroval@1.2.1) dev: false - /solid-swr-store@0.10.7(solid-js@1.9.4)(swr-store@0.10.6): - resolution: {integrity: sha512-A6d68aJmRP471aWqKKPE2tpgOiR5fH4qXQNfKIec+Vap+MGQm3tvXlT8n0I8UgJSlNAsSAUuw2VTviH2h3Vv5g==} - engines: {node: '>=10'} - peerDependencies: - solid-js: ^1.2 - swr-store: ^0.10 - dependencies: - solid-js: 1.9.4 - swr-store: 0.10.6 - dev: false - /solid-swr-store@0.10.7(solid-js@1.9.5)(swr-store@0.10.6): resolution: {integrity: sha512-A6d68aJmRP471aWqKKPE2tpgOiR5fH4qXQNfKIec+Vap+MGQm3tvXlT8n0I8UgJSlNAsSAUuw2VTviH2h3Vv5g==} engines: {node: '>=10'} @@ -8343,7 +8313,7 @@ packages: peerDependencies: vue: '>=3.2.26 < 4' dependencies: - vue: 3.5.13(typescript@5.5.4) + vue: 3.5.13(typescript@5.3.3) dev: false /tailwindcss@3.2.7(postcss@8.4.38): @@ -8502,6 +8472,17 @@ packages: resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} dev: true + /tsconfck@3.1.4: + resolution: {integrity: sha512-kdqWFGVJqe+KGYvlSO9NIaWn9jT1Ny4oKVzAJsKii5eoE9snzTJzL4+MMVOMn+fikWGFmKEylcXL710V/kIPJQ==} + engines: {node: ^18 || >=20} + hasBin: true + peerDependencies: + typescript: ^5.0.0 + peerDependenciesMeta: + typescript: + optional: true + dev: true + /tsconfck@3.1.4(typescript@5.5.4): resolution: {integrity: sha512-kdqWFGVJqe+KGYvlSO9NIaWn9jT1Ny4oKVzAJsKii5eoE9snzTJzL4+MMVOMn+fikWGFmKEylcXL710V/kIPJQ==} engines: {node: ^18 || >=20} @@ -8945,6 +8926,28 @@ packages: engines: {node: '>= 0.8'} dev: false + /vite-node@2.1.9: + resolution: {integrity: sha512-AM9aQ/IPrW/6ENLQg3AGY4K1N2TGZdR5e4gu/MmmR2xR3Ll1+dib+nook92g4TV3PXVyeyxdWwtaCAiUL0hMxA==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + dependencies: + cac: 6.7.14 + debug: 4.4.0 + es-module-lexer: 1.6.0 + pathe: 1.1.2 + vite: 5.4.10 + transitivePeerDependencies: + - '@types/node' + - less + - lightningcss + - sass + - sass-embedded + - stylus + - sugarss + - supports-color + - terser + dev: true + /vite-node@2.1.9(@types/node@20.10.5): resolution: {integrity: sha512-AM9aQ/IPrW/6ENLQg3AGY4K1N2TGZdR5e4gu/MmmR2xR3Ll1+dib+nook92g4TV3PXVyeyxdWwtaCAiUL0hMxA==} engines: {node: ^18.0.0 || >=20.0.0} @@ -8967,6 +8970,22 @@ packages: - terser dev: true + /vite-tsconfig-paths@4.3.2: + resolution: {integrity: sha512-0Vd/a6po6Q+86rPlntHye7F31zA2URZMbH8M3saAZ/xR9QoGN/L21bxEGfXdWmFdNkqPpRdxFT7nmNe12e9/uA==} + peerDependencies: + vite: '*' + peerDependenciesMeta: + vite: + optional: true + dependencies: + debug: 4.3.7 + globrex: 0.1.2 + tsconfck: 3.1.4 + transitivePeerDependencies: + - supports-color + - typescript + dev: true + /vite-tsconfig-paths@4.3.2(typescript@5.5.4): resolution: {integrity: 
sha512-0Vd/a6po6Q+86rPlntHye7F31zA2URZMbH8M3saAZ/xR9QoGN/L21bxEGfXdWmFdNkqPpRdxFT7nmNe12e9/uA==} peerDependencies: @@ -8983,6 +9002,44 @@ packages: - typescript dev: true + /vite@5.4.10: + resolution: {integrity: sha512-1hvaPshuPUtxeQ0hsVH3Mud0ZanOLwVTneA1EgbAM5LhaZEqyPWGRQ7BtaMvUrTDeEaC8pxtj6a6jku3x4z6SQ==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + peerDependencies: + '@types/node': ^18.0.0 || >=20.0.0 + less: '*' + lightningcss: ^1.21.0 + sass: '*' + sass-embedded: '*' + stylus: '*' + sugarss: '*' + terser: ^5.4.0 + peerDependenciesMeta: + '@types/node': + optional: true + less: + optional: true + lightningcss: + optional: true + sass: + optional: true + sass-embedded: + optional: true + stylus: + optional: true + sugarss: + optional: true + terser: + optional: true + dependencies: + esbuild: 0.21.5 + postcss: 8.5.1 + rollup: 4.24.0 + optionalDependencies: + fsevents: 2.3.3 + dev: true + /vite@5.4.10(@types/node@20.10.5): resolution: {integrity: sha512-1hvaPshuPUtxeQ0hsVH3Mud0ZanOLwVTneA1EgbAM5LhaZEqyPWGRQ7BtaMvUrTDeEaC8pxtj6a6jku3x4z6SQ==} engines: {node: ^18.0.0 || >=20.0.0} @@ -9022,6 +9079,63 @@ packages: fsevents: 2.3.3 dev: true + /vitest@2.1.9: + resolution: {integrity: sha512-MSmPM9REYqDGBI8439mA4mWhV5sKmDlBKWIYbA3lRb2PTHACE0mgKwA8yQ2xq9vxDTuk4iPrECBAEW2aoFXY0Q==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + peerDependencies: + '@edge-runtime/vm': '*' + '@types/node': ^18.0.0 || >=20.0.0 + '@vitest/browser': 2.1.9 + '@vitest/ui': 2.1.9 + happy-dom: '*' + jsdom: '*' + peerDependenciesMeta: + '@edge-runtime/vm': + optional: true + '@types/node': + optional: true + '@vitest/browser': + optional: true + '@vitest/ui': + optional: true + happy-dom: + optional: true + jsdom: + optional: true + dependencies: + '@vitest/expect': 2.1.9 + '@vitest/mocker': 2.1.9(vite@5.4.10) + '@vitest/pretty-format': 2.1.9 + '@vitest/runner': 2.1.9 + '@vitest/snapshot': 2.1.9 + '@vitest/spy': 2.1.9 + '@vitest/utils': 2.1.9 + chai: 5.1.2 + debug: 4.4.0 + expect-type: 1.2.0 + magic-string: 0.30.17 + pathe: 1.1.2 + std-env: 3.8.1 + tinybench: 2.9.0 + tinyexec: 0.3.1 + tinypool: 1.0.1 + tinyrainbow: 1.2.0 + vite: 5.4.10 + vite-node: 2.1.9 + why-is-node-running: 2.3.0 + transitivePeerDependencies: + - less + - lightningcss + - msw + - sass + - sass-embedded + - stylus + - sugarss + - supports-color + - terser + dev: true + /vitest@2.1.9(@types/node@20.10.5): resolution: {integrity: sha512-MSmPM9REYqDGBI8439mA4mWhV5sKmDlBKWIYbA3lRb2PTHACE0mgKwA8yQ2xq9vxDTuk4iPrECBAEW2aoFXY0Q==} engines: {node: ^18.0.0 || >=20.0.0} From ea0bacf205c53bfd2dd91bd1ba370626c7c057c4 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Tue, 6 May 2025 19:23:11 -0500 Subject: [PATCH 12/51] small cleanup --- packages/proxy/schema/models.ts | 2 ++ packages/proxy/src/proxy.ts | 7 ++----- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/packages/proxy/schema/models.ts b/packages/proxy/schema/models.ts index af826300..93d23efc 100644 --- a/packages/proxy/schema/models.ts +++ b/packages/proxy/schema/models.ts @@ -2528,6 +2528,8 @@ export const AvailableModels: { [name: string]: ModelSpec } = { displayName: "Command Light", }, + // TODO: add anthropic 3.7 converse + // VERTEX MODELS "publishers/google/models/gemini-2.0-flash": { format: "google", diff --git a/packages/proxy/src/proxy.ts b/packages/proxy/src/proxy.ts index a11f2b18..fe345701 100644 --- a/packages/proxy/src/proxy.ts +++ b/packages/proxy/src/proxy.ts @@ -643,6 +643,7 @@ export async function proxyV1({ if (result) { if (result.usage) { 
spanLogger.log({ + // TODO: we should include the proxy meters metrics here metrics: { tokens: result.usage.total_tokens, prompt_tokens: result.usage.prompt_tokens, @@ -899,8 +900,6 @@ async function fetchModelLoop( ttl_seconds?: number, ) => Promise, ): Promise<{ modelResponse: ModelResponse; secretName?: string | null }> { - const requestId = ++loopIndex; - const endpointCalls = meter.createCounter("endpoint_calls"); const endpointFailures = meter.createCounter("endpoint_failures"); const endpointRetryableErrors = meter.createCounter( @@ -1344,7 +1343,7 @@ function chatCompletionMessageFromResponseOutput( }; } -// TODO: should return the reasoning +// TODO(ibolmo): should return the reasoning function chatCompletionFromResponse(response: OpenAIResponse): ChatCompletion { return { choices: [ @@ -1679,8 +1678,6 @@ async function fetchOpenAI( }); } - // TODO: perhaps convert reasoning.effort -> reasoning_effort? - const hasReasoning = bodyData.reasoning || (typeof bodyData.model === "string" && From 383514ba07fc04e786d8aee4d4db25f980a81c39 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Thu, 8 May 2025 11:48:23 -0500 Subject: [PATCH 13/51] start local tests for proxyV1 and add coverage for anthorpic --- .envrc | 2 + packages/proxy/package.json | 2 +- .../proxy/src/providers/anthropic.test.ts | 80 +++++++++ packages/proxy/utils/tests.ts | 170 ++++++++++++++++++ pnpm-lock.yaml | 156 +--------------- 5 files changed, 259 insertions(+), 151 deletions(-) create mode 100644 .envrc create mode 100644 packages/proxy/src/providers/anthropic.test.ts create mode 100644 packages/proxy/utils/tests.ts diff --git a/.envrc b/.envrc new file mode 100644 index 00000000..43edf50a --- /dev/null +++ b/.envrc @@ -0,0 +1,2 @@ +source_up_if_exists +dotenv_if_exists diff --git a/packages/proxy/package.json b/packages/proxy/package.json index a03fb1e6..4589baff 100644 --- a/packages/proxy/package.json +++ b/packages/proxy/package.json @@ -82,7 +82,7 @@ "@anthropic-ai/sdk": "^0.39.0", "@apidevtools/json-schema-ref-parser": "^11.9.1", "@aws-sdk/client-bedrock-runtime": "^3.738.0", - "@braintrust/core": "workspace:*", + "@braintrust/core": "link:../../../sdk/core/js", "@breezystack/lamejs": "^1.2.7", "@google/generative-ai": "^0.24.0", "@opentelemetry/api": "^1.7.0", diff --git a/packages/proxy/src/providers/anthropic.test.ts b/packages/proxy/src/providers/anthropic.test.ts new file mode 100644 index 00000000..c65d3744 --- /dev/null +++ b/packages/proxy/src/providers/anthropic.test.ts @@ -0,0 +1,80 @@ +import { describe, it, expect } from "vitest"; +import { callProxyV1 } from "../../utils/tests"; +import { ChatCompletion, ChatCompletionChunk } from "openai/resources"; + +describe("Anthropic Provider", () => { + it("should convert OpenAI streaming request to Anthropic and back", async () => { + const { events } = await callProxyV1({ + body: { + model: "claude-2", + messages: [ + { role: "system", content: "You are a helpful assistant." }, + { role: "user", content: "Tell me a short joke about programming." 
}, + ], + stream: true, + max_tokens: 150, + }, + }); + + const streamedEvents = events(); + + expect(streamedEvents.length).toBeGreaterThan(0); + + streamedEvents.forEach((event) => { + expect(event.type).toBe("event"); + + const data = event.data; + expect(data.id).toBeTruthy(); + expect(data.object).toBe("chat.completion.chunk"); + expect(data.created).toBeTruthy(); + expect(Array.isArray(data.choices)).toBe(true); + + if (data.choices[0]?.delta?.content) { + expect(data.choices[0].delta.content.trim()).not.toBe(""); + } + }); + + const hasContent = streamedEvents.some( + (event) => event.data.choices[0]?.delta?.content !== undefined, + ); + expect(hasContent).toBe(true); + }); + + it.only("should convert OpenAI non-streaming request to Anthropic and back", async () => { + const { json } = await callProxyV1({ + body: { + model: "claude-2", + messages: [ + { role: "system", content: "You are a helpful assistant." }, + { role: "user", content: "Tell me a short joke about programming." }, + ], + stream: false, + max_tokens: 150, + }, + }); + + expect(json()).toEqual({ + choices: [ + { + finish_reason: "stop", + index: 0, + logprobs: null, + message: { + content: expect.any(String), + refusal: null, + role: "assistant", + }, + }, + ], + created: expect.any(Number), + id: expect.any(String), + model: "claude-2.1", + object: "chat.completion", + usage: { + completion_tokens: expect.any(Number), + prompt_tokens: expect.any(Number), + total_tokens: expect.any(Number), + }, + }); + }); +}); diff --git a/packages/proxy/utils/tests.ts b/packages/proxy/utils/tests.ts new file mode 100644 index 00000000..050b2c6e --- /dev/null +++ b/packages/proxy/utils/tests.ts @@ -0,0 +1,170 @@ +/* eslint-disable turbo/no-undeclared-env-vars */ + +import { TextDecoder } from "util"; +import { Buffer } from "node:buffer"; +import { proxyV1 } from "../src/proxy"; +import { APISecret, AvailableModels, getModelEndpointTypes } from "@schema"; +import { createParser, ParsedEvent, ParseEvent } from "eventsource-parser"; +import { mergeDicts } from "@braintrust/core"; +import { assert } from "vitest"; + +export function createResponseStream(): [ + WritableStream, + Promise, +] { + const chunks: Uint8Array[] = []; + let resolveChunks: (chunks: Uint8Array[]) => void; + let rejectChunks: (error: Error) => void; + + const chunksPromise = new Promise((resolve, reject) => { + resolveChunks = resolve; + rejectChunks = reject; + }); + + const writableStream = new WritableStream({ + write(chunk) { + chunks.push(chunk); + }, + close() { + resolveChunks(chunks); + }, + abort(reason) { + rejectChunks(new Error(`Stream aborted: ${reason}`)); + }, + }); + + return [writableStream, chunksPromise]; +} + +export function createHeaderHandlers() { + const headers: Record = {}; + let statusCode = 200; + + const setHeader = (name: string, value: string) => { + headers[name] = value; + }; + + const setStatusCode = (code: number) => { + statusCode = code; + }; + + return { headers, statusCode, setHeader, setStatusCode }; +} + +export const getKnownApiSecrets: Parameters< + typeof proxyV1 +>[0]["getApiSecrets"] = async ( + useCache: boolean, + authToken: string, + model: string | null, +) => { + const endpointTypes = model && getModelEndpointTypes(model); + if (!endpointTypes?.length) throw new Error(`Unknown model: ${model}`); + + return [ + { + type: "anthropic" as const, + secret: process.env.ANTHROPIC_API_KEY || "", + name: "anthropic", + }, + { + type: "google" as const, + secret: process.env.VERTEX_AI_API_KEY || "", + name: "google", + }, 
+ { + type: "openai" as const, + secret: process.env.OPENAI_API_KEY || "", + name: "openai", + }, + ].filter((secret) => !!secret.secret && endpointTypes.includes(secret.type)); +}; + +export async function callProxyV1({ + body, + ...request +}: Partial, "body">> & { + body: string | object; +}) { + const [writableStream, chunksPromise] = createResponseStream(); + const { headers, statusCode, setHeader, setStatusCode } = + createHeaderHandlers(); + + let timeoutId: NodeJS.Timeout | null = null; + const timeoutPromise = new Promise((_, reject) => { + timeoutId = setTimeout(() => { + reject(new Error(`Request timed out after 30s`)); + }, 30000); + }); + + try { + const requestBody = typeof body === "string" ? body : JSON.stringify(body); + + const proxyPromise = proxyV1({ + method: "POST", + url: "/chat/completions", + proxyHeaders: { + "content-type": "application/json", + authorization: `Bearer dummy-token`, + }, + setHeader, + setStatusCode, + res: writableStream, + getApiSecrets: getKnownApiSecrets, + cacheGet: async () => null, + cachePut: async () => {}, + digest: async (message: string) => + Buffer.from(message).toString("base64"), + ...request, + body: requestBody, + }); + + await proxyPromise; + + const chunks = await Promise.race([chunksPromise, timeoutPromise]); + const responseText = new TextDecoder().decode(Buffer.concat(chunks)); + + return { + chunks, + headers, + statusCode, + responseText, + events() { + return chucksToEvents(chunks); + }, + json() { + try { + return JSON.parse(responseText) as Data; + } catch (e) { + return null; + } + }, + }; + } catch (error) { + throw error; + } finally { + if (timeoutId) { + clearTimeout(timeoutId); + } + } +} + +const chucksToEvents = (chunks: Uint8Array[]) => { + const textDecoder = new TextDecoder(); + const results: (Omit & { data: ChunkData })[] = []; + + const parser = createParser((event) => { + if (event.type === "event" && event.data !== "[DONE]") { + results.push({ + ...event, + data: JSON.parse(event.data) as ChunkData, + }); + } + }); + + for (const chunk of chunks) { + parser.feed(textDecoder.decode(chunk)); + } + + return results; +}; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 8b8644f6..2b418f84 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -22,10 +22,10 @@ importers: version: 2.3.3 vite-tsconfig-paths: specifier: ^4.3.2 - version: 4.3.2 + version: 4.3.2(typescript@5.5.4) vitest: specifier: ^2.1.9 - version: 2.1.9 + version: 2.1.9(@types/node@20.10.5) apis/cloudflare: dependencies: @@ -206,6 +206,9 @@ importers: '@aws-sdk/client-bedrock-runtime': specifier: ^3.738.0 version: 3.738.0 + '@braintrust/core': + specifier: link:../../../sdk/core/js + version: link:../../../sdk/core/js '@breezystack/lamejs': specifier: ^1.2.7 version: 1.2.7 @@ -227,9 +230,6 @@ importers: ai: specifier: 2.2.37 version: 2.2.37(react@18.3.1)(solid-js@1.9.5)(svelte@4.2.19)(vue@3.5.13) - braintrust: - specifier: link:../../../sdk/js - version: link:../../../sdk/js cache-control-parser: specifier: ^2.0.6 version: 2.0.6 @@ -3717,7 +3717,7 @@ packages: '@vitest/spy': 2.1.9 estree-walker: 3.0.3 magic-string: 0.30.17 - vite: 5.4.10 + vite: 5.4.10(@types/node@20.10.5) dev: true /@vitest/pretty-format@2.1.9: @@ -8336,17 +8336,6 @@ packages: resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} dev: true - /tsconfck@3.1.4: - resolution: {integrity: sha512-kdqWFGVJqe+KGYvlSO9NIaWn9jT1Ny4oKVzAJsKii5eoE9snzTJzL4+MMVOMn+fikWGFmKEylcXL710V/kIPJQ==} - engines: {node: ^18 || >=20} - 
hasBin: true - peerDependencies: - typescript: ^5.0.0 - peerDependenciesMeta: - typescript: - optional: true - dev: true - /tsconfck@3.1.4(typescript@5.5.4): resolution: {integrity: sha512-kdqWFGVJqe+KGYvlSO9NIaWn9jT1Ny4oKVzAJsKii5eoE9snzTJzL4+MMVOMn+fikWGFmKEylcXL710V/kIPJQ==} engines: {node: ^18 || >=20} @@ -8790,28 +8779,6 @@ packages: engines: {node: '>= 0.8'} dev: false - /vite-node@2.1.9: - resolution: {integrity: sha512-AM9aQ/IPrW/6ENLQg3AGY4K1N2TGZdR5e4gu/MmmR2xR3Ll1+dib+nook92g4TV3PXVyeyxdWwtaCAiUL0hMxA==} - engines: {node: ^18.0.0 || >=20.0.0} - hasBin: true - dependencies: - cac: 6.7.14 - debug: 4.4.0 - es-module-lexer: 1.6.0 - pathe: 1.1.2 - vite: 5.4.10 - transitivePeerDependencies: - - '@types/node' - - less - - lightningcss - - sass - - sass-embedded - - stylus - - sugarss - - supports-color - - terser - dev: true - /vite-node@2.1.9(@types/node@20.10.5): resolution: {integrity: sha512-AM9aQ/IPrW/6ENLQg3AGY4K1N2TGZdR5e4gu/MmmR2xR3Ll1+dib+nook92g4TV3PXVyeyxdWwtaCAiUL0hMxA==} engines: {node: ^18.0.0 || >=20.0.0} @@ -8834,22 +8801,6 @@ packages: - terser dev: true - /vite-tsconfig-paths@4.3.2: - resolution: {integrity: sha512-0Vd/a6po6Q+86rPlntHye7F31zA2URZMbH8M3saAZ/xR9QoGN/L21bxEGfXdWmFdNkqPpRdxFT7nmNe12e9/uA==} - peerDependencies: - vite: '*' - peerDependenciesMeta: - vite: - optional: true - dependencies: - debug: 4.3.7 - globrex: 0.1.2 - tsconfck: 3.1.4 - transitivePeerDependencies: - - supports-color - - typescript - dev: true - /vite-tsconfig-paths@4.3.2(typescript@5.5.4): resolution: {integrity: sha512-0Vd/a6po6Q+86rPlntHye7F31zA2URZMbH8M3saAZ/xR9QoGN/L21bxEGfXdWmFdNkqPpRdxFT7nmNe12e9/uA==} peerDependencies: @@ -8866,44 +8817,6 @@ packages: - typescript dev: true - /vite@5.4.10: - resolution: {integrity: sha512-1hvaPshuPUtxeQ0hsVH3Mud0ZanOLwVTneA1EgbAM5LhaZEqyPWGRQ7BtaMvUrTDeEaC8pxtj6a6jku3x4z6SQ==} - engines: {node: ^18.0.0 || >=20.0.0} - hasBin: true - peerDependencies: - '@types/node': ^18.0.0 || >=20.0.0 - less: '*' - lightningcss: ^1.21.0 - sass: '*' - sass-embedded: '*' - stylus: '*' - sugarss: '*' - terser: ^5.4.0 - peerDependenciesMeta: - '@types/node': - optional: true - less: - optional: true - lightningcss: - optional: true - sass: - optional: true - sass-embedded: - optional: true - stylus: - optional: true - sugarss: - optional: true - terser: - optional: true - dependencies: - esbuild: 0.21.5 - postcss: 8.5.1 - rollup: 4.24.0 - optionalDependencies: - fsevents: 2.3.3 - dev: true - /vite@5.4.10(@types/node@20.10.5): resolution: {integrity: sha512-1hvaPshuPUtxeQ0hsVH3Mud0ZanOLwVTneA1EgbAM5LhaZEqyPWGRQ7BtaMvUrTDeEaC8pxtj6a6jku3x4z6SQ==} engines: {node: ^18.0.0 || >=20.0.0} @@ -8943,63 +8856,6 @@ packages: fsevents: 2.3.3 dev: true - /vitest@2.1.9: - resolution: {integrity: sha512-MSmPM9REYqDGBI8439mA4mWhV5sKmDlBKWIYbA3lRb2PTHACE0mgKwA8yQ2xq9vxDTuk4iPrECBAEW2aoFXY0Q==} - engines: {node: ^18.0.0 || >=20.0.0} - hasBin: true - peerDependencies: - '@edge-runtime/vm': '*' - '@types/node': ^18.0.0 || >=20.0.0 - '@vitest/browser': 2.1.9 - '@vitest/ui': 2.1.9 - happy-dom: '*' - jsdom: '*' - peerDependenciesMeta: - '@edge-runtime/vm': - optional: true - '@types/node': - optional: true - '@vitest/browser': - optional: true - '@vitest/ui': - optional: true - happy-dom: - optional: true - jsdom: - optional: true - dependencies: - '@vitest/expect': 2.1.9 - '@vitest/mocker': 2.1.9(vite@5.4.10) - '@vitest/pretty-format': 2.1.9 - '@vitest/runner': 2.1.9 - '@vitest/snapshot': 2.1.9 - '@vitest/spy': 2.1.9 - '@vitest/utils': 2.1.9 - chai: 5.1.2 - debug: 4.4.0 - expect-type: 1.2.0 
- magic-string: 0.30.17 - pathe: 1.1.2 - std-env: 3.8.1 - tinybench: 2.9.0 - tinyexec: 0.3.1 - tinypool: 1.0.1 - tinyrainbow: 1.2.0 - vite: 5.4.10 - vite-node: 2.1.9 - why-is-node-running: 2.3.0 - transitivePeerDependencies: - - less - - lightningcss - - msw - - sass - - sass-embedded - - stylus - - sugarss - - supports-color - - terser - dev: true - /vitest@2.1.9(@types/node@20.10.5): resolution: {integrity: sha512-MSmPM9REYqDGBI8439mA4mWhV5sKmDlBKWIYbA3lRb2PTHACE0mgKwA8yQ2xq9vxDTuk4iPrECBAEW2aoFXY0Q==} engines: {node: ^18.0.0 || >=20.0.0} From 5250106e22679c6a4ded292e54a981cc9e979b39 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Thu, 8 May 2025 16:05:07 -0500 Subject: [PATCH 14/51] one cannot assume the bodyData is a valid json --- packages/proxy/src/proxy.ts | 24 ++++++++++++------------ 1 file changed, 12 insertions(+), 12 deletions(-) diff --git a/packages/proxy/src/proxy.ts b/packages/proxy/src/proxy.ts index 94cd80a2..a6e72361 100644 --- a/packages/proxy/src/proxy.ts +++ b/packages/proxy/src/proxy.ts @@ -923,9 +923,9 @@ async function fetchModelLoop( url === "/responses" || url === "/anthropic/messages") && isObject(bodyData) && - bodyData.model + bodyData?.model ) { - model = bodyData.model; + model = bodyData?.model; } else if (method === "POST") { const m = url.match(GOOGLE_URL_REGEX); if (m) { @@ -1679,8 +1679,8 @@ async function fetchOpenAI( } const hasReasoning = - bodyData.reasoning || - (typeof bodyData.model === "string" && + bodyData?.reasoning || + (typeof bodyData?.model === "string" && modelProviderHasReasoning.openai?.test(bodyData.model)); if (hasReasoning) { @@ -1693,9 +1693,9 @@ async function fetchOpenAI( // Only remove system messages for old O1 models. if ( - bodyData.messages && + bodyData?.messages && ["o1-preview", "o1-mini", "o1-preview-2024-09-12"].includes( - bodyData.model, + bodyData?.model, ) ) { bodyData.messages = bodyData.messages.map((m: any) => ({ @@ -1705,7 +1705,7 @@ async function fetchOpenAI( } } - if (bodyData.messages) { + if (bodyData?.messages) { bodyData.messages = await normalizeOpenAIMessages(bodyData.messages); } @@ -1719,7 +1719,7 @@ async function fetchOpenAI( }); } - if (bodyData.model.startsWith("o1-pro")) { + if (bodyData?.model.startsWith("o1-pro")) { return fetchOpenAIResponsesTranslate({ headers, body: bodyData, @@ -1728,7 +1728,7 @@ async function fetchOpenAI( let isManagedStructuredOutput = false; const responseFormatParsed = responseFormatSchema.safeParse( - bodyData.response_format, + bodyData?.response_format, ); if (responseFormatParsed.success) { switch (responseFormatParsed.data.type) { @@ -1788,7 +1788,7 @@ async function fetchOpenAI( let stream = proxyResponse.body; if (isManagedStructuredOutput && stream) { - if (bodyData.stream) { + if (bodyData?.stream) { stream = stream.pipeThrough( createEventStreamTransformer((data) => { const chunk: ChatCompletionChunk = JSON.parse(data); @@ -2084,9 +2084,9 @@ async function fetchAnthropicChatCompletions({ if (m.tool_calls) { content.push(...openAIToolCallsToAnthropicToolUse(m.tool_calls)); } - if (m.reasoning) { + if (m?.reasoning) { content.unshift( - ...m.reasoning.map((r) => ({ + ...m?.reasoning.map((r) => ({ type: "thinking", thinking: r.content, signature: r.id, From de802e3ddd013dd6b2bd8145c585e0819b8cc84d Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Thu, 8 May 2025 16:30:41 -0500 Subject: [PATCH 15/51] add test coverage for reasoning and improve type handling when we extend the type --- packages/proxy/src/index.ts | 1 + 
.../proxy/src/providers/anthropic.test.ts | 243 +++++++++++++----- packages/proxy/src/providers/anthropic.ts | 24 +- packages/proxy/src/proxy.ts | 5 +- packages/proxy/src/types.ts | 41 +++ packages/proxy/utils/tests.ts | 8 +- 6 files changed, 241 insertions(+), 81 deletions(-) create mode 100644 packages/proxy/src/types.ts diff --git a/packages/proxy/src/index.ts b/packages/proxy/src/index.ts index d6a42938..452300f7 100644 --- a/packages/proxy/src/index.ts +++ b/packages/proxy/src/index.ts @@ -1,3 +1,4 @@ export * from "./util"; export * from "./proxy"; export * from "./metrics"; +export * from "./types"; diff --git a/packages/proxy/src/providers/anthropic.test.ts b/packages/proxy/src/providers/anthropic.test.ts index c65d3744..4cb71ab5 100644 --- a/packages/proxy/src/providers/anthropic.test.ts +++ b/packages/proxy/src/providers/anthropic.test.ts @@ -1,80 +1,201 @@ import { describe, it, expect } from "vitest"; import { callProxyV1 } from "../../utils/tests"; -import { ChatCompletion, ChatCompletionChunk } from "openai/resources"; +import { + ExtendedOpenAIChatCompletion, + ExtendedOpenAIChatCompletionChunk, + ExtendedOpenAIChatCompletionCreateParams, +} from "@lib/types"; -describe("Anthropic Provider", () => { - it("should convert OpenAI streaming request to Anthropic and back", async () => { - const { events } = await callProxyV1({ - body: { - model: "claude-2", - messages: [ - { role: "system", content: "You are a helpful assistant." }, - { role: "user", content: "Tell me a short joke about programming." }, - ], - stream: true, - max_tokens: 150, - }, - }); +it("should convert OpenAI streaming request to Anthropic and back", async () => { + const { events } = await callProxyV1< + ExtendedOpenAIChatCompletionCreateParams, + ExtendedOpenAIChatCompletionChunk + >({ + body: { + model: "claude-2", + messages: [ + { role: "system", content: "You are a helpful assistant." }, + { role: "user", content: "Tell me a short joke about programming." 
}, + ], + stream: true, + max_tokens: 150, + }, + }); + + const streamedEvents = events(); - const streamedEvents = events(); + expect(streamedEvents.length).toBeGreaterThan(0); - expect(streamedEvents.length).toBeGreaterThan(0); + streamedEvents.forEach((event) => { + expect(event.type).toBe("event"); - streamedEvents.forEach((event) => { - expect(event.type).toBe("event"); + const data = event.data; + expect(data.id).toBeTruthy(); + expect(data.object).toBe("chat.completion.chunk"); + expect(data.created).toBeTruthy(); + expect(Array.isArray(data.choices)).toBe(true); - const data = event.data; - expect(data.id).toBeTruthy(); - expect(data.object).toBe("chat.completion.chunk"); - expect(data.created).toBeTruthy(); - expect(Array.isArray(data.choices)).toBe(true); + if (data.choices[0]?.delta?.content) { + expect(data.choices[0].delta.content.trim()).not.toBe(""); + } + }); - if (data.choices[0]?.delta?.content) { - expect(data.choices[0].delta.content.trim()).not.toBe(""); - } - }); + const hasContent = streamedEvents.some( + (event) => event.data.choices[0]?.delta?.content !== undefined, + ); + expect(hasContent).toBe(true); +}); - const hasContent = streamedEvents.some( - (event) => event.data.choices[0]?.delta?.content !== undefined, - ); - expect(hasContent).toBe(true); +it("should convert OpenAI non-streaming request to Anthropic and back", async () => { + const { json } = await callProxyV1< + ExtendedOpenAIChatCompletionCreateParams, + ExtendedOpenAIChatCompletion + >({ + body: { + model: "claude-2", + messages: [ + { role: "system", content: "You are a helpful assistant." }, + { role: "user", content: "Tell me a short joke about programming." }, + ], + stream: false, + max_tokens: 150, + }, }); - it.only("should convert OpenAI non-streaming request to Anthropic and back", async () => { - const { json } = await callProxyV1({ - body: { - model: "claude-2", - messages: [ - { role: "system", content: "You are a helpful assistant." }, - { role: "user", content: "Tell me a short joke about programming." 
}, - ], - stream: false, - max_tokens: 150, + expect(json()).toEqual({ + choices: [ + { + finish_reason: "stop", + index: 0, + logprobs: null, + message: { + content: expect.any(String), + refusal: null, + role: "assistant", + }, }, - }); + ], + created: expect.any(Number), + id: expect.any(String), + model: "claude-2.1", + object: "chat.completion", + usage: { + completion_tokens: expect.any(Number), + prompt_tokens: expect.any(Number), + total_tokens: expect.any(Number), + }, + }); +}); + +it("should accept and return reasoning/thinking params and detail streaming", async () => { + const { events } = await callProxyV1< + ExtendedOpenAIChatCompletionCreateParams, + ExtendedOpenAIChatCompletionChunk + >({ + body: { + model: "claude-3-7-sonnet-latest", + reasoning_effort: "medium", + messages: [ + { + role: "user", + content: "How many rs in 'ferrocarril'", + }, + { + role: "assistant", + content: "There are 4 letter 'r's in the word \"ferrocarril\".", + refusal: null, + reasoning: [ + { + id: "ErUBCkYIAxgCIkDWT/7OwDfkVSgdtjIwGqUpzIHQXkiBQQpIqzh6WnHHoGxN1ilJxIlnJQNarUI4Jo/3WWrmRnnqOU3LtAakLr4REgwvY1G5jTSbLHWOo4caDKNco+CyDfNT56iXBCIwrNSFdvNJNsBaa0hpbTZ6N4Q4z4/6l+gu8hniKnftBhS+IuzcncsuJqKxWKs/EVyjKh3tvH/eDeYovKskosVSO5x64iebuze1S8JbavI3UBgC", + content: + "To count the number of 'r's in the word 'ferrocarril', I'll just go through the word letter by letter.\n\n'ferrocarril' has the following letters:\nf-e-r-r-o-c-a-r-r-i-l\n\nLooking at each letter:\n- 'f': not an 'r'\n- 'e': not an 'r'\n- 'r': This is an 'r', so that's 1.\n- 'r': This is an 'r', so that's 2.\n- 'o': not an 'r'\n- 'c': not an 'r'\n- 'a': not an 'r'\n- 'r': This is an 'r', so that's 3.\n- 'r': This is an 'r', so that's 4.\n- 'i': not an 'r'\n- 'l': not an 'r'\n\nSo there are 4 'r's in the word 'ferrocarril'.", + }, + ], + }, + { + role: "user", + content: "How many e in what you said?", + }, + ], + stream: true, + }, + }); + + const streamedEvents = events(); + expect(streamedEvents.length).toBeGreaterThan(0); + + const hasReasoning = streamedEvents.some( + (event) => event.data.choices[0]?.delta?.reasoning?.content !== undefined, + ); + expect(hasReasoning).toBe(true); + + const hasContent = streamedEvents.some( + (event) => event.data.choices[0]?.delta?.content !== undefined, + ); + expect(hasContent).toBe(true); +}); - expect(json()).toEqual({ - choices: [ +it("should accept and return reasoning/thinking params and detail non-streaming", async () => { + const { json } = await callProxyV1< + ExtendedOpenAIChatCompletionCreateParams, + ExtendedOpenAIChatCompletionChunk + >({ + body: { + model: "claude-3-7-sonnet-20250219", + reasoning_effort: "medium", + stream: false, + messages: [ { - finish_reason: "stop", - index: 0, - logprobs: null, - message: { - content: expect.any(String), - refusal: null, - role: "assistant", - }, + role: "user", + content: "How many rs in 'ferrocarril'", + }, + { + role: "assistant", + content: "There are 4 letter 'r's in the word \"ferrocarril\".", + refusal: null, + reasoning: [ + { + id: "ErUBCkYIAxgCIkDWT/7OwDfkVSgdtjIwGqUpzIHQXkiBQQpIqzh6WnHHoGxN1ilJxIlnJQNarUI4Jo/3WWrmRnnqOU3LtAakLr4REgwvY1G5jTSbLHWOo4caDKNco+CyDfNT56iXBCIwrNSFdvNJNsBaa0hpbTZ6N4Q4z4/6l+gu8hniKnftBhS+IuzcncsuJqKxWKs/EVyjKh3tvH/eDeYovKskosVSO5x64iebuze1S8JbavI3UBgC", + content: + "To count the number of 'r's in the word 'ferrocarril', I'll just go through the word letter by letter.\n\n'ferrocarril' has the following letters:\nf-e-r-r-o-c-a-r-r-i-l\n\nLooking at each letter:\n- 'f': not an 'r'\n- 'e': not an 'r'\n- 'r': This is 
an 'r', so that's 1.\n- 'r': This is an 'r', so that's 2.\n- 'o': not an 'r'\n- 'c': not an 'r'\n- 'a': not an 'r'\n- 'r': This is an 'r', so that's 3.\n- 'r': This is an 'r', so that's 4.\n- 'i': not an 'r'\n- 'l': not an 'r'\n\nSo there are 4 'r's in the word 'ferrocarril'.", + }, + ], + }, + { + role: "user", + content: "How many e in what you said?", }, ], - created: expect.any(Number), - id: expect.any(String), - model: "claude-2.1", - object: "chat.completion", - usage: { - completion_tokens: expect.any(Number), - prompt_tokens: expect.any(Number), - total_tokens: expect.any(Number), + }, + }); + + expect(json()).toEqual({ + choices: [ + { + finish_reason: "stop", + index: 0, + logprobs: null, + message: { + content: expect.any(String), + reasoning: [ + { + content: expect.any(String), + id: expect.any(String), + }, + ], + refusal: null, + role: "assistant", + }, }, - }); + ], + created: expect.any(Number), + id: expect.any(String), + model: "claude-3-7-sonnet-20250219", + object: "chat.completion", + usage: { + completion_tokens: expect.any(Number), + prompt_tokens: expect.any(Number), + total_tokens: expect.any(Number), + }, }); }); diff --git a/packages/proxy/src/providers/anthropic.ts b/packages/proxy/src/providers/anthropic.ts index fd62aad5..2fd6b0cc 100644 --- a/packages/proxy/src/providers/anthropic.ts +++ b/packages/proxy/src/providers/anthropic.ts @@ -7,7 +7,7 @@ import { ChatCompletionToolMessageParam, CompletionUsage, } from "openai/resources"; -import { getTimestampInSeconds, isEmpty, ModelResponse } from "../util"; +import { getTimestampInSeconds, isEmpty } from "../util"; import { Message } from "@braintrust/core/typespecs"; import { z } from "zod"; import { @@ -23,7 +23,10 @@ import { Base64ImageSource, } from "@anthropic-ai/sdk/resources/messages"; import { ChatCompletionCreateParamsBase } from "openai/resources/chat/completions"; -import { type Reasoning } from "@braintrust/core/typespecs"; +import { + ExtendedOpenAIChatCompletionChunk, + ExtendedOpenAIChatCompletionChunkChoiceDelta, +} from "@lib/types"; /* Example events: @@ -203,7 +206,7 @@ export function anthropicEventToOpenAIEvent( usage: Partial, eventU: unknown, isStructuredOutput: boolean, -): { event: ChatCompletionChunk | null; finished: boolean } { +): { event: ExtendedOpenAIChatCompletionChunk | null; finished: boolean } { const parsedEvent = anthropicStreamEventSchema.safeParse(eventU); if (!parsedEvent.success) { throw new Error( @@ -224,7 +227,9 @@ export function anthropicEventToOpenAIEvent( let tool_calls: ChatCompletionChunk.Choice.Delta.ToolCall[] | undefined = undefined; - let reasoning: Reasoning | undefined = undefined; + let reasoning: + | ExtendedOpenAIChatCompletionChunkChoiceDelta["reasoning"] + | undefined = undefined; if (event.type === "message_start") { if (event.message.usage) { @@ -383,17 +388,6 @@ export function anthropicEventToOpenAIEvent( }; } -// TODO: should this live here? 
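// For reference while reviewing the delta typing above: a minimal, illustrative
// sketch (not part of this patch) of how a consumer of the proxy's stream might
// fold the extended `reasoning` delta back into whole strings. The helper name
// `collectStreamedMessage` is an assumption; the `@lib/types` path follows the
// alias introduced in this patch.
import type { ExtendedOpenAIChatCompletionChunk } from "@lib/types";

function collectStreamedMessage(chunks: ExtendedOpenAIChatCompletionChunk[]): {
  content: string;
  reasoning: string;
} {
  let content = "";
  let reasoning = "";
  for (const chunk of chunks) {
    const delta = chunk.choices[0]?.delta;
    if (!delta) continue;
    if (delta.content) content += delta.content;
    // `reasoning` only exists on the extended chunk type; vanilla OpenAI chunks omit it.
    if (delta.reasoning?.content) reasoning += delta.reasoning.content;
  }
  return { content, reasoning };
}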
-declare module "openai/resources/chat/completions" { - namespace ChatCompletionChunk { - namespace Choice { - interface Delta { - reasoning?: Reasoning; - } - } - } -} - export function anthropicCompletionToOpenAICompletion( completion: AnthropicCompletion, isFunction: boolean, diff --git a/packages/proxy/src/proxy.ts b/packages/proxy/src/proxy.ts index a6e72361..0ba0e6be 100644 --- a/packages/proxy/src/proxy.ts +++ b/packages/proxy/src/proxy.ts @@ -101,6 +101,7 @@ import { z } from "zod"; import $RefParser from "@apidevtools/json-schema-ref-parser"; import { getAzureEntraAccessToken } from "./providers/azure"; import { getDatabricksOAuthAccessToken } from "./providers/databricks"; +import { ExtendedOpenAIChatCompletionChunk } from "./types"; type CachedMetadata = { cached_at: Date; @@ -639,7 +640,9 @@ export async function proxyV1({ try { if ("data" in event) { - const result = JSON.parse(event.data) as ChatCompletionChunk; + const result = JSON.parse( + event.data, + ) as ExtendedOpenAIChatCompletionChunk; if (result) { if (result.usage) { spanLogger.log({ diff --git a/packages/proxy/src/types.ts b/packages/proxy/src/types.ts new file mode 100644 index 00000000..d54757f8 --- /dev/null +++ b/packages/proxy/src/types.ts @@ -0,0 +1,41 @@ +import { + chatCompletionMessageParamSchema, + chatCompletionMessageReasoningSchema, +} from "@braintrust/core/typespecs/dist"; +import { z } from "zod"; + +import { + ChatCompletion, + ChatCompletionChunk, + ChatCompletionCreateParams, +} from "openai/resources"; + +export type ExtendedOpenAIChatCompletionMessage = z.infer< + typeof chatCompletionMessageParamSchema +>; + +export type ExtendedOpenAIChatCompletionChoice = ChatCompletion.Choice & { + message: ExtendedOpenAIChatCompletionMessage; +}; + +export type ExtendedOpenAIChatCompletion = ChatCompletion & { + choices: Array; +}; + +export type ExtendedOpenAIChatCompletionChunkChoiceDelta = + ChatCompletionChunk.Choice.Delta & { + reasoning?: z.infer; + }; + +export type ExtendedChatCompletionChunkChoice = ChatCompletionChunk.Choice & { + delta: ExtendedOpenAIChatCompletionChunkChoiceDelta; +}; + +export type ExtendedOpenAIChatCompletionChunk = ChatCompletionChunk & { + choices: Array; +}; + +export type ExtendedOpenAIChatCompletionCreateParams = + ChatCompletionCreateParams & { + messages: Array; + }; diff --git a/packages/proxy/utils/tests.ts b/packages/proxy/utils/tests.ts index 050b2c6e..2ef902bd 100644 --- a/packages/proxy/utils/tests.ts +++ b/packages/proxy/utils/tests.ts @@ -80,11 +80,11 @@ export const getKnownApiSecrets: Parameters< ].filter((secret) => !!secret.secret && endpointTypes.includes(secret.type)); }; -export async function callProxyV1({ +export async function callProxyV1({ body, ...request }: Partial, "body">> & { - body: string | object; + body: Input; }) { const [writableStream, chunksPromise] = createResponseStream(); const { headers, statusCode, setHeader, setStatusCode } = @@ -130,11 +130,11 @@ export async function callProxyV1({ statusCode, responseText, events() { - return chucksToEvents(chunks); + return chucksToEvents(chunks); }, json() { try { - return JSON.parse(responseText) as Data; + return JSON.parse(responseText) as Output; } catch (e) { return null; } From 3084245858f289fe3c77b4e3e7b95a4504de2591 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Thu, 8 May 2025 23:43:42 -0500 Subject: [PATCH 16/51] start to move types from core to new types/openai.ts We have historically placed all types into core/js/typespecs. 
Moving forward, I think the AI Proxy should own these types and we should be more clear to outside users which types are ours vs. openai. This doesn't go far enough to decouple the two (too gnarly), but it's a start for discussion. --- packages/proxy/package.json | 9 +- packages/proxy/src/index.ts | 1 - .../proxy/src/providers/anthropic.test.ts | 24 +-- packages/proxy/src/providers/anthropic.ts | 31 ++-- packages/proxy/src/providers/bedrock.ts | 18 ++- packages/proxy/src/providers/google.ts | 13 +- packages/proxy/src/proxy.ts | 7 +- packages/proxy/src/types.ts | 41 ----- packages/proxy/tsconfig.json | 3 +- packages/proxy/tsup.config.ts | 6 + packages/proxy/types/index.ts | 1 + packages/proxy/types/openai.ts | 59 +++++++ packages/proxy/utils/openai.ts | 150 +----------------- pnpm-lock.yaml | 150 +++++++++++++++++- 14 files changed, 278 insertions(+), 235 deletions(-) delete mode 100644 packages/proxy/src/types.ts create mode 100644 packages/proxy/types/index.ts create mode 100644 packages/proxy/types/openai.ts diff --git a/packages/proxy/package.json b/packages/proxy/package.json index 4589baff..5011195d 100644 --- a/packages/proxy/package.json +++ b/packages/proxy/package.json @@ -36,12 +36,19 @@ "import": "./utils/dist/index.mjs", "module": "./utils/dist/index.mjs", "require": "./utils/dist/index.js" + }, + "./types": { + "types": "./types/dist/index.d.ts", + "import": "./types/dist/index.mjs", + "module": "./types/dist/index.mjs", + "require": "./types/dist/index.js" } }, "files": [ "dist/**/*", "edge/dist/**/*", - "schema/dist/**/*" + "schema/dist/**/*", + "types/dist/**/*" ], "license": "MIT", "publishConfig": { diff --git a/packages/proxy/src/index.ts b/packages/proxy/src/index.ts index 452300f7..d6a42938 100644 --- a/packages/proxy/src/index.ts +++ b/packages/proxy/src/index.ts @@ -1,4 +1,3 @@ export * from "./util"; export * from "./proxy"; export * from "./metrics"; -export * from "./types"; diff --git a/packages/proxy/src/providers/anthropic.test.ts b/packages/proxy/src/providers/anthropic.test.ts index 4cb71ab5..50652b93 100644 --- a/packages/proxy/src/providers/anthropic.test.ts +++ b/packages/proxy/src/providers/anthropic.test.ts @@ -1,15 +1,15 @@ import { describe, it, expect } from "vitest"; import { callProxyV1 } from "../../utils/tests"; import { - ExtendedOpenAIChatCompletion, - ExtendedOpenAIChatCompletionChunk, - ExtendedOpenAIChatCompletionCreateParams, -} from "@lib/types"; + OpenAIChatCompletion, + OpenAIChatCompletionChunk, + OpenAIChatCompletionCreateParams, +} from "@types"; it("should convert OpenAI streaming request to Anthropic and back", async () => { const { events } = await callProxyV1< - ExtendedOpenAIChatCompletionCreateParams, - ExtendedOpenAIChatCompletionChunk + OpenAIChatCompletionCreateParams, + OpenAIChatCompletionChunk >({ body: { model: "claude-2", @@ -48,8 +48,8 @@ it("should convert OpenAI streaming request to Anthropic and back", async () => it("should convert OpenAI non-streaming request to Anthropic and back", async () => { const { json } = await callProxyV1< - ExtendedOpenAIChatCompletionCreateParams, - ExtendedOpenAIChatCompletion + OpenAIChatCompletionCreateParams, + OpenAIChatCompletion >({ body: { model: "claude-2", @@ -89,8 +89,8 @@ it("should convert OpenAI non-streaming request to Anthropic and back", async () it("should accept and return reasoning/thinking params and detail streaming", async () => { const { events } = await callProxyV1< - ExtendedOpenAIChatCompletionCreateParams, - ExtendedOpenAIChatCompletionChunk + 
OpenAIChatCompletionCreateParams, + OpenAIChatCompletionChunk >({ body: { model: "claude-3-7-sonnet-latest", @@ -137,8 +137,8 @@ it("should accept and return reasoning/thinking params and detail streaming", as it("should accept and return reasoning/thinking params and detail non-streaming", async () => { const { json } = await callProxyV1< - ExtendedOpenAIChatCompletionCreateParams, - ExtendedOpenAIChatCompletionChunk + OpenAIChatCompletionCreateParams, + OpenAIChatCompletionChunk >({ body: { model: "claude-3-7-sonnet-20250219", diff --git a/packages/proxy/src/providers/anthropic.ts b/packages/proxy/src/providers/anthropic.ts index 2fd6b0cc..11213599 100644 --- a/packages/proxy/src/providers/anthropic.ts +++ b/packages/proxy/src/providers/anthropic.ts @@ -1,7 +1,5 @@ import { v4 as uuidv4 } from "uuid"; import { - ChatCompletion, - ChatCompletionChunk, ChatCompletionMessageToolCall, ChatCompletionTool, ChatCompletionToolMessageParam, @@ -22,11 +20,14 @@ import { MessageCreateParamsBase, Base64ImageSource, } from "@anthropic-ai/sdk/resources/messages"; -import { ChatCompletionCreateParamsBase } from "openai/resources/chat/completions"; import { - ExtendedOpenAIChatCompletionChunk, - ExtendedOpenAIChatCompletionChunkChoiceDelta, -} from "@lib/types"; + OpenAIChatCompletion, + OpenAIChatCompletionChoice, + OpenAIChatCompletionChunk, + OpenAIChatCompletionChunkChoiceDelta, + OpenAIChatCompletionCreateParams, + OpenAIChatCompletionMessage, +} from "@types"; /* Example events: @@ -206,7 +207,7 @@ export function anthropicEventToOpenAIEvent( usage: Partial, eventU: unknown, isStructuredOutput: boolean, -): { event: ExtendedOpenAIChatCompletionChunk | null; finished: boolean } { +): { event: OpenAIChatCompletionChunk | null; finished: boolean } { const parsedEvent = anthropicStreamEventSchema.safeParse(eventU); if (!parsedEvent.success) { throw new Error( @@ -224,13 +225,13 @@ export function anthropicEventToOpenAIEvent( } let content: string | undefined = undefined; - let tool_calls: ChatCompletionChunk.Choice.Delta.ToolCall[] | undefined = - undefined; - - let reasoning: - | ExtendedOpenAIChatCompletionChunkChoiceDelta["reasoning"] + let tool_calls: + | OpenAIChatCompletionChunkChoiceDelta["tool_calls"] | undefined = undefined; + let reasoning: OpenAIChatCompletionChunkChoiceDelta["reasoning"] | undefined = + undefined; + if (event.type === "message_start") { if (event.message.usage) { updateUsage(event.message.usage, usage); @@ -392,7 +393,7 @@ export function anthropicCompletionToOpenAICompletion( completion: AnthropicCompletion, isFunction: boolean, isStructuredOutput: boolean, -): ChatCompletion { +): OpenAIChatCompletion { const firstText = completion.content.find((c) => c.type === "text"); // TODO(ibolmo): we now support multiple thinking blocks const firstThinking = completion.content.find((c) => c.type === "thinking"); @@ -460,7 +461,7 @@ export function anthropicCompletionToOpenAICompletion( function anthropicFinishReason( stop_reason: string, -): ChatCompletion.Choice["finish_reason"] | null { +): OpenAIChatCompletionChoice["finish_reason"] | null { return stop_reason === "stop_reason" ? 
"stop" : stop_reason === "max_tokens" @@ -602,7 +603,7 @@ export function openAIToolsToAnthropicTools( } export function anthropicToolChoiceToOpenAIToolChoice( - toolChoice: ChatCompletionCreateParamsBase["tool_choice"], + toolChoice: OpenAIChatCompletionCreateParams["tool_choice"], ): MessageCreateParamsBase["tool_choice"] { if (!toolChoice) { return undefined; diff --git a/packages/proxy/src/providers/bedrock.ts b/packages/proxy/src/providers/bedrock.ts index 36c576dc..a33e1b6f 100644 --- a/packages/proxy/src/providers/bedrock.ts +++ b/packages/proxy/src/providers/bedrock.ts @@ -12,8 +12,6 @@ import { StopReason, SystemContentBlock, Message as BedrockMessage, - ImageBlock, - DocumentBlock, ToolConfiguration, InferenceConfiguration, ImageFormat, @@ -29,7 +27,7 @@ import { anthropicCompletionToOpenAICompletion, anthropicEventToOpenAIEvent, } from "./anthropic"; -import { ChatCompletionChunk, CompletionUsage } from "openai/resources"; +import { CompletionUsage } from "openai/resources"; import { getTimestampInSeconds, writeToReadable, @@ -38,12 +36,15 @@ import { } from ".."; import { Message as OaiMessage, + OpenAIChatCompletionChunk, + OpenAIChatCompletion, +} from "@types"; +import { MessageRole, toolsSchema, responseFormatJsonSchemaSchema, } from "@braintrust/core/typespecs"; import { - ChatCompletion, ChatCompletionMessageToolCall, ChatCompletionTool, ChatCompletionToolMessageParam, @@ -752,7 +753,7 @@ function openAIResponse( model: string, response: ConverseCommandOutput, isStructuredOutput: boolean, -): ChatCompletion { +): OpenAIChatCompletion { const firstText = response.output?.message?.content?.find( (c) => c.text !== undefined, ); @@ -833,19 +834,20 @@ function translateInferenceConfig( interface BedrockMessageState { completionId: string; - role: ChatCompletionChunk["choices"][0]["delta"]["role"]; + role: OpenAIChatCompletionChunk["choices"][0]["delta"]["role"]; } +// TODO(ibolmo): should support reasoning for claude models export function bedrockMessageToOpenAIMessage( state: BedrockMessageState, output: ConverseStreamOutput, isStructuredOutput: boolean, ): { - event: ChatCompletionChunk | null; + event: OpenAIChatCompletionChunk | null; finished: boolean; } { return ConverseStreamOutput.visit<{ - event: ChatCompletionChunk | null; + event: OpenAIChatCompletionChunk | null; finished: boolean; }>(output, { messageStart: (value) => { diff --git a/packages/proxy/src/providers/google.ts b/packages/proxy/src/providers/google.ts index 2cad8d83..747080f4 100644 --- a/packages/proxy/src/providers/google.ts +++ b/packages/proxy/src/providers/google.ts @@ -7,8 +7,12 @@ import { InlineDataPart, Part, } from "@google/generative-ai"; -import { ChatCompletion, ChatCompletionChunk } from "openai/resources"; import { getTimestampInSeconds } from ".."; +import { + OpenAIChatCompletion, + OpenAIChatCompletionChoice, + OpenAIChatCompletionChunk, +} from "@types"; import { convertMediaToBase64 } from "./util"; async function makeGoogleMediaBlock(media: string): Promise { @@ -140,7 +144,7 @@ export async function openAIMessagesToGoogleMessages( function translateFinishReason( reason?: FinishReason, -): ChatCompletion.Choice["finish_reason"] | null { +): OpenAIChatCompletionChoice["finish_reason"] | null { // "length" | "stop" | "tool_calls" | "content_filter" | "function_call" switch (reason) { case FinishReason.MAX_TOKENS: @@ -156,12 +160,13 @@ function translateFinishReason( case undefined: return null; } + return null; } export function googleEventToOpenAIChatEvent( model: string, data: 
GenerateContentResponse, -): { event: ChatCompletionChunk | null; finished: boolean } { +): { event: OpenAIChatCompletionChunk | null; finished: boolean } { return { event: data.candidates ? { @@ -216,7 +221,7 @@ export function googleEventToOpenAIChatEvent( export function googleCompletionToOpenAICompletion( model: string, data: GenerateContentResponse, -): ChatCompletion { +): OpenAIChatCompletion { return { id: uuidv4(), choices: (data.candidates || []).map((candidate) => { diff --git a/packages/proxy/src/proxy.ts b/packages/proxy/src/proxy.ts index 0ba0e6be..966e442a 100644 --- a/packages/proxy/src/proxy.ts +++ b/packages/proxy/src/proxy.ts @@ -51,7 +51,6 @@ import { import { Message, MessageRole, - Reasoning, responseFormatSchema, } from "@braintrust/core/typespecs"; import { _urljoin, isArray } from "@braintrust/core"; @@ -101,7 +100,7 @@ import { z } from "zod"; import $RefParser from "@apidevtools/json-schema-ref-parser"; import { getAzureEntraAccessToken } from "./providers/azure"; import { getDatabricksOAuthAccessToken } from "./providers/databricks"; -import { ExtendedOpenAIChatCompletionChunk } from "./types"; +import { OpenAIChatCompletionChunk, OpenAIReasoning } from "@types"; type CachedMetadata = { cached_at: Date; @@ -618,7 +617,7 @@ export async function proxyV1({ const allChunks: Uint8Array[] = []; // These parameters are for the streaming case - let reasoning: Reasoning[] | undefined = undefined; + let reasoning: OpenAIReasoning[] | undefined = undefined; let role: string | undefined = undefined; let content: string | undefined = undefined; let tool_calls: ChatCompletionChunk.Choice.Delta.ToolCall[] | undefined = @@ -642,7 +641,7 @@ export async function proxyV1({ if ("data" in event) { const result = JSON.parse( event.data, - ) as ExtendedOpenAIChatCompletionChunk; + ) as OpenAIChatCompletionChunk; if (result) { if (result.usage) { spanLogger.log({ diff --git a/packages/proxy/src/types.ts b/packages/proxy/src/types.ts deleted file mode 100644 index d54757f8..00000000 --- a/packages/proxy/src/types.ts +++ /dev/null @@ -1,41 +0,0 @@ -import { - chatCompletionMessageParamSchema, - chatCompletionMessageReasoningSchema, -} from "@braintrust/core/typespecs/dist"; -import { z } from "zod"; - -import { - ChatCompletion, - ChatCompletionChunk, - ChatCompletionCreateParams, -} from "openai/resources"; - -export type ExtendedOpenAIChatCompletionMessage = z.infer< - typeof chatCompletionMessageParamSchema ->; - -export type ExtendedOpenAIChatCompletionChoice = ChatCompletion.Choice & { - message: ExtendedOpenAIChatCompletionMessage; -}; - -export type ExtendedOpenAIChatCompletion = ChatCompletion & { - choices: Array; -}; - -export type ExtendedOpenAIChatCompletionChunkChoiceDelta = - ChatCompletionChunk.Choice.Delta & { - reasoning?: z.infer; - }; - -export type ExtendedChatCompletionChunkChoice = ChatCompletionChunk.Choice & { - delta: ExtendedOpenAIChatCompletionChunkChoiceDelta; -}; - -export type ExtendedOpenAIChatCompletionChunk = ChatCompletionChunk & { - choices: Array; -}; - -export type ExtendedOpenAIChatCompletionCreateParams = - ChatCompletionCreateParams & { - messages: Array; - }; diff --git a/packages/proxy/tsconfig.json b/packages/proxy/tsconfig.json index af9c6262..1bb721e8 100644 --- a/packages/proxy/tsconfig.json +++ b/packages/proxy/tsconfig.json @@ -8,7 +8,8 @@ "baseUrl": ".", "paths": { "@lib/*": ["src/*"], - "@schema": ["schema/index"] + "@schema": ["schema/index"], + "@types": ["types/index"] }, "esModuleInterop": true }, diff --git 
a/packages/proxy/tsup.config.ts b/packages/proxy/tsup.config.ts index 79d8338b..9a2e208e 100644 --- a/packages/proxy/tsup.config.ts +++ b/packages/proxy/tsup.config.ts @@ -25,4 +25,10 @@ export default defineConfig([ outDir: "utils/dist", dts: true, }, + { + entry: ["types/index.ts"], + format: ["cjs", "esm"], + outDir: "types/dist", + dts: true, + }, ]); diff --git a/packages/proxy/types/index.ts b/packages/proxy/types/index.ts new file mode 100644 index 00000000..09c41947 --- /dev/null +++ b/packages/proxy/types/index.ts @@ -0,0 +1 @@ +export * from "./openai"; diff --git a/packages/proxy/types/openai.ts b/packages/proxy/types/openai.ts new file mode 100644 index 00000000..70db0146 --- /dev/null +++ b/packages/proxy/types/openai.ts @@ -0,0 +1,59 @@ +// TODO(ibolmo): move from core +import { chatCompletionMessageParamSchema } from "@braintrust/core/typespecs/dist"; +export { chatCompletionMessageParamSchema } from "@braintrust/core/typespecs/dist"; + +import { z } from "zod"; + +import { + ChatCompletion, + ChatCompletionChunk, + ChatCompletionCreateParams, +} from "openai/resources"; + +export type OpenAIChatCompletionMessage = z.infer< + typeof chatCompletionMessageParamSchema +>; + +export type OpenAIChatCompletionChoice = ChatCompletion.Choice & { + message: OpenAIChatCompletionMessage; +}; + +export type OpenAIChatCompletion = ChatCompletion & { + choices: Array; +}; + +export const chatCompletionMessageReasoningSchema = z + .object({ + id: z + .string() + .nullish() + .transform((x) => x ?? undefined), + content: z + .string() + .nullish() + .transform((x) => x ?? undefined), + }) + .describe( + "Note: This is not part of the OpenAI API spec, but we added it for interoperability with multiple reasoning models.", + ); + +export type OpenAIReasoning = z.infer< + typeof chatCompletionMessageReasoningSchema +>; + +export type OpenAIChatCompletionChunkChoiceDelta = + ChatCompletionChunk.Choice.Delta & { + reasoning?: OpenAIReasoning; + }; + +export type OpenAIChatCompletionChunkChoice = ChatCompletionChunk.Choice & { + delta: OpenAIChatCompletionChunkChoiceDelta; +}; + +export type OpenAIChatCompletionChunk = ChatCompletionChunk & { + choices: Array; +}; + +export type OpenAIChatCompletionCreateParams = ChatCompletionCreateParams & { + messages: Array; +}; diff --git a/packages/proxy/utils/openai.ts b/packages/proxy/utils/openai.ts index f5d7f027..5c637c44 100644 --- a/packages/proxy/utils/openai.ts +++ b/packages/proxy/utils/openai.ts @@ -1,146 +1,6 @@ -// This is copied from the Vercel AI SDK commit bfa1182c7f5379d7a3d81878ea00ec84682cb046 -// We just need the OpenAI parser, but not the streaming code. 
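// A small illustrative check of the reasoning schema defined in types/openai.ts
// above (sketch only, not part of this patch): nullish `id`/`content` values are
// normalized to `undefined`, so round-tripped reasoning blocks stay compatible
// with the OpenAI-shaped message types. The `@types` path follows the alias added
// in this patch.
import { chatCompletionMessageReasoningSchema } from "@types";

const normalized = chatCompletionMessageReasoningSchema.parse({
  id: null,
  content: "Counting the letters one at a time...",
});
// normalized.id === undefined; normalized.content keeps the string value.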
- -import { Reasoning } from "@braintrust/core/typespecs"; -import { CompletionUsage, FunctionCall, trimStartOfStreamHelper } from "ai"; - -// https://github.com/openai/openai-node/blob/07b3504e1c40fd929f4aae1651b83afc19e3baf8/src/resources/chat/completions.ts#L28-L40 -interface ChatCompletionChunk { - id: string; - choices: Array; - created: number; - model: string; - object: string; -} - -// https://github.com/openai/openai-node/blob/07b3504e1c40fd929f4aae1651b83afc19e3baf8/src/resources/chat/completions.ts#L43-L49 -// Updated for https://github.com/openai/openai-node/commit/f10c757d831d90407ba47b4659d9cd34b1a35b1d -// Updated to https://github.com/openai/openai-node/commit/84b43280089eacdf18f171723591856811beddce -interface ChatCompletionChunkChoice { - delta: ChoiceDelta; - finish_reason: - | "stop" - | "length" - | "tool_calls" - | "content_filter" - | "function_call" - | null; - index: number; -} - -// https://github.com/openai/openai-node/blob/07b3504e1c40fd929f4aae1651b83afc19e3baf8/src/resources/chat/completions.ts#L123-L139 -// Updated to https://github.com/openai/openai-node/commit/84b43280089eacdf18f171723591856811beddce -interface ChoiceDelta { - /** - * The contents of the chunk message. - */ - content?: string | null; - - /** - * The name and arguments of a function that should be called, as generated by the - * model. - */ - function_call?: FunctionCall; - - /** - * The role of the author of this message. - */ - role?: "system" | "user" | "assistant" | "tool"; - - tool_calls?: Array; - - reasoning?: Reasoning; -} - -// From https://github.com/openai/openai-node/blob/master/src/resources/chat/completions.ts -// Updated to https://github.com/openai/openai-node/commit/84b43280089eacdf18f171723591856811beddce -interface DeltaToolCall { - index: number; - - /** - * The ID of the tool call. - */ - id?: string; - - /** - * The function that the model called. - */ - function?: ToolCallFunction; - - /** - * The type of the tool. Currently, only `function` is supported. - */ - type?: "function"; -} - -// From https://github.com/openai/openai-node/blob/master/src/resources/chat/completions.ts -// Updated to https://github.com/openai/openai-node/commit/84b43280089eacdf18f171723591856811beddce -interface ToolCallFunction { - /** - * The arguments to call the function with, as generated by the model in JSON - * format. Note that the model does not always generate valid JSON, and may - * hallucinate parameters not defined by your function schema. Validate the - * arguments in your code before calling your function. - */ - arguments?: string; - - /** - * The name of the function to call. - */ - name?: string; -} - -/** - * https://github.com/openai/openai-node/blob/3ec43ee790a2eb6a0ccdd5f25faa23251b0f9b8e/src/resources/completions.ts#L28C1-L64C1 - * Completions API. Streamed and non-streamed responses are the same. - */ -interface Completion { - /** - * A unique identifier for the completion. - */ - id: string; - - /** - * The list of completion choices the model generated for the input prompt. - */ - choices: Array; - - /** - * The Unix timestamp of when the completion was created. - */ - created: number; - - /** - * The model used for completion. - */ - model: string; - - /** - * The object type, which is always "text_completion" - */ - object: string; - - /** - * Usage statistics for the completion request. - */ - usage?: CompletionUsage; -} - -interface CompletionChoice { - /** - * The reason the model stopped generating tokens. 
This will be `stop` if the model - * hit a natural stop point or a provided stop sequence, or `length` if the maximum - * number of tokens specified in the request was reached. - */ - finish_reason: "stop" | "length" | "content_filter"; - - index: number; - - // edited: Removed CompletionChoice.logProbs and replaced with any - logprobs: any | null; - - text: string; -} +import { OpenAIChatCompletionChunk } from "@types"; +import { trimStartOfStreamHelper } from "ai"; +import { Completion } from "openai/resources"; /** * Creates a parser function for processing the OpenAI stream data. @@ -222,7 +82,7 @@ const __internal__OpenAIFnMessagesSymbol = Symbol( type AzureChatCompletions = any; type AsyncIterableOpenAIStreamReturnTypes = - | AsyncIterable + | AsyncIterable | AsyncIterable | AsyncIterable; @@ -233,7 +93,7 @@ type OpenAIStreamReturnTypes = export function isChatCompletionChunk( data: unknown, -): data is ChatCompletionChunk { +): data is OpenAIChatCompletionChunk { if (!data || typeof data !== "object") { return false; } diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 2b418f84..fb6bd794 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -22,10 +22,10 @@ importers: version: 2.3.3 vite-tsconfig-paths: specifier: ^4.3.2 - version: 4.3.2(typescript@5.5.4) + version: 4.3.2 vitest: specifier: ^2.1.9 - version: 2.1.9(@types/node@20.10.5) + version: 2.1.9 apis/cloudflare: dependencies: @@ -3717,7 +3717,7 @@ packages: '@vitest/spy': 2.1.9 estree-walker: 3.0.3 magic-string: 0.30.17 - vite: 5.4.10(@types/node@20.10.5) + vite: 5.4.10 dev: true /@vitest/pretty-format@2.1.9: @@ -8336,6 +8336,17 @@ packages: resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} dev: true + /tsconfck@3.1.4: + resolution: {integrity: sha512-kdqWFGVJqe+KGYvlSO9NIaWn9jT1Ny4oKVzAJsKii5eoE9snzTJzL4+MMVOMn+fikWGFmKEylcXL710V/kIPJQ==} + engines: {node: ^18 || >=20} + hasBin: true + peerDependencies: + typescript: ^5.0.0 + peerDependenciesMeta: + typescript: + optional: true + dev: true + /tsconfck@3.1.4(typescript@5.5.4): resolution: {integrity: sha512-kdqWFGVJqe+KGYvlSO9NIaWn9jT1Ny4oKVzAJsKii5eoE9snzTJzL4+MMVOMn+fikWGFmKEylcXL710V/kIPJQ==} engines: {node: ^18 || >=20} @@ -8779,6 +8790,28 @@ packages: engines: {node: '>= 0.8'} dev: false + /vite-node@2.1.9: + resolution: {integrity: sha512-AM9aQ/IPrW/6ENLQg3AGY4K1N2TGZdR5e4gu/MmmR2xR3Ll1+dib+nook92g4TV3PXVyeyxdWwtaCAiUL0hMxA==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + dependencies: + cac: 6.7.14 + debug: 4.4.0 + es-module-lexer: 1.6.0 + pathe: 1.1.2 + vite: 5.4.10 + transitivePeerDependencies: + - '@types/node' + - less + - lightningcss + - sass + - sass-embedded + - stylus + - sugarss + - supports-color + - terser + dev: true + /vite-node@2.1.9(@types/node@20.10.5): resolution: {integrity: sha512-AM9aQ/IPrW/6ENLQg3AGY4K1N2TGZdR5e4gu/MmmR2xR3Ll1+dib+nook92g4TV3PXVyeyxdWwtaCAiUL0hMxA==} engines: {node: ^18.0.0 || >=20.0.0} @@ -8801,6 +8834,22 @@ packages: - terser dev: true + /vite-tsconfig-paths@4.3.2: + resolution: {integrity: sha512-0Vd/a6po6Q+86rPlntHye7F31zA2URZMbH8M3saAZ/xR9QoGN/L21bxEGfXdWmFdNkqPpRdxFT7nmNe12e9/uA==} + peerDependencies: + vite: '*' + peerDependenciesMeta: + vite: + optional: true + dependencies: + debug: 4.3.7 + globrex: 0.1.2 + tsconfck: 3.1.4 + transitivePeerDependencies: + - supports-color + - typescript + dev: true + /vite-tsconfig-paths@4.3.2(typescript@5.5.4): resolution: {integrity: 
sha512-0Vd/a6po6Q+86rPlntHye7F31zA2URZMbH8M3saAZ/xR9QoGN/L21bxEGfXdWmFdNkqPpRdxFT7nmNe12e9/uA==} peerDependencies: @@ -8817,6 +8866,44 @@ packages: - typescript dev: true + /vite@5.4.10: + resolution: {integrity: sha512-1hvaPshuPUtxeQ0hsVH3Mud0ZanOLwVTneA1EgbAM5LhaZEqyPWGRQ7BtaMvUrTDeEaC8pxtj6a6jku3x4z6SQ==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + peerDependencies: + '@types/node': ^18.0.0 || >=20.0.0 + less: '*' + lightningcss: ^1.21.0 + sass: '*' + sass-embedded: '*' + stylus: '*' + sugarss: '*' + terser: ^5.4.0 + peerDependenciesMeta: + '@types/node': + optional: true + less: + optional: true + lightningcss: + optional: true + sass: + optional: true + sass-embedded: + optional: true + stylus: + optional: true + sugarss: + optional: true + terser: + optional: true + dependencies: + esbuild: 0.21.5 + postcss: 8.5.1 + rollup: 4.24.0 + optionalDependencies: + fsevents: 2.3.3 + dev: true + /vite@5.4.10(@types/node@20.10.5): resolution: {integrity: sha512-1hvaPshuPUtxeQ0hsVH3Mud0ZanOLwVTneA1EgbAM5LhaZEqyPWGRQ7BtaMvUrTDeEaC8pxtj6a6jku3x4z6SQ==} engines: {node: ^18.0.0 || >=20.0.0} @@ -8856,6 +8943,63 @@ packages: fsevents: 2.3.3 dev: true + /vitest@2.1.9: + resolution: {integrity: sha512-MSmPM9REYqDGBI8439mA4mWhV5sKmDlBKWIYbA3lRb2PTHACE0mgKwA8yQ2xq9vxDTuk4iPrECBAEW2aoFXY0Q==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + peerDependencies: + '@edge-runtime/vm': '*' + '@types/node': ^18.0.0 || >=20.0.0 + '@vitest/browser': 2.1.9 + '@vitest/ui': 2.1.9 + happy-dom: '*' + jsdom: '*' + peerDependenciesMeta: + '@edge-runtime/vm': + optional: true + '@types/node': + optional: true + '@vitest/browser': + optional: true + '@vitest/ui': + optional: true + happy-dom: + optional: true + jsdom: + optional: true + dependencies: + '@vitest/expect': 2.1.9 + '@vitest/mocker': 2.1.9(vite@5.4.10) + '@vitest/pretty-format': 2.1.9 + '@vitest/runner': 2.1.9 + '@vitest/snapshot': 2.1.9 + '@vitest/spy': 2.1.9 + '@vitest/utils': 2.1.9 + chai: 5.1.2 + debug: 4.4.0 + expect-type: 1.2.0 + magic-string: 0.30.17 + pathe: 1.1.2 + std-env: 3.8.1 + tinybench: 2.9.0 + tinyexec: 0.3.1 + tinypool: 1.0.1 + tinyrainbow: 1.2.0 + vite: 5.4.10 + vite-node: 2.1.9 + why-is-node-running: 2.3.0 + transitivePeerDependencies: + - less + - lightningcss + - msw + - sass + - sass-embedded + - stylus + - sugarss + - supports-color + - terser + dev: true + /vitest@2.1.9(@types/node@20.10.5): resolution: {integrity: sha512-MSmPM9REYqDGBI8439mA4mWhV5sKmDlBKWIYbA3lRb2PTHACE0mgKwA8yQ2xq9vxDTuk4iPrECBAEW2aoFXY0Q==} engines: {node: ^18.0.0 || >=20.0.0} From b311dae0dcc8a254677322468161943746ae614e Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Fri, 9 May 2025 11:04:11 -0500 Subject: [PATCH 17/51] add google test coverage and upgrade us away from deprecated library .. 
add vertex flash --- packages/proxy/package.json | 2 +- packages/proxy/schema/index.ts | 1 + packages/proxy/schema/models.ts | 8 + packages/proxy/src/providers/google.test.ts | 132 ++++++++++++++ packages/proxy/src/providers/google.ts | 116 ++++++++----- packages/proxy/src/providers/openai.test.ts | 182 ++++++++++++++++++++ packages/proxy/utils/tests.ts | 18 +- pnpm-lock.yaml | 126 +++++++++++++- 8 files changed, 530 insertions(+), 55 deletions(-) create mode 100644 packages/proxy/src/providers/google.test.ts create mode 100644 packages/proxy/src/providers/openai.test.ts diff --git a/packages/proxy/package.json b/packages/proxy/package.json index 5011195d..23fea2bf 100644 --- a/packages/proxy/package.json +++ b/packages/proxy/package.json @@ -91,7 +91,7 @@ "@aws-sdk/client-bedrock-runtime": "^3.738.0", "@braintrust/core": "link:../../../sdk/core/js", "@breezystack/lamejs": "^1.2.7", - "@google/generative-ai": "^0.24.0", + "@google/genai": "^0.13.0", "@opentelemetry/api": "^1.7.0", "@opentelemetry/core": "^1.19.0", "@opentelemetry/resources": "^1.19.0", diff --git a/packages/proxy/schema/index.ts b/packages/proxy/schema/index.ts index 5ad28d90..9346b8f6 100644 --- a/packages/proxy/schema/index.ts +++ b/packages/proxy/schema/index.ts @@ -397,6 +397,7 @@ export const AvailableEndpointTypes: { [name: string]: ModelEndpointType[] } = { "grok-vision-beta": ["xAI"], "grok-beta": ["xAI"], "publishers/google/models/gemini-2.5-pro-exp-03-25": ["vertex"], + "publishers/google/models/gemini-2.5-flash-preview-04-17": ["vertex"], "publishers/google/models/gemini-2.0-flash-thinking-exp-01-21": ["vertex"], "publishers/google/models/gemini-2.0-flash": ["vertex"], "publishers/google/models/gemini-2.0-flash-001": ["vertex"], diff --git a/packages/proxy/schema/models.ts b/packages/proxy/schema/models.ts index 93d23efc..ac018ff4 100644 --- a/packages/proxy/schema/models.ts +++ b/packages/proxy/schema/models.ts @@ -2531,6 +2531,14 @@ export const AvailableModels: { [name: string]: ModelSpec } = { // TODO: add anthropic 3.7 converse // VERTEX MODELS + "publishers/google/models/gemini-2.5-flash-preview-04-17": { + format: "google", + flavor: "chat", + displayName: "Gemini 2.5 Flash Preview", + multimodal: true, + input_cost_per_mil_tokens: 0.15, + output_cost_per_mil_tokens: 0.6, + }, "publishers/google/models/gemini-2.0-flash": { format: "google", flavor: "chat", diff --git a/packages/proxy/src/providers/google.test.ts b/packages/proxy/src/providers/google.test.ts new file mode 100644 index 00000000..8ff60858 --- /dev/null +++ b/packages/proxy/src/providers/google.test.ts @@ -0,0 +1,132 @@ +import { describe, it, expect } from "vitest"; +import { callProxyV1 } from "../../utils/tests"; +import { + OpenAIChatCompletion, + OpenAIChatCompletionChunk, + OpenAIChatCompletionCreateParams, +} from "@types"; + +for (const model of [ + "gemini-2.5-flash-preview-04-17", + "publishers/google/models/gemini-2.5-flash-preview-04-17", +]) { + describe(model, () => { + it("should accept and should not return reasoning/thinking params and detail streaming", async () => { + const { events, json } = await callProxyV1< + OpenAIChatCompletionCreateParams, + OpenAIChatCompletionChunk + >({ + body: { + model, + reasoning_effort: "medium", + messages: [ + { + role: "user", + content: "How many rs in 'ferrocarril'", + }, + { + role: "assistant", + content: "There are 4 letter 'r's in the word \"ferrocarril\".", + refusal: null, + reasoning: [ + { + id: "", + content: + "To count the number of 'r's in the word 'ferrocarril', I'll just go 
through the word letter by letter.\n\n'ferrocarril' has the following letters:\nf-e-r-r-o-c-a-r-r-i-l\n\nLooking at each letter:\n- 'f': not an 'r'\n- 'e': not an 'r'\n- 'r': This is an 'r', so that's 1.\n- 'r': This is an 'r', so that's 2.\n- 'o': not an 'r'\n- 'c': not an 'r'\n- 'a': not an 'r'\n- 'r': This is an 'r', so that's 3.\n- 'r': This is an 'r', so that's 4.\n- 'i': not an 'r'\n- 'l': not an 'r'\n\nSo there are 4 'r's in the word 'ferrocarril'.", + }, + ], + }, + { + role: "user", + content: "How many e in what you said?", + }, + ], + stream: true, + }, + }); + + const streamedEvents = events(); + expect(streamedEvents.length).toBeGreaterThan(0); + + const hasContent = streamedEvents.some( + (event) => event.data.choices[0]?.delta?.content !== undefined, + ); + expect(hasContent).toBe(true); + + const hasReasoning = streamedEvents.some( + (event) => + event.data.choices[0]?.delta?.reasoning?.content !== undefined, + ); + expect(hasReasoning).toBe(isVertex(model)); // as of writing, gemini api does not yet provide this detail! + }); + + it("should accept and return reasoning/thinking params and detail non-streaming", async () => { + const { json } = await callProxyV1< + OpenAIChatCompletionCreateParams, + OpenAIChatCompletionChunk + >({ + body: { + model, + reasoning_effort: "medium", + stream: false, + messages: [ + { + role: "user", + content: "How many rs in 'ferrocarril'", + }, + { + role: "assistant", + content: "There are 4 letter 'r's in the word \"ferrocarril\".", + refusal: null, + reasoning: [ + { + id: "", + content: + "To count the number of 'r's in the word 'ferrocarril', I'll just go through the word letter by letter.\n\n'ferrocarril' has the following letters:\nf-e-r-r-o-c-a-r-r-i-l\n\nLooking at each letter:\n- 'f': not an 'r'\n- 'e': not an 'r'\n- 'r': This is an 'r', so that's 1.\n- 'r': This is an 'r', so that's 2.\n- 'o': not an 'r'\n- 'c': not an 'r'\n- 'a': not an 'r'\n- 'r': This is an 'r', so that's 3.\n- 'r': This is an 'r', so that's 4.\n- 'i': not an 'r'\n- 'l': not an 'r'\n\nSo there are 4 'r's in the word 'ferrocarril'.", + }, + ], + }, + { + role: "user", + content: "How many e in what you said?", + }, + ], + }, + }); + + expect(json()).toEqual({ + choices: [ + { + finish_reason: "stop", + index: 0, + logprobs: null, + message: { + content: expect.any(String), + ...(isVertex(model) && { + reasoning: [ + { + id: expect.any(String), + content: expect.any(String), + }, + ], + }), // gemini apis do not include reasoning + refusal: null, + role: "assistant", + }, + }, + ], + created: expect.any(Number), + id: expect.any(String), + model, + object: "chat.completion", + usage: { + completion_tokens: expect.any(Number), + prompt_tokens: expect.any(Number), + total_tokens: expect.any(Number), + }, + }); + }); + }); +} + +const isVertex = (model: string) => model.includes("publishers/"); diff --git a/packages/proxy/src/providers/google.ts b/packages/proxy/src/providers/google.ts index 747080f4..42dc8372 100644 --- a/packages/proxy/src/providers/google.ts +++ b/packages/proxy/src/providers/google.ts @@ -4,9 +4,8 @@ import { Content, FinishReason, GenerateContentResponse, - InlineDataPart, Part, -} from "@google/generative-ai"; +} from "@google/genai"; import { getTimestampInSeconds } from ".."; import { OpenAIChatCompletion, @@ -15,7 +14,7 @@ import { } from "@types"; import { convertMediaToBase64 } from "./util"; -async function makeGoogleMediaBlock(media: string): Promise { +async function makeGoogleMediaBlock(media: string): Promise { const { media_type: 
mimeType, data } = await convertMediaToBase64({ media, allowedMediaTypes: [ @@ -67,6 +66,11 @@ export async function openAIMessagesToGoogleMessages( // First, do a basic mapping const content: Content[] = await Promise.all( messages.map(async (m) => { + const reasoningParts = + "reasoning" in m && m.reasoning + ? m.reasoning.map((r) => ({ text: r.content, thought: true })) + : []; + const contentParts = m.role === "tool" ? [] : await openAIContentToGoogleContent(m.content); const toolCallParts: Part[] = @@ -93,7 +97,12 @@ export async function openAIMessagesToGoogleMessages( ] : []; return { - parts: [...contentParts, ...toolCallParts, ...toolResponseParts], + parts: [ + ...reasoningParts, + ...contentParts, + ...toolCallParts, + ...toolResponseParts, + ], role: m.role === "assistant" ? "model" @@ -106,13 +115,9 @@ export async function openAIMessagesToGoogleMessages( const flattenedContent: Content[] = []; for (let i = 0; i < content.length; i++) { - if ( - flattenedContent.length > 0 && - flattenedContent[flattenedContent.length - 1].role === content[i].role - ) { - flattenedContent[flattenedContent.length - 1].parts = flattenedContent[ - flattenedContent.length - 1 - ].parts.concat(content[i].parts); + const last = flattenedContent[flattenedContent.length - 1]; + if (last && last.role === content[i].role) { + last.parts = [...(last.parts || []), ...(content[i].parts || [])]; } else { flattenedContent.push(content[i]); } @@ -124,9 +129,12 @@ export async function openAIMessagesToGoogleMessages( // 3. Then all user messages' text parts // The EcmaScript spec requires the sort to be stable, so this is safe. const sortedContent: Content[] = flattenedContent.sort((a, b) => { - if (a.parts[0].inlineData && !b.parts[0].inlineData) { + const aFirst = a.parts?.[0]; + const bFirst = b.parts?.[0]; + + if (aFirst?.inlineData && !bFirst?.inlineData) { return -1; - } else if (b.parts[0].inlineData && !a.parts[0].inlineData) { + } else if (bFirst?.inlineData && !aFirst?.inlineData) { return 1; } @@ -172,26 +180,36 @@ export function googleEventToOpenAIChatEvent( ? { id: uuidv4(), choices: (data.candidates || []).map((candidate) => { - const firstText = candidate.content.parts.find( - (p) => p.text !== undefined, + const firstThought = candidate.content?.parts?.find( + (part) => part.text !== undefined && part.thought, ); - const toolCalls = candidate.content.parts - .filter((p) => p.functionCall !== undefined) - .map((p, i) => ({ - id: uuidv4(), - type: "function" as const, - function: { - name: p.functionCall.name, - arguments: JSON.stringify(p.functionCall.args), - }, - index: i, - })); + const firstText = candidate.content?.parts?.find( + (part) => part.text !== undefined && !part.thought, + ); + const toolCalls = + candidate.content?.parts + ?.filter((part) => part.functionCall !== undefined) + .map((part, i) => ({ + id: uuidv4(), + type: "function" as const, + function: { + name: part?.functionCall?.name, + arguments: JSON.stringify(part.functionCall?.args), + }, + index: i, + })) || []; return { index: 0, delta: { role: "assistant", content: firstText?.text ?? "", tool_calls: toolCalls.length > 0 ? toolCalls : undefined, + ...(firstThought && { + reasoning: { + id: uuidv4(), + content: firstThought.text, + }, + }), }, finish_reason: toolCalls.length > 0 @@ -204,9 +222,10 @@ export function googleEventToOpenAIChatEvent( object: "chat.completion.chunk", usage: data.usageMetadata ? 
{ - prompt_tokens: data.usageMetadata.promptTokenCount, - completion_tokens: data.usageMetadata.candidatesTokenCount, - total_tokens: data.usageMetadata.totalTokenCount, + prompt_tokens: data?.usageMetadata?.promptTokenCount || 0, + completion_tokens: + data?.usageMetadata?.candidatesTokenCount || 0, + total_tokens: data?.usageMetadata?.totalTokenCount || 0, } : undefined, } @@ -225,27 +244,34 @@ export function googleCompletionToOpenAICompletion( return { id: uuidv4(), choices: (data.candidates || []).map((candidate) => { - const firstText = candidate.content.parts.find( - (p) => p.text !== undefined, + const firstText = candidate.content?.parts?.find( + (part) => part.text !== undefined && !part.thought, ); - const toolCalls = candidate.content.parts - .filter((p) => p.functionCall !== undefined) - .map((p) => ({ - id: uuidv4(), - type: "function" as const, - function: { - name: p.functionCall.name, - arguments: JSON.stringify(p.functionCall.args), - }, - })); + const firstThought = candidate.content?.parts?.find( + (part) => part.text !== undefined && part.thought, + ); + const toolCalls = + candidate.content?.parts + ?.filter((part) => part.functionCall !== undefined) + .map((part) => ({ + id: uuidv4(), + type: "function" as const, + function: { + name: part?.functionCall?.name || "unknown", + arguments: JSON.stringify(part?.functionCall?.args), + }, + })) || []; return { logprobs: null, - index: candidate.index, + index: candidate.index || 0, message: { role: "assistant", content: firstText?.text ?? "", tool_calls: toolCalls.length > 0 ? toolCalls : undefined, refusal: null, + ...(firstThought && { + reasoning: [{ id: uuidv4(), content: firstThought.text }], + }), }, finish_reason: toolCalls.length > 0 @@ -258,9 +284,9 @@ export function googleCompletionToOpenAICompletion( object: "chat.completion", usage: data.usageMetadata ? { - prompt_tokens: data.usageMetadata.promptTokenCount, - completion_tokens: data.usageMetadata.candidatesTokenCount, - total_tokens: data.usageMetadata.totalTokenCount, + prompt_tokens: data?.usageMetadata?.promptTokenCount || 0, + completion_tokens: data?.usageMetadata?.candidatesTokenCount || 0, + total_tokens: data?.usageMetadata.totalTokenCount || 0, } : undefined, }; diff --git a/packages/proxy/src/providers/openai.test.ts b/packages/proxy/src/providers/openai.test.ts new file mode 100644 index 00000000..dde694b2 --- /dev/null +++ b/packages/proxy/src/providers/openai.test.ts @@ -0,0 +1,182 @@ +import { describe, it, expect } from "vitest"; +import { callProxyV1 } from "../../utils/tests"; +import { + OpenAIChatCompletion, + OpenAIChatCompletionChunk, + OpenAIChatCompletionCreateParams, +} from "@types"; + +it("should deny reasoning_effort for unsupported models streaming", async () => { + const { json } = await callProxyV1< + OpenAIChatCompletionCreateParams, + OpenAIChatCompletionChunk + >({ + body: { + model: "gpt-4o-mini", + reasoning_effort: "high", + messages: [ + { role: "system", content: "You are a helpful assistant." }, + { role: "user", content: "Tell me a short joke about programming." 
}, + ], + stream: true, + max_tokens: 150, + }, + }); + + expect(json()).toEqual({ + error: { + message: "Unrecognized request argument supplied: reasoning_effort", + type: "invalid_request_error", + param: null, + code: null, + }, + }); +}); + +it("should deny reasoning_effort for unsupported models non-streaming", async () => { + const { json } = await callProxyV1< + OpenAIChatCompletionCreateParams, + OpenAIChatCompletion + >({ + body: { + model: "gpt-4o-mini", + reasoning_effort: "high", + messages: [ + { role: "system", content: "You are a helpful assistant." }, + { role: "user", content: "Tell me a short joke about programming." }, + ], + stream: false, + max_tokens: 150, + }, + }); + + expect(json()).toEqual({ + error: { + message: "Unrecognized request argument supplied: reasoning_effort", + type: "invalid_request_error", + param: null, + code: null, + }, + }); +}); + +it("should accept and return reasoning/thinking params and detail streaming", async () => { + const { events } = await callProxyV1< + OpenAIChatCompletionCreateParams, + OpenAIChatCompletionChunk + >({ + body: { + model: "o3-mini-2025-01-31", + reasoning_effort: "medium", + messages: [ + { + role: "user", + content: "How many rs in 'ferrocarril'", + }, + { + role: "assistant", + content: "There are 4 letter 'r's in the word \"ferrocarril\".", + refusal: null, + reasoning: [ + { + id: "", + content: + "To count the number of 'r's in the word 'ferrocarril', I'll just go through the word letter by letter.\n\n'ferrocarril' has the following letters:\nf-e-r-r-o-c-a-r-r-i-l\n\nLooking at each letter:\n- 'f': not an 'r'\n- 'e': not an 'r'\n- 'r': This is an 'r', so that's 1.\n- 'r': This is an 'r', so that's 2.\n- 'o': not an 'r'\n- 'c': not an 'r'\n- 'a': not an 'r'\n- 'r': This is an 'r', so that's 3.\n- 'r': This is an 'r', so that's 4.\n- 'i': not an 'r'\n- 'l': not an 'r'\n\nSo there are 4 'r's in the word 'ferrocarril'.", + }, + ], + }, + { + role: "user", + content: "How many e in what you said?", + }, + ], + stream: true, + }, + }); + + const streamedEvents = events(); + expect(streamedEvents.length).toBeGreaterThan(0); + + const hasContent = streamedEvents.some( + (event) => event.data.choices[0]?.delta?.content !== undefined, + ); + expect(hasContent).toBe(true); + + const hasReasoning = streamedEvents.some( + (event) => event.data.choices[0]?.delta?.reasoning?.content !== undefined, + ); + expect(hasReasoning).toBe(false); // as of writing, openai is not providing this detail! 
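// Since the OpenAI stream above carries no reasoning text, the only reasoning
// signal currently available is the token count surfaced on the non-streaming
// usage payload checked in the next test. A minimal illustrative reader follows;
// the helper name is an assumption, not part of this patch.
import type { OpenAIChatCompletion } from "@types";

function reasoningTokenCount(completion: OpenAIChatCompletion): number {
  // completion_tokens_details.reasoning_tokens is optional on the OpenAI usage type.
  return completion.usage?.completion_tokens_details?.reasoning_tokens ?? 0;
}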
+}); + +it("should accept and return reasoning/thinking params and detail non-streaming", async () => { + const { json } = await callProxyV1< + OpenAIChatCompletionCreateParams, + OpenAIChatCompletionChunk + >({ + body: { + model: "o3-mini-2025-01-31", + reasoning_effort: "medium", + stream: false, + messages: [ + { + role: "user", + content: "How many rs in 'ferrocarril'", + }, + { + role: "assistant", + content: "There are 4 letter 'r's in the word \"ferrocarril\".", + refusal: null, + reasoning: [ + { + id: "", + content: + "To count the number of 'r's in the word 'ferrocarril', I'll just go through the word letter by letter.\n\n'ferrocarril' has the following letters:\nf-e-r-r-o-c-a-r-r-i-l\n\nLooking at each letter:\n- 'f': not an 'r'\n- 'e': not an 'r'\n- 'r': This is an 'r', so that's 1.\n- 'r': This is an 'r', so that's 2.\n- 'o': not an 'r'\n- 'c': not an 'r'\n- 'a': not an 'r'\n- 'r': This is an 'r', so that's 3.\n- 'r': This is an 'r', so that's 4.\n- 'i': not an 'r'\n- 'l': not an 'r'\n\nSo there are 4 'r's in the word 'ferrocarril'.", + }, + ], + }, + { + role: "user", + content: "How many e in what you said?", + }, + ], + }, + }); + + expect(json()).toEqual({ + choices: [ + { + finish_reason: "stop", + index: 0, + message: { + content: expect.any(String), + // as of writing, openai does not provide this detail + // reasoning: [], + annotations: [], + refusal: null, + role: "assistant", + }, + }, + ], + created: expect.any(Number), + id: expect.any(String), + model: "o3-mini-2025-01-31", + object: "chat.completion", + service_tier: expect.any(String), + system_fingerprint: expect.any(String), + usage: { + completion_tokens: expect.any(Number), + prompt_tokens: expect.any(Number), + total_tokens: expect.any(Number), + completion_tokens_details: { + accepted_prediction_tokens: expect.any(Number), + audio_tokens: expect.any(Number), + reasoning_tokens: expect.any(Number), + rejected_prediction_tokens: expect.any(Number), + }, + prompt_tokens_details: { + audio_tokens: expect.any(Number), + cached_tokens: expect.any(Number), + }, + }, + }); +}); diff --git a/packages/proxy/utils/tests.ts b/packages/proxy/utils/tests.ts index 2ef902bd..e48e8105 100644 --- a/packages/proxy/utils/tests.ts +++ b/packages/proxy/utils/tests.ts @@ -3,10 +3,8 @@ import { TextDecoder } from "util"; import { Buffer } from "node:buffer"; import { proxyV1 } from "../src/proxy"; -import { APISecret, AvailableModels, getModelEndpointTypes } from "@schema"; +import { getModelEndpointTypes } from "@schema"; import { createParser, ParsedEvent, ParseEvent } from "eventsource-parser"; -import { mergeDicts } from "@braintrust/core"; -import { assert } from "vitest"; export function createResponseStream(): [ WritableStream, @@ -69,7 +67,7 @@ export const getKnownApiSecrets: Parameters< }, { type: "google" as const, - secret: process.env.VERTEX_AI_API_KEY || "", + secret: process.env.GEMINI_API_KEY || "", name: "google", }, { @@ -77,6 +75,18 @@ export const getKnownApiSecrets: Parameters< secret: process.env.OPENAI_API_KEY || "", name: "openai", }, + { + type: "vertex" as const, + secret: process.env.VERTEX_AI_API_KEY || "", + name: "vertex", + metadata: { + project: process.env.GCP_PROJECT_ID || "", + authType: "access_token", + api_base: "", + supportsStreaming: true, + excludeDefaultModels: false, + }, + }, ].filter((secret) => !!secret.secret && endpointTypes.includes(secret.type)); }; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index fb6bd794..b07bc4da 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -212,9 
+212,9 @@ importers: '@breezystack/lamejs': specifier: ^1.2.7 version: 1.2.7 - '@google/generative-ai': - specifier: ^0.24.0 - version: 0.24.0 + '@google/genai': + specifier: ^0.13.0 + version: 0.13.0 '@opentelemetry/api': specifier: ^1.7.0 version: 1.7.0 @@ -2127,9 +2127,19 @@ packages: engines: {node: '>=14'} dev: true - /@google/generative-ai@0.24.0: - resolution: {integrity: sha512-fnEITCGEB7NdX0BhoYZ/cq/7WPZ1QS5IzJJfC3Tg/OwkvBetMiVJciyaan297OvE4B9Jg1xvo0zIazX/9sGu1Q==} + /@google/genai@0.13.0: + resolution: {integrity: sha512-eaEncWt875H7046T04mOpxpHJUM+jLIljEf+5QctRyOeChylE/nhpwm1bZWTRWoOu/t46R9r+PmgsJFhTpE7tQ==} engines: {node: '>=18.0.0'} + dependencies: + google-auth-library: 9.15.1 + ws: 8.18.0 + zod: 3.22.4 + zod-to-json-schema: 3.23.5(zod@3.22.4) + transitivePeerDependencies: + - bufferutil + - encoding + - supports-color + - utf-8-validate dev: false /@humanwhocodes/config-array@0.11.13: @@ -3900,6 +3910,11 @@ packages: hasBin: true dev: false + /agent-base@7.1.3: + resolution: {integrity: sha512-jRR5wdylq8CkOe6hei19GGZnxM6rBGwFl3Bg0YItGDimvjGtAvdZk4Pu6Cl4u4Igsws4a1fd1Vq3ezrhn4KmFw==} + engines: {node: '>= 14'} + dev: false + /agentkeepalive@4.5.0: resolution: {integrity: sha512-5GG/5IbQQpC9FpkRGsSvZI5QYeSCzlJHdpBQntCsuTOxhKD8lqKhrleg2Yi7yvMIf82Ycmmqln9U8V9qwEiJew==} engines: {node: '>= 8.0.0'} @@ -4268,6 +4283,10 @@ packages: resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} dev: false + /bignumber.js@9.3.0: + resolution: {integrity: sha512-EM7aMFTXbptt/wZdMlBv2t8IViwQL+h6SLHosp8Yf0dqJMTnY6iL32opnAB6kAdL0SZPuvcAzFr31o0c/R3/RA==} + dev: false + /binary-extensions@2.2.0: resolution: {integrity: sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==} engines: {node: '>=8'} @@ -5677,6 +5696,10 @@ packages: type: 2.7.2 dev: false + /extend@3.0.2: + resolution: {integrity: sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==} + dev: false + /fast-deep-equal@3.1.3: resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} dev: true @@ -5879,6 +5902,32 @@ packages: resolution: {integrity: sha512-xckBUXyTIqT97tq2x2AMb+g163b5JFysYk0x4qxNFwbfQkmNZoiRHb6sPzI9/QV33WeuvVYBUIiD4NzNIyqaRQ==} dev: true + /gaxios@6.7.1: + resolution: {integrity: sha512-LDODD4TMYx7XXdpwxAVRAIAuB0bzv0s+ywFonY46k126qzQHT9ygyoa9tncmOiQmmDrik65UYsEkv3lbfqQ3yQ==} + engines: {node: '>=14'} + dependencies: + extend: 3.0.2 + https-proxy-agent: 7.0.6 + is-stream: 2.0.1 + node-fetch: 2.7.0 + uuid: 9.0.1 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + + /gcp-metadata@6.1.1: + resolution: {integrity: sha512-a4tiq7E0/5fTjxPAaH4jpjkSv/uCaU2p5KC6HVGrvl0cDjA8iBZv4vv1gyzlmK0ZUKqwpOyQMKzZQe3lTit77A==} + engines: {node: '>=14'} + dependencies: + gaxios: 6.7.1 + google-logging-utils: 0.0.2 + json-bigint: 1.0.0 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + /generic-pool@3.9.0: resolution: {integrity: sha512-hymDOu5B53XvN4QT9dBmZxPX4CWhBPPLguTZ9MMFeFa/Kg0xWVfylOVNlJji/E7yTZWFd/q9GO5TxDLq156D7g==} engines: {node: '>= 4'} @@ -5984,6 +6033,26 @@ packages: resolution: {integrity: sha512-uHJgbwAMwNFf5mLst7IWLNg14x1CkeqglJb/K3doi4dw6q2IvAAmM/Y81kevy83wP+Sst+nutFTYOGg3d1lsxg==} dev: true + /google-auth-library@9.15.1: + resolution: {integrity: sha512-Jb6Z0+nvECVz+2lzSMt9u98UsoakXxA2HGHMCxh+so3n90XgYWkq5dur19JAJV7ONiJY22yBTyJB1TSkvPq9Ng==} + engines: {node: '>=14'} + 
dependencies: + base64-js: 1.5.1 + ecdsa-sig-formatter: 1.0.11 + gaxios: 6.7.1 + gcp-metadata: 6.1.1 + gtoken: 7.1.0 + jws: 4.0.0 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + + /google-logging-utils@0.0.2: + resolution: {integrity: sha512-NEgUnEcBiP5HrPzufUkBzJOD/Sxsco3rLNo1F1TNf7ieU8ryUzBhqba8r756CjLX7rn3fHl6iLEwPYuqpoKgQQ==} + engines: {node: '>=14'} + dev: false + /gopd@1.0.1: resolution: {integrity: sha512-d65bNlIadxvpb/A2abVdlqKqV563juRnZ1Wtk6s1sIR8uNsXR70xqIzVqxVf1eTqDunwT2MkczEeaezCKTZhwA==} dependencies: @@ -5996,6 +6065,17 @@ packages: resolution: {integrity: sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==} dev: true + /gtoken@7.1.0: + resolution: {integrity: sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==} + engines: {node: '>=14.0.0'} + dependencies: + gaxios: 6.7.1 + jws: 4.0.0 + transitivePeerDependencies: + - encoding + - supports-color + dev: false + /has-bigints@1.0.2: resolution: {integrity: sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ==} dev: true @@ -6049,6 +6129,16 @@ packages: toidentifier: 1.0.1 dev: false + /https-proxy-agent@7.0.6: + resolution: {integrity: sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw==} + engines: {node: '>= 14'} + dependencies: + agent-base: 7.1.3 + debug: 4.4.0 + transitivePeerDependencies: + - supports-color + dev: false + /humanize-ms@1.2.1: resolution: {integrity: sha512-Fl70vYtsAFb/C06PTS9dZBo7ihau+Tu/DNCk/OyHhea07S+aeMWpFFkUaXRa8fI+ScZbEI8dfSxwY7gxZ9SAVQ==} dependencies: @@ -6261,6 +6351,11 @@ packages: call-bind: 1.0.5 dev: true + /is-stream@2.0.1: + resolution: {integrity: sha512-hFoiJiTl63nn+kstHGBtewWSKnQLpyb155KHheA1l39uvtO9nWIop1p3udqPcUd/xbF1VLMO4n7OI6p7RbngDg==} + engines: {node: '>=8'} + dev: false + /is-string@1.0.7: resolution: {integrity: sha512-tE2UXzivje6ofPW7l23cjDOMa09gb7xlAqG6jG5ej6uPV32TlWP3NKPigtaGeHNu9fohccRYvIiZMfOOnOYUtg==} engines: {node: '>= 0.4'} @@ -6377,6 +6472,12 @@ packages: dependencies: argparse: 2.0.1 + /json-bigint@1.0.0: + resolution: {integrity: sha512-SiPv/8VpZuWbvLSMtTDU8hEfrZWg/mH/nV/b4o0CYbSxu1UIQPLdwKOCIyLQX+VIPO5vrLX3i8qtqFyhdPSUSQ==} + dependencies: + bignumber.js: 9.3.0 + dev: false + /json-buffer@3.0.1: resolution: {integrity: sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==} dev: true @@ -6448,6 +6549,14 @@ packages: safe-buffer: 5.2.1 dev: false + /jwa@2.0.1: + resolution: {integrity: sha512-hRF04fqJIP8Abbkq5NKGN0Bbr3JxlQ+qhZufXVr0DvujKy93ZCbXZMHDL4EOtodSbCWxOqR8MS1tXA5hwqCXDg==} + dependencies: + buffer-equal-constant-time: 1.0.1 + ecdsa-sig-formatter: 1.0.11 + safe-buffer: 5.2.1 + dev: false + /jws@3.2.2: resolution: {integrity: sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==} dependencies: @@ -6455,6 +6564,13 @@ packages: safe-buffer: 5.2.1 dev: false + /jws@4.0.0: + resolution: {integrity: sha512-KDncfTmOZoOMTFG4mBlG0qUIOlc03fmzH+ru6RgYVZhPkyiy/92Owlt/8UEN+a4TXR1FQetfIpJE8ApdvdVxTg==} + dependencies: + jwa: 2.0.1 + safe-buffer: 5.2.1 + dev: false + /keyv@4.5.4: resolution: {integrity: sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw==} dependencies: From 0914e7886bc18c9959a7ada85c6787d8f2973455 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Fri, 9 May 2025 13:52:21 -0500 Subject: [PATCH 18/51] return bedrock & openai (responses) 
 reasoning

---
 packages/proxy/package.json | 2 +-
 packages/proxy/src/providers/anthropic.ts | 3 +-
 packages/proxy/src/providers/bedrock.ts | 12 +-
 packages/proxy/types/openai.ts | 2 +-
 packages/proxy/utils/tests.ts | 14 +-
 pnpm-lock.yaml | 964 ++++++++++------------
 6 files changed, 444 insertions(+), 553 deletions(-)

diff --git a/packages/proxy/package.json b/packages/proxy/package.json
index 23fea2bf..e6a1a90a 100644
--- a/packages/proxy/package.json
+++ b/packages/proxy/package.json
@@ -88,7 +88,7 @@ "dependencies": {
     "@anthropic-ai/sdk": "^0.39.0",
     "@apidevtools/json-schema-ref-parser": "^11.9.1",
-    "@aws-sdk/client-bedrock-runtime": "^3.738.0",
+    "@aws-sdk/client-bedrock-runtime": "^3.806.0",
     "@braintrust/core": "link:../../../sdk/core/js",
     "@breezystack/lamejs": "^1.2.7",
     "@google/genai": "^0.13.0",
diff --git a/packages/proxy/src/providers/anthropic.ts b/packages/proxy/src/providers/anthropic.ts
index 11213599..0d701c41 100644
--- a/packages/proxy/src/providers/anthropic.ts
+++ b/packages/proxy/src/providers/anthropic.ts
@@ -26,7 +26,6 @@ import {
   OpenAIChatCompletionChunk,
   OpenAIChatCompletionChunkChoiceDelta,
   OpenAIChatCompletionCreateParams,
-  OpenAIChatCompletionMessage,
 } from "@types";

 /*
@@ -394,8 +393,8 @@ export function anthropicCompletionToOpenAICompletion(
   isFunction: boolean,
   isStructuredOutput: boolean,
 ): OpenAIChatCompletion {
+  // TODO: will we ever have text -> thinking -> text -> tool_use, thus are we dropping tokens?
   const firstText = completion.content.find((c) => c.type === "text");
-  // TODO(ibolmo): we now support multiple thinking blocks
   const firstThinking = completion.content.find((c) => c.type === "thinking");
   const firstTool = completion.content.find((c) => c.type === "tool_use");

diff --git a/packages/proxy/src/providers/bedrock.ts b/packages/proxy/src/providers/bedrock.ts
index a33e1b6f..4519777f 100644
--- a/packages/proxy/src/providers/bedrock.ts
+++ b/packages/proxy/src/providers/bedrock.ts
@@ -34,12 +34,9 @@ import {
   isEmpty,
   ProxyBadRequestError,
 } from "..";
+import { OpenAIChatCompletionChunk, OpenAIChatCompletion } from "@types";
 import {
   Message as OaiMessage,
-  OpenAIChatCompletionChunk,
-  OpenAIChatCompletion,
-} from "@types";
-import {
   MessageRole,
   toolsSchema,
   responseFormatJsonSchemaSchema,
@@ -837,7 +834,6 @@ interface BedrockMessageState {
   role: OpenAIChatCompletionChunk["choices"][0]["delta"]["role"];
 }

-// TODO(ibolmo): should support reasoning for claude models
 export function bedrockMessageToOpenAIMessage(
   state: BedrockMessageState,
   output: ConverseStreamOutput,
@@ -878,6 +874,12 @@ export function bedrockMessageToOpenAIMessage(
             },
           ]
         : undefined,
+      ...(value.delta?.reasoningContent && {
+        reasoning: {
+          id: value.delta.reasoningContent.signature,
+          content: value.delta.reasoningContent.text,
+        },
+      }),
     },
     finish_reason: null,
     index: 0,
diff --git a/packages/proxy/types/openai.ts b/packages/proxy/types/openai.ts
index 70db0146..0c975db6 100644
--- a/packages/proxy/types/openai.ts
+++ b/packages/proxy/types/openai.ts
@@ -1,4 +1,4 @@
-// TODO(ibolmo): move from core
+// TODO: move from core
 import { chatCompletionMessageParamSchema } from "@braintrust/core/typespecs/dist";
 export { chatCompletionMessageParamSchema } from "@braintrust/core/typespecs/dist";

diff --git a/packages/proxy/utils/tests.ts b/packages/proxy/utils/tests.ts
index e48e8105..2006af9f 100644
--- a/packages/proxy/utils/tests.ts
+++ b/packages/proxy/utils/tests.ts
@@ -81,12 +81,24 @@ export const getKnownApiSecrets: Parameters<
       name: "vertex",
       metadata: {
         project:
process.env.GCP_PROJECT_ID || "", - authType: "access_token", + authType: "access_token" as const, api_base: "", supportsStreaming: true, excludeDefaultModels: false, }, }, + { + type: "bedrock" as const, + secret: process.env.AWS_SECRET_ACCESS_KEY || "", + name: "bedrock" as const, + metadata: { + region: process.env.AWS_REGION || "", + access_key: process.env.AWS_ACCESS_KEY_ID || "", + session_token: process.env.AWS_SESSION_TOKEN || "", + supportsStreaming: true, + excludeDefaultModels: false, + }, + }, ].filter((secret) => !!secret.secret && endpointTypes.includes(secret.type)); }; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index b07bc4da..08de8097 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -22,10 +22,10 @@ importers: version: 2.3.3 vite-tsconfig-paths: specifier: ^4.3.2 - version: 4.3.2 + version: 4.3.2(typescript@5.5.4) vitest: specifier: ^2.1.9 - version: 2.1.9 + version: 2.1.9(@types/node@20.10.5) apis/cloudflare: dependencies: @@ -204,8 +204,8 @@ importers: specifier: ^11.9.1 version: 11.9.1 '@aws-sdk/client-bedrock-runtime': - specifier: ^3.738.0 - version: 3.738.0 + specifier: ^3.806.0 + version: 3.806.0 '@braintrust/core': specifier: link:../../../sdk/core/js version: link:../../../sdk/core/js @@ -453,7 +453,7 @@ packages: engines: {node: '>=16.0.0'} dependencies: '@aws-crypto/util': 5.2.0 - '@aws-sdk/types': 3.734.0 + '@aws-sdk/types': 3.804.0 tslib: 2.6.2 dev: false @@ -463,7 +463,7 @@ packages: '@aws-crypto/sha256-js': 5.2.0 '@aws-crypto/supports-web-crypto': 5.2.0 '@aws-crypto/util': 5.2.0 - '@aws-sdk/types': 3.734.0 + '@aws-sdk/types': 3.804.0 '@aws-sdk/util-locate-window': 3.535.0 '@smithy/util-utf8': 2.3.0 tslib: 2.6.2 @@ -474,7 +474,7 @@ packages: engines: {node: '>=16.0.0'} dependencies: '@aws-crypto/util': 5.2.0 - '@aws-sdk/types': 3.734.0 + '@aws-sdk/types': 3.804.0 tslib: 2.6.2 dev: false @@ -487,56 +487,58 @@ packages: /@aws-crypto/util@5.2.0: resolution: {integrity: sha512-4RkU9EsI6ZpBve5fseQlGNUWKMa1RLPQ1dnjnQoe07ldfIzcsGb5hC5W0Dm7u423KWzawlrpbjXBrXCEv9zazQ==} dependencies: - '@aws-sdk/types': 3.734.0 + '@aws-sdk/types': 3.804.0 '@smithy/util-utf8': 2.3.0 tslib: 2.6.2 dev: false - /@aws-sdk/client-bedrock-runtime@3.738.0: - resolution: {integrity: sha512-RBFk+THc/qY8ZgKem6OsqnwnOoQcpms8JC8euKfJjWO8UB5nQvUDZCH4GxjnXKOtpNWnZ6eg2fCaKpKflYyfgQ==} + /@aws-sdk/client-bedrock-runtime@3.806.0: + resolution: {integrity: sha512-i3fxpJN28fJrQblzJnstnKLM3UY6JOniVQ7eXoNKsTkROVdjZFRKwVbZ23dhDAkrVVdFJqu5bhE3pKMYa/IXvg==} engines: {node: '>=18.0.0'} dependencies: '@aws-crypto/sha256-browser': 5.2.0 '@aws-crypto/sha256-js': 5.2.0 - '@aws-sdk/core': 3.734.0 - '@aws-sdk/credential-provider-node': 3.738.0 - '@aws-sdk/middleware-host-header': 3.734.0 - '@aws-sdk/middleware-logger': 3.734.0 - '@aws-sdk/middleware-recursion-detection': 3.734.0 - '@aws-sdk/middleware-user-agent': 3.734.0 - '@aws-sdk/region-config-resolver': 3.734.0 - '@aws-sdk/types': 3.734.0 - '@aws-sdk/util-endpoints': 3.734.0 - '@aws-sdk/util-user-agent-browser': 3.734.0 - '@aws-sdk/util-user-agent-node': 3.734.0 - '@smithy/config-resolver': 4.0.1 - '@smithy/core': 3.1.2 - '@smithy/eventstream-serde-browser': 4.0.1 - '@smithy/eventstream-serde-config-resolver': 4.0.1 - '@smithy/eventstream-serde-node': 4.0.1 - '@smithy/fetch-http-handler': 5.0.1 - '@smithy/hash-node': 4.0.1 - '@smithy/invalid-dependency': 4.0.1 - '@smithy/middleware-content-length': 4.0.1 - '@smithy/middleware-endpoint': 4.0.3 - '@smithy/middleware-retry': 4.0.4 - '@smithy/middleware-serde': 4.0.2 - '@smithy/middleware-stack': 4.0.1 - 
'@smithy/node-config-provider': 4.0.1 - '@smithy/node-http-handler': 4.0.2 - '@smithy/protocol-http': 5.0.1 - '@smithy/smithy-client': 4.1.3 - '@smithy/types': 4.1.0 - '@smithy/url-parser': 4.0.1 + '@aws-sdk/core': 3.806.0 + '@aws-sdk/credential-provider-node': 3.806.0 + '@aws-sdk/eventstream-handler-node': 3.804.0 + '@aws-sdk/middleware-eventstream': 3.804.0 + '@aws-sdk/middleware-host-header': 3.804.0 + '@aws-sdk/middleware-logger': 3.804.0 + '@aws-sdk/middleware-recursion-detection': 3.804.0 + '@aws-sdk/middleware-user-agent': 3.806.0 + '@aws-sdk/region-config-resolver': 3.806.0 + '@aws-sdk/types': 3.804.0 + '@aws-sdk/util-endpoints': 3.806.0 + '@aws-sdk/util-user-agent-browser': 3.804.0 + '@aws-sdk/util-user-agent-node': 3.806.0 + '@smithy/config-resolver': 4.1.1 + '@smithy/core': 3.3.1 + '@smithy/eventstream-serde-browser': 4.0.2 + '@smithy/eventstream-serde-config-resolver': 4.1.0 + '@smithy/eventstream-serde-node': 4.0.2 + '@smithy/fetch-http-handler': 5.0.2 + '@smithy/hash-node': 4.0.2 + '@smithy/invalid-dependency': 4.0.2 + '@smithy/middleware-content-length': 4.0.2 + '@smithy/middleware-endpoint': 4.1.3 + '@smithy/middleware-retry': 4.1.4 + '@smithy/middleware-serde': 4.0.3 + '@smithy/middleware-stack': 4.0.2 + '@smithy/node-config-provider': 4.1.0 + '@smithy/node-http-handler': 4.0.4 + '@smithy/protocol-http': 5.1.0 + '@smithy/smithy-client': 4.2.3 + '@smithy/types': 4.2.0 + '@smithy/url-parser': 4.0.2 '@smithy/util-base64': 4.0.0 '@smithy/util-body-length-browser': 4.0.0 '@smithy/util-body-length-node': 4.0.0 - '@smithy/util-defaults-mode-browser': 4.0.4 - '@smithy/util-defaults-mode-node': 4.0.4 - '@smithy/util-endpoints': 3.0.1 - '@smithy/util-middleware': 4.0.1 - '@smithy/util-retry': 4.0.1 - '@smithy/util-stream': 4.0.2 + '@smithy/util-defaults-mode-browser': 4.0.11 + '@smithy/util-defaults-mode-node': 4.0.11 + '@smithy/util-endpoints': 3.0.3 + '@smithy/util-middleware': 4.0.2 + '@smithy/util-retry': 4.0.3 + '@smithy/util-stream': 4.2.0 '@smithy/util-utf8': 4.0.0 '@types/uuid': 9.0.7 tslib: 2.6.2 @@ -545,308 +547,328 @@ packages: - aws-crt dev: false - /@aws-sdk/client-sso@3.734.0: - resolution: {integrity: sha512-oerepp0mut9VlgTwnG5Ds/lb0C0b2/rQ+hL/rF6q+HGKPfGsCuPvFx1GtwGKCXd49ase88/jVgrhcA9OQbz3kg==} + /@aws-sdk/client-sso@3.806.0: + resolution: {integrity: sha512-X0p/9/u9e6b22rlQqKucdtjdqmjSNB4c/8zDEoD5MvgYAAbMF9HNE0ST2xaA/WsJ7uE0jFfhPY2/00pslL1DqQ==} engines: {node: '>=18.0.0'} dependencies: '@aws-crypto/sha256-browser': 5.2.0 '@aws-crypto/sha256-js': 5.2.0 - '@aws-sdk/core': 3.734.0 - '@aws-sdk/middleware-host-header': 3.734.0 - '@aws-sdk/middleware-logger': 3.734.0 - '@aws-sdk/middleware-recursion-detection': 3.734.0 - '@aws-sdk/middleware-user-agent': 3.734.0 - '@aws-sdk/region-config-resolver': 3.734.0 - '@aws-sdk/types': 3.734.0 - '@aws-sdk/util-endpoints': 3.734.0 - '@aws-sdk/util-user-agent-browser': 3.734.0 - '@aws-sdk/util-user-agent-node': 3.734.0 - '@smithy/config-resolver': 4.0.1 - '@smithy/core': 3.1.2 - '@smithy/fetch-http-handler': 5.0.1 - '@smithy/hash-node': 4.0.1 - '@smithy/invalid-dependency': 4.0.1 - '@smithy/middleware-content-length': 4.0.1 - '@smithy/middleware-endpoint': 4.0.3 - '@smithy/middleware-retry': 4.0.4 - '@smithy/middleware-serde': 4.0.2 - '@smithy/middleware-stack': 4.0.1 - '@smithy/node-config-provider': 4.0.1 - '@smithy/node-http-handler': 4.0.2 - '@smithy/protocol-http': 5.0.1 - '@smithy/smithy-client': 4.1.3 - '@smithy/types': 4.1.0 - '@smithy/url-parser': 4.0.1 + '@aws-sdk/core': 3.806.0 + '@aws-sdk/middleware-host-header': 
3.804.0 + '@aws-sdk/middleware-logger': 3.804.0 + '@aws-sdk/middleware-recursion-detection': 3.804.0 + '@aws-sdk/middleware-user-agent': 3.806.0 + '@aws-sdk/region-config-resolver': 3.806.0 + '@aws-sdk/types': 3.804.0 + '@aws-sdk/util-endpoints': 3.806.0 + '@aws-sdk/util-user-agent-browser': 3.804.0 + '@aws-sdk/util-user-agent-node': 3.806.0 + '@smithy/config-resolver': 4.1.1 + '@smithy/core': 3.3.1 + '@smithy/fetch-http-handler': 5.0.2 + '@smithy/hash-node': 4.0.2 + '@smithy/invalid-dependency': 4.0.2 + '@smithy/middleware-content-length': 4.0.2 + '@smithy/middleware-endpoint': 4.1.3 + '@smithy/middleware-retry': 4.1.4 + '@smithy/middleware-serde': 4.0.3 + '@smithy/middleware-stack': 4.0.2 + '@smithy/node-config-provider': 4.1.0 + '@smithy/node-http-handler': 4.0.4 + '@smithy/protocol-http': 5.1.0 + '@smithy/smithy-client': 4.2.3 + '@smithy/types': 4.2.0 + '@smithy/url-parser': 4.0.2 '@smithy/util-base64': 4.0.0 '@smithy/util-body-length-browser': 4.0.0 '@smithy/util-body-length-node': 4.0.0 - '@smithy/util-defaults-mode-browser': 4.0.4 - '@smithy/util-defaults-mode-node': 4.0.4 - '@smithy/util-endpoints': 3.0.1 - '@smithy/util-middleware': 4.0.1 - '@smithy/util-retry': 4.0.1 + '@smithy/util-defaults-mode-browser': 4.0.11 + '@smithy/util-defaults-mode-node': 4.0.11 + '@smithy/util-endpoints': 3.0.3 + '@smithy/util-middleware': 4.0.2 + '@smithy/util-retry': 4.0.3 '@smithy/util-utf8': 4.0.0 tslib: 2.6.2 transitivePeerDependencies: - aws-crt dev: false - /@aws-sdk/core@3.734.0: - resolution: {integrity: sha512-SxnDqf3vobdm50OLyAKfqZetv6zzwnSqwIwd3jrbopxxHKqNIM/I0xcYjD6Tn+mPig+u7iRKb9q3QnEooFTlmg==} + /@aws-sdk/core@3.806.0: + resolution: {integrity: sha512-HJRINPncdjPK0iL3f6cBpqCMaxVwq2oDbRCzOx04tsLZ0tNgRACBfT3d/zNVRvMt6fnOVKXoN1LAtQaw50pjEA==} engines: {node: '>=18.0.0'} dependencies: - '@aws-sdk/types': 3.734.0 - '@smithy/core': 3.1.2 - '@smithy/node-config-provider': 4.0.1 - '@smithy/property-provider': 4.0.1 - '@smithy/protocol-http': 5.0.1 - '@smithy/signature-v4': 5.0.1 - '@smithy/smithy-client': 4.1.3 - '@smithy/types': 4.1.0 - '@smithy/util-middleware': 4.0.1 + '@aws-sdk/types': 3.804.0 + '@smithy/core': 3.3.1 + '@smithy/node-config-provider': 4.1.0 + '@smithy/property-provider': 4.0.2 + '@smithy/protocol-http': 5.1.0 + '@smithy/signature-v4': 5.1.0 + '@smithy/smithy-client': 4.2.3 + '@smithy/types': 4.2.0 + '@smithy/util-middleware': 4.0.2 fast-xml-parser: 4.4.1 tslib: 2.6.2 dev: false - /@aws-sdk/credential-provider-env@3.734.0: - resolution: {integrity: sha512-gtRkzYTGafnm1FPpiNO8VBmJrYMoxhDlGPYDVcijzx3DlF8dhWnowuSBCxLSi+MJMx5hvwrX2A+e/q0QAeHqmw==} + /@aws-sdk/credential-provider-env@3.806.0: + resolution: {integrity: sha512-nbPwmZn0kt6Q1XI2FaJWP6AhF9tro4cO5HlmZQx8NU+B0H1y9WMo659Q5zLLY46BXgoQVIJEsPSZpcZk27O4aw==} engines: {node: '>=18.0.0'} dependencies: - '@aws-sdk/core': 3.734.0 - '@aws-sdk/types': 3.734.0 - '@smithy/property-provider': 4.0.1 - '@smithy/types': 4.1.0 + '@aws-sdk/core': 3.806.0 + '@aws-sdk/types': 3.804.0 + '@smithy/property-provider': 4.0.2 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@aws-sdk/credential-provider-http@3.734.0: - resolution: {integrity: sha512-JFSL6xhONsq+hKM8xroIPhM5/FOhiQ1cov0lZxhzZWj6Ai3UAjucy3zyIFDr9MgP1KfCYNdvyaUq9/o+HWvEDg==} + /@aws-sdk/credential-provider-http@3.806.0: + resolution: {integrity: sha512-e/gB2iJQQ4ZpecOVpEFhEvjGwuTqNCzhVaVsFYVc49FPfR1seuN7qBGYe1MO7mouGDQFInzJgcNup0DnYUrLiw==} engines: {node: '>=18.0.0'} dependencies: - '@aws-sdk/core': 3.734.0 - '@aws-sdk/types': 3.734.0 - '@smithy/fetch-http-handler': 5.0.1 - 
'@smithy/node-http-handler': 4.0.2 - '@smithy/property-provider': 4.0.1 - '@smithy/protocol-http': 5.0.1 - '@smithy/smithy-client': 4.1.3 - '@smithy/types': 4.1.0 - '@smithy/util-stream': 4.0.2 + '@aws-sdk/core': 3.806.0 + '@aws-sdk/types': 3.804.0 + '@smithy/fetch-http-handler': 5.0.2 + '@smithy/node-http-handler': 4.0.4 + '@smithy/property-provider': 4.0.2 + '@smithy/protocol-http': 5.1.0 + '@smithy/smithy-client': 4.2.3 + '@smithy/types': 4.2.0 + '@smithy/util-stream': 4.2.0 tslib: 2.6.2 dev: false - /@aws-sdk/credential-provider-ini@3.734.0: - resolution: {integrity: sha512-HEyaM/hWI7dNmb4NhdlcDLcgJvrilk8G4DQX6qz0i4pBZGC2l4iffuqP8K6ZQjUfz5/6894PzeFuhTORAMd+cg==} + /@aws-sdk/credential-provider-ini@3.806.0: + resolution: {integrity: sha512-FogfbuYSEZgFxbNy0QcsBZHHe5mSv5HV3+JyB5n0kCyjOISCVCZD7gwxKdXjt8O1hXq5k5SOdQvydGULlB6rew==} engines: {node: '>=18.0.0'} dependencies: - '@aws-sdk/core': 3.734.0 - '@aws-sdk/credential-provider-env': 3.734.0 - '@aws-sdk/credential-provider-http': 3.734.0 - '@aws-sdk/credential-provider-process': 3.734.0 - '@aws-sdk/credential-provider-sso': 3.734.0 - '@aws-sdk/credential-provider-web-identity': 3.734.0 - '@aws-sdk/nested-clients': 3.734.0 - '@aws-sdk/types': 3.734.0 - '@smithy/credential-provider-imds': 4.0.1 - '@smithy/property-provider': 4.0.1 - '@smithy/shared-ini-file-loader': 4.0.1 - '@smithy/types': 4.1.0 + '@aws-sdk/core': 3.806.0 + '@aws-sdk/credential-provider-env': 3.806.0 + '@aws-sdk/credential-provider-http': 3.806.0 + '@aws-sdk/credential-provider-process': 3.806.0 + '@aws-sdk/credential-provider-sso': 3.806.0 + '@aws-sdk/credential-provider-web-identity': 3.806.0 + '@aws-sdk/nested-clients': 3.806.0 + '@aws-sdk/types': 3.804.0 + '@smithy/credential-provider-imds': 4.0.3 + '@smithy/property-provider': 4.0.2 + '@smithy/shared-ini-file-loader': 4.0.2 + '@smithy/types': 4.2.0 tslib: 2.6.2 transitivePeerDependencies: - aws-crt dev: false - /@aws-sdk/credential-provider-node@3.738.0: - resolution: {integrity: sha512-3MuREsazwBxghKb2sQQHvie+uuK4dX4/ckFYiSoffzJQd0YHxaGxf8cr4NOSCQCUesWu8D3Y0SzlnHGboVSkpA==} + /@aws-sdk/credential-provider-node@3.806.0: + resolution: {integrity: sha512-fZX8xP2Kf0k70kDTog/87fh/M+CV0E2yujSw1cUBJhDSwDX3RlUahiJk7TpB/KGw6hEFESMd6+7kq3UzYuw3rg==} engines: {node: '>=18.0.0'} dependencies: - '@aws-sdk/credential-provider-env': 3.734.0 - '@aws-sdk/credential-provider-http': 3.734.0 - '@aws-sdk/credential-provider-ini': 3.734.0 - '@aws-sdk/credential-provider-process': 3.734.0 - '@aws-sdk/credential-provider-sso': 3.734.0 - '@aws-sdk/credential-provider-web-identity': 3.734.0 - '@aws-sdk/types': 3.734.0 - '@smithy/credential-provider-imds': 4.0.1 - '@smithy/property-provider': 4.0.1 - '@smithy/shared-ini-file-loader': 4.0.1 - '@smithy/types': 4.1.0 + '@aws-sdk/credential-provider-env': 3.806.0 + '@aws-sdk/credential-provider-http': 3.806.0 + '@aws-sdk/credential-provider-ini': 3.806.0 + '@aws-sdk/credential-provider-process': 3.806.0 + '@aws-sdk/credential-provider-sso': 3.806.0 + '@aws-sdk/credential-provider-web-identity': 3.806.0 + '@aws-sdk/types': 3.804.0 + '@smithy/credential-provider-imds': 4.0.3 + '@smithy/property-provider': 4.0.2 + '@smithy/shared-ini-file-loader': 4.0.2 + '@smithy/types': 4.2.0 tslib: 2.6.2 transitivePeerDependencies: - aws-crt dev: false - /@aws-sdk/credential-provider-process@3.734.0: - resolution: {integrity: sha512-zvjsUo+bkYn2vjT+EtLWu3eD6me+uun+Hws1IyWej/fKFAqiBPwyeyCgU7qjkiPQSXqk1U9+/HG9IQ6Iiz+eBw==} + /@aws-sdk/credential-provider-process@3.806.0: + resolution: {integrity: 
sha512-8Y8GYEw/1e5IZRDQL02H6nsTDcRWid/afRMeWg+93oLQmbHcTtdm48tjis+7Xwqy+XazhMDmkbUht11QPTDJcQ==} engines: {node: '>=18.0.0'} dependencies: - '@aws-sdk/core': 3.734.0 - '@aws-sdk/types': 3.734.0 - '@smithy/property-provider': 4.0.1 - '@smithy/shared-ini-file-loader': 4.0.1 - '@smithy/types': 4.1.0 + '@aws-sdk/core': 3.806.0 + '@aws-sdk/types': 3.804.0 + '@smithy/property-provider': 4.0.2 + '@smithy/shared-ini-file-loader': 4.0.2 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@aws-sdk/credential-provider-sso@3.734.0: - resolution: {integrity: sha512-cCwwcgUBJOsV/ddyh1OGb4gKYWEaTeTsqaAK19hiNINfYV/DO9r4RMlnWAo84sSBfJuj9shUNsxzyoe6K7R92Q==} + /@aws-sdk/credential-provider-sso@3.806.0: + resolution: {integrity: sha512-hT9OBwCxWMPBydNhXm2gdNNzx5AJNheS9RglwDDvXWzQ9qDuRztjuMBilMSUMb0HF9K4IqQjYzGqczMuktz4qQ==} engines: {node: '>=18.0.0'} dependencies: - '@aws-sdk/client-sso': 3.734.0 - '@aws-sdk/core': 3.734.0 - '@aws-sdk/token-providers': 3.734.0 - '@aws-sdk/types': 3.734.0 - '@smithy/property-provider': 4.0.1 - '@smithy/shared-ini-file-loader': 4.0.1 - '@smithy/types': 4.1.0 + '@aws-sdk/client-sso': 3.806.0 + '@aws-sdk/core': 3.806.0 + '@aws-sdk/token-providers': 3.806.0 + '@aws-sdk/types': 3.804.0 + '@smithy/property-provider': 4.0.2 + '@smithy/shared-ini-file-loader': 4.0.2 + '@smithy/types': 4.2.0 tslib: 2.6.2 transitivePeerDependencies: - aws-crt dev: false - /@aws-sdk/credential-provider-web-identity@3.734.0: - resolution: {integrity: sha512-t4OSOerc+ppK541/Iyn1AS40+2vT/qE+MFMotFkhCgCJbApeRF2ozEdnDN6tGmnl4ybcUuxnp9JWLjwDVlR/4g==} + /@aws-sdk/credential-provider-web-identity@3.806.0: + resolution: {integrity: sha512-XxaSY9Zd3D4ClUGENYMvi52ac5FuJPPAsvRtEfyrSdEpf6QufbMpnexWBZMYRF31h/VutgqtJwosGgNytpxMEg==} engines: {node: '>=18.0.0'} dependencies: - '@aws-sdk/core': 3.734.0 - '@aws-sdk/nested-clients': 3.734.0 - '@aws-sdk/types': 3.734.0 - '@smithy/property-provider': 4.0.1 - '@smithy/types': 4.1.0 + '@aws-sdk/core': 3.806.0 + '@aws-sdk/nested-clients': 3.806.0 + '@aws-sdk/types': 3.804.0 + '@smithy/property-provider': 4.0.2 + '@smithy/types': 4.2.0 tslib: 2.6.2 transitivePeerDependencies: - aws-crt dev: false - /@aws-sdk/middleware-host-header@3.734.0: - resolution: {integrity: sha512-LW7RRgSOHHBzWZnigNsDIzu3AiwtjeI2X66v+Wn1P1u+eXssy1+up4ZY/h+t2sU4LU36UvEf+jrZti9c6vRnFw==} + /@aws-sdk/eventstream-handler-node@3.804.0: + resolution: {integrity: sha512-LZddQVBUCB86tZtLJRhqiDyIqr4hfRxZCcUp1fZSfpBMcf419lgcFRGWMR3J/kCWHQ0G05aor7fSeoeaxskuNQ==} + engines: {node: '>=18.0.0'} + dependencies: + '@aws-sdk/types': 3.804.0 + '@smithy/eventstream-codec': 4.0.2 + '@smithy/types': 4.2.0 + tslib: 2.6.2 + dev: false + + /@aws-sdk/middleware-eventstream@3.804.0: + resolution: {integrity: sha512-3lPxZshOJoKSxIMUq8FCiIre+FZ1g/t+O7DHwOMB6EuzJ8lp5QyUeh1wE5iD/gB8VhWZoj90rGIaWCmT8ccEuA==} engines: {node: '>=18.0.0'} dependencies: - '@aws-sdk/types': 3.734.0 - '@smithy/protocol-http': 5.0.1 - '@smithy/types': 4.1.0 + '@aws-sdk/types': 3.804.0 + '@smithy/protocol-http': 5.1.0 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@aws-sdk/middleware-logger@3.734.0: - resolution: {integrity: sha512-mUMFITpJUW3LcKvFok176eI5zXAUomVtahb9IQBwLzkqFYOrMJvWAvoV4yuxrJ8TlQBG8gyEnkb9SnhZvjg67w==} + /@aws-sdk/middleware-host-header@3.804.0: + resolution: {integrity: sha512-bum1hLVBrn2lJCi423Z2fMUYtsbkGI2s4N+2RI2WSjvbaVyMSv/WcejIrjkqiiMR+2Y7m5exgoKeg4/TODLDPQ==} engines: {node: '>=18.0.0'} dependencies: - '@aws-sdk/types': 3.734.0 - '@smithy/types': 4.1.0 + '@aws-sdk/types': 3.804.0 + '@smithy/protocol-http': 5.1.0 + 
'@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@aws-sdk/middleware-recursion-detection@3.734.0: - resolution: {integrity: sha512-CUat2d9ITsFc2XsmeiRQO96iWpxSKYFjxvj27Hc7vo87YUHRnfMfnc8jw1EpxEwMcvBD7LsRa6vDNky6AjcrFA==} + /@aws-sdk/middleware-logger@3.804.0: + resolution: {integrity: sha512-w/qLwL3iq0KOPQNat0Kb7sKndl9BtceigINwBU7SpkYWX9L/Lem6f8NPEKrC9Tl4wDBht3Yztub4oRTy/horJA==} engines: {node: '>=18.0.0'} dependencies: - '@aws-sdk/types': 3.734.0 - '@smithy/protocol-http': 5.0.1 - '@smithy/types': 4.1.0 + '@aws-sdk/types': 3.804.0 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@aws-sdk/middleware-user-agent@3.734.0: - resolution: {integrity: sha512-MFVzLWRkfFz02GqGPjqSOteLe5kPfElUrXZft1eElnqulqs6RJfVSpOV7mO90gu293tNAeggMWAVSGRPKIYVMg==} + /@aws-sdk/middleware-recursion-detection@3.804.0: + resolution: {integrity: sha512-zqHOrvLRdsUdN/ehYfZ9Tf8svhbiLLz5VaWUz22YndFv6m9qaAcijkpAOlKexsv3nLBMJdSdJ6GUTAeIy3BZzw==} engines: {node: '>=18.0.0'} dependencies: - '@aws-sdk/core': 3.734.0 - '@aws-sdk/types': 3.734.0 - '@aws-sdk/util-endpoints': 3.734.0 - '@smithy/core': 3.1.2 - '@smithy/protocol-http': 5.0.1 - '@smithy/types': 4.1.0 + '@aws-sdk/types': 3.804.0 + '@smithy/protocol-http': 5.1.0 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@aws-sdk/nested-clients@3.734.0: - resolution: {integrity: sha512-iph2XUy8UzIfdJFWo1r0Zng9uWj3253yvW9gljhtu+y/LNmNvSnJxQk1f3D2BC5WmcoPZqTS3UsycT3mLPSzWA==} + /@aws-sdk/middleware-user-agent@3.806.0: + resolution: {integrity: sha512-XoIromVffgXnc+/mjlR2EVzQVIei3bPVtafIZNsHuEmUvIWJXiWsa2eJpt3BUqa0HF9YPknK7ommNEhqRb8ucg==} + engines: {node: '>=18.0.0'} + dependencies: + '@aws-sdk/core': 3.806.0 + '@aws-sdk/types': 3.804.0 + '@aws-sdk/util-endpoints': 3.806.0 + '@smithy/core': 3.3.1 + '@smithy/protocol-http': 5.1.0 + '@smithy/types': 4.2.0 + tslib: 2.6.2 + dev: false + + /@aws-sdk/nested-clients@3.806.0: + resolution: {integrity: sha512-ua2gzpfQ9MF8Rny+tOAivowOWWvqEusez2rdcQK8jdBjA1ANd/0xzToSZjZh0ziN8Kl8jOhNnHbQJ0v6dT6+hg==} engines: {node: '>=18.0.0'} dependencies: '@aws-crypto/sha256-browser': 5.2.0 '@aws-crypto/sha256-js': 5.2.0 - '@aws-sdk/core': 3.734.0 - '@aws-sdk/middleware-host-header': 3.734.0 - '@aws-sdk/middleware-logger': 3.734.0 - '@aws-sdk/middleware-recursion-detection': 3.734.0 - '@aws-sdk/middleware-user-agent': 3.734.0 - '@aws-sdk/region-config-resolver': 3.734.0 - '@aws-sdk/types': 3.734.0 - '@aws-sdk/util-endpoints': 3.734.0 - '@aws-sdk/util-user-agent-browser': 3.734.0 - '@aws-sdk/util-user-agent-node': 3.734.0 - '@smithy/config-resolver': 4.0.1 - '@smithy/core': 3.1.2 - '@smithy/fetch-http-handler': 5.0.1 - '@smithy/hash-node': 4.0.1 - '@smithy/invalid-dependency': 4.0.1 - '@smithy/middleware-content-length': 4.0.1 - '@smithy/middleware-endpoint': 4.0.3 - '@smithy/middleware-retry': 4.0.4 - '@smithy/middleware-serde': 4.0.2 - '@smithy/middleware-stack': 4.0.1 - '@smithy/node-config-provider': 4.0.1 - '@smithy/node-http-handler': 4.0.2 - '@smithy/protocol-http': 5.0.1 - '@smithy/smithy-client': 4.1.3 - '@smithy/types': 4.1.0 - '@smithy/url-parser': 4.0.1 + '@aws-sdk/core': 3.806.0 + '@aws-sdk/middleware-host-header': 3.804.0 + '@aws-sdk/middleware-logger': 3.804.0 + '@aws-sdk/middleware-recursion-detection': 3.804.0 + '@aws-sdk/middleware-user-agent': 3.806.0 + '@aws-sdk/region-config-resolver': 3.806.0 + '@aws-sdk/types': 3.804.0 + '@aws-sdk/util-endpoints': 3.806.0 + '@aws-sdk/util-user-agent-browser': 3.804.0 + '@aws-sdk/util-user-agent-node': 3.806.0 + '@smithy/config-resolver': 4.1.1 + '@smithy/core': 3.3.1 + 
'@smithy/fetch-http-handler': 5.0.2 + '@smithy/hash-node': 4.0.2 + '@smithy/invalid-dependency': 4.0.2 + '@smithy/middleware-content-length': 4.0.2 + '@smithy/middleware-endpoint': 4.1.3 + '@smithy/middleware-retry': 4.1.4 + '@smithy/middleware-serde': 4.0.3 + '@smithy/middleware-stack': 4.0.2 + '@smithy/node-config-provider': 4.1.0 + '@smithy/node-http-handler': 4.0.4 + '@smithy/protocol-http': 5.1.0 + '@smithy/smithy-client': 4.2.3 + '@smithy/types': 4.2.0 + '@smithy/url-parser': 4.0.2 '@smithy/util-base64': 4.0.0 '@smithy/util-body-length-browser': 4.0.0 '@smithy/util-body-length-node': 4.0.0 - '@smithy/util-defaults-mode-browser': 4.0.4 - '@smithy/util-defaults-mode-node': 4.0.4 - '@smithy/util-endpoints': 3.0.1 - '@smithy/util-middleware': 4.0.1 - '@smithy/util-retry': 4.0.1 + '@smithy/util-defaults-mode-browser': 4.0.11 + '@smithy/util-defaults-mode-node': 4.0.11 + '@smithy/util-endpoints': 3.0.3 + '@smithy/util-middleware': 4.0.2 + '@smithy/util-retry': 4.0.3 '@smithy/util-utf8': 4.0.0 tslib: 2.6.2 transitivePeerDependencies: - aws-crt dev: false - /@aws-sdk/region-config-resolver@3.734.0: - resolution: {integrity: sha512-Lvj1kPRC5IuJBr9DyJ9T9/plkh+EfKLy+12s/mykOy1JaKHDpvj+XGy2YO6YgYVOb8JFtaqloid+5COtje4JTQ==} + /@aws-sdk/region-config-resolver@3.806.0: + resolution: {integrity: sha512-cuv5pX55JOlzKC/iLsB5nZ9eUyVgncim3VhhWHZA/KYPh7rLMjOEfZ+xyaE9uLJXGmzOJboFH7+YdTRdIcOgrg==} engines: {node: '>=18.0.0'} dependencies: - '@aws-sdk/types': 3.734.0 - '@smithy/node-config-provider': 4.0.1 - '@smithy/types': 4.1.0 + '@aws-sdk/types': 3.804.0 + '@smithy/node-config-provider': 4.1.0 + '@smithy/types': 4.2.0 '@smithy/util-config-provider': 4.0.0 - '@smithy/util-middleware': 4.0.1 + '@smithy/util-middleware': 4.0.2 tslib: 2.6.2 dev: false - /@aws-sdk/token-providers@3.734.0: - resolution: {integrity: sha512-2U6yWKrjWjZO8Y5SHQxkFvMVWHQWbS0ufqfAIBROqmIZNubOL7jXCiVdEFekz6MZ9LF2tvYGnOW4jX8OKDGfIw==} + /@aws-sdk/token-providers@3.806.0: + resolution: {integrity: sha512-I6SxcsvV7yinJZmPgGullFHS0tsTKa7K3jEc5dmyCz8X+kZPfsWNffZmtmnCvWXPqMXWBvK6hVaxwomx79yeHA==} engines: {node: '>=18.0.0'} dependencies: - '@aws-sdk/nested-clients': 3.734.0 - '@aws-sdk/types': 3.734.0 - '@smithy/property-provider': 4.0.1 - '@smithy/shared-ini-file-loader': 4.0.1 - '@smithy/types': 4.1.0 + '@aws-sdk/nested-clients': 3.806.0 + '@aws-sdk/types': 3.804.0 + '@smithy/property-provider': 4.0.2 + '@smithy/shared-ini-file-loader': 4.0.2 + '@smithy/types': 4.2.0 tslib: 2.6.2 transitivePeerDependencies: - aws-crt dev: false - /@aws-sdk/types@3.734.0: - resolution: {integrity: sha512-o11tSPTT70nAkGV1fN9wm/hAIiLPyWX6SuGf+9JyTp7S/rC2cFWhR26MvA69nplcjNaXVzB0f+QFrLXXjOqCrg==} + /@aws-sdk/types@3.804.0: + resolution: {integrity: sha512-A9qnsy9zQ8G89vrPPlNG9d1d8QcKRGqJKqwyGgS0dclJpwy6d1EWgQLIolKPl6vcFpLoe6avLOLxr+h8ur5wpg==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/types': 4.1.0 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@aws-sdk/util-endpoints@3.734.0: - resolution: {integrity: sha512-w2+/E88NUbqql6uCVAsmMxDQKu7vsKV0KqhlQb0lL+RCq4zy07yXYptVNs13qrnuTfyX7uPXkXrlugvK9R1Ucg==} + /@aws-sdk/util-endpoints@3.806.0: + resolution: {integrity: sha512-3YRRgZ+qFuWDdm5uAbxKsr65UAil4KkrFKua9f4m7Be3v24ETiFOOqhanFUIk9/WOtvzF7oFEiDjYKDGlwV2xg==} engines: {node: '>=18.0.0'} dependencies: - '@aws-sdk/types': 3.734.0 - '@smithy/types': 4.1.0 - '@smithy/util-endpoints': 3.0.1 + '@aws-sdk/types': 3.804.0 + '@smithy/types': 4.2.0 + '@smithy/util-endpoints': 3.0.3 tslib: 2.6.2 dev: false @@ -857,17 +879,17 @@ packages: tslib: 2.6.2 dev: 
false - /@aws-sdk/util-user-agent-browser@3.734.0: - resolution: {integrity: sha512-xQTCus6Q9LwUuALW+S76OL0jcWtMOVu14q+GoLnWPUM7QeUw963oQcLhF7oq0CtaLLKyl4GOUfcwc773Zmwwng==} + /@aws-sdk/util-user-agent-browser@3.804.0: + resolution: {integrity: sha512-KfW6T6nQHHM/vZBBdGn6fMyG/MgX5lq82TDdX4HRQRRuHKLgBWGpKXqqvBwqIaCdXwWHgDrg2VQups6GqOWW2A==} dependencies: - '@aws-sdk/types': 3.734.0 - '@smithy/types': 4.1.0 + '@aws-sdk/types': 3.804.0 + '@smithy/types': 4.2.0 bowser: 2.11.0 tslib: 2.6.2 dev: false - /@aws-sdk/util-user-agent-node@3.734.0: - resolution: {integrity: sha512-c6Iinh+RVQKs6jYUFQ64htOU2HUXFQ3TVx+8Tu3EDF19+9vzWi9UukhIMH9rqyyEXIAkk9XL7avt8y2Uyw2dGA==} + /@aws-sdk/util-user-agent-node@3.806.0: + resolution: {integrity: sha512-Az2e4/gmPZ4BpB7QRj7U76I+fctXhNcxlcgsaHnMhvt+R30nvzM2EhsyBUvsWl8+r9bnLeYt9BpvEZeq2ANDzA==} engines: {node: '>=18.0.0'} peerDependencies: aws-crt: '>=1.0.0' @@ -875,10 +897,10 @@ packages: aws-crt: optional: true dependencies: - '@aws-sdk/middleware-user-agent': 3.734.0 - '@aws-sdk/types': 3.734.0 - '@smithy/node-config-provider': 4.0.1 - '@smithy/types': 4.1.0 + '@aws-sdk/middleware-user-agent': 3.806.0 + '@aws-sdk/types': 3.804.0 + '@smithy/node-config-provider': 4.1.0 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false @@ -2741,121 +2763,121 @@ packages: resolution: {integrity: sha512-UY+FGM/2jjMkzQLn8pxcHGMaVLh9aEitG3zY2CiY7XHdLiz3bZOwa6oDxNqEMv7zZkV+cj5DOdz0cQ1BP5Hjgw==} dev: true - /@smithy/abort-controller@4.0.1: - resolution: {integrity: sha512-fiUIYgIgRjMWznk6iLJz35K2YxSLHzLBA/RC6lBrKfQ8fHbPfvk7Pk9UvpKoHgJjI18MnbPuEju53zcVy6KF1g==} + /@smithy/abort-controller@4.0.2: + resolution: {integrity: sha512-Sl/78VDtgqKxN2+1qduaVE140XF+Xg+TafkncspwM4jFP/LHr76ZHmIY/y3V1M0mMLNk+Je6IGbzxy23RSToMw==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/types': 4.1.0 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@smithy/config-resolver@4.0.1: - resolution: {integrity: sha512-Igfg8lKu3dRVkTSEm98QpZUvKEOa71jDX4vKRcvJVyRc3UgN3j7vFMf0s7xLQhYmKa8kyJGQgUJDOV5V3neVlQ==} + /@smithy/config-resolver@4.1.1: + resolution: {integrity: sha512-FZUtpiDnPZQmuIl4lfbdO+u3foNLmRCKct/2w2nRwgB99Yvaq4SHcfxyzMfxkyBrBmgnF1kdXzhHNXN7ycDvWg==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/node-config-provider': 4.0.1 - '@smithy/types': 4.1.0 + '@smithy/node-config-provider': 4.1.0 + '@smithy/types': 4.2.0 '@smithy/util-config-provider': 4.0.0 - '@smithy/util-middleware': 4.0.1 + '@smithy/util-middleware': 4.0.2 tslib: 2.6.2 dev: false - /@smithy/core@3.1.2: - resolution: {integrity: sha512-htwQXkbdF13uwwDevz9BEzL5ABK+1sJpVQXywwGSH973AVOvisHNfpcB8A8761G6XgHoS2kHPqc9DqHJ2gp+/Q==} + /@smithy/core@3.3.1: + resolution: {integrity: sha512-W7AppgQD3fP1aBmo8wWo0id5zeR2/aYRy067vZsDVaa6v/mdhkg6DxXwEVuSPjZl+ZnvWAQbUMCd5ckw38+tHQ==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/middleware-serde': 4.0.2 - '@smithy/protocol-http': 5.0.1 - '@smithy/types': 4.1.0 + '@smithy/middleware-serde': 4.0.3 + '@smithy/protocol-http': 5.1.0 + '@smithy/types': 4.2.0 '@smithy/util-body-length-browser': 4.0.0 - '@smithy/util-middleware': 4.0.1 - '@smithy/util-stream': 4.0.2 + '@smithy/util-middleware': 4.0.2 + '@smithy/util-stream': 4.2.0 '@smithy/util-utf8': 4.0.0 tslib: 2.6.2 dev: false - /@smithy/credential-provider-imds@4.0.1: - resolution: {integrity: sha512-l/qdInaDq1Zpznpmev/+52QomsJNZ3JkTl5yrTl02V6NBgJOQ4LY0SFw/8zsMwj3tLe8vqiIuwF6nxaEwgf6mg==} + /@smithy/credential-provider-imds@4.0.3: + resolution: {integrity: 
sha512-UdNvGjZnunS9+45gHYtVXDynoWH1X0tYY0pS368k1zUZum6Mm4ivU4Se0WhFJf8jNocD+p94khzTtrx4ha3OOQ==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/node-config-provider': 4.0.1 - '@smithy/property-provider': 4.0.1 - '@smithy/types': 4.1.0 - '@smithy/url-parser': 4.0.1 + '@smithy/node-config-provider': 4.1.0 + '@smithy/property-provider': 4.0.2 + '@smithy/types': 4.2.0 + '@smithy/url-parser': 4.0.2 tslib: 2.6.2 dev: false - /@smithy/eventstream-codec@4.0.1: - resolution: {integrity: sha512-Q2bCAAR6zXNVtJgifsU16ZjKGqdw/DyecKNgIgi7dlqw04fqDu0mnq+JmGphqheypVc64CYq3azSuCpAdFk2+A==} + /@smithy/eventstream-codec@4.0.2: + resolution: {integrity: sha512-p+f2kLSK7ZrXVfskU/f5dzksKTewZk8pJLPvER3aFHPt76C2MxD9vNatSfLzzQSQB4FNO96RK4PSXfhD1TTeMQ==} engines: {node: '>=18.0.0'} dependencies: '@aws-crypto/crc32': 5.2.0 - '@smithy/types': 4.1.0 + '@smithy/types': 4.2.0 '@smithy/util-hex-encoding': 4.0.0 tslib: 2.6.2 dev: false - /@smithy/eventstream-serde-browser@4.0.1: - resolution: {integrity: sha512-HbIybmz5rhNg+zxKiyVAnvdM3vkzjE6ccrJ620iPL8IXcJEntd3hnBl+ktMwIy12Te/kyrSbUb8UCdnUT4QEdA==} + /@smithy/eventstream-serde-browser@4.0.2: + resolution: {integrity: sha512-CepZCDs2xgVUtH7ZZ7oDdZFH8e6Y2zOv8iiX6RhndH69nlojCALSKK+OXwZUgOtUZEUaZ5e1hULVCHYbCn7pug==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/eventstream-serde-universal': 4.0.1 - '@smithy/types': 4.1.0 + '@smithy/eventstream-serde-universal': 4.0.2 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@smithy/eventstream-serde-config-resolver@4.0.1: - resolution: {integrity: sha512-lSipaiq3rmHguHa3QFF4YcCM3VJOrY9oq2sow3qlhFY+nBSTF/nrO82MUQRPrxHQXA58J5G1UnU2WuJfi465BA==} + /@smithy/eventstream-serde-config-resolver@4.1.0: + resolution: {integrity: sha512-1PI+WPZ5TWXrfj3CIoKyUycYynYJgZjuQo8U+sphneOtjsgrttYybdqESFReQrdWJ+LKt6NEdbYzmmfDBmjX2A==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/types': 4.1.0 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@smithy/eventstream-serde-node@4.0.1: - resolution: {integrity: sha512-o4CoOI6oYGYJ4zXo34U8X9szDe3oGjmHgsMGiZM0j4vtNoT+h80TLnkUcrLZR3+E6HIxqW+G+9WHAVfl0GXK0Q==} + /@smithy/eventstream-serde-node@4.0.2: + resolution: {integrity: sha512-C5bJ/C6x9ENPMx2cFOirspnF9ZsBVnBMtP6BdPl/qYSuUawdGQ34Lq0dMcf42QTjUZgWGbUIZnz6+zLxJlb9aw==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/eventstream-serde-universal': 4.0.1 - '@smithy/types': 4.1.0 + '@smithy/eventstream-serde-universal': 4.0.2 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@smithy/eventstream-serde-universal@4.0.1: - resolution: {integrity: sha512-Z94uZp0tGJuxds3iEAZBqGU2QiaBHP4YytLUjwZWx+oUeohCsLyUm33yp4MMBmhkuPqSbQCXq5hDet6JGUgHWA==} + /@smithy/eventstream-serde-universal@4.0.2: + resolution: {integrity: sha512-St8h9JqzvnbB52FtckiHPN4U/cnXcarMniXRXTKn0r4b4XesZOGiAyUdj1aXbqqn1icSqBlzzUsCl6nPB018ng==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/eventstream-codec': 4.0.1 - '@smithy/types': 4.1.0 + '@smithy/eventstream-codec': 4.0.2 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@smithy/fetch-http-handler@5.0.1: - resolution: {integrity: sha512-3aS+fP28urrMW2KTjb6z9iFow6jO8n3MFfineGbndvzGZit3taZhKWtTorf+Gp5RpFDDafeHlhfsGlDCXvUnJA==} + /@smithy/fetch-http-handler@5.0.2: + resolution: {integrity: sha512-+9Dz8sakS9pe7f2cBocpJXdeVjMopUDLgZs1yWeu7h++WqSbjUYv/JAJwKwXw1HV6gq1jyWjxuyn24E2GhoEcQ==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/protocol-http': 5.0.1 - '@smithy/querystring-builder': 4.0.1 - '@smithy/types': 4.1.0 + '@smithy/protocol-http': 5.1.0 + '@smithy/querystring-builder': 4.0.2 + '@smithy/types': 4.2.0 
'@smithy/util-base64': 4.0.0 tslib: 2.6.2 dev: false - /@smithy/hash-node@4.0.1: - resolution: {integrity: sha512-TJ6oZS+3r2Xu4emVse1YPB3Dq3d8RkZDKcPr71Nj/lJsdAP1c7oFzYqEn1IBc915TsgLl2xIJNuxCz+gLbLE0w==} + /@smithy/hash-node@4.0.2: + resolution: {integrity: sha512-VnTpYPnRUE7yVhWozFdlxcYknv9UN7CeOqSrMH+V877v4oqtVYuoqhIhtSjmGPvYrYnAkaM61sLMKHvxL138yg==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/types': 4.1.0 + '@smithy/types': 4.2.0 '@smithy/util-buffer-from': 4.0.0 '@smithy/util-utf8': 4.0.0 tslib: 2.6.2 dev: false - /@smithy/invalid-dependency@4.0.1: - resolution: {integrity: sha512-gdudFPf4QRQ5pzj7HEnu6FhKRi61BfH/Gk5Yf6O0KiSbr1LlVhgjThcvjdu658VE6Nve8vaIWB8/fodmS1rBPQ==} + /@smithy/invalid-dependency@4.0.2: + resolution: {integrity: sha512-GatB4+2DTpgWPday+mnUkoumP54u/MDM/5u44KF9hIu8jF0uafZtQLcdfIKkIcUNuF/fBojpLEHZS/56JqPeXQ==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/types': 4.1.0 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false @@ -2873,169 +2895,169 @@ packages: tslib: 2.6.2 dev: false - /@smithy/middleware-content-length@4.0.1: - resolution: {integrity: sha512-OGXo7w5EkB5pPiac7KNzVtfCW2vKBTZNuCctn++TTSOMpe6RZO/n6WEC1AxJINn3+vWLKW49uad3lo/u0WJ9oQ==} + /@smithy/middleware-content-length@4.0.2: + resolution: {integrity: sha512-hAfEXm1zU+ELvucxqQ7I8SszwQ4znWMbNv6PLMndN83JJN41EPuS93AIyh2N+gJ6x8QFhzSO6b7q2e6oClDI8A==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/protocol-http': 5.0.1 - '@smithy/types': 4.1.0 + '@smithy/protocol-http': 5.1.0 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@smithy/middleware-endpoint@4.0.3: - resolution: {integrity: sha512-YdbmWhQF5kIxZjWqPIgboVfi8i5XgiYMM7GGKFMTvBei4XjNQfNv8sukT50ITvgnWKKKpOtp0C0h7qixLgb77Q==} + /@smithy/middleware-endpoint@4.1.3: + resolution: {integrity: sha512-w7fJjCSqdTVTs1o1O7SRZm+Umf6r/FzkdlO5OH6tboASeUeugnMgQAs7gnc2dXvJVJtEGrmrBgPZFPxq3wWyzw==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/core': 3.1.2 - '@smithy/middleware-serde': 4.0.2 - '@smithy/node-config-provider': 4.0.1 - '@smithy/shared-ini-file-loader': 4.0.1 - '@smithy/types': 4.1.0 - '@smithy/url-parser': 4.0.1 - '@smithy/util-middleware': 4.0.1 + '@smithy/core': 3.3.1 + '@smithy/middleware-serde': 4.0.3 + '@smithy/node-config-provider': 4.1.0 + '@smithy/shared-ini-file-loader': 4.0.2 + '@smithy/types': 4.2.0 + '@smithy/url-parser': 4.0.2 + '@smithy/util-middleware': 4.0.2 tslib: 2.6.2 dev: false - /@smithy/middleware-retry@4.0.4: - resolution: {integrity: sha512-wmxyUBGHaYUqul0wZiset4M39SMtDBOtUr2KpDuftKNN74Do9Y36Go6Eqzj9tL0mIPpr31ulB5UUtxcsCeGXsQ==} + /@smithy/middleware-retry@4.1.4: + resolution: {integrity: sha512-QtWuD7bd7AAEFKvBmLQdOax25bXv4BACLQNWi3ddvpWwUUSAkAku9mzI+28jbjg48qw28lbzJ+YoYbbaXhLUjw==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/node-config-provider': 4.0.1 - '@smithy/protocol-http': 5.0.1 - '@smithy/service-error-classification': 4.0.1 - '@smithy/smithy-client': 4.1.3 - '@smithy/types': 4.1.0 - '@smithy/util-middleware': 4.0.1 - '@smithy/util-retry': 4.0.1 + '@smithy/node-config-provider': 4.1.0 + '@smithy/protocol-http': 5.1.0 + '@smithy/service-error-classification': 4.0.3 + '@smithy/smithy-client': 4.2.3 + '@smithy/types': 4.2.0 + '@smithy/util-middleware': 4.0.2 + '@smithy/util-retry': 4.0.3 tslib: 2.6.2 uuid: 9.0.1 dev: false - /@smithy/middleware-serde@4.0.2: - resolution: {integrity: sha512-Sdr5lOagCn5tt+zKsaW+U2/iwr6bI9p08wOkCp6/eL6iMbgdtc2R5Ety66rf87PeohR0ExI84Txz9GYv5ou3iQ==} + /@smithy/middleware-serde@4.0.3: + resolution: {integrity: 
sha512-rfgDVrgLEVMmMn0BI8O+8OVr6vXzjV7HZj57l0QxslhzbvVfikZbVfBVthjLHqib4BW44QhcIgJpvebHlRaC9A==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/types': 4.1.0 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@smithy/middleware-stack@4.0.1: - resolution: {integrity: sha512-dHwDmrtR/ln8UTHpaIavRSzeIk5+YZTBtLnKwDW3G2t6nAupCiQUvNzNoHBpik63fwUaJPtlnMzXbQrNFWssIA==} + /@smithy/middleware-stack@4.0.2: + resolution: {integrity: sha512-eSPVcuJJGVYrFYu2hEq8g8WWdJav3sdrI4o2c6z/rjnYDd3xH9j9E7deZQCzFn4QvGPouLngH3dQ+QVTxv5bOQ==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/types': 4.1.0 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@smithy/node-config-provider@4.0.1: - resolution: {integrity: sha512-8mRTjvCtVET8+rxvmzRNRR0hH2JjV0DFOmwXPrISmTIJEfnCBugpYYGAsCj8t41qd+RB5gbheSQ/6aKZCQvFLQ==} + /@smithy/node-config-provider@4.1.0: + resolution: {integrity: sha512-gmPsv6L3ZRlBinv+vtSGUwfhTMh4+SgjbgGdX7bqYEs3Ys5RYVQtLuZ/WgZZdxn8QrDSUqLmTWunLM96WyM7UQ==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/property-provider': 4.0.1 - '@smithy/shared-ini-file-loader': 4.0.1 - '@smithy/types': 4.1.0 + '@smithy/property-provider': 4.0.2 + '@smithy/shared-ini-file-loader': 4.0.2 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@smithy/node-http-handler@4.0.2: - resolution: {integrity: sha512-X66H9aah9hisLLSnGuzRYba6vckuFtGE+a5DcHLliI/YlqKrGoxhisD5XbX44KyoeRzoNlGr94eTsMVHFAzPOw==} + /@smithy/node-http-handler@4.0.4: + resolution: {integrity: sha512-/mdqabuAT3o/ihBGjL94PUbTSPSRJ0eeVTdgADzow0wRJ0rN4A27EOrtlK56MYiO1fDvlO3jVTCxQtQmK9dZ1g==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/abort-controller': 4.0.1 - '@smithy/protocol-http': 5.0.1 - '@smithy/querystring-builder': 4.0.1 - '@smithy/types': 4.1.0 + '@smithy/abort-controller': 4.0.2 + '@smithy/protocol-http': 5.1.0 + '@smithy/querystring-builder': 4.0.2 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@smithy/property-provider@4.0.1: - resolution: {integrity: sha512-o+VRiwC2cgmk/WFV0jaETGOtX16VNPp2bSQEzu0whbReqE1BMqsP2ami2Vi3cbGVdKu1kq9gQkDAGKbt0WOHAQ==} + /@smithy/property-provider@4.0.2: + resolution: {integrity: sha512-wNRoQC1uISOuNc2s4hkOYwYllmiyrvVXWMtq+TysNRVQaHm4yoafYQyjN/goYZS+QbYlPIbb/QRjaUZMuzwQ7A==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/types': 4.1.0 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@smithy/protocol-http@5.0.1: - resolution: {integrity: sha512-TE4cpj49jJNB/oHyh/cRVEgNZaoPaxd4vteJNB0yGidOCVR0jCw/hjPVsT8Q8FRmj8Bd3bFZt8Dh7xGCT+xMBQ==} + /@smithy/protocol-http@5.1.0: + resolution: {integrity: sha512-KxAOL1nUNw2JTYrtviRRjEnykIDhxc84qMBzxvu1MUfQfHTuBlCG7PA6EdVwqpJjH7glw7FqQoFxUJSyBQgu7g==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/types': 4.1.0 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@smithy/querystring-builder@4.0.1: - resolution: {integrity: sha512-wU87iWZoCbcqrwszsOewEIuq+SU2mSoBE2CcsLwE0I19m0B2gOJr1MVjxWcDQYOzHbR1xCk7AcOBbGFUYOKvdg==} + /@smithy/querystring-builder@4.0.2: + resolution: {integrity: sha512-NTOs0FwHw1vimmQM4ebh+wFQvOwkEf/kQL6bSM1Lock+Bv4I89B3hGYoUEPkmvYPkDKyp5UdXJYu+PoTQ3T31Q==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/types': 4.1.0 + '@smithy/types': 4.2.0 '@smithy/util-uri-escape': 4.0.0 tslib: 2.6.2 dev: false - /@smithy/querystring-parser@4.0.1: - resolution: {integrity: sha512-Ma2XC7VS9aV77+clSFylVUnPZRindhB7BbmYiNOdr+CHt/kZNJoPP0cd3QxCnCFyPXC4eybmyE98phEHkqZ5Jw==} + /@smithy/querystring-parser@4.0.2: + resolution: {integrity: sha512-v6w8wnmZcVXjfVLjxw8qF7OwESD9wnpjp0Dqry/Pod0/5vcEA3qxCr+BhbOHlxS8O+29eLpT3aagxXGwIoEk7Q==} engines: 
{node: '>=18.0.0'} dependencies: - '@smithy/types': 4.1.0 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@smithy/service-error-classification@4.0.1: - resolution: {integrity: sha512-3JNjBfOWpj/mYfjXJHB4Txc/7E4LVq32bwzE7m28GN79+M1f76XHflUaSUkhOriprPDzev9cX/M+dEB80DNDKA==} + /@smithy/service-error-classification@4.0.3: + resolution: {integrity: sha512-FTbcajmltovWMjj3tksDQdD23b2w6gH+A0DYA1Yz3iSpjDj8fmkwy62UnXcWMy4d5YoMoSyLFHMfkEVEzbiN8Q==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/types': 4.1.0 + '@smithy/types': 4.2.0 dev: false - /@smithy/shared-ini-file-loader@4.0.1: - resolution: {integrity: sha512-hC8F6qTBbuHRI/uqDgqqi6J0R4GtEZcgrZPhFQnMhfJs3MnUTGSnR1NSJCJs5VWlMydu0kJz15M640fJlRsIOw==} + /@smithy/shared-ini-file-loader@4.0.2: + resolution: {integrity: sha512-J9/gTWBGVuFZ01oVA6vdb4DAjf1XbDhK6sLsu3OS9qmLrS6KB5ygpeHiM3miIbj1qgSJ96GYszXFWv6ErJ8QEw==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/types': 4.1.0 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@smithy/signature-v4@5.0.1: - resolution: {integrity: sha512-nCe6fQ+ppm1bQuw5iKoeJ0MJfz2os7Ic3GBjOkLOPtavbD1ONoyE3ygjBfz2ythFWm4YnRm6OxW+8p/m9uCoIA==} + /@smithy/signature-v4@5.1.0: + resolution: {integrity: sha512-4t5WX60sL3zGJF/CtZsUQTs3UrZEDO2P7pEaElrekbLqkWPYkgqNW1oeiNYC6xXifBnT9dVBOnNQRvOE9riU9w==} engines: {node: '>=18.0.0'} dependencies: '@smithy/is-array-buffer': 4.0.0 - '@smithy/protocol-http': 5.0.1 - '@smithy/types': 4.1.0 + '@smithy/protocol-http': 5.1.0 + '@smithy/types': 4.2.0 '@smithy/util-hex-encoding': 4.0.0 - '@smithy/util-middleware': 4.0.1 + '@smithy/util-middleware': 4.0.2 '@smithy/util-uri-escape': 4.0.0 '@smithy/util-utf8': 4.0.0 tslib: 2.6.2 dev: false - /@smithy/smithy-client@4.1.3: - resolution: {integrity: sha512-A2Hz85pu8BJJaYFdX8yb1yocqigyqBzn+OVaVgm+Kwi/DkN8vhN2kbDVEfADo6jXf5hPKquMLGA3UINA64UZ7A==} + /@smithy/smithy-client@4.2.3: + resolution: {integrity: sha512-j/RRx6N007rJQ3qyjN4yuX9B0bxTn9ynDVxYQ43mcs7fluVJXmQGquy0TrWJfOPZcIikpY377GunZ2UK90GHYQ==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/core': 3.1.2 - '@smithy/middleware-endpoint': 4.0.3 - '@smithy/middleware-stack': 4.0.1 - '@smithy/protocol-http': 5.0.1 - '@smithy/types': 4.1.0 - '@smithy/util-stream': 4.0.2 + '@smithy/core': 3.3.1 + '@smithy/middleware-endpoint': 4.1.3 + '@smithy/middleware-stack': 4.0.2 + '@smithy/protocol-http': 5.1.0 + '@smithy/types': 4.2.0 + '@smithy/util-stream': 4.2.0 tslib: 2.6.2 dev: false - /@smithy/types@4.1.0: - resolution: {integrity: sha512-enhjdwp4D7CXmwLtD6zbcDMbo6/T6WtuuKCY49Xxc6OMOmUWlBEBDREsxxgV2LIdeQPW756+f97GzcgAwp3iLw==} + /@smithy/types@4.2.0: + resolution: {integrity: sha512-7eMk09zQKCO+E/ivsjQv+fDlOupcFUCSC/L2YUPgwhvowVGWbPQHjEFcmjt7QQ4ra5lyowS92SV53Zc6XD4+fg==} engines: {node: '>=18.0.0'} dependencies: tslib: 2.6.2 dev: false - /@smithy/url-parser@4.0.1: - resolution: {integrity: sha512-gPXcIEUtw7VlK8f/QcruNXm7q+T5hhvGu9tl63LsJPZ27exB6dtNwvh2HIi0v7JcXJ5emBxB+CJxwaLEdJfA+g==} + /@smithy/url-parser@4.0.2: + resolution: {integrity: sha512-Bm8n3j2ScqnT+kJaClSVCMeiSenK6jVAzZCNewsYWuZtnBehEz4r2qP0riZySZVfzB+03XZHJeqfmJDkeeSLiQ==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/querystring-parser': 4.0.1 - '@smithy/types': 4.1.0 + '@smithy/querystring-parser': 4.0.2 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false @@ -3085,36 +3107,36 @@ packages: tslib: 2.6.2 dev: false - /@smithy/util-defaults-mode-browser@4.0.4: - resolution: {integrity: sha512-Ej1bV5sbrIfH++KnWxjjzFNq9nyP3RIUq2c9Iqq7SmMO/idUR24sqvKH2LUQFTSPy/K7G4sB2m8n7YYlEAfZaw==} + 
/@smithy/util-defaults-mode-browser@4.0.11: + resolution: {integrity: sha512-Z49QNUSKbEj7JVZqaSUZkTkexRciQBbmonJ8AMar4fA0S2kvVpgjeVyGXnZYWTFzkgEwStacjFq4cQKbaQ8AnQ==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/property-provider': 4.0.1 - '@smithy/smithy-client': 4.1.3 - '@smithy/types': 4.1.0 + '@smithy/property-provider': 4.0.2 + '@smithy/smithy-client': 4.2.3 + '@smithy/types': 4.2.0 bowser: 2.11.0 tslib: 2.6.2 dev: false - /@smithy/util-defaults-mode-node@4.0.4: - resolution: {integrity: sha512-HE1I7gxa6yP7ZgXPCFfZSDmVmMtY7SHqzFF55gM/GPegzZKaQWZZ+nYn9C2Cc3JltCMyWe63VPR3tSFDEvuGjw==} + /@smithy/util-defaults-mode-node@4.0.11: + resolution: {integrity: sha512-y9UYcXjz4ry5sDPX40Vy6224Cw2/dch+wET6giaRoeXpyh56DCUVxW+Mgc/gO2uczAKktWd4ZWs2LWcW+PHz3Q==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/config-resolver': 4.0.1 - '@smithy/credential-provider-imds': 4.0.1 - '@smithy/node-config-provider': 4.0.1 - '@smithy/property-provider': 4.0.1 - '@smithy/smithy-client': 4.1.3 - '@smithy/types': 4.1.0 + '@smithy/config-resolver': 4.1.1 + '@smithy/credential-provider-imds': 4.0.3 + '@smithy/node-config-provider': 4.1.0 + '@smithy/property-provider': 4.0.2 + '@smithy/smithy-client': 4.2.3 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@smithy/util-endpoints@3.0.1: - resolution: {integrity: sha512-zVdUENQpdtn9jbpD9SCFK4+aSiavRb9BxEtw9ZGUR1TYo6bBHbIoi7VkrFQ0/RwZlzx0wRBaRmPclj8iAoJCLA==} + /@smithy/util-endpoints@3.0.3: + resolution: {integrity: sha512-284PZFhCMdudqq61/E67zJ3i10gCYrMBjXcMg3h048qI39gTXQCCeNZvtJhL4vrj9yMpJ/y9M+Ek7V0o5tak3w==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/node-config-provider': 4.0.1 - '@smithy/types': 4.1.0 + '@smithy/node-config-provider': 4.1.0 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false @@ -3125,30 +3147,30 @@ packages: tslib: 2.6.2 dev: false - /@smithy/util-middleware@4.0.1: - resolution: {integrity: sha512-HiLAvlcqhbzhuiOa0Lyct5IIlyIz0PQO5dnMlmQ/ubYM46dPInB+3yQGkfxsk6Q24Y0n3/JmcA1v5iEhmOF5mA==} + /@smithy/util-middleware@4.0.2: + resolution: {integrity: sha512-6GDamTGLuBQVAEuQ4yDQ+ti/YINf/MEmIegrEeg7DdB/sld8BX1lqt9RRuIcABOhAGTA50bRbPzErez7SlDtDQ==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/types': 4.1.0 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@smithy/util-retry@4.0.1: - resolution: {integrity: sha512-WmRHqNVwn3kI3rKk1LsKcVgPBG6iLTBGC1iYOV3GQegwJ3E8yjzHytPt26VNzOWr1qu0xE03nK0Ug8S7T7oufw==} + /@smithy/util-retry@4.0.3: + resolution: {integrity: sha512-DPuYjZQDXmKr/sNvy9Spu8R/ESa2e22wXZzSAY6NkjOLj6spbIje/Aq8rT97iUMdDj0qHMRIe+bTxvlU74d9Ng==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/service-error-classification': 4.0.1 - '@smithy/types': 4.1.0 + '@smithy/service-error-classification': 4.0.3 + '@smithy/types': 4.2.0 tslib: 2.6.2 dev: false - /@smithy/util-stream@4.0.2: - resolution: {integrity: sha512-0eZ4G5fRzIoewtHtwaYyl8g2C+osYOT4KClXgfdNEDAgkbe2TYPqcnw4GAWabqkZCax2ihRGPe9LZnsPdIUIHA==} + /@smithy/util-stream@4.2.0: + resolution: {integrity: sha512-Vj1TtwWnuWqdgQI6YTUF5hQ/0jmFiOYsc51CSMgj7QfyO+RF4EnT2HNjoviNlOOmgzgvf3f5yno+EiC4vrnaWQ==} engines: {node: '>=18.0.0'} dependencies: - '@smithy/fetch-http-handler': 5.0.1 - '@smithy/node-http-handler': 4.0.2 - '@smithy/types': 4.1.0 + '@smithy/fetch-http-handler': 5.0.2 + '@smithy/node-http-handler': 4.0.4 + '@smithy/types': 4.2.0 '@smithy/util-base64': 4.0.0 '@smithy/util-buffer-from': 4.0.0 '@smithy/util-hex-encoding': 4.0.0 @@ -3727,7 +3749,7 @@ packages: '@vitest/spy': 2.1.9 estree-walker: 3.0.3 magic-string: 0.30.17 - vite: 5.4.10 + vite: 
5.4.10(@types/node@20.10.5) dev: true /@vitest/pretty-format@2.1.9: @@ -8452,17 +8474,6 @@ packages: resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} dev: true - /tsconfck@3.1.4: - resolution: {integrity: sha512-kdqWFGVJqe+KGYvlSO9NIaWn9jT1Ny4oKVzAJsKii5eoE9snzTJzL4+MMVOMn+fikWGFmKEylcXL710V/kIPJQ==} - engines: {node: ^18 || >=20} - hasBin: true - peerDependencies: - typescript: ^5.0.0 - peerDependenciesMeta: - typescript: - optional: true - dev: true - /tsconfck@3.1.4(typescript@5.5.4): resolution: {integrity: sha512-kdqWFGVJqe+KGYvlSO9NIaWn9jT1Ny4oKVzAJsKii5eoE9snzTJzL4+MMVOMn+fikWGFmKEylcXL710V/kIPJQ==} engines: {node: ^18 || >=20} @@ -8906,28 +8917,6 @@ packages: engines: {node: '>= 0.8'} dev: false - /vite-node@2.1.9: - resolution: {integrity: sha512-AM9aQ/IPrW/6ENLQg3AGY4K1N2TGZdR5e4gu/MmmR2xR3Ll1+dib+nook92g4TV3PXVyeyxdWwtaCAiUL0hMxA==} - engines: {node: ^18.0.0 || >=20.0.0} - hasBin: true - dependencies: - cac: 6.7.14 - debug: 4.4.0 - es-module-lexer: 1.6.0 - pathe: 1.1.2 - vite: 5.4.10 - transitivePeerDependencies: - - '@types/node' - - less - - lightningcss - - sass - - sass-embedded - - stylus - - sugarss - - supports-color - - terser - dev: true - /vite-node@2.1.9(@types/node@20.10.5): resolution: {integrity: sha512-AM9aQ/IPrW/6ENLQg3AGY4K1N2TGZdR5e4gu/MmmR2xR3Ll1+dib+nook92g4TV3PXVyeyxdWwtaCAiUL0hMxA==} engines: {node: ^18.0.0 || >=20.0.0} @@ -8950,22 +8939,6 @@ packages: - terser dev: true - /vite-tsconfig-paths@4.3.2: - resolution: {integrity: sha512-0Vd/a6po6Q+86rPlntHye7F31zA2URZMbH8M3saAZ/xR9QoGN/L21bxEGfXdWmFdNkqPpRdxFT7nmNe12e9/uA==} - peerDependencies: - vite: '*' - peerDependenciesMeta: - vite: - optional: true - dependencies: - debug: 4.3.7 - globrex: 0.1.2 - tsconfck: 3.1.4 - transitivePeerDependencies: - - supports-color - - typescript - dev: true - /vite-tsconfig-paths@4.3.2(typescript@5.5.4): resolution: {integrity: sha512-0Vd/a6po6Q+86rPlntHye7F31zA2URZMbH8M3saAZ/xR9QoGN/L21bxEGfXdWmFdNkqPpRdxFT7nmNe12e9/uA==} peerDependencies: @@ -8982,44 +8955,6 @@ packages: - typescript dev: true - /vite@5.4.10: - resolution: {integrity: sha512-1hvaPshuPUtxeQ0hsVH3Mud0ZanOLwVTneA1EgbAM5LhaZEqyPWGRQ7BtaMvUrTDeEaC8pxtj6a6jku3x4z6SQ==} - engines: {node: ^18.0.0 || >=20.0.0} - hasBin: true - peerDependencies: - '@types/node': ^18.0.0 || >=20.0.0 - less: '*' - lightningcss: ^1.21.0 - sass: '*' - sass-embedded: '*' - stylus: '*' - sugarss: '*' - terser: ^5.4.0 - peerDependenciesMeta: - '@types/node': - optional: true - less: - optional: true - lightningcss: - optional: true - sass: - optional: true - sass-embedded: - optional: true - stylus: - optional: true - sugarss: - optional: true - terser: - optional: true - dependencies: - esbuild: 0.21.5 - postcss: 8.5.1 - rollup: 4.24.0 - optionalDependencies: - fsevents: 2.3.3 - dev: true - /vite@5.4.10(@types/node@20.10.5): resolution: {integrity: sha512-1hvaPshuPUtxeQ0hsVH3Mud0ZanOLwVTneA1EgbAM5LhaZEqyPWGRQ7BtaMvUrTDeEaC8pxtj6a6jku3x4z6SQ==} engines: {node: ^18.0.0 || >=20.0.0} @@ -9059,63 +8994,6 @@ packages: fsevents: 2.3.3 dev: true - /vitest@2.1.9: - resolution: {integrity: sha512-MSmPM9REYqDGBI8439mA4mWhV5sKmDlBKWIYbA3lRb2PTHACE0mgKwA8yQ2xq9vxDTuk4iPrECBAEW2aoFXY0Q==} - engines: {node: ^18.0.0 || >=20.0.0} - hasBin: true - peerDependencies: - '@edge-runtime/vm': '*' - '@types/node': ^18.0.0 || >=20.0.0 - '@vitest/browser': 2.1.9 - '@vitest/ui': 2.1.9 - happy-dom: '*' - jsdom: '*' - peerDependenciesMeta: - '@edge-runtime/vm': - optional: true - '@types/node': - 
optional: true - '@vitest/browser': - optional: true - '@vitest/ui': - optional: true - happy-dom: - optional: true - jsdom: - optional: true - dependencies: - '@vitest/expect': 2.1.9 - '@vitest/mocker': 2.1.9(vite@5.4.10) - '@vitest/pretty-format': 2.1.9 - '@vitest/runner': 2.1.9 - '@vitest/snapshot': 2.1.9 - '@vitest/spy': 2.1.9 - '@vitest/utils': 2.1.9 - chai: 5.1.2 - debug: 4.4.0 - expect-type: 1.2.0 - magic-string: 0.30.17 - pathe: 1.1.2 - std-env: 3.8.1 - tinybench: 2.9.0 - tinyexec: 0.3.1 - tinypool: 1.0.1 - tinyrainbow: 1.2.0 - vite: 5.4.10 - vite-node: 2.1.9 - why-is-node-running: 2.3.0 - transitivePeerDependencies: - - less - - lightningcss - - msw - - sass - - sass-embedded - - stylus - - sugarss - - supports-color - - terser - dev: true - /vitest@2.1.9(@types/node@20.10.5): resolution: {integrity: sha512-MSmPM9REYqDGBI8439mA4mWhV5sKmDlBKWIYbA3lRb2PTHACE0mgKwA8yQ2xq9vxDTuk4iPrECBAEW2aoFXY0Q==} engines: {node: ^18.0.0 || >=20.0.0} From 0e4de2dede64cfe886675d96de518bce43f56b22 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Fri, 9 May 2025 15:10:30 -0500 Subject: [PATCH 19/51] bump version --- packages/proxy/package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/proxy/package.json b/packages/proxy/package.json index e6a1a90a..8493d5ff 100644 --- a/packages/proxy/package.json +++ b/packages/proxy/package.json @@ -1,6 +1,6 @@ { "name": "@braintrust/proxy", - "version": "0.0.7", + "version": "0.0.8", "description": "A proxy server that load balances across AI providers.", "main": "./dist/index.js", "module": "./dist/index.mjs", From c1876db6fb0c97051a12f3e75a8ff92679ec1c74 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Tue, 13 May 2025 08:52:41 -0500 Subject: [PATCH 20/51] use 0.0.86 --- packages/proxy/package.json | 2 +- pnpm-lock.yaml | 12 ++++++++++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/packages/proxy/package.json b/packages/proxy/package.json index 8493d5ff..42af76cf 100644 --- a/packages/proxy/package.json +++ b/packages/proxy/package.json @@ -89,7 +89,7 @@ "@anthropic-ai/sdk": "^0.39.0", "@apidevtools/json-schema-ref-parser": "^11.9.1", "@aws-sdk/client-bedrock-runtime": "^3.806.0", - "@braintrust/core": "link:../../../sdk/core/js", + "@braintrust/core": "^0.0.87", "@breezystack/lamejs": "^1.2.7", "@google/genai": "^0.13.0", "@opentelemetry/api": "^1.7.0", diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 08de8097..f6ecd045 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -207,8 +207,8 @@ importers: specifier: ^3.806.0 version: 3.806.0 '@braintrust/core': - specifier: link:../../../sdk/core/js - version: link:../../../sdk/core/js + specifier: ^0.0.87 + version: 0.0.87 '@breezystack/lamejs': specifier: ^1.2.7 version: 1.2.7 @@ -953,6 +953,14 @@ packages: zod: 3.22.4 dev: false + /@braintrust/core@0.0.87: + resolution: {integrity: sha512-yKo+2McKBcluVUq+5qoYI7QfGvqZ7c0ftTOmnRSToBR2RqGyHkClnnQZ3+M8Guuk9NEKJu92UMTTaR9AonIvvA==} + dependencies: + '@asteasolutions/zod-to-openapi': 6.4.0(zod@3.22.4) + uuid: 9.0.1 + zod: 3.22.4 + dev: false + /@breezystack/lamejs@1.2.7: resolution: {integrity: sha512-6wc7ck65ctA75Hq7FYHTtTvGnYs6msgdxiSUICQ+A01nVOWg6rqouZB8IdyteRlfpYYiFovkf67dIeOgWIUzTA==} dev: false From 67a5bdc87e7161204202f7dbaffcc2bba7020545 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Tue, 13 May 2025 13:33:46 -0500 Subject: [PATCH 21/51] get ci/cd working again --- .github/workflows/js.yaml | 11 +++++++---- packages/proxy/src/providers/anthropic.test.ts | 2 +- 
packages/proxy/src/providers/google.test.ts | 6 +++--- packages/proxy/vitest.config.js | 1 + vitest.config.js | 1 + 5 files changed, 13 insertions(+), 8 deletions(-) diff --git a/.github/workflows/js.yaml b/.github/workflows/js.yaml index dc055adc..3537e56b 100644 --- a/.github/workflows/js.yaml +++ b/.github/workflows/js.yaml @@ -21,7 +21,10 @@ jobs: node-version: ${{ matrix.node-version }} registry-url: "https://registry.npmjs.org" - uses: pnpm/action-setup@v4 - - run: | - pnpm install - pnpm run test - pnpm run build + - run: pnpm install + - run: pnpm run test + env: + OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} + ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} + GEMINI_API_KEY: ${{ secrets.GEMINI_API_KEY }} + - run: pnpm run build diff --git a/packages/proxy/src/providers/anthropic.test.ts b/packages/proxy/src/providers/anthropic.test.ts index 50652b93..986f16b3 100644 --- a/packages/proxy/src/providers/anthropic.test.ts +++ b/packages/proxy/src/providers/anthropic.test.ts @@ -52,7 +52,7 @@ it("should convert OpenAI non-streaming request to Anthropic and back", async () OpenAIChatCompletion >({ body: { - model: "claude-2", + model: "claude-2.1", messages: [ { role: "system", content: "You are a helpful assistant." }, { role: "user", content: "Tell me a short joke about programming." }, diff --git a/packages/proxy/src/providers/google.test.ts b/packages/proxy/src/providers/google.test.ts index 8ff60858..58bd5176 100644 --- a/packages/proxy/src/providers/google.test.ts +++ b/packages/proxy/src/providers/google.test.ts @@ -1,18 +1,18 @@ import { describe, it, expect } from "vitest"; import { callProxyV1 } from "../../utils/tests"; import { - OpenAIChatCompletion, OpenAIChatCompletionChunk, OpenAIChatCompletionCreateParams, } from "@types"; for (const model of [ "gemini-2.5-flash-preview-04-17", - "publishers/google/models/gemini-2.5-flash-preview-04-17", + // TODO: re-enable when we have a working CI/CD solution + // "publishers/google/models/gemini-2.5-flash-preview-04-17", ]) { describe(model, () => { it("should accept and should not return reasoning/thinking params and detail streaming", async () => { - const { events, json } = await callProxyV1< + const { events } = await callProxyV1< OpenAIChatCompletionCreateParams, OpenAIChatCompletionChunk >({ diff --git a/packages/proxy/vitest.config.js b/packages/proxy/vitest.config.js index fed4d6c0..f8f8ff3c 100644 --- a/packages/proxy/vitest.config.js +++ b/packages/proxy/vitest.config.js @@ -4,6 +4,7 @@ const config = { plugins: [tsconfigPaths()], test: { exclude: ["**/node_modules/**"], + testTimeout: 15_000, }, }; export default config; diff --git a/vitest.config.js b/vitest.config.js index fed4d6c0..f8f8ff3c 100644 --- a/vitest.config.js +++ b/vitest.config.js @@ -4,6 +4,7 @@ const config = { plugins: [tsconfigPaths()], test: { exclude: ["**/node_modules/**"], + testTimeout: 15_000, }, }; export default config; From 4a9eb623b4a24067e3269db970fd802c29096e6c Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Tue, 13 May 2025 13:38:13 -0500 Subject: [PATCH 22/51] icnrease timeout --- packages/proxy/vitest.config.js | 2 +- vitest.config.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/packages/proxy/vitest.config.js b/packages/proxy/vitest.config.js index f8f8ff3c..d38e8f1c 100644 --- a/packages/proxy/vitest.config.js +++ b/packages/proxy/vitest.config.js @@ -4,7 +4,7 @@ const config = { plugins: [tsconfigPaths()], test: { exclude: ["**/node_modules/**"], - testTimeout: 15_000, + testTimeout: 30_000, }, }; 
export default config; diff --git a/vitest.config.js b/vitest.config.js index f8f8ff3c..d38e8f1c 100644 --- a/vitest.config.js +++ b/vitest.config.js @@ -4,7 +4,7 @@ const config = { plugins: [tsconfigPaths()], test: { exclude: ["**/node_modules/**"], - testTimeout: 15_000, + testTimeout: 30_000, }, }; export default config; From 3beba3b2f2c94c4587786df03e3a77a08c8d9d5a Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Tue, 13 May 2025 15:53:35 -0500 Subject: [PATCH 23/51] add test coverage and fix mapping methods --- packages/proxy/schema/index.test.ts | 148 ++++++++++++++++++++++++++++ packages/proxy/schema/index.ts | 29 ++++-- 2 files changed, 170 insertions(+), 7 deletions(-) create mode 100644 packages/proxy/schema/index.test.ts diff --git a/packages/proxy/schema/index.test.ts b/packages/proxy/schema/index.test.ts new file mode 100644 index 00000000..c0cc5d32 --- /dev/null +++ b/packages/proxy/schema/index.test.ts @@ -0,0 +1,148 @@ +import { it, expect } from "vitest"; +import { ModelFormat, translateParams } from "./index"; +import { ChatCompletionCreateParams } from "openai/resources"; +import { MessageCreateParamsBase } from "@anthropic-ai/sdk/resources/messages"; +import { GenerateContentParameters } from "@google/genai"; + +const examples: Record< + string, + { openai: ChatCompletionCreateParams } & ( + | { google: GenerateContentParameters } + | { anthropic: MessageCreateParamsBase } + ) +> = { + // simple: { + // openai: { + // model: "gpt-4o", + // max_tokens: 1500, + // temperature: 0.7, + // top_p: 0.9, + // frequency_penalty: 0.1, + // presence_penalty: 0.2, + // messages: [ + // { role: "system", content: "You are a helpful assistant." }, + // { role: "user", content: "Hello, how are you?" }, + // ], + // stream: true, + // }, + // google: { + // max_tokens: 1500, + // messages: [ + // { + // content: "You are a helpful assistant.", + // role: "system", + // }, + // { + // content: "Hello, how are you?", + // role: "user", + // }, + // ], + // model: "gpt-4o", + // stream: true, + // temperature: 0.7, + // top_p: 0.9, + // }, + // anthropic: { + // max_tokens: 1500, + // messages: [ + // { + // content: "You are a helpful assistant.", + // // @ts-expect-error -- TODO: shouldn't we have translated this to a non system role? 
+ // role: "system", + // }, + // { + // content: "Hello, how are you?", + // role: "user", + // }, + // ], + // model: "gpt-4o", + // stream: true, + // temperature: 0.7, + // top_p: 0.9, + // }, + // }, + reasoning: { + openai: { + model: "gpt-4o", + messages: [ + { + role: "system", + content: "You are a detailed reasoning assistant.", + }, + { + role: "user", + content: "Explain how to solve 2x + 4 = 12 step by step.", + }, + ], + temperature: 0, + max_tokens: 1000, + reasoning_effort: "high", + stream: false, + }, + google: { + model: "gpt-4o", + messages: [ + { + role: "system", + content: "You are a detailed reasoning assistant.", + }, + { + role: "user", + content: "Explain how to solve 2x + 4 = 12 step by step.", + }, + ], + temperature: 0, + thinkingConfig: { + thinkingBudget: 4096, + includeThoughts: true, + }, + maxOutputTokens: 5120, + stream: false, + }, + anthropic: { + model: "gpt-4o", + messages: [ + { + // @ts-expect-error -- we use the role to later manipulate the request + role: "system", + content: "You are a detailed reasoning assistant.", + }, + { + role: "user", + content: "Explain how to solve 2x + 4 = 12 step by step.", + }, + ], + max_tokens: 5120, + temperature: 1, + stream: false, + thinking: { + budget_tokens: 4096, + type: "enabled", + }, + }, + }, +}; + +Object.entries(examples).forEach(([example, { openai, ...providers }]) => { + Object.entries(providers).forEach(([provider, expected]) => { + it(`[${example}] translate openai to ${provider} params`, () => { + const result = translateParams( + provider as ModelFormat, + openai as unknown as Record, + ); + try { + expect(result).toEqual(expected); + } catch (error) { + try { + // try to relax the output a little + expect(result).toMatchObject(expected); + } finally { + console.warn( + `Exact openai -> ${provider} translation failed. Found:`, + JSON.stringify(result, null, 2), + ); + } + } + }); + }); +}); diff --git a/packages/proxy/schema/index.ts b/packages/proxy/schema/index.ts index e082d7e4..6cc1e92c 100644 --- a/packages/proxy/schema/index.ts +++ b/packages/proxy/schema/index.ts @@ -73,9 +73,21 @@ export const modelParamMappers: { reasoning_effort, max_tokens, max_completion_tokens, - temperature: _, + temperature, ...params }) => { + debugger; + + if (!reasoning_effort) { + // noop, but let's clean reasoning_effort + return { + ...params, + max_tokens, + max_completion_tokens, + temperature, + }; + } + // Max tokens are inclusive of budget. If the max tokens are too low (below 1024), then the API will raise an exception. const maxTokens = Math.max( max_completion_tokens || max_tokens || 0, @@ -515,14 +527,9 @@ export function translateParams( params: Record, ): Record { let translatedParams: Record = {}; + for (const [k, v] of Object.entries(params || {})) { const safeValue = v ?? undefined; // Don't propagate "null" along - const mapper = modelParamMappers[toProvider]?.[k]; - if (mapper) { - translatedParams = mapper(translatedParams); - continue; - } - const translatedKey = modelParamToModelParam[k as keyof ModelParams] as | keyof ModelParams | undefined @@ -539,6 +546,14 @@ export function translateParams( translatedParams[hasDefaultParam ? translatedKey : k] = safeValue; } + for (const [k, _] of Object.entries(params || {})) { + const mapper = modelParamMappers[toProvider]?.[k]; + if (mapper) { + // not ideal.. 
we should pass the original params to the mappers, but simple params mapping may overwrite complex mappers + translatedParams = mapper(translatedParams); + } + } + return translatedParams; } From d8eae39c63e50c23428b678b4fa5e160c89337cd Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Tue, 13 May 2025 15:57:14 -0500 Subject: [PATCH 24/51] add some dev notes --- packages/proxy/schema/index.test.ts | 112 ++++++++++++++-------------- 1 file changed, 58 insertions(+), 54 deletions(-) diff --git a/packages/proxy/schema/index.test.ts b/packages/proxy/schema/index.test.ts index c0cc5d32..862b6f53 100644 --- a/packages/proxy/schema/index.test.ts +++ b/packages/proxy/schema/index.test.ts @@ -1,66 +1,68 @@ -import { it, expect } from "vitest"; -import { ModelFormat, translateParams } from "./index"; -import { ChatCompletionCreateParams } from "openai/resources"; import { MessageCreateParamsBase } from "@anthropic-ai/sdk/resources/messages"; import { GenerateContentParameters } from "@google/genai"; +import { ChatCompletionCreateParams } from "openai/resources"; +import { expect, it } from "vitest"; +import { ModelFormat, translateParams } from "./index"; const examples: Record< string, - { openai: ChatCompletionCreateParams } & ( + { + openai: ChatCompletionCreateParams; + } & ( // NOTE: these are not strictly the API params. | { google: GenerateContentParameters } | { anthropic: MessageCreateParamsBase } ) > = { - // simple: { - // openai: { - // model: "gpt-4o", - // max_tokens: 1500, - // temperature: 0.7, - // top_p: 0.9, - // frequency_penalty: 0.1, - // presence_penalty: 0.2, - // messages: [ - // { role: "system", content: "You are a helpful assistant." }, - // { role: "user", content: "Hello, how are you?" }, - // ], - // stream: true, - // }, - // google: { - // max_tokens: 1500, - // messages: [ - // { - // content: "You are a helpful assistant.", - // role: "system", - // }, - // { - // content: "Hello, how are you?", - // role: "user", - // }, - // ], - // model: "gpt-4o", - // stream: true, - // temperature: 0.7, - // top_p: 0.9, - // }, - // anthropic: { - // max_tokens: 1500, - // messages: [ - // { - // content: "You are a helpful assistant.", - // // @ts-expect-error -- TODO: shouldn't we have translated this to a non system role? - // role: "system", - // }, - // { - // content: "Hello, how are you?", - // role: "user", - // }, - // ], - // model: "gpt-4o", - // stream: true, - // temperature: 0.7, - // top_p: 0.9, - // }, - // }, + simple: { + openai: { + model: "gpt-4o", + max_tokens: 1500, + temperature: 0.7, + top_p: 0.9, + frequency_penalty: 0.1, + presence_penalty: 0.2, + messages: [ + { role: "system", content: "You are a helpful assistant." }, + { role: "user", content: "Hello, how are you?" }, + ], + stream: true, + }, + google: { + max_tokens: 1500, + messages: [ + { + content: "You are a helpful assistant.", + role: "system", + }, + { + content: "Hello, how are you?", + role: "user", + }, + ], + model: "gpt-4o", + stream: true, + temperature: 0.7, + top_p: 0.9, + }, + anthropic: { + max_tokens: 1500, + messages: [ + { + content: "You are a helpful assistant.", + // @ts-expect-error -- TODO: shouldn't we have translated this to a non system role? 
+ role: "system", + }, + { + content: "Hello, how are you?", + role: "user", + }, + ], + model: "gpt-4o", + stream: true, + temperature: 0.7, + top_p: 0.9, + }, + }, reasoning: { openai: { model: "gpt-4o", @@ -81,6 +83,8 @@ const examples: Record< }, google: { model: "gpt-4o", + // notice how this is still an intermediate param + // google's api expects a content instead of messages, for example messages: [ { role: "system", From 7ff7188f93029c9980d8af5b79e04e021333a81a Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Thu, 15 May 2025 14:07:12 -0500 Subject: [PATCH 25/51] reconcile the latest on gemini models & vertex ai --- packages/proxy/schema/index.ts | 2 - packages/proxy/schema/models.ts | 241 ++++++++++++++++++-------------- 2 files changed, 133 insertions(+), 110 deletions(-) diff --git a/packages/proxy/schema/index.ts b/packages/proxy/schema/index.ts index 6cc1e92c..01e84d4f 100644 --- a/packages/proxy/schema/index.ts +++ b/packages/proxy/schema/index.ts @@ -76,8 +76,6 @@ export const modelParamMappers: { temperature, ...params }) => { - debugger; - if (!reasoning_effort) { // noop, but let's clean reasoning_effort return { diff --git a/packages/proxy/schema/models.ts b/packages/proxy/schema/models.ts index 069d56db..0e676855 100644 --- a/packages/proxy/schema/models.ts +++ b/packages/proxy/schema/models.ts @@ -55,6 +55,18 @@ export const ModelSchema = z.object({ export type ModelSpec = z.infer; +// XXX: General Guidance on Maintainging Available Models: +// +// Order here closely resembles how the UI displays the models. +// For now, let's assume latest models first then previous models. +// +// For models that are in multiple providers, e.g. gemini -> vertex ai, remember +// to propagate changes to those providers. +// +// Set experimental, if model is not allowed production load or API is unstable +// Set parent, if the model was replaced by another model. +// Set deprecated, if and only if we want to discourage (literally hide) the use of the model. +// Set displayName, only if the model is the latest production/stable. export const AvailableModels: { [name: string]: ModelSpec } = { // OPENAI / AZURE MODELS @@ -1808,12 +1820,72 @@ export const AvailableModels: { [name: string]: ModelSpec } = { }, // GEMINI MODELS + "gemini-2.5-pro-preview-05-06": { + format: "google", + flavor: "chat", + input_cost_per_mil_tokens: 1.25, + output_cost_per_mil_tokens: 10, + multimodal: true, + // may technically not be true (according to docs), but the Gemini API doesn't complain if we have the params + reasoning: true, + displayName: "Gemini 2.5 Pro Preview", + }, + "gemini-2.5-pro-exp-03-25": { + format: "google", + flavor: "chat", + input_cost_per_mil_tokens: 0, + output_cost_per_mil_tokens: 0, + multimodal: true, + experimental: true, + reasoning: true, + deprecated: true, + parent: "gemini-2.5-pro-preview-05-06", + }, + "gemini-2.5-pro-preview-03-25": { + format: "google", + flavor: "chat", + input_cost_per_mil_tokens: 1.25, + output_cost_per_mil_tokens: 10, + multimodal: true, + reasoning: true, + parent: "gemini-2.5-pro-preview-05-06", + }, + "gemini-2.0-pro-exp-02-05": { + format: "google", + flavor: "chat", + input_cost_per_mil_tokens: 0, // TODO: Appears to be free for now? 
+ output_cost_per_mil_tokens: 0, + multimodal: true, + experimental: true, + deprecated: true, + parent: "gemini-2.5-pro-preview-03-25", + }, + "gemini-2.5-flash-preview-04-17": { + format: "google", + flavor: "chat", + input_cost_per_mil_tokens: 0.15, + output_cost_per_mil_tokens: 0.6, + multimodal: true, + reasoning: true, + displayName: "Gemini 2.5 Flash Preview", + }, + "gemini-2.0-flash-thinking-exp-01-21": { + format: "google", + flavor: "chat", + input_cost_per_mil_tokens: 0, // TODO: Appears to be free for now? + output_cost_per_mil_tokens: 0, + multimodal: true, + experimental: true, + reasoning: true, + deprecated: true, + parent: "gemini-2.5-flash-preview-04-17", + }, "gemini-2.0-flash": { format: "google", flavor: "chat", input_cost_per_mil_tokens: 0.1, output_cost_per_mil_tokens: 0.4, - displayName: "Gemini 2.0 Flash", + displayName: "Gemini 2.0 Flash Latest", multimodal: true, }, "gemini-2.0-flash-001": { @@ -1824,6 +1896,16 @@ export const AvailableModels: { [name: string]: ModelSpec } = { multimodal: true, parent: "gemini-2.0-flash", }, + "gemini-2.0-flash-exp": { + format: "google", + flavor: "chat", + input_cost_per_mil_tokens: 0, // TODO: Appears to be free for now? + output_cost_per_mil_tokens: 0, + multimodal: true, + experimental: true, + deprecated: true, + parent: "gemini-2.0-flash", + }, "gemini-2.0-flash-lite": { format: "google", flavor: "chat", @@ -1928,63 +2010,6 @@ export const AvailableModels: { [name: string]: ModelSpec } = { multimodal: true, parent: "gemini-1.5-pro", }, - "gemini-2.5-flash-preview-04-17": { - format: "google", - flavor: "chat", - input_cost_per_mil_tokens: 0.15, - output_cost_per_mil_tokens: 0.6, - multimodal: true, - experimental: false, - reasoning: true, - displayName: "Gemini 2.5 Flash Preview", - }, - "gemini-2.5-pro-preview-05-06": { - format: "google", - flavor: "chat", - input_cost_per_mil_tokens: 1.25, - output_cost_per_mil_tokens: 10, - multimodal: true, - experimental: false, - reasoning: true, - displayName: "Gemini 2.5 Pro Preview", - }, - "gemini-2.5-pro-preview-03-25": { - format: "google", - flavor: "chat", - input_cost_per_mil_tokens: 1.25, - output_cost_per_mil_tokens: 10, - multimodal: true, - experimental: false, - parent: "gemini-2.5-pro-preview-05-06", - }, - "gemini-2.5-pro-exp-03-25": { - format: "google", - flavor: "chat", - input_cost_per_mil_tokens: 0, - output_cost_per_mil_tokens: 0, - multimodal: true, - experimental: true, - reasoning: true, - displayName: "Gemini 2.5 Pro Experimental", - }, - "gemini-2.0-flash-exp": { - format: "google", - flavor: "chat", - input_cost_per_mil_tokens: 0, // TODO: Appears to be free for now? - output_cost_per_mil_tokens: 0, - multimodal: true, - experimental: true, - parent: "gemini-2.0-flash", - }, - "gemini-2.0-flash-thinking-exp-01-21": { - format: "google", - flavor: "chat", - input_cost_per_mil_tokens: 0, // TODO: Appears to be free for now? - output_cost_per_mil_tokens: 0, - multimodal: true, - experimental: true, - parent: "gemini-2.0-flash", - }, "learnlm-1.5-pro-experimental": { format: "google", flavor: "chat", @@ -1993,25 +2018,6 @@ export const AvailableModels: { [name: string]: ModelSpec } = { multimodal: true, experimental: true, }, - // Gemini deprecated. - "gemini-2.0-pro-exp-02-05": { - format: "google", - flavor: "chat", - input_cost_per_mil_tokens: 0, // TODO: Appears to be free for now? 
- output_cost_per_mil_tokens: 0, - multimodal: true, - experimental: true, - deprecated: true, - }, - "gemini-exp-1206": { - format: "google", - flavor: "chat", - input_cost_per_mil_tokens: 0, // TODO: Appears to be free for now? - output_cost_per_mil_tokens: 0, - multimodal: true, - experimental: true, - deprecated: true, - }, "gemini-1.0-pro": { format: "google", flavor: "chat", @@ -2028,6 +2034,16 @@ export const AvailableModels: { [name: string]: ModelSpec } = { displayName: "Gemini Pro", deprecated: true, }, + "gemini-exp-1206": { + format: "google", + flavor: "chat", + input_cost_per_mil_tokens: 0, // TODO: Appears to be free for now? + output_cost_per_mil_tokens: 0, + multimodal: true, + experimental: true, + deprecated: true, + parent: "gemini-2.0-pro-exp-02-05", + }, // XAI MODELS "grok-2-vision": { @@ -2164,6 +2180,7 @@ export const AvailableModels: { [name: string]: ModelSpec } = { format: "anthropic", flavor: "chat", multimodal: true, + reasoning: true, input_cost_per_mil_tokens: 3, output_cost_per_mil_tokens: 15, displayName: "Claude 3.7 Sonnet", @@ -2171,6 +2188,7 @@ export const AvailableModels: { [name: string]: ModelSpec } = { "us.anthropic.claude-3-7-sonnet-20250219-v1:0": { format: "anthropic", flavor: "chat", + reasoning: true, multimodal: true, input_cost_per_mil_tokens: 3, output_cost_per_mil_tokens: 15, @@ -2555,15 +2573,48 @@ export const AvailableModels: { [name: string]: ModelSpec } = { // TODO: add anthropic 3.7 converse - // VERTEX MODELS + // VERTEX AI - GEMINI (GOOGLE) + "publishers/google/models/gemini-2.5-pro-preview-05-06": { + format: "google", + flavor: "chat", + displayName: "Gemini 2.5 Pro Preview", + multimodal: true, + // reasoning: true, // coming soon according to the vertex docs + }, + "publishers/google/models/gemini-2.5-pro-preview-03-25": { + format: "google", + flavor: "chat", + multimodal: true, + // reasoning: true, // coming soon according to the vertex docs + parent: "publishers/google/models/gemini-2.5-pro-preview-05-06", + }, + "publishers/google/models/gemini-2.5-pro-exp-03-25": { + format: "google", + flavor: "chat", + multimodal: true, + experimental: true, + // reasoning: true, // coming soon according to the vertex docs + deprecated: true, + parent: "publishers/google/models/gemini-2.5-pro-preview-03-25", + }, "publishers/google/models/gemini-2.5-flash-preview-04-17": { format: "google", flavor: "chat", displayName: "Gemini 2.5 Flash Preview", multimodal: true, + reasoning: true, input_cost_per_mil_tokens: 0.15, output_cost_per_mil_tokens: 0.6, }, + "publishers/google/models/gemini-2.0-flash-thinking-exp-01-21": { + format: "google", + flavor: "chat", + multimodal: true, + experimental: true, + deprecated: true, + // reasoning: true, // coming soon according to the vertex docs + parent: "publishers/google/models/gemini-2.5-flash-preview-04-17", + }, "publishers/google/models/gemini-2.0-flash": { format: "google", flavor: "chat", @@ -2659,6 +2710,8 @@ export const AvailableModels: { [name: string]: ModelSpec } = { flavor: "chat", parent: "publishers/google/models/gemini-1.0-pro", }, + + // VERTEX AI - CLAUDE (ANTHROPIC) "publishers/anthropic/models/claude-3-7-sonnet": { format: "anthropic", flavor: "chat", @@ -2666,6 +2719,7 @@ export const AvailableModels: { [name: string]: ModelSpec } = { input_cost_per_mil_tokens: 3, output_cost_per_mil_tokens: 15, multimodal: true, + reasoning: true, }, "publishers/anthropic/models/claude-3-7-sonnet@20250219": { format: "anthropic", @@ -2674,6 +2728,7 @@ export const AvailableModels: { [name: 
string]: ModelSpec } = { output_cost_per_mil_tokens: 15, multimodal: true, experimental: true, + reasoning: true, parent: "publishers/anthropic/models/claude-3-7-sonnet", }, "publishers/anthropic/models/claude-3-5-haiku": { @@ -2784,36 +2839,6 @@ export const AvailableModels: { [name: string]: ModelSpec } = { input_cost_per_mil_tokens: 0.3, output_cost_per_mil_tokens: 0.9, }, - // Vertex experimental models. - "publishers/google/models/gemini-2.5-pro-preview-05-06": { - format: "google", - flavor: "chat", - displayName: "Gemini 2.5 Pro Preview", - multimodal: true, - experimental: true, - }, - "publishers/google/models/gemini-2.5-pro-preview-03-25": { - format: "google", - flavor: "chat", - multimodal: true, - experimental: true, - parent: "publishers/google/models/gemini-2.5-pro-preview-05-06", - }, - "publishers/google/models/gemini-2.5-pro-exp-03-25": { - format: "google", - flavor: "chat", - displayName: "Gemini 2.5 Pro Experimental", - multimodal: true, - experimental: true, - }, - "publishers/google/models/gemini-2.0-flash-thinking-exp-01-21": { - format: "google", - flavor: "chat", - displayName: "Gemini 2.0 Flash Thinking Mode", - multimodal: true, - experimental: true, - parent: "publishers/google/models/gemini-2.0-flash", - }, "publishers/meta/models/llama-3.3-70b-instruct-maas": { format: "openai", flavor: "chat", From 33677b102bda2d6ec4cab2881439ef5fc5a7f220 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Thu, 15 May 2025 17:03:54 -0500 Subject: [PATCH 26/51] add support for reasoning_effort: undefined to disable reasoning tokens for anthropic/gemini --- packages/proxy/schema/index.test.ts | 75 +++++++++++++++++++++++++---- packages/proxy/schema/index.ts | 25 ++++++++-- 2 files changed, 86 insertions(+), 14 deletions(-) diff --git a/packages/proxy/schema/index.test.ts b/packages/proxy/schema/index.test.ts index 862b6f53..d6d95e99 100644 --- a/packages/proxy/schema/index.test.ts +++ b/packages/proxy/schema/index.test.ts @@ -125,6 +125,67 @@ const examples: Record< }, }, }, + "reasoning disable": { + openai: { + model: "gpt-4o", + messages: [ + { + role: "system", + content: "You are a detailed reasoning assistant.", + }, + { + role: "user", + content: "Explain how to solve 2x + 4 = 12 step by step.", + }, + ], + temperature: 0, + max_tokens: 1000, + reasoning_effort: undefined, + stream: false, + }, + google: { + model: "gpt-4o", + // notice how this is still an intermediate param + // google's api expects a content instead of messages, for example + messages: [ + { + role: "system", + content: "You are a detailed reasoning assistant.", + }, + { + role: "user", + content: "Explain how to solve 2x + 4 = 12 step by step.", + }, + ], + temperature: 0, + maxOutputTokens: 1000, + thinkingConfig: { + thinkingBudget: 0, + includeThoughts: true, + }, + stream: false, + }, + anthropic: { + model: "gpt-4o", + messages: [ + { + // @ts-expect-error -- we use the role to later manipulate the request + role: "system", + content: "You are a detailed reasoning assistant.", + }, + { + role: "user", + content: "Explain how to solve 2x + 4 = 12 step by step.", + }, + ], + max_tokens: 1000, + temperature: 0, + stream: false, + thinking: { + type: "disabled", + }, + }, + }, }; Object.entries(examples).forEach(([example, { openai, ...providers }]) => { @@ -137,15 +198,11 @@ Object.entries(examples).forEach(([example, { openai, ...providers }]) => { try { expect(result).toEqual(expected); } catch (error) { - try { - // try to relax the output a little - expect(result).toMatchObject(expected); - } 
finally { - console.warn( - `Exact openai -> ${provider} translation failed. Found:`, - JSON.stringify(result, null, 2), - ); - } + console.warn( + `Exact openai -> ${provider} translation failed. Found:`, + JSON.stringify(result, null, 2), + ); + expect.soft(result).toEqual(expected); } }); }); diff --git a/packages/proxy/schema/index.ts b/packages/proxy/schema/index.ts index 01e84d4f..0cb18f41 100644 --- a/packages/proxy/schema/index.ts +++ b/packages/proxy/schema/index.ts @@ -76,13 +76,15 @@ export const modelParamMappers: { temperature, ...params }) => { - if (!reasoning_effort) { - // noop, but let's clean reasoning_effort + if (!reasoning_effort || reasoning_effort === "none") { return { ...params, - max_tokens, - max_completion_tokens, + max_tokens: max_completion_tokens || max_tokens, temperature, + // an empty/unset means we should disable + thinking: { + type: "disabled", + }, }; } @@ -113,6 +115,18 @@ export const modelParamMappers: { max_completion_tokens, ...params }) => { + // TODO: update types to accept an explicit reasoning_effort + if (!reasoning_effort || reasoning_effort === "none") { + return { + ...params, + maxOutputTokens: max_completion_tokens || max_tokens, + thinkingConfig: { + thinkingBudget: 0, + includeThoughts: true, + }, + }; + } + const maxTokens = Math.max( max_completion_tokens || max_tokens || 0, 1024 / effortToBudgetMultiplier.low, @@ -169,7 +183,7 @@ export const defaultModelParamSettings: { top_p: 0.7, top_k: 5, use_cache: true, - reasoning_effort: "medium", + reasoning_effort: undefined, }, google: { temperature: undefined, @@ -177,6 +191,7 @@ export const defaultModelParamSettings: { topP: 0.7, topK: 5, use_cache: true, + reasoning_effort: undefined, }, js: {}, window: { From c6ba9350d5e0246acc4c3ad9defb01cc5636825f Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Thu, 15 May 2025 17:31:05 -0500 Subject: [PATCH 27/51] PR feedback --- .envrc | 2 -- packages/proxy/src/proxy.ts | 1 - 2 files changed, 3 deletions(-) delete mode 100644 .envrc diff --git a/.envrc b/.envrc deleted file mode 100644 index 43edf50a..00000000 --- a/.envrc +++ /dev/null @@ -1,2 +0,0 @@ -source_up_if_exists -dotenv_if_exists diff --git a/packages/proxy/src/proxy.ts b/packages/proxy/src/proxy.ts index 966e442a..c857d8de 100644 --- a/packages/proxy/src/proxy.ts +++ b/packages/proxy/src/proxy.ts @@ -1345,7 +1345,6 @@ function chatCompletionMessageFromResponseOutput( }; } -// TODO(ibolmo): should return the reasoning function chatCompletionFromResponse(response: OpenAIResponse): ChatCompletion { return { choices: [ From b10694af9f080f45b11f28fe13f134c20e60e547 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Thu, 15 May 2025 19:37:30 -0500 Subject: [PATCH 28/51] add coverage for openai call checking --- .pre-commit-config.yaml | 2 +- packages/proxy/package.json | 1 + packages/proxy/src/providers/openai.test.ts | 218 +++++++++++++- packages/proxy/src/providers/openai.ts | 5 +- pnpm-lock.yaml | 307 +++++++++++++++++++- 5 files changed, 520 insertions(+), 13 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 4547f1cc..639569c3 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -13,7 +13,7 @@ repos: .*\.(json|prisma|svg)| .*pnpm-lock.yaml )$ - args: ["-L rouge,coo,couldn,unsecure"] + args: ["-L rouge,coo,couldn,unsecure,afterall"] - repo: https://github.com/rbubley/mirrors-prettier rev: v3.3.2 hooks: diff --git a/packages/proxy/package.json b/packages/proxy/package.json index 42af76cf..e3dfdc77 100644 --- 
a/packages/proxy/package.json +++ b/packages/proxy/package.json @@ -79,6 +79,7 @@ "@types/uuid": "^9.0.7", "@typescript-eslint/eslint-plugin": "^8.21.0", "esbuild": "^0.19.10", + "msw": "^2.8.2", "npm-run-all": "^4.1.5", "tsup": "^8.4.0", "typescript": "5.5.4", diff --git a/packages/proxy/src/providers/openai.test.ts b/packages/proxy/src/providers/openai.test.ts index dde694b2..ba738da1 100644 --- a/packages/proxy/src/providers/openai.test.ts +++ b/packages/proxy/src/providers/openai.test.ts @@ -1,12 +1,32 @@ -import { describe, it, expect } from "vitest"; -import { callProxyV1 } from "../../utils/tests"; import { OpenAIChatCompletion, OpenAIChatCompletionChunk, OpenAIChatCompletionCreateParams, } from "@types"; +import { bypass, http, HttpResponse, JsonBodyType } from "msw"; +import { setupServer } from "msw/node"; +import { afterAll, afterEach, beforeAll, expect, it } from "vitest"; +import { callProxyV1 } from "../../utils/tests"; + +export const server = setupServer(); + +beforeAll(() => { + server.listen({ + onUnhandledRequest: () => { + throw new Error("Unexpected request"); + }, + }); +}); -it("should deny reasoning_effort for unsupported models streaming", async () => { +afterEach(() => { + server.resetHandlers(); +}); + +afterAll(() => { + server.close(); +}); + +it("should deny reasoning_effort for unsupported models non-streaming", async () => { const { json } = await callProxyV1< OpenAIChatCompletionCreateParams, OpenAIChatCompletionChunk @@ -180,3 +200,195 @@ it("should accept and return reasoning/thinking params and detail non-streaming" }, }); }); + +type InterceptedRequest = { + method: string; + url: string; + body: JsonBodyType; +}; + +type InterceptedResponse = { + status: number; + body: JsonBodyType; +}; + +type InterceptedCall = { + request: InterceptedRequest; + response: InterceptedResponse; +}; + +it.only("should fallback to medium if reasoning_effort is missing", async () => { + const calls: InterceptedCall[] = []; + server.use( + http.post( + "https://api.openai.com/v1/chat/completions", + async ({ request: req }) => { + const request: InterceptedRequest = { + method: req.method, + url: req.url, + body: await req.json(), + }; + + const res = await fetch( + bypass( + new Request(req.url, { + method: req.method, + body: JSON.stringify(request.body), + headers: req.headers, + }), + ), + ); + + const response: InterceptedResponse = { + status: res.status, + body: await res.json(), + }; + + calls.push({ request, response }); + + return HttpResponse.json(response.body, { + status: res.status, + headers: res.headers, + }); + }, + ), + ); + + const { json } = await callProxyV1< + OpenAIChatCompletionCreateParams, + OpenAIChatCompletionChunk + >({ + body: { + model: "o3-mini-2025-01-31", + reasoning_effort: null, + stream: false, + messages: [ + { + role: "user", + content: "How many rs in 'ferrocarril'", + }, + { + role: "assistant", + content: "There are 4 letter 'r's in the word \"ferrocarril\".", + refusal: null, + reasoning: [ + { + id: "", + content: + "To count the number of 'r's in the word 'ferrocarril', I'll just go through the word letter by letter.\n\n'ferrocarril' has the following letters:\nf-e-r-r-o-c-a-r-r-i-l\n\nLooking at each letter:\n- 'f': not an 'r'\n- 'e': not an 'r'\n- 'r': This is an 'r', so that's 1.\n- 'r': This is an 'r', so that's 2.\n- 'o': not an 'r'\n- 'c': not an 'r'\n- 'a': not an 'r'\n- 'r': This is an 'r', so that's 3.\n- 'r': This is an 'r', so that's 4.\n- 'i': not an 'r'\n- 'l': not an 'r'\n\nSo there are 4 'r's in the word 
'ferrocarril'.", + }, + ], + }, + { + role: "user", + content: "How many e in what you said?", + }, + ], + }, + }); + + expect(json()).toEqual({ + choices: [ + { + finish_reason: "stop", + index: 0, + message: { + content: expect.any(String), + // as of writing, openai does not provide this detail + // reasoning: [], + annotations: [], + refusal: null, + role: "assistant", + }, + }, + ], + created: expect.any(Number), + id: expect.any(String), + model: "o3-mini-2025-01-31", + object: "chat.completion", + service_tier: expect.any(String), + system_fingerprint: expect.any(String), + usage: { + completion_tokens: expect.any(Number), + prompt_tokens: expect.any(Number), + total_tokens: expect.any(Number), + completion_tokens_details: { + accepted_prediction_tokens: expect.any(Number), + audio_tokens: expect.any(Number), + reasoning_tokens: expect.any(Number), + rejected_prediction_tokens: expect.any(Number), + }, + prompt_tokens_details: { + audio_tokens: expect.any(Number), + cached_tokens: expect.any(Number), + }, + }, + }); + + expect(calls).toEqual([ + { + request: { + body: { + reasoning_effort: null, // let openai decide what is the default + messages: [ + { + content: "How many rs in 'ferrocarril'", + role: "user", + }, + { + content: "There are 4 letter 'r's in the word \"ferrocarril\".", + refusal: null, + role: "assistant", + }, + { + content: "How many e in what you said?", + role: "user", + }, + ], + model: "o3-mini-2025-01-31", + stream: false, + }, + method: "POST", + url: "https://api.openai.com/v1/chat/completions", + }, + response: { + body: { + choices: [ + { + finish_reason: "stop", + index: 0, + message: { + annotations: [], + content: expect.any(String), + refusal: null, + role: "assistant", + }, + }, + ], + created: expect.any(Number), + id: expect.any(String), + model: "o3-mini-2025-01-31", + object: "chat.completion", + service_tier: "default", + system_fingerprint: expect.any(String), + usage: { + completion_tokens: expect.any(Number), + completion_tokens_details: { + accepted_prediction_tokens: expect.any(Number), + audio_tokens: expect.any(Number), + reasoning_tokens: expect.any(Number), + rejected_prediction_tokens: expect.any(Number), + }, + prompt_tokens: expect.any(Number), + prompt_tokens_details: { + audio_tokens: expect.any(Number), + cached_tokens: expect.any(Number), + }, + total_tokens: expect.any(Number), + }, + }, + status: 200, + }, + }, + ]); +}); diff --git a/packages/proxy/src/providers/openai.ts b/packages/proxy/src/providers/openai.ts index c92fb5a6..0d519472 100644 --- a/packages/proxy/src/providers/openai.ts +++ b/packages/proxy/src/providers/openai.ts @@ -3,7 +3,6 @@ import { ChatCompletion, ChatCompletionMessageParam, ChatCompletionContentPart, - ChatCompletionContentPartRefusal, } from "openai/resources"; import { base64ToUrl, convertBase64Media, convertMediaToBase64 } from "./util"; import { parseFilenameFromUrl } from ".."; @@ -92,6 +91,10 @@ export async function normalizeOpenAIMessages( ), ); } + // not part of the openai spec + if ("reasoning" in message) { + delete message.reasoning; + } return message; }), ); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f6ecd045..a3621b91 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -25,7 +25,7 @@ importers: version: 4.3.2(typescript@5.5.4) vitest: specifier: ^2.1.9 - version: 2.1.9(@types/node@20.10.5) + version: 2.1.9(@types/node@20.10.5)(msw@2.8.2) apis/cloudflare: dependencies: @@ -270,6 +270,9 @@ importers: esbuild: specifier: ^0.19.10 version: 0.19.10 + msw: + specifier: ^2.8.2 + 
version: 2.8.2(@types/node@20.10.5)(typescript@5.5.4) npm-run-all: specifier: ^4.1.5 version: 4.1.5 @@ -284,7 +287,7 @@ importers: version: 4.3.2(typescript@5.5.4) vitest: specifier: ^2.1.9 - version: 2.1.9(@types/node@20.10.5) + version: 2.1.9(@types/node@20.10.5)(msw@2.8.2) packages: @@ -965,6 +968,25 @@ packages: resolution: {integrity: sha512-6wc7ck65ctA75Hq7FYHTtTvGnYs6msgdxiSUICQ+A01nVOWg6rqouZB8IdyteRlfpYYiFovkf67dIeOgWIUzTA==} dev: false + /@bundled-es-modules/cookie@2.0.1: + resolution: {integrity: sha512-8o+5fRPLNbjbdGRRmJj3h6Hh1AQJf2dk3qQ/5ZFb+PXkRNiSoMGGUKlsgLfrxneb72axVJyIYji64E2+nNfYyw==} + dependencies: + cookie: 0.7.2 + dev: true + + /@bundled-es-modules/statuses@1.0.1: + resolution: {integrity: sha512-yn7BklA5acgcBr+7w064fGV+SGIFySjCKpqjcWgBAIfrAkY+4GQTJJHQMeT3V/sgz23VTEVV8TtOmkvJAhFVfg==} + dependencies: + statuses: 2.0.1 + dev: true + + /@bundled-es-modules/tough-cookie@0.1.6: + resolution: {integrity: sha512-dvMHbL464C0zI+Yqxbz6kZ5TOEp7GLW+pry/RWndAR8MJQAXZ2rPmIs8tziTZjeIyhSNZgZbCePtfSbdWqStJw==} + dependencies: + '@types/tough-cookie': 4.0.5 + tough-cookie: 4.1.4 + dev: true + /@cloudflare/kv-asset-handler@0.3.4: resolution: {integrity: sha512-YLPHc8yASwjNkmcDMQMY35yiWjoKAKnhUbPRszBRS0YgH+IXtsMp61j+yTcnCE3oO2DgP0U3iejLC8FTtKDC8Q==} engines: {node: '>=16.13'} @@ -2211,6 +2233,57 @@ packages: deprecated: Use @eslint/object-schema instead dev: true + /@inquirer/confirm@5.1.10(@types/node@20.10.5): + resolution: {integrity: sha512-FxbQ9giWxUWKUk2O5XZ6PduVnH2CZ/fmMKMBkH71MHJvWr7WL5AHKevhzF1L5uYWB2P548o1RzVxrNd3dpmk6g==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + dependencies: + '@inquirer/core': 10.1.11(@types/node@20.10.5) + '@inquirer/type': 3.0.6(@types/node@20.10.5) + '@types/node': 20.10.5 + dev: true + + /@inquirer/core@10.1.11(@types/node@20.10.5): + resolution: {integrity: sha512-BXwI/MCqdtAhzNQlBEFE7CEflhPkl/BqvAuV/aK6lW3DClIfYVDWPP/kXuXHtBWC7/EEbNqd/1BGq2BGBBnuxw==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + dependencies: + '@inquirer/figures': 1.0.11 + '@inquirer/type': 3.0.6(@types/node@20.10.5) + '@types/node': 20.10.5 + ansi-escapes: 4.3.2 + cli-width: 4.1.0 + mute-stream: 2.0.0 + signal-exit: 4.1.0 + wrap-ansi: 6.2.0 + yoctocolors-cjs: 2.1.2 + dev: true + + /@inquirer/figures@1.0.11: + resolution: {integrity: sha512-eOg92lvrn/aRUqbxRyvpEWnrvRuTYRifixHkYVpJiygTgVSBIHDqLh0SrMQXkafvULg3ck11V7xvR+zcgvpHFw==} + engines: {node: '>=18'} + dev: true + + /@inquirer/type@3.0.6(@types/node@20.10.5): + resolution: {integrity: sha512-/mKVCtVpyBu3IDarv0G+59KC4stsD5mDsGpYh+GKs1NZT88Jh52+cuoA1AtLk2Q0r/quNl+1cSUyLRHBFeD0XA==} + engines: {node: '>=18'} + peerDependencies: + '@types/node': '>=18' + peerDependenciesMeta: + '@types/node': + optional: true + dependencies: + '@types/node': 20.10.5 + dev: true + /@isaacs/cliui@8.0.2: resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} engines: {node: '>=12'} @@ -2271,6 +2344,18 @@ packages: resolution: {integrity: sha512-GaHYm+c0O9MjZRu0ongGBRbinu8gVAMd2UZjji6jVmqKtZluZnptXGWhz1E8j8D2HJ3f/yMxKAUC0b+57wncIw==} dev: false + /@mswjs/interceptors@0.37.6: + resolution: {integrity: sha512-wK+5pLK5XFmgtH3aQ2YVvA3HohS3xqV/OxuVOdNx9Wpnz7VE/fnC+e1A7ln6LFYeck7gOJ/dsZV6OLplOtAJ2w==} + engines: {node: '>=18'} + dependencies: + '@open-draft/deferred-promise': 2.2.0 + '@open-draft/logger': 0.3.0 + 
'@open-draft/until': 2.1.0 + is-node-process: 1.2.0 + outvariant: 1.4.3 + strict-event-emitter: 0.5.1 + dev: true + /@next/env@14.2.3: resolution: {integrity: sha512-W7fd7IbkfmeeY2gXrzJYDx8D2lWKbVoTIj1o1ScPHNzvp30s1AuoEFSdr39bC5sjxJaxTtq3OTCZboNp0lNWHA==} dev: false @@ -2383,6 +2468,21 @@ packages: fastq: 1.15.0 dev: true + /@open-draft/deferred-promise@2.2.0: + resolution: {integrity: sha512-CecwLWx3rhxVQF6V4bAgPS5t+So2sTbPgAzafKkVizyi7tlwpcFpdFqq+wqF2OwNBmqFuu6tOyouTuxgpMfzmA==} + dev: true + + /@open-draft/logger@0.3.0: + resolution: {integrity: sha512-X2g45fzhxH238HKO4xbSr7+wBS8Fvw6ixhTDuvLd5mqh6bJJCFAPwU9mPDxbcrRtfxv4u5IHCEH77BmxvXmmxQ==} + dependencies: + is-node-process: 1.2.0 + outvariant: 1.4.3 + dev: true + + /@open-draft/until@2.1.0: + resolution: {integrity: sha512-U69T3ItWHvLwGg5eJ0n3I62nWuE6ilHlmz7zM0npLBRvPRd7e6NYmg54vvRtP5mZG7kZqZCFVdsTWo7BPtBujg==} + dev: true + /@opentelemetry/api@1.7.0: resolution: {integrity: sha512-AdY5wvN0P2vXBi3b29hxZgSFvdhdxPB9+f0B6s//P9Q8nibRWeA3cHm8UmLpio9ABigkVHJ5NMPk+Mz8VCCyrw==} engines: {node: '>=8.0.0'} @@ -3313,6 +3413,10 @@ packages: '@types/node': 20.10.5 dev: true + /@types/cookie@0.6.0: + resolution: {integrity: sha512-4Kh9a6B2bQciAhf7FSuMRRkUWecJgJu9nPnx3yzpsfXX/c50REIqpHY4C82bXP90qrLtXtkDxTZosYO3UpOwlA==} + dev: true + /@types/cors@2.8.13: resolution: {integrity: sha512-RG8AStHlUiV5ysZQKq97copd2UmVYw3/pRMLefISZ3S1hK104Cwm7iLQ3fTKx+lsUH2CE8FlLaYeEA2LSeqYUA==} dependencies: @@ -3441,6 +3545,14 @@ packages: '@types/node': 20.10.5 dev: true + /@types/statuses@2.0.5: + resolution: {integrity: sha512-jmIUGWrAiwu3dZpxntxieC+1n/5c3mjrImkmOSQ2NC5uP6cYO4aAZDdSmRcI5C1oiTmqlZGHC+/NmJrKogbP5A==} + dev: true + + /@types/tough-cookie@4.0.5: + resolution: {integrity: sha512-/Ad8+nIOV7Rl++6f1BdKxFSMgmoqEoYbHRpPcx3JEfv8VRsQe9Z4mCXeJBzxs7mbHY/XOZZuXlRNfhpVPbs6ZA==} + dev: true + /@types/uuid@9.0.7: resolution: {integrity: sha512-WUtIVRUZ9i5dYXefDEAI7sh9/O7jGvHg7Df/5O/gtH3Yabe5odI3UWopVR1qbPXQtvOxWu3mM4XxlYeZtMWF4g==} @@ -3743,7 +3855,7 @@ packages: tinyrainbow: 1.2.0 dev: true - /@vitest/mocker@2.1.9(vite@5.4.10): + /@vitest/mocker@2.1.9(msw@2.8.2)(vite@5.4.10): resolution: {integrity: sha512-tVL6uJgoUdi6icpxmdrn5YNo3g3Dxv+IHJBr0GXHaEdTcw3F+cPKnsXFhli6nO+f/6SDKPHEK1UN+k+TQv0Ehg==} peerDependencies: msw: ^2.4.9 @@ -3757,6 +3869,7 @@ packages: '@vitest/spy': 2.1.9 estree-walker: 3.0.3 magic-string: 0.30.17 + msw: 2.8.2(@types/node@20.10.5)(typescript@5.5.4) vite: 5.4.10(@types/node@20.10.5) dev: true @@ -3862,7 +3975,7 @@ packages: dependencies: '@vue/compiler-ssr': 3.5.13 '@vue/shared': 3.5.13 - vue: 3.5.13(typescript@5.3.3) + vue: 3.5.13(typescript@5.5.4) dev: false /@vue/shared@3.5.13: @@ -4065,6 +4178,13 @@ packages: uri-js: 4.4.1 dev: true + /ansi-escapes@4.3.2: + resolution: {integrity: sha512-gKXj5ALrKWQLsYG9jlTRmR/xKluxHV+Z9QEwNIgCfM1/uwPMCuzVVnh5mwTd+OuBZcwSIMbqssNWRm1lE51QaQ==} + engines: {node: '>=8'} + dependencies: + type-fest: 0.21.3 + dev: true + /ansi-regex@5.0.1: resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} engines: {node: '>=8'} @@ -4566,10 +4686,24 @@ packages: string-width: 4.2.3 dev: false + /cli-width@4.1.0: + resolution: {integrity: sha512-ouuZd4/dm2Sw5Gmqy6bGyNNNe1qt9RpmxveLSO7KcgsTnU7RXfsw+/bukWGo1abgBiMAic068rclZsO4IWmmxQ==} + engines: {node: '>= 12'} + dev: true + /client-only@0.0.1: resolution: {integrity: sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==} dev: false + /cliui@8.0.1: + resolution: {integrity: 
sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} + engines: {node: '>=12'} + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + dev: true + /clsx@1.2.1: resolution: {integrity: sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==} engines: {node: '>=6'} @@ -5963,6 +6097,11 @@ packages: engines: {node: '>= 4'} dev: false + /get-caller-file@2.0.5: + resolution: {integrity: sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==} + engines: {node: 6.* || 8.* || >= 10.*} + dev: true + /get-intrinsic@1.2.2: resolution: {integrity: sha512-0gSo4ml/0j98Y3lngkFEot/zhiCeWsbYIlZ+uZOVgzLyLaUw7wxUL+nCTP0XJvJg1AXulJRI3UJi8GsbDuxdGA==} dependencies: @@ -6095,6 +6234,11 @@ packages: resolution: {integrity: sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag==} dev: true + /graphql@16.11.0: + resolution: {integrity: sha512-mS1lbMsxgQj6hge1XZ6p7GPhbrtFwUFYi3wRzXAC/FmYnyXMTvvI3td3rjmQ2u8ewXueaSvRPWaEcgVVOT9Jnw==} + engines: {node: ^12.22.0 || ^14.16.0 || ^16.0.0 || >=17.0.0} + dev: true + /gtoken@7.1.0: resolution: {integrity: sha512-pCcEwRi+TKpMlxAQObHDQ56KawURgyAf6jtIY046fJ5tIv3zDe/LEIubckAO8fj6JnAxLdmWkUfNyulQ2iKdEw==} engines: {node: '>=14.0.0'} @@ -6144,6 +6288,10 @@ packages: dependencies: function-bind: 1.1.2 + /headers-polyfill@4.0.3: + resolution: {integrity: sha512-IScLbePpkvO846sIwOtOTDjutRMWdXdJmXdMvk6gCBHxFO8d+QKOQedyZSxFTTFYRSmlgSTDtXqqq4pcenBXLQ==} + dev: true + /hosted-git-info@2.8.9: resolution: {integrity: sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==} dev: true @@ -6340,6 +6488,10 @@ packages: engines: {node: '>= 0.4'} dev: true + /is-node-process@1.2.0: + resolution: {integrity: sha512-Vg4o6/fqPxIjtxgUH5QLJhwZ7gW5diGCVlXpuUfELC62CuxM1iHcRe51f2W1FDy04Ai4KJkagKjx3XaqyfRKXw==} + dev: true + /is-number-object@1.0.7: resolution: {integrity: sha512-k1U0IRzLMo7ZlYIfzRu23Oh6MiIFasgpb9X76eqfFZAqwH44UI4KTBvBYIZ1dSL9ZzChTB9ShHfLkR4pdW5krQ==} engines: {node: '>= 0.4'} @@ -6870,10 +7022,49 @@ packages: /ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + /msw@2.8.2(@types/node@20.10.5)(typescript@5.5.4): + resolution: {integrity: sha512-ugu8RBgUj6//RD0utqDDPdS+QIs36BKYkDAM6u59hcMVtFM4PM0vW4l3G1R+1uCWP2EWFUG8reT/gPXVEtx7/w==} + engines: {node: '>=18'} + hasBin: true + requiresBuild: true + peerDependencies: + typescript: '>= 4.8.x' + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@bundled-es-modules/cookie': 2.0.1 + '@bundled-es-modules/statuses': 1.0.1 + '@bundled-es-modules/tough-cookie': 0.1.6 + '@inquirer/confirm': 5.1.10(@types/node@20.10.5) + '@mswjs/interceptors': 0.37.6 + '@open-draft/deferred-promise': 2.2.0 + '@open-draft/until': 2.1.0 + '@types/cookie': 0.6.0 + '@types/statuses': 2.0.5 + graphql: 16.11.0 + headers-polyfill: 4.0.3 + is-node-process: 1.2.0 + outvariant: 1.4.3 + path-to-regexp: 6.3.0 + picocolors: 1.1.1 + strict-event-emitter: 0.5.1 + type-fest: 4.41.0 + typescript: 5.5.4 + yargs: 17.7.2 + transitivePeerDependencies: + - '@types/node' + dev: true + /mustache@4.2.0: resolution: {integrity: sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==} hasBin: true + /mute-stream@2.0.0: + resolution: {integrity: 
sha512-WWdIxpyjEn+FhQJQQv9aQAYlHoNVdzIzUySNV1gHUPDSdZJ3yZn7pAAbQcV7B56Mvu881q9FZV+0Vx2xC44VWA==} + engines: {node: ^18.17.0 || >=20.5.0} + dev: true + /mz@2.7.0: resolution: {integrity: sha512-z81GNO7nnYMEhrGh9LeymoE4+Yr0Wn5McHIZMK5cfQCl+NDX08sCZgUc9/6MHni9IWuFLm1Z3HTCXu2z9fN62Q==} dependencies: @@ -7205,6 +7396,10 @@ packages: word-wrap: 1.2.5 dev: true + /outvariant@1.4.3: + resolution: {integrity: sha512-+Sl2UErvtsoajRDKCE5/dBz4DIvHXQQnAxtQTF04OJxY0+DyZXSo5P5Bb7XYWOh81syohlYL24hbDwxedPUJCA==} + dev: true + /p-limit@3.1.0: resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} engines: {node: '>=10'} @@ -7509,6 +7704,12 @@ packages: resolution: {integrity: sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg==} dev: false + /psl@1.15.0: + resolution: {integrity: sha512-JZd3gMVBAVQkSs6HdNZo9Sdo0LNcQeMNP3CozBJb3JYC/QUYZTnKxP+f8oWRX4rHP5EurWxqAHTSwUCjlNKa1w==} + dependencies: + punycode: 2.3.1 + dev: true + /pstree.remy@1.1.8: resolution: {integrity: sha512-77DZwxQmxKnu3aR542U+X8FypNzbfJ+C5XQDk3uWjWxn6151aIMGthWYRXTqT1E5oJvg+ljaa2OJi+VfvCOQ8w==} dev: true @@ -7522,6 +7723,11 @@ packages: engines: {node: '>=6'} dev: true + /punycode@2.3.1: + resolution: {integrity: sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==} + engines: {node: '>=6'} + dev: true + /qs@6.11.0: resolution: {integrity: sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==} engines: {node: '>=0.6'} @@ -7535,6 +7741,10 @@ packages: deprecated: The querystring API is considered Legacy. new code should use the URLSearchParams API instead. dev: false + /querystringify@2.2.0: + resolution: {integrity: sha512-FIqgj2EUvTa7R50u0rGsyTftzjYmv/a3hO345bZNrqabNqjtgiDMgmo4mkUjd+nzU5oF3dClKqFIPUKybUyqoQ==} + dev: true + /queue-microtask@1.2.3: resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} dev: true @@ -7643,6 +7853,15 @@ packages: set-function-name: 2.0.1 dev: true + /require-directory@2.1.1: + resolution: {integrity: sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==} + engines: {node: '>=0.10.0'} + dev: true + + /requires-port@1.0.0: + resolution: {integrity: sha512-KigOCHcocU3XODJxsu8i/j8T9tzT4adHiecwORRQ0ZZFcp7ahwXuRU1m+yuO90C5ZUyGeGfocHDI14M3L3yDAQ==} + dev: true + /resolve-from@4.0.0: resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} engines: {node: '>=4'} @@ -8087,7 +8306,6 @@ packages: /statuses@2.0.1: resolution: {integrity: sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==} engines: {node: '>= 0.8'} - dev: false /std-env@3.8.1: resolution: {integrity: sha512-vj5lIj3Mwf9D79hBkltk5qmkFI+biIKWS2IBxEyEU3AX1tUf7AoL8nSazCOiiqQsGKIq01SClsKEzweu34uwvA==} @@ -8103,6 +8321,10 @@ packages: engines: {node: '>=10.0.0'} dev: false + /strict-event-emitter@0.5.1: + resolution: {integrity: sha512-vMgjE/GGEPEFnhFub6pa4FmJBRBVOLpIII2hvCZ8Kzb7K0hlHo7mQv6xYrBvCL2LtAIBwFUK8wvuJgTVSQ5MFQ==} + dev: true + /string-width@4.2.3: resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} engines: {node: '>=8'} @@ -8323,7 +8545,7 @@ packages: peerDependencies: vue: '>=3.2.26 < 4' dependencies: - vue: 3.5.13(typescript@5.3.3) + vue: 3.5.13(typescript@5.5.4) dev: false 
/tailwindcss@3.2.7(postcss@8.4.38): @@ -8436,6 +8658,16 @@ packages: nopt: 1.0.10 dev: true + /tough-cookie@4.1.4: + resolution: {integrity: sha512-Loo5UUvLD9ScZ6jh8beX1T6sO1w2/MpCRpEP7V280GKMVUQ0Jzar2U3UJPsrdbziLEMMhu3Ujnq//rhiFuIeag==} + engines: {node: '>=6'} + dependencies: + psl: 1.15.0 + punycode: 2.3.1 + universalify: 0.2.0 + url-parse: 1.5.10 + dev: true + /tr46@0.0.3: resolution: {integrity: sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==} dev: false @@ -8726,6 +8958,16 @@ packages: engines: {node: '>=10'} dev: true + /type-fest@0.21.3: + resolution: {integrity: sha512-t0rzBq87m3fVcduHDUFhKmyyX+9eo6WQjZvf51Ea/M0Q7+T374Jp1aUiyUl0GKxp8M/OETVHSDvmkyPgvX+X2w==} + engines: {node: '>=10'} + dev: true + + /type-fest@4.41.0: + resolution: {integrity: sha512-TeTSQ6H5YHvpqVwBRcnLDCBnDOHWYu7IvGbHT6N8AOymcr9PJGjc1GTtiWZTYg0NCgYwvnYWEkVChQAr9bjfwA==} + engines: {node: '>=16'} + dev: true + /type-is@1.6.18: resolution: {integrity: sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g==} engines: {node: '>= 0.6'} @@ -8839,6 +9081,11 @@ packages: ufo: 1.5.4 dev: true + /universalify@0.2.0: + resolution: {integrity: sha512-CJ1QgKmNg3CwvAv/kOFmtnEN05f0D/cn9QntgNOQlQF9dgvVTHj3t+8JPdjqawCHk7V/KA+fbUqzZ9XWhcqPUg==} + engines: {node: '>= 4.0.0'} + dev: true + /unpipe@1.0.0: resolution: {integrity: sha512-pjy2bYhSsufwWlKwPc+l3cN7+wuJlK6uz0YdJEOlQDbl6jo/YlPi4mb8agUkVC8BF7V8NuzeyPNqRksA3hztKQ==} engines: {node: '>= 0.8'} @@ -8861,6 +9108,13 @@ packages: punycode: 2.1.1 dev: true + /url-parse@1.5.10: + resolution: {integrity: sha512-WypcfiRhfeUP9vvF0j6rw0J3hrWrw6iZv3+22h6iRMJ/8z1Tj6XfLP4DsUix5MhMPnXpiHDoKyoZ/bdCkwBCiQ==} + dependencies: + querystringify: 2.2.0 + requires-port: 1.0.0 + dev: true + /url@0.10.3: resolution: {integrity: sha512-hzSUW2q06EqL1gKM/a+obYHLIO6ct2hwPuviqTTOcfFVc61UbfJ2Q32+uGL/HCPxKqrdGB5QUwIe7UqlDgwsOQ==} dependencies: @@ -9002,7 +9256,7 @@ packages: fsevents: 2.3.3 dev: true - /vitest@2.1.9(@types/node@20.10.5): + /vitest@2.1.9(@types/node@20.10.5)(msw@2.8.2): resolution: {integrity: sha512-MSmPM9REYqDGBI8439mA4mWhV5sKmDlBKWIYbA3lRb2PTHACE0mgKwA8yQ2xq9vxDTuk4iPrECBAEW2aoFXY0Q==} engines: {node: ^18.0.0 || >=20.0.0} hasBin: true @@ -9029,7 +9283,7 @@ packages: dependencies: '@types/node': 20.10.5 '@vitest/expect': 2.1.9 - '@vitest/mocker': 2.1.9(vite@5.4.10) + '@vitest/mocker': 2.1.9(msw@2.8.2)(vite@5.4.10) '@vitest/pretty-format': 2.1.9 '@vitest/runner': 2.1.9 '@vitest/snapshot': 2.1.9 @@ -9267,6 +9521,15 @@ packages: - utf-8-validate dev: true + /wrap-ansi@6.2.0: + resolution: {integrity: sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==} + engines: {node: '>=8'} + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 + dev: true + /wrap-ansi@7.0.0: resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} engines: {node: '>=10'} @@ -9319,6 +9582,11 @@ packages: engines: {node: '>=0.4'} dev: true + /y18n@5.0.8: + resolution: {integrity: sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==} + engines: {node: '>=10'} + dev: true + /yaeti@0.0.6: resolution: {integrity: sha512-MvQa//+KcZCUkBTIC9blM+CU9J2GzuTytsOUwf2lidtvkx/6gnEp1QvJv34t9vdjhFmha/mUiNDbN0D0mJWdug==} engines: {node: '>=0.10.32'} @@ -9338,11 +9606,34 @@ packages: hasBin: true dev: false + /yargs-parser@21.1.1: + resolution: {integrity: 
sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} + engines: {node: '>=12'} + dev: true + + /yargs@17.7.2: + resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} + engines: {node: '>=12'} + dependencies: + cliui: 8.0.1 + escalade: 3.1.1 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 21.1.1 + dev: true + /yocto-queue@0.1.0: resolution: {integrity: sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==} engines: {node: '>=10'} dev: true + /yoctocolors-cjs@2.1.2: + resolution: {integrity: sha512-cYVsTjKl8b+FrnidjibDWskAv7UKOfcwaVZdp/it9n1s9fU3IkgDbhdIRKCW4JDsAlECJY0ytoVPT3sK6kideA==} + engines: {node: '>=18'} + dev: true + /youch@3.3.4: resolution: {integrity: sha512-UeVBXie8cA35DS6+nBkls68xaBBXCye0CNznrhszZjTbRVnJKQuNsyLKBTTL4ln1o1rh2PKtv35twV7irj5SEg==} dependencies: From 0e0ebb3545fa34980b885062dd18b2f56ab5e23b Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Thu, 15 May 2025 20:09:47 -0500 Subject: [PATCH 29/51] avoid including includeThoughts if disabled --- packages/proxy/schema/index.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/proxy/schema/index.ts b/packages/proxy/schema/index.ts index 0cb18f41..4d933766 100644 --- a/packages/proxy/schema/index.ts +++ b/packages/proxy/schema/index.ts @@ -122,7 +122,6 @@ export const modelParamMappers: { maxOutputTokens: max_completion_tokens || max_tokens, thinkingConfig: { thinkingBudget: 0, - includeThoughts: true, }, }; } From c5858dd670ce0729902cad54123e87a82405e9cb Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Thu, 15 May 2025 20:22:00 -0500 Subject: [PATCH 30/51] fix tests --- packages/proxy/schema/index.test.ts | 1 - packages/proxy/src/providers/openai.test.ts | 350 ++++++++++---------- 2 files changed, 176 insertions(+), 175 deletions(-) diff --git a/packages/proxy/schema/index.test.ts b/packages/proxy/schema/index.test.ts index d6d95e99..41445dee 100644 --- a/packages/proxy/schema/index.test.ts +++ b/packages/proxy/schema/index.test.ts @@ -161,7 +161,6 @@ const examples: Record< maxOutputTokens: 1000, thinkingConfig: { thinkingBudget: 0, - includeThoughts: true, }, stream: false, }, diff --git a/packages/proxy/src/providers/openai.test.ts b/packages/proxy/src/providers/openai.test.ts index ba738da1..27ba02df 100644 --- a/packages/proxy/src/providers/openai.test.ts +++ b/packages/proxy/src/providers/openai.test.ts @@ -5,27 +5,9 @@ import { } from "@types"; import { bypass, http, HttpResponse, JsonBodyType } from "msw"; import { setupServer } from "msw/node"; -import { afterAll, afterEach, beforeAll, expect, it } from "vitest"; +import { afterAll, afterEach, beforeAll, describe, expect, it } from "vitest"; import { callProxyV1 } from "../../utils/tests"; -export const server = setupServer(); - -beforeAll(() => { - server.listen({ - onUnhandledRequest: () => { - throw new Error("Unexpected request"); - }, - }); -}); - -afterEach(() => { - server.resetHandlers(); -}); - -afterAll(() => { - server.close(); -}); - it("should deny reasoning_effort for unsupported models non-streaming", async () => { const { json } = await callProxyV1< OpenAIChatCompletionCreateParams, @@ -217,178 +199,198 @@ type InterceptedCall = { response: InterceptedResponse; }; -it.only("should fallback to medium if reasoning_effort is missing", async () => { - const calls: InterceptedCall[] = []; - server.use( - http.post( - 
"https://api.openai.com/v1/chat/completions", - async ({ request: req }) => { - const request: InterceptedRequest = { - method: req.method, - url: req.url, - body: await req.json(), - }; +describe("request/response checking", () => { + const server = setupServer(); - const res = await fetch( - bypass( - new Request(req.url, { - method: req.method, - body: JSON.stringify(request.body), - headers: req.headers, - }), - ), - ); + beforeAll(() => { + server.listen({ + onUnhandledRequest: () => { + throw new Error("Unexpected request"); + }, + }); + }); - const response: InterceptedResponse = { - status: res.status, - body: await res.json(), - }; + afterEach(() => { + server.resetHandlers(); + }); - calls.push({ request, response }); + afterAll(() => { + server.close(); + }); - return HttpResponse.json(response.body, { - status: res.status, - headers: res.headers, - }); - }, - ), - ); + it("should fallback to medium if reasoning_effort is missing", async () => { + const calls: InterceptedCall[] = []; + server.use( + http.post( + "https://api.openai.com/v1/chat/completions", + async ({ request: req }) => { + const request: InterceptedRequest = { + method: req.method, + url: req.url, + body: await req.json(), + }; - const { json } = await callProxyV1< - OpenAIChatCompletionCreateParams, - OpenAIChatCompletionChunk - >({ - body: { - model: "o3-mini-2025-01-31", - reasoning_effort: null, - stream: false, - messages: [ - { - role: "user", - content: "How many rs in 'ferrocarril'", - }, - { - role: "assistant", - content: "There are 4 letter 'r's in the word \"ferrocarril\".", - refusal: null, - reasoning: [ - { - id: "", - content: - "To count the number of 'r's in the word 'ferrocarril', I'll just go through the word letter by letter.\n\n'ferrocarril' has the following letters:\nf-e-r-r-o-c-a-r-r-i-l\n\nLooking at each letter:\n- 'f': not an 'r'\n- 'e': not an 'r'\n- 'r': This is an 'r', so that's 1.\n- 'r': This is an 'r', so that's 2.\n- 'o': not an 'r'\n- 'c': not an 'r'\n- 'a': not an 'r'\n- 'r': This is an 'r', so that's 3.\n- 'r': This is an 'r', so that's 4.\n- 'i': not an 'r'\n- 'l': not an 'r'\n\nSo there are 4 'r's in the word 'ferrocarril'.", - }, - ], + const res = await fetch( + bypass( + new Request(req.url, { + method: req.method, + body: JSON.stringify(request.body), + headers: req.headers, + }), + ), + ); + + const response: InterceptedResponse = { + status: res.status, + body: await res.json(), + }; + + calls.push({ request, response }); + + return HttpResponse.json(response.body, { + status: res.status, + headers: res.headers, + }); }, + ), + ); + + const { json } = await callProxyV1< + OpenAIChatCompletionCreateParams, + OpenAIChatCompletionChunk + >({ + body: { + model: "o3-mini-2025-01-31", + reasoning_effort: null, + stream: false, + messages: [ + { + role: "user", + content: "How many rs in 'ferrocarril'", + }, + { + role: "assistant", + content: "There are 4 letter 'r's in the word \"ferrocarril\".", + refusal: null, + reasoning: [ + { + id: "", + content: + "To count the number of 'r's in the word 'ferrocarril', I'll just go through the word letter by letter.\n\n'ferrocarril' has the following letters:\nf-e-r-r-o-c-a-r-r-i-l\n\nLooking at each letter:\n- 'f': not an 'r'\n- 'e': not an 'r'\n- 'r': This is an 'r', so that's 1.\n- 'r': This is an 'r', so that's 2.\n- 'o': not an 'r'\n- 'c': not an 'r'\n- 'a': not an 'r'\n- 'r': This is an 'r', so that's 3.\n- 'r': This is an 'r', so that's 4.\n- 'i': not an 'r'\n- 'l': not an 'r'\n\nSo there are 4 'r's in the word 
'ferrocarril'.", + }, + ], + }, + { + role: "user", + content: "How many e in what you said?", + }, + ], + }, + }); + + expect(json()).toEqual({ + choices: [ { - role: "user", - content: "How many e in what you said?", + finish_reason: "stop", + index: 0, + message: { + content: expect.any(String), + // as of writing, openai does not provide this detail + // reasoning: [], + annotations: [], + refusal: null, + role: "assistant", + }, }, ], - }, - }); - - expect(json()).toEqual({ - choices: [ - { - finish_reason: "stop", - index: 0, - message: { - content: expect.any(String), - // as of writing, openai does not provide this detail - // reasoning: [], - annotations: [], - refusal: null, - role: "assistant", + created: expect.any(Number), + id: expect.any(String), + model: "o3-mini-2025-01-31", + object: "chat.completion", + service_tier: expect.any(String), + system_fingerprint: expect.any(String), + usage: { + completion_tokens: expect.any(Number), + prompt_tokens: expect.any(Number), + total_tokens: expect.any(Number), + completion_tokens_details: { + accepted_prediction_tokens: expect.any(Number), + audio_tokens: expect.any(Number), + reasoning_tokens: expect.any(Number), + rejected_prediction_tokens: expect.any(Number), }, - }, - ], - created: expect.any(Number), - id: expect.any(String), - model: "o3-mini-2025-01-31", - object: "chat.completion", - service_tier: expect.any(String), - system_fingerprint: expect.any(String), - usage: { - completion_tokens: expect.any(Number), - prompt_tokens: expect.any(Number), - total_tokens: expect.any(Number), - completion_tokens_details: { - accepted_prediction_tokens: expect.any(Number), - audio_tokens: expect.any(Number), - reasoning_tokens: expect.any(Number), - rejected_prediction_tokens: expect.any(Number), - }, - prompt_tokens_details: { - audio_tokens: expect.any(Number), - cached_tokens: expect.any(Number), - }, - }, - }); - - expect(calls).toEqual([ - { - request: { - body: { - reasoning_effort: null, // let openai decide what is the default - messages: [ - { - content: "How many rs in 'ferrocarril'", - role: "user", - }, - { - content: "There are 4 letter 'r's in the word \"ferrocarril\".", - refusal: null, - role: "assistant", - }, - { - content: "How many e in what you said?", - role: "user", - }, - ], - model: "o3-mini-2025-01-31", - stream: false, + prompt_tokens_details: { + audio_tokens: expect.any(Number), + cached_tokens: expect.any(Number), }, - method: "POST", - url: "https://api.openai.com/v1/chat/completions", }, - response: { - body: { - choices: [ - { - finish_reason: "stop", - index: 0, - message: { - annotations: [], - content: expect.any(String), + }); + + expect(calls).toEqual([ + { + request: { + body: { + reasoning_effort: null, // let openai decide what is the default + messages: [ + { + content: "How many rs in 'ferrocarril'", + role: "user", + }, + { + content: "There are 4 letter 'r's in the word \"ferrocarril\".", refusal: null, role: "assistant", }, + { + content: "How many e in what you said?", + role: "user", + }, + ], + model: "o3-mini-2025-01-31", + stream: false, + }, + method: "POST", + url: "https://api.openai.com/v1/chat/completions", + }, + response: { + body: { + choices: [ + { + finish_reason: "stop", + index: 0, + message: { + annotations: [], + content: expect.any(String), + refusal: null, + role: "assistant", + }, + }, + ], + created: expect.any(Number), + id: expect.any(String), + model: "o3-mini-2025-01-31", + object: "chat.completion", + service_tier: "default", + system_fingerprint: 
expect.any(String), + usage: { + completion_tokens: expect.any(Number), + completion_tokens_details: { + accepted_prediction_tokens: expect.any(Number), + audio_tokens: expect.any(Number), + reasoning_tokens: expect.any(Number), + rejected_prediction_tokens: expect.any(Number), + }, + prompt_tokens: expect.any(Number), + prompt_tokens_details: { + audio_tokens: expect.any(Number), + cached_tokens: expect.any(Number), + }, + total_tokens: expect.any(Number), }, - ], - created: expect.any(Number), - id: expect.any(String), - model: "o3-mini-2025-01-31", - object: "chat.completion", - service_tier: "default", - system_fingerprint: expect.any(String), - usage: { - completion_tokens: expect.any(Number), - completion_tokens_details: { - accepted_prediction_tokens: expect.any(Number), - audio_tokens: expect.any(Number), - reasoning_tokens: expect.any(Number), - rejected_prediction_tokens: expect.any(Number), - }, - prompt_tokens: expect.any(Number), - prompt_tokens_details: { - audio_tokens: expect.any(Number), - cached_tokens: expect.any(Number), - }, - total_tokens: expect.any(Number), }, + status: 200, }, - status: 200, }, - }, - ]); + ]); + }); }); From 3562ca7a0d6a51087897e504116e906828ff1997 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Fri, 16 May 2025 11:57:40 -0500 Subject: [PATCH 31/51] fix disable + no max tokens.. add coverage --- packages/proxy/schema/index.test.ts | 3 - packages/proxy/schema/index.ts | 6 +- .../proxy/src/providers/anthropic.test.ts | 59 +++++++++++++++++++ packages/proxy/src/providers/google.test.ts | 59 +++++++++++++++++++ packages/proxy/src/proxy.ts | 12 ++-- 5 files changed, 130 insertions(+), 9 deletions(-) diff --git a/packages/proxy/schema/index.test.ts b/packages/proxy/schema/index.test.ts index 41445dee..f51b5a8f 100644 --- a/packages/proxy/schema/index.test.ts +++ b/packages/proxy/schema/index.test.ts @@ -139,7 +139,6 @@ const examples: Record< }, ], temperature: 0, - max_tokens: 1000, reasoning_effort: undefined, stream: false, }, @@ -158,7 +157,6 @@ const examples: Record< }, ], temperature: 0, - maxOutputTokens: 1000, thinkingConfig: { thinkingBudget: 0, }, @@ -177,7 +175,6 @@ const examples: Record< content: "Explain how to solve 2x + 4 = 12 step by step.", }, ], - max_tokens: 1000, temperature: 0, stream: false, thinking: { diff --git a/packages/proxy/schema/index.ts b/packages/proxy/schema/index.ts index 4d933766..56936f70 100644 --- a/packages/proxy/schema/index.ts +++ b/packages/proxy/schema/index.ts @@ -77,9 +77,10 @@ export const modelParamMappers: { ...params }) => { if (!reasoning_effort || reasoning_effort === "none") { + const maxTokens = max_completion_tokens || max_tokens; return { ...params, - max_tokens: max_completion_tokens || max_tokens, + ...(maxTokens !== undefined ? { max_tokens: maxTokens } : undefined), // required by anthropic temperature, // an empty/unset means we should disable thinking: { @@ -117,9 +118,10 @@ export const modelParamMappers: { }) => { // TODO: update types to accept an explicit reasoning_effort if (!reasoning_effort || reasoning_effort === "none") { + const maxTokens = max_completion_tokens || max_tokens; return { ...params, - maxOutputTokens: max_completion_tokens || max_tokens, + ...(maxTokens !== undefined ? 
{ max_tokens: maxTokens } : undefined), thinkingConfig: { thinkingBudget: 0, }, diff --git a/packages/proxy/src/providers/anthropic.test.ts b/packages/proxy/src/providers/anthropic.test.ts index 986f16b3..2ca15dd8 100644 --- a/packages/proxy/src/providers/anthropic.test.ts +++ b/packages/proxy/src/providers/anthropic.test.ts @@ -199,3 +199,62 @@ it("should accept and return reasoning/thinking params and detail non-streaming" }, }); }); + +it("should disable reasoning/thinking params non-streaming", async () => { + const { json } = await callProxyV1< + OpenAIChatCompletionCreateParams, + OpenAIChatCompletionChunk + >({ + body: { + model: "claude-3-7-sonnet-20250219", + reasoning_effort: null, + stream: false, + messages: [ + { + role: "user", + content: "How many rs in 'ferrocarril'", + }, + { + role: "assistant", + content: "There are 4 letter 'r's in the word \"ferrocarril\".", + refusal: null, + reasoning: [ + { + id: "ErUBCkYIAxgCIkDWT/7OwDfkVSgdtjIwGqUpzIHQXkiBQQpIqzh6WnHHoGxN1ilJxIlnJQNarUI4Jo/3WWrmRnnqOU3LtAakLr4REgwvY1G5jTSbLHWOo4caDKNco+CyDfNT56iXBCIwrNSFdvNJNsBaa0hpbTZ6N4Q4z4/6l+gu8hniKnftBhS+IuzcncsuJqKxWKs/EVyjKh3tvH/eDeYovKskosVSO5x64iebuze1S8JbavI3UBgC", + content: + "To count the number of 'r's in the word 'ferrocarril', I'll just go through the word letter by letter.\n\n'ferrocarril' has the following letters:\nf-e-r-r-o-c-a-r-r-i-l\n\nLooking at each letter:\n- 'f': not an 'r'\n- 'e': not an 'r'\n- 'r': This is an 'r', so that's 1.\n- 'r': This is an 'r', so that's 2.\n- 'o': not an 'r'\n- 'c': not an 'r'\n- 'a': not an 'r'\n- 'r': This is an 'r', so that's 3.\n- 'r': This is an 'r', so that's 4.\n- 'i': not an 'r'\n- 'l': not an 'r'\n\nSo there are 4 'r's in the word 'ferrocarril'.", + }, + ], + }, + { + role: "user", + content: "How many e in what you said?", + }, + ], + }, + }); + + expect(json()).toEqual({ + choices: [ + { + finish_reason: "stop", + index: 0, + logprobs: null, + message: { + content: expect.any(String), + refusal: null, + role: "assistant", + }, + }, + ], + created: expect.any(Number), + id: expect.any(String), + model: "claude-3-7-sonnet-20250219", + object: "chat.completion", + usage: { + completion_tokens: expect.any(Number), + prompt_tokens: expect.any(Number), + total_tokens: expect.any(Number), + }, + }); +}); diff --git a/packages/proxy/src/providers/google.test.ts b/packages/proxy/src/providers/google.test.ts index 58bd5176..ca8a9918 100644 --- a/packages/proxy/src/providers/google.test.ts +++ b/packages/proxy/src/providers/google.test.ts @@ -126,6 +126,65 @@ for (const model of [ }, }); }); + + it("should disable reasoning/thinking non-streaming", async () => { + const { json } = await callProxyV1< + OpenAIChatCompletionCreateParams, + OpenAIChatCompletionChunk + >({ + body: { + model, + reasoning_effort: null, + stream: false, + messages: [ + { + role: "user", + content: "How many rs in 'ferrocarril'", + }, + { + role: "assistant", + content: "There are 4 letter 'r's in the word \"ferrocarril\".", + refusal: null, + reasoning: [ + { + id: "", + content: + "To count the number of 'r's in the word 'ferrocarril', I'll just go through the word letter by letter.\n\n'ferrocarril' has the following letters:\nf-e-r-r-o-c-a-r-r-i-l\n\nLooking at each letter:\n- 'f': not an 'r'\n- 'e': not an 'r'\n- 'r': This is an 'r', so that's 1.\n- 'r': This is an 'r', so that's 2.\n- 'o': not an 'r'\n- 'c': not an 'r'\n- 'a': not an 'r'\n- 'r': This is an 'r', so that's 3.\n- 'r': This is an 'r', so that's 4.\n- 'i': not an 'r'\n- 'l': not an 'r'\n\nSo there are 4 
'r's in the word 'ferrocarril'.", + }, + ], + }, + { + role: "user", + content: "How many e in what you said?", + }, + ], + }, + }); + + expect(json()).toEqual({ + choices: [ + { + finish_reason: "stop", + index: 0, + logprobs: null, + message: { + content: expect.any(String), + refusal: null, + role: "assistant", + }, + }, + ], + created: expect.any(Number), + id: expect.any(String), + model, + object: "chat.completion", + usage: { + completion_tokens: expect.any(Number), + prompt_tokens: expect.any(Number), + total_tokens: expect.any(Number), + }, + }); + }); }); } diff --git a/packages/proxy/src/proxy.ts b/packages/proxy/src/proxy.ts index c65460f9..c858db58 100644 --- a/packages/proxy/src/proxy.ts +++ b/packages/proxy/src/proxy.ts @@ -2112,10 +2112,14 @@ async function fetchAnthropicChatCompletions({ } messages = flattenAnthropicMessages(messages); - const params: Record = { - max_tokens: 4096, // Required param - ...translateParams("anthropic", oaiParams), - }; + const params: Record = translateParams( + "anthropic", + oaiParams, + ); + + if (!params.max_tokens) { + params.max_tokens = 4096; // Required param + } const stop = z .union([z.string(), z.array(z.string())]) From 517ecf023cde3da083d754741dc9b18b26c47ede Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Fri, 16 May 2025 14:14:18 -0500 Subject: [PATCH 32/51] add type overrides users can now just `import "@braintrust/proxy/types";` to fix our overrides to openai. --- packages/proxy/types/index.ts | 1 + packages/proxy/types/openai.ts | 17 +++++++++++++++-- 2 files changed, 16 insertions(+), 2 deletions(-) diff --git a/packages/proxy/types/index.ts b/packages/proxy/types/index.ts index 09c41947..a7879486 100644 --- a/packages/proxy/types/index.ts +++ b/packages/proxy/types/index.ts @@ -1 +1,2 @@ export * from "./openai"; +export type * from "./openai"; diff --git a/packages/proxy/types/openai.ts b/packages/proxy/types/openai.ts index 0c975db6..07b29bd1 100644 --- a/packages/proxy/types/openai.ts +++ b/packages/proxy/types/openai.ts @@ -1,6 +1,5 @@ // TODO: move from core -import { chatCompletionMessageParamSchema } from "@braintrust/core/typespecs/dist"; -export { chatCompletionMessageParamSchema } from "@braintrust/core/typespecs/dist"; +import { chatCompletionMessageParamSchema } from "@braintrust/core/typespecs"; import { z } from "zod"; @@ -57,3 +56,17 @@ export type OpenAIChatCompletionChunk = ChatCompletionChunk & { export type OpenAIChatCompletionCreateParams = ChatCompletionCreateParams & { messages: Array; }; + +// overrides +import "openai/resources/chat/completions"; + +declare module "openai/resources/chat/completions" { + interface ChatCompletionAssistantMessageParam { + reasoning?: OpenAIReasoning[]; + } + namespace ChatCompletion { + interface Choice { + reasoning?: OpenAIReasoning[]; + } + } +} From fdb4625cf309afc98786a0b674442ff6f792c648 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Mon, 19 May 2025 19:39:33 -0500 Subject: [PATCH 33/51] add reasoning budget to model spec and set defaults --- packages/proxy/schema/index.ts | 6 ++++-- packages/proxy/schema/model_list.json | 15 ++++++++++++++ packages/proxy/schema/models.ts | 28 ++++++++++++++++++++++----- 3 files changed, 42 insertions(+), 7 deletions(-) diff --git a/packages/proxy/schema/index.ts b/packages/proxy/schema/index.ts index 56936f70..105c26c8 100644 --- a/packages/proxy/schema/index.ts +++ b/packages/proxy/schema/index.ts @@ -184,7 +184,8 @@ export const defaultModelParamSettings: { top_p: 0.7, top_k: 5, use_cache: true, - reasoning_effort: 
undefined, + reasoning_enabled: false, + reasoning_budget: undefined, }, google: { temperature: undefined, @@ -192,7 +193,8 @@ export const defaultModelParamSettings: { topP: 0.7, topK: 5, use_cache: true, - reasoning_effort: undefined, + reasoning_enabled: false, + reasoning_budget: undefined, }, js: {}, window: { diff --git a/packages/proxy/schema/model_list.json b/packages/proxy/schema/model_list.json index 43880276..6402e580 100644 --- a/packages/proxy/schema/model_list.json +++ b/packages/proxy/schema/model_list.json @@ -510,6 +510,8 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning_budget": true, + "reasoning": true, "input_cost_per_mil_tokens": 3, "output_cost_per_mil_tokens": 15, "input_cache_read_cost_per_mil_tokens": 0.3, @@ -520,6 +522,8 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning_budget": true, + "reasoning": true, "input_cost_per_mil_tokens": 3, "output_cost_per_mil_tokens": 15, "input_cache_read_cost_per_mil_tokens": 0.3, @@ -1861,6 +1865,7 @@ "flavor": "chat", "multimodal": true, "reasoning": true, + "reasoning_budget": true, "input_cost_per_mil_tokens": 0.15, "output_cost_per_mil_tokens": 0.6, "displayName": "Gemini 2.5 Flash Preview", @@ -1870,6 +1875,7 @@ "format": "google", "flavor": "chat", "multimodal": true, + "reasoning_budget": true, "reasoning": true, "input_cost_per_mil_tokens": 1.25, "output_cost_per_mil_tokens": 10, @@ -1880,6 +1886,7 @@ "format": "google", "flavor": "chat", "multimodal": true, + "reasoning_budget": true, "reasoning": true, "input_cost_per_mil_tokens": 1.25, "output_cost_per_mil_tokens": 10, @@ -1909,6 +1916,7 @@ "format": "google", "flavor": "chat", "multimodal": true, + "reasoning_budget": true, "reasoning": true, "deprecated": true, "input_cost_per_mil_tokens": 0, @@ -2090,6 +2098,7 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning_budget": true, "reasoning": true, "input_cost_per_mil_tokens": 3, "output_cost_per_mil_tokens": 15, @@ -2101,6 +2110,7 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning_budget": true, "reasoning": true, "input_cost_per_mil_tokens": 3, "output_cost_per_mil_tokens": 15, @@ -2593,6 +2603,7 @@ "multimodal": true, "input_cost_per_mil_tokens": 3, "output_cost_per_mil_tokens": 15, + "reasoning_budget": true, "reasoning": true, "displayName": "Claude 3.7 Sonnet" }, @@ -2602,6 +2613,7 @@ "multimodal": true, "input_cost_per_mil_tokens": 3, "output_cost_per_mil_tokens": 15, + "reasoning_budget": true, "reasoning": true, "experimental": true, "parent": "publishers/anthropic/models/claude-3-7-sonnet" @@ -2780,6 +2792,9 @@ "databricks-claude-3-7-sonnet": { "format": "openai", "flavor": "chat", + "multimodal": true, + "reasoning_budget": true, + "reasoning": true, "displayName": "Claude 3.7 Sonnet" }, "databricks-meta-llama-3-3-70b-instruct": { diff --git a/packages/proxy/schema/models.ts b/packages/proxy/schema/models.ts index 879a37c0..836c7d0d 100644 --- a/packages/proxy/schema/models.ts +++ b/packages/proxy/schema/models.ts @@ -45,12 +45,30 @@ export const ModelSchema = z.object({ output_cost_per_mil_tokens: z.number().nullish(), input_cache_read_cost_per_mil_tokens: z.number().nullish(), input_cache_write_cost_per_mil_tokens: z.number().nullish(), - displayName: z.string().nullish(), + displayName: z + .string() + .nullish() + .describe("The model is the latest production/stable"), o1_like: z.boolean().nullish().describe('DEPRECATED use "reasoning" instead'), - reasoning: z.boolean().nullish(), - experimental: 
z.boolean().nullish(), - deprecated: z.boolean().nullish(), - parent: z.string().nullish(), + reasoning: z + .boolean() + .nullish() + .describe("The model supports reasoning/thinking tokens"), + reasoning_budget: z + .boolean() + .nullish() + .describe("The model supports reasoning/thinking budgets"), + experimental: z + .boolean() + .nullish() + .describe("The model is not allowed production load or API is unstable."), + deprecated: z + .boolean() + .nullish() + .describe( + "Discourage the use of the model (we will hide the model in the UI).", + ), + parent: z.string().nullish().describe("The model was replaced this model."), endpoint_types: z.array(z.enum(ModelEndpointType)).nullish(), locations: z.array(z.string()).nullish(), }); From bd0b39188aa4fe613ef5e48ef9e56a9016fcaf1b Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Mon, 19 May 2025 21:39:35 -0500 Subject: [PATCH 34/51] let's get reasoning_enabled and reasoning_budget translated it's getting hectic so let's go a different approach in the next commit --- packages/proxy/schema/index.test.ts | 65 ++++++++++++++++++++++++++++- packages/proxy/schema/index.ts | 47 +++++++++++++++++++-- packages/proxy/types/openai.ts | 4 ++ 3 files changed, 111 insertions(+), 5 deletions(-) diff --git a/packages/proxy/schema/index.test.ts b/packages/proxy/schema/index.test.ts index f51b5a8f..a9561cf7 100644 --- a/packages/proxy/schema/index.test.ts +++ b/packages/proxy/schema/index.test.ts @@ -63,7 +63,7 @@ const examples: Record< top_p: 0.9, }, }, - reasoning: { + reasoning_effort: { openai: { model: "gpt-4o", messages: [ @@ -139,7 +139,8 @@ const examples: Record< }, ], temperature: 0, - reasoning_effort: undefined, + reasoning_enabled: false, + reasoning_budget: 1024, stream: false, }, google: { @@ -182,6 +183,66 @@ const examples: Record< }, }, }, + "reasoning budget": { + openai: { + model: "gpt-4o", + messages: [ + { + role: "system", + content: "You are a detailed reasoning assistant.", + }, + { + role: "user", + content: "Explain how to solve 2x + 4 = 12 step by step.", + }, + ], + temperature: 0, + reasoning_enabled: true, + reasoning_budget: 1024, + stream: false, + }, + google: { + model: "gpt-4o", + // notice how this is still an intermediate param + // google's api expects a content instead of messages, for example + messages: [ + { + role: "system", + content: "You are a detailed reasoning assistant.", + }, + { + role: "user", + content: "Explain how to solve 2x + 4 = 12 step by step.", + }, + ], + temperature: 0, + thinkingConfig: { + thinkingBudget: 1024, + includeThoughts: true, + }, + stream: false, + }, + anthropic: { + model: "gpt-4o", + messages: [ + { + // @ts-expect-error -- we use the role to later manipulate the request + role: "system", + content: "You are a detailed reasoning assistant.", + }, + { + role: "user", + content: "Explain how to solve 2x + 4 = 12 step by step.", + }, + ], + temperature: 1, + stream: false, + thinking: { + budget_tokens: 1024, + type: "enabled", + }, + }, + }, }; Object.entries(examples).forEach(([example, { openai, ...providers }]) => { diff --git a/packages/proxy/schema/index.ts b/packages/proxy/schema/index.ts index 105c26c8..8a8c7786 100644 --- a/packages/proxy/schema/index.ts +++ b/packages/proxy/schema/index.ts @@ -76,7 +76,7 @@ export const modelParamMappers: { temperature, ...params }) => { - if (!reasoning_effort || reasoning_effort === "none") { + if (!reasoning_effort) { const maxTokens = max_completion_tokens || max_tokens; return { ...params, @@ -108,6 +108,24 @@ export const 
modelParamMappers: { }, }; }, + reasoning_enabled: ({ reasoning_enabled, thinking, ...params }) => { + return { + ...params, + thinking: { + ...thinking, + enabled: true, + }, + }; + }, + reasoning_budget: ({ reasoning_budget, thinking, ...params }) => { + return { + ...params, + thinking: { + ...thinking, + budget_tokens: reasoning_budget, + }, + }; + }, }, google: { reasoning_effort: ({ @@ -116,8 +134,7 @@ export const modelParamMappers: { max_completion_tokens, ...params }) => { - // TODO: update types to accept an explicit reasoning_effort - if (!reasoning_effort || reasoning_effort === "none") { + if (!reasoning_effort) { const maxTokens = max_completion_tokens || max_tokens; return { ...params, @@ -144,6 +161,30 @@ export const modelParamMappers: { maxOutputTokens: maxTokens, }; }, + reasoning_enabled: ({ reasoning_enabled, thinkingConfig, ...params }) => { + return { + ...params, + ...(reasoning_enabled && { + thinkingConfig: { + ...thinkingConfig, + includeThoughts: true, + }, + }), + }; + }, + reasoning_budget: ({ reasoning_budget, thinkingConfig, ...params }) => { + const enabled = !!reasoning_budget && reasoning_budget > 0; + return { + ...params, + ...(enabled && { + thinkingConfig: { + ...thinkingConfig, + includeThoughts: true, + thinkingBudget: reasoning_budget, + }, + }), + }; + }, }, }; diff --git a/packages/proxy/types/openai.ts b/packages/proxy/types/openai.ts index 07b29bd1..c3ed7cc5 100644 --- a/packages/proxy/types/openai.ts +++ b/packages/proxy/types/openai.ts @@ -61,6 +61,10 @@ export type OpenAIChatCompletionCreateParams = ChatCompletionCreateParams & { import "openai/resources/chat/completions"; declare module "openai/resources/chat/completions" { + interface ChatCompletionCreateParamsBase { + reasoning_enabled?: boolean; + reasoning_budget?: number; + } interface ChatCompletionAssistantMessageParam { reasoning?: OpenAIReasoning[]; } From 710677490b6ec1da3192bb9e850b434318cd145c Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Tue, 20 May 2025 01:01:03 -0500 Subject: [PATCH 35/51] refactor mapping to handle the whole mapping makes managing multiple params that affect each other more managable ideally future mapping happens in a single spot like this and we just guarantee the output is the actual params we would pass into the call to the ai provider --- packages/proxy/schema/index.test.ts | 12 +- packages/proxy/schema/index.ts | 155 ++---------------- .../proxy/src/providers/anthropic.test.ts | 2 +- packages/proxy/src/providers/anthropic.ts | 86 ++++++++++ packages/proxy/src/providers/google.test.ts | 3 +- packages/proxy/src/providers/google.ts | 87 ++++++++++ packages/proxy/types/openai.ts | 2 + packages/proxy/utils/index.ts | 12 ++ packages/proxy/utils/openai.ts | 22 ++- 9 files changed, 234 insertions(+), 147 deletions(-) diff --git a/packages/proxy/schema/index.test.ts b/packages/proxy/schema/index.test.ts index a9561cf7..149673d8 100644 --- a/packages/proxy/schema/index.test.ts +++ b/packages/proxy/schema/index.test.ts @@ -28,6 +28,7 @@ const examples: Record< stream: true, }, google: { + maxOutputTokens: 1500, max_tokens: 1500, messages: [ { @@ -97,10 +98,11 @@ const examples: Record< ], temperature: 0, thinkingConfig: { - thinkingBudget: 4096, + thinkingBudget: 800, includeThoughts: true, }, - maxOutputTokens: 5120, + maxOutputTokens: 1000, + max_tokens: 1000, stream: false, }, anthropic: { @@ -116,11 +118,11 @@ const examples: Record< content: "Explain how to solve 2x + 4 = 12 step by step.", }, ], - max_tokens: 5120, temperature: 1, stream: false, + 
max_tokens: 1500, thinking: { - budget_tokens: 4096, + budget_tokens: 1024, type: "enabled", }, }, @@ -178,6 +180,7 @@ const examples: Record< ], temperature: 0, stream: false, + max_tokens: 1024, thinking: { type: "disabled", }, @@ -237,6 +240,7 @@ const examples: Record< ], temperature: 1, stream: false, + max_tokens: 1536, thinking: { budget_tokens: 1024, type: "enabled", diff --git a/packages/proxy/schema/index.ts b/packages/proxy/schema/index.ts index 8a8c7786..3b159985 100644 --- a/packages/proxy/schema/index.ts +++ b/packages/proxy/schema/index.ts @@ -6,6 +6,9 @@ import type { ModelParams, } from "@braintrust/core/typespecs"; import { AvailableModels, ModelFormat, ModelEndpointType } from "./models"; +import { openaiParamsToAnthropicMesssageParams } from "@lib/providers/anthropic"; +import { OpenAIChatCompletionCreateParams } from "@types"; +import { openaiParamsToGeminiMessageParams } from "@lib/providers/google"; export * from "./secrets"; export * from "./models"; @@ -53,139 +56,11 @@ export const modelParamToModelParam: { stop: null, }; -const effortToBudgetMultiplier = { - low: 0.2, - medium: 0.5, - high: 0.8, -} as const; - -const getBudgetMultiplier = (effort: keyof typeof effortToBudgetMultiplier) => { - return effortToBudgetMultiplier[effort] || effortToBudgetMultiplier.low; -}; - -export const modelParamMappers: { - [name in ModelFormat]?: { - [param: string]: (params: any) => Record; - }; -} = { - anthropic: { - reasoning_effort: ({ - reasoning_effort, - max_tokens, - max_completion_tokens, - temperature, - ...params - }) => { - if (!reasoning_effort) { - const maxTokens = max_completion_tokens || max_tokens; - return { - ...params, - ...(maxTokens !== undefined ? { max_tokens: maxTokens } : undefined), // required by anthropic - temperature, - // an empty/unset means we should disable - thinking: { - type: "disabled", - }, - }; - } - - // Max tokens are inclusive of budget. If the max tokens are too low (below 1024), then the API will raise an exception. - const maxTokens = Math.max( - max_completion_tokens || max_tokens || 0, - 1024 / effortToBudgetMultiplier.low, - ); - - const budget = getBudgetMultiplier(reasoning_effort || "low") * maxTokens; - - return { - ...params, - max_tokens: maxTokens, - // must be set when using thinking - temperature: 1, - thinking: { - budget_tokens: budget, - type: "enabled", - }, - }; - }, - reasoning_enabled: ({ reasoning_enabled, thinking, ...params }) => { - return { - ...params, - thinking: { - ...thinking, - enabled: true, - }, - }; - }, - reasoning_budget: ({ reasoning_budget, thinking, ...params }) => { - return { - ...params, - thinking: { - ...thinking, - budget_tokens: reasoning_budget, - }, - }; - }, - }, - google: { - reasoning_effort: ({ - reasoning_effort, - max_tokens, - max_completion_tokens, - ...params - }) => { - if (!reasoning_effort) { - const maxTokens = max_completion_tokens || max_tokens; - return { - ...params, - ...(maxTokens !== undefined ? 
{ max_tokens: maxTokens } : undefined), - thinkingConfig: { - thinkingBudget: 0, - }, - }; - } - - const maxTokens = Math.max( - max_completion_tokens || max_tokens || 0, - 1024 / effortToBudgetMultiplier.low, - ); - - const budget = getBudgetMultiplier(reasoning_effort || "low") * maxTokens; - - return { - ...params, - thinkingConfig: { - thinkingBudget: budget, - includeThoughts: true, - }, - maxOutputTokens: maxTokens, - }; - }, - reasoning_enabled: ({ reasoning_enabled, thinkingConfig, ...params }) => { - return { - ...params, - ...(reasoning_enabled && { - thinkingConfig: { - ...thinkingConfig, - includeThoughts: true, - }, - }), - }; - }, - reasoning_budget: ({ reasoning_budget, thinkingConfig, ...params }) => { - const enabled = !!reasoning_budget && reasoning_budget > 0; - return { - ...params, - ...(enabled && { - thinkingConfig: { - ...thinkingConfig, - includeThoughts: true, - thinkingBudget: reasoning_budget, - }, - }), - }; - }, - }, +const paramMappers: Partial< + Record object> +> = { + anthropic: openaiParamsToAnthropicMesssageParams, + google: openaiParamsToGeminiMessageParams, }; export const sliderSpecs: { @@ -603,12 +478,14 @@ export function translateParams( translatedParams[hasDefaultParam ? translatedKey : k] = safeValue; } - for (const [k, _] of Object.entries(params || {})) { - const mapper = modelParamMappers[toProvider]?.[k]; - if (mapper) { - // not ideal.. we should pass the original params to the mappers, but simple params mapping may overwrite complex mappers - translatedParams = mapper(translatedParams); - } + // ideally we should short circuit and just have a master mapper but this avoids scope + // for now + const mapper = paramMappers[toProvider]; + if (mapper) { + translatedParams = mapper(translatedParams as any) as Record< + string, + unknown + >; } return translatedParams; diff --git a/packages/proxy/src/providers/anthropic.test.ts b/packages/proxy/src/providers/anthropic.test.ts index 2ca15dd8..0fd0b715 100644 --- a/packages/proxy/src/providers/anthropic.test.ts +++ b/packages/proxy/src/providers/anthropic.test.ts @@ -207,7 +207,7 @@ it("should disable reasoning/thinking params non-streaming", async () => { >({ body: { model: "claude-3-7-sonnet-20250219", - reasoning_effort: null, + reasoning_enabled: false, stream: false, messages: [ { diff --git a/packages/proxy/src/providers/anthropic.ts b/packages/proxy/src/providers/anthropic.ts index 0d701c41..92648353 100644 --- a/packages/proxy/src/providers/anthropic.ts +++ b/packages/proxy/src/providers/anthropic.ts @@ -19,6 +19,8 @@ import { DocumentBlockParam, MessageCreateParamsBase, Base64ImageSource, + MessageCreateParams, + ThinkingConfigParam, } from "@anthropic-ai/sdk/resources/messages"; import { OpenAIChatCompletion, @@ -27,6 +29,8 @@ import { OpenAIChatCompletionChunkChoiceDelta, OpenAIChatCompletionCreateParams, } from "@types"; +import { getBudgetMultiplier } from "utils"; +import { cleanOpenAIParams } from "utils/openai"; /* Example events: @@ -618,3 +622,85 @@ export function anthropicToolChoiceToOpenAIToolChoice( return { type: "tool", name: toolChoice.function.name }; } } + +export function openaiParamsToAnthropicMesssageParams( + openai: OpenAIChatCompletionCreateParams, +): MessageCreateParams { + const anthropic: MessageCreateParams = { + // TODO: we depend on translateParams to get us half way there + ...(cleanOpenAIParams(openai) as any), + }; + + const maxTokens = + Math.max(openai.max_completion_tokens || 0, openai.max_tokens || 0) || 1024; + + anthropic.max_tokens = maxTokens; + + 
if ( + openai.reasoning_effort !== undefined || + openai.reasoning_budget !== undefined || + openai.reasoning_enabled !== undefined + ) { + anthropic.thinking = getAnthropicThinkingParams({ + ...openai, + max_completion_tokens: maxTokens, + }); + + if (anthropic.thinking.type === "enabled") { + // must be 1 when thinking + anthropic.temperature = 1; + + // avoid anthropic APIs complaining about this + // need to make sure max_tokens are greater than budget_tokens + const effectiveMax = Math.max( + anthropic.max_tokens, + anthropic.thinking.budget_tokens, + ); + if (effectiveMax === anthropic.thinking.budget_tokens) { + anthropic.max_tokens = Math.floor(anthropic.max_tokens * 1.5); + } + } + } + + return anthropic; +} + +const getAnthropicThinkingParams = ( + openai: OpenAIChatCompletionCreateParams & { + max_completion_tokens: Required; + }, +): ThinkingConfigParam => { + if (openai.reasoning_enabled === false || openai.reasoning_budget === 0) { + return { type: "disabled" }; + } + + return { + type: "enabled", + budget_tokens: getThinkingBudget(openai), + }; +}; + +const getThinkingBudget = ( + openai: OpenAIChatCompletionCreateParams & { + max_completion_tokens: Required; + }, +): number => { + if (openai.reasoning_budget !== undefined) { + return openai.reasoning_budget; + } + + let budget = 1024; + + if (openai.reasoning_effort !== undefined) { + // budget must be at least 1024 + budget = Math.max( + Math.floor( + getBudgetMultiplier(openai.reasoning_effort || "low") * + openai.max_completion_tokens, + ), + 1024, + ); + } + + return budget; +}; diff --git a/packages/proxy/src/providers/google.test.ts b/packages/proxy/src/providers/google.test.ts index ca8a9918..ac10efb2 100644 --- a/packages/proxy/src/providers/google.test.ts +++ b/packages/proxy/src/providers/google.test.ts @@ -134,7 +134,8 @@ for (const model of [ >({ body: { model, - reasoning_effort: null, + reasoning_enabled: true, + reasoning_budget: 0, stream: false, messages: [ { diff --git a/packages/proxy/src/providers/google.ts b/packages/proxy/src/providers/google.ts index 42dc8372..f8f14c53 100644 --- a/packages/proxy/src/providers/google.ts +++ b/packages/proxy/src/providers/google.ts @@ -3,16 +3,23 @@ import { Message } from "@braintrust/core/typespecs"; import { Content, FinishReason, + GenerateContentConfig, + GenerateContentParameters, GenerateContentResponse, Part, + ThinkingConfig, } from "@google/genai"; import { getTimestampInSeconds } from ".."; import { OpenAIChatCompletion, OpenAIChatCompletionChoice, OpenAIChatCompletionChunk, + OpenAIChatCompletionCreateParams, } from "@types"; import { convertMediaToBase64 } from "./util"; +import { MessageParam } from "@anthropic-ai/sdk/resources"; +import { getBudgetMultiplier } from "utils"; +import { cleanOpenAIParams } from "utils/openai"; async function makeGoogleMediaBlock(media: string): Promise { const { media_type: mimeType, data } = await convertMediaToBase64({ @@ -303,3 +310,83 @@ export const OpenAIParamsToGoogleParams: { presence_penalty: null, tool_choice: null, }; + +// because GenAI sdk doesn't provide a convenient API equivalent type +type GeminiGenerateContentParams = Omit & + Omit< + GenerateContentConfig, + | "httpOptions" + | "abortSignal" + | "routingConfig" + | "modelSelectionConfig" + | "labels" + >; + +export const openaiParamsToGeminiMessageParams = ( + openai: OpenAIChatCompletionCreateParams, +): GeminiGenerateContentParams => { + const gemini: GeminiGenerateContentParams = { + // TODO: we depend on translateParams to get us half way there + 
...(cleanOpenAIParams(openai) as any), + }; + + const maxTokens = + openai.max_completion_tokens !== undefined || + openai.max_tokens !== undefined + ? Math.max(openai.max_completion_tokens || 0, openai.max_tokens || 0) || + 1024 + : undefined; + + gemini.maxOutputTokens = maxTokens; + + if ( + openai.reasoning_effort !== undefined || + openai.reasoning_budget !== undefined || + openai.reasoning_enabled !== undefined + ) { + gemini.thinkingConfig = getGeminiThinkingParams({ + ...openai, + max_completion_tokens: maxTokens, + }); + } + + return gemini; +}; + +const getGeminiThinkingParams = ( + openai: OpenAIChatCompletionCreateParams & { + max_completion_tokens?: Required; + }, +): ThinkingConfig => { + if (openai.reasoning_enabled === false || openai.reasoning_budget === 0) { + return { + thinkingBudget: 0, + }; + } + + return { + includeThoughts: true, + thinkingBudget: getThinkingBudget(openai), + }; +}; + +const getThinkingBudget = ( + openai: OpenAIChatCompletionCreateParams & { + max_completion_tokens?: Required; + }, +): number => { + if (openai.reasoning_budget !== undefined) { + return openai.reasoning_budget; + } + + let budget = 1024; + + if (openai.reasoning_effort !== undefined) { + budget = Math.floor( + getBudgetMultiplier(openai.reasoning_effort || "low") * + (openai.max_completion_tokens ?? 1024), + ); + } + + return budget; +}; diff --git a/packages/proxy/types/openai.ts b/packages/proxy/types/openai.ts index c3ed7cc5..07871f93 100644 --- a/packages/proxy/types/openai.ts +++ b/packages/proxy/types/openai.ts @@ -55,6 +55,8 @@ export type OpenAIChatCompletionChunk = ChatCompletionChunk & { export type OpenAIChatCompletionCreateParams = ChatCompletionCreateParams & { messages: Array; + reasoning_enabled?: boolean; + reasoning_budget?: number; }; // overrides diff --git a/packages/proxy/utils/index.ts b/packages/proxy/utils/index.ts index 7652eee5..587fcfc1 100644 --- a/packages/proxy/utils/index.ts +++ b/packages/proxy/utils/index.ts @@ -16,3 +16,15 @@ export { makeWavFile, makeMp3File } from "./audioEncoder"; export function getCurrentUnixTimestamp(): number { return Date.now() / 1000; } + +export const effortToBudgetMultiplier = { + low: 0.2, + medium: 0.5, + high: 0.8, +} as const; + +export const getBudgetMultiplier = ( + effort: keyof typeof effortToBudgetMultiplier, +) => { + return effortToBudgetMultiplier[effort] || effortToBudgetMultiplier.low; +}; diff --git a/packages/proxy/utils/openai.ts b/packages/proxy/utils/openai.ts index 5c637c44..bfc53489 100644 --- a/packages/proxy/utils/openai.ts +++ b/packages/proxy/utils/openai.ts @@ -1,6 +1,9 @@ -import { OpenAIChatCompletionChunk } from "@types"; +import { + OpenAIChatCompletionChunk, + OpenAIChatCompletionCreateParams, +} from "@types"; import { trimStartOfStreamHelper } from "ai"; -import { Completion } from "openai/resources"; +import { ChatCompletionCreateParams, Completion } from "openai/resources"; /** * Creates a parser function for processing the OpenAI stream data. @@ -118,3 +121,18 @@ export function isCompletion(data: unknown): data is Completion { "text" in data.choices[0] ); } + +/** + * Cleans the OpenAI parameters by removing extra braintrust fields. + * + * @param {OpenAIChatCompletionCreateParams} params - The OpenAI parameters to clean. + * @returns {ChatCompletionCreateParams} - The cleaned OpenAI parameters. 
+ */ +export function cleanOpenAIParams({ + reasoning_effort, + reasoning_budget, + reasoning_enabled, + ...openai +}: OpenAIChatCompletionCreateParams): ChatCompletionCreateParams { + return openai; +} From fba1dffe8b71089346ba6e6ee9e36099983a6d04 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Tue, 20 May 2025 19:14:01 -0500 Subject: [PATCH 36/51] remove the bare import --- packages/proxy/types/openai.ts | 2 -- 1 file changed, 2 deletions(-) diff --git a/packages/proxy/types/openai.ts b/packages/proxy/types/openai.ts index 6a2857e7..f7e6a337 100644 --- a/packages/proxy/types/openai.ts +++ b/packages/proxy/types/openai.ts @@ -60,8 +60,6 @@ export type OpenAIChatCompletionCreateParams = ChatCompletionCreateParams & { }; // overrides -import "openai/resources/chat/completions"; - declare module "openai/resources/chat/completions" { interface ChatCompletionCreateParamsBase { reasoning_enabled?: boolean; From 7d19720eb486da7bf1ec79682aca2c77abfb6549 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Wed, 21 May 2025 11:50:41 -0500 Subject: [PATCH 37/51] fix max tokens adjustment for anthorpic --- packages/proxy/src/providers/anthropic.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/proxy/src/providers/anthropic.ts b/packages/proxy/src/providers/anthropic.ts index 2354e0e1..9c644b25 100644 --- a/packages/proxy/src/providers/anthropic.ts +++ b/packages/proxy/src/providers/anthropic.ts @@ -697,7 +697,7 @@ export function openaiParamsToAnthropicMesssageParams( anthropic.thinking.budget_tokens, ); if (effectiveMax === anthropic.thinking.budget_tokens) { - anthropic.max_tokens = Math.floor(anthropic.max_tokens * 1.5); + anthropic.max_tokens = Math.floor(effectiveMax * 1.5); } } } From 2f11f2a10e7d9cbbd4da0d22f6a79b1810819c21 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Wed, 21 May 2025 14:28:14 -0500 Subject: [PATCH 38/51] make sure to include reasoning tokens if available --- packages/proxy/schema/model_list.json | 35 +++++++++++------- packages/proxy/src/providers/anthropic.ts | 10 +++--- packages/proxy/src/providers/google.ts | 43 ++++++++++++++--------- packages/proxy/src/proxy.ts | 1 + packages/proxy/types/openai.ts | 2 +- 5 files changed, 57 insertions(+), 34 deletions(-) diff --git a/packages/proxy/schema/model_list.json b/packages/proxy/schema/model_list.json index 6402e580..5bc8c280 100644 --- a/packages/proxy/schema/model_list.json +++ b/packages/proxy/schema/model_list.json @@ -1746,7 +1746,7 @@ "multimodal": true, "input_cost_per_mil_tokens": 0.1, "output_cost_per_mil_tokens": 0.4, - "displayName": "Gemini 2.0 Flash" + "displayName": "Gemini 2.0 Flash Latest" }, "gemini-2.0-flash-001": { "format": "google", @@ -1900,8 +1900,18 @@ "reasoning": true, "input_cost_per_mil_tokens": 1.25, "output_cost_per_mil_tokens": 10, - "displayName": "Gemini 2.5 Pro Experimental", - "experimental": true + "experimental": true, + "parent": "gemini-2.5-pro-preview-05-06" + }, + "gemini-2.0-pro-exp-02-05": { + "format": "google", + "flavor": "chat", + "multimodal": true, + "input_cost_per_mil_tokens": 1.25, + "output_cost_per_mil_tokens": 10, + "experimental": true, + "deprecated": true, + "parent": "gemini-2.5-pro-preview-03-25" }, "gemini-2.0-flash-exp": { "format": "google", @@ -1932,15 +1942,6 @@ "output_cost_per_mil_tokens": 0, "experimental": true }, - "gemini-2.0-pro-exp-02-05": { - "format": "google", - "flavor": "chat", - "multimodal": true, - "input_cost_per_mil_tokens": 1.25, - "output_cost_per_mil_tokens": 10, - "experimental": true, - "deprecated": true 
- }, "gemini-exp-1206": { "format": "google", "flavor": "chat", @@ -2747,6 +2748,16 @@ "displayName": "Gemini 2.5 Pro Experimental", "experimental": true }, + "publishers/google/models/gemini-2.5-flash-preview-04-17": { + "format": "google", + "flavor": "chat", + "displayName": "Gemini 2.5 Flash Preview", + "multimodal": true, + "reasoning": true, + "reasoning_budget": true, + "input_cost_per_mil_tokens": 0.15, + "output_cost_per_mil_tokens": 0.6 + }, "publishers/google/models/gemini-2.0-flash-thinking-exp-01-21": { "format": "google", "flavor": "chat", diff --git a/packages/proxy/src/providers/anthropic.ts b/packages/proxy/src/providers/anthropic.ts index 9c644b25..def6c903 100644 --- a/packages/proxy/src/providers/anthropic.ts +++ b/packages/proxy/src/providers/anthropic.ts @@ -14,7 +14,7 @@ import { } from "@anthropic-ai/sdk/resources/messages"; import { Message } from "@braintrust/core/typespecs"; import { - CompletionUsage, + OpenAICompletionUsage, OpenAIChatCompletion, OpenAIChatCompletionChoice, OpenAIChatCompletionChunk, @@ -203,7 +203,7 @@ export interface AnthropicCompletion { function updateUsage( anthropic: z.infer, - openai: Partial, + openai: Partial, ) { if (!isEmpty(anthropic.cache_read_input_tokens)) { openai.prompt_tokens_details = { @@ -235,7 +235,7 @@ function updateUsage( export function anthropicEventToOpenAIEvent( idx: number, - usage: Partial, + usage: Partial, eventU: unknown, isStructuredOutput: boolean, ): { event: OpenAIChatCompletionChunk | null; finished: boolean } { @@ -373,7 +373,7 @@ export function anthropicEventToOpenAIEvent( created: getTimestampInSeconds(), usage: !isEmpty(usage.completion_tokens) && !isEmpty(usage.prompt_tokens) - ? (usage as CompletionUsage) + ? (usage as OpenAICompletionUsage) : undefined, }, finished: true, @@ -425,7 +425,7 @@ export function anthropicCompletionToOpenAICompletion( const firstText = completion.content.find((c) => c.type === "text"); const firstThinking = completion.content.find((c) => c.type === "thinking"); const firstTool = completion.content.find((c) => c.type === "tool_use"); - let usage: CompletionUsage | undefined = undefined; + let usage: OpenAICompletionUsage | undefined = undefined; if (completion.usage) { usage = { prompt_tokens: 0, diff --git a/packages/proxy/src/providers/google.ts b/packages/proxy/src/providers/google.ts index 00dc784a..051664f4 100644 --- a/packages/proxy/src/providers/google.ts +++ b/packages/proxy/src/providers/google.ts @@ -6,6 +6,7 @@ import { GenerateContentConfig, GenerateContentParameters, GenerateContentResponse, + GenerateContentResponseUsageMetadata, Part, ThinkingConfig, } from "@google/genai"; @@ -15,9 +16,9 @@ import { OpenAIChatCompletionChoice, OpenAIChatCompletionChunk, OpenAIChatCompletionCreateParams, + OpenAICompletionUsage, } from "@types"; import { convertMediaToBase64 } from "./util"; -import { MessageParam } from "@anthropic-ai/sdk/resources"; import { getBudgetMultiplier } from "utils"; import { cleanOpenAIParams } from "utils/openai"; @@ -233,14 +234,7 @@ export function googleEventToOpenAIChatEvent( created: getTimestampInSeconds(), model, object: "chat.completion.chunk", - usage: data.usageMetadata - ? 
{ - prompt_tokens: data?.usageMetadata?.promptTokenCount || 0, - completion_tokens: - data?.usageMetadata?.candidatesTokenCount || 0, - total_tokens: data?.usageMetadata?.totalTokenCount || 0, - } - : undefined, + usage: geminiUsageToOpenAIUsage(data.usageMetadata), } : null, finished: @@ -250,6 +244,29 @@ export function googleEventToOpenAIChatEvent( }; } +const geminiUsageToOpenAIUsage = ( + usageMetadata?: GenerateContentResponseUsageMetadata, +): OpenAICompletionUsage | undefined => { + if (!usageMetadata) { + return undefined; + } + + const thoughtsTokenCount = usageMetadata.thoughtsTokenCount; + const cachedContentTokenCount = usageMetadata.cachedContentTokenCount; + + return { + prompt_tokens: usageMetadata.promptTokenCount || 0, + completion_tokens: usageMetadata.candidatesTokenCount || 0, + total_tokens: usageMetadata.totalTokenCount || 0, + ...(thoughtsTokenCount && { + completion_tokens_details: { reasoning_tokens: thoughtsTokenCount }, + }), + ...(cachedContentTokenCount && { + prompt_tokens_details: { cached_tokens: cachedContentTokenCount }, + }), + }; +}; + export function googleCompletionToOpenAICompletion( model: string, data: GenerateContentResponse, @@ -295,13 +312,7 @@ export function googleCompletionToOpenAICompletion( created: getTimestampInSeconds(), model, object: "chat.completion", - usage: data.usageMetadata - ? { - prompt_tokens: data?.usageMetadata?.promptTokenCount || 0, - completion_tokens: data?.usageMetadata?.candidatesTokenCount || 0, - total_tokens: data?.usageMetadata.totalTokenCount || 0, - } - : undefined, + usage: geminiUsageToOpenAIUsage(data.usageMetadata), }; } diff --git a/packages/proxy/src/proxy.ts b/packages/proxy/src/proxy.ts index e83cf20c..a23d38fc 100644 --- a/packages/proxy/src/proxy.ts +++ b/packages/proxy/src/proxy.ts @@ -1224,6 +1224,7 @@ async function fetchModel( ttl_seconds?: number, ) => Promise, ): Promise { + debugger; const format = modelSpec?.format ?? "openai"; switch (format) { case "openai": diff --git a/packages/proxy/types/openai.ts b/packages/proxy/types/openai.ts index f7e6a337..1473de45 100644 --- a/packages/proxy/types/openai.ts +++ b/packages/proxy/types/openai.ts @@ -101,4 +101,4 @@ export const completionUsageSchema = z.object({ .optional(), }); -export type CompletionUsage = z.infer; +export type OpenAICompletionUsage = z.infer; From f68bb951af8b6341ac14d865196bdc4d90fec773 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Wed, 21 May 2025 15:58:51 -0500 Subject: [PATCH 39/51] remove extra debugger --- packages/proxy/src/proxy.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/packages/proxy/src/proxy.ts b/packages/proxy/src/proxy.ts index a23d38fc..e83cf20c 100644 --- a/packages/proxy/src/proxy.ts +++ b/packages/proxy/src/proxy.ts @@ -1224,7 +1224,6 @@ async function fetchModel( ttl_seconds?: number, ) => Promise, ): Promise { - debugger; const format = modelSpec?.format ?? 
"openai"; switch (format) { case "openai": From 45fb63a10a2369cb31de1a481905a26e27259896 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Wed, 21 May 2025 16:11:15 -0500 Subject: [PATCH 40/51] update google test now that gemini models return reasoning tokens --- packages/proxy/src/providers/google.test.ts | 27 ++++++++++----------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/packages/proxy/src/providers/google.test.ts b/packages/proxy/src/providers/google.test.ts index ac10efb2..afd53bc3 100644 --- a/packages/proxy/src/providers/google.test.ts +++ b/packages/proxy/src/providers/google.test.ts @@ -6,13 +6,13 @@ import { } from "@types"; for (const model of [ - "gemini-2.5-flash-preview-04-17", + // "gemini-2.5-flash-preview-04-17", // TODO: re-enable when we have a working CI/CD solution - // "publishers/google/models/gemini-2.5-flash-preview-04-17", + "publishers/google/models/gemini-2.5-flash-preview-04-17", ]) { describe(model, () => { it("should accept and should not return reasoning/thinking params and detail streaming", async () => { - const { events } = await callProxyV1< + const { events, json } = await callProxyV1< OpenAIChatCompletionCreateParams, OpenAIChatCompletionChunk >({ @@ -57,7 +57,7 @@ for (const model of [ (event) => event.data.choices[0]?.delta?.reasoning?.content !== undefined, ); - expect(hasReasoning).toBe(isVertex(model)); // as of writing, gemini api does not yet provide this detail! + expect(hasReasoning).toBe(true); }); it("should accept and return reasoning/thinking params and detail non-streaming", async () => { @@ -102,14 +102,12 @@ for (const model of [ logprobs: null, message: { content: expect.any(String), - ...(isVertex(model) && { - reasoning: [ - { - id: expect.any(String), - content: expect.any(String), - }, - ], - }), // gemini apis do not include reasoning + reasoning: [ + { + id: expect.any(String), + content: expect.any(String), + }, + ], refusal: null, role: "assistant", }, @@ -121,6 +119,9 @@ for (const model of [ object: "chat.completion", usage: { completion_tokens: expect.any(Number), + completion_tokens_details: { + reasoning_tokens: expect.any(Number), + }, prompt_tokens: expect.any(Number), total_tokens: expect.any(Number), }, @@ -188,5 +189,3 @@ for (const model of [ }); }); } - -const isVertex = (model: string) => model.includes("publishers/"); From 4f688850cf465de82b495d8f3cf640106ea676bd Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Wed, 21 May 2025 16:15:12 -0500 Subject: [PATCH 41/51] fix tests --- packages/proxy/schema/index.test.ts | 2 +- packages/proxy/src/providers/anthropic.test.ts | 12 ++++++++++++ packages/proxy/src/providers/google.test.ts | 4 ++-- 3 files changed, 15 insertions(+), 3 deletions(-) diff --git a/packages/proxy/schema/index.test.ts b/packages/proxy/schema/index.test.ts index 149673d8..59928cb5 100644 --- a/packages/proxy/schema/index.test.ts +++ b/packages/proxy/schema/index.test.ts @@ -120,7 +120,7 @@ const examples: Record< ], temperature: 1, stream: false, - max_tokens: 1500, + max_tokens: 1536, thinking: { budget_tokens: 1024, type: "enabled", diff --git a/packages/proxy/src/providers/anthropic.test.ts b/packages/proxy/src/providers/anthropic.test.ts index 0fd0b715..63c2ab94 100644 --- a/packages/proxy/src/providers/anthropic.test.ts +++ b/packages/proxy/src/providers/anthropic.test.ts @@ -83,6 +83,10 @@ it("should convert OpenAI non-streaming request to Anthropic and back", async () completion_tokens: expect.any(Number), prompt_tokens: expect.any(Number), total_tokens: 
expect.any(Number), + prompt_tokens_details: { + cache_creation_tokens: expect.any(Number), + cached_tokens: expect.any(Number), + }, }, }); }); @@ -196,6 +200,10 @@ it("should accept and return reasoning/thinking params and detail non-streaming" completion_tokens: expect.any(Number), prompt_tokens: expect.any(Number), total_tokens: expect.any(Number), + prompt_tokens_details: { + cache_creation_tokens: expect.any(Number), + cached_tokens: expect.any(Number), + }, }, }); }); @@ -255,6 +263,10 @@ it("should disable reasoning/thinking params non-streaming", async () => { completion_tokens: expect.any(Number), prompt_tokens: expect.any(Number), total_tokens: expect.any(Number), + prompt_tokens_details: { + cache_creation_tokens: expect.any(Number), + cached_tokens: expect.any(Number), + }, }, }); }); diff --git a/packages/proxy/src/providers/google.test.ts b/packages/proxy/src/providers/google.test.ts index afd53bc3..a186a3c2 100644 --- a/packages/proxy/src/providers/google.test.ts +++ b/packages/proxy/src/providers/google.test.ts @@ -6,9 +6,9 @@ import { } from "@types"; for (const model of [ - // "gemini-2.5-flash-preview-04-17", + "gemini-2.5-flash-preview-04-17", // TODO: re-enable when we have a working CI/CD solution - "publishers/google/models/gemini-2.5-flash-preview-04-17", + // "publishers/google/models/gemini-2.5-flash-preview-04-17", ]) { describe(model, () => { it("should accept and should not return reasoning/thinking params and detail streaming", async () => { From 81c6968708eebf9c55ac536830670ce74984cd4b Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Wed, 21 May 2025 16:59:20 -0500 Subject: [PATCH 42/51] add gemini flash preview 05-20 --- packages/proxy/schema/index.ts | 1 + packages/proxy/schema/model_list.json | 26 +++++++++++++++++++-- packages/proxy/src/providers/google.test.ts | 4 ++-- 3 files changed, 27 insertions(+), 4 deletions(-) diff --git a/packages/proxy/schema/index.ts b/packages/proxy/schema/index.ts index 3b159985..285a4c3f 100644 --- a/packages/proxy/schema/index.ts +++ b/packages/proxy/schema/index.ts @@ -345,6 +345,7 @@ export const AvailableEndpointTypes: { [name: string]: ModelEndpointType[] } = { "publishers/google/models/gemini-2.5-pro-preview-05-06": ["vertex"], "publishers/google/models/gemini-2.5-pro-preview-03-25": ["vertex"], "publishers/google/models/gemini-2.5-pro-exp-03-25": ["vertex"], + "publishers/google/models/gemini-2.5-flash-preview-05-20": ["vertex"], "publishers/google/models/gemini-2.5-flash-preview-04-17": ["vertex"], "publishers/google/models/gemini-2.0-flash-thinking-exp-01-21": ["vertex"], "publishers/google/models/gemini-2.0-flash": ["vertex"], diff --git a/packages/proxy/schema/model_list.json b/packages/proxy/schema/model_list.json index 5bc8c280..5c0b7d51 100644 --- a/packages/proxy/schema/model_list.json +++ b/packages/proxy/schema/model_list.json @@ -1860,7 +1860,7 @@ "output_cost_per_mil_tokens": 5, "parent": "gemini-1.5-pro" }, - "gemini-2.5-flash-preview-04-17": { + "gemini-2.5-flash-preview-05-20": { "format": "google", "flavor": "chat", "multimodal": true, @@ -1871,6 +1871,17 @@ "displayName": "Gemini 2.5 Flash Preview", "experimental": false }, + "gemini-2.5-flash-preview-04-17": { + "format": "google", + "flavor": "chat", + "multimodal": true, + "reasoning": true, + "reasoning_budget": true, + "input_cost_per_mil_tokens": 0.15, + "output_cost_per_mil_tokens": 0.6, + "experimental": false, + "parent": "gemini-2.5-flash-preview-05-20" + }, "gemini-2.5-pro-preview-05-06": { "format": "google", "flavor": "chat", @@ 
-2748,7 +2759,7 @@ "displayName": "Gemini 2.5 Pro Experimental", "experimental": true }, - "publishers/google/models/gemini-2.5-flash-preview-04-17": { + "publishers/google/models/gemini-2.5-flash-preview-05-20": { "format": "google", "flavor": "chat", "displayName": "Gemini 2.5 Flash Preview", @@ -2758,6 +2769,17 @@ "input_cost_per_mil_tokens": 0.15, "output_cost_per_mil_tokens": 0.6 }, + "publishers/google/models/gemini-2.5-flash-preview-04-17": { + "format": "google", + "flavor": "chat", + "displayName": "Gemini 2.5 Flash Preview", + "multimodal": true, + "reasoning": true, + "reasoning_budget": true, + "input_cost_per_mil_tokens": 0.15, + "output_cost_per_mil_tokens": 0.6, + "parent": "publishers/google/models/gemini-2.5-flash-preview-05-20" + }, "publishers/google/models/gemini-2.0-flash-thinking-exp-01-21": { "format": "google", "flavor": "chat", diff --git a/packages/proxy/src/providers/google.test.ts b/packages/proxy/src/providers/google.test.ts index a186a3c2..be480e76 100644 --- a/packages/proxy/src/providers/google.test.ts +++ b/packages/proxy/src/providers/google.test.ts @@ -6,9 +6,9 @@ import { } from "@types"; for (const model of [ - "gemini-2.5-flash-preview-04-17", + "gemini-2.5-flash-preview-05-20", // TODO: re-enable when we have a working CI/CD solution - // "publishers/google/models/gemini-2.5-flash-preview-04-17", + "publishers/google/models/gemini-2.5-flash-preview-05-20", ]) { describe(model, () => { it("should accept and should not return reasoning/thinking params and detail streaming", async () => { From 6e661b4ead3263382ed5611f39461625cf28eb14 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Wed, 21 May 2025 17:09:05 -0500 Subject: [PATCH 43/51] disable vertex test again --- packages/proxy/src/providers/google.test.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/proxy/src/providers/google.test.ts b/packages/proxy/src/providers/google.test.ts index be480e76..f4d0bdd5 100644 --- a/packages/proxy/src/providers/google.test.ts +++ b/packages/proxy/src/providers/google.test.ts @@ -8,7 +8,7 @@ import { for (const model of [ "gemini-2.5-flash-preview-05-20", // TODO: re-enable when we have a working CI/CD solution - "publishers/google/models/gemini-2.5-flash-preview-05-20", + // "publishers/google/models/gemini-2.5-flash-preview-05-20", ]) { describe(model, () => { it("should accept and should not return reasoning/thinking params and detail streaming", async () => { From 575530812037f6dbb91a601842ef51e33e2abd4a Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Thu, 22 May 2025 15:05:12 -0500 Subject: [PATCH 44/51] set the reasoning for claude 4 --- packages/proxy/schema/model_list.json | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/packages/proxy/schema/model_list.json b/packages/proxy/schema/model_list.json index 358efcc3..ab066c6d 100644 --- a/packages/proxy/schema/model_list.json +++ b/packages/proxy/schema/model_list.json @@ -510,6 +510,8 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning": true, + "reasoning_budget": true, "input_cost_per_mil_tokens": 3, "output_cost_per_mil_tokens": 15, "input_cache_read_cost_per_mil_tokens": 0.3, @@ -594,6 +596,8 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning": true, + "reasoning_budget": true, "input_cost_per_mil_tokens": 15, "output_cost_per_mil_tokens": 75, "input_cache_read_cost_per_mil_tokens": 1.5, From a0277d705957f9010714e656de8ae3c2a6ef13be Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Thu, 22 May 2025 19:11:12 -0500 
Subject: [PATCH 45/51] try reverting the ci change --- .github/workflows/js.yaml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/.github/workflows/js.yaml b/.github/workflows/js.yaml index 3537e56b..4737ca53 100644 --- a/.github/workflows/js.yaml +++ b/.github/workflows/js.yaml @@ -21,10 +21,11 @@ jobs: node-version: ${{ matrix.node-version }} registry-url: "https://registry.npmjs.org" - uses: pnpm/action-setup@v4 - - run: pnpm install - - run: pnpm run test + - run: | + pnpm install + pnpm run test + pnpm run build env: OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} ANTHROPIC_API_KEY: ${{ secrets.ANTHROPIC_API_KEY }} GEMINI_API_KEY: ${{ secrets.GEMINI_API_KEY }} - - run: pnpm run build From 8cd6c417d1894219f92f410aa5256293110fb6e2 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Sat, 24 May 2025 09:14:44 -0500 Subject: [PATCH 46/51] switch to default export with cacheControl and jsonwebtoken --- packages/proxy/src/proxy.ts | 6 ++++-- packages/proxy/utils/tempCredentials.ts | 17 ++++++++--------- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/packages/proxy/src/proxy.ts b/packages/proxy/src/proxy.ts index 7c717374..665c1b25 100644 --- a/packages/proxy/src/proxy.ts +++ b/packages/proxy/src/proxy.ts @@ -24,7 +24,7 @@ import { OpenAIChatCompletionChunk, OpenAIReasoning, } from "@types"; -import { parse as cacheControlParse } from "cache-control-parser"; +import cacheControlParse from "cache-control-parser"; import { differenceInSeconds } from "date-fns"; import { createParser, @@ -247,7 +247,9 @@ export async function proxyV1({ ), MAX_CACHE_TTL, ); - const cacheControl = cacheControlParse(proxyHeaders["cache-control"] || ""); + const cacheControl = cacheControlParse.parse( + proxyHeaders["cache-control"] || "", + ); const cacheMaxAge = cacheControl?.["max-age"]; const noCache = !!cacheControl?.["no-cache"] || cacheMaxAge === 0; const noStore = !!cacheControl?.["no-store"]; diff --git a/packages/proxy/utils/tempCredentials.ts b/packages/proxy/utils/tempCredentials.ts index b597079e..54c33114 100644 --- a/packages/proxy/utils/tempCredentials.ts +++ b/packages/proxy/utils/tempCredentials.ts @@ -8,11 +8,7 @@ import { } from "../schema"; import { v4 as uuidv4 } from "uuid"; import { arrayBufferToBase64 } from "./encrypt"; -import { - sign as jwtSign, - verify as jwtVerify, - decode as jwtDecode, -} from "jsonwebtoken"; +import jsonwebtoken from "jsonwebtoken"; import { isEmpty } from "@lib/util"; const JWT_ALGORITHM = "HS256"; @@ -81,7 +77,7 @@ export function makeTempCredentialsJwt({ logging: request.logging ?? 
undefined, }, }; - const jwt = jwtSign(jwtPayload, authToken, { + const jwt = jsonwebtoken.sign(jwtPayload, authToken, { expiresIn: request.ttl_seconds, mutatePayload: true, algorithm: JWT_ALGORITHM, @@ -177,7 +173,7 @@ export function isTempCredential(jwt: string): boolean { .pick({ iss: true }) .or(tempCredentialJwtPayloadSchema.pick({ aud: true })); return looseJwtPayloadSchema.safeParse( - jwtDecode(jwt, { complete: false, json: true }), + jsonwebtoken.decode(jwt, { complete: false, json: true }), ).success; } @@ -195,7 +191,7 @@ export function verifyJwtOnly({ jwt: string; credentialCacheValue: TempCredentialsCacheValue; }): void { - jwtVerify(jwt, credentialCacheValue.authToken, { + jsonwebtoken.verify(jwt, credentialCacheValue.authToken, { algorithms: [JWT_ALGORITHM], }); } @@ -225,7 +221,10 @@ export async function verifyTempCredentials({ cacheGet: (encryptionKey: string, key: string) => Promise; }): Promise { // Decode, but do not verify, just to get the ID and encryption key. - const jwtPayloadRaw = jwtDecode(jwt, { complete: false, json: true }); + const jwtPayloadRaw = jsonwebtoken.decode(jwt, { + complete: false, + json: true, + }); if (isEmpty(jwtPayloadRaw)) { throw new Error("Could not parse JWT format"); } From 56b38df06f3d9782f1e1c4f15fbd8463f76e3b38 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Sun, 25 May 2025 09:47:40 -0500 Subject: [PATCH 47/51] add `skott` to assert no circular dependencies issues --- packages/proxy/edge/deps.test.ts | 20 + packages/proxy/package.json | 1 + packages/proxy/schema/deps.test.ts | 20 + packages/proxy/src/deps.test.ts | 20 + packages/proxy/src/providers/bedrock.ts | 9 +- packages/proxy/src/providers/google.ts | 2 +- packages/proxy/src/proxy.ts | 10 +- packages/proxy/src/util.ts | 9 + packages/proxy/utils/deps.test.ts | 18 + pnpm-lock.yaml | 1031 +++++++++++++++++++---- 10 files changed, 941 insertions(+), 199 deletions(-) create mode 100644 packages/proxy/edge/deps.test.ts create mode 100644 packages/proxy/schema/deps.test.ts create mode 100644 packages/proxy/src/deps.test.ts create mode 100644 packages/proxy/utils/deps.test.ts diff --git a/packages/proxy/edge/deps.test.ts b/packages/proxy/edge/deps.test.ts new file mode 100644 index 00000000..be2aaecf --- /dev/null +++ b/packages/proxy/edge/deps.test.ts @@ -0,0 +1,20 @@ +import skott from "skott"; +import { describe, expect, it } from "vitest"; + +describe("proxy/edge", () => { + it("no circ dependencies", async () => { + const { useGraph } = await skott({ + entrypoint: `${__dirname}/index.ts`, + tsConfigPath: `${__dirname}/../tsconfig.json`, + dependencyTracking: { + builtin: false, + thirdParty: true, + typeOnly: true, + }, + }); + + const { findCircularDependencies } = useGraph(); + + expect(findCircularDependencies()).toEqual([]); + }); +}); diff --git a/packages/proxy/package.json b/packages/proxy/package.json index 38263764..af0534da 100644 --- a/packages/proxy/package.json +++ b/packages/proxy/package.json @@ -75,6 +75,7 @@ "@typescript-eslint/eslint-plugin": "^8.21.0", "esbuild": "^0.19.10", "npm-run-all": "^4.1.5", + "skott": "^0.35.4", "tsup": "^8.4.0", "typescript": "5.5.4", "vite-tsconfig-paths": "^4.3.2", diff --git a/packages/proxy/schema/deps.test.ts b/packages/proxy/schema/deps.test.ts new file mode 100644 index 00000000..7797c7f2 --- /dev/null +++ b/packages/proxy/schema/deps.test.ts @@ -0,0 +1,20 @@ +import skott from "skott"; +import { describe, expect, it } from "vitest"; + +describe("proxy/schema", () => { + it("no circ dependencies", async () => { + const { 
useGraph } = await skott({ + entrypoint: `${__dirname}/index.ts`, + tsConfigPath: `${__dirname}/../tsconfig.json`, + dependencyTracking: { + builtin: false, + thirdParty: true, + typeOnly: true, + }, + }); + + const { findCircularDependencies } = useGraph(); + + expect(findCircularDependencies()).toEqual([]); + }); +}); diff --git a/packages/proxy/src/deps.test.ts b/packages/proxy/src/deps.test.ts new file mode 100644 index 00000000..01d5049b --- /dev/null +++ b/packages/proxy/src/deps.test.ts @@ -0,0 +1,20 @@ +import skott from "skott"; +import { describe, expect, it } from "vitest"; + +describe("proxy/src", () => { + it("no circ dependencies", async () => { + const { useGraph } = await skott({ + entrypoint: `${__dirname}/index.ts`, + tsConfigPath: `${__dirname}/../tsconfig.json`, + dependencyTracking: { + builtin: false, + thirdParty: true, + typeOnly: true, + }, + }); + + const { findCircularDependencies } = useGraph(); + + expect(findCircularDependencies()).toEqual([]); + }); +}); diff --git a/packages/proxy/src/providers/bedrock.ts b/packages/proxy/src/providers/bedrock.ts index 36c576dc..ce9806cf 100644 --- a/packages/proxy/src/providers/bedrock.ts +++ b/packages/proxy/src/providers/bedrock.ts @@ -30,12 +30,7 @@ import { anthropicEventToOpenAIEvent, } from "./anthropic"; import { ChatCompletionChunk, CompletionUsage } from "openai/resources"; -import { - getTimestampInSeconds, - writeToReadable, - isEmpty, - ProxyBadRequestError, -} from ".."; +import { getTimestampInSeconds, isEmpty, ProxyBadRequestError } from "../util"; import { Message as OaiMessage, MessageRole, @@ -50,7 +45,7 @@ import { } from "openai/resources/chat/completions"; import { convertMediaToBase64 } from "./util"; import { makeFakeOpenAIStreamTransformer } from "./openai"; -import { ModelResponse } from "../util"; +import { ModelResponse, writeToReadable } from "../util"; function streamResponse( body: AsyncIterable, diff --git a/packages/proxy/src/providers/google.ts b/packages/proxy/src/providers/google.ts index a74b16c1..613f2270 100644 --- a/packages/proxy/src/providers/google.ts +++ b/packages/proxy/src/providers/google.ts @@ -8,7 +8,7 @@ import { Part, } from "@google/generative-ai"; import { ChatCompletion, ChatCompletionChunk } from "openai/resources"; -import { getTimestampInSeconds } from ".."; +import { getTimestampInSeconds } from "../util"; import { convertMediaToBase64 } from "./util"; async function makeGoogleMediaBlock(media: string): Promise { diff --git a/packages/proxy/src/proxy.ts b/packages/proxy/src/proxy.ts index 83ecbdc1..8cb4ebd7 100644 --- a/packages/proxy/src/proxy.ts +++ b/packages/proxy/src/proxy.ts @@ -26,6 +26,7 @@ import { isObject, parseAuthHeader, parseNumericHeader, + writeToReadable, } from "./util"; import { anthropicCompletionToOpenAICompletion, @@ -2893,12 +2894,3 @@ function logSpanInputs( } } } - -export const writeToReadable = (response: string) => { - return new ReadableStream({ - start(controller) { - controller.enqueue(new TextEncoder().encode(response)); - controller.close(); - }, - }); -}; diff --git a/packages/proxy/src/util.ts b/packages/proxy/src/util.ts index 3a2dd5bd..2ee5b460 100644 --- a/packages/proxy/src/util.ts +++ b/packages/proxy/src/util.ts @@ -149,3 +149,12 @@ export function parseFileMetadataFromUrl( return undefined; } } + +export const writeToReadable = (response: string) => { + return new ReadableStream({ + start(controller) { + controller.enqueue(new TextEncoder().encode(response)); + controller.close(); + }, + }); +}; diff --git 
a/packages/proxy/utils/deps.test.ts b/packages/proxy/utils/deps.test.ts new file mode 100644 index 00000000..cde1be7b --- /dev/null +++ b/packages/proxy/utils/deps.test.ts @@ -0,0 +1,18 @@ +import skott from "skott"; +import { expect, it } from "vitest"; + +it("no circ dependencies", async () => { + const { useGraph } = await skott({ + entrypoint: `${__dirname}/index.ts`, + tsConfigPath: `${__dirname}/../tsconfig.json`, + dependencyTracking: { + builtin: false, + thirdParty: true, + typeOnly: true, + }, + }); + + const { findCircularDependencies } = useGraph(); + + expect(findCircularDependencies()).toEqual([]); +}); diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index f050a8a4..7f69edb3 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -22,10 +22,10 @@ importers: version: 2.3.3 vite-tsconfig-paths: specifier: ^4.3.2 - version: 4.3.2 + version: 4.3.2(typescript@5.5.4) vitest: specifier: ^2.1.9 - version: 2.1.9 + version: 2.1.9(@types/node@20.10.5) apis/cloudflare: dependencies: @@ -282,6 +282,9 @@ importers: npm-run-all: specifier: ^4.1.5 version: 4.1.5 + skott: + specifier: ^0.35.4 + version: 0.35.4 tsup: specifier: ^8.4.0 version: 8.4.0(typescript@5.5.4) @@ -451,6 +454,11 @@ packages: js-yaml: 4.1.0 dev: false + /@arr/every@1.0.1: + resolution: {integrity: sha512-UQFQ6SgyJ6LX42W8rHCs8KVc0JS0tzVL9ct4XYedJukskYVWTo49tNiMEK9C2HTyarbNiT/RVIRSY82vH+6sTg==} + engines: {node: '>=4'} + dev: true + /@asteasolutions/zod-to-openapi@6.4.0(zod@3.22.4): resolution: {integrity: sha512-8cxfF7AHHx2PqnN4Cd8/O8CBu/nVYJP9DpnfVLW3BFb66VJDnqI/CczZnkqMc3SNh6J9GiX7JbJ5T4BSP4HZ2Q==} peerDependencies: @@ -894,15 +902,33 @@ packages: tslib: 2.6.2 dev: false + /@babel/code-frame@7.27.1: + resolution: {integrity: sha512-cjQ7ZlQ0Mv3b47hABuTevyTuYN4i+loJKGeV9flcCgIK37cCXRh+L1bd3iBHlynerhQ7BhCkn2BPbQUL+rGqFg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/helper-validator-identifier': 7.27.1 + js-tokens: 4.0.0 + picocolors: 1.1.1 + dev: true + + /@babel/generator@7.27.1: + resolution: {integrity: sha512-UnJfnIpc/+JO0/+KRVQNGU+y5taA5vCbwN8+azkX6beii/ZF+enZJSOKo11ZSzGJjlNfJHfQtmQT8H+9TXPG2w==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/parser': 7.27.1 + '@babel/types': 7.27.1 + '@jridgewell/gen-mapping': 0.3.8 + '@jridgewell/trace-mapping': 0.3.25 + jsesc: 3.1.0 + dev: true + /@babel/helper-string-parser@7.27.1: resolution: {integrity: sha512-qMlSxKbpRlAridDExk92nSobyDdpPijUq2DW6oDnUqd0iOGxmQjyqhMIihI9+zv4LPyZdRje2cavWPbCbWm3eA==} engines: {node: '>=6.9.0'} - dev: false /@babel/helper-validator-identifier@7.27.1: resolution: {integrity: sha512-D2hP9eA+Sqx1kBZgzxZh0y1trbuU+JoDkiEwqhQ36nodYqJwyEIhPSdMNd7lOm/4io72luTPWH20Yda0xOuUow==} engines: {node: '>=6.9.0'} - dev: false /@babel/parser@7.27.1: resolution: {integrity: sha512-I0dZ3ZpCrJ1c04OqlNsQcKiZlsrXf/kkE4FXzID9rIOYICsAbA8mMDzhW/luRNAHdCNt7os/u8wenklZDlUVUQ==} @@ -910,7 +936,14 @@ packages: hasBin: true dependencies: '@babel/types': 7.27.1 - dev: false + + /@babel/parser@7.27.2: + resolution: {integrity: sha512-QYLs8299NA7WM/bZAdp+CviYYkVoYXlDW2rzliy3chxd1PQjej7JORuMJDJXJUb9g0TT+B99EwaVLKmX+sPXWw==} + engines: {node: '>=6.0.0'} + hasBin: true + dependencies: + '@babel/types': 7.27.1 + dev: true /@babel/runtime@7.23.6: resolution: {integrity: sha512-zHd0eUrf5GZoOWVCXp6koAKQTfZV07eit6bGPmJgnZdnSAvvZee6zniW2XMF7Cmc4ISOOnPy3QaSiIJGJkVEDQ==} @@ -919,13 +952,36 @@ packages: regenerator-runtime: 0.14.1 dev: true + /@babel/template@7.27.2: + resolution: {integrity: 
sha512-LPDZ85aEJyYSd18/DkjNh4/y1ntkE5KwUHWTiqgRxruuZL2F1yuHligVHLvcHY2vMHXttKFpJn6LwfI7cw7ODw==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/code-frame': 7.27.1 + '@babel/parser': 7.27.2 + '@babel/types': 7.27.1 + dev: true + + /@babel/traverse@7.27.1: + resolution: {integrity: sha512-ZCYtZciz1IWJB4U61UPu4KEaqyfj+r5T1Q5mqPo+IBpcG9kHv30Z0aD8LXPgC1trYa6rK0orRyAhqUgk4MjmEg==} + engines: {node: '>=6.9.0'} + dependencies: + '@babel/code-frame': 7.27.1 + '@babel/generator': 7.27.1 + '@babel/parser': 7.27.1 + '@babel/template': 7.27.2 + '@babel/types': 7.27.1 + debug: 4.4.0 + globals: 11.12.0 + transitivePeerDependencies: + - supports-color + dev: true + /@babel/types@7.27.1: resolution: {integrity: sha512-+EzkxvLNfiUeKMgy/3luqfsCWFRXLb7U6wNQTk60tovuckwB15B191tJWvpp4HjiQWdJkCxO3Wbvc6jlk3Xb2Q==} engines: {node: '>=6.9.0'} dependencies: '@babel/helper-string-parser': 7.27.1 '@babel/helper-validator-identifier': 7.27.1 - dev: false /@braintrust/core@0.0.84: resolution: {integrity: sha512-LByhkZ6ueKk5wFo1kuWwOjIw21mmkHNzOX8NRbPMISbRId26H8mWbShGa6UtM7+mS0KrIgQhy13vpa0tt69Gvg==} @@ -2403,6 +2459,148 @@ packages: engines: {node: '>=14'} dev: false + /@parcel/watcher-android-arm64@2.5.1: + resolution: {integrity: sha512-KF8+j9nNbUN8vzOFDpRMsaKBHZ/mcjEjMToVMJOhTozkDonQFFrRcfdLWn6yWKCmJKmdVxSgHiYvTCef4/qcBA==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [android] + requiresBuild: true + dev: true + optional: true + + /@parcel/watcher-darwin-arm64@2.5.1: + resolution: {integrity: sha512-eAzPv5osDmZyBhou8PoF4i6RQXAfeKL9tjb3QzYuccXFMQU0ruIc/POh30ePnaOyD1UXdlKguHBmsTs53tVoPw==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /@parcel/watcher-darwin-x64@2.5.1: + resolution: {integrity: sha512-1ZXDthrnNmwv10A0/3AJNZ9JGlzrF82i3gNQcWOzd7nJ8aj+ILyW1MTxVk35Db0u91oD5Nlk9MBiujMlwmeXZg==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [darwin] + requiresBuild: true + dev: true + optional: true + + /@parcel/watcher-freebsd-x64@2.5.1: + resolution: {integrity: sha512-SI4eljM7Flp9yPuKi8W0ird8TI/JK6CSxju3NojVI6BjHsTyK7zxA9urjVjEKJ5MBYC+bLmMcbAWlZ+rFkLpJQ==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [freebsd] + requiresBuild: true + dev: true + optional: true + + /@parcel/watcher-linux-arm-glibc@2.5.1: + resolution: {integrity: sha512-RCdZlEyTs8geyBkkcnPWvtXLY44BCeZKmGYRtSgtwwnHR4dxfHRG3gR99XdMEdQ7KeiDdasJwwvNSF5jKtDwdA==} + engines: {node: '>= 10.0.0'} + cpu: [arm] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@parcel/watcher-linux-arm-musl@2.5.1: + resolution: {integrity: sha512-6E+m/Mm1t1yhB8X412stiKFG3XykmgdIOqhjWj+VL8oHkKABfu/gjFj8DvLrYVHSBNC+/u5PeNrujiSQ1zwd1Q==} + engines: {node: '>= 10.0.0'} + cpu: [arm] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@parcel/watcher-linux-arm64-glibc@2.5.1: + resolution: {integrity: sha512-LrGp+f02yU3BN9A+DGuY3v3bmnFUggAITBGriZHUREfNEzZh/GO06FF5u2kx8x+GBEUYfyTGamol4j3m9ANe8w==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@parcel/watcher-linux-arm64-musl@2.5.1: + resolution: {integrity: sha512-cFOjABi92pMYRXS7AcQv9/M1YuKRw8SZniCDw0ssQb/noPkRzA+HBDkwmyOJYp5wXcsTrhxO0zq1U11cK9jsFg==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@parcel/watcher-linux-x64-glibc@2.5.1: + resolution: {integrity: sha512-GcESn8NZySmfwlTsIur+49yDqSny2IhPeZfXunQi48DMugKeZ7uy1FX83pO0X22sHntJ4Ub+9k34XQCX+oHt2A==} + engines: 
{node: '>= 10.0.0'} + cpu: [x64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@parcel/watcher-linux-x64-musl@2.5.1: + resolution: {integrity: sha512-n0E2EQbatQ3bXhcH2D1XIAANAcTZkQICBPVaxMeaCVBtOpBZpWJuf7LwyWPSBDITb7In8mqQgJ7gH8CILCURXg==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [linux] + requiresBuild: true + dev: true + optional: true + + /@parcel/watcher-win32-arm64@2.5.1: + resolution: {integrity: sha512-RFzklRvmc3PkjKjry3hLF9wD7ppR4AKcWNzH7kXR7GUe0Igb3Nz8fyPwtZCSquGrhU5HhUNDr/mKBqj7tqA2Vw==} + engines: {node: '>= 10.0.0'} + cpu: [arm64] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /@parcel/watcher-win32-ia32@2.5.1: + resolution: {integrity: sha512-c2KkcVN+NJmuA7CGlaGD1qJh1cLfDnQsHjE89E60vUEMlqduHGCdCLJCID5geFVM0dOtA3ZiIO8BoEQmzQVfpQ==} + engines: {node: '>= 10.0.0'} + cpu: [ia32] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /@parcel/watcher-win32-x64@2.5.1: + resolution: {integrity: sha512-9lHBdJITeNR++EvSQVUcaZoWupyHfXe1jZvGZ06O/5MflPcuPLtEphScIBL+AiCWBO46tDSHzWyD0uDmmZqsgA==} + engines: {node: '>= 10.0.0'} + cpu: [x64] + os: [win32] + requiresBuild: true + dev: true + optional: true + + /@parcel/watcher@2.5.1: + resolution: {integrity: sha512-dfUnCxiN9H4ap84DvD2ubjw+3vUNpstxa0TneY/Paat8a3R4uQZDLSvWjmznAY/DoahqTHl9V46HF/Zs3F29pg==} + engines: {node: '>= 10.0.0'} + requiresBuild: true + dependencies: + detect-libc: 1.0.3 + is-glob: 4.0.3 + micromatch: 4.0.8 + node-addon-api: 7.1.1 + optionalDependencies: + '@parcel/watcher-android-arm64': 2.5.1 + '@parcel/watcher-darwin-arm64': 2.5.1 + '@parcel/watcher-darwin-x64': 2.5.1 + '@parcel/watcher-freebsd-x64': 2.5.1 + '@parcel/watcher-linux-arm-glibc': 2.5.1 + '@parcel/watcher-linux-arm-musl': 2.5.1 + '@parcel/watcher-linux-arm64-glibc': 2.5.1 + '@parcel/watcher-linux-arm64-musl': 2.5.1 + '@parcel/watcher-linux-x64-glibc': 2.5.1 + '@parcel/watcher-linux-x64-musl': 2.5.1 + '@parcel/watcher-win32-arm64': 2.5.1 + '@parcel/watcher-win32-ia32': 2.5.1 + '@parcel/watcher-win32-x64': 2.5.1 + dev: true + /@pkgjs/parseargs@0.11.0: resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} engines: {node: '>=14'} @@ -2410,6 +2608,14 @@ packages: dev: true optional: true + /@polka/url@0.5.0: + resolution: {integrity: sha512-oZLYFEAzUKyi3SKnXvj32ZCEGH6RDnao7COuCVhDydMS9NrCSVXhM79VaKyP5+Zc33m0QXEd2DN3UkU7OsHcfw==} + dev: true + + /@polka/url@1.0.0-next.29: + resolution: {integrity: sha512-wwQAWhWSuHaag8c4q/KN/vCoeOJYshAIvMQwD4GpSb3OiZklFfvAgmj0VCBBImRpuF/aFgIRzllXlVX93Jevww==} + dev: true + /@redis/bloom@1.2.0(@redis/client@1.5.9): resolution: {integrity: sha512-HG2DFjYKbpNmVXsa0keLHp/3leGJz1mjh09f2RLGGLQZzSHpkmZWuwJbAvo3QcRY8p80m5+ZdXZdYOSBLlp7Cg==} peerDependencies: @@ -3357,6 +3563,10 @@ packages: resolution: {integrity: sha512-iJt33IQnVRkqeqC7PzBHPTC6fDlRNRW8vjrgqtScAhrmMwe8c4Eo7+fUGTa+XdWrpEgpyKWMYmi2dIwMAYRzPw==} dev: true + /@types/minimatch@3.0.5: + resolution: {integrity: sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ==} + dev: true + /@types/node-fetch@2.6.9: resolution: {integrity: sha512-bQVlnMLFJ2d35DkPNjEPmd9ueO/rh5EiaZt2bhqiSarPjZIuIV6bPQVqcrEyvNo+AfTrRGVazle1tl597w3gfA==} dependencies: @@ -3379,6 +3589,10 @@ packages: dependencies: undici-types: 5.26.5 + /@types/parse-json@4.0.2: + resolution: {integrity: sha512-dISoDXWWQwUquiKsyZ4Ng+HX2KsPL7LyHKHQwgGFEA3IaKac4Obd+h2a/a6waisAoepJlBcx9paWqjA8/HVjCw==} + dev: true + /@types/phoenix@1.6.4: resolution: 
{integrity: sha512-B34A7uot1Cv0XtaHRYDATltAdKx0BvVKNgYNqE4WjtPUa4VQJM7kxeXcVKaH+KS+kCmZ+6w+QaUdcljiheiBJA==} dev: false @@ -3545,6 +3759,11 @@ packages: engines: {node: ^16.0.0 || >=18.0.0} dev: true + /@typescript-eslint/types@7.13.1: + resolution: {integrity: sha512-7K7HMcSQIAND6RBL4kDl24sG/xKM13cA85dc7JnmQXw2cBDngg7c19B++JzvJHRG3zG36n9j1i451GBzRuHchw==} + engines: {node: ^18.18.0 || >=20.0.0} + dev: true + /@typescript-eslint/types@8.21.0: resolution: {integrity: sha512-PAL6LUuQwotLW2a8VsySDBwYMm129vFm4tMVlylzdoTybTHaAi0oBp7Ac6LhSrHHOdLM3efH+nAR6hAWoMF89A==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -3576,6 +3795,28 @@ packages: - supports-color dev: true + /@typescript-eslint/typescript-estree@7.13.1(typescript@5.4.5): + resolution: {integrity: sha512-uxNr51CMV7npU1BxZzYjoVz9iyjckBduFBP0S5sLlh1tXYzHzgZ3BR9SVsNed+LmwKrmnqN3Kdl5t7eZ5TS1Yw==} + engines: {node: ^18.18.0 || >=20.0.0} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + dependencies: + '@typescript-eslint/types': 7.13.1 + '@typescript-eslint/visitor-keys': 7.13.1 + debug: 4.4.0 + globby: 11.1.0 + is-glob: 4.0.3 + minimatch: 9.0.5 + semver: 7.7.1 + ts-api-utils: 1.4.3(typescript@5.4.5) + typescript: 5.4.5 + transitivePeerDependencies: + - supports-color + dev: true + /@typescript-eslint/typescript-estree@8.21.0(typescript@5.5.4): resolution: {integrity: sha512-x+aeKh/AjAArSauz0GiQZsjT8ciadNMHdkUSwBB9Z6PrKc/4knM4g3UfHml6oDJmKC88a6//cdxnO/+P2LkMcg==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -3639,6 +3880,14 @@ packages: eslint-visitor-keys: 3.4.3 dev: true + /@typescript-eslint/visitor-keys@7.13.1: + resolution: {integrity: sha512-k/Bfne7lrP7hcb7m9zSsgcBmo+8eicqqfNAJ7uUY+jkTFpKeH2FSkWpFRtimBxgkyvqfu9jTPRbYOvud6isdXA==} + engines: {node: ^18.18.0 || >=20.0.0} + dependencies: + '@typescript-eslint/types': 7.13.1 + eslint-visitor-keys: 3.4.3 + dev: true + /@typescript-eslint/visitor-keys@8.21.0: resolution: {integrity: sha512-BkLMNpdV6prozk8LlyK/SOoWLmUFi+ZD+pcqti9ILCbVvHGk1ui1g4jJOc2WDLaeExz2qWwojxlPce5PljcT3w==} engines: {node: ^18.18.0 || ^20.9.0 || >=21.1.0} @@ -3743,7 +3992,7 @@ packages: '@vitest/spy': 2.1.9 estree-walker: 3.0.3 magic-string: 0.30.17 - vite: 5.4.10 + vite: 5.4.10(@types/node@20.10.5) dev: true /@vitest/pretty-format@2.1.9: @@ -3789,14 +4038,12 @@ packages: entities: 4.5.0 estree-walker: 2.0.2 source-map-js: 1.2.1 - dev: false /@vue/compiler-dom@3.5.13: resolution: {integrity: sha512-ZOJ46sMOKUjO3e94wPdCzQ6P1Lx/vhp2RSvfaab88Ajexs0AHeV0uasYhi99WPaogmBlRHNRuly8xV75cNTMDA==} dependencies: '@vue/compiler-core': 3.5.13 '@vue/shared': 3.5.13 - dev: false /@vue/compiler-sfc@3.5.13: resolution: {integrity: sha512-6VdaljMpD82w6c2749Zhf5T9u5uLBWKnVue6XWxprDobftnletJ8+oel7sexFfM3qIxNmVE7LSFGTpv6obNyaQ==} @@ -3810,14 +4057,12 @@ packages: magic-string: 0.30.17 postcss: 8.5.3 source-map-js: 1.2.1 - dev: false /@vue/compiler-ssr@3.5.13: resolution: {integrity: sha512-wMH6vrYHxQl/IybKJagqbquvxpWCuVYpoUJfCqFZwa/JY1GdATAQ+TgVtgrwwMZ0D07QhA99rs/EAAWfvG6KpA==} dependencies: '@vue/compiler-dom': 3.5.13 '@vue/shared': 3.5.13 - dev: false /@vue/reactivity@3.5.13: resolution: {integrity: sha512-NaCwtw8o48B9I6L1zl2p41OHo/2Z4wqYGGIK1Khu5T7yxrn+ATOixn/Udn2m+6kZKB/J7cuT9DbWWhRxqixACg==} @@ -3853,7 +4098,6 @@ packages: /@vue/shared@3.5.13: resolution: {integrity: sha512-/hnE/qP5ZoGpol0a5mDi45bOd7t3tjYJBjsgCsivow7D48cJeV5l05RD82lPqi7gRiphZM37rnhW1l6ZoCNNnQ==} - dev: false /abbrev@1.1.1: resolution: {integrity: 
sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==} @@ -4093,7 +4337,6 @@ packages: resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} dependencies: sprintf-js: 1.0.3 - dev: false /argparse@2.0.1: resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} @@ -4116,6 +4359,11 @@ packages: is-array-buffer: 3.0.2 dev: true + /array-differ@3.0.0: + resolution: {integrity: sha512-THtfYS6KtME/yIAhKjZ2ul7XI96lQGHRputJQHO80LAWQnuGP4iCIN8vdMRboGbIEYBwU33q8Tch1os2+X0kMg==} + engines: {node: '>=8'} + dev: true + /array-flatten@1.1.1: resolution: {integrity: sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==} dev: false @@ -4190,6 +4438,11 @@ packages: is-shared-array-buffer: 1.0.2 dev: true + /arrify@2.0.1: + resolution: {integrity: sha512-3duEwti880xqi4eAMN8AyR4a0ByT90zoYdLlevfrvU43vb0YZwZVfxOgxWrLXXXpyugL0hNZc9G6BiB5B3nUug==} + engines: {node: '>=8'} + dev: true + /as-table@1.0.55: resolution: {integrity: sha512-xvsWESUJn0JN421Xb9MQw6AsMHRCUknCe0Wjlxvjud80mU4E6hQf1A6NzQKcYNmYw62MfzEtXc+badstZP3JpQ==} dependencies: @@ -4292,7 +4545,6 @@ packages: /base64-js@1.5.1: resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} - dev: false /binary-extensions@2.2.0: resolution: {integrity: sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==} @@ -4303,6 +4555,14 @@ packages: resolution: {integrity: sha512-nbE1WxOTTrUWIfsfZ4aHGYu5DOuNkbxGokjV6Z2kxfJK3uaAb8zNK1muzOeipoLHZjInT4Br88BHpzevc681xA==} dev: false + /bl@5.1.0: + resolution: {integrity: sha512-tv1ZJHLfTDnXE6tMHv73YgSJaWR2AFuPwMntBe7XL/GBFHnT0CLnsHMogfk5+GzCDC5ZWarSCYaIGATZt9dNsQ==} + dependencies: + buffer: 6.0.3 + inherits: 2.0.4 + readable-stream: 3.6.2 + dev: true + /blake3-wasm@2.1.5: resolution: {integrity: sha512-F1+K8EbfOZE49dtoPtmxUQrpXaBIl3ICvasLh+nJta0xkz+9kF/7uet9fLnwKqhDrmj6g+6K3Tw9yQPUg2ka5g==} dev: true @@ -4418,6 +4678,13 @@ packages: isarray: 1.0.0 dev: false + /buffer@6.0.3: + resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} + dependencies: + base64-js: 1.5.1 + ieee754: 1.2.1 + dev: true + /bufferutil@4.0.8: resolution: {integrity: sha512-4T53u4PdgsXqKaIctwF8ifXlRTTmEPJ8iEPWFdGZvcf7sbwYo6FKFEX9eNNAnzFZ7EzJAQ3CJeOtCRA4rDp7Pw==} engines: {node: '>=6.14.2'} @@ -4446,7 +4713,6 @@ packages: /bytes@3.1.2: resolution: {integrity: sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==} engines: {node: '>= 0.8'} - dev: false /cac@6.7.14: resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} @@ -4464,6 +4730,10 @@ packages: get-intrinsic: 1.2.2 set-function-length: 1.1.1 + /callsite@1.0.0: + resolution: {integrity: sha512-0vdNRFXn5q+dtOqjfFtmtlI9N2eVZ7LMyEV2iKC5mEEFvSg/69Ml6b/WU2qF8W1nLRa0wiSrDT3Y5jOHZCwKPQ==} + dev: true + /callsites@3.1.0: resolution: {integrity: sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==} engines: {node: '>=6'} @@ -4474,6 +4744,11 @@ packages: engines: {node: '>= 6'} dev: true + /camelcase@6.3.0: + resolution: {integrity: sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==} + engines: {node: '>=10'} + dev: true + 
/caniuse-lite@1.0.30001632: resolution: {integrity: sha512-udx3o7yHJfUxMLkGohMlVHCvFvWmirKh9JAH/d7WOLPetlH+LTL5cocMZ0t7oZx/mdlOWXti97xLZWc8uURRHg==} @@ -4507,7 +4782,6 @@ packages: /chalk@5.3.0: resolution: {integrity: sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==} engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} - dev: false /check-error@2.1.1: resolution: {integrity: sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==} @@ -4536,6 +4810,13 @@ packages: readdirp: 4.1.2 dev: true + /cli-cursor@4.0.0: + resolution: {integrity: sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + dependencies: + restore-cursor: 4.0.0 + dev: true + /cli-progress@3.12.0: resolution: {integrity: sha512-tRkV3HJ1ASwm19THiiLIXLO7Im7wlTuKnvkYaTkyoAPefqjNg7W7DHKUlGRxy9vxDvbyCYQkQozvptuMkGCg8A==} engines: {node: '>=4'} @@ -4543,10 +4824,23 @@ packages: string-width: 4.2.3 dev: false + /cli-spinners@2.9.2: + resolution: {integrity: sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==} + engines: {node: '>=6'} + dev: true + /client-only@0.0.1: resolution: {integrity: sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==} dev: false + /cliui@7.0.4: + resolution: {integrity: sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==} + dependencies: + string-width: 4.2.3 + strip-ansi: 6.0.1 + wrap-ansi: 7.0.0 + dev: true + /cliui@8.0.1: resolution: {integrity: sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ==} engines: {node: '>=12'} @@ -4556,6 +4850,11 @@ packages: wrap-ansi: 7.0.0 dev: true + /clone@1.0.4: + resolution: {integrity: sha512-JQHZ2QMW6l3aH/j6xCqQThY/9OH4D/9ls34cgkUBiEeocRTU04tHfKPBsUK1PqZCUQM7GiA0IIXJSuXHI64Kbg==} + engines: {node: '>=0.8'} + dev: true + /clsx@1.2.1: resolution: {integrity: sha512-EcR6r5a8bj6pu3ycsa/E/cKVGuTgZJZdsyUYHOksG/UHIiKfjxzRxYJpyVBwYaQeOvghal9fcc4PidlgzugAQg==} engines: {node: '>=6'} @@ -4607,6 +4906,11 @@ packages: delayed-stream: 1.0.0 dev: false + /commander@11.1.0: + resolution: {integrity: sha512-yPVavfyCcRhmorC7rWlkHn15b4wDVgVmBA7kV4QVBsF7kv/9TKJAbAXVTxvTnwP8HHKjRCJDClKbciiYS7p0DQ==} + engines: {node: '>=16'} + dev: true + /commander@3.0.2: resolution: {integrity: sha512-Gar0ASD4BDyKC4hl4DwHqDrmvjoxWKZigVnAbn5H1owvm4CxCPdb0HQDehwNYMJpla5+M2tPmPARzhtYuwpHow==} dev: false @@ -4616,6 +4920,28 @@ packages: engines: {node: '>= 6'} dev: true + /compressible@2.0.18: + resolution: {integrity: sha512-AF3r7P5dWxL8MxyITRMlORQNaOA2IkAFaTr4k7BUumjPtRpGDTZpl0Pb1XCO6JeDCBdp126Cgs9sMxqSjgYyRg==} + engines: {node: '>= 0.6'} + dependencies: + mime-db: 1.52.0 + dev: true + + /compression@1.8.0: + resolution: {integrity: sha512-k6WLKfunuqCYD3t6AsuPGvQWaKwuLLh2/xHNcX4qE+vIfDNXpSqnrhwA7O53R7WVQUnt8dVAIW+YHr7xTgOgGA==} + engines: {node: '>= 0.8.0'} + dependencies: + bytes: 3.1.2 + compressible: 2.0.18 + debug: 2.6.9 + negotiator: 0.6.4 + on-headers: 1.0.2 + safe-buffer: 5.2.1 + vary: 1.1.2 + transitivePeerDependencies: + - supports-color + dev: true + /concat-map@0.0.1: resolution: {integrity: sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==} dev: true @@ -4663,6 +4989,17 @@ packages: vary: 1.1.2 dev: false + /cosmiconfig@7.1.0: + resolution: {integrity: 
sha512-AdmX6xUzdNASswsFtmwSt7Vj8po9IuqXm0UXz7QKPuEUmPB4XyjGfaAr2PSuELMwkRMVH1EpIkX5bTZGRB3eCA==} + engines: {node: '>=10'} + dependencies: + '@types/parse-json': 4.0.2 + import-fresh: 3.3.0 + parse-json: 5.2.0 + path-type: 4.0.0 + yaml: 1.10.2 + dev: true + /cross-fetch@3.1.8: resolution: {integrity: sha512-cvA+JwZoU0Xq+h6WkMvAUqPEYy92Obet6UdKLfW60qn99ftItKjB5T+BkyWOFWe2pUyfQ+IJHmpOTznqk1M6Kg==} dependencies: @@ -4749,7 +5086,6 @@ packages: optional: true dependencies: ms: 2.0.0 - dev: false /debug@3.2.7(supports-color@5.5.0): resolution: {integrity: sha512-CFjzYYAi4ThfiQvizrFQevTTXHtnCqWfe7x1AhgEscTz6ZbLbfoLRLPugTQyBth6f8ZERVUSyWHFD/7Wu4t1XQ==} @@ -4807,6 +5143,12 @@ packages: resolution: {integrity: sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==} dev: true + /defaults@1.0.4: + resolution: {integrity: sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A==} + dependencies: + clone: 1.0.4 + dev: true + /define-data-property@1.1.1: resolution: {integrity: sha512-E7uGkTzkk1d0ByLeSc6ZsFS79Axg+m1P/VsgYsxHgiuc3tFSj+MjMIwe90FC4lOAZzNBdY7kkO2P2wKdsQ1vgQ==} engines: {node: '>= 0.4'} @@ -4837,20 +5179,67 @@ packages: engines: {node: '>=0.4.0'} dev: false - /depd@2.0.0: - resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} - engines: {node: '>= 0.8'} - dev: false - - /dequal@2.0.3: - resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} - engines: {node: '>=6'} - - /destroy@1.2.0: + /depcheck@1.4.7: + resolution: {integrity: sha512-1lklS/bV5chOxwNKA/2XUUk/hPORp8zihZsXflr8x0kLwmcZ9Y9BsS6Hs3ssvA+2wUVbG0U2Ciqvm1SokNjPkA==} + engines: {node: '>=10'} + hasBin: true + dependencies: + '@babel/parser': 7.27.1 + '@babel/traverse': 7.27.1 + '@vue/compiler-sfc': 3.5.13 + callsite: 1.0.0 + camelcase: 6.3.0 + cosmiconfig: 7.1.0 + debug: 4.4.0 + deps-regex: 0.2.0 + findup-sync: 5.0.0 + ignore: 5.3.2 + is-core-module: 2.13.1 + js-yaml: 3.14.1 + json5: 2.2.3 + lodash: 4.17.21 + minimatch: 7.4.6 + multimatch: 5.0.0 + please-upgrade-node: 3.2.0 + readdirp: 3.6.0 + require-package-name: 2.0.1 + resolve: 1.22.8 + resolve-from: 5.0.0 + semver: 7.7.1 + yargs: 16.2.0 + transitivePeerDependencies: + - supports-color + dev: true + + /depd@2.0.0: + resolution: {integrity: sha512-g7nH6P6dyDioJogAAGprGpCtVImJhpPk/roCzdb3fIh61/s/nPsfR6onyMwkCAR/OlC3yBC0lESvUoQEAssIrw==} + engines: {node: '>= 0.8'} + dev: false + + /deps-regex@0.2.0: + resolution: {integrity: sha512-PwuBojGMQAYbWkMXOY9Pd/NWCDNHVH12pnS7WHqZkTSeMESe4hwnKKRp0yR87g37113x4JPbo/oIvXY+s/f56Q==} + dev: true + + /dequal@2.0.3: + resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} + engines: {node: '>=6'} + + /destroy@1.2.0: resolution: {integrity: sha512-2sJGJTaXIIaR1w4iJSNoN0hnMY7Gpc/n8D4qSCJw8QqFWXf7cuAgnEHxBpweaVcPevC2l3KpjYCx3NypQQgaJg==} engines: {node: '>= 0.8', npm: 1.2.8000 || >= 1.4.16} dev: false + /detect-file@1.0.0: + resolution: {integrity: sha512-DtCOLG98P007x7wiiOmfI0fi3eIKyWiLTGJ2MDnVi/E04lWGbf+JzrRHMm0rgIIZJGtHpKpbVgLWHrv8xXpc3Q==} + engines: {node: '>=0.10.0'} + dev: true + + /detect-libc@1.0.3: + resolution: {integrity: sha512-pGjwhsmsp4kL2RTz08wcOlGN83otlqHeD/Z5T8GXZB+/YcpQ/dgo+lbU8ZsGxV0HIvqqxo9l7mqYwyYMD9bKDg==} + engines: {node: '>=0.10'} + hasBin: true + dev: true + /detective@5.2.1: resolution: {integrity: 
sha512-v9XE1zRnz1wRtgurGu0Bs8uHKFSTdteYZNbIPFVhUZ39L/S79ppMpdmVOZAnoz1jfEFodc48n6MX483Xo3t1yw==} engines: {node: '>=0.8.0'} @@ -4869,6 +5258,14 @@ packages: resolution: {integrity: sha512-IayShXAgj/QMXgB0IWmKx+rOPuGMhqm5w6jvFxmVenXKIzRqTAAsbBPT3kWQeGANj3jGgvcvv4yK6SxqYmikgw==} dev: false + /digraph-js@2.2.3: + resolution: {integrity: sha512-btynrARSW6pBmDz9+cwCxkBJ91CGBxIaNQo7V+ul9/rCRr3HddwehpEMnL6Ru2OeC2pKdRteB1v5TgZRrAAYKQ==} + engines: {node: '>=16.0.0'} + dependencies: + lodash.isequal: 4.5.0 + lodash.uniqwith: 4.5.0 + dev: true + /dir-glob@3.0.1: resolution: {integrity: sha512-WkrWp9GR4KXfKGYzOLmTuGVi1UWFfws377n9cc55/tb6DuqyF6pcQ5AbiHEshaDpY9v6oaSr2XCDidGmMwdzIA==} engines: {node: '>=8'} @@ -4922,6 +5319,10 @@ packages: resolution: {integrity: sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow==} dev: false + /effect@3.3.2: + resolution: {integrity: sha512-695XQBtp+UUYG50oREG9ujnRoeQU7xhwHDhT6ZAexm3Q+umdml1kjxcPoYRrS65crmaLlhVpjZHePJNzWOODnA==} + dev: true + /electron-to-chromium@1.4.591: resolution: {integrity: sha512-vLv/P7wwAPKQoY+CVMyyI6rsTp+A14KGtPXx92oz1FY41AAqa9l6Wkizcixg0LDuJgyeo8xgNN9+9hsnGp66UA==} dev: true @@ -4949,7 +5350,6 @@ packages: /entities@4.5.0: resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==} engines: {node: '>=0.12'} - dev: false /error-ex@1.3.2: resolution: {integrity: sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g==} @@ -5580,7 +5980,6 @@ packages: resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} engines: {node: '>=4'} hasBin: true - dev: false /esquery@1.5.0: resolution: {integrity: sha512-YQLXUplAwJgCydQ78IMJywZCceoqk1oH01OERdSAJc/7U2AylwjhSCLDEtqwg811idIS/9fIU5GjG73IgjKMVg==} @@ -5620,7 +6019,6 @@ packages: /estree-walker@2.0.2: resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==} - dev: false /estree-walker@3.0.3: resolution: {integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==} @@ -5662,6 +6060,13 @@ packages: engines: {node: '>=6'} dev: true + /expand-tilde@2.0.2: + resolution: {integrity: sha512-A5EmesHW6rfnZ9ysHQjPdJRni0SRar0tjtG5MNtm9n5TUvsYU8oozprtRD4AqHxcZWWlVuAmQo2nWKfN9oyjTw==} + engines: {node: '>=0.10.0'} + dependencies: + homedir-polyfill: 1.0.3 + dev: true + /expect-type@1.2.0: resolution: {integrity: sha512-80F22aiJ3GLyVnS/B3HzgR6RelZVumzj9jkL0Rhz4h0xYbNW9PjlQz5h3J/SShErbXBc295vseR4/MIbVmUbeA==} engines: {node: '>=12.0.0'} @@ -5814,6 +6219,16 @@ packages: path-exists: 4.0.0 dev: true + /findup-sync@5.0.0: + resolution: {integrity: sha512-MzwXju70AuyflbgeOhzvQWAvvQdo1XL0A9bVvlXsYcFEBM87WR4OakL4OfZq+QRmr+duJubio+UtNQCPsVESzQ==} + engines: {node: '>= 10.13.0'} + dependencies: + detect-file: 1.0.0 + is-glob: 4.0.3 + micromatch: 4.0.8 + resolve-dir: 1.0.1 + dev: true + /flat-cache@3.2.0: resolution: {integrity: sha512-CYcENa+FtcUKLmhhqyctpclsq7QF38pKjZHsGNiSQF5r4FtoKDWabFDl3hzaEQMvT1LHEysw5twgLvpYYb4vbw==} engines: {node: ^10.12.0 || >=12.0.0} @@ -5876,6 +6291,10 @@ packages: engines: {node: '>= 0.6'} dev: false + /fp-ts@2.5.0: + resolution: {integrity: sha512-xkC9ZKl/i2cU+8FAsdyLcTvPRXphp42FcK5WmZpB47VXb4gggC3DHlVDKNLdbC+U8zz6yp1b0bj0mZg0axmZYQ==} + dev: true + /fraction.js@4.3.7: resolution: {integrity: 
sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==} dev: true @@ -5885,6 +6304,12 @@ packages: engines: {node: '>= 0.6'} dev: false + /fs-tree-structure@0.0.5: + resolution: {integrity: sha512-827ACYnAMC1DQRvhLUzZH0fCPhBJLo9P7WfxxwP4cibIzlrSzbD+Fh9W4FxFtSU+p9GlX0BoQUWLJ2LFJuoKuQ==} + dependencies: + lodash-es: 4.17.21 + dev: true + /fs.realpath@1.0.0: resolution: {integrity: sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==} dev: true @@ -5994,6 +6419,31 @@ packages: path-is-absolute: 1.0.1 dev: true + /global-modules@1.0.0: + resolution: {integrity: sha512-sKzpEkf11GpOFuw0Zzjzmt4B4UZwjOcG757PPvrfhxcLFbq0wpsgpOqxpxtxFiCG4DtG93M6XRVbF2oGdev7bg==} + engines: {node: '>=0.10.0'} + dependencies: + global-prefix: 1.0.2 + is-windows: 1.0.2 + resolve-dir: 1.0.1 + dev: true + + /global-prefix@1.0.2: + resolution: {integrity: sha512-5lsx1NUDHtSjfg0eHlmYvZKv8/nVqX4ckFbM+FrGcQ+04KWcWFo9P5MxPZYSzUvyzmdTbI7Eix8Q4IbELDqzKg==} + engines: {node: '>=0.10.0'} + dependencies: + expand-tilde: 2.0.2 + homedir-polyfill: 1.0.3 + ini: 1.3.8 + is-windows: 1.0.2 + which: 1.3.1 + dev: true + + /globals@11.12.0: + resolution: {integrity: sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==} + engines: {node: '>=4'} + dev: true + /globals@13.24.0: resolution: {integrity: sha512-AhO5QUcj8llrbG09iWhPU2B204J1xnPeL8kQmVorSsy+Sjj1sk8gIyh6cUocGmH4L0UuhAJy+hJMRA4mgA4mFQ==} engines: {node: '>=8'} @@ -6074,6 +6524,13 @@ packages: dependencies: function-bind: 1.1.2 + /homedir-polyfill@1.0.3: + resolution: {integrity: sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA==} + engines: {node: '>=0.10.0'} + dependencies: + parse-passwd: 1.0.0 + dev: true + /hosted-git-info@2.8.9: resolution: {integrity: sha512-mxIDAb9Lsm6DoOJ7xH+5+X4y1LU/4Hi50L9C5sIswK3JzULS4bwk1FvjdBgvYR4bzT4tuUQiC15FE2f5HbLvYw==} dev: true @@ -6106,10 +6563,21 @@ packages: resolution: {integrity: sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg==} dev: false + /ieee754@1.2.1: + resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} + dev: true + /ignore-by-default@1.0.1: resolution: {integrity: sha512-Ius2VYcGNk7T90CppJqcIkS5ooHUZyIQK+ClZfMfMNFEF9VSE73Fq+906u/CWu92x4gzZMWOwfFYckPObzdEbA==} dev: true + /ignore-walk@6.0.5: + resolution: {integrity: sha512-VuuG0wCnjhnylG1ABXT3dAuIpTNDs/G8jlpmwXY03fXoXy/8ZK8/T+hMzt8L4WnrLCJgdybqgPagnF/f97cg3A==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + dependencies: + minimatch: 9.0.5 + dev: true + /ignore@5.3.0: resolution: {integrity: sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg==} engines: {node: '>= 4'} @@ -6144,6 +6612,10 @@ packages: /inherits@2.0.4: resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} + /ini@1.3.8: + resolution: {integrity: sha512-JV/yugV2uzW5iMRSiZAyDtQd+nxtUnjeLt0acNdw98kKLrvuRVyB80tsREOE7yvGVgalhZ6RNXCmEHkUKBKxew==} + dev: true + /internal-slot@1.0.6: resolution: {integrity: sha512-Xj6dv+PsbtwyPpEflsejS+oIZxmMlV44zAhG479uYu89MsjcYOhCFnNyKrkJrihbsiasQyY0afoCl/9BLR65bg==} engines: {node: '>= 0.4'} @@ -6153,6 +6625,14 @@ packages: side-channel: 1.0.4 dev: true + /io-ts@2.2.22(fp-ts@2.5.0): + resolution: {integrity: 
sha512-FHCCztTkHoV9mdBsHpocLpdTAfh956ZQcIkWQxxS0U5HT53vtrcuYdQneEJKH6xILaLNzXVl2Cvwtoy8XNN0AA==} + peerDependencies: + fp-ts: ^2.5.0 + dependencies: + fp-ts: 2.5.0 + dev: true + /ipaddr.js@1.9.1: resolution: {integrity: sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g==} engines: {node: '>= 0.10'} @@ -6223,6 +6703,12 @@ packages: has-tostringtag: 1.0.0 dev: true + /is-docker@3.0.0: + resolution: {integrity: sha512-eljcgEDlEns/7AXFosB5K/2nCM4P7FQPkGc/DWLy5rmFEWvZayGrik1d9/QIY5nJ4f9YsVvBkA6kJpHn9rISdQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + hasBin: true + dev: true + /is-extglob@2.1.1: resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} engines: {node: '>=0.10.0'} @@ -6251,6 +6737,19 @@ packages: is-extglob: 2.1.1 dev: true + /is-inside-container@1.0.0: + resolution: {integrity: sha512-KIYLCCJghfHZxqjYBE7rEy0OBuTd5xCHS7tHVgvCLkx7StIoaxwNW3hCALgEUjFfeRk+MG/Qxmp/vtETEF3tRA==} + engines: {node: '>=14.16'} + hasBin: true + dependencies: + is-docker: 3.0.0 + dev: true + + /is-interactive@2.0.0: + resolution: {integrity: sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==} + engines: {node: '>=12'} + dev: true + /is-map@2.0.2: resolution: {integrity: sha512-cOZFQQozTha1f4MxLFzlgKYPTyj26picdZTx82hbc/Xf4K/tZOOXSCkMvU4pKioRXGDLJRn0GM7Upe7kR721yg==} dev: true @@ -6325,6 +6824,11 @@ packages: resolution: {integrity: sha512-cyA56iCMHAh5CdzjJIa4aohJyeO1YbwLi3Jc35MmRU6poroFjIGZzUzupGiRPOjgHg9TLu43xbpwXk523fMxKA==} dev: false + /is-unicode-supported@1.3.0: + resolution: {integrity: sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==} + engines: {node: '>=12'} + dev: true + /is-weakmap@2.0.1: resolution: {integrity: sha512-NSBR4kH5oVj1Uwvv970ruUkCV7O1mzgVFO4/rev2cLRda9Tm9HrL70ZPut4rOHgY0FNrUu9BCbXA2sdQ+x0chA==} dev: true @@ -6342,6 +6846,18 @@ packages: get-intrinsic: 1.2.2 dev: true + /is-windows@1.0.2: + resolution: {integrity: sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA==} + engines: {node: '>=0.10.0'} + dev: true + + /is-wsl@3.1.0: + resolution: {integrity: sha512-UcVfVfaK4Sc4m7X3dUSoHoozQGBEFeDC+zVo06t98xe8CzHSZZBekNXH+tu0NalHolcJ/QAGqS46Hef7QXBIMw==} + engines: {node: '>=16'} + dependencies: + is-inside-container: 1.0.0 + dev: true + /isarray@1.0.0: resolution: {integrity: sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ==} dev: false @@ -6409,7 +6925,6 @@ packages: dependencies: argparse: 1.0.10 esprima: 4.0.1 - dev: false /js-yaml@4.1.0: resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} @@ -6417,6 +6932,12 @@ packages: dependencies: argparse: 2.0.1 + /jsesc@3.1.0: + resolution: {integrity: sha512-/sM3dO2FOzXjKQhJuo0Q173wf2KOo8t4I8vHy6lF9poUp7bKT0/NHE8fPX23PwfhnykfqnC2xRxOnVw5XuGIaA==} + engines: {node: '>=6'} + hasBin: true + dev: true + /json-buffer@3.0.1: resolution: {integrity: sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ==} dev: true @@ -6425,6 +6946,10 @@ packages: resolution: {integrity: sha512-mrqyZKfX5EhL7hvqcV6WG1yYjnjeuYDzDhhcAAUrq8Po85NBQBJP+ZDUT75qZQ98IkUoBqdkExkukOU7Ts2wrw==} dev: true + /json-parse-even-better-errors@2.3.1: + resolution: {integrity: sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==} + dev: true + 
/json-schema-traverse@0.4.1: resolution: {integrity: sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==} dev: true @@ -6444,6 +6969,12 @@ packages: minimist: 1.2.8 dev: true + /json5@2.2.3: + resolution: {integrity: sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==} + engines: {node: '>=6'} + hasBin: true + dev: true + /jsondiffpatch@0.6.0: resolution: {integrity: sha512-3QItJOXp2AP1uv7waBkao5nCvhEv+QmJAd38Ybq7wNI74Q+BBmnLn4EDKz6yI9xGAIQoUF87qHt+kc1IVxB4zQ==} engines: {node: ^18.0.0 || >=20.0.0} @@ -6501,6 +7032,11 @@ packages: json-buffer: 3.0.1 dev: true + /kleur@4.1.5: + resolution: {integrity: sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==} + engines: {node: '>=6'} + dev: true + /language-subtag-registry@0.3.22: resolution: {integrity: sha512-tN0MCzyWnoz/4nHS6uxdlFWoUZT7ABptwKPQ52Ea7URk6vll88bWBVhodtnlfEuCcKWNGoc+uGbw1cwa9IKh/w==} dev: true @@ -6560,6 +7096,10 @@ packages: p-locate: 5.0.0 dev: true + /lodash-es@4.17.21: + resolution: {integrity: sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==} + dev: true + /lodash.includes@4.3.0: resolution: {integrity: sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==} dev: false @@ -6568,6 +7108,11 @@ packages: resolution: {integrity: sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==} dev: false + /lodash.isequal@4.5.0: + resolution: {integrity: sha512-pDo3lu8Jhfjqls6GkMgpahsF9kCyayhgykjyLMNFTKWrpVdAQtYyB4muAMWozBB4ig/dtWAmsMxLEI8wuz+DYQ==} + deprecated: This package is deprecated. Use require('node:util').isDeepStrictEqual instead. + dev: true + /lodash.isinteger@4.0.4: resolution: {integrity: sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==} dev: false @@ -6595,6 +7140,22 @@ packages: resolution: {integrity: sha512-HDWXG8isMntAyRF5vZ7xKuEvOhT4AhlRt/3czTSjvGUxjYCBVRQY48ViDHyfYz9VIoBkW4TMGQNapx+l3RUwdA==} dev: true + /lodash.uniqwith@4.5.0: + resolution: {integrity: sha512-7lYL8bLopMoy4CTICbxygAUq6CdRJ36vFc80DucPueUee+d5NBRxz3FdT9Pes/HEx5mPoT9jwnsEJWz1N7uq7Q==} + dev: true + + /lodash@4.17.21: + resolution: {integrity: sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==} + dev: true + + /log-symbols@5.1.0: + resolution: {integrity: sha512-l0x2DvrW294C9uDCoQe1VSU4gf529FkSZ6leBl4TiqZH/e+0R7hSfHQBNut2mNygDgHwvYHfFLn6Oxb3VWj2rA==} + engines: {node: '>=12'} + dependencies: + chalk: 5.3.0 + is-unicode-supported: 1.3.0 + dev: true + /loose-envify@1.4.0: resolution: {integrity: sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q==} hasBin: true @@ -6627,6 +7188,13 @@ packages: dependencies: '@jridgewell/sourcemap-codec': 1.5.0 + /matchit@1.1.0: + resolution: {integrity: sha512-+nGYoOlfHmxe5BW5tE0EMJppXEwdSf8uBA1GTZC7Q77kbT35+VKLYJMzVNWCHSsga1ps1tPYFtFyvxvKzWVmMA==} + engines: {node: '>=6'} + dependencies: + '@arr/every': 1.0.1 + dev: true + /mdn-data@2.0.30: resolution: {integrity: sha512-GaqWWShW4kv/G9IEucWScBx9G1/vsFZZJUO+tD26M8J8z3Kw5RDQjaoZe03YAClgeS/SWPOcb4nkFBTEi5DUEA==} dev: false @@ -6650,6 +7218,11 @@ packages: engines: {node: '>= 8'} dev: true + /meriyah@4.5.0: + resolution: {integrity: sha512-Rbiu0QPIxTXgOXwiIpRVJfZRQ2FWyfzYrOGBs9SN5RbaXg1CN5ELn/plodwWwluX93yzc4qO/bNIen1ThGFCxw==} + engines: {node: '>=10.4.0'} + dev: true + /methods@1.1.2: 
resolution: {integrity: sha512-iclAHeNqNm68zFtnZ0e+1L2yUIdvzNoauKU4WBA3VvH/vPFieF7qfRlwUZU+DA9P9bPXIS90ulxoUoCH23sV2w==} engines: {node: '>= 0.6'} @@ -6674,7 +7247,6 @@ packages: /mime-db@1.52.0: resolution: {integrity: sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==} engines: {node: '>= 0.6'} - dev: false /mime-types@2.1.35: resolution: {integrity: sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==} @@ -6695,6 +7267,11 @@ packages: hasBin: true dev: true + /mimic-fn@2.1.0: + resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} + engines: {node: '>=6'} + dev: true + /miniflare@3.20250129.0: resolution: {integrity: sha512-qYlGEjMl/2kJdgNaztj4hpA64d6Dl79Lx/NL61p/v5XZRiWanBOTgkQqdPxCKZOj6KQnioqhC7lfd6jDXKSs2A==} engines: {node: '>=16.13'} @@ -6722,6 +7299,13 @@ packages: brace-expansion: 1.1.11 dev: true + /minimatch@7.4.6: + resolution: {integrity: sha512-sBz8G/YjVniEz6lKPNpKxXwazJe4c19fEfV2GDMX6AjFz+MX9uDWIZW8XreVhkFW3fkIdTv/gxWr/Kks5FFAVw==} + engines: {node: '>=10'} + dependencies: + brace-expansion: 2.0.1 + dev: true + /minimatch@9.0.4: resolution: {integrity: sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==} engines: {node: '>=16 || 14 >=14.17'} @@ -6753,9 +7337,13 @@ packages: ufo: 1.5.4 dev: true + /mrmime@2.0.1: + resolution: {integrity: sha512-Y3wQdFg2Va6etvQ5I82yUhGdsKrcYox6p7FfL1LbK2J4V01F9TGlepTIhnK24t7koZibmg82KGglhA1XK5IsLQ==} + engines: {node: '>=10'} + dev: true + /ms@2.0.0: resolution: {integrity: sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==} - dev: false /ms@2.1.2: resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} @@ -6764,6 +7352,17 @@ packages: /ms@2.1.3: resolution: {integrity: sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==} + /multimatch@5.0.0: + resolution: {integrity: sha512-ypMKuglUrZUD99Tk2bUQ+xNQj43lPEfAeX2o9cTteAmShXy2VHDJpuwu1o0xqoKCt9jLVAvwyFKdLTPXKAfJyA==} + engines: {node: '>=10'} + dependencies: + '@types/minimatch': 3.0.5 + array-differ: 3.0.0 + array-union: 2.1.0 + arrify: 2.0.1 + minimatch: 3.1.2 + dev: true + /mustache@4.2.0: resolution: {integrity: sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==} hasBin: true @@ -6780,7 +7379,6 @@ packages: resolution: {integrity: sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==} engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} hasBin: true - dev: false /nanoid@3.3.6: resolution: {integrity: sha512-BGcqMMJuToF7i1rt+2PWSNVnWIkGCU78jBG3RxO/bZlnZPK2Cmi2QaffxGO/2RvWi9sL+FAiRiXMgsyxQ1DIDA==} @@ -6807,6 +7405,11 @@ packages: engines: {node: '>= 0.6'} dev: false + /negotiator@0.6.4: + resolution: {integrity: sha512-myRT3DiWPHqho5PrJaIRyaMv2kgYf0mUVgBNOYMuCH5Ki1yEiQaf/ZJuQ62nvpc44wL5WDbTX7yGJi1Neevw8w==} + engines: {node: '>= 0.6'} + dev: true + /next-tick@1.1.0: resolution: {integrity: sha512-CXdUiJembsNjuToQvxayPZF9Vqht7hewsvy2sOWafLvi2awflj9mOC6bHIg50orX8IJvWKY9wYQ/zB2kogPslQ==} dev: false @@ -6857,6 +7460,10 @@ packages: resolution: {integrity: sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==} dev: true + /node-addon-api@7.1.1: + resolution: {integrity: 
sha512-5m3bsyrjFWE1xf7nz7YXdN4udnVtXK6/Yfgn5qnahL6bCkf2yKt4k3nuTKAtT4r3IG8JNR2ncsIMdZuAzJjHQQ==} + dev: true + /node-domexception@1.0.0: resolution: {integrity: sha512-/jKZoMpw0F8GRwl4/eLROPA3cfcXtLApP0QzLmUT/HuPCZWyB7IY9ZrMeKw2O/nFIqPQB3PVM9aYm0F312AXDQ==} engines: {node: '>=10.5.0'} @@ -7023,12 +7630,24 @@ packages: ee-first: 1.1.1 dev: false + /on-headers@1.0.2: + resolution: {integrity: sha512-pZAE+FJLoyITytdqK0U5s+FIpjN0JP3OzFi/u8Rx+EV5/W+JTWGXG8xFzevE7AjBfDqHv/8vL8qQsIhHnqRkrA==} + engines: {node: '>= 0.8'} + dev: true + /once@1.4.0: resolution: {integrity: sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==} dependencies: wrappy: 1.0.2 dev: true + /onetime@5.1.2: + resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} + engines: {node: '>=6'} + dependencies: + mimic-fn: 2.1.0 + dev: true + /openai@4.51.0: resolution: {integrity: sha512-UKuWc3/qQyklqhHM8CbdXCv0Z0obap6T0ECdcO5oATQxAbKE5Ky3YCXFQY207z+eGG6ez4U9wvAcuMygxhmStg==} hasBin: true @@ -7099,6 +7718,21 @@ packages: word-wrap: 1.2.5 dev: true + /ora@6.3.1: + resolution: {integrity: sha512-ERAyNnZOfqM+Ao3RAvIXkYh5joP220yf59gVe2X/cI6SiCxIdi4c9HZKZD8R6q/RDXEje1THBju6iExiSsgJaQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + dependencies: + chalk: 5.3.0 + cli-cursor: 4.0.0 + cli-spinners: 2.9.2 + is-interactive: 2.0.0 + is-unicode-supported: 1.3.0 + log-symbols: 5.1.0 + stdin-discarder: 0.1.0 + strip-ansi: 7.1.0 + wcwidth: 1.0.1 + dev: true + /p-limit@3.1.0: resolution: {integrity: sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ==} engines: {node: '>=10'} @@ -7120,6 +7754,11 @@ packages: callsites: 3.1.0 dev: true + /parse-gitignore@2.0.0: + resolution: {integrity: sha512-RmVuCHWsfu0QPNW+mraxh/xjQVw/lhUCUru8Zni3Ctq3AoMhpDTq0OVdKS6iesd6Kqb7viCV3isAL43dciOSog==} + engines: {node: '>=14'} + dev: true + /parse-json@4.0.0: resolution: {integrity: sha512-aOIos8bujGN93/8Ox/jPLh7RwVnPEysynVFE+fQZyg6jKELEHwzgKdLRFHUgXJL6kylijVSBC4BvN9OmsB48Rw==} engines: {node: '>=4'} @@ -7128,6 +7767,21 @@ packages: json-parse-better-errors: 1.0.2 dev: true + /parse-json@5.2.0: + resolution: {integrity: sha512-ayCKvm/phCGxOkYRSCM82iDwct8/EonSEgCSxWxD7ve6jHggsFl4fZVQBPRNgQoKiuV/odhFrGzQXZwbifC8Rg==} + engines: {node: '>=8'} + dependencies: + '@babel/code-frame': 7.27.1 + error-ex: 1.3.2 + json-parse-even-better-errors: 2.3.1 + lines-and-columns: 1.2.4 + dev: true + + /parse-passwd@1.0.0: + resolution: {integrity: sha512-1Y1A//QUXEZK7YKz+rD9WydcE1+EuPr6ZBgKecAB8tmoW6UFv0NREVJe1p+jRxtThkcbbKkfwIbWJe/IeE6m2Q==} + engines: {node: '>=0.10.0'} + dev: true + /parseurl@1.3.3: resolution: {integrity: sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ==} engines: {node: '>= 0.8'} @@ -7251,11 +7905,24 @@ packages: pathe: 2.0.2 dev: true + /please-upgrade-node@3.2.0: + resolution: {integrity: sha512-gQR3WpIgNIKwBMVLkpMUeR3e1/E1y42bqDQZfql+kDeXd8COYfM8PQA4X6y7a8u9Ua9FHmsrrmirW2vHs45hWg==} + dependencies: + semver-compare: 1.0.0 + dev: true + /pluralize@8.0.0: resolution: {integrity: sha512-Nc3IT5yHzflTfbjgqWcCPpo7DaKy4FnpB0l/zCAW0Tc7jxAiuqSxHasntB3D7887LSrA93kDJ9IXovxJYxyLCA==} engines: {node: '>=4'} dev: false + /polka@0.5.2: + resolution: {integrity: sha512-FVg3vDmCqP80tOrs+OeNlgXYmFppTXdjD5E7I4ET1NjvtNmQrb1/mJibybKkb/d4NA7YWAr1ojxuhpL3FHqdlw==} + dependencies: + '@polka/url': 0.5.0 + trouter: 2.0.1 + dev: true + /postcss-import@14.1.0(postcss@8.4.38): resolution: 
{integrity: sha512-flwI+Vgm4SElObFVPpTIT7SU7R3qk2L7PyduMcokiaVKuWv9d/U+Gm/QAd8NDLuykTWTkcrjOeD2Pp1rMeBTGw==} engines: {node: '>=10.0.0'} @@ -7372,7 +8039,6 @@ packages: nanoid: 3.3.11 picocolors: 1.1.1 source-map-js: 1.2.1 - dev: false /prelude-ls@1.2.1: resolution: {integrity: sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g==} @@ -7489,6 +8155,15 @@ packages: path-type: 3.0.0 dev: true + /readable-stream@3.6.2: + resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} + engines: {node: '>= 6'} + dependencies: + inherits: 2.0.4 + string_decoder: 1.3.0 + util-deprecate: 1.0.2 + dev: true + /readdirp@3.6.0: resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} engines: {node: '>=8.10.0'} @@ -7542,6 +8217,18 @@ packages: engines: {node: '>=0.10.0'} dev: true + /require-package-name@2.0.1: + resolution: {integrity: sha512-uuoJ1hU/k6M0779t3VMVIYpb2VMJk05cehCaABFhXaibcbvfgR8wKiozLjVFSzJPmQMRqIcO0HMyTFqfV09V6Q==} + dev: true + + /resolve-dir@1.0.1: + resolution: {integrity: sha512-R7uiTjECzvOsWSfdM0QKFNBVFcK27aHOUwdvK53BcW8zqnGdYp0Fbj82cy54+2A4P2tFM22J5kRfe1R+lM/1yg==} + engines: {node: '>=0.10.0'} + dependencies: + expand-tilde: 2.0.2 + global-modules: 1.0.0 + dev: true + /resolve-from@4.0.0: resolution: {integrity: sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==} engines: {node: '>=4'} @@ -7574,6 +8261,14 @@ packages: supports-preserve-symlinks-flag: 1.0.0 dev: true + /restore-cursor@4.0.0: + resolution: {integrity: sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + dependencies: + onetime: 5.1.2 + signal-exit: 3.0.7 + dev: true + /reusify@1.0.4: resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} @@ -7681,7 +8376,6 @@ packages: /safe-buffer@5.2.1: resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} - dev: false /safe-regex-test@1.0.0: resolution: {integrity: sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA==} @@ -7709,6 +8403,10 @@ packages: resolution: {integrity: sha512-6aU+Rwsezw7VR8/nyvKTx8QpWH9FrcYiXXlqC4z5d5XQBDRqtbfsRjnwGyqbi3gddNtWHuEk9OANUotL26qKUw==} dev: false + /semver-compare@1.0.0: + resolution: {integrity: sha512-YM3/ITh2MJ5MtzaM429anh+x2jiLVjqILF4m4oyQB18W7Ggea7BfqdH/wGMK7dDiMghv/6WG7znWMwUDzJiXow==} + dev: true + /semver@5.7.2: resolution: {integrity: sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==} hasBin: true @@ -7846,6 +8544,10 @@ packages: resolution: {integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==} dev: true + /signal-exit@3.0.7: + resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} + dev: true + /signal-exit@4.1.0: resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} engines: {node: '>=14'} @@ -7868,6 +8570,53 @@ packages: semver: 7.5.4 dev: true + /sirv@2.0.4: + resolution: {integrity: sha512-94Bdh3cC2PKrbgSOUqTiGPWVZeSiXfKOVZNJniWoqrWrRkB1CJzBU3NEbiTsPcYy1lDsANA/THzS+9WBiy5nfQ==} + 
engines: {node: '>= 10'} + dependencies: + '@polka/url': 1.0.0-next.29 + mrmime: 2.0.1 + totalist: 3.0.1 + dev: true + + /skott-webapp@2.3.0: + resolution: {integrity: sha512-nmt+ilxGOqX5zN2WDKv1Y5gLfxy/lceHgbB8HM/ym/Cm8572ypD1s2S+pcN+jOw13xqoavHJPonX1WT2QvkpDg==} + dependencies: + digraph-js: 2.2.3 + dev: true + + /skott@0.35.4: + resolution: {integrity: sha512-z6Ww+Z+TdLO1Z1HXiW9iltburLEWkapxk2MkI+8UQGsJ7d1HiO0dj7ZI0Q/kV/nwbOaOZyRUgMO7hj1/d8h8hw==} + hasBin: true + dependencies: + '@parcel/watcher': 2.5.1 + '@typescript-eslint/typescript-estree': 7.13.1(typescript@5.4.5) + commander: 11.1.0 + compression: 1.8.0 + depcheck: 1.4.7 + digraph-js: 2.2.3 + effect: 3.3.2 + estree-walker: 3.0.3 + fp-ts: 2.5.0 + fs-tree-structure: 0.0.5 + ignore-walk: 6.0.5 + io-ts: 2.2.22(fp-ts@2.5.0) + is-wsl: 3.1.0 + json5: 2.2.3 + kleur: 4.1.5 + lodash-es: 4.17.21 + meriyah: 4.5.0 + minimatch: 9.0.5 + ora: 6.3.1 + parse-gitignore: 2.0.0 + polka: 0.5.2 + sirv: 2.0.4 + skott-webapp: 2.3.0 + typescript: 5.4.5 + transitivePeerDependencies: + - supports-color + dev: true + /slash@3.0.0: resolution: {integrity: sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==} engines: {node: '>=8'} @@ -7952,7 +8701,6 @@ packages: /sprintf-js@1.0.3: resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} - dev: false /sswr@2.0.0(svelte@4.2.19): resolution: {integrity: sha512-mV0kkeBHcjcb0M5NqKtKVg/uTIYNlIIniyDfSGrSfxpEdM9C365jK0z55pl9K0xAkNTJi2OAOVFQpgMPUk+V0w==} @@ -7992,6 +8740,13 @@ packages: resolution: {integrity: sha512-vj5lIj3Mwf9D79hBkltk5qmkFI+biIKWS2IBxEyEU3AX1tUf7AoL8nSazCOiiqQsGKIq01SClsKEzweu34uwvA==} dev: true + /stdin-discarder@0.1.0: + resolution: {integrity: sha512-xhV7w8S+bUwlPTb4bAOUQhv8/cSS5offJuX8GQGq32ONF0ZtDWKfkdomM3HMRA+LhX6um/FZ0COqlwsjD53LeQ==} + engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + dependencies: + bl: 5.1.0 + dev: true + /stoppable@1.1.0: resolution: {integrity: sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==} engines: {node: '>=4', npm: '>=6'} @@ -8067,6 +8822,12 @@ packages: es-abstract: 1.22.3 dev: true + /string_decoder@1.3.0: + resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + dependencies: + safe-buffer: 5.2.1 + dev: true + /strip-ansi@6.0.1: resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} engines: {node: '>=8'} @@ -8328,6 +9089,11 @@ packages: engines: {node: '>=0.6'} dev: false + /totalist@3.0.1: + resolution: {integrity: sha512-sf4i37nQ2LBx4m3wB74y+ubopq6W/dIzXg0FDGjsYnZHVa1Da8FH853wlL2gtUhg+xJXjfk3kUZS3BRoQeoQBQ==} + engines: {node: '>=6'} + dev: true + /touch@3.1.0: resolution: {integrity: sha512-WBx8Uy5TLtOSRtIq+M03/sKDrXCLHxwDcquSP2c43Le03/9serjQBIztjRz6FkJez9D/hleyAXTBGLwwZUw9lA==} hasBin: true @@ -8350,6 +9116,13 @@ packages: hasBin: true dev: true + /trouter@2.0.1: + resolution: {integrity: sha512-kr8SKKw94OI+xTGOkfsvwZQ8mWoikZDd2n8XZHjJVZUARZT+4/VV6cacRS6CLsH9bNm+HFIPU1Zx4CnNnb4qlQ==} + engines: {node: '>=6'} + dependencies: + matchit: 1.1.0 + dev: true + /ts-api-utils@1.0.3(typescript@4.7.4): resolution: {integrity: sha512-wNMeqtMz5NtwpT/UZGY5alT+VoKdSsOOP/kqHFcUW1P/VRhH2wJ48+DN2WwUliNbQ976ETwDL0Ifd2VVvgonvg==} engines: {node: '>=16.13.0'} @@ -8359,6 +9132,15 @@ packages: typescript: 4.7.4 dev: true + /ts-api-utils@1.4.3(typescript@5.4.5): + resolution: {integrity: 
sha512-i3eMG77UTMD0hZhgRS562pv83RC6ukSAC2GMNWc+9dieh/+jDM5u5YG+NHX6VNDRHQcHwmsTHctP9LhbC3WxVw==} + engines: {node: '>=16'} + peerDependencies: + typescript: '>=4.2.0' + dependencies: + typescript: 5.4.5 + dev: true + /ts-api-utils@2.0.0(typescript@5.5.4): resolution: {integrity: sha512-xCt/TOAc+EOHS1XPnijD3/yzpH6qg2xppZO1YDqGoVsNXfQfzHpOdNuXwrwOU8u4ITXJyDCTyt8w5g1sZv9ynQ==} engines: {node: '>=18.12'} @@ -8381,17 +9163,6 @@ packages: resolution: {integrity: sha512-Y/arvbn+rrz3JCKl9C4kVNfTfSm2/mEp5FSz5EsZSANGPSlQrpRI5M4PKF+mJnE52jOO90PnPSc3Ur3bTQw0gA==} dev: true - /tsconfck@3.1.4: - resolution: {integrity: sha512-kdqWFGVJqe+KGYvlSO9NIaWn9jT1Ny4oKVzAJsKii5eoE9snzTJzL4+MMVOMn+fikWGFmKEylcXL710V/kIPJQ==} - engines: {node: ^18 || >=20} - hasBin: true - peerDependencies: - typescript: ^5.0.0 - peerDependenciesMeta: - typescript: - optional: true - dev: true - /tsconfck@3.1.4(typescript@5.5.4): resolution: {integrity: sha512-kdqWFGVJqe+KGYvlSO9NIaWn9jT1Ny4oKVzAJsKii5eoE9snzTJzL4+MMVOMn+fikWGFmKEylcXL710V/kIPJQ==} engines: {node: ^18 || >=20} @@ -8707,6 +9478,12 @@ packages: engines: {node: '>=14.17'} hasBin: true + /typescript@5.4.5: + resolution: {integrity: sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==} + engines: {node: '>=14.17'} + hasBin: true + dev: true + /typescript@5.5.4: resolution: {integrity: sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==} engines: {node: '>=14.17'} @@ -8833,29 +9610,6 @@ packages: /vary@1.1.2: resolution: {integrity: sha512-BNGbWLfd0eUPabhkXUVm0j8uuvREyTh5ovRa/dyow/BqAbZJyC+5fU+IzQOzmAKzYqYRAISoRhdQr3eIZ/PXqg==} engines: {node: '>= 0.8'} - dev: false - - /vite-node@2.1.9: - resolution: {integrity: sha512-AM9aQ/IPrW/6ENLQg3AGY4K1N2TGZdR5e4gu/MmmR2xR3Ll1+dib+nook92g4TV3PXVyeyxdWwtaCAiUL0hMxA==} - engines: {node: ^18.0.0 || >=20.0.0} - hasBin: true - dependencies: - cac: 6.7.14 - debug: 4.4.0 - es-module-lexer: 1.6.0 - pathe: 1.1.2 - vite: 5.4.10 - transitivePeerDependencies: - - '@types/node' - - less - - lightningcss - - sass - - sass-embedded - - stylus - - sugarss - - supports-color - - terser - dev: true /vite-node@2.1.9(@types/node@20.10.5): resolution: {integrity: sha512-AM9aQ/IPrW/6ENLQg3AGY4K1N2TGZdR5e4gu/MmmR2xR3Ll1+dib+nook92g4TV3PXVyeyxdWwtaCAiUL0hMxA==} @@ -8879,22 +9633,6 @@ packages: - terser dev: true - /vite-tsconfig-paths@4.3.2: - resolution: {integrity: sha512-0Vd/a6po6Q+86rPlntHye7F31zA2URZMbH8M3saAZ/xR9QoGN/L21bxEGfXdWmFdNkqPpRdxFT7nmNe12e9/uA==} - peerDependencies: - vite: '*' - peerDependenciesMeta: - vite: - optional: true - dependencies: - debug: 4.3.7 - globrex: 0.1.2 - tsconfck: 3.1.4 - transitivePeerDependencies: - - supports-color - - typescript - dev: true - /vite-tsconfig-paths@4.3.2(typescript@5.5.4): resolution: {integrity: sha512-0Vd/a6po6Q+86rPlntHye7F31zA2URZMbH8M3saAZ/xR9QoGN/L21bxEGfXdWmFdNkqPpRdxFT7nmNe12e9/uA==} peerDependencies: @@ -8911,44 +9649,6 @@ packages: - typescript dev: true - /vite@5.4.10: - resolution: {integrity: sha512-1hvaPshuPUtxeQ0hsVH3Mud0ZanOLwVTneA1EgbAM5LhaZEqyPWGRQ7BtaMvUrTDeEaC8pxtj6a6jku3x4z6SQ==} - engines: {node: ^18.0.0 || >=20.0.0} - hasBin: true - peerDependencies: - '@types/node': ^18.0.0 || >=20.0.0 - less: '*' - lightningcss: ^1.21.0 - sass: '*' - sass-embedded: '*' - stylus: '*' - sugarss: '*' - terser: ^5.4.0 - peerDependenciesMeta: - '@types/node': - optional: true - less: - optional: true - lightningcss: - optional: true - sass: - optional: true - sass-embedded: - optional: true - stylus: - 
optional: true - sugarss: - optional: true - terser: - optional: true - dependencies: - esbuild: 0.21.5 - postcss: 8.5.1 - rollup: 4.24.0 - optionalDependencies: - fsevents: 2.3.3 - dev: true - /vite@5.4.10(@types/node@20.10.5): resolution: {integrity: sha512-1hvaPshuPUtxeQ0hsVH3Mud0ZanOLwVTneA1EgbAM5LhaZEqyPWGRQ7BtaMvUrTDeEaC8pxtj6a6jku3x4z6SQ==} engines: {node: ^18.0.0 || >=20.0.0} @@ -8988,63 +9688,6 @@ packages: fsevents: 2.3.3 dev: true - /vitest@2.1.9: - resolution: {integrity: sha512-MSmPM9REYqDGBI8439mA4mWhV5sKmDlBKWIYbA3lRb2PTHACE0mgKwA8yQ2xq9vxDTuk4iPrECBAEW2aoFXY0Q==} - engines: {node: ^18.0.0 || >=20.0.0} - hasBin: true - peerDependencies: - '@edge-runtime/vm': '*' - '@types/node': ^18.0.0 || >=20.0.0 - '@vitest/browser': 2.1.9 - '@vitest/ui': 2.1.9 - happy-dom: '*' - jsdom: '*' - peerDependenciesMeta: - '@edge-runtime/vm': - optional: true - '@types/node': - optional: true - '@vitest/browser': - optional: true - '@vitest/ui': - optional: true - happy-dom: - optional: true - jsdom: - optional: true - dependencies: - '@vitest/expect': 2.1.9 - '@vitest/mocker': 2.1.9(vite@5.4.10) - '@vitest/pretty-format': 2.1.9 - '@vitest/runner': 2.1.9 - '@vitest/snapshot': 2.1.9 - '@vitest/spy': 2.1.9 - '@vitest/utils': 2.1.9 - chai: 5.1.2 - debug: 4.4.0 - expect-type: 1.2.0 - magic-string: 0.30.17 - pathe: 1.1.2 - std-env: 3.8.1 - tinybench: 2.9.0 - tinyexec: 0.3.1 - tinypool: 1.0.1 - tinyrainbow: 1.2.0 - vite: 5.4.10 - vite-node: 2.1.9 - why-is-node-running: 2.3.0 - transitivePeerDependencies: - - less - - lightningcss - - msw - - sass - - sass-embedded - - stylus - - sugarss - - supports-color - - terser - dev: true - /vitest@2.1.9(@types/node@20.10.5): resolution: {integrity: sha512-MSmPM9REYqDGBI8439mA4mWhV5sKmDlBKWIYbA3lRb2PTHACE0mgKwA8yQ2xq9vxDTuk4iPrECBAEW2aoFXY0Q==} engines: {node: ^18.0.0 || >=20.0.0} @@ -9143,6 +9786,12 @@ packages: graceful-fs: 4.2.11 dev: false + /wcwidth@1.0.1: + resolution: {integrity: sha512-XHPEwS0q6TaxcvG85+8EYkbiCux2XtWG2mkc47Ng2A77BQu9+DqIOJldST4HgPkuea7dvKSj5VgX3P1d4rW8Tg==} + dependencies: + defaults: 1.0.4 + dev: true + /web-streams-polyfill@3.2.1: resolution: {integrity: sha512-e0MO3wdXWKrLbL0DgGnUV7WHVuw9OUvL4hjgnPkIeEvESk74gAITi5G606JtZPp39cd8HA9VQzCIvA49LpPN5Q==} engines: {node: '>= 8'} @@ -9386,11 +10035,29 @@ packages: hasBin: true dev: false + /yargs-parser@20.2.9: + resolution: {integrity: sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==} + engines: {node: '>=10'} + dev: true + /yargs-parser@21.1.1: resolution: {integrity: sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw==} engines: {node: '>=12'} dev: true + /yargs@16.2.0: + resolution: {integrity: sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==} + engines: {node: '>=10'} + dependencies: + cliui: 7.0.4 + escalade: 3.1.1 + get-caller-file: 2.0.5 + require-directory: 2.1.1 + string-width: 4.2.3 + y18n: 5.0.8 + yargs-parser: 20.2.9 + dev: true + /yargs@17.7.2: resolution: {integrity: sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==} engines: {node: '>=12'} From 7438e828a5bd594d735671b5ce4a30c4e2b6c016 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Sun, 25 May 2025 21:24:10 -0500 Subject: [PATCH 48/51] fix more circ dependencies --- packages/proxy/tsconfig.json | 1 + packages/proxy/utils/audioEncoder.ts | 2 +- packages/proxy/utils/tempCredentials.ts | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff 
--git a/packages/proxy/tsconfig.json b/packages/proxy/tsconfig.json index b6e73a06..b739d1e7 100644 --- a/packages/proxy/tsconfig.json +++ b/packages/proxy/tsconfig.json @@ -9,6 +9,7 @@ "paths": { "@lib/*": ["src/*"], "@schema": ["schema/index"], + "@schema/*": ["schema/*"], "@types": ["types/index"] }, "resolveJsonModule": true, diff --git a/packages/proxy/utils/audioEncoder.ts b/packages/proxy/utils/audioEncoder.ts index 87b758a4..9a3663ce 100644 --- a/packages/proxy/utils/audioEncoder.ts +++ b/packages/proxy/utils/audioEncoder.ts @@ -1,4 +1,4 @@ -import { Mp3Bitrate, PcmAudioFormat } from "@schema"; +import { Mp3Bitrate, PcmAudioFormat } from "@schema/audio"; import { Mp3Encoder } from "@breezystack/lamejs"; export function makeWavFile( diff --git a/packages/proxy/utils/tempCredentials.ts b/packages/proxy/utils/tempCredentials.ts index 54c33114..5f854ac1 100644 --- a/packages/proxy/utils/tempCredentials.ts +++ b/packages/proxy/utils/tempCredentials.ts @@ -5,7 +5,7 @@ import { tempCredentialJwtPayloadSchema, TempCredentialsCacheValue, tempCredentialsCacheValueSchema, -} from "../schema"; +} from "@schema/secrets"; import { v4 as uuidv4 } from "uuid"; import { arrayBufferToBase64 } from "./encrypt"; import jsonwebtoken from "jsonwebtoken"; From c95c56aad791bba1055266afdc53424d8a591f7e Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Tue, 27 May 2025 16:42:18 -0500 Subject: [PATCH 49/51] update bedrock/vertex ai opus/sonnet 4 to have reasoning --- packages/proxy/schema/model_list.json | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/packages/proxy/schema/model_list.json b/packages/proxy/schema/model_list.json index 8972e2ec..d6437b39 100644 --- a/packages/proxy/schema/model_list.json +++ b/packages/proxy/schema/model_list.json @@ -2156,6 +2156,8 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning": true, + "reasoning_budget": true, "input_cost_per_mil_tokens": 3, "output_cost_per_mil_tokens": 15, "input_cache_read_cost_per_mil_tokens": 0.3, @@ -2166,6 +2168,8 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning": true, + "reasoning_budget": true, "input_cost_per_mil_tokens": 3, "output_cost_per_mil_tokens": 15, "input_cache_read_cost_per_mil_tokens": 0.3, @@ -2288,6 +2292,8 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning": true, + "reasoning_budget": true, "input_cost_per_mil_tokens": 15, "output_cost_per_mil_tokens": 75, "displayName": "Claude 4 Opus" @@ -2296,6 +2302,8 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning": true, + "reasoning_budget": true, "input_cost_per_mil_tokens": 15, "output_cost_per_mil_tokens": 75, "displayName": "US Claude 4 Opus", @@ -2697,6 +2705,8 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning": true, + "reasoning_budget": true, "input_cost_per_mil_tokens": 3, "output_cost_per_mil_tokens": 15, "displayName": "Claude 4 Sonnet" @@ -2705,6 +2715,8 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning": true, + "reasoning_budget": true, "input_cost_per_mil_tokens": 3, "output_cost_per_mil_tokens": 15, "experimental": true, @@ -2783,6 +2795,8 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning": true, + "reasoning_budget": true, "input_cost_per_mil_tokens": 15, "output_cost_per_mil_tokens": 75, "displayName": "Claude 4 Opus" @@ -2791,6 +2805,8 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning": true, + "reasoning_budget": true, 
"input_cost_per_mil_tokens": 15, "output_cost_per_mil_tokens": 75, "parent": "publishers/anthropic/models/claude-opus-4" From f9cd4eb7c334ecf981664fc3e4e798ab5b8d8e66 Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Thu, 29 May 2025 20:07:41 -0500 Subject: [PATCH 50/51] add reasoning enabled and budget for bedrock/vertex claude providers --- packages/proxy/schema/model_list.json | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/packages/proxy/schema/model_list.json b/packages/proxy/schema/model_list.json index 8972e2ec..d6437b39 100644 --- a/packages/proxy/schema/model_list.json +++ b/packages/proxy/schema/model_list.json @@ -2156,6 +2156,8 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning": true, + "reasoning_budget": true, "input_cost_per_mil_tokens": 3, "output_cost_per_mil_tokens": 15, "input_cache_read_cost_per_mil_tokens": 0.3, @@ -2166,6 +2168,8 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning": true, + "reasoning_budget": true, "input_cost_per_mil_tokens": 3, "output_cost_per_mil_tokens": 15, "input_cache_read_cost_per_mil_tokens": 0.3, @@ -2288,6 +2292,8 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning": true, + "reasoning_budget": true, "input_cost_per_mil_tokens": 15, "output_cost_per_mil_tokens": 75, "displayName": "Claude 4 Opus" @@ -2296,6 +2302,8 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning": true, + "reasoning_budget": true, "input_cost_per_mil_tokens": 15, "output_cost_per_mil_tokens": 75, "displayName": "US Claude 4 Opus", @@ -2697,6 +2705,8 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning": true, + "reasoning_budget": true, "input_cost_per_mil_tokens": 3, "output_cost_per_mil_tokens": 15, "displayName": "Claude 4 Sonnet" @@ -2705,6 +2715,8 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning": true, + "reasoning_budget": true, "input_cost_per_mil_tokens": 3, "output_cost_per_mil_tokens": 15, "experimental": true, @@ -2783,6 +2795,8 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning": true, + "reasoning_budget": true, "input_cost_per_mil_tokens": 15, "output_cost_per_mil_tokens": 75, "displayName": "Claude 4 Opus" @@ -2791,6 +2805,8 @@ "format": "anthropic", "flavor": "chat", "multimodal": true, + "reasoning": true, + "reasoning_budget": true, "input_cost_per_mil_tokens": 15, "output_cost_per_mil_tokens": 75, "parent": "publishers/anthropic/models/claude-opus-4" From b01da0987657d551f201ee1620dd83058f2daa5c Mon Sep 17 00:00:00 2001 From: Olmo Maldonado Date: Fri, 30 May 2025 14:32:24 -0500 Subject: [PATCH 51/51] add delta override with new reasoning chunk --- packages/proxy/types/openai.ts | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/packages/proxy/types/openai.ts b/packages/proxy/types/openai.ts index 1473de45..1dccbcf6 100644 --- a/packages/proxy/types/openai.ts +++ b/packages/proxy/types/openai.ts @@ -73,6 +73,13 @@ declare module "openai/resources/chat/completions" { reasoning?: OpenAIReasoning[]; } } + namespace ChatCompletionChunk { + namespace Choice { + interface Delta { + reasoning?: OpenAIReasoning; + } + } + } } export const completionUsageSchema = z.object({