Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
18 commits
Select commit Hold shift + click to select a range
4c9cfdd
feat: prioritize explicit provider config over model registry lookup
Jun 12, 2025
c8fc2be
fix: improve JSON parsing robustness in OpenAI tool arguments
Jun 12, 2025
0866741
debug: add complete toolCallData logging for better error diagnosis
Jun 12, 2025
7515774
fix: handle concatenated JSON objects in OpenAI tool arguments
Jun 12, 2025
d8f9d97
fix: handle Claude-style concatenated JSON in OpenAI tool arguments a…
Jun 12, 2025
80f3246
fix: make Claude-style JSON concatenation handling primary path inste…
Jun 12, 2025
7d4b1a1
fix: handle duplicate keys in concatenated JSON by converting to arrays
Jun 12, 2025
9c9af1e
Revert "fix: handle duplicate keys in concatenated JSON by converting…
Jun 13, 2025
6323502
Revert "fix: make Claude-style JSON concatenation handling primary pa…
Jun 13, 2025
66d7351
Revert "fix: handle Claude-style concatenated JSON in OpenAI tool arg…
Jun 13, 2025
0cf2760
Revert "fix: handle concatenated JSON objects in OpenAI tool arguments"
Jun 13, 2025
03f037a
Revert "debug: add complete toolCallData logging for better error dia…
Jun 13, 2025
c240698
Revert "fix: improve JSON parsing robustness in OpenAI tool arguments"
Jun 13, 2025
4c73a6d
Revert "feat: prioritize explicit provider config over model registry…
Jun 13, 2025
da640a7
feat: add proxy provider with OpenAI-compatible API and Anthropic-sty…
Jun 14, 2025
0c0fd12
Fix tool related bugs
Jun 16, 2025
aebd091
fix: claude-bridge proxy request format and debug logging
wolvever Jul 4, 2025
ccd45ad
fix: claude-bridge tool parameter handling and bump to v1.0.14
wolvever Jul 4, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion apps/claude-bridge/package.json
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{
"name": "@mariozechner/claude-bridge",
"version": "1.0.10",
"version": "1.0.14",
"description": "Use non-Anthropic models with Claude Code by proxying requests through the lemmy unified interface",
"type": "module",
"main": "dist/index.js",
Expand Down
21 changes: 18 additions & 3 deletions apps/claude-bridge/src/interceptor.ts
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ import {
import { transformAnthropicToLemmy } from "./transforms/anthropic-to-lemmy.js";
import { createAnthropicSSE } from "./transforms/lemmy-to-anthropic.js";
import { jsonSchemaToZod } from "./transforms/tool-schemas.js";
import type { MessageCreateParamsBase } from "@anthropic-ai/sdk/resources/messages/messages.js";
import type { MessageCreateParamsBase, ThinkingConfigEnabled } from "@anthropic-ai/sdk/resources/messages/messages.js";
import {
Context,
type AskResult,
Expand All @@ -31,7 +31,6 @@ import { parseSSE, extractAssistantFromSSE } from "./utils/sse.js";
import {
parseAnthropicMessageCreateRequest,
parseResponse,
isAnthropicAPI,
generateRequestId,
type ParsedRequestData,
} from "./utils/request-parser.js";
Expand Down Expand Up @@ -233,7 +232,16 @@ export class ClaudeBridgeInterceptor {
}

// Convert thinking parameters for provider
const askOptions = convertThinkingParameters(this.clientInfo.provider, originalRequest);
this.logger.log(`Original thinking config: ${JSON.stringify(originalRequest.thinking)}`);
const askOptions: any = {
...(originalRequest.thinking?.type === "enabled" && {
thinking: {
type: "enabled",
budget_tokens: originalRequest.thinking.budget_tokens,
} as ThinkingConfigEnabled,
}),
};
this.logger.log(`Converted thinking config: ${JSON.stringify(askOptions.thinking)}`);

// Apply capability adjustments
if (validation.adjustments.maxOutputTokens) {
Expand Down Expand Up @@ -456,3 +464,10 @@ export async function initializeInterceptor(config?: BridgeConfig): Promise<Clau
/**
 * Returns the process-wide interceptor instance set up by
 * {@link initializeInterceptor}, or null if none has been initialized yet.
 */
export function getInterceptor(): ClaudeBridgeInterceptor | null {
return globalInterceptor;
}

/**
 * Determine whether a URL targets the Anthropic Messages API.
 *
 * A URL qualifies only when it mentions both the Anthropic host and the
 * `/v1/messages` path (plain substring checks — no URL parsing, so the
 * function never throws on malformed input).
 *
 * @param url - The request URL to inspect.
 * @returns true when the URL looks like an Anthropic `/v1/messages` call.
 */
export function isAnthropicAPI(url: string): boolean {
	const isAnthropicHost = url.includes("api.anthropic.com");
	const isMessagesEndpoint = url.includes("/v1/messages");
	return isAnthropicHost && isMessagesEndpoint;
}
2 changes: 1 addition & 1 deletion apps/claude-bridge/src/transforms/lemmy-to-anthropic.ts
Original file line number Diff line number Diff line change
Expand Up @@ -84,7 +84,7 @@ export function createAnthropicSSE(askResult: AskResult, model: string): Readabl
writeEvent("content_block_start", {
type: "content_block_start",
index: blockIndex,
content_block: { type: "tool_use", id: toolCall.id, name: toolCall.name, input: {} },
content_block: { type: "tool_use", id: toolCall.id, name: toolCall.name, input: toolCall.arguments },
});
const argsJson = JSON.stringify(toolCall.arguments);
for (let i = 0; i < argsJson.length; i += 50) {
Expand Down
6 changes: 3 additions & 3 deletions apps/claude-bridge/src/types.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
import type { SerializedContext, ChatClient } from "@mariozechner/lemmy";
import type { MessageCreateParamsBase } from "@anthropic-ai/sdk/resources/messages/messages.js";
import type { MessageCreateParamsBase, ThinkingConfigEnabled } from "@anthropic-ai/sdk/resources/messages/messages.js";
import type { AnthropicConfig, OpenAIConfig, GoogleConfig } from "@mariozechner/lemmy";
import type { ModelData } from "@mariozechner/lemmy";

Expand All @@ -24,7 +24,7 @@ export interface RawPair {
note?: string;
}

export type Provider = "anthropic" | "openai" | "google";
// Upstream backends the bridge can route requests to. NOTE(review): "proxy"
// appears to reuse the OpenAI-compatible config (see ProviderConfig comment
// in this file) — confirm against the provider client setup.
export type Provider = "anthropic" | "openai" | "google" | "proxy";

// JSON Schema types
export interface JSONSchema {
Expand Down Expand Up @@ -73,7 +73,7 @@ export interface ProviderClientInfo {
modelData: ModelData | null; // null for unknown models
}

export type ProviderConfig = AnthropicConfig | OpenAIConfig | GoogleConfig;
// Per-provider client configuration. The proxy provider reuses OpenAIConfig
// (OpenAI-compatible API), so it needs no separate union member — the previous
// duplicate "| OpenAIConfig" was redundant (TypeScript collapses duplicate
// union members) and misleadingly implied a distinct proxy config type.
export type ProviderConfig = AnthropicConfig | OpenAIConfig | GoogleConfig; // Proxy uses OpenAI config

export interface TransformationEntry {
timestamp: number;
Expand Down
Loading