diff --git a/Cargo.lock b/Cargo.lock index 152e6a7..a01d12d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -237,6 +237,7 @@ dependencies = [ "config", "dotenvy", "futures-util", + "minijinja", "reqwest", "serde", "serde_json", @@ -980,6 +981,15 @@ version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a" +[[package]] +name = "minijinja" +version = "2.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4e60ac08614cc09062820e51d5d94c2fce16b94ea4e5003bb81b99a95f84e876" +dependencies = [ + "serde", +] + [[package]] name = "minimal-lexical" version = "0.2.1" diff --git a/Cargo.toml b/Cargo.toml index d28822d..b318397 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -15,3 +15,4 @@ async-stream = "0.3" tracing = "0.1.41" tracing-subscriber = { version = "0.3.19", features = ["fmt", "env-filter"] } config = { version = "0.14.0", features = ["yaml"] } +minijinja = "2.11.0" diff --git a/README.md b/README.md index 70a245b..b41d232 100644 --- a/README.md +++ b/README.md @@ -1,64 +1,170 @@ # CCORP - Anthropic to OpenAI/OpenRouter Adapter -This is a Rust application that acts as an adapter between the Anthropic API format and the OpenAI/OpenRouter API format. It spins up a webserver, receives requests in the Anthropic format, rewrites them to the OpenAI/OpenRouter format, sends them to OpenRouter, and streams the results back. +## Use Claude Code with any OpenRouter model. -## Installation via Cargo +CCORP (Claude Code OpenRouter Proxy) is a high-performance Rust application that acts as an adapter between the Anthropic API format and the OpenAI/OpenRouter API format. It provides a seamless bridge for applications expecting Anthropic's API to work with OpenRouter's extensive model collection. 
-### Prerequisites +## Features +- **API Translation**: Converts Anthropic API requests to OpenAI/OpenRouter format and vice versa +- **Streaming Support**: Full support for both streaming and non-streaming API calls +- **Model Mapping**: Flexible configuration to map Claude models to any OpenRouter-supported model +- **Web UI**: Built-in web interface for easy model switching at runtime +- **Request Logging**: Optional logging of all requests and responses for debugging + +## Installation + +![CCORP web UI](assets/images.jpg) + +### Via Cargo + +#### Prerequisites - Rust and Cargo: [https://www.rust-lang.org/tools/install](https://www.rust-lang.org/tools/install) -``` +```bash cargo install --git https://github.com/terhechte/CCORP --bin ccor ``` -## Installation +### Via Releases -Download from the releases +Download the latest binary from the [releases page](https://github.com/terhechte/CCORP/releases). ## Configuration -1. Create a `.env` file in the root of the project. -2. Add the following environment variables to the `.env` file: +### Step 1: Environment Setup + +Create a `.env` file in the root directory with your OpenRouter API key: + +```env +OPENROUTER_API_KEY=your_openrouter_api_key_here +``` - ``` - OPENROUTER_MODEL_HAIKU=mistralai/devstral-small # or another model - OPENROUTER_MODEL_SONNET=mistralai/devstral-small # or another model - OPENROUTER_MODEL_OPUS=mistralai/devstral-small # or another model - ``` +### Step 2: Model Configuration + +Create a `config.json` file to configure the port and model mappings: + +```json +{ + "port": 3000, + "models": { + "haiku": "mistralai/mistral-7b-instruct", + "sonnet": "meta-llama/llama-3.2-90b-vision-instruct", + "opus": "openai/gpt-4o" + } +} +``` + +You can map Claude models to any model available on OpenRouter. ## Running the Application -To run the application, use the following command: +### Basic Usage ```bash cargo run ``` -The server will start on `0.0.0.0:3000`. 
+The server will start on `0.0.0.0:3000` (or the port specified in `config.json`). + +### With Request Logging + +To enable logging of all requests and responses: + +```bash +cargo run -- --logging logs +``` + +This creates timestamped JSON files in the `logs` directory for each request/response pair. -## Using Claude Code +## Using with Claude Code CLI -Start the proxy according to the docs which will run it in localhost:3073 +CCORP is designed to work seamlessly with Anthropic's Claude Code CLI: -export ANTHROPIC_BASE_URL=http://localhost:3073 +1. Start CCORP (it will run on port 3000 by default) +2. Set environment variables: -export ANTHROPIC_AUTH_TOKEN="your openrouter api key" + ```bash + export ANTHROPIC_BASE_URL=http://localhost:3000 + export ANTHROPIC_AUTH_TOKEN="your_openrouter_api_key" + ``` -run claude code +3. Run Claude Code as normal: -## Logging + ```bash + claude + ``` -CCORP can also log requests and responses to a specified directory. To enable this, pass the `--logging` flag followed by the path to the directory where you want the logs to be stored. +## Web UI for Model Management -For example, to log requests and responses to the `logs` directory, run the following command: +CCORP includes a web interface for dynamically switching models without restarting the server. + +Visit `http://localhost:3000/switch-model` in your browser to: +- View all available OpenRouter models +- Change model mappings for Haiku, Sonnet, and Opus + +Changes are saved to `config.json` and take effect immediately. + +## API Usage Examples + +### Non-Streaming Request ```bash -cargo run --logging logs +curl -X POST http://localhost:3000/v1/messages \ + -H "Content-Type: application/json" \ + -H "x-api-key: your_openrouter_api_key" \ + -d '{ + "model": "claude-3-5-haiku-20241022", + "messages": [{"role": "user", "content": "Hello, world!"}] + }' ``` -This will start the server and log requests and responses to the `logs` directory. 
+### Streaming Request + +```bash +curl -X POST http://localhost:3000/v1/messages \ + -H "Content-Type: application/json" \ + -H "x-api-key: your_openrouter_api_key" \ + -d '{ + "model": "claude-3-5-sonnet-20241022", + "messages": [{"role": "user", "content": "Tell me a story"}], + "stream": true + }' +``` + +## Architecture + +CCORP is built with: +- **Rust**: For high performance and memory safety +- **Axum**: Modern async web framework +- **Tokio**: Async runtime +- **Minijinja**: Template engine for the web UI + +The request flow: +1. Receive Anthropic-formatted request +2. Map Claude model to configured OpenRouter model +3. Transform request to OpenAI format +4. Forward to OpenRouter +5. Transform response back to Anthropic format +6. Stream or return to client + +## Development + +### Building + +```bash +cargo build --release +``` + +### Running Tests + +```bash +cargo test +``` + +### Contributing + +Contributions are welcome! Please feel free to submit a Pull Request. ## License -CCORP is licensed under the MIT License. +CCORP is licensed under the MIT License. See [LICENSE](LICENSE) for details. 
diff --git a/assets/images.jpg b/assets/images.jpg new file mode 100644 index 0000000..8903109 Binary files /dev/null and b/assets/images.jpg differ diff --git a/config.json b/config.json new file mode 100644 index 0000000..efb92da --- /dev/null +++ b/config.json @@ -0,0 +1,8 @@ +{ + "port": 3332, + "models": { + "haiku": "deepseek/claude-haiku", + "sonnet": "anthropic/claude-sonnet-4", + "opus": "anthropic/claude-opus-4" + } +} diff --git a/src/anthropic_to_openai.rs b/src/anthropic_to_openai.rs index 44a42fd..41cfa7f 100644 --- a/src/anthropic_to_openai.rs +++ b/src/anthropic_to_openai.rs @@ -1,20 +1,20 @@ +use crate::config::Config; use crate::models::*; -use crate::settings::Settings; use serde_json::json; -pub fn map_model(anthropic_model: &str, settings: &Settings) -> String { +pub fn map_model(anthropic_model: &str, settings: &Config) -> String { if anthropic_model.contains("haiku") { - settings.openrouter_model_haiku.clone() + settings.model_haiku.clone() } else if anthropic_model.contains("sonnet") { - settings.openrouter_model_sonnet.clone() + settings.model_sonnet.clone() } else if anthropic_model.contains("opus") { - settings.openrouter_model_opus.clone() + settings.model_opus.clone() } else { anthropic_model.to_string() } } -pub fn format_anthropic_to_openai(req: AnthropicRequest, settings: &Settings) -> OpenAIRequest { +pub fn format_anthropic_to_openai(req: AnthropicRequest, settings: &Config) -> OpenAIRequest { let mut openapi_messages = Vec::new(); if let Some(system) = req.system { diff --git a/src/config.rs b/src/config.rs new file mode 100644 index 0000000..1d4245b --- /dev/null +++ b/src/config.rs @@ -0,0 +1,84 @@ +use dotenvy::dotenv; +use serde::Deserialize; +use serde::Serialize; +use std::env; +use std::fs; + +/// JSON configuration structure (mirrors `config.json`) +#[derive(Deserialize, Serialize)] +struct JsonConfig { + port: u16, + models: ModelConfig, +} + +#[derive(Deserialize, Serialize)] +struct ModelConfig { + haiku: String, + sonnet: String, + opus: 
String, +} + +/// Runtime configuration loaded from `config.json` and environment variables. +#[derive(Clone, Debug)] +pub struct Config { + /// The port to listen on + pub port: u16, + /// Base URL for the OpenRouter API (e.g., https://openrouter.ai/api/v1) + pub base_url: String, + /// API key for authenticating with OpenRouter + pub api_key: String, + /// Override model name for Claude 3.5 Haiku + pub model_haiku: String, + /// Override model name for Claude Sonnet 4 + pub model_sonnet: String, + /// Override model name for Claude Opus 4 + pub model_opus: String, +} + +impl Config { + /// Load configuration from `config.json` and `.env` file. + pub fn from_env() -> Self { + // Load environment variables from .env file + dotenv().ok(); + + // Load API key from environment (must be present) + let api_key = env::var("OPENROUTER_API_KEY") + .expect("Environment variable OPENROUTER_API_KEY must be set"); + + // Load config.json + let config: JsonConfig = serde_json::from_str( + &fs::read_to_string("config.json").expect("Could not read config.json file"), + ) + .expect("Could not read config.json file"); + + Config { + port: config.port, + base_url: default_openrouter_base_url(), + api_key, + model_haiku: config.models.haiku, + model_sonnet: config.models.sonnet, + model_opus: config.models.opus, + } + } + + /// Write configuration to `config.json` (excluding secrets like api_key). 
+ pub fn write(&self) { + let config_out = JsonConfig { + port: self.port, + models: ModelConfig { + haiku: self.model_haiku.clone(), + sonnet: self.model_sonnet.clone(), + opus: self.model_opus.clone(), + }, + }; + + let json_string = + serde_json::to_string_pretty(&config_out).expect("Failed to serialize configuration"); + + fs::write("config.json", json_string).expect("Failed to write config.json"); + } +} + +fn default_openrouter_base_url() -> String { + "https://openrouter.ai/api/v1".to_string() +} diff --git a/src/main.rs b/src/main.rs index a54588f..8fed35f 100644 --- a/src/main.rs +++ b/src/main.rs @@ -1,7 +1,9 @@ mod anthropic_to_openai; +mod config; mod models; mod openai_to_anthropic; -mod settings; +mod openrouter; +mod switch_model; use axum::{ Router, @@ -9,16 +11,23 @@ use axum::{ extract::{Json, State}, http::{HeaderMap, StatusCode, header}, response::{IntoResponse, Response}, - routing::post, + routing::{get, post}, }; +use config::Config; use futures_util::stream::StreamExt; use models::{AnthropicRequest, OpenAIStreamResponse}; use reqwest::Client; use serde_json::json; -use settings::Settings; use std::sync::Arc; +use tokio::sync::RwLock; use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; +#[derive(Clone)] +pub struct AppState { + pub config: Arc>, + pub logging_path: Arc>, +} + #[tokio::main] async fn main() { tracing_subscriber::registry() @@ -42,33 +51,43 @@ async fn main() { } } - let settings = Settings::new().expect("Failed to load settings"); + let settings = Config::from_env(); println!("Using the following model mappings:"); - println!("- Haiku: {}", settings.openrouter_model_haiku); - println!("- Sonnet: {}", settings.openrouter_model_sonnet); - println!("- Opus: {}", settings.openrouter_model_opus); + println!("- Haiku: {}", settings.model_haiku); + println!("- Sonnet: {}", settings.model_sonnet); + println!("- Opus: {}", settings.model_opus); + + let addr = std::net::SocketAddr::from(([0, 0, 0, 0], 
settings.port)); - let shared_settings = Arc::new(settings); - let shared_logging_path = Arc::new(logging_path); + let state = AppState { + config: Arc::new(RwLock::new(settings)), + logging_path: Arc::new(logging_path), + }; let app = Router::new() .route("/v1/messages", post(messages_handler)) - .with_state((shared_settings, shared_logging_path)); + .route( + "/switch-model", + get(switch_model::switch_model_get).post(switch_model::switch_model_post), + ) + .with_state(state); + + println!("listening on {addr}"); - let listener = tokio::net::TcpListener::bind("0.0.0.0:3073").await.unwrap(); - println!("Listening on http://0.0.0.0:3073"); + let listener = tokio::net::TcpListener::bind(addr).await.unwrap(); axum::serve(listener, app).await.unwrap(); } async fn messages_handler( - State((settings, logging_path)): State<(Arc, Arc>)>, + State(state): State, headers: HeaderMap, Json(payload): Json, ) -> impl IntoResponse { - let openai_request = anthropic_to_openai::format_anthropic_to_openai(payload, &settings); + let settings_guard = state.config.read().await; + let openai_request = anthropic_to_openai::format_anthropic_to_openai(payload, &settings_guard); - if let Some(path) = logging_path.as_ref() { + if let Some(path) = state.logging_path.as_ref() { let timestamp = std::time::SystemTime::now() .duration_since(std::time::UNIX_EPOCH) .unwrap() @@ -85,12 +104,12 @@ async fn messages_handler( .to_string(); if openai_request.stream.unwrap_or(false) { - let settings = settings.clone(); + let base_url = settings_guard.base_url.clone(); + drop(settings_guard); let stream = async_stream::stream! 
{ let res = client .post(format!( - "{}/chat/completions", - settings.openrouter_base_url + "{base_url}/chat/completions", )) .bearer_auth(api_key) .json(&openai_request) @@ -151,7 +170,7 @@ data: {message_stop} "); yield Ok::<_, axum::Error>(sse_event.into_bytes()); - if let Some(path) = logging_path.as_ref() { + if let Some(path) = state.logging_path.as_ref() { let timestamp = std::time::SystemTime::now() .duration_since(std::time::UNIX_EPOCH) .unwrap() @@ -169,7 +188,7 @@ data: {message_stop} .unwrap() } else { let res = client - .post(format!("{}/chat/completions", settings.openrouter_base_url)) + .post(format!("{}/chat/completions", settings_guard.base_url)) .bearer_auth(api_key) .json(&openai_request) .send() @@ -184,7 +203,7 @@ data: {message_stop} let anthropic_response = openai_to_anthropic::format_openai_to_anthropic(openai_response.clone()); - if let Some(path) = logging_path.as_ref() { + if let Some(path) = state.logging_path.as_ref() { let timestamp = std::time::SystemTime::now() .duration_since(std::time::UNIX_EPOCH) .unwrap() diff --git a/src/openrouter.rs b/src/openrouter.rs new file mode 100644 index 0000000..6e4fd44 --- /dev/null +++ b/src/openrouter.rs @@ -0,0 +1,67 @@ +use crate::config::Config; +use serde::{Deserialize, Serialize}; + +/// Response structure for the OpenRouter models list API +#[derive(Debug, Deserialize, Serialize)] +pub struct ModelsResponse { + pub data: Vec, +} + +/// Individual model information +#[derive(Debug, Deserialize, Serialize, Clone)] +pub struct Model { + pub id: String, + pub name: String, + pub description: Option, + pub context_length: Option, + pub architecture: Option, + pub pricing: Option, + pub supported_generation_methods: Option>, + pub top_provider: Option, + pub per_request_limits: Option, +} + +#[derive(Debug, Deserialize, Serialize, Clone)] +pub struct Architecture { + pub modality: Option, + pub tokenizer: Option, + pub instruct_type: Option, +} + +#[derive(Debug, Deserialize, Serialize, Clone)] 
+pub struct Pricing { + pub prompt: Option, + pub completion: Option, + pub request: Option, + pub image: Option, +} + +#[derive(Debug, Deserialize, Serialize, Clone)] +pub struct TopProvider { + pub context_length: Option, + pub max_completion_tokens: Option, + pub is_moderated: Option, +} + +#[derive(Debug, Deserialize, Serialize, Clone)] +pub struct PerRequestLimits { + pub prompt_tokens: Option, + pub completion_tokens: Option, +} + +/// Fetch the list of available models from OpenRouter +pub async fn fetch_models(config: &Config) -> Result { + let client = reqwest::Client::new(); + let url = format!("{}/models", config.base_url); + + let response = client + .get(&url) + .header("Authorization", format!("Bearer {}", config.api_key)) + .header("HTTP-Referer", "https://github.com/yourusername/ccor") + .header("X-Title", "CCOR - Claude Connector for OpenRouter") + .send() + .await?; + + let models = response.json::().await?; + Ok(models) +} diff --git a/src/settings.rs b/src/settings.rs deleted file mode 100644 index a566928..0000000 --- a/src/settings.rs +++ /dev/null @@ -1,24 +0,0 @@ -use serde::Deserialize; - -#[derive(Debug, Deserialize, Clone)] -pub struct Settings { - #[serde(default = "default_openrouter_base_url")] - pub openrouter_base_url: String, - pub openrouter_model_haiku: String, - pub openrouter_model_sonnet: String, - pub openrouter_model_opus: String, -} - -fn default_openrouter_base_url() -> String { - "https://openrouter.ai/api/v1".to_string() -} - -impl Settings { - pub fn new() -> Result { - dotenvy::dotenv().ok(); - let s = config::Config::builder() - .add_source(config::Environment::default().separator("__")) - .build()?; - s.try_deserialize() - } -} diff --git a/src/switch_model.rs b/src/switch_model.rs new file mode 100644 index 0000000..fd6d01b --- /dev/null +++ b/src/switch_model.rs @@ -0,0 +1,63 @@ +use crate::openrouter; +use minijinja::Environment; +use serde::{Deserialize, Serialize}; + +use axum::{ + extract::{Json, State}, + 
response::{Html, IntoResponse}, +}; + +#[derive(Deserialize, Serialize)] +pub struct ModelSelection { + haiku: String, + sonnet: String, + opus: String, +} + +// Include the template at compile time +const SWITCH_MODEL_TEMPLATE: &str = include_str!("templates/switch_model.html"); + +/// GET /switch-model - Serve HTML interface for model selection +pub async fn switch_model_get(State(state): State) -> Html { + let cfg = state.config.read().await; + // Fetch available models from OpenRouter + let models_result = openrouter::fetch_models(&cfg).await; + let models_json = match models_result { + Ok(models) => serde_json::to_string(&models.data).unwrap_or_else(|_| "[]".to_string()), + Err(_) => "[]".to_string(), + }; + + // Create a new minijinja environment + let mut env = Environment::new(); + env.add_template("switch_model", SWITCH_MODEL_TEMPLATE) + .unwrap(); + + // Get the template + let tmpl = env.get_template("switch_model").unwrap(); + + // Create the context + let ctx = minijinja::context! { + model_haiku => cfg.model_haiku, + model_sonnet => cfg.model_sonnet, + model_opus => cfg.model_opus, + models_json => models_json, + }; + + // Render the template + let html = tmpl.render(ctx).unwrap(); + + Html(html) +} + +/// POST /switch-model - Save selected models to config.json +pub async fn switch_model_post( + State(state): State, + Json(selection): Json, +) -> impl IntoResponse { + let mut config = state.config.write().await; + config.model_haiku = selection.haiku; + config.model_opus = selection.opus; + config.model_sonnet = selection.sonnet; + config.write(); + "Models updated successfully" +} diff --git a/src/templates/switch_model.html b/src/templates/switch_model.html new file mode 100644 index 0000000..a89b9c0 --- /dev/null +++ b/src/templates/switch_model.html @@ -0,0 +1,246 @@ + + + + + + + OpenRouter Model Switcher + + + + +

OpenRouter Model Switcher

+ +
+ +
+
+

Claude 3.5 Haiku

+
Current: {{ model_haiku }}
+ +
+
+ +
+

Claude Sonnet 4

+
Current: {{ model_sonnet }}
+ +
+
+ +
+

Claude Opus 4

+
Current: {{ model_opus }}
+ +
+
+ +
+ +
+
+ + + + + \ No newline at end of file