From a697f1c5b0b25f1fd52eff7804e76334ab7d332e Mon Sep 17 00:00:00 2001
From: nmb
Date: Tue, 17 Feb 2026 16:30:29 +0100
Subject: [PATCH 1/2] =?UTF-8?q?=E2=9C=A8=20feat(llm):=20add=20Ollama=20Clo?=
 =?UTF-8?q?ud=20provider=20support?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 README.md                           |  2 +-
 interface/src/lib/providerIcons.tsx |  1 +
 src/config.rs                       | 43 +++++++++++++++++++----------
 src/llm/providers.rs                | 14 ++++++++--
 src/llm/routing.rs                  | 15 ++++++++++
 5 files changed, 57 insertions(+), 18 deletions(-)

diff --git a/README.md b/README.md
index fa7b8920c..b791364e4 100644
--- a/README.md
+++ b/README.md
@@ -311,7 +311,7 @@ Read the full vision in [docs/spacedrive.md](docs/spacedrive.md).
 ### Prerequisites
 
 - **Rust** 1.85+ ([rustup](https://rustup.rs/))
-- An LLM API key from any supported provider (Anthropic, OpenAI, OpenRouter, Z.ai, Groq, Together, Fireworks, DeepSeek, xAI, Mistral, or OpenCode Zen)
+- An LLM API key from any supported provider (Anthropic, OpenAI, OpenRouter, Ollama Cloud, Z.ai, Groq, Together, Fireworks, DeepSeek, xAI, Mistral, or OpenCode Zen)
 
 ### Build and Run
 
diff --git a/interface/src/lib/providerIcons.tsx b/interface/src/lib/providerIcons.tsx
index 8d6666bd6..76ba6fdd1 100644
--- a/interface/src/lib/providerIcons.tsx
+++ b/interface/src/lib/providerIcons.tsx
@@ -60,6 +60,7 @@ export function ProviderIcon({ provider, className = "text-ink-faint", size = 24
 		anthropic: Anthropic,
 		openai: OpenAI,
 		openrouter: OpenRouter,
+		ollama: OpenRouter, // TODO(review): OpenRouter icon is a placeholder; add a dedicated Ollama icon
 		groq: Groq,
 		mistral: Mistral,
 		deepseek: DeepSeek,
diff --git a/src/config.rs b/src/config.rs
index 94c289a86..d930df304 100644
--- a/src/config.rs
+++ b/src/config.rs
@@ -55,6 +55,7 @@ pub struct LlmConfig {
     pub anthropic_key: Option<String>,
     pub openai_key: Option<String>,
     pub openrouter_key: Option<String>,
+    pub ollama_key: Option<String>,
     pub zhipu_key: Option<String>,
     pub groq_key: Option<String>,
     pub together_key: Option<String>,
@@ -68,9 +69,10 @@ impl LlmConfig {
 
     /// Check if any provider key is configured.
     pub fn has_any_key(&self) -> bool {
-        self.anthropic_key.is_some()
-            || self.openai_key.is_some()
-            || self.openrouter_key.is_some()
+        self.anthropic_key.is_some()
+            || self.openai_key.is_some()
+            || self.openrouter_key.is_some()
+            || self.ollama_key.is_some()
             || self.zhipu_key.is_some()
             || self.groq_key.is_some()
             || self.together_key.is_some()
@@ -869,6 +871,7 @@ struct TomlLlmConfig {
     anthropic_key: Option<String>,
     openai_key: Option<String>,
     openrouter_key: Option<String>,
+    ollama_key: Option<String>,
     zhipu_key: Option<String>,
     groq_key: Option<String>,
     together_key: Option<String>,
@@ -1146,6 +1149,7 @@ impl Config {
         std::env::var("ANTHROPIC_API_KEY").is_err()
            && std::env::var("OPENAI_API_KEY").is_err()
            && std::env::var("OPENROUTER_API_KEY").is_err()
+           && std::env::var("OLLAMA_API_KEY").is_err()
            && std::env::var("OPENCODE_ZEN_API_KEY").is_err()
     }
 
@@ -1183,6 +1187,7 @@ impl Config {
             anthropic_key: std::env::var("ANTHROPIC_API_KEY").ok(),
             openai_key: std::env::var("OPENAI_API_KEY").ok(),
             openrouter_key: std::env::var("OPENROUTER_API_KEY").ok(),
+            ollama_key: std::env::var("OLLAMA_API_KEY").ok(),
             zhipu_key: std::env::var("ZHIPU_API_KEY").ok(),
             groq_key: std::env::var("GROQ_API_KEY").ok(),
             together_key: std::env::var("TOGETHER_API_KEY").ok(),
@@ -1239,8 +1244,8 @@ impl Config {
     /// Validate a raw TOML string as a valid Spacebot config.
     /// Returns Ok(()) if the config is structurally valid, or an error describing what's wrong.
     pub fn validate_toml(content: &str) -> Result<()> {
-        let toml_config: TomlConfig = toml::from_str(content)
-            .context("failed to parse config TOML")?;
+        let toml_config: TomlConfig =
+            toml::from_str(content).context("failed to parse config TOML")?;
         // Run full conversion to catch semantic errors (env resolution, defaults, etc.)
         let instance_dir = Self::default_instance_dir();
         Self::from_toml(toml_config, instance_dir)?;
@@ -1267,6 +1272,12 @@ impl Config {
                 .as_deref()
                 .and_then(resolve_env_value)
                 .or_else(|| std::env::var("OPENROUTER_API_KEY").ok()),
+            ollama_key: toml
+                .llm
+                .ollama_key
+                .as_deref()
+                .and_then(resolve_env_value)
+                .or_else(|| std::env::var("OLLAMA_API_KEY").ok()),
             zhipu_key: toml
                 .llm
                 .zhipu_key
@@ -1939,7 +1950,9 @@ pub fn spawn_file_watcher(
             // Only forward data modification events, not metadata/access changes
             use notify::EventKind;
             match &event.kind {
-                EventKind::Create(_) | EventKind::Modify(notify::event::ModifyKind::Data(_)) | EventKind::Remove(_) => {
+                EventKind::Create(_)
+                | EventKind::Modify(notify::event::ModifyKind::Data(_))
+                | EventKind::Remove(_) => {
                     let _ = tx.send(event);
                 }
                 // Also forward Any/Other modify events (some backends don't distinguish)
@@ -2248,6 +2261,7 @@ pub fn run_onboarding() -> anyhow::Result> {
         "Anthropic",
         "OpenRouter",
         "OpenAI",
+        "Ollama Cloud",
         "Z.ai (GLM)",
         "Groq",
         "Together AI",
@@ -2267,14 +2281,15 @@ pub fn run_onboarding() -> anyhow::Result> {
         0 => ("Anthropic API key", "anthropic_key", "anthropic"),
         1 => ("OpenRouter API key", "openrouter_key", "openrouter"),
         2 => ("OpenAI API key", "openai_key", "openai"),
-        3 => ("Z.ai (GLM) API key", "zhipu_key", "zhipu"),
-        4 => ("Groq API key", "groq_key", "groq"),
-        5 => ("Together AI API key", "together_key", "together"),
-        6 => ("Fireworks AI API key", "fireworks_key", "fireworks"),
-        7 => ("DeepSeek API key", "deepseek_key", "deepseek"),
-        8 => ("xAI API key", "xai_key", "xai"),
-        9 => ("Mistral AI API key", "mistral_key", "mistral"),
-        10 => ("OpenCode Zen API key", "opencode_zen_key", "opencode-zen"),
+        3 => ("Ollama Cloud API key", "ollama_key", "ollama"),
+        4 => ("Z.ai (GLM) API key", "zhipu_key", "zhipu"),
+        5 => ("Groq API key", "groq_key", "groq"),
+        6 => ("Together AI API key", "together_key", "together"),
+        7 => ("Fireworks AI API key", "fireworks_key", "fireworks"),
+        8 => ("DeepSeek API key", "deepseek_key", "deepseek"),
+        9 => ("xAI API key", "xai_key", "xai"),
+        10 => ("Mistral AI API key", "mistral_key", "mistral"),
+        11 => ("OpenCode Zen API key", "opencode_zen_key", "opencode-zen"),
         _ => unreachable!(),
     };
 
diff --git a/src/llm/providers.rs b/src/llm/providers.rs
index e3ce47ccc..8e0672149 100644
--- a/src/llm/providers.rs
+++ b/src/llm/providers.rs
@@ -8,18 +8,26 @@ pub async fn init_providers(config: &LlmConfig) -> Result<()> {
     // Provider clients are initialized lazily through LlmManager
     // This module exists for any provider-specific setup that needs to happen
     // during system startup
-    
+
     if config.anthropic_key.is_some() {
         tracing::info!("Anthropic provider configured");
     }
-    
+
     if config.openai_key.is_some() {
         tracing::info!("OpenAI provider configured");
     }
 
+    if config.openrouter_key.is_some() {
+        tracing::info!("OpenRouter provider configured");
+    }
+
+    if config.ollama_key.is_some() {
+        tracing::info!("Ollama provider configured");
+    }
+
     if config.opencode_zen_key.is_some() {
         tracing::info!("OpenCode Zen provider configured");
     }
-    
+
     Ok(())
 }
diff --git a/src/llm/routing.rs b/src/llm/routing.rs
index 5671516cb..d6d61dd8a 100644
--- a/src/llm/routing.rs
+++ b/src/llm/routing.rs
@@ -153,6 +153,20 @@ pub fn defaults_for_provider(provider: &str) -> RoutingConfig {
                 rate_limit_cooldown_secs: 60,
             }
         }
+        "ollama" => {
+            let channel: String = "ollama/gpt-oss:120b".into();
+            let worker: String = "ollama/gpt-oss:20b".into();
+            RoutingConfig {
+                channel: channel.clone(),
+                branch: channel.clone(),
+                worker: worker.clone(),
+                compactor: worker.clone(),
+                cortex: worker.clone(),
+                task_overrides: HashMap::from([("coding".into(), channel.clone())]),
+                fallbacks: HashMap::from([(channel, vec![worker])]),
+                rate_limit_cooldown_secs: 60,
+            }
+        }
         "zhipu" => {
             let channel: String = "zhipu/glm-4-plus".into();
             let worker: String = "zhipu/glm-4-flash".into();
@@ -277,6 +291,7 @@ pub fn provider_to_prefix(provider: &str) -> &str {
     match provider {
         "openrouter" => "openrouter/",
         "openai" => "openai/",
+        "ollama" => "ollama/",
         "anthropic" => "anthropic/",
         "zhipu" => "zhipu/",
         "groq" => "groq/",

From 02d2a07eab94e61234bf182e580d322508d0dd2f Mon Sep 17 00:00:00 2001
From: nmb
Date: Tue, 17 Feb 2026 16:30:57 +0100
Subject: [PATCH 2/2] =?UTF-8?q?=E2=9C=A8=20feat(ui):=20update=20settings?=
 =?UTF-8?q?=20for=20new=20provider=20and=20options?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 interface/src/routes/Settings.tsx | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/interface/src/routes/Settings.tsx b/interface/src/routes/Settings.tsx
index 716e1cd4d..5fd3129ad 100644
--- a/interface/src/routes/Settings.tsx
+++ b/interface/src/routes/Settings.tsx
@@ -95,6 +95,13 @@ const PROVIDERS = [
 		placeholder: "sk-...",
 		envVar: "OPENAI_API_KEY",
 	},
+	{
+		id: "ollama",
+		name: "Ollama Cloud",
+		description: "Hosted Ollama models via OpenAI-compatible API",
+		placeholder: "ollama_...",
+		envVar: "OLLAMA_API_KEY",
+	},
 	{
 		id: "zhipu",
 		name: "Z.ai (GLM)",