diff --git a/README.md b/README.md
index 8be43f9..6eb5aaa 100644
--- a/README.md
+++ b/README.md
@@ -49,7 +49,7 @@ You can use **Google Gemini**, **Grok**, **Claude**, **ChatGPT**, or **Ollama**
 | `CLAUDE_API_KEY` | Your API key | Required if using Claude |
 | `OPENAI_API_KEY` | Your API key | Required if using ChatGPT |
 | `OLLAMA_URL` | URL (optional) | Ollama server URL (default: http://localhost:11434/api/generate) |
-| `OLLAMA_MODEL` | Model name (optional) | Ollama model to use (default: qwen2:0.5b) |
+| `OLLAMA_MODEL` | Model name (optional) | Ollama model to use (default: llama3:latest) |
 
 ---
 
diff --git a/cmd/cli/createMsg.go b/cmd/cli/createMsg.go
index 63c8c26..f68c258 100644
--- a/cmd/cli/createMsg.go
+++ b/cmd/cli/createMsg.go
@@ -12,6 +12,7 @@ import (
 	"github.com/dfanso/commit-msg/internal/gemini"
 	"github.com/dfanso/commit-msg/internal/git"
 	"github.com/dfanso/commit-msg/internal/grok"
+	"github.com/dfanso/commit-msg/internal/ollama"
 	"github.com/dfanso/commit-msg/internal/stats"
 	"github.com/dfanso/commit-msg/pkg/types"
 	"github.com/pterm/pterm"
@@ -106,6 +107,17 @@ func CreateCommitMsg() {
 	case "Claude":
 		commitMsg, err = claude.GenerateCommitMessage(config, changes, apiKey)
 
+	case "Ollama":
+		url := os.Getenv("OLLAMA_URL")
+		if url == "" {
+			url = "http://localhost:11434/api/generate"
+		}
+		model := os.Getenv("OLLAMA_MODEL")
+		if model == "" {
+			model = "llama3:latest"
+		}
+		commitMsg, err = ollama.GenerateCommitMessage(config, changes, url, model)
+
 	default:
 		commitMsg, err = grok.GenerateCommitMessage(config, changes, apiKey)
 
diff --git a/cmd/cli/llmSetup.go b/cmd/cli/llmSetup.go
index e4f40e9..f170c65 100644
--- a/cmd/cli/llmSetup.go
+++ b/cmd/cli/llmSetup.go
@@ -11,7 +11,7 @@ import (
 func SetupLLM() error {
 
-	providers := []string{"OpenAI", "Claude", "Gemini", "Grok"}
+	providers := []string{"OpenAI", "Claude", "Gemini", "Grok", "Ollama"}
 	prompt := promptui.Select{
 		Label: "Select LLM",
 		Items: providers,
@@ -22,15 +22,21 @@ func SetupLLM() error {
 		return fmt.Errorf("prompt failed")
 	}
 
-	apiKeyPrompt := promptui.Prompt{
-		Label: "Enter API Key",
-		Mask:  '*',
-
-	}
-
-	apiKey, err := apiKeyPrompt.Run()
-	if err != nil {
-		return fmt.Errorf("failed to read API Key: %w", err)
-	}
+	var apiKey string
+
+	// Skip API key prompt for Ollama (local LLM)
+	if model != "Ollama" {
+		apiKeyPrompt := promptui.Prompt{
+			Label: "Enter API Key",
+			Mask:  '*',
+		}
+
+		apiKey, err = apiKeyPrompt.Run()
+		if err != nil {
+			return fmt.Errorf("failed to read API Key: %w", err)
+		}
+	} else {
+		apiKey = "" // No API key needed for Ollama
+	}
 
 	LLMConfig := store.LLMProvider{
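
The hunks above call `ollama.GenerateCommitMessage(config, changes, url, model)`, but the new `internal/ollama` package itself is not part of this excerpt. For reviewers, here is a minimal sketch of what that function could look like. The `*types.Config` first parameter (mirroring the other providers) and the prompt wording are assumptions; only the `/api/generate` request and response fields (`model`, `prompt`, `stream`, `response`) follow Ollama's documented API.

```go
// internal/ollama/ollama.go (illustrative sketch, not the actual implementation)
package ollama

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"

	"github.com/dfanso/commit-msg/pkg/types"
)

// generateRequest mirrors the body of Ollama's POST /api/generate endpoint.
type generateRequest struct {
	Model  string `json:"model"`
	Prompt string `json:"prompt"`
	Stream bool   `json:"stream"`
}

// generateResponse picks out the single field we need from the reply.
type generateResponse struct {
	Response string `json:"response"`
}

// GenerateCommitMessage asks a local Ollama server to draft a commit message
// from the collected changes. The *types.Config parameter is assumed here
// only for parity with the other providers and is not used.
func GenerateCommitMessage(config *types.Config, changes string, url string, model string) (string, error) {
	body, err := json.Marshal(generateRequest{
		Model:  model,
		Prompt: "Write a concise git commit message for the following changes:\n\n" + changes,
		Stream: false, // ask for one JSON object instead of a token stream
	})
	if err != nil {
		return "", fmt.Errorf("failed to encode request: %w", err)
	}

	resp, err := http.Post(url, "application/json", bytes.NewReader(body))
	if err != nil {
		return "", fmt.Errorf("failed to reach Ollama at %s: %w", url, err)
	}
	defer resp.Body.Close()

	if resp.StatusCode != http.StatusOK {
		return "", fmt.Errorf("ollama returned status %d", resp.StatusCode)
	}

	var out generateResponse
	if err := json.NewDecoder(resp.Body).Decode(&out); err != nil {
		return "", fmt.Errorf("failed to decode response: %w", err)
	}
	return out.Response, nil
}
```

Setting `stream` to `false` matters here: by default `/api/generate` streams newline-delimited JSON chunks, and a single-object decode like the one above would only capture the first token.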