From f08540198eaae74a9d9823f16b22a577161eb2a1 Mon Sep 17 00:00:00 2001
From: Thuan Ha
Date: Tue, 5 Nov 2024 16:21:12 +0700
Subject: [PATCH] Enable compatibility with OpenAI-compatible APIs, including
 options like LM Studio, OpenRouter, and Jan

---
 .gitignore                    | 5 ++++-
 cofounder/api/.env            | 4 +++-
 cofounder/api/package.json    | 1 +
 cofounder/api/utils/openai.js | 3 ++-
 4 files changed, 10 insertions(+), 3 deletions(-)

diff --git a/.gitignore b/.gitignore
index b512c09..851b56c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1 +1,4 @@
-node_modules
\ No newline at end of file
+node_modules
+apps
+cofounder/api/db
+package-lock.json

diff --git a/cofounder/api/.env b/cofounder/api/.env
index 0151e62..3dd20c3 100644
--- a/cofounder/api/.env
+++ b/cofounder/api/.env
@@ -1,12 +1,14 @@
 PORT = 667
+OPENAI_BASE_URL="https://api.openai.com/v1" # Replace with your OpenAI-compatible API base URL if you are using a service similar to OpenAI’s APIs
 OPENAI_API_KEY = "REPLACE_WITH_OPENAI_KEY"
+OPENAI_MODEL="gpt-4o-mini"
 ANTHROPIC_API_KEY = "REPLACE_WITH_ANTHROPIC_KEY"
 COFOUNDER_API_KEY = "REPLACE_WITH_COFOUNDER.OPENINTERFACE.AI_KEY"
 
 # llm, can be 'ANTHROPIC' (for claude sonnet 3.5) or 'OPENAI' (uses diff. models for diff. passes)
 # make sure there are matching api keys
-LLM_PROVIDER = "ANTHROPIC" #"OPENAI"
+LLM_PROVIDER = "OPENAI" #"ANTHROPIC"
 
 # should be kept to "text-embedding-3-small" to work with RAG using api.cofounder.openinterface.ai
 EMBEDDING_MODEL = "text-embedding-3-small"
 

diff --git a/cofounder/api/package.json b/cofounder/api/package.json
index 1e59452..9d37fb1 100644
--- a/cofounder/api/package.json
+++ b/cofounder/api/package.json
@@ -31,6 +31,7 @@
     "lodash": "^4.17.21",
     "lodash-es": "^4.17.21",
     "module-alias": "^2.2.3",
+    "nodemon": "^3.1.7",
     "open": "^10.1.0",
     "openai": "^4.55.4",
     "p-all": "^5.0.0",

diff --git a/cofounder/api/utils/openai.js b/cofounder/api/utils/openai.js
index 20470af..fa45fbd 100644
--- a/cofounder/api/utils/openai.js
+++ b/cofounder/api/utils/openai.js
@@ -7,13 +7,14 @@
 let openai;
 try {
   openai = new OpenAI({
     apiKey: process.env.OPENAI_API_KEY,
+    baseURL: process.env.OPENAI_BASE_URL || "https://api.openai.com/v1",
   });
 } catch (e) {
   console.error("utils:openai : " + e);
 }
 async function inference({
-  model = `gpt-4o-mini`,
+  model = process.env.OPENAI_MODEL || `gpt-4o-mini`,
   messages,
   stream = process.stdout,
 }) {
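
Usage note: with this patch applied, the endpoint and model are read from the environment, so switching providers is a matter of editing cofounder/api/.env. Below is a minimal sketch, assuming a local OpenAI-compatible server such as LM Studio on its default port; the endpoint, key, and model identifier are illustrative placeholders and depend on the provider you actually run.

OPENAI_BASE_URL="http://localhost:1234/v1"   # assumed local LM Studio endpoint; a hosted provider like OpenRouter uses its own base URL
OPENAI_API_KEY="sk-local-anything"           # many local servers accept any non-empty key; hosted providers require a real key
OPENAI_MODEL="llama-3.1-8b-instruct"         # placeholder; must match a model identifier your server actually serves
LLM_PROVIDER="OPENAI"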