From effd9199e78209734e54e20a1a8d9491e1a19c8d Mon Sep 17 00:00:00 2001
From: Blake Ramsdell
Date: Sun, 25 May 2025 23:14:02 -0700
Subject: [PATCH 1/3] Update google/genai to 0.13.0 to get model listing

---
 package.json | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/package.json b/package.json
index aedd160..bc13eae 100644
--- a/package.json
+++ b/package.json
@@ -34,7 +34,7 @@
     "@anthropic-ai/sdk": "^0.39.0",
     "@aws-sdk/client-bedrock": "^3.787.0",
     "@aws-sdk/client-bedrock-runtime": "^3.785.0",
-    "@google/genai": "^0.9.0",
+    "@google/genai": "^0.13.0",
     "@modelcontextprotocol/sdk": "^1.7.0",
     "@types/react": "^19.0.12",
     "@types/react-dom": "^19.0.4",

From 793d3dc2be609b140a8e0fbbcf38bf936f8ddebc Mon Sep 17 00:00:00 2001
From: Blake Ramsdell
Date: Sun, 25 May 2025 23:15:34 -0700
Subject: [PATCH 2/3] Use Google models list API call to get models

---
 src/main/llm/geminiLLM.ts | 25 ++++++++++++++++++++++---
 1 file changed, 22 insertions(+), 3 deletions(-)

diff --git a/src/main/llm/geminiLLM.ts b/src/main/llm/geminiLLM.ts
index dddef51..d9a0680 100644
--- a/src/main/llm/geminiLLM.ts
+++ b/src/main/llm/geminiLLM.ts
@@ -135,9 +135,7 @@ export class GeminiLLM implements ILLM {
     }
   }
 
-  async getModels(): Promise<ILLMModel[]> {
-    // Currently no support for listModels in the Node SDK - may be coming: https://github.com/google-gemini/generative-ai-js/issues/54
-    // For now we're going to make a hardcoded list of current models.
+  async getModelsStatic(): Promise<ILLMModel[]> {
     // This seems like the best source for models and description: https://ai.google.dev/gemini-api/docs/
     const models: ILLMModel[] = [
       {
@@ -201,6 +199,27 @@
     return models;
   }
 
+  async getModels(): Promise<ILLMModel[]> {
+    const returnModels: ILLMModel[] = []
+    const models = await this.genAI.models.list();
+
+    // You might want to filter or sort this list in some way. There's some
+    // models that may not make sense, and you might want the "good" ones first.
+
+    for await (const model of models) {
+      const newModel: ILLMModel = {
+        provider: LLMType.Gemini,
+        // May not need to remove the model/ prefix here in case you like it
+        id: model.name ? model.name.replace(/^model\//, '') : '',
+        name: model.displayName ?? '',
+        description: model.description || '',
+        modelSource: 'Google'
+      };
+      returnModels.push(newModel);
+    }
+    return returnModels;
+  }
+
   async generateResponse(session: ChatSession, messages: ChatMessage[]): Promise<ModelReply> {
     const modelReply: ModelReply = {
       timestamp: Date.now(),

From 6e03fb3bbf6c60287f7279034cbe38dad108b45b Mon Sep 17 00:00:00 2001
From: Blake Ramsdell
Date: Mon, 26 May 2025 00:54:59 -0700
Subject: [PATCH 3/3] Fix models/ prefix removal

---
 src/main/llm/geminiLLM.ts | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/main/llm/geminiLLM.ts b/src/main/llm/geminiLLM.ts
index d9a0680..b4ac2fc 100644
--- a/src/main/llm/geminiLLM.ts
+++ b/src/main/llm/geminiLLM.ts
@@ -209,8 +209,8 @@
     for await (const model of models) {
       const newModel: ILLMModel = {
         provider: LLMType.Gemini,
-        // May not need to remove the model/ prefix here in case you like it
-        id: model.name ? model.name.replace(/^model\//, '') : '',
+        // May not need to remove the models/ prefix here in case you like it
+        id: model.name ? model.name.replace(/^models\//, '') : '',
         name: model.displayName ?? '',
         description: model.description || '',
         modelSource: 'Google'
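
For reviewers, a minimal standalone sketch of the listing call these patches adopt. The GoogleGenAI constructor usage and the GEMINI_API_KEY environment variable are assumptions for illustration, not taken from the patched project (which already holds a configured client in this.genAI); the list/iterate/strip-prefix steps mirror PATCH 2/3 and PATCH 3/3.

import { GoogleGenAI } from '@google/genai';

// Assumption: the client is built from an environment variable here; the patched
// project constructs and stores its own client as this.genAI.
const genAI = new GoogleGenAI({ apiKey: process.env.GEMINI_API_KEY });

async function listGeminiModels(): Promise<void> {
  // models.list() is available starting with @google/genai 0.13.0 (the upgrade in
  // PATCH 1/3) and returns an async-iterable pager of model records.
  const pager = await genAI.models.list();
  for await (const model of pager) {
    // Names come back as "models/<id>", so strip the prefix as PATCH 3/3 does.
    const id = model.name ? model.name.replace(/^models\//, '') : '';
    console.log(`${id}: ${model.displayName ?? ''} - ${model.description ?? ''}`);
  }
}

listGeminiModels().catch(console.error);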