Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion package-lock.json

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -65,7 +65,7 @@
"eslint": "^8.57.0",
"tsup": "^8.1.0",
"tsx": "^4.11.0",
"typescript": "^5.4.0",
"typescript": "^5.9.3",
"vitest": "^1.6.0"
},
"engines": {
Expand Down Expand Up @@ -97,4 +97,4 @@
"bugs": {
"url": "https://github.com/d1maash/sortora/issues"
}
}
}
137 changes: 137 additions & 0 deletions src/ai/providers/anthropic.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,137 @@
/**
* Anthropic (Claude) Provider
* Uses Anthropic API for file classification
*/

import type {
AIProvider,
ClassificationRequest,
ClassificationResult,
ProviderConfig,
} from './types.js';
import { buildClassificationPrompt, parseClassificationResponse } from './types.js';

/** Configuration for {@link AnthropicProvider}. */
export interface AnthropicConfig extends ProviderConfig {
  /** Discriminant selecting the Anthropic provider. */
  type: 'anthropic';
  /** Anthropic API key; expected to start with `sk-ant-` (checked in `init`). */
  apiKey?: string;
  /** API base URL; defaults to `https://api.anthropic.com/v1`. */
  baseUrl?: string;
  /** Model id; defaults to `claude-3-haiku-20240307`. */
  model?: string;
}

/** One conversation turn sent to the Anthropic Messages API. */
interface AnthropicMessage {
  role: 'user' | 'assistant';
  content: string;
}

/**
 * Successful Anthropic Messages API response, modeling only the fields
 * this provider touches.
 */
interface AnthropicResponse {
  id: string;
  type: 'message';
  role: 'assistant';
  /** Content blocks; the provider reads the text from here. */
  content: Array<{
    type: 'text';
    text: string;
  }>;
  stop_reason: string;
  /** Token accounting — parsed but currently unused by this provider. */
  usage: {
    input_tokens: number;
    output_tokens: number;
  };
}

/**
 * File-classification provider backed by the Anthropic Messages API.
 *
 * Lifecycle: construct with {@link AnthropicConfig}, call `init()` once
 * (local key validation only — no network request), then `classifyFile` /
 * `classifyBatch`. `dispose()` resets the provider to uninitialized.
 */
export class AnthropicProvider implements AIProvider {
  readonly name = 'Claude (Anthropic)';
  readonly type = 'anthropic' as const;

  private apiKey: string;
  private baseUrl: string;
  private model: string;
  private initialized = false;

  constructor(config: AnthropicConfig) {
    this.apiKey = config.apiKey || '';
    this.baseUrl = config.baseUrl || 'https://api.anthropic.com/v1';
    this.model = config.model || 'claude-3-haiku-20240307';
  }

  /** True once `init()` has succeeded and an API key is present. */
  isReady(): boolean {
    return this.initialized && !!this.apiKey;
  }

  /**
   * Validates the configured API key locally (presence + `sk-ant-` prefix).
   * No network request is made; the key is actually exercised on the first
   * `classifyFile` call.
   * @throws Error when the key is missing or malformed.
   */
  async init(): Promise<void> {
    if (!this.apiKey) {
      throw new Error('Anthropic API key is required');
    }

    // Anthropic keys are issued with an `sk-ant-` prefix; fail fast on typos.
    if (!this.apiKey.startsWith('sk-ant-')) {
      throw new Error('Invalid Anthropic API key format');
    }

    this.initialized = true;
  }

  /**
   * Classifies a single file by sending the built prompt to the Messages API.
   * @throws Error when the provider is not initialized or the API responds
   *   with a non-2xx status (message includes the status code and body).
   */
  async classifyFile(request: ClassificationRequest): Promise<ClassificationResult> {
    if (!this.isReady()) {
      throw new Error('Anthropic provider is not initialized');
    }

    const prompt = buildClassificationPrompt(request);

    const messages: AnthropicMessage[] = [
      {
        role: 'user',
        content: prompt,
      },
    ];

    const response = await fetch(`${this.baseUrl}/messages`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'x-api-key': this.apiKey,
        'anthropic-version': '2023-06-01',
      },
      body: JSON.stringify({
        model: this.model,
        // Classification responses are a small JSON object; 100 tokens is ample.
        max_tokens: 100,
        system: 'You are a file classification assistant. Respond only with valid JSON, no markdown formatting.',
        messages,
      }),
    });

    if (!response.ok) {
      const error = await response.text();
      // Surface the HTTP status so 401 (bad key), 429 (rate limit) and 5xx
      // failures are distinguishable in logs and caller error handling.
      throw new Error(`Anthropic API error (${response.status}): ${error}`);
    }

    const data = await response.json() as AnthropicResponse;
    // Pick the first *text* block rather than blindly taking content[0]:
    // responses may interleave non-text content blocks.
    const content = data.content.find(block => block.type === 'text')?.text ?? '';

    return parseClassificationResponse(content);
  }

  /**
   * Classifies many files, issuing up to 5 requests in parallel with a short
   * pause between batches to stay under rate limits. Results are returned in
   * input order. A failure in any request rejects the whole batch.
   */
  async classifyBatch(requests: ClassificationRequest[]): Promise<ClassificationResult[]> {
    const results: ClassificationResult[] = [];
    const batchSize = 5;

    for (let i = 0; i < requests.length; i += batchSize) {
      const batch = requests.slice(i, i + batchSize);
      const batchResults = await Promise.all(
        batch.map(req => this.classifyFile(req))
      );
      results.push(...batchResults);

      // Small delay between batches to avoid rate limits.
      if (i + batchSize < requests.length) {
        await new Promise(resolve => setTimeout(resolve, 100));
      }
    }

    return results;
  }

  /** Marks the provider uninitialized; `init()` must be called again before use. */
  async dispose(): Promise<void> {
    this.initialized = false;
  }
}
152 changes: 152 additions & 0 deletions src/ai/providers/gemini.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,152 @@
/**
* Google Gemini Provider
* Uses Google Generative AI API for file classification
*/

import type {
AIProvider,
ClassificationRequest,
ClassificationResult,
ProviderConfig,
} from './types.js';
import { buildClassificationPrompt, parseClassificationResponse } from './types.js';

/** Configuration for {@link GeminiProvider}. */
export interface GeminiConfig extends ProviderConfig {
  /** Discriminant selecting the Gemini provider. */
  type: 'gemini';
  /** Google Generative AI API key. */
  apiKey?: string;
  /** Model id; defaults to `gemini-1.5-flash`. */
  model?: string;
}

/** One conversation turn in the Gemini `generateContent` request payload. */
interface GeminiContent {
  parts: Array<{ text: string }>;
  role: 'user' | 'model';
}

/**
 * Successful Gemini `generateContent` response, modeling only the fields
 * this provider touches (the first candidate's first text part).
 */
interface GeminiResponse {
  candidates: Array<{
    content: {
      parts: Array<{ text: string }>;
      role: string;
    };
    finishReason: string;
  }>;
  /** Token accounting — parsed but currently unused by this provider. */
  usageMetadata?: {
    promptTokenCount: number;
    candidatesTokenCount: number;
    totalTokenCount: number;
  };
}

/**
 * File-classification provider backed by the Google Gemini
 * (Generative Language) REST API.
 *
 * Lifecycle: construct with {@link GeminiConfig}, call `init()` once
 * (performs a lightweight key-validation request), then `classifyFile` /
 * `classifyBatch`. `dispose()` resets the provider to uninitialized.
 */
export class GeminiProvider implements AIProvider {
  readonly name = 'Gemini (Google)';
  readonly type = 'gemini' as const;

  private apiKey: string;
  private model: string;
  private initialized = false;

  constructor(config: GeminiConfig) {
    this.apiKey = config.apiKey || '';
    this.model = config.model || 'gemini-1.5-flash';
  }

  /** True once `init()` has succeeded and an API key is present. */
  isReady(): boolean {
    return this.initialized && !!this.apiKey;
  }

  /**
   * Validates the API key by listing available models. If the network is
   * unreachable, the key is assumed valid so the provider still works offline
   * at init time (the first classify call will surface real auth errors).
   * @throws Error when the key is missing or the API rejects it.
   */
  async init(): Promise<void> {
    if (!this.apiKey) {
      throw new Error('Gemini API key is required');
    }

    try {
      // Send the key in the `x-goog-api-key` header instead of the query
      // string so it cannot leak into URL logs, proxies, or browser history.
      const response = await fetch(
        'https://generativelanguage.googleapis.com/v1beta/models',
        { headers: { 'x-goog-api-key': this.apiKey } }
      );

      if (!response.ok) {
        const error = await response.text();
        throw new Error(`Gemini API validation failed (${response.status}): ${error}`);
      }

      this.initialized = true;
    } catch (error) {
      // fetch() rejects with a TypeError on network failures. Checking the
      // error type (rather than message text) guarantees our own validation
      // Error above is always rethrown, never swallowed.
      if (error instanceof TypeError) {
        // Network error - assume key is valid
        this.initialized = true;
      } else {
        throw error;
      }
    }
  }

  /**
   * Classifies a single file by sending the built prompt to `generateContent`.
   * @throws Error when the provider is not initialized or the API responds
   *   with a non-2xx status (message includes the status code and body).
   */
  async classifyFile(request: ClassificationRequest): Promise<ClassificationResult> {
    if (!this.isReady()) {
      throw new Error('Gemini provider is not initialized');
    }

    const prompt = buildClassificationPrompt(request);

    const contents: GeminiContent[] = [
      {
        role: 'user',
        parts: [{ text: prompt }],
      },
    ];

    // Key travels in a header (see init) — keep it out of the URL.
    const url = `https://generativelanguage.googleapis.com/v1beta/models/${this.model}:generateContent`;

    const response = await fetch(url, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        'x-goog-api-key': this.apiKey,
      },
      body: JSON.stringify({
        contents,
        generationConfig: {
          // Near-deterministic output; classification answers are small JSON.
          temperature: 0.1,
          maxOutputTokens: 100,
        },
        systemInstruction: {
          parts: [{ text: 'You are a file classification assistant. Respond only with valid JSON, no markdown formatting.' }],
        },
      }),
    });

    if (!response.ok) {
      const error = await response.text();
      // Surface the HTTP status so 400/403 (bad key), 429 (rate limit) and
      // 5xx failures are distinguishable in logs and caller error handling.
      throw new Error(`Gemini API error (${response.status}): ${error}`);
    }

    const data = await response.json() as GeminiResponse;
    const content = data.candidates[0]?.content?.parts[0]?.text || '';

    return parseClassificationResponse(content);
  }

  /**
   * Classifies many files, issuing up to 5 requests in parallel with a short
   * pause between batches to stay under rate limits. Results are returned in
   * input order. A failure in any request rejects the whole batch.
   */
  async classifyBatch(requests: ClassificationRequest[]): Promise<ClassificationResult[]> {
    const results: ClassificationResult[] = [];
    const batchSize = 5;

    for (let i = 0; i < requests.length; i += batchSize) {
      const batch = requests.slice(i, i + batchSize);
      const batchResults = await Promise.all(
        batch.map(req => this.classifyFile(req))
      );
      results.push(...batchResults);

      // Small delay between batches to avoid rate limits.
      if (i + batchSize < requests.length) {
        await new Promise(resolve => setTimeout(resolve, 100));
      }
    }

    return results;
  }

  /** Marks the provider uninitialized; `init()` must be called again before use. */
  async dispose(): Promise<void> {
    this.initialized = false;
  }
}
Loading
Loading