From a2c331ebbec0b5f43698fc6b6314df41d16abc09 Mon Sep 17 00:00:00 2001 From: Copilot <198982749+Copilot@users.noreply.github.com> Date: Mon, 11 Aug 2025 18:07:36 +0000 Subject: [PATCH 1/4] Initial plan From 9dad692316d25b126ffe9aa8086a7608412dc0cf Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Mon, 11 Aug 2025 18:12:06 +0000 Subject: [PATCH 2/4] Initial commit: Add OpenRouter provider support - planning phase Co-authored-by: malaksedarous <18029995+malaksedarous@users.noreply.github.com> --- tsconfig.json | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/tsconfig.json b/tsconfig.json index 819a630..90e1622 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -17,7 +17,8 @@ "noImplicitReturns": true, "noFallthroughCasesInSwitch": true, "noUncheckedIndexedAccess": true, - "exactOptionalPropertyTypes": true + "exactOptionalPropertyTypes": true, + "types": ["node"] }, "include": ["src/**/*"], "exclude": ["node_modules", "dist", "**/*.test.ts", "**/*.spec.ts"] From d6e1fcc9a6b7779f9ffb6f900ece0bc435b902b0 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Mon, 11 Aug 2025 18:19:05 +0000 Subject: [PATCH 3/4] Implement OpenRouter provider support with complete functionality Co-authored-by: malaksedarous <18029995+malaksedarous@users.noreply.github.com> --- src/config/manager.ts | 18 +-- src/config/schema.ts | 3 +- src/providers/factory.ts | 4 + src/providers/openrouter.ts | 62 ++++++++++ test/openrouter.test.ts | 224 ++++++++++++++++++++++++++++++++++++ 5 files changed, 302 insertions(+), 9 deletions(-) create mode 100644 src/providers/openrouter.ts create mode 100644 test/openrouter.test.ts diff --git a/src/config/manager.ts b/src/config/manager.ts index 5e94ca2..b34ecd1 100644 --- a/src/config/manager.ts +++ b/src/config/manager.ts @@ -27,7 +27,8 @@ export class ConfigurationManager { ...(process.env.CONTEXT_OPT_LLM_MODEL && { model: process.env.CONTEXT_OPT_LLM_MODEL }), ...(process.env.CONTEXT_OPT_GEMINI_KEY && { geminiKey: process.env.CONTEXT_OPT_GEMINI_KEY }), ...(process.env.CONTEXT_OPT_CLAUDE_KEY && { claudeKey: process.env.CONTEXT_OPT_CLAUDE_KEY }), - ...(process.env.CONTEXT_OPT_OPENAI_KEY && { openaiKey: process.env.CONTEXT_OPT_OPENAI_KEY }) + ...(process.env.CONTEXT_OPT_OPENAI_KEY && { openaiKey: process.env.CONTEXT_OPT_OPENAI_KEY }), + ...(process.env.CONTEXT_OPT_OPENROUTER_KEY && { openrouterKey: process.env.CONTEXT_OPT_OPENROUTER_KEY }) }, research: { ...(process.env.CONTEXT_OPT_EXA_KEY && { exaKey: process.env.CONTEXT_OPT_EXA_KEY }) @@ -50,18 +51,18 @@ export class ConfigurationManager { return config; } - private static getLLMProvider(): 'gemini' | 'claude' | 'openai' { + private static getLLMProvider(): 'gemini' | 'claude' | 'openai' | 'openrouter' { const provider = process.env.CONTEXT_OPT_LLM_PROVIDER?.toLowerCase(); if (!provider) { - throw new Error('CONTEXT_OPT_LLM_PROVIDER environment variable is required. Set to "gemini", "claude", or "openai"'); + throw new Error('CONTEXT_OPT_LLM_PROVIDER environment variable is required. Set to "gemini", "claude", "openai", or "openrouter"'); } - if (!['gemini', 'claude', 'openai'].includes(provider)) { - throw new Error(`Invalid CONTEXT_OPT_LLM_PROVIDER: ${provider}. Must be "gemini", "claude", or "openai"`); + if (!['gemini', 'claude', 'openai', 'openrouter'].includes(provider)) { + throw new Error(`Invalid CONTEXT_OPT_LLM_PROVIDER: ${provider}. 
Must be "gemini", "claude", "openai", or "openrouter"`); } - return provider as 'gemini' | 'claude' | 'openai'; + return provider as 'gemini' | 'claude' | 'openai' | 'openrouter'; } private static parseAllowedBasePaths(): string[] { @@ -108,7 +109,7 @@ export class ConfigurationManager { throw new Error('Configuration error: llm.provider is required'); } - const validProviders = ['gemini', 'claude', 'openai']; + const validProviders = ['gemini', 'claude', 'openai', 'openrouter']; if (!validProviders.includes(config.llm.provider)) { throw new Error(`Configuration error: llm.provider must be one of: ${validProviders.join(', ')}`); } @@ -210,7 +211,8 @@ export class ConfigurationManager { model: config.llm.model, hasGeminiKey: !!config.llm.geminiKey, hasClaudeKey: !!config.llm.claudeKey, - hasOpenaiKey: !!config.llm.openaiKey + hasOpenaiKey: !!config.llm.openaiKey, + hasOpenrouterKey: !!config.llm.openrouterKey }, research: { hasExaKey: !!config.research.exaKey diff --git a/src/config/schema.ts b/src/config/schema.ts index 29edf25..3a7fecc 100644 --- a/src/config/schema.ts +++ b/src/config/schema.ts @@ -15,11 +15,12 @@ export interface MCPServerConfig { // LLM providers llm: { - provider: 'gemini' | 'claude' | 'openai'; + provider: 'gemini' | 'claude' | 'openai' | 'openrouter'; model?: string; // Optional custom model geminiKey?: string; claudeKey?: string; openaiKey?: string; + openrouterKey?: string; }; // Research tools diff --git a/src/providers/factory.ts b/src/providers/factory.ts index 540b625..bcec0b9 100644 --- a/src/providers/factory.ts +++ b/src/providers/factory.ts @@ -8,6 +8,7 @@ import { BaseLLMProvider, LLMResponse } from './base'; import { GeminiProvider } from './gemini'; import { ClaudeProvider } from './claude'; import { OpenAIProvider } from './openai'; +import { OpenRouterProvider } from './openrouter'; export interface LLMProvider { processRequest(prompt: string, model?: string, apiKey?: string): Promise; @@ -29,6 +30,9 @@ export class LLMProviderFactory { case 'openai': this.providers.set(providerName, new OpenAIProvider()); break; + case 'openrouter': + this.providers.set(providerName, new OpenRouterProvider()); + break; default: throw new Error(`Unknown provider: ${providerName}`); } diff --git a/src/providers/openrouter.ts b/src/providers/openrouter.ts new file mode 100644 index 0000000..e586a09 --- /dev/null +++ b/src/providers/openrouter.ts @@ -0,0 +1,62 @@ +/** + * OpenRouter provider implementation + */ + +import { BaseLLMProvider, LLMResponse } from './base'; + +export class OpenRouterProvider extends BaseLLMProvider { + readonly name = 'OpenRouter'; + readonly defaultModel = 'openai/gpt-4o-mini'; + readonly apiKeyUrl = 'https://openrouter.ai/'; + readonly apiKeyPrefix = undefined; // Not standardized + + async processRequest(prompt: string, model?: string, apiKey?: string): Promise { + if (!apiKey) { + return this.createErrorResponse('OpenRouter API key not configured'); + } + + try { + const body = this.createStandardRequest(prompt, model || this.defaultModel); + const headers: Record = { + 'Authorization': `Bearer ${apiKey}`, + 'Content-Type': 'application/json' + }; + + // Add optional branding headers if environment variables are present + if (process.env.CONTEXT_OPT_APP_URL) { + headers['HTTP-Referer'] = process.env.CONTEXT_OPT_APP_URL; + } + if (process.env.CONTEXT_OPT_APP_NAME) { + headers['X-Title'] = process.env.CONTEXT_OPT_APP_NAME; + } + + const response = await fetch('https://openrouter.ai/api/v1/chat/completions', { + method: 'POST', + headers, + 
body: JSON.stringify(body) + }); + + if (!response.ok) { + let errorMsg = `HTTP ${response.status}`; + try { + const errorJson: any = await response.json(); + errorMsg = errorJson?.error?.message || errorMsg; + } catch { + // Ignore JSON parsing errors, use HTTP status + } + return this.createErrorResponse(`OpenRouter request failed: ${errorMsg}`); + } + + const json: any = await response.json(); + const content = json?.choices?.[0]?.message?.content; + if (!content) { + return this.createErrorResponse('No response from OpenRouter'); + } + + return this.createSuccessResponse(content); + } catch (error: unknown) { + const errorMessage = error instanceof Error ? error.message : 'Unknown error'; + return this.createErrorResponse(`OpenRouter processing failed: ${errorMessage}`); + } + } +} \ No newline at end of file diff --git a/test/openrouter.test.ts b/test/openrouter.test.ts new file mode 100644 index 0000000..49072ab --- /dev/null +++ b/test/openrouter.test.ts @@ -0,0 +1,224 @@ +/** + * Tests for OpenRouter provider + */ + +import { OpenRouterProvider } from '../src/providers/openrouter'; + +// Mock global fetch +const mockFetch = jest.fn(); +(global as any).fetch = mockFetch; + +describe('OpenRouterProvider', () => { + let provider: OpenRouterProvider; + + beforeEach(() => { + provider = new OpenRouterProvider(); + jest.clearAllMocks(); + }); + + afterEach(() => { + jest.restoreAllMocks(); + }); + + describe('Provider properties', () => { + it('should have correct provider properties', () => { + expect(provider.name).toBe('OpenRouter'); + expect(provider.defaultModel).toBe('openai/gpt-4o-mini'); + expect(provider.apiKeyUrl).toBe('https://openrouter.ai/'); + expect(provider.apiKeyPrefix).toBeUndefined(); + }); + }); + + describe('processRequest', () => { + it('should return error when API key is not provided', async () => { + const result = await provider.processRequest('test prompt'); + + expect(result.success).toBe(false); + expect(result.error).toBe('OpenRouter API key not configured'); + expect(result.content).toBe(''); + }); + + it('should make successful request and return content', async () => { + const mockResponse = { + ok: true, + json: jest.fn().mockResolvedValue({ + choices: [ + { + message: { + content: 'Test response from OpenRouter' + } + } + ] + }) + }; + mockFetch.mockResolvedValue(mockResponse); + + const result = await provider.processRequest('test prompt', 'test-model', 'test-api-key'); + + expect(result.success).toBe(true); + expect(result.content).toBe('Test response from OpenRouter'); + expect(result.error).toBeUndefined(); + + expect(mockFetch).toHaveBeenCalledWith( + 'https://openrouter.ai/api/v1/chat/completions', + { + method: 'POST', + headers: { + 'Authorization': 'Bearer test-api-key', + 'Content-Type': 'application/json' + }, + body: JSON.stringify({ + model: 'test-model', + temperature: 0.1, + max_tokens: 4000, + messages: [{ role: 'user', content: 'test prompt' }] + }) + } + ); + }); + + it('should use default model when model is not specified', async () => { + const mockResponse = { + ok: true, + json: jest.fn().mockResolvedValue({ + choices: [ + { + message: { + content: 'Response with default model' + } + } + ] + }) + }; + mockFetch.mockResolvedValue(mockResponse); + + const result = await provider.processRequest('test prompt', undefined, 'test-api-key'); + + expect(result.success).toBe(true); + expect(mockFetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + body: expect.stringContaining('"model":"openai/gpt-4o-mini"') + }) + 
); + }); + + it('should include optional branding headers when environment variables are set', async () => { + // Set environment variables + process.env.CONTEXT_OPT_APP_URL = 'https://example.com'; + process.env.CONTEXT_OPT_APP_NAME = 'Test App'; + + const mockResponse = { + ok: true, + json: jest.fn().mockResolvedValue({ + choices: [{ message: { content: 'Test response' } }] + }) + }; + mockFetch.mockResolvedValue(mockResponse); + + await provider.processRequest('test prompt', undefined, 'test-api-key'); + + expect(mockFetch).toHaveBeenCalledWith( + expect.any(String), + expect.objectContaining({ + headers: expect.objectContaining({ + 'Authorization': 'Bearer test-api-key', + 'Content-Type': 'application/json', + 'HTTP-Referer': 'https://example.com', + 'X-Title': 'Test App' + }) + }) + ); + + // Clean up environment variables + delete process.env.CONTEXT_OPT_APP_URL; + delete process.env.CONTEXT_OPT_APP_NAME; + }); + + it('should handle HTTP error responses', async () => { + const mockResponse = { + ok: false, + status: 400, + json: jest.fn().mockResolvedValue({ + error: { + message: 'Bad Request - Invalid model' + } + }) + }; + mockFetch.mockResolvedValue(mockResponse); + + const result = await provider.processRequest('test prompt', undefined, 'test-api-key'); + + expect(result.success).toBe(false); + expect(result.error).toBe('OpenRouter request failed: Bad Request - Invalid model'); + expect(result.content).toBe(''); + }); + + it('should handle HTTP error without error JSON', async () => { + const mockResponse = { + ok: false, + status: 500, + json: jest.fn().mockRejectedValue(new Error('Invalid JSON')) + }; + mockFetch.mockResolvedValue(mockResponse); + + const result = await provider.processRequest('test prompt', undefined, 'test-api-key'); + + expect(result.success).toBe(false); + expect(result.error).toBe('OpenRouter request failed: HTTP 500'); + expect(result.content).toBe(''); + }); + + it('should handle malformed response (no choices)', async () => { + const mockResponse = { + ok: true, + json: jest.fn().mockResolvedValue({ + // Missing choices array + }) + }; + mockFetch.mockResolvedValue(mockResponse); + + const result = await provider.processRequest('test prompt', undefined, 'test-api-key'); + + expect(result.success).toBe(false); + expect(result.error).toBe('No response from OpenRouter'); + expect(result.content).toBe(''); + }); + + it('should handle malformed response (empty choices)', async () => { + const mockResponse = { + ok: true, + json: jest.fn().mockResolvedValue({ + choices: [] + }) + }; + mockFetch.mockResolvedValue(mockResponse); + + const result = await provider.processRequest('test prompt', undefined, 'test-api-key'); + + expect(result.success).toBe(false); + expect(result.error).toBe('No response from OpenRouter'); + expect(result.content).toBe(''); + }); + + it('should handle fetch network errors', async () => { + mockFetch.mockRejectedValue(new Error('Network error')); + + const result = await provider.processRequest('test prompt', undefined, 'test-api-key'); + + expect(result.success).toBe(false); + expect(result.error).toBe('OpenRouter processing failed: Network error'); + expect(result.content).toBe(''); + }); + + it('should handle unknown errors', async () => { + mockFetch.mockRejectedValue('Unknown error type'); + + const result = await provider.processRequest('test prompt', undefined, 'test-api-key'); + + expect(result.success).toBe(false); + expect(result.error).toBe('OpenRouter processing failed: Unknown error'); + expect(result.content).toBe(''); 
+ }); + }); +}); \ No newline at end of file From 43a237f2a97e91f270f928e8dbbfab3c523a9d46 Mon Sep 17 00:00:00 2001 From: Malak Sedarous Date: Mon, 11 Aug 2025 20:52:03 +0200 Subject: [PATCH 4/4] refactor: extract LLM provider list to constant to eliminate duplication - Added VALID_LLM_PROVIDERS constant to avoid code duplication - Updated getLLMProvider() and validateConfiguration() to use the constant - Error messages now dynamically generate from the constant - Maintains same functionality while improving maintainability --- src/config/manager.ts | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/src/config/manager.ts b/src/config/manager.ts index b34ecd1..a20bbeb 100644 --- a/src/config/manager.ts +++ b/src/config/manager.ts @@ -10,6 +10,7 @@ import { Logger } from '../utils/logger'; export class ConfigurationManager { private static config: MCPServerConfig | null = null; + private static readonly VALID_LLM_PROVIDERS = ['gemini', 'claude', 'openai', 'openrouter'] as const; static async loadConfiguration(): Promise { Logger.debug('Loading configuration from environment variables...'); @@ -55,11 +56,11 @@ export class ConfigurationManager { const provider = process.env.CONTEXT_OPT_LLM_PROVIDER?.toLowerCase(); if (!provider) { - throw new Error('CONTEXT_OPT_LLM_PROVIDER environment variable is required. Set to "gemini", "claude", "openai", or "openrouter"'); + throw new Error(`CONTEXT_OPT_LLM_PROVIDER environment variable is required. Set to ${this.VALID_LLM_PROVIDERS.map(p => `"${p}"`).join(', ')}`); } - if (!['gemini', 'claude', 'openai', 'openrouter'].includes(provider)) { - throw new Error(`Invalid CONTEXT_OPT_LLM_PROVIDER: ${provider}. Must be "gemini", "claude", "openai", or "openrouter"`); + if (!this.VALID_LLM_PROVIDERS.includes(provider as any)) { + throw new Error(`Invalid CONTEXT_OPT_LLM_PROVIDER: ${provider}. Must be ${this.VALID_LLM_PROVIDERS.map(p => `"${p}"`).join(', ')}`); } return provider as 'gemini' | 'claude' | 'openai' | 'openrouter'; @@ -109,7 +110,7 @@ export class ConfigurationManager { throw new Error('Configuration error: llm.provider is required'); } - const validProviders = ['gemini', 'claude', 'openai', 'openrouter']; + const validProviders = [...this.VALID_LLM_PROVIDERS]; if (!validProviders.includes(config.llm.provider)) { throw new Error(`Configuration error: llm.provider must be one of: ${validProviders.join(', ')}`); }
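
Below is a minimal usage sketch, not part of the patch series, showing how the new provider added above can be exercised end to end. The environment variable names, the OpenRouterProvider class, its processRequest(prompt, model?, apiKey?) signature, the default model 'openai/gpt-4o-mini', and the success/content/error result fields are all taken from the diffs; the import path, the prompt text, and the standalone-script framing are illustrative only. In the server itself the provider is constructed through LLMProviderFactory and keyed via ConfigurationManager, as the factory.ts and manager.ts hunks show.

// Minimal sketch (assumptions noted above).
// In the running server, CONTEXT_OPT_LLM_PROVIDER=openrouter and
// CONTEXT_OPT_OPENROUTER_KEY select and key this provider through
// ConfigurationManager; here the key is passed to the provider directly.
import { OpenRouterProvider } from './src/providers/openrouter'; // illustrative path

// Optional branding headers read by the provider itself (see openrouter.ts):
process.env.CONTEXT_OPT_APP_URL = 'https://example.com'; // sent as HTTP-Referer
process.env.CONTEXT_OPT_APP_NAME = 'Example App';        // sent as X-Title

async function main(): Promise<void> {
  const provider = new OpenRouterProvider();

  // The model argument falls back to provider.defaultModel ('openai/gpt-4o-mini')
  // when omitted; the prompt below is illustrative.
  const result = await provider.processRequest(
    'Say hello in one sentence.',
    undefined,
    process.env.CONTEXT_OPT_OPENROUTER_KEY
  );

  if (result.success) {
    console.log(result.content);
  } else {
    console.error(result.error);
  }
}

main().catch(console.error);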