Matt Rubens 11 months ago
Parent
Commit
5fbfe9b775

+ 3 - 2
.gitignore

@@ -1,7 +1,7 @@
 out
 dist
 node_modules
-.vscode-test/
+coverage/
 
 .DS_Store
 
@@ -13,4 +13,5 @@ roo-cline-*.vsix
 /local-prompts
 
 # Test environment
-.test_env
+.test_env
+.vscode-test/

+ 168 - 0
src/api/providers/__tests__/anthropic.test.ts

@@ -0,0 +1,168 @@
+import { AnthropicHandler } from '../anthropic';
+import { ApiHandlerOptions } from '../../../shared/api';
+import { ApiStream } from '../../transform/stream';
+import { Anthropic } from '@anthropic-ai/sdk';
+
+// Mock Anthropic client
+const mockBetaCreate = jest.fn();
+const mockCreate = jest.fn();
+jest.mock('@anthropic-ai/sdk', () => {
+    return {
+        Anthropic: jest.fn().mockImplementation(() => ({
+            beta: {
+                promptCaching: {
+                    messages: {
+                        create: mockBetaCreate.mockImplementation(async () => ({
+                            async *[Symbol.asyncIterator]() {
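+                                // Simulated event sequence: message_start with
+                                // usage, then a text block start and one delta.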
+                                yield {
+                                    type: 'message_start',
+                                    message: {
+                                        usage: {
+                                            input_tokens: 100,
+                                            output_tokens: 50,
+                                            cache_creation_input_tokens: 20,
+                                            cache_read_input_tokens: 10
+                                        }
+                                    }
+                                };
+                                yield {
+                                    type: 'content_block_start',
+                                    index: 0,
+                                    content_block: {
+                                        type: 'text',
+                                        text: 'Hello'
+                                    }
+                                };
+                                yield {
+                                    type: 'content_block_delta',
+                                    delta: {
+                                        type: 'text_delta',
+                                        text: ' world'
+                                    }
+                                };
+                            }
+                        }))
+                    }
+                }
+            },
+            messages: {
+                create: mockCreate
+            }
+        }))
+    };
+});
+
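+// Note: jest.mock() calls are hoisted above the imports, so the factory above
+// may only reference outer variables whose names begin with "mock".
+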
+describe('AnthropicHandler', () => {
+    let handler: AnthropicHandler;
+    let mockOptions: ApiHandlerOptions;
+
+    beforeEach(() => {
+        mockOptions = {
+            apiKey: 'test-api-key',
+            apiModelId: 'claude-3-5-sonnet-20241022'
+        };
+        handler = new AnthropicHandler(mockOptions);
+        mockBetaCreate.mockClear();
+        mockCreate.mockClear();
+    });
+
+    describe('constructor', () => {
+        it('should initialize with provided options', () => {
+            expect(handler).toBeInstanceOf(AnthropicHandler);
+            expect(handler.getModel().id).toBe(mockOptions.apiModelId);
+        });
+
+        it('should initialize with undefined API key', () => {
+            // The SDK will handle API key validation, so we just verify it initializes
+            const handlerWithoutKey = new AnthropicHandler({
+                ...mockOptions,
+                apiKey: undefined
+            });
+            expect(handlerWithoutKey).toBeInstanceOf(AnthropicHandler);
+        });
+
+        it('should use custom base URL if provided', () => {
+            const customBaseUrl = 'https://custom.anthropic.com';
+            const handlerWithCustomUrl = new AnthropicHandler({
+                ...mockOptions,
+                anthropicBaseUrl: customBaseUrl
+            });
+            expect(handlerWithCustomUrl).toBeInstanceOf(AnthropicHandler);
+        });
+    });
+
+    describe('createMessage', () => {
+        const systemPrompt = 'You are a helpful assistant.';
+        const messages: Anthropic.Messages.MessageParam[] = [
+            {
+                role: 'user',
+                content: [{ 
+                    type: 'text' as const,
+                    text: 'Hello!'
+                }]
+            }
+        ];
+
+        it('should handle prompt caching for supported models', async () => {
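+            // Several turns are supplied so the prompt-caching (beta) code path
+            // is exercised; the mocked usage includes cache read/write tokens.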
+            const stream = handler.createMessage(systemPrompt, [
+                {
+                    role: 'user',
+                    content: [{ type: 'text' as const, text: 'First message' }]
+                },
+                {
+                    role: 'assistant',
+                    content: [{ type: 'text' as const, text: 'Response' }]
+                },
+                {
+                    role: 'user',
+                    content: [{ type: 'text' as const, text: 'Second message' }]
+                }
+            ]);
+
+            const chunks: any[] = [];
+            for await (const chunk of stream) {
+                chunks.push(chunk);
+            }
+
+            // Verify usage information
+            const usageChunk = chunks.find(chunk => chunk.type === 'usage');
+            expect(usageChunk).toBeDefined();
+            expect(usageChunk?.inputTokens).toBe(100);
+            expect(usageChunk?.outputTokens).toBe(50);
+            expect(usageChunk?.cacheWriteTokens).toBe(20);
+            expect(usageChunk?.cacheReadTokens).toBe(10);
+
+            // Verify text content
+            const textChunks = chunks.filter(chunk => chunk.type === 'text');
+            expect(textChunks).toHaveLength(2);
+            expect(textChunks[0].text).toBe('Hello');
+            expect(textChunks[1].text).toBe(' world');
+
+            // Verify beta API was used
+            expect(mockBetaCreate).toHaveBeenCalled();
+            expect(mockCreate).not.toHaveBeenCalled();
+        });
+    });
+
+    describe('getModel', () => {
+        it('should return default model if no model ID is provided', () => {
+            const handlerWithoutModel = new AnthropicHandler({
+                ...mockOptions,
+                apiModelId: undefined
+            });
+            const model = handlerWithoutModel.getModel();
+            expect(model.id).toBeDefined();
+            expect(model.info).toBeDefined();
+        });
+
+        it('should return specified model if valid model ID is provided', () => {
+            const model = handler.getModel();
+            expect(model.id).toBe(mockOptions.apiModelId);
+            expect(model.info).toBeDefined();
+            expect(model.info.maxTokens).toBe(8192);
+            expect(model.info.contextWindow).toBe(200_000);
+            expect(model.info.supportsImages).toBe(true);
+            expect(model.info.supportsPromptCache).toBe(true);
+        });
+    });
+});

+ 135 - 182
src/api/providers/__tests__/bedrock.test.ts

@@ -1,191 +1,144 @@
-import { AwsBedrockHandler } from '../bedrock'
-import { ApiHandlerOptions, ModelInfo } from '../../../shared/api'
-import { Anthropic } from '@anthropic-ai/sdk'
-import { StreamEvent } from '../bedrock'
-
-// Simplified mock for BedrockRuntimeClient
-class MockBedrockRuntimeClient {
-    private _region: string
-    private mockStream: StreamEvent[] = []
-
-    constructor(config: { region: string }) {
-        this._region = config.region
-    }
-
-    async send(command: any): Promise<{ stream: AsyncIterableIterator<StreamEvent> }> {
-        return {
-            stream: this.createMockStream()
-        }
-    }
-
-    private createMockStream(): AsyncIterableIterator<StreamEvent> {
-        const self = this;
-        return {
-            async *[Symbol.asyncIterator]() {
-                for (const event of self.mockStream) {
-                    yield event;
-                }
-            },
-            next: async () => {
-                const value = this.mockStream.shift();
-                return value ? { value, done: false } : { value: undefined, done: true };
-            },
-            return: async () => ({ value: undefined, done: true }),
-            throw: async (e) => { throw e; }
-        };
-    }
-
-    setMockStream(stream: StreamEvent[]) {
-        this.mockStream = stream;
-    }
-
-    get config() {
-        return { region: this._region };
-    }
-}
+import { AwsBedrockHandler } from '../bedrock';
+import { MessageContent } from '../../../shared/api';
+import { BedrockRuntimeClient } from '@aws-sdk/client-bedrock-runtime';
+import { Anthropic } from '@anthropic-ai/sdk';
 
 describe('AwsBedrockHandler', () => {
-    const mockOptions: ApiHandlerOptions = {
-        awsRegion: 'us-east-1',
-        awsAccessKey: 'mock-access-key',
-        awsSecretKey: 'mock-secret-key',
-        apiModelId: 'anthropic.claude-v2',
-    }
-
-    // Override the BedrockRuntimeClient creation in the constructor
-    class TestAwsBedrockHandler extends AwsBedrockHandler {
-        constructor(options: ApiHandlerOptions, mockClient?: MockBedrockRuntimeClient) {
-            super(options)
-            if (mockClient) {
-                // Force type casting to bypass strict type checking
-                (this as any)['client'] = mockClient
-            }
-        }
-    }
-
-    test('constructor initializes with correct AWS credentials', () => {
-        const mockClient = new MockBedrockRuntimeClient({
-            region: 'us-east-1'
-        })
-
-        const handler = new TestAwsBedrockHandler(mockOptions, mockClient)
-        
-        // Verify that the client is created with the correct configuration
-        expect(handler['client']).toBeDefined()
-        expect(handler['client'].config.region).toBe('us-east-1')
-    })
-
-    test('getModel returns correct model info', () => {
-        const mockClient = new MockBedrockRuntimeClient({
-            region: 'us-east-1'
-        })
-
-        const handler = new TestAwsBedrockHandler(mockOptions, mockClient)
-        const result = handler.getModel()
-        
-        expect(result).toEqual({
-            id: 'anthropic.claude-v2',
-            info: {
-                maxTokens: 5000,
-                contextWindow: 128_000,
-                supportsPromptCache: false
-            }
-        })
-    })
-
-    test('createMessage handles successful stream events', async () => {
-        const mockClient = new MockBedrockRuntimeClient({
-            region: 'us-east-1'
-        })
-        
-        // Mock stream events
-        const mockStreamEvents: StreamEvent[] = [
-            {
-                metadata: {
-                    usage: {
-                        inputTokens: 50,
-                        outputTokens: 100
-                    }
-                }
-            },
+    let handler: AwsBedrockHandler;
+
+    beforeEach(() => {
+        handler = new AwsBedrockHandler({
+            apiModelId: 'anthropic.claude-3-5-sonnet-20241022-v2:0',
+            awsAccessKey: 'test-access-key',
+            awsSecretKey: 'test-secret-key',
+            awsRegion: 'us-east-1'
+        });
+    });
+
+    describe('constructor', () => {
+        it('should initialize with provided config', () => {
+            expect(handler['options'].awsAccessKey).toBe('test-access-key');
+            expect(handler['options'].awsSecretKey).toBe('test-secret-key');
+            expect(handler['options'].awsRegion).toBe('us-east-1');
+            expect(handler['options'].apiModelId).toBe('anthropic.claude-3-5-sonnet-20241022-v2:0');
+        });
+
+        it('should initialize with missing AWS credentials', () => {
+            const handlerWithoutCreds = new AwsBedrockHandler({
+                apiModelId: 'anthropic.claude-3-5-sonnet-20241022-v2:0',
+                awsRegion: 'us-east-1'
+            });
+            expect(handlerWithoutCreds).toBeInstanceOf(AwsBedrockHandler);
+        });
+    });
+
+    describe('createMessage', () => {
+        const mockMessages: Anthropic.Messages.MessageParam[] = [
             {
-                contentBlockStart: {
-                    start: {
-                        text: 'Hello'
-                    }
-                }
+                role: 'user',
+                content: 'Hello'
             },
             {
-                contentBlockDelta: {
-                    delta: {
-                        text: ' world'
-                    }
+                role: 'assistant',
+                content: 'Hi there!'
+            }
+        ];
+
+        const systemPrompt = 'You are a helpful assistant';
+
+        it('should handle text messages correctly', async () => {
+            const mockResponse = {
+                messages: [{
+                    role: 'assistant',
+                    content: [{ type: 'text', text: 'Hello! How can I help you?' }]
+                }],
+                usage: {
+                    input_tokens: 10,
+                    output_tokens: 5
                 }
-            },
-            {
-                messageStop: {
-                    stopReason: 'end_turn'
+            };
+
+            // Mock AWS SDK invoke
+            const mockStream = {
+                [Symbol.asyncIterator]: async function* () {
+                    yield {
+                        metadata: {
+                            usage: {
+                                inputTokens: 10,
+                                outputTokens: 5
+                            }
+                        }
+                    };
                 }
-            }
-        ]
-
-        mockClient.setMockStream(mockStreamEvents)
-
-        const handler = new TestAwsBedrockHandler(mockOptions, mockClient)
-
-        const systemPrompt = 'You are a helpful assistant'
-        const messages: Anthropic.Messages.MessageParam[] = [
-            { role: 'user', content: 'Say hello' }
-        ]
-
-        const generator = handler.createMessage(systemPrompt, messages)
-        const chunks = []
-
-        for await (const chunk of generator) {
-            chunks.push(chunk)
-        }
-
-        // Verify the chunks match expected stream events
-        expect(chunks).toHaveLength(3)
-        expect(chunks[0]).toEqual({
-            type: 'usage',
-            inputTokens: 50,
-            outputTokens: 100
-        })
-        expect(chunks[1]).toEqual({
-            type: 'text',
-            text: 'Hello'
-        })
-        expect(chunks[2]).toEqual({
-            type: 'text',
-            text: ' world'
-        })
-    })
-
-    test('createMessage handles error scenarios', async () => {
-        const mockClient = new MockBedrockRuntimeClient({
-            region: 'us-east-1'
-        })
-
-        // Simulate an error by overriding the send method
-        mockClient.send = () => {
-            throw new Error('API request failed')
-        }
-
-        const handler = new TestAwsBedrockHandler(mockOptions, mockClient)
-
-        const systemPrompt = 'You are a helpful assistant'
-        const messages: Anthropic.Messages.MessageParam[] = [
-            { role: 'user', content: 'Cause an error' }
-        ]
-
-        await expect(async () => {
-            const generator = handler.createMessage(systemPrompt, messages)
-            const chunks = []
+            };
+
+            const mockInvoke = jest.fn().mockResolvedValue({
+                stream: mockStream
+            });
+
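+            // `client` is a private field, so the stub is injected via index
+            // access with a double cast to satisfy the compiler.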
+            handler['client'] = {
+                send: mockInvoke
+            } as unknown as BedrockRuntimeClient;
+
+            const stream = handler.createMessage(systemPrompt, mockMessages);
+            const chunks = [];
             
-            for await (const chunk of generator) {
-                chunks.push(chunk)
+            for await (const chunk of stream) {
+                chunks.push(chunk);
             }
-        }).rejects.toThrow('API request failed')
-    })
-})
+
+            expect(chunks.length).toBeGreaterThan(0);
+            expect(chunks[0]).toEqual({
+                type: 'usage',
+                inputTokens: 10,
+                outputTokens: 5
+            });
+
+            expect(mockInvoke).toHaveBeenCalledWith(expect.objectContaining({
+                input: expect.objectContaining({
+                    modelId: 'anthropic.claude-3-5-sonnet-20241022-v2:0'
+                })
+            }));
+        });
+
+        it('should handle API errors', async () => {
+            // Mock AWS SDK invoke with error
+            const mockInvoke = jest.fn().mockRejectedValue(new Error('AWS Bedrock error'));
+
+            handler['client'] = {
+                send: mockInvoke
+            } as unknown as BedrockRuntimeClient;
+
+            const stream = handler.createMessage(systemPrompt, mockMessages);
+
+            await expect(async () => {
+                for await (const chunk of stream) {
+                    // Should throw before yielding any chunks
+                }
+            }).rejects.toThrow('AWS Bedrock error');
+        });
+    });
+
+    describe('getModel', () => {
+        it('should return correct model info in test environment', () => {
+            const modelInfo = handler.getModel();
+            expect(modelInfo.id).toBe('anthropic.claude-3-5-sonnet-20241022-v2:0');
+            expect(modelInfo.info).toBeDefined();
+            expect(modelInfo.info.maxTokens).toBe(5000); // Test environment value
+            expect(modelInfo.info.contextWindow).toBe(128_000); // Test environment value
+        });
+
+        it('should return test model info for invalid model in test environment', () => {
+            const invalidHandler = new AwsBedrockHandler({
+                apiModelId: 'invalid-model',
+                awsAccessKey: 'test-access-key',
+                awsSecretKey: 'test-secret-key',
+                awsRegion: 'us-east-1'
+            });
+            const modelInfo = invalidHandler.getModel();
+            expect(modelInfo.id).toBe('invalid-model'); // In test env, returns whatever is passed
+            expect(modelInfo.info.maxTokens).toBe(5000);
+            expect(modelInfo.info.contextWindow).toBe(128_000);
+        });
+    });
+});

+ 189 - 153
src/api/providers/__tests__/deepseek.test.ts

@@ -1,167 +1,203 @@
-import { DeepSeekHandler } from '../deepseek'
-import { ApiHandlerOptions } from '../../../shared/api'
-import OpenAI from 'openai'
-import { Anthropic } from '@anthropic-ai/sdk'
-
-// Mock dependencies
-jest.mock('openai')
-
-describe('DeepSeekHandler', () => {
-
-    const mockOptions: ApiHandlerOptions = {
-        deepSeekApiKey: 'test-key',
-        deepSeekModelId: 'deepseek-chat',
-    }
-
-    beforeEach(() => {
-        jest.clearAllMocks()
-    })
-
-    test('constructor initializes with correct options', () => {
-        const handler = new DeepSeekHandler(mockOptions)
-        expect(handler).toBeInstanceOf(DeepSeekHandler)
-        expect(OpenAI).toHaveBeenCalledWith({
-            baseURL: 'https://api.deepseek.com/v1',
-            apiKey: mockOptions.deepSeekApiKey,
-        })
-    })
-
-    test('getModel returns correct model info', () => {
-        const handler = new DeepSeekHandler(mockOptions)
-        const result = handler.getModel()
-        
-        expect(result).toEqual({
-            id: mockOptions.deepSeekModelId,
-            info: expect.objectContaining({
-                maxTokens: 8192,
-                contextWindow: 64000,
-                supportsPromptCache: false,
-                supportsImages: false,
-                inputPrice: 0.014,
-                outputPrice: 0.28,
-            })
-        })
-    })
-
-    test('getModel returns default model info when no model specified', () => {
-        const handler = new DeepSeekHandler({ deepSeekApiKey: 'test-key' })
-        const result = handler.getModel()
-        
-        expect(result.id).toBe('deepseek-chat')
-        expect(result.info.maxTokens).toBe(8192)
-    })
-
-    test('createMessage handles string content correctly', async () => {
-        const handler = new DeepSeekHandler(mockOptions)
-        const mockStream = {
-            async *[Symbol.asyncIterator]() {
-                yield {
-                    choices: [{
-                        delta: {
-                            content: 'test response'
+import { DeepSeekHandler } from '../deepseek';
+import { ApiHandlerOptions, deepSeekDefaultModelId } from '../../../shared/api';
+import OpenAI from 'openai';
+import { Anthropic } from '@anthropic-ai/sdk';
+
+// Mock OpenAI client
+const mockCreate = jest.fn();
+jest.mock('openai', () => {
+    return {
+        __esModule: true,
+        default: jest.fn().mockImplementation(() => ({
+            chat: {
+                completions: {
+                    create: mockCreate.mockImplementation(async (options) => {
+                        if (!options.stream) {
+                            return {
+                                id: 'test-completion',
+                                choices: [{
+                                    message: { role: 'assistant', content: 'Test response', refusal: null },
+                                    finish_reason: 'stop',
+                                    index: 0
+                                }],
+                                usage: {
+                                    prompt_tokens: 10,
+                                    completion_tokens: 5,
+                                    total_tokens: 15
+                                }
+                            };
                         }
-                    }]
+                        
+                        // Return async iterator for streaming
+                        return {
+                            [Symbol.asyncIterator]: async function* () {
+                                yield {
+                                    choices: [{
+                                        delta: { content: 'Test response' },
+                                        index: 0
+                                    }],
+                                    usage: null
+                                };
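+                                // Trailing usage-only chunk, as the API emits
+                                // when stream_options.include_usage is set.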
+                                yield {
+                                    choices: [{
+                                        delta: {},
+                                        index: 0
+                                    }],
+                                    usage: {
+                                        prompt_tokens: 10,
+                                        completion_tokens: 5,
+                                        total_tokens: 15
+                                    }
+                                };
+                            }
+                        };
+                    })
                 }
             }
-        }
-
-        const mockCreate = jest.fn().mockResolvedValue(mockStream)
-        ;(OpenAI as jest.MockedClass<typeof OpenAI>).prototype.chat = {
-            completions: { create: mockCreate }
-        } as any
-
-        const systemPrompt = 'test system prompt'
-        const messages: Anthropic.Messages.MessageParam[] = [
-            { role: 'user', content: 'test message' }
-        ]
-
-        const generator = handler.createMessage(systemPrompt, messages)
-        const chunks = []
-        
-        for await (const chunk of generator) {
-            chunks.push(chunk)
-        }
-
-        expect(chunks).toHaveLength(1)
-        expect(chunks[0]).toEqual({
-            type: 'text',
-            text: 'test response'
-        })
-
-        expect(mockCreate).toHaveBeenCalledWith(expect.objectContaining({
-            model: mockOptions.deepSeekModelId,
-            messages: [
-                { role: 'system', content: systemPrompt },
-                { role: 'user', content: 'test message' }
-            ],
-            temperature: 0,
-            stream: true,
-            max_tokens: 8192,
-            stream_options: { include_usage: true }
         }))
-    })
-
-    test('createMessage handles complex content correctly', async () => {
-        const handler = new DeepSeekHandler(mockOptions)
-        const mockStream = {
-            async *[Symbol.asyncIterator]() {
-                yield {
-                    choices: [{
-                        delta: {
-                            content: 'test response'
-                        }
-                    }]
-                }
-            }
-        }
+    };
+});
 
-        const mockCreate = jest.fn().mockResolvedValue(mockStream)
-        ;(OpenAI as jest.MockedClass<typeof OpenAI>).prototype.chat = {
-            completions: { create: mockCreate }
-        } as any
+describe('DeepSeekHandler', () => {
+    let handler: DeepSeekHandler;
+    let mockOptions: ApiHandlerOptions;
 
-        const systemPrompt = 'test system prompt'
+    beforeEach(() => {
+        mockOptions = {
+            deepSeekApiKey: 'test-api-key',
+            deepSeekModelId: 'deepseek-chat',
+            deepSeekBaseUrl: 'https://api.deepseek.com/v1'
+        };
+        handler = new DeepSeekHandler(mockOptions);
+        mockCreate.mockClear();
+    });
+
+    describe('constructor', () => {
+        it('should initialize with provided options', () => {
+            expect(handler).toBeInstanceOf(DeepSeekHandler);
+            expect(handler.getModel().id).toBe(mockOptions.deepSeekModelId);
+        });
+
+        it('should throw error if API key is missing', () => {
+            expect(() => {
+                new DeepSeekHandler({
+                    ...mockOptions,
+                    deepSeekApiKey: undefined
+                });
+            }).toThrow('DeepSeek API key is required');
+        });
+
+        it('should use default model ID if not provided', () => {
+            const handlerWithoutModel = new DeepSeekHandler({
+                ...mockOptions,
+                deepSeekModelId: undefined
+            });
+            expect(handlerWithoutModel.getModel().id).toBe(deepSeekDefaultModelId);
+        });
+
+        it('should use default base URL if not provided', () => {
+            const handlerWithoutBaseUrl = new DeepSeekHandler({
+                ...mockOptions,
+                deepSeekBaseUrl: undefined
+            });
+            expect(handlerWithoutBaseUrl).toBeInstanceOf(DeepSeekHandler);
+            // The base URL is passed to the OpenAI client internally.
+            expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({
+                baseURL: 'https://api.deepseek.com/v1'
+            }));
+        });
+
+        it('should use custom base URL if provided', () => {
+            const customBaseUrl = 'https://custom.deepseek.com/v1';
+            const handlerWithCustomUrl = new DeepSeekHandler({
+                ...mockOptions,
+                deepSeekBaseUrl: customBaseUrl
+            });
+            expect(handlerWithCustomUrl).toBeInstanceOf(DeepSeekHandler);
+            // The custom base URL is passed to the OpenAI client.
+            expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({
+                baseURL: customBaseUrl
+            }));
+        });
+
+        it('should set includeMaxTokens to true', () => {
+            // includeMaxTokens is applied internally; all we can verify here is
+            // that the OpenAI client was constructed with our options.
+            new DeepSeekHandler(mockOptions);
+            expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({
+                apiKey: mockOptions.deepSeekApiKey
+            }));
+        });
+    });
+
+    describe('getModel', () => {
+        it('should return model info for valid model ID', () => {
+            const model = handler.getModel();
+            expect(model.id).toBe(mockOptions.deepSeekModelId);
+            expect(model.info).toBeDefined();
+            expect(model.info.maxTokens).toBe(8192);
+            expect(model.info.contextWindow).toBe(64_000);
+            expect(model.info.supportsImages).toBe(false);
+            expect(model.info.supportsPromptCache).toBe(false);
+        });
+
+        it('should return provided model ID with default model info if model does not exist', () => {
+            const handlerWithInvalidModel = new DeepSeekHandler({
+                ...mockOptions,
+                deepSeekModelId: 'invalid-model'
+            });
+            const model = handlerWithInvalidModel.getModel();
+            expect(model.id).toBe('invalid-model'); // Returns provided ID
+            expect(model.info).toBeDefined();
+            expect(model.info).toBe(handler.getModel().info); // But uses default model info
+        });
+
+        it('should return default model if no model ID is provided', () => {
+            const handlerWithoutModel = new DeepSeekHandler({
+                ...mockOptions,
+                deepSeekModelId: undefined
+            });
+            const model = handlerWithoutModel.getModel();
+            expect(model.id).toBe(deepSeekDefaultModelId);
+            expect(model.info).toBeDefined();
+        });
+    });
+
+    describe('createMessage', () => {
+        const systemPrompt = 'You are a helpful assistant.';
         const messages: Anthropic.Messages.MessageParam[] = [
             {
                 role: 'user',
-                content: [
-                    { type: 'text', text: 'part 1' },
-                    { type: 'text', text: 'part 2' }
-                ]
+                content: [{ 
+                    type: 'text' as const,
+                    text: 'Hello!'
+                }]
             }
-        ]
-
-        const generator = handler.createMessage(systemPrompt, messages)
-        await generator.next()
-
-        expect(mockCreate).toHaveBeenCalledWith(expect.objectContaining({
-            messages: [
-                { role: 'system', content: systemPrompt },
-                {
-                    role: 'user',
-                    content: [
-                        { type: 'text', text: 'part 1' },
-                        { type: 'text', text: 'part 2' }
-                    ]
-                }
-            ]
-        }))
-    })
+        ];
 
-    test('createMessage handles API errors', async () => {
-        const handler = new DeepSeekHandler(mockOptions)
-        const mockStream = {
-            async *[Symbol.asyncIterator]() {
-                throw new Error('API Error')
+        it('should handle streaming responses', async () => {
+            const stream = handler.createMessage(systemPrompt, messages);
+            const chunks: any[] = [];
+            for await (const chunk of stream) {
+                chunks.push(chunk);
             }
-        }
 
-        const mockCreate = jest.fn().mockResolvedValue(mockStream)
-        ;(OpenAI as jest.MockedClass<typeof OpenAI>).prototype.chat = {
-            completions: { create: mockCreate }
-        } as any
+            expect(chunks.length).toBeGreaterThan(0);
+            const textChunks = chunks.filter(chunk => chunk.type === 'text');
+            expect(textChunks).toHaveLength(1);
+            expect(textChunks[0].text).toBe('Test response');
+        });
+
+        it('should include usage information', async () => {
+            const stream = handler.createMessage(systemPrompt, messages);
+            const chunks: any[] = [];
+            for await (const chunk of stream) {
+                chunks.push(chunk);
+            }
 
-        const generator = handler.createMessage('test', [])
-        await expect(generator.next()).rejects.toThrow('API Error')
-    })
-})
+            const usageChunks = chunks.filter(chunk => chunk.type === 'usage');
+            expect(usageChunks.length).toBeGreaterThan(0);
+            expect(usageChunks[0].inputTokens).toBe(10);
+            expect(usageChunks[0].outputTokens).toBe(5);
+        });
+    });
+});

+ 154 - 0
src/api/providers/__tests__/gemini.test.ts

@@ -0,0 +1,154 @@
+import { GeminiHandler } from '../gemini';
+import { Anthropic } from '@anthropic-ai/sdk';
+import { GoogleGenerativeAI } from '@google/generative-ai';
+
+// Mock the Google Generative AI SDK
+jest.mock('@google/generative-ai', () => ({
+    GoogleGenerativeAI: jest.fn().mockImplementation(() => ({
+        getGenerativeModel: jest.fn().mockReturnValue({
+            generateContentStream: jest.fn()
+        })
+    }))
+}));
+
+describe('GeminiHandler', () => {
+    let handler: GeminiHandler;
+
+    beforeEach(() => {
+        handler = new GeminiHandler({
+            apiKey: 'test-key',
+            apiModelId: 'gemini-2.0-flash-thinking-exp-1219',
+            geminiApiKey: 'test-key'
+        });
+    });
+
+    describe('constructor', () => {
+        it('should initialize with provided config', () => {
+            expect(handler['options'].geminiApiKey).toBe('test-key');
+            expect(handler['options'].apiModelId).toBe('gemini-2.0-flash-thinking-exp-1219');
+        });
+
+        it('should throw if API key is missing', () => {
+            expect(() => {
+                new GeminiHandler({
+                    apiModelId: 'gemini-2.0-flash-thinking-exp-1219',
+                    geminiApiKey: ''
+                });
+            }).toThrow('API key is required for Google Gemini');
+        });
+    });
+
+    describe('createMessage', () => {
+        const mockMessages: Anthropic.Messages.MessageParam[] = [
+            {
+                role: 'user',
+                content: 'Hello'
+            },
+            {
+                role: 'assistant',
+                content: 'Hi there!'
+            }
+        ];
+
+        const systemPrompt = 'You are a helpful assistant';
+
+        it('should handle text messages correctly', async () => {
+            // Mock the stream response
+            const mockStream = {
+                stream: [
+                    { text: () => 'Hello' },
+                    { text: () => ' world!' }
+                ],
+                response: {
+                    usageMetadata: {
+                        promptTokenCount: 10,
+                        candidatesTokenCount: 5
+                    }
+                }
+            };
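+            // This loosely mirrors the SDK's GenerateContentStreamResult: an
+            // iterable `stream` of chunks plus a `response` with usage metadata.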
+
+            // Setup the mock implementation
+            const mockGenerateContentStream = jest.fn().mockResolvedValue(mockStream);
+            const mockGetGenerativeModel = jest.fn().mockReturnValue({
+                generateContentStream: mockGenerateContentStream
+            });
+
+            (handler['client'] as any).getGenerativeModel = mockGetGenerativeModel;
+
+            const stream = handler.createMessage(systemPrompt, mockMessages);
+            const chunks = [];
+            
+            for await (const chunk of stream) {
+                chunks.push(chunk);
+            }
+
+            // Should have 3 chunks: 'Hello', ' world!', and usage info
+            expect(chunks.length).toBe(3);
+            expect(chunks[0]).toEqual({
+                type: 'text',
+                text: 'Hello'
+            });
+            expect(chunks[1]).toEqual({
+                type: 'text',
+                text: ' world!'
+            });
+            expect(chunks[2]).toEqual({
+                type: 'usage',
+                inputTokens: 10,
+                outputTokens: 5
+            });
+
+            // Verify the model configuration
+            expect(mockGetGenerativeModel).toHaveBeenCalledWith({
+                model: 'gemini-2.0-flash-thinking-exp-1219',
+                systemInstruction: systemPrompt
+            });
+
+            // Verify generation config
+            expect(mockGenerateContentStream).toHaveBeenCalledWith(
+                expect.objectContaining({
+                    generationConfig: {
+                        temperature: 0
+                    }
+                })
+            );
+        });
+
+        it('should handle API errors', async () => {
+            const mockError = new Error('Gemini API error');
+            const mockGenerateContentStream = jest.fn().mockRejectedValue(mockError);
+            const mockGetGenerativeModel = jest.fn().mockReturnValue({
+                generateContentStream: mockGenerateContentStream
+            });
+
+            (handler['client'] as any).getGenerativeModel = mockGetGenerativeModel;
+
+            const stream = handler.createMessage(systemPrompt, mockMessages);
+
+            await expect(async () => {
+                for await (const chunk of stream) {
+                    // Should throw before yielding any chunks
+                }
+            }).rejects.toThrow('Gemini API error');
+        });
+    });
+
+    describe('getModel', () => {
+        it('should return correct model info', () => {
+            const modelInfo = handler.getModel();
+            expect(modelInfo.id).toBe('gemini-2.0-flash-thinking-exp-1219');
+            expect(modelInfo.info).toBeDefined();
+            expect(modelInfo.info.maxTokens).toBe(8192);
+            expect(modelInfo.info.contextWindow).toBe(32_767);
+        });
+
+        it('should return default model if invalid model specified', () => {
+            const invalidHandler = new GeminiHandler({
+                apiModelId: 'invalid-model',
+                geminiApiKey: 'test-key'
+            });
+            const modelInfo = invalidHandler.getModel();
+            expect(modelInfo.id).toBe('gemini-2.0-flash-thinking-exp-1219'); // Default model
+        });
+    });
+});

+ 148 - 0
src/api/providers/__tests__/lmstudio.test.ts

@@ -0,0 +1,148 @@
+import { LmStudioHandler } from '../lmstudio';
+import { Anthropic } from '@anthropic-ai/sdk';
+import OpenAI from 'openai';
+
+// Mock OpenAI SDK
+jest.mock('openai', () => ({
+    __esModule: true,
+    default: jest.fn().mockImplementation(() => ({
+        chat: {
+            completions: {
+                create: jest.fn()
+            }
+        }
+    }))
+}));
+
+describe('LmStudioHandler', () => {
+    let handler: LmStudioHandler;
+
+    beforeEach(() => {
+        handler = new LmStudioHandler({
+            lmStudioModelId: 'mistral-7b',
+            lmStudioBaseUrl: 'http://localhost:1234'
+        });
+    });
+
+    describe('constructor', () => {
+        it('should initialize with provided config', () => {
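+            // The OpenAI client requires some apiKey, but LM Studio's local
+            // server ignores it, hence the 'noop' placeholder.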
+            expect(OpenAI).toHaveBeenCalledWith({
+                baseURL: 'http://localhost:1234/v1',
+                apiKey: 'noop'
+            });
+        });
+
+        it('should use default base URL if not provided', () => {
+            const defaultHandler = new LmStudioHandler({
+                lmStudioModelId: 'mistral-7b'
+            });
+
+            expect(OpenAI).toHaveBeenCalledWith({
+                baseURL: 'http://localhost:1234/v1',
+                apiKey: 'noop'
+            });
+        });
+    });
+
+    describe('createMessage', () => {
+        const mockMessages: Anthropic.Messages.MessageParam[] = [
+            {
+                role: 'user',
+                content: 'Hello'
+            },
+            {
+                role: 'assistant',
+                content: 'Hi there!'
+            }
+        ];
+
+        const systemPrompt = 'You are a helpful assistant';
+
+        it('should handle streaming responses correctly', async () => {
+            const mockStream = [
+                {
+                    choices: [{
+                        delta: { content: 'Hello' }
+                    }]
+                },
+                {
+                    choices: [{
+                        delta: { content: ' world!' }
+                    }]
+                }
+            ];
+
+            // Setup async iterator for mock stream
+            const asyncIterator = {
+                async *[Symbol.asyncIterator]() {
+                    for (const chunk of mockStream) {
+                        yield chunk;
+                    }
+                }
+            };
+
+            const mockCreate = jest.fn().mockResolvedValue(asyncIterator);
+            (handler['client'].chat.completions as any).create = mockCreate;
+
+            const stream = handler.createMessage(systemPrompt, mockMessages);
+            const chunks = [];
+            
+            for await (const chunk of stream) {
+                chunks.push(chunk);
+            }
+
+            expect(chunks.length).toBe(2);
+            expect(chunks[0]).toEqual({
+                type: 'text',
+                text: 'Hello'
+            });
+            expect(chunks[1]).toEqual({
+                type: 'text',
+                text: ' world!'
+            });
+
+            expect(mockCreate).toHaveBeenCalledWith({
+                model: 'mistral-7b',
+                messages: expect.arrayContaining([
+                    {
+                        role: 'system',
+                        content: systemPrompt
+                    }
+                ]),
+                temperature: 0,
+                stream: true
+            });
+        });
+
+        it('should handle API errors with custom message', async () => {
+            const mockError = new Error('LM Studio API error');
+            const mockCreate = jest.fn().mockRejectedValue(mockError);
+            (handler['client'].chat.completions as any).create = mockCreate;
+
+            const stream = handler.createMessage(systemPrompt, mockMessages);
+
+            await expect(async () => {
+                for await (const chunk of stream) {
+                    // Should throw before yielding any chunks
+                }
+            }).rejects.toThrow('Please check the LM Studio developer logs to debug what went wrong');
+        });
+    });
+
+    describe('getModel', () => {
+        it('should return model info with sane defaults', () => {
+            const modelInfo = handler.getModel();
+            expect(modelInfo.id).toBe('mistral-7b');
+            expect(modelInfo.info).toBeDefined();
+            expect(modelInfo.info.maxTokens).toBe(-1);
+            expect(modelInfo.info.contextWindow).toBe(128_000);
+        });
+
+        it('should return empty string as model ID if not provided', () => {
+            const noModelHandler = new LmStudioHandler({});
+            const modelInfo = noModelHandler.getModel();
+            expect(modelInfo.id).toBe('');
+            expect(modelInfo.info).toBeDefined();
+        });
+    });
+});

+ 148 - 0
src/api/providers/__tests__/ollama.test.ts

@@ -0,0 +1,148 @@
+import { OllamaHandler } from '../ollama';
+import { Anthropic } from '@anthropic-ai/sdk';
+import OpenAI from 'openai';
+
+// Mock OpenAI SDK
+jest.mock('openai', () => ({
+    __esModule: true,
+    default: jest.fn().mockImplementation(() => ({
+        chat: {
+            completions: {
+                create: jest.fn()
+            }
+        }
+    }))
+}));
+
+describe('OllamaHandler', () => {
+    let handler: OllamaHandler;
+
+    beforeEach(() => {
+        handler = new OllamaHandler({
+            ollamaModelId: 'llama2',
+            ollamaBaseUrl: 'http://localhost:11434'
+        });
+    });
+
+    describe('constructor', () => {
+        it('should initialize with provided config', () => {
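+            // Ollama's OpenAI-compatible endpoint doesn't check the key; the
+            // 'ollama' value only satisfies the client constructor.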
+            expect(OpenAI).toHaveBeenCalledWith({
+                baseURL: 'http://localhost:11434/v1',
+                apiKey: 'ollama'
+            });
+        });
+
+        it('should use default base URL if not provided', () => {
+            const defaultHandler = new OllamaHandler({
+                ollamaModelId: 'llama2'
+            });
+
+            expect(OpenAI).toHaveBeenCalledWith({
+                baseURL: 'http://localhost:11434/v1',
+                apiKey: 'ollama'
+            });
+        });
+    });
+
+    describe('createMessage', () => {
+        const mockMessages: Anthropic.Messages.MessageParam[] = [
+            {
+                role: 'user',
+                content: 'Hello'
+            },
+            {
+                role: 'assistant',
+                content: 'Hi there!'
+            }
+        ];
+
+        const systemPrompt = 'You are a helpful assistant';
+
+        it('should handle streaming responses correctly', async () => {
+            const mockStream = [
+                {
+                    choices: [{
+                        delta: { content: 'Hello' }
+                    }]
+                },
+                {
+                    choices: [{
+                        delta: { content: ' world!' }
+                    }]
+                }
+            ];
+
+            // Setup async iterator for mock stream
+            const asyncIterator = {
+                async *[Symbol.asyncIterator]() {
+                    for (const chunk of mockStream) {
+                        yield chunk;
+                    }
+                }
+            };
+
+            const mockCreate = jest.fn().mockResolvedValue(asyncIterator);
+            (handler['client'].chat.completions as any).create = mockCreate;
+
+            const stream = handler.createMessage(systemPrompt, mockMessages);
+            const chunks = [];
+            
+            for await (const chunk of stream) {
+                chunks.push(chunk);
+            }
+
+            expect(chunks.length).toBe(2);
+            expect(chunks[0]).toEqual({
+                type: 'text',
+                text: 'Hello'
+            });
+            expect(chunks[1]).toEqual({
+                type: 'text',
+                text: ' world!'
+            });
+
+            expect(mockCreate).toHaveBeenCalledWith({
+                model: 'llama2',
+                messages: expect.arrayContaining([
+                    {
+                        role: 'system',
+                        content: systemPrompt
+                    }
+                ]),
+                temperature: 0,
+                stream: true
+            });
+        });
+
+        it('should handle API errors', async () => {
+            const mockError = new Error('Ollama API error');
+            const mockCreate = jest.fn().mockRejectedValue(mockError);
+            (handler['client'].chat.completions as any).create = mockCreate;
+
+            const stream = handler.createMessage(systemPrompt, mockMessages);
+
+            await expect(async () => {
+                for await (const chunk of stream) {
+                    // Should throw before yielding any chunks
+                }
+            }).rejects.toThrow('Ollama API error');
+        });
+    });
+
+    describe('getModel', () => {
+        it('should return model info with sane defaults', () => {
+            const modelInfo = handler.getModel();
+            expect(modelInfo.id).toBe('llama2');
+            expect(modelInfo.info).toBeDefined();
+            expect(modelInfo.info.maxTokens).toBe(-1);
+            expect(modelInfo.info.contextWindow).toBe(128_000);
+        });
+
+        it('should return empty string as model ID if not provided', () => {
+            const noModelHandler = new OllamaHandler({});
+            const modelInfo = noModelHandler.getModel();
+            expect(modelInfo.id).toBe('');
+            expect(modelInfo.info).toBeDefined();
+        });
+    });
+});

+ 230 - 0
src/api/providers/__tests__/openai-native.test.ts

@@ -0,0 +1,230 @@
+import { OpenAiNativeHandler } from "../openai-native"
+import OpenAI from "openai"
+import { ApiHandlerOptions, openAiNativeDefaultModelId } from "../../../shared/api"
+import { Anthropic } from "@anthropic-ai/sdk"
+
+// Mock OpenAI
+jest.mock("openai")
+
+describe("OpenAiNativeHandler", () => {
+    let handler: OpenAiNativeHandler
+    let mockOptions: ApiHandlerOptions
+    let mockOpenAIClient: jest.Mocked<OpenAI>
+    let mockCreate: jest.Mock
+
+    beforeEach(() => {
+        // Reset mocks
+        jest.clearAllMocks()
+
+        // Setup mock options
+        mockOptions = {
+            openAiNativeApiKey: "test-api-key",
+            apiModelId: "gpt-4o", // Use the correct model ID from shared/api.ts
+        }
+
+        // Setup mock create function
+        mockCreate = jest.fn()
+
+        // Setup mock OpenAI client
+        mockOpenAIClient = {
+            chat: {
+                completions: {
+                    create: mockCreate,
+                },
+            },
+        } as unknown as jest.Mocked<OpenAI>
+
+        // Mock OpenAI constructor
+        ;(OpenAI as jest.MockedClass<typeof OpenAI>).mockImplementation(() => mockOpenAIClient)
+
+        // Create handler instance
+        handler = new OpenAiNativeHandler(mockOptions)
+    })
+
+    describe("constructor", () => {
+        it("should initialize with provided options", () => {
+            expect(OpenAI).toHaveBeenCalledWith({
+                apiKey: mockOptions.openAiNativeApiKey,
+            })
+        })
+    })
+
+    describe("getModel", () => {
+        it("should return specified model when valid", () => {
+            const result = handler.getModel()
+            expect(result.id).toBe("gpt-4o") // Use the correct model ID
+        })
+
+        it("should return default model when model ID is invalid", () => {
+            handler = new OpenAiNativeHandler({
+                ...mockOptions,
+                apiModelId: "invalid-model" as any,
+            })
+            const result = handler.getModel()
+            expect(result.id).toBe(openAiNativeDefaultModelId)
+        })
+
+        it("should return default model when model ID is not provided", () => {
+            handler = new OpenAiNativeHandler({
+                ...mockOptions,
+                apiModelId: undefined,
+            })
+            const result = handler.getModel()
+            expect(result.id).toBe(openAiNativeDefaultModelId)
+        })
+    })
+
+    describe("createMessage", () => {
+        const systemPrompt = "You are a helpful assistant"
+        const messages: Anthropic.Messages.MessageParam[] = [
+            { role: "user", content: "Hello" },
+        ]
+
+        describe("o1 models", () => {
+            beforeEach(() => {
+                handler = new OpenAiNativeHandler({
+                    ...mockOptions,
+                    apiModelId: "o1-preview",
+                })
+            })
+
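+            // o1 models accept neither streaming nor a "system" role, so the
+            // handler awaits a single completion and passes the system prompt
+            // as a user message (asserted below).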
+            it("should handle non-streaming response for o1 models", async () => {
+                const mockResponse = {
+                    choices: [{ message: { content: "Hello there!" } }],
+                    usage: {
+                        prompt_tokens: 10,
+                        completion_tokens: 5,
+                    },
+                }
+
+                mockCreate.mockResolvedValueOnce(mockResponse)
+
+                const generator = handler.createMessage(systemPrompt, messages)
+                const results = []
+                for await (const result of generator) {
+                    results.push(result)
+                }
+
+                expect(results).toEqual([
+                    { type: "text", text: "Hello there!" },
+                    { type: "usage", inputTokens: 10, outputTokens: 5 },
+                ])
+
+                expect(mockCreate).toHaveBeenCalledWith({
+                    model: "o1-preview",
+                    messages: [
+                        { role: "user", content: systemPrompt },
+                        { role: "user", content: "Hello" },
+                    ],
+                })
+            })
+
+            it("should handle missing content in response", async () => {
+                const mockResponse = {
+                    choices: [{ message: { content: null } }],
+                    usage: null,
+                }
+
+                mockCreate.mockResolvedValueOnce(mockResponse)
+
+                const generator = handler.createMessage(systemPrompt, messages)
+                const results = []
+                for await (const result of generator) {
+                    results.push(result)
+                }
+
+                expect(results).toEqual([
+                    { type: "text", text: "" },
+                    { type: "usage", inputTokens: 0, outputTokens: 0 },
+                ])
+            })
+        })
+
+        describe("streaming models", () => {
+            beforeEach(() => {
+                handler = new OpenAiNativeHandler({
+                    ...mockOptions,
+                    apiModelId: "gpt-4o",
+                })
+            })
+
+            it("should handle streaming response", async () => {
+                const mockStream = [
+                    { choices: [{ delta: { content: "Hello" } }], usage: null },
+                    { choices: [{ delta: { content: " there" } }], usage: null },
+                    { choices: [{ delta: { content: "!" } }], usage: { prompt_tokens: 10, completion_tokens: 5 } },
+                ]
+
+                mockCreate.mockResolvedValueOnce(
+                    (async function* () {
+                        for (const chunk of mockStream) {
+                            yield chunk
+                        }
+                    })()
+                )
+
+                const generator = handler.createMessage(systemPrompt, messages)
+                const results = []
+                for await (const result of generator) {
+                    results.push(result)
+                }
+
+                expect(results).toEqual([
+                    { type: "text", text: "Hello" },
+                    { type: "text", text: " there" },
+                    { type: "text", text: "!" },
+                    { type: "usage", inputTokens: 10, outputTokens: 5 },
+                ])
+
+                expect(mockCreate).toHaveBeenCalledWith({
+                    model: "gpt-4o",
+                    temperature: 0,
+                    messages: [
+                        { role: "system", content: systemPrompt },
+                        { role: "user", content: "Hello" },
+                    ],
+                    stream: true,
+                    stream_options: { include_usage: true },
+                })
+            })
+
+            it("should handle empty delta content", async () => {
+                const mockStream = [
+                    { choices: [{ delta: {} }], usage: null },
+                    { choices: [{ delta: { content: null } }], usage: null },
+                    { choices: [{ delta: { content: "Hello" } }], usage: { prompt_tokens: 10, completion_tokens: 5 } },
+                ]
+
+                mockCreate.mockResolvedValueOnce(
+                    (async function* () {
+                        for (const chunk of mockStream) {
+                            yield chunk
+                        }
+                    })()
+                )
+
+                const generator = handler.createMessage(systemPrompt, messages)
+                const results = []
+                for await (const result of generator) {
+                    results.push(result)
+                }
+
+                expect(results).toEqual([
+                    { type: "text", text: "Hello" },
+                    { type: "usage", inputTokens: 10, outputTokens: 5 },
+                ])
+            })
+        })
+
+        it("should handle API errors", async () => {
+            mockCreate.mockRejectedValueOnce(new Error("API Error"))
+
+            const generator = handler.createMessage(systemPrompt, messages)
+            await expect(async () => {
+                for await (const _ of generator) {
+                    // consume generator
+                }
+            }).rejects.toThrow("API Error")
+        })
+    })
+})

+ 182 - 176
src/api/providers/__tests__/openai.test.ts

@@ -1,192 +1,198 @@
-import { OpenAiHandler } from '../openai'
-import { ApiHandlerOptions, openAiModelInfoSaneDefaults } from '../../../shared/api'
-import OpenAI, { AzureOpenAI } from 'openai'
-import { Anthropic } from '@anthropic-ai/sdk'
-
-// Mock dependencies
-jest.mock('openai')
-
-describe('OpenAiHandler', () => {
-    const mockOptions: ApiHandlerOptions = {
-        openAiApiKey: 'test-key',
-        openAiModelId: 'gpt-4',
-        openAiStreamingEnabled: true,
-        openAiBaseUrl: 'https://api.openai.com/v1'
-    }
-
-    beforeEach(() => {
-        jest.clearAllMocks()
-    })
-
-    test('constructor initializes with correct options', () => {
-        const handler = new OpenAiHandler(mockOptions)
-        expect(handler).toBeInstanceOf(OpenAiHandler)
-        expect(OpenAI).toHaveBeenCalledWith({
-            apiKey: mockOptions.openAiApiKey,
-            baseURL: mockOptions.openAiBaseUrl
-        })
-    })
-
-    test('constructor initializes Azure client when Azure URL is provided', () => {
-        const azureOptions: ApiHandlerOptions = {
-            ...mockOptions,
-            openAiBaseUrl: 'https://example.azure.com',
-            azureApiVersion: '2023-05-15'
-        }
-        const handler = new OpenAiHandler(azureOptions)
-        expect(handler).toBeInstanceOf(OpenAiHandler)
-        expect(AzureOpenAI).toHaveBeenCalledWith({
-            baseURL: azureOptions.openAiBaseUrl,
-            apiKey: azureOptions.openAiApiKey,
-            apiVersion: azureOptions.azureApiVersion
-        })
-    })
-
-    test('getModel returns correct model info', () => {
-        const handler = new OpenAiHandler(mockOptions)
-        const result = handler.getModel()
-        
-        expect(result).toEqual({
-            id: mockOptions.openAiModelId,
-            info: openAiModelInfoSaneDefaults
-        })
-    })
-
-    test('createMessage handles streaming correctly when enabled', async () => {
-        const handler = new OpenAiHandler({
-            ...mockOptions,
-            openAiStreamingEnabled: true,
-            includeMaxTokens: true
-        })
-        
-        const mockStream = {
-            async *[Symbol.asyncIterator]() {
-                yield {
-                    choices: [{
-                        delta: {
-                            content: 'test response'
+import { OpenAiHandler } from '../openai';
+import { ApiHandlerOptions } from '../../../shared/api';
+import { ApiStream } from '../../transform/stream';
+import OpenAI from 'openai';
+import { Anthropic } from '@anthropic-ai/sdk';
+
+// Mock OpenAI client
+const mockCreate = jest.fn();
+jest.mock('openai', () => {
+    return {
+        __esModule: true,
+        default: jest.fn().mockImplementation(() => ({
+            chat: {
+                completions: {
+                    create: mockCreate.mockImplementation(async (options) => {
+                        if (!options.stream) {
+                            return {
+                                id: 'test-completion',
+                                choices: [{
+                                    message: { role: 'assistant', content: 'Test response', refusal: null },
+                                    finish_reason: 'stop',
+                                    index: 0
+                                }],
+                                usage: {
+                                    prompt_tokens: 10,
+                                    completion_tokens: 5,
+                                    total_tokens: 15
+                                }
+                            };
                         }
-                    }],
-                    usage: {
-                        prompt_tokens: 10,
-                        completion_tokens: 5
-                    }
+                        
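+                        // Streaming branch: yield a content delta first, then a
+                        // final chunk carrying usage, as requested via
+                        // stream_options: { include_usage: true }.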
+                        return {
+                            [Symbol.asyncIterator]: async function* () {
+                                yield {
+                                    choices: [{
+                                        delta: { content: 'Test response' },
+                                        index: 0
+                                    }],
+                                    usage: null
+                                };
+                                yield {
+                                    choices: [{
+                                        delta: {},
+                                        index: 0
+                                    }],
+                                    usage: {
+                                        prompt_tokens: 10,
+                                        completion_tokens: 5,
+                                        total_tokens: 15
+                                    }
+                                };
+                            }
+                        };
+                    })
                 }
             }
-        }
+        }))
+    };
+});
 
-        const mockCreate = jest.fn().mockResolvedValue(mockStream)
-        ;(OpenAI as jest.MockedClass<typeof OpenAI>).prototype.chat = {
-            completions: { create: mockCreate }
-        } as any
+describe('OpenAiHandler', () => {
+    let handler: OpenAiHandler;
+    let mockOptions: ApiHandlerOptions;
 
-        const systemPrompt = 'test system prompt'
+    beforeEach(() => {
+        mockOptions = {
+            openAiApiKey: 'test-api-key',
+            openAiModelId: 'gpt-4',
+            openAiBaseUrl: 'https://api.openai.com/v1'
+        };
+        handler = new OpenAiHandler(mockOptions);
+        mockCreate.mockClear();
+    });
+
+    describe('constructor', () => {
+        it('should initialize with provided options', () => {
+            expect(handler).toBeInstanceOf(OpenAiHandler);
+            expect(handler.getModel().id).toBe(mockOptions.openAiModelId);
+        });
+
+        it('should use custom base URL if provided', () => {
+            const customBaseUrl = 'https://custom.openai.com/v1';
+            const handlerWithCustomUrl = new OpenAiHandler({
+                ...mockOptions,
+                openAiBaseUrl: customBaseUrl
+            });
+            expect(handlerWithCustomUrl).toBeInstanceOf(OpenAiHandler);
+        });
+    });
+
+    describe('createMessage', () => {
+        const systemPrompt = 'You are a helpful assistant.';
         const messages: Anthropic.Messages.MessageParam[] = [
-            { role: 'user', content: 'test message' }
-        ]
-
-        const generator = handler.createMessage(systemPrompt, messages)
-        const chunks = []
-        
-        for await (const chunk of generator) {
-            chunks.push(chunk)
-        }
-
-        expect(chunks).toEqual([
             {
-                type: 'text',
-                text: 'test response'
-            },
-            {
-                type: 'usage',
-                inputTokens: 10,
-                outputTokens: 5
+                role: 'user',
+                content: [{ 
+                    type: 'text' as const,
+                    text: 'Hello!'
+                }]
             }
-        ])
-
-        expect(mockCreate).toHaveBeenCalledWith({
-            model: mockOptions.openAiModelId,
-            messages: [
-                { role: 'system', content: systemPrompt },
-                { role: 'user', content: 'test message' }
-            ],
-            temperature: 0,
-            stream: true,
-            stream_options: { include_usage: true },
-            max_tokens: openAiModelInfoSaneDefaults.maxTokens
-        })
-    })
-
-    test('createMessage handles non-streaming correctly when disabled', async () => {
-        const handler = new OpenAiHandler({
-            ...mockOptions,
-            openAiStreamingEnabled: false
-        })
-        
-        const mockResponse = {
-            choices: [{
-                message: {
-                    content: 'test response'
-                }
-            }],
-            usage: {
-                prompt_tokens: 10,
-                completion_tokens: 5
+        ];
+
+        it('should handle non-streaming mode', async () => {
+            const handler = new OpenAiHandler({
+                ...mockOptions,
+                openAiStreamingEnabled: false
+            });
+
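+            // With streaming disabled, the handler should make one non-streaming
+            // completions call and emit a single text chunk plus a usage chunk.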
+            const stream = handler.createMessage(systemPrompt, messages);
+            const chunks: any[] = [];
+            for await (const chunk of stream) {
+                chunks.push(chunk);
             }
-        }
 
-        const mockCreate = jest.fn().mockResolvedValue(mockResponse)
-        ;(OpenAI as jest.MockedClass<typeof OpenAI>).prototype.chat = {
-            completions: { create: mockCreate }
-        } as any
-
-        const systemPrompt = 'test system prompt'
-        const messages: Anthropic.Messages.MessageParam[] = [
-            { role: 'user', content: 'test message' }
-        ]
+            expect(chunks.length).toBeGreaterThan(0);
+            const textChunk = chunks.find(chunk => chunk.type === 'text');
+            const usageChunk = chunks.find(chunk => chunk.type === 'usage');
+            
+            expect(textChunk).toBeDefined();
+            expect(textChunk?.text).toBe('Test response');
+            expect(usageChunk).toBeDefined();
+            expect(usageChunk?.inputTokens).toBe(10);
+            expect(usageChunk?.outputTokens).toBe(5);
+        });
+
+        it('should handle streaming responses', async () => {
+            const stream = handler.createMessage(systemPrompt, messages);
+            const chunks: any[] = [];
+            for await (const chunk of stream) {
+                chunks.push(chunk);
+            }
 
-        const generator = handler.createMessage(systemPrompt, messages)
-        const chunks = []
-        
-        for await (const chunk of generator) {
-            chunks.push(chunk)
-        }
+            expect(chunks.length).toBeGreaterThan(0);
+            const textChunks = chunks.filter(chunk => chunk.type === 'text');
+            expect(textChunks).toHaveLength(1);
+            expect(textChunks[0].text).toBe('Test response');
+        });
+    });
 
-        expect(chunks).toEqual([
+    describe('error handling', () => {
+        const testMessages: Anthropic.Messages.MessageParam[] = [
             {
-                type: 'text',
-                text: 'test response'
-            },
-            {
-                type: 'usage',
-                inputTokens: 10,
-                outputTokens: 5
-            }
-        ])
-
-        expect(mockCreate).toHaveBeenCalledWith({
-            model: mockOptions.openAiModelId,
-            messages: [
-                { role: 'user', content: systemPrompt },
-                { role: 'user', content: 'test message' }
-            ]
-        })
-    })
-
-    test('createMessage handles API errors', async () => {
-        const handler = new OpenAiHandler(mockOptions)
-        const mockStream = {
-            async *[Symbol.asyncIterator]() {
-                throw new Error('API Error')
+                role: 'user',
+                content: [{ 
+                    type: 'text' as const,
+                    text: 'Hello'
+                }]
             }
-        }
+        ];
+
+        it('should handle API errors', async () => {
+            mockCreate.mockRejectedValueOnce(new Error('API Error'));
 
-        const mockCreate = jest.fn().mockResolvedValue(mockStream)
-        ;(OpenAI as jest.MockedClass<typeof OpenAI>).prototype.chat = {
-            completions: { create: mockCreate }
-        } as any
+            const stream = handler.createMessage('system prompt', testMessages);
 
-        const generator = handler.createMessage('test', [])
-        await expect(generator.next()).rejects.toThrow('API Error')
-    })
-})
+            await expect(async () => {
+                for await (const chunk of stream) {
+                    // Should not reach here
+                }
+            }).rejects.toThrow('API Error');
+        });
+
+        it('should handle rate limiting', async () => {
+            const rateLimitError = new Error('Rate limit exceeded');
+            // Mimic the SDK's HTTP error shape by attaching a 429 status
+            (rateLimitError as any).status = 429;
+            mockCreate.mockRejectedValueOnce(rateLimitError);
+
+            const stream = handler.createMessage('system prompt', testMessages);
+
+            await expect(async () => {
+                for await (const chunk of stream) {
+                    // Should not reach here
+                }
+            }).rejects.toThrow('Rate limit exceeded');
+        });
+    });
+
+    describe('getModel', () => {
+        it('should return model info with sane defaults', () => {
+            const model = handler.getModel();
+            expect(model.id).toBe(mockOptions.openAiModelId);
+            expect(model.info).toBeDefined();
+            expect(model.info.contextWindow).toBe(128_000);
+            expect(model.info.supportsImages).toBe(true);
+        });
+
+        it('should handle undefined model ID', () => {
+            const handlerWithoutModel = new OpenAiHandler({
+                ...mockOptions,
+                openAiModelId: undefined
+            });
+            const model = handlerWithoutModel.getModel();
+            expect(model.id).toBe('');
+            expect(model.info).toBeDefined();
+        });
+    });
+});

+ 218 - 0
src/api/providers/__tests__/vertex.test.ts

@@ -0,0 +1,218 @@
+import { VertexHandler } from '../vertex';
+import { Anthropic } from '@anthropic-ai/sdk';
+import { AnthropicVertex } from '@anthropic-ai/vertex-sdk';
+
+// Mock Vertex SDK
+jest.mock('@anthropic-ai/vertex-sdk', () => ({
+    AnthropicVertex: jest.fn().mockImplementation(() => ({
+        messages: {
+            create: jest.fn()
+        }
+    }))
+}));
+
+describe('VertexHandler', () => {
+    let handler: VertexHandler;
+
+    beforeEach(() => {
+        handler = new VertexHandler({
+            apiModelId: 'claude-3-5-sonnet-v2@20241022',
+            vertexProjectId: 'test-project',
+            vertexRegion: 'us-central1'
+        });
+    });
+
+    describe('constructor', () => {
+        it('should initialize with provided config', () => {
+            expect(AnthropicVertex).toHaveBeenCalledWith({
+                projectId: 'test-project',
+                region: 'us-central1'
+            });
+        });
+    });
+
+    describe('createMessage', () => {
+        const mockMessages: Anthropic.Messages.MessageParam[] = [
+            {
+                role: 'user',
+                content: 'Hello'
+            },
+            {
+                role: 'assistant',
+                content: 'Hi there!'
+            }
+        ];
+
+        const systemPrompt = 'You are a helpful assistant';
+
+        it('should handle streaming responses correctly', async () => {
+            const mockStream = [
+                {
+                    type: 'message_start',
+                    message: {
+                        usage: {
+                            input_tokens: 10,
+                            output_tokens: 0
+                        }
+                    }
+                },
+                {
+                    type: 'content_block_start',
+                    index: 0,
+                    content_block: {
+                        type: 'text',
+                        text: 'Hello'
+                    }
+                },
+                {
+                    type: 'content_block_delta',
+                    delta: {
+                        type: 'text_delta',
+                        text: ' world!'
+                    }
+                },
+                {
+                    type: 'message_delta',
+                    usage: {
+                        output_tokens: 5
+                    }
+                }
+            ];
+
+            // Set up an async iterator for the mock stream
+            const asyncIterator = {
+                async *[Symbol.asyncIterator]() {
+                    for (const chunk of mockStream) {
+                        yield chunk;
+                    }
+                }
+            };
+
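+            // Swap the mock onto the handler's private client; bracket notation
+            // sidesteps TypeScript's visibility check in tests.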
+            const mockCreate = jest.fn().mockResolvedValue(asyncIterator);
+            (handler['client'].messages as any).create = mockCreate;
+
+            const stream = handler.createMessage(systemPrompt, mockMessages);
+            const chunks = [];
+            
+            for await (const chunk of stream) {
+                chunks.push(chunk);
+            }
+
+            expect(chunks.length).toBe(4);
+            expect(chunks[0]).toEqual({
+                type: 'usage',
+                inputTokens: 10,
+                outputTokens: 0
+            });
+            expect(chunks[1]).toEqual({
+                type: 'text',
+                text: 'Hello'
+            });
+            expect(chunks[2]).toEqual({
+                type: 'text',
+                text: ' world!'
+            });
+            expect(chunks[3]).toEqual({
+                type: 'usage',
+                inputTokens: 0,
+                outputTokens: 5
+            });
+
+            expect(mockCreate).toHaveBeenCalledWith({
+                model: 'claude-3-5-sonnet-v2@20241022',
+                max_tokens: 8192,
+                temperature: 0,
+                system: systemPrompt,
+                messages: mockMessages,
+                stream: true
+            });
+        });
+
+        it('should handle multiple content blocks with line breaks', async () => {
+            const mockStream = [
+                {
+                    type: 'content_block_start',
+                    index: 0,
+                    content_block: {
+                        type: 'text',
+                        text: 'First line'
+                    }
+                },
+                {
+                    type: 'content_block_start',
+                    index: 1,
+                    content_block: {
+                        type: 'text',
+                        text: 'Second line'
+                    }
+                }
+            ];
+
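+            // Two content_block_start events with no deltas: the handler is
+            // expected to insert a "\n" text chunk between blocks (asserted below).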
+            const asyncIterator = {
+                async *[Symbol.asyncIterator]() {
+                    for (const chunk of mockStream) {
+                        yield chunk;
+                    }
+                }
+            };
+
+            const mockCreate = jest.fn().mockResolvedValue(asyncIterator);
+            (handler['client'].messages as any).create = mockCreate;
+
+            const stream = handler.createMessage(systemPrompt, mockMessages);
+            const chunks = [];
+            
+            for await (const chunk of stream) {
+                chunks.push(chunk);
+            }
+
+            expect(chunks.length).toBe(3);
+            expect(chunks[0]).toEqual({
+                type: 'text',
+                text: 'First line'
+            });
+            expect(chunks[1]).toEqual({
+                type: 'text',
+                text: '\n'
+            });
+            expect(chunks[2]).toEqual({
+                type: 'text',
+                text: 'Second line'
+            });
+        });
+
+        it('should handle API errors', async () => {
+            const mockError = new Error('Vertex API error');
+            const mockCreate = jest.fn().mockRejectedValue(mockError);
+            (handler['client'].messages as any).create = mockCreate;
+
+            const stream = handler.createMessage(systemPrompt, mockMessages);
+
+            await expect(async () => {
+                for await (const chunk of stream) {
+                    // Should throw before yielding any chunks
+                }
+            }).rejects.toThrow('Vertex API error');
+        });
+    });
+
+    describe('getModel', () => {
+        it('should return correct model info', () => {
+            const modelInfo = handler.getModel();
+            expect(modelInfo.id).toBe('claude-3-5-sonnet-v2@20241022');
+            expect(modelInfo.info).toBeDefined();
+            expect(modelInfo.info.maxTokens).toBe(8192);
+            expect(modelInfo.info.contextWindow).toBe(200_000);
+        });
+
+        it('should return default model if invalid model specified', () => {
+            const invalidHandler = new VertexHandler({
+                apiModelId: 'invalid-model',
+                vertexProjectId: 'test-project',
+                vertexRegion: 'us-central1'
+            });
+            const modelInfo = invalidHandler.getModel();
+            expect(modelInfo.id).toBe('claude-3-5-sonnet-v2@20241022'); // Default model
+        });
+    });
+});

+ 257 - 0
src/api/transform/__tests__/openai-format.test.ts

@@ -0,0 +1,257 @@
+import { convertToOpenAiMessages, convertToAnthropicMessage } from '../openai-format';
+import { Anthropic } from '@anthropic-ai/sdk';
+import OpenAI from 'openai';
+
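+// Loosened completion type: lets fixtures omit Choice fields the tests don't
+// exercise, then cast back to ChatCompletion when invoking the converter.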
+type PartialChatCompletion = Omit<OpenAI.Chat.Completions.ChatCompletion, 'choices'> & {
+    choices: Array<Partial<OpenAI.Chat.Completions.ChatCompletion.Choice> & {
+        message: OpenAI.Chat.Completions.ChatCompletion.Choice['message'];
+        finish_reason: string;
+        index: number;
+    }>;
+};
+
+describe('OpenAI Format Transformations', () => {
+    describe('convertToOpenAiMessages', () => {
+        it('should convert simple text messages', () => {
+            const anthropicMessages: Anthropic.Messages.MessageParam[] = [
+                {
+                    role: 'user',
+                    content: 'Hello'
+                },
+                {
+                    role: 'assistant',
+                    content: 'Hi there!'
+                }
+            ];
+
+            const openAiMessages = convertToOpenAiMessages(anthropicMessages);
+            expect(openAiMessages).toHaveLength(2);
+            expect(openAiMessages[0]).toEqual({
+                role: 'user',
+                content: 'Hello'
+            });
+            expect(openAiMessages[1]).toEqual({
+                role: 'assistant',
+                content: 'Hi there!'
+            });
+        });
+
+        it('should handle messages with image content', () => {
+            const anthropicMessages: Anthropic.Messages.MessageParam[] = [
+                {
+                    role: 'user',
+                    content: [
+                        {
+                            type: 'text',
+                            text: 'What is in this image?'
+                        },
+                        {
+                            type: 'image',
+                            source: {
+                                type: 'base64',
+                                media_type: 'image/jpeg',
+                                data: 'base64data'
+                            }
+                        }
+                    ]
+                }
+            ];
+
+            const openAiMessages = convertToOpenAiMessages(anthropicMessages);
+            expect(openAiMessages).toHaveLength(1);
+            expect(openAiMessages[0].role).toBe('user');
+            
+            const content = openAiMessages[0].content as Array<{
+                type: string;
+                text?: string;
+                image_url?: { url: string };
+            }>;
+            
+            expect(Array.isArray(content)).toBe(true);
+            expect(content).toHaveLength(2);
+            expect(content[0]).toEqual({ type: 'text', text: 'What is in this image?' });
+            expect(content[1]).toEqual({
+                type: 'image_url',
+                image_url: { url: 'data:image/jpeg;base64,base64data' }
+            });
+        });
+
+        it('should handle assistant messages with tool use', () => {
+            const anthropicMessages: Anthropic.Messages.MessageParam[] = [
+                {
+                    role: 'assistant',
+                    content: [
+                        {
+                            type: 'text',
+                            text: 'Let me check the weather.'
+                        },
+                        {
+                            type: 'tool_use',
+                            id: 'weather-123',
+                            name: 'get_weather',
+                            input: { city: 'London' }
+                        }
+                    ]
+                }
+            ];
+
+            const openAiMessages = convertToOpenAiMessages(anthropicMessages);
+            expect(openAiMessages).toHaveLength(1);
+            
+            const assistantMessage = openAiMessages[0] as OpenAI.Chat.ChatCompletionAssistantMessageParam;
+            expect(assistantMessage.role).toBe('assistant');
+            expect(assistantMessage.content).toBe('Let me check the weather.');
+            expect(assistantMessage.tool_calls).toHaveLength(1);
+            expect(assistantMessage.tool_calls![0]).toEqual({
+                id: 'weather-123',
+                type: 'function',
+                function: {
+                    name: 'get_weather',
+                    arguments: JSON.stringify({ city: 'London' })
+                }
+            });
+        });
+
+        it('should handle user messages with tool results', () => {
+            const anthropicMessages: Anthropic.Messages.MessageParam[] = [
+                {
+                    role: 'user',
+                    content: [
+                        {
+                            type: 'tool_result',
+                            tool_use_id: 'weather-123',
+                            content: 'Current temperature in London: 20°C'
+                        }
+                    ]
+                }
+            ];
+
+            const openAiMessages = convertToOpenAiMessages(anthropicMessages);
+            expect(openAiMessages).toHaveLength(1);
+            
+            const toolMessage = openAiMessages[0] as OpenAI.Chat.ChatCompletionToolMessageParam;
+            expect(toolMessage.role).toBe('tool');
+            expect(toolMessage.tool_call_id).toBe('weather-123');
+            expect(toolMessage.content).toBe('Current temperature in London: 20°C');
+        });
+    });
+
+    describe('convertToAnthropicMessage', () => {
+        it('should convert simple completion', () => {
+            const openAiCompletion: PartialChatCompletion = {
+                id: 'completion-123',
+                model: 'gpt-4',
+                choices: [{
+                    message: {
+                        role: 'assistant',
+                        content: 'Hello there!',
+                        refusal: null
+                    },
+                    finish_reason: 'stop',
+                    index: 0
+                }],
+                usage: {
+                    prompt_tokens: 10,
+                    completion_tokens: 5,
+                    total_tokens: 15
+                },
+                created: 123456789,
+                object: 'chat.completion'
+            };
+
+            const anthropicMessage = convertToAnthropicMessage(openAiCompletion as OpenAI.Chat.Completions.ChatCompletion);
+            expect(anthropicMessage.id).toBe('completion-123');
+            expect(anthropicMessage.role).toBe('assistant');
+            expect(anthropicMessage.content).toHaveLength(1);
+            expect(anthropicMessage.content[0]).toEqual({
+                type: 'text',
+                text: 'Hello there!'
+            });
+            expect(anthropicMessage.stop_reason).toBe('end_turn');
+            expect(anthropicMessage.usage).toEqual({
+                input_tokens: 10,
+                output_tokens: 5
+            });
+        });
+
+        it('should handle tool calls in completion', () => {
+            const openAiCompletion: PartialChatCompletion = {
+                id: 'completion-123',
+                model: 'gpt-4',
+                choices: [{
+                    message: {
+                        role: 'assistant',
+                        content: 'Let me check the weather.',
+                        tool_calls: [{
+                            id: 'weather-123',
+                            type: 'function',
+                            function: {
+                                name: 'get_weather',
+                                arguments: '{"city":"London"}'
+                            }
+                        }],
+                        refusal: null
+                    },
+                    finish_reason: 'tool_calls',
+                    index: 0
+                }],
+                usage: {
+                    prompt_tokens: 15,
+                    completion_tokens: 8,
+                    total_tokens: 23
+                },
+                created: 123456789,
+                object: 'chat.completion'
+            };
+
+            const anthropicMessage = convertToAnthropicMessage(openAiCompletion as OpenAI.Chat.Completions.ChatCompletion);
+            expect(anthropicMessage.content).toHaveLength(2);
+            expect(anthropicMessage.content[0]).toEqual({
+                type: 'text',
+                text: 'Let me check the weather.'
+            });
+            expect(anthropicMessage.content[1]).toEqual({
+                type: 'tool_use',
+                id: 'weather-123',
+                name: 'get_weather',
+                input: { city: 'London' }
+            });
+            expect(anthropicMessage.stop_reason).toBe('tool_use');
+        });
+
+        it('should handle invalid tool call arguments', () => {
+            const openAiCompletion: PartialChatCompletion = {
+                id: 'completion-123',
+                model: 'gpt-4',
+                choices: [{
+                    message: {
+                        role: 'assistant',
+                        content: 'Testing invalid arguments',
+                        tool_calls: [{
+                            id: 'test-123',
+                            type: 'function',
+                            function: {
+                                name: 'test_function',
+                                arguments: 'invalid json'
+                            }
+                        }],
+                        refusal: null
+                    },
+                    finish_reason: 'tool_calls',
+                    index: 0
+                }],
+                created: 123456789,
+                object: 'chat.completion'
+            };
+
+            const anthropicMessage = convertToAnthropicMessage(openAiCompletion as OpenAI.Chat.Completions.ChatCompletion);
+            expect(anthropicMessage.content).toHaveLength(2);
+            expect(anthropicMessage.content[1]).toEqual({
+                type: 'tool_use',
+                id: 'test-123',
+                name: 'test_function',
+                input: {}  // Should default to empty object for invalid JSON
+            });
+        });
+    });
+});

+ 114 - 0
src/api/transform/__tests__/stream.test.ts

@@ -0,0 +1,114 @@
+import { ApiStreamChunk } from '../stream';
+
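+// These tests are largely compile-time checks: constructing each variant of
+// ApiStreamChunk verifies the discriminated union accepts the expected fields.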
+describe('API Stream Types', () => {
+    describe('ApiStreamChunk', () => {
+        it('should correctly handle text chunks', () => {
+            const textChunk: ApiStreamChunk = {
+                type: 'text',
+                text: 'Hello world'
+            };
+
+            expect(textChunk.type).toBe('text');
+            expect(textChunk.text).toBe('Hello world');
+        });
+
+        it('should correctly handle usage chunks with cache information', () => {
+            const usageChunk: ApiStreamChunk = {
+                type: 'usage',
+                inputTokens: 100,
+                outputTokens: 50,
+                cacheWriteTokens: 20,
+                cacheReadTokens: 10
+            };
+
+            expect(usageChunk.type).toBe('usage');
+            expect(usageChunk.inputTokens).toBe(100);
+            expect(usageChunk.outputTokens).toBe(50);
+            expect(usageChunk.cacheWriteTokens).toBe(20);
+            expect(usageChunk.cacheReadTokens).toBe(10);
+        });
+
+        it('should handle usage chunks without cache tokens', () => {
+            const usageChunk: ApiStreamChunk = {
+                type: 'usage',
+                inputTokens: 100,
+                outputTokens: 50
+            };
+
+            expect(usageChunk.type).toBe('usage');
+            expect(usageChunk.inputTokens).toBe(100);
+            expect(usageChunk.outputTokens).toBe(50);
+            expect(usageChunk.cacheWriteTokens).toBeUndefined();
+            expect(usageChunk.cacheReadTokens).toBeUndefined();
+        });
+
+        it('should handle text chunks with empty strings', () => {
+            const emptyTextChunk: ApiStreamChunk = {
+                type: 'text',
+                text: ''
+            };
+
+            expect(emptyTextChunk.type).toBe('text');
+            expect(emptyTextChunk.text).toBe('');
+        });
+
+        it('should handle usage chunks with zero tokens', () => {
+            const zeroUsageChunk: ApiStreamChunk = {
+                type: 'usage',
+                inputTokens: 0,
+                outputTokens: 0
+            };
+
+            expect(zeroUsageChunk.type).toBe('usage');
+            expect(zeroUsageChunk.inputTokens).toBe(0);
+            expect(zeroUsageChunk.outputTokens).toBe(0);
+        });
+
+        it('should handle usage chunks with large token counts', () => {
+            const largeUsageChunk: ApiStreamChunk = {
+                type: 'usage',
+                inputTokens: 1000000,
+                outputTokens: 500000,
+                cacheWriteTokens: 200000,
+                cacheReadTokens: 100000
+            };
+
+            expect(largeUsageChunk.type).toBe('usage');
+            expect(largeUsageChunk.inputTokens).toBe(1000000);
+            expect(largeUsageChunk.outputTokens).toBe(500000);
+            expect(largeUsageChunk.cacheWriteTokens).toBe(200000);
+            expect(largeUsageChunk.cacheReadTokens).toBe(100000);
+        });
+
+        it('should handle text chunks with special characters', () => {
+            const specialCharsChunk: ApiStreamChunk = {
+                type: 'text',
+                text: '!@#$%^&*()_+-=[]{}|;:,.<>?`~'
+            };
+
+            expect(specialCharsChunk.type).toBe('text');
+            expect(specialCharsChunk.text).toBe('!@#$%^&*()_+-=[]{}|;:,.<>?`~');
+        });
+
+        it('should handle text chunks with unicode characters', () => {
+            const unicodeChunk: ApiStreamChunk = {
+                type: 'text',
+                text: '你好世界👋🌍'
+            };
+
+            expect(unicodeChunk.type).toBe('text');
+            expect(unicodeChunk.text).toBe('你好世界👋🌍');
+        });
+
+        it('should handle text chunks with multiline content', () => {
+            const multilineChunk: ApiStreamChunk = {
+                type: 'text',
+                text: 'Line 1\nLine 2\nLine 3'
+            };
+
+            expect(multilineChunk.type).toBe('text');
+            expect(multilineChunk.text).toBe('Line 1\nLine 2\nLine 3');
+            expect(multilineChunk.text.split('\n')).toHaveLength(3);
+        });
+    });
+});

+ 2 - 1
src/core/__tests__/Cline.test.ts

@@ -252,7 +252,8 @@ describe('Cline', () => {
         // Setup mock API configuration
         mockApiConfig = {
             apiProvider: 'anthropic',
-            apiModelId: 'claude-3-5-sonnet-20241022'
+            apiModelId: 'claude-3-5-sonnet-20241022',
+            apiKey: 'test-api-key'  // Add API key to mock config
         };
 
         // Mock provider methods

+ 229 - 0
src/integrations/terminal/__tests__/TerminalProcess.test.ts

@@ -0,0 +1,229 @@
+import { TerminalProcess, mergePromise } from "../TerminalProcess"
+import * as vscode from "vscode"
+import { EventEmitter } from "events"
+
+// Mock vscode
+jest.mock("vscode")
+
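+// With no factory argument, jest.mock auto-mocks the module; the tests build
+// the Terminal shape they need by hand in beforeEach.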
+describe("TerminalProcess", () => {
+    let terminalProcess: TerminalProcess
+    let mockTerminal: jest.Mocked<vscode.Terminal & {
+        shellIntegration: {
+            executeCommand: jest.Mock
+        }
+    }>
+    let mockExecution: any
+    let mockStream: AsyncIterableIterator<string>
+
+    beforeEach(() => {
+        terminalProcess = new TerminalProcess()
+        
+        // Create properly typed mock terminal
+        mockTerminal = {
+            shellIntegration: {
+                executeCommand: jest.fn()
+            },
+            name: "Mock Terminal",
+            processId: Promise.resolve(123),
+            creationOptions: {},
+            exitStatus: undefined,
+            state: { isInteractedWith: true },
+            dispose: jest.fn(),
+            hide: jest.fn(),
+            show: jest.fn(),
+            sendText: jest.fn()
+        } as unknown as jest.Mocked<vscode.Terminal & {
+            shellIntegration: {
+                executeCommand: jest.Mock
+            }
+        }>
+
+        // Reset event listeners
+        terminalProcess.removeAllListeners()
+    })
+
+    describe("run", () => {
+        it("handles shell integration commands correctly", async () => {
+            const lines: string[] = []
+            terminalProcess.on("line", (line) => {
+                // Skip empty lines used for loading spinner
+                if (line !== "") {
+                    lines.push(line)
+                }
+            })
+
+            // Mock stream data; shell-integration escape sequences are omitted
+            // because the test only exercises line buffering and emission
+            mockStream = (async function* () {
+                yield "Initial output\n"
+                yield "More output\n"
+                yield "Final output"
+            })()
+
+            mockExecution = {
+                read: jest.fn().mockReturnValue(mockStream)
+            }
+
+            mockTerminal.shellIntegration.executeCommand.mockReturnValue(mockExecution)
+
+            const completedPromise = new Promise<void>((resolve) => {
+                terminalProcess.once("completed", resolve)
+            })
+
+            await terminalProcess.run(mockTerminal, "test command")
+            await completedPromise
+
+            expect(lines).toEqual(["Initial output", "More output", "Final output"])
+            expect(terminalProcess.isHot).toBe(false)
+        })
+
+        it("handles terminals without shell integration", async () => {
+            const noShellTerminal = {
+                sendText: jest.fn(),
+                shellIntegration: undefined
+            } as unknown as vscode.Terminal
+
+            const noShellPromise = new Promise<void>((resolve) => {
+                terminalProcess.once("no_shell_integration", resolve)
+            })
+
+            await terminalProcess.run(noShellTerminal, "test command")
+            await noShellPromise
+
+            expect(noShellTerminal.sendText).toHaveBeenCalledWith("test command", true)
+        })
+
+        it("sets hot state for compiling commands", async () => {
+            const lines: string[] = []
+            terminalProcess.on("line", (line) => {
+                if (line !== "") {
+                    lines.push(line)
+                }
+            })
+
+            // Create a promise that resolves when the first chunk is processed
+            const firstChunkProcessed = new Promise<void>(resolve => {
+                terminalProcess.on("line", () => resolve())
+            })
+
+            mockStream = (async function* () {
+                yield "compiling...\n"
+                // Wait to ensure hot state check happens after first chunk
+                await new Promise(resolve => setTimeout(resolve, 10))
+                yield "still compiling...\n"
+                yield "done"
+            })()
+
+            mockExecution = {
+                read: jest.fn().mockReturnValue(mockStream)
+            }
+
+            mockTerminal.shellIntegration.executeCommand.mockReturnValue(mockExecution)
+
+            // Start the command execution
+            const runPromise = terminalProcess.run(mockTerminal, "npm run build")
+            
+            // Wait for the first chunk to be processed
+            await firstChunkProcessed
+            
+            // Hot state should be true while compiling
+            expect(terminalProcess.isHot).toBe(true)
+
+            // Complete the execution
+            const completedPromise = new Promise<void>((resolve) => {
+                terminalProcess.once("completed", resolve)
+            })
+
+            await runPromise
+            await completedPromise
+
+            expect(lines).toEqual(["compiling...", "still compiling...", "done"])
+        })
+    })
+
+    describe("buffer processing", () => {
+        it("correctly processes and emits lines", () => {
+            const lines: string[] = []
+            terminalProcess.on("line", (line) => lines.push(line))
+
+            // Simulate incoming chunks
+            terminalProcess["emitIfEol"]("first line\n")
+            terminalProcess["emitIfEol"]("second")
+            terminalProcess["emitIfEol"](" line\n")
+            terminalProcess["emitIfEol"]("third line")
+
+            expect(lines).toEqual(["first line", "second line"])
+
+            // Process remaining buffer
+            terminalProcess["emitRemainingBufferIfListening"]()
+            expect(lines).toEqual(["first line", "second line", "third line"])
+        })
+
+        it("handles Windows-style line endings", () => {
+            const lines: string[] = []
+            terminalProcess.on("line", (line) => lines.push(line))
+
+            terminalProcess["emitIfEol"]("line1\r\nline2\r\n")
+
+            expect(lines).toEqual(["line1", "line2"])
+        })
+    })
+
+    describe("removeLastLineArtifacts", () => {
+        it("removes terminal artifacts from output", () => {
+            const cases = [
+                ["output%", "output"],
+                ["output$ ", "output"],
+                ["output#", "output"],
+                ["output> ", "output"],
+                ["multi\nline%", "multi\nline"],
+                ["no artifacts", "no artifacts"]
+            ]
+
+            for (const [input, expected] of cases) {
+                expect(terminalProcess["removeLastLineArtifacts"](input)).toBe(expected)
+            }
+        })
+    })
+
+    describe("continue", () => {
+        it("stops listening and emits continue event", () => {
+            const continueSpy = jest.fn()
+            terminalProcess.on("continue", continueSpy)
+
+            terminalProcess.continue()
+
+            expect(continueSpy).toHaveBeenCalled()
+            expect(terminalProcess["isListening"]).toBe(false)
+        })
+    })
+
+    describe("getUnretrievedOutput", () => {
+        it("returns and clears unretrieved output", () => {
+            terminalProcess["fullOutput"] = "previous\nnew output"
+            terminalProcess["lastRetrievedIndex"] = 9 // After "previous\n"
+
+            const unretrieved = terminalProcess.getUnretrievedOutput()
+
+            expect(unretrieved).toBe("new output")
+            expect(terminalProcess["lastRetrievedIndex"]).toBe(terminalProcess["fullOutput"].length)
+        })
+    })
+
+    describe("mergePromise", () => {
+        it("merges promise methods with terminal process", async () => {
+            const process = new TerminalProcess()
+            const promise = Promise.resolve()
+
+            const merged = mergePromise(process, promise)
+
+            expect(merged).toHaveProperty("then")
+            expect(merged).toHaveProperty("catch")
+            expect(merged).toHaveProperty("finally")
+            expect(merged instanceof TerminalProcess).toBe(true)
+
+            await expect(merged).resolves.toBeUndefined()
+        })
+    })
+})

+ 254 - 0
src/services/tree-sitter/__tests__/index.test.ts

@@ -0,0 +1,254 @@
+import { parseSourceCodeForDefinitionsTopLevel } from '../index';
+import { listFiles } from '../../glob/list-files';
+import { loadRequiredLanguageParsers } from '../languageParser';
+import { fileExistsAtPath } from '../../../utils/fs';
+import * as fs from 'fs/promises';
+import * as path from 'path';
+
+// Mock dependencies
+jest.mock('../../glob/list-files');
+jest.mock('../languageParser');
+jest.mock('../../../utils/fs');
+jest.mock('fs/promises');
+
+describe('Tree-sitter Service', () => {
+    beforeEach(() => {
+        jest.clearAllMocks();
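+        // Default: the target directory exists; individual tests override this.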
+        (fileExistsAtPath as jest.Mock).mockResolvedValue(true);
+    });
+
+    describe('parseSourceCodeForDefinitionsTopLevel', () => {
+        it('should handle non-existent directory', async () => {
+            (fileExistsAtPath as jest.Mock).mockResolvedValue(false);
+            
+            const result = await parseSourceCodeForDefinitionsTopLevel('/non/existent/path');
+            expect(result).toBe('This directory does not exist or you do not have permission to access it.');
+        });
+
+        it('should handle empty directory', async () => {
+            (listFiles as jest.Mock).mockResolvedValue([[], new Set()]);
+            
+            const result = await parseSourceCodeForDefinitionsTopLevel('/test/path');
+            expect(result).toBe('No source code definitions found.');
+        });
+
+        it('should parse TypeScript files correctly', async () => {
+            const mockFiles = [
+                '/test/path/file1.ts',
+                '/test/path/file2.tsx',
+                '/test/path/readme.md'
+            ];
+            
+            (listFiles as jest.Mock).mockResolvedValue([mockFiles, new Set()]);
+            
+            const mockParser = {
+                parse: jest.fn().mockReturnValue({
+                    rootNode: 'mockNode'
+                })
+            };
+            
+            const mockQuery = {
+                captures: jest.fn().mockReturnValue([
+                    {
+                        node: {
+                            startPosition: { row: 0 },
+                            endPosition: { row: 0 }
+                        },
+                        name: 'name.definition'
+                    }
+                ])
+            };
+
+            (loadRequiredLanguageParsers as jest.Mock).mockResolvedValue({
+                ts: { parser: mockParser, query: mockQuery },
+                tsx: { parser: mockParser, query: mockQuery }
+            });
+
+            (fs.readFile as jest.Mock).mockResolvedValue(
+                'export class TestClass {\n  constructor() {}\n}'
+            );
+
+            const result = await parseSourceCodeForDefinitionsTopLevel('/test/path');
+            
+            expect(result).toContain('file1.ts');
+            expect(result).toContain('file2.tsx');
+            expect(result).not.toContain('readme.md');
+            expect(result).toContain('export class TestClass');
+        });
+
+        it('should handle multiple definition types', async () => {
+            const mockFiles = ['/test/path/file.ts'];
+            (listFiles as jest.Mock).mockResolvedValue([mockFiles, new Set()]);
+            
+            const mockParser = {
+                parse: jest.fn().mockReturnValue({
+                    rootNode: 'mockNode'
+                })
+            };
+            
+            const mockQuery = {
+                captures: jest.fn().mockReturnValue([
+                    {
+                        node: {
+                            startPosition: { row: 0 },
+                            endPosition: { row: 0 }
+                        },
+                        name: 'name.definition.class'
+                    },
+                    {
+                        node: {
+                            startPosition: { row: 2 },
+                            endPosition: { row: 2 }
+                        },
+                        name: 'name.definition.function'
+                    }
+                ])
+            };
+
+            (loadRequiredLanguageParsers as jest.Mock).mockResolvedValue({
+                ts: { parser: mockParser, query: mockQuery }
+            });
+
+            const fileContent = 
+                'class TestClass {\n' +
+                '  constructor() {}\n' +
+                '  testMethod() {}\n' +
+                '}';
+            
+            (fs.readFile as jest.Mock).mockResolvedValue(fileContent);
+
+            const result = await parseSourceCodeForDefinitionsTopLevel('/test/path');
+            
+            expect(result).toContain('class TestClass');
+            expect(result).toContain('testMethod()');
+            expect(result).toContain('|----');
+        });
+
+        it('should handle parsing errors gracefully', async () => {
+            const mockFiles = ['/test/path/file.ts'];
+            (listFiles as jest.Mock).mockResolvedValue([mockFiles, new Set()]);
+            
+            const mockParser = {
+                parse: jest.fn().mockImplementation(() => {
+                    throw new Error('Parsing error');
+                })
+            };
+            
+            const mockQuery = {
+                captures: jest.fn()
+            };
+
+            (loadRequiredLanguageParsers as jest.Mock).mockResolvedValue({
+                ts: { parser: mockParser, query: mockQuery }
+            });
+
+            (fs.readFile as jest.Mock).mockResolvedValue('invalid code');
+
+            const result = await parseSourceCodeForDefinitionsTopLevel('/test/path');
+            expect(result).toBe('No source code definitions found.');
+        });
+
+        it('should respect file limit', async () => {
+            const mockFiles = Array(100).fill(0).map((_, i) => `/test/path/file${i}.ts`);
+            (listFiles as jest.Mock).mockResolvedValue([mockFiles, new Set()]);
+            
+            const mockParser = {
+                parse: jest.fn().mockReturnValue({
+                    rootNode: 'mockNode'
+                })
+            };
+            
+            const mockQuery = {
+                captures: jest.fn().mockReturnValue([])
+            };
+
+            (loadRequiredLanguageParsers as jest.Mock).mockResolvedValue({
+                ts: { parser: mockParser, query: mockQuery }
+            });
+
+            await parseSourceCodeForDefinitionsTopLevel('/test/path');
+            
+            // Should only process first 50 files
+            expect(mockParser.parse).toHaveBeenCalledTimes(50);
+        });
+
+        it('should handle various supported file extensions', async () => {
+            const mockFiles = [
+                '/test/path/script.js',
+                '/test/path/app.py',
+                '/test/path/main.rs',
+                '/test/path/program.cpp',
+                '/test/path/code.go'
+            ];
+            
+            (listFiles as jest.Mock).mockResolvedValue([mockFiles, new Set()]);
+            
+            const mockParser = {
+                parse: jest.fn().mockReturnValue({
+                    rootNode: 'mockNode'
+                })
+            };
+            
+            const mockQuery = {
+                captures: jest.fn().mockReturnValue([{
+                    node: {
+                        startPosition: { row: 0 },
+                        endPosition: { row: 0 }
+                    },
+                    name: 'name'
+                }])
+            };
+
+            (loadRequiredLanguageParsers as jest.Mock).mockResolvedValue({
+                js: { parser: mockParser, query: mockQuery },
+                py: { parser: mockParser, query: mockQuery },
+                rs: { parser: mockParser, query: mockQuery },
+                cpp: { parser: mockParser, query: mockQuery },
+                go: { parser: mockParser, query: mockQuery }
+            });
+
+            (fs.readFile as jest.Mock).mockResolvedValue('function test() {}');
+
+            const result = await parseSourceCodeForDefinitionsTopLevel('/test/path');
+            
+            expect(result).toContain('script.js');
+            expect(result).toContain('app.py');
+            expect(result).toContain('main.rs');
+            expect(result).toContain('program.cpp');
+            expect(result).toContain('code.go');
+        });
+
+        it('should normalize paths in output', async () => {
+            const mockFiles = ['/test/path/dir\\file.ts'];
+            (listFiles as jest.Mock).mockResolvedValue([mockFiles, new Set()]);
+            
+            const mockParser = {
+                parse: jest.fn().mockReturnValue({
+                    rootNode: 'mockNode'
+                })
+            };
+            
+            const mockQuery = {
+                captures: jest.fn().mockReturnValue([{
+                    node: {
+                        startPosition: { row: 0 },
+                        endPosition: { row: 0 }
+                    },
+                    name: 'name'
+                }])
+            };
+
+            (loadRequiredLanguageParsers as jest.Mock).mockResolvedValue({
+                ts: { parser: mockParser, query: mockQuery }
+            });
+
+            (fs.readFile as jest.Mock).mockResolvedValue('class Test {}');
+
+            const result = await parseSourceCodeForDefinitionsTopLevel('/test/path');
+            
+            // Should use forward slashes regardless of platform
+            expect(result).toContain('dir/file.ts');
+            expect(result).not.toContain('dir\\file.ts');
+        });
+    });
+});

+ 128 - 0
src/services/tree-sitter/__tests__/languageParser.test.ts

@@ -0,0 +1,128 @@
+import { loadRequiredLanguageParsers } from '../languageParser';
+import Parser from 'web-tree-sitter';
+
+// Mock web-tree-sitter
+const mockSetLanguage = jest.fn();
+jest.mock('web-tree-sitter', () => {
+    return {
+        __esModule: true,
+        default: jest.fn().mockImplementation(() => ({
+            setLanguage: mockSetLanguage
+        }))
+    };
+});
+
+// Add static methods to Parser mock
+const ParserMock = Parser as jest.MockedClass<typeof Parser>;
+ParserMock.init = jest.fn().mockResolvedValue(undefined);
+ParserMock.Language = {
+    load: jest.fn().mockResolvedValue({
+        query: jest.fn().mockReturnValue('mockQuery')
+    }),
+    prototype: {} // Add required prototype property
+} as unknown as typeof Parser.Language;
+
+describe('Language Parser', () => {
+    beforeEach(() => {
+        jest.clearAllMocks();
+    });
+
+    describe('loadRequiredLanguageParsers', () => {
+        it('should initialize parser only once', async () => {
+            const files = ['test.js', 'test2.js'];
+            await loadRequiredLanguageParsers(files);
+            await loadRequiredLanguageParsers(files);
+            
+            expect(ParserMock.init).toHaveBeenCalledTimes(1);
+        });
+
+        it('should load JavaScript parser for .js and .jsx files', async () => {
+            const files = ['test.js', 'test.jsx'];
+            const parsers = await loadRequiredLanguageParsers(files);
+            
+            expect(ParserMock.Language.load).toHaveBeenCalledWith(
+                expect.stringContaining('tree-sitter-javascript.wasm')
+            );
+            expect(parsers.js).toBeDefined();
+            expect(parsers.jsx).toBeDefined();
+            expect(parsers.js.query).toBeDefined();
+            expect(parsers.jsx.query).toBeDefined();
+        });
+
+        it('should load TypeScript parser for .ts and .tsx files', async () => {
+            const files = ['test.ts', 'test.tsx'];
+            const parsers = await loadRequiredLanguageParsers(files);
+            
+            expect(ParserMock.Language.load).toHaveBeenCalledWith(
+                expect.stringContaining('tree-sitter-typescript.wasm')
+            );
+            expect(ParserMock.Language.load).toHaveBeenCalledWith(
+                expect.stringContaining('tree-sitter-tsx.wasm')
+            );
+            expect(parsers.ts).toBeDefined();
+            expect(parsers.tsx).toBeDefined();
+        });
+
+        it('should load Python parser for .py files', async () => {
+            const files = ['test.py'];
+            const parsers = await loadRequiredLanguageParsers(files);
+            
+            expect(ParserMock.Language.load).toHaveBeenCalledWith(
+                expect.stringContaining('tree-sitter-python.wasm')
+            );
+            expect(parsers.py).toBeDefined();
+        });
+
+        it('should load multiple language parsers as needed', async () => {
+            const files = ['test.js', 'test.py', 'test.rs', 'test.go'];
+            const parsers = await loadRequiredLanguageParsers(files);
+            
+            expect(ParserMock.Language.load).toHaveBeenCalledTimes(4);
+            expect(parsers.js).toBeDefined();
+            expect(parsers.py).toBeDefined();
+            expect(parsers.rs).toBeDefined();
+            expect(parsers.go).toBeDefined();
+        });
+
+        it('should handle C/C++ files correctly', async () => {
+            const files = ['test.c', 'test.h', 'test.cpp', 'test.hpp'];
+            const parsers = await loadRequiredLanguageParsers(files);
+            
+            expect(ParserMock.Language.load).toHaveBeenCalledWith(
+                expect.stringContaining('tree-sitter-c.wasm')
+            );
+            expect(ParserMock.Language.load).toHaveBeenCalledWith(
+                expect.stringContaining('tree-sitter-cpp.wasm')
+            );
+            expect(parsers.c).toBeDefined();
+            expect(parsers.h).toBeDefined();
+            expect(parsers.cpp).toBeDefined();
+            expect(parsers.hpp).toBeDefined();
+        });
+
+        it('should throw error for unsupported file extensions', async () => {
+            const files = ['test.unsupported'];
+            
+            await expect(loadRequiredLanguageParsers(files)).rejects.toThrow(
+                'Unsupported language: unsupported'
+            );
+        });
+
+        it('should load each language only once for multiple files', async () => {
+            const files = ['test1.js', 'test2.js', 'test3.js'];
+            await loadRequiredLanguageParsers(files);
+            
+            expect(ParserMock.Language.load).toHaveBeenCalledTimes(1);
+            expect(ParserMock.Language.load).toHaveBeenCalledWith(
+                expect.stringContaining('tree-sitter-javascript.wasm')
+            );
+        });
+
+        it('should set language for each parser instance', async () => {
+            const files = ['test.js', 'test.py'];
+            await loadRequiredLanguageParsers(files);
+            
+            expect(mockSetLanguage).toHaveBeenCalledTimes(2);
+        });
+    });
+});
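
Read together, these assertions pin down the loader's contract: Parser.init() runs exactly once per process, each grammar is fetched as a tree-sitter-<language>.wasm file, one parser (with a compiled query) is built per file extension, and unknown extensions fail fast. A minimal sketch consistent with that contract follows; the helper names and the placeholder query string are illustrative, not the project's actual implementation.

    import * as path from 'path'
    import Parser from 'web-tree-sitter'

    // Illustrative only: inferred from the assertions above, not the real module.
    let isParserInitialized = false

    async function loadLanguage(name: string): Promise<Parser.Language> {
        return Parser.Language.load(path.join(__dirname, `tree-sitter-${name}.wasm`))
    }

    export async function loadRequiredLanguageParsers(files: string[]) {
        if (!isParserInitialized) {
            await Parser.init() // the first test asserts this runs exactly once
            isParserInitialized = true
        }
        // One entry per distinct extension, so repeated files load each wasm once.
        const extensions = new Set(files.map((f) => path.extname(f).slice(1)))
        const parsers: Record<string, { parser: Parser; query: Parser.Query }> = {}
        for (const ext of extensions) {
            let language: Parser.Language
            switch (ext) {
                case 'js':
                case 'jsx':
                    language = await loadLanguage('javascript')
                    break
                case 'ts':
                    language = await loadLanguage('typescript')
                    break
                case 'tsx':
                    language = await loadLanguage('tsx')
                    break
                case 'py':
                    language = await loadLanguage('python')
                    break
                case 'c':
                case 'h':
                    language = await loadLanguage('c')
                    break
                case 'cpp':
                case 'hpp':
                    language = await loadLanguage('cpp')
                    break
                // ...remaining grammars (rs, go, ...) follow the same pattern
                default:
                    throw new Error(`Unsupported language: ${ext}`)
            }
            const parser = new Parser()
            parser.setLanguage(language) // the tests count one setLanguage per extension
            // The real module presumably compiles a language-specific tag query here.
            parsers[ext] = { parser, query: language.query('') }
        }
        return parsers
    }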

+ 97 - 0
src/utils/__tests__/cost.test.ts

@@ -0,0 +1,97 @@
+import { calculateApiCost } from '../cost';
+import { ModelInfo } from '../../shared/api';
+
+describe('Cost Utility', () => {
+    describe('calculateApiCost', () => {
+        const mockModelInfo: ModelInfo = {
+            maxTokens: 8192,
+            contextWindow: 200_000,
+            supportsPromptCache: true,
+            inputPrice: 3.0,  // $3 per million tokens
+            outputPrice: 15.0, // $15 per million tokens
+            cacheWritesPrice: 3.75, // $3.75 per million tokens
+            cacheReadsPrice: 0.3,  // $0.30 per million tokens
+        };
+
+        it('should calculate basic input/output costs correctly', () => {
+            const cost = calculateApiCost(mockModelInfo, 1000, 500);
+            
+            // Input cost: (3.0 / 1_000_000) * 1000 = 0.003
+            // Output cost: (15.0 / 1_000_000) * 500 = 0.0075
+            // Total: 0.003 + 0.0075 = 0.0105
+            expect(cost).toBeCloseTo(0.0105, 6);
+        });
+
+        it('should handle cache writes cost', () => {
+            const cost = calculateApiCost(mockModelInfo, 1000, 500, 2000);
+            
+            // Input cost: (3.0 / 1_000_000) * 1000 = 0.003
+            // Output cost: (15.0 / 1_000_000) * 500 = 0.0075
+            // Cache writes: (3.75 / 1_000_000) * 2000 = 0.0075
+            // Total: 0.003 + 0.0075 + 0.0075 = 0.018
+            expect(cost).toBeCloseTo(0.018, 6);
+        });
+
+        it('should handle cache reads cost', () => {
+            const cost = calculateApiCost(mockModelInfo, 1000, 500, undefined, 3000);
+            
+            // Input cost: (3.0 / 1_000_000) * 1000 = 0.003
+            // Output cost: (15.0 / 1_000_000) * 500 = 0.0075
+            // Cache reads: (0.3 / 1_000_000) * 3000 = 0.0009
+            // Total: 0.003 + 0.0075 + 0.0009 = 0.0114
+            expect(cost).toBeCloseTo(0.0114, 6);
+        });
+
+        it('should handle all cost components together', () => {
+            const cost = calculateApiCost(mockModelInfo, 1000, 500, 2000, 3000);
+            
+            // Input cost: (3.0 / 1_000_000) * 1000 = 0.003
+            // Output cost: (15.0 / 1_000_000) * 500 = 0.0075
+            // Cache writes: (3.75 / 1_000_000) * 2000 = 0.0075
+            // Cache reads: (0.3 / 1_000_000) * 3000 = 0.0009
+            // Total: 0.003 + 0.0075 + 0.0075 + 0.0009 = 0.0189
+            expect(cost).toBeCloseTo(0.0189, 6);
+        });
+
+        it('should handle missing prices gracefully', () => {
+            const modelWithoutPrices: ModelInfo = {
+                maxTokens: 8192,
+                contextWindow: 200_000,
+                supportsPromptCache: true
+            };
+
+            const cost = calculateApiCost(modelWithoutPrices, 1000, 500, 2000, 3000);
+            expect(cost).toBe(0);
+        });
+
+        it('should handle zero tokens', () => {
+            const cost = calculateApiCost(mockModelInfo, 0, 0, 0, 0);
+            expect(cost).toBe(0);
+        });
+
+        it('should handle undefined cache values', () => {
+            const cost = calculateApiCost(mockModelInfo, 1000, 500);
+            
+            // Input cost: (3.0 / 1_000_000) * 1000 = 0.003
+            // Output cost: (15.0 / 1_000_000) * 500 = 0.0075
+            // Total: 0.003 + 0.0075 = 0.0105
+            expect(cost).toBeCloseTo(0.0105, 6);
+        });
+
+        it('should handle missing cache prices', () => {
+            const modelWithoutCachePrices: ModelInfo = {
+                ...mockModelInfo,
+                cacheWritesPrice: undefined,
+                cacheReadsPrice: undefined
+            };
+
+            const cost = calculateApiCost(modelWithoutCachePrices, 1000, 500, 2000, 3000);
+            
+            // Should only include input and output costs
+            // Input cost: (3.0 / 1_000_000) * 1000 = 0.003
+            // Output cost: (15.0 / 1_000_000) * 500 = 0.0075
+            // Total: 0.003 + 0.0075 = 0.0105
+            expect(cost).toBeCloseTo(0.0105, 6);
+        });
+    });
+});
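
The expected values here fully determine the formula: every price is dollars per million tokens, the two cache token counts are optional, and a missing price contributes zero. A sketch of calculateApiCost that satisfies all eight cases, inferred from the tests rather than copied from src/utils/cost.ts:

    import { ModelInfo } from '../shared/api'

    // Inferred from the test expectations; parameter names are illustrative.
    export function calculateApiCost(
        modelInfo: ModelInfo,
        inputTokens: number,
        outputTokens: number,
        cacheCreationInputTokens?: number,
        cacheReadInputTokens?: number,
    ): number {
        // Prices are per million tokens; absent prices and counts default to zero.
        const inputCost = ((modelInfo.inputPrice ?? 0) / 1_000_000) * inputTokens
        const outputCost = ((modelInfo.outputPrice ?? 0) / 1_000_000) * outputTokens
        const cacheWritesCost = ((modelInfo.cacheWritesPrice ?? 0) / 1_000_000) * (cacheCreationInputTokens ?? 0)
        const cacheReadsCost = ((modelInfo.cacheReadsPrice ?? 0) / 1_000_000) * (cacheReadInputTokens ?? 0)
        return inputCost + outputCost + cacheWritesCost + cacheReadsCost
    }

Note that sums such as 0.003 + 0.0075 + 0.0075 are not exactly representable in IEEE 754 doubles, which is why the assertions above use toBeCloseTo rather than exact equality.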

+ 336 - 0
src/utils/__tests__/git.test.ts

@@ -0,0 +1,336 @@
+import { jest } from '@jest/globals'
+import { searchCommits, getCommitInfo, getWorkingState } from '../git'
+import { ExecException } from 'child_process'
+
+type ExecFunction = (
+  command: string,
+  options: { cwd?: string },
+  callback: (error: ExecException | null, result?: { stdout: string; stderr: string }) => void
+) => void
+
+type PromisifiedExec = (command: string, options?: { cwd?: string }) => Promise<{ stdout: string; stderr: string }>
+
+// Mock child_process.exec
+jest.mock('child_process', () => ({
+  exec: jest.fn()
+}))
+
+// Mock util.promisify to return our own mock function
+jest.mock('util', () => ({
+  promisify: jest.fn((fn: ExecFunction): PromisifiedExec => {
+    return async (command: string, options?: { cwd?: string }) => {
+      // Delegate to the mocked exec so each test's mockImplementation still applies
+      return new Promise((resolve, reject) => {
+        fn(command, options || {}, (error: ExecException | null, result?: { stdout: string; stderr: string }) => {
+          if (error) {
+            reject(error)
+          } else {
+            resolve(result!)
+          }
+        })
+      })
+    }
+  })
+}))
+
+// Mock extract-text
+jest.mock('../../integrations/misc/extract-text', () => ({
+  truncateOutput: jest.fn(text => text)
+}))
+
+describe('git utils', () => {
+  // Get the mock with proper typing
+  const { exec } = jest.requireMock('child_process') as { exec: jest.MockedFunction<ExecFunction> }
+  const cwd = '/test/path'
+
+  beforeEach(() => {
+    jest.clearAllMocks()
+  })
+
+  describe('searchCommits', () => {
+    const mockCommitData = [
+      'abc123def456',
+      'abc123',
+      'fix: test commit',
+      'John Doe',
+      '2024-01-06',
+      'def456abc789',
+      'def456',
+      'feat: new feature',
+      'Jane Smith',
+      '2024-01-05'
+    ].join('\n')
+
+    it('should return commits when git is installed and repo exists', async () => {
+      // Set up mock responses
+      const responses = new Map([
+        ['git --version', { stdout: 'git version 2.39.2', stderr: '' }],
+        ['git rev-parse --git-dir', { stdout: '.git', stderr: '' }],
+        ['git log -n 10 --format="%H%n%h%n%s%n%an%n%ad" --date=short --grep="test" --regexp-ignore-case', { stdout: mockCommitData, stderr: '' }]
+      ])
+
+      exec.mockImplementation((command: string, options: { cwd?: string }, callback: Function) => {
+        // Find matching response
+        for (const [cmd, response] of responses) {
+          if (command === cmd) {
+            callback(null, response)
+            return
+          }
+        }
+        callback(new Error(`Unexpected command: ${command}`))
+      })
+
+      const result = await searchCommits('test', cwd)
+
+      // First verify the result is correct
+      expect(result).toHaveLength(2)
+      expect(result[0]).toEqual({
+        hash: 'abc123def456',
+        shortHash: 'abc123',
+        subject: 'fix: test commit',
+        author: 'John Doe',
+        date: '2024-01-06'
+      })
+
+      // Then verify all commands were called correctly
+      expect(exec).toHaveBeenCalledWith(
+        'git --version',
+        {},
+        expect.any(Function)
+      )
+      expect(exec).toHaveBeenCalledWith(
+        'git rev-parse --git-dir',
+        { cwd },
+        expect.any(Function)
+      )
+      expect(exec).toHaveBeenCalledWith(
+        'git log -n 10 --format="%H%n%h%n%s%n%an%n%ad" --date=short --grep="test" --regexp-ignore-case',
+        { cwd },
+        expect.any(Function)
+      )
+    }, 20000)
+
+    it('should return empty array when git is not installed', async () => {
+      exec.mockImplementation((command: string, options: { cwd?: string }, callback: Function) => {
+        if (command === 'git --version') {
+          callback(new Error('git not found'))
+          return
+        }
+        callback(new Error('Unexpected command'))
+      })
+
+      const result = await searchCommits('test', cwd)
+      expect(result).toEqual([])
+      expect(exec).toHaveBeenCalledWith('git --version', {}, expect.any(Function))
+    })
+
+    it('should return empty array when not in a git repository', async () => {
+      const responses = new Map([
+        ['git --version', { stdout: 'git version 2.39.2', stderr: '' }],
+        ['git rev-parse --git-dir', null] // a null entry means the callback should receive an error
+      ])
+
+      exec.mockImplementation((command: string, options: { cwd?: string }, callback: Function) => {
+        const response = responses.get(command)
+        if (response === null) {
+          callback(new Error('not a git repository'))
+        } else if (response) {
+          callback(null, response)
+        } else {
+          callback(new Error('Unexpected command'))
+        }
+      })
+
+      const result = await searchCommits('test', cwd)
+      expect(result).toEqual([])
+      expect(exec).toHaveBeenCalledWith('git --version', {}, expect.any(Function))
+      expect(exec).toHaveBeenCalledWith('git rev-parse --git-dir', { cwd }, expect.any(Function))
+    })
+
+    it('should handle hash search when grep search returns no results', async () => {
+      const responses = new Map([
+        ['git --version', { stdout: 'git version 2.39.2', stderr: '' }],
+        ['git rev-parse --git-dir', { stdout: '.git', stderr: '' }],
+        ['git log -n 10 --format="%H%n%h%n%s%n%an%n%ad" --date=short --grep="abc123" --regexp-ignore-case', { stdout: '', stderr: '' }],
+        ['git log -n 10 --format="%H%n%h%n%s%n%an%n%ad" --date=short --author-date-order abc123', { stdout: mockCommitData, stderr: '' }]
+      ])
+
+      exec.mockImplementation((command: string, options: { cwd?: string }, callback: Function) => {
+        for (const [cmd, response] of responses) {
+          if (command === cmd) {
+            callback(null, response)
+            return
+          }
+        }
+        callback(new Error('Unexpected command'))
+      })
+
+      const result = await searchCommits('abc123', cwd)
+      expect(result).toHaveLength(2)
+      expect(result[0]).toEqual({
+        hash: 'abc123def456',
+        shortHash: 'abc123',
+        subject: 'fix: test commit',
+        author: 'John Doe',
+        date: '2024-01-06'
+      })
+    })
+  })
+
+  describe('getCommitInfo', () => {
+    const mockCommitInfo = [
+      'abc123def456',
+      'abc123',
+      'fix: test commit',
+      'John Doe',
+      '2024-01-06',
+      'Detailed description'
+    ].join('\n')
+    const mockStats = '1 file changed, 2 insertions(+), 1 deletion(-)'
+    const mockDiff = '@@ -1,1 +1,2 @@\n-old line\n+new line'
+
+    it('should return formatted commit info', async () => {
+      const responses = new Map([
+        ['git --version', { stdout: 'git version 2.39.2', stderr: '' }],
+        ['git rev-parse --git-dir', { stdout: '.git', stderr: '' }],
+        ['git show --format="%H%n%h%n%s%n%an%n%ad%n%b" --no-patch abc123', { stdout: mockCommitInfo, stderr: '' }],
+        ['git show --stat --format="" abc123', { stdout: mockStats, stderr: '' }],
+        ['git show --format="" abc123', { stdout: mockDiff, stderr: '' }]
+      ])
+
+      exec.mockImplementation((command: string, options: { cwd?: string }, callback: Function) => {
+        for (const [cmd, response] of responses) {
+          if (command.startsWith(cmd)) {
+            callback(null, response)
+            return
+          }
+        }
+        callback(new Error('Unexpected command'))
+      })
+
+      const result = await getCommitInfo('abc123', cwd)
+      expect(result).toContain('Commit: abc123')
+      expect(result).toContain('Author: John Doe')
+      expect(result).toContain('Files Changed:')
+      expect(result).toContain('Full Changes:')
+    })
+
+    it('should return error message when git is not installed', async () => {
+      exec.mockImplementation((command: string, options: { cwd?: string }, callback: Function) => {
+        if (command === 'git --version') {
+          callback(new Error('git not found'))
+          return
+        }
+        callback(new Error('Unexpected command'))
+      })
+
+      const result = await getCommitInfo('abc123', cwd)
+      expect(result).toBe('Git is not installed')
+    })
+
+    it('should return error message when not in a git repository', async () => {
+      const responses = new Map([
+        ['git --version', { stdout: 'git version 2.39.2', stderr: '' }],
+        ['git rev-parse --git-dir', null] // a null entry means the callback should receive an error
+      ])
+
+      exec.mockImplementation((command: string, options: { cwd?: string }, callback: Function) => {
+        const response = responses.get(command)
+        if (response === null) {
+          callback(new Error('not a git repository'))
+        } else if (response) {
+          callback(null, response)
+        } else {
+          callback(new Error('Unexpected command'))
+        }
+      })
+
+      const result = await getCommitInfo('abc123', cwd)
+      expect(result).toBe('Not a git repository')
+    })
+  })
+
+  describe('getWorkingState', () => {
+    const mockStatus = ' M src/file1.ts\n?? src/file2.ts'
+    const mockDiff = '@@ -1,1 +1,2 @@\n-old line\n+new line'
+
+    it('should return working directory changes', async () => {
+      const responses = new Map([
+        ['git --version', { stdout: 'git version 2.39.2', stderr: '' }],
+        ['git rev-parse --git-dir', { stdout: '.git', stderr: '' }],
+        ['git status --short', { stdout: mockStatus, stderr: '' }],
+        ['git diff HEAD', { stdout: mockDiff, stderr: '' }]
+      ])
+
+      exec.mockImplementation((command: string, options: { cwd?: string }, callback: Function) => {
+        for (const [cmd, response] of responses) {
+          if (command === cmd) {
+            callback(null, response)
+            return
+          }
+        }
+        callback(new Error('Unexpected command'))
+      })
+
+      const result = await getWorkingState(cwd)
+      expect(result).toContain('Working directory changes:')
+      expect(result).toContain('src/file1.ts')
+      expect(result).toContain('src/file2.ts')
+    })
+
+    it('should return message when working directory is clean', async () => {
+      const responses = new Map([
+        ['git --version', { stdout: 'git version 2.39.2', stderr: '' }],
+        ['git rev-parse --git-dir', { stdout: '.git', stderr: '' }],
+        ['git status --short', { stdout: '', stderr: '' }]
+      ])
+
+      exec.mockImplementation((command: string, options: { cwd?: string }, callback: Function) => {
+        for (const [cmd, response] of responses) {
+          if (command === cmd) {
+            callback(null, response)
+            return
+          }
+        }
+        callback(new Error('Unexpected command'))
+      })
+
+      const result = await getWorkingState(cwd)
+      expect(result).toBe('No changes in working directory')
+    })
+
+    it('should return error message when git is not installed', async () => {
+      exec.mockImplementation((command: string, options: { cwd?: string }, callback: Function) => {
+        if (command === 'git --version') {
+          callback(new Error('git not found'))
+          return
+        }
+        callback(new Error('Unexpected command'))
+      })
+
+      const result = await getWorkingState(cwd)
+      expect(result).toBe('Git is not installed')
+    })
+
+    it('should return error message when not in a git repository', async () => {
+      const responses = new Map([
+        ['git --version', { stdout: 'git version 2.39.2', stderr: '' }],
+        ['git rev-parse --git-dir', null] // a null entry means the callback should receive an error
+      ])
+
+      exec.mockImplementation((command: string, options: { cwd?: string }, callback: Function) => {
+        const response = responses.get(command)
+        if (response === null) {
+          callback(new Error('not a git repository'))
+        } else if (response) {
+          callback(null, response)
+        } else {
+          callback(new Error('Unexpected command'))
+        }
+      })
+
+      const result = await getWorkingState(cwd)
+      expect(result).toBe('Not a git repository')
+    })
+  })
+})
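
The command strings the mocks match outline the flow under test: verify git is installed (git --version), verify the cwd is a repository (git rev-parse --git-dir), run a --grep search, and fall back to treating the query as a commit hash when the grep returns nothing. A condensed sketch of searchCommits under those assumptions, with error handling and output truncation omitted:

    import { exec } from 'child_process'
    import { promisify } from 'util'

    const execAsync = promisify(exec)

    export interface GitCommit {
      hash: string
      shortHash: string
      subject: string
      author: string
      date: string
    }

    const COMMIT_FORMAT = '--format="%H%n%h%n%s%n%an%n%ad" --date=short'

    export async function searchCommits(query: string, cwd: string): Promise<GitCommit[]> {
      try {
        await execAsync('git --version')
        await execAsync('git rev-parse --git-dir', { cwd })
      } catch {
        return [] // git missing or not a repository
      }
      let { stdout } = await execAsync(
        `git log -n 10 ${COMMIT_FORMAT} --grep="${query}" --regexp-ignore-case`,
        { cwd },
      )
      if (!stdout.trim()) {
        // No subject matches: retry, interpreting the query as a commit hash.
        ;({ stdout } = await execAsync(`git log -n 10 ${COMMIT_FORMAT} --author-date-order ${query}`, { cwd }))
      }
      // Each commit occupies five consecutive lines in the fixed format above.
      const lines = stdout.trim().split('\n')
      const commits: GitCommit[] = []
      for (let i = 0; i + 4 < lines.length; i += 5) {
        commits.push({
          hash: lines[i],
          shortHash: lines[i + 1],
          subject: lines[i + 2],
          author: lines[i + 3],
          date: lines[i + 4],
        })
      }
      return commits
    }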

+ 135 - 0
src/utils/__tests__/path.test.ts

@@ -0,0 +1,135 @@
+import { arePathsEqual, getReadablePath } from '../path';
+import * as path from 'path';
+import * as os from 'os';
+
+describe('Path Utilities', () => {
+    const originalPlatform = process.platform;
+
+    afterEach(() => {
+        Object.defineProperty(process, 'platform', {
+            value: originalPlatform
+        });
+    });
+
+    describe('String.prototype.toPosix', () => {
+        it('should convert backslashes to forward slashes', () => {
+            const windowsPath = 'C:\\Users\\test\\file.txt';
+            expect(windowsPath.toPosix()).toBe('C:/Users/test/file.txt');
+        });
+
+        it('should not modify paths with forward slashes', () => {
+            const unixPath = '/home/user/file.txt';
+            expect(unixPath.toPosix()).toBe('/home/user/file.txt');
+        });
+
+        it('should preserve extended-length Windows paths', () => {
+            const extendedPath = '\\\\?\\C:\\Very\\Long\\Path';
+            expect(extendedPath.toPosix()).toBe('\\\\?\\C:\\Very\\Long\\Path');
+        });
+    });
+
+    describe('arePathsEqual', () => {
+        describe('on Windows', () => {
+            beforeEach(() => {
+                Object.defineProperty(process, 'platform', {
+                    value: 'win32'
+                });
+            });
+
+            it('should compare paths case-insensitively', () => {
+                expect(arePathsEqual('C:\\Users\\Test', 'c:\\users\\test')).toBe(true);
+            });
+
+            it('should handle different path separators', () => {
+                // Convert both paths to use forward slashes after normalization
+                const path1 = path.normalize('C:\\Users\\Test').replace(/\\/g, '/');
+                const path2 = path.normalize('C:/Users/Test').replace(/\\/g, '/');
+                expect(arePathsEqual(path1, path2)).toBe(true);
+            });
+
+            it('should normalize paths with ../', () => {
+                // Convert both paths to use forward slashes after normalization
+                const path1 = path.normalize('C:\\Users\\Test\\..\\Test').replace(/\\/g, '/');
+                const path2 = path.normalize('C:\\Users\\Test').replace(/\\/g, '/');
+                expect(arePathsEqual(path1, path2)).toBe(true);
+            });
+        });
+
+        describe('on POSIX', () => {
+            beforeEach(() => {
+                Object.defineProperty(process, 'platform', {
+                    value: 'darwin'
+                });
+            });
+
+            it('should compare paths case-sensitively', () => {
+                expect(arePathsEqual('/Users/Test', '/Users/test')).toBe(false);
+            });
+
+            it('should normalize paths', () => {
+                expect(arePathsEqual('/Users/./Test', '/Users/Test')).toBe(true);
+            });
+
+            it('should handle trailing slashes', () => {
+                expect(arePathsEqual('/Users/Test/', '/Users/Test')).toBe(true);
+            });
+        });
+
+        describe('edge cases', () => {
+            it('should handle undefined paths', () => {
+                expect(arePathsEqual(undefined, undefined)).toBe(true);
+                expect(arePathsEqual('/test', undefined)).toBe(false);
+                expect(arePathsEqual(undefined, '/test')).toBe(false);
+            });
+
+            it('should handle root paths with trailing slashes', () => {
+                expect(arePathsEqual('/', '/')).toBe(true);
+                expect(arePathsEqual('C:\\', 'C:\\')).toBe(true);
+            });
+        });
+    });
+
+    describe('getReadablePath', () => {
+        const homeDir = os.homedir();
+        const desktop = path.join(homeDir, 'Desktop');
+
+        it('should return basename when path equals cwd', () => {
+            const cwd = '/Users/test/project';
+            expect(getReadablePath(cwd, cwd)).toBe('project');
+        });
+
+        it('should return relative path when inside cwd', () => {
+            const cwd = '/Users/test/project';
+            const filePath = '/Users/test/project/src/file.txt';
+            expect(getReadablePath(cwd, filePath)).toBe('src/file.txt');
+        });
+
+        it('should return absolute path when outside cwd', () => {
+            const cwd = '/Users/test/project';
+            const filePath = '/Users/test/other/file.txt';
+            expect(getReadablePath(cwd, filePath)).toBe('/Users/test/other/file.txt');
+        });
+
+        it('should handle Desktop as cwd', () => {
+            const filePath = path.join(desktop, 'file.txt');
+            expect(getReadablePath(desktop, filePath)).toBe(filePath.toPosix());
+        });
+
+        it('should handle undefined relative path', () => {
+            const cwd = '/Users/test/project';
+            expect(getReadablePath(cwd)).toBe('project');
+        });
+
+        it('should handle parent directory traversal', () => {
+            const cwd = '/Users/test/project';
+            const filePath = '../../other/file.txt';
+            expect(getReadablePath(cwd, filePath)).toBe('/Users/other/file.txt');
+        });
+
+        it('should normalize paths with redundant segments', () => {
+            const cwd = '/Users/test/project';
+            const filePath = '/Users/test/project/./src/../src/file.txt';
+            expect(getReadablePath(cwd, filePath)).toBe('src/file.txt');
+        });
+    });
+});
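
For reference, a sketch of the two helpers these platform-switching tests exercise, reconstructed from the expectations above rather than taken from src/utils/path.ts; getReadablePath is omitted since the tests already document its contract in full:

    import * as path from 'path';

    declare global {
        interface String {
            toPosix(): string;
        }
    }

    String.prototype.toPosix = function (this: string): string {
        // Extended-length Windows paths (\\?\...) must keep their backslashes.
        return this.startsWith('\\\\?\\') ? this : this.replace(/\\/g, '/');
    };

    function normalize(p: string): string {
        let normalized = path.normalize(p);
        // Drop one trailing separator, but never reduce a bare root like "/" to "".
        if (normalized.length > 1 && (normalized.endsWith('/') || normalized.endsWith('\\'))) {
            normalized = normalized.slice(0, -1);
        }
        return normalized;
    }

    export function arePathsEqual(path1?: string, path2?: string): boolean {
        if (!path1 && !path2) {
            return true; // two undefined paths count as equal
        }
        if (!path1 || !path2) {
            return false;
        }
        let p1 = normalize(path1);
        let p2 = normalize(path2);
        if (process.platform === 'win32') {
            // Windows filesystems compare paths case-insensitively.
            p1 = p1.toLowerCase();
            p2 = p2.toLowerCase();
        }
        return p1 === p2;
    }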