Browse Source

Add tests

Matt Rubens 1 year ago
parent
commit
d1c39c17e9

+ 289 - 0
src/api/providers/__tests__/vscode-lm.test.ts

@@ -0,0 +1,289 @@
+import * as vscode from 'vscode';
+import { VsCodeLmHandler } from '../vscode-lm';
+import { ApiHandlerOptions } from '../../../shared/api';
+import { Anthropic } from '@anthropic-ai/sdk';
+
// Mock the vscode namespace: the real module only exists inside a VS Code
// extension host, so every API surface VsCodeLmHandler touches is stubbed here.
// NOTE: jest hoists this factory above the imports, so it must stay self-contained.
jest.mock('vscode', () => {
	// Stand-in for vscode.LanguageModelTextPart (a plain text chunk of a response).
	class MockLanguageModelTextPart {
		type = 'text';
		constructor(public value: string) {}
	}

	// Stand-in for vscode.LanguageModelToolCallPart (a tool invocation emitted by the model).
	class MockLanguageModelToolCallPart {
		type = 'tool_call';
		constructor(
			public callId: string,
			public name: string,
			public input: any
		) {}
	}

	return {
		workspace: {
			// Returns a disposable, matching the real listener-registration contract.
			onDidChangeConfiguration: jest.fn((callback) => ({
				dispose: jest.fn()
			}))
		},
		// Token source whose token never reports cancellation.
		CancellationTokenSource: jest.fn(() => ({
			token: {
				isCancellationRequested: false,
				onCancellationRequested: jest.fn()
			},
			cancel: jest.fn(),
			dispose: jest.fn()
		})),
		CancellationError: class CancellationError extends Error {
			constructor() {
				super('Operation cancelled');
				this.name = 'CancellationError';
			}
		},
		// Mirrors the static constructors on vscode.LanguageModelChatMessage:
		// a bare string is wrapped in a single text part, arrays pass through untouched.
		LanguageModelChatMessage: {
			Assistant: jest.fn((content) => ({
				role: 'assistant',
				content: Array.isArray(content) ? content : [new MockLanguageModelTextPart(content)]
			})),
			User: jest.fn((content) => ({
				role: 'user',
				content: Array.isArray(content) ? content : [new MockLanguageModelTextPart(content)]
			}))
		},
		LanguageModelTextPart: MockLanguageModelTextPart,
		LanguageModelToolCallPart: MockLanguageModelToolCallPart,
		lm: {
			// Individual tests queue resolved values to control model discovery.
			selectChatModels: jest.fn()
		}
	};
});
+
+const mockLanguageModelChat = {
+	id: 'test-model',
+	name: 'Test Model',
+	vendor: 'test-vendor',
+	family: 'test-family',
+	version: '1.0',
+	maxInputTokens: 4096,
+	sendRequest: jest.fn(),
+	countTokens: jest.fn()
+};
+
// Unit tests for VsCodeLmHandler: construction, client creation, streaming,
// model metadata, and single-shot completion. Model discovery is driven by
// queued mockResolvedValueOnce values on vscode.lm.selectChatModels, so the
// ordering of mock setup within each test is significant.
describe('VsCodeLmHandler', () => {
	let handler: VsCodeLmHandler;
	// Selector the handler should forward verbatim to vscode.lm.selectChatModels.
	const defaultOptions: ApiHandlerOptions = {
		vsCodeLmModelSelector: {
			vendor: 'test-vendor',
			family: 'test-family'
		}
	};

	beforeEach(() => {
		jest.clearAllMocks();
		handler = new VsCodeLmHandler(defaultOptions);
	});

	afterEach(() => {
		handler.dispose();
	});

	describe('constructor', () => {
		it('should initialize with provided options', () => {
			expect(handler).toBeDefined();
			// The handler registers a configuration-change listener on construction.
			expect(vscode.workspace.onDidChangeConfiguration).toHaveBeenCalled();
		});

		it('should handle configuration changes', () => {
			// Grab the listener the constructor registered and fire it manually.
			const callback = (vscode.workspace.onDidChangeConfiguration as jest.Mock).mock.calls[0][0];
			callback({ affectsConfiguration: () => true });
			// Should reset client when config changes
			expect(handler['client']).toBeNull();
		});
	});

	describe('createClient', () => {
		it('should create client with selector', async () => {
			// NOTE(review): the spread copies property references, so this model
			// shares the same jest.fn instances as mockLanguageModelChat.
			const mockModel = { ...mockLanguageModelChat };
			(vscode.lm.selectChatModels as jest.Mock).mockResolvedValueOnce([mockModel]);

			const client = await handler['createClient']({
				vendor: 'test-vendor',
				family: 'test-family'
			});

			expect(client).toBeDefined();
			expect(client.id).toBe('test-model');
			// The selector must be passed through to VS Code unchanged.
			expect(vscode.lm.selectChatModels).toHaveBeenCalledWith({
				vendor: 'test-vendor',
				family: 'test-family'
			});
		});

		it('should return default client when no models available', async () => {
			(vscode.lm.selectChatModels as jest.Mock).mockResolvedValueOnce([]);

			const client = await handler['createClient']({});

			// With no matching models the handler falls back to a stub client.
			expect(client).toBeDefined();
			expect(client.id).toBe('default-lm');
			expect(client.vendor).toBe('vscode');
		});
	});

	describe('createMessage', () => {
		beforeEach(() => {
			const mockModel = { ...mockLanguageModelChat };
			// Queue one model for the lazy client lookup each test performs.
			(vscode.lm.selectChatModels as jest.Mock).mockResolvedValueOnce([mockModel]);
			mockLanguageModelChat.countTokens.mockResolvedValue(10);
		});

		it('should stream text responses', async () => {
			const systemPrompt = 'You are a helpful assistant';
			const messages: Anthropic.Messages.MessageParam[] = [{
				role: 'user' as const,
				content: 'Hello'
			}];

			const responseText = 'Hello! How can I help you?';
			// sendRequest resolves with both a typed part stream and a raw text
			// stream, mirroring vscode.LanguageModelChatResponse.
			mockLanguageModelChat.sendRequest.mockResolvedValueOnce({
				stream: (async function* () {
					yield new vscode.LanguageModelTextPart(responseText);
					return;
				})(),
				text: (async function* () {
					yield responseText;
					return;
				})()
			});

			const stream = handler.createMessage(systemPrompt, messages);
			const chunks = [];
			for await (const chunk of stream) {
				chunks.push(chunk);
			}

			expect(chunks).toHaveLength(2); // Text chunk + usage chunk
			expect(chunks[0]).toEqual({
				type: 'text',
				text: responseText
			});
			// Usage is emitted as a trailing chunk once the stream is exhausted.
			expect(chunks[1]).toMatchObject({
				type: 'usage',
				inputTokens: expect.any(Number),
				outputTokens: expect.any(Number)
			});
		});

		it('should handle tool calls', async () => {
			const systemPrompt = 'You are a helpful assistant';
			const messages: Anthropic.Messages.MessageParam[] = [{
				role: 'user' as const,
				content: 'Calculate 2+2'
			}];

			const toolCallData = {
				name: 'calculator',
				arguments: { operation: 'add', numbers: [2, 2] },
				callId: 'call-1'
			};

			mockLanguageModelChat.sendRequest.mockResolvedValueOnce({
				stream: (async function* () {
					yield new vscode.LanguageModelToolCallPart(
						toolCallData.callId,
						toolCallData.name,
						toolCallData.arguments
					);
					return;
				})(),
				text: (async function* () {
					yield JSON.stringify({ type: 'tool_call', ...toolCallData });
					return;
				})()
			});

			const stream = handler.createMessage(systemPrompt, messages);
			const chunks = [];
			for await (const chunk of stream) {
				chunks.push(chunk);
			}

			expect(chunks).toHaveLength(2); // Tool call chunk + usage chunk
			// Tool calls are surfaced as serialized JSON inside a plain text
			// chunk rather than a dedicated chunk type.
			expect(chunks[0]).toEqual({
				type: 'text',
				text: JSON.stringify({ type: 'tool_call', ...toolCallData })
			});
		});

		it('should handle errors', async () => {
			const systemPrompt = 'You are a helpful assistant';
			const messages: Anthropic.Messages.MessageParam[] = [{
				role: 'user' as const,
				content: 'Hello'
			}];

			mockLanguageModelChat.sendRequest.mockRejectedValueOnce(new Error('API Error'));

			// Consuming the generator should surface the provider failure.
			await expect(async () => {
				const stream = handler.createMessage(systemPrompt, messages);
				for await (const _ of stream) {
					// consume stream
				}
			}).rejects.toThrow('API Error');
		});
	});

	describe('getModel', () => {
		it('should return model info when client exists', async () => {
			const mockModel = { ...mockLanguageModelChat };
			(vscode.lm.selectChatModels as jest.Mock).mockResolvedValueOnce([mockModel]);

			// Initialize client
			await handler['getClient']();

			const model = handler.getModel();
			expect(model.id).toBe('test-model');
			expect(model.info).toBeDefined();
			// Context window is derived from the model's maxInputTokens.
			expect(model.info.contextWindow).toBe(4096);
		});

		it('should return fallback model info when no client exists', () => {
			// Without a client the id is synthesized from the configured selector.
			const model = handler.getModel();
			expect(model.id).toBe('test-vendor/test-family');
			expect(model.info).toBeDefined();
		});
	});

	describe('completePrompt', () => {
		it('should complete single prompt', async () => {
			const mockModel = { ...mockLanguageModelChat };
			(vscode.lm.selectChatModels as jest.Mock).mockResolvedValueOnce([mockModel]);

			const responseText = 'Completed text';
			mockLanguageModelChat.sendRequest.mockResolvedValueOnce({
				stream: (async function* () {
					yield new vscode.LanguageModelTextPart(responseText);
					return;
				})(),
				text: (async function* () {
					yield responseText;
					return;
				})()
			});

			// completePrompt collapses the streamed parts into a single string.
			const result = await handler.completePrompt('Test prompt');
			expect(result).toBe(responseText);
			expect(mockLanguageModelChat.sendRequest).toHaveBeenCalled();
		});

		it('should handle errors during completion', async () => {
			const mockModel = { ...mockLanguageModelChat };
			(vscode.lm.selectChatModels as jest.Mock).mockResolvedValueOnce([mockModel]);

			mockLanguageModelChat.sendRequest.mockRejectedValueOnce(new Error('Completion failed'));

			// Errors are wrapped with a provider-specific prefix.
			await expect(handler.completePrompt('Test prompt'))
				.rejects
				.toThrow('VSCode LM completion error: Completion failed');
		});
	});
});

+ 246 - 0
src/api/transform/__tests__/vscode-lm-format.test.ts

@@ -0,0 +1,246 @@
+import { Anthropic } from "@anthropic-ai/sdk";
+import * as vscode from 'vscode';
+import { convertToVsCodeLmMessages, convertToAnthropicRole, convertToAnthropicMessage } from '../vscode-lm-format';
+
+// Mock crypto
+const mockCrypto = {
+	randomUUID: () => 'test-uuid'
+};
+global.crypto = mockCrypto as any;
+
// Define types for our mocked classes.
// These mirror the runtime shapes produced by the jest.mock('vscode') factory,
// letting tests cast message content without importing real vscode types.

// Plain text fragment of a chat message.
interface MockLanguageModelTextPart {
	type: 'text';
	value: string;
}

// Tool invocation requested by the model.
interface MockLanguageModelToolCallPart {
	type: 'tool_call';
	callId: string;
	name: string;
	input: any;
}

// Result returned for a prior tool call; payload is text parts only.
interface MockLanguageModelToolResultPart {
	type: 'tool_result';
	toolUseId: string;
	parts: MockLanguageModelTextPart[];
}

// Union of every content part a mocked message can carry.
type MockMessageContent = MockLanguageModelTextPart | MockLanguageModelToolCallPart | MockLanguageModelToolResultPart;

// Shape of a message built by the mocked LanguageModelChatMessage constructors.
interface MockLanguageModelChatMessage {
	role: string;
	name?: string;
	content: MockMessageContent[];
}
+
// Mock the vscode namespace with just the message/part types the format
// converters use. jest hoists this factory above the imports, so it must
// stay self-contained.
jest.mock('vscode', () => {
	// String stand-ins for the vscode.LanguageModelChatMessageRole enum.
	const LanguageModelChatMessageRole = {
		Assistant: 'assistant',
		User: 'user'
	};

	// Plain text chunk of a message.
	class MockLanguageModelTextPart {
		type = 'text';
		constructor(public value: string) {}
	}

	// Tool invocation emitted by the model.
	class MockLanguageModelToolCallPart {
		type = 'tool_call';
		constructor(
			public callId: string,
			public name: string,
			public input: any
		) {}
	}

	// Result part answering an earlier tool call.
	class MockLanguageModelToolResultPart {
		type = 'tool_result';
		constructor(
			public toolUseId: string,
			public parts: MockLanguageModelTextPart[]
		) {}
	}

	return {
		// Static constructors: a bare string is wrapped in a single text part,
		// arrays pass through untouched.
		LanguageModelChatMessage: {
			Assistant: jest.fn((content) => ({
				role: LanguageModelChatMessageRole.Assistant,
				name: 'assistant',
				content: Array.isArray(content) ? content : [new MockLanguageModelTextPart(content)]
			})),
			User: jest.fn((content) => ({
				role: LanguageModelChatMessageRole.User,
				name: 'user',
				content: Array.isArray(content) ? content : [new MockLanguageModelTextPart(content)]
			}))
		},
		LanguageModelChatMessageRole,
		LanguageModelTextPart: MockLanguageModelTextPart,
		LanguageModelToolCallPart: MockLanguageModelToolCallPart,
		LanguageModelToolResultPart: MockLanguageModelToolResultPart
	};
});
+
// Unit tests for the Anthropic <-> VS Code LM message conversion helpers.
describe('vscode-lm-format', () => {
	describe('convertToVsCodeLmMessages', () => {
		it('should convert simple string messages', () => {
			const messages: Anthropic.Messages.MessageParam[] = [
				{ role: 'user', content: 'Hello' },
				{ role: 'assistant', content: 'Hi there' }
			];

			const result = convertToVsCodeLmMessages(messages);

			// String content becomes a single text part; roles are preserved.
			expect(result).toHaveLength(2);
			expect(result[0].role).toBe('user');
			expect((result[0].content[0] as MockLanguageModelTextPart).value).toBe('Hello');
			expect(result[1].role).toBe('assistant');
			expect((result[1].content[0] as MockLanguageModelTextPart).value).toBe('Hi there');
		});

		it('should handle complex user messages with tool results', () => {
			const messages: Anthropic.Messages.MessageParam[] = [{
				role: 'user',
				content: [
					{ type: 'text', text: 'Here is the result:' },
					{
						type: 'tool_result',
						tool_use_id: 'tool-1',
						content: 'Tool output'
					}
				]
			}];

			const result = convertToVsCodeLmMessages(messages);

			expect(result).toHaveLength(1);
			expect(result[0].role).toBe('user');
			expect(result[0].content).toHaveLength(2);
			// NOTE(review): the destructuring order asserts that the converter
			// emits the tool_result part BEFORE the text part, i.e. it reorders
			// relative to the Anthropic input — confirm this is intended.
			const [toolResult, textContent] = result[0].content as [MockLanguageModelToolResultPart, MockLanguageModelTextPart];
			expect(toolResult.type).toBe('tool_result');
			expect(textContent.type).toBe('text');
		});

		it('should handle complex assistant messages with tool calls', () => {
			const messages: Anthropic.Messages.MessageParam[] = [{
				role: 'assistant',
				content: [
					{ type: 'text', text: 'Let me help you with that.' },
					{
						type: 'tool_use',
						id: 'tool-1',
						name: 'calculator',
						input: { operation: 'add', numbers: [2, 2] }
					}
				]
			}];

			const result = convertToVsCodeLmMessages(messages);

			expect(result).toHaveLength(1);
			expect(result[0].role).toBe('assistant');
			expect(result[0].content).toHaveLength(2);
			// Same reordering expectation as above: tool_call first, text second.
			const [toolCall, textContent] = result[0].content as [MockLanguageModelToolCallPart, MockLanguageModelTextPart];
			expect(toolCall.type).toBe('tool_call');
			expect(textContent.type).toBe('text');
		});

		it('should handle image blocks with appropriate placeholders', () => {
			const messages: Anthropic.Messages.MessageParam[] = [{
				role: 'user',
				content: [
					{ type: 'text', text: 'Look at this:' },
					{
						type: 'image',
						source: {
							type: 'base64',
							media_type: 'image/png',
							data: 'base64data'
						}
					}
				]
			}];

			const result = convertToVsCodeLmMessages(messages);

			expect(result).toHaveLength(1);
			// Images are unsupported by the VS Code LM API and degrade to a
			// descriptive text placeholder that records the media type.
			const imagePlaceholder = result[0].content[1] as MockLanguageModelTextPart;
			expect(imagePlaceholder.value).toContain('[Image (base64): image/png not supported by VSCode LM API]');
		});
	});

	describe('convertToAnthropicRole', () => {
		it('should convert assistant role correctly', () => {
			const result = convertToAnthropicRole('assistant' as any);
			expect(result).toBe('assistant');
		});

		it('should convert user role correctly', () => {
			const result = convertToAnthropicRole('user' as any);
			expect(result).toBe('user');
		});

		it('should return null for unknown roles', () => {
			// Unrecognized roles map to null rather than throwing.
			const result = convertToAnthropicRole('unknown' as any);
			expect(result).toBeNull();
		});
	});

	describe('convertToAnthropicMessage', () => {
		it('should convert assistant message with text content', async () => {
			const vsCodeMessage = {
				role: 'assistant',
				name: 'assistant',
				content: [new vscode.LanguageModelTextPart('Hello')]
			};

			const result = await convertToAnthropicMessage(vsCodeMessage as any);

			expect(result.role).toBe('assistant');
			expect(result.content).toHaveLength(1);
			expect(result.content[0]).toEqual({
				type: 'text',
				text: 'Hello'
			});
			// Id comes from the mocked crypto.randomUUID above.
			expect(result.id).toBe('test-uuid');
		});

		it('should convert assistant message with tool calls', async () => {
			const vsCodeMessage = {
				role: 'assistant',
				name: 'assistant',
				content: [new vscode.LanguageModelToolCallPart(
					'call-1',
					'calculator',
					{ operation: 'add', numbers: [2, 2] }
				)]
			};

			const result = await convertToAnthropicMessage(vsCodeMessage as any);

			// Tool calls round-trip into Anthropic tool_use blocks.
			expect(result.content).toHaveLength(1);
			expect(result.content[0]).toEqual({
				type: 'tool_use',
				id: 'call-1',
				name: 'calculator',
				input: { operation: 'add', numbers: [2, 2] }
			});
			expect(result.id).toBe('test-uuid');
		});

		it('should throw error for non-assistant messages', async () => {
			const vsCodeMessage = {
				role: 'user',
				name: 'user',
				content: [new vscode.LanguageModelTextPart('Hello')]
			};

			// Only assistant -> Anthropic conversion is supported.
			await expect(convertToAnthropicMessage(vsCodeMessage as any))
				.rejects
				.toThrow('Cline <Language Model API>: Only assistant messages are supported.');
		});
	});
});

+ 44 - 0
src/shared/__tests__/vsCodeSelectorUtils.test.ts

@@ -0,0 +1,44 @@
+import { stringifyVsCodeLmModelSelector, SELECTOR_SEPARATOR } from '../vsCodeSelectorUtils';
+import { LanguageModelChatSelector } from 'vscode';
+
+describe('vsCodeSelectorUtils', () => {
+	describe('stringifyVsCodeLmModelSelector', () => {
+		it('should join all defined selector properties with separator', () => {
+			const selector: LanguageModelChatSelector = {
+				vendor: 'test-vendor',
+				family: 'test-family',
+				version: 'v1',
+				id: 'test-id'
+			};
+
+			const result = stringifyVsCodeLmModelSelector(selector);
+			expect(result).toBe('test-vendor/test-family/v1/test-id');
+		});
+
+		it('should skip undefined properties', () => {
+			const selector: LanguageModelChatSelector = {
+				vendor: 'test-vendor',
+				family: 'test-family'
+			};
+
+			const result = stringifyVsCodeLmModelSelector(selector);
+			expect(result).toBe('test-vendor/test-family');
+		});
+
+		it('should handle empty selector', () => {
+			const selector: LanguageModelChatSelector = {};
+
+			const result = stringifyVsCodeLmModelSelector(selector);
+			expect(result).toBe('');
+		});
+
+		it('should handle selector with only one property', () => {
+			const selector: LanguageModelChatSelector = {
+				vendor: 'test-vendor'
+			};
+
+			const result = stringifyVsCodeLmModelSelector(selector);
+			expect(result).toBe('test-vendor');
+		});
+	});
+});