@@ -180,7 +180,7 @@ describe("VsCodeLmHandler", () => {
 			})
 		})
 
-		it("should handle tool calls", async () => {
+		it("should handle tool calls as text when not using native tool protocol", async () => {
 			const systemPrompt = "You are a helpful assistant"
 			const messages: Anthropic.Messages.MessageParam[] = [
 				{
@@ -223,6 +223,139 @@ describe("VsCodeLmHandler", () => {
 			})
 		})
 
+		it("should handle native tool calls when using native tool protocol", async () => {
+			const systemPrompt = "You are a helpful assistant"
+			const messages: Anthropic.Messages.MessageParam[] = [
+				{
+					role: "user" as const,
+					content: "Calculate 2+2",
+				},
+			]
+
+			const toolCallData = {
+				name: "calculator",
+				arguments: { operation: "add", numbers: [2, 2] },
+				callId: "call-1",
+			}
+
+			const tools = [
+				{
+					type: "function" as const,
+					function: {
+						name: "calculator",
+						description: "A simple calculator",
+						parameters: {
+							type: "object",
+							properties: {
+								operation: { type: "string" },
+								numbers: { type: "array", items: { type: "number" } },
+							},
+						},
+					},
+				},
+			]
+
+			mockLanguageModelChat.sendRequest.mockResolvedValueOnce({
+				stream: (async function* () {
+					yield new vscode.LanguageModelToolCallPart(
+						toolCallData.callId,
+						toolCallData.name,
+						toolCallData.arguments,
+					)
+					return
+				})(),
+				text: (async function* () {
+					yield JSON.stringify({ type: "tool_call", ...toolCallData })
+					return
+				})(),
+			})
+
+			const stream = handler.createMessage(systemPrompt, messages, {
+				taskId: "test-task",
+				toolProtocol: "native",
+				tools,
+			})
+			const chunks = []
+			for await (const chunk of stream) {
+				chunks.push(chunk)
+			}
+
+			expect(chunks).toHaveLength(2) // Tool call chunk + usage chunk
+			expect(chunks[0]).toEqual({
+				type: "tool_call",
+				id: toolCallData.callId,
+				name: toolCallData.name,
+				arguments: JSON.stringify(toolCallData.arguments),
+			})
+		})
+
+		it("should pass tools to request options when using native tool protocol", async () => {
+			const systemPrompt = "You are a helpful assistant"
+			const messages: Anthropic.Messages.MessageParam[] = [
+				{
+					role: "user" as const,
+					content: "Calculate 2+2",
+				},
+			]
+
+			const tools = [
+				{
+					type: "function" as const,
+					function: {
+						name: "calculator",
+						description: "A simple calculator",
+						parameters: {
+							type: "object",
+							properties: {
+								operation: { type: "string" },
+							},
+						},
+					},
+				},
+			]
+
+			mockLanguageModelChat.sendRequest.mockResolvedValueOnce({
+				stream: (async function* () {
+					yield new vscode.LanguageModelTextPart("Result: 4")
+					return
+				})(),
+				text: (async function* () {
+					yield "Result: 4"
+					return
+				})(),
+			})
+
+			const stream = handler.createMessage(systemPrompt, messages, {
+				taskId: "test-task",
+				toolProtocol: "native",
+				tools,
+			})
+			const chunks = []
+			for await (const chunk of stream) {
+				chunks.push(chunk)
+			}
+
+			// Verify sendRequest was called with tools in options
+			expect(mockLanguageModelChat.sendRequest).toHaveBeenCalledWith(
+				expect.any(Array),
+				expect.objectContaining({
+					tools: [
+						{
+							name: "calculator",
+							description: "A simple calculator",
+							inputSchema: {
+								type: "object",
+								properties: {
+									operation: { type: "string" },
+								},
+							},
+						},
+					],
+				}),
+				expect.anything(),
+			)
+		})
+
 		it("should handle errors", async () => {
 			const systemPrompt = "You are a helpful assistant"
 			const messages: Anthropic.Messages.MessageParam[] = [
@@ -259,6 +392,26 @@ describe("VsCodeLmHandler", () => {
 			expect(model.id).toBe("test-vendor/test-family")
 			expect(model.info).toBeDefined()
 		})
+
+		it("should return supportsNativeTools and defaultToolProtocol in model info", async () => {
+			const mockModel = { ...mockLanguageModelChat }
+			;(vscode.lm.selectChatModels as Mock).mockResolvedValueOnce([mockModel])
+
+			// Initialize client
+			await handler["getClient"]()
+
+			const model = handler.getModel()
+			expect(model.info.supportsNativeTools).toBe(true)
+			expect(model.info.defaultToolProtocol).toBe("native")
+		})
+
+		it("should return supportsNativeTools and defaultToolProtocol in fallback model info", () => {
+			// Clear the client first
+			handler["client"] = null
+			const model = handler.getModel()
+			expect(model.info.supportsNativeTools).toBe(true)
+			expect(model.info.defaultToolProtocol).toBe("native")
+		})
 	})
 
 	describe("completePrompt", () => {