// npx jest src/api/transform/__tests__/openai-format.test.ts
import { Anthropic } from "@anthropic-ai/sdk"
import OpenAI from "openai"
import { convertToOpenAiMessages } from "../openai-format"
  5. describe("convertToOpenAiMessages", () => {
  6. it("should convert simple text messages", () => {
  7. const anthropicMessages: Anthropic.Messages.MessageParam[] = [
  8. {
  9. role: "user",
  10. content: "Hello",
  11. },
  12. {
  13. role: "assistant",
  14. content: "Hi there!",
  15. },
  16. ]
  17. const openAiMessages = convertToOpenAiMessages(anthropicMessages)
  18. expect(openAiMessages).toHaveLength(2)
  19. expect(openAiMessages[0]).toEqual({
  20. role: "user",
  21. content: "Hello",
  22. })
  23. expect(openAiMessages[1]).toEqual({
  24. role: "assistant",
  25. content: "Hi there!",
  26. })
  27. })
  28. it("should handle messages with image content", () => {
  29. const anthropicMessages: Anthropic.Messages.MessageParam[] = [
  30. {
  31. role: "user",
  32. content: [
  33. {
  34. type: "text",
  35. text: "What is in this image?",
  36. },
  37. {
  38. type: "image",
  39. source: {
  40. type: "base64",
  41. media_type: "image/jpeg",
  42. data: "base64data",
  43. },
  44. },
  45. ],
  46. },
  47. ]
  48. const openAiMessages = convertToOpenAiMessages(anthropicMessages)
  49. expect(openAiMessages).toHaveLength(1)
  50. expect(openAiMessages[0].role).toBe("user")
  51. const content = openAiMessages[0].content as Array<{
  52. type: string
  53. text?: string
  54. image_url?: { url: string }
  55. }>
  56. expect(Array.isArray(content)).toBe(true)
  57. expect(content).toHaveLength(2)
  58. expect(content[0]).toEqual({ type: "text", text: "What is in this image?" })
  59. expect(content[1]).toEqual({
  60. type: "image_url",
  61. image_url: { url: "data:image/jpeg;base64,base64data" },
  62. })
  63. })
  64. it("should handle assistant messages with tool use", () => {
  65. const anthropicMessages: Anthropic.Messages.MessageParam[] = [
  66. {
  67. role: "assistant",
  68. content: [
  69. {
  70. type: "text",
  71. text: "Let me check the weather.",
  72. },
  73. {
  74. type: "tool_use",
  75. id: "weather-123",
  76. name: "get_weather",
  77. input: { city: "London" },
  78. },
  79. ],
  80. },
  81. ]
  82. const openAiMessages = convertToOpenAiMessages(anthropicMessages)
  83. expect(openAiMessages).toHaveLength(1)
  84. const assistantMessage = openAiMessages[0] as OpenAI.Chat.ChatCompletionAssistantMessageParam
  85. expect(assistantMessage.role).toBe("assistant")
  86. expect(assistantMessage.content).toBe("Let me check the weather.")
  87. expect(assistantMessage.tool_calls).toHaveLength(1)
  88. expect(assistantMessage.tool_calls![0]).toEqual({
  89. id: "weather-123",
  90. type: "function",
  91. function: {
  92. name: "get_weather",
  93. arguments: JSON.stringify({ city: "London" }),
  94. },
  95. })
  96. })
  97. it("should handle user messages with tool results", () => {
  98. const anthropicMessages: Anthropic.Messages.MessageParam[] = [
  99. {
  100. role: "user",
  101. content: [
  102. {
  103. type: "tool_result",
  104. tool_use_id: "weather-123",
  105. content: "Current temperature in London: 20°C",
  106. },
  107. ],
  108. },
  109. ]
  110. const openAiMessages = convertToOpenAiMessages(anthropicMessages)
  111. expect(openAiMessages).toHaveLength(1)
  112. const toolMessage = openAiMessages[0] as OpenAI.Chat.ChatCompletionToolMessageParam
  113. expect(toolMessage.role).toBe("tool")
  114. expect(toolMessage.tool_call_id).toBe("weather-123")
  115. expect(toolMessage.content).toBe("Current temperature in London: 20°C")
  116. })
  117. })