// index.ts
import { Anthropic } from "@anthropic-ai/sdk"
import { ApiConfiguration, ModelInfo } from "../shared/api"
import { AnthropicHandler } from "./providers/anthropic"
import { AwsBedrockHandler } from "./providers/bedrock"
import { OpenRouterHandler } from "./providers/openrouter"
import { VertexHandler } from "./providers/vertex"
import { OpenAiHandler } from "./providers/openai"
import { OllamaHandler } from "./providers/ollama"
import { LmStudioHandler } from "./providers/lmstudio"
import { GeminiHandler } from "./providers/gemini"
import { OpenAiNativeHandler } from "./providers/openai-native"
import { ApiStream } from "./transform/stream"
  13. export interface SingleCompletionHandler {
  14. completePrompt(prompt: string): Promise<string>
  15. }
  16. export interface ApiHandler {
  17. createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream
  18. getModel(): { id: string; info: ModelInfo }
  19. }
  20. export function buildApiHandler(configuration: ApiConfiguration): ApiHandler {
  21. const { apiProvider, ...options } = configuration
  22. switch (apiProvider) {
  23. case "anthropic":
  24. return new AnthropicHandler(options)
  25. case "openrouter":
  26. return new OpenRouterHandler(options)
  27. case "bedrock":
  28. return new AwsBedrockHandler(options)
  29. case "vertex":
  30. return new VertexHandler(options)
  31. case "openai":
  32. return new OpenAiHandler(options)
  33. case "ollama":
  34. return new OllamaHandler(options)
  35. case "lmstudio":
  36. return new LmStudioHandler(options)
  37. case "gemini":
  38. return new GeminiHandler(options)
  39. case "openai-native":
  40. return new OpenAiNativeHandler(options)
  41. default:
  42. return new AnthropicHandler(options)
  43. }
  44. }