package openai

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"mime/multipart"
	"net/http"
	"net/textproto"
	"path/filepath"
	"strings"

	"github.com/QuantumNous/new-api/common"
	"github.com/QuantumNous/new-api/constant"
	"github.com/QuantumNous/new-api/dto"
	"github.com/QuantumNous/new-api/logger"
	"github.com/QuantumNous/new-api/relay/channel"
	"github.com/QuantumNous/new-api/relay/channel/ai360"
	"github.com/QuantumNous/new-api/relay/channel/lingyiwanwu"
	//"github.com/QuantumNous/new-api/relay/channel/minimax"
	"github.com/QuantumNous/new-api/relay/channel/openrouter"
	"github.com/QuantumNous/new-api/relay/channel/xinference"
	relaycommon "github.com/QuantumNous/new-api/relay/common"
	"github.com/QuantumNous/new-api/relay/common_handler"
	relayconstant "github.com/QuantumNous/new-api/relay/constant"
	"github.com/QuantumNous/new-api/service"
	"github.com/QuantumNous/new-api/setting/model_setting"
	"github.com/QuantumNous/new-api/types"

	"github.com/gin-gonic/gin"
)
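
// Adaptor handles request and response conversion for OpenAI-compatible
// channels (OpenAI, Azure OpenAI, OpenRouter, and other vendors that speak
// the OpenAI API). ResponseFormat caches the audio response_format requested
// by the client so DoResponse can parse transcription results accordingly.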
type Adaptor struct {
	ChannelType    int
	ResponseFormat string
}

// parseReasoningEffortFromModelSuffix parses the reasoning effort level from
// a model-name suffix, e.g. "o3-mini-high" -> ("high", "o3-mini").
// Supported OpenAI models: o1-mini/o3-mini/o4-mini/o1/o3, etc.
// The "minimal" effort is only available on gpt-5.
func parseReasoningEffortFromModelSuffix(model string) (string, string) {
	effortSuffixes := []string{"-high", "-minimal", "-low", "-medium", "-none"}
	for _, suffix := range effortSuffixes {
		if strings.HasSuffix(model, suffix) {
			effort := strings.TrimPrefix(suffix, "-")
			originModel := strings.TrimSuffix(model, suffix)
			return effort, originModel
		}
	}
	return "", model
}
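
// ConvertGeminiRequest translates a native Gemini chat request into the
// general OpenAI request format and then applies the usual OpenAI-specific
// adjustments via ConvertOpenAIRequest.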
func (a *Adaptor) ConvertGeminiRequest(c *gin.Context, info *relaycommon.RelayInfo, request *dto.GeminiChatRequest) (any, error) {
	// Convert the request format using service.GeminiToOpenAIRequest
	openaiRequest, err := service.GeminiToOpenAIRequest(request, info)
	if err != nil {
		return nil, err
	}
	return a.ConvertOpenAIRequest(c, info, openaiRequest)
}
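
// ConvertClaudeRequest translates a Claude Messages request into the OpenAI
// format, enables stream usage reporting when the channel supports
// stream_options, and then applies the usual OpenAI-specific adjustments.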
func (a *Adaptor) ConvertClaudeRequest(c *gin.Context, info *relaycommon.RelayInfo, request *dto.ClaudeRequest) (any, error) {
	//if !strings.Contains(request.Model, "claude") {
	//	return nil, fmt.Errorf("you are using openai channel type with path /v1/messages, only claude model supported convert, but got %s", request.Model)
	//}
	//if common.DebugEnabled {
	//	bodyBytes := []byte(common.GetJsonString(request))
	//	err := os.WriteFile(fmt.Sprintf("claude_request_%s.txt", c.GetString(common.RequestIdKey)), bodyBytes, 0644)
	//	if err != nil {
	//		println(fmt.Sprintf("failed to save request body to file: %v", err))
	//	}
	//}
	aiRequest, err := service.ClaudeToOpenAIRequest(*request, info)
	if err != nil {
		return nil, err
	}
	//if common.DebugEnabled {
	//	println(fmt.Sprintf("convert claude to openai request result: %s", common.GetJsonString(aiRequest)))
	//	// Save request body to file for debugging
	//	bodyBytes := []byte(common.GetJsonString(aiRequest))
	//	err = os.WriteFile(fmt.Sprintf("claude_to_openai_request_%s.txt", c.GetString(common.RequestIdKey)), bodyBytes, 0644)
	//	if err != nil {
	//		println(fmt.Sprintf("failed to save request body to file: %v", err))
	//	}
	//}
	if info.SupportStreamOptions && info.IsStream {
		aiRequest.StreamOptions = &dto.StreamOptions{
			IncludeUsage: true,
		}
	}
	return a.ConvertOpenAIRequest(c, info, aiRequest)
}
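
// Init records the channel type and, when thinking_to_content is enabled for
// the channel, prepares the per-request state used to merge reasoning output
// into regular content.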
func (a *Adaptor) Init(info *relaycommon.RelayInfo) {
	a.ChannelType = info.ChannelType
	// initialize ThinkingContentInfo when thinking_to_content is enabled
	if info.ChannelSetting.ThinkingToContent {
		info.ThinkingContentInfo = relaycommon.ThinkingContentInfo{
			IsFirstThinkingContent:  true,
			SendLastThinkingContent: false,
			HasSentThinkingContent:  false,
		}
	}
}
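
// GetRequestURL builds the upstream URL for the current relay mode and
// channel type: Azure deployments (including the responses and realtime
// endpoints), custom URL templates with a {model} placeholder, and plain
// OpenAI-compatible base URLs. For realtime relays the base URL scheme is
// switched to ws/wss first.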
func (a *Adaptor) GetRequestURL(info *relaycommon.RelayInfo) (string, error) {
	if info.RelayMode == relayconstant.RelayModeRealtime {
		if strings.HasPrefix(info.ChannelBaseUrl, "https://") {
			baseUrl := strings.TrimPrefix(info.ChannelBaseUrl, "https://")
			baseUrl = "wss://" + baseUrl
			info.ChannelBaseUrl = baseUrl
		} else if strings.HasPrefix(info.ChannelBaseUrl, "http://") {
			baseUrl := strings.TrimPrefix(info.ChannelBaseUrl, "http://")
			baseUrl = "ws://" + baseUrl
			info.ChannelBaseUrl = baseUrl
		}
	}
	switch info.ChannelType {
	case constant.ChannelTypeAzure:
		apiVersion := info.ApiVersion
		if apiVersion == "" {
			apiVersion = constant.AzureDefaultAPIVersion
		}
		// https://learn.microsoft.com/en-us/azure/cognitive-services/openai/chatgpt-quickstart?pivots=rest-api&tabs=command-line#rest-api
		requestURL := strings.Split(info.RequestURLPath, "?")[0]
		requestURL = fmt.Sprintf("%s?api-version=%s", requestURL, apiVersion)
		task := strings.TrimPrefix(requestURL, "/v1/")
		if info.RelayFormat == types.RelayFormatClaude {
			task = strings.TrimPrefix(task, "messages")
			task = "chat/completions" + task
		}
		// Special handling for the Responses API
		if info.RelayMode == relayconstant.RelayModeResponses {
			responsesApiVersion := "preview"
			subUrl := "/openai/v1/responses"
			if strings.Contains(info.ChannelBaseUrl, "cognitiveservices.azure.com") {
				subUrl = "/openai/responses"
				responsesApiVersion = apiVersion
			}
			if info.ChannelOtherSettings.AzureResponsesVersion != "" {
				responsesApiVersion = info.ChannelOtherSettings.AzureResponsesVersion
			}
			requestURL = fmt.Sprintf("%s?api-version=%s", subUrl, responsesApiVersion)
			return relaycommon.GetFullRequestURL(info.ChannelBaseUrl, requestURL, info.ChannelType), nil
		}
		model_ := info.UpstreamModelName
		// Dots are kept in the deployment name for channels created after 2025-05-10; older channels strip them.
		if info.ChannelCreateTime < constant.AzureNoRemoveDotTime {
			model_ = strings.Replace(model_, ".", "", -1)
		}
		// https://github.com/songquanpeng/one-api/issues/67
		requestURL = fmt.Sprintf("/openai/deployments/%s/%s", model_, task)
		if info.RelayMode == relayconstant.RelayModeRealtime {
			requestURL = fmt.Sprintf("/openai/realtime?deployment=%s&api-version=%s", model_, apiVersion)
		}
		return relaycommon.GetFullRequestURL(info.ChannelBaseUrl, requestURL, info.ChannelType), nil
	//case constant.ChannelTypeMiniMax:
	//	return minimax.GetRequestURL(info)
	case constant.ChannelTypeCustom:
		url := info.ChannelBaseUrl
		url = strings.Replace(url, "{model}", info.UpstreamModelName, -1)
		return url, nil
	default:
		if info.RelayFormat == types.RelayFormatClaude || info.RelayFormat == types.RelayFormatGemini {
			return fmt.Sprintf("%s/v1/chat/completions", info.ChannelBaseUrl), nil
		}
		return relaycommon.GetFullRequestURL(info.ChannelBaseUrl, info.RequestURLPath, info.ChannelType), nil
	}
}
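
// SetupRequestHeader sets the authentication and protocol headers expected by
// the upstream: api-key for Azure, Bearer tokens otherwise, the realtime
// WebSocket subprotocol headers, and the attribution headers required by
// OpenRouter.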
func (a *Adaptor) SetupRequestHeader(c *gin.Context, header *http.Header, info *relaycommon.RelayInfo) error {
	channel.SetupApiRequestHeader(info, c, header)
	if info.ChannelType == constant.ChannelTypeAzure {
		header.Set("api-key", info.ApiKey)
		return nil
	}
	if info.ChannelType == constant.ChannelTypeOpenAI && info.Organization != "" {
		header.Set("OpenAI-Organization", info.Organization)
	}
	if info.RelayMode == relayconstant.RelayModeRealtime {
		swp := c.Request.Header.Get("Sec-WebSocket-Protocol")
		if swp != "" {
			items := []string{
				"realtime",
				"openai-insecure-api-key." + info.ApiKey,
				"openai-beta.realtime-v1",
			}
			header.Set("Sec-WebSocket-Protocol", strings.Join(items, ","))
			//req.Header.Set("Sec-WebSocket-Key", c.Request.Header.Get("Sec-WebSocket-Key"))
			//req.Header.Set("Sec-Websocket-Extensions", c.Request.Header.Get("Sec-Websocket-Extensions"))
			//req.Header.Set("Sec-Websocket-Version", c.Request.Header.Get("Sec-Websocket-Version"))
		} else {
			header.Set("openai-beta", "realtime=v1")
			header.Set("Authorization", "Bearer "+info.ApiKey)
		}
	} else {
		header.Set("Authorization", "Bearer "+info.ApiKey)
	}
	if info.ChannelType == constant.ChannelTypeOpenRouter {
		header.Set("HTTP-Referer", "https://www.newapi.ai")
		header.Set("X-Title", "New API")
	}
	return nil
}
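
// ConvertOpenAIRequest normalizes a general OpenAI request for the target
// channel: it strips unsupported stream options, maps reasoning fields for
// OpenRouter, converts Claude-style thinking budgets, and applies the
// o-series and gpt-5 specific rules (max_completion_tokens, temperature,
// reasoning-effort suffixes, and the developer role).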
func (a *Adaptor) ConvertOpenAIRequest(c *gin.Context, info *relaycommon.RelayInfo, request *dto.GeneralOpenAIRequest) (any, error) {
	if request == nil {
		return nil, errors.New("request is nil")
	}
	if info.ChannelType != constant.ChannelTypeOpenAI && info.ChannelType != constant.ChannelTypeAzure {
		request.StreamOptions = nil
	}
	if info.ChannelType == constant.ChannelTypeOpenRouter {
		if len(request.Usage) == 0 {
			request.Usage = json.RawMessage(`{"include":true}`)
		}
		// Adapt OpenRouter's "-thinking" model suffix
		if !model_setting.ShouldPreserveThinkingSuffix(info.OriginModelName) &&
			strings.HasSuffix(info.UpstreamModelName, "-thinking") {
			info.UpstreamModelName = strings.TrimSuffix(info.UpstreamModelName, "-thinking")
			request.Model = info.UpstreamModelName
			if len(request.Reasoning) == 0 {
				reasoning := map[string]any{
					"enabled": true,
				}
				if request.ReasoningEffort != "" && request.ReasoningEffort != "none" {
					reasoning["effort"] = request.ReasoningEffort
				}
				marshal, err := common.Marshal(reasoning)
				if err != nil {
					return nil, fmt.Errorf("error marshalling reasoning: %w", err)
				}
				request.Reasoning = marshal
			}
			// Clear the now-redundant ReasoningEffort
			request.ReasoningEffort = ""
		} else {
			if len(request.Reasoning) == 0 {
				// Map the OpenAI-style ReasoningEffort field to OpenRouter's reasoning object
				if request.ReasoningEffort != "" {
					reasoning := map[string]any{
						"enabled": true,
					}
					if request.ReasoningEffort != "none" {
						reasoning["effort"] = request.ReasoningEffort
						marshal, err := common.Marshal(reasoning)
						if err != nil {
							return nil, fmt.Errorf("error marshalling reasoning: %w", err)
						}
						request.Reasoning = marshal
					}
				}
			}
			request.ReasoningEffort = ""
		}
		// https://docs.anthropic.com/en/api/openai-sdk#extended-thinking-support
		// Models such as Claude 3.5 Haiku are not excluded here; add an exclusion if it ever causes problems. Kept simple for best compatibility.
		if request.THINKING != nil && strings.HasPrefix(info.UpstreamModelName, "anthropic") {
			var thinking dto.Thinking // Claude's standard Thinking format
			if err := json.Unmarshal(request.THINKING, &thinking); err != nil {
				return nil, fmt.Errorf("error Unmarshal thinking: %w", err)
			}
			// Only handle the case where thinking.Type is "enabled"
			if thinking.Type == "enabled" {
				// Guard against a nil BudgetTokens
				if thinking.BudgetTokens == nil {
					return nil, fmt.Errorf("BudgetTokens is nil when thinking is enabled")
				}
				reasoning := openrouter.RequestReasoning{
					MaxTokens: *thinking.BudgetTokens,
				}
				marshal, err := common.Marshal(reasoning)
				if err != nil {
					return nil, fmt.Errorf("error marshalling reasoning: %w", err)
				}
				request.Reasoning = marshal
			}
			// Clear THINKING
			request.THINKING = nil
		}
	}
	if strings.HasPrefix(info.UpstreamModelName, "o") || strings.HasPrefix(info.UpstreamModelName, "gpt-5") {
		if request.MaxCompletionTokens == 0 && request.MaxTokens != 0 {
			request.MaxCompletionTokens = request.MaxTokens
			request.MaxTokens = 0
		}
		if strings.HasPrefix(info.UpstreamModelName, "o") {
			request.Temperature = nil
		}
		if strings.HasPrefix(info.UpstreamModelName, "gpt-5") {
			if info.UpstreamModelName != "gpt-5-chat-latest" {
				request.Temperature = nil
			}
		}
		// Convert the reasoning-effort suffix on the model name
		effort, originModel := parseReasoningEffortFromModelSuffix(info.UpstreamModelName)
		if effort != "" {
			request.ReasoningEffort = effort
			info.UpstreamModelName = originModel
			request.Model = originModel
		}
		info.ReasoningEffort = request.ReasoningEffort
		// o-series models use the "developer" role (except o1-mini and o1-preview)
		if !strings.HasPrefix(info.UpstreamModelName, "o1-mini") && !strings.HasPrefix(info.UpstreamModelName, "o1-preview") {
			// Rewrite the first message's role from "system" to "developer"
			if len(request.Messages) > 0 && request.Messages[0].Role == "system" {
				request.Messages[0].Role = "developer"
			}
		}
	}
	return request, nil
}

func (a *Adaptor) ConvertRerankRequest(c *gin.Context, relayMode int, request dto.RerankRequest) (any, error) {
	return request, nil
}

func (a *Adaptor) ConvertEmbeddingRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.EmbeddingRequest) (any, error) {
	return request, nil
}
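
// ConvertAudioRequest serializes speech requests as JSON and rebuilds
// transcription/translation requests as multipart forms, copying the uploaded
// file and all remaining form fields while logging a curl-like trace for
// debugging.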
func (a *Adaptor) ConvertAudioRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.AudioRequest) (io.Reader, error) {
	a.ResponseFormat = request.ResponseFormat
	if info.RelayMode == relayconstant.RelayModeAudioSpeech {
		jsonData, err := json.Marshal(request)
		if err != nil {
			return nil, fmt.Errorf("error marshalling object: %w", err)
		}
		return bytes.NewReader(jsonData), nil
	} else {
		var requestBody bytes.Buffer
		writer := multipart.NewWriter(&requestBody)
		writer.WriteField("model", request.Model)
		formData, err2 := common.ParseMultipartFormReusable(c)
		if err2 != nil {
			return nil, fmt.Errorf("error parsing multipart form: %w", err2)
		}
		// Log the request in a curl-like command format
		logger.LogDebug(c.Request.Context(), fmt.Sprintf("--form 'model=\"%s\"'", request.Model))
		// Iterate over the form fields, copying and logging each one
		for key, values := range formData.Value {
			if key == "model" {
				continue
			}
			for _, value := range values {
				writer.WriteField(key, value)
				logger.LogDebug(c.Request.Context(), fmt.Sprintf("--form '%s=\"%s\"'", key, value))
			}
		}
		// Fetch the uploaded file from formData
		fileHeaders := formData.File["file"]
		if len(fileHeaders) == 0 {
			return nil, errors.New("file is required")
		}
		// Use the first file from formData
		fileHeader := fileHeaders[0]
		logger.LogDebug(c.Request.Context(), fmt.Sprintf("--form 'file=@\"%s\"' (size: %d bytes, content-type: %s)",
			fileHeader.Filename, fileHeader.Size, fileHeader.Header.Get("Content-Type")))
		file, err := fileHeader.Open()
		if err != nil {
			return nil, fmt.Errorf("error opening audio file: %v", err)
		}
		defer file.Close()
		part, err := writer.CreateFormFile("file", fileHeader.Filename)
		if err != nil {
			return nil, errors.New("create form file failed")
		}
		if _, err := io.Copy(part, file); err != nil {
			return nil, errors.New("copy file failed")
		}
		// Close the multipart writer to finalize the boundary
		writer.Close()
		c.Request.Header.Set("Content-Type", writer.FormDataContentType())
		logger.LogDebug(c.Request.Context(), fmt.Sprintf("--header 'Content-Type: %s'", writer.FormDataContentType()))
		return &requestBody, nil
	}
}
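
// ConvertImageRequest rebuilds image-edit requests as a multipart form,
// forwarding every non-file field plus the image files (single "image",
// "image[]" array, or indexed "image[...]" fields) and an optional mask with
// detected MIME types. Other image relay modes are passed through unchanged.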
func (a *Adaptor) ConvertImageRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.ImageRequest) (any, error) {
	switch info.RelayMode {
	case relayconstant.RelayModeImagesEdits:
		var requestBody bytes.Buffer
		writer := multipart.NewWriter(&requestBody)
		writer.WriteField("model", request.Model)
		// Reuse the already-parsed multipart form to avoid parsing it twice
		mf := c.Request.MultipartForm
		if mf == nil {
			if _, err := c.MultipartForm(); err != nil {
				return nil, errors.New("failed to parse multipart form")
			}
			mf = c.Request.MultipartForm
		}
		// Write all non-file fields
		if mf != nil {
			for key, values := range mf.Value {
				if key == "model" {
					continue
				}
				for _, value := range values {
					writer.WriteField(key, value)
				}
			}
		}
		if mf != nil && mf.File != nil {
			// Check if "image" field exists in any form, including array notation
			var imageFiles []*multipart.FileHeader
			var exists bool
			// First check for standard "image" field
			if imageFiles, exists = mf.File["image"]; !exists || len(imageFiles) == 0 {
				// If not found, check for "image[]" field
				if imageFiles, exists = mf.File["image[]"]; !exists || len(imageFiles) == 0 {
					// If still not found, iterate through all fields to find any that start with "image["
					foundArrayImages := false
					for fieldName, files := range mf.File {
						if strings.HasPrefix(fieldName, "image[") && len(files) > 0 {
							foundArrayImages = true
							imageFiles = append(imageFiles, files...)
						}
					}
					// If no image fields found at all
					if !foundArrayImages && (len(imageFiles) == 0) {
						return nil, errors.New("image is required")
					}
				}
			}
			// Process all image files
			for i, fileHeader := range imageFiles {
				file, err := fileHeader.Open()
				if err != nil {
					return nil, fmt.Errorf("failed to open image file %d: %w", i, err)
				}
				// If multiple images, use image[] as the field name
				fieldName := "image"
				if len(imageFiles) > 1 {
					fieldName = "image[]"
				}
				// Determine MIME type based on file extension
				mimeType := detectImageMimeType(fileHeader.Filename)
				// Create a form file with the appropriate content type
				h := make(textproto.MIMEHeader)
				h.Set("Content-Disposition", fmt.Sprintf(`form-data; name="%s"; filename="%s"`, fieldName, fileHeader.Filename))
				h.Set("Content-Type", mimeType)
				part, err := writer.CreatePart(h)
				if err != nil {
					return nil, fmt.Errorf("create form part failed for image %d: %w", i, err)
				}
				if _, err := io.Copy(part, file); err != nil {
					return nil, fmt.Errorf("copy file failed for image %d: %w", i, err)
				}
				// Close immediately after copying instead of deferring inside the loop
				_ = file.Close()
			}
			// Handle mask file if present
			if maskFiles, exists := mf.File["mask"]; exists && len(maskFiles) > 0 {
				maskFile, err := maskFiles[0].Open()
				if err != nil {
					return nil, errors.New("failed to open mask file")
				}
				// The mask file is closed right after the copy below instead of being deferred
				// Determine MIME type for mask file
				mimeType := detectImageMimeType(maskFiles[0].Filename)
				// Create a form file with the appropriate content type
				h := make(textproto.MIMEHeader)
				h.Set("Content-Disposition", fmt.Sprintf(`form-data; name="mask"; filename="%s"`, maskFiles[0].Filename))
				h.Set("Content-Type", mimeType)
				maskPart, err := writer.CreatePart(h)
				if err != nil {
					return nil, errors.New("create form file failed for mask")
				}
				if _, err := io.Copy(maskPart, maskFile); err != nil {
					return nil, errors.New("copy mask file failed")
				}
				_ = maskFile.Close()
			}
		} else {
			return nil, errors.New("no multipart form data found")
		}
		// Close the multipart writer to finalize the boundary
		writer.Close()
		c.Request.Header.Set("Content-Type", writer.FormDataContentType())
		return &requestBody, nil
	default:
		return request, nil
	}
}

// detectImageMimeType determines the MIME type based on the file extension
func detectImageMimeType(filename string) string {
	ext := strings.ToLower(filepath.Ext(filename))
	switch ext {
	case ".jpg", ".jpeg":
		return "image/jpeg"
	case ".png":
		return "image/png"
	case ".webp":
		return "image/webp"
	default:
		// Try to detect from extension if possible
		if strings.HasPrefix(ext, ".jp") {
			return "image/jpeg"
		}
		// Default to png as a fallback
		return "image/png"
	}
}
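
// ConvertOpenAIResponsesRequest maps a reasoning-effort suffix on the model
// name (e.g. "-high") onto the Responses API reasoning.effort field and
// restores the original model name.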
func (a *Adaptor) ConvertOpenAIResponsesRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.OpenAIResponsesRequest) (any, error) {
	// Convert the reasoning-effort suffix on the model name
	effort, originModel := parseReasoningEffortFromModelSuffix(request.Model)
	if effort != "" {
		if request.Reasoning == nil {
			request.Reasoning = &dto.Reasoning{
				Effort: effort,
			}
		} else {
			request.Reasoning.Effort = effort
		}
		request.Model = originModel
	}
	return request, nil
}
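
// DoRequest dispatches the converted request: a multipart form upload for
// transcription, translation and image edits, a WebSocket session for
// realtime, and a plain HTTP API request otherwise.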
func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, requestBody io.Reader) (any, error) {
	if info.RelayMode == relayconstant.RelayModeAudioTranscription ||
		info.RelayMode == relayconstant.RelayModeAudioTranslation ||
		info.RelayMode == relayconstant.RelayModeImagesEdits {
		return channel.DoFormRequest(a, c, info, requestBody)
	} else if info.RelayMode == relayconstant.RelayModeRealtime {
		return channel.DoWssRequest(a, c, info, requestBody)
	} else {
		return channel.DoApiRequest(a, c, info, requestBody)
	}
}
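
// DoResponse picks the response handler that matches the relay mode
// (realtime, TTS, STT, images, rerank, responses, or chat completions) and
// returns the extracted usage together with any relay error.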
func (a *Adaptor) DoResponse(c *gin.Context, resp *http.Response, info *relaycommon.RelayInfo) (usage any, err *types.NewAPIError) {
	switch info.RelayMode {
	case relayconstant.RelayModeRealtime:
		err, usage = OpenaiRealtimeHandler(c, info)
	case relayconstant.RelayModeAudioSpeech:
		usage = OpenaiTTSHandler(c, resp, info)
	case relayconstant.RelayModeAudioTranslation:
		fallthrough
	case relayconstant.RelayModeAudioTranscription:
		err, usage = OpenaiSTTHandler(c, resp, info, a.ResponseFormat)
	case relayconstant.RelayModeImagesGenerations, relayconstant.RelayModeImagesEdits:
		usage, err = OpenaiHandlerWithUsage(c, info, resp)
	case relayconstant.RelayModeRerank:
		usage, err = common_handler.RerankHandler(c, info, resp)
	case relayconstant.RelayModeResponses:
		if info.IsStream {
			usage, err = OaiResponsesStreamHandler(c, info, resp)
		} else {
			usage, err = OaiResponsesHandler(c, info, resp)
		}
	default:
		if info.IsStream {
			usage, err = OaiStreamHandler(c, info, resp)
		} else {
			usage, err = OpenaiHandler(c, info, resp)
		}
	}
	return
}
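
// GetModelList returns the built-in model list for vendor-specific channels
// (360, LingYiWanWu, Xinference, OpenRouter) and falls back to the default
// OpenAI model list.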
func (a *Adaptor) GetModelList() []string {
	switch a.ChannelType {
	case constant.ChannelType360:
		return ai360.ModelList
	case constant.ChannelTypeLingYiWanWu:
		return lingyiwanwu.ModelList
	//case constant.ChannelTypeMiniMax:
	//	return minimax.ModelList
	case constant.ChannelTypeXinference:
		return xinference.ModelList
	case constant.ChannelTypeOpenRouter:
		return openrouter.ModelList
	default:
		return ModelList
	}
}
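
// GetChannelName returns the display name of the underlying channel vendor,
// falling back to the default OpenAI channel name.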
func (a *Adaptor) GetChannelName() string {
	switch a.ChannelType {
	case constant.ChannelType360:
		return ai360.ChannelName
	case constant.ChannelTypeLingYiWanWu:
		return lingyiwanwu.ChannelName
	//case constant.ChannelTypeMiniMax:
	//	return minimax.ChannelName
	case constant.ChannelTypeXinference:
		return xinference.ChannelName
	case constant.ChannelTypeOpenRouter:
		return openrouter.ChannelName
	default:
		return ChannelName
	}
}