responses_handler.go

package relay

import (
	"bytes"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"net/http"
	"one-api/common"
	"one-api/dto"
	relaycommon "one-api/relay/common"
	"one-api/relay/helper"
	"one-api/service"
	"one-api/setting"
	"one-api/setting/model_setting"
	"strings"

	"github.com/gin-gonic/gin"
)
// getAndValidateResponsesRequest parses the request body into an
// OpenAIResponsesRequest and rejects requests missing a model or input.
func getAndValidateResponsesRequest(c *gin.Context) (*dto.OpenAIResponsesRequest, error) {
	request := &dto.OpenAIResponsesRequest{}
	err := common.UnmarshalBodyReusable(c, request)
	if err != nil {
		return nil, err
	}
	if request.Model == "" {
		return nil, errors.New("model is required")
	}
	if len(request.Input) == 0 {
		return nil, errors.New("input is required")
	}
	return request, nil
}
// checkInputSensitive runs the configured sensitive-word check against the
// request input and returns any matched words.
func checkInputSensitive(textRequest *dto.OpenAIResponsesRequest, info *relaycommon.RelayInfo) ([]string, error) {
	sensitiveWords, err := service.CheckSensitiveInput(textRequest.Input)
	return sensitiveWords, err
}

// getInputTokens counts the prompt tokens in the request input and records
// the result on the relay info.
func getInputTokens(req *dto.OpenAIResponsesRequest, info *relaycommon.RelayInfo) int {
	inputTokens := service.CountTokenInput(req.Input, req.Model)
	info.PromptTokens = inputTokens
	return inputTokens
}
// ResponsesHelper handles an OpenAI Responses API relay request end to end:
// validation, sensitive-word checking, model mapping, pricing, quota
// pre-consumption, forwarding through the channel adaptor, and settling
// quota from the returned usage.
func ResponsesHelper(c *gin.Context) (openaiErr *dto.OpenAIErrorWithStatusCode) {
	req, err := getAndValidateResponsesRequest(c)
	if err != nil {
		common.LogError(c, fmt.Sprintf("getAndValidateResponsesRequest error: %s", err.Error()))
		return service.OpenAIErrorWrapperLocal(err, "invalid_responses_request", http.StatusBadRequest)
	}

	relayInfo := relaycommon.GenRelayInfoResponses(c, req)

	if setting.ShouldCheckPromptSensitive() {
		sensitiveWords, err := checkInputSensitive(req, relayInfo)
		if err != nil {
			common.LogWarn(c, fmt.Sprintf("user sensitive words detected: %s", strings.Join(sensitiveWords, ", ")))
			return service.OpenAIErrorWrapperLocal(err, "check_request_sensitive_error", http.StatusBadRequest)
		}
	}

	err = helper.ModelMappedHelper(c, relayInfo, req)
	if err != nil {
		return service.OpenAIErrorWrapperLocal(err, "model_mapped_error", http.StatusBadRequest)
	}

	// reuse a previously counted prompt token value if one is already on the context
	if value, exists := c.Get("prompt_tokens"); exists {
		promptTokens := value.(int)
		relayInfo.SetPromptTokens(promptTokens)
	} else {
		promptTokens := getInputTokens(req, relayInfo)
		c.Set("prompt_tokens", promptTokens)
	}

	priceData, err := helper.ModelPriceHelper(c, relayInfo, relayInfo.PromptTokens, int(req.MaxOutputTokens))
	if err != nil {
		return service.OpenAIErrorWrapperLocal(err, "model_price_error", http.StatusInternalServerError)
	}

	// pre consume quota
	preConsumedQuota, userQuota, openaiErr := preConsumeQuota(c, priceData.ShouldPreConsumedQuota, relayInfo)
	if openaiErr != nil {
		return openaiErr
	}
	defer func() {
		// refund pre-consumed quota if the relay fails after this point
		if openaiErr != nil {
			returnPreConsumedQuota(c, relayInfo, userQuota, preConsumedQuota)
		}
	}()

	adaptor := GetAdaptor(relayInfo.ApiType)
	if adaptor == nil {
		return service.OpenAIErrorWrapperLocal(fmt.Errorf("invalid api type: %d", relayInfo.ApiType), "invalid_api_type", http.StatusBadRequest)
	}
	adaptor.Init(relayInfo)

	var requestBody io.Reader
	if model_setting.GetGlobalSettings().PassThroughRequestEnabled {
		// pass the original request body through unchanged
		body, err := common.GetRequestBody(c)
		if err != nil {
			return service.OpenAIErrorWrapperLocal(err, "get_request_body_error", http.StatusInternalServerError)
		}
		requestBody = bytes.NewBuffer(body)
	} else {
		convertedRequest, err := adaptor.ConvertOpenAIResponsesRequest(c, relayInfo, *req)
		if err != nil {
			return service.OpenAIErrorWrapperLocal(err, "convert_request_error", http.StatusBadRequest)
		}
		jsonData, err := json.Marshal(convertedRequest)
		if err != nil {
			return service.OpenAIErrorWrapperLocal(err, "marshal_request_error", http.StatusInternalServerError)
		}

		// apply param override
		if len(relayInfo.ParamOverride) > 0 {
			reqMap := make(map[string]interface{})
			err = json.Unmarshal(jsonData, &reqMap)
			if err != nil {
				return service.OpenAIErrorWrapperLocal(err, "param_override_unmarshal_failed", http.StatusInternalServerError)
			}
			for key, value := range relayInfo.ParamOverride {
				reqMap[key] = value
			}
			jsonData, err = json.Marshal(reqMap)
			if err != nil {
				return service.OpenAIErrorWrapperLocal(err, "param_override_marshal_failed", http.StatusInternalServerError)
			}
		}

		if common.DebugEnabled {
			println("requestBody: ", string(jsonData))
		}
		requestBody = bytes.NewBuffer(jsonData)
	}

	var httpResp *http.Response
	resp, err := adaptor.DoRequest(c, relayInfo, requestBody)
	if err != nil {
		return service.OpenAIErrorWrapper(err, "do_request_failed", http.StatusInternalServerError)
	}

	statusCodeMappingStr := c.GetString("status_code_mapping")
	if resp != nil {
		httpResp = resp.(*http.Response)
		if httpResp.StatusCode != http.StatusOK {
			openaiErr = service.RelayErrorHandler(httpResp, false)
			// reset status code according to the channel's mapping
			service.ResetStatusCode(openaiErr, statusCodeMappingStr)
			return openaiErr
		}
	}

	usage, openaiErr := adaptor.DoResponse(c, httpResp, relayInfo)
	if openaiErr != nil {
		// reset status code according to the channel's mapping
		service.ResetStatusCode(openaiErr, statusCodeMappingStr)
		return openaiErr
	}

	if strings.HasPrefix(relayInfo.OriginModelName, "gpt-4o-audio") {
		service.PostAudioConsumeQuota(c, relayInfo, usage.(*dto.Usage), preConsumedQuota, userQuota, priceData, "")
	} else {
		postConsumeQuota(c, relayInfo, usage.(*dto.Usage), preConsumedQuota, userQuota, priceData, "")
	}
	return nil
}
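
// Usage sketch (an assumption, not part of the original file): ResponsesHelper
// is intended to be invoked from the HTTP controller that owns the gin route.
// A minimal caller, assuming dto.OpenAIErrorWithStatusCode exposes StatusCode
// and Error fields, might look roughly like:
//
//	func relayResponses(c *gin.Context) {
//		if relayErr := ResponsesHelper(c); relayErr != nil {
//			c.JSON(relayErr.StatusCode, gin.H{"error": relayErr.Error})
//		}
//	}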