error.go

package ali

import (
	"net/http"
	"strings"

	"github.com/labring/aiproxy/core/relay/adaptor"
	"github.com/labring/aiproxy/core/relay/adaptor/openai"
	relaymodel "github.com/labring/aiproxy/core/relay/model"
)

// ErrorHanlder maps Ali (DashScope) error responses onto OpenAI-style errors
// with more accurate HTTP status codes.
//
// Error code reference: https://help.aliyun.com/zh/model-studio/error-code?userCode=okjhlpr5
func ErrorHanlder(resp *http.Response) adaptor.Error {
	statusCode, openAIError := openai.GetError(resp)

	// Example upstream payload:
	// {"error":{"code":"ServiceUnavailable","message":"<503> InternalError.Algo: An error occurred in model serving, error message is: [Too many requests. Your requests are being throttled due to system capacity limits. Please try again later.]","type":"ServiceUnavailable"}}
	switch openAIError.Type {
	case "ServiceUnavailable":
		statusCode = http.StatusServiceUnavailable
		openAIError.Type = relaymodel.ErrorTypeUpstream
	case "RequestTimeOut":
		statusCode = http.StatusRequestTimeout
		openAIError.Type = relaymodel.ErrorTypeUpstream
	}

	// Messages containing "object is not iterable" are treated as client-side
	// bad requests rather than upstream failures.
	if strings.Contains(openAIError.Message, "object is not iterable") {
		statusCode = http.StatusBadRequest
	}

	return relaymodel.NewOpenAIError(statusCode, openAIError)
}
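
For illustration, here is a minimal sketch of exercising ErrorHanlder from a test in the same package, feeding it a shortened version of the ServiceUnavailable payload quoted in the comment above. The test name and the assumption that openai.GetError surfaces the Type field from this OpenAI-style error body are mine, not part of the repository; the result is only logged rather than asserted because the concrete fields of adaptor.Error are not shown in this file.

package ali

import (
	"io"
	"net/http"
	"strings"
	"testing"
)

func TestErrorHanlderServiceUnavailable(t *testing.T) {
	// Shortened form of the example payload documented in error.go.
	body := `{"error":{"code":"ServiceUnavailable","message":"<503> InternalError.Algo: Too many requests.","type":"ServiceUnavailable"}}`

	resp := &http.Response{
		StatusCode: http.StatusInternalServerError,
		Header:     http.Header{"Content-Type": []string{"application/json"}},
		Body:       io.NopCloser(strings.NewReader(body)),
	}

	// If openai.GetError reports Type "ServiceUnavailable", the handler is
	// expected to rewrite the status to 503 and re-tag the error as upstream.
	err := ErrorHanlder(resp)
	t.Logf("mapped error: %v", err)
}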