
refactor: adaptor error handler and result (#215)

zijiren 7 months ago
Parent
Commit
5431d0d20b
89 changed files with 1047 additions and 956 deletions
  1. core/controller/channel-test.go (+4 -2)
  2. core/controller/relay-controller.go (+71 -50)
  3. core/controller/relay-model.go (+4 -4)
  4. core/middleware/auth.go (+1 -3)
  5. core/middleware/ctxkey.go (+1 -0)
  6. core/middleware/distributor.go (+12 -37)
  7. core/middleware/mcp.go (+1 -3)
  8. core/middleware/utils.go (+23 -29)
  9. core/relay/adaptor/ali/adaptor.go (+7 -7)
  10. core/relay/adaptor/ali/embeddings.go (+12 -11)
  11. core/relay/adaptor/ali/image.go (+18 -20)
  12. core/relay/adaptor/ali/rerank.go (+13 -8)
  13. core/relay/adaptor/ali/stt-realtime.go (+22 -18)
  14. core/relay/adaptor/ali/tts.go (+16 -12)
  15. core/relay/adaptor/anthropic/adaptor.go (+12 -9)
  16. core/relay/adaptor/anthropic/error.go (+47 -22)
  17. core/relay/adaptor/anthropic/main.go (+17 -12)
  18. core/relay/adaptor/anthropic/model.go (+0 -11)
  19. core/relay/adaptor/anthropic/openai.go (+7 -6)
  20. core/relay/adaptor/aws/adaptor.go (+5 -8)
  21. core/relay/adaptor/aws/claude/adapter.go (+9 -6)
  22. core/relay/adaptor/aws/claude/main.go (+17 -16)
  23. core/relay/adaptor/aws/llama3/adapter.go (+9 -6)
  24. core/relay/adaptor/aws/llama3/main.go (+14 -13)
  25. core/relay/adaptor/aws/utils/adaptor.go (+3 -4)
  26. core/relay/adaptor/aws/utils/utils.go (+0 -16)
  27. core/relay/adaptor/baidu/adaptor.go (+5 -5)
  28. core/relay/adaptor/baidu/embeddings.go (+6 -6)
  29. core/relay/adaptor/baidu/error.go (+8 -8)
  30. core/relay/adaptor/baidu/image.go (+5 -5)
  31. core/relay/adaptor/baidu/main.go (+13 -9)
  32. core/relay/adaptor/baidu/rerank.go (+6 -6)
  33. core/relay/adaptor/baiduv2/adaptor.go (+5 -5)
  34. core/relay/adaptor/cohere/adaptor.go (+11 -8)
  35. core/relay/adaptor/cohere/main.go (+6 -5)
  36. core/relay/adaptor/coze/adaptor.go (+12 -9)
  37. core/relay/adaptor/coze/main.go (+6 -5)
  38. core/relay/adaptor/doc2x/adaptor.go (+4 -6)
  39. core/relay/adaptor/doc2x/pdf.go (+15 -11)
  40. core/relay/adaptor/doubao/main.go (+14 -11)
  41. core/relay/adaptor/doubaoaudio/main.go (+5 -6)
  42. core/relay/adaptor/doubaoaudio/tts.go (+15 -11)
  43. core/relay/adaptor/gemini/adaptor.go (+5 -6)
  44. core/relay/adaptor/gemini/embeddings.go (+13 -9)
  45. core/relay/adaptor/gemini/main.go (+16 -12)
  46. core/relay/adaptor/interface.go (+58 -26)
  47. core/relay/adaptor/jina/adaptor.go (+3 -4)
  48. core/relay/adaptor/jina/embeddings.go (+9 -5)
  49. core/relay/adaptor/jina/error.go (+11 -14)
  50. core/relay/adaptor/jina/rerank.go (+10 -10)
  51. core/relay/adaptor/minimax/adaptor.go (+2 -4)
  52. core/relay/adaptor/minimax/tts.go (+15 -10)
  53. core/relay/adaptor/ollama/adaptor.go (+6 -7)
  54. core/relay/adaptor/ollama/error.go (+6 -6)
  55. core/relay/adaptor/ollama/main.go (+27 -19)
  56. core/relay/adaptor/openai/adaptor.go (+16 -13)
  57. core/relay/adaptor/openai/balance.go (+20 -5)
  58. core/relay/adaptor/openai/embeddings.go (+9 -5)
  59. core/relay/adaptor/openai/error.go (+45 -79)
  60. core/relay/adaptor/openai/image.go (+30 -21)
  61. core/relay/adaptor/openai/main.go (+12 -11)
  62. core/relay/adaptor/openai/model.go (+0 -16)
  63. core/relay/adaptor/openai/moderations.go (+6 -5)
  64. core/relay/adaptor/openai/rerank.go (+13 -8)
  65. core/relay/adaptor/openai/stt.go (+23 -18)
  66. core/relay/adaptor/openai/tts.go (+16 -12)
  67. core/relay/adaptor/openai/util.go (+0 -28)
  68. core/relay/adaptor/openrouter/adaptor.go (+2 -2)
  69. core/relay/adaptor/siliconflow/adaptor.go (+1 -2)
  70. core/relay/adaptor/stepfun/adaptor.go (+1 -2)
  71. core/relay/adaptor/text-embeddings-inference/adaptor.go (+5 -5)
  72. core/relay/adaptor/text-embeddings-inference/embeddings.go (+2 -2)
  73. core/relay/adaptor/text-embeddings-inference/error.go (+8 -22)
  74. core/relay/adaptor/text-embeddings-inference/rerank.go (+19 -16)
  75. core/relay/adaptor/text-embeddings-inference/rerank_test.go (+5 -5)
  76. core/relay/adaptor/vertexai/adaptor.go (+5 -6)
  77. core/relay/adaptor/vertexai/claude/adapter.go (+12 -9)
  78. core/relay/adaptor/vertexai/gemini/adapter.go (+3 -4)
  79. core/relay/adaptor/vertexai/registry.go (+3 -4)
  80. core/relay/adaptor/xai/adaptor.go (+2 -2)
  81. core/relay/adaptor/xai/error.go (+6 -6)
  82. core/relay/adaptor/xunfei/adaptor.go (+2 -7)
  83. core/relay/adaptor/zhipu/adaptor.go (+1 -2)
  84. core/relay/adaptor/zhipu/main.go (+8 -3)
  85. core/relay/controller/dohelper.go (+26 -25)
  86. core/relay/controller/handle.go (+1 -2)
  87. core/relay/model/anthropic.go (+30 -0)
  88. core/relay/model/chat.go (+18 -28)
  89. core/relay/model/errors.go (+35 -0)

+ 4 - 2
core/controller/channel-test.go

@@ -16,6 +16,7 @@ import (
 	"time"
 
 	"github.com/gin-gonic/gin"
+	"github.com/labring/aiproxy/core/common/conv"
 	"github.com/labring/aiproxy/core/common/notify"
 	"github.com/labring/aiproxy/core/common/render"
 	"github.com/labring/aiproxy/core/common/trylock"
@@ -115,8 +116,9 @@ func testSingleModel(mc *model.ModelCaches, channel *model.Channel, modelName st
 		}
 		code = w.Code
 	} else {
-		respStr = result.Error.JSONOrEmpty()
-		code = result.Error.StatusCode
+		respBody, _ := result.Error.MarshalJSON()
+		respStr = conv.BytesToString(respBody)
+		code = result.Error.StatusCode()
 	}
 
 	return channel.UpdateModelTest(
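
The calls introduced above (result.Error.MarshalJSON(), result.Error.StatusCode()) are the first sighting of the new adaptor.Error type that replaces *relaymodel.ErrorWithStatusCode throughout this commit. Its definition lives in core/relay/adaptor/interface.go (+58 -26), which is not expanded on this page; the following is a minimal sketch of the contract implied by the call sites, not the authoritative declaration:

```go
// Sketch only: inferred from call sites in this commit, not copied from
// core/relay/adaptor/interface.go (whose diff is collapsed here).
package adaptor

import "encoding/json"

// Error couples a provider-specific JSON error body with the HTTP status
// the relay should return, replacing the old ErrorWithStatusCode struct.
type Error interface {
	json.Marshaler // MarshalJSON() ([]byte, error): the serialized error body
	StatusCode() int
}
```

Constructors seen later in the diff (relaymodel.NewOpenAIError, relaymodel.NewAnthropicError, relaymodel.WrapperOpenAIError / WrapperOpenAIErrorWithMessage) all return values satisfying this contract.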

+ 71 - 50
core/controller/relay-controller.go

@@ -12,10 +12,13 @@ import (
 	"strconv"
 	"time"
 
+	"github.com/bytedance/sonic"
+	"github.com/bytedance/sonic/ast"
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/common"
 	"github.com/labring/aiproxy/core/common/config"
 	"github.com/labring/aiproxy/core/common/consume"
+	"github.com/labring/aiproxy/core/common/conv"
 	"github.com/labring/aiproxy/core/common/notify"
 	"github.com/labring/aiproxy/core/common/reqlimit"
 	"github.com/labring/aiproxy/core/common/trylock"
@@ -23,7 +26,6 @@ import (
 	"github.com/labring/aiproxy/core/model"
 	"github.com/labring/aiproxy/core/monitor"
 	"github.com/labring/aiproxy/core/relay/adaptor"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/adaptors"
 	"github.com/labring/aiproxy/core/relay/controller"
 	"github.com/labring/aiproxy/core/relay/meta"
@@ -46,9 +48,7 @@ type RelayController struct {
 	Handler         RelayHandler
 }
 
-var ErrInvalidChannelTypeCode = "invalid_channel_type"
-
-type warpAdaptor struct {
+type wrapAdaptor struct {
 	adaptor.Adaptor
 }
 
@@ -101,7 +101,7 @@ func updateChannelModelTokensRequestRate(c *gin.Context, meta *meta.Meta, tpm, t
 	log.Data["ch_tps"] = tps
 }
 
-func (w *warpAdaptor) DoRequest(meta *meta.Meta, c *gin.Context, req *http.Request) (*http.Response, error) {
+func (w *wrapAdaptor) DoRequest(meta *meta.Meta, c *gin.Context, req *http.Request) (*http.Response, error) {
 	count, overLimitCount, secondCount := reqlimit.PushChannelModelRequest(
 		context.Background(),
 		strconv.Itoa(meta.Channel.ID),
@@ -111,7 +111,7 @@ func (w *warpAdaptor) DoRequest(meta *meta.Meta, c *gin.Context, req *http.Reque
 	return w.Adaptor.DoRequest(meta, c, req)
 }
 
-func (w *warpAdaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func (w *wrapAdaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	usage, relayErr := w.Adaptor.DoResponse(meta, c, resp)
 	if usage == nil {
 		return nil, relayErr
@@ -155,15 +155,15 @@ func relayHandler(c *gin.Context, meta *meta.Meta) *controller.HandleResult {
 	adaptor, ok := adaptors.GetAdaptor(meta.Channel.Type)
 	if !ok {
 		return &controller.HandleResult{
-			Error: openai.ErrorWrapperWithMessage(
+			Error: relaymodel.WrapperOpenAIErrorWithMessage(
 				fmt.Sprintf("invalid channel type: %d", meta.Channel.Type),
-				ErrInvalidChannelTypeCode,
+				"invalid_channel_type",
 				http.StatusInternalServerError,
 			),
 		}
 	}
 
-	return controller.Handle(&warpAdaptor{adaptor}, c, meta)
+	return controller.Handle(&wrapAdaptor{adaptor}, c, meta)
 }
 
 func relayController(m mode.Mode) RelayController {
@@ -220,9 +220,9 @@ func RelayHelper(c *gin.Context, meta *meta.Meta, handel RelayHandler) (*control
 		}
 		return result, false
 	}
-	shouldRetry := shouldRetry(c, *result.Error)
+	shouldRetry := shouldRetry(c, result.Error)
 	if shouldRetry {
-		hasPermission := channelHasPermission(*result.Error)
+		hasPermission := channelHasPermission(result.Error)
 		beyondThreshold, banExecution, err := monitor.AddRequest(
 			context.Background(),
 			meta.OriginModel,
@@ -236,17 +236,17 @@ func RelayHelper(c *gin.Context, meta *meta.Meta, handel RelayHandler) (*control
 		}
 		switch {
 		case banExecution:
-			notifyChannelIssue(c, meta, "autoBanned", "Auto Banned", *result.Error)
+			notifyChannelIssue(c, meta, "autoBanned", "Auto Banned", result.Error)
 		case beyondThreshold:
-			notifyChannelIssue(c, meta, "beyondThreshold", "Error Rate Beyond Threshold", *result.Error)
+			notifyChannelIssue(c, meta, "beyondThreshold", "Error Rate Beyond Threshold", result.Error)
 		case !hasPermission:
-			notifyChannelIssue(c, meta, "channelHasPermission", "No Permission", *result.Error)
+			notifyChannelIssue(c, meta, "channelHasPermission", "No Permission", result.Error)
 		}
 	}
 	return result, shouldRetry
 }
 
-func notifyChannelIssue(c *gin.Context, meta *meta.Meta, issueType string, titleSuffix string, err relaymodel.ErrorWithStatusCode) {
+func notifyChannelIssue(c *gin.Context, meta *meta.Meta, issueType string, titleSuffix string, err adaptor.Error) {
 	var notifyFunc func(title string, message string)
 
 	lockKey := fmt.Sprintf("%s:%d:%s", issueType, meta.Channel.ID, meta.OriginModel)
@@ -261,6 +261,8 @@ func notifyChannelIssue(c *gin.Context, meta *meta.Meta, issueType string, title
 		}
 	}
 
+	respBody, _ := err.MarshalJSON()
+
 	message := fmt.Sprintf(
 		"channel: %s (type: %d, type name: %s, id: %d)\nmodel: %s\nmode: %s\nstatus code: %d\ndetail: %s\nrequest id: %s",
 		meta.Channel.Name,
@@ -269,12 +271,12 @@ func notifyChannelIssue(c *gin.Context, meta *meta.Meta, issueType string, title
 		meta.Channel.ID,
 		meta.OriginModel,
 		meta.Mode,
-		err.StatusCode,
-		err.JSONOrEmpty(),
+		err.StatusCode(),
+		conv.BytesToString(respBody),
 		meta.RequestID,
 	)
 
-	if err.StatusCode == http.StatusTooManyRequests {
+	if err.StatusCode() == http.StatusTooManyRequests {
 		if !trylock.Lock(lockKey, time.Minute) {
 			return
 		}
@@ -408,7 +410,7 @@ func relay(c *gin.Context, mode mode.Mode, relayController RelayController) {
 	// Get initial channel
 	initialChannel, err := getInitialChannel(c, requestModel, log)
 	if err != nil || initialChannel == nil || initialChannel.channel == nil {
-		middleware.AbortLogWithMessage(c,
+		middleware.AbortLogWithMessageWithMode(mode, c,
 			http.StatusServiceUnavailable,
 			"the upstream load is saturated, please try again later",
 		)
@@ -421,7 +423,7 @@ func relay(c *gin.Context, mode mode.Mode, relayController RelayController) {
 	if billingEnabled && relayController.GetRequestPrice != nil {
 		price, err = relayController.GetRequestPrice(c, mc)
 		if err != nil {
-			middleware.AbortLogWithMessage(c,
+			middleware.AbortLogWithMessageWithMode(mode, c,
 				http.StatusInternalServerError,
 				"get request price failed: "+err.Error(),
 			)
@@ -434,7 +436,7 @@ func relay(c *gin.Context, mode mode.Mode, relayController RelayController) {
 	if billingEnabled && relayController.GetRequestUsage != nil {
 		requestUsage, err := relayController.GetRequestUsage(c, mc)
 		if err != nil {
-			middleware.AbortLogWithMessage(c,
+			middleware.AbortLogWithMessageWithMode(mode, c,
 				http.StatusInternalServerError,
 				"get request usage failed: "+err.Error(),
 			)
@@ -442,12 +444,10 @@ func relay(c *gin.Context, mode mode.Mode, relayController RelayController) {
 		}
 		gbc := middleware.GetGroupBalanceConsumerFromContext(c)
 		if !gbc.CheckBalance(consume.CalculateAmount(http.StatusOK, requestUsage, price)) {
-			middleware.AbortLogWithMessage(c,
+			middleware.AbortLogWithMessageWithMode(mode, c,
 				http.StatusForbidden,
 				fmt.Sprintf("group (%s) balance not enough", gbc.Group),
-				&middleware.ErrorField{
-					Code: middleware.GroupBalanceNotEnough,
-				},
+				middleware.GroupBalanceNotEnough,
 			)
 			return
 		}
@@ -503,8 +503,9 @@ func recordResult(
 	code := http.StatusOK
 	content := ""
 	if result.Error != nil {
-		code = result.Error.StatusCode
-		content = result.Error.JSONOrEmpty()
+		code = result.Error.StatusCode()
+		respBody, _ := result.Error.MarshalJSON()
+		content = conv.BytesToString(respBody)
 	}
 
 	var detail *model.RequestDetail
@@ -606,15 +607,14 @@ func getInitialChannel(c *gin.Context, modelName string, log *log.Entry) (*initi
 	}, nil
 }
 
-func handleRelayResult(c *gin.Context, bizErr *relaymodel.ErrorWithStatusCode, retry bool, retryTimes int) (done bool) {
+func handleRelayResult(c *gin.Context, bizErr adaptor.Error, retry bool, retryTimes int) (done bool) {
 	if bizErr == nil {
 		return true
 	}
 	if !retry ||
 		retryTimes == 0 ||
 		c.Request.Context().Err() != nil {
-		bizErr.Error.Message = middleware.MessageWithRequestID(c, bizErr.Error.Message)
-		c.JSON(bizErr.StatusCode, bizErr)
+		ErrorWithRequestID(c, bizErr)
 		return true
 	}
 	return false
@@ -636,7 +636,7 @@ func initRetryState(retryTimes int, channel *initialChannel, meta *meta.Meta, re
 		state.exhausted = true
 	}
 
-	if !channelHasPermission(*result.Error) {
+	if !channelHasPermission(result.Error) {
 		state.ignoreChannelIDs = append(state.ignoreChannelIDs, int64(channel.channel.ID))
 	} else {
 		state.lastHasPermissionChannel = channel.channel
@@ -650,7 +650,7 @@ func retryLoop(c *gin.Context, mode mode.Mode, state *retryState, relayControlle
 	i := 0
 
 	for {
-		lastStatusCode := state.result.Error.StatusCode
+		lastStatusCode := state.result.Error.StatusCode()
 		lastChannelID := state.meta.Channel.ID
 		newChannel, err := getRetryChannel(state)
 		if err == nil {
@@ -734,8 +734,7 @@ func retryLoop(c *gin.Context, mode mode.Mode, state *retryState, relayControlle
 	}
 
 	if state.result.Error != nil {
-		state.result.Error.Error.Message = middleware.MessageWithRequestID(c, state.result.Error.Error.Message)
-		c.JSON(state.result.Error.StatusCode, state.result.Error)
+		ErrorWithRequestID(c, state.result.Error)
 	}
 }
 
@@ -776,7 +775,7 @@ func handleRetryResult(ctx *gin.Context, retry bool, newChannel *model.Channel,
 		return true
 	}
 
-	hasPermission := channelHasPermission(*state.result.Error)
+	hasPermission := channelHasPermission(state.result.Error)
 
 	if state.exhausted {
 		if !hasPermission {
@@ -802,11 +801,8 @@ var channelNoRetryStatusCodesMap = map[int]struct{}{
 }
 
 // Only record channel errors; user request parameter errors do not need recording
-func shouldRetry(_ *gin.Context, relayErr relaymodel.ErrorWithStatusCode) bool {
-	if relayErr.Error.Code == ErrInvalidChannelTypeCode {
-		return false
-	}
-	_, ok := channelNoRetryStatusCodesMap[relayErr.StatusCode]
+func shouldRetry(_ *gin.Context, relayErr adaptor.Error) bool {
+	_, ok := channelNoRetryStatusCodesMap[relayErr.StatusCode()]
 	return !ok
 }
 
@@ -817,11 +813,8 @@ var channelNoPermissionStatusCodesMap = map[int]struct{}{
 	http.StatusNotFound:        {},
 }
 
-func channelHasPermission(relayErr relaymodel.ErrorWithStatusCode) bool {
-	if relayErr.Error.Code == ErrInvalidChannelTypeCode {
-		return false
-	}
-	_, ok := channelNoPermissionStatusCodesMap[relayErr.StatusCode]
+func channelHasPermission(relayErr adaptor.Error) bool {
+	_, ok := channelNoPermissionStatusCodesMap[relayErr.StatusCode()]
 	return !ok
 }
 
@@ -843,11 +836,39 @@ func relayDelay() {
 }
 
 func RelayNotImplemented(c *gin.Context) {
-	c.JSON(http.StatusNotImplemented, gin.H{
-		"error": &relaymodel.Error{
+	ErrorWithRequestID(c,
+		relaymodel.NewOpenAIError(http.StatusNotImplemented, relaymodel.OpenAIError{
 			Message: "API not implemented",
-			Type:    middleware.ErrorTypeAIPROXY,
+			Type:    relaymodel.ErrorTypeAIPROXY,
 			Code:    "api_not_implemented",
-		},
-	})
+		}),
+	)
+}
+
+func ErrorWithRequestID(c *gin.Context, relayErr adaptor.Error) {
+	requestID := middleware.GetRequestID(c)
+	if requestID == "" {
+		c.JSON(relayErr.StatusCode(), relayErr)
+		return
+	}
+	log := middleware.GetLogger(c)
+	data, err := relayErr.MarshalJSON()
+	if err != nil {
+		log.Errorf("marshal error failed: %+v", err)
+		c.JSON(relayErr.StatusCode(), relayErr)
+		return
+	}
+	node, err := sonic.Get(data)
+	if err != nil {
+		log.Errorf("get node failed: %+v", err)
+		c.JSON(relayErr.StatusCode(), relayErr)
+		return
+	}
+	_, err = node.Set("aiproxy", ast.NewString(requestID))
+	if err != nil {
+		log.Errorf("set request id failed: %+v", err)
+		c.JSON(relayErr.StatusCode(), relayErr)
+		return
+	}
+	c.JSON(relayErr.StatusCode(), &node)
 }
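
ErrorWithRequestID replaces the old string concatenation (MessageWithRequestID appended "(aiproxy: <id>)" to the message) with a structured field: the serialized error is round-tripped through sonic's AST and the request ID is attached as a top-level "aiproxy" key. A standalone sketch of that round-trip, with an invented payload and request ID:

```go
// Standalone illustration of the sonic/ast technique ErrorWithRequestID uses.
// The payload and request id here are made up for the example.
package main

import (
	"fmt"

	"github.com/bytedance/sonic"
	"github.com/bytedance/sonic/ast"
)

func main() {
	data := []byte(`{"error":{"message":"boom","type":"aiproxy_error"}}`)

	// Parse the serialized error into a lazily-indexed AST node.
	node, err := sonic.Get(data)
	if err != nil {
		panic(err)
	}
	// Attach the request id as a sibling of "error" without reshaping the body.
	if _, err := node.Set("aiproxy", ast.NewString("req-123")); err != nil {
		panic(err)
	}

	out, _ := node.MarshalJSON()
	fmt.Println(string(out))
	// {"error":{"message":"boom","type":"aiproxy_error"},"aiproxy":"req-123"}
}
```

The fallbacks in ErrorWithRequestID (plain c.JSON on any marshal/parse failure) mean a malformed upstream error body degrades gracefully instead of dropping the response.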

+ 4 - 4
core/controller/relay-model.go

@@ -6,7 +6,7 @@ import (
 
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/middleware"
-	model "github.com/labring/aiproxy/core/relay/model"
+	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
 // ListModels godoc
@@ -65,8 +65,8 @@ func RetrieveModel(c *gin.Context) {
 	}
 
 	if !ok {
-		c.JSON(200, gin.H{
-			"error": &model.Error{
+		c.JSON(http.StatusNotFound, gin.H{
+			"error": &relaymodel.OpenAIError{
 				Message: fmt.Sprintf("the model '%s' does not exist", modelName),
 				Type:    "invalid_request_error",
 				Param:   "model",
@@ -76,7 +76,7 @@ func RetrieveModel(c *gin.Context) {
 		return
 	}
 
-	c.JSON(200, &OpenAIModels{
+	c.JSON(http.StatusOK, &OpenAIModels{
 		ID:         modelName,
 		Object:     "model",
 		Created:    1626777600,

+ 1 - 3
core/middleware/auth.go

@@ -88,9 +88,7 @@ func TokenAuth(c *gin.Context) {
 		var err error
 		token, err = model.ValidateAndGetToken(key)
 		if err != nil {
-			AbortLogWithMessage(c, http.StatusUnauthorized, err.Error(), &ErrorField{
-				Code: "invalid_token",
-			})
+			AbortLogWithMessage(c, http.StatusUnauthorized, err.Error(), "invalid_token")
 			return
 		}
 	}

+ 1 - 0
core/middleware/ctxkey.go

@@ -16,4 +16,5 @@ const (
 	RequestID          = "request_id"
 	ModelCaches        = "model_caches"
 	ModelConfig        = "model_config"
+	Mode               = "mode"
 )

+ 12 - 37
core/middleware/distributor.go

@@ -235,9 +235,7 @@ func checkGroupBalance(c *gin.Context, group *model.GroupCache) bool {
 	gbc, err := GetGroupBalanceConsumer(c, group)
 	if err != nil {
 		if errors.Is(err, balance.ErrNoRealNameUsedAmountLimit) {
-			AbortLogWithMessage(c, http.StatusForbidden, err.Error(), &ErrorField{
-				Code: "no_real_name_used_amount_limit",
-			})
+			AbortLogWithMessage(c, http.StatusForbidden, err.Error(), "no_real_name_used_amount_limit")
 			return false
 		}
 		notify.ErrorThrottle(
@@ -246,9 +244,7 @@ func checkGroupBalance(c *gin.Context, group *model.GroupCache) bool {
 			fmt.Sprintf("Get group `%s` balance error", group.ID),
 			err.Error(),
 		)
-		AbortWithMessage(c, http.StatusInternalServerError, fmt.Sprintf("get group `%s` balance error", group.ID), &ErrorField{
-			Code: "get_group_balance_error",
-		})
+		AbortWithMessage(c, http.StatusInternalServerError, fmt.Sprintf("get group `%s` balance error", group.ID), "get_group_balance_error")
 		return false
 	}
 
@@ -264,9 +260,7 @@ func checkGroupBalance(c *gin.Context, group *model.GroupCache) bool {
 	}
 
 	if !gbc.CheckBalance(0) {
-		AbortLogWithMessage(c, http.StatusForbidden, fmt.Sprintf("group `%s` balance not enough", group.ID), &ErrorField{
-			Code: GroupBalanceNotEnough,
-		})
+		AbortLogWithMessage(c, http.StatusForbidden, fmt.Sprintf("group `%s` balance not enough", group.ID), GroupBalanceNotEnough)
 		return false
 	}
 	return true
@@ -329,6 +323,8 @@ func CheckRelayMode(requestMode mode.Mode, modelMode mode.Mode) bool {
 }
 
 func distribute(c *gin.Context, mode mode.Mode) {
+	c.Set(Mode, mode)
+
 	if config.GetDisableServe() {
 		AbortLogWithMessage(c, http.StatusServiceUnavailable, "service is under maintenance")
 		return
@@ -344,17 +340,11 @@ func distribute(c *gin.Context, mode mode.Mode) {
 
 	requestModel, err := getRequestModel(c, mode)
 	if err != nil {
-		AbortLogWithMessage(c, http.StatusInternalServerError, err.Error(), &ErrorField{
-			Type: "invalid_request_error",
-			Code: "get_request_model_error",
-		})
+		AbortLogWithMessage(c, http.StatusInternalServerError, err.Error(), "get_request_model_error")
 		return
 	}
 	if requestModel == "" {
-		AbortLogWithMessage(c, http.StatusBadRequest, "no model provided", &ErrorField{
-			Type: "invalid_request_error",
-			Code: "no_model_provided",
-		})
+		AbortLogWithMessage(c, http.StatusBadRequest, "no model provided", "no_model_provided")
 		return
 	}
 
@@ -367,10 +357,7 @@ func distribute(c *gin.Context, mode mode.Mode) {
 		AbortLogWithMessage(c,
 			http.StatusNotFound,
 			fmt.Sprintf("The model `%s` does not exist or you do not have access to it.", requestModel),
-			&ErrorField{
-				Type: "invalid_request_error",
-				Code: "model_not_found",
-			},
+			"model_not_found",
 		)
 		return
 	}
@@ -389,10 +376,7 @@ func distribute(c *gin.Context, mode mode.Mode) {
 			AbortLogWithMessage(c,
 				http.StatusNotFound,
 				fmt.Sprintf("The model `%s` does not exist or you do not have access to it.", requestModel),
-				&ErrorField{
-					Type: "invalid_request_error",
-					Code: "model_not_found",
-				},
+				"model_not_found",
 			)
 			return
 		}
@@ -400,20 +384,14 @@ func distribute(c *gin.Context, mode mode.Mode) {
 
 	user, err := getRequestUser(c, mode)
 	if err != nil {
-		AbortLogWithMessage(c, http.StatusInternalServerError, err.Error(), &ErrorField{
-			Type: "invalid_request_error",
-			Code: "get_request_user_error",
-		})
+		AbortLogWithMessage(c, http.StatusInternalServerError, err.Error(), "get_request_user_error")
 		return
 	}
 	c.Set(RequestUser, user)
 
 	metadata, err := getRequestMetadata(c, mode)
 	if err != nil {
-		AbortLogWithMessage(c, http.StatusInternalServerError, err.Error(), &ErrorField{
-			Type: "invalid_request_error",
-			Code: "get_request_metadata_error",
-		})
+		AbortLogWithMessage(c, http.StatusInternalServerError, err.Error(), "get_request_metadata_error")
 		return
 	}
 	c.Set(RequestMetadata, metadata)
@@ -439,10 +417,7 @@ func distribute(c *gin.Context, mode mode.Mode) {
 			model.RequestRate{},
 			GetGroupModelTokenRequestRate(c),
 		)
-		AbortLogWithMessage(c, http.StatusTooManyRequests, errMsg, &ErrorField{
-			Type: "invalid_request_error",
-			Code: "request_rate_limit_exceeded",
-		})
+		AbortLogWithMessage(c, http.StatusTooManyRequests, errMsg, "request_rate_limit_exceeded")
 		return
 	}
 

+ 1 - 3
core/middleware/mcp.go

@@ -34,9 +34,7 @@ func MCPAuth(c *gin.Context) {
 		var err error
 		token, err = model.ValidateAndGetToken(key)
 		if err != nil {
-			AbortLogWithMessage(c, http.StatusUnauthorized, err.Error(), &ErrorField{
-				Code: "invalid_token",
-			})
+			AbortLogWithMessage(c, http.StatusUnauthorized, err.Error(), "invalid_token")
 			return
 		}
 	}

+ 23 - 29
core/middleware/utils.go

@@ -1,45 +1,39 @@
 package middleware
 
 import (
-	"fmt"
-
 	"github.com/gin-gonic/gin"
-	"github.com/labring/aiproxy/core/relay/model"
+	"github.com/labring/aiproxy/core/relay/mode"
+	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
-const (
-	ErrorTypeAIPROXY = "aiproxy_error"
-)
+func AbortLogWithMessageWithMode(m mode.Mode, c *gin.Context, statusCode int, message string, typ ...string) {
+	GetLogger(c).Error(message)
+	AbortWithMessageWithMode(m, c, statusCode, message, typ...)
+}
 
-func MessageWithRequestID(c *gin.Context, message string) string {
-	return fmt.Sprintf("%s (aiproxy: %s)", message, GetRequestID(c))
+func AbortWithMessageWithMode(m mode.Mode, c *gin.Context, statusCode int, message string, typ ...string) {
+	c.JSON(statusCode,
+		relaymodel.WrapperErrorWithMessage(m, statusCode, message, typ...),
+	)
+	c.Abort()
 }
 
-func AbortLogWithMessage(c *gin.Context, statusCode int, message string, fields ...*ErrorField) {
+func AbortLogWithMessage(c *gin.Context, statusCode int, message string, typ ...string) {
 	GetLogger(c).Error(message)
-	AbortWithMessage(c, statusCode, message, fields...)
+	AbortWithMessage(c, statusCode, message, typ...)
 }
 
-type ErrorField struct {
-	Type string `json:"type"`
-	Code any    `json:"code"`
+func AbortWithMessage(c *gin.Context, statusCode int, message string, typ ...string) {
+	c.JSON(statusCode,
+		relaymodel.WrapperErrorWithMessage(GetMode(c), statusCode, message, typ...),
+	)
+	c.Abort()
 }
 
-func AbortWithMessage(c *gin.Context, statusCode int, message string, fields ...*ErrorField) {
-	typeName := ErrorTypeAIPROXY
-	var code any
-	if len(fields) > 0 {
-		if fields[0].Type != "" {
-			typeName = fields[0].Type
-		}
-		code = fields[0].Code
+func GetMode(c *gin.Context) mode.Mode {
+	m, exists := c.Get(Mode)
+	if !exists {
+		return mode.Unknown
 	}
-	c.JSON(statusCode, gin.H{
-		"error": &model.Error{
-			Message: MessageWithRequestID(c, message),
-			Type:    typeName,
-			Code:    code,
-		},
-	})
-	c.Abort()
+	return m.(mode.Mode)
 }
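
The ErrorField struct is gone: call sites now pass the error code as an optional trailing string, and the response body is shaped per relay mode via the Mode context key that distribute sets. A hypothetical call site under those assumptions (how WrapperErrorWithMessage maps the trailing string onto the error's type/code fields is not shown on this page):

```go
// Hypothetical wiring, assuming the gin setup used elsewhere in the repo.
package main

import (
	"net/http"

	"github.com/gin-gonic/gin"
	"github.com/labring/aiproxy/core/middleware"
)

func main() {
	r := gin.New()
	r.GET("/v1/models", func(c *gin.Context) {
		// The trailing "invalid_token" replaces the old
		// &middleware.ErrorField{Code: "invalid_token"} argument.
		middleware.AbortLogWithMessage(c, http.StatusUnauthorized,
			"invalid token", "invalid_token")
	})
	_ = r.Run(":8080")
}
```

Note GetMode falls back to mode.Unknown when distribute has not run, so AbortWithMessage stays usable outside the relay routes.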

+ 7 - 7
core/relay/adaptor/ali/adaptor.go

@@ -3,7 +3,6 @@ package ali
 import (
 	"errors"
 	"fmt"
-	"io"
 	"net/http"
 
 	"github.com/bytedance/sonic"
@@ -11,6 +10,7 @@ import (
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/common"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	"github.com/labring/aiproxy/core/relay/mode"
@@ -58,7 +58,7 @@ func (a *Adaptor) SetupRequestHeader(meta *meta.Meta, _ *gin.Context, req *http.
 	return nil
 }
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	switch meta.Mode {
 	case mode.ImagesGenerations:
 		return ConvertImageRequest(meta, req)
@@ -71,7 +71,7 @@ func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, ht
 	case mode.AudioTranscription:
 		return ConvertSTTRequest(meta, req)
 	default:
-		return "", nil, nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
+		return nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
 	}
 }
 
@@ -88,7 +88,7 @@ func (a *Adaptor) DoRequest(meta *meta.Meta, _ *gin.Context, req *http.Request)
 	}
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	switch meta.Mode {
 	case mode.ImagesGenerations:
 		return ImageHandler(meta, c, resp)
@@ -97,11 +97,11 @@ func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Respons
 	case mode.ChatCompletions:
 		reqBody, err := common.GetRequestBody(c.Request)
 		if err != nil {
-			return nil, openai.ErrorWrapperWithMessage(fmt.Sprintf("get request body failed: %s", err), "get_request_body_failed", http.StatusInternalServerError)
+			return nil, relaymodel.WrapperOpenAIErrorWithMessage(fmt.Sprintf("get request body failed: %s", err), "get_request_body_failed", http.StatusInternalServerError)
 		}
 		enableSearch, err := getEnableSearch(reqBody)
 		if err != nil {
-			return nil, openai.ErrorWrapperWithMessage(fmt.Sprintf("get enable_search failed: %s", err), "get_enable_search_failed", http.StatusInternalServerError)
+			return nil, relaymodel.WrapperOpenAIErrorWithMessage(fmt.Sprintf("get enable_search failed: %s", err), "get_enable_search_failed", http.StatusInternalServerError)
 		}
 		u, e := openai.DoResponse(meta, c, resp)
 		if e != nil {
@@ -118,7 +118,7 @@ func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Respons
 	case mode.AudioTranscription:
 		return STTDoResponse(meta, c, resp)
 	default:
-		return nil, openai.ErrorWrapperWithMessage(fmt.Sprintf("unsupported mode: %s", meta.Mode), "unsupported_mode", http.StatusBadRequest)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(fmt.Sprintf("unsupported mode: %s", meta.Mode), "unsupported_mode", http.StatusBadRequest)
 	}
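
ConvertRequest now returns a single *adaptor.ConvertRequestResult instead of the old (string, http.Header, io.Reader, error) tuple. The struct is declared in the collapsed interface.go diff; its shape can be inferred from the literals used throughout this commit:

```go
// Inferred from the struct literals in this commit (Method/Header/Body);
// any additional fields or tags in the real declaration are unknown.
package adaptor

import (
	"io"
	"net/http"
)

type ConvertRequestResult struct {
	Method string      // HTTP method for the upstream request
	Header http.Header // extra upstream headers, e.g. "X-Dashscope-Async": enable
	Body   io.Reader   // serialized upstream request body
}
```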
 }
 

+ 12 - 11
core/relay/adaptor/ali/embeddings.go

@@ -10,9 +10,10 @@ import (
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/common"
 	"github.com/labring/aiproxy/core/middleware"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
+	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
-	model "github.com/labring/aiproxy/core/relay/model"
+	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
 // Deprecated: Use openai.ConvertRequest instead
@@ -55,16 +56,16 @@ func ConvertEmbeddingsRequest(meta *meta.Meta, req *http.Request) (string, http.
 	return http.MethodPost, nil, bytes.NewReader(jsonData), nil
 }
 
-func embeddingResponse2OpenAI(meta *meta.Meta, response *EmbeddingResponse) *model.EmbeddingResponse {
-	openAIEmbeddingResponse := model.EmbeddingResponse{
+func embeddingResponse2OpenAI(meta *meta.Meta, response *EmbeddingResponse) *relaymodel.EmbeddingResponse {
+	openAIEmbeddingResponse := relaymodel.EmbeddingResponse{
 		Object: "list",
-		Data:   make([]*model.EmbeddingResponseItem, 0, 1),
+		Data:   make([]*relaymodel.EmbeddingResponseItem, 0, 1),
 		Model:  meta.OriginModel,
 		Usage:  response.Usage,
 	}
 
 	for i, embedding := range response.Output.Embeddings {
-		openAIEmbeddingResponse.Data = append(openAIEmbeddingResponse.Data, &model.EmbeddingResponseItem{
+		openAIEmbeddingResponse.Data = append(openAIEmbeddingResponse.Data, &relaymodel.EmbeddingResponseItem{
 			Object:    "embedding",
 			Index:     i,
 			Embedding: embedding.Embedding,
@@ -73,19 +74,19 @@ func embeddingResponse2OpenAI(meta *meta.Meta, response *EmbeddingResponse) *mod
 	return &openAIEmbeddingResponse
 }
 
-func EmbeddingsHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *model.ErrorWithStatusCode) {
+func EmbeddingsHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	defer resp.Body.Close()
 
 	log := middleware.GetLogger(c)
 
 	responseBody, err := io.ReadAll(resp.Body)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "read_response_body_failed", resp.StatusCode)
+		return nil, relaymodel.WrapperOpenAIError(err, "read_response_body_failed", resp.StatusCode)
 	}
 	var respBody EmbeddingResponse
 	err = sonic.Unmarshal(responseBody, &respBody)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_response_body_failed", resp.StatusCode)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", resp.StatusCode)
 	}
 	if respBody.Usage.PromptTokens == 0 {
 		respBody.Usage.PromptTokens = respBody.Usage.TotalTokens
@@ -93,11 +94,11 @@ func EmbeddingsHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*m
 	openaiResponse := embeddingResponse2OpenAI(meta, &respBody)
 	data, err := sonic.Marshal(openaiResponse)
 	if err != nil {
-		return &respBody.Usage, openai.ErrorWrapper(err, "marshal_response_body_failed", resp.StatusCode)
+		return openaiResponse.Usage.ToModelUsage(), relaymodel.WrapperOpenAIError(err, "marshal_response_body_failed", resp.StatusCode)
 	}
 	_, err = c.Writer.Write(data)
 	if err != nil {
 		log.Warnf("write response body failed: %v", err)
 	}
-	return &openaiResponse.Usage, nil
+	return openaiResponse.Usage.ToModelUsage(), nil
 }

+ 18 - 20
core/relay/adaptor/ali/image.go

@@ -14,6 +14,7 @@ import (
 	"github.com/labring/aiproxy/core/common/image"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
@@ -23,10 +24,10 @@ import (
 
 const MetaResponseFormat = "response_format"
 
-func ConvertImageRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func ConvertImageRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	request, err := utils.UnmarshalImageRequest(req)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 	request.Model = meta.ActualModel
 
@@ -41,14 +42,18 @@ func ConvertImageRequest(meta *meta.Meta, req *http.Request) (string, http.Heade
 
 	data, err := sonic.Marshal(&imageRequest)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
-	return http.MethodPost, http.Header{
-		"X-Dashscope-Async": {"enable"},
-	}, bytes.NewReader(data), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: http.Header{
+			"X-Dashscope-Async": {"enable"},
+		},
+		Body: bytes.NewReader(data),
+	}, nil
 }
 
-func ImageHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func ImageHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, openai.ErrorHanlder(resp)
 	}
@@ -62,38 +67,31 @@ func ImageHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.
 	var aliTaskResponse TaskResponse
 	responseBody, err := io.ReadAll(resp.Body)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "read_response_body_failed", http.StatusInternalServerError)
 	}
 	err = sonic.Unmarshal(responseBody, &aliTaskResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 	}
 
 	if aliTaskResponse.Message != "" {
 		log.Error("aliAsyncTask err: " + aliTaskResponse.Message)
-		return nil, openai.ErrorWrapper(errors.New(aliTaskResponse.Message), "ali_async_task_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(errors.New(aliTaskResponse.Message), "ali_async_task_failed", http.StatusInternalServerError)
 	}
 
 	aliResponse, err := asyncTaskWait(c, aliTaskResponse.Output.TaskID, meta.Channel.Key)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "ali_async_task_wait_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "ali_async_task_wait_failed", http.StatusInternalServerError)
 	}
 
 	if aliResponse.Output.TaskStatus != "SUCCEEDED" {
-		return nil, &relaymodel.ErrorWithStatusCode{
-			Error: relaymodel.Error{
-				Message: aliResponse.Output.Message,
-				Type:    "ali_error",
-				Code:    aliResponse.Output.Code,
-			},
-			StatusCode: resp.StatusCode,
-		}
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(aliResponse.Output.Message, "ali_error", resp.StatusCode)
 	}
 
 	fullTextResponse := responseAli2OpenAIImage(c.Request.Context(), aliResponse, responseFormat)
 	jsonResponse, err := sonic.Marshal(fullTextResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "marshal_response_body_failed", http.StatusInternalServerError)
 	}
 	c.Writer.Header().Set("Content-Type", "application/json")
 	c.Writer.WriteHeader(resp.StatusCode)
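
The image flow submits an async DashScope task ("X-Dashscope-Async: enable") and then blocks in asyncTaskWait until the task reaches a terminal status. asyncTaskWait itself is not shown in this diff; below is a hypothetical stand-in that polls DashScope's public tasks endpoint (everything beyond the endpoint path is an assumption):

```go
// Hypothetical stand-in for asyncTaskWait (its body is not in this diff).
// Response fields are trimmed to what ImageHandler reads above.
package main

import (
	"context"
	"encoding/json"
	"fmt"
	"net/http"
	"time"
)

type taskResponse struct {
	Output struct {
		TaskID     string `json:"task_id"`
		TaskStatus string `json:"task_status"`
		Message    string `json:"message"`
	} `json:"output"`
}

func pollTask(ctx context.Context, taskID, apiKey string) (*taskResponse, error) {
	ticker := time.NewTicker(2 * time.Second)
	defer ticker.Stop()
	url := "https://dashscope.aliyuncs.com/api/v1/tasks/" + taskID
	for {
		select {
		case <-ctx.Done():
			return nil, ctx.Err()
		case <-ticker.C:
			req, err := http.NewRequestWithContext(ctx, http.MethodGet, url, nil)
			if err != nil {
				return nil, err
			}
			req.Header.Set("Authorization", "Bearer "+apiKey)
			resp, err := http.DefaultClient.Do(req)
			if err != nil {
				return nil, err
			}
			var tr taskResponse
			err = json.NewDecoder(resp.Body).Decode(&tr)
			resp.Body.Close()
			if err != nil {
				return nil, err
			}
			// Keep polling while the task is still queued or running.
			if s := tr.Output.TaskStatus; s != "PENDING" && s != "RUNNING" {
				return &tr, nil
			}
		}
	}
}

func main() {
	tr, err := pollTask(context.Background(), "some-task-id", "sk-...")
	fmt.Println(tr, err)
}
```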

+ 13 - 8
core/relay/adaptor/ali/rerank.go

@@ -10,6 +10,7 @@ import (
 	"github.com/labring/aiproxy/core/common"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
@@ -27,11 +28,11 @@ type RerankUsage struct {
 	TotalTokens int64 `json:"total_tokens"`
 }
 
-func ConvertRerankRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func ConvertRerankRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	reqMap := make(map[string]any)
 	err := common.UnmarshalBodyReusable(req, &reqMap)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 	reqMap["model"] = meta.ActualModel
 	reqMap["input"] = map[string]any{
@@ -51,12 +52,16 @@ func ConvertRerankRequest(meta *meta.Meta, req *http.Request) (string, http.Head
 	reqMap["parameters"] = parameters
 	jsonData, err := sonic.Marshal(reqMap)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
-	return http.MethodPost, nil, bytes.NewReader(jsonData), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   bytes.NewReader(jsonData),
+	}, nil
 }
 
-func RerankHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func RerankHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, openai.ErrorHanlder(resp)
 	}
@@ -67,12 +72,12 @@ func RerankHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model
 
 	responseBody, err := io.ReadAll(resp.Body)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "read_response_body_failed", http.StatusInternalServerError)
 	}
 	var rerankResponse RerankResponse
 	err = sonic.Unmarshal(responseBody, &rerankResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 	}
 
 	c.Writer.WriteHeader(resp.StatusCode)
@@ -103,7 +108,7 @@ func RerankHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model
 
 	jsonResponse, err := sonic.Marshal(&rerankResp)
 	if err != nil {
-		return usage, openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
+		return usage, relaymodel.WrapperOpenAIError(err, "marshal_response_body_failed", http.StatusInternalServerError)
 	}
 	_, err = c.Writer.Write(jsonResponse)
 	if err != nil {

+ 22 - 18
core/relay/adaptor/ali/stt-realtime.go

@@ -12,7 +12,7 @@ import (
 	"github.com/google/uuid"
 	"github.com/gorilla/websocket"
 	"github.com/labring/aiproxy/core/model"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
@@ -65,18 +65,18 @@ type STTUsage struct {
 	Characters int64 `json:"characters"`
 }
 
-func ConvertSTTRequest(meta *meta.Meta, request *http.Request) (string, http.Header, io.Reader, error) {
+func ConvertSTTRequest(meta *meta.Meta, request *http.Request) (*adaptor.ConvertRequestResult, error) {
 	err := request.ParseMultipartForm(1024 * 1024 * 4)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 	audioFile, _, err := request.FormFile("file")
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 	audioData, err := io.ReadAll(audioFile)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 	format := "mp3"
 	if request.FormValue("format") != "" {
@@ -86,7 +86,7 @@ func ConvertSTTRequest(meta *meta.Meta, request *http.Request) (string, http.Hea
 	if request.FormValue("sample_rate") != "" {
 		sampleRate, err = strconv.Atoi(request.FormValue("sample_rate"))
 		if err != nil {
-			return "", nil, nil, err
+			return nil, err
 		}
 	}
 
@@ -111,13 +111,17 @@ func ConvertSTTRequest(meta *meta.Meta, request *http.Request) (string, http.Hea
 
 	data, err := sonic.Marshal(sttRequest)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 	meta.Set("audio_data", audioData)
 	meta.Set("task_id", sttRequest.Header.TaskID)
-	return http.MethodPost, http.Header{
-		"X-DashScope-DataInspection": {"enable"},
-	}, bytes.NewReader(data), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: http.Header{
+			"X-DashScope-DataInspection": {"enable"},
+		},
+		Body: bytes.NewReader(data),
+	}, nil
 }
 
 func STTDoRequest(meta *meta.Meta, req *http.Request) (*http.Response, error) {
@@ -146,7 +150,7 @@ func STTDoRequest(meta *meta.Meta, req *http.Request) (*http.Response, error) {
 	}, nil
 }
 
-func STTDoResponse(meta *meta.Meta, c *gin.Context, _ *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func STTDoResponse(meta *meta.Meta, c *gin.Context, _ *http.Response) (usage *model.Usage, err adaptor.Error) {
 	audioData := meta.MustGet("audio_data").([]byte)
 	taskID := meta.MustGet("task_id").(string)
 
@@ -160,17 +164,17 @@ func STTDoResponse(meta *meta.Meta, c *gin.Context, _ *http.Response) (usage *mo
 	for {
 		messageType, data, err := conn.ReadMessage()
 		if err != nil {
-			return usage, openai.ErrorWrapperWithMessage("ali_wss_read_msg_failed", "ali_wss_read_msg_failed", http.StatusInternalServerError)
+			return usage, relaymodel.WrapperOpenAIErrorWithMessage("ali_wss_read_msg_failed", nil, http.StatusInternalServerError)
 		}
 
 		if messageType != websocket.TextMessage {
-			return usage, openai.ErrorWrapperWithMessage("expect text message, but got binary message", "ali_wss_read_msg_failed", http.StatusInternalServerError)
+			return usage, relaymodel.WrapperOpenAIErrorWithMessage("expect text message, but got binary message", nil, http.StatusInternalServerError)
 		}
 
 		var msg STTMessage
 		err = sonic.Unmarshal(data, &msg)
 		if err != nil {
-			return usage, openai.ErrorWrapperWithMessage("ali_wss_read_msg_failed", "ali_wss_read_msg_failed", http.StatusInternalServerError)
+			return usage, relaymodel.WrapperOpenAIErrorWithMessage("ali_wss_read_msg_failed", nil, http.StatusInternalServerError)
 		}
 		switch msg.Header.Event {
 		case "task-started":
@@ -183,7 +187,7 @@ func STTDoResponse(meta *meta.Meta, c *gin.Context, _ *http.Response) (usage *mo
 				chunk := audioData[i:end]
 				err = conn.WriteMessage(websocket.BinaryMessage, chunk)
 				if err != nil {
-					return usage, openai.ErrorWrapperWithMessage("ali_wss_write_msg_failed", "ali_wss_write_msg_failed", http.StatusInternalServerError)
+					return usage, relaymodel.WrapperOpenAIErrorWithMessage("ali_wss_write_msg_failed", nil, http.StatusInternalServerError)
 				}
 			}
 			finishMsg := STTMessage{
@@ -198,11 +202,11 @@ func STTDoResponse(meta *meta.Meta, c *gin.Context, _ *http.Response) (usage *mo
 			}
 			finishData, err := sonic.Marshal(finishMsg)
 			if err != nil {
-				return usage, openai.ErrorWrapperWithMessage("ali_wss_write_msg_failed", "ali_wss_write_msg_failed", http.StatusInternalServerError)
+				return usage, relaymodel.WrapperOpenAIErrorWithMessage("ali_wss_write_msg_failed", nil, http.StatusInternalServerError)
 			}
 			err = conn.WriteMessage(websocket.TextMessage, finishData)
 			if err != nil {
-				return usage, openai.ErrorWrapperWithMessage("ali_wss_write_msg_failed", "ali_wss_write_msg_failed", http.StatusInternalServerError)
+				return usage, relaymodel.WrapperOpenAIErrorWithMessage("ali_wss_write_msg_failed", nil, http.StatusInternalServerError)
 			}
 		case "result-generated":
 			if msg.Payload.Output.STTSentence.EndTime != nil &&
@@ -222,7 +226,7 @@ func STTDoResponse(meta *meta.Meta, c *gin.Context, _ *http.Response) (usage *mo
 			})
 			return usage, nil
 		case "task-failed":
-			return usage, openai.ErrorWrapperWithMessage(msg.Header.ErrorMessage, msg.Header.ErrorCode, http.StatusInternalServerError)
+			return usage, relaymodel.WrapperOpenAIErrorWithMessage(msg.Header.ErrorMessage, msg.Header.ErrorCode, http.StatusInternalServerError)
 		}
 	}
 }

+ 16 - 12
core/relay/adaptor/ali/tts.go

@@ -13,7 +13,7 @@ import (
 	"github.com/gorilla/websocket"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
 	"github.com/labring/aiproxy/core/relay/utils"
@@ -94,14 +94,14 @@ var ttsSupportedFormat = map[string]struct{}{
 	"mp3": {},
 }
 
-func ConvertTTSRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func ConvertTTSRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	request, err := utils.UnmarshalTTSRequest(req)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 	reqMap, err := utils.UnmarshalMap(req)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 	var sampleRate int
 	sampleRateI, ok := reqMap["sample_rate"].(float64)
@@ -157,11 +157,15 @@ func ConvertTTSRequest(meta *meta.Meta, req *http.Request) (string, http.Header,
 
 	data, err := sonic.Marshal(ttsRequest)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
-	return http.MethodPost, http.Header{
-		"X-DashScope-DataInspection": {"enable"},
-	}, bytes.NewReader(data), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: http.Header{
+			"X-DashScope-DataInspection": {"enable"},
+		},
+		Body: bytes.NewReader(data),
+	}, nil
 }
 
 func TTSDoRequest(meta *meta.Meta, req *http.Request) (*http.Response, error) {
@@ -191,7 +195,7 @@ func TTSDoRequest(meta *meta.Meta, req *http.Request) (*http.Response, error) {
 	}, nil
 }
 
-func TTSDoResponse(meta *meta.Meta, c *gin.Context, _ *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func TTSDoResponse(meta *meta.Meta, c *gin.Context, _ *http.Response) (usage *model.Usage, err adaptor.Error) {
 	log := middleware.GetLogger(c)
 
 	conn := meta.MustGet("ws_conn").(*websocket.Conn)
@@ -202,7 +206,7 @@ func TTSDoResponse(meta *meta.Meta, c *gin.Context, _ *http.Response) (usage *mo
 	for {
 		messageType, data, err := conn.ReadMessage()
 		if err != nil {
-			return usage, openai.ErrorWrapperWithMessage("ali_wss_read_msg_failed", "ali_wss_read_msg_failed", http.StatusInternalServerError)
+			return usage, relaymodel.WrapperOpenAIErrorWithMessage("ali_wss_read_msg_failed", nil, http.StatusInternalServerError)
 		}
 
 		var msg TTSMessage
@@ -210,7 +214,7 @@ func TTSDoResponse(meta *meta.Meta, c *gin.Context, _ *http.Response) (usage *mo
 		case websocket.TextMessage:
 			err = sonic.Unmarshal(data, &msg)
 			if err != nil {
-				return usage, openai.ErrorWrapperWithMessage("ali_wss_read_msg_failed", "ali_wss_read_msg_failed", http.StatusInternalServerError)
+				return usage, relaymodel.WrapperOpenAIErrorWithMessage("ali_wss_read_msg_failed", nil, http.StatusInternalServerError)
 			}
 			switch msg.Header.Event {
 			case "task-started":
@@ -222,7 +226,7 @@ func TTSDoResponse(meta *meta.Meta, c *gin.Context, _ *http.Response) (usage *mo
 				usage.TotalTokens = model.ZeroNullInt64(msg.Payload.Usage.Characters)
 				return usage, nil
 			case "task-failed":
-				return usage, openai.ErrorWrapperWithMessage(msg.Header.ErrorMessage, msg.Header.ErrorCode, http.StatusInternalServerError)
+				return usage, relaymodel.WrapperOpenAIErrorWithMessage(msg.Header.ErrorMessage, msg.Header.ErrorCode, http.StatusInternalServerError)
 			}
 		case websocket.BinaryMessage:
 			_, writeErr := c.Writer.Write(data)

+ 12 - 9
core/relay/adaptor/anthropic/adaptor.go

@@ -3,14 +3,13 @@ package anthropic
 import (
 	"bytes"
 	"fmt"
-	"io"
 	"net/http"
 	"strings"
 
 	"github.com/bytedance/sonic"
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 	"github.com/labring/aiproxy/core/relay/mode"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
@@ -58,23 +57,27 @@ func (a *Adaptor) SetupRequestHeader(meta *meta.Meta, c *gin.Context, req *http.
 	return nil
 }
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	switch meta.Mode {
 	case mode.ChatCompletions:
 		data, err := OpenAIConvertRequest(meta, req)
 		if err != nil {
-			return "", nil, nil, err
+			return nil, err
 		}
 
 		data2, err := sonic.Marshal(data)
 		if err != nil {
-			return "", nil, nil, err
+			return nil, err
 		}
-		return http.MethodPost, nil, bytes.NewReader(data2), nil
+		return &adaptor.ConvertRequestResult{
+			Method: http.MethodPost,
+			Header: nil,
+			Body:   bytes.NewReader(data2),
+		}, nil
 	case mode.Anthropic:
 		return ConvertRequest(meta, req)
 	default:
-		return "", nil, nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
+		return nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
 	}
 }
 
@@ -82,7 +85,7 @@ func (a *Adaptor) DoRequest(_ *meta.Meta, _ *gin.Context, req *http.Request) (*h
 	return utils.DoRequest(req)
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err adaptor.Error) {
 	switch meta.Mode {
 	case mode.ChatCompletions:
 		if utils.IsStreamResponse(resp) {
@@ -97,7 +100,7 @@ func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Respons
 			usage, err = Handler(meta, c, resp)
 		}
 	default:
-		return nil, openai.ErrorWrapperWithMessage(fmt.Sprintf("unsupported mode: %s", meta.Mode), "unsupported_mode", http.StatusBadRequest)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(fmt.Sprintf("unsupported mode: %s", meta.Mode), "unsupported_mode", http.StatusBadRequest)
 	}
 	return
 }

+ 47 - 22
core/relay/adaptor/anthropic/error.go

@@ -1,37 +1,62 @@
 package anthropic
 
 import (
+	"io"
 	"net/http"
 	"strings"
 
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
-	"github.com/labring/aiproxy/core/relay/model"
+	"github.com/bytedance/sonic"
+	"github.com/labring/aiproxy/core/relay/adaptor"
+	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
-// status 400 {"type":"error","error":{"type":"invalid_request_error","message":"Your credit balance is too low to access the Anthropic API. Please go to Plans & Billing to upgrade or purchase credits."}}
-// status 529 {Message:Overloaded Type:overloaded_error Param:}
-func OpenAIErrorHandler(resp *http.Response) *model.ErrorWithStatusCode {
-	err := openai.ErrorHanlder(resp)
-	if strings.Contains(err.Error.Message, "balance is too low") {
-		err.StatusCode = http.StatusPaymentRequired
-		return err
+func OpenAIErrorHandler(resp *http.Response) adaptor.Error {
+	defer resp.Body.Close()
+	respBody, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return relaymodel.WrapperOpenAIError(err, "read_response_failed", http.StatusInternalServerError)
 	}
-	if strings.Contains(err.Error.Message, "Overloaded") {
-		err.StatusCode = http.StatusTooManyRequests
-		return err
+
+	return OpenAIErrorHandlerWithBody(resp.StatusCode, respBody)
+}
+
+func OpenAIErrorHandlerWithBody(statusCode int, respBody []byte) adaptor.Error {
+	statusCode, e := GetErrorWithBody(statusCode, respBody)
+	return relaymodel.WrapperOpenAIErrorWithMessage(e.Message, e.Type, statusCode)
+}
+
+func ErrorHandler(resp *http.Response) adaptor.Error {
+	statusCode, e := GetError(resp)
+	return relaymodel.NewAnthropicError(statusCode, e)
+}
+
+func GetError(resp *http.Response) (int, relaymodel.AnthropicError) {
+	defer resp.Body.Close()
+
+	respBody, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return resp.StatusCode, relaymodel.AnthropicError{
+			Type:    "aiproxy_error",
+			Message: err.Error(),
+		}
 	}
-	return err
+
+	return GetErrorWithBody(resp.StatusCode, respBody)
 }
 
-func OpenAIErrorHandlerWithBody(statusCode int, respBody []byte) *model.ErrorWithStatusCode {
-	err := openai.ErrorHanlderWithBody(statusCode, respBody)
-	if strings.Contains(err.Error.Message, "balance is too low") {
-		err.StatusCode = http.StatusPaymentRequired
-		return err
+// status 400 {"type":"error","error":{"type":"invalid_request_error","message":"Your credit balance is too low to access the Anthropic API. Please go to Plans & Billing to upgrade or purchase credits."}}
+// status 529 {Message:Overloaded Type:overloaded_error Param:}
+func GetErrorWithBody(statusCode int, respBody []byte) (int, relaymodel.AnthropicError) {
+	var e relaymodel.AnthropicErrorResponse
+	err := sonic.Unmarshal(respBody, &e)
+	if err != nil {
+		return statusCode, e.Error
+	}
+	if strings.Contains(e.Error.Message, "balance is too low") {
+		return http.StatusPaymentRequired, e.Error
 	}
-	if strings.Contains(err.Error.Message, "Overloaded") {
-		err.StatusCode = http.StatusTooManyRequests
-		return err
+	if strings.Contains(e.Error.Message, "Overloaded") {
+		return http.StatusTooManyRequests, e.Error
 	}
-	return err
+	return statusCode, e.Error
 }
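
GetErrorWithBody keeps the two Anthropic-specific remaps from the old wrapper: a low-balance 400 becomes 402 Payment Required, and a 529 "Overloaded" becomes 429 so the retry loop treats the channel as rate-limited rather than broken. A hypothetical table test pinning that behavior, with messages taken from the comments above:

```go
package anthropic

import (
	"net/http"
	"testing"
)

// Hypothetical test, not part of this commit; exercises the remapping that
// GetErrorWithBody encodes above.
func TestGetErrorWithBodyRemap(t *testing.T) {
	cases := []struct {
		name string
		in   int
		body string
		want int
	}{
		{"low balance", 400, `{"type":"error","error":{"type":"invalid_request_error","message":"Your credit balance is too low to access the Anthropic API."}}`, http.StatusPaymentRequired},
		{"overloaded", 529, `{"type":"error","error":{"type":"overloaded_error","message":"Overloaded"}}`, http.StatusTooManyRequests},
		{"passthrough", 400, `{"type":"error","error":{"type":"invalid_request_error","message":"bad param"}}`, http.StatusBadRequest},
	}
	for _, tc := range cases {
		got, _ := GetErrorWithBody(tc.in, []byte(tc.body))
		if got != tc.want {
			t.Errorf("%s: got %d, want %d", tc.name, got, tc.want)
		}
	}
}
```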

+ 17 - 12
core/relay/adaptor/anthropic/main.go

@@ -18,38 +18,43 @@ import (
 	"github.com/labring/aiproxy/core/common/image"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
 	"golang.org/x/sync/semaphore"
 )
 
-func ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	// Parse request body into AST node
 	node, err := common.UnmarshalBody2Node(req)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	// Set the actual model in the request
 	_, err = node.Set("model", ast.NewString(meta.ActualModel))
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	// Process image content if present
 	err = ConvertImage2Base64(req.Context(), &node)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	// Serialize the modified node
 	newBody, err := node.MarshalJSON()
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
-	return http.MethodPost, nil, bytes.NewReader(newBody), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   bytes.NewReader(newBody),
+	}, nil
 }
 
 // ConvertImage2Base64 handles converting image URLs to base64 encoded data
@@ -151,9 +156,9 @@ func convertImageURLToBase64(ctx context.Context, contentItem *ast.Node) error {
 	return nil
 }
 
-func StreamHandler(m *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func StreamHandler(m *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
-		return nil, OpenAIErrorHandler(resp)
+		return nil, ErrorHandler(resp)
 	}
 
 	defer resp.Body.Close()
@@ -226,22 +231,22 @@ func StreamHandler(m *meta.Meta, c *gin.Context, resp *http.Response) (*model.Us
 	return usage.ToModelUsage(), nil
 }
 
-func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
-		return nil, OpenAIErrorHandler(resp)
+		return nil, ErrorHandler(resp)
 	}
 
 	defer resp.Body.Close()
 
 	respBody, err := io.ReadAll(resp.Body)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "read_response_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperAnthropicError(err, "read_response_failed", http.StatusInternalServerError)
 	}
 
 	var claudeResponse Response
 	err = sonic.Unmarshal(respBody, &claudeResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperAnthropicError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 	}
 	fullTextResponse := Response2OpenAI(meta, &claudeResponse)
 	c.Writer.Header().Set("Content-Type", "application/json")
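
Every `ConvertRequest` in this commit now returns a single `*adaptor.ConvertRequestResult` instead of the old `(string, http.Header, io.Reader, error)` tuple. Its definition lives in `core/relay/adaptor/interface.go` (changed in this commit but not shown in this section); inferred from the struct literals at the call sites, the shape is roughly:

```go
// Inferred from call sites only — the real definition is in
// core/relay/adaptor/interface.go and may carry more fields or methods.
package adaptor

import (
	"io"
	"net/http"
)

type ConvertRequestResult struct {
	Method string      // HTTP method for the upstream call, usually http.MethodPost
	Header http.Header // extra request headers; nil when the adaptor sets none
	Body   io.Reader   // converted body; nil when an SDK builds the request (see the aws adaptors)
}
```

Bundling the three values makes the "no HTTP request at all" case explicit: the AWS Bedrock adaptors below return a result with a nil Body instead of three zero values.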

+ 0 - 11
core/relay/adaptor/anthropic/model.go

@@ -173,20 +173,9 @@ type CacheCreation struct {
 	Ephemeral1hInputTokens int64 `json:"ephemeral_1h_input_tokens,omitempty"`
 }
 
-type Error struct {
-	Type    string `json:"type"`
-	Message string `json:"message"`
-}
-
-type ErrorResponse struct {
-	Type  string `json:"type"`
-	Error Error  `json:"error"`
-}
-
 type Response struct {
 	StopReason   string    `json:"stop_reason,omitempty"`
 	StopSequence *string   `json:"stop_sequence"`
-	Error        *Error    `json:"error"`
 	ID           string    `json:"id"`
 	Type         string    `json:"type"`
 	Role         string    `json:"role"`

+ 7 - 6
core/relay/adaptor/anthropic/openai.go

@@ -17,6 +17,7 @@ import (
 	"github.com/labring/aiproxy/core/common/render"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
@@ -273,7 +274,7 @@ func batchPatchImage2Base64(ctx context.Context, imageTasks []*Content) error {
 }
 
 // https://docs.anthropic.com/claude/reference/messages-streaming
-func StreamResponse2OpenAI(meta *meta.Meta, respData []byte) (*relaymodel.ChatCompletionsStreamResponse, *relaymodel.ErrorWithStatusCode) {
+func StreamResponse2OpenAI(meta *meta.Meta, respData []byte) (*relaymodel.ChatCompletionsStreamResponse, adaptor.Error) {
 	var usage *relaymodel.Usage
 	var content string
 	var thinking string
@@ -283,7 +284,7 @@ func StreamResponse2OpenAI(meta *meta.Meta, respData []byte) (*relaymodel.ChatCo
 	var claudeResponse StreamResponse
 	err := sonic.Unmarshal(respData, &claudeResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_response", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_response", http.StatusInternalServerError)
 	}
 
 	switch claudeResponse.Type {
@@ -420,7 +421,7 @@ func Response2OpenAI(meta *meta.Meta, claudeResponse *Response) *relaymodel.Text
 	return &fullTextResponse
 }
 
-func OpenAIStreamHandler(m *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func OpenAIStreamHandler(m *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, OpenAIErrorHandler(resp)
 	}
@@ -511,7 +512,7 @@ func OpenAIStreamHandler(m *meta.Meta, c *gin.Context, resp *http.Response) (*mo
 	return usage.ToModelUsage(), nil
 }
 
-func OpenAIHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func OpenAIHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, OpenAIErrorHandler(resp)
 	}
@@ -521,12 +522,12 @@ func OpenAIHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model
 	var claudeResponse Response
 	err := sonic.ConfigDefault.NewDecoder(resp.Body).Decode(&claudeResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 	}
 	fullTextResponse := Response2OpenAI(meta, &claudeResponse)
 	jsonResponse, err := sonic.Marshal(fullTextResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "marshal_response_body_failed", http.StatusInternalServerError)
 	}
 	c.Writer.Header().Set("Content-Type", "application/json")
 	c.Writer.WriteHeader(resp.StatusCode)

+ 5 - 8
core/relay/adaptor/aws/adaptor.go

@@ -2,11 +2,11 @@ package aws
 
 import (
 	"errors"
-	"io"
 	"net/http"
 
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/aws/utils"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
@@ -18,22 +18,19 @@ func (a *Adaptor) GetBaseURL() string {
 	return ""
 }
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	adaptor := GetAdaptor(meta.ActualModel)
 	if adaptor == nil {
-		return "", nil, nil, errors.New("adaptor not found")
+		return nil, errors.New("adaptor not found")
 	}
 	meta.Set("awsAdapter", adaptor)
 	return adaptor.ConvertRequest(meta, req)
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, _ *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, _ *http.Response) (usage *model.Usage, err adaptor.Error) {
 	adaptor, ok := meta.Get("awsAdapter")
 	if !ok {
-		return nil, &relaymodel.ErrorWithStatusCode{
-			StatusCode: http.StatusInternalServerError,
-			Error:      relaymodel.Error{Message: "awsAdapter not found"},
-		}
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage("awsAdapter not found", nil, http.StatusInternalServerError)
 	}
 	return adaptor.(utils.AwsAdapter).DoResponse(meta, c)
 }
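
`ConvertRequest` stashes the resolved Bedrock sub-adapter in `meta`, and `DoResponse` fetches it back with an unchecked assertion (`adaptor.(utils.AwsAdapter)`), which panics if the stored value ever has the wrong type. A defensive comma-ok variant, sketched with the same names used in this diff:

```go
package aws

import (
	"net/http"

	"github.com/gin-gonic/gin"
	"github.com/labring/aiproxy/core/model"
	"github.com/labring/aiproxy/core/relay/adaptor"
	"github.com/labring/aiproxy/core/relay/adaptor/aws/utils"
	"github.com/labring/aiproxy/core/relay/meta"
	relaymodel "github.com/labring/aiproxy/core/relay/model"
)

// doResponseChecked is a sketch, not the committed code: it turns a type
// mismatch into the same wrapped 500 instead of a runtime panic.
func doResponseChecked(m *meta.Meta, c *gin.Context) (*model.Usage, adaptor.Error) {
	v, ok := m.Get("awsAdapter")
	if !ok {
		return nil, relaymodel.WrapperOpenAIErrorWithMessage("awsAdapter not found", nil, http.StatusInternalServerError)
	}
	a, ok := v.(utils.AwsAdapter)
	if !ok {
		return nil, relaymodel.WrapperOpenAIErrorWithMessage("awsAdapter has unexpected type", nil, http.StatusInternalServerError)
	}
	return a.DoResponse(m, c)
}
```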

+ 9 - 6
core/relay/adaptor/aws/claude/adapter.go

@@ -1,14 +1,13 @@
 package aws
 
 import (
-	"io"
 	"net/http"
 
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/anthropic"
 	"github.com/labring/aiproxy/core/relay/meta"
-	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
 const (
@@ -17,17 +16,21 @@ const (
 
 type Adaptor struct{}
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	r, err := anthropic.OpenAIConvertRequest(meta, req)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 	meta.Set("stream", r.Stream)
 	meta.Set(ConvertedRequest, r)
-	return "", nil, nil, nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   nil,
+	}, nil
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context) (usage *model.Usage, err adaptor.Error) {
 	if meta.GetBool("stream") {
 		usage, err = StreamHandler(meta, c)
 	} else {

+ 17 - 16
core/relay/adaptor/aws/claude/main.go

@@ -15,6 +15,7 @@ import (
 	"github.com/labring/aiproxy/core/common/render"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/anthropic"
 	"github.com/labring/aiproxy/core/relay/adaptor/aws/utils"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
@@ -91,10 +92,10 @@ func awsModelID(requestModel string) (string, error) {
 	return "", errors.Errorf("model %s not found", requestModel)
 }
 
-func Handler(meta *meta.Meta, c *gin.Context) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func Handler(meta *meta.Meta, c *gin.Context) (*model.Usage, adaptor.Error) {
 	awsModelID, err := awsModelID(meta.ActualModel)
 	if err != nil {
-		return nil, utils.WrapErr(errors.Wrap(err, "awsModelID"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 
 	awsReq := &bedrockruntime.InvokeModelInput{
@@ -105,35 +106,35 @@ func Handler(meta *meta.Meta, c *gin.Context) (*model.Usage, *relaymodel.ErrorWi
 
 	convReq, ok := meta.Get(ConvertedRequest)
 	if !ok {
-		return nil, utils.WrapErr(errors.New("request not found"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage("request not found", nil, http.StatusInternalServerError)
 	}
 	claudeReq := convReq.(*anthropic.Request)
 	awsClaudeReq := &Request{
 		AnthropicVersion: "bedrock-2023-05-31",
 	}
 	if err = copier.Copy(awsClaudeReq, claudeReq); err != nil {
-		return nil, utils.WrapErr(errors.Wrap(err, "copy request"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 
 	awsReq.Body, err = sonic.Marshal(awsClaudeReq)
 	if err != nil {
-		return nil, utils.WrapErr(errors.Wrap(err, "marshal request"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 
 	awsClient, err := utils.AwsClientFromMeta(meta)
 	if err != nil {
-		return nil, utils.WrapErr(errors.Wrap(err, "get aws client"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 
 	awsResp, err := awsClient.InvokeModel(c.Request.Context(), awsReq)
 	if err != nil {
-		return nil, utils.WrapErr(errors.Wrap(err, "InvokeModel"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 
 	claudeResponse := new(anthropic.Response)
 	err = sonic.Unmarshal(awsResp.Body, claudeResponse)
 	if err != nil {
-		return nil, utils.WrapErr(errors.Wrap(err, "unmarshal response"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 
 	openaiResp := anthropic.Response2OpenAI(meta, claudeResponse)
@@ -141,11 +142,11 @@ func Handler(meta *meta.Meta, c *gin.Context) (*model.Usage, *relaymodel.ErrorWi
 	return openaiResp.Usage.ToModelUsage(), nil
 }
 
-func StreamHandler(m *meta.Meta, c *gin.Context) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func StreamHandler(m *meta.Meta, c *gin.Context) (*model.Usage, adaptor.Error) {
 	log := middleware.GetLogger(c)
 	awsModelID, err := awsModelID(m.ActualModel)
 	if err != nil {
-		return nil, utils.WrapErr(errors.Wrap(err, "awsModelID"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 
 	awsReq := &bedrockruntime.InvokeModelWithResponseStreamInput{
@@ -156,32 +157,32 @@ func StreamHandler(m *meta.Meta, c *gin.Context) (*model.Usage, *relaymodel.Erro
 
 	convReq, ok := m.Get(ConvertedRequest)
 	if !ok {
-		return nil, utils.WrapErr(errors.New("request not found"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage("request not found", nil, http.StatusInternalServerError)
 	}
 	claudeReq, ok := convReq.(*anthropic.Request)
 	if !ok {
-		return nil, utils.WrapErr(errors.New("request not found"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage("request not found", nil, http.StatusInternalServerError)
 	}
 
 	awsClaudeReq := &Request{
 		AnthropicVersion: "bedrock-2023-05-31",
 	}
 	if err = copier.Copy(awsClaudeReq, claudeReq); err != nil {
-		return nil, utils.WrapErr(errors.Wrap(err, "copy request"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 	awsReq.Body, err = sonic.Marshal(awsClaudeReq)
 	if err != nil {
-		return nil, utils.WrapErr(errors.Wrap(err, "marshal request"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 
 	awsClient, err := utils.AwsClientFromMeta(m)
 	if err != nil {
-		return nil, utils.WrapErr(errors.Wrap(err, "get aws client"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 
 	awsResp, err := awsClient.InvokeModelWithResponseStream(c.Request.Context(), awsReq)
 	if err != nil {
-		return nil, utils.WrapErr(errors.Wrap(err, "InvokeModelWithResponseStream"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 	stream := awsResp.GetStream()
 	defer stream.Close()
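
With `utils.WrapErr` deleted (see `core/relay/adaptor/aws/utils/utils.go` below), every failure path in this file now spells out `relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)`. A hypothetical one-line helper would keep the call sites as short as before — a sketch, not part of the commit:

```go
package aws

import (
	"net/http"

	"github.com/labring/aiproxy/core/relay/adaptor"
	relaymodel "github.com/labring/aiproxy/core/relay/model"
)

// wrapInternal is hypothetical — not in this commit. It restores the
// brevity of the removed utils.WrapErr on top of the new adaptor.Error.
func wrapInternal(err error) adaptor.Error {
	return relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
}
```

Usage would read `return nil, wrapInternal(errors.Wrap(err, "marshal request"))`, which would also preserve the old wrap context ("marshal request", "InvokeModel", …) that the inlined `err.Error()` calls drop.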

+ 9 - 6
core/relay/adaptor/aws/llama3/adapter.go

@@ -1,13 +1,12 @@
 package aws
 
 import (
-	"io"
 	"net/http"
 
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
-	relaymodel "github.com/labring/aiproxy/core/relay/model"
 	relayutils "github.com/labring/aiproxy/core/relay/utils"
 )
 
@@ -17,19 +16,23 @@ const (
 
 type Adaptor struct{}
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	request, err := relayutils.UnmarshalGeneralOpenAIRequest(req)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 	request.Model = meta.ActualModel
 	meta.Set("stream", request.Stream)
 	llamaReq := ConvertRequest(request)
 	meta.Set(ConvertedRequest, llamaReq)
-	return "", nil, nil, nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   nil,
+	}, nil
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context) (usage *model.Usage, err adaptor.Error) {
 	if meta.GetBool("stream") {
 		usage, err = StreamHandler(meta, c)
 	} else {

+ 14 - 13
core/relay/adaptor/aws/llama3/main.go

@@ -16,6 +16,7 @@ import (
 	"github.com/labring/aiproxy/core/common/render"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/aws/utils"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
@@ -89,10 +90,10 @@ func ConvertRequest(textRequest *relaymodel.GeneralOpenAIRequest) *Request {
 	return &llamaRequest
 }
 
-func Handler(meta *meta.Meta, c *gin.Context) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func Handler(meta *meta.Meta, c *gin.Context) (*model.Usage, adaptor.Error) {
 	awsModelID, err := awsModelID(meta.ActualModel)
 	if err != nil {
-		return nil, utils.WrapErr(errors.Wrap(err, "awsModelID"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 
 	awsReq := &bedrockruntime.InvokeModelInput{
@@ -103,28 +104,28 @@ func Handler(meta *meta.Meta, c *gin.Context) (*model.Usage, *relaymodel.ErrorWi
 
 	llamaReq, ok := meta.Get(ConvertedRequest)
 	if !ok {
-		return nil, utils.WrapErr(errors.New("request not found"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage("request not found", nil, http.StatusInternalServerError)
 	}
 
 	awsReq.Body, err = sonic.Marshal(llamaReq)
 	if err != nil {
-		return nil, utils.WrapErr(errors.Wrap(err, "marshal request"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 
 	awsClient, err := utils.AwsClientFromMeta(meta)
 	if err != nil {
-		return nil, utils.WrapErr(errors.Wrap(err, "get aws client"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 
 	awsResp, err := awsClient.InvokeModel(c.Request.Context(), awsReq)
 	if err != nil {
-		return nil, utils.WrapErr(errors.Wrap(err, "InvokeModel"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 
 	var llamaResponse Response
 	err = sonic.Unmarshal(awsResp.Body, &llamaResponse)
 	if err != nil {
-		return nil, utils.WrapErr(errors.Wrap(err, "unmarshal response"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 
 	openaiResp := ResponseLlama2OpenAI(&llamaResponse)
@@ -163,13 +164,13 @@ func ResponseLlama2OpenAI(llamaResponse *Response) *relaymodel.TextResponse {
 	return &fullTextResponse
 }
 
-func StreamHandler(meta *meta.Meta, c *gin.Context) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func StreamHandler(meta *meta.Meta, c *gin.Context) (*model.Usage, adaptor.Error) {
 	log := middleware.GetLogger(c)
 
 	createdTime := time.Now().Unix()
 	awsModelID, err := awsModelID(meta.ActualModel)
 	if err != nil {
-		return nil, utils.WrapErr(errors.Wrap(err, "awsModelID"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 
 	awsReq := &bedrockruntime.InvokeModelWithResponseStreamInput{
@@ -180,22 +181,22 @@ func StreamHandler(meta *meta.Meta, c *gin.Context) (*model.Usage, *relaymodel.E
 
 	llamaReq, ok := meta.Get(ConvertedRequest)
 	if !ok {
-		return nil, utils.WrapErr(errors.New("request not found"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage("request not found", nil, http.StatusInternalServerError)
 	}
 
 	awsReq.Body, err = sonic.Marshal(llamaReq)
 	if err != nil {
-		return nil, utils.WrapErr(errors.Wrap(err, "marshal request"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 
 	awsClient, err := utils.AwsClientFromMeta(meta)
 	if err != nil {
-		return nil, utils.WrapErr(errors.Wrap(err, "get aws client"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 
 	awsResp, err := awsClient.InvokeModelWithResponseStream(c.Request.Context(), awsReq)
 	if err != nil {
-		return nil, utils.WrapErr(errors.Wrap(err, "InvokeModelWithResponseStream"))
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 	stream := awsResp.GetStream()
 	defer stream.Close()

+ 3 - 4
core/relay/adaptor/aws/utils/adaptor.go

@@ -2,7 +2,6 @@ package utils
 
 import (
 	"errors"
-	"io"
 	"net/http"
 	"strings"
 
@@ -11,13 +10,13 @@ import (
 	"github.com/aws/aws-sdk-go-v2/service/bedrockruntime"
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
-	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
 type AwsAdapter interface {
-	ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error)
-	DoResponse(meta *meta.Meta, c *gin.Context) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode)
+	ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error)
+	DoResponse(meta *meta.Meta, c *gin.Context) (usage *model.Usage, err adaptor.Error)
 }
 
 type AwsConfig struct {

+ 0 - 16
core/relay/adaptor/aws/utils/utils.go

@@ -1,16 +0,0 @@
-package utils
-
-import (
-	"net/http"
-
-	model "github.com/labring/aiproxy/core/relay/model"
-)
-
-func WrapErr(err error) *model.ErrorWithStatusCode {
-	return &model.ErrorWithStatusCode{
-		StatusCode: http.StatusInternalServerError,
-		Error: model.Error{
-			Message: err.Error(),
-		},
-	}
-}

+ 5 - 5
core/relay/adaptor/baidu/adaptor.go

@@ -3,12 +3,12 @@ package baidu
 import (
 	"context"
 	"fmt"
-	"io"
 	"net/http"
 	"strings"
 
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	"github.com/labring/aiproxy/core/relay/mode"
@@ -89,7 +89,7 @@ func (a *Adaptor) SetupRequestHeader(meta *meta.Meta, _ *gin.Context, req *http.
 	return nil
 }
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	switch meta.Mode {
 	case mode.Embeddings:
 		return openai.ConvertEmbeddingsRequest(meta, req, true)
@@ -100,7 +100,7 @@ func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, ht
 	case mode.ChatCompletions:
 		return ConvertRequest(meta, req)
 	default:
-		return "", nil, nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
+		return nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
 	}
 }
 
@@ -108,7 +108,7 @@ func (a *Adaptor) DoRequest(_ *meta.Meta, _ *gin.Context, req *http.Request) (*h
 	return utils.DoRequest(req)
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err adaptor.Error) {
 	switch meta.Mode {
 	case mode.Embeddings:
 		usage, err = EmbeddingsHandler(meta, c, resp)
@@ -123,7 +123,7 @@ func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Respons
 			usage, err = Handler(meta, c, resp)
 		}
 	default:
-		return nil, openai.ErrorWrapperWithMessage(fmt.Sprintf("unsupported mode: %s", meta.Mode), "unsupported_mode", http.StatusBadRequest)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(fmt.Sprintf("unsupported mode: %s", meta.Mode), nil, http.StatusBadRequest)
 	}
 	return
 }

+ 6 - 6
core/relay/adaptor/baidu/embeddings.go

@@ -8,7 +8,7 @@ import (
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
@@ -18,19 +18,19 @@ type EmbeddingsResponse struct {
 	Usage relaymodel.Usage `json:"usage"`
 }
 
-func EmbeddingsHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func EmbeddingsHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	defer resp.Body.Close()
 
 	log := middleware.GetLogger(c)
 
 	body, err := io.ReadAll(resp.Body)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 	var baiduResponse EmbeddingsResponse
 	err = sonic.Unmarshal(body, &baiduResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 	if baiduResponse.Error != nil && baiduResponse.ErrorCode != 0 {
 		return baiduResponse.Usage.ToModelUsage(), ErrorHandler(baiduResponse.Error)
@@ -39,14 +39,14 @@ func EmbeddingsHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*m
 	respMap := make(map[string]any)
 	err = sonic.Unmarshal(body, &respMap)
 	if err != nil {
-		return baiduResponse.Usage.ToModelUsage(), openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return baiduResponse.Usage.ToModelUsage(), relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 	respMap["model"] = meta.OriginModel
 	respMap["object"] = "list"
 
 	data, err := sonic.Marshal(respMap)
 	if err != nil {
-		return baiduResponse.Usage.ToModelUsage(), openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
+		return baiduResponse.Usage.ToModelUsage(), relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 	_, err = c.Writer.Write(data)
 	if err != nil {
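
The embeddings handler decodes the upstream body twice: once into a typed `EmbeddingsResponse` to check the error envelope and capture usage, then into a `map[string]any` so `model` and `object` can be rewritten without declaring the full Baidu schema. The patch step in isolation:

```go
package sketch

import "github.com/bytedance/sonic"

// patchResponse decodes a JSON body into a generic map, overwrites the
// fields the proxy owns, and re-encodes — the rewrite the handler above
// performs before the body is sent to the client.
func patchResponse(body []byte, model string) ([]byte, error) {
	m := make(map[string]any)
	if err := sonic.Unmarshal(body, &m); err != nil {
		return nil, err
	}
	m["model"] = model
	m["object"] = "list"
	return sonic.Marshal(m)
}
```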

+ 8 - 8
core/relay/adaptor/baidu/error.go

@@ -4,22 +4,22 @@ import (
 	"net/http"
 	"strconv"
 
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
-	model "github.com/labring/aiproxy/core/relay/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
+	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
 // https://cloud.baidu.com/doc/WENXINWORKSHOP/s/tlmyncueh
 
-func ErrorHandler(baiduError *Error) *model.ErrorWithStatusCode {
+func ErrorHandler(baiduError *Error) adaptor.Error {
 	switch baiduError.ErrorCode {
 	case 13, 14, 100, 110:
-		return openai.ErrorWrapperWithMessage(
+		return relaymodel.WrapperOpenAIErrorWithMessage(
 			baiduError.ErrorMsg,
 			"upstream_"+strconv.Itoa(baiduError.ErrorCode),
 			http.StatusUnauthorized,
 		)
 	case 17, 19, 111:
-		return openai.ErrorWrapperWithMessage(
+		return relaymodel.WrapperOpenAIErrorWithMessage(
 			baiduError.ErrorMsg,
 			"upstream_"+strconv.Itoa(baiduError.ErrorCode),
 			http.StatusForbidden,
@@ -30,7 +30,7 @@ func ErrorHandler(baiduError *Error) *model.ErrorWithStatusCode {
 		336106, 336118, 336122,
 		336123, 336221, 337006,
 		337008, 337009:
-		return openai.ErrorWrapperWithMessage(
+		return relaymodel.WrapperOpenAIErrorWithMessage(
 			baiduError.ErrorMsg,
 			"upstream_"+strconv.Itoa(baiduError.ErrorCode),
 			http.StatusBadRequest,
@@ -38,11 +38,11 @@ func ErrorHandler(baiduError *Error) *model.ErrorWithStatusCode {
 	case 4, 18, 336117, 336501, 336502,
 		336503, 336504, 336505,
 		336507:
-		return openai.ErrorWrapperWithMessage(
+		return relaymodel.WrapperOpenAIErrorWithMessage(
 			baiduError.ErrorMsg,
 			"upstream_"+strconv.Itoa(baiduError.ErrorCode),
 			http.StatusTooManyRequests,
 		)
 	}
-	return openai.ErrorWrapperWithMessage(baiduError.ErrorMsg, "upstream_"+strconv.Itoa(baiduError.ErrorCode), http.StatusInternalServerError)
+	return relaymodel.WrapperOpenAIErrorWithMessage(baiduError.ErrorMsg, "upstream_"+strconv.Itoa(baiduError.ErrorCode), http.StatusInternalServerError)
 }
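
Per the linked WENXINWORKSHOP page, the handler buckets Baidu's numeric error codes into HTTP classes: authentication codes map to 401, permission codes to 403, validation codes to 400, throttling codes to 429, and anything else to 500. The same dispatch could be table-driven; a sketch using only the codes visible in this hunk:

```go
package sketch

import "net/http"

// baiduCodeStatus collapses the switch above into a lookup table; the
// groupings are copied from the hunk (the start of the 400-group list is
// elided by the diff context), and unlisted codes fall back to 500.
var baiduCodeStatus = map[int]int{}

func init() {
	groups := map[int][]int{
		http.StatusUnauthorized:    {13, 14, 100, 110},
		http.StatusForbidden:       {17, 19, 111},
		http.StatusBadRequest:      {336106, 336118, 336122, 336123, 336221, 337006, 337008, 337009},
		http.StatusTooManyRequests: {4, 18, 336117, 336501, 336502, 336503, 336504, 336505, 336507},
	}
	for status, codes := range groups {
		for _, code := range codes {
			baiduCodeStatus[code] = status
		}
	}
}

func statusFor(code int) int {
	if s, ok := baiduCodeStatus[code]; ok {
		return s
	}
	return http.StatusInternalServerError
}
```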

+ 5 - 5
core/relay/adaptor/baidu/image.go

@@ -8,7 +8,7 @@ import (
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
@@ -24,19 +24,19 @@ type ImageResponse struct {
 	Created int64        `json:"created"`
 }
 
-func ImageHandler(_ *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func ImageHandler(_ *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	defer resp.Body.Close()
 
 	log := middleware.GetLogger(c)
 
 	body, err := io.ReadAll(resp.Body)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 	var imageResponse ImageResponse
 	err = sonic.Unmarshal(body, &imageResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 
 	usage := &model.Usage{
@@ -51,7 +51,7 @@ func ImageHandler(_ *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usa
 	openaiResponse := ToOpenAIImageResponse(&imageResponse)
 	data, err := sonic.Marshal(openaiResponse)
 	if err != nil {
-		return usage, openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
+		return usage, relaymodel.WrapperOpenAIErrorWithMessage(err.Error(), nil, http.StatusInternalServerError)
 	}
 	_, err = c.Writer.Write(data)
 	if err != nil {

+ 13 - 9
core/relay/adaptor/baidu/main.go

@@ -3,7 +3,6 @@ package baidu
 import (
 	"bufio"
 	"bytes"
-	"io"
 	"net/http"
 
 	"github.com/bytedance/sonic"
@@ -12,6 +11,7 @@ import (
 	"github.com/labring/aiproxy/core/common/render"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
@@ -38,10 +38,10 @@ type ChatRequest struct {
 	EnableCitation  bool                  `json:"enable_citation,omitempty"`
 }
 
-func ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	request, err := utils.UnmarshalGeneralOpenAIRequest(req)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 	request.Model = meta.ActualModel
 	baiduRequest := ChatRequest{
@@ -78,9 +78,13 @@ func ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io
 
 	data, err := sonic.Marshal(baiduRequest)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
-	return http.MethodPost, nil, bytes.NewReader(data), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   bytes.NewReader(data),
+	}, nil
 }
 
 func response2OpenAI(meta *meta.Meta, response *ChatResponse) *relaymodel.TextResponse {
@@ -122,7 +126,7 @@ func streamResponse2OpenAI(meta *meta.Meta, baiduResponse *ChatStreamResponse) *
 	return &response
 }
 
-func StreamHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func StreamHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	defer resp.Body.Close()
 
 	log := middleware.GetLogger(c)
@@ -166,13 +170,13 @@ func StreamHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model
 	return usage.ToModelUsage(), nil
 }
 
-func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	defer resp.Body.Close()
 
 	var baiduResponse ChatResponse
 	err := sonic.ConfigDefault.NewDecoder(resp.Body).Decode(&baiduResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 	}
 	if baiduResponse.Error != nil && baiduResponse.Error.ErrorCode != 0 {
 		return nil, ErrorHandler(baiduResponse.Error)
@@ -180,7 +184,7 @@ func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage
 	fullTextResponse := response2OpenAI(meta, &baiduResponse)
 	jsonResponse, err := sonic.Marshal(fullTextResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "marshal_response_body_failed", http.StatusInternalServerError)
 	}
 	c.Writer.Header().Set("Content-Type", "application/json")
 	c.Writer.WriteHeader(resp.StatusCode)

+ 6 - 6
core/relay/adaptor/baidu/rerank.go

@@ -8,7 +8,7 @@ import (
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
@@ -18,19 +18,19 @@ type RerankResponse struct {
 	Usage relaymodel.Usage `json:"usage"`
 }
 
-func RerankHandler(_ *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func RerankHandler(_ *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	defer resp.Body.Close()
 
 	log := middleware.GetLogger(c)
 
 	respBody, err := io.ReadAll(resp.Body)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "read_response_body_failed", http.StatusInternalServerError)
 	}
 	reRankResp := &RerankResponse{}
 	err = sonic.Unmarshal(respBody, reRankResp)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 	}
 	if reRankResp.Error != nil && reRankResp.Error.ErrorCode != 0 {
 		return nil, ErrorHandler(reRankResp.Error)
@@ -38,7 +38,7 @@ func RerankHandler(_ *meta.Meta, c *gin.Context, resp *http.Response) (*model.Us
 	respMap := make(map[string]any)
 	err = sonic.Unmarshal(respBody, &respMap)
 	if err != nil {
-		return reRankResp.Usage.ToModelUsage(), openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return reRankResp.Usage.ToModelUsage(), relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 	}
 	delete(respMap, "model")
 	delete(respMap, "usage")
@@ -52,7 +52,7 @@ func RerankHandler(_ *meta.Meta, c *gin.Context, resp *http.Response) (*model.Us
 	delete(respMap, "results")
 	jsonData, err := sonic.Marshal(respMap)
 	if err != nil {
-		return reRankResp.Usage.ToModelUsage(), openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
+		return reRankResp.Usage.ToModelUsage(), relaymodel.WrapperOpenAIError(err, "marshal_response_body_failed", http.StatusInternalServerError)
 	}
 	_, err = c.Writer.Write(jsonData)
 	if err != nil {

+ 5 - 5
core/relay/adaptor/baiduv2/adaptor.go

@@ -3,12 +3,12 @@ package baiduv2
 import (
 	"context"
 	"fmt"
-	"io"
 	"net/http"
 	"strings"
 
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	"github.com/labring/aiproxy/core/relay/mode"
@@ -59,7 +59,7 @@ func (a *Adaptor) SetupRequestHeader(meta *meta.Meta, _ *gin.Context, req *http.
 	return nil
 }
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	switch meta.Mode {
 	case mode.ChatCompletions, mode.Rerank:
 		actModel := meta.ActualModel
@@ -70,7 +70,7 @@ func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, ht
 		}
 		return openai.ConvertRequest(meta, req)
 	default:
-		return "", nil, nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
+		return nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
 	}
 }
 
@@ -78,12 +78,12 @@ func (a *Adaptor) DoRequest(_ *meta.Meta, _ *gin.Context, req *http.Request) (*h
 	return utils.DoRequest(req)
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err adaptor.Error) {
 	switch meta.Mode {
 	case mode.ChatCompletions, mode.Rerank:
 		return openai.DoResponse(meta, c, resp)
 	default:
-		return nil, openai.ErrorWrapperWithMessage(
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(
 			fmt.Sprintf("unsupported mode: %s", meta.Mode),
 			nil,
 			http.StatusBadRequest,

+ 11 - 8
core/relay/adaptor/cohere/adaptor.go

@@ -3,16 +3,15 @@ package cohere
 import (
 	"bytes"
 	"errors"
-	"io"
 	"net/http"
 
 	"github.com/bytedance/sonic"
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	"github.com/labring/aiproxy/core/relay/mode"
-	relaymodel "github.com/labring/aiproxy/core/relay/model"
 	"github.com/labring/aiproxy/core/relay/utils"
 )
 
@@ -33,28 +32,32 @@ func (a *Adaptor) SetupRequestHeader(meta *meta.Meta, _ *gin.Context, req *http.
 	return nil
 }
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	request, err := utils.UnmarshalGeneralOpenAIRequest(req)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 	request.Model = meta.ActualModel
 	requestBody := ConvertRequest(request)
 	if requestBody == nil {
-		return "", nil, nil, errors.New("request body is nil")
+		return nil, errors.New("request body is nil")
 	}
 	data, err := sonic.Marshal(requestBody)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
-	return http.MethodPost, nil, bytes.NewReader(data), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   bytes.NewReader(data),
+	}, nil
 }
 
 func (a *Adaptor) DoRequest(_ *meta.Meta, _ *gin.Context, req *http.Request) (*http.Response, error) {
 	return utils.DoRequest(req)
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err adaptor.Error) {
 	switch meta.Mode {
 	case mode.Rerank:
 		usage, err = openai.RerankHandler(meta, c, resp)

+ 6 - 5
core/relay/adaptor/cohere/main.go

@@ -12,6 +12,7 @@ import (
 	"github.com/labring/aiproxy/core/common/render"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
@@ -146,7 +147,7 @@ func Response2OpenAI(meta *meta.Meta, cohereResponse *Response) *relaymodel.Text
 	return &fullTextResponse
 }
 
-func StreamHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func StreamHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, openai.ErrorHanlder(resp)
 	}
@@ -190,7 +191,7 @@ func StreamHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model
 	return usage.ToModelUsage(), nil
 }
 
-func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, openai.ErrorHanlder(resp)
 	}
@@ -200,15 +201,15 @@ func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage
 	var cohereResponse Response
 	err := sonic.ConfigDefault.NewDecoder(resp.Body).Decode(&cohereResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 	}
 	if cohereResponse.ResponseID == "" {
-		return nil, openai.ErrorWrapperWithMessage(cohereResponse.Message, resp.StatusCode, resp.StatusCode)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(cohereResponse.Message, resp.StatusCode, resp.StatusCode)
 	}
 	fullTextResponse := Response2OpenAI(meta, &cohereResponse)
 	jsonResponse, err := sonic.Marshal(fullTextResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "marshal_response_body_failed", http.StatusInternalServerError)
 	}
 	c.Writer.Header().Set("Content-Type", "application/json")
 	c.Writer.WriteHeader(resp.StatusCode)
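
Note the second argument to the new wrappers: across this commit it is `nil` (aws, baidu adaptor), a string (`"upstream_13"` style codes in baidu/error.go), or a raw int (`resp.StatusCode` here, `cozeResponse.Code` below), so the `code` parameter is presumably typed `any` and forwarded as the OpenAI error `code` field. Minimal stand-ins inferred from those call sites — the real definitions live in `core/relay/model` and are not shown in this section:

```go
package sketch

// Error is a stand-in for adaptor.Error; the real interface is defined in
// core/relay/adaptor/interface.go.
type Error interface {
	error
}

type openAIError struct {
	statusCode int
	message    string
	code       any // nil, string, and int all appear in this commit
}

func (e *openAIError) Error() string { return e.message }

// WrapperOpenAIErrorWithMessage: inferred signature, assumption only.
func WrapperOpenAIErrorWithMessage(message string, code any, statusCode int) Error {
	return &openAIError{statusCode: statusCode, message: message, code: code}
}
```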

+ 12 - 9
core/relay/adaptor/coze/adaptor.go

@@ -3,16 +3,15 @@ package coze
 import (
 	"bytes"
 	"errors"
-	"io"
 	"net/http"
 	"strings"
 
 	"github.com/bytedance/sonic"
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 	"github.com/labring/aiproxy/core/relay/mode"
-	relaymodel "github.com/labring/aiproxy/core/relay/model"
 	"github.com/labring/aiproxy/core/relay/utils"
 )
 
@@ -37,17 +36,17 @@ func (a *Adaptor) SetupRequestHeader(meta *meta.Meta, _ *gin.Context, req *http.
 	return nil
 }
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	if meta.Mode != mode.ChatCompletions {
-		return "", nil, nil, errors.New("coze only support chat completions")
+		return nil, errors.New("coze only support chat completions")
 	}
 	request, err := utils.UnmarshalGeneralOpenAIRequest(req)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 	_, userID, err := getTokenAndUserID(meta.Channel.Key)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 	request.User = userID
 	request.Model = meta.ActualModel
@@ -69,16 +68,20 @@ func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, ht
 	}
 	data, err := sonic.Marshal(cozeRequest)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
-	return http.MethodPost, nil, bytes.NewReader(data), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   bytes.NewReader(data),
+	}, nil
 }
 
 func (a *Adaptor) DoRequest(_ *meta.Meta, _ *gin.Context, req *http.Request) (*http.Response, error) {
 	return utils.DoRequest(req)
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err adaptor.Error) {
 	if utils.IsStreamResponse(resp) {
 		usage, err = StreamHandler(meta, c, resp)
 	} else {

+ 6 - 5
core/relay/adaptor/coze/main.go

@@ -12,6 +12,7 @@ import (
 	"github.com/labring/aiproxy/core/common/render"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/coze/constant/messagetype"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
@@ -88,7 +89,7 @@ func Response2OpenAI(meta *meta.Meta, cozeResponse *Response) *relaymodel.TextRe
 	return &fullTextResponse
 }
 
-func StreamHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func StreamHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, openai.ErrorHanlder(resp)
 	}
@@ -146,7 +147,7 @@ func StreamHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model
 	return openai.ResponseText2Usage(responseText.String(), meta.ActualModel, int64(meta.RequestUsage.InputTokens)).ToModelUsage(), nil
 }
 
-func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, openai.ErrorHanlder(resp)
 	}
@@ -158,15 +159,15 @@ func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage
 	var cozeResponse Response
 	err := sonic.ConfigDefault.NewDecoder(resp.Body).Decode(&cozeResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 	}
 	if cozeResponse.Code != 0 {
-		return nil, openai.ErrorWrapperWithMessage(cozeResponse.Msg, cozeResponse.Code, resp.StatusCode)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(cozeResponse.Msg, cozeResponse.Code, resp.StatusCode)
 	}
 	fullTextResponse := Response2OpenAI(meta, &cozeResponse)
 	jsonResponse, err := sonic.Marshal(fullTextResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "marshal_response_body_failed", http.StatusInternalServerError)
 	}
 	c.Writer.Header().Set("Content-Type", "application/json")
 	c.Writer.WriteHeader(resp.StatusCode)

+ 4 - 6
core/relay/adaptor/doc2x/adaptor.go

@@ -2,13 +2,11 @@ package doc2x
 
 import (
 	"fmt"
-	"io"
 	"net/http"
 
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
 	"github.com/labring/aiproxy/core/relay/adaptor"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	"github.com/labring/aiproxy/core/relay/mode"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
@@ -34,12 +32,12 @@ func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
 	}
 }
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	switch meta.Mode {
 	case mode.ParsePdf:
 		return ConvertParsePdfRequest(meta, req)
 	default:
-		return "", nil, nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
+		return nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
 	}
 }
 
@@ -47,12 +45,12 @@ func (a *Adaptor) DoRequest(_ *meta.Meta, _ *gin.Context, req *http.Request) (*h
 	return utils.DoRequest(req)
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	switch meta.Mode {
 	case mode.ParsePdf:
 		return HandleParsePdfResponse(meta, c, resp)
 	default:
-		return nil, openai.ErrorWrapperWithMessage(fmt.Sprintf("unsupported mode: %s", meta.Mode), "unsupported_mode", http.StatusBadRequest)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(fmt.Sprintf("unsupported mode: %s", meta.Mode), "unsupported_mode", http.StatusBadRequest)
 	}
 }
 

+ 15 - 11
core/relay/adaptor/doc2x/pdf.go

@@ -17,27 +17,31 @@ import (
 	"github.com/bytedance/sonic"
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
 	log "github.com/sirupsen/logrus"
 )
 
-func ConvertParsePdfRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func ConvertParsePdfRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	err := req.ParseMultipartForm(1024 * 1024 * 4)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	file, _, err := req.FormFile("file")
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	responseFormat := req.FormValue("response_format")
 	meta.Set("response_format", responseFormat)
 
-	return http.MethodPost, nil, file, nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   file,
+	}, nil
 }
 
 type ParsePdfResponse struct {
@@ -50,21 +54,21 @@ type ParsePdfResponseData struct {
 	UID string `json:"uid"`
 }
 
-func HandleParsePdfResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func HandleParsePdfResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	var response ParsePdfResponse
 	err := sonic.ConfigDefault.NewDecoder(resp.Body).Decode(&response)
 	if err != nil {
-		return nil, openai.ErrorWrapperWithMessage("decode response failed: "+err.Error(), "decode_response_failed", http.StatusBadRequest)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage("decode response failed: "+err.Error(), "decode_response_failed", http.StatusBadRequest)
 	}
 
 	if response.Code != "success" {
-		return nil, openai.ErrorWrapperWithMessage("parse pdf failed: "+response.Msg, "parse_pdf_failed", http.StatusBadRequest)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage("parse pdf failed: "+response.Msg, "parse_pdf_failed", http.StatusBadRequest)
 	}
 
 	for {
 		status, err := GetStatus(context.Background(), meta, response.Data.UID)
 		if err != nil {
-			return nil, openai.ErrorWrapperWithMessage("get status failed: "+err.Error(), "get_status_failed", http.StatusInternalServerError)
+			return nil, relaymodel.WrapperOpenAIErrorWithMessage("get status failed: "+err.Error(), "get_status_failed", http.StatusInternalServerError)
 		}
 
 		switch status.Status {
@@ -73,7 +77,7 @@ func HandleParsePdfResponse(meta *meta.Meta, c *gin.Context, resp *http.Response
 		case StatusResponseDataStatusProcessing:
 			time.Sleep(1 * time.Second)
 		case StatusResponseDataStatusFailed:
-			return nil, openai.ErrorWrapperWithMessage("parse pdf failed: "+status.Detail, "parse_pdf_failed", http.StatusBadRequest)
+			return nil, relaymodel.WrapperOpenAIErrorWithMessage("parse pdf failed: "+status.Detail, "parse_pdf_failed", http.StatusBadRequest)
 		}
 	}
 }
@@ -317,7 +321,7 @@ func handleConvertPdfToMd(ctx context.Context, str string) string {
 	return result
 }
 
-func handleParsePdfResponse(meta *meta.Meta, c *gin.Context, response *StatusResponseDataResult) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func handleParsePdfResponse(meta *meta.Meta, c *gin.Context, response *StatusResponseDataResult) (*model.Usage, adaptor.Error) {
 	mds := make([]string, 0, len(response.Pages))
 	totalLength := 0
 	for _, page := range response.Pages {
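
`HandleParsePdfResponse` polls `GetStatus` once per second with no upper bound, and does so on `context.Background()` rather than the request context, so a stuck upstream keeps the loop alive even after the client disconnects. A bounded drop-in sketch of the same loop — it reuses this file's identifiers, and `StatusResponseDataStatusSuccess` plus `status.Result` are assumed names for the success branch elided from the hunk:

```go
// Bounded polling sketch, not the committed behavior: honors the client's
// context and gives up after a deadline instead of looping forever.
ctx, cancel := context.WithTimeout(c.Request.Context(), 5*time.Minute)
defer cancel()

ticker := time.NewTicker(time.Second)
defer ticker.Stop()

for {
	select {
	case <-ctx.Done():
		return nil, relaymodel.WrapperOpenAIErrorWithMessage("parse pdf timed out: "+ctx.Err().Error(), "parse_pdf_timeout", http.StatusGatewayTimeout)
	case <-ticker.C:
		status, err := GetStatus(ctx, meta, response.Data.UID)
		if err != nil {
			return nil, relaymodel.WrapperOpenAIErrorWithMessage("get status failed: "+err.Error(), "get_status_failed", http.StatusInternalServerError)
		}
		switch status.Status {
		case StatusResponseDataStatusSuccess: // assumed constant name
			return handleParsePdfResponse(meta, c, &status.Result) // assumed field name
		case StatusResponseDataStatusFailed:
			return nil, relaymodel.WrapperOpenAIErrorWithMessage("parse pdf failed: "+status.Detail, "parse_pdf_failed", http.StatusBadRequest)
		}
	}
}
```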

+ 14 - 11
core/relay/adaptor/doubao/main.go

@@ -4,7 +4,6 @@ import (
 	"bytes"
 	"errors"
 	"fmt"
-	"io"
 	"net/http"
 	"strings"
 
@@ -53,23 +52,23 @@ func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
 	return GetRequestURL(meta)
 }
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
-	method, header, body, err := a.Adaptor.ConvertRequest(meta, req)
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
+	result, err := a.Adaptor.ConvertRequest(meta, req)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 	if meta.Mode != mode.ChatCompletions || meta.OriginModel != "deepseek-reasoner" {
-		return method, header, body, nil
+		return result, nil
 	}
 
 	m := make(map[string]any)
-	err = sonic.ConfigDefault.NewDecoder(body).Decode(&m)
+	err = sonic.ConfigDefault.NewDecoder(result.Body).Decode(&m)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 	messages, _ := m["messages"].([]any)
 	if len(messages) == 0 {
-		return "", nil, nil, errors.New("messages is empty")
+		return nil, errors.New("messages is empty")
 	}
 	sysMessage := relaymodel.Message{
 		Role:    "system",
@@ -79,10 +78,14 @@ func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, ht
 	m["messages"] = messages
 	newBody, err := sonic.Marshal(m)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
-	return method, header, bytes.NewReader(newBody), nil
+	return &adaptor.ConvertRequestResult{
+		Method: result.Method,
+		Header: result.Header,
+		Body:   bytes.NewReader(newBody),
+	}, nil
 }
 
 func newHandlerPreHandler(websearchCount *int64) func(_ *meta.Meta, node *ast.Node) error {
@@ -130,7 +133,7 @@ func handlerPreHandler(meta *meta.Meta, node *ast.Node, websearchCount *int64) e
 	})
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err adaptor.Error) {
 	switch meta.Mode {
 	case mode.ChatCompletions:
 		websearchCount := int64(0)
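
The doubao override decodes the already-converted body with `sonic.ConfigDefault.NewDecoder(result.Body)`, which consumes the `io.Reader`, so the deepseek-reasoner branch must hand back a fresh reader built from the re-marshaled map — which the returned `bytes.NewReader(newBody)` does. The consume-and-rebuild pattern in isolation:

```go
package sketch

import (
	"bytes"
	"io"

	"github.com/bytedance/sonic"
)

// rewriteBody decodes a JSON reader (consuming it), lets patch mutate the
// generic map, and returns a fresh reader — the pattern the doubao adaptor
// uses to prepend a system message for deepseek-reasoner.
func rewriteBody(body io.Reader, patch func(map[string]any) error) (io.Reader, error) {
	m := make(map[string]any)
	if err := sonic.ConfigDefault.NewDecoder(body).Decode(&m); err != nil {
		return nil, err
	}
	if err := patch(m); err != nil {
		return nil, err
	}
	data, err := sonic.Marshal(m)
	if err != nil {
		return nil, err
	}
	return bytes.NewReader(data), nil
}
```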

+ 5 - 6
core/relay/adaptor/doubaoaudio/main.go

@@ -2,12 +2,11 @@ package doubaoaudio
 
 import (
 	"fmt"
-	"io"
 	"net/http"
 
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 	"github.com/labring/aiproxy/core/relay/mode"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
@@ -39,12 +38,12 @@ func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
 	return GetRequestURL(meta)
 }
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	switch meta.Mode {
 	case mode.AudioSpeech:
 		return ConvertTTSRequest(meta, req)
 	default:
-		return "", nil, nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
+		return nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
 	}
 }
 
@@ -71,12 +70,12 @@ func (a *Adaptor) DoRequest(meta *meta.Meta, _ *gin.Context, req *http.Request)
 	}
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	switch meta.Mode {
 	case mode.AudioSpeech:
 		return TTSDoResponse(meta, c, resp)
 	default:
-		return nil, openai.ErrorWrapperWithMessage(
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(
 			fmt.Sprintf("unsupported mode: %s", meta.Mode),
 			nil,
 			http.StatusBadRequest,

+ 15 - 11
core/relay/adaptor/doubaoaudio/tts.go

@@ -16,7 +16,7 @@ import (
 	"github.com/labring/aiproxy/core/common/conv"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
 	"github.com/labring/aiproxy/core/relay/utils"
@@ -64,20 +64,20 @@ type RequestConfig struct {
 var defaultHeader = []byte{0x11, 0x10, 0x11, 0x00}
 
 //nolint:gosec
-func ConvertTTSRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func ConvertTTSRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	request, err := utils.UnmarshalTTSRequest(req)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	reqMap, err := utils.UnmarshalMap(req)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	appID, token, err := getAppIDAndToken(meta.Channel.Key)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	cluster := "volcano_tts"
@@ -128,12 +128,12 @@ func ConvertTTSRequest(meta *meta.Meta, req *http.Request) (string, http.Header,
 
 	data, err := sonic.Marshal(doubaoRequest)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	compressedData, err := gzipCompress(data)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	payloadArr := make([]byte, 4)
@@ -145,7 +145,11 @@ func ConvertTTSRequest(meta *meta.Meta, req *http.Request) (string, http.Header,
 	//nolint:makezero
 	clientRequest = append(clientRequest, compressedData...)
 
-	return http.MethodPost, nil, bytes.NewReader(clientRequest), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   bytes.NewReader(clientRequest),
+	}, nil
 }
 
 func TTSDoRequest(meta *meta.Meta, req *http.Request) (*http.Response, error) {
@@ -175,7 +179,7 @@ func TTSDoRequest(meta *meta.Meta, req *http.Request) (*http.Response, error) {
 	}, nil
 }
 
-func TTSDoResponse(meta *meta.Meta, c *gin.Context, _ *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func TTSDoResponse(meta *meta.Meta, c *gin.Context, _ *http.Response) (*model.Usage, adaptor.Error) {
 	log := middleware.GetLogger(c)
 
 	conn := meta.MustGet("ws_conn").(*websocket.Conn)
@@ -189,12 +193,12 @@ func TTSDoResponse(meta *meta.Meta, c *gin.Context, _ *http.Response) (*model.Us
 	for {
 		_, message, err := conn.ReadMessage()
 		if err != nil {
-			return usage, openai.ErrorWrapperWithMessage(err.Error(), "doubao_wss_read_msg_failed", http.StatusInternalServerError)
+			return usage, relaymodel.WrapperOpenAIError(err, "doubao_wss_read_msg_failed", http.StatusInternalServerError)
 		}
 
 		resp, err := parseResponse(message)
 		if err != nil {
-			return usage, openai.ErrorWrapperWithMessage(err.Error(), "doubao_tts_parse_response_failed", http.StatusInternalServerError)
+			return usage, relaymodel.WrapperOpenAIError(err, "doubao_tts_parse_response_failed", http.StatusInternalServerError)
 		}
 
 		_, err = c.Writer.Write(resp.Audio)
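
Doubao TTS is the one adaptor in this commit whose converted body is a binary websocket frame rather than JSON: a fixed 4-byte protocol header, a 4-byte payload length, then the gzipped request. The hunk elides the length encoding between `payloadArr := make([]byte, 4)` and the appends; a sketch of the framing those fragments imply (the big-endian byte order is an assumption, not visible in the diff):

```go
// Frame layout implied by the visible fragments (byte order assumed):
// defaultHeader (4 bytes) | payload size, big-endian uint32 (4 bytes) | gzip(JSON)
payloadArr := make([]byte, 4)
binary.BigEndian.PutUint32(payloadArr, uint32(len(compressedData)))

clientRequest := make([]byte, 0, len(defaultHeader)+4+len(compressedData))
clientRequest = append(clientRequest, defaultHeader...)
clientRequest = append(clientRequest, payloadArr...)
clientRequest = append(clientRequest, compressedData...)
```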

+ 5 - 6
core/relay/adaptor/gemini/adaptor.go

@@ -2,12 +2,11 @@ package gemini
 
 import (
 	"fmt"
-	"io"
 	"net/http"
 
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 	"github.com/labring/aiproxy/core/relay/mode"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
@@ -56,14 +55,14 @@ func (a *Adaptor) SetupRequestHeader(meta *meta.Meta, _ *gin.Context, req *http.
 	return nil
 }
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	switch meta.Mode {
 	case mode.Embeddings:
 		return ConvertEmbeddingRequest(meta, req)
 	case mode.ChatCompletions:
 		return ConvertRequest(meta, req)
 	default:
-		return "", nil, nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
+		return nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
 	}
 }
 
@@ -71,7 +70,7 @@ func (a *Adaptor) DoRequest(_ *meta.Meta, _ *gin.Context, req *http.Request) (*h
 	return utils.DoRequest(req)
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err adaptor.Error) {
 	switch meta.Mode {
 	case mode.Embeddings:
 		usage, err = EmbeddingHandler(meta, c, resp)
@@ -82,7 +81,7 @@ func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Respons
 			usage, err = Handler(meta, c, resp)
 		}
 	default:
-		return nil, openai.ErrorWrapperWithMessage(fmt.Sprintf("unsupported mode: %s", meta.Mode), "unsupported_mode", http.StatusBadRequest)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(fmt.Sprintf("unsupported mode: %s", meta.Mode), "unsupported_mode", http.StatusBadRequest)
 	}
 	return
 }

+ 13 - 9
core/relay/adaptor/gemini/embeddings.go

@@ -2,22 +2,22 @@ package gemini
 
 import (
 	"bytes"
-	"io"
 	"net/http"
 
 	"github.com/bytedance/sonic"
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
 	"github.com/labring/aiproxy/core/relay/utils"
 )
 
-func ConvertEmbeddingRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func ConvertEmbeddingRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	request, err := utils.UnmarshalGeneralOpenAIRequest(req)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 	request.Model = meta.ActualModel
 
@@ -42,12 +42,16 @@ func ConvertEmbeddingRequest(meta *meta.Meta, req *http.Request) (string, http.H
 		Requests: requests,
 	})
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
-	return http.MethodPost, nil, bytes.NewReader(data), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   bytes.NewReader(data),
+	}, nil
 }
 
-func EmbeddingHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func EmbeddingHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, openai.ErrorHanlder(resp)
 	}
@@ -57,15 +61,15 @@ func EmbeddingHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*mo
 	var geminiEmbeddingResponse EmbeddingResponse
 	err := sonic.ConfigDefault.NewDecoder(resp.Body).Decode(&geminiEmbeddingResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 	}
 	if geminiEmbeddingResponse.Error != nil {
-		return nil, openai.ErrorWrapperWithMessage(geminiEmbeddingResponse.Error.Message, geminiEmbeddingResponse.Error.Code, resp.StatusCode)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(geminiEmbeddingResponse.Error.Message, geminiEmbeddingResponse.Error.Code, resp.StatusCode)
 	}
 	fullTextResponse := embeddingResponse2OpenAI(meta, &geminiEmbeddingResponse)
 	jsonResponse, err := sonic.Marshal(fullTextResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "marshal_response_body_failed", http.StatusInternalServerError)
 	}
 	c.Writer.Header().Set("Content-Type", "application/json")
 	c.Writer.WriteHeader(resp.StatusCode)

+ 16 - 12
core/relay/adaptor/gemini/main.go

@@ -6,7 +6,6 @@ import (
 	"context"
 	"errors"
 	"fmt"
-	"io"
 	"net/http"
 	"strings"
 	"sync"
@@ -20,6 +19,7 @@ import (
 	"github.com/labring/aiproxy/core/common/render"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
@@ -288,16 +288,16 @@ func processImageTasks(ctx context.Context, imageTasks []*Part) error {
 }
 
 // Setting safety to the lowest possible values since Gemini is already powerless enough
-func ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	adaptorConfig := Config{}
 	err := meta.ChannelConfig.SpecConfig(&adaptorConfig)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	textRequest, err := utils.UnmarshalGeneralOpenAIRequest(req)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	textRequest.Model = meta.ActualModel
@@ -308,13 +308,13 @@ func ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io
 	// Process image tasks concurrently
 	if len(imageTasks) > 0 {
 		if err := processImageTasks(req.Context(), imageTasks); err != nil {
-			return "", nil, nil, err
+			return nil, err
 		}
 	}
 
 	config, err := buildGenerationConfig(meta, req, textRequest)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	// Build actual request
@@ -329,10 +329,14 @@ func ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io
 
 	data, err := sonic.Marshal(geminiRequest)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
-	return http.MethodPost, nil, bytes.NewReader(data), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   bytes.NewReader(data),
+	}, nil
 }
 
 type ChatResponse struct {
@@ -602,7 +606,7 @@ func PutImageScannerBuffer(buf *[]byte) {
 	scannerBufferPool.Put(buf)
 }
 
-func StreamHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func StreamHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	defer resp.Body.Close()
 
 	if resp.StatusCode != http.StatusOK {
@@ -660,7 +664,7 @@ func StreamHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model
 	return usage.ToModelUsage(), nil
 }
 
-func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, openai.ErrorHanlder(resp)
 	}
@@ -670,12 +674,12 @@ func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage
 	var geminiResponse ChatResponse
 	err := sonic.ConfigDefault.NewDecoder(resp.Body).Decode(&geminiResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 	}
 	fullTextResponse := responseChat2OpenAI(meta, &geminiResponse)
 	jsonResponse, err := sonic.Marshal(fullTextResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "marshal_response_body_failed", http.StatusInternalServerError)
 	}
 	c.Writer.Header().Set("Content-Type", "application/json")
 	c.Writer.WriteHeader(resp.StatusCode)

+ 58 - 26
core/relay/adaptor/interface.go

@@ -1,18 +1,75 @@
 package adaptor
 
 import (
+	"encoding/json"
 	"errors"
 	"fmt"
 	"io"
 	"net/http"
 	"reflect"
 
+	"github.com/bytedance/sonic"
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
 	"github.com/labring/aiproxy/core/relay/meta"
-	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
+type Adaptor interface {
+	GetBaseURL() string
+	GetRequestURL(meta *meta.Meta) (string, error)
+	SetupRequestHeader(meta *meta.Meta, c *gin.Context, req *http.Request) error
+	ConvertRequest(meta *meta.Meta, req *http.Request) (*ConvertRequestResult, error)
+	DoRequest(meta *meta.Meta, c *gin.Context, req *http.Request) (*http.Response, error)
+	DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, Error)
+	GetModelList() []*model.ModelConfig
+}
+
+type ConvertRequestResult struct {
+	Method string
+	Header http.Header
+	Body   io.Reader
+}
+
+type Error interface {
+	json.Marshaler
+	StatusCode() int
+}
+
+type ErrorImpl[T any] struct {
+	error      T
+	statusCode int
+}
+
+func (e ErrorImpl[T]) MarshalJSON() ([]byte, error) {
+	return sonic.Marshal(e.error)
+}
+
+func (e ErrorImpl[T]) StatusCode() int {
+	return e.statusCode
+}
+
+func NewError[T any](statusCode int, err T) Error {
+	return ErrorImpl[T]{
+		error:      err,
+		statusCode: statusCode,
+	}
+}
+
+var ErrGetBalanceNotImplemented = errors.New("get balance not implemented")
+
+type Balancer interface {
+	GetBalance(channel *model.Channel) (float64, error)
+}
+
+type KeyValidator interface {
+	ValidateKey(key string) error
+	KeyHelp() string
+}
+
+type Features interface {
+	Features() []string
+}
+
 type ConfigType string
 
 const (
@@ -82,31 +139,6 @@ func ValidateConfigTemplateValue(template ConfigTemplate, value any) error {
 
 type ConfigTemplates = map[string]ConfigTemplate
 
-type Adaptor interface {
-	GetBaseURL() string
-	GetRequestURL(meta *meta.Meta) (string, error)
-	SetupRequestHeader(meta *meta.Meta, c *gin.Context, req *http.Request) error
-	ConvertRequest(meta *meta.Meta, req *http.Request) (method string, header http.Header, body io.Reader, err error)
-	DoRequest(meta *meta.Meta, c *gin.Context, req *http.Request) (*http.Response, error)
-	DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode)
-	GetModelList() []*model.ModelConfig
-}
-
-var ErrGetBalanceNotImplemented = errors.New("get balance not implemented")
-
-type Balancer interface {
-	GetBalance(channel *model.Channel) (float64, error)
-}
-
-type KeyValidator interface {
-	ValidateKey(key string) error
-	KeyHelp() string
-}
-
-type Features interface {
-	Features() []string
-}
-
 type Config interface {
 	ConfigTemplates() ConfigTemplates
 }
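
The interface.go hunk is the heart of the refactor: `ConvertRequest` now returns a single `*ConvertRequestResult` instead of three bare values, and `DoResponse` returns the new `adaptor.Error` interface instead of `*relaymodel.ErrorWithStatusCode`. Dropping the `relaymodel` import here also breaks the package cycle, which is what lets `relay/model` now depend on `adaptor` and provide the `NewOpenAIError`/`WrapperOpenAIError*` constructors used throughout the other hunks. A minimal sketch of plugging a vendor payload into the generic wrapper (the `vendorError` type and `main` harness are illustrative, not from the repo):

```go
package main

import (
	"fmt"
	"net/http"

	"github.com/labring/aiproxy/core/relay/adaptor"
)

// vendorError is a hypothetical upstream payload; any JSON-serializable type
// works, since ErrorImpl[T] marshals the wrapped value as the response body.
type vendorError struct {
	Code    string `json:"code"`
	Message string `json:"message"`
}

func main() {
	// NewError pairs the payload with an HTTP status, satisfying adaptor.Error
	// (json.Marshaler + StatusCode), which is all DoResponse callers rely on.
	var err adaptor.Error = adaptor.NewError(http.StatusBadGateway, vendorError{
		Code:    "upstream_timeout",
		Message: "vendor did not respond in time",
	})

	body, _ := err.MarshalJSON()
	fmt.Println(err.StatusCode(), string(body))
	// 502 {"code":"upstream_timeout","message":"vendor did not respond in time"}
}
```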

+ 3 - 4
core/relay/adaptor/jina/adaptor.go

@@ -1,15 +1,14 @@
 package jina
 
 import (
-	"io"
 	"net/http"
 
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	"github.com/labring/aiproxy/core/relay/mode"
-	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
 type Adaptor struct {
@@ -22,7 +21,7 @@ func (a *Adaptor) GetBaseURL() string {
 	return baseURL
 }
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	switch meta.Mode {
 	case mode.Embeddings:
 		return ConvertEmbeddingsRequest(meta, req)
@@ -31,7 +30,7 @@ func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, ht
 	}
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err adaptor.Error) {
 	switch meta.Mode {
 	case mode.Rerank:
 		return RerankHandler(meta, c, resp)

+ 9 - 5
core/relay/adaptor/jina/embeddings.go

@@ -2,20 +2,20 @@ package jina
 
 import (
 	"bytes"
-	"io"
 	"net/http"
 
 	"github.com/bytedance/sonic"
 	"github.com/labring/aiproxy/core/common"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 )
 
 //nolint:gocritic
-func ConvertEmbeddingsRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func ConvertEmbeddingsRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	reqMap := make(map[string]any)
 	err := common.UnmarshalBodyReusable(req, &reqMap)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	reqMap["model"] = meta.ActualModel
@@ -29,7 +29,11 @@ func ConvertEmbeddingsRequest(meta *meta.Meta, req *http.Request) (string, http.
 
 	jsonData, err := sonic.Marshal(reqMap)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
-	return http.MethodPost, nil, bytes.NewReader(jsonData), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   bytes.NewReader(jsonData),
+	}, nil
 }

+ 11 - 14
core/relay/adaptor/jina/error.go

@@ -5,8 +5,8 @@ import (
 	"net/http"
 
 	"github.com/bytedance/sonic"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
-	"github.com/labring/aiproxy/core/relay/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
+	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
 type Detail struct {
@@ -15,21 +15,21 @@ type Detail struct {
 	Type string   `json:"type"`
 }
 
-func ErrorHanlder(resp *http.Response) *model.ErrorWithStatusCode {
+func ErrorHanlder(resp *http.Response) adaptor.Error {
 	defer resp.Body.Close()
 
 	body, err := io.ReadAll(resp.Body)
 	if err != nil {
-		return openai.ErrorWrapper(err, "read_response_body_failed", resp.StatusCode)
+		return relaymodel.WrapperOpenAIError(err, "read_response_body_failed", resp.StatusCode)
 	}
 
 	detailValue, err := sonic.Get(body, "detail")
 	if err != nil {
-		return openai.ErrorWrapper(err, "unmarshal_response_body_failed", resp.StatusCode)
+		return relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", resp.StatusCode)
 	}
 
 	errorMessage := "unknown error"
-	errorType := openai.ErrorTypeUpstream
+	errorType := relaymodel.ErrorTypeUpstream
 
 	if detailStr, err := detailValue.String(); err == nil {
 		errorMessage = detailStr
@@ -44,12 +44,9 @@ func ErrorHanlder(resp *http.Response) *model.ErrorWithStatusCode {
 		}
 	}
 
-	return &model.ErrorWithStatusCode{
-		Error: model.Error{
-			Message: errorMessage,
-			Type:    errorType,
-			Code:    resp.StatusCode,
-		},
-		StatusCode: resp.StatusCode,
-	}
+	return relaymodel.NewOpenAIError(resp.StatusCode, relaymodel.OpenAIError{
+		Message: errorMessage,
+		Type:    errorType,
+		Code:    resp.StatusCode,
+	})
 }
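
The Jina handler shows the replacement pattern for the deleted `ErrorWithStatusCode` literal: the status code rides on the `adaptor.Error` envelope, while only the `OpenAIError` payload is marshaled into the response body. Assuming the usual JSON tags on `relaymodel.OpenAIError` (not shown in this diff), a 422 with detail "invalid input" would serialize roughly as:

```go
e := relaymodel.NewOpenAIError(http.StatusUnprocessableEntity, relaymodel.OpenAIError{
	Message: "invalid input",
	Type:    relaymodel.ErrorTypeUpstream,
	Code:    http.StatusUnprocessableEntity,
})
// e.StatusCode() == 422; e.MarshalJSON() presumably yields something like:
// {"message":"invalid input","type":"upstream_error","code":422}
```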

+ 10 - 10
core/relay/adaptor/jina/rerank.go

@@ -9,12 +9,12 @@ import (
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
-func RerankHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func RerankHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, ErrorHanlder(resp)
 	}
@@ -25,21 +25,21 @@ func RerankHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model
 
 	responseBody, err := io.ReadAll(resp.Body)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "read_response_body_failed", http.StatusInternalServerError)
 	}
 	node, err := sonic.Get(responseBody)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 	}
 	var usage relaymodel.Usage
 	usageNode := node.Get("usage")
 	usageStr, err := usageNode.Raw()
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_usage_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_usage_failed", http.StatusInternalServerError)
 	}
 	err = sonic.UnmarshalString(usageStr, &usage)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_usage_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_usage_failed", http.StatusInternalServerError)
 	}
 	if usage.PromptTokens == 0 && usage.TotalTokens != 0 {
 		usage.PromptTokens = usage.TotalTokens
@@ -52,20 +52,20 @@ func RerankHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model
 		"tokens": modelUsage,
 	})
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_usage_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_usage_failed", http.StatusInternalServerError)
 	}
 	_, err = node.Unset("usage")
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_usage_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_usage_failed", http.StatusInternalServerError)
 	}
 	_, err = node.Set("model", ast.NewString(meta.OriginModel))
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_usage_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_usage_failed", http.StatusInternalServerError)
 	}
 	c.Writer.WriteHeader(resp.StatusCode)
 	respData, err := node.MarshalJSON()
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "marshal_response_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "marshal_response_failed", http.StatusInternalServerError)
 	}
 	_, err = c.Writer.Write(respData)
 	if err != nil {

+ 2 - 4
core/relay/adaptor/minimax/adaptor.go

@@ -2,7 +2,6 @@ package minimax
 
 import (
 	"fmt"
-	"io"
 	"net/http"
 
 	"github.com/gin-gonic/gin"
@@ -11,7 +10,6 @@ import (
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	"github.com/labring/aiproxy/core/relay/mode"
-	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
 type Adaptor struct {
@@ -54,7 +52,7 @@ func (a *Adaptor) GetRequestURL(meta *meta.Meta) (string, error) {
 	}
 }
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	switch meta.Mode {
 	case mode.ChatCompletions:
 		return openai.ConvertTextRequest(meta, req, true)
@@ -65,7 +63,7 @@ func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, ht
 	}
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err adaptor.Error) {
 	switch meta.Mode {
 	case mode.AudioSpeech:
 		return TTSHandler(meta, c, resp)

+ 15 - 10
core/relay/adaptor/minimax/tts.go

@@ -13,16 +13,17 @@ import (
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
 	"github.com/labring/aiproxy/core/relay/utils"
 )
 
-func ConvertTTSRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func ConvertTTSRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	reqMap, err := utils.UnmarshalMap(req)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	reqMap["model"] = meta.ActualModel
@@ -79,10 +80,14 @@ func ConvertTTSRequest(meta *meta.Meta, req *http.Request) (string, http.Header,
 
 	body, err := sonic.Marshal(reqMap)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
-	return http.MethodPost, nil, bytes.NewReader(body), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   bytes.NewReader(body),
+	}, nil
 }
 
 type TTSExtraInfo struct {
@@ -106,7 +111,7 @@ type TTSResponse struct {
 	Data      TTSData      `json:"data"`
 }
 
-func TTSHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func TTSHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, openai.ErrorHanlder(resp)
 	}
@@ -121,22 +126,22 @@ func TTSHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Us
 
 	body, err := io.ReadAll(resp.Body)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "TTS_ERROR", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "TTS_ERROR", http.StatusInternalServerError)
 	}
 
 	var result TTSResponse
 	if err := sonic.Unmarshal(body, &result); err != nil {
-		return nil, openai.ErrorWrapper(err, "TTS_ERROR", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "TTS_ERROR", http.StatusInternalServerError)
 	}
 	if result.BaseResp != nil && result.BaseResp.StatusCode != 0 {
-		return nil, openai.ErrorWrapperWithMessage(result.BaseResp.StatusMsg, "TTS_ERROR_"+strconv.Itoa(result.BaseResp.StatusCode), http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(result.BaseResp.StatusMsg, "TTS_ERROR_"+strconv.Itoa(result.BaseResp.StatusCode), http.StatusInternalServerError)
 	}
 
 	resp.Header.Set("Content-Type", "audio/"+result.ExtraInfo.AudioFormat)
 
 	audioBytes, err := hex.DecodeString(result.Data.Audio)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "TTS_ERROR", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "TTS_ERROR", http.StatusInternalServerError)
 	}
 
 	_, err = c.Writer.Write(audioBytes)
@@ -155,7 +160,7 @@ func TTSHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Us
 	}, nil
 }
 
-func ttsStreamHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func ttsStreamHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	defer resp.Body.Close()
 
 	resp.Header.Set("Content-Type", "application/octet-stream")

+ 6 - 7
core/relay/adaptor/ollama/adaptor.go

@@ -3,12 +3,11 @@ package ollama
 import (
 	"errors"
 	"fmt"
-	"io"
 	"net/http"
 
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 	"github.com/labring/aiproxy/core/relay/mode"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
@@ -43,9 +42,9 @@ func (a *Adaptor) SetupRequestHeader(meta *meta.Meta, _ *gin.Context, req *http.
 	return nil
 }
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, request *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, request *http.Request) (*adaptor.ConvertRequestResult, error) {
 	if request == nil {
-		return "", nil, nil, errors.New("request is nil")
+		return nil, errors.New("request is nil")
 	}
 	switch meta.Mode {
 	case mode.Embeddings:
@@ -53,7 +52,7 @@ func (a *Adaptor) ConvertRequest(meta *meta.Meta, request *http.Request) (string
 	case mode.ChatCompletions, mode.Completions:
 		return ConvertRequest(meta, request)
 	default:
-		return "", nil, nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
+		return nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
 	}
 }
 
@@ -61,7 +60,7 @@ func (a *Adaptor) DoRequest(_ *meta.Meta, _ *gin.Context, req *http.Request) (*h
 	return utils.DoRequest(req)
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err adaptor.Error) {
 	switch meta.Mode {
 	case mode.Embeddings:
 		usage, err = EmbeddingHandler(meta, c, resp)
@@ -72,7 +71,7 @@ func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Respons
 			usage, err = Handler(meta, c, resp)
 		}
 	default:
-		return nil, openai.ErrorWrapperWithMessage(fmt.Sprintf("unsupported mode: %s", meta.Mode), "unsupported_mode", http.StatusBadRequest)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(fmt.Sprintf("unsupported mode: %s", meta.Mode), "unsupported_mode", http.StatusBadRequest)
 	}
 	return
 }

+ 6 - 6
core/relay/adaptor/ollama/error.go

@@ -6,26 +6,26 @@ import (
 
 	"github.com/bytedance/sonic"
 	"github.com/labring/aiproxy/core/common/conv"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
-	model "github.com/labring/aiproxy/core/relay/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
+	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
 type errorResponse struct {
 	Error string `json:"error"`
 }
 
-func ErrorHandler(resp *http.Response) *model.ErrorWithStatusCode {
+func ErrorHandler(resp *http.Response) adaptor.Error {
 	defer resp.Body.Close()
 
 	data, err := io.ReadAll(resp.Body)
 	if err != nil {
-		return openai.ErrorWrapperWithMessage("read response body error: "+err.Error(), nil, http.StatusInternalServerError)
+		return relaymodel.WrapperOpenAIErrorWithMessage("read response body error: "+err.Error(), nil, http.StatusInternalServerError)
 	}
 
 	var er errorResponse
 	err = sonic.Unmarshal(data, &er)
 	if err != nil {
-		return openai.ErrorWrapperWithMessage(conv.BytesToString(data), nil, http.StatusInternalServerError)
+		return relaymodel.WrapperOpenAIErrorWithMessage(conv.BytesToString(data), nil, http.StatusInternalServerError)
 	}
-	return openai.ErrorWrapperWithMessage(er.Error, nil, resp.StatusCode)
+	return relaymodel.WrapperOpenAIErrorWithMessage(er.Error, nil, resp.StatusCode)
 }

+ 27 - 19
core/relay/adaptor/ollama/main.go

@@ -3,7 +3,6 @@ package ollama
 import (
 	"bufio"
 	"bytes"
-	"io"
 	"net/http"
 	"time"
 
@@ -15,17 +14,18 @@ import (
 	"github.com/labring/aiproxy/core/common/splitter"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
 	"github.com/labring/aiproxy/core/relay/utils"
 )
 
-func ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	var request relaymodel.GeneralOpenAIRequest
 	err := common.UnmarshalBodyReusable(req, &request)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	ollamaRequest := ChatRequest{
@@ -64,7 +64,7 @@ func ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io
 			case relaymodel.ContentTypeImageURL:
 				_, data, err := image.GetImageFromURL(req.Context(), part.ImageURL.URL)
 				if err != nil {
-					return "", nil, nil, err
+					return nil, err
 				}
 				imageUrls = append(imageUrls, data)
 			}
@@ -106,10 +106,14 @@ func ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io
 
 	data, err := sonic.Marshal(ollamaRequest)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
-	return http.MethodPost, nil, bytes.NewReader(data), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   bytes.NewReader(data),
+	}, nil
 }
 
 func getToolCalls(ollamaResponse *ChatResponse) []*relaymodel.Tool {
@@ -196,7 +200,7 @@ func streamResponse2OpenAI(meta *meta.Meta, ollamaResponse *ChatResponse) *relay
 	return &response
 }
 
-func StreamHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func StreamHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, ErrorHandler(resp)
 	}
@@ -250,10 +254,10 @@ func StreamHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model
 	return usage.ToModelUsage(), nil
 }
 
-func ConvertEmbeddingRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func ConvertEmbeddingRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	request, err := utils.UnmarshalGeneralOpenAIRequest(req)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 	request.Model = meta.ActualModel
 	data, err := sonic.Marshal(&EmbeddingRequest{
@@ -268,12 +272,16 @@ func ConvertEmbeddingRequest(meta *meta.Meta, req *http.Request) (string, http.H
 		},
 	})
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
-	return http.MethodPost, nil, bytes.NewReader(data), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   bytes.NewReader(data),
+	}, nil
 }
 
-func EmbeddingHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func EmbeddingHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, ErrorHandler(resp)
 	}
@@ -283,17 +291,17 @@ func EmbeddingHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*mo
 	var ollamaResponse EmbeddingResponse
 	err := sonic.ConfigDefault.NewDecoder(resp.Body).Decode(&ollamaResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 	}
 
 	if ollamaResponse.Error != "" {
-		return nil, openai.ErrorWrapperWithMessage(ollamaResponse.Error, openai.ErrorTypeUpstream, resp.StatusCode)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(ollamaResponse.Error, relaymodel.ErrorTypeUpstream, resp.StatusCode)
 	}
 
 	fullTextResponse := embeddingResponseOllama2OpenAI(meta, &ollamaResponse)
 	jsonResponse, err := sonic.Marshal(fullTextResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "marshal_response_body_failed", http.StatusInternalServerError)
 	}
 	c.Writer.Header().Set("Content-Type", "application/json")
 	c.Writer.WriteHeader(resp.StatusCode)
@@ -321,7 +329,7 @@ func embeddingResponseOllama2OpenAI(meta *meta.Meta, response *EmbeddingResponse
 	return &openAIEmbeddingResponse
 }
 
-func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, ErrorHandler(resp)
 	}
@@ -331,10 +339,10 @@ func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage
 	var ollamaResponse ChatResponse
 	err := sonic.ConfigDefault.NewDecoder(resp.Body).Decode(&ollamaResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 	}
 	if ollamaResponse.Error != "" {
-		return nil, openai.ErrorWrapperWithMessage(ollamaResponse.Error, openai.ErrorTypeUpstream, resp.StatusCode)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(ollamaResponse.Error, relaymodel.ErrorTypeUpstream, resp.StatusCode)
 	}
 	fullTextResponse := response2OpenAI(meta, &ollamaResponse)
 
@@ -344,7 +352,7 @@ func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage
 
 	jsonResponse, err := sonic.Marshal(fullTextResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "marshal_response_body_failed", http.StatusInternalServerError)
 	}
 	c.Writer.Header().Set("Content-Type", "application/json")
 	c.Writer.WriteHeader(resp.StatusCode)

+ 16 - 13
core/relay/adaptor/openai/adaptor.go

@@ -4,7 +4,6 @@ import (
 	"bytes"
 	"errors"
 	"fmt"
-	"io"
 	"net/http"
 
 	"github.com/bytedance/sonic"
@@ -65,13 +64,13 @@ func (a *Adaptor) SetupRequestHeader(meta *meta.Meta, _ *gin.Context, req *http.
 	return nil
 }
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	return ConvertRequest(meta, req)
 }
 
-func ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	if req == nil {
-		return "", nil, nil, errors.New("request is nil")
+		return nil, errors.New("request is nil")
 	}
 	switch meta.Mode {
 	case mode.Moderations:
@@ -91,11 +90,11 @@ func ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io
 	case mode.Rerank:
 		return ConvertRerankRequest(meta, req)
 	default:
-		return "", nil, nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
+		return nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
 	}
 }
 
-func DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err adaptor.Error) {
 	switch meta.Mode {
 	case mode.ImagesGenerations, mode.ImagesEdits:
 		usage, err = ImagesHandler(meta, c, resp)
@@ -116,30 +115,34 @@ func DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *mo
 			usage, err = Handler(meta, c, resp, nil)
 		}
 	default:
-		return nil, ErrorWrapperWithMessage(fmt.Sprintf("unsupported mode: %s", meta.Mode), "unsupported_mode", http.StatusBadRequest)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(fmt.Sprintf("unsupported mode: %s", meta.Mode), "unsupported_mode", http.StatusBadRequest)
 	}
 	return usage, err
 }
 
-func ConvertTextRequest(meta *meta.Meta, req *http.Request, doNotPatchStreamOptionsIncludeUsage bool) (string, http.Header, io.Reader, error) {
+func ConvertTextRequest(meta *meta.Meta, req *http.Request, doNotPatchStreamOptionsIncludeUsage bool) (*adaptor.ConvertRequestResult, error) {
 	reqMap := make(map[string]any)
 	err := common.UnmarshalBodyReusable(req, &reqMap)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	if !doNotPatchStreamOptionsIncludeUsage {
 		if err := patchStreamOptions(reqMap); err != nil {
-			return "", nil, nil, err
+			return nil, err
 		}
 	}
 
 	reqMap["model"] = meta.ActualModel
 	jsonData, err := sonic.Marshal(reqMap)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
-	return http.MethodPost, nil, bytes.NewReader(jsonData), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   bytes.NewReader(jsonData),
+	}, nil
 }
 
 func patchStreamOptions(reqMap map[string]any) error {
@@ -178,7 +181,7 @@ func (a *Adaptor) DoRequest(_ *meta.Meta, _ *gin.Context, req *http.Request) (*h
 	return utils.DoRequest(req)
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err adaptor.Error) {
 	return DoResponse(meta, c, resp)
 }
 

+ 20 - 5
core/relay/adaptor/openai/balance.go

@@ -13,11 +13,26 @@ import (
 var _ adaptor.Balancer = (*Adaptor)(nil)
 
 func (a *Adaptor) GetBalance(channel *model.Channel) (float64, error) {
-	return GetBalance(channel)
+	return GetBalance(channel.BaseURL, channel.Key)
 }
 
-func GetBalance(channel *model.Channel) (float64, error) {
-	u := channel.BaseURL
+type SubscriptionResponse struct {
+	Object             string  `json:"object"`
+	HasPaymentMethod   bool    `json:"has_payment_method"`
+	SoftLimitUSD       float64 `json:"soft_limit_usd"`
+	HardLimitUSD       float64 `json:"hard_limit_usd"`
+	SystemHardLimitUSD float64 `json:"system_hard_limit_usd"`
+	AccessUntil        int64   `json:"access_until"`
+}
+
+type UsageResponse struct {
+	Object string `json:"object"`
+	// DailyCosts []OpenAIUsageDailyCost `json:"daily_costs"`
+	TotalUsage float64 `json:"total_usage"` // unit: 0.01 dollar
+}
+
+func GetBalance(channelBaseURL string, key string) (float64, error) {
+	u := channelBaseURL // renamed from baseURL: that name shadowed the package-level default, making the fallback below a no-op
 	if u == "" {
 		u = baseURL
 	}
@@ -27,7 +42,7 @@ func GetBalance(channel *model.Channel) (float64, error) {
 	if err != nil {
 		return 0, err
 	}
-	req1.Header.Set("Authorization", "Bearer "+channel.Key)
+	req1.Header.Set("Authorization", "Bearer "+key)
 	res1, err := http.DefaultClient.Do(req1)
 	if err != nil {
 		return 0, err
@@ -49,7 +64,7 @@ func GetBalance(channel *model.Channel) (float64, error) {
 	if err != nil {
 		return 0, err
 	}
-	req2.Header.Set("Authorization", "Bearer "+channel.Key)
+	req2.Header.Set("Authorization", "Bearer "+key)
 	res2, err := http.DefaultClient.Do(req2)
 	if err != nil {
 		return 0, err
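
`GetBalance` now takes the base URL and key directly rather than a `*model.Channel`, so it is callable with raw credentials, and the `SubscriptionResponse`/`UsageResponse` types move in from the deleted `openai/model.go`. The hunk cuts off before the final arithmetic; the conventional combination of the two dashboard endpoints looks roughly like this (an assumption about the elided tail of the function, not code visible in the diff):

```go
// computeBalance sketches the usual way the two dashboard responses combine
// (assumed; the diff truncates GetBalance before its return statement).
func computeBalance(sub SubscriptionResponse, usage UsageResponse) float64 {
	// TotalUsage is reported in hundredths of a dollar, per the field comment.
	return sub.HardLimitUSD - usage.TotalUsage/100
}
```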

+ 9 - 5
core/relay/adaptor/openai/embeddings.go

@@ -2,20 +2,20 @@ package openai
 
 import (
 	"bytes"
-	"io"
 	"net/http"
 
 	"github.com/bytedance/sonic"
 	"github.com/labring/aiproxy/core/common"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 )
 
 //nolint:gocritic
-func ConvertEmbeddingsRequest(meta *meta.Meta, req *http.Request, inputToSlices bool) (string, http.Header, io.Reader, error) {
+func ConvertEmbeddingsRequest(meta *meta.Meta, req *http.Request, inputToSlices bool) (*adaptor.ConvertRequestResult, error) {
 	reqMap := make(map[string]any)
 	err := common.UnmarshalBodyReusable(req, &reqMap)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	reqMap["model"] = meta.ActualModel
@@ -29,7 +29,11 @@ func ConvertEmbeddingsRequest(meta *meta.Meta, req *http.Request, inputToSlices
 
 	jsonData, err := sonic.Marshal(reqMap)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
-	return http.MethodPost, nil, bytes.NewReader(jsonData), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   bytes.NewReader(jsonData),
+	}, nil
 }

+ 45 - 79
core/relay/adaptor/openai/error.go

@@ -9,109 +9,75 @@ import (
 
 	"github.com/bytedance/sonic"
 	"github.com/labring/aiproxy/core/common/conv"
-	"github.com/labring/aiproxy/core/middleware"
-	"github.com/labring/aiproxy/core/relay/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
+	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
-type GeneralErrorResponse struct {
-	Error    model.Error `json:"error"`
-	Message  string      `json:"message"`
-	Msg      string      `json:"msg"`
-	Err      string      `json:"err"`
-	ErrorMsg string      `json:"error_msg"`
-	Header   struct {
-		Message string `json:"message"`
-	} `json:"header"`
-	Response struct {
-		Error struct {
-			Message string `json:"message"`
-		} `json:"error"`
-	} `json:"response"`
-}
-
-func (e GeneralErrorResponse) ToMessage() string {
-	if e.Error.Message != "" {
-		return e.Error.Message
-	}
-	if e.Message != "" {
-		return e.Message
-	}
-	if e.Msg != "" {
-		return e.Msg
-	}
-	if e.Err != "" {
-		return e.Err
-	}
-	if e.ErrorMsg != "" {
-		return e.ErrorMsg
-	}
-	if e.Header.Message != "" {
-		return e.Header.Message
-	}
-	if e.Response.Error.Message != "" {
-		return e.Response.Error.Message
-	}
-	return ""
-}
-
-const (
-	ErrorTypeAIProxy     = middleware.ErrorTypeAIPROXY
-	ErrorTypeUpstream    = "upstream_error"
-	ErrorCodeBadResponse = "bad_response"
-)
-
-func ErrorHanlder(resp *http.Response) *model.ErrorWithStatusCode {
+func GetError(resp *http.Response) (int, relaymodel.OpenAIError) {
 	defer resp.Body.Close()
 
 	respBody, err := io.ReadAll(resp.Body)
 	if err != nil {
-		return &model.ErrorWithStatusCode{
-			StatusCode: resp.StatusCode,
-			Error: model.Error{
-				Message: err.Error(),
-				Type:    ErrorTypeUpstream,
-				Code:    ErrorCodeBadResponse,
-			},
+		return resp.StatusCode, relaymodel.OpenAIError{
+			Message: err.Error(),
+			Type:    relaymodel.ErrorTypeUpstream,
+			Code:    relaymodel.ErrorCodeBadResponse,
 		}
 	}
 
-	return ErrorHanlderWithBody(resp.StatusCode, respBody)
+	return GetErrorWithBody(resp.StatusCode, respBody)
 }
 
-func ErrorHanlderWithBody(statucCode int, respBody []byte) *model.ErrorWithStatusCode {
-	ErrorWithStatusCode := &model.ErrorWithStatusCode{
-		StatusCode: statucCode,
-		Error: model.Error{
-			Type:  ErrorTypeUpstream,
-			Code:  ErrorCodeBadResponse,
-			Param: strconv.Itoa(statucCode),
-		},
+func GetErrorWithBody(statusCode int, respBody []byte) (int, relaymodel.OpenAIError) {
+	openAIError := relaymodel.OpenAIError{
+		Type:  relaymodel.ErrorTypeUpstream,
+		Code:  relaymodel.ErrorCodeBadResponse,
+		Param: strconv.Itoa(statusCode),
 	}
 
-	var errResponse GeneralErrorResponse
+	var errResponse relaymodel.OpenAIErrorResponse
 	err := sonic.Unmarshal(respBody, &errResponse)
 	if err != nil {
-		ErrorWithStatusCode.Error.Message = conv.BytesToString(respBody)
-		return ErrorWithStatusCode
+		openAIError.Message = conv.BytesToString(respBody)
+		return statusCode, openAIError
 	}
 
 	if errResponse.Error.Message != "" {
 		// OpenAI format error, so we override the default one
-		ErrorWithStatusCode.Error = errResponse.Error
-	} else {
-		ErrorWithStatusCode.Error.Message = errResponse.ToMessage()
+		openAIError = errResponse.Error
 	}
-	if ErrorWithStatusCode.Error.Message == "" {
-		ErrorWithStatusCode.Error.Message = fmt.Sprintf("bad response status code %d", statucCode)
+
+	if openAIError.Message == "" {
+		openAIError.Message = fmt.Sprintf("bad response status code %d", statusCode)
 	}
 
-	if code, ok := ErrorWithStatusCode.Error.Code.(int64); ok && code >= 400 && code < 600 {
-		ErrorWithStatusCode.StatusCode = int(code)
+	if code, ok := openAIError.Code.(int64); ok && code >= 400 && code < 600 {
+		statusCode = int(code)
 	}
 
-	if strings.HasPrefix(ErrorWithStatusCode.Error.Message, "tools is not supported in this model.") {
-		ErrorWithStatusCode.StatusCode = http.StatusBadRequest
+	if strings.HasPrefix(openAIError.Message, "tools is not supported in this model.") {
+		statusCode = http.StatusBadRequest
 	}
 
-	return ErrorWithStatusCode
+	return statusCode, openAIError
+}
+
+func ErrorHanlder(resp *http.Response) adaptor.Error {
+	defer resp.Body.Close()
+
+	respBody, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return relaymodel.NewOpenAIError(resp.StatusCode, relaymodel.OpenAIError{
+			Message: err.Error(),
+			Type:    relaymodel.ErrorTypeUpstream,
+			Code:    relaymodel.ErrorCodeBadResponse,
+		})
+	}
+
+	return ErrorHanlderWithBody(resp.StatusCode, respBody)
+}
+
+func ErrorHanlderWithBody(statusCode int, respBody []byte) adaptor.Error {
+	statusCode, openAIError := GetErrorWithBody(statusCode, respBody)
+	return relaymodel.NewOpenAIError(statusCode, openAIError)
 }
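
`ErrorHanlder` (the misspelling predates this commit and is carried over unchanged) is now a thin shell over `GetError`/`GetErrorWithBody`, which expose the normalized status code and the `relaymodel.OpenAIError` as separate values. That split helps callers that need the status before deciding how to respond; a self-contained sketch under an assumed retry policy (the policy is not from the repo):

```go
package retryhint

import (
	"net/http"

	"github.com/labring/aiproxy/core/relay/adaptor"
	"github.com/labring/aiproxy/core/relay/adaptor/openai"
	relaymodel "github.com/labring/aiproxy/core/relay/model"
)

// HandleUpstreamFailure is a hypothetical caller: it reads the normalized
// status code for a retry decision, then re-wraps the pieces into an
// adaptor.Error for the response path.
func HandleUpstreamFailure(statusCode int, body []byte) (bool, adaptor.Error) {
	statusCode, oaiErr := openai.GetErrorWithBody(statusCode, body)
	// Treating 429 and 5xx as retryable is an assumed policy, not the repo's.
	retryable := statusCode == http.StatusTooManyRequests ||
		statusCode >= http.StatusInternalServerError
	return retryable, relaymodel.NewOpenAIError(statusCode, oaiErr)
}
```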

+ 30 - 21
core/relay/adaptor/openai/image.go

@@ -14,38 +14,43 @@ import (
 	"github.com/labring/aiproxy/core/common/image"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
-func ConvertImagesRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func ConvertImagesRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	node, err := common.UnmarshalBody2Node(req)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 	responseFormat, err := node.Get("response_format").String()
 	if err != nil && !errors.Is(err, ast.ErrNotExist) {
-		return "", nil, nil, err
+		return nil, err
 	}
 	meta.Set(MetaResponseFormat, responseFormat)
 
 	_, err = node.Set("model", ast.NewString(meta.ActualModel))
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	jsonData, err := node.MarshalJSON()
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
-	return http.MethodPost, nil, bytes.NewReader(jsonData), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   bytes.NewReader(jsonData),
+	}, nil
 }
 
-func ConvertImagesEditsRequest(meta *meta.Meta, request *http.Request) (string, http.Header, io.Reader, error) {
+func ConvertImagesEditsRequest(meta *meta.Meta, request *http.Request) (*adaptor.ConvertRequestResult, error) {
 	err := request.ParseMultipartForm(1024 * 1024 * 4)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	multipartBody := &bytes.Buffer{}
@@ -59,7 +64,7 @@ func ConvertImagesEditsRequest(meta *meta.Meta, request *http.Request) (string,
 		if key == "model" {
 			err = multipartWriter.WriteField(key, meta.ActualModel)
 			if err != nil {
-				return "", nil, nil, err
+				return nil, err
 			}
 			continue
 		}
@@ -69,7 +74,7 @@ func ConvertImagesEditsRequest(meta *meta.Meta, request *http.Request) (string,
 		}
 		err = multipartWriter.WriteField(key, value)
 		if err != nil {
-			return "", nil, nil, err
+			return nil, err
 		}
 	}
 
@@ -80,28 +85,32 @@ func ConvertImagesEditsRequest(meta *meta.Meta, request *http.Request) (string,
 		fileHeader := files[0]
 		file, err := fileHeader.Open()
 		if err != nil {
-			return "", nil, nil, err
+			return nil, err
 		}
 		w, err := multipartWriter.CreateFormFile(key, fileHeader.Filename)
 		if err != nil {
 			file.Close()
-			return "", nil, nil, err
+			return nil, err
 		}
 		_, err = io.Copy(w, file)
 		file.Close()
 		if err != nil {
-			return "", nil, nil, err
+			return nil, err
 		}
 	}
 
 	multipartWriter.Close()
 	ContentType := multipartWriter.FormDataContentType()
-	return http.MethodPost, http.Header{
-		"Content-Type": {ContentType},
-	}, multipartBody, nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: http.Header{
+			"Content-Type": {ContentType},
+		},
+		Body: multipartBody,
+	}, nil
 }
 
-func ImagesHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func ImagesHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, ErrorHanlder(resp)
 	}
@@ -112,12 +121,12 @@ func ImagesHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model
 
 	responseBody, err := io.ReadAll(resp.Body)
 	if err != nil {
-		return nil, ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "read_response_body_failed", http.StatusInternalServerError)
 	}
 	var imageResponse relaymodel.ImageResponse
 	err = sonic.Unmarshal(responseBody, &imageResponse)
 	if err != nil {
-		return nil, ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 	}
 
 	usage := &model.Usage{
@@ -137,14 +146,14 @@ func ImagesHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model
 			}
 			_, data.B64Json, err = image.GetImageFromURL(c.Request.Context(), data.URL)
 			if err != nil {
-				return usage, ErrorWrapper(err, "get_image_from_url_failed", http.StatusInternalServerError)
+				return usage, relaymodel.WrapperOpenAIError(err, "get_image_from_url_failed", http.StatusInternalServerError)
 			}
 		}
 	}
 
 	data, err := sonic.Marshal(imageResponse)
 	if err != nil {
-		return usage, ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
+		return usage, relaymodel.WrapperOpenAIError(err, "marshal_response_body_failed", http.StatusInternalServerError)
 	}
 
 	_, err = c.Writer.Write(data)
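
One flow spans both image hunks above: `ConvertImagesRequest` stashes the client's `response_format` into request meta under `MetaResponseFormat`, and `ImagesHandler` later inlines URL results as base64 when that is what the client asked for. The guard around the `GetImageFromURL` call is elided by the hunk, so the condition below is an assumption reconstructing it:

```go
// Sketch of the elided guard in ImagesHandler (condition assumed):
responseFormat, _ := meta.MustGet(MetaResponseFormat).(string)
for _, data := range imageResponse.Data {
	if responseFormat == "b64_json" && data.B64Json == "" && data.URL != "" {
		_, data.B64Json, err = image.GetImageFromURL(c.Request.Context(), data.URL)
		if err != nil {
			return usage, relaymodel.WrapperOpenAIError(err,
				"get_image_from_url_failed", http.StatusInternalServerError)
		}
	}
}
```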

+ 12 - 11
core/relay/adaptor/openai/main.go

@@ -19,6 +19,7 @@ import (
 	"github.com/labring/aiproxy/core/common/splitter"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
@@ -90,7 +91,7 @@ func GetUsageOrChatChoicesResponseFromNode(node *ast.Node) (*relaymodel.Usage, [
 
 type PreHandler func(meta *meta.Meta, node *ast.Node) error
 
-func StreamHandler(meta *meta.Meta, c *gin.Context, resp *http.Response, preHandler PreHandler) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func StreamHandler(meta *meta.Meta, c *gin.Context, resp *http.Response, preHandler PreHandler) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, ErrorHanlder(resp)
 	}
@@ -354,7 +355,7 @@ func GetUsageOrChoicesResponseFromNode(node *ast.Node) (*relaymodel.Usage, []*re
 	return nil, choices, nil
 }
 
-func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response, preHandler PreHandler) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response, preHandler PreHandler) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, ErrorHanlder(resp)
 	}
@@ -365,22 +366,22 @@ func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response, preHandler Pr
 
 	responseBody, err := io.ReadAll(resp.Body)
 	if err != nil {
-		return nil, ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "read_response_body_failed", http.StatusInternalServerError)
 	}
 
 	node, err := sonic.Get(responseBody)
 	if err != nil {
-		return nil, ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 	}
 	if preHandler != nil {
 		err := preHandler(meta, &node)
 		if err != nil {
-			return nil, ErrorWrapper(err, "pre_handler_failed", http.StatusInternalServerError)
+			return nil, relaymodel.WrapperOpenAIError(err, "pre_handler_failed", http.StatusInternalServerError)
 		}
 	}
 	usage, choices, err := GetUsageOrChoicesResponseFromNode(&node)
 	if err != nil {
-		return nil, ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 	}
 
 	if usage == nil || usage.TotalTokens == 0 || (usage.PromptTokens == 0 && usage.CompletionTokens == 0) {
@@ -399,26 +400,26 @@ func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response, preHandler Pr
 		}
 		_, err = node.Set("usage", ast.NewAny(usage))
 		if err != nil {
-			return usage.ToModelUsage(), ErrorWrapper(err, "set_usage_failed", http.StatusInternalServerError)
+			return usage.ToModelUsage(), relaymodel.WrapperOpenAIError(err, "set_usage_failed", http.StatusInternalServerError)
 		}
 	} else if usage.TotalTokens != 0 && usage.PromptTokens == 0 { // some channels don't return prompt tokens & completion tokens
 		usage.PromptTokens = int64(meta.RequestUsage.InputTokens)
 		usage.CompletionTokens = usage.TotalTokens - int64(meta.RequestUsage.InputTokens)
 		_, err = node.Set("usage", ast.NewAny(usage))
 		if err != nil {
-			return usage.ToModelUsage(), ErrorWrapper(err, "set_usage_failed", http.StatusInternalServerError)
+			return usage.ToModelUsage(), relaymodel.WrapperOpenAIError(err, "set_usage_failed", http.StatusInternalServerError)
 		}
 	}
 
 	_, err = node.Set("model", ast.NewString(meta.OriginModel))
 	if err != nil {
-		return usage.ToModelUsage(), ErrorWrapper(err, "set_model_failed", http.StatusInternalServerError)
+		return usage.ToModelUsage(), relaymodel.WrapperOpenAIError(err, "set_model_failed", http.StatusInternalServerError)
 	}
 
 	if meta.ChannelConfig.SplitThink {
 		respMap, err := node.Map()
 		if err != nil {
-			return usage.ToModelUsage(), ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+			return usage.ToModelUsage(), relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 		}
 		SplitThink(respMap)
 		c.JSON(http.StatusOK, respMap)
@@ -427,7 +428,7 @@ func Handler(meta *meta.Meta, c *gin.Context, resp *http.Response, preHandler Pr
 
 	newData, err := sonic.Marshal(&node)
 	if err != nil {
-		return usage.ToModelUsage(), ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
+		return usage.ToModelUsage(), relaymodel.WrapperOpenAIError(err, "marshal_response_body_failed", http.StatusInternalServerError)
 	}
 
 	_, err = c.Writer.Write(newData)

+ 0 - 16
core/relay/adaptor/openai/model.go

@@ -1,16 +0,0 @@
-package openai
-
-type SubscriptionResponse struct {
-	Object             string  `json:"object"`
-	HasPaymentMethod   bool    `json:"has_payment_method"`
-	SoftLimitUSD       float64 `json:"soft_limit_usd"`
-	HardLimitUSD       float64 `json:"hard_limit_usd"`
-	SystemHardLimitUSD float64 `json:"system_hard_limit_usd"`
-	AccessUntil        int64   `json:"access_until"`
-}
-
-type UsageResponse struct {
-	Object string `json:"object"`
-	// DailyCosts []OpenAIUsageDailyCost `json:"daily_costs"`
-	TotalUsage float64 `json:"total_usage"` // unit: 0.01 dollar
-}

+ 6 - 5
core/relay/adaptor/openai/moderations.go

@@ -9,11 +9,12 @@ import (
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
-func ModerationsHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func ModerationsHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, ErrorHanlder(resp)
 	}
@@ -24,21 +25,21 @@ func ModerationsHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*
 
 	body, err := io.ReadAll(resp.Body)
 	if err != nil {
-		return nil, ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "read_response_body_failed", http.StatusInternalServerError)
 	}
 
 	node, err := sonic.Get(body)
 	if err != nil {
-		return nil, ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 	}
 
 	if _, err := node.Set("model", ast.NewString(meta.OriginModel)); err != nil {
-		return nil, ErrorWrapper(err, "set_model_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "set_model_failed", http.StatusInternalServerError)
 	}
 
 	newData, err := node.MarshalJSON()
 	if err != nil {
-		return nil, ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "marshal_response_body_failed", http.StatusInternalServerError)
 	}
 
 	usage := &model.Usage{

+ 13 - 8
core/relay/adaptor/openai/rerank.go

@@ -11,29 +11,34 @@ import (
 	"github.com/labring/aiproxy/core/common"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
-func ConvertRerankRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func ConvertRerankRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	node, err := common.UnmarshalBody2Node(req)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	_, err = node.Set("model", ast.NewString(meta.ActualModel))
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	jsonData, err := node.MarshalJSON()
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
-	return http.MethodPost, nil, bytes.NewReader(jsonData), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   bytes.NewReader(jsonData),
+	}, nil
 }
 
-func RerankHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func RerankHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, ErrorHanlder(resp)
 	}
@@ -44,12 +49,12 @@ func RerankHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model
 
 	responseBody, err := io.ReadAll(resp.Body)
 	if err != nil {
-		return nil, ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "read_response_body_failed", http.StatusInternalServerError)
 	}
 	var rerankResponse relaymodel.SlimRerankResponse
 	err = sonic.Unmarshal(responseBody, &rerankResponse)
 	if err != nil {
-		return nil, ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 	}
 
 	c.Writer.WriteHeader(resp.StatusCode)
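
Note: this file shows the signature migration repeated across the PR — `ConvertRequest` implementations now return a single `*adaptor.ConvertRequestResult` instead of the positional `(string, http.Header, io.Reader, error)` tuple. A sketch of the struct as it can be inferred from the construction sites in this diff (the real definition lives in core/relay/adaptor/interface.go, changed in this PR but not excerpted here), plus a hypothetical helper for the common JSON-POST case:

```go
package adaptor

import (
	"bytes"
	"io"
	"net/http"
)

// ConvertRequestResult bundles what the old signature returned positionally.
// Field names are taken from the construction sites in this diff.
type ConvertRequestResult struct {
	Method string
	Header http.Header
	Body   io.Reader
}

// jsonPost is a hypothetical convenience wrapper for the pattern most
// adaptors follow: POST a JSON body with no extra headers.
func jsonPost(jsonData []byte) *ConvertRequestResult {
	return &ConvertRequestResult{
		Method: http.MethodPost,
		Header: nil,
		Body:   bytes.NewReader(jsonData),
	}
}
```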

+ 23 - 18
core/relay/adaptor/openai/stt.go

@@ -14,14 +14,15 @@ import (
 	"github.com/labring/aiproxy/core/common/conv"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
-func ConvertSTTRequest(meta *meta.Meta, request *http.Request) (string, http.Header, io.Reader, error) {
+func ConvertSTTRequest(meta *meta.Meta, request *http.Request) (*adaptor.ConvertRequestResult, error) {
 	err := request.ParseMultipartForm(1024 * 1024 * 4)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	multipartBody := &bytes.Buffer{}
@@ -35,7 +36,7 @@ func ConvertSTTRequest(meta *meta.Meta, request *http.Request) (string, http.Hea
 		if key == "model" {
 			err = multipartWriter.WriteField(key, meta.ActualModel)
 			if err != nil {
-				return "", nil, nil, err
+				return nil, err
 			}
 			continue
 		}
@@ -45,7 +46,7 @@ func ConvertSTTRequest(meta *meta.Meta, request *http.Request) (string, http.Hea
 		}
 		err = multipartWriter.WriteField(key, value)
 		if err != nil {
-			return "", nil, nil, err
+			return nil, err
 		}
 	}
 
@@ -56,28 +57,32 @@ func ConvertSTTRequest(meta *meta.Meta, request *http.Request) (string, http.Hea
 		fileHeader := files[0]
 		file, err := fileHeader.Open()
 		if err != nil {
-			return "", nil, nil, err
+			return nil, err
 		}
 		w, err := multipartWriter.CreateFormFile(key, fileHeader.Filename)
 		if err != nil {
 			file.Close()
-			return "", nil, nil, err
+			return nil, err
 		}
 		_, err = io.Copy(w, file)
 		file.Close()
 		if err != nil {
-			return "", nil, nil, err
+			return nil, err
 		}
 	}
 
 	multipartWriter.Close()
 	ContentType := multipartWriter.FormDataContentType()
-	return http.MethodPost, http.Header{
-		"Content-Type": {ContentType},
-	}, multipartBody, nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: http.Header{
+			"Content-Type": {ContentType},
+		},
+		Body: multipartBody,
+	}, nil
 }
 
-func STTHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func STTHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, ErrorHanlder(resp)
 	}
@@ -90,7 +95,7 @@ func STTHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Us
 
 	responseBody, err := io.ReadAll(resp.Body)
 	if err != nil {
-		return nil, ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "read_response_body_failed", http.StatusInternalServerError)
 	}
 
 	var text string
@@ -109,7 +114,7 @@ func STTHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Us
 		text, err = getTextFromJSON(responseBody)
 	}
 	if err != nil {
-		return nil, ErrorWrapper(err, "get_text_from_body_err", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "get_text_from_body_err", http.StatusInternalServerError)
 	}
 	var promptTokens int64
 	if meta.RequestUsage.InputTokens > 0 {
@@ -129,16 +134,16 @@ func STTHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Us
 		strings.Contains(resp.Header.Get("Content-Type"), "json"):
 		node, err := sonic.Get(responseBody)
 		if err != nil {
-			return usage.ToModelUsage(), ErrorWrapper(err, "get_node_from_body_err", http.StatusInternalServerError)
+			return usage.ToModelUsage(), relaymodel.WrapperOpenAIError(err, "get_node_from_body_err", http.StatusInternalServerError)
 		}
 		if node.Get("usage").Exists() {
 			usageStr, err := node.Get("usage").Raw()
 			if err != nil {
-				return usage.ToModelUsage(), ErrorWrapper(err, "unmarshal_response_err", http.StatusInternalServerError)
+				return usage.ToModelUsage(), relaymodel.WrapperOpenAIError(err, "unmarshal_response_err", http.StatusInternalServerError)
 			}
 			err = sonic.UnmarshalString(usageStr, usage)
 			if err != nil {
-				return usage.ToModelUsage(), ErrorWrapper(err, "unmarshal_response_err", http.StatusInternalServerError)
+				return usage.ToModelUsage(), relaymodel.WrapperOpenAIError(err, "unmarshal_response_err", http.StatusInternalServerError)
 			}
 			switch {
 			case usage.PromptTokens != 0 && usage.TotalTokens == 0:
@@ -153,11 +158,11 @@ func STTHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Us
 
 		_, err = node.SetAny("usage", usage)
 		if err != nil {
-			return usage.ToModelUsage(), ErrorWrapper(err, "marshal_response_err", http.StatusInternalServerError)
+			return usage.ToModelUsage(), relaymodel.WrapperOpenAIError(err, "marshal_response_err", http.StatusInternalServerError)
 		}
 		responseBody, err = node.MarshalJSON()
 		if err != nil {
-			return usage.ToModelUsage(), ErrorWrapper(err, "marshal_response_err", http.StatusInternalServerError)
+			return usage.ToModelUsage(), relaymodel.WrapperOpenAIError(err, "marshal_response_err", http.StatusInternalServerError)
 		}
 	}
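
Note: `ConvertSTTRequest` rebuilds the client's multipart form so the `model` field can be swapped for the upstream model name. A self-contained sketch of the field-copy loop under that assumption (file parts omitted for brevity; the model names are illustrative):

```go
package main

import (
	"bytes"
	"fmt"
	"mime/multipart"
)

// rewriteForm copies every form field into a fresh multipart body,
// overriding "model" with the actual upstream model, as the diff does.
func rewriteForm(fields map[string]string, actualModel string) (*bytes.Buffer, string, error) {
	buf := &bytes.Buffer{}
	w := multipart.NewWriter(buf)
	for k, v := range fields {
		if k == "model" {
			v = actualModel
		}
		if err := w.WriteField(k, v); err != nil {
			return nil, "", err
		}
	}
	if err := w.Close(); err != nil {
		return nil, "", err
	}
	return buf, w.FormDataContentType(), nil
}

func main() {
	body, contentType, err := rewriteForm(
		map[string]string{"model": "whisper-1", "language": "en"},
		"whisper-large-v3",
	)
	if err != nil {
		panic(err)
	}
	fmt.Println(contentType, body.Len() > 0)
}
```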
 

+ 16 - 12
core/relay/adaptor/openai/tts.go

@@ -11,51 +11,55 @@ import (
 	"github.com/labring/aiproxy/core/common"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
-	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
-func ConvertTTSRequest(meta *meta.Meta, req *http.Request, defaultVoice string) (string, http.Header, io.Reader, error) {
+func ConvertTTSRequest(meta *meta.Meta, req *http.Request, defaultVoice string) (*adaptor.ConvertRequestResult, error) {
 	node, err := common.UnmarshalBody2Node(req)
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	input, err := node.Get("input").String()
 	if err != nil {
 		if errors.Is(err, ast.ErrNotExist) {
-			return "", nil, nil, errors.New("input is required")
+			return nil, errors.New("input is required")
 		}
-		return "", nil, nil, err
+		return nil, err
 	}
 	if len(input) > 4096 {
-		return "", nil, nil, errors.New("input is too long (over 4096 characters)")
+		return nil, errors.New("input is too long (over 4096 characters)")
 	}
 
 	voice, err := node.Get("voice").String()
 	if err != nil && !errors.Is(err, ast.ErrNotExist) {
-		return "", nil, nil, err
+		return nil, err
 	}
 	if voice == "" && defaultVoice != "" {
 		_, err = node.Set("voice", ast.NewString(defaultVoice))
 		if err != nil {
-			return "", nil, nil, err
+			return nil, err
 		}
 	}
 
 	_, err = node.Set("model", ast.NewString(meta.ActualModel))
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	jsonData, err := node.MarshalJSON()
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
-	return http.MethodPost, nil, bytes.NewReader(jsonData), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   bytes.NewReader(jsonData),
+	}, nil
 }
 
-func TTSHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func TTSHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, ErrorHanlder(resp)
 	}
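
Note: the TTS converter keeps its validation order under the new signature — require `input`, enforce the 4096-character cap, fall back to `defaultVoice` only when the client omitted one, then override `model`. A sketch with plain maps standing in for the sonic AST used above (the diff's `len(input)` counts bytes, and this sketch matches that behavior; the voice and model values are illustrative):

```go
package main

import (
	"errors"
	"fmt"
)

// prepareTTS mirrors the validation order in ConvertTTSRequest.
func prepareTTS(req map[string]any, defaultVoice, actualModel string) error {
	input, ok := req["input"].(string)
	if !ok {
		return errors.New("input is required")
	}
	if len(input) > 4096 {
		return errors.New("input is too long (over 4096 characters)")
	}
	if v, _ := req["voice"].(string); v == "" && defaultVoice != "" {
		req["voice"] = defaultVoice
	}
	req["model"] = actualModel
	return nil
}

func main() {
	req := map[string]any{"input": "hello"}
	fmt.Println(prepareTTS(req, "alloy", "tts-1"), req["voice"], req["model"])
}
```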

+ 0 - 28
core/relay/adaptor/openai/util.go

@@ -1,28 +0,0 @@
-package openai
-
-import (
-	"github.com/labring/aiproxy/core/middleware"
-	model "github.com/labring/aiproxy/core/relay/model"
-)
-
-func ErrorWrapper(err error, code any, statusCode int) *model.ErrorWithStatusCode {
-	return &model.ErrorWithStatusCode{
-		Error: model.Error{
-			Message: err.Error(),
-			Type:    middleware.ErrorTypeAIPROXY,
-			Code:    code,
-		},
-		StatusCode: statusCode,
-	}
-}
-
-func ErrorWrapperWithMessage(message string, code any, statusCode int) *model.ErrorWithStatusCode {
-	return &model.ErrorWithStatusCode{
-		Error: model.Error{
-			Message: message,
-			Type:    middleware.ErrorTypeAIPROXY,
-			Code:    code,
-		},
-		StatusCode: statusCode,
-	}
-}

+ 2 - 2
core/relay/adaptor/openrouter/adaptor.go

@@ -7,10 +7,10 @@ import (
 	"github.com/bytedance/sonic/ast"
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	"github.com/labring/aiproxy/core/relay/mode"
-	relaymodel "github.com/labring/aiproxy/core/relay/model"
 	"github.com/labring/aiproxy/core/relay/utils"
 )
 
@@ -88,7 +88,7 @@ func handlerPreHandler(_ *meta.Meta, node *ast.Node) error {
 	return nil
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err adaptor.Error) {
 	switch meta.Mode {
 	case mode.ChatCompletions:
 		if utils.IsStreamResponse(resp) {

+ 1 - 2
core/relay/adaptor/siliconflow/adaptor.go

@@ -9,7 +9,6 @@ import (
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	"github.com/labring/aiproxy/core/relay/mode"
-	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
 var _ adaptor.Adaptor = (*Adaptor)(nil)
@@ -29,7 +28,7 @@ func (a *Adaptor) GetModelList() []*model.ModelConfig {
 }
 
 //nolint:gocritic
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err adaptor.Error) {
 	usage, err = a.Adaptor.DoResponse(meta, c, resp)
 	if err != nil {
 		return nil, err

+ 1 - 2
core/relay/adaptor/stepfun/adaptor.go

@@ -1,7 +1,6 @@
 package stepfun
 
 import (
-	"io"
 	"net/http"
 
 	"github.com/labring/aiproxy/core/model"
@@ -21,7 +20,7 @@ func (a *Adaptor) GetBaseURL() string {
 	return baseURL
 }
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	switch meta.Mode {
 	case mode.AudioSpeech:
 		return openai.ConvertTTSRequest(meta, req, "cixingnansheng")

+ 5 - 5
core/relay/adaptor/text-embeddings-inference/adaptor.go

@@ -2,11 +2,11 @@ package textembeddingsinference
 
 import (
 	"fmt"
-	"io"
 	"net/http"
 
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	"github.com/labring/aiproxy/core/relay/mode"
@@ -46,14 +46,14 @@ func (a *Adaptor) SetupRequestHeader(meta *meta.Meta, _ *gin.Context, req *http.
 	return nil
 }
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	switch meta.Mode {
 	case mode.Rerank:
 		return ConvertRerankRequest(meta, req)
 	case mode.Embeddings:
 		return openai.ConvertRequest(meta, req)
 	default:
-		return "", nil, nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
+		return nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
 	}
 }
 
@@ -61,13 +61,13 @@ func (a *Adaptor) DoRequest(_ *meta.Meta, _ *gin.Context, req *http.Request) (*h
 	return utils.DoRequest(req)
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	switch meta.Mode {
 	case mode.Rerank:
 		return RerankHandler(meta, c, resp)
 	case mode.Embeddings:
 		return EmbeddingsHandler(meta, c, resp)
 	default:
-		return nil, openai.ErrorWrapperWithMessage(fmt.Sprintf("unsupported mode: %s", meta.Mode), "unsupported_mode", http.StatusBadRequest)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(fmt.Sprintf("unsupported mode: %s", meta.Mode), "unsupported_mode", http.StatusBadRequest)
 	}
 }

+ 2 - 2
core/relay/adaptor/text-embeddings-inference/embeddings.go

@@ -5,12 +5,12 @@ import (
 
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
-	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
-func EmbeddingsHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func EmbeddingsHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, EmbeddingsErrorHanlder(resp)
 	}

+ 8 - 22
core/relay/adaptor/text-embeddings-inference/error.go

@@ -4,8 +4,8 @@ import (
 	"net/http"
 
 	"github.com/bytedance/sonic"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
-	"github.com/labring/aiproxy/core/relay/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
+	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
 type RerankErrorResponse struct {
@@ -13,23 +13,16 @@ type RerankErrorResponse struct {
 	ErrorType string `json:"error_type"`
 }
 
-func RerankErrorHanlder(resp *http.Response) *model.ErrorWithStatusCode {
+func RerankErrorHanlder(resp *http.Response) adaptor.Error {
 	defer resp.Body.Close()
 
 	errResp := RerankErrorResponse{}
 	err := sonic.ConfigDefault.NewDecoder(resp.Body).Decode(&errResp)
 	if err != nil {
-		return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
+		return relaymodel.WrapperOpenAIError(err, "read_response_body_failed", http.StatusInternalServerError)
 	}
 
-	return &model.ErrorWithStatusCode{
-		Error: model.Error{
-			Message: errResp.Error,
-			Type:    errResp.ErrorType,
-			Code:    resp.StatusCode,
-		},
-		StatusCode: resp.StatusCode,
-	}
+	return relaymodel.WrapperOpenAIErrorWithMessage(errResp.Error, errResp.ErrorType, resp.StatusCode)
 }
 
 type EmbeddingsErrorResponse struct {
@@ -37,21 +30,14 @@ type EmbeddingsErrorResponse struct {
 	Message string `json:"message"`
 }
 
-func EmbeddingsErrorHanlder(resp *http.Response) *model.ErrorWithStatusCode {
+func EmbeddingsErrorHanlder(resp *http.Response) adaptor.Error {
 	defer resp.Body.Close()
 
 	errResp := EmbeddingsErrorResponse{}
 	err := sonic.ConfigDefault.NewDecoder(resp.Body).Decode(&errResp)
 	if err != nil {
-		return openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
+		return relaymodel.WrapperOpenAIError(err, "read_response_body_failed", http.StatusInternalServerError)
 	}
 
-	return &model.ErrorWithStatusCode{
-		Error: model.Error{
-			Message: errResp.Message,
-			Type:    errResp.Type,
-			Code:    resp.StatusCode,
-		},
-		StatusCode: resp.StatusCode,
-	}
+	return relaymodel.WrapperOpenAIErrorWithMessage(errResp.Message, errResp.Type, resp.StatusCode)
 }
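
Note: both TEI error handlers follow the same shape — decode the upstream error payload, then re-emit it through the OpenAI-style wrapper with the upstream status code, falling back to a 500 when the body cannot be decoded. A sketch with `encoding/json` standing in for sonic and a plain return pair standing in for the wrapper:

```go
package main

import (
	"encoding/json"
	"fmt"
	"io"
	"net/http"
	"strings"
)

type rerankErrorResponse struct {
	Error     string `json:"error"`
	ErrorType string `json:"error_type"`
}

// handleRerankError mirrors RerankErrorHanlder: parse failures become a 500,
// otherwise the upstream message and status code pass through.
func handleRerankError(resp *http.Response) (status int, message string) {
	defer resp.Body.Close()
	var er rerankErrorResponse
	if err := json.NewDecoder(resp.Body).Decode(&er); err != nil {
		return http.StatusInternalServerError, "read_response_body_failed: " + err.Error()
	}
	return resp.StatusCode, er.Error
}

func main() {
	resp := &http.Response{
		StatusCode: http.StatusUnprocessableEntity,
		Body:       io.NopCloser(strings.NewReader(`{"error":"query is empty","error_type":"Validation"}`)),
	}
	status, msg := handleRerankError(resp)
	fmt.Println(status, msg) // 422 query is empty
}
```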

+ 19 - 16
core/relay/adaptor/text-embeddings-inference/rerank.go

@@ -4,7 +4,6 @@ import (
 	"bytes"
 	"errors"
 	"fmt"
-	"io"
 	"net/http"
 
 	"github.com/bytedance/sonic"
@@ -13,64 +12,68 @@ import (
 	"github.com/labring/aiproxy/core/common"
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
-func ConvertRerankRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func ConvertRerankRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	node, err := common.UnmarshalBody2Node(req)
 	if err != nil {
-		return "", nil, nil, fmt.Errorf("failed to parse request body: %w", err)
+		return nil, fmt.Errorf("failed to parse request body: %w", err)
 	}
 
 	// Set the actual model in the request
 	_, err = node.Set("model", ast.NewString(meta.ActualModel))
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
 	// Get the documents array and rename it to texts
 	documentsNode := node.Get("documents")
 	if !documentsNode.Exists() {
-		return "", nil, nil, errors.New("documents field not found")
+		return nil, errors.New("documents field not found")
 	}
 
 	// Set the texts field with the documents value
 	_, err = node.Set("texts", *documentsNode)
 	if err != nil {
-		return "", nil, nil, fmt.Errorf("failed to set texts field: %w", err)
+		return nil, fmt.Errorf("failed to set texts field: %w", err)
 	}
 
 	// Remove the documents field
 	_, err = node.Unset("documents")
 	if err != nil {
-		return "", nil, nil, fmt.Errorf("failed to remove documents field: %w", err)
+		return nil, fmt.Errorf("failed to remove documents field: %w", err)
 	}
 
 	returnDocumentsNode := node.Get("return_documents")
 	if returnDocumentsNode.Exists() {
 		returnDocuments, err := returnDocumentsNode.Bool()
 		if err != nil {
-			return "", nil, nil, fmt.Errorf("failed to unmarshal return_documents field: %w", err)
+			return nil, fmt.Errorf("failed to unmarshal return_documents field: %w", err)
 		}
 		_, err = node.Unset("return_documents")
 		if err != nil {
-			return "", nil, nil, fmt.Errorf("failed to remove return_documents field: %w", err)
+			return nil, fmt.Errorf("failed to remove return_documents field: %w", err)
 		}
 		_, err = node.Set("return_text", ast.NewBool(returnDocuments))
 		if err != nil {
-			return "", nil, nil, fmt.Errorf("failed to set return_text field: %w", err)
+			return nil, fmt.Errorf("failed to set return_text field: %w", err)
 		}
 	}
 
 	// Convert back to JSON
 	jsonData, err := node.MarshalJSON()
 	if err != nil {
-		return "", nil, nil, fmt.Errorf("failed to marshal request: %w", err)
+		return nil, fmt.Errorf("failed to marshal request: %w", err)
 	}
 
-	return http.MethodPost, nil, bytes.NewReader(jsonData), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   bytes.NewReader(jsonData),
+	}, nil
 }
 
 type RerankResponse []RerankResponseItem
@@ -95,7 +98,7 @@ func (rri *RerankResponseItem) ToRerankModel() *relaymodel.RerankResult {
 	}
 }
 
-func RerankHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func RerankHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, RerankErrorHanlder(resp)
 	}
@@ -107,7 +110,7 @@ func RerankHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model
 	respSlice := RerankResponse{}
 	err := sonic.ConfigDefault.NewDecoder(resp.Body).Decode(&respSlice)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "read_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "read_response_body_failed", http.StatusInternalServerError)
 	}
 
 	usage := &model.Usage{
@@ -132,7 +135,7 @@ func RerankHandler(meta *meta.Meta, c *gin.Context, resp *http.Response) (*model
 
 	jsonResponse, err := sonic.Marshal(rerankResp)
 	if err != nil {
-		return usage, openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
+		return usage, relaymodel.WrapperOpenAIError(err, "marshal_response_body_failed", http.StatusInternalServerError)
 	}
 
 	_, err = c.Writer.Write(jsonResponse)
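
Note: beyond the signature change, this converter carries the TEI-specific field mapping — `documents` becomes `texts`, and `return_documents` becomes `return_text`. A self-contained sketch of that mapping with plain maps standing in for the sonic AST (the model name is illustrative):

```go
package main

import (
	"errors"
	"fmt"
)

// toTEIRerank mirrors the renames in ConvertRerankRequest.
func toTEIRerank(req map[string]any, actualModel string) error {
	docs, ok := req["documents"]
	if !ok {
		return errors.New("documents field not found")
	}
	req["texts"] = docs
	delete(req, "documents")
	if rd, ok := req["return_documents"].(bool); ok {
		delete(req, "return_documents")
		req["return_text"] = rd
	}
	req["model"] = actualModel
	return nil
}

func main() {
	req := map[string]any{"documents": []string{"a", "b"}, "return_documents": true}
	err := toTEIRerank(req, "bge-reranker-base")
	fmt.Println(err, req["texts"], req["return_text"]) // <nil> [a b] true
}
```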

+ 5 - 5
core/relay/adaptor/text-embeddings-inference/rerank_test.go

@@ -40,16 +40,16 @@ func TestConvertRerankRequestSuccess(t *testing.T) {
 	}
 
 	// Call the function under test
-	method, _, bodyReader, err := textembeddingsinference.ConvertRerankRequest(testMeta, req)
+	result, err := textembeddingsinference.ConvertRerankRequest(testMeta, req)
 
 	// Assert no error
 	require.NoError(t, err)
 
 	// Assert method
-	assert.Equal(t, http.MethodPost, method)
+	assert.Equal(t, http.MethodPost, result.Method)
 
 	// Read the transformed body
-	bodyBytes, err := io.ReadAll(bodyReader)
+	bodyBytes, err := io.ReadAll(result.Body)
 	require.NoError(t, err)
 
 	// Parse the body back to verify the transformation
@@ -97,7 +97,7 @@ func TestConvertRerankRequestMissingDocuments(t *testing.T) {
 	}
 
 	// Call the function under test
-	_, _, _, err = textembeddingsinference.ConvertRerankRequest(testMeta, req)
+	_, err = textembeddingsinference.ConvertRerankRequest(testMeta, req)
 
 	// Assert error for missing documents
 	require.Error(t, err)
@@ -121,7 +121,7 @@ func TestConvertRerankRequestInvalidJSON(t *testing.T) {
 	}
 
 	// Call the function under test
-	_, _, _, err = textembeddingsinference.ConvertRerankRequest(testMeta, req)
+	_, err = textembeddingsinference.ConvertRerankRequest(testMeta, req)
 
 	// Assert error for invalid JSON
 	require.Error(t, err)

+ 5 - 6
core/relay/adaptor/vertexai/adaptor.go

@@ -4,13 +4,12 @@ import (
 	"context"
 	"errors"
 	"fmt"
-	"io"
 	"net/http"
 	"strings"
 
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
 	"github.com/labring/aiproxy/core/relay/utils"
@@ -28,19 +27,19 @@ type Config struct {
 	ADCJSON   string
 }
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, request *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, request *http.Request) (*adaptor.ConvertRequestResult, error) {
 	adaptor := GetAdaptor(meta.ActualModel)
 	if adaptor == nil {
-		return "", nil, nil, errors.New("adaptor not found")
+		return nil, errors.New("adaptor not found")
 	}
 
 	return adaptor.ConvertRequest(meta, request)
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err adaptor.Error) {
 	adaptor := GetAdaptor(meta.ActualModel)
 	if adaptor == nil {
-		return nil, openai.ErrorWrapperWithMessage(meta.ActualModel+" adaptor not found", "adaptor_not_found", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(meta.ActualModel+" adaptor not found", "adaptor_not_found", http.StatusInternalServerError)
 	}
 	return adaptor.DoResponse(meta, c, resp)
 }

+ 12 - 9
core/relay/adaptor/vertexai/claude/adapter.go

@@ -4,7 +4,6 @@ import (
 	"bytes"
 	"context"
 	"fmt"
-	"io"
 	"net/http"
 
 	"github.com/bytedance/sonic"
@@ -12,8 +11,8 @@ import (
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/common"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/anthropic"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	"github.com/labring/aiproxy/core/relay/mode"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
@@ -25,9 +24,9 @@ const anthropicVersion = "vertex-2023-10-16"
 
 type Adaptor struct{}
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, request *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, request *http.Request) (*adaptor.ConvertRequestResult, error) {
 	if request == nil {
-		return "", nil, nil, errors.New("request is nil")
+		return nil, errors.New("request is nil")
 	}
 
 	var (
@@ -41,14 +40,18 @@ func (a *Adaptor) ConvertRequest(meta *meta.Meta, request *http.Request) (string
 	case mode.Anthropic:
 		data, err = handleAnthropicRequest(meta, request)
 	default:
-		return "", nil, nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
+		return nil, fmt.Errorf("unsupported mode: %s", meta.Mode)
 	}
 
 	if err != nil {
-		return "", nil, nil, err
+		return nil, err
 	}
 
-	return http.MethodPost, nil, bytes.NewReader(data), nil
+	return &adaptor.ConvertRequestResult{
+		Method: http.MethodPost,
+		Header: nil,
+		Body:   bytes.NewReader(data),
+	}, nil
 }
 
 func handleChatCompletionsRequest(meta *meta.Meta, request *http.Request) ([]byte, error) {
@@ -97,7 +100,7 @@ func handleAnthropicRequest(meta *meta.Meta, request *http.Request) ([]byte, err
 	return node.MarshalJSON()
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err adaptor.Error) {
 	switch meta.Mode {
 	case mode.ChatCompletions:
 		if utils.IsStreamResponse(resp) {
@@ -112,7 +115,7 @@ func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Respons
 			usage, err = anthropic.Handler(meta, c, resp)
 		}
 	default:
-		return nil, openai.ErrorWrapperWithMessage(fmt.Sprintf("unsupported mode: %s", meta.Mode), "unsupported_mode", http.StatusBadRequest)
+		return nil, relaymodel.WrapperOpenAIErrorWithMessage(fmt.Sprintf("unsupported mode: %s", meta.Mode), "unsupported_mode", http.StatusBadRequest)
 	}
 	return
 }

+ 3 - 4
core/relay/adaptor/vertexai/gemini/adapter.go

@@ -1,25 +1,24 @@
 package vertexai
 
 import (
-	"io"
 	"net/http"
 
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/gemini"
 	"github.com/labring/aiproxy/core/relay/meta"
 	"github.com/labring/aiproxy/core/relay/mode"
-	relaymodel "github.com/labring/aiproxy/core/relay/model"
 	"github.com/labring/aiproxy/core/relay/utils"
 )
 
 type Adaptor struct{}
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, request *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, request *http.Request) (*adaptor.ConvertRequestResult, error) {
 	return gemini.ConvertRequest(meta, request)
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err adaptor.Error) {
 	switch meta.Mode {
 	case mode.Embeddings:
 		usage, err = gemini.EmbeddingHandler(meta, c, resp)

+ 3 - 4
core/relay/adaptor/vertexai/registry.go

@@ -1,17 +1,16 @@
 package vertexai
 
 import (
-	"io"
 	"net/http"
 	"strings"
 
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/gemini"
 	vertexclaude "github.com/labring/aiproxy/core/relay/adaptor/vertexai/claude"
 	vertexgemini "github.com/labring/aiproxy/core/relay/adaptor/vertexai/gemini"
 	"github.com/labring/aiproxy/core/relay/meta"
-	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
 type ModelType int
@@ -30,8 +29,8 @@ func init() {
 }
 
 type innerAIAdapter interface {
-	ConvertRequest(meta *meta.Meta, request *http.Request) (string, http.Header, io.Reader, error)
-	DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode)
+	ConvertRequest(meta *meta.Meta, request *http.Request) (*adaptor.ConvertRequestResult, error)
+	DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err adaptor.Error)
 }
 
 func GetAdaptor(model string) innerAIAdapter {
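
Note: the registry narrows both interface methods to the new types in one place, so every Vertex sub-adaptor (Claude, Gemini) compiles against the same contract. A sketch of the per-model dispatch that `GetAdaptor` implies; the prefix rules and the fallback here are assumptions, since the real table is built in the `init` above and elided from this diff:

```go
package main

import (
	"fmt"
	"strings"
)

// innerAIAdapter is reduced to a single method here; the real interface
// carries ConvertRequest and DoResponse as shown above.
type innerAIAdapter interface {
	Name() string
}

type claudeAdaptor struct{}

func (claudeAdaptor) Name() string { return "vertex-claude" }

type geminiAdaptor struct{}

func (geminiAdaptor) Name() string { return "vertex-gemini" }

// getAdaptor sketches prefix-based dispatch; the actual mapping rules are an
// assumption, not copied from the elided registry table.
func getAdaptor(model string) innerAIAdapter {
	switch {
	case strings.HasPrefix(model, "claude"):
		return claudeAdaptor{}
	case strings.HasPrefix(model, "gemini"):
		return geminiAdaptor{}
	default:
		return nil
	}
}

func main() {
	fmt.Println(getAdaptor("claude-3-5-sonnet").Name()) // vertex-claude
}
```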

+ 2 - 2
core/relay/adaptor/xai/adaptor.go

@@ -5,9 +5,9 @@ import (
 
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
-	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
 type Adaptor struct {
@@ -20,7 +20,7 @@ func (a *Adaptor) GetBaseURL() string {
 	return baseURL
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err adaptor.Error) {
 	if resp.StatusCode != http.StatusOK {
 		return nil, ErrorHandler(resp)
 	}

+ 6 - 6
core/relay/adaptor/xai/error.go

@@ -7,8 +7,8 @@ import (
 
 	"github.com/bytedance/sonic"
 	"github.com/labring/aiproxy/core/common/conv"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
-	model "github.com/labring/aiproxy/core/relay/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
+	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
 type errorResponse struct {
@@ -16,18 +16,18 @@ type errorResponse struct {
 	Code  string `json:"code"`
 }
 
-func ErrorHandler(resp *http.Response) *model.ErrorWithStatusCode {
+func ErrorHandler(resp *http.Response) adaptor.Error {
 	defer resp.Body.Close()
 
 	data, err := io.ReadAll(resp.Body)
 	if err != nil {
-		return openai.ErrorWrapperWithMessage("read response body error: "+err.Error(), nil, http.StatusInternalServerError)
+		return relaymodel.WrapperOpenAIErrorWithMessage("read response body error: "+err.Error(), nil, http.StatusInternalServerError)
 	}
 
 	var er errorResponse
 	err = sonic.Unmarshal(data, &er)
 	if err != nil {
-		return openai.ErrorWrapperWithMessage(conv.BytesToString(data), nil, http.StatusInternalServerError)
+		return relaymodel.WrapperOpenAIErrorWithMessage(conv.BytesToString(data), nil, http.StatusInternalServerError)
 	}
 
 	statusCode := resp.StatusCode
@@ -36,5 +36,5 @@ func ErrorHandler(resp *http.Response) *model.ErrorWithStatusCode {
 		statusCode = http.StatusUnauthorized
 	}
 
-	return openai.ErrorWrapperWithMessage(er.Error, er.Code, statusCode)
+	return relaymodel.WrapperOpenAIErrorWithMessage(er.Error, er.Code, statusCode)
 }

+ 2 - 7
core/relay/adaptor/xunfei/adaptor.go

@@ -1,7 +1,6 @@
 package xunfei
 
 import (
-	"io"
 	"net/http"
 
 	"github.com/labring/aiproxy/core/model"
@@ -20,18 +19,14 @@ func (a *Adaptor) GetBaseURL() string {
 
 const baseURL = "https://spark-api-open.xf-yun.com/v1"
 
-func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (string, http.Header, io.Reader, error) {
+func (a *Adaptor) ConvertRequest(meta *meta.Meta, req *http.Request) (*adaptor.ConvertRequestResult, error) {
 	domain := getXunfeiDomain(meta.ActualModel)
 	model := meta.ActualModel
 	meta.ActualModel = domain
 	defer func() {
 		meta.ActualModel = model
 	}()
-	method, h, body, err := a.Adaptor.ConvertRequest(meta, req)
-	if err != nil {
-		return "", nil, nil, err
-	}
-	return method, h, body, nil
+	return a.Adaptor.ConvertRequest(meta, req)
 }
 
 func (a *Adaptor) GetModelList() []*model.ModelConfig {

+ 1 - 2
core/relay/adaptor/zhipu/adaptor.go

@@ -9,7 +9,6 @@ import (
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	"github.com/labring/aiproxy/core/relay/mode"
-	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
 type Adaptor struct {
@@ -22,7 +21,7 @@ func (a *Adaptor) GetBaseURL() string {
 	return baseURL
 }
 
-func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err *relaymodel.ErrorWithStatusCode) {
+func (a *Adaptor) DoResponse(meta *meta.Meta, c *gin.Context, resp *http.Response) (usage *model.Usage, err adaptor.Error) {
 	switch meta.Mode {
 	case mode.Embeddings:
 		usage, err = EmbeddingsHandler(c, resp)

+ 8 - 3
core/relay/adaptor/zhipu/main.go

@@ -6,6 +6,7 @@ import (
 	"github.com/bytedance/sonic"
 	"github.com/gin-gonic/gin"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
@@ -15,18 +16,22 @@ import (
 // https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/invoke
 // https://open.bigmodel.cn/api/paas/v3/model-api/chatglm_std/sse-invoke
 
-func EmbeddingsHandler(c *gin.Context, resp *http.Response) (*model.Usage, *relaymodel.ErrorWithStatusCode) {
+func EmbeddingsHandler(c *gin.Context, resp *http.Response) (*model.Usage, adaptor.Error) {
+	if resp.StatusCode != http.StatusOK {
+		return nil, openai.ErrorHanlder(resp)
+	}
+
 	defer resp.Body.Close()
 
 	var zhipuResponse EmbeddingResponse
 	err := sonic.ConfigDefault.NewDecoder(resp.Body).Decode(&zhipuResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "unmarshal_response_body_failed", http.StatusInternalServerError)
 	}
 	fullTextResponse := embeddingResponseZhipu2OpenAI(&zhipuResponse)
 	jsonResponse, err := sonic.Marshal(fullTextResponse)
 	if err != nil {
-		return nil, openai.ErrorWrapper(err, "marshal_response_body_failed", http.StatusInternalServerError)
+		return nil, relaymodel.WrapperOpenAIError(err, "marshal_response_body_failed", http.StatusInternalServerError)
 	}
 	c.Writer.Header().Set("Content-Type", "application/json")
 	c.Writer.WriteHeader(resp.StatusCode)

+ 26 - 25
core/relay/controller/dohelper.go

@@ -16,7 +16,6 @@ import (
 	"github.com/labring/aiproxy/core/middleware"
 	"github.com/labring/aiproxy/core/model"
 	"github.com/labring/aiproxy/core/relay/adaptor"
-	"github.com/labring/aiproxy/core/relay/adaptor/openai"
 	"github.com/labring/aiproxy/core/relay/meta"
 	"github.com/labring/aiproxy/core/relay/mode"
 	relaymodel "github.com/labring/aiproxy/core/relay/model"
@@ -89,7 +88,7 @@ func DoHelper(
 ) (
 	model.Usage,
 	*RequestDetail,
-	*relaymodel.ErrorWithStatusCode,
+	adaptor.Error,
 ) {
 	detail := RequestDetail{}
 
@@ -106,8 +105,9 @@ func DoHelper(
 
 	// 3. Handle error response
 	if resp == nil {
-		relayErr := openai.ErrorWrapperWithMessage("response is nil", openai.ErrorCodeBadResponse, http.StatusInternalServerError)
-		detail.ResponseBody = relayErr.JSONOrEmpty()
+		relayErr := relaymodel.WrapperErrorWithMessage(meta.Mode, http.StatusInternalServerError, "response is nil", relaymodel.ErrorCodeBadResponse)
+		respBody, _ := relayErr.MarshalJSON()
+		detail.ResponseBody = conv.BytesToString(respBody)
 		return model.Usage{}, &detail, relayErr
 	}
 
@@ -125,7 +125,7 @@ func DoHelper(
 	return usage, &detail, nil
 }
 
-func getRequestBody(meta *meta.Meta, c *gin.Context, detail *RequestDetail) *relaymodel.ErrorWithStatusCode {
+func getRequestBody(meta *meta.Meta, c *gin.Context, detail *RequestDetail) adaptor.Error {
 	switch {
 	case meta.Mode == mode.AudioTranscription,
 		meta.Mode == mode.AudioTranslation,
@@ -136,21 +136,21 @@ func getRequestBody(meta *meta.Meta, c *gin.Context, detail *RequestDetail) *rel
 	default:
 		reqBody, err := common.GetRequestBody(c.Request)
 		if err != nil {
-			return openai.ErrorWrapperWithMessage("get request body failed: "+err.Error(), "get_request_body_failed", http.StatusBadRequest)
+			return relaymodel.WrapperErrorWithMessage(meta.Mode, http.StatusBadRequest, "get request body failed: "+err.Error(), "get_request_body_failed")
 		}
 		detail.RequestBody = conv.BytesToString(reqBody)
 		return nil
 	}
 }
 
-func prepareAndDoRequest(a adaptor.Adaptor, c *gin.Context, meta *meta.Meta) (*http.Response, *relaymodel.ErrorWithStatusCode) {
+func prepareAndDoRequest(a adaptor.Adaptor, c *gin.Context, meta *meta.Meta) (*http.Response, adaptor.Error) {
 	log := middleware.GetLogger(c)
 
-	method, header, body, err := a.ConvertRequest(meta, c.Request)
+	convertResult, err := a.ConvertRequest(meta, c.Request)
 	if err != nil {
-		return nil, openai.ErrorWrapperWithMessage("convert request failed: "+err.Error(), "convert_request_failed", http.StatusBadRequest)
+		return nil, relaymodel.WrapperErrorWithMessage(meta.Mode, http.StatusBadRequest, "convert request failed: "+err.Error(), "convert_request_failed")
 	}
-	if closer, ok := body.(io.Closer); ok {
+	if closer, ok := convertResult.Body.(io.Closer); ok {
 		defer closer.Close()
 	}
 
@@ -160,10 +160,10 @@ func prepareAndDoRequest(a adaptor.Adaptor, c *gin.Context, meta *meta.Meta) (*h
 
 	fullRequestURL, err := a.GetRequestURL(meta)
 	if err != nil {
-		return nil, openai.ErrorWrapperWithMessage("get request url failed: "+err.Error(), "get_request_url_failed", http.StatusBadRequest)
+		return nil, relaymodel.WrapperErrorWithMessage(meta.Mode, http.StatusBadRequest, "get request url failed: "+err.Error(), "get_request_url_failed")
 	}
 
-	log.Debugf("request url: %s %s", method, fullRequestURL)
+	log.Debugf("request url: %s %s", convertResult.Method, fullRequestURL)
 
 	ctx := context.Background()
 	if timeout := meta.ModelConfig.Timeout; timeout > 0 {
@@ -174,19 +174,19 @@ func prepareAndDoRequest(a adaptor.Adaptor, c *gin.Context, meta *meta.Meta) (*h
 		defer cancel()
 	}
 
-	req, err := http.NewRequestWithContext(ctx, method, fullRequestURL, body)
+	req, err := http.NewRequestWithContext(ctx, convertResult.Method, fullRequestURL, convertResult.Body)
 	if err != nil {
-		return nil, openai.ErrorWrapperWithMessage("new request failed: "+err.Error(), "new_request_failed", http.StatusBadRequest)
+		return nil, relaymodel.WrapperErrorWithMessage(meta.Mode, http.StatusBadRequest, "new request failed: "+err.Error(), "new_request_failed")
 	}
 
-	if err := setupRequestHeader(a, c, meta, req, header); err != nil {
+	if err := setupRequestHeader(a, c, meta, req, convertResult.Header); err != nil {
 		return nil, err
 	}
 
 	return doRequest(a, c, meta, req)
 }
 
-func setupRequestHeader(a adaptor.Adaptor, c *gin.Context, meta *meta.Meta, req *http.Request, header http.Header) *relaymodel.ErrorWithStatusCode {
+func setupRequestHeader(a adaptor.Adaptor, c *gin.Context, meta *meta.Meta, req *http.Request, header http.Header) adaptor.Error {
 	contentType := req.Header.Get("Content-Type")
 	if contentType == "" {
 		contentType = "application/json; charset=utf-8"
@@ -196,32 +196,32 @@ func setupRequestHeader(a adaptor.Adaptor, c *gin.Context, meta *meta.Meta, req
 		req.Header[key] = value
 	}
 	if err := a.SetupRequestHeader(meta, c, req); err != nil {
-		return openai.ErrorWrapperWithMessage("setup request header failed: "+err.Error(), "setup_request_header_failed", http.StatusInternalServerError)
+		return relaymodel.WrapperErrorWithMessage(meta.Mode, http.StatusInternalServerError, "setup request header failed: "+err.Error(), "setup_request_header_failed")
 	}
 	return nil
 }
 
-func doRequest(a adaptor.Adaptor, c *gin.Context, meta *meta.Meta, req *http.Request) (*http.Response, *relaymodel.ErrorWithStatusCode) {
+func doRequest(a adaptor.Adaptor, c *gin.Context, meta *meta.Meta, req *http.Request) (*http.Response, adaptor.Error) {
 	resp, err := a.DoRequest(meta, c, req)
 	if err != nil {
 		if errors.Is(err, context.Canceled) {
-			return nil, openai.ErrorWrapperWithMessage("do request failed: request canceled by client", "request_canceled", http.StatusBadRequest)
+			return nil, relaymodel.WrapperErrorWithMessage(meta.Mode, http.StatusBadRequest, "do request failed: request canceled by client", "request_canceled")
 		}
 		if errors.Is(err, context.DeadlineExceeded) {
-			return nil, openai.ErrorWrapperWithMessage("do request failed: request timeout", "request_timeout", http.StatusGatewayTimeout)
+			return nil, relaymodel.WrapperErrorWithMessage(meta.Mode, http.StatusGatewayTimeout, "do request failed: request timeout", "request_timeout")
 		}
 		if errors.Is(err, io.EOF) {
-			return nil, openai.ErrorWrapperWithMessage("do request failed: "+err.Error(), "request_failed", http.StatusServiceUnavailable)
+			return nil, relaymodel.WrapperErrorWithMessage(meta.Mode, http.StatusServiceUnavailable, "do request failed: "+err.Error(), "request_failed")
 		}
 		if errors.Is(err, io.ErrUnexpectedEOF) {
-			return nil, openai.ErrorWrapperWithMessage("do request failed: "+err.Error(), "request_failed", http.StatusInternalServerError)
+			return nil, relaymodel.WrapperErrorWithMessage(meta.Mode, http.StatusInternalServerError, "do request failed: "+err.Error(), "request_failed")
 		}
-		return nil, openai.ErrorWrapperWithMessage("do request failed: "+err.Error(), "request_failed", http.StatusBadRequest)
+		return nil, relaymodel.WrapperErrorWithMessage(meta.Mode, http.StatusBadRequest, "do request failed: "+err.Error(), "request_failed")
 	}
 	return resp, nil
 }
 
-func handleResponse(a adaptor.Adaptor, c *gin.Context, meta *meta.Meta, resp *http.Response, detail *RequestDetail) (model.Usage, *relaymodel.ErrorWithStatusCode) {
+func handleResponse(a adaptor.Adaptor, c *gin.Context, meta *meta.Meta, resp *http.Response, detail *RequestDetail) (model.Usage, adaptor.Error) {
 	buf := getBuffer()
 	defer putBuffer(buf)
 
@@ -240,7 +240,8 @@ func handleResponse(a adaptor.Adaptor, c *gin.Context, meta *meta.Meta, resp *ht
 
 	usage, relayErr := a.DoResponse(meta, c, resp)
 	if relayErr != nil {
-		detail.ResponseBody = relayErr.JSONOrEmpty()
+		respBody, _ := relayErr.MarshalJSON()
+		detail.ResponseBody = conv.BytesToString(respBody)
 	} else {
 		// copy body buffer
 		// do not use bytes conv
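
Note: `doRequest` maps well-known transport failures onto distinct HTTP status codes before wrapping them. A standalone sketch of that classification; the helper name is illustrative, the mapping itself is copied from the hunk above:

```go
package main

import (
	"context"
	"errors"
	"fmt"
	"io"
	"net/http"
)

// classify reproduces the error-to-status mapping in doRequest.
func classify(err error) int {
	switch {
	case errors.Is(err, context.Canceled):
		return http.StatusBadRequest // client canceled the request
	case errors.Is(err, context.DeadlineExceeded):
		return http.StatusGatewayTimeout // per-model timeout elapsed
	case errors.Is(err, io.EOF):
		return http.StatusServiceUnavailable
	case errors.Is(err, io.ErrUnexpectedEOF):
		return http.StatusInternalServerError
	default:
		return http.StatusBadRequest
	}
}

func main() {
	fmt.Println(classify(context.DeadlineExceeded)) // 504
}
```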

+ 1 - 2
core/relay/controller/handle.go

@@ -7,12 +7,11 @@ import (
 	"github.com/labring/aiproxy/core/model"
 	"github.com/labring/aiproxy/core/relay/adaptor"
 	"github.com/labring/aiproxy/core/relay/meta"
-	relaymodel "github.com/labring/aiproxy/core/relay/model"
 )
 
 // HandleResult contains all the information needed for consumption recording
 type HandleResult struct {
-	Error  *relaymodel.ErrorWithStatusCode
+	Error  adaptor.Error
 	Usage  model.Usage
 	Detail *RequestDetail
 }

+ 30 - 0
core/relay/model/anthropic.go

@@ -1,6 +1,36 @@
 package model
 
+import "github.com/labring/aiproxy/core/relay/adaptor"
+
 type AnthropicMessageRequest struct {
 	Model    string     `json:"model,omitempty"`
 	Messages []*Message `json:"messages,omitempty"`
 }
+
+type AnthropicError struct {
+	Type    string `json:"type"`
+	Message string `json:"message"`
+}
+
+type AnthropicErrorResponse struct {
+	Type  string         `json:"type"`
+	Error AnthropicError `json:"error"`
+}
+
+func NewAnthropicError(statusCode int, err AnthropicError) adaptor.Error {
+	return adaptor.NewError(statusCode, AnthropicErrorResponse{
+		Type:  "error",
+		Error: err,
+	})
+}
+
+func WrapperAnthropicError(err error, typ string, statusCode int) adaptor.Error {
+	return WrapperAnthropicErrorWithMessage(err.Error(), typ, statusCode)
+}
+
+func WrapperAnthropicErrorWithMessage(message string, typ string, statusCode int) adaptor.Error {
+	return NewAnthropicError(statusCode, AnthropicError{
+		Type:    typ,
+		Message: message,
+	})
+}
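
Note: this file is the first consumer of the new `adaptor.NewError` constructor. The `adaptor.Error` contract itself lives in core/relay/adaptor/interface.go (changed in this PR but not excerpted here); from the call sites visible in the diff it can be sketched roughly as below — the `StatusCode` method and its zero-value fallback are assumptions:

```go
package adaptor

import (
	"encoding/json"
	"net/http"
)

// Error is inferred from its call sites in this diff: dohelper.go calls
// MarshalJSON on it, and it is constructed from a status code plus an
// arbitrary payload.
type Error interface {
	json.Marshaler
	StatusCode() int
}

// simpleError is a hypothetical minimal implementation.
type simpleError struct {
	status  int
	payload any
}

// NewError matches the constructor shape used by NewAnthropicError above.
func NewError(status int, payload any) Error {
	return &simpleError{status: status, payload: payload}
}

func (e *simpleError) MarshalJSON() ([]byte, error) { return json.Marshal(e.payload) }

func (e *simpleError) StatusCode() int {
	if e.status == 0 {
		return http.StatusInternalServerError // assumed fallback
	}
	return e.status
}
```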

+ 18 - 28
core/relay/model/chat.go

@@ -1,9 +1,8 @@
 package model
 
 import (
-	"github.com/bytedance/sonic"
-	"github.com/labring/aiproxy/core/common/conv"
 	"github.com/labring/aiproxy/core/model"
+	"github.com/labring/aiproxy/core/relay/adaptor"
 )
 
 type Usage struct {
@@ -74,40 +73,31 @@ type CompletionTokensDetails struct {
 	RejectedPredictionTokens int64 `json:"rejected_prediction_tokens"`
 }
 
-type Error struct {
+type OpenAIErrorResponse struct {
+	Error OpenAIError `json:"error"`
+}
+
+type OpenAIError struct {
 	Code    any    `json:"code,omitempty"`
 	Message string `json:"message,omitempty"`
 	Type    string `json:"type,omitempty"`
 	Param   string `json:"param,omitempty"`
 }
 
-func (e *Error) IsEmpty() bool {
-	return e == nil || (e.Code == nil && e.Message == "" && e.Type == "" && e.Param == "")
-}
-
-func (e *Error) JSONOrEmpty() string {
-	if e.IsEmpty() {
-		return ""
-	}
-	jsonBuf, err := sonic.Marshal(e)
-	if err != nil {
-		return ""
-	}
-	return conv.BytesToString(jsonBuf)
+func NewOpenAIError(statusCode int, err OpenAIError) adaptor.Error {
+	return adaptor.NewError(statusCode, OpenAIErrorResponse{
+		Error: err,
+	})
 }
 
-type ErrorWithStatusCode struct {
-	Error      Error `json:"error,omitempty"`
-	StatusCode int   `json:"-"`
+func WrapperOpenAIError(err error, code any, statusCode int) adaptor.Error {
+	return WrapperOpenAIErrorWithMessage(err.Error(), code, statusCode)
 }
 
-func (e *ErrorWithStatusCode) JSONOrEmpty() string {
-	if e.StatusCode == 0 && e.Error.IsEmpty() {
-		return ""
-	}
-	jsonBuf, err := sonic.MarshalString(e)
-	if err != nil {
-		return ""
-	}
-	return jsonBuf
+func WrapperOpenAIErrorWithMessage(message string, code any, statusCode int) adaptor.Error {
+	return NewOpenAIError(statusCode, OpenAIError{
+		Message: message,
+		Type:    ErrorTypeAIPROXY,
+		Code:    code,
+	})
 }
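
Note: for reference, what `WrapperOpenAIErrorWithMessage` puts on the wire, assuming `adaptor.NewError` serializes its payload verbatim; the struct shapes are copied from the hunk above, the field values are illustrative:

```go
package main

import (
	"encoding/json"
	"fmt"
)

type OpenAIError struct {
	Code    any    `json:"code,omitempty"`
	Message string `json:"message,omitempty"`
	Type    string `json:"type,omitempty"`
	Param   string `json:"param,omitempty"`
}

type OpenAIErrorResponse struct {
	Error OpenAIError `json:"error"`
}

func main() {
	b, _ := json.Marshal(OpenAIErrorResponse{Error: OpenAIError{
		Code:    "set_model_failed",
		Message: "unexpected end of JSON input",
		Type:    "aiproxy_error",
	}})
	fmt.Println(string(b))
	// {"error":{"code":"set_model_failed","message":"unexpected end of JSON input","type":"aiproxy_error"}}
}
```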

+ 35 - 0
core/relay/model/errors.go

@@ -0,0 +1,35 @@
+package model
+
+import (
+	"github.com/labring/aiproxy/core/relay/adaptor"
+	"github.com/labring/aiproxy/core/relay/mode"
+)
+
+const (
+	ErrorTypeAIPROXY     = "aiproxy_error"
+	ErrorTypeUpstream    = "upstream_error"
+	ErrorCodeBadResponse = "bad_response"
+)
+
+func WrapperError(m mode.Mode, statusCode int, err error, typ ...string) adaptor.Error {
+	return WrapperErrorWithMessage(m, statusCode, err.Error(), typ...)
+}
+
+func WrapperErrorWithMessage(m mode.Mode, statusCode int, message string, typ ...string) adaptor.Error {
+	respType := ErrorTypeAIPROXY
+	if len(typ) > 0 {
+		respType = typ[0]
+	}
+	switch m {
+	case mode.Anthropic:
+		return NewAnthropicError(statusCode, AnthropicError{
+			Message: message,
+			Type:    respType,
+		})
+	default:
+		return NewOpenAIError(statusCode, OpenAIError{
+			Message: message,
+			Type:    respType,
+		})
+	}
+}
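
Note: `WrapperErrorWithMessage` is the one call site that must know which protocol the client spoke — Anthropic-mode requests get an Anthropic-shaped error body, everything else gets the OpenAI shape. A compact standalone sketch of that dispatch, with a local mode enum and the two payload shapes from the files above standing in for the real packages:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Local stand-ins for core/relay/mode; only the dispatch is the point here.
type Mode int

const (
	ModeChatCompletions Mode = iota
	ModeAnthropic
)

type openAIErrorResponse struct {
	Error struct {
		Message string `json:"message,omitempty"`
		Type    string `json:"type,omitempty"`
	} `json:"error"`
}

type anthropicErrorResponse struct {
	Type  string `json:"type"`
	Error struct {
		Type    string `json:"type"`
		Message string `json:"message"`
	} `json:"error"`
}

// wrapError mirrors the switch in WrapperErrorWithMessage: one failure, two
// wire shapes, chosen by the protocol the client spoke.
func wrapError(m Mode, message, typ string) []byte {
	switch m {
	case ModeAnthropic:
		var r anthropicErrorResponse
		r.Type = "error"
		r.Error.Type = typ
		r.Error.Message = message
		b, _ := json.Marshal(r)
		return b
	default:
		var r openAIErrorResponse
		r.Error.Message = message
		r.Error.Type = typ
		b, _ := json.Marshal(r)
		return b
	}
}

func main() {
	fmt.Println(string(wrapError(ModeAnthropic, "response is nil", "aiproxy_error")))
	fmt.Println(string(wrapError(ModeChatCompletions, "response is nil", "aiproxy_error")))
}
```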