relay-utils.go

package controller

import (
    "encoding/json"
    "fmt"
    "github.com/gin-gonic/gin"
    "github.com/pkoukk/tiktoken-go"
    "io"
    "net/http"
    "one-api/common"
    "strconv"
    "strings"
)

var stopFinishReason = "stop"

// tokenEncoderMap won't grow after initialization
var tokenEncoderMap = map[string]*tiktoken.Tiktoken{}
var defaultTokenEncoder *tiktoken.Tiktoken
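
// InitTokenEncoders preloads the tiktoken encoders for the GPT-3.5 and GPT-4
// model families and registers the gpt-3.5-turbo encoder as the fallback.
// It is expected to be called once at startup, before any request is relayed.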
func InitTokenEncoders() {
    common.SysLog("initializing token encoders")
    gpt35TokenEncoder, err := tiktoken.EncodingForModel("gpt-3.5-turbo")
    if err != nil {
        common.FatalLog(fmt.Sprintf("failed to get gpt-3.5-turbo token encoder: %s", err.Error()))
    }
    defaultTokenEncoder = gpt35TokenEncoder
    gpt4TokenEncoder, err := tiktoken.EncodingForModel("gpt-4")
    if err != nil {
        common.FatalLog(fmt.Sprintf("failed to get gpt-4 token encoder: %s", err.Error()))
    }
    for model := range common.ModelRatio {
        if strings.HasPrefix(model, "gpt-3.5") {
            tokenEncoderMap[model] = gpt35TokenEncoder
        } else if strings.HasPrefix(model, "gpt-4") {
            tokenEncoderMap[model] = gpt4TokenEncoder
        } else {
            tokenEncoderMap[model] = nil
        }
    }
    common.SysLog("token encoders initialized")
}
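
// getTokenEncoder returns the encoder registered for the given model. Models
// that were seeded with nil in InitTokenEncoders are resolved lazily via
// tiktoken.EncodingForModel and cached; unknown models fall back to the
// gpt-3.5-turbo encoder.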
func getTokenEncoder(model string) *tiktoken.Tiktoken {
    tokenEncoder, ok := tokenEncoderMap[model]
    if ok && tokenEncoder != nil {
        return tokenEncoder
    }
    if ok {
        tokenEncoder, err := tiktoken.EncodingForModel(model)
        if err != nil {
            common.SysError(fmt.Sprintf("failed to get token encoder for model %s: %s, using encoder for gpt-3.5-turbo", model, err.Error()))
            tokenEncoder = defaultTokenEncoder
        }
        tokenEncoderMap[model] = tokenEncoder
        return tokenEncoder
    }
    return defaultTokenEncoder
}
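
// getTokenNum returns the number of tokens the given encoder produces for text.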
func getTokenNum(tokenEncoder *tiktoken.Tiktoken, text string) int {
    return len(tokenEncoder.Encode(text, nil, nil))
}
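
// countTokenMessages estimates the prompt token count for a chat completion
// request, following the OpenAI cookbook accounting: a fixed per-message
// overhead, the encoded role, content and optional name of every message, and
// a 3-token priming cost for the assistant reply.
//
// Illustrative example (token counts are approximate and depend on the
// encoder): a single {"role": "user", "content": "Hello!"} message on
// gpt-3.5-turbo is counted as 3 (per message) + 1 ("user") + 2 ("Hello!")
// + 3 (reply priming) = 9 tokens.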
func countTokenMessages(messages []Message, model string) int {
    tokenEncoder := getTokenEncoder(model)
    // Reference:
    // https://github.com/openai/openai-cookbook/blob/main/examples/How_to_count_tokens_with_tiktoken.ipynb
    // https://github.com/pkoukk/tiktoken-go/issues/6
    //
    // Every message follows <|start|>{role/name}\n{content}<|end|>\n
    var tokensPerMessage int
    var tokensPerName int
    if model == "gpt-3.5-turbo-0301" {
        tokensPerMessage = 4
        tokensPerName = -1 // If there's a name, the role is omitted
    } else {
        tokensPerMessage = 3
        tokensPerName = 1
    }
    tokenNum := 0
    for _, message := range messages {
        tokenNum += tokensPerMessage
        tokenNum += getTokenNum(tokenEncoder, message.Content)
        tokenNum += getTokenNum(tokenEncoder, message.Role)
        if message.Name != nil {
            tokenNum += tokensPerName
            tokenNum += getTokenNum(tokenEncoder, *message.Name)
        }
    }
    tokenNum += 3 // Every reply is primed with <|start|>assistant<|message|>
    return tokenNum
}
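
// countTokenInput counts tokens for an input that may be either a single
// string or a list of strings (e.g. an embeddings request); any other type
// counts as zero.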
func countTokenInput(input any, model string) int {
    switch v := input.(type) {
    case string:
        return countTokenText(v, model)
    case []string:
        text := ""
        for _, s := range v {
            text += s
        }
        return countTokenText(text, model)
    }
    return 0
}
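
// countTokenText counts the tokens of a plain text string for the given model.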
func countTokenText(text string, model string) int {
    tokenEncoder := getTokenEncoder(model)
    return getTokenNum(tokenEncoder, text)
}
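
// errorWrapper converts an internal error into the OpenAI-compatible error
// body that is returned to the client, hiding upstream connection details.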
func errorWrapper(err error, code string, statusCode int) *OpenAIErrorWithStatusCode {
    text := err.Error()
    // Replace raw HTTP client errors ("Post <url>: ...") so that internal
    // upstream addresses are not exposed to the client.
    if strings.Contains(text, "Post") {
        text = "请求上游地址失败" // "failed to request the upstream address"
    }
    openAIError := OpenAIError{
        Message: text,
        Type:    "one_api_error",
        Code:    code,
    }
    return &OpenAIErrorWithStatusCode{
        OpenAIError: openAIError,
        StatusCode:  statusCode,
    }
}
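
// shouldDisableChannel reports whether an upstream error indicates that the
// channel should be disabled automatically, e.g. an invalid API key, a
// deactivated account, or exhausted quota.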
func shouldDisableChannel(err *OpenAIError, statusCode int) bool {
    if !common.AutomaticDisableChannelEnabled {
        return false
    }
    if err == nil {
        return false
    }
    if statusCode == http.StatusUnauthorized {
        return true
    }
    if err.Type == "insufficient_quota" || err.Code == "invalid_api_key" || err.Code == "account_deactivated" {
        return true
    }
    return false
}
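
// setEventStreamHeaders prepares the response for server-sent events (SSE):
// streaming content type, no caching, and no proxy buffering.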
func setEventStreamHeaders(c *gin.Context) {
    c.Writer.Header().Set("Content-Type", "text/event-stream")
    c.Writer.Header().Set("Cache-Control", "no-cache")
    c.Writer.Header().Set("Connection", "keep-alive")
    c.Writer.Header().Set("Transfer-Encoding", "chunked")
    c.Writer.Header().Set("X-Accel-Buffering", "no")
}
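
// relayErrorHandler builds an error response for a non-success upstream
// status code. It starts from a generic "bad_response_status_code" error and,
// if the upstream body can be read and parsed, replaces it with the error
// returned by the upstream API.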
func relayErrorHandler(resp *http.Response) (openAIErrorWithStatusCode *OpenAIErrorWithStatusCode) {
    openAIErrorWithStatusCode = &OpenAIErrorWithStatusCode{
        StatusCode: resp.StatusCode,
        OpenAIError: OpenAIError{
            Message: fmt.Sprintf("bad response status code %d", resp.StatusCode),
            Type:    "upstream_error",
            Code:    "bad_response_status_code",
            Param:   strconv.Itoa(resp.StatusCode),
        },
    }
    responseBody, err := io.ReadAll(resp.Body)
    if err != nil {
        return
    }
    err = resp.Body.Close()
    if err != nil {
        return
    }
    var textResponse TextResponse
    err = json.Unmarshal(responseBody, &textResponse)
    if err != nil {
        return
    }
    openAIErrorWithStatusCode.OpenAIError = textResponse.Error
    return
}
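
// getFullRequestURL joins the channel base URL with the request path. For
// OpenAI channels proxied through Cloudflare AI Gateway, the "/v1" prefix is
// stripped from the path because the gateway base URL already contains the
// provider segment. Illustrative example (account and gateway names are
// placeholders):
//
//     base := "https://gateway.ai.cloudflare.com/v1/my-account/my-gateway/openai"
//     getFullRequestURL(base, "/v1/chat/completions", common.ChannelTypeOpenAI)
//     // => ".../my-gateway/openai/chat/completions"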
func getFullRequestURL(baseURL string, requestURL string, channelType int) string {
    fullRequestURL := fmt.Sprintf("%s%s", baseURL, requestURL)
    if channelType == common.ChannelTypeOpenAI {
        if strings.HasPrefix(baseURL, "https://gateway.ai.cloudflare.com") {
            fullRequestURL = fmt.Sprintf("%s%s", baseURL, strings.TrimPrefix(requestURL, "/v1"))
        }
    }
    return fullRequestURL
}