relay-gemini.go

package gemini

import (
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"net/http"
	"one-api/common"
	"one-api/constant"
	"one-api/dto"
	relaycommon "one-api/relay/common"
	"one-api/relay/helper"
	"one-api/service"
	"one-api/setting/model_setting"
	"one-api/types"
	"strconv"
	"strings"
	"unicode/utf8"

	"github.com/gin-gonic/gin"
)

var geminiSupportedMimeTypes = map[string]bool{
	"application/pdf": true,
	"audio/mpeg":      true,
	"audio/mp3":       true,
	"audio/wav":       true,
	"image/png":       true,
	"image/jpeg":      true,
	"text/plain":      true,
	"video/mov":       true,
	"video/mpeg":      true,
	"video/mp4":       true,
	"video/mpg":       true,
	"video/avi":       true,
	"video/wmv":       true,
	"video/mpegps":    true,
	"video/flv":       true,
}
// Allowed thinking budget ranges for Gemini models
const (
	pro25MinBudget       = 128
	pro25MaxBudget       = 32768
	flash25MaxBudget     = 24576
	flash25LiteMinBudget = 512
	flash25LiteMaxBudget = 24576
)

// clampThinkingBudget clamps the budget to the range allowed for the given model name.
func clampThinkingBudget(modelName string, budget int) int {
	isNew25Pro := strings.HasPrefix(modelName, "gemini-2.5-pro") &&
		!strings.HasPrefix(modelName, "gemini-2.5-pro-preview-05-06") &&
		!strings.HasPrefix(modelName, "gemini-2.5-pro-preview-03-25")
	is25FlashLite := strings.HasPrefix(modelName, "gemini-2.5-flash-lite")
	if is25FlashLite {
		if budget < flash25LiteMinBudget {
			return flash25LiteMinBudget
		}
		if budget > flash25LiteMaxBudget {
			return flash25LiteMaxBudget
		}
	} else if isNew25Pro {
		if budget < pro25MinBudget {
			return pro25MinBudget
		}
		if budget > pro25MaxBudget {
			return pro25MaxBudget
		}
	} else { // other models
		if budget < 0 {
			return 0
		}
		if budget > flash25MaxBudget {
			return flash25MaxBudget
		}
	}
	return budget
}
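
// ThinkingAdaptor configures GenerationConfig.ThinkingConfig from the "-thinking-<budget>",
// "-thinking" and "-nothinking" suffixes of the requested model name, when the thinking
// adapter is enabled in the Gemini settings.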
func ThinkingAdaptor(geminiRequest *GeminiChatRequest, info *relaycommon.RelayInfo) {
	if model_setting.GetGeminiSettings().ThinkingAdapterEnabled {
		modelName := info.UpstreamModelName
		isNew25Pro := strings.HasPrefix(modelName, "gemini-2.5-pro") &&
			!strings.HasPrefix(modelName, "gemini-2.5-pro-preview-05-06") &&
			!strings.HasPrefix(modelName, "gemini-2.5-pro-preview-03-25")
		if strings.Contains(modelName, "-thinking-") {
			parts := strings.SplitN(modelName, "-thinking-", 2)
			if len(parts) == 2 && parts[1] != "" {
				if budgetTokens, err := strconv.Atoi(parts[1]); err == nil {
					clampedBudget := clampThinkingBudget(modelName, budgetTokens)
					geminiRequest.GenerationConfig.ThinkingConfig = &GeminiThinkingConfig{
						ThinkingBudget:  common.GetPointer(clampedBudget),
						IncludeThoughts: true,
					}
				}
			}
		} else if strings.HasSuffix(modelName, "-thinking") {
			unsupportedModels := []string{
				"gemini-2.5-pro-preview-05-06",
				"gemini-2.5-pro-preview-03-25",
			}
			isUnsupported := false
			for _, unsupportedModel := range unsupportedModels {
				if strings.HasPrefix(modelName, unsupportedModel) {
					isUnsupported = true
					break
				}
			}
			if isUnsupported {
				geminiRequest.GenerationConfig.ThinkingConfig = &GeminiThinkingConfig{
					IncludeThoughts: true,
				}
			} else {
				geminiRequest.GenerationConfig.ThinkingConfig = &GeminiThinkingConfig{
					IncludeThoughts: true,
				}
				if geminiRequest.GenerationConfig.MaxOutputTokens > 0 {
					budgetTokens := model_setting.GetGeminiSettings().ThinkingAdapterBudgetTokensPercentage * float64(geminiRequest.GenerationConfig.MaxOutputTokens)
					clampedBudget := clampThinkingBudget(modelName, int(budgetTokens))
					geminiRequest.GenerationConfig.ThinkingConfig.ThinkingBudget = common.GetPointer(clampedBudget)
				}
			}
		} else if strings.HasSuffix(modelName, "-nothinking") {
			if !isNew25Pro {
				geminiRequest.GenerationConfig.ThinkingConfig = &GeminiThinkingConfig{
					ThinkingBudget: common.GetPointer(0),
				}
			}
		}
	}
}
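
// CovertGemini2OpenAI converts an OpenAI-style chat completion request into a GeminiChatRequest,
// mapping generation config, safety settings, tools, response format, system messages and
// multimodal content parts.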
// Setting safety to the lowest possible values since Gemini is already powerless enough
func CovertGemini2OpenAI(textRequest dto.GeneralOpenAIRequest, info *relaycommon.RelayInfo) (*GeminiChatRequest, error) {
	geminiRequest := GeminiChatRequest{
		Contents: make([]GeminiChatContent, 0, len(textRequest.Messages)),
		GenerationConfig: GeminiChatGenerationConfig{
			Temperature:     textRequest.Temperature,
			TopP:            textRequest.TopP,
			MaxOutputTokens: textRequest.MaxTokens,
			Seed:            int64(textRequest.Seed),
		},
	}
	if model_setting.IsGeminiModelSupportImagine(info.UpstreamModelName) {
		geminiRequest.GenerationConfig.ResponseModalities = []string{
			"TEXT",
			"IMAGE",
		}
	}
	ThinkingAdaptor(&geminiRequest, info)
	safetySettings := make([]GeminiChatSafetySettings, 0, len(SafetySettingList))
	for _, category := range SafetySettingList {
		safetySettings = append(safetySettings, GeminiChatSafetySettings{
			Category:  category,
			Threshold: model_setting.GetGeminiSafetySetting(category),
		})
	}
	geminiRequest.SafetySettings = safetySettings
	// openaiContent.FuncToToolCalls()
	if textRequest.Tools != nil {
		functions := make([]dto.FunctionRequest, 0, len(textRequest.Tools))
		googleSearch := false
		codeExecution := false
		for _, tool := range textRequest.Tools {
			if tool.Function.Name == "googleSearch" {
				googleSearch = true
				continue
			}
			if tool.Function.Name == "codeExecution" {
				codeExecution = true
				continue
			}
			if tool.Function.Parameters != nil {
				params, ok := tool.Function.Parameters.(map[string]interface{})
				if ok {
					if props, hasProps := params["properties"].(map[string]interface{}); hasProps {
						if len(props) == 0 {
							tool.Function.Parameters = nil
						}
					}
				}
			}
			// Clean the parameters before appending
			cleanedParams := cleanFunctionParameters(tool.Function.Parameters)
			tool.Function.Parameters = cleanedParams
			functions = append(functions, tool.Function)
		}
		if codeExecution {
			geminiRequest.Tools = append(geminiRequest.Tools, GeminiChatTool{
				CodeExecution: make(map[string]string),
			})
		}
		if googleSearch {
			geminiRequest.Tools = append(geminiRequest.Tools, GeminiChatTool{
				GoogleSearch: make(map[string]string),
			})
		}
		if len(functions) > 0 {
			geminiRequest.Tools = append(geminiRequest.Tools, GeminiChatTool{
				FunctionDeclarations: functions,
			})
		}
		// common.SysLog("tools: " + fmt.Sprintf("%+v", geminiRequest.Tools))
		// json_data, _ := json.Marshal(geminiRequest.Tools)
		// common.SysLog("tools_json: " + string(json_data))
	}
	if textRequest.ResponseFormat != nil && (textRequest.ResponseFormat.Type == "json_schema" || textRequest.ResponseFormat.Type == "json_object") {
		geminiRequest.GenerationConfig.ResponseMimeType = "application/json"
		if textRequest.ResponseFormat.JsonSchema != nil && textRequest.ResponseFormat.JsonSchema.Schema != nil {
			cleanedSchema := removeAdditionalPropertiesWithDepth(textRequest.ResponseFormat.JsonSchema.Schema, 0)
			geminiRequest.GenerationConfig.ResponseSchema = cleanedSchema
		}
	}
	tool_call_ids := make(map[string]string)
	var system_content []string
	//shouldAddDummyModelMessage := false
	for _, message := range textRequest.Messages {
		if message.Role == "system" {
			system_content = append(system_content, message.StringContent())
			continue
		} else if message.Role == "tool" || message.Role == "function" {
			if len(geminiRequest.Contents) == 0 || geminiRequest.Contents[len(geminiRequest.Contents)-1].Role == "model" {
				geminiRequest.Contents = append(geminiRequest.Contents, GeminiChatContent{
					Role: "user",
				})
			}
			var parts = &geminiRequest.Contents[len(geminiRequest.Contents)-1].Parts
			name := ""
			if message.Name != nil {
				name = *message.Name
			} else if val, exists := tool_call_ids[message.ToolCallId]; exists {
				name = val
			}
			var contentMap map[string]interface{}
			contentStr := message.StringContent()
			// 1. Try to parse the content as a JSON object
			if err := json.Unmarshal([]byte(contentStr), &contentMap); err != nil {
				// 2. If that fails, try to parse it as a JSON array
				var contentSlice []interface{}
				if err := json.Unmarshal([]byte(contentStr), &contentSlice); err == nil {
					// If it is an array, wrap it in an object
					contentMap = map[string]interface{}{"result": contentSlice}
				} else {
					// 3. If that also fails, treat it as plain text
					contentMap = map[string]interface{}{"content": contentStr}
				}
			}
			functionResp := &FunctionResponse{
				Name:     name,
				Response: contentMap,
			}
			*parts = append(*parts, GeminiPart{
				FunctionResponse: functionResp,
			})
			continue
		}
		var parts []GeminiPart
		content := GeminiChatContent{
			Role: message.Role,
		}
		// isToolCall := false
		if message.ToolCalls != nil {
			// message.Role = "model"
			// isToolCall = true
			for _, call := range message.ParseToolCalls() {
				args := map[string]interface{}{}
				if call.Function.Arguments != "" {
					if json.Unmarshal([]byte(call.Function.Arguments), &args) != nil {
						return nil, fmt.Errorf("invalid arguments for function %s, args: %s", call.Function.Name, call.Function.Arguments)
					}
				}
				toolCall := GeminiPart{
					FunctionCall: &FunctionCall{
						FunctionName: call.Function.Name,
						Arguments:    args,
					},
				}
				parts = append(parts, toolCall)
				tool_call_ids[call.ID] = call.Function.Name
			}
		}
		openaiContent := message.ParseContent()
		imageNum := 0
		for _, part := range openaiContent {
			if part.Type == dto.ContentTypeText {
				if part.Text == "" {
					continue
				}
				parts = append(parts, GeminiPart{
					Text: part.Text,
				})
			} else if part.Type == dto.ContentTypeImageURL {
				imageNum += 1
				if constant.GeminiVisionMaxImageNum != -1 && imageNum > constant.GeminiVisionMaxImageNum {
					return nil, fmt.Errorf("too many images in the message, max allowed is %d", constant.GeminiVisionMaxImageNum)
				}
				// Check whether the image is referenced by URL
				if strings.HasPrefix(part.GetImageMedia().Url, "http") {
					// It is a URL: fetch the file's MIME type and base64-encoded data
					fileData, err := service.GetFileBase64FromUrl(part.GetImageMedia().Url)
					if err != nil {
						return nil, fmt.Errorf("get file base64 from url '%s' failed: %w", part.GetImageMedia().Url, err)
					}
					// Verify the MIME type is on Gemini's supported whitelist
					if _, ok := geminiSupportedMimeTypes[strings.ToLower(fileData.MimeType)]; !ok {
						url := part.GetImageMedia().Url
						return nil, fmt.Errorf("mime type is not supported by Gemini: '%s', url: '%s', supported types are: %v", fileData.MimeType, url, getSupportedMimeTypesList())
					}
					parts = append(parts, GeminiPart{
						InlineData: &GeminiInlineData{
							MimeType: fileData.MimeType, // keep the original MimeType; its casing may matter to the API
							Data:     fileData.Base64Data,
						},
					})
				} else {
					format, base64String, err := service.DecodeBase64FileData(part.GetImageMedia().Url)
					if err != nil {
						return nil, fmt.Errorf("decode base64 image data failed: %s", err.Error())
					}
					parts = append(parts, GeminiPart{
						InlineData: &GeminiInlineData{
							MimeType: format,
							Data:     base64String,
						},
					})
				}
			} else if part.Type == dto.ContentTypeFile {
				if part.GetFile().FileId != "" {
					return nil, fmt.Errorf("only base64 file is supported in gemini")
				}
				format, base64String, err := service.DecodeBase64FileData(part.GetFile().FileData)
				if err != nil {
					return nil, fmt.Errorf("decode base64 file data failed: %s", err.Error())
				}
				parts = append(parts, GeminiPart{
					InlineData: &GeminiInlineData{
						MimeType: format,
						Data:     base64String,
					},
				})
			} else if part.Type == dto.ContentTypeInputAudio {
				if part.GetInputAudio().Data == "" {
					return nil, fmt.Errorf("only base64 audio is supported in gemini")
				}
				base64String, err := service.DecodeBase64AudioData(part.GetInputAudio().Data)
				if err != nil {
					return nil, fmt.Errorf("decode base64 audio data failed: %s", err.Error())
				}
				parts = append(parts, GeminiPart{
					InlineData: &GeminiInlineData{
						MimeType: "audio/" + part.GetInputAudio().Format,
						Data:     base64String,
					},
				})
			}
		}
		content.Parts = parts
		// there is no assistant role in Gemini; the API rejects any role other than user or model
		if content.Role == "assistant" {
			content.Role = "model"
		}
		if len(content.Parts) > 0 {
			geminiRequest.Contents = append(geminiRequest.Contents, content)
		}
	}
	if len(system_content) > 0 {
		geminiRequest.SystemInstructions = &GeminiChatContent{
			Parts: []GeminiPart{
				{
					Text: strings.Join(system_content, "\n"),
				},
			},
		}
	}
	return &geminiRequest, nil
}
// Helper function to get a list of supported MIME types for error messages
func getSupportedMimeTypesList() []string {
	keys := make([]string, 0, len(geminiSupportedMimeTypes))
	for k := range geminiSupportedMimeTypes {
		keys = append(keys, k)
	}
	return keys
}
// cleanFunctionParameters recursively removes unsupported fields from Gemini function parameters.
func cleanFunctionParameters(params interface{}) interface{} {
	if params == nil {
		return nil
	}
	switch v := params.(type) {
	case map[string]interface{}:
		// Create a copy to avoid modifying the original
		cleanedMap := make(map[string]interface{})
		for k, val := range v {
			cleanedMap[k] = val
		}
		// Remove unsupported root-level fields
		delete(cleanedMap, "default")
		delete(cleanedMap, "exclusiveMaximum")
		delete(cleanedMap, "exclusiveMinimum")
		delete(cleanedMap, "$schema")
		delete(cleanedMap, "additionalProperties")
		// Check and clean 'format' for string types
		if propType, typeExists := cleanedMap["type"].(string); typeExists && propType == "string" {
			if formatValue, formatExists := cleanedMap["format"].(string); formatExists {
				if formatValue != "enum" && formatValue != "date-time" {
					delete(cleanedMap, "format")
				}
			}
		}
		// Clean properties
		if props, ok := cleanedMap["properties"].(map[string]interface{}); ok && props != nil {
			cleanedProps := make(map[string]interface{})
			for propName, propValue := range props {
				cleanedProps[propName] = cleanFunctionParameters(propValue)
			}
			cleanedMap["properties"] = cleanedProps
		}
		// Recursively clean items in arrays
		if items, ok := cleanedMap["items"].(map[string]interface{}); ok && items != nil {
			cleanedMap["items"] = cleanFunctionParameters(items)
		}
		// Also handle items if it's an array of schemas
		if itemsArray, ok := cleanedMap["items"].([]interface{}); ok {
			cleanedItemsArray := make([]interface{}, len(itemsArray))
			for i, item := range itemsArray {
				cleanedItemsArray[i] = cleanFunctionParameters(item)
			}
			cleanedMap["items"] = cleanedItemsArray
		}
		// Recursively clean other schema composition keywords
		for _, field := range []string{"allOf", "anyOf", "oneOf"} {
			if nested, ok := cleanedMap[field].([]interface{}); ok {
				cleanedNested := make([]interface{}, len(nested))
				for i, item := range nested {
					cleanedNested[i] = cleanFunctionParameters(item)
				}
				cleanedMap[field] = cleanedNested
			}
		}
		// Recursively clean patternProperties
		if patternProps, ok := cleanedMap["patternProperties"].(map[string]interface{}); ok {
			cleanedPatternProps := make(map[string]interface{})
			for pattern, schema := range patternProps {
				cleanedPatternProps[pattern] = cleanFunctionParameters(schema)
			}
			cleanedMap["patternProperties"] = cleanedPatternProps
		}
		// Recursively clean definitions
		if definitions, ok := cleanedMap["definitions"].(map[string]interface{}); ok {
			cleanedDefinitions := make(map[string]interface{})
			for defName, defSchema := range definitions {
				cleanedDefinitions[defName] = cleanFunctionParameters(defSchema)
			}
			cleanedMap["definitions"] = cleanedDefinitions
		}
		// Recursively clean $defs (newer JSON Schema draft)
		if defs, ok := cleanedMap["$defs"].(map[string]interface{}); ok {
			cleanedDefs := make(map[string]interface{})
			for defName, defSchema := range defs {
				cleanedDefs[defName] = cleanFunctionParameters(defSchema)
			}
			cleanedMap["$defs"] = cleanedDefs
		}
		// Clean conditional keywords
		for _, field := range []string{"if", "then", "else", "not"} {
			if nested, ok := cleanedMap[field]; ok {
				cleanedMap[field] = cleanFunctionParameters(nested)
			}
		}
		return cleanedMap
	case []interface{}:
		// Handle arrays of schemas
		cleanedArray := make([]interface{}, len(v))
		for i, item := range v {
			cleanedArray[i] = cleanFunctionParameters(item)
		}
		return cleanedArray
	default:
		// Not a map or array, return as is (e.g., could be a primitive)
		return params
	}
}
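
// removeAdditionalPropertiesWithDepth strips schema fields Gemini does not accept (title, $schema,
// additionalProperties) from a response JSON schema, recursing into object properties,
// allOf/anyOf/oneOf and array items up to a maximum depth of 5.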
func removeAdditionalPropertiesWithDepth(schema interface{}, depth int) interface{} {
	if depth >= 5 {
		return schema
	}
	v, ok := schema.(map[string]interface{})
	if !ok || len(v) == 0 {
		return schema
	}
	// Remove all title fields
	delete(v, "title")
	delete(v, "$schema")
	// If type is neither object nor array, return as-is
	if typeVal, exists := v["type"]; !exists || (typeVal != "object" && typeVal != "array") {
		return schema
	}
	switch v["type"] {
	case "object":
		delete(v, "additionalProperties")
		// Process properties
		if properties, ok := v["properties"].(map[string]interface{}); ok {
			for key, value := range properties {
				properties[key] = removeAdditionalPropertiesWithDepth(value, depth+1)
			}
		}
		for _, field := range []string{"allOf", "anyOf", "oneOf"} {
			if nested, ok := v[field].([]interface{}); ok {
				for i, item := range nested {
					nested[i] = removeAdditionalPropertiesWithDepth(item, depth+1)
				}
			}
		}
	case "array":
		if items, ok := v["items"].(map[string]interface{}); ok {
			v["items"] = removeAdditionalPropertiesWithDepth(items, depth+1)
		}
	}
	return v
}
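
// unescapeString resolves backslash escape sequences (\", \\, \n, \t, ...) in a UTF-8 string.
// Unknown escapes are kept verbatim and invalid UTF-8 input yields an error.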
func unescapeString(s string) (string, error) {
	var result []rune
	escaped := false
	i := 0
	for i < len(s) {
		r, size := utf8.DecodeRuneInString(s[i:]) // decode the UTF-8 character correctly
		if r == utf8.RuneError {
			return "", fmt.Errorf("invalid UTF-8 encoding")
		}
		if escaped {
			// Character following an escape: check which sequence it forms
			switch r {
			case '"':
				result = append(result, '"')
			case '\\':
				result = append(result, '\\')
			case '/':
				result = append(result, '/')
			case 'b':
				result = append(result, '\b')
			case 'f':
				result = append(result, '\f')
			case 'n':
				result = append(result, '\n')
			case 'r':
				result = append(result, '\r')
			case 't':
				result = append(result, '\t')
			case '\'':
				result = append(result, '\'')
			default:
				// Unknown escape sequence: emit it unchanged
				result = append(result, '\\', r)
			}
			escaped = false
		} else {
			if r == '\\' {
				escaped = true // remember the backslash as an escape marker
			} else {
				result = append(result, r)
			}
		}
		i += size // advance to the next character
	}
	return string(result), nil
}
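
// unescapeMapOrSlice walks a decoded JSON value (maps, slices, strings) and applies
// unescapeString to every string it contains.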
func unescapeMapOrSlice(data interface{}) interface{} {
	switch v := data.(type) {
	case map[string]interface{}:
		for k, val := range v {
			v[k] = unescapeMapOrSlice(val)
		}
	case []interface{}:
		for i, val := range v {
			v[i] = unescapeMapOrSlice(val)
		}
	case string:
		if unescaped, err := unescapeString(v); err != nil {
			return v
		} else {
			return unescaped
		}
	}
	return data
}
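
// getResponseToolCall converts a Gemini functionCall part into an OpenAI-style tool call with a
// freshly generated call ID; it returns nil if the arguments cannot be marshalled to JSON.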
func getResponseToolCall(item *GeminiPart) *dto.ToolCallResponse {
	var argsBytes []byte
	var err error
	if result, ok := item.FunctionCall.Arguments.(map[string]interface{}); ok {
		argsBytes, err = json.Marshal(unescapeMapOrSlice(result))
	} else {
		argsBytes, err = json.Marshal(item.FunctionCall.Arguments)
	}
	if err != nil {
		return nil
	}
	return &dto.ToolCallResponse{
		ID:   fmt.Sprintf("call_%s", common.GetUUID()),
		Type: "function",
		Function: dto.FunctionResponse{
			Arguments: string(argsBytes),
			Name:      item.FunctionCall.FunctionName,
		},
	}
}
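
// responseGeminiChat2OpenAI maps a non-streaming Gemini chat response to an OpenAI chat completion
// response, converting candidates, tool calls, thought parts, code-execution parts and finish reasons.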
func responseGeminiChat2OpenAI(c *gin.Context, response *GeminiChatResponse) *dto.OpenAITextResponse {
	fullTextResponse := dto.OpenAITextResponse{
		Id:      helper.GetResponseID(c),
		Object:  "chat.completion",
		Created: common.GetTimestamp(),
		Choices: make([]dto.OpenAITextResponseChoice, 0, len(response.Candidates)),
	}
	isToolCall := false
	for _, candidate := range response.Candidates {
		choice := dto.OpenAITextResponseChoice{
			Index: int(candidate.Index),
			Message: dto.Message{
				Role:    "assistant",
				Content: "",
			},
			FinishReason: constant.FinishReasonStop,
		}
		if len(candidate.Content.Parts) > 0 {
			var texts []string
			var toolCalls []dto.ToolCallResponse
			for _, part := range candidate.Content.Parts {
				if part.FunctionCall != nil {
					choice.FinishReason = constant.FinishReasonToolCalls
					if call := getResponseToolCall(&part); call != nil {
						toolCalls = append(toolCalls, *call)
					}
				} else if part.Thought {
					choice.Message.ReasoningContent = part.Text
				} else {
					if part.ExecutableCode != nil {
						texts = append(texts, "```"+part.ExecutableCode.Language+"\n"+part.ExecutableCode.Code+"\n```")
					} else if part.CodeExecutionResult != nil {
						texts = append(texts, "```output\n"+part.CodeExecutionResult.Output+"\n```")
					} else {
						// Filter out blank lines
						if part.Text != "\n" {
							texts = append(texts, part.Text)
						}
					}
				}
			}
			if len(toolCalls) > 0 {
				choice.Message.SetToolCalls(toolCalls)
				isToolCall = true
			}
			choice.Message.SetStringContent(strings.Join(texts, "\n"))
		}
		if candidate.FinishReason != nil {
			switch *candidate.FinishReason {
			case "STOP":
				choice.FinishReason = constant.FinishReasonStop
			case "MAX_TOKENS":
				choice.FinishReason = constant.FinishReasonLength
			default:
				choice.FinishReason = constant.FinishReasonContentFilter
			}
		}
		if isToolCall {
			choice.FinishReason = constant.FinishReasonToolCalls
		}
		fullTextResponse.Choices = append(fullTextResponse.Choices, choice)
	}
	return &fullTextResponse
}
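
// streamResponseGeminiChat2OpenAI converts one streamed Gemini chunk into an OpenAI
// chat.completion.chunk. The extra return values report whether the chunk signalled a STOP
// finish reason and whether it contained inline image data.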
func streamResponseGeminiChat2OpenAI(geminiResponse *GeminiChatResponse) (*dto.ChatCompletionsStreamResponse, bool, bool) {
	choices := make([]dto.ChatCompletionsStreamResponseChoice, 0, len(geminiResponse.Candidates))
	isStop := false
	hasImage := false
	for _, candidate := range geminiResponse.Candidates {
		if candidate.FinishReason != nil && *candidate.FinishReason == "STOP" {
			isStop = true
			candidate.FinishReason = nil
		}
		choice := dto.ChatCompletionsStreamResponseChoice{
			Index: int(candidate.Index),
			Delta: dto.ChatCompletionsStreamResponseChoiceDelta{
				Role: "assistant",
			},
		}
		var texts []string
		isTools := false
		isThought := false
		if candidate.FinishReason != nil {
			// p := GeminiConvertFinishReason(*candidate.FinishReason)
			switch *candidate.FinishReason {
			case "STOP":
				choice.FinishReason = &constant.FinishReasonStop
			case "MAX_TOKENS":
				choice.FinishReason = &constant.FinishReasonLength
			default:
				choice.FinishReason = &constant.FinishReasonContentFilter
			}
		}
		for _, part := range candidate.Content.Parts {
			if part.InlineData != nil {
				if strings.HasPrefix(part.InlineData.MimeType, "image") {
					imgText := "![image](data:" + part.InlineData.MimeType + ";base64," + part.InlineData.Data + ")"
					texts = append(texts, imgText)
					hasImage = true
				}
			} else if part.FunctionCall != nil {
				isTools = true
				if call := getResponseToolCall(&part); call != nil {
					call.SetIndex(len(choice.Delta.ToolCalls))
					choice.Delta.ToolCalls = append(choice.Delta.ToolCalls, *call)
				}
			} else if part.Thought {
				isThought = true
				texts = append(texts, part.Text)
			} else {
				if part.ExecutableCode != nil {
					texts = append(texts, "```"+part.ExecutableCode.Language+"\n"+part.ExecutableCode.Code+"\n```\n")
				} else if part.CodeExecutionResult != nil {
					texts = append(texts, "```output\n"+part.CodeExecutionResult.Output+"\n```\n")
				} else {
					if part.Text != "\n" {
						texts = append(texts, part.Text)
					}
				}
			}
		}
		if isThought {
			choice.Delta.SetReasoningContent(strings.Join(texts, "\n"))
		} else {
			choice.Delta.SetContentString(strings.Join(texts, "\n"))
		}
		if isTools {
			choice.FinishReason = &constant.FinishReasonToolCalls
		}
		choices = append(choices, choice)
	}
	var response dto.ChatCompletionsStreamResponse
	response.Object = "chat.completion.chunk"
	response.Choices = choices
	return &response, isStop, hasImage
}
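
// GeminiChatStreamHandler relays a streaming Gemini response to the client as OpenAI-compatible
// chunks, tracking token usage from each chunk's usageMetadata and emitting a final usage chunk
// when the caller requested it.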
func GeminiChatStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
	// responseText := ""
	id := helper.GetResponseID(c)
	createAt := common.GetTimestamp()
	var usage = &dto.Usage{}
	var imageCount int
	helper.StreamScannerHandler(c, resp, info, func(data string) bool {
		var geminiResponse GeminiChatResponse
		err := common.UnmarshalJsonStr(data, &geminiResponse)
		if err != nil {
			common.LogError(c, "error unmarshalling stream response: "+err.Error())
			return false
		}
		response, isStop, hasImage := streamResponseGeminiChat2OpenAI(&geminiResponse)
		if hasImage {
			imageCount++
		}
		response.Id = id
		response.Created = createAt
		response.Model = info.UpstreamModelName
		if geminiResponse.UsageMetadata.TotalTokenCount != 0 {
			usage.PromptTokens = geminiResponse.UsageMetadata.PromptTokenCount
			usage.CompletionTokens = geminiResponse.UsageMetadata.CandidatesTokenCount
			usage.CompletionTokenDetails.ReasoningTokens = geminiResponse.UsageMetadata.ThoughtsTokenCount
			usage.TotalTokens = geminiResponse.UsageMetadata.TotalTokenCount
			for _, detail := range geminiResponse.UsageMetadata.PromptTokensDetails {
				if detail.Modality == "AUDIO" {
					usage.PromptTokensDetails.AudioTokens = detail.TokenCount
				} else if detail.Modality == "TEXT" {
					usage.PromptTokensDetails.TextTokens = detail.TokenCount
				}
			}
		}
		err = helper.ObjectData(c, response)
		if err != nil {
			common.LogError(c, err.Error())
		}
		if isStop {
			response := helper.GenerateStopResponse(id, createAt, info.UpstreamModelName, constant.FinishReasonStop)
			helper.ObjectData(c, response)
		}
		return true
	})
	var response *dto.ChatCompletionsStreamResponse
	if imageCount != 0 {
		if usage.CompletionTokens == 0 {
			usage.CompletionTokens = imageCount * 258
		}
	}
	usage.PromptTokensDetails.TextTokens = usage.PromptTokens
	usage.CompletionTokens = usage.TotalTokens - usage.PromptTokens
	if info.ShouldIncludeUsage {
		response = helper.GenerateFinalUsageResponse(id, createAt, info.UpstreamModelName, *usage)
		err := helper.ObjectData(c, response)
		if err != nil {
			common.SysError("send final response failed: " + err.Error())
		}
	}
	helper.Done(c)
	//resp.Body.Close()
	return usage, nil
}
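
// GeminiChatHandler handles a non-streaming Gemini chat response: it converts the upstream body
// into the OpenAI format, derives usage from usageMetadata and writes the JSON response back to
// the client.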
func GeminiChatHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
	responseBody, err := io.ReadAll(resp.Body)
	if err != nil {
		return nil, types.NewError(err, types.ErrorCodeBadResponseBody)
	}
	common.CloseResponseBodyGracefully(resp)
	if common.DebugEnabled {
		println(string(responseBody))
	}
	var geminiResponse GeminiChatResponse
	err = common.Unmarshal(responseBody, &geminiResponse)
	if err != nil {
		return nil, types.NewError(err, types.ErrorCodeBadResponseBody)
	}
	if len(geminiResponse.Candidates) == 0 {
		return nil, types.NewError(errors.New("no candidates returned"), types.ErrorCodeBadResponseBody)
	}
	fullTextResponse := responseGeminiChat2OpenAI(c, &geminiResponse)
	fullTextResponse.Model = info.UpstreamModelName
	usage := dto.Usage{
		PromptTokens:     geminiResponse.UsageMetadata.PromptTokenCount,
		CompletionTokens: geminiResponse.UsageMetadata.CandidatesTokenCount,
		TotalTokens:      geminiResponse.UsageMetadata.TotalTokenCount,
	}
	usage.CompletionTokenDetails.ReasoningTokens = geminiResponse.UsageMetadata.ThoughtsTokenCount
	usage.CompletionTokens = usage.TotalTokens - usage.PromptTokens
	for _, detail := range geminiResponse.UsageMetadata.PromptTokensDetails {
		if detail.Modality == "AUDIO" {
			usage.PromptTokensDetails.AudioTokens = detail.TokenCount
		} else if detail.Modality == "TEXT" {
			usage.PromptTokensDetails.TextTokens = detail.TokenCount
		}
	}
	fullTextResponse.Usage = usage
	jsonResponse, err := json.Marshal(fullTextResponse)
	if err != nil {
		return nil, types.NewError(err, types.ErrorCodeBadResponseBody)
	}
	c.Writer.Header().Set("Content-Type", "application/json")
	c.Writer.WriteHeader(resp.StatusCode)
	c.Writer.Write(jsonResponse)
	return &usage, nil
}
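
// GeminiEmbeddingHandler converts a Gemini embedding response into the OpenAI embedding format;
// usage is billed on input tokens only, following OpenAI's embedding billing convention.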
func GeminiEmbeddingHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
	defer common.CloseResponseBodyGracefully(resp)
	responseBody, readErr := io.ReadAll(resp.Body)
	if readErr != nil {
		return nil, types.NewError(readErr, types.ErrorCodeBadResponseBody)
	}
	var geminiResponse GeminiEmbeddingResponse
	if jsonErr := common.Unmarshal(responseBody, &geminiResponse); jsonErr != nil {
		return nil, types.NewError(jsonErr, types.ErrorCodeBadResponseBody)
	}
	// convert to openai format response
	openAIResponse := dto.OpenAIEmbeddingResponse{
		Object: "list",
		Data: []dto.OpenAIEmbeddingResponseItem{
			{
				Object:    "embedding",
				Embedding: geminiResponse.Embedding.Values,
				Index:     0,
			},
		},
		Model: info.UpstreamModelName,
	}
	// calculate usage
	// https://ai.google.dev/gemini-api/docs/pricing?hl=zh-cn#text-embedding-004
	// Google has not yet clarified how embedding models will be billed,
	// so follow OpenAI's billing method and bill on input tokens
	// https://platform.openai.com/docs/guides/embeddings#what-are-embeddings
	usage := &dto.Usage{
		PromptTokens:     info.PromptTokens,
		CompletionTokens: 0,
		TotalTokens:      info.PromptTokens,
	}
	openAIResponse.Usage = *usage
	jsonResponse, jsonErr := common.Marshal(openAIResponse)
	if jsonErr != nil {
		return nil, types.NewError(jsonErr, types.ErrorCodeBadResponseBody)
	}
	common.IOCopyBytesGracefully(c, resp, jsonResponse)
	return usage, nil
}