Quellcode durchsuchen

Merge pull request #1957 from seefs001/pr/custom-currency-1923

💱 feat(settings): introduce site-wide quota display type
Calcium-Ion vor 2 Monaten
Ursprung
Commit
cc6fcebda1

+ 1 - 0
common/constants.go

@@ -19,6 +19,7 @@ var TopUpLink = ""
 // var ChatLink = ""
 // var ChatLink2 = ""
 var QuotaPerUnit = 500 * 1000.0 // $0.002 / 1K tokens
+// 保留旧变量以兼容历史逻辑,实际展示由 general_setting.quota_display_type 控制
 var DisplayInCurrencyEnabled = true
 var DisplayTokenStatEnabled = true
 var DrawingEnabled = true

+ 1 - 1
common/database.go

@@ -12,4 +12,4 @@ var LogSqlType = DatabaseTypeSQLite // Default to SQLite for logging SQL queries
 var UsingMySQL = false
 var UsingClickHouse = false
 
-var SQLitePath = "one-api.db?_busy_timeout=30000"
+var SQLitePath = "one-api.db?_busy_timeout=30000"

+ 3 - 3
constant/api_type.go

@@ -31,7 +31,7 @@ const (
 	APITypeXai
 	APITypeCoze
 	APITypeJimeng
-     APITypeMoonshot
-     APITypeSubmodel
-     APITypeDummy    // this one is only for count, do not add any channel after this
+	APITypeMoonshot
+	APITypeSubmodel
+	APITypeDummy // this one is only for count, do not add any channel after this
 )

+ 20 - 4
controller/billing.go

@@ -5,6 +5,7 @@ import (
 	"one-api/common"
 	"one-api/dto"
 	"one-api/model"
+	"one-api/setting/operation_setting"
 )
 
 func GetSubscription(c *gin.Context) {
@@ -39,8 +40,18 @@ func GetSubscription(c *gin.Context) {
 	}
 	quota := remainQuota + usedQuota
 	amount := float64(quota)
-	if common.DisplayInCurrencyEnabled {
-		amount /= common.QuotaPerUnit
+	// OpenAI 兼容接口中的 *_USD 字段含义保持“额度单位”对应值:
+	// 我们将其解释为以“站点展示类型”为准:
+	// - USD: 直接除以 QuotaPerUnit
+	// - CNY: 先转 USD 再乘汇率
+	// - TOKENS: 直接使用 tokens 数量
+	switch operation_setting.GetQuotaDisplayType() {
+	case operation_setting.QuotaDisplayTypeCNY:
+		amount = amount / common.QuotaPerUnit * operation_setting.USDExchangeRate
+	case operation_setting.QuotaDisplayTypeTokens:
+		// amount 保持 tokens 数值
+	default:
+		amount = amount / common.QuotaPerUnit
 	}
 	if token != nil && token.UnlimitedQuota {
 		amount = 100000000
@@ -80,8 +91,13 @@ func GetUsage(c *gin.Context) {
 		return
 	}
 	amount := float64(quota)
-	if common.DisplayInCurrencyEnabled {
-		amount /= common.QuotaPerUnit
+	switch operation_setting.GetQuotaDisplayType() {
+	case operation_setting.QuotaDisplayTypeCNY:
+		amount = amount / common.QuotaPerUnit * operation_setting.USDExchangeRate
+	case operation_setting.QuotaDisplayTypeTokens:
+		// tokens 保持原值
+	default:
+		amount = amount / common.QuotaPerUnit
 	}
 	usage := OpenAIUsageResponse{
 		Object:     "list",

+ 16 - 12
controller/misc.go

@@ -66,18 +66,22 @@ func GetStatus(c *gin.Context) {
 		"top_up_link":                 common.TopUpLink,
 		"docs_link":                   operation_setting.GetGeneralSetting().DocsLink,
 		"quota_per_unit":              common.QuotaPerUnit,
-		"display_in_currency":         common.DisplayInCurrencyEnabled,
-		"enable_batch_update":         common.BatchUpdateEnabled,
-		"enable_drawing":              common.DrawingEnabled,
-		"enable_task":                 common.TaskEnabled,
-		"enable_data_export":          common.DataExportEnabled,
-		"data_export_default_time":    common.DataExportDefaultTime,
-		"default_collapse_sidebar":    common.DefaultCollapseSidebar,
-		"mj_notify_enabled":           setting.MjNotifyEnabled,
-		"chats":                       setting.Chats,
-		"demo_site_enabled":           operation_setting.DemoSiteEnabled,
-		"self_use_mode_enabled":       operation_setting.SelfUseModeEnabled,
-		"default_use_auto_group":      setting.DefaultUseAutoGroup,
+		// 兼容旧前端:保留 display_in_currency,同时提供新的 quota_display_type
+		"display_in_currency":           operation_setting.IsCurrencyDisplay(),
+		"quota_display_type":            operation_setting.GetQuotaDisplayType(),
+		"custom_currency_symbol":        operation_setting.GetGeneralSetting().CustomCurrencySymbol,
+		"custom_currency_exchange_rate": operation_setting.GetGeneralSetting().CustomCurrencyExchangeRate,
+		"enable_batch_update":           common.BatchUpdateEnabled,
+		"enable_drawing":                common.DrawingEnabled,
+		"enable_task":                   common.TaskEnabled,
+		"enable_data_export":            common.DataExportEnabled,
+		"data_export_default_time":      common.DataExportDefaultTime,
+		"default_collapse_sidebar":      common.DefaultCollapseSidebar,
+		"mj_notify_enabled":             setting.MjNotifyEnabled,
+		"chats":                         setting.Chats,
+		"demo_site_enabled":             operation_setting.DemoSiteEnabled,
+		"self_use_mode_enabled":         operation_setting.SelfUseModeEnabled,
+		"default_use_auto_group":        setting.DefaultUseAutoGroup,
 
 		"usd_exchange_rate": operation_setting.USDExchangeRate,
 		"price":             operation_setting.Price,

+ 1 - 1
controller/setup.go

@@ -178,4 +178,4 @@ func boolToString(b bool) string {
 		return "true"
 	}
 	return "false"
-}
+}

+ 5 - 4
controller/topup.go

@@ -86,8 +86,9 @@ func GetEpayClient() *epay.Client {
 
 func getPayMoney(amount int64, group string) float64 {
 	dAmount := decimal.NewFromInt(amount)
-
-	if !common.DisplayInCurrencyEnabled {
+	// 充值金额以“展示类型”为准:
+	// - USD/CNY: 前端传 amount 为金额单位;TOKENS: 前端传 tokens,需要换成 USD 金额
+	if operation_setting.GetQuotaDisplayType() == operation_setting.QuotaDisplayTypeTokens {
 		dQuotaPerUnit := decimal.NewFromFloat(common.QuotaPerUnit)
 		dAmount = dAmount.Div(dQuotaPerUnit)
 	}
@@ -115,7 +116,7 @@ func getPayMoney(amount int64, group string) float64 {
 
 func getMinTopup() int64 {
 	minTopup := operation_setting.MinTopUp
-	if !common.DisplayInCurrencyEnabled {
+	if operation_setting.GetQuotaDisplayType() == operation_setting.QuotaDisplayTypeTokens {
 		dMinTopup := decimal.NewFromInt(int64(minTopup))
 		dQuotaPerUnit := decimal.NewFromFloat(common.QuotaPerUnit)
 		minTopup = int(dMinTopup.Mul(dQuotaPerUnit).IntPart())
@@ -176,7 +177,7 @@ func RequestEpay(c *gin.Context) {
 		return
 	}
 	amount := req.Amount
-	if !common.DisplayInCurrencyEnabled {
+	if operation_setting.GetQuotaDisplayType() == operation_setting.QuotaDisplayTypeTokens {
 		dAmount := decimal.NewFromInt(int64(amount))
 		dQuotaPerUnit := decimal.NewFromFloat(common.QuotaPerUnit)
 		amount = dAmount.Div(dQuotaPerUnit).IntPart()

+ 2 - 2
controller/topup_stripe.go

@@ -258,7 +258,7 @@ func GetChargedAmount(count float64, user model.User) float64 {
 
 func getStripePayMoney(amount float64, group string) float64 {
 	originalAmount := amount
-	if !common.DisplayInCurrencyEnabled {
+	if operation_setting.GetQuotaDisplayType() == operation_setting.QuotaDisplayTypeTokens {
 		amount = amount / common.QuotaPerUnit
 	}
 	// Using float64 for monetary calculations is acceptable here due to the small amounts involved
@@ -279,7 +279,7 @@ func getStripePayMoney(amount float64, group string) float64 {
 
 func getStripeMinTopup() int64 {
 	minTopup := setting.StripeMinTopUp
-	if !common.DisplayInCurrencyEnabled {
+	if operation_setting.GetQuotaDisplayType() == operation_setting.QuotaDisplayTypeTokens {
 		minTopup = minTopup * int(common.QuotaPerUnit)
 	}
 	return int64(minTopup)

+ 44 - 6
logger/logger.go

@@ -7,6 +7,7 @@ import (
 	"io"
 	"log"
 	"one-api/common"
+	"one-api/setting/operation_setting"
 	"os"
 	"path/filepath"
 	"sync"
@@ -92,18 +93,55 @@ func logHelper(ctx context.Context, level string, msg string) {
 }
 
 func LogQuota(quota int) string {
-	if common.DisplayInCurrencyEnabled {
-		return fmt.Sprintf("$%.6f 额度", float64(quota)/common.QuotaPerUnit)
-	} else {
+	// 新逻辑:根据额度展示类型输出
+	q := float64(quota)
+	switch operation_setting.GetQuotaDisplayType() {
+	case operation_setting.QuotaDisplayTypeCNY:
+		usd := q / common.QuotaPerUnit
+		cny := usd * operation_setting.USDExchangeRate
+		return fmt.Sprintf("¥%.6f 额度", cny)
+	case operation_setting.QuotaDisplayTypeCustom:
+		usd := q / common.QuotaPerUnit
+		rate := operation_setting.GetGeneralSetting().CustomCurrencyExchangeRate
+		symbol := operation_setting.GetGeneralSetting().CustomCurrencySymbol
+		if symbol == "" {
+			symbol = "¤"
+		}
+		if rate <= 0 {
+			rate = 1
+		}
+		v := usd * rate
+		return fmt.Sprintf("%s%.6f 额度", symbol, v)
+	case operation_setting.QuotaDisplayTypeTokens:
 		return fmt.Sprintf("%d 点额度", quota)
+	default: // USD
+		return fmt.Sprintf("$%.6f 额度", q/common.QuotaPerUnit)
 	}
 }
 
 func FormatQuota(quota int) string {
-	if common.DisplayInCurrencyEnabled {
-		return fmt.Sprintf("$%.6f", float64(quota)/common.QuotaPerUnit)
-	} else {
+	q := float64(quota)
+	switch operation_setting.GetQuotaDisplayType() {
+	case operation_setting.QuotaDisplayTypeCNY:
+		usd := q / common.QuotaPerUnit
+		cny := usd * operation_setting.USDExchangeRate
+		return fmt.Sprintf("¥%.6f", cny)
+	case operation_setting.QuotaDisplayTypeCustom:
+		usd := q / common.QuotaPerUnit
+		rate := operation_setting.GetGeneralSetting().CustomCurrencyExchangeRate
+		symbol := operation_setting.GetGeneralSetting().CustomCurrencySymbol
+		if symbol == "" {
+			symbol = "¤"
+		}
+		if rate <= 0 {
+			rate = 1
+		}
+		v := usd * rate
+		return fmt.Sprintf("%s%.6f", symbol, v)
+	case operation_setting.QuotaDisplayTypeTokens:
 		return fmt.Sprintf("%d", quota)
+	default:
+		return fmt.Sprintf("$%.6f", q/common.QuotaPerUnit)
 	}
 }
 

+ 9 - 1
model/option.go

@@ -240,7 +240,15 @@ func updateOptionMap(key string, value string) (err error) {
 		case "LogConsumeEnabled":
 			common.LogConsumeEnabled = boolValue
 		case "DisplayInCurrencyEnabled":
-			common.DisplayInCurrencyEnabled = boolValue
+			// 兼容旧字段:同步到新配置 general_setting.quota_display_type(运行时生效)
+			// true -> USD, false -> TOKENS
+			newVal := "USD"
+			if !boolValue {
+				newVal = "TOKENS"
+			}
+			if cfg := config.GlobalConfig.Get("general_setting"); cfg != nil {
+				_ = config.UpdateConfigFromMap(cfg, map[string]string{"quota_display_type": newVal})
+			}
 		case "DisplayTokenStatEnabled":
 			common.DisplayTokenStatEnabled = boolValue
 		case "DrawingEnabled":

+ 22 - 8
relay/channel/ollama/adaptor.go

@@ -18,7 +18,9 @@ import (
 type Adaptor struct {
 }
 
-func (a *Adaptor) ConvertGeminiRequest(*gin.Context, *relaycommon.RelayInfo, *dto.GeminiChatRequest) (any, error) { return nil, errors.New("not implemented") }
+func (a *Adaptor) ConvertGeminiRequest(*gin.Context, *relaycommon.RelayInfo, *dto.GeminiChatRequest) (any, error) {
+	return nil, errors.New("not implemented")
+}
 
 func (a *Adaptor) ConvertClaudeRequest(c *gin.Context, info *relaycommon.RelayInfo, request *dto.ClaudeRequest) (any, error) {
 	openaiAdaptor := openai.Adaptor{}
@@ -33,17 +35,25 @@ func (a *Adaptor) ConvertClaudeRequest(c *gin.Context, info *relaycommon.RelayIn
 	return openAIChatToOllamaChat(c, openaiRequest.(*dto.GeneralOpenAIRequest))
 }
 
-func (a *Adaptor) ConvertAudioRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.AudioRequest) (io.Reader, error) { return nil, errors.New("not implemented") }
+func (a *Adaptor) ConvertAudioRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.AudioRequest) (io.Reader, error) {
+	return nil, errors.New("not implemented")
+}
 
-func (a *Adaptor) ConvertImageRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.ImageRequest) (any, error) { return nil, errors.New("not implemented") }
+func (a *Adaptor) ConvertImageRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.ImageRequest) (any, error) {
+	return nil, errors.New("not implemented")
+}
 
 func (a *Adaptor) Init(info *relaycommon.RelayInfo) {
 }
 
 func (a *Adaptor) GetRequestURL(info *relaycommon.RelayInfo) (string, error) {
-    if info.RelayMode == relayconstant.RelayModeEmbeddings { return info.ChannelBaseUrl + "/api/embed", nil }
-    if strings.Contains(info.RequestURLPath, "/v1/completions") || info.RelayMode == relayconstant.RelayModeCompletions { return info.ChannelBaseUrl + "/api/generate", nil }
-    return info.ChannelBaseUrl + "/api/chat", nil
+	if info.RelayMode == relayconstant.RelayModeEmbeddings {
+		return info.ChannelBaseUrl + "/api/embed", nil
+	}
+	if strings.Contains(info.RequestURLPath, "/v1/completions") || info.RelayMode == relayconstant.RelayModeCompletions {
+		return info.ChannelBaseUrl + "/api/generate", nil
+	}
+	return info.ChannelBaseUrl + "/api/chat", nil
 }
 
 func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Header, info *relaycommon.RelayInfo) error {
@@ -53,7 +63,9 @@ func (a *Adaptor) SetupRequestHeader(c *gin.Context, req *http.Header, info *rel
 }
 
 func (a *Adaptor) ConvertOpenAIRequest(c *gin.Context, info *relaycommon.RelayInfo, request *dto.GeneralOpenAIRequest) (any, error) {
-	if request == nil { return nil, errors.New("request is nil") }
+	if request == nil {
+		return nil, errors.New("request is nil")
+	}
 	// decide generate or chat
 	if strings.Contains(info.RequestURLPath, "/v1/completions") || info.RelayMode == relayconstant.RelayModeCompletions {
 		return openAIToGenerate(c, request)
@@ -69,7 +81,9 @@ func (a *Adaptor) ConvertEmbeddingRequest(c *gin.Context, info *relaycommon.Rela
 	return requestOpenAI2Embeddings(request), nil
 }
 
-func (a *Adaptor) ConvertOpenAIResponsesRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.OpenAIResponsesRequest) (any, error) { return nil, errors.New("not implemented") }
+func (a *Adaptor) ConvertOpenAIResponsesRequest(c *gin.Context, info *relaycommon.RelayInfo, request dto.OpenAIResponsesRequest) (any, error) {
+	return nil, errors.New("not implemented")
+}
 
 func (a *Adaptor) DoRequest(c *gin.Context, info *relaycommon.RelayInfo, requestBody io.Reader) (any, error) {
 	return channel.DoApiRequest(a, c, info, requestBody)

+ 22 - 23
relay/channel/ollama/dto.go

@@ -5,12 +5,12 @@ import (
 )
 
 type OllamaChatMessage struct {
-	Role      string            `json:"role"`
-	Content   string            `json:"content,omitempty"`
-	Images    []string          `json:"images,omitempty"`
-	ToolCalls []OllamaToolCall  `json:"tool_calls,omitempty"`
-	ToolName  string            `json:"tool_name,omitempty"`
-	Thinking  json.RawMessage   `json:"thinking,omitempty"`
+	Role      string           `json:"role"`
+	Content   string           `json:"content,omitempty"`
+	Images    []string         `json:"images,omitempty"`
+	ToolCalls []OllamaToolCall `json:"tool_calls,omitempty"`
+	ToolName  string           `json:"tool_name,omitempty"`
+	Thinking  json.RawMessage  `json:"thinking,omitempty"`
 }
 
 type OllamaToolFunction struct {
@@ -20,7 +20,7 @@ type OllamaToolFunction struct {
 }
 
 type OllamaTool struct {
-	Type     string            `json:"type"`
+	Type     string             `json:"type"`
 	Function OllamaToolFunction `json:"function"`
 }
 
@@ -43,28 +43,27 @@ type OllamaChatRequest struct {
 }
 
 type OllamaGenerateRequest struct {
-	Model     string         `json:"model"`
-	Prompt    string         `json:"prompt,omitempty"`
-	Suffix    string         `json:"suffix,omitempty"`
-	Images    []string       `json:"images,omitempty"`
-	Format    interface{}    `json:"format,omitempty"`
-	Stream    bool           `json:"stream,omitempty"`
-	Options   map[string]any `json:"options,omitempty"`
-	KeepAlive interface{}    `json:"keep_alive,omitempty"`
+	Model     string          `json:"model"`
+	Prompt    string          `json:"prompt,omitempty"`
+	Suffix    string          `json:"suffix,omitempty"`
+	Images    []string        `json:"images,omitempty"`
+	Format    interface{}     `json:"format,omitempty"`
+	Stream    bool            `json:"stream,omitempty"`
+	Options   map[string]any  `json:"options,omitempty"`
+	KeepAlive interface{}     `json:"keep_alive,omitempty"`
 	Think     json.RawMessage `json:"think,omitempty"`
 }
 
 type OllamaEmbeddingRequest struct {
-	Model     string         `json:"model"`
-	Input     interface{}    `json:"input"`
-	Options   map[string]any `json:"options,omitempty"`
+	Model      string         `json:"model"`
+	Input      interface{}    `json:"input"`
+	Options    map[string]any `json:"options,omitempty"`
 	Dimensions int            `json:"dimensions,omitempty"`
 }
 
 type OllamaEmbeddingResponse struct {
-	Error           string        `json:"error,omitempty"`
-	Model           string        `json:"model"`
-	Embeddings      [][]float64   `json:"embeddings"`
-	PromptEvalCount int           `json:"prompt_eval_count,omitempty"`
+	Error           string      `json:"error,omitempty"`
+	Model           string      `json:"model"`
+	Embeddings      [][]float64 `json:"embeddings"`
+	PromptEvalCount int         `json:"prompt_eval_count,omitempty"`
 }
-

+ 144 - 50
relay/channel/ollama/relay-ollama.go

@@ -35,13 +35,27 @@ func openAIChatToOllamaChat(c *gin.Context, r *dto.GeneralOpenAIRequest) (*Ollam
 	}
 
 	// options mapping
-	if r.Temperature != nil { chatReq.Options["temperature"] = r.Temperature }
-	if r.TopP != 0 { chatReq.Options["top_p"] = r.TopP }
-	if r.TopK != 0 { chatReq.Options["top_k"] = r.TopK }
-	if r.FrequencyPenalty != 0 { chatReq.Options["frequency_penalty"] = r.FrequencyPenalty }
-	if r.PresencePenalty != 0 { chatReq.Options["presence_penalty"] = r.PresencePenalty }
-	if r.Seed != 0 { chatReq.Options["seed"] = int(r.Seed) }
-	if mt := r.GetMaxTokens(); mt != 0 { chatReq.Options["num_predict"] = int(mt) }
+	if r.Temperature != nil {
+		chatReq.Options["temperature"] = r.Temperature
+	}
+	if r.TopP != 0 {
+		chatReq.Options["top_p"] = r.TopP
+	}
+	if r.TopK != 0 {
+		chatReq.Options["top_k"] = r.TopK
+	}
+	if r.FrequencyPenalty != 0 {
+		chatReq.Options["frequency_penalty"] = r.FrequencyPenalty
+	}
+	if r.PresencePenalty != 0 {
+		chatReq.Options["presence_penalty"] = r.PresencePenalty
+	}
+	if r.Seed != 0 {
+		chatReq.Options["seed"] = int(r.Seed)
+	}
+	if mt := r.GetMaxTokens(); mt != 0 {
+		chatReq.Options["num_predict"] = int(mt)
+	}
 
 	if r.Stop != nil {
 		switch v := r.Stop.(type) {
@@ -50,21 +64,27 @@ func openAIChatToOllamaChat(c *gin.Context, r *dto.GeneralOpenAIRequest) (*Ollam
 		case []string:
 			chatReq.Options["stop"] = v
 		case []any:
-			arr := make([]string,0,len(v))
-			for _, i := range v { if s,ok:=i.(string); ok { arr = append(arr,s) } }
-			if len(arr)>0 { chatReq.Options["stop"] = arr }
+			arr := make([]string, 0, len(v))
+			for _, i := range v {
+				if s, ok := i.(string); ok {
+					arr = append(arr, s)
+				}
+			}
+			if len(arr) > 0 {
+				chatReq.Options["stop"] = arr
+			}
 		}
 	}
 
 	if len(r.Tools) > 0 {
-		tools := make([]OllamaTool,0,len(r.Tools))
+		tools := make([]OllamaTool, 0, len(r.Tools))
 		for _, t := range r.Tools {
 			tools = append(tools, OllamaTool{Type: "function", Function: OllamaToolFunction{Name: t.Function.Name, Description: t.Function.Description, Parameters: t.Function.Parameters}})
 		}
 		chatReq.Tools = tools
 	}
 
-	chatReq.Messages = make([]OllamaChatMessage,0,len(r.Messages))
+	chatReq.Messages = make([]OllamaChatMessage, 0, len(r.Messages))
 	for _, m := range r.Messages {
 		var textBuilder strings.Builder
 		var images []string
@@ -79,14 +99,20 @@ func openAIChatToOllamaChat(c *gin.Context, r *dto.GeneralOpenAIRequest) (*Ollam
 						var base64Data string
 						if strings.HasPrefix(img.Url, "http") {
 							fileData, err := service.GetFileBase64FromUrl(c, img.Url, "fetch image for ollama chat")
-							if err != nil { return nil, err }
+							if err != nil {
+								return nil, err
+							}
 							base64Data = fileData.Base64Data
 						} else if strings.HasPrefix(img.Url, "data:") {
-							if idx := strings.Index(img.Url, ","); idx != -1 && idx+1 < len(img.Url) { base64Data = img.Url[idx+1:] }
+							if idx := strings.Index(img.Url, ","); idx != -1 && idx+1 < len(img.Url) {
+								base64Data = img.Url[idx+1:]
+							}
 						} else {
 							base64Data = img.Url
 						}
-						if base64Data != "" { images = append(images, base64Data) }
+						if base64Data != "" {
+							images = append(images, base64Data)
+						}
 					}
 				} else if part.Type == dto.ContentTypeText {
 					textBuilder.WriteString(part.Text)
@@ -94,16 +120,24 @@ func openAIChatToOllamaChat(c *gin.Context, r *dto.GeneralOpenAIRequest) (*Ollam
 			}
 		}
 		cm := OllamaChatMessage{Role: m.Role, Content: textBuilder.String()}
-		if len(images)>0 { cm.Images = images }
-		if m.Role == "tool" && m.Name != nil { cm.ToolName = *m.Name }
+		if len(images) > 0 {
+			cm.Images = images
+		}
+		if m.Role == "tool" && m.Name != nil {
+			cm.ToolName = *m.Name
+		}
 		if m.ToolCalls != nil && len(m.ToolCalls) > 0 {
 			parsed := m.ParseToolCalls()
 			if len(parsed) > 0 {
-				calls := make([]OllamaToolCall,0,len(parsed))
+				calls := make([]OllamaToolCall, 0, len(parsed))
 				for _, tc := range parsed {
 					var args interface{}
-					if tc.Function.Arguments != "" { _ = json.Unmarshal([]byte(tc.Function.Arguments), &args) }
-					if args==nil { args = map[string]any{} }
+					if tc.Function.Arguments != "" {
+						_ = json.Unmarshal([]byte(tc.Function.Arguments), &args)
+					}
+					if args == nil {
+						args = map[string]any{}
+					}
 					oc := OllamaToolCall{}
 					oc.Function.Name = tc.Function.Name
 					oc.Function.Arguments = args
@@ -132,28 +166,67 @@ func openAIToGenerate(c *gin.Context, r *dto.GeneralOpenAIRequest) (*OllamaGener
 			gen.Prompt = v
 		case []any:
 			var sb strings.Builder
-			for _, it := range v { if s,ok:=it.(string); ok { sb.WriteString(s) } }
+			for _, it := range v {
+				if s, ok := it.(string); ok {
+					sb.WriteString(s)
+				}
+			}
 			gen.Prompt = sb.String()
 		default:
 			gen.Prompt = fmt.Sprintf("%v", r.Prompt)
 		}
 	}
-	if r.Suffix != nil { if s,ok:=r.Suffix.(string); ok { gen.Suffix = s } }
+	if r.Suffix != nil {
+		if s, ok := r.Suffix.(string); ok {
+			gen.Suffix = s
+		}
+	}
 	if r.ResponseFormat != nil {
-		if r.ResponseFormat.Type == "json" { gen.Format = "json" } else if r.ResponseFormat.Type == "json_schema" { var schema any; _ = json.Unmarshal(r.ResponseFormat.JsonSchema,&schema); gen.Format=schema }
-	}
-	if r.Temperature != nil { gen.Options["temperature"] = r.Temperature }
-	if r.TopP != 0 { gen.Options["top_p"] = r.TopP }
-	if r.TopK != 0 { gen.Options["top_k"] = r.TopK }
-	if r.FrequencyPenalty != 0 { gen.Options["frequency_penalty"] = r.FrequencyPenalty }
-	if r.PresencePenalty != 0 { gen.Options["presence_penalty"] = r.PresencePenalty }
-	if r.Seed != 0 { gen.Options["seed"] = int(r.Seed) }
-	if mt := r.GetMaxTokens(); mt != 0 { gen.Options["num_predict"] = int(mt) }
+		if r.ResponseFormat.Type == "json" {
+			gen.Format = "json"
+		} else if r.ResponseFormat.Type == "json_schema" {
+			var schema any
+			_ = json.Unmarshal(r.ResponseFormat.JsonSchema, &schema)
+			gen.Format = schema
+		}
+	}
+	if r.Temperature != nil {
+		gen.Options["temperature"] = r.Temperature
+	}
+	if r.TopP != 0 {
+		gen.Options["top_p"] = r.TopP
+	}
+	if r.TopK != 0 {
+		gen.Options["top_k"] = r.TopK
+	}
+	if r.FrequencyPenalty != 0 {
+		gen.Options["frequency_penalty"] = r.FrequencyPenalty
+	}
+	if r.PresencePenalty != 0 {
+		gen.Options["presence_penalty"] = r.PresencePenalty
+	}
+	if r.Seed != 0 {
+		gen.Options["seed"] = int(r.Seed)
+	}
+	if mt := r.GetMaxTokens(); mt != 0 {
+		gen.Options["num_predict"] = int(mt)
+	}
 	if r.Stop != nil {
 		switch v := r.Stop.(type) {
-		case string: gen.Options["stop"] = []string{v}
-		case []string: gen.Options["stop"] = v
-		case []any: arr:=make([]string,0,len(v)); for _,i:= range v { if s,ok:=i.(string); ok { arr=append(arr,s) } }; if len(arr)>0 { gen.Options["stop"]=arr }
+		case string:
+			gen.Options["stop"] = []string{v}
+		case []string:
+			gen.Options["stop"] = v
+		case []any:
+			arr := make([]string, 0, len(v))
+			for _, i := range v {
+				if s, ok := i.(string); ok {
+					arr = append(arr, s)
+				}
+			}
+			if len(arr) > 0 {
+				gen.Options["stop"] = arr
+			}
 		}
 	}
 	return gen, nil
@@ -161,30 +234,51 @@ func openAIToGenerate(c *gin.Context, r *dto.GeneralOpenAIRequest) (*OllamaGener
 
 func requestOpenAI2Embeddings(r dto.EmbeddingRequest) *OllamaEmbeddingRequest {
 	opts := map[string]any{}
-	if r.Temperature != nil { opts["temperature"] = r.Temperature }
-	if r.TopP != 0 { opts["top_p"] = r.TopP }
-	if r.FrequencyPenalty != 0 { opts["frequency_penalty"] = r.FrequencyPenalty }
-	if r.PresencePenalty != 0 { opts["presence_penalty"] = r.PresencePenalty }
-	if r.Seed != 0 { opts["seed"] = int(r.Seed) }
-	if r.Dimensions != 0 { opts["dimensions"] = r.Dimensions }
+	if r.Temperature != nil {
+		opts["temperature"] = r.Temperature
+	}
+	if r.TopP != 0 {
+		opts["top_p"] = r.TopP
+	}
+	if r.FrequencyPenalty != 0 {
+		opts["frequency_penalty"] = r.FrequencyPenalty
+	}
+	if r.PresencePenalty != 0 {
+		opts["presence_penalty"] = r.PresencePenalty
+	}
+	if r.Seed != 0 {
+		opts["seed"] = int(r.Seed)
+	}
+	if r.Dimensions != 0 {
+		opts["dimensions"] = r.Dimensions
+	}
 	input := r.ParseInput()
-	if len(input)==1 { return &OllamaEmbeddingRequest{Model:r.Model, Input: input[0], Options: opts, Dimensions:r.Dimensions} }
-	return &OllamaEmbeddingRequest{Model:r.Model, Input: input, Options: opts, Dimensions:r.Dimensions}
+	if len(input) == 1 {
+		return &OllamaEmbeddingRequest{Model: r.Model, Input: input[0], Options: opts, Dimensions: r.Dimensions}
+	}
+	return &OllamaEmbeddingRequest{Model: r.Model, Input: input, Options: opts, Dimensions: r.Dimensions}
 }
 
 func ollamaEmbeddingHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
 	var oResp OllamaEmbeddingResponse
 	body, err := io.ReadAll(resp.Body)
-	if err != nil { return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError) }
+	if err != nil {
+		return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
+	}
 	service.CloseResponseBodyGracefully(resp)
-	if err = common.Unmarshal(body, &oResp); err != nil { return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError) }
-	if oResp.Error != "" { return nil, types.NewOpenAIError(fmt.Errorf("ollama error: %s", oResp.Error), types.ErrorCodeBadResponseBody, http.StatusInternalServerError) }
-	data := make([]dto.OpenAIEmbeddingResponseItem,0,len(oResp.Embeddings))
-	for i, emb := range oResp.Embeddings { data = append(data, dto.OpenAIEmbeddingResponseItem{Index:i,Object:"embedding",Embedding:emb}) }
-	usage := &dto.Usage{PromptTokens: oResp.PromptEvalCount, CompletionTokens:0, TotalTokens: oResp.PromptEvalCount}
-	embResp := &dto.OpenAIEmbeddingResponse{Object:"list", Data:data, Model: info.UpstreamModelName, Usage:*usage}
+	if err = common.Unmarshal(body, &oResp); err != nil {
+		return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
+	}
+	if oResp.Error != "" {
+		return nil, types.NewOpenAIError(fmt.Errorf("ollama error: %s", oResp.Error), types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
+	}
+	data := make([]dto.OpenAIEmbeddingResponseItem, 0, len(oResp.Embeddings))
+	for i, emb := range oResp.Embeddings {
+		data = append(data, dto.OpenAIEmbeddingResponseItem{Index: i, Object: "embedding", Embedding: emb})
+	}
+	usage := &dto.Usage{PromptTokens: oResp.PromptEvalCount, CompletionTokens: 0, TotalTokens: oResp.PromptEvalCount}
+	embResp := &dto.OpenAIEmbeddingResponse{Object: "list", Data: data, Model: info.UpstreamModelName, Usage: *usage}
 	out, _ := common.Marshal(embResp)
 	service.IOCopyBytesGracefully(c, resp, out)
 	return usage, nil
 }
-

+ 252 - 184
relay/channel/ollama/stream.go

@@ -1,210 +1,278 @@
 package ollama
 
 import (
-    "bufio"
-    "encoding/json"
-    "fmt"
-    "io"
-    "net/http"
-    "one-api/common"
-    "one-api/dto"
-    "one-api/logger"
-    relaycommon "one-api/relay/common"
-    "one-api/relay/helper"
-    "one-api/service"
-    "one-api/types"
-    "strings"
-    "time"
+	"bufio"
+	"encoding/json"
+	"fmt"
+	"io"
+	"net/http"
+	"one-api/common"
+	"one-api/dto"
+	"one-api/logger"
+	relaycommon "one-api/relay/common"
+	"one-api/relay/helper"
+	"one-api/service"
+	"one-api/types"
+	"strings"
+	"time"
 
-    "github.com/gin-gonic/gin"
+	"github.com/gin-gonic/gin"
 )
 
 type ollamaChatStreamChunk struct {
-    Model            string `json:"model"`
-    CreatedAt        string `json:"created_at"`
-    // chat
-    Message *struct {
-        Role      string `json:"role"`
-        Content   string `json:"content"`
-        Thinking  json.RawMessage `json:"thinking"`
-        ToolCalls []struct {
-            Function struct {
-                Name      string      `json:"name"`
-                Arguments interface{} `json:"arguments"`
-            } `json:"function"`
-        } `json:"tool_calls"`
-    } `json:"message"`
-    // generate
-    Response string `json:"response"`
-    Done         bool    `json:"done"`
-    DoneReason   string  `json:"done_reason"`
-    TotalDuration int64  `json:"total_duration"`
-    LoadDuration  int64  `json:"load_duration"`
-    PromptEvalCount int  `json:"prompt_eval_count"`
-    EvalCount       int  `json:"eval_count"`
-    PromptEvalDuration int64 `json:"prompt_eval_duration"`
-    EvalDuration       int64 `json:"eval_duration"`
+	Model     string `json:"model"`
+	CreatedAt string `json:"created_at"`
+	// chat
+	Message *struct {
+		Role      string          `json:"role"`
+		Content   string          `json:"content"`
+		Thinking  json.RawMessage `json:"thinking"`
+		ToolCalls []struct {
+			Function struct {
+				Name      string      `json:"name"`
+				Arguments interface{} `json:"arguments"`
+			} `json:"function"`
+		} `json:"tool_calls"`
+	} `json:"message"`
+	// generate
+	Response           string `json:"response"`
+	Done               bool   `json:"done"`
+	DoneReason         string `json:"done_reason"`
+	TotalDuration      int64  `json:"total_duration"`
+	LoadDuration       int64  `json:"load_duration"`
+	PromptEvalCount    int    `json:"prompt_eval_count"`
+	EvalCount          int    `json:"eval_count"`
+	PromptEvalDuration int64  `json:"prompt_eval_duration"`
+	EvalDuration       int64  `json:"eval_duration"`
 }
 
 func toUnix(ts string) int64 {
-    if ts == "" { return time.Now().Unix() }
-    // try time.RFC3339 or with nanoseconds
-    t, err := time.Parse(time.RFC3339Nano, ts)
-    if err != nil { t2, err2 := time.Parse(time.RFC3339, ts); if err2==nil { return t2.Unix() }; return time.Now().Unix() }
-    return t.Unix()
+	if ts == "" {
+		return time.Now().Unix()
+	}
+	// try time.RFC3339 or with nanoseconds
+	t, err := time.Parse(time.RFC3339Nano, ts)
+	if err != nil {
+		t2, err2 := time.Parse(time.RFC3339, ts)
+		if err2 == nil {
+			return t2.Unix()
+		}
+		return time.Now().Unix()
+	}
+	return t.Unix()
 }
 
 func ollamaStreamHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
-    if resp == nil || resp.Body == nil { return nil, types.NewOpenAIError(fmt.Errorf("empty response"), types.ErrorCodeBadResponse, http.StatusBadRequest) }
-    defer service.CloseResponseBodyGracefully(resp)
+	if resp == nil || resp.Body == nil {
+		return nil, types.NewOpenAIError(fmt.Errorf("empty response"), types.ErrorCodeBadResponse, http.StatusBadRequest)
+	}
+	defer service.CloseResponseBodyGracefully(resp)
 
-    helper.SetEventStreamHeaders(c)
-    scanner := bufio.NewScanner(resp.Body)
-    usage := &dto.Usage{}
-    var model = info.UpstreamModelName
-    var responseId = common.GetUUID()
-    var created = time.Now().Unix()
-    var toolCallIndex int
-    start := helper.GenerateStartEmptyResponse(responseId, created, model, nil)
-    if data, err := common.Marshal(start); err == nil { _ = helper.StringData(c, string(data)) }
+	helper.SetEventStreamHeaders(c)
+	scanner := bufio.NewScanner(resp.Body)
+	usage := &dto.Usage{}
+	var model = info.UpstreamModelName
+	var responseId = common.GetUUID()
+	var created = time.Now().Unix()
+	var toolCallIndex int
+	start := helper.GenerateStartEmptyResponse(responseId, created, model, nil)
+	if data, err := common.Marshal(start); err == nil {
+		_ = helper.StringData(c, string(data))
+	}
 
-    for scanner.Scan() {
-        line := scanner.Text()
-        line = strings.TrimSpace(line)
-        if line == "" { continue }
-        var chunk ollamaChatStreamChunk
-        if err := json.Unmarshal([]byte(line), &chunk); err != nil {
-            logger.LogError(c, "ollama stream json decode error: "+err.Error()+" line="+line)
-            return usage, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
-        }
-        if chunk.Model != "" { model = chunk.Model }
-        created = toUnix(chunk.CreatedAt)
+	for scanner.Scan() {
+		line := scanner.Text()
+		line = strings.TrimSpace(line)
+		if line == "" {
+			continue
+		}
+		var chunk ollamaChatStreamChunk
+		if err := json.Unmarshal([]byte(line), &chunk); err != nil {
+			logger.LogError(c, "ollama stream json decode error: "+err.Error()+" line="+line)
+			return usage, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
+		}
+		if chunk.Model != "" {
+			model = chunk.Model
+		}
+		created = toUnix(chunk.CreatedAt)
 
-        if !chunk.Done {
-            // delta content
-            var content string
-            if chunk.Message != nil { content = chunk.Message.Content } else { content = chunk.Response }
-            delta := dto.ChatCompletionsStreamResponse{
-                Id:      responseId,
-                Object:  "chat.completion.chunk",
-                Created: created,
-                Model:   model,
-                Choices: []dto.ChatCompletionsStreamResponseChoice{ {
-                    Index: 0,
-                    Delta: dto.ChatCompletionsStreamResponseChoiceDelta{ Role: "assistant" },
-                } },
-            }
-            if content != "" { delta.Choices[0].Delta.SetContentString(content) }
-            if chunk.Message != nil && len(chunk.Message.Thinking) > 0 {
-                raw := strings.TrimSpace(string(chunk.Message.Thinking))
-                if raw != "" && raw != "null" { delta.Choices[0].Delta.SetReasoningContent(raw) }
-            }
-            // tool calls
-            if chunk.Message != nil && len(chunk.Message.ToolCalls) > 0 {
-                delta.Choices[0].Delta.ToolCalls = make([]dto.ToolCallResponse,0,len(chunk.Message.ToolCalls))
-                for _, tc := range chunk.Message.ToolCalls {
-                    // arguments -> string
-                    argBytes, _ := json.Marshal(tc.Function.Arguments)
-                    toolId := fmt.Sprintf("call_%d", toolCallIndex)
-                    tr := dto.ToolCallResponse{ID:toolId, Type:"function", Function: dto.FunctionResponse{Name: tc.Function.Name, Arguments: string(argBytes)}}
-                    tr.SetIndex(toolCallIndex)
-                    toolCallIndex++
-                    delta.Choices[0].Delta.ToolCalls = append(delta.Choices[0].Delta.ToolCalls, tr)
-                }
-            }
-            if data, err := common.Marshal(delta); err == nil { _ = helper.StringData(c, string(data)) }
-            continue
-        }
-        // done frame
-        // finalize once and break loop
-        usage.PromptTokens = chunk.PromptEvalCount
-        usage.CompletionTokens = chunk.EvalCount
-        usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens
-    finishReason := chunk.DoneReason
-    if finishReason == "" { finishReason = "stop" }
-        // emit stop delta
-        if stop := helper.GenerateStopResponse(responseId, created, model, finishReason); stop != nil {
-            if data, err := common.Marshal(stop); err == nil { _ = helper.StringData(c, string(data)) }
-        }
-        // emit usage frame
-        if final := helper.GenerateFinalUsageResponse(responseId, created, model, *usage); final != nil {
-            if data, err := common.Marshal(final); err == nil { _ = helper.StringData(c, string(data)) }
-        }
-        // send [DONE]
-        helper.Done(c)
-        break
-    }
-    if err := scanner.Err(); err != nil && err != io.EOF { logger.LogError(c, "ollama stream scan error: "+err.Error()) }
-    return usage, nil
+		if !chunk.Done {
+			// delta content
+			var content string
+			if chunk.Message != nil {
+				content = chunk.Message.Content
+			} else {
+				content = chunk.Response
+			}
+			delta := dto.ChatCompletionsStreamResponse{
+				Id:      responseId,
+				Object:  "chat.completion.chunk",
+				Created: created,
+				Model:   model,
+				Choices: []dto.ChatCompletionsStreamResponseChoice{{
+					Index: 0,
+					Delta: dto.ChatCompletionsStreamResponseChoiceDelta{Role: "assistant"},
+				}},
+			}
+			if content != "" {
+				delta.Choices[0].Delta.SetContentString(content)
+			}
+			if chunk.Message != nil && len(chunk.Message.Thinking) > 0 {
+				raw := strings.TrimSpace(string(chunk.Message.Thinking))
+				if raw != "" && raw != "null" {
+					delta.Choices[0].Delta.SetReasoningContent(raw)
+				}
+			}
+			// tool calls
+			if chunk.Message != nil && len(chunk.Message.ToolCalls) > 0 {
+				delta.Choices[0].Delta.ToolCalls = make([]dto.ToolCallResponse, 0, len(chunk.Message.ToolCalls))
+				for _, tc := range chunk.Message.ToolCalls {
+					// arguments -> string
+					argBytes, _ := json.Marshal(tc.Function.Arguments)
+					toolId := fmt.Sprintf("call_%d", toolCallIndex)
+					tr := dto.ToolCallResponse{ID: toolId, Type: "function", Function: dto.FunctionResponse{Name: tc.Function.Name, Arguments: string(argBytes)}}
+					tr.SetIndex(toolCallIndex)
+					toolCallIndex++
+					delta.Choices[0].Delta.ToolCalls = append(delta.Choices[0].Delta.ToolCalls, tr)
+				}
+			}
+			if data, err := common.Marshal(delta); err == nil {
+				_ = helper.StringData(c, string(data))
+			}
+			continue
+		}
+		// done frame
+		// finalize once and break loop
+		usage.PromptTokens = chunk.PromptEvalCount
+		usage.CompletionTokens = chunk.EvalCount
+		usage.TotalTokens = usage.PromptTokens + usage.CompletionTokens
+		finishReason := chunk.DoneReason
+		if finishReason == "" {
+			finishReason = "stop"
+		}
+		// emit stop delta
+		if stop := helper.GenerateStopResponse(responseId, created, model, finishReason); stop != nil {
+			if data, err := common.Marshal(stop); err == nil {
+				_ = helper.StringData(c, string(data))
+			}
+		}
+		// emit usage frame
+		if final := helper.GenerateFinalUsageResponse(responseId, created, model, *usage); final != nil {
+			if data, err := common.Marshal(final); err == nil {
+				_ = helper.StringData(c, string(data))
+			}
+		}
+		// send [DONE]
+		helper.Done(c)
+		break
+	}
+	if err := scanner.Err(); err != nil && err != io.EOF {
+		logger.LogError(c, "ollama stream scan error: "+err.Error())
+	}
+	return usage, nil
 }
 
 // non-stream handler for chat/generate
 func ollamaChatHandler(c *gin.Context, info *relaycommon.RelayInfo, resp *http.Response) (*dto.Usage, *types.NewAPIError) {
-    body, err := io.ReadAll(resp.Body)
-    if err != nil { return nil, types.NewOpenAIError(err, types.ErrorCodeReadResponseBodyFailed, http.StatusInternalServerError) }
-    service.CloseResponseBodyGracefully(resp)
-    raw := string(body)
-    if common.DebugEnabled { println("ollama non-stream raw resp:", raw) }
+	body, err := io.ReadAll(resp.Body)
+	if err != nil {
+		return nil, types.NewOpenAIError(err, types.ErrorCodeReadResponseBodyFailed, http.StatusInternalServerError)
+	}
+	service.CloseResponseBodyGracefully(resp)
+	raw := string(body)
+	if common.DebugEnabled {
+		println("ollama non-stream raw resp:", raw)
+	}
 
-    lines := strings.Split(raw, "\n")
-    var (
-        aggContent strings.Builder
-        reasoningBuilder strings.Builder
-        lastChunk ollamaChatStreamChunk
-        parsedAny bool
-    )
-    for _, ln := range lines {
-        ln = strings.TrimSpace(ln)
-        if ln == "" { continue }
-        var ck ollamaChatStreamChunk
-        if err := json.Unmarshal([]byte(ln), &ck); err != nil {
-            if len(lines) == 1 { return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError) }
-            continue
-        }
-        parsedAny = true
-        lastChunk = ck
-        if ck.Message != nil && len(ck.Message.Thinking) > 0 {
-            raw := strings.TrimSpace(string(ck.Message.Thinking))
-            if raw != "" && raw != "null" { reasoningBuilder.WriteString(raw) }
-        }
-        if ck.Message != nil && ck.Message.Content != "" { aggContent.WriteString(ck.Message.Content) } else if ck.Response != "" { aggContent.WriteString(ck.Response) }
-    }
+	lines := strings.Split(raw, "\n")
+	var (
+		aggContent       strings.Builder
+		reasoningBuilder strings.Builder
+		lastChunk        ollamaChatStreamChunk
+		parsedAny        bool
+	)
+	for _, ln := range lines {
+		ln = strings.TrimSpace(ln)
+		if ln == "" {
+			continue
+		}
+		var ck ollamaChatStreamChunk
+		if err := json.Unmarshal([]byte(ln), &ck); err != nil {
+			if len(lines) == 1 {
+				return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
+			}
+			continue
+		}
+		parsedAny = true
+		lastChunk = ck
+		if ck.Message != nil && len(ck.Message.Thinking) > 0 {
+			raw := strings.TrimSpace(string(ck.Message.Thinking))
+			if raw != "" && raw != "null" {
+				reasoningBuilder.WriteString(raw)
+			}
+		}
+		if ck.Message != nil && ck.Message.Content != "" {
+			aggContent.WriteString(ck.Message.Content)
+		} else if ck.Response != "" {
+			aggContent.WriteString(ck.Response)
+		}
+	}
 
-    if !parsedAny {
-        var single ollamaChatStreamChunk
-        if err := json.Unmarshal(body, &single); err != nil { return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError) }
-        lastChunk = single
-        if single.Message != nil {
-            if len(single.Message.Thinking) > 0 { raw := strings.TrimSpace(string(single.Message.Thinking)); if raw != "" && raw != "null" { reasoningBuilder.WriteString(raw) } }
-            aggContent.WriteString(single.Message.Content)
-        } else { aggContent.WriteString(single.Response) }
-    }
+	if !parsedAny {
+		var single ollamaChatStreamChunk
+		if err := json.Unmarshal(body, &single); err != nil {
+			return nil, types.NewOpenAIError(err, types.ErrorCodeBadResponseBody, http.StatusInternalServerError)
+		}
+		lastChunk = single
+		if single.Message != nil {
+			if len(single.Message.Thinking) > 0 {
+				raw := strings.TrimSpace(string(single.Message.Thinking))
+				if raw != "" && raw != "null" {
+					reasoningBuilder.WriteString(raw)
+				}
+			}
+			aggContent.WriteString(single.Message.Content)
+		} else {
+			aggContent.WriteString(single.Response)
+		}
+	}
 
-    model := lastChunk.Model
-    if model == "" { model = info.UpstreamModelName }
-    created := toUnix(lastChunk.CreatedAt)
-    usage := &dto.Usage{PromptTokens: lastChunk.PromptEvalCount, CompletionTokens: lastChunk.EvalCount, TotalTokens: lastChunk.PromptEvalCount + lastChunk.EvalCount}
-    content := aggContent.String()
-    finishReason := lastChunk.DoneReason
-    if finishReason == "" { finishReason = "stop" }
+	model := lastChunk.Model
+	if model == "" {
+		model = info.UpstreamModelName
+	}
+	created := toUnix(lastChunk.CreatedAt)
+	usage := &dto.Usage{PromptTokens: lastChunk.PromptEvalCount, CompletionTokens: lastChunk.EvalCount, TotalTokens: lastChunk.PromptEvalCount + lastChunk.EvalCount}
+	content := aggContent.String()
+	finishReason := lastChunk.DoneReason
+	if finishReason == "" {
+		finishReason = "stop"
+	}
 
-    msg := dto.Message{Role: "assistant", Content: contentPtr(content)}
-    if rc := reasoningBuilder.String(); rc != "" { msg.ReasoningContent = rc }
-    full := dto.OpenAITextResponse{
-        Id:      common.GetUUID(),
-        Model:   model,
-        Object:  "chat.completion",
-        Created: created,
-        Choices: []dto.OpenAITextResponseChoice{ {
-            Index: 0,
-            Message: msg,
-            FinishReason: finishReason,
-        } },
-        Usage: *usage,
-    }
-    out, _ := common.Marshal(full)
-    service.IOCopyBytesGracefully(c, resp, out)
-    return usage, nil
+	msg := dto.Message{Role: "assistant", Content: contentPtr(content)}
+	if rc := reasoningBuilder.String(); rc != "" {
+		msg.ReasoningContent = rc
+	}
+	full := dto.OpenAITextResponse{
+		Id:      common.GetUUID(),
+		Model:   model,
+		Object:  "chat.completion",
+		Created: created,
+		Choices: []dto.OpenAITextResponseChoice{{
+			Index:        0,
+			Message:      msg,
+			FinishReason: finishReason,
+		}},
+		Usage: *usage,
+	}
+	out, _ := common.Marshal(full)
+	service.IOCopyBytesGracefully(c, resp, out)
+	return usage, nil
 }
 
-func contentPtr(s string) *string { if s=="" { return nil }; return &s }
+func contentPtr(s string) *string {
+	if s == "" {
+		return nil
+	}
+	return &s
+}

+ 1 - 1
relay/channel/submodel/constants.go

@@ -13,4 +13,4 @@ var ModelList = []string{
 	"deepseek-ai/DeepSeek-V3.1",
 }
 
-const ChannelName = "submodel"
+const ChannelName = "submodel"

+ 69 - 3
setting/operation_setting/general_setting.go

@@ -2,17 +2,34 @@ package operation_setting
 
 import "one-api/setting/config"
 
+// 额度展示类型
+const (
+	QuotaDisplayTypeUSD    = "USD"
+	QuotaDisplayTypeCNY    = "CNY"
+	QuotaDisplayTypeTokens = "TOKENS"
+	QuotaDisplayTypeCustom = "CUSTOM"
+)
+
 type GeneralSetting struct {
 	DocsLink            string `json:"docs_link"`
 	PingIntervalEnabled bool   `json:"ping_interval_enabled"`
 	PingIntervalSeconds int    `json:"ping_interval_seconds"`
+	// 当前站点额度展示类型:USD / CNY / TOKENS
+	QuotaDisplayType string `json:"quota_display_type"`
+	// 自定义货币符号,用于 CUSTOM 展示类型
+	CustomCurrencySymbol string `json:"custom_currency_symbol"`
+	// 自定义货币与美元汇率(1 USD = X Custom)
+	CustomCurrencyExchangeRate float64 `json:"custom_currency_exchange_rate"`
 }
 
 // 默认配置
 var generalSetting = GeneralSetting{
-	DocsLink:            "https://docs.newapi.pro",
-	PingIntervalEnabled: false,
-	PingIntervalSeconds: 60,
+	DocsLink:                   "https://docs.newapi.pro",
+	PingIntervalEnabled:        false,
+	PingIntervalSeconds:        60,
+	QuotaDisplayType:           QuotaDisplayTypeUSD,
+	CustomCurrencySymbol:       "¤",
+	CustomCurrencyExchangeRate: 1.0,
 }
 
 func init() {
@@ -23,3 +40,52 @@ func init() {
 func GetGeneralSetting() *GeneralSetting {
 	return &generalSetting
 }
+
+// IsCurrencyDisplay 是否以货币形式展示(美元或人民币)
+func IsCurrencyDisplay() bool {
+	return generalSetting.QuotaDisplayType != QuotaDisplayTypeTokens
+}
+
+// IsCNYDisplay 是否以人民币展示
+func IsCNYDisplay() bool {
+	return generalSetting.QuotaDisplayType == QuotaDisplayTypeCNY
+}
+
+// GetQuotaDisplayType 返回额度展示类型
+func GetQuotaDisplayType() string {
+	return generalSetting.QuotaDisplayType
+}
+
+// GetCurrencySymbol 返回当前展示类型对应符号
+func GetCurrencySymbol() string {
+	switch generalSetting.QuotaDisplayType {
+	case QuotaDisplayTypeUSD:
+		return "$"
+	case QuotaDisplayTypeCNY:
+		return "¥"
+	case QuotaDisplayTypeCustom:
+		if generalSetting.CustomCurrencySymbol != "" {
+			return generalSetting.CustomCurrencySymbol
+		}
+		return "¤"
+	default:
+		return ""
+	}
+}
+
+// GetUsdToCurrencyRate 返回 1 USD = X <currency> 的 X(TOKENS 不适用)
+func GetUsdToCurrencyRate(usdToCny float64) float64 {
+	switch generalSetting.QuotaDisplayType {
+	case QuotaDisplayTypeUSD:
+		return 1
+	case QuotaDisplayTypeCNY:
+		return usdToCny
+	case QuotaDisplayTypeCustom:
+		if generalSetting.CustomCurrencyExchangeRate > 0 {
+			return generalSetting.CustomCurrencyExchangeRate
+		}
+		return 1
+	default:
+		return 1
+	}
+}

+ 1 - 1
web/index.html

@@ -10,7 +10,7 @@
       content="OpenAI 接口聚合管理,支持多种渠道包括 Azure,可用于二次分发管理 key,仅单可执行文件,已打包好 Docker 镜像,一键部署,开箱即用"
     />
     <title>New API</title>
-<analytics></analytics>
+    <analytics></analytics>
   </head>
 
   <body>

+ 1 - 1
web/src/components/settings/OperationSetting.jsx

@@ -42,7 +42,7 @@ const OperationSetting = () => {
     QuotaPerUnit: 0,
     USDExchangeRate: 0,
     RetryTimes: 0,
-    DisplayInCurrencyEnabled: false,
+    'general_setting.quota_display_type': 'USD',
     DisplayTokenStatEnabled: false,
     DefaultCollapseSidebar: false,
     DemoSiteEnabled: false,

+ 6 - 3
web/src/components/settings/personal/cards/AccountManagement.jsx

@@ -91,7 +91,8 @@ const AccountManagement = ({
     );
   };
   const isBound = (accountId) => Boolean(accountId);
-  const [showTelegramBindModal, setShowTelegramBindModal] = React.useState(false);
+  const [showTelegramBindModal, setShowTelegramBindModal] =
+    React.useState(false);
   const passkeyEnabled = passkeyStatus?.enabled;
   const lastUsedLabel = passkeyStatus?.last_used_at
     ? new Date(passkeyStatus.last_used_at).toLocaleString()
@@ -236,7 +237,8 @@ const AccountManagement = ({
                         onGitHubOAuthClicked(status.github_client_id)
                       }
                       disabled={
-                        isBound(userState.user?.github_id) || !status.github_oauth
+                        isBound(userState.user?.github_id) ||
+                        !status.github_oauth
                       }
                     >
                       {status.github_oauth ? t('绑定') : t('未启用')}
@@ -394,7 +396,8 @@ const AccountManagement = ({
                         onLinuxDOOAuthClicked(status.linuxdo_client_id)
                       }
                       disabled={
-                        isBound(userState.user?.linux_do_id) || !status.linuxdo_oauth
+                        isBound(userState.user?.linux_do_id) ||
+                        !status.linuxdo_oauth
                       }
                     >
                       {status.linuxdo_oauth ? t('绑定') : t('未启用')}

+ 37 - 28
web/src/components/table/channels/modals/EditChannelModal.jsx

@@ -91,8 +91,7 @@ const REGION_EXAMPLE = {
 
 // 支持并且已适配通过接口获取模型列表的渠道类型
 const MODEL_FETCHABLE_TYPES = new Set([
-  1, 4, 14, 34, 17, 26, 24, 47, 25, 20, 23, 31, 35, 40, 42, 48,
-  43,
+  1, 4, 14, 34, 17, 26, 24, 47, 25, 20, 23, 31, 35, 40, 42, 48, 43,
 ]);
 
 function type2secretPrompt(type) {
@@ -408,7 +407,10 @@ const EditChannelModal = (props) => {
           break;
         case 45:
           localModels = getChannelModels(value);
-          setInputs((prevInputs) => ({ ...prevInputs, base_url: 'https://ark.cn-beijing.volces.com' }));
+          setInputs((prevInputs) => ({
+            ...prevInputs,
+            base_url: 'https://ark.cn-beijing.volces.com',
+          }));
           break;
         default:
           localModels = getChannelModels(value);
@@ -502,7 +504,8 @@ const EditChannelModal = (props) => {
           // 读取 Vertex 密钥格式
           data.vertex_key_type = parsedSettings.vertex_key_type || 'json';
           // 读取企业账户设置
-          data.is_enterprise_account = parsedSettings.openrouter_enterprise === true;
+          data.is_enterprise_account =
+            parsedSettings.openrouter_enterprise === true;
           // 读取字段透传控制设置
           data.allow_service_tier = parsedSettings.allow_service_tier || false;
           data.disable_store = parsedSettings.disable_store || false;
@@ -929,7 +932,10 @@ const EditChannelModal = (props) => {
       showInfo(t('请至少选择一个模型!'));
       return;
     }
-    if (localInputs.type === 45 && (!localInputs.base_url || localInputs.base_url.trim() === '')) {
+    if (
+      localInputs.type === 45 &&
+      (!localInputs.base_url || localInputs.base_url.trim() === '')
+    ) {
       showInfo(t('请输入API地址!'));
       return;
     }
@@ -974,7 +980,8 @@ const EditChannelModal = (props) => {
 
     // type === 20: 设置企业账户标识,无论是true还是false都要传到后端
     if (localInputs.type === 20) {
-      settings.openrouter_enterprise = localInputs.is_enterprise_account === true;
+      settings.openrouter_enterprise =
+        localInputs.is_enterprise_account === true;
     }
 
     // type === 1 (OpenAI) 或 type === 14 (Claude): 设置字段透传控制(显式保存布尔值)
@@ -1433,7 +1440,9 @@ const EditChannelModal = (props) => {
                         setIsEnterpriseAccount(value);
                         handleInputChange('is_enterprise_account', value);
                       }}
-                      extraText={t('企业账户为特殊返回格式,需要特殊处理,如果非企业账户,请勿勾选')}
+                      extraText={t(
+                        '企业账户为特殊返回格式,需要特殊处理,如果非企业账户,请勿勾选',
+                      )}
                       initValue={inputs.is_enterprise_account}
                     />
                   )}
@@ -2061,27 +2070,27 @@ const EditChannelModal = (props) => {
                     )}
 
                     {inputs.type === 45 && (
-                        <div>
-                          <Form.Select
-                              field='base_url'
-                              label={t('API地址')}
-                              placeholder={t('请选择API地址')}
-                              onChange={(value) =>
-                                  handleInputChange('base_url', value)
-                              }
-                              optionList={[
-                                {
-                                  value: 'https://ark.cn-beijing.volces.com',
-                                  label: 'https://ark.cn-beijing.volces.com'
-                                },
-                                {
-                                  value: 'https://ark.ap-southeast.bytepluses.com',
-                                  label: 'https://ark.ap-southeast.bytepluses.com'
-                                }
-                              ]}
-                              defaultValue='https://ark.cn-beijing.volces.com'
-                          />
-                        </div>
+                      <div>
+                        <Form.Select
+                          field='base_url'
+                          label={t('API地址')}
+                          placeholder={t('请选择API地址')}
+                          onChange={(value) =>
+                            handleInputChange('base_url', value)
+                          }
+                          optionList={[
+                            {
+                              value: 'https://ark.cn-beijing.volces.com',
+                              label: 'https://ark.cn-beijing.volces.com',
+                            },
+                            {
+                              value: 'https://ark.ap-southeast.bytepluses.com',
+                              label: 'https://ark.ap-southeast.bytepluses.com',
+                            },
+                          ]}
+                          defaultValue='https://ark.cn-beijing.volces.com'
+                        />
+                      </div>
                     )}
                     </Card>
                   </div>

+ 1 - 0
web/src/components/table/model-pricing/filter/PricingDisplaySettings.jsx

@@ -56,6 +56,7 @@ const PricingDisplaySettings = ({
   const currencyItems = [
     { value: 'USD', label: 'USD ($)' },
     { value: 'CNY', label: 'CNY (¥)' },
+    { value: 'CUSTOM', label: t('自定义货币') },
   ];
 
   const handleChange = (value) => {

+ 1 - 0
web/src/components/table/model-pricing/layout/header/SearchActions.jsx

@@ -107,6 +107,7 @@ const SearchActions = memo(
                 optionList={[
                   { value: 'USD', label: 'USD' },
                   { value: 'CNY', label: 'CNY' },
+                  { value: 'CUSTOM', label: t('自定义货币') },
                 ]}
               />
             )}

+ 48 - 30
web/src/components/table/task-logs/modals/ContentModal.jsx

@@ -60,38 +60,54 @@ const ContentModal = ({
     if (videoError) {
       return (
         <div style={{ textAlign: 'center', padding: '40px' }}>
-          <Text type="tertiary" style={{ display: 'block', marginBottom: '16px' }}>
+          <Text
+            type='tertiary'
+            style={{ display: 'block', marginBottom: '16px' }}
+          >
             视频无法在当前浏览器中播放,这可能是由于:
           </Text>
-          <Text type="tertiary" style={{ display: 'block', marginBottom: '8px', fontSize: '12px' }}>
+          <Text
+            type='tertiary'
+            style={{ display: 'block', marginBottom: '8px', fontSize: '12px' }}
+          >
             • 视频服务商的跨域限制
           </Text>
-          <Text type="tertiary" style={{ display: 'block', marginBottom: '8px', fontSize: '12px' }}>
+          <Text
+            type='tertiary'
+            style={{ display: 'block', marginBottom: '8px', fontSize: '12px' }}
+          >
             • 需要特定的请求头或认证
           </Text>
-          <Text type="tertiary" style={{ display: 'block', marginBottom: '16px', fontSize: '12px' }}>
+          <Text
+            type='tertiary'
+            style={{ display: 'block', marginBottom: '16px', fontSize: '12px' }}
+          >
             • 防盗链保护机制
           </Text>
-          
+
           <div style={{ marginTop: '20px' }}>
-            <Button 
+            <Button
               icon={<IconExternalOpen />}
               onClick={handleOpenInNewTab}
               style={{ marginRight: '8px' }}
             >
               在新标签页中打开
             </Button>
-            <Button 
-              icon={<IconCopy />}
-              onClick={handleCopyUrl}
-            >
+            <Button icon={<IconCopy />} onClick={handleCopyUrl}>
               复制链接
             </Button>
           </div>
-          
-          <div style={{ marginTop: '16px', padding: '8px', backgroundColor: '#f8f9fa', borderRadius: '4px' }}>
-            <Text 
-              type="tertiary" 
+
+          <div
+            style={{
+              marginTop: '16px',
+              padding: '8px',
+              backgroundColor: '#f8f9fa',
+              borderRadius: '4px',
+            }}
+          >
+            <Text
+              type='tertiary'
               style={{ fontSize: '10px', wordBreak: 'break-all' }}
             >
               {modalContent}
@@ -104,22 +120,24 @@ const ContentModal = ({
     return (
       <div style={{ position: 'relative' }}>
         {isLoading && (
-          <div style={{
-            position: 'absolute',
-            top: '50%',
-            left: '50%',
-            transform: 'translate(-50%, -50%)',
-            zIndex: 10
-          }}>
-            <Spin size="large" />
+          <div
+            style={{
+              position: 'absolute',
+              top: '50%',
+              left: '50%',
+              transform: 'translate(-50%, -50%)',
+              zIndex: 10,
+            }}
+          >
+            <Spin size='large' />
           </div>
         )}
-        <video 
-          src={modalContent} 
-          controls 
-          style={{ width: '100%' }} 
+        <video
+          src={modalContent}
+          controls
+          style={{ width: '100%' }}
           autoPlay
-          crossOrigin="anonymous"
+          crossOrigin='anonymous'
           onError={handleVideoError}
           onLoadedData={handleVideoLoaded}
           onLoadStart={() => setIsLoading(true)}
@@ -134,10 +152,10 @@ const ContentModal = ({
       onOk={() => setIsModalOpen(false)}
       onCancel={() => setIsModalOpen(false)}
       closable={null}
-      bodyStyle={{ 
-        height: isVideo ? '450px' : '400px', 
+      bodyStyle={{
+        height: isVideo ? '450px' : '400px',
         overflow: 'auto',
-        padding: isVideo && videoError ? '0' : '24px'
+        padding: isVideo && videoError ? '0' : '24px',
       }}
       width={800}
     >

+ 2 - 0
web/src/helpers/data.js

@@ -23,7 +23,9 @@ export function setStatusData(data) {
   localStorage.setItem('logo', data.logo);
   localStorage.setItem('footer_html', data.footer_html);
   localStorage.setItem('quota_per_unit', data.quota_per_unit);
+  // 兼容:保留旧字段,同时写入新的额度展示类型
   localStorage.setItem('display_in_currency', data.display_in_currency);
+  localStorage.setItem('quota_display_type', data.quota_display_type || 'USD');
   localStorage.setItem('enable_drawing', data.enable_drawing);
   localStorage.setItem('enable_task', data.enable_task);
   localStorage.setItem('enable_data_export', data.enable_data_export);

+ 71 - 25
web/src/helpers/render.jsx

@@ -832,12 +832,25 @@ export function renderQuotaNumberWithDigit(num, digits = 2) {
   if (typeof num !== 'number' || isNaN(num)) {
     return 0;
   }
-  let displayInCurrency = localStorage.getItem('display_in_currency');
+  const quotaDisplayType = localStorage.getItem('quota_display_type') || 'USD';
   num = num.toFixed(digits);
-  if (displayInCurrency) {
+  if (quotaDisplayType === 'CNY') {
+    return '¥' + num;
+  } else if (quotaDisplayType === 'USD') {
     return '$' + num;
+  } else if (quotaDisplayType === 'CUSTOM') {
+    const statusStr = localStorage.getItem('status');
+    let symbol = '¤';
+    try {
+      if (statusStr) {
+        const s = JSON.parse(statusStr);
+        symbol = s?.custom_currency_symbol || symbol;
+      }
+    } catch (e) {}
+    return symbol + num;
+  } else {
+    return num;
   }
-  return num;
 }
 
 export function renderNumberWithPoint(num) {
@@ -889,33 +902,67 @@ export function getQuotaWithUnit(quota, digits = 6) {
 }
 
 export function renderQuotaWithAmount(amount) {
-  let displayInCurrency = localStorage.getItem('display_in_currency');
-  displayInCurrency = displayInCurrency === 'true';
-  if (displayInCurrency) {
-    return '$' + amount;
-  } else {
+  const quotaDisplayType = localStorage.getItem('quota_display_type') || 'USD';
+  if (quotaDisplayType === 'TOKENS') {
     return renderNumber(renderUnitWithQuota(amount));
   }
+  if (quotaDisplayType === 'CNY') {
+    return '¥' + amount;
+  } else if (quotaDisplayType === 'CUSTOM') {
+    const statusStr = localStorage.getItem('status');
+    let symbol = '¤';
+    try {
+      if (statusStr) {
+        const s = JSON.parse(statusStr);
+        symbol = s?.custom_currency_symbol || symbol;
+      }
+    } catch (e) {}
+    return symbol + amount;
+  }
+  return '$' + amount;
 }
 
 export function renderQuota(quota, digits = 2) {
   let quotaPerUnit = localStorage.getItem('quota_per_unit');
-  let displayInCurrency = localStorage.getItem('display_in_currency');
+  const quotaDisplayType = localStorage.getItem('quota_display_type') || 'USD';
   quotaPerUnit = parseFloat(quotaPerUnit);
-  displayInCurrency = displayInCurrency === 'true';
-  if (displayInCurrency) {
-    const result = quota / quotaPerUnit;
-    const fixedResult = result.toFixed(digits);
-
-    // 如果 toFixed 后结果为 0 但原始值不为 0,显示最小值
-    if (parseFloat(fixedResult) === 0 && quota > 0 && result > 0) {
-      const minValue = Math.pow(10, -digits);
-      return '$' + minValue.toFixed(digits);
-    }
-
-    return '$' + fixedResult;
+  if (quotaDisplayType === 'TOKENS') {
+    return renderNumber(quota);
+  }
+  const resultUSD = quota / quotaPerUnit;
+  let symbol = '$';
+  let value = resultUSD;
+  if (quotaDisplayType === 'CNY') {
+    const statusStr = localStorage.getItem('status');
+    let usdRate = 1;
+    try {
+      if (statusStr) {
+        const s = JSON.parse(statusStr);
+        usdRate = s?.usd_exchange_rate || 1;
+      }
+    } catch (e) {}
+    value = resultUSD * usdRate;
+    symbol = '¥';
+  } else if (quotaDisplayType === 'CUSTOM') {
+    const statusStr = localStorage.getItem('status');
+    let symbolCustom = '¤';
+    let rate = 1;
+    try {
+      if (statusStr) {
+        const s = JSON.parse(statusStr);
+        symbolCustom = s?.custom_currency_symbol || symbolCustom;
+        rate = s?.custom_currency_exchange_rate || rate;
+      }
+    } catch (e) {}
+    value = resultUSD * rate;
+    symbol = symbolCustom;
+  }
+  const fixedResult = value.toFixed(digits);
+  if (parseFloat(fixedResult) === 0 && quota > 0 && value > 0) {
+    const minValue = Math.pow(10, -digits);
+    return symbol + minValue.toFixed(digits);
   }
-  return renderNumber(quota);
+  return symbol + fixedResult;
 }
 
 function isValidGroupRatio(ratio) {
@@ -1512,9 +1559,8 @@ export function renderAudioModelPrice(
 }
 
 export function renderQuotaWithPrompt(quota, digits) {
-  let displayInCurrency = localStorage.getItem('display_in_currency');
-  displayInCurrency = displayInCurrency === 'true';
-  if (displayInCurrency) {
+  const quotaDisplayType = localStorage.getItem('quota_display_type') || 'USD';
+  if (quotaDisplayType !== 'TOKENS') {
     return i18next.t('等价金额:') + renderQuota(quota, digits);
   }
   return '';

+ 18 - 2
web/src/helpers/utils.jsx

@@ -646,9 +646,25 @@ export const calculateModelPrice = ({
     const numCompletion =
       parseFloat(rawDisplayCompletion.replace(/[^0-9.]/g, '')) / unitDivisor;
 
+    let symbol = '$';
+    if (currency === 'CNY') {
+      symbol = '¥';
+    } else if (currency === 'CUSTOM') {
+      try {
+        const statusStr = localStorage.getItem('status');
+        if (statusStr) {
+          const s = JSON.parse(statusStr);
+          symbol = s?.custom_currency_symbol || '¤';
+        } else {
+          symbol = '¤';
+        }
+      } catch (e) {
+        symbol = '¤';
+      }
+    }
     return {
-      inputPrice: `${currency === 'CNY' ? '¥' : '$'}${numInput.toFixed(precision)}`,
-      completionPrice: `${currency === 'CNY' ? '¥' : '$'}${numCompletion.toFixed(precision)}`,
+      inputPrice: `${symbol}${numInput.toFixed(precision)}`,
+      completionPrice: `${symbol}${numCompletion.toFixed(precision)}`,
       unitLabel,
       isPerToken: true,
       usedGroup,

+ 126 - 51
web/src/hooks/channels/useChannelsData.jsx

@@ -25,9 +25,13 @@ import {
   showInfo,
   showSuccess,
   loadChannelModels,
-  copy
+  copy,
 } from '../../helpers';
-import { CHANNEL_OPTIONS, ITEMS_PER_PAGE, MODEL_TABLE_PAGE_SIZE } from '../../constants';
+import {
+  CHANNEL_OPTIONS,
+  ITEMS_PER_PAGE,
+  MODEL_TABLE_PAGE_SIZE,
+} from '../../constants';
 import { useIsMobile } from '../common/useIsMobile';
 import { useTableCompactMode } from '../common/useTableCompactMode';
 import { Modal } from '@douyinfe/semi-ui';
@@ -64,7 +68,7 @@ export const useChannelsData = () => {
 
   // Status filter
   const [statusFilter, setStatusFilter] = useState(
-    localStorage.getItem('channel-status-filter') || 'all'
+    localStorage.getItem('channel-status-filter') || 'all',
   );
 
   // Type tabs states
@@ -80,8 +84,8 @@ export const useChannelsData = () => {
   const [selectedModelKeys, setSelectedModelKeys] = useState([]);
   const [isBatchTesting, setIsBatchTesting] = useState(false);
   const [modelTablePage, setModelTablePage] = useState(1);
-  const [selectedEndpointType, setSelectedEndpointType] = useState('');
-  
+  const [selectedEndpointType, setSelectedEndpointType] = useState('');
+
   // 使用 ref 来避免闭包问题,类似旧版实现
   const shouldStopBatchTestingRef = useRef(false);
 
@@ -117,9 +121,12 @@ export const useChannelsData = () => {
   // Initialize from localStorage
   useEffect(() => {
     const localIdSort = localStorage.getItem('id-sort') === 'true';
-    const localPageSize = parseInt(localStorage.getItem('page-size')) || ITEMS_PER_PAGE;
-    const localEnableTagMode = localStorage.getItem('enable-tag-mode') === 'true';
-    const localEnableBatchDelete = localStorage.getItem('enable-batch-delete') === 'true';
+    const localPageSize =
+      parseInt(localStorage.getItem('page-size')) || ITEMS_PER_PAGE;
+    const localEnableTagMode =
+      localStorage.getItem('enable-tag-mode') === 'true';
+    const localEnableBatchDelete =
+      localStorage.getItem('enable-batch-delete') === 'true';
 
     setIdSort(localIdSort);
     setPageSize(localPageSize);
@@ -177,7 +184,10 @@ export const useChannelsData = () => {
   // Save column preferences
   useEffect(() => {
     if (Object.keys(visibleColumns).length > 0) {
-      localStorage.setItem('channels-table-columns', JSON.stringify(visibleColumns));
+      localStorage.setItem(
+        'channels-table-columns',
+        JSON.stringify(visibleColumns),
+      );
     }
   }, [visibleColumns]);
 
@@ -291,14 +301,21 @@ export const useChannelsData = () => {
     const { searchKeyword, searchGroup, searchModel } = getFormValues();
     if (searchKeyword !== '' || searchGroup !== '' || searchModel !== '') {
       setLoading(true);
-      await searchChannels(enableTagMode, typeKey, statusF, page, pageSize, idSort);
+      await searchChannels(
+        enableTagMode,
+        typeKey,
+        statusF,
+        page,
+        pageSize,
+        idSort,
+      );
       setLoading(false);
       return;
     }
 
     const reqId = ++requestCounter.current;
     setLoading(true);
-    const typeParam = (typeKey !== 'all') ? `&type=${typeKey}` : '';
+    const typeParam = typeKey !== 'all' ? `&type=${typeKey}` : '';
     const statusParam = statusF !== 'all' ? `&status=${statusF}` : '';
     const res = await API.get(
       `/api/channel/?p=${page}&page_size=${pageSize}&id_sort=${idSort}&tag_mode=${enableTagMode}${typeParam}${statusParam}`,
@@ -312,7 +329,10 @@ export const useChannelsData = () => {
     if (success) {
       const { items, total, type_counts } = data;
       if (type_counts) {
-        const sumAll = Object.values(type_counts).reduce((acc, v) => acc + v, 0);
+        const sumAll = Object.values(type_counts).reduce(
+          (acc, v) => acc + v,
+          0,
+        );
         setTypeCounts({ ...type_counts, all: sumAll });
       }
       setChannelFormat(items, enableTagMode);
@@ -336,11 +356,18 @@ export const useChannelsData = () => {
     setSearching(true);
     try {
       if (searchKeyword === '' && searchGroup === '' && searchModel === '') {
-        await loadChannels(page, pageSz, sortFlag, enableTagMode, typeKey, statusF);
+        await loadChannels(
+          page,
+          pageSz,
+          sortFlag,
+          enableTagMode,
+          typeKey,
+          statusF,
+        );
         return;
       }
 
-      const typeParam = (typeKey !== 'all') ? `&type=${typeKey}` : '';
+      const typeParam = typeKey !== 'all' ? `&type=${typeKey}` : '';
       const statusParam = statusF !== 'all' ? `&status=${statusF}` : '';
       const res = await API.get(
         `/api/channel/search?keyword=${searchKeyword}&group=${searchGroup}&model=${searchModel}&id_sort=${sortFlag}&tag_mode=${enableTagMode}&p=${page}&page_size=${pageSz}${typeParam}${statusParam}`,
@@ -348,7 +375,10 @@ export const useChannelsData = () => {
       const { success, message, data } = res.data;
       if (success) {
         const { items = [], total = 0, type_counts = {} } = data;
-        const sumAll = Object.values(type_counts).reduce((acc, v) => acc + v, 0);
+        const sumAll = Object.values(type_counts).reduce(
+          (acc, v) => acc + v,
+          0,
+        );
         setTypeCounts({ ...type_counts, all: sumAll });
         setChannelFormat(items, enableTagMode);
         setChannelCount(total);
@@ -367,7 +397,14 @@ export const useChannelsData = () => {
     if (searchKeyword === '' && searchGroup === '' && searchModel === '') {
       await loadChannels(page, pageSize, idSort, enableTagMode);
     } else {
-      await searchChannels(enableTagMode, activeTypeKey, statusFilter, page, pageSize, idSort);
+      await searchChannels(
+        enableTagMode,
+        activeTypeKey,
+        statusFilter,
+        page,
+        pageSize,
+        idSort,
+      );
     }
   };
 
@@ -453,9 +490,16 @@ export const useChannelsData = () => {
     const { searchKeyword, searchGroup, searchModel } = getFormValues();
     setActivePage(page);
     if (searchKeyword === '' && searchGroup === '' && searchModel === '') {
-      loadChannels(page, pageSize, idSort, enableTagMode).then(() => { });
+      loadChannels(page, pageSize, idSort, enableTagMode).then(() => {});
     } else {
-      searchChannels(enableTagMode, activeTypeKey, statusFilter, page, pageSize, idSort);
+      searchChannels(
+        enableTagMode,
+        activeTypeKey,
+        statusFilter,
+        page,
+        pageSize,
+        idSort,
+      );
     }
   };
 
@@ -471,7 +515,14 @@ export const useChannelsData = () => {
           showError(reason);
         });
     } else {
-      searchChannels(enableTagMode, activeTypeKey, statusFilter, 1, size, idSort);
+      searchChannels(
+        enableTagMode,
+        activeTypeKey,
+        statusFilter,
+        1,
+        size,
+        idSort,
+      );
     }
   };
 
@@ -502,7 +553,10 @@ export const useChannelsData = () => {
         showError(res?.data?.message || t('渠道复制失败'));
       }
     } catch (error) {
-      showError(t('渠道复制失败: ') + (error?.response?.data?.message || error?.message || error));
+      showError(
+        t('渠道复制失败: ') +
+          (error?.response?.data?.message || error?.message || error),
+      );
     }
   };
 
@@ -541,7 +595,11 @@ export const useChannelsData = () => {
         data.priority = parseInt(data.priority);
         break;
       case 'weight':
-        if (data.weight === undefined || data.weight < 0 || data.weight === '') {
+        if (
+          data.weight === undefined ||
+          data.weight < 0 ||
+          data.weight === ''
+        ) {
           showInfo('权重必须是非负整数!');
           return;
         }
@@ -684,7 +742,11 @@ export const useChannelsData = () => {
     const res = await API.post(`/api/channel/fix`);
     const { success, message, data } = res.data;
     if (success) {
-      showSuccess(t('已修复 ${success} 个通道,失败 ${fails} 个通道。').replace('${success}', data.success).replace('${fails}', data.fails));
+      showSuccess(
+        t('已修复 ${success} 个通道,失败 ${fails} 个通道。')
+          .replace('${success}', data.success)
+          .replace('${fails}', data.fails),
+      );
       await refresh();
     } else {
       showError(message);
@@ -701,7 +763,7 @@ export const useChannelsData = () => {
     }
 
     // 添加到正在测试的模型集合
-    setTestingModels(prev => new Set([...prev, model]));
+    setTestingModels((prev) => new Set([...prev, model]));
 
     try {
       let url = `/api/channel/test/${record.id}?model=${model}`;
@@ -718,14 +780,14 @@ export const useChannelsData = () => {
       const { success, message, time } = res.data;
 
       // 更新测试结果
-      setModelTestResults(prev => ({
+      setModelTestResults((prev) => ({
         ...prev,
         [testKey]: {
           success,
           message,
           time: time || 0,
-          timestamp: Date.now()
-        }
+          timestamp: Date.now(),
+        },
       }));
 
       if (success) {
@@ -743,7 +805,9 @@ export const useChannelsData = () => {
           );
         } else {
           showInfo(
-            t('通道 ${name} 测试成功,模型 ${model} 耗时 ${time.toFixed(2)} 秒。')
+            t(
+              '通道 ${name} 测试成功,模型 ${model} 耗时 ${time.toFixed(2)} 秒。',
+            )
               .replace('${name}', record.name)
               .replace('${model}', model)
               .replace('${time.toFixed(2)}', time.toFixed(2)),
@@ -755,19 +819,19 @@ export const useChannelsData = () => {
     } catch (error) {
       // 处理网络错误
       const testKey = `${record.id}-${model}`;
-      setModelTestResults(prev => ({
+      setModelTestResults((prev) => ({
         ...prev,
         [testKey]: {
           success: false,
           message: error.message || t('网络错误'),
           time: 0,
-          timestamp: Date.now()
-        }
+          timestamp: Date.now(),
+        },
       }));
       showError(`${t('模型')} ${model}: ${error.message || t('测试失败')}`);
     } finally {
       // 从正在测试的模型集合中移除
-      setTestingModels(prev => {
+      setTestingModels((prev) => {
         const newSet = new Set(prev);
         newSet.delete(model);
         return newSet;
@@ -782,9 +846,11 @@ export const useChannelsData = () => {
       return;
     }
 
-    const models = currentTestChannel.models.split(',').filter(model =>
-      model.toLowerCase().includes(modelSearchKeyword.toLowerCase())
-    );
+    const models = currentTestChannel.models
+      .split(',')
+      .filter((model) =>
+        model.toLowerCase().includes(modelSearchKeyword.toLowerCase()),
+      );
 
     if (models.length === 0) {
       showError(t('没有找到匹配的模型'));
@@ -795,9 +861,9 @@ export const useChannelsData = () => {
     shouldStopBatchTestingRef.current = false; // 重置停止标志
 
     // 清空该渠道之前的测试结果
-    setModelTestResults(prev => {
+    setModelTestResults((prev) => {
       const newResults = { ...prev };
-      models.forEach(model => {
+      models.forEach((model) => {
         const testKey = `${currentTestChannel.id}-${model}`;
         delete newResults[testKey];
       });
@@ -805,7 +871,12 @@ export const useChannelsData = () => {
     });
 
     try {
-      showInfo(t('开始批量测试 ${count} 个模型,已清空上次结果...').replace('${count}', models.length));
+      showInfo(
+        t('开始批量测试 ${count} 个模型,已清空上次结果...').replace(
+          '${count}',
+          models.length,
+        ),
+      );
 
       // 提高并发数量以加快测试速度,参考旧版的并发限制
       const concurrencyLimit = 5;
@@ -819,13 +890,16 @@ export const useChannelsData = () => {
         }
 
         const batch = models.slice(i, i + concurrencyLimit);
-        showInfo(t('正在测试第 ${current} - ${end} 个模型 (共 ${total} 个)')
-          .replace('${current}', i + 1)
-          .replace('${end}', Math.min(i + concurrencyLimit, models.length))
-          .replace('${total}', models.length)
+        showInfo(
+          t('正在测试第 ${current} - ${end} 个模型 (共 ${total} 个)')
+            .replace('${current}', i + 1)
+            .replace('${end}', Math.min(i + concurrencyLimit, models.length))
+            .replace('${total}', models.length),
         );
 
-        const batchPromises = batch.map(model => testChannel(currentTestChannel, model, selectedEndpointType));
+        const batchPromises = batch.map((model) =>
+          testChannel(currentTestChannel, model, selectedEndpointType),
+        );
         const batchResults = await Promise.allSettled(batchPromises);
         results.push(...batchResults);
 
@@ -837,20 +911,20 @@ export const useChannelsData = () => {
 
         // 短暂延迟避免过于频繁的请求
         if (i + concurrencyLimit < models.length) {
-          await new Promise(resolve => setTimeout(resolve, 100));
+          await new Promise((resolve) => setTimeout(resolve, 100));
         }
       }
 
       if (!shouldStopBatchTestingRef.current) {
         // 等待一小段时间确保所有结果都已更新
-        await new Promise(resolve => setTimeout(resolve, 300));
+        await new Promise((resolve) => setTimeout(resolve, 300));
 
         // 使用当前状态重新计算结果统计
-        setModelTestResults(currentResults => {
+        setModelTestResults((currentResults) => {
           let successCount = 0;
           let failCount = 0;
 
-          models.forEach(model => {
+          models.forEach((model) => {
             const testKey = `${currentTestChannel.id}-${model}`;
             const result = currentResults[testKey];
             if (result && result.success) {
@@ -862,10 +936,11 @@ export const useChannelsData = () => {
 
           // 显示完成消息
           setTimeout(() => {
-            showSuccess(t('批量测试完成!成功: ${success}, 失败: ${fail}, 总计: ${total}')
-              .replace('${success}', successCount)
-              .replace('${fail}', failCount)
-              .replace('${total}', models.length)
+            showSuccess(
+              t('批量测试完成!成功: ${success}, 失败: ${fail}, 总计: ${total}')
+                .replace('${success}', successCount)
+                .replace('${fail}', failCount)
+                .replace('${total}', models.length),
             );
           }, 100);
 
@@ -1053,4 +1128,4 @@ export const useChannelsData = () => {
     setCompactMode,
     setActivePage,
   };
-}; 
+};

+ 25 - 0
web/src/hooks/model-pricing/useModelPricingData.jsx

@@ -64,6 +64,29 @@ export const useModelPricingData = () => {
     () => statusState?.status?.usd_exchange_rate ?? priceRate,
     [statusState, priceRate],
   );
+  const customExchangeRate = useMemo(
+    () => statusState?.status?.custom_currency_exchange_rate ?? 1,
+    [statusState],
+  );
+  const customCurrencySymbol = useMemo(
+    () => statusState?.status?.custom_currency_symbol ?? '¤',
+    [statusState],
+  );
+
+  // 默认货币与站点展示类型同步(USD/CNY),TOKENS 时仍允许切换视图内货币
+  const siteDisplayType = useMemo(
+    () => statusState?.status?.quota_display_type || 'USD',
+    [statusState],
+  );
+  useEffect(() => {
+    if (
+      siteDisplayType === 'USD' ||
+      siteDisplayType === 'CNY' ||
+      siteDisplayType === 'CUSTOM'
+    ) {
+      setCurrency(siteDisplayType);
+    }
+  }, [siteDisplayType]);
 
   const filteredModels = useMemo(() => {
     let result = models;
@@ -156,6 +179,8 @@ export const useModelPricingData = () => {
 
     if (currency === 'CNY') {
       return `¥${(priceInUSD * usdExchangeRate).toFixed(3)}`;
+    } else if (currency === 'CUSTOM') {
+      return `${customCurrencySymbol}${(priceInUSD * customExchangeRate).toFixed(3)}`;
     }
     return `$${priceInUSD.toFixed(3)}`;
   };

+ 4 - 1
web/src/i18n/locales/en.json

@@ -1810,7 +1810,10 @@
   "自定义模型名称": "Custom model name",
   "启用全部密钥": "Enable all keys",
   "充值价格显示": "Recharge price",
-  "美元汇率(非充值汇率,仅用于定价页面换算)": "USD exchange rate (not recharge rate, only used for pricing page conversion)",
+  "自定义货币": "Custom currency",
+  "自定义货币符号": "Custom currency symbol",
+  "例如 €, £, Rp, ₩, ₹...": "For example, €, £, Rp, ₩, ₹...",
+  "站点额度展示类型及汇率": "Site quota display type and exchange rate",
   "美元汇率": "USD exchange rate",
   "隐藏操作项": "Hide actions",
   "显示操作项": "Show actions",

+ 5 - 2
web/src/i18n/locales/fr.json

@@ -1806,7 +1806,10 @@
   "自定义模型名称": "Nom de modèle personnalisé",
   "启用全部密钥": "Activer toutes les clés",
   "充值价格显示": "Prix de recharge",
-  "美元汇率(非充值汇率,仅用于定价页面换算)": "Taux de change USD (pas de taux de recharge, uniquement utilisé pour la conversion de la page de tarification)",
+  "站点额度展示类型及汇率": "Type d'affichage du quota du site et taux de change",
+  "自定义货币": "Devise personnalisée",
+  "自定义货币符号": "Symbole de devise personnalisé",
+  "例如 €, £, Rp, ₩, ₹...": "Par exemple, €, £, Rp, ₩, ₹...",
   "美元汇率": "Taux de change USD",
   "隐藏操作项": "Masquer les actions",
   "显示操作项": "Afficher les actions",
@@ -2236,4 +2239,4 @@
   "重置 2FA": "Réinitialiser 2FA",
   "重置 Passkey": "Réinitialiser le Passkey",
   "默认使用系统名称": "Le nom du système est utilisé par défaut"
-}
+}

+ 105 - 38
web/src/pages/Setting/Operation/SettingsGeneral.jsx

@@ -17,8 +17,19 @@ along with this program. If not, see <https://www.gnu.org/licenses/>.
 For commercial licensing, please contact [email protected]
 */
 
-import React, { useEffect, useState, useRef } from 'react';
-import { Banner, Button, Col, Form, Row, Spin, Modal } from '@douyinfe/semi-ui';
+import React, { useEffect, useState, useRef, useMemo } from 'react';
+import {
+  Banner,
+  Button,
+  Col,
+  Form,
+  Row,
+  Spin,
+  Modal,
+  Select,
+  InputGroup,
+  Input,
+} from '@douyinfe/semi-ui';
 import {
   compareObjects,
   API,
@@ -35,10 +46,12 @@ export default function GeneralSettings(props) {
   const [inputs, setInputs] = useState({
     TopUpLink: '',
     'general_setting.docs_link': '',
+    'general_setting.quota_display_type': 'USD',
+    'general_setting.custom_currency_symbol': '¤',
+    'general_setting.custom_currency_exchange_rate': '',
     QuotaPerUnit: '',
     RetryTimes: '',
     USDExchangeRate: '',
-    DisplayInCurrencyEnabled: false,
     DisplayTokenStatEnabled: false,
     DefaultCollapseSidebar: false,
     DemoSiteEnabled: false,
@@ -88,6 +101,30 @@ export default function GeneralSettings(props) {
       });
   }
 
+  // 计算展示在输入框中的“1 USD = X <currency>”中的 X
+  const combinedRate = useMemo(() => {
+    const type = inputs['general_setting.quota_display_type'];
+    if (type === 'USD') return '1';
+    if (type === 'CNY') return String(inputs['USDExchangeRate'] || '');
+    if (type === 'TOKENS') return String(inputs['QuotaPerUnit'] || '');
+    if (type === 'CUSTOM')
+      return String(
+        inputs['general_setting.custom_currency_exchange_rate'] || '',
+      );
+    return '';
+  }, [inputs]);
+
+  const onCombinedRateChange = (val) => {
+    const type = inputs['general_setting.quota_display_type'];
+    if (type === 'CNY') {
+      handleFieldChange('USDExchangeRate')(val);
+    } else if (type === 'TOKENS') {
+      handleFieldChange('QuotaPerUnit')(val);
+    } else if (type === 'CUSTOM') {
+      handleFieldChange('general_setting.custom_currency_exchange_rate')(val);
+    }
+  };
+
   useEffect(() => {
     const currentInputs = {};
     for (let key in props.options) {
@@ -95,6 +132,28 @@ export default function GeneralSettings(props) {
         currentInputs[key] = props.options[key];
       }
     }
+    // 若旧字段存在且新字段缺失,则做一次兜底映射
+    if (
+      currentInputs['general_setting.quota_display_type'] === undefined &&
+      props.options?.DisplayInCurrencyEnabled !== undefined
+    ) {
+      currentInputs['general_setting.quota_display_type'] = props.options
+        .DisplayInCurrencyEnabled
+        ? 'USD'
+        : 'TOKENS';
+    }
+    // 回填自定义货币相关字段(如果后端已存在)
+    if (props.options['general_setting.custom_currency_symbol'] !== undefined) {
+      currentInputs['general_setting.custom_currency_symbol'] =
+        props.options['general_setting.custom_currency_symbol'];
+    }
+    if (
+      props.options['general_setting.custom_currency_exchange_rate'] !==
+      undefined
+    ) {
+      currentInputs['general_setting.custom_currency_exchange_rate'] =
+        props.options['general_setting.custom_currency_exchange_rate'];
+    }
     setInputs(currentInputs);
     setInputsRow(structuredClone(currentInputs));
     refForm.current.setValues(currentInputs);
@@ -130,30 +189,7 @@ export default function GeneralSettings(props) {
                   showClear
                 />
               </Col>
-              {inputs.QuotaPerUnit !== '500000' &&
-                inputs.QuotaPerUnit !== 500000 && (
-                  <Col xs={24} sm={12} md={8} lg={8} xl={8}>
-                    <Form.Input
-                      field={'QuotaPerUnit'}
-                      label={t('单位美元额度')}
-                      initValue={''}
-                      placeholder={t('一单位货币能兑换的额度')}
-                      onChange={handleFieldChange('QuotaPerUnit')}
-                      showClear
-                      onClick={() => setShowQuotaWarning(true)}
-                    />
-                  </Col>
-                )}
-              <Col xs={24} sm={12} md={8} lg={8} xl={8}>
-                <Form.Input
-                  field={'USDExchangeRate'}
-                  label={t('美元汇率(非充值汇率,仅用于定价页面换算)')}
-                  initValue={''}
-                  placeholder={t('美元汇率')}
-                  onChange={handleFieldChange('USDExchangeRate')}
-                  showClear
-                />
-              </Col>
+              {/* 单位美元额度已合入汇率组合控件(TOKENS 模式下编辑),不再单独展示 */}
               <Col xs={24} sm={12} md={8} lg={8} xl={8}>
                 <Form.Input
                   field={'RetryTimes'}
@@ -164,18 +200,51 @@ export default function GeneralSettings(props) {
                   showClear
                 />
               </Col>
-            </Row>
-            <Row gutter={16}>
               <Col xs={24} sm={12} md={8} lg={8} xl={8}>
-                <Form.Switch
-                  field={'DisplayInCurrencyEnabled'}
-                  label={t('以货币形式显示额度')}
-                  size='default'
-                  checkedText='|'
-                  uncheckedText='〇'
-                  onChange={handleFieldChange('DisplayInCurrencyEnabled')}
+                <Form.Slot label={t('站点额度展示类型及汇率')}>
+                  <InputGroup style={{ width: '100%' }}>
+                    <Input
+                      prefix={'1 USD = '}
+                      style={{ width: '50%' }}
+                      value={combinedRate}
+                      onChange={onCombinedRateChange}
+                      disabled={
+                        inputs['general_setting.quota_display_type'] === 'USD'
+                      }
+                    />
+                    <Select
+                      style={{ width: '50%' }}
+                      value={inputs['general_setting.quota_display_type']}
+                      onChange={handleFieldChange(
+                        'general_setting.quota_display_type',
+                      )}
+                    >
+                      <Select.Option value='USD'>USD ($)</Select.Option>
+                      <Select.Option value='CNY'>CNY (¥)</Select.Option>
+                      <Select.Option value='TOKENS'>Tokens</Select.Option>
+                      <Select.Option value='CUSTOM'>
+                        {t('自定义货币')}
+                      </Select.Option>
+                    </Select>
+                  </InputGroup>
+                </Form.Slot>
+              </Col>
+              <Col xs={24} sm={12} md={8} lg={8} xl={8}>
+                <Form.Input
+                  field={'general_setting.custom_currency_symbol'}
+                  label={t('自定义货币符号')}
+                  placeholder={t('例如 €, £, Rp, ₩, ₹...')}
+                  onChange={handleFieldChange(
+                    'general_setting.custom_currency_symbol',
+                  )}
+                  showClear
+                  disabled={
+                    inputs['general_setting.quota_display_type'] !== 'CUSTOM'
+                  }
                 />
               </Col>
+            </Row>
+            <Row gutter={16}>
               <Col xs={24} sm={12} md={8} lg={8} xl={8}>
                 <Form.Switch
                   field={'DisplayTokenStatEnabled'}
@@ -196,8 +265,6 @@ export default function GeneralSettings(props) {
                   onChange={handleFieldChange('DefaultCollapseSidebar')}
                 />
               </Col>
-            </Row>
-            <Row gutter={16}>
               <Col xs={24} sm={12} md={8} lg={8} xl={8}>
                 <Form.Switch
                   field={'DemoSiteEnabled'}