Jelajahi Sumber

fix: response format

CalciumIon 1 tahun lalu
induk
commit
2e3c266bd6
2 mengubah file dengan 37 tambahan dan 37 penghapusan
  1. 25 25
      dto/text_request.go
  2. 12 12
      relay/channel/ollama/dto.go

+ 25 - 25
dto/text_request.go

@@ -7,31 +7,31 @@ type ResponseFormat struct {
 }
 
 type GeneralOpenAIRequest struct {
-	Model            string          `json:"model,omitempty"`
-	Messages         []Message       `json:"messages,omitempty"`
-	Prompt           any             `json:"prompt,omitempty"`
-	Stream           bool            `json:"stream,omitempty"`
-	StreamOptions    *StreamOptions  `json:"stream_options,omitempty"`
-	MaxTokens        uint            `json:"max_tokens,omitempty"`
-	Temperature      float64         `json:"temperature,omitempty"`
-	TopP             float64         `json:"top_p,omitempty"`
-	TopK             int             `json:"top_k,omitempty"`
-	Stop             any             `json:"stop,omitempty"`
-	N                int             `json:"n,omitempty"`
-	Input            any             `json:"input,omitempty"`
-	Instruction      string          `json:"instruction,omitempty"`
-	Size             string          `json:"size,omitempty"`
-	Functions        any             `json:"functions,omitempty"`
-	FrequencyPenalty float64         `json:"frequency_penalty,omitempty"`
-	PresencePenalty  float64         `json:"presence_penalty,omitempty"`
-	ResponseFormat   *ResponseFormat `json:"response_format,omitempty"`
-	Seed             float64         `json:"seed,omitempty"`
-	Tools            []ToolCall      `json:"tools,omitempty"`
-	ToolChoice       any             `json:"tool_choice,omitempty"`
-	User             string          `json:"user,omitempty"`
-	LogProbs         bool            `json:"logprobs,omitempty"`
-	TopLogProbs      int             `json:"top_logprobs,omitempty"`
-	Dimensions       int             `json:"dimensions,omitempty"`
+	Model            string         `json:"model,omitempty"`
+	Messages         []Message      `json:"messages,omitempty"`
+	Prompt           any            `json:"prompt,omitempty"`
+	Stream           bool           `json:"stream,omitempty"`
+	StreamOptions    *StreamOptions `json:"stream_options,omitempty"`
+	MaxTokens        uint           `json:"max_tokens,omitempty"`
+	Temperature      float64        `json:"temperature,omitempty"`
+	TopP             float64        `json:"top_p,omitempty"`
+	TopK             int            `json:"top_k,omitempty"`
+	Stop             any            `json:"stop,omitempty"`
+	N                int            `json:"n,omitempty"`
+	Input            any            `json:"input,omitempty"`
+	Instruction      string         `json:"instruction,omitempty"`
+	Size             string         `json:"size,omitempty"`
+	Functions        any            `json:"functions,omitempty"`
+	FrequencyPenalty float64        `json:"frequency_penalty,omitempty"`
+	PresencePenalty  float64        `json:"presence_penalty,omitempty"`
+	ResponseFormat   any            `json:"response_format,omitempty"`
+	Seed             float64        `json:"seed,omitempty"`
+	Tools            []ToolCall     `json:"tools,omitempty"`
+	ToolChoice       any            `json:"tool_choice,omitempty"`
+	User             string         `json:"user,omitempty"`
+	LogProbs         bool           `json:"logprobs,omitempty"`
+	TopLogProbs      int            `json:"top_logprobs,omitempty"`
+	Dimensions       int            `json:"dimensions,omitempty"`
 }
 
 type OpenAITools struct {

+ 12 - 12
relay/channel/ollama/dto.go

@@ -3,18 +3,18 @@ package ollama
 import "one-api/dto"
 
 type OllamaRequest struct {
-	Model            string              `json:"model,omitempty"`
-	Messages         []dto.Message       `json:"messages,omitempty"`
-	Stream           bool                `json:"stream,omitempty"`
-	Temperature      float64             `json:"temperature,omitempty"`
-	Seed             float64             `json:"seed,omitempty"`
-	Topp             float64             `json:"top_p,omitempty"`
-	TopK             int                 `json:"top_k,omitempty"`
-	Stop             any                 `json:"stop,omitempty"`
-	Tools            []dto.ToolCall      `json:"tools,omitempty"`
-	ResponseFormat   *dto.ResponseFormat `json:"response_format,omitempty"`
-	FrequencyPenalty float64             `json:"frequency_penalty,omitempty"`
-	PresencePenalty  float64             `json:"presence_penalty,omitempty"`
+	Model            string         `json:"model,omitempty"`
+	Messages         []dto.Message  `json:"messages,omitempty"`
+	Stream           bool           `json:"stream,omitempty"`
+	Temperature      float64        `json:"temperature,omitempty"`
+	Seed             float64        `json:"seed,omitempty"`
+	Topp             float64        `json:"top_p,omitempty"`
+	TopK             int            `json:"top_k,omitempty"`
+	Stop             any            `json:"stop,omitempty"`
+	Tools            []dto.ToolCall `json:"tools,omitempty"`
+	ResponseFormat   any            `json:"response_format,omitempty"`
+	FrequencyPenalty float64        `json:"frequency_penalty,omitempty"`
+	PresencePenalty  float64        `json:"presence_penalty,omitempty"`
 }
 
 type OllamaEmbeddingRequest struct {