Saoud Rizwan 1 year ago
parent
commit
90b0c8243c
1 changed file with 4 additions and 1 deletion
  1. +4 −1
      src/api/providers/openai-native.ts
      src/api/providers/openai-native.ts

+ 4 - 1
src/api/providers/openai-native.ts

@@ -24,13 +24,16 @@ export class OpenAiNativeHandler implements ApiHandler {
 
 
 	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
 		let systemPromptMessage: OpenAI.Chat.ChatCompletionMessageParam
+		let temperature = 0
 		switch (this.getModel().id) {
 			case "o1-preview":
 			case "o1-mini":
 				systemPromptMessage = { role: "user", content: systemPrompt }
+				temperature = 1
 				break
 			default:
 				systemPromptMessage = { role: "system", content: systemPrompt }
+				temperature = 0
 		}

 		const openAiMessages: OpenAI.Chat.ChatCompletionMessageParam[] = [
@@ -41,7 +44,7 @@ export class OpenAiNativeHandler implements ApiHandler {
 		const stream = await this.client.chat.completions.create({
 			model: this.getModel().id,
 			// max_completion_tokens: this.getModel().info.maxTokens,
-			temperature: 0,
+			temperature,
 			messages: openAiMessages,
 			stream: true,
 			stream_options: { include_usage: true },