|
|
@@ -114,11 +114,21 @@ export class OpenRouterHandler implements ApiHandler, SingleCompletionHandler {
|
|
|
}
|
|
|
|
|
|
let temperature = 0
|
|
|
- if (this.getModel().id === "deepseek/deepseek-r1" || this.getModel().id.startsWith("deepseek/deepseek-r1:")) {
|
|
|
+ let topP: number | undefined = undefined
|
|
|
+
|
|
|
+ // Handle reasoning models (deepseek-r1 variants and perplexity/sonar-reasoning)
|
|
|
+ if (
|
|
|
+ this.getModel().id === "deepseek/deepseek-r1" ||
|
|
|
+ this.getModel().id.startsWith("deepseek/deepseek-r1:") ||
|
|
|
+ this.getModel().id === "perplexity/sonar-reasoning"
|
|
|
+ ) {
|
|
|
// Recommended temperature for DeepSeek reasoning models
|
|
|
temperature = 0.6
|
|
|
- // DeepSeek highly recommends using user instead of system role
|
|
|
+ // DeepSeek highly recommends using user instead of system
|
|
|
+ // role
|
|
|
openAiMessages = convertToR1Format([{ role: "user", content: systemPrompt }, ...messages])
|
|
|
+ // Some providers support topP, and 0.95 is the value that DeepSeek used in their benchmarks
|
|
|
+ topP = 0.95
|
|
|
}
|
|
|
|
|
|
// https://openrouter.ai/docs/transforms
|
|
|
@@ -127,6 +137,7 @@ export class OpenRouterHandler implements ApiHandler, SingleCompletionHandler {
|
|
|
model: this.getModel().id,
|
|
|
max_tokens: maxTokens,
|
|
|
temperature: temperature,
|
|
|
+ top_p: topP,
|
|
|
messages: openAiMessages,
|
|
|
stream: true,
|
|
|
include_reasoning: true,
|