@@ -126,7 +126,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 			}
 		}
 
-		convertedMessages = [systemMessage, ...convertToOpenAiMessages(messages)]
+		convertedMessages = [systemMessage, ...convertToOpenAiMessages(messages, { mergeToolResultText: true })]
 
 		if (modelInfo.supportsPromptCache) {
 			// Note: the following logic is copied from openrouter:
@@ -234,7 +234,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 					? convertToR1Format([{ role: "user", content: systemPrompt }, ...messages])
 					: enabledLegacyFormat
 						? [systemMessage, ...convertToSimpleMessages(messages)]
-						: [systemMessage, ...convertToOpenAiMessages(messages)],
+						: [systemMessage, ...convertToOpenAiMessages(messages, { mergeToolResultText: true })],
 			...(metadata?.tools && { tools: this.convertToolsForOpenAI(metadata.tools) }),
 			...(metadata?.tool_choice && { tool_choice: metadata.tool_choice }),
 			...(metadata?.toolProtocol === "native" && {
@@ -349,7 +349,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 					role: "developer",
 					content: `Formatting re-enabled\n${systemPrompt}`,
 				},
-				...convertToOpenAiMessages(messages),
+				...convertToOpenAiMessages(messages, { mergeToolResultText: true }),
 			],
 			stream: true,
 			...(isGrokXAI ? {} : { stream_options: { include_usage: true } }),
@@ -386,7 +386,7 @@ export class OpenAiHandler extends BaseProvider implements SingleCompletionHandl
 					role: "developer",
 					content: `Formatting re-enabled\n${systemPrompt}`,
 				},
-				...convertToOpenAiMessages(messages),
+				...convertToOpenAiMessages(messages, { mergeToolResultText: true }),
 			],
 			reasoning_effort: modelInfo.reasoningEffort as "low" | "medium" | "high" | undefined,
 			temperature: undefined,
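
Every hunk above makes the same change: `convertToOpenAiMessages` now receives a `{ mergeToolResultText: true }` option. The diff does not show the converter itself, so the sketch below is only one plausible reading of that flag, assuming it collapses the text blocks of a `tool_result` into a single string before the OpenAI-format "tool" message is emitted. The types and the helper name are illustrative assumptions, not the actual implementation.

```typescript
// Hypothetical sketch of what { mergeToolResultText: true } is assumed to do.
// Types and helper name are illustrative, not the real converter code.

type ToolResultTextBlock = { type: "text"; text: string }
type ToolResultBlock = ToolResultTextBlock | { type: "image"; [key: string]: unknown }

// Collapse the text blocks of a tool result into one string so the converted
// OpenAI "tool" message can carry plain string content instead of content parts.
function mergeToolResultTextBlocks(content: string | ToolResultBlock[]): string {
	if (typeof content === "string") {
		return content
	}
	return content
		.filter((block): block is ToolResultTextBlock => block.type === "text")
		.map((block) => block.text)
		.join("\n")
}

// Example:
// mergeToolResultTextBlocks([{ type: "text", text: "exit code 0" }, { type: "text", text: "build ok" }])
// → "exit code 0\nbuild ok"
```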