@@ -66,15 +66,16 @@ export function convertToOpenAiMessages(
// "Messages following `tool_use` blocks must begin with a matching number of `tool_result` blocks."
// Therefore we need to send these images after the tool result messages
// NOTE: it's actually okay to have multiple user messages in a row; the model will treat them as a continuation of the same input. This works better than combining them into one message, since the tool result specifically mentions "(see following user message for image)".
- if (toolResultImages.length > 0) {
- 	openAiMessages.push({
- 		role: "user",
- 		content: toolResultImages.map((part) => ({
- 			type: "image_url",
- 			image_url: { url: `data:${part.source.media_type};base64,${part.source.data}` },
- 		})),
- 	})
- }
+ // UPDATE v2.0: we don't use tools anymore, but if we did, it's important to note that the OpenRouter prompt caching mechanism requires one user message at a time, so we would need to add these images to the user content array instead.
+ // if (toolResultImages.length > 0) {
+ // 	openAiMessages.push({
+ // 		role: "user",
+ // 		content: toolResultImages.map((part) => ({
+ // 			type: "image_url",
+ // 			image_url: { url: `data:${part.source.media_type};base64,${part.source.data}` },
+ // 		})),
+ // 	})
+ // }
// Process non-tool messages
if (nonToolMessages.length > 0) {
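
A minimal sketch of the alternative the UPDATE comment describes: since the OpenRouter prompt caching mechanism expects a single user message at a time, the tool-result images would be appended to that message's content array instead of being pushed as a separate user message. The helper name `appendToolResultImages` and the base64 image-block shape are assumptions for illustration; the content-part type is from the `openai` npm package.

```typescript
import OpenAI from "openai"

// Assumed shape of the Anthropic-style base64 image blocks in `toolResultImages`.
interface Base64ImageBlock {
	type: "image"
	source: { type: "base64"; media_type: string; data: string }
}

// Hypothetical helper: convert each image block to an OpenAI image_url part and
// attach it to the single user message's content array, rather than pushing a
// new { role: "user" } message as the removed block above did.
function appendToolResultImages(
	userContent: OpenAI.Chat.Completions.ChatCompletionContentPart[],
	toolResultImages: Base64ImageBlock[],
): void {
	for (const part of toolResultImages) {
		userContent.push({
			type: "image_url",
			image_url: { url: `data:${part.source.media_type};base64,${part.source.data}` },
		})
	}
}
```

Under this assumption, the helper would be called while assembling the user message's content parts, keeping everything in one user message so prompt caching is not broken.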