
fix: extract raw error message from OpenRouter metadata (#10039)

OpenRouter wraps upstream provider errors in a generic message but includes
the actual error in metadata.raw. This change:

- Adds OpenRouterErrorResponse interface for proper typing
- Creates handleStreamingError() helper for DRY error handling
- Extracts metadata.raw for actionable error messages in PostHog
- Includes the nested error structure so getErrorMessage() can extract the raw message (see the sketch after the diff)

Before: PostHog receives '400 Provider returned error' (generic)
After: PostHog receives 'Model xyz not found' (actionable)

This enables proper error tracking and debugging via PostHog telemetry.
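For illustration, here is a minimal standalone sketch of the extraction this commit performs. The payload values are hypothetical and only mirror the Before/After example above; the interface shape matches the OpenRouterErrorResponse added in the diff below.

// Shape of the error object OpenRouter returns in place of a normal chunk/response
// (mirrors the OpenRouterErrorResponse interface introduced by this commit).
interface OpenRouterErrorResponse {
	message?: string
	code?: number
	metadata?: { raw?: string }
}

// Hypothetical payload: generic wrapper message plus the real upstream error in metadata.raw.
const error: OpenRouterErrorResponse = {
	message: "Provider returned error",
	code: 400,
	metadata: { raw: "Model xyz not found" },
}

// Same fallback the new handleStreamingError() helper uses:
// prefer the raw upstream message, fall back to the generic wrapper.
const rawErrorMessage = error?.metadata?.raw || error?.message
console.log(rawErrorMessage) // -> "Model xyz not found"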
Daniel committed 1 month ago · parent commit 23a214c519
1 changed file with 38 additions and 52 deletions:

src/api/providers/openrouter.ts (+38, -52)

@@ -42,6 +42,13 @@ type OpenRouterChatCompletionParams = OpenAI.Chat.ChatCompletionCreateParams & {
 	reasoning?: OpenRouterReasoningParams
 }
 
+// OpenRouter error structure that may include metadata.raw with actual upstream error
+interface OpenRouterErrorResponse {
+	message?: string
+	code?: number
+	metadata?: { raw?: string }
+}
+
 // See `OpenAI.Chat.Completions.ChatCompletionChunk["usage"]`
 // `CompletionsAPI.CompletionUsage`
 // See also: https://openrouter.ai/docs/use-cases/usage-accounting
@@ -109,6 +116,29 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH
 		return this.currentReasoningDetails.length > 0 ? this.currentReasoningDetails : undefined
 	}
 
+	/**
+	 * Handle OpenRouter streaming error response and report to telemetry.
+	 * OpenRouter may include metadata.raw with the actual upstream provider error.
+	 */
+	private handleStreamingError(error: OpenRouterErrorResponse, modelId: string, operation: string): never {
+		const rawErrorMessage = error?.metadata?.raw || error?.message
+
+		const apiError = Object.assign(
+			new ApiProviderError(
+				rawErrorMessage ?? "Unknown error",
+				this.providerName,
+				modelId,
+				operation,
+				error?.code,
+			),
+			{ status: error?.code, error: { message: error?.message, metadata: error?.metadata } },
+		)
+
+		TelemetryService.instance.captureException(apiError)
+
+		throw new Error(`OpenRouter API Error ${error?.code}: ${rawErrorMessage}`)
+	}
+
 	override async *createMessage(
 		systemPrompt: string,
 		messages: Anthropic.Messages.MessageParam[],
@@ -226,15 +256,9 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH
 		try {
 			stream = await this.client.chat.completions.create(completionParams, requestOptions)
 		} catch (error) {
-			TelemetryService.instance.captureException(
-				new ApiProviderError(
-					error instanceof Error ? error.message : String(error),
-					this.providerName,
-					modelId,
-					"createMessage",
-				),
-			)
-
+			const errorMessage = error instanceof Error ? error.message : String(error)
+			const apiError = new ApiProviderError(errorMessage, this.providerName, modelId, "createMessage")
+			TelemetryService.instance.captureException(apiError)
 			throw handleOpenAIError(error, this.providerName)
 		}
 
@@ -257,23 +281,7 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH
 		for await (const chunk of stream) {
 			// OpenRouter returns an error object instead of the OpenAI SDK throwing an error.
 			if ("error" in chunk) {
-				const error = chunk.error as { message?: string; code?: number }
-				console.error(`OpenRouter API Error: ${error?.code} - ${error?.message}`)
-
-				TelemetryService.instance.captureException(
-					Object.assign(
-						new ApiProviderError(
-							error?.message ?? "Unknown error",
-							this.providerName,
-							modelId,
-							"createMessage",
-							error?.code,
-						),
-						{ status: error?.code },
-					),
-				)
-
-				throw new Error(`OpenRouter API Error ${error?.code}: ${error?.message}`)
+				this.handleStreamingError(chunk.error as OpenRouterErrorResponse, modelId, "createMessage")
 			}
 
 			const delta = chunk.choices[0]?.delta
@@ -468,36 +476,14 @@ export class OpenRouterHandler extends BaseProvider implements SingleCompletionH
 		try {
 			response = await this.client.chat.completions.create(completionParams, requestOptions)
 		} catch (error) {
-			TelemetryService.instance.captureException(
-				new ApiProviderError(
-					error instanceof Error ? error.message : String(error),
-					this.providerName,
-					modelId,
-					"completePrompt",
-				),
-			)
-
+			const errorMessage = error instanceof Error ? error.message : String(error)
+			const apiError = new ApiProviderError(errorMessage, this.providerName, modelId, "completePrompt")
+			TelemetryService.instance.captureException(apiError)
 			throw handleOpenAIError(error, this.providerName)
 		}
 
 		if ("error" in response) {
-			const error = response.error as { message?: string; code?: number }
-			console.error(`OpenRouter API Error: ${error?.code} - ${error?.message}`)
-
-			TelemetryService.instance.captureException(
-				Object.assign(
-					new ApiProviderError(
-						error?.message ?? "Unknown error",
-						this.providerName,
-						modelId,
-						"completePrompt",
-						error?.code,
-					),
-					{ status: error?.code },
-				),
-			)
-
-			throw new Error(`OpenRouter API Error ${error?.code}: ${error?.message}`)
+			this.handleStreamingError(response.error as OpenRouterErrorResponse, modelId, "completePrompt")
 		}
 
 		const completion = response as OpenAI.Chat.ChatCompletion
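As a rough sketch only: the diff attaches { status, error: { message, metadata } } onto the ApiProviderError via Object.assign, and the commit message states that getErrorMessage() can then extract the raw message. The actual getErrorMessage() implementation is not part of this diff, so the helper below is an assumption about how that nested structure could be read, not the repo's code.

// Hypothetical reader for the nested structure handleStreamingError() attaches.
// getErrorMessage() exists elsewhere in the repo; this only illustrates the lookup order.
function getErrorMessage(err: unknown): string {
	const e = err as { message?: string; error?: { message?: string; metadata?: { raw?: string } } }
	return e.error?.metadata?.raw ?? e.error?.message ?? e.message ?? "Unknown error"
}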