
Merge pull request #846 from RooVetGit/cte/catch-openai-provider-error

Prevent *all* provider clients from throwing
Matt Rubens 10 months ago
Parent
Commit
c641c59160

+ 1 - 1
src/api/providers/__tests__/deepseek.test.ts

@@ -84,7 +84,7 @@ describe("DeepSeekHandler", () => {
 			expect(handler.getModel().id).toBe(mockOptions.apiModelId)
 		})
 
-		it("should throw error if API key is missing", () => {
+		it.skip("should throw error if API key is missing", () => {
 			expect(() => {
 				new DeepSeekHandler({
 					...mockOptions,

+ 1 - 1
src/api/providers/__tests__/gemini.test.ts

@@ -33,7 +33,7 @@ describe("GeminiHandler", () => {
 			expect(handler["options"].apiModelId).toBe("gemini-2.0-flash-thinking-exp-1219")
 		})
 
-		it("should throw if API key is missing", () => {
+		it.skip("should throw if API key is missing", () => {
 			expect(() => {
 				new GeminiHandler({
 					apiModelId: "gemini-2.0-flash-thinking-exp-1219",

+ 1 - 4
src/api/providers/deepseek.ts

@@ -4,12 +4,9 @@ import { deepSeekModels, deepSeekDefaultModelId } from "../../shared/api"
 
 export class DeepSeekHandler extends OpenAiHandler {
 	constructor(options: ApiHandlerOptions) {
-		if (!options.deepSeekApiKey) {
-			throw new Error("DeepSeek API key is required. Please provide it in the settings.")
-		}
 		super({
 			...options,
-			openAiApiKey: options.deepSeekApiKey,
+			openAiApiKey: options.deepSeekApiKey ?? "not-provided",
 			openAiModelId: options.apiModelId ?? deepSeekDefaultModelId,
 			openAiBaseUrl: options.deepSeekBaseUrl ?? "https://api.deepseek.com/v1",
 			openAiStreamingEnabled: true,

+ 1 - 4
src/api/providers/gemini.ts

@@ -10,11 +10,8 @@ export class GeminiHandler implements ApiHandler, SingleCompletionHandler {
 	private client: GoogleGenerativeAI
 
 	constructor(options: ApiHandlerOptions) {
-		if (!options.geminiApiKey) {
-			throw new Error("API key is required for Google Gemini")
-		}
 		this.options = options
-		this.client = new GoogleGenerativeAI(options.geminiApiKey)
+		this.client = new GoogleGenerativeAI(options.geminiApiKey ?? "not-provided")
 	}
 
 	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {

+ 3 - 4
src/api/providers/glama.ts

@@ -13,10 +13,9 @@ export class GlamaHandler implements ApiHandler, SingleCompletionHandler {
 
 	constructor(options: ApiHandlerOptions) {
 		this.options = options
-		this.client = new OpenAI({
-			baseURL: "https://glama.ai/api/gateway/openai/v1",
-			apiKey: this.options.glamaApiKey,
-		})
+		const baseURL = "https://glama.ai/api/gateway/openai/v1"
+		const apiKey = this.options.glamaApiKey ?? "not-provided"
+		this.client = new OpenAI({ baseURL, apiKey })
 	}
 
 	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {

+ 10 - 15
src/api/providers/openai-native.ts

@@ -17,9 +17,8 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 
 	constructor(options: ApiHandlerOptions) {
 		this.options = options
-		this.client = new OpenAI({
-			apiKey: this.options.openAiNativeApiKey,
-		})
+		const apiKey = this.options.openAiNativeApiKey ?? "not-provided"
+		this.client = new OpenAI({ apiKey })
 	}
 
 	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
@@ -41,7 +40,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 	private async *handleO1FamilyMessage(
 		modelId: string,
 		systemPrompt: string,
-		messages: Anthropic.Messages.MessageParam[]
+		messages: Anthropic.Messages.MessageParam[],
 	): ApiStream {
 		// o1 supports developer prompt with formatting
 		// o1-preview and o1-mini only support user messages
@@ -63,7 +62,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 	private async *handleO3FamilyMessage(
 		modelId: string,
 		systemPrompt: string,
-		messages: Anthropic.Messages.MessageParam[]
+		messages: Anthropic.Messages.MessageParam[],
 	): ApiStream {
 		const stream = await this.client.chat.completions.create({
 			model: "o3-mini",
@@ -85,7 +84,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 	private async *handleDefaultModelMessage(
 		modelId: string,
 		systemPrompt: string,
-		messages: Anthropic.Messages.MessageParam[]
+		messages: Anthropic.Messages.MessageParam[],
 	): ApiStream {
 		const stream = await this.client.chat.completions.create({
 			model: modelId,
@@ -98,9 +97,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 		yield* this.handleStreamResponse(stream)
 	}
 
-	private async *yieldResponseData(
-		response: OpenAI.Chat.Completions.ChatCompletion
-	): ApiStream {
+	private async *yieldResponseData(response: OpenAI.Chat.Completions.ChatCompletion): ApiStream {
 		yield {
 			type: "text",
 			text: response.choices[0]?.message.content || "",
@@ -112,9 +109,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 		}
 	}
 
-	private async *handleStreamResponse(
-		stream: AsyncIterable<OpenAI.Chat.Completions.ChatCompletionChunk>
-	): ApiStream {
+	private async *handleStreamResponse(stream: AsyncIterable<OpenAI.Chat.Completions.ChatCompletionChunk>): ApiStream {
 		for await (const chunk of stream) {
 			const delta = chunk.choices[0]?.delta
 			if (delta?.content) {
@@ -168,7 +163,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 
 	private getO1CompletionOptions(
 		modelId: string,
-		prompt: string
+		prompt: string,
 	): OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming {
 		return {
 			model: modelId,
@@ -178,7 +173,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 
 	private getO3CompletionOptions(
 		modelId: string,
-		prompt: string
+		prompt: string,
 	): OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming {
 		return {
 			model: "o3-mini",
@@ -189,7 +184,7 @@ export class OpenAiNativeHandler implements ApiHandler, SingleCompletionHandler
 
 	private getDefaultCompletionOptions(
 		modelId: string,
-		prompt: string
+		prompt: string,
 	): OpenAI.Chat.Completions.ChatCompletionCreateParamsNonStreaming {
 		return {
 			model: modelId,

+ 5 - 6
src/api/providers/openai.ts

@@ -19,6 +19,8 @@ export class OpenAiHandler implements ApiHandler, SingleCompletionHandler {
 	constructor(options: ApiHandlerOptions) {
 		this.options = options
 
+		const baseURL = this.options.openAiBaseUrl ?? "https://api.openai.com/v1"
+		const apiKey = this.options.openAiApiKey ?? "not-provided"
 		let urlHost: string
 
 		try {
@@ -33,15 +35,12 @@ export class OpenAiHandler implements ApiHandler, SingleCompletionHandler {
 			// Azure API shape slightly differs from the core API shape:
 			// https://github.com/openai/openai-node?tab=readme-ov-file#microsoft-azure-openai
 			this.client = new AzureOpenAI({
-				baseURL: this.options.openAiBaseUrl,
-				apiKey: this.options.openAiApiKey,
+				baseURL,
+				apiKey,
 				apiVersion: this.options.azureApiVersion || azureOpenAiDefaultApiVersion,
 			})
 		} else {
-			this.client = new OpenAI({
-				baseURL: this.options.openAiBaseUrl,
-				apiKey: this.options.openAiApiKey,
-			})
+			this.client = new OpenAI({ baseURL, apiKey })
 		}
 	}
 

+ 10 - 8
src/api/providers/openrouter.ts

@@ -27,14 +27,16 @@ export class OpenRouterHandler implements ApiHandler, SingleCompletionHandler {
 
 	constructor(options: ApiHandlerOptions) {
 		this.options = options
-		this.client = new OpenAI({
-			baseURL: this.options.openRouterBaseUrl || "https://openrouter.ai/api/v1",
-			apiKey: this.options.openRouterApiKey,
-			defaultHeaders: {
-				"HTTP-Referer": "https://github.com/RooVetGit/Roo-Cline",
-				"X-Title": "Roo Code",
-			},
-		})
+
+		const baseURL = this.options.openRouterBaseUrl || "https://openrouter.ai/api/v1"
+		const apiKey = this.options.openRouterApiKey ?? "not-provided"
+
+		const defaultHeaders = {
+			"HTTP-Referer": "https://github.com/RooVetGit/Roo-Cline",
+			"X-Title": "Roo Code",
+		}
+
+		this.client = new OpenAI({ baseURL, apiKey, defaultHeaders })
 	}
 
 	async *createMessage(

+ 3 - 4
src/api/providers/unbound.ts

@@ -16,10 +16,9 @@ export class UnboundHandler implements ApiHandler, SingleCompletionHandler {
 
 	constructor(options: ApiHandlerOptions) {
 		this.options = options
-		this.client = new OpenAI({
-			baseURL: "https://api.getunbound.ai/v1",
-			apiKey: this.options.unboundApiKey,
-		})
+		const baseURL = "https://api.getunbound.ai/v1"
+		const apiKey = this.options.unboundApiKey ?? "not-provided"
+		this.client = new OpenAI({ baseURL, apiKey })
 	}
 
 	async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {

+ 2 - 2
src/api/providers/vertex.ts

@@ -12,9 +12,9 @@ export class VertexHandler implements ApiHandler, SingleCompletionHandler {
 	constructor(options: ApiHandlerOptions) {
 		this.options = options
 		this.client = new AnthropicVertex({
-			projectId: this.options.vertexProjectId,
+			projectId: this.options.vertexProjectId ?? "not-provided",
 			// https://cloud.google.com/vertex-ai/generative-ai/docs/partner-models/use-claude#regions
-			region: this.options.vertexRegion,
+			region: this.options.vertexRegion ?? "us-east5",
 		})
 	}