Pārlūkot izejas kodu

feat: Add Groq and Chutes API providers (#3034)

Co-authored-by: ellipsis-dev[bot] <65095814+ellipsis-dev[bot]@users.noreply.github.com>
Co-authored-by: Chris Estreich <[email protected]>
Shariq Riaz 10 mēneši atpakaļ
vecāks
revīzija
883be32b1a
44 mainītis faili ar 935 papildinājumiem un 0 dzēšanām
  1. 6 0
      src/api/index.ts
  2. 142 0
      src/api/providers/__tests__/chutes.test.ts
  3. 142 0
      src/api/providers/__tests__/groq.test.ts
  4. 129 0
      src/api/providers/base-openai-compatible-provider.ts
  5. 17 0
      src/api/providers/chutes.ts
  6. 17 0
      src/api/providers/groq.ts
  7. 6 0
      src/exports/roo-code.d.ts
  8. 6 0
      src/exports/types.ts
  9. 6 0
      src/i18n/locales/ca/common.json
  10. 6 0
      src/i18n/locales/de/common.json
  11. 6 0
      src/i18n/locales/es/common.json
  12. 6 0
      src/i18n/locales/fr/common.json
  13. 6 0
      src/i18n/locales/hi/common.json
  14. 6 0
      src/i18n/locales/it/common.json
  15. 6 0
      src/i18n/locales/ja/common.json
  16. 6 0
      src/i18n/locales/ko/common.json
  17. 6 0
      src/i18n/locales/pl/common.json
  18. 6 0
      src/i18n/locales/pt-BR/common.json
  19. 6 0
      src/i18n/locales/ru/common.json
  20. 6 0
      src/i18n/locales/tr/common.json
  21. 6 0
      src/i18n/locales/vi/common.json
  22. 6 0
      src/i18n/locales/zh-CN/common.json
  23. 6 0
      src/i18n/locales/zh-TW/common.json
  24. 14 0
      src/schemas/index.ts
  25. 245 0
      src/shared/api.ts
  26. 43 0
      webview-ui/src/components/settings/ApiOptions.tsx
  27. 6 0
      webview-ui/src/components/settings/constants.ts
  28. 8 0
      webview-ui/src/components/ui/hooks/useSelectedModel.ts
  29. 4 0
      webview-ui/src/i18n/locales/ca/settings.json
  30. 4 0
      webview-ui/src/i18n/locales/de/settings.json
  31. 4 0
      webview-ui/src/i18n/locales/en/settings.json
  32. 4 0
      webview-ui/src/i18n/locales/es/settings.json
  33. 4 0
      webview-ui/src/i18n/locales/fr/settings.json
  34. 4 0
      webview-ui/src/i18n/locales/hi/settings.json
  35. 4 0
      webview-ui/src/i18n/locales/it/settings.json
  36. 4 0
      webview-ui/src/i18n/locales/ja/settings.json
  37. 4 0
      webview-ui/src/i18n/locales/ko/settings.json
  38. 4 0
      webview-ui/src/i18n/locales/pl/settings.json
  39. 4 0
      webview-ui/src/i18n/locales/pt-BR/settings.json
  40. 4 0
      webview-ui/src/i18n/locales/ru/settings.json
  41. 4 0
      webview-ui/src/i18n/locales/tr/settings.json
  42. 4 0
      webview-ui/src/i18n/locales/vi/settings.json
  43. 4 0
      webview-ui/src/i18n/locales/zh-CN/settings.json
  44. 4 0
      webview-ui/src/i18n/locales/zh-TW/settings.json

+ 6 - 0
src/api/index.ts

@@ -23,6 +23,8 @@ import { RequestyHandler } from "./providers/requesty"
 import { HumanRelayHandler } from "./providers/human-relay"
 import { FakeAIHandler } from "./providers/fake-ai"
 import { XAIHandler } from "./providers/xai"
+import { GroqHandler } from "./providers/groq"
+import { ChutesHandler } from "./providers/chutes"
 
 export interface SingleCompletionHandler {
 	completePrompt(prompt: string): Promise<string>
@@ -88,6 +90,10 @@ export function buildApiHandler(configuration: ApiConfiguration): ApiHandler {
 			return new FakeAIHandler(options)
 		case "xai":
 			return new XAIHandler(options)
+		case "groq":
+			return new GroqHandler(options)
+		case "chutes":
+			return new ChutesHandler(options)
 		default:
 			return new AnthropicHandler(options)
 	}

+ 142 - 0
src/api/providers/__tests__/chutes.test.ts

@@ -0,0 +1,142 @@
+// npx jest src/api/providers/__tests__/chutes.test.ts
+
+import OpenAI from "openai"
+import { Anthropic } from "@anthropic-ai/sdk"
+
+import { ChutesModelId, chutesDefaultModelId, chutesModels } from "../../../shared/api"
+
+import { ChutesHandler } from "../chutes"
+
+// Mock the OpenAI SDK module: every `new OpenAI()` returns an object whose
+// `chat.completions.create` method is one shared jest mock, so tests can stub
+// responses without any network access.
+jest.mock("openai", () => {
+	const createMock = jest.fn()
+	return jest.fn(() => ({ chat: { completions: { create: createMock } } }))
+})
+
+describe("ChutesHandler", () => {
+	let handler: ChutesHandler
+	let mockCreate: jest.Mock
+
+	beforeEach(() => {
+		jest.clearAllMocks()
+		// Retrieve the shared `create` mock from the mocked constructor above.
+		mockCreate = (OpenAI as unknown as jest.Mock)().chat.completions.create
+		// NOTE(review): the base provider throws when `apiKey` is missing and no
+		// `chutesApiKey` is passed here — confirm this construction succeeds.
+		handler = new ChutesHandler({})
+	})
+
+	test("should use the correct Chutes base URL", () => {
+		new ChutesHandler({})
+		expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({ baseURL: "https://llm.chutes.ai/v1" }))
+	})
+
+	test("should use the provided API key", () => {
+		const chutesApiKey = "test-chutes-api-key"
+		new ChutesHandler({ chutesApiKey })
+		expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({ apiKey: chutesApiKey }))
+	})
+
+	test("should return default model when no model is specified", () => {
+		const model = handler.getModel()
+		expect(model.id).toBe(chutesDefaultModelId)
+		expect(model.info).toEqual(chutesModels[chutesDefaultModelId])
+	})
+
+	test("should return specified model when valid model is provided", () => {
+		const testModelId: ChutesModelId = "deepseek-ai/DeepSeek-R1"
+		const handlerWithModel = new ChutesHandler({ apiModelId: testModelId })
+		const model = handlerWithModel.getModel()
+
+		expect(model.id).toBe(testModelId)
+		expect(model.info).toEqual(chutesModels[testModelId])
+	})
+
+	test("completePrompt method should return text from Chutes API", async () => {
+		const expectedResponse = "This is a test response from Chutes"
+		mockCreate.mockResolvedValueOnce({ choices: [{ message: { content: expectedResponse } }] })
+		const result = await handler.completePrompt("test prompt")
+		expect(result).toBe(expectedResponse)
+	})
+
+	test("should handle errors in completePrompt", async () => {
+		const errorMessage = "Chutes API error"
+		mockCreate.mockRejectedValueOnce(new Error(errorMessage))
+		await expect(handler.completePrompt("test prompt")).rejects.toThrow(`Chutes completion error: ${errorMessage}`)
+	})
+
+	// The stream fixtures below hand-roll an async iterator that yields exactly
+	// one chunk and then reports completion.
+	test("createMessage should yield text content from stream", async () => {
+		const testContent = "This is test content from Chutes stream"
+
+		mockCreate.mockImplementationOnce(() => {
+			return {
+				[Symbol.asyncIterator]: () => ({
+					next: jest
+						.fn()
+						.mockResolvedValueOnce({
+							done: false,
+							value: { choices: [{ delta: { content: testContent } }] },
+						})
+						.mockResolvedValueOnce({ done: true }),
+				}),
+			}
+		})
+
+		const stream = handler.createMessage("system prompt", [])
+		const firstChunk = await stream.next()
+
+		expect(firstChunk.done).toBe(false)
+		expect(firstChunk.value).toEqual({ type: "text", text: testContent })
+	})
+
+	test("createMessage should yield usage data from stream", async () => {
+		mockCreate.mockImplementationOnce(() => {
+			return {
+				[Symbol.asyncIterator]: () => ({
+					next: jest
+						.fn()
+						.mockResolvedValueOnce({
+							done: false,
+							value: { choices: [{ delta: {} }], usage: { prompt_tokens: 10, completion_tokens: 20 } },
+						})
+						.mockResolvedValueOnce({ done: true }),
+				}),
+			}
+		})
+
+		const stream = handler.createMessage("system prompt", [])
+		const firstChunk = await stream.next()
+
+		expect(firstChunk.done).toBe(false)
+		expect(firstChunk.value).toEqual({ type: "usage", inputTokens: 10, outputTokens: 20 })
+	})
+
+	test("createMessage should pass correct parameters to Chutes client", async () => {
+		const modelId: ChutesModelId = "deepseek-ai/DeepSeek-R1"
+		const modelInfo = chutesModels[modelId]
+		const handlerWithModel = new ChutesHandler({ apiModelId: modelId })
+
+		// Empty stream: we only care about the arguments passed to `create`.
+		mockCreate.mockImplementationOnce(() => {
+			return {
+				[Symbol.asyncIterator]: () => ({
+					async next() {
+						return { done: true }
+					},
+				}),
+			}
+		})
+
+		const systemPrompt = "Test system prompt for Chutes"
+		const messages: Anthropic.Messages.MessageParam[] = [{ role: "user", content: "Test message for Chutes" }]
+
+		const messageGenerator = handlerWithModel.createMessage(systemPrompt, messages)
+		await messageGenerator.next()
+
+		expect(mockCreate).toHaveBeenCalledWith(
+			expect.objectContaining({
+				model: modelId,
+				max_tokens: modelInfo.maxTokens,
+				temperature: 0.5,
+				messages: expect.arrayContaining([{ role: "system", content: systemPrompt }]),
+				stream: true,
+				stream_options: { include_usage: true },
+			}),
+		)
+	})
+})

+ 142 - 0
src/api/providers/__tests__/groq.test.ts

@@ -0,0 +1,142 @@
+// npx jest src/api/providers/__tests__/groq.test.ts
+
+import OpenAI from "openai"
+import { Anthropic } from "@anthropic-ai/sdk"
+
+import { GroqModelId, groqDefaultModelId, groqModels } from "../../../shared/api"
+
+import { GroqHandler } from "../groq"
+
+// Mock the OpenAI SDK module: every `new OpenAI()` returns an object whose
+// `chat.completions.create` method is one shared jest mock, so tests can stub
+// responses without any network access.
+jest.mock("openai", () => {
+	const createMock = jest.fn()
+	return jest.fn(() => ({ chat: { completions: { create: createMock } } }))
+})
+
+describe("GroqHandler", () => {
+	let handler: GroqHandler
+	let mockCreate: jest.Mock
+
+	beforeEach(() => {
+		jest.clearAllMocks()
+		// Retrieve the shared `create` mock from the mocked constructor above.
+		mockCreate = (OpenAI as unknown as jest.Mock)().chat.completions.create
+		// NOTE(review): the base provider throws when `apiKey` is missing and no
+		// `groqApiKey` is passed here — confirm this construction succeeds.
+		handler = new GroqHandler({})
+	})
+
+	test("should use the correct Groq base URL", () => {
+		new GroqHandler({})
+		expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({ baseURL: "https://api.groq.com/openai/v1" }))
+	})
+
+	test("should use the provided API key", () => {
+		const groqApiKey = "test-groq-api-key"
+		new GroqHandler({ groqApiKey })
+		expect(OpenAI).toHaveBeenCalledWith(expect.objectContaining({ apiKey: groqApiKey }))
+	})
+
+	test("should return default model when no model is specified", () => {
+		const model = handler.getModel()
+		expect(model.id).toBe(groqDefaultModelId) // Use groqDefaultModelId
+		expect(model.info).toEqual(groqModels[groqDefaultModelId]) // Use groqModels
+	})
+
+	test("should return specified model when valid model is provided", () => {
+		const testModelId: GroqModelId = "llama-3.3-70b-versatile" // Use a valid Groq model ID and type
+		const handlerWithModel = new GroqHandler({ apiModelId: testModelId }) // Instantiate GroqHandler
+		const model = handlerWithModel.getModel()
+
+		expect(model.id).toBe(testModelId)
+		expect(model.info).toEqual(groqModels[testModelId]) // Use groqModels
+	})
+
+	test("completePrompt method should return text from Groq API", async () => {
+		const expectedResponse = "This is a test response from Groq"
+		mockCreate.mockResolvedValueOnce({ choices: [{ message: { content: expectedResponse } }] })
+		const result = await handler.completePrompt("test prompt")
+		expect(result).toBe(expectedResponse)
+	})
+
+	test("should handle errors in completePrompt", async () => {
+		const errorMessage = "Groq API error"
+		mockCreate.mockRejectedValueOnce(new Error(errorMessage))
+		await expect(handler.completePrompt("test prompt")).rejects.toThrow(`Groq completion error: ${errorMessage}`)
+	})
+
+	// The stream fixtures below hand-roll an async iterator that yields exactly
+	// one chunk and then reports completion.
+	test("createMessage should yield text content from stream", async () => {
+		const testContent = "This is test content from Groq stream"
+
+		mockCreate.mockImplementationOnce(() => {
+			return {
+				[Symbol.asyncIterator]: () => ({
+					next: jest
+						.fn()
+						.mockResolvedValueOnce({
+							done: false,
+							value: { choices: [{ delta: { content: testContent } }] },
+						})
+						.mockResolvedValueOnce({ done: true }),
+				}),
+			}
+		})
+
+		const stream = handler.createMessage("system prompt", [])
+		const firstChunk = await stream.next()
+
+		expect(firstChunk.done).toBe(false)
+		expect(firstChunk.value).toEqual({ type: "text", text: testContent })
+	})
+
+	test("createMessage should yield usage data from stream", async () => {
+		mockCreate.mockImplementationOnce(() => {
+			return {
+				[Symbol.asyncIterator]: () => ({
+					next: jest
+						.fn()
+						.mockResolvedValueOnce({
+							done: false,
+							value: { choices: [{ delta: {} }], usage: { prompt_tokens: 10, completion_tokens: 20 } },
+						})
+						.mockResolvedValueOnce({ done: true }),
+				}),
+			}
+		})
+
+		const stream = handler.createMessage("system prompt", [])
+		const firstChunk = await stream.next()
+
+		expect(firstChunk.done).toBe(false)
+		expect(firstChunk.value).toEqual({ type: "usage", inputTokens: 10, outputTokens: 20 })
+	})
+
+	test("createMessage should pass correct parameters to Groq client", async () => {
+		const modelId: GroqModelId = "llama-3.1-8b-instant"
+		const modelInfo = groqModels[modelId]
+		const handlerWithModel = new GroqHandler({ apiModelId: modelId })
+
+		// Empty stream: we only care about the arguments passed to `create`.
+		mockCreate.mockImplementationOnce(() => {
+			return {
+				[Symbol.asyncIterator]: () => ({
+					async next() {
+						return { done: true }
+					},
+				}),
+			}
+		})
+
+		const systemPrompt = "Test system prompt for Groq"
+		const messages: Anthropic.Messages.MessageParam[] = [{ role: "user", content: "Test message for Groq" }]
+
+		const messageGenerator = handlerWithModel.createMessage(systemPrompt, messages)
+		await messageGenerator.next()
+
+		expect(mockCreate).toHaveBeenCalledWith(
+			expect.objectContaining({
+				model: modelId,
+				max_tokens: modelInfo.maxTokens,
+				temperature: 0.5,
+				messages: expect.arrayContaining([{ role: "system", content: systemPrompt }]),
+				stream: true,
+				stream_options: { include_usage: true },
+			}),
+		)
+	})
+})

+ 129 - 0
src/api/providers/base-openai-compatible-provider.ts

@@ -0,0 +1,129 @@
+import { Anthropic } from "@anthropic-ai/sdk"
+import OpenAI from "openai"
+
+import { ApiHandlerOptions, ModelInfo } from "../../shared/api"
+import { ApiStream } from "../transform/stream"
+import { convertToOpenAiMessages } from "../transform/openai-format"
+
+import { SingleCompletionHandler } from "../index"
+import { DEFAULT_HEADERS } from "./constants"
+import { BaseProvider } from "./base-provider"
+
+/**
+ * Constructor options for {@link BaseOpenAiCompatibleProvider}: the regular
+ * handler options plus the provider-specific wiring (display name, endpoint
+ * base URL, static model table and defaults) supplied by each subclass.
+ */
+type BaseOpenAiCompatibleProviderOptions<ModelName extends string> = ApiHandlerOptions & {
+	providerName: string
+	baseURL: string
+	defaultProviderModelId: ModelName
+	providerModels: Record<ModelName, ModelInfo>
+	defaultTemperature?: number
+}
+
+/**
+ * Shared base class for providers that expose an OpenAI-compatible
+ * chat-completions endpoint (Groq, Chutes, ...). Subclasses only supply the
+ * base URL, API key and model table; all request logic lives here.
+ */
+export abstract class BaseOpenAiCompatibleProvider<ModelName extends string>
+	extends BaseProvider
+	implements SingleCompletionHandler
+{
+	protected readonly providerName: string
+	protected readonly baseURL: string
+	protected readonly defaultTemperature: number
+	protected readonly defaultProviderModelId: ModelName
+	protected readonly providerModels: Record<ModelName, ModelInfo>
+
+	// Remaining handler options (apiModelId, modelTemperature, apiKey, ...).
+	protected readonly options: ApiHandlerOptions
+
+	private client: OpenAI
+
+	// Throws when no `apiKey` is present in the remaining options.
+	// NOTE(review): the unit tests construct handlers without an API key,
+	// which would trip this guard — confirm the intended behavior.
+	constructor({
+		providerName,
+		baseURL,
+		defaultProviderModelId,
+		providerModels,
+		defaultTemperature,
+		...options
+	}: BaseOpenAiCompatibleProviderOptions<ModelName>) {
+		super()
+
+		this.providerName = providerName
+		this.baseURL = baseURL
+		this.defaultProviderModelId = defaultProviderModelId
+		this.providerModels = providerModels
+		this.defaultTemperature = defaultTemperature ?? 0
+
+		this.options = options
+
+		if (!this.options.apiKey) {
+			throw new Error("API key is required")
+		}
+
+		this.client = new OpenAI({
+			baseURL,
+			apiKey: this.options.apiKey,
+			defaultHeaders: DEFAULT_HEADERS,
+		})
+	}
+
+	/**
+	 * Streams a chat completion: yields a "text" chunk for every content delta
+	 * and a "usage" chunk whenever the API reports token counts.
+	 */
+	override async *createMessage(systemPrompt: string, messages: Anthropic.Messages.MessageParam[]): ApiStream {
+		const {
+			id: model,
+			info: { maxTokens: max_tokens },
+		} = this.getModel()
+
+		// A per-request temperature override beats the provider default.
+		const temperature = this.options.modelTemperature ?? this.defaultTemperature
+
+		const params: OpenAI.Chat.Completions.ChatCompletionCreateParamsStreaming = {
+			model,
+			max_tokens,
+			temperature,
+			messages: [{ role: "system", content: systemPrompt }, ...convertToOpenAiMessages(messages)],
+			stream: true,
+			stream_options: { include_usage: true },
+		}
+
+		const stream = await this.client.chat.completions.create(params)
+
+		for await (const chunk of stream) {
+			const delta = chunk.choices[0]?.delta
+
+			if (delta?.content) {
+				yield {
+					type: "text",
+					text: delta.content,
+				}
+			}
+
+			if (chunk.usage) {
+				yield {
+					type: "usage",
+					inputTokens: chunk.usage.prompt_tokens || 0,
+					outputTokens: chunk.usage.completion_tokens || 0,
+				}
+			}
+		}
+	}
+
+	/**
+	 * Non-streaming single-prompt completion. API errors are re-thrown with
+	 * the provider name prefixed so failures are attributable in logs.
+	 */
+	async completePrompt(prompt: string): Promise<string> {
+		const { id: modelId } = this.getModel()
+
+		try {
+			const response = await this.client.chat.completions.create({
+				model: modelId,
+				messages: [{ role: "user", content: prompt }],
+			})
+
+			return response.choices[0]?.message.content || ""
+		} catch (error) {
+			if (error instanceof Error) {
+				throw new Error(`${this.providerName} completion error: ${error.message}`)
+			}
+
+			throw error
+		}
+	}
+
+	/** Resolves the configured model id, falling back to the provider default. */
+	override getModel() {
+		const id =
+			this.options.apiModelId && this.options.apiModelId in this.providerModels
+				? (this.options.apiModelId as ModelName)
+				: this.defaultProviderModelId
+
+		return { id, info: this.providerModels[id] }
+	}
+}

+ 17 - 0
src/api/providers/chutes.ts

@@ -0,0 +1,17 @@
+import { ApiHandlerOptions, ChutesModelId, chutesDefaultModelId, chutesModels } from "../../shared/api"
+
+import { BaseOpenAiCompatibleProvider } from "./base-openai-compatible-provider"
+
+/**
+ * API handler for the Chutes AI provider (OpenAI-compatible endpoint at
+ * llm.chutes.ai). All request/streaming logic lives in the shared base class;
+ * this subclass only supplies the Chutes-specific wiring.
+ */
+export class ChutesHandler extends BaseOpenAiCompatibleProvider<ChutesModelId> {
+	constructor(options: ApiHandlerOptions) {
+		super({
+			...options,
+			providerName: "Chutes",
+			baseURL: "https://llm.chutes.ai/v1",
+			apiKey: options.chutesApiKey,
+			defaultProviderModelId: chutesDefaultModelId,
+			providerModels: chutesModels,
+			defaultTemperature: 0.5,
+		})
+	}
+}

+ 17 - 0
src/api/providers/groq.ts

@@ -0,0 +1,17 @@
+import { ApiHandlerOptions, GroqModelId, groqDefaultModelId, groqModels } from "../../shared/api" // Updated imports for Groq
+
+import { BaseOpenAiCompatibleProvider } from "./base-openai-compatible-provider"
+
+/**
+ * API handler for the Groq provider (OpenAI-compatible endpoint at
+ * api.groq.com). All request/streaming logic lives in the shared base class;
+ * this subclass only supplies the Groq-specific wiring.
+ */
+export class GroqHandler extends BaseOpenAiCompatibleProvider<GroqModelId> {
+	constructor(options: ApiHandlerOptions) {
+		super({
+			...options,
+			providerName: "Groq",
+			baseURL: "https://api.groq.com/openai/v1",
+			apiKey: options.groqApiKey,
+			defaultProviderModelId: groqDefaultModelId,
+			providerModels: groqModels,
+			defaultTemperature: 0.5,
+		})
+	}
+}

+ 6 - 0
src/exports/roo-code.d.ts

@@ -21,6 +21,8 @@ type ProviderSettings = {
 				| "human-relay"
 				| "fake-ai"
 				| "xai"
+				| "groq"
+				| "chutes"
 		  )
 		| undefined
 	apiModelId?: string | undefined
@@ -120,6 +122,8 @@ type ProviderSettings = {
 	requestyApiKey?: string | undefined
 	requestyModelId?: string | undefined
 	xaiApiKey?: string | undefined
+	groqApiKey?: string | undefined
+	chutesApiKey?: string | undefined
 	modelMaxTokens?: number | undefined
 	modelMaxThinkingTokens?: number | undefined
 	includeMaxTokens?: boolean | undefined
@@ -158,6 +162,8 @@ type GlobalSettings = {
 							| "human-relay"
 							| "fake-ai"
 							| "xai"
+							| "groq"
+							| "chutes"
 					  )
 					| undefined
 		  }[]

+ 6 - 0
src/exports/types.ts

@@ -22,6 +22,8 @@ type ProviderSettings = {
 				| "human-relay"
 				| "fake-ai"
 				| "xai"
+				| "groq"
+				| "chutes"
 		  )
 		| undefined
 	apiModelId?: string | undefined
@@ -121,6 +123,8 @@ type ProviderSettings = {
 	requestyApiKey?: string | undefined
 	requestyModelId?: string | undefined
 	xaiApiKey?: string | undefined
+	groqApiKey?: string | undefined
+	chutesApiKey?: string | undefined
 	modelMaxTokens?: number | undefined
 	modelMaxThinkingTokens?: number | undefined
 	includeMaxTokens?: boolean | undefined
@@ -161,6 +165,8 @@ type GlobalSettings = {
 							| "human-relay"
 							| "fake-ai"
 							| "xai"
+							| "groq"
+							| "chutes"
 					  )
 					| undefined
 		  }[]

+ 6 - 0
src/i18n/locales/ca/common.json

@@ -89,5 +89,11 @@
 		"path_placeholder": "D:\\RooCodeStorage",
 		"enter_absolute_path": "Introdueix una ruta completa (p. ex. D:\\RooCodeStorage o /home/user/storage)",
 		"enter_valid_path": "Introdueix una ruta vàlida"
+	},
+	"settings": {
+		"providers": {
+			"groqApiKey": "Clau API de Groq",
+			"getGroqApiKey": "Obté la clau API de Groq"
+		}
 	}
 }

+ 6 - 0
src/i18n/locales/de/common.json

@@ -89,5 +89,11 @@
 	"input": {
 		"task_prompt": "Was soll Roo tun?",
 		"task_placeholder": "Gib deine Aufgabe hier ein"
+	},
+	"settings": {
+		"providers": {
+			"groqApiKey": "Groq API-Schlüssel",
+			"getGroqApiKey": "Groq API-Schlüssel erhalten"
+		}
 	}
 }

+ 6 - 0
src/i18n/locales/es/common.json

@@ -89,5 +89,11 @@
 	"input": {
 		"task_prompt": "¿Qué debe hacer Roo?",
 		"task_placeholder": "Escribe tu tarea aquí"
+	},
+	"settings": {
+		"providers": {
+			"groqApiKey": "Clave API de Groq",
+			"getGroqApiKey": "Obtener clave API de Groq"
+		}
 	}
 }

+ 6 - 0
src/i18n/locales/fr/common.json

@@ -89,5 +89,11 @@
 	"input": {
 		"task_prompt": "Que doit faire Roo ?",
 		"task_placeholder": "Écris ta tâche ici"
+	},
+	"settings": {
+		"providers": {
+			"groqApiKey": "Clé API Groq",
+			"getGroqApiKey": "Obtenir la clé API Groq"
+		}
 	}
 }

+ 6 - 0
src/i18n/locales/hi/common.json

@@ -89,5 +89,11 @@
 	"input": {
 		"task_prompt": "Roo को क्या करना है?",
 		"task_placeholder": "अपना कार्य यहाँ लिखें"
+	},
+	"settings": {
+		"providers": {
+			"groqApiKey": "ग्रोक एपीआई कुंजी",
+			"getGroqApiKey": "ग्रोक एपीआई कुंजी प्राप्त करें"
+		}
 	}
 }

+ 6 - 0
src/i18n/locales/it/common.json

@@ -89,5 +89,11 @@
 	"input": {
 		"task_prompt": "Cosa deve fare Roo?",
 		"task_placeholder": "Scrivi il tuo compito qui"
+	},
+	"settings": {
+		"providers": {
+			"groqApiKey": "Chiave API Groq",
+			"getGroqApiKey": "Ottieni chiave API Groq"
+		}
 	}
 }

+ 6 - 0
src/i18n/locales/ja/common.json

@@ -89,5 +89,11 @@
 	"input": {
 		"task_prompt": "Rooにどんなことをさせますか?",
 		"task_placeholder": "タスクをここに入力してください"
+	},
+	"settings": {
+		"providers": {
+			"groqApiKey": "Groq APIキー",
+			"getGroqApiKey": "Groq APIキーを取得"
+		}
 	}
 }

+ 6 - 0
src/i18n/locales/ko/common.json

@@ -89,5 +89,11 @@
 	"input": {
 		"task_prompt": "Roo에게 무엇을 시킬까요?",
 		"task_placeholder": "여기에 작업을 입력하세요"
+	},
+	"settings": {
+		"providers": {
+			"groqApiKey": "Groq API 키",
+			"getGroqApiKey": "Groq API 키 받기"
+		}
 	}
 }

+ 6 - 0
src/i18n/locales/pl/common.json

@@ -89,5 +89,11 @@
 	"input": {
 		"task_prompt": "Co ma zrobić Roo?",
 		"task_placeholder": "Wpisz swoje zadanie tutaj"
+	},
+	"settings": {
+		"providers": {
+			"groqApiKey": "Klucz API Groq",
+			"getGroqApiKey": "Uzyskaj klucz API Groq"
+		}
 	}
 }

+ 6 - 0
src/i18n/locales/pt-BR/common.json

@@ -89,5 +89,11 @@
 		"path_placeholder": "D:\\RooCodeStorage",
 		"enter_absolute_path": "Por favor, digite um caminho absoluto (ex: D:\\RooCodeStorage ou /home/user/storage)",
 		"enter_valid_path": "Por favor, digite um caminho válido"
+	},
+	"settings": {
+		"providers": {
+			"groqApiKey": "Chave de API Groq",
+			"getGroqApiKey": "Obter chave de API Groq"
+		}
 	}
 }

+ 6 - 0
src/i18n/locales/ru/common.json

@@ -89,5 +89,11 @@
 	"input": {
 		"task_prompt": "Что должен сделать Roo?",
 		"task_placeholder": "Введите вашу задачу здесь"
+	},
+	"settings": {
+		"providers": {
+			"groqApiKey": "Ключ API Groq",
+			"getGroqApiKey": "Получить ключ API Groq"
+		}
 	}
 }

+ 6 - 0
src/i18n/locales/tr/common.json

@@ -89,5 +89,11 @@
 	"input": {
 		"task_prompt": "Roo ne yapsın?",
 		"task_placeholder": "Görevini buraya yaz"
+	},
+	"settings": {
+		"providers": {
+			"groqApiKey": "Groq API Anahtarı",
+			"getGroqApiKey": "Groq API Anahtarı Al"
+		}
 	}
 }

+ 6 - 0
src/i18n/locales/vi/common.json

@@ -89,5 +89,11 @@
 	"input": {
 		"task_prompt": "Bạn muốn Roo làm gì?",
 		"task_placeholder": "Nhập nhiệm vụ của bạn ở đây"
+	},
+	"settings": {
+		"providers": {
+			"groqApiKey": "Khóa API Groq",
+			"getGroqApiKey": "Lấy khóa API Groq"
+		}
 	}
 }

+ 6 - 0
src/i18n/locales/zh-CN/common.json

@@ -89,5 +89,11 @@
 	"input": {
 		"task_prompt": "让Roo做什么?",
 		"task_placeholder": "在这里输入任务"
+	},
+	"settings": {
+		"providers": {
+			"groqApiKey": "Groq API 密钥",
+			"getGroqApiKey": "获取 Groq API 密钥"
+		}
 	}
 }

+ 6 - 0
src/i18n/locales/zh-TW/common.json

@@ -89,5 +89,11 @@
 	"input": {
 		"task_prompt": "讓 Roo 做什麼?",
 		"task_placeholder": "在這裡輸入工作"
+	},
+	"settings": {
+		"providers": {
+			"groqApiKey": "Groq API 金鑰",
+			"getGroqApiKey": "取得 Groq API 金鑰"
+		}
 	}
 }

+ 14 - 0
src/schemas/index.ts

@@ -29,6 +29,8 @@ export const providerNames = [
 	"human-relay",
 	"fake-ai",
 	"xai",
+	"groq",
+	"chutes",
 ] as const
 
 export const providerNamesSchema = z.enum(providerNames)
@@ -423,6 +425,10 @@ export const providerSettingsSchema = z.object({
 	requestyModelId: z.string().optional(),
 	// X.AI (Grok)
 	xaiApiKey: z.string().optional(),
+	// Groq
+	groqApiKey: z.string().optional(),
+	// Chutes AI
+	chutesApiKey: z.string().optional(),
 	// Claude 3.7 Sonnet Thinking
 	modelMaxTokens: z.number().optional(),
 	modelMaxThinkingTokens: z.number().optional(),
@@ -529,6 +535,10 @@ const providerSettingsRecord: ProviderSettingsRecord = {
 	fakeAi: undefined,
 	// X.AI (Grok)
 	xaiApiKey: undefined,
+	// Groq
+	groqApiKey: undefined,
+	// Chutes AI
+	chutesApiKey: undefined,
 }
 
 export const PROVIDER_SETTINGS_KEYS = Object.keys(providerSettingsRecord) as Keys<ProviderSettings>[]
@@ -721,6 +731,8 @@ export type SecretState = Pick<
 	| "unboundApiKey"
 	| "requestyApiKey"
 	| "xaiApiKey"
+	| "groqApiKey"
+	| "chutesApiKey"
 >
 
 type SecretStateRecord = Record<Keys<SecretState>, undefined>
@@ -740,6 +752,8 @@ const secretStateRecord: SecretStateRecord = {
 	unboundApiKey: undefined,
 	requestyApiKey: undefined,
 	xaiApiKey: undefined,
+	groqApiKey: undefined,
+	chutesApiKey: undefined,
 }
 
 export const SECRET_STATE_KEYS = Object.keys(secretStateRecord) as Keys<SecretState>[]

+ 245 - 0
src/shared/api.ts

@@ -1400,6 +1400,251 @@ export const vscodeLlmModels = {
 	}
 >
 
+// Groq
+// https://console.groq.com/docs/models
+export type GroqModelId =
+	| "llama-3.1-8b-instant"
+	| "llama-3.3-70b-versatile"
+	| "meta-llama/llama-4-scout-17b-16e-instruct"
+	| "meta-llama/llama-4-maverick-17b-128e-instruct"
+	| "mistral-saba-24b"
+	| "qwen-qwq-32b"
+	| "deepseek-r1-distill-llama-70b"
+export const groqDefaultModelId: GroqModelId = "llama-3.3-70b-versatile" // Default: Llama 3.3 70B Versatile
+export const groqModels = {
+	// Models based on API response: https://api.groq.com/openai/v1/models
+	// NOTE(review): several entries set maxTokens equal to the full context
+	// window, and all prices are 0 — confirm against Groq's published limits
+	// and pricing.
+	"llama-3.1-8b-instant": {
+		maxTokens: 131072,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "Meta Llama 3.1 8B Instant model, 128K context.",
+	},
+	"llama-3.3-70b-versatile": {
+		maxTokens: 32768,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "Meta Llama 3.3 70B Versatile model, 128K context.",
+	},
+	"meta-llama/llama-4-scout-17b-16e-instruct": {
+		maxTokens: 8192,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "Meta Llama 4 Scout 17B Instruct model, 128K context.",
+	},
+	"meta-llama/llama-4-maverick-17b-128e-instruct": {
+		maxTokens: 8192,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "Meta Llama 4 Maverick 17B Instruct model, 128K context.",
+	},
+	"mistral-saba-24b": {
+		maxTokens: 32768,
+		contextWindow: 32768,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "Mistral Saba 24B model, 32K context.",
+	},
+	"qwen-qwq-32b": {
+		maxTokens: 131072,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "Alibaba Qwen QwQ 32B model, 128K context.",
+	},
+	"deepseek-r1-distill-llama-70b": {
+		maxTokens: 131072,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "DeepSeek R1 Distill Llama 70B model, 128K context.",
+	},
+} as const satisfies Record<string, ModelInfo>
+
+// Chutes AI
+// https://llm.chutes.ai/v1 (OpenAI compatible)
+export type ChutesModelId =
+	| "deepseek-ai/DeepSeek-R1"
+	| "deepseek-ai/DeepSeek-V3"
+	| "unsloth/Llama-3.3-70B-Instruct"
+	| "chutesai/Llama-4-Scout-17B-16E-Instruct"
+	| "unsloth/Mistral-Nemo-Instruct-2407"
+	| "unsloth/gemma-3-12b-it"
+	| "NousResearch/DeepHermes-3-Llama-3-8B-Preview"
+	| "unsloth/gemma-3-4b-it"
+	| "nvidia/Llama-3_3-Nemotron-Super-49B-v1"
+	| "nvidia/Llama-3_1-Nemotron-Ultra-253B-v1"
+	| "chutesai/Llama-4-Maverick-17B-128E-Instruct-FP8"
+	| "deepseek-ai/DeepSeek-V3-Base"
+	| "deepseek-ai/DeepSeek-R1-Zero"
+	| "deepseek-ai/DeepSeek-V3-0324"
+	| "microsoft/MAI-DS-R1-FP8"
+	| "tngtech/DeepSeek-R1T-Chimera"
+export const chutesDefaultModelId: ChutesModelId = "deepseek-ai/DeepSeek-R1"
+// NOTE(review): all prices are set to 0 — confirm Chutes' actual pricing.
+export const chutesModels = {
+	"deepseek-ai/DeepSeek-R1": {
+		maxTokens: 32768,
+		contextWindow: 163840,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "DeepSeek R1 model.",
+	},
+	"deepseek-ai/DeepSeek-V3": {
+		maxTokens: 32768,
+		contextWindow: 163840,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "DeepSeek V3 model.",
+	},
+	"unsloth/Llama-3.3-70B-Instruct": {
+		maxTokens: 32768, // mirrored from the Groq Llama 3.3 entry — TODO confirm for Chutes
+		contextWindow: 131072, // mirrored from the Groq Llama 3.3 entry — TODO confirm for Chutes
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "Unsloth Llama 3.3 70B Instruct model.",
+	},
+	"chutesai/Llama-4-Scout-17B-16E-Instruct": {
+		maxTokens: 32768,
+		contextWindow: 512000,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "ChutesAI Llama 4 Scout 17B Instruct model, 512K context.",
+	},
+	"unsloth/Mistral-Nemo-Instruct-2407": {
+		maxTokens: 32768,
+		contextWindow: 128000,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "Unsloth Mistral Nemo Instruct model.",
+	},
+	"unsloth/gemma-3-12b-it": {
+		maxTokens: 32768,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "Unsloth Gemma 3 12B IT model.",
+	},
+	"NousResearch/DeepHermes-3-Llama-3-8B-Preview": {
+		maxTokens: 32768,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "Nous DeepHermes 3 Llama 3 8B Preview model.",
+	},
+	"unsloth/gemma-3-4b-it": {
+		maxTokens: 32768,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "Unsloth Gemma 3 4B IT model.",
+	},
+	"nvidia/Llama-3_3-Nemotron-Super-49B-v1": {
+		maxTokens: 32768,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "Nvidia Llama 3.3 Nemotron Super 49B model.",
+	},
+	"nvidia/Llama-3_1-Nemotron-Ultra-253B-v1": {
+		maxTokens: 32768,
+		contextWindow: 131072,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "Nvidia Llama 3.1 Nemotron Ultra 253B model.",
+	},
+	"chutesai/Llama-4-Maverick-17B-128E-Instruct-FP8": {
+		maxTokens: 32768,
+		contextWindow: 256000,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "ChutesAI Llama 4 Maverick 17B Instruct FP8 model.",
+	},
+	"deepseek-ai/DeepSeek-V3-Base": {
+		maxTokens: 32768,
+		contextWindow: 163840,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "DeepSeek V3 Base model.",
+	},
+	"deepseek-ai/DeepSeek-R1-Zero": {
+		maxTokens: 32768,
+		contextWindow: 163840,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "DeepSeek R1 Zero model.",
+	},
+	"deepseek-ai/DeepSeek-V3-0324": {
+		maxTokens: 32768,
+		contextWindow: 163840,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "DeepSeek V3 (0324) model.",
+	},
+	"microsoft/MAI-DS-R1-FP8": {
+		maxTokens: 32768,
+		contextWindow: 163840,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "Microsoft MAI-DS-R1 FP8 model.",
+	},
+	"tngtech/DeepSeek-R1T-Chimera": {
+		maxTokens: 32768,
+		contextWindow: 163840,
+		supportsImages: false,
+		supportsPromptCache: false,
+		inputPrice: 0,
+		outputPrice: 0,
+		description: "TNGTech DeepSeek R1T Chimera model.",
+	},
+} as const satisfies Record<string, ModelInfo>
+
 /**
  * Constants
  */

+ 43 - 0
webview-ui/src/components/settings/ApiOptions.tsx

@@ -1613,6 +1613,49 @@ const ApiOptions = ({
 				</>
 			)}
 
+			{selectedProvider === "groq" && (
+				<>
+					<VSCodeTextField
+						value={apiConfiguration?.groqApiKey || ""}
+						type="password"
+						onInput={handleInputChange("groqApiKey")}
+						placeholder={t("settings:placeholders.apiKey")}
+						className="w-full">
+						<label className="block font-medium mb-1">{t("settings:providers.groqApiKey")}</label>
+					</VSCodeTextField>
+					<div className="text-sm text-vscode-descriptionForeground -mt-2">
+						{t("settings:providers.apiKeyStorageNotice")}
+					</div>
+					{!apiConfiguration?.groqApiKey && (
+						<VSCodeButtonLink href="https://console.groq.com/keys" appearance="secondary">
+							{t("settings:providers.getGroqApiKey")}
+						</VSCodeButtonLink>
+					)}
+				</>
+			)}
+
+			{selectedProvider === "chutes" && (
+				<>
+					<VSCodeTextField
+						value={apiConfiguration?.chutesApiKey || ""}
+						type="password"
+						onInput={handleInputChange("chutesApiKey")}
+						placeholder={t("settings:placeholders.apiKey")}
+						className="w-full">
+						<label className="block font-medium mb-1">{t("settings:providers.chutesApiKey")}</label>
+					</VSCodeTextField>
+					<div className="text-sm text-vscode-descriptionForeground -mt-2">
+						{t("settings:providers.apiKeyStorageNotice")}
+					</div>
+					{/* Add a link to get Chutes API key if available */}
+					{/* {!apiConfiguration?.chutesApiKey && (
+						<VSCodeButtonLink href="LINK_TO_CHUTES_API_KEYS" appearance="secondary">
+							{t("settings:providers.getChutesApiKey")}
+						</VSCodeButtonLink>
+					)} */}
+				</>
+			)}
+
 			{selectedProvider === "unbound" && (
 				<>
 					<VSCodeTextField

+ 6 - 0
webview-ui/src/components/settings/constants.ts

@@ -9,6 +9,8 @@ import {
 	openAiNativeModels,
 	vertexModels,
 	xaiModels,
+	groqModels,
+	chutesModels,
 } from "@roo/shared/api"
 
 export { REASONING_MODELS, PROMPT_CACHING_MODELS } from "@roo/shared/api"
@@ -24,6 +26,8 @@ export const MODELS_BY_PROVIDER: Partial<Record<ApiProvider, Record<string, Mode
 	"openai-native": openAiNativeModels,
 	vertex: vertexModels,
 	xai: xaiModels,
+	groq: groqModels,
+	chutes: chutesModels,
 }
 
 export const PROVIDERS = [
@@ -44,6 +48,8 @@ export const PROVIDERS = [
 	{ value: "requesty", label: "Requesty" },
 	{ value: "human-relay", label: "Human Relay" },
 	{ value: "xai", label: "xAI" },
+	{ value: "groq", label: "Groq" },
+	{ value: "chutes", label: "Chutes AI" },
 ].sort((a, b) => a.label.localeCompare(b.label))
 
 export const VERTEX_REGIONS = [

+ 8 - 0
webview-ui/src/components/ui/hooks/useSelectedModel.ts

@@ -19,6 +19,10 @@ import {
 	vertexModels,
 	xaiDefaultModelId,
 	xaiModels,
+	groqModels,
+	groqDefaultModelId,
+	chutesModels,
+	chutesDefaultModelId,
 	vscodeLlmModels,
 	vscodeLlmDefaultModelId,
 	openRouterDefaultModelId,
@@ -84,6 +88,10 @@ function getSelectedModelInfo({
 			return routerModels.unbound[id] ?? routerModels.unbound[unboundDefaultModelId]
 		case "xai":
 			return xaiModels[id as keyof typeof xaiModels] ?? xaiModels[xaiDefaultModelId]
+		case "groq":
+			return groqModels[id as keyof typeof groqModels] ?? groqModels[groqDefaultModelId]
+		case "chutes":
+			return chutesModels[id as keyof typeof chutesModels] ?? chutesModels[chutesDefaultModelId]
 		case "bedrock":
 			// Special case for custom ARN.
 			if (id === "custom-arn") {

+ 4 - 0
webview-ui/src/i18n/locales/ca/settings.json

@@ -123,9 +123,13 @@
 		"anthropicApiKey": "Clau API d'Anthropic",
 		"getAnthropicApiKey": "Obtenir clau API d'Anthropic",
 		"anthropicUseAuthToken": "Passar la clau API d'Anthropic com a capçalera d'autorització en lloc de X-Api-Key",
+		"chutesApiKey": "Clau API de Chutes",
+		"getChutesApiKey": "Obtenir clau API de Chutes",
 		"deepSeekApiKey": "Clau API de DeepSeek",
 		"getDeepSeekApiKey": "Obtenir clau API de DeepSeek",
 		"geminiApiKey": "Clau API de Gemini",
+		"getGroqApiKey": "Obtenir clau API de Groq",
+		"groqApiKey": "Clau API de Groq",
 		"getGeminiApiKey": "Obtenir clau API de Gemini",
 		"openAiApiKey": "Clau API d'OpenAI",
 		"openAiBaseUrl": "URL base",

+ 4 - 0
webview-ui/src/i18n/locales/de/settings.json

@@ -123,10 +123,14 @@
 		"anthropicApiKey": "Anthropic API-Schlüssel",
 		"getAnthropicApiKey": "Anthropic API-Schlüssel erhalten",
 		"anthropicUseAuthToken": "Anthropic API-Schlüssel als Authorization-Header anstelle von X-Api-Key übergeben",
+		"chutesApiKey": "Chutes API-Schlüssel",
+		"getChutesApiKey": "Chutes API-Schlüssel erhalten",
 		"deepSeekApiKey": "DeepSeek API-Schlüssel",
 		"getDeepSeekApiKey": "DeepSeek API-Schlüssel erhalten",
 		"geminiApiKey": "Gemini API-Schlüssel",
 		"getGeminiApiKey": "Gemini API-Schlüssel erhalten",
+		"getGroqApiKey": "Groq API-Schlüssel erhalten",
+		"groqApiKey": "Groq API-Schlüssel",
 		"openAiApiKey": "OpenAI API-Schlüssel",
 		"openAiBaseUrl": "Basis-URL",
 		"getOpenAiApiKey": "OpenAI API-Schlüssel erhalten",

+ 4 - 0
webview-ui/src/i18n/locales/en/settings.json

@@ -123,9 +123,13 @@
 		"anthropicApiKey": "Anthropic API Key",
 		"getAnthropicApiKey": "Get Anthropic API Key",
 		"anthropicUseAuthToken": "Pass Anthropic API Key as Authorization header instead of X-Api-Key",
+		"chutesApiKey": "Chutes API Key",
+		"getChutesApiKey": "Get Chutes API Key",
 		"deepSeekApiKey": "DeepSeek API Key",
 		"getDeepSeekApiKey": "Get DeepSeek API Key",
 		"geminiApiKey": "Gemini API Key",
+		"getGroqApiKey": "Get Groq API Key",
+		"groqApiKey": "Groq API Key",
 		"getGeminiApiKey": "Get Gemini API Key",
 		"openAiApiKey": "OpenAI API Key",
 		"openAiBaseUrl": "Base URL",

+ 4 - 0
webview-ui/src/i18n/locales/es/settings.json

@@ -123,9 +123,13 @@
 		"anthropicApiKey": "Clave API de Anthropic",
 		"getAnthropicApiKey": "Obtener clave API de Anthropic",
 		"anthropicUseAuthToken": "Pasar la clave API de Anthropic como encabezado de autorización en lugar de X-Api-Key",
+		"chutesApiKey": "Clave API de Chutes",
+		"getChutesApiKey": "Obtener clave API de Chutes",
 		"deepSeekApiKey": "Clave API de DeepSeek",
 		"getDeepSeekApiKey": "Obtener clave API de DeepSeek",
 		"geminiApiKey": "Clave API de Gemini",
+		"getGroqApiKey": "Obtener clave API de Groq",
+		"groqApiKey": "Clave API de Groq",
 		"getGeminiApiKey": "Obtener clave API de Gemini",
 		"openAiApiKey": "Clave API de OpenAI",
 		"openAiBaseUrl": "URL base",

+ 4 - 0
webview-ui/src/i18n/locales/fr/settings.json

@@ -123,9 +123,13 @@
 		"anthropicApiKey": "Clé API Anthropic",
 		"getAnthropicApiKey": "Obtenir la clé API Anthropic",
 		"anthropicUseAuthToken": "Passer la clé API Anthropic comme en-tête d'autorisation au lieu de X-Api-Key",
+		"chutesApiKey": "Clé API Chutes",
+		"getChutesApiKey": "Obtenir la clé API Chutes",
 		"deepSeekApiKey": "Clé API DeepSeek",
 		"getDeepSeekApiKey": "Obtenir la clé API DeepSeek",
 		"geminiApiKey": "Clé API Gemini",
+		"getGroqApiKey": "Obtenir la clé API Groq",
+		"groqApiKey": "Clé API Groq",
 		"getGeminiApiKey": "Obtenir la clé API Gemini",
 		"openAiApiKey": "Clé API OpenAI",
 		"openAiBaseUrl": "URL de base",

+ 4 - 0
webview-ui/src/i18n/locales/hi/settings.json

@@ -123,9 +123,13 @@
 		"anthropicApiKey": "Anthropic API कुंजी",
 		"getAnthropicApiKey": "Anthropic API कुंजी प्राप्त करें",
 		"anthropicUseAuthToken": "X-Api-Key के बजाय Anthropic API कुंजी को Authorization हेडर के रूप में पास करें",
+		"chutesApiKey": "Chutes API कुंजी",
+		"getChutesApiKey": "Chutes API कुंजी प्राप्त करें",
 		"deepSeekApiKey": "DeepSeek API कुंजी",
 		"getDeepSeekApiKey": "DeepSeek API कुंजी प्राप्त करें",
 		"geminiApiKey": "Gemini API कुंजी",
+		"getGroqApiKey": "Groq API कुंजी प्राप्त करें",
+		"groqApiKey": "Groq API कुंजी",
 		"getGeminiApiKey": "Gemini API कुंजी प्राप्त करें",
 		"openAiApiKey": "OpenAI API कुंजी",
 		"openAiBaseUrl": "बेस URL",

+ 4 - 0
webview-ui/src/i18n/locales/it/settings.json

@@ -123,9 +123,13 @@
 		"anthropicApiKey": "Chiave API Anthropic",
 		"getAnthropicApiKey": "Ottieni chiave API Anthropic",
 		"anthropicUseAuthToken": "Passa la chiave API Anthropic come header di autorizzazione invece di X-Api-Key",
+		"chutesApiKey": "Chiave API Chutes",
+		"getChutesApiKey": "Ottieni chiave API Chutes",
 		"deepSeekApiKey": "Chiave API DeepSeek",
 		"getDeepSeekApiKey": "Ottieni chiave API DeepSeek",
 		"geminiApiKey": "Chiave API Gemini",
+		"getGroqApiKey": "Ottieni chiave API Groq",
+		"groqApiKey": "Chiave API Groq",
 		"getGeminiApiKey": "Ottieni chiave API Gemini",
 		"openAiApiKey": "Chiave API OpenAI",
 		"openAiBaseUrl": "URL base",

+ 4 - 0
webview-ui/src/i18n/locales/ja/settings.json

@@ -123,9 +123,13 @@
 		"anthropicApiKey": "Anthropic APIキー",
 		"getAnthropicApiKey": "Anthropic APIキーを取得",
 		"anthropicUseAuthToken": "Anthropic APIキーをX-Api-Keyの代わりにAuthorizationヘッダーとして渡す",
+		"chutesApiKey": "Chutes APIキー",
+		"getChutesApiKey": "Chutes APIキーを取得",
 		"deepSeekApiKey": "DeepSeek APIキー",
 		"getDeepSeekApiKey": "DeepSeek APIキーを取得",
 		"geminiApiKey": "Gemini APIキー",
+		"getGroqApiKey": "Groq APIキーを取得",
+		"groqApiKey": "Groq APIキー",
 		"getGeminiApiKey": "Gemini APIキーを取得",
 		"openAiApiKey": "OpenAI APIキー",
 		"openAiBaseUrl": "ベースURL",

+ 4 - 0
webview-ui/src/i18n/locales/ko/settings.json

@@ -123,9 +123,13 @@
 		"anthropicApiKey": "Anthropic API 키",
 		"getAnthropicApiKey": "Anthropic API 키 받기",
 		"anthropicUseAuthToken": "X-Api-Key 대신 Authorization 헤더로 Anthropic API 키 전달",
+		"chutesApiKey": "Chutes API 키",
+		"getChutesApiKey": "Chutes API 키 받기",
 		"deepSeekApiKey": "DeepSeek API 키",
 		"getDeepSeekApiKey": "DeepSeek API 키 받기",
 		"geminiApiKey": "Gemini API 키",
+		"getGroqApiKey": "Groq API 키 받기",
+		"groqApiKey": "Groq API 키",
 		"getGeminiApiKey": "Gemini API 키 받기",
 		"openAiApiKey": "OpenAI API 키",
 		"openAiBaseUrl": "기본 URL",

+ 4 - 0
webview-ui/src/i18n/locales/pl/settings.json

@@ -123,9 +123,13 @@
 		"anthropicApiKey": "Klucz API Anthropic",
 		"getAnthropicApiKey": "Uzyskaj klucz API Anthropic",
 		"anthropicUseAuthToken": "Przekaż klucz API Anthropic jako nagłówek Authorization zamiast X-Api-Key",
+		"chutesApiKey": "Klucz API Chutes",
+		"getChutesApiKey": "Uzyskaj klucz API Chutes",
 		"deepSeekApiKey": "Klucz API DeepSeek",
 		"getDeepSeekApiKey": "Uzyskaj klucz API DeepSeek",
 		"geminiApiKey": "Klucz API Gemini",
+		"getGroqApiKey": "Uzyskaj klucz API Groq",
+		"groqApiKey": "Klucz API Groq",
 		"getGeminiApiKey": "Uzyskaj klucz API Gemini",
 		"openAiApiKey": "Klucz API OpenAI",
 		"openAiBaseUrl": "URL bazowy",

+ 4 - 0
webview-ui/src/i18n/locales/pt-BR/settings.json

@@ -123,9 +123,13 @@
 		"anthropicApiKey": "Chave de API Anthropic",
 		"getAnthropicApiKey": "Obter chave de API Anthropic",
 		"anthropicUseAuthToken": "Passar a chave de API Anthropic como cabeçalho Authorization em vez de X-Api-Key",
+		"chutesApiKey": "Chave de API Chutes",
+		"getChutesApiKey": "Obter chave de API Chutes",
 		"deepSeekApiKey": "Chave de API DeepSeek",
 		"getDeepSeekApiKey": "Obter chave de API DeepSeek",
 		"geminiApiKey": "Chave de API Gemini",
+		"getGroqApiKey": "Obter chave de API Groq",
+		"groqApiKey": "Chave de API Groq",
 		"getGeminiApiKey": "Obter chave de API Gemini",
 		"openAiApiKey": "Chave de API OpenAI",
 		"openAiBaseUrl": "URL Base",

+ 4 - 0
webview-ui/src/i18n/locales/ru/settings.json

@@ -123,9 +123,13 @@
 		"anthropicApiKey": "Anthropic API-ключ",
 		"getAnthropicApiKey": "Получить Anthropic API-ключ",
 		"anthropicUseAuthToken": "Передавать Anthropic API-ключ как Authorization-заголовок вместо X-Api-Key",
+		"chutesApiKey": "Chutes API-ключ",
+		"getChutesApiKey": "Получить Chutes API-ключ",
 		"deepSeekApiKey": "DeepSeek API-ключ",
 		"getDeepSeekApiKey": "Получить DeepSeek API-ключ",
 		"geminiApiKey": "Gemini API-ключ",
+		"getGroqApiKey": "Получить Groq API-ключ",
+		"groqApiKey": "Groq API-ключ",
 		"getGeminiApiKey": "Получить Gemini API-ключ",
 		"openAiApiKey": "OpenAI API-ключ",
 		"openAiBaseUrl": "Базовый URL",

+ 4 - 0
webview-ui/src/i18n/locales/tr/settings.json

@@ -123,9 +123,13 @@
 		"anthropicApiKey": "Anthropic API Anahtarı",
 		"getAnthropicApiKey": "Anthropic API Anahtarı Al",
 		"anthropicUseAuthToken": "Anthropic API Anahtarını X-Api-Key yerine Authorization başlığı olarak geçir",
+		"chutesApiKey": "Chutes API Anahtarı",
+		"getChutesApiKey": "Chutes API Anahtarı Al",
 		"deepSeekApiKey": "DeepSeek API Anahtarı",
 		"getDeepSeekApiKey": "DeepSeek API Anahtarı Al",
 		"geminiApiKey": "Gemini API Anahtarı",
+		"getGroqApiKey": "Groq API Anahtarı Al",
+		"groqApiKey": "Groq API Anahtarı",
 		"getGeminiApiKey": "Gemini API Anahtarı Al",
 		"openAiApiKey": "OpenAI API Anahtarı",
 		"openAiBaseUrl": "Temel URL",

+ 4 - 0
webview-ui/src/i18n/locales/vi/settings.json

@@ -122,9 +122,13 @@
 		"anthropicApiKey": "Khóa API Anthropic",
 		"getAnthropicApiKey": "Lấy khóa API Anthropic",
 		"anthropicUseAuthToken": "Truyền khóa API Anthropic dưới dạng tiêu đề Authorization thay vì X-Api-Key",
+		"chutesApiKey": "Khóa API Chutes",
+		"getChutesApiKey": "Lấy khóa API Chutes",
 		"deepSeekApiKey": "Khóa API DeepSeek",
 		"getDeepSeekApiKey": "Lấy khóa API DeepSeek",
 		"geminiApiKey": "Khóa API Gemini",
+		"getGroqApiKey": "Lấy khóa API Groq",
+		"groqApiKey": "Khóa API Groq",
 		"getGeminiApiKey": "Lấy khóa API Gemini",
 		"openAiApiKey": "Khóa API OpenAI",
 		"openAiBaseUrl": "URL cơ sở",

+ 4 - 0
webview-ui/src/i18n/locales/zh-CN/settings.json

@@ -123,9 +123,13 @@
 		"anthropicApiKey": "Anthropic API 密钥",
 		"getAnthropicApiKey": "获取 Anthropic API 密钥",
 		"anthropicUseAuthToken": "将 Anthropic API 密钥作为 Authorization 标头传递,而不是 X-Api-Key",
+		"chutesApiKey": "Chutes API 密钥",
+		"getChutesApiKey": "获取 Chutes API 密钥",
 		"deepSeekApiKey": "DeepSeek API 密钥",
 		"getDeepSeekApiKey": "获取 DeepSeek API 密钥",
 		"geminiApiKey": "Gemini API 密钥",
+		"getGroqApiKey": "获取 Groq API 密钥",
+		"groqApiKey": "Groq API 密钥",
 		"getGeminiApiKey": "获取 Gemini API 密钥",
 		"openAiApiKey": "OpenAI API 密钥",
 		"openAiBaseUrl": "OpenAI 基础 URL",

+ 4 - 0
webview-ui/src/i18n/locales/zh-TW/settings.json

@@ -123,9 +123,13 @@
 		"anthropicApiKey": "Anthropic API 金鑰",
 		"getAnthropicApiKey": "取得 Anthropic API 金鑰",
 		"anthropicUseAuthToken": "將 Anthropic API 金鑰作為 Authorization 標頭傳遞,而非使用 X-Api-Key",
+		"chutesApiKey": "Chutes API 金鑰",
+		"getChutesApiKey": "取得 Chutes API 金鑰",
 		"deepSeekApiKey": "DeepSeek API 金鑰",
 		"getDeepSeekApiKey": "取得 DeepSeek API 金鑰",
 		"geminiApiKey": "Gemini API 金鑰",
+		"getGroqApiKey": "取得 Groq API 金鑰",
+		"groqApiKey": "Groq API 金鑰",
 		"getGeminiApiKey": "取得 Gemini API 金鑰",
 		"openAiApiKey": "OpenAI API 金鑰",
 		"openAiBaseUrl": "基礎 URL",