Browse Source

feat: add Ollama API key support for Turbo mode (#7425)

* feat: add Ollama API key support for Turbo mode

- Add ollamaApiKey field to ProviderSettings schema
- Add ollamaApiKey to SECRET_STATE_KEYS for secure storage
- Update Ollama and NativeOllama providers to use API key for authentication
- Add UI field for Ollama API key (shown when custom base URL is provided)
- Add test coverage for API key functionality

This enables users to use Ollama Turbo with datacenter-grade hardware by providing an API key for authenticated Ollama instances or cloud services.

* fix: use VSCodeTextField for Ollama API key field

Remove non-existent ApiKeyField import and use standard VSCodeTextField with password type, matching other provider implementations

* Add missing translation keys for Ollama API key support

- Add providers.ollama.apiKey and providers.ollama.apiKeyHelp to all 18 language files
- Support for authenticated Ollama instances and cloud services
- Relates to PR #7425

* refactor: improve type safety for Ollama client configuration

- Replace 'any' type with proper OllamaOptions (Config) type
- Import Config type from ollama package for better type checking

---------

Co-authored-by: Roo Code <[email protected]>
Co-authored-by: Daniel Riccio <[email protected]>
roomote[bot] 6 months ago
parent
commit
63b71d8299

+ 1 - 0
packages/types/src/global-settings.ts

@@ -178,6 +178,7 @@ export const SECRET_STATE_KEYS = [
 	"awsSecretKey",
 	"awsSessionToken",
 	"openAiApiKey",
+	"ollamaApiKey",
 	"geminiApiKey",
 	"openAiNativeApiKey",
 	"cerebrasApiKey",

+ 1 - 0
packages/types/src/provider-settings.ts

@@ -188,6 +188,7 @@ const openAiSchema = baseProviderSettingsSchema.extend({
 const ollamaSchema = baseProviderSettingsSchema.extend({
 	ollamaModelId: z.string().optional(),
 	ollamaBaseUrl: z.string().optional(),
+	ollamaApiKey: z.string().optional(),
 })
 
 const vsCodeLmSchema = baseProviderSettingsSchema.extend({

+ 11 - 0
src/api/providers/__tests__/ollama.spec.ts

@@ -92,6 +92,17 @@ describe("OllamaHandler", () => {
 			})
 			expect(handlerWithoutUrl).toBeInstanceOf(OllamaHandler)
 		})
+
+		it("should use API key when provided", () => {
+			const handlerWithApiKey = new OllamaHandler({
+				apiModelId: "llama2",
+				ollamaModelId: "llama2",
+				ollamaBaseUrl: "https://ollama.com",
+				ollamaApiKey: "test-api-key",
+			})
+			expect(handlerWithApiKey).toBeInstanceOf(OllamaHandler)
+			// The API key will be used in the Authorization header
+		})
 	})
 
 	describe("createMessage", () => {

+ 12 - 3
src/api/providers/native-ollama.ts

@@ -1,5 +1,5 @@
 import { Anthropic } from "@anthropic-ai/sdk"
-import { Message, Ollama } from "ollama"
+import { Message, Ollama, type Config as OllamaOptions } from "ollama"
 import { ModelInfo, openAiModelInfoSaneDefaults, DEEP_SEEK_DEFAULT_TEMPERATURE } from "@roo-code/types"
 import { ApiStream } from "../transform/stream"
 import { BaseProvider } from "./base-provider"
@@ -140,10 +140,19 @@ export class NativeOllamaHandler extends BaseProvider implements SingleCompletio
 	private ensureClient(): Ollama {
 		if (!this.client) {
 			try {
-				this.client = new Ollama({
+				const clientOptions: OllamaOptions = {
 					host: this.options.ollamaBaseUrl || "http://localhost:11434",
 					// Note: The ollama npm package handles timeouts internally
-				})
+				}
+
+				// Add API key if provided (for Ollama cloud or authenticated instances)
+				if (this.options.ollamaApiKey) {
+					clientOptions.headers = {
+						Authorization: `Bearer ${this.options.ollamaApiKey}`,
+					}
+				}
+
+				this.client = new Ollama(clientOptions)
 			} catch (error: any) {
 				throw new Error(`Error creating Ollama client: ${error.message}`)
 			}

+ 11 - 1
src/api/providers/ollama.ts

@@ -25,10 +25,20 @@ export class OllamaHandler extends BaseProvider implements SingleCompletionHandl
 		super()
 		this.options = options
 
+		// Use the API key if provided (for Ollama cloud or authenticated instances)
+		// Otherwise use "ollama" as a placeholder for local instances
+		const apiKey = this.options.ollamaApiKey || "ollama"
+
+		const headers: Record<string, string> = {}
+		if (this.options.ollamaApiKey) {
+			headers["Authorization"] = `Bearer ${this.options.ollamaApiKey}`
+		}
+
 		this.client = new OpenAI({
 			baseURL: (this.options.ollamaBaseUrl || "http://localhost:11434") + "/v1",
-			apiKey: "ollama",
+			apiKey: apiKey,
 			timeout: getApiRequestTimeout(),
+			defaultHeaders: headers,
 		})
 	}
 

+ 13 - 0
webview-ui/src/components/settings/providers/Ollama.tsx

@@ -86,6 +86,19 @@ export const Ollama = ({ apiConfiguration, setApiConfigurationField }: OllamaPro
 				className="w-full">
 				<label className="block font-medium mb-1">{t("settings:providers.ollama.baseUrl")}</label>
 			</VSCodeTextField>
+			{apiConfiguration?.ollamaBaseUrl && (
+				<VSCodeTextField
+					value={apiConfiguration?.ollamaApiKey || ""}
+					type="password"
+					onInput={handleInputChange("ollamaApiKey")}
+					placeholder={t("settings:placeholders.apiKey")}
+					className="w-full">
+					<label className="block font-medium mb-1">{t("settings:providers.ollama.apiKey")}</label>
+					<div className="text-xs text-vscode-descriptionForeground mt-1">
+						{t("settings:providers.ollama.apiKeyHelp")}
+					</div>
+				</VSCodeTextField>
+			)}
 			<VSCodeTextField
 				value={apiConfiguration?.ollamaModelId || ""}
 				onInput={handleInputChange("ollamaModelId")}

+ 2 - 0
webview-ui/src/i18n/locales/ca/settings.json

@@ -374,6 +374,8 @@
 		"ollama": {
 			"baseUrl": "URL base (opcional)",
 			"modelId": "ID del model",
+			"apiKey": "Clau API d'Ollama",
+			"apiKeyHelp": "Clau API opcional per a instàncies d'Ollama autenticades o serveis al núvol. Deixa-ho buit per a instal·lacions locals.",
 			"description": "Ollama permet executar models localment al vostre ordinador. Per a instruccions sobre com començar, consulteu la Guia d'inici ràpid.",
 			"warning": "Nota: Roo Code utilitza prompts complexos i funciona millor amb models Claude. Els models menys capaços poden no funcionar com s'espera."
 		},

+ 2 - 0
webview-ui/src/i18n/locales/de/settings.json

@@ -374,6 +374,8 @@
 		"ollama": {
 			"baseUrl": "Basis-URL (optional)",
 			"modelId": "Modell-ID",
+			"apiKey": "Ollama API-Schlüssel",
+			"apiKeyHelp": "Optionaler API-Schlüssel für authentifizierte Ollama-Instanzen oder Cloud-Services. Leer lassen für lokale Installationen.",
 			"description": "Ollama ermöglicht es dir, Modelle lokal auf deinem Computer auszuführen. Eine Anleitung zum Einstieg findest du im Schnellstart-Guide.",
 			"warning": "Hinweis: Roo Code verwendet komplexe Prompts und funktioniert am besten mit Claude-Modellen. Weniger leistungsfähige Modelle funktionieren möglicherweise nicht wie erwartet."
 		},

+ 2 - 0
webview-ui/src/i18n/locales/en/settings.json

@@ -373,6 +373,8 @@
 		"ollama": {
 			"baseUrl": "Base URL (optional)",
 			"modelId": "Model ID",
+			"apiKey": "Ollama API Key",
+			"apiKeyHelp": "Optional API key for authenticated Ollama instances or cloud services. Leave empty for local installations.",
 			"description": "Ollama allows you to run models locally on your computer. For instructions on how to get started, see their quickstart guide.",
 			"warning": "Note: Roo Code uses complex prompts and works best with Claude models. Less capable models may not work as expected."
 		},

+ 2 - 0
webview-ui/src/i18n/locales/es/settings.json

@@ -374,6 +374,8 @@
 		"ollama": {
 			"baseUrl": "URL base (opcional)",
 			"modelId": "ID del modelo",
+			"apiKey": "Clave API de Ollama",
+			"apiKeyHelp": "Clave API opcional para instancias de Ollama autenticadas o servicios en la nube. Deja vacío para instalaciones locales.",
 			"description": "Ollama le permite ejecutar modelos localmente en su computadora. Para obtener instrucciones sobre cómo comenzar, consulte la guía de inicio rápido.",
 			"warning": "Nota: Roo Code utiliza prompts complejos y funciona mejor con modelos Claude. Los modelos menos capaces pueden no funcionar como se espera."
 		},

+ 2 - 0
webview-ui/src/i18n/locales/fr/settings.json

@@ -374,6 +374,8 @@
 		"ollama": {
 			"baseUrl": "URL de base (optionnel)",
 			"modelId": "ID du modèle",
+			"apiKey": "Clé API Ollama",
+			"apiKeyHelp": "Clé API optionnelle pour les instances Ollama authentifiées ou les services cloud. Laissez vide pour les installations locales.",
 			"description": "Ollama vous permet d'exécuter des modèles localement sur votre ordinateur. Pour obtenir des instructions sur la mise en route, consultez le guide de démarrage rapide.",
 			"warning": "Remarque : Roo Code utilise des prompts complexes et fonctionne mieux avec les modèles Claude. Les modèles moins performants peuvent ne pas fonctionner comme prévu."
 		},

+ 2 - 0
webview-ui/src/i18n/locales/hi/settings.json

@@ -374,6 +374,8 @@
 		"ollama": {
 			"baseUrl": "बेस URL (वैकल्पिक)",
 			"modelId": "मॉडल ID",
+			"apiKey": "Ollama API Key",
+			"apiKeyHelp": "प्रमाणित Ollama इंस्टेंसेस या क्लाउड सेवाओं के लिए वैकल्पिक API key। स्थानीय इंस्टॉलेशन के लिए खाली छोड़ें।",
 			"description": "Ollama आपको अपने कंप्यूटर पर स्थानीय रूप से मॉडल चलाने की अनुमति देता है। आरंभ करने के निर्देशों के लिए, उनकी क्विकस्टार्ट गाइड देखें।",
 			"warning": "नोट: Roo Code जटिल प्रॉम्प्ट्स का उपयोग करता है और Claude मॉडल के साथ सबसे अच्छा काम करता है। कम क्षमता वाले मॉडल अपेक्षित रूप से काम नहीं कर सकते हैं।"
 		},

+ 2 - 0
webview-ui/src/i18n/locales/id/settings.json

@@ -378,6 +378,8 @@
 		"ollama": {
 			"baseUrl": "Base URL (opsional)",
 			"modelId": "Model ID",
+			"apiKey": "Ollama API Key",
+			"apiKeyHelp": "API key opsional untuk instance Ollama yang terautentikasi atau layanan cloud. Biarkan kosong untuk instalasi lokal.",
 			"description": "Ollama memungkinkan kamu menjalankan model secara lokal di komputer. Untuk instruksi cara memulai, lihat panduan quickstart mereka.",
 			"warning": "Catatan: Roo Code menggunakan prompt kompleks dan bekerja terbaik dengan model Claude. Model yang kurang mampu mungkin tidak bekerja seperti yang diharapkan."
 		},

+ 2 - 0
webview-ui/src/i18n/locales/it/settings.json

@@ -374,6 +374,8 @@
 		"ollama": {
 			"baseUrl": "URL base (opzionale)",
 			"modelId": "ID modello",
+			"apiKey": "Chiave API Ollama",
+			"apiKeyHelp": "Chiave API opzionale per istanze Ollama autenticate o servizi cloud. Lascia vuoto per installazioni locali.",
 			"description": "Ollama ti permette di eseguire modelli localmente sul tuo computer. Per iniziare, consulta la guida rapida.",
 			"warning": "Nota: Roo Code utilizza prompt complessi e funziona meglio con i modelli Claude. I modelli con capacità inferiori potrebbero non funzionare come previsto."
 		},

+ 2 - 0
webview-ui/src/i18n/locales/ja/settings.json

@@ -374,6 +374,8 @@
 		"ollama": {
 			"baseUrl": "ベースURL(オプション)",
 			"modelId": "モデルID",
+			"apiKey": "Ollama APIキー",
+			"apiKeyHelp": "認証されたOllamaインスタンスやクラウドサービス用のオプションAPIキー。ローカルインストールの場合は空のままにしてください。",
 			"description": "Ollamaを使用すると、ローカルコンピューターでモデルを実行できます。始め方については、クイックスタートガイドをご覧ください。",
 			"warning": "注意:Roo Codeは複雑なプロンプトを使用し、Claudeモデルで最適に動作します。能力の低いモデルは期待通りに動作しない場合があります。"
 		},

+ 2 - 0
webview-ui/src/i18n/locales/ko/settings.json

@@ -374,6 +374,8 @@
 		"ollama": {
 			"baseUrl": "기본 URL (선택사항)",
 			"modelId": "모델 ID",
+			"apiKey": "Ollama API 키",
+			"apiKeyHelp": "인증된 Ollama 인스턴스나 클라우드 서비스용 선택적 API 키. 로컬 설치의 경우 비워두세요.",
 			"description": "Ollama를 사용하면 컴퓨터에서 로컬로 모델을 실행할 수 있습니다. 시작하는 방법은 빠른 시작 가이드를 참조하세요.",
 			"warning": "참고: Roo Code는 복잡한 프롬프트를 사용하며 Claude 모델에서 가장 잘 작동합니다. 덜 강력한 모델은 예상대로 작동하지 않을 수 있습니다."
 		},

+ 2 - 0
webview-ui/src/i18n/locales/nl/settings.json

@@ -374,6 +374,8 @@
 		"ollama": {
 			"baseUrl": "Basis-URL (optioneel)",
 			"modelId": "Model-ID",
+			"apiKey": "Ollama API-sleutel",
+			"apiKeyHelp": "Optionele API-sleutel voor geauthenticeerde Ollama-instanties of cloudservices. Laat leeg voor lokale installaties.",
 			"description": "Ollama laat je modellen lokaal op je computer draaien. Zie hun quickstart-gids voor instructies.",
 			"warning": "Let op: Roo Code gebruikt complexe prompts en werkt het beste met Claude-modellen. Minder krachtige modellen werken mogelijk niet zoals verwacht."
 		},

+ 2 - 0
webview-ui/src/i18n/locales/pl/settings.json

@@ -374,6 +374,8 @@
 		"ollama": {
 			"baseUrl": "URL bazowy (opcjonalnie)",
 			"modelId": "ID modelu",
+			"apiKey": "Klucz API Ollama",
+			"apiKeyHelp": "Opcjonalny klucz API dla uwierzytelnionych instancji Ollama lub usług chmurowych. Pozostaw puste dla instalacji lokalnych.",
 			"description": "Ollama pozwala na lokalne uruchamianie modeli na twoim komputerze. Aby rozpocząć, zapoznaj się z przewodnikiem szybkiego startu.",
 			"warning": "Uwaga: Roo Code używa złożonych podpowiedzi i działa najlepiej z modelami Claude. Modele o niższych możliwościach mogą nie działać zgodnie z oczekiwaniami."
 		},

+ 2 - 0
webview-ui/src/i18n/locales/pt-BR/settings.json

@@ -374,6 +374,8 @@
 		"ollama": {
 			"baseUrl": "URL Base (opcional)",
 			"modelId": "ID do Modelo",
+			"apiKey": "Chave API Ollama",
+			"apiKeyHelp": "Chave API opcional para instâncias Ollama autenticadas ou serviços em nuvem. Deixe vazio para instalações locais.",
 			"description": "O Ollama permite que você execute modelos localmente em seu computador. Para instruções sobre como começar, veja o guia de início rápido deles.",
 			"warning": "Nota: O Roo Code usa prompts complexos e funciona melhor com modelos Claude. Modelos menos capazes podem não funcionar como esperado."
 		},

+ 2 - 0
webview-ui/src/i18n/locales/ru/settings.json

@@ -374,6 +374,8 @@
 		"ollama": {
 			"baseUrl": "Базовый URL (опционально)",
 			"modelId": "ID модели",
+			"apiKey": "API-ключ Ollama",
+			"apiKeyHelp": "Опциональный API-ключ для аутентифицированных экземпляров Ollama или облачных сервисов. Оставьте пустым для локальных установок.",
 			"description": "Ollama позволяет запускать модели локально на вашем компьютере. Для начала ознакомьтесь с кратким руководством.",
 			"warning": "Примечание: Roo Code использует сложные подсказки и лучше всего работает с моделями Claude. Менее мощные модели могут работать некорректно."
 		},

+ 2 - 0
webview-ui/src/i18n/locales/tr/settings.json

@@ -374,6 +374,8 @@
 		"ollama": {
 			"baseUrl": "Temel URL (İsteğe bağlı)",
 			"modelId": "Model Kimliği",
+			"apiKey": "Ollama API Anahtarı",
+			"apiKeyHelp": "Kimlik doğrulamalı Ollama örnekleri veya bulut hizmetleri için isteğe bağlı API anahtarı. Yerel kurulumlar için boş bırakın.",
 			"description": "Ollama, modelleri bilgisayarınızda yerel olarak çalıştırmanıza olanak tanır. Başlamak için hızlı başlangıç kılavuzlarına bakın.",
 			"warning": "Not: Roo Code karmaşık istemler kullanır ve Claude modelleriyle en iyi şekilde çalışır. Daha az yetenekli modeller beklendiği gibi çalışmayabilir."
 		},

+ 2 - 0
webview-ui/src/i18n/locales/vi/settings.json

@@ -374,6 +374,8 @@
 		"ollama": {
 			"baseUrl": "URL cơ sở (tùy chọn)",
 			"modelId": "ID mô hình",
+			"apiKey": "Khóa API Ollama",
+			"apiKeyHelp": "Khóa API tùy chọn cho các phiên bản Ollama đã xác thực hoặc dịch vụ đám mây. Để trống cho cài đặt cục bộ.",
 			"description": "Ollama cho phép bạn chạy các mô hình cục bộ trên máy tính của bạn. Để biết hướng dẫn về cách bắt đầu, xem hướng dẫn nhanh của họ.",
 			"warning": "Lưu ý: Roo Code sử dụng các lời nhắc phức tạp và hoạt động tốt nhất với các mô hình Claude. Các mô hình kém mạnh hơn có thể không hoạt động như mong đợi."
 		},

+ 2 - 0
webview-ui/src/i18n/locales/zh-CN/settings.json

@@ -374,6 +374,8 @@
 		"ollama": {
 			"baseUrl": "基础 URL(可选)",
 			"modelId": "模型 ID",
+			"apiKey": "Ollama API 密钥",
+			"apiKeyHelp": "用于已认证 Ollama 实例或云服务的可选 API 密钥。本地安装请留空。",
 			"description": "Ollama 允许您在本地计算机上运行模型。有关如何开始使用的说明,请参阅其快速入门指南。",
 			"warning": "注意:Roo Code 使用复杂的提示,与 Claude 模型配合最佳。功能较弱的模型可能无法按预期工作。"
 		},

+ 2 - 0
webview-ui/src/i18n/locales/zh-TW/settings.json

@@ -374,6 +374,8 @@
 		"ollama": {
 			"baseUrl": "基礎 URL(選用)",
 			"modelId": "模型 ID",
+			"apiKey": "Ollama API 金鑰",
+			"apiKeyHelp": "用於已認證 Ollama 執行個體或雲端服務的選用 API 金鑰。本機安裝請留空。",
 			"description": "Ollama 允許您在本機電腦執行模型。請參閱快速入門指南。",
 			"warning": "注意:Roo Code 使用複雜提示,與 Claude 模型搭配最佳。功能較弱的模型可能無法正常運作。"
 		},