@@ -1,5 +1,144 @@
import { EventEmitter } from "events"

+type ProviderSettings = {
+	apiProvider?:
+		| (
+			| "anthropic"
+			| "glama"
+			| "openrouter"
+			| "bedrock"
+			| "vertex"
+			| "openai"
+			| "ollama"
+			| "vscode-lm"
+			| "lmstudio"
+			| "gemini"
+			| "openai-native"
+			| "mistral"
+			| "deepseek"
+			| "unbound"
+			| "requesty"
+			| "human-relay"
+			| "fake-ai"
+			| "xai"
+			| "groq"
+			| "chutes"
+			| "litellm"
+		)
+		| undefined
+	apiModelId?: string | undefined
+	apiKey?: string | undefined
+	anthropicBaseUrl?: string | undefined
+	anthropicUseAuthToken?: boolean | undefined
+	glamaModelId?: string | undefined
+	glamaApiKey?: string | undefined
+	openRouterApiKey?: string | undefined
+	openRouterModelId?: string | undefined
+	openRouterBaseUrl?: string | undefined
+	openRouterSpecificProvider?: string | undefined
+	openRouterUseMiddleOutTransform?: boolean | undefined
+	awsAccessKey?: string | undefined
+	awsSecretKey?: string | undefined
+	awsSessionToken?: string | undefined
+	awsRegion?: string | undefined
+	awsUseCrossRegionInference?: boolean | undefined
+	awsUsePromptCache?: boolean | undefined
+	awsProfile?: string | undefined
+	awsUseProfile?: boolean | undefined
+	awsCustomArn?: string | undefined
+	vertexKeyFile?: string | undefined
+	vertexJsonCredentials?: string | undefined
+	vertexProjectId?: string | undefined
+	vertexRegion?: string | undefined
+	openAiBaseUrl?: string | undefined
+	openAiApiKey?: string | undefined
+	openAiLegacyFormat?: boolean | undefined
+	openAiR1FormatEnabled?: boolean | undefined
+	openAiModelId?: string | undefined
+	openAiCustomModelInfo?:
+		| ({
+			maxTokens?: (number | null) | undefined
+			maxThinkingTokens?: (number | null) | undefined
+			contextWindow: number
+			supportsImages?: boolean | undefined
+			supportsComputerUse?: boolean | undefined
+			supportsPromptCache: boolean
+			isPromptCacheOptional?: boolean | undefined
+			inputPrice?: number | undefined
+			outputPrice?: number | undefined
+			cacheWritesPrice?: number | undefined
+			cacheReadsPrice?: number | undefined
+			description?: string | undefined
+			reasoningEffort?: ("low" | "medium" | "high") | undefined
+			thinking?: boolean | undefined
+			minTokensPerCachePoint?: number | undefined
+			maxCachePoints?: number | undefined
+			cachableFields?: string[] | undefined
+			tiers?:
+				| {
+					contextWindow: number
+					inputPrice?: number | undefined
+					outputPrice?: number | undefined
+					cacheWritesPrice?: number | undefined
+					cacheReadsPrice?: number | undefined
+				}[]
+				| undefined
+		} | null)
+		| undefined
+	openAiUseAzure?: boolean | undefined
+	azureApiVersion?: string | undefined
+	openAiStreamingEnabled?: boolean | undefined
+	enableReasoningEffort?: boolean | undefined
+	openAiHostHeader?: string | undefined
+	openAiHeaders?:
+		| {
+			[x: string]: string
+		}
+		| undefined
+	ollamaModelId?: string | undefined
+	ollamaBaseUrl?: string | undefined
+	vsCodeLmModelSelector?:
+		| {
+			vendor?: string | undefined
+			family?: string | undefined
+			version?: string | undefined
+			id?: string | undefined
+		}
+		| undefined
+	lmStudioModelId?: string | undefined
+	lmStudioBaseUrl?: string | undefined
+	lmStudioDraftModelId?: string | undefined
+	lmStudioSpeculativeDecodingEnabled?: boolean | undefined
+	geminiApiKey?: string | undefined
+	googleGeminiBaseUrl?: string | undefined
+	openAiNativeApiKey?: string | undefined
+	openAiNativeBaseUrl?: string | undefined
+	mistralApiKey?: string | undefined
+	mistralCodestralUrl?: string | undefined
+	deepSeekBaseUrl?: string | undefined
+	deepSeekApiKey?: string | undefined
+	unboundApiKey?: string | undefined
+	unboundModelId?: string | undefined
+	requestyApiKey?: string | undefined
+	requestyModelId?: string | undefined
+	xaiApiKey?: string | undefined
+	groqApiKey?: string | undefined
+	chutesApiKey?: string | undefined
+	litellmBaseUrl?: string | undefined
+	litellmApiKey?: string | undefined
+	litellmModelId?: string | undefined
+	modelMaxTokens?: number | undefined
+	modelMaxThinkingTokens?: number | undefined
+	includeMaxTokens?: boolean | undefined
+	reasoningEffort?: ("low" | "medium" | "high") | undefined
+	promptCachingDisabled?: boolean | undefined
+	diffEnabled?: boolean | undefined
+	fuzzyMatchThreshold?: number | undefined
+	modelTemperature?: (number | null) | undefined
+	rateLimitSeconds?: number | undefined
+	fakeAi?: unknown | undefined
+}
+
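
// Illustrative sketch (not part of the diff): a minimal ProviderSettings value for the
// Anthropic provider, showing how the optional fields above compose. The model ID, the
// key placeholder, and the numeric values are assumptions chosen only for the example.
const exampleAnthropicSettings: ProviderSettings = {
	apiProvider: "anthropic",
	apiModelId: "claude-3-7-sonnet-20250219", // hypothetical model ID
	apiKey: "<anthropic-api-key>", // placeholder; never hard-code real keys
	modelTemperature: 0.2,
	diffEnabled: true,
	rateLimitSeconds: 5,
}
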
type GlobalSettings = {
	currentApiConfigName?: string | undefined
	listApiConfigMeta?:
@@ -170,175 +309,6 @@ type GlobalSettings = {
	historyPreviewCollapsed?: boolean | undefined
}

-type ProviderSettings = {
-	apiProvider?:
-		| (
-			| "anthropic"
-			| "glama"
-			| "openrouter"
-			| "bedrock"
-			| "vertex"
-			| "openai"
-			| "ollama"
-			| "vscode-lm"
-			| "lmstudio"
-			| "gemini"
-			| "openai-native"
-			| "mistral"
-			| "deepseek"
-			| "unbound"
-			| "requesty"
-			| "human-relay"
-			| "fake-ai"
-			| "xai"
-			| "groq"
-			| "chutes"
-			| "litellm"
-		)
-		| undefined
-	apiModelId?: string | undefined
-	apiKey?: string | undefined
-	anthropicBaseUrl?: string | undefined
-	anthropicUseAuthToken?: boolean | undefined
-	glamaModelId?: string | undefined
-	glamaApiKey?: string | undefined
-	openRouterApiKey?: string | undefined
-	openRouterModelId?: string | undefined
-	openRouterBaseUrl?: string | undefined
-	openRouterSpecificProvider?: string | undefined
-	openRouterUseMiddleOutTransform?: boolean | undefined
-	awsAccessKey?: string | undefined
-	awsSecretKey?: string | undefined
-	awsSessionToken?: string | undefined
-	awsRegion?: string | undefined
-	awsUseCrossRegionInference?: boolean | undefined
-	awsUsePromptCache?: boolean | undefined
-	awsProfile?: string | undefined
-	awsUseProfile?: boolean | undefined
-	awsCustomArn?: string | undefined
-	vertexKeyFile?: string | undefined
-	vertexJsonCredentials?: string | undefined
-	vertexProjectId?: string | undefined
-	vertexRegion?: string | undefined
-	openAiBaseUrl?: string | undefined
-	openAiApiKey?: string | undefined
-	openAiLegacyFormat?: boolean | undefined
-	openAiR1FormatEnabled?: boolean | undefined
-	openAiModelId?: string | undefined
-	openAiCustomModelInfo?:
-		| ({
-			maxTokens?: (number | null) | undefined
-			maxThinkingTokens?: (number | null) | undefined
-			contextWindow: number
-			supportsImages?: boolean | undefined
-			supportsComputerUse?: boolean | undefined
-			supportsPromptCache: boolean
-			isPromptCacheOptional?: boolean | undefined
-			inputPrice?: number | undefined
-			outputPrice?: number | undefined
-			cacheWritesPrice?: number | undefined
-			cacheReadsPrice?: number | undefined
-			description?: string | undefined
-			reasoningEffort?: ("low" | "medium" | "high") | undefined
-			thinking?: boolean | undefined
-			minTokensPerCachePoint?: number | undefined
-			maxCachePoints?: number | undefined
-			cachableFields?: string[] | undefined
-			tiers?:
-				| {
-					contextWindow: number
-					inputPrice?: number | undefined
-					outputPrice?: number | undefined
-					cacheWritesPrice?: number | undefined
-					cacheReadsPrice?: number | undefined
-				}[]
-				| undefined
-		} | null)
-		| undefined
-	openAiUseAzure?: boolean | undefined
-	azureApiVersion?: string | undefined
-	openAiStreamingEnabled?: boolean | undefined
-	enableReasoningEffort?: boolean | undefined
-	openAiHostHeader?: string | undefined
-	openAiHeaders?:
-		| {
-			[x: string]: string
-		}
-		| undefined
-	ollamaModelId?: string | undefined
-	ollamaBaseUrl?: string | undefined
-	vsCodeLmModelSelector?:
-		| {
-			vendor?: string | undefined
-			family?: string | undefined
-			version?: string | undefined
-			id?: string | undefined
-		}
-		| undefined
-	lmStudioModelId?: string | undefined
-	lmStudioBaseUrl?: string | undefined
-	lmStudioDraftModelId?: string | undefined
-	lmStudioSpeculativeDecodingEnabled?: boolean | undefined
-	geminiApiKey?: string | undefined
-	googleGeminiBaseUrl?: string | undefined
-	openAiNativeApiKey?: string | undefined
-	openAiNativeBaseUrl?: string | undefined
-	mistralApiKey?: string | undefined
-	mistralCodestralUrl?: string | undefined
-	deepSeekBaseUrl?: string | undefined
-	deepSeekApiKey?: string | undefined
-	unboundApiKey?: string | undefined
-	unboundModelId?: string | undefined
-	requestyApiKey?: string | undefined
-	requestyModelId?: string | undefined
-	fakeAi?: unknown | undefined
-	xaiApiKey?: string | undefined
-	groqApiKey?: string | undefined
-	chutesApiKey?: string | undefined
-	litellmBaseUrl?: string | undefined
-	litellmApiKey?: string | undefined
-	litellmModelId?: string | undefined
-	includeMaxTokens?: boolean | undefined
-	reasoningEffort?: ("low" | "medium" | "high") | undefined
-	promptCachingDisabled?: boolean | undefined
-	diffEnabled?: boolean | undefined
-	fuzzyMatchThreshold?: number | undefined
-	modelTemperature?: (number | null) | undefined
-	rateLimitSeconds?: number | undefined
-	modelMaxTokens?: number | undefined
-	modelMaxThinkingTokens?: number | undefined
-}
-
-type ProviderSettingsEntry = {
-	id: string
-	name: string
-	apiProvider?:
-		| (
-			| "anthropic"
-			| "glama"
-			| "openrouter"
-			| "bedrock"
-			| "vertex"
-			| "openai"
-			| "ollama"
-			| "vscode-lm"
-			| "lmstudio"
-			| "gemini"
-			| "openai-native"
-			| "mistral"
-			| "deepseek"
-			| "unbound"
-			| "requesty"
-			| "human-relay"
-			| "fake-ai"
-			| "xai"
-			| "groq"
-			| "chutes"
-			| "litellm"
-		)
-		| undefined
-}
-
type ClineMessage = {
	ts: number
	type: "ask" | "say"
@@ -613,10 +583,6 @@ interface RooCodeAPI extends EventEmitter<RooCodeEvents> {
	 * Simulates pressing the secondary button in the chat interface.
	 */
	pressSecondaryButton(): Promise<void>
-	/**
-	 * Returns true if the API is ready to use.
-	 */
-	isReady(): boolean
	/**
	 * Returns the current configuration.
	 * @returns The current configuration.
@@ -627,67 +593,44 @@ interface RooCodeAPI extends EventEmitter<RooCodeEvents> {
	 * @param values An object containing key-value pairs to set.
	 */
	setConfiguration(values: RooCodeSettings): Promise<void>
-	/**
-	 * Returns a list of all configured profile names
-	 * @returns Array of profile names
-	 */
-	getProfiles(): string[]
-	/**
-	 * Returns the profile entry for a given name
-	 * @param name The name of the profile
-	 * @returns The profile entry, or undefined if the profile does not exist
-	 */
-	getProfileEntry(name: string): ProviderSettingsEntry | undefined
	/**
	 * Creates a new API configuration profile
	 * @param name The name of the profile
-	 * @param profile The profile to create; defaults to an empty object
-	 * @param activate Whether to activate the profile after creation; defaults to true
	 * @returns The ID of the created profile
-	 * @throws Error if the profile already exists
-	 */
-	createProfile(name: string, profile?: ProviderSettings, activate?: boolean): Promise<string>
-	/**
-	 * Updates an existing API configuration profile
-	 * @param name The name of the profile
-	 * @param profile The profile to update
-	 * @param activate Whether to activate the profile after update; defaults to true
-	 * @returns The ID of the updated profile
-	 * @throws Error if the profile does not exist
	 */
-	updateProfile(name: string, profile: ProviderSettings, activate?: boolean): Promise<string | undefined>
+	createProfile(name: string): Promise<string>
	/**
-	 * Creates a new API configuration profile or updates an existing one
-	 * @param name The name of the profile
-	 * @param profile The profile to create or update; defaults to an empty object
-	 * @param activate Whether to activate the profile after upsert; defaults to true
-	 * @returns The ID of the upserted profile
+	 * Returns a list of all configured profile names
+	 * @returns Array of profile names
	 */
-	upsertProfile(name: string, profile: ProviderSettings, activate?: boolean): Promise<string | undefined>
+	getProfiles(): string[]
	/**
-	 * Deletes a profile by name
-	 * @param name The name of the profile to delete
+	 * Changes the active API configuration profile
+	 * @param name The name of the profile to activate
	 * @throws Error if the profile does not exist
	 */
-	deleteProfile(name: string): Promise<void>
+	setActiveProfile(name: string): Promise<void>
	/**
	 * Returns the name of the currently active profile
	 * @returns The profile name, or undefined if no profile is active
	 */
	getActiveProfile(): string | undefined
	/**
-	 * Changes the active API configuration profile
-	 * @param name The name of the profile to activate
+	 * Deletes a profile by name
+	 * @param name The name of the profile to delete
	 * @throws Error if the profile does not exist
	 */
-	setActiveProfile(name: string): Promise<string | undefined>
+	deleteProfile(name: string): Promise<void>
+	/**
+	 * Returns true if the API is ready to use.
+	 */
+	isReady(): boolean
}
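
// Usage sketch (illustrative, not part of the diff): exercising the revised RooCodeAPI
// profile methods on the "+" side of the hunk above. How the `api` instance is obtained
// (for example, from the extension's exports) is an assumption and is not shown here.
async function manageProfiles(api: RooCodeAPI): Promise<void> {
	if (!api.isReady()) return

	// createProfile now takes only a name and resolves to the new profile's ID.
	const id = await api.createProfile("work")
	console.log(`created profile ${id}; known profiles:`, api.getProfiles())

	// Activation is a separate call and now resolves to void
	// (the removed signature resolved to string | undefined).
	await api.setActiveProfile("work")
	console.log("active profile:", api.getActiveProfile())

	// deleteProfile still rejects if the named profile does not exist.
	await api.deleteProfile("work")
}
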

export {
	type ClineMessage,
	type GlobalSettings,
	type ProviderSettings,
-	type ProviderSettingsEntry,
	type RooCodeAPI,
	RooCodeEventName,
	type RooCodeEvents,