Просмотр исходного кода

feat: add Claude Opus 4.6 model support (#9119)

* feat: add Claude Opus 4.6 model support with 1M context window

Adds support for Claude Opus 4.6, Anthropic's latest model with:
- 200K base context window with optional 1M context variant
- Tiered pricing for >200K context (2x input, 1.5x output pricing)
- Extended thinking/reasoning support
- Prompt caching support

Changes:
- Added model definitions for Anthropic, Bedrock, and Vertex providers
- Added OpenRouter 1M variant support
- Updated thinking models lists across all provider UIs
- Added context window switcher for Opus 4.6
- Updated JP cross-region inference models list

* feat: update featured model to Opus 4.6 in model picker

* chore: add changeset for Claude Opus 4.6

* fix: correct Opus 4.6 model IDs (no date suffix)

---------

Co-authored-by: Robin Newhouse <[email protected]>
Saoud Rizwan 2 месяца назад
Родитель
Commit
7c87017

+ 5 - 0
.changeset/cold-opus-launch.md

@@ -0,0 +1,5 @@
+---
+"cline": patch
+---
+
+Add Claude Opus 4.6 model support

+ 7 - 5
src/core/api/providers/bedrock.ts

@@ -8,15 +8,15 @@ import {
 	InvokeModelWithResponseStreamCommand,
 } from "@aws-sdk/client-bedrock-runtime"
 import { fromNodeProviderChain } from "@aws-sdk/credential-providers"
-import { BedrockModelId, bedrockDefaultModelId, bedrockModels, CLAUDE_SONNET_1M_SUFFIX, ModelInfo } from "@shared/api"
+import { type BedrockModelId, bedrockDefaultModelId, bedrockModels, CLAUDE_SONNET_1M_SUFFIX, type ModelInfo } from "@shared/api"
 import { calculateApiCostOpenAI, calculateApiCostQwen } from "@utils/cost"
 import { ExtensionRegistryInfo } from "@/registry"
-import { ClineStorageMessage } from "@/shared/messages/content"
+import type { ClineStorageMessage } from "@/shared/messages/content"
 import { Logger } from "@/shared/services/Logger"
-import { ApiHandler, CommonApiHandlerOptions } from "../"
+import type { ApiHandler, CommonApiHandlerOptions } from "../"
 import { withRetry } from "../retry"
 import { convertToR1Format } from "../transform/r1-format"
-import { ApiStream } from "../transform/stream"
+import type { ApiStream } from "../transform/stream"
 
 export interface AwsBedrockHandlerOptions extends CommonApiHandlerOptions {
 	apiModelId?: string
@@ -110,9 +110,11 @@ interface ProviderChainOptions {
 	profile?: string
 }
 
-// a special jp inference profile was created for sonnet 4.5 & haiku 4.5
+// a special jp inference profile was created for opus 4.6, sonnet 4.5 & haiku 4.5
 // https://docs.aws.amazon.com/bedrock/latest/userguide/inference-profiles-support.html
 const JP_SUPPORTED_CRIS_MODELS = [
+	"anthropic.claude-opus-4-6-v1:0",
+	"anthropic.claude-opus-4-6-v1:0:1m",
 	"anthropic.claude-sonnet-4-5-20250929-v1:0",
 	"anthropic.claude-sonnet-4-5-20250929-v1:0:1m",
 	"anthropic.claude-haiku-4-5-20251001-v1:0",

+ 1 - 0
src/core/api/providers/requesty.ts

@@ -77,6 +77,7 @@ export class RequestyHandler implements ApiHandler {
 				? { thinking: { type: "enabled", budget_tokens: thinkingBudget } }
 				: { thinking: { type: "disabled" } }
 		const thinkingArgs =
+			model.id.includes("claude-opus-4-6") ||
 			model.id.includes("claude-3-7-sonnet") ||
 			model.id.includes("claude-sonnet-4") ||
 			model.id.includes("claude-opus-4") ||

+ 10 - 3
src/core/api/transform/openrouter-stream.ts

@@ -5,6 +5,7 @@ import {
 	OPENROUTER_PROVIDER_PREFERENCES,
 	openRouterClaudeSonnet41mModelId,
 	openRouterClaudeSonnet451mModelId,
+	openRouterClaudeOpus461mModelId,
 } from "@shared/api"
 import { shouldSkipReasoningForModel } from "@utils/model-utils"
 import OpenAI from "openai"
@@ -30,8 +31,11 @@ export async function createOpenRouterStream(
 		...convertToOpenAiMessages(messages),
 	]
 
-	const isClaudeSonnet1m = model.id === openRouterClaudeSonnet41mModelId || model.id === openRouterClaudeSonnet451mModelId
-	if (isClaudeSonnet1m) {
+	const isClaude1m =
+		model.id === openRouterClaudeSonnet41mModelId ||
+		model.id === openRouterClaudeSonnet451mModelId ||
+		model.id === openRouterClaudeOpus461mModelId
+	if (isClaude1m) {
 		// remove the custom :1m suffix, to create the model id openrouter API expects
 		model.id = model.id.slice(0, -CLAUDE_SONNET_1M_SUFFIX.length)
 	}
@@ -43,6 +47,7 @@ export async function createOpenRouterStream(
 	// this was initially specifically for claude models (some models may 'support prompt caching' automatically without this)
 	// handles direct model.id match logic
 	switch (model.id) {
+		case "anthropic/claude-opus-4.6":
 		case "anthropic/claude-haiku-4.5":
 		case "anthropic/claude-4.5-haiku":
 		case "anthropic/claude-sonnet-4.5":
@@ -110,6 +115,7 @@ export async function createOpenRouterStream(
 	// (models usually default to max tokens allowed)
 	let maxTokens: number | undefined
 	switch (model.id) {
+		case "anthropic/claude-opus-4.6":
 		case "anthropic/claude-haiku-4.5":
 		case "anthropic/claude-4.5-haiku":
 		case "anthropic/claude-sonnet-4.5":
@@ -155,6 +161,7 @@ export async function createOpenRouterStream(
 
 	let reasoning: { max_tokens: number } | undefined
 	switch (model.id) {
+		case "anthropic/claude-opus-4.6":
 		case "anthropic/claude-haiku-4.5":
 		case "anthropic/claude-4.5-haiku":
 		case "anthropic/claude-sonnet-4.5":
@@ -210,7 +217,7 @@ export async function createOpenRouterStream(
 		...(reasoning ? { reasoning } : {}),
 		...(openRouterProviderSorting && !providerPreferences ? { provider: { sort: openRouterProviderSorting } } : {}),
 		...(providerPreferences ? { provider: providerPreferences } : {}),
-		...(isClaudeSonnet1m ? { provider: { order: ["anthropic", "google-vertex/global"], allow_fallbacks: false } } : {}),
+		...(isClaude1m ? { provider: { order: ["anthropic", "google-vertex/global"], allow_fallbacks: false } } : {}),
 		...getOpenAIToolParams(tools),
 		...(model.id.includes("gemini-3") && geminiThinkingLevel
 			? { thinking_config: { thinking_level: geminiThinkingLevel, include_thoughts: true } }

+ 6 - 2
src/core/api/transform/vercel-ai-gateway-stream.ts

@@ -4,6 +4,7 @@ import {
 	ModelInfo,
 	openRouterClaudeSonnet41mModelId,
 	openRouterClaudeSonnet451mModelId,
+	openRouterClaudeOpus461mModelId,
 } from "@shared/api"
 import { shouldSkipReasoningForModel } from "@utils/model-utils"
 import OpenAI from "openai"
@@ -28,8 +29,11 @@ export async function createVercelAIGatewayStream(
 		...convertToOpenAiMessages(messages),
 	]
 
-	const isClaudeSonnet1m = model.id === openRouterClaudeSonnet41mModelId || model.id === openRouterClaudeSonnet451mModelId
-	if (isClaudeSonnet1m) {
+	const isClaude1m =
+		model.id === openRouterClaudeSonnet41mModelId ||
+		model.id === openRouterClaudeSonnet451mModelId ||
+		model.id === openRouterClaudeOpus461mModelId
+	if (isClaude1m) {
 		// remove the custom :1m suffix, to create the model id the API expects
 		model.id = model.id.slice(0, -CLAUDE_SONNET_1M_SUFFIX.length)
 	}

+ 17 - 1
src/core/controller/models/refreshOpenRouterModels.ts

@@ -7,7 +7,9 @@ import path from "path"
 import { StateManager } from "@/core/storage/StateManager"
 import {
 	ANTHROPIC_MAX_THINKING_BUDGET,
+	CLAUDE_OPUS_1M_TIERS,
 	CLAUDE_SONNET_1M_TIERS,
+	openRouterClaudeOpus461mModelId,
 	openRouterClaudeSonnet41mModelId,
 	openRouterClaudeSonnet451mModelId,
 } from "@/shared/api"
@@ -163,6 +165,12 @@ async function fetchAndCacheModels(controller: Controller): Promise<Record<strin
 						modelInfo.cacheWritesPrice = 3.75
 						modelInfo.cacheReadsPrice = 0.3
 						break
+					case "anthropic/claude-opus-4.6":
+						modelInfo.contextWindow = 200_000 // restrict to 200k, 1m variant created below
+						modelInfo.supportsPromptCache = true
+						modelInfo.cacheWritesPrice = 6.25
+						modelInfo.cacheReadsPrice = 0.5
+						break
 					case "anthropic/claude-opus-4.5":
 						modelInfo.supportsPromptCache = true
 						modelInfo.cacheWritesPrice = 6.25
@@ -255,7 +263,7 @@ async function fetchAndCacheModels(controller: Controller): Promise<Record<strin
 
 				models[rawModel.id] = modelInfo
 
-				// add custom :1m model variant
+				// add custom :1m model variant for sonnet
 				if (rawModel.id === "anthropic/claude-sonnet-4" || rawModel.id === "anthropic/claude-sonnet-4.5") {
 					const claudeSonnet1mModelInfo = cloneDeep(modelInfo)
 					claudeSonnet1mModelInfo.contextWindow = 1_000_000 // limiting providers to those that support 1m context window
@@ -265,6 +273,14 @@ async function fetchAndCacheModels(controller: Controller): Promise<Record<strin
 					// sonnet 4.5
 					models[openRouterClaudeSonnet451mModelId] = claudeSonnet1mModelInfo
 				}
+
+				// add custom :1m model variant for opus 4.6
+				if (rawModel.id === "anthropic/claude-opus-4.6") {
+					const claudeOpus1mModelInfo = cloneDeep(modelInfo)
+					claudeOpus1mModelInfo.contextWindow = 1_000_000
+					claudeOpus1mModelInfo.tiers = CLAUDE_OPUS_1M_TIERS
+					models[openRouterClaudeOpus461mModelId] = claudeOpus1mModelInfo
+				}
 			}
 			// Save models and cache them in memory
 			await fs.writeFile(openRouterModelsFilePath, JSON.stringify(models))

+ 90 - 9
src/shared/api.ts

@@ -1,5 +1,5 @@
 import { ApiFormat } from "./proto/cline/models"
-import { ApiHandlerSettings } from "./storage/state-keys"
+import type { ApiHandlerSettings } from "./storage/state-keys"
 
 export type ApiProvider =
 	| "anthropic"
@@ -126,6 +126,22 @@ export const CLAUDE_SONNET_1M_TIERS = [
 		cacheReadsPrice: 0.6,
 	},
 ]
+export const CLAUDE_OPUS_1M_TIERS = [
+	{
+		contextWindow: 200000,
+		inputPrice: 5.0,
+		outputPrice: 25,
+		cacheWritesPrice: 6.25,
+		cacheReadsPrice: 0.5,
+	},
+	{
+		contextWindow: Number.MAX_SAFE_INTEGER,
+		inputPrice: 10,
+		outputPrice: 37.5,
+		cacheWritesPrice: 12.5,
+		cacheReadsPrice: 1.0,
+	},
+]
 
 export interface HicapCompatibleModelInfo extends ModelInfo {
 	temperature?: number
@@ -205,6 +221,29 @@ export const anthropicModels = {
 		cacheReadsPrice: 0.3,
 		tiers: CLAUDE_SONNET_1M_TIERS,
 	},
+	"claude-opus-4-6": {
+		maxTokens: 8192,
+		contextWindow: 200_000,
+		supportsImages: true,
+		supportsPromptCache: true,
+		supportsReasoning: true,
+		inputPrice: 5.0,
+		outputPrice: 25.0,
+		cacheWritesPrice: 6.25,
+		cacheReadsPrice: 0.5,
+	},
+	"claude-opus-4-6:1m": {
+		maxTokens: 8192,
+		contextWindow: 1_000_000,
+		supportsImages: true,
+		supportsPromptCache: true,
+		supportsReasoning: true,
+		inputPrice: 5.0,
+		outputPrice: 25.0,
+		cacheWritesPrice: 6.25,
+		cacheReadsPrice: 0.5,
+		tiers: CLAUDE_OPUS_1M_TIERS,
+	},
 	"claude-opus-4-5-20251101": {
 		maxTokens: 8192,
 		contextWindow: 200_000,
@@ -322,6 +361,11 @@ export const claudeCodeModels = {
 		supportsImages: false,
 		supportsPromptCache: false,
 	},
+	"claude-opus-4-6": {
+		...anthropicModels["claude-opus-4-6"],
+		supportsImages: false,
+		supportsPromptCache: false,
+	},
 	"claude-opus-4-5-20251101": {
 		...anthropicModels["claude-opus-4-5-20251101"],
 		supportsImages: false,
@@ -416,6 +460,31 @@ export const bedrockModels = {
 		cacheReadsPrice: 0.3,
 		tiers: CLAUDE_SONNET_1M_TIERS,
 	},
+	"anthropic.claude-opus-4-6-v1:0": {
+		maxTokens: 8192,
+		contextWindow: 200_000,
+		supportsImages: true,
+		supportsPromptCache: true,
+		supportsReasoning: true,
+		supportsGlobalEndpoint: true,
+		inputPrice: 5.0,
+		outputPrice: 25.0,
+		cacheWritesPrice: 6.25,
+		cacheReadsPrice: 0.5,
+	},
+	"anthropic.claude-opus-4-6-v1:0:1m": {
+		maxTokens: 8192,
+		contextWindow: 1_000_000,
+		supportsImages: true,
+		supportsPromptCache: true,
+		supportsReasoning: true,
+		supportsGlobalEndpoint: true,
+		inputPrice: 5.0,
+		outputPrice: 25.0,
+		cacheWritesPrice: 6.25,
+		cacheReadsPrice: 0.5,
+		tiers: CLAUDE_OPUS_1M_TIERS,
+	},
 	"anthropic.claude-opus-4-5-20251101-v1:0": {
 		maxTokens: 8192,
 		contextWindow: 200_000,
@@ -624,6 +693,7 @@ export const bedrockModels = {
 export const openRouterDefaultModelId = "anthropic/claude-sonnet-4.5" // will always exist in openRouterModels
 export const openRouterClaudeSonnet41mModelId = `anthropic/claude-sonnet-4${CLAUDE_SONNET_1M_SUFFIX}`
 export const openRouterClaudeSonnet451mModelId = `anthropic/claude-sonnet-4.5${CLAUDE_SONNET_1M_SUFFIX}`
+export const openRouterClaudeOpus461mModelId = `anthropic/claude-opus-4.6${CLAUDE_SONNET_1M_SUFFIX}`
 export const openRouterDefaultModelInfo: ModelInfo = {
 	maxTokens: 8192,
 	contextWindow: 200_000,
@@ -805,6 +875,17 @@ export const vertexModels = {
 		cacheReadsPrice: 0.1,
 		supportsReasoning: true,
 	},
+	"claude-opus-4-6": {
+		maxTokens: 8192,
+		contextWindow: 200_000,
+		supportsImages: true,
+		supportsPromptCache: true,
+		inputPrice: 5.0,
+		outputPrice: 25.0,
+		cacheWritesPrice: 6.25,
+		cacheReadsPrice: 0.5,
+		supportsReasoning: true,
+	},
 	"claude-opus-4-5@20251101": {
 		maxTokens: 8192,
 		contextWindow: 200_000,
@@ -1010,7 +1091,7 @@ export const vertexModels = {
 				cacheReadsPrice: 0.31,
 			},
 			{
-				contextWindow: Infinity,
+				contextWindow: Number.POSITIVE_INFINITY,
 				inputPrice: 2.5,
 				outputPrice: 15,
 				cacheReadsPrice: 0.625,
@@ -1079,7 +1160,7 @@ export const vertexModels = {
 				cacheReadsPrice: 0.01875,
 			},
 			{
-				contextWindow: Infinity,
+				contextWindow: Number.POSITIVE_INFINITY,
 				inputPrice: 0.15,
 				outputPrice: 0.6,
 				cacheReadsPrice: 0.0375,
@@ -1162,7 +1243,7 @@ export const geminiModels = {
 				cacheReadsPrice: 0.2,
 			},
 			{
-				contextWindow: Infinity,
+				contextWindow: Number.POSITIVE_INFINITY,
 				inputPrice: 4.0,
 				outputPrice: 18.0,
 				cacheReadsPrice: 0.4,
@@ -1191,7 +1272,7 @@ export const geminiModels = {
 				cacheReadsPrice: 0.03,
 			},
 			{
-				contextWindow: Infinity,
+				contextWindow: Number.POSITIVE_INFINITY,
 				inputPrice: 0.3,
 				outputPrice: 2.5,
 				cacheReadsPrice: 0.03,
@@ -1217,7 +1298,7 @@ export const geminiModels = {
 				cacheReadsPrice: 0.31,
 			},
 			{
-				contextWindow: Infinity,
+				contextWindow: Number.POSITIVE_INFINITY,
 				inputPrice: 2.5,
 				outputPrice: 15,
 				cacheReadsPrice: 0.625,
@@ -1318,7 +1399,7 @@ export const geminiModels = {
 				cacheReadsPrice: 0.01875,
 			},
 			{
-				contextWindow: Infinity,
+				contextWindow: Number.POSITIVE_INFINITY,
 				inputPrice: 0.15,
 				outputPrice: 0.6,
 				cacheReadsPrice: 0.0375,
@@ -4087,7 +4168,7 @@ export const mainlandZAiModels = {
 				cacheReadsPrice: 0.057,
 			},
 			{
-				contextWindow: Infinity,
+				contextWindow: Number.POSITIVE_INFINITY,
 				inputPrice: 0.29,
 				outputPrice: 1.14,
 				cacheReadsPrice: 0.057,
@@ -4119,7 +4200,7 @@ export const mainlandZAiModels = {
 				cacheReadsPrice: 0.017,
 			},
 			{
-				contextWindow: Infinity,
+				contextWindow: Number.POSITIVE_INFINITY,
 				inputPrice: 0.086,
 				outputPrice: 0.57,
 				cacheReadsPrice: 0.017,

+ 1 - 1
webview-ui/src/components/chat/chat-view/components/layout/WelcomeSection.tsx

@@ -155,7 +155,7 @@ export const WelcomeSection: React.FC<WelcomeSectionProps> = ({
 					break
 
 				case BannerActionType.SetModel: {
-					const modelId = action.arg || "anthropic/claude-opus-4.5"
+					const modelId = action.arg || "anthropic/claude-opus-4.6"
 					handleFieldsChange({
 						planModeOpenRouterModelId: modelId,
 						actModeOpenRouterModelId: modelId,

+ 12 - 3
webview-ui/src/components/settings/OpenRouterModelPicker.tsx

@@ -59,9 +59,9 @@ export const recommendedModels = [
 		label: "NEW",
 	},
 	{
-		id: "anthropic/claude-opus-4.5",
-		description: "State-of-the-art for complex coding",
-		label: "HOT",
+		id: "anthropic/claude-opus-4.6",
+		description: "Most intelligent model for agents and coding",
+		label: "NEW",
 	},
 	{
 		id: "openai/gpt-5.2-codex",
@@ -288,6 +288,7 @@ const OpenRouterModelPicker: React.FC<OpenRouterModelPickerProps> = ({
 	const showBudgetSlider = useMemo(() => {
 		return (
 			Object.entries(openRouterModels)?.some(([id, m]) => id === selectedModelId && m.thinkingConfig) ||
+			selectedModelId?.toLowerCase().includes("claude-opus-4.6") ||
 			selectedModelId?.toLowerCase().includes("claude-haiku-4.5") ||
 			selectedModelId?.toLowerCase().includes("claude-4.5-haiku") ||
 			selectedModelId?.toLowerCase().includes("claude-sonnet-4.5") ||
@@ -441,6 +442,14 @@ const OpenRouterModelPicker: React.FC<OpenRouterModelPickerProps> = ({
 					)}
 				</DropdownWrapper>
 
+				{/* Context window switcher for Claude Opus 4.6 */}
+				<ContextWindowSwitcher
+					base1mModelId={`anthropic/claude-opus-4.6${CLAUDE_SONNET_1M_SUFFIX}`}
+					base200kModelId="anthropic/claude-opus-4.6"
+					onModelChange={handleModelChange}
+					selectedModelId={selectedModelId}
+				/>
+
 				{/* Context window switcher for Claude Sonnet 4.5 */}
 				<ContextWindowSwitcher
 					base1mModelId={`anthropic/claude-sonnet-4.5${CLAUDE_SONNET_1M_SUFFIX}`}

+ 1 - 0
webview-ui/src/components/settings/VercelModelPicker.tsx

@@ -161,6 +161,7 @@ const VercelModelPicker: React.FC<VercelModelPickerProps> = ({ isPopup, currentM
 
 	const showBudgetSlider = useMemo(() => {
 		return (
+			selectedModelId?.toLowerCase().includes("claude-opus-4.6") ||
 			selectedModelId?.toLowerCase().includes("claude-haiku-4.5") ||
 			selectedModelId?.toLowerCase().includes("claude-4.5-haiku") ||
 			selectedModelId?.toLowerCase().includes("claude-sonnet-4.5") ||

+ 11 - 1
webview-ui/src/components/settings/providers/AnthropicProvider.tsx

@@ -1,5 +1,5 @@
 import { anthropicModels, CLAUDE_SONNET_1M_SUFFIX } from "@shared/api"
-import { Mode } from "@shared/storage/types"
+import type { Mode } from "@shared/storage/types"
 import { useExtensionState } from "@/context/ExtensionStateContext"
 import { ApiKeyField } from "../common/ApiKeyField"
 import { BaseUrlField } from "../common/BaseUrlField"
@@ -13,6 +13,8 @@ import { useApiConfigurationHandlers } from "../utils/useApiConfigurationHandler
 
 // Anthropic models that support thinking/reasoning mode
 export const SUPPORTED_ANTHROPIC_THINKING_MODELS = [
+	"claude-opus-4-6",
+	`claude-opus-4-6${CLAUDE_SONNET_1M_SUFFIX}`,
 	"claude-3-7-sonnet-20250219",
 	"claude-sonnet-4-20250514",
 	`claude-sonnet-4-20250514${CLAUDE_SONNET_1M_SUFFIX}`,
@@ -83,6 +85,14 @@ export const AnthropicProvider = ({ showModelOptions, isPopup, currentMode }: An
 						selectedModelId={selectedModelId}
 					/>
 
+					{/* Context window switcher for Claude Opus 4.6 */}
+					<ContextWindowSwitcher
+						base1mModelId={`claude-opus-4-6${CLAUDE_SONNET_1M_SUFFIX}`}
+						base200kModelId="claude-opus-4-6"
+						onModelChange={handleModelChange}
+						selectedModelId={selectedModelId}
+					/>
+
 					{/* Context window switcher for Claude Sonnet 4.5 */}
 					<ContextWindowSwitcher
 						base1mModelId={`claude-sonnet-4-5-20250929${CLAUDE_SONNET_1M_SUFFIX}`}

+ 3 - 1
webview-ui/src/components/settings/providers/BedrockProvider.tsx

@@ -1,6 +1,6 @@
 import { bedrockDefaultModelId, bedrockModels, CLAUDE_SONNET_1M_SUFFIX } from "@shared/api"
 import BedrockData from "@shared/providers/bedrock.json"
-import { Mode } from "@shared/storage/types"
+import type { Mode } from "@shared/storage/types"
 import { VSCodeCheckbox, VSCodeDropdown, VSCodeOption, VSCodeRadio, VSCodeRadioGroup } from "@vscode/webview-ui-toolkit/react"
 import { useState } from "react"
 import { Tooltip, TooltipContent, TooltipTrigger } from "@/components/ui/tooltip"
@@ -13,6 +13,8 @@ import { getModeSpecificFields, normalizeApiConfiguration } from "../utils/provi
 import { useApiConfigurationHandlers } from "../utils/useApiConfigurationHandlers"
 
 export const SUPPORTED_BEDROCK_THINKING_MODELS = [
+	"anthropic.claude-opus-4-6-v1:0",
+	`anthropic.claude-opus-4-6-v1:0${CLAUDE_SONNET_1M_SUFFIX}`,
 	"anthropic.claude-3-7-sonnet-20250219-v1:0",
 	"anthropic.claude-sonnet-4-20250514-v1:0",
 	"anthropic.claude-sonnet-4-5-20250929-v1:0",

+ 2 - 1
webview-ui/src/components/settings/providers/VertexProvider.tsx

@@ -1,6 +1,6 @@
 import { vertexGlobalModels, vertexModels } from "@shared/api"
 import VertexData from "@shared/providers/vertex.json"
-import { Mode } from "@shared/storage/types"
+import type { Mode } from "@shared/storage/types"
 import { VSCodeDropdown, VSCodeLink, VSCodeOption } from "@vscode/webview-ui-toolkit/react"
 import { useExtensionState } from "@/context/ExtensionStateContext"
 import { DROPDOWN_Z_INDEX, DropdownContainer } from "../ApiOptions"
@@ -23,6 +23,7 @@ interface VertexProviderProps {
 
 // Vertex models that support thinking
 const SUPPORTED_THINKING_MODELS = [
+	"claude-opus-4-6",
 	"claude-haiku-4-5@20251001",
 	"claude-sonnet-4-5@20250929",
 	"claude-3-7-sonnet@20250219",