Task.ts

import * as path from "path"
import * as vscode from "vscode"
import os from "os"
import crypto from "crypto"
import EventEmitter from "events"
import { Anthropic } from "@anthropic-ai/sdk"
import OpenAI from "openai"
import delay from "delay"
import pWaitFor from "p-wait-for"
import { serializeError } from "serialize-error"
import { Package } from "../../shared/package"
import {
	type TaskLike,
	type TaskMetadata,
	type TaskEvents,
	type ProviderSettings,
	type TokenUsage,
	type ToolUsage,
	type ToolName,
	type ContextCondense,
	type ClineMessage,
	type ClineSay,
	type ClineAsk,
	type ToolProgressStatus,
	type HistoryItem,
	type CreateTaskOptions,
	RooCodeEventName,
	TelemetryEventName,
	TaskStatus,
	TodoItem,
	getApiProtocol,
	getModelId,
	isIdleAsk,
	isInteractiveAsk,
	isResumableAsk,
	QueuedMessage,
	DEFAULT_CONSECUTIVE_MISTAKE_LIMIT,
	DEFAULT_CHECKPOINT_TIMEOUT_SECONDS,
	MAX_CHECKPOINT_TIMEOUT_SECONDS,
	MIN_CHECKPOINT_TIMEOUT_SECONDS,
	TOOL_PROTOCOL,
	ToolProtocol,
} from "@roo-code/types"
import { TelemetryService } from "@roo-code/telemetry"
import { CloudService, BridgeOrchestrator } from "@roo-code/cloud"

// api
import { ApiHandler, ApiHandlerCreateMessageMetadata, buildApiHandler } from "../../api"
import { ApiStream, GroundingSource } from "../../api/transform/stream"
import { maybeRemoveImageBlocks } from "../../api/transform/image-cleaning"

// shared
import { findLastIndex } from "../../shared/array"
import { combineApiRequests } from "../../shared/combineApiRequests"
import { combineCommandSequences } from "../../shared/combineCommandSequences"
import { t } from "../../i18n"
import { ClineApiReqCancelReason, ClineApiReqInfo } from "../../shared/ExtensionMessage"
import { getApiMetrics, hasTokenUsageChanged } from "../../shared/getApiMetrics"
import { ClineAskResponse } from "../../shared/WebviewMessage"
import { defaultModeSlug, getModeBySlug, getGroupName } from "../../shared/modes"
import { DiffStrategy } from "../../shared/tools"
import { EXPERIMENT_IDS, experiments } from "../../shared/experiments"
import { getModelMaxOutputTokens } from "../../shared/api"

// services
import { UrlContentFetcher } from "../../services/browser/UrlContentFetcher"
import { BrowserSession } from "../../services/browser/BrowserSession"
import { McpHub } from "../../services/mcp/McpHub"
import { McpServerManager } from "../../services/mcp/McpServerManager"
import { RepoPerTaskCheckpointService } from "../../services/checkpoints"

// integrations
import { DiffViewProvider } from "../../integrations/editor/DiffViewProvider"
import { findToolName } from "../../integrations/misc/export-markdown"
import { RooTerminalProcess } from "../../integrations/terminal/types"
import { TerminalRegistry } from "../../integrations/terminal/TerminalRegistry"

// utils
import { calculateApiCostAnthropic, calculateApiCostOpenAI } from "../../shared/cost"
import { getWorkspacePath } from "../../utils/path"

// prompts
import { formatResponse } from "../prompts/responses"
import { SYSTEM_PROMPT } from "../prompts/system"
import { nativeTools, getMcpServerTools } from "../prompts/tools/native-tools"
import { filterNativeToolsForMode, filterMcpToolsForMode } from "../prompts/tools/filter-tools-for-mode"

// core modules
import { ToolRepetitionDetector } from "../tools/ToolRepetitionDetector"
import { restoreTodoListForTask } from "../tools/UpdateTodoListTool"
import { FileContextTracker } from "../context-tracking/FileContextTracker"
import { RooIgnoreController } from "../ignore/RooIgnoreController"
import { RooProtectedController } from "../protect/RooProtectedController"
import { type AssistantMessageContent, presentAssistantMessage } from "../assistant-message"
import { AssistantMessageParser } from "../assistant-message/AssistantMessageParser"
import { NativeToolCallParser } from "../assistant-message/NativeToolCallParser"
import { manageContext } from "../context-management"
import { ClineProvider } from "../webview/ClineProvider"
import { MultiSearchReplaceDiffStrategy } from "../diff/strategies/multi-search-replace"
import { MultiFileSearchReplaceDiffStrategy } from "../diff/strategies/multi-file-search-replace"
import {
	type ApiMessage,
	readApiMessages,
	saveApiMessages,
	readTaskMessages,
	saveTaskMessages,
	taskMetadata,
} from "../task-persistence"
import { getEnvironmentDetails } from "../environment/getEnvironmentDetails"
import { checkContextWindowExceededError } from "../context/context-management/context-error-handling"
import {
	type CheckpointDiffOptions,
	type CheckpointRestoreOptions,
	getCheckpointService,
	checkpointSave,
	checkpointRestore,
	checkpointDiff,
} from "../checkpoints"
import { processUserContentMentions } from "../mentions/processUserContentMentions"
import { getMessagesSinceLastSummary, summarizeConversation } from "../condense"
import { MessageQueueService } from "../message-queue/MessageQueueService"
import { AutoApprovalHandler, checkAutoApproval } from "../auto-approval"

const MAX_EXPONENTIAL_BACKOFF_SECONDS = 600 // 10 minutes
const DEFAULT_USAGE_COLLECTION_TIMEOUT_MS = 5000 // 5 seconds
const FORCED_CONTEXT_REDUCTION_PERCENT = 75 // Keep 75% of context (remove 25%) on context window errors
const MAX_CONTEXT_WINDOW_RETRIES = 3 // Maximum retries for context window errors
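// Worked example (illustrative only, not executed here): with
// FORCED_CONTEXT_REDUCTION_PERCENT = 75, a request that failed with roughly
// 120,000 context tokens would be retried with about 120,000 * 0.75 = 90,000
// tokens of context, at most MAX_CONTEXT_WINDOW_RETRIES (3) times.
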
export interface TaskOptions extends CreateTaskOptions {
	provider: ClineProvider
	apiConfiguration: ProviderSettings
	enableDiff?: boolean
	enableCheckpoints?: boolean
	checkpointTimeout?: number
	enableBridge?: boolean
	fuzzyMatchThreshold?: number
	consecutiveMistakeLimit?: number
	task?: string
	images?: string[]
	historyItem?: HistoryItem
	experiments?: Record<string, boolean>
	startTask?: boolean
	rootTask?: Task
	parentTask?: Task
	taskNumber?: number
	onCreated?: (task: Task) => void
	initialTodos?: TodoItem[]
	workspacePath?: string
}

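/**
 * A single Roo Code task: owns the chat transcript (`clineMessages`), the API
 * conversation history, streaming state, tool use, checkpoints, and
 * persistence, and emits `TaskEvents` as it runs.
 *
 * ## Example (illustrative sketch)
 * Assumes a `ClineProvider` instance and a `ProviderSettings` object are
 * already available; option values are placeholders, not recommended defaults.
 * ```typescript
 * const [task, run] = Task.create({ provider, apiConfiguration, task: "Explain this repo" })
 * task.on(RooCodeEventName.Message, ({ action, message }) => {
 * 	console.log(action, message.say ?? message.ask)
 * })
 * run.catch((error) => console.error("Task failed to start:", error))
 * ```
 */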
export class Task extends EventEmitter<TaskEvents> implements TaskLike {
	readonly taskId: string
	readonly rootTaskId?: string
	readonly parentTaskId?: string
	childTaskId?: string
	readonly instanceId: string
	readonly metadata: TaskMetadata
	todoList?: TodoItem[]
	readonly rootTask: Task | undefined = undefined
	readonly parentTask: Task | undefined = undefined
	readonly taskNumber: number
	readonly workspacePath: string

	/**
	 * The mode associated with this task. Persisted across sessions
	 * to maintain user context when reopening tasks from history.
	 *
	 * ## Lifecycle
	 *
	 * ### For new tasks:
	 * 1. Initially `undefined` during construction
	 * 2. Asynchronously initialized from provider state via `initializeTaskMode()`
	 * 3. Falls back to `defaultModeSlug` if provider state is unavailable
	 *
	 * ### For history items:
	 * 1. Immediately set from `historyItem.mode` during construction
	 * 2. Falls back to `defaultModeSlug` if mode is not stored in history
	 *
	 * ## Important
	 * This property should NOT be accessed directly until `taskModeReady` promise resolves.
	 * Use `getTaskMode()` for async access or `taskMode` getter for sync access after initialization.
	 *
	 * @private
	 * @see {@link getTaskMode} - For safe async access
	 * @see {@link taskMode} - For sync access after initialization
	 * @see {@link waitForModeInitialization} - To ensure initialization is complete
	 */
	private _taskMode: string | undefined

	/**
	 * Promise that resolves when the task mode has been initialized.
	 * This ensures async mode initialization completes before the task is used.
	 *
	 * ## Purpose
	 * - Prevents race conditions when accessing task mode
	 * - Ensures provider state is properly loaded before mode-dependent operations
	 * - Provides a synchronization point for async initialization
	 *
	 * ## Resolution timing
	 * - For history items: Resolves immediately (sync initialization)
	 * - For new tasks: Resolves after provider state is fetched (async initialization)
	 *
	 * @private
	 * @see {@link waitForModeInitialization} - Public method to await this promise
	 */
	private taskModeReady: Promise<void>
	providerRef: WeakRef<ClineProvider>
	private readonly globalStoragePath: string
	abort: boolean = false

	// TaskStatus
	idleAsk?: ClineMessage
	resumableAsk?: ClineMessage
	interactiveAsk?: ClineMessage
	didFinishAbortingStream = false
	abandoned = false
	abortReason?: ClineApiReqCancelReason
	isInitialized = false
	isPaused: boolean = false
	pausedModeSlug: string = defaultModeSlug
	private pauseInterval: NodeJS.Timeout | undefined

	// API
	readonly apiConfiguration: ProviderSettings
	api: ApiHandler
	private static lastGlobalApiRequestTime?: number
	private autoApprovalHandler: AutoApprovalHandler

	/**
	 * Reset the global API request timestamp. This should only be used for testing.
	 * @internal
	 */
	static resetGlobalApiRequestTime(): void {
		Task.lastGlobalApiRequestTime = undefined
	}

	toolRepetitionDetector: ToolRepetitionDetector
	rooIgnoreController?: RooIgnoreController
	rooProtectedController?: RooProtectedController
	fileContextTracker: FileContextTracker
	urlContentFetcher: UrlContentFetcher
	terminalProcess?: RooTerminalProcess

	// Computer User
	browserSession: BrowserSession

	// Editing
	diffViewProvider: DiffViewProvider
	diffStrategy?: DiffStrategy
	diffEnabled: boolean = false
	fuzzyMatchThreshold: number
	didEditFile: boolean = false

	// LLM Messages & Chat Messages
	apiConversationHistory: ApiMessage[] = []
	clineMessages: ClineMessage[] = []

	// Ask
	private askResponse?: ClineAskResponse
	private askResponseText?: string
	private askResponseImages?: string[]
	public lastMessageTs?: number

	// Tool Use
	consecutiveMistakeCount: number = 0
	consecutiveMistakeLimit: number
	consecutiveMistakeCountForApplyDiff: Map<string, number> = new Map()
	toolUsage: ToolUsage = {}

	// Checkpoints
	enableCheckpoints: boolean
	checkpointTimeout: number
	checkpointService?: RepoPerTaskCheckpointService
	checkpointServiceInitializing = false

	// Task Bridge
	enableBridge: boolean

	// Message Queue Service
	public readonly messageQueueService: MessageQueueService
	private messageQueueStateChangedHandler: (() => void) | undefined

	// Streaming
	isWaitingForFirstChunk = false
	isStreaming = false
	currentStreamingContentIndex = 0
	currentStreamingDidCheckpoint = false
	assistantMessageContent: AssistantMessageContent[] = []
	presentAssistantMessageLocked = false
	presentAssistantMessageHasPendingUpdates = false
	userMessageContent: (Anthropic.TextBlockParam | Anthropic.ImageBlockParam | Anthropic.ToolResultBlockParam)[] = []
	userMessageContentReady = false
	didRejectTool = false
	didAlreadyUseTool = false
	didCompleteReadingStream = false
	assistantMessageParser: AssistantMessageParser

	// Token Usage Cache
	private tokenUsageSnapshot?: TokenUsage
	private tokenUsageSnapshotAt?: number

	constructor({
		provider,
		apiConfiguration,
		enableDiff = false,
		enableCheckpoints = true,
		checkpointTimeout = DEFAULT_CHECKPOINT_TIMEOUT_SECONDS,
		enableBridge = false,
		fuzzyMatchThreshold = 1.0,
		consecutiveMistakeLimit = DEFAULT_CONSECUTIVE_MISTAKE_LIMIT,
		task,
		images,
		historyItem,
		startTask = true,
		rootTask,
		parentTask,
		taskNumber = -1,
		onCreated,
		initialTodos,
		workspacePath,
	}: TaskOptions) {
		super()

		if (startTask && !task && !images && !historyItem) {
			throw new Error("Either historyItem or task/images must be provided")
		}

		if (
			!checkpointTimeout ||
			checkpointTimeout > MAX_CHECKPOINT_TIMEOUT_SECONDS ||
			checkpointTimeout < MIN_CHECKPOINT_TIMEOUT_SECONDS
		) {
			throw new Error(
				"checkpointTimeout must be between " +
					MIN_CHECKPOINT_TIMEOUT_SECONDS +
					" and " +
					MAX_CHECKPOINT_TIMEOUT_SECONDS +
					" seconds",
			)
		}

		this.taskId = historyItem ? historyItem.id : crypto.randomUUID()
		this.rootTaskId = historyItem ? historyItem.rootTaskId : rootTask?.taskId
		this.parentTaskId = historyItem ? historyItem.parentTaskId : parentTask?.taskId
		this.childTaskId = undefined

		this.metadata = {
			task: historyItem ? historyItem.task : task,
			images: historyItem ? [] : images,
		}

		// The normal use case is retrying a similar history task in a new workspace.
		this.workspacePath = parentTask
			? parentTask.workspacePath
			: (workspacePath ?? getWorkspacePath(path.join(os.homedir(), "Desktop")))

		this.instanceId = crypto.randomUUID().slice(0, 8)
		this.taskNumber = -1
		this.rooIgnoreController = new RooIgnoreController(this.cwd)
		this.rooProtectedController = new RooProtectedController(this.cwd)
		this.fileContextTracker = new FileContextTracker(provider, this.taskId)

		this.rooIgnoreController.initialize().catch((error) => {
			console.error("Failed to initialize RooIgnoreController:", error)
		})

		this.apiConfiguration = apiConfiguration
		this.api = buildApiHandler(apiConfiguration)
		this.autoApprovalHandler = new AutoApprovalHandler()
		this.urlContentFetcher = new UrlContentFetcher(provider.context)
		this.browserSession = new BrowserSession(provider.context)
		this.diffEnabled = enableDiff
		this.fuzzyMatchThreshold = fuzzyMatchThreshold
		this.consecutiveMistakeLimit = consecutiveMistakeLimit ?? DEFAULT_CONSECUTIVE_MISTAKE_LIMIT
		this.providerRef = new WeakRef(provider)
		this.globalStoragePath = provider.context.globalStorageUri.fsPath
		this.diffViewProvider = new DiffViewProvider(this.cwd, this)
		this.enableCheckpoints = enableCheckpoints
		this.checkpointTimeout = checkpointTimeout
		this.enableBridge = enableBridge
		this.parentTask = parentTask
		this.taskNumber = taskNumber

		// Store the task's mode when it's created.
		// For history items, use the stored mode; for new tasks, we'll set it
		// after getting state.
		if (historyItem) {
			this._taskMode = historyItem.mode || defaultModeSlug
			this.taskModeReady = Promise.resolve()
			TelemetryService.instance.captureTaskRestarted(this.taskId)
		} else {
			// For new tasks, don't set the mode yet - wait for async initialization.
			this._taskMode = undefined
			this.taskModeReady = this.initializeTaskMode(provider)
			TelemetryService.instance.captureTaskCreated(this.taskId)
		}

		// Initialize the assistant message parser.
		this.assistantMessageParser = new AssistantMessageParser()

		this.messageQueueService = new MessageQueueService()

		this.messageQueueStateChangedHandler = () => {
			this.emit(RooCodeEventName.TaskUserMessage, this.taskId)
			this.providerRef.deref()?.postStateToWebview()
		}

		this.messageQueueService.on("stateChanged", this.messageQueueStateChangedHandler)

		// Only set up diff strategy if diff is enabled.
		if (this.diffEnabled) {
			// Default to old strategy, will be updated if experiment is enabled.
			this.diffStrategy = new MultiSearchReplaceDiffStrategy(this.fuzzyMatchThreshold)

			// Check experiment asynchronously and update strategy if needed.
			provider.getState().then((state) => {
				const isMultiFileApplyDiffEnabled = experiments.isEnabled(
					state.experiments ?? {},
					EXPERIMENT_IDS.MULTI_FILE_APPLY_DIFF,
				)

				if (isMultiFileApplyDiffEnabled) {
					this.diffStrategy = new MultiFileSearchReplaceDiffStrategy(this.fuzzyMatchThreshold)
				}
			})
		}

		this.toolRepetitionDetector = new ToolRepetitionDetector(this.consecutiveMistakeLimit)

		// Initialize todo list if provided
		if (initialTodos && initialTodos.length > 0) {
			this.todoList = initialTodos
		}

		onCreated?.(this)

		if (startTask) {
			if (task || images) {
				this.startTask(task, images)
			} else if (historyItem) {
				this.resumeTaskFromHistory()
			} else {
				throw new Error("Either historyItem or task/images must be provided")
			}
		}
	}

	/**
	 * Initialize the task mode from the provider state.
	 * This method handles async initialization with proper error handling.
	 *
	 * ## Flow
	 * 1. Attempts to fetch the current mode from provider state
	 * 2. Sets `_taskMode` to the fetched mode or `defaultModeSlug` if unavailable
	 * 3. Handles errors gracefully by falling back to default mode
	 * 4. Logs any initialization errors for debugging
	 *
	 * ## Error handling
	 * - Network failures when fetching provider state
	 * - Provider not yet initialized
	 * - Invalid state structure
	 *
	 * All errors result in fallback to `defaultModeSlug` to ensure task can proceed.
	 *
	 * @private
	 * @param provider - The ClineProvider instance to fetch state from
	 * @returns Promise that resolves when initialization is complete
	 */
	private async initializeTaskMode(provider: ClineProvider): Promise<void> {
		try {
			const state = await provider.getState()
			this._taskMode = state?.mode || defaultModeSlug
		} catch (error) {
			// If there's an error getting state, use the default mode
			this._taskMode = defaultModeSlug

			// Use the provider's log method for better error visibility
			const errorMessage = `Failed to initialize task mode: ${error instanceof Error ? error.message : String(error)}`
			provider.log(errorMessage)
		}
	}

	/**
	 * Wait for the task mode to be initialized before proceeding.
	 * This method ensures that any operations depending on the task mode
	 * will have access to the correct mode value.
	 *
	 * ## When to use
	 * - Before accessing mode-specific configurations
	 * - When switching between tasks with different modes
	 * - Before operations that depend on mode-based permissions
	 *
	 * ## Example usage
	 * ```typescript
	 * // Wait for mode initialization before mode-dependent operations
	 * await task.waitForModeInitialization();
	 * const mode = task.taskMode; // Now safe to access synchronously
	 *
	 * // Or use with getTaskMode() for a one-liner
	 * const mode = await task.getTaskMode(); // Internally waits for initialization
	 * ```
	 *
	 * @returns Promise that resolves when the task mode is initialized
	 * @public
	 */
	public async waitForModeInitialization(): Promise<void> {
		return this.taskModeReady
	}

	/**
	 * Get the task mode asynchronously, ensuring it's properly initialized.
	 * This is the recommended way to access the task mode as it guarantees
	 * the mode is available before returning.
	 *
	 * ## Async behavior
	 * - Internally waits for `taskModeReady` promise to resolve
	 * - Returns the initialized mode or `defaultModeSlug` as fallback
	 * - Safe to call multiple times - subsequent calls return immediately if already initialized
	 *
	 * ## Example usage
	 * ```typescript
	 * // Safe async access
	 * const mode = await task.getTaskMode();
	 * console.log(`Task is running in ${mode} mode`);
	 *
	 * // Use in conditional logic
	 * if (await task.getTaskMode() === 'architect') {
	 * 	// Perform architect-specific operations
	 * }
	 * ```
	 *
	 * @returns Promise resolving to the task mode string
	 * @public
	 */
	public async getTaskMode(): Promise<string> {
		await this.taskModeReady
		return this._taskMode || defaultModeSlug
	}

	/**
	 * Get the task mode synchronously. This should only be used when you're certain
	 * that the mode has already been initialized (e.g., after waitForModeInitialization).
	 *
	 * ## When to use
	 * - In synchronous contexts where async/await is not available
	 * - After explicitly waiting for initialization via `waitForModeInitialization()`
	 * - In event handlers or callbacks where mode is guaranteed to be initialized
	 *
	 * ## Example usage
	 * ```typescript
	 * // After ensuring initialization
	 * await task.waitForModeInitialization();
	 * const mode = task.taskMode; // Safe synchronous access
	 *
	 * // In an event handler after task is started
	 * task.on('taskStarted', () => {
	 * 	console.log(`Task started in ${task.taskMode} mode`); // Safe here
	 * });
	 * ```
	 *
	 * @throws {Error} If the mode hasn't been initialized yet
	 * @returns The task mode string
	 * @public
	 */
	public get taskMode(): string {
		if (this._taskMode === undefined) {
			throw new Error("Task mode accessed before initialization. Use getTaskMode() or wait for taskModeReady.")
		}

		return this._taskMode
	}

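	/**
	 * Construct a Task without auto-starting it, then kick off the start or
	 * resume flow and return both the instance and the resulting promise.
	 *
	 * ## Example usage (illustrative sketch; assumes `provider` and `apiConfiguration` exist)
	 * ```typescript
	 * const [task, run] = Task.create({ provider, apiConfiguration, task: "Summarize README.md" })
	 * run.catch((error) => console.error("Task failed to start:", error))
	 * ```
	 */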
	static create(options: TaskOptions): [Task, Promise<void>] {
		const instance = new Task({ ...options, startTask: false })
		const { images, task, historyItem } = options
		let promise

		if (images || task) {
			promise = instance.startTask(task, images)
		} else if (historyItem) {
			promise = instance.resumeTaskFromHistory()
		} else {
			throw new Error("Either historyItem or task/images must be provided")
		}

		return [instance, promise]
	}

	// API Messages

	private async getSavedApiConversationHistory(): Promise<ApiMessage[]> {
		return readApiMessages({ taskId: this.taskId, globalStoragePath: this.globalStoragePath })
	}

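	/**
	 * Append a message to the API conversation history and persist it.
	 * For assistant messages, providers that expose reasoning state (e.g. the
	 * OpenAI Responses API) may report an encrypted reasoning blob; when present
	 * it is stored as a separate "reasoning" item ahead of the assistant
	 * message. Roughly (illustrative shape; the `ts` value is a placeholder):
	 * ```typescript
	 * // { type: "reasoning", summary: [], encrypted_content: "...", id: "...", ts: 1700000000000 }
	 * ```
	 */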
	private async addToApiConversationHistory(message: Anthropic.MessageParam) {
		// Capture the encrypted_content from the provider (e.g., OpenAI Responses API) if present.
		// We only persist data reported by the current response body.
		const handler = this.api as ApiHandler & {
			getResponseId?: () => string | undefined
			getEncryptedContent?: () => { encrypted_content: string; id?: string } | undefined
		}

		if (message.role === "assistant") {
			const responseId = handler.getResponseId?.()
			const reasoningData = handler.getEncryptedContent?.()

			// If we have encrypted_content, add it as a reasoning item before the assistant message
			if (reasoningData?.encrypted_content) {
				this.apiConversationHistory.push({
					type: "reasoning",
					summary: [],
					encrypted_content: reasoningData.encrypted_content,
					...(reasoningData.id ? { id: reasoningData.id } : {}),
					ts: Date.now(),
				} as any)
			}

			const messageWithTs = {
				...message,
				...(responseId ? { id: responseId } : {}),
				ts: Date.now(),
			}

			this.apiConversationHistory.push(messageWithTs)
		} else {
			const messageWithTs = { ...message, ts: Date.now() }
			this.apiConversationHistory.push(messageWithTs)
		}

		await this.saveApiConversationHistory()
	}

	async overwriteApiConversationHistory(newHistory: ApiMessage[]) {
		this.apiConversationHistory = newHistory
		await this.saveApiConversationHistory()
	}

	private async saveApiConversationHistory() {
		try {
			await saveApiMessages({
				messages: this.apiConversationHistory,
				taskId: this.taskId,
				globalStoragePath: this.globalStoragePath,
			})
		} catch (error) {
			// On the off chance this fails, we don't want to stop the task.
			console.error("Failed to save API conversation history:", error)
		}
	}

	// Cline Messages

	private async getSavedClineMessages(): Promise<ClineMessage[]> {
		return readTaskMessages({ taskId: this.taskId, globalStoragePath: this.globalStoragePath })
	}

	private async addToClineMessages(message: ClineMessage) {
		this.clineMessages.push(message)

		const provider = this.providerRef.deref()
		await provider?.postStateToWebview()
		this.emit(RooCodeEventName.Message, { action: "created", message })
		await this.saveClineMessages()

		const shouldCaptureMessage = message.partial !== true && CloudService.isEnabled()

		if (shouldCaptureMessage) {
			CloudService.instance.captureEvent({
				event: TelemetryEventName.TASK_MESSAGE,
				properties: { taskId: this.taskId, message },
			})
		}
	}

	public async overwriteClineMessages(newMessages: ClineMessage[]) {
		this.clineMessages = newMessages
		restoreTodoListForTask(this)
		await this.saveClineMessages()
	}

	private async updateClineMessage(message: ClineMessage) {
		const provider = this.providerRef.deref()
		await provider?.postMessageToWebview({ type: "messageUpdated", clineMessage: message })
		this.emit(RooCodeEventName.Message, { action: "updated", message })

		const shouldCaptureMessage = message.partial !== true && CloudService.isEnabled()

		if (shouldCaptureMessage) {
			CloudService.instance.captureEvent({
				event: TelemetryEventName.TASK_MESSAGE,
				properties: { taskId: this.taskId, message },
			})
		}
	}

	private async saveClineMessages() {
		try {
			await saveTaskMessages({
				messages: this.clineMessages,
				taskId: this.taskId,
				globalStoragePath: this.globalStoragePath,
			})

			const { historyItem, tokenUsage } = await taskMetadata({
				taskId: this.taskId,
				rootTaskId: this.rootTaskId,
				parentTaskId: this.parentTaskId,
				taskNumber: this.taskNumber,
				messages: this.clineMessages,
				globalStoragePath: this.globalStoragePath,
				workspace: this.cwd,
				mode: this._taskMode || defaultModeSlug, // Use the task's own mode, not the current provider mode.
			})

			if (hasTokenUsageChanged(tokenUsage, this.tokenUsageSnapshot)) {
				this.emit(RooCodeEventName.TaskTokenUsageUpdated, this.taskId, tokenUsage)
				this.tokenUsageSnapshot = undefined
				this.tokenUsageSnapshotAt = undefined
			}

			await this.providerRef.deref()?.updateTaskHistory(historyItem)
		} catch (error) {
			console.error("Failed to save Roo messages:", error)
		}
	}

	private findMessageByTimestamp(ts: number): ClineMessage | undefined {
		for (let i = this.clineMessages.length - 1; i >= 0; i--) {
			if (this.clineMessages[i].ts === ts) {
				return this.clineMessages[i]
			}
		}

		return undefined
	}

	// Note that `partial` has three valid states: true (partial message),
	// false (completion of a partial message), and undefined (an individual
	// complete message).
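	/**
	 * Post an "ask" message and block until the user (or the auto-approval
	 * logic) responds. Throws if the task is aborted or if a newer ask
	 * supersedes this one.
	 *
	 * ## Example (illustrative sketch; the response values depend on the webview)
	 * ```typescript
	 * const { response, text } = await task.ask("followup", "Which file should I edit?")
	 * if (response === "messageResponse") {
	 * 	console.log(`User replied: ${text}`)
	 * }
	 * ```
	 */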
	async ask(
		type: ClineAsk,
		text?: string,
		partial?: boolean,
		progressStatus?: ToolProgressStatus,
		isProtected?: boolean,
	): Promise<{ response: ClineAskResponse; text?: string; images?: string[] }> {
		// If this Cline instance was aborted by the provider, then the only
		// thing keeping us alive is a promise still running in the background,
		// in which case we don't want to send its result to the webview as it
		// is attached to a new instance of Cline now. So we can safely ignore
		// the result of any active promises, and this class will be
		// deallocated. (Although we set Cline = undefined in provider, that
		// simply removes the reference to this instance, but the instance is
		// still alive until this promise resolves or rejects.)
		if (this.abort) {
			throw new Error(`[RooCode#ask] task ${this.taskId}.${this.instanceId} aborted`)
		}

		let askTs: number

		if (partial !== undefined) {
			const lastMessage = this.clineMessages.at(-1)

			const isUpdatingPreviousPartial =
				lastMessage && lastMessage.partial && lastMessage.type === "ask" && lastMessage.ask === type

			if (partial) {
				if (isUpdatingPreviousPartial) {
					// Existing partial message, so update it.
					lastMessage.text = text
					lastMessage.partial = partial
					lastMessage.progressStatus = progressStatus
					lastMessage.isProtected = isProtected

					// TODO: Be more efficient about saving and posting only new
					// data or one whole message at a time so ignore partial for
					// saves, and only post parts of partial message instead of
					// whole array in new listener.
					this.updateClineMessage(lastMessage)
					// console.log("Task#ask: current ask promise was ignored (#1)")
					throw new Error("Current ask promise was ignored (#1)")
				} else {
					// This is a new partial message, so add it with partial
					// state.
					askTs = Date.now()
					this.lastMessageTs = askTs
					console.log(`Task#ask: new partial ask -> ${type} @ ${askTs}`)
					await this.addToClineMessages({ ts: askTs, type: "ask", ask: type, text, partial, isProtected })
					// console.log("Task#ask: current ask promise was ignored (#2)")
					throw new Error("Current ask promise was ignored (#2)")
				}
			} else {
				if (isUpdatingPreviousPartial) {
					// This is the complete version of a previously partial
					// message, so replace the partial with the complete version.
					this.askResponse = undefined
					this.askResponseText = undefined
					this.askResponseImages = undefined

					// Bug for the history books:
					// In the webview we use the ts as the chatrow key for the
					// virtuoso list. Since we would update this ts right at the
					// end of streaming, it would cause the view to flicker. The
					// key prop has to be stable otherwise react has trouble
					// reconciling items between renders, causing unmounting and
					// remounting of components (flickering).
					// The lesson here is if you see flickering when rendering
					// lists, it's likely because the key prop is not stable.
					// So in this case we must make sure that the message ts is
					// never altered after first setting it.
					askTs = lastMessage.ts
					console.log(`Task#ask: updating previous partial ask -> ${type} @ ${askTs}`)
					this.lastMessageTs = askTs
					lastMessage.text = text
					lastMessage.partial = false
					lastMessage.progressStatus = progressStatus
					lastMessage.isProtected = isProtected
					await this.saveClineMessages()
					this.updateClineMessage(lastMessage)
				} else {
					// This is a new and complete message, so add it like normal.
					this.askResponse = undefined
					this.askResponseText = undefined
					this.askResponseImages = undefined
					askTs = Date.now()
					console.log(`Task#ask: new complete ask -> ${type} @ ${askTs}`)
					this.lastMessageTs = askTs
					await this.addToClineMessages({ ts: askTs, type: "ask", ask: type, text, isProtected })
				}
			}
		} else {
			// This is a new non-partial message, so add it like normal.
			this.askResponse = undefined
			this.askResponseText = undefined
			this.askResponseImages = undefined
			askTs = Date.now()
			console.log(`Task#ask: new complete ask -> ${type} @ ${askTs}`)
			this.lastMessageTs = askTs
			await this.addToClineMessages({ ts: askTs, type: "ask", ask: type, text, isProtected })
		}

		let timeouts: NodeJS.Timeout[] = []

		// Automatically approve or deny the ask according to the user's auto-approval settings.
		const provider = this.providerRef.deref()
		const state = provider ? await provider.getState() : undefined
		const approval = await checkAutoApproval({ state, ask: type, text, isProtected })

		if (approval.decision === "approve") {
			this.approveAsk()
		} else if (approval.decision === "deny") {
			this.denyAsk()
		} else if (approval.decision === "timeout") {
			timeouts.push(
				setTimeout(() => {
					const { askResponse, text, images } = approval.fn()
					this.handleWebviewAskResponse(askResponse, text, images)
				}, approval.timeout),
			)
		}

		// The state is mutable if the message is complete and the task will
		// block (via the `pWaitFor`).
		const isBlocking = !(this.askResponse !== undefined || this.lastMessageTs !== askTs)
		const isMessageQueued = !this.messageQueueService.isEmpty()
		const isStatusMutable = !partial && isBlocking && !isMessageQueued && approval.decision === "ask"

		if (isBlocking) {
			console.log(`Task#ask will block -> type: ${type}`)
		}

		if (isStatusMutable) {
			console.log(`Task#ask: status is mutable -> type: ${type}`)
			const statusMutationTimeout = 2_000

			if (isInteractiveAsk(type)) {
				timeouts.push(
					setTimeout(() => {
						const message = this.findMessageByTimestamp(askTs)

						if (message) {
							this.interactiveAsk = message
							this.emit(RooCodeEventName.TaskInteractive, this.taskId)
							provider?.postMessageToWebview({ type: "interactionRequired" })
						}
					}, statusMutationTimeout),
				)
			} else if (isResumableAsk(type)) {
				timeouts.push(
					setTimeout(() => {
						const message = this.findMessageByTimestamp(askTs)

						if (message) {
							this.resumableAsk = message
							this.emit(RooCodeEventName.TaskResumable, this.taskId)
						}
					}, statusMutationTimeout),
				)
			} else if (isIdleAsk(type)) {
				timeouts.push(
					setTimeout(() => {
						const message = this.findMessageByTimestamp(askTs)

						if (message) {
							this.idleAsk = message
							this.emit(RooCodeEventName.TaskIdle, this.taskId)
						}
					}, statusMutationTimeout),
				)
			}
		} else if (isMessageQueued) {
			console.log(`Task#ask: will process message queue -> type: ${type}`)

			const message = this.messageQueueService.dequeueMessage()

			if (message) {
				// Check if this is a tool approval ask that needs to be handled.
				if (
					type === "tool" ||
					type === "command" ||
					type === "browser_action_launch" ||
					type === "use_mcp_server"
				) {
					// For tool approvals, we need to approve first, then send
					// the message if there's text/images.
					this.handleWebviewAskResponse("yesButtonClicked", message.text, message.images)
				} else {
					// For other ask types (like followup or command_output), fulfill the ask
					// directly.
					this.handleWebviewAskResponse("messageResponse", message.text, message.images)
				}
			}
		}

		// Wait for askResponse to be set
		await pWaitFor(() => this.askResponse !== undefined || this.lastMessageTs !== askTs, { interval: 100 })

		if (this.lastMessageTs !== askTs) {
			// This could happen if we send multiple asks in a row, e.g. with
			// command_output. It's important that when we know an ask could
			// fail, it is handled gracefully.
			console.log("Task#ask: current ask promise was ignored")
			throw new Error("Current ask promise was ignored")
		}

		const result = { response: this.askResponse!, text: this.askResponseText, images: this.askResponseImages }
		this.askResponse = undefined
		this.askResponseText = undefined
		this.askResponseImages = undefined

		// Cancel the timeouts if they are still running.
		timeouts.forEach((timeout) => clearTimeout(timeout))

		// Switch back to an active state.
		if (this.idleAsk || this.resumableAsk || this.interactiveAsk) {
			this.idleAsk = undefined
			this.resumableAsk = undefined
			this.interactiveAsk = undefined
			this.emit(RooCodeEventName.TaskActive, this.taskId)
		}

		this.emit(RooCodeEventName.TaskAskResponded)
		return result
	}

	handleWebviewAskResponse(askResponse: ClineAskResponse, text?: string, images?: string[]) {
		this.askResponse = askResponse
		this.askResponseText = text
		this.askResponseImages = images

		// Create a checkpoint whenever the user sends a message.
		// Use allowEmpty=true to ensure a checkpoint is recorded even if there are no file changes.
		// Suppress the checkpoint_saved chat row for this particular checkpoint to keep the timeline clean.
		if (askResponse === "messageResponse") {
			void this.checkpointSave(false, true)
		}

		// Mark the last follow-up question as answered
		if (askResponse === "messageResponse" || askResponse === "yesButtonClicked") {
			// Find the last unanswered follow-up message using findLastIndex
			const lastFollowUpIndex = findLastIndex(
				this.clineMessages,
				(msg) => msg.type === "ask" && msg.ask === "followup" && !msg.isAnswered,
			)

			if (lastFollowUpIndex !== -1) {
				// Mark this follow-up as answered
				this.clineMessages[lastFollowUpIndex].isAnswered = true

				// Save the updated messages
				this.saveClineMessages().catch((error) => {
					console.error("Failed to save answered follow-up state:", error)
				})
			}
		}
	}

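	/**
	 * Programmatically answer a pending ask as if the user had clicked the
	 * approve/deny buttons in the webview.
	 *
	 * ## Example (illustrative sketch; `pendingTask` is a placeholder)
	 * ```typescript
	 * // Approve a pending tool ask, optionally attaching a note for the model:
	 * pendingTask.approveAsk({ text: "Looks good, go ahead." })
	 *
	 * // Or reject it:
	 * pendingTask.denyAsk({ text: "Don't touch that file." })
	 * ```
	 */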
	public approveAsk({ text, images }: { text?: string; images?: string[] } = {}) {
		this.handleWebviewAskResponse("yesButtonClicked", text, images)
	}

	public denyAsk({ text, images }: { text?: string; images?: string[] } = {}) {
		this.handleWebviewAskResponse("noButtonClicked", text, images)
	}

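	/**
	 * Send a user message into the current task by invoking the webview's
	 * sendMessage flow, optionally switching mode and/or provider profile first.
	 *
	 * ## Example (illustrative sketch; the mode slug is a placeholder)
	 * ```typescript
	 * await task.submitUserMessage("Also add JSDoc to the new function", [], "code")
	 * ```
	 */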
	public async submitUserMessage(
		text: string,
		images?: string[],
		mode?: string,
		providerProfile?: string,
	): Promise<void> {
		try {
			text = (text ?? "").trim()
			images = images ?? []

			if (text.length === 0 && images.length === 0) {
				return
			}

			const provider = this.providerRef.deref()

			if (provider) {
				if (mode) {
					await provider.setMode(mode)
				}

				if (providerProfile) {
					await provider.setProviderProfile(providerProfile)
				}

				this.emit(RooCodeEventName.TaskUserMessage, this.taskId)
				provider.postMessageToWebview({ type: "invoke", invoke: "sendMessage", text, images })
			} else {
				console.error("[Task#submitUserMessage] Provider reference lost")
			}
		} catch (error) {
			console.error("[Task#submitUserMessage] Failed to submit user message:", error)
		}
	}

	async handleTerminalOperation(terminalOperation: "continue" | "abort") {
		if (terminalOperation === "continue") {
			this.terminalProcess?.continue()
		} else if (terminalOperation === "abort") {
			this.terminalProcess?.abort()
		}
	}

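	/**
	 * Manually condense the conversation: summarize older messages and replace
	 * them in the API history to free context-window space. Uses the optional
	 * condensing-specific API profile and custom prompt from provider state when
	 * configured, otherwise falls back to the task's main API handler and the
	 * default summarization prompt.
	 *
	 * ## Example (illustrative sketch; assumes a running task instance)
	 * ```typescript
	 * const before = task.getTokenUsage().contextTokens
	 * await task.condenseContext()
	 * console.log(`Context tokens before condensing: ${before}`)
	 * ```
	 */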
	public async condenseContext(): Promise<void> {
		const systemPrompt = await this.getSystemPrompt()

		// Get condensing configuration
		const state = await this.providerRef.deref()?.getState()

		// These properties may not exist in the state type yet, but are used for condensing configuration
		const customCondensingPrompt = state?.customCondensingPrompt
		const condensingApiConfigId = state?.condensingApiConfigId
		const listApiConfigMeta = state?.listApiConfigMeta

		// Determine API handler to use
		let condensingApiHandler: ApiHandler | undefined

		if (condensingApiConfigId && listApiConfigMeta && Array.isArray(listApiConfigMeta)) {
			// Find matching config by ID
			const matchingConfig = listApiConfigMeta.find((config) => config.id === condensingApiConfigId)

			if (matchingConfig) {
				const profile = await this.providerRef.deref()?.providerSettingsManager.getProfile({
					id: condensingApiConfigId,
				})

				// Ensure profile and apiProvider exist before trying to build handler
				if (profile && profile.apiProvider) {
					condensingApiHandler = buildApiHandler(profile)
				}
			}
		}

		const { contextTokens: prevContextTokens } = this.getTokenUsage()

		const {
			messages,
			summary,
			cost,
			newContextTokens = 0,
			error,
		} = await summarizeConversation(
			this.apiConversationHistory,
			this.api, // Main API handler (fallback)
			systemPrompt, // Default summarization prompt (fallback)
			this.taskId,
			prevContextTokens,
			false, // manual trigger
			customCondensingPrompt, // User's custom prompt
			condensingApiHandler, // Specific handler for condensing
		)

		if (error) {
			this.say(
				"condense_context_error",
				error,
				undefined /* images */,
				false /* partial */,
				undefined /* checkpoint */,
				undefined /* progressStatus */,
				{ isNonInteractive: true } /* options */,
			)

			return
		}

		await this.overwriteApiConversationHistory(messages)
		const contextCondense: ContextCondense = { summary, cost, newContextTokens, prevContextTokens }

		await this.say(
			"condense_context",
			undefined /* text */,
			undefined /* images */,
			false /* partial */,
			undefined /* checkpoint */,
			undefined /* progressStatus */,
			{ isNonInteractive: true } /* options */,
			contextCondense,
		)

		// Process any queued messages after condensing completes
		this.processQueuedMessages()
	}

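	/**
	 * Append a "say" message to the chat. Like `ask()`, `partial` has three
	 * states: true streams an in-progress chunk, false finalizes a previously
	 * partial message, and undefined posts a single complete message.
	 *
	 * ## Example (illustrative sketch)
	 * ```typescript
	 * // Post a one-off informational message that does not expect a reply:
	 * await task.say("text", "Indexing the workspace...", undefined, undefined, undefined, undefined, {
	 * 	isNonInteractive: true,
	 * })
	 * ```
	 */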
	async say(
		type: ClineSay,
		text?: string,
		images?: string[],
		partial?: boolean,
		checkpoint?: Record<string, unknown>,
		progressStatus?: ToolProgressStatus,
		options: {
			isNonInteractive?: boolean
		} = {},
		contextCondense?: ContextCondense,
	): Promise<undefined> {
		if (this.abort) {
			throw new Error(`[RooCode#say] task ${this.taskId}.${this.instanceId} aborted`)
		}

		if (partial !== undefined) {
			const lastMessage = this.clineMessages.at(-1)

			const isUpdatingPreviousPartial =
				lastMessage && lastMessage.partial && lastMessage.type === "say" && lastMessage.say === type

			if (partial) {
				if (isUpdatingPreviousPartial) {
					// Existing partial message, so update it.
					lastMessage.text = text
					lastMessage.images = images
					lastMessage.partial = partial
					lastMessage.progressStatus = progressStatus
					this.updateClineMessage(lastMessage)
				} else {
					// This is a new partial message, so add it with partial state.
					const sayTs = Date.now()

					if (!options.isNonInteractive) {
						this.lastMessageTs = sayTs
					}

					await this.addToClineMessages({
						ts: sayTs,
						type: "say",
						say: type,
						text,
						images,
						partial,
						contextCondense,
					})
				}
			} else {
				// We now have the complete version of a previously partial
				// message, so replace the partial with the complete version.
				if (isUpdatingPreviousPartial) {
					if (!options.isNonInteractive) {
						this.lastMessageTs = lastMessage.ts
					}

					lastMessage.text = text
					lastMessage.images = images
					lastMessage.partial = false
					lastMessage.progressStatus = progressStatus

					// Instead of streaming partialMessage events, we do a save
					// and post like normal to persist to disk.
					await this.saveClineMessages()

					// More performant than an entire `postStateToWebview`.
					this.updateClineMessage(lastMessage)
				} else {
					// This is a new and complete message, so add it like normal.
					const sayTs = Date.now()

					if (!options.isNonInteractive) {
						this.lastMessageTs = sayTs
					}

					await this.addToClineMessages({
						ts: sayTs,
						type: "say",
						say: type,
						text,
						images,
						contextCondense,
					})
				}
			}
		} else {
			// This is a new non-partial message, so add it like normal.
			const sayTs = Date.now()

  1073. // A "non-interactive" message is a message is one that the user
  1074. // does not need to respond to. We don't want these message types
  1075. // to trigger an update to `lastMessageTs` since they can be created
  1076. // asynchronously and could interrupt a pending ask.
  1077. if (!options.isNonInteractive) {
  1078. this.lastMessageTs = sayTs
  1079. }
  1080. await this.addToClineMessages({
  1081. ts: sayTs,
  1082. type: "say",
  1083. say: type,
  1084. text,
  1085. images,
  1086. checkpoint,
  1087. contextCondense,
  1088. })
  1089. }
  1090. }
  1091. async sayAndCreateMissingParamError(toolName: ToolName, paramName: string, relPath?: string) {
  1092. await this.say(
  1093. "error",
  1094. `Roo tried to use ${toolName}${
  1095. relPath ? ` for '${relPath.toPosix()}'` : ""
  1096. } without value for required parameter '${paramName}'. Retrying...`,
  1097. )
  1098. return formatResponse.toolError(formatResponse.missingToolParameterError(paramName))
  1099. }
  1100. // Lifecycle
  1101. // Start / Resume / Abort / Dispose
  1102. private async startTask(task?: string, images?: string[]): Promise<void> {
  1103. if (this.enableBridge) {
  1104. try {
  1105. await BridgeOrchestrator.subscribeToTask(this)
  1106. } catch (error) {
  1107. console.error(
  1108. `[Task#startTask] BridgeOrchestrator.subscribeToTask() failed: ${error instanceof Error ? error.message : String(error)}`,
  1109. )
  1110. }
  1111. }
  1112. // `conversationHistory` (for API) and `clineMessages` (for webview)
  1113. // need to be in sync.
  1114. // If the extension process were killed, then on restart the
  1115. // `clineMessages` might not be empty, so we need to set it to [] when
  1116. // we create a new Cline client (otherwise webview would show stale
  1117. // messages from previous session).
  1118. this.clineMessages = []
  1119. this.apiConversationHistory = []
  1120. // The todo list is already set in the constructor if initialTodos were provided
  1121. // No need to add any messages - the todoList property is already set
  1122. await this.providerRef.deref()?.postStateToWebview()
  1123. await this.say("text", task, images)
  1124. this.isInitialized = true
  1125. let imageBlocks: Anthropic.ImageBlockParam[] = formatResponse.imageBlocks(images)
  1126. // Task starting
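// Illustrative: the first user turn sent to the API is the task wrapped in
// <task> tags plus any image blocks, e.g.
//   [{ type: "text", text: "<task>\nFix the failing unit test\n</task>" }, ...imageBlocks]
// ("Fix the failing unit test" is a made-up example task.)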
  1127. await this.initiateTaskLoop([
  1128. {
  1129. type: "text",
  1130. text: `<task>\n${task}\n</task>`,
  1131. },
  1132. ...imageBlocks,
  1133. ])
  1134. }
  1135. private async resumeTaskFromHistory() {
  1136. if (this.enableBridge) {
  1137. try {
  1138. await BridgeOrchestrator.subscribeToTask(this)
  1139. } catch (error) {
  1140. console.error(
  1141. `[Task#resumeTaskFromHistory] BridgeOrchestrator.subscribeToTask() failed: ${error instanceof Error ? error.message : String(error)}`,
  1142. )
  1143. }
  1144. }
  1145. const modifiedClineMessages = await this.getSavedClineMessages()
  1146. // Remove any resume messages that may have been added before.
  1147. const lastRelevantMessageIndex = findLastIndex(
  1148. modifiedClineMessages,
  1149. (m) => !(m.ask === "resume_task" || m.ask === "resume_completed_task"),
  1150. )
  1151. if (lastRelevantMessageIndex !== -1) {
  1152. modifiedClineMessages.splice(lastRelevantMessageIndex + 1)
  1153. }
  1154. // Remove any trailing reasoning-only UI messages that were not part of the persisted API conversation
  1155. while (modifiedClineMessages.length > 0) {
  1156. const last = modifiedClineMessages[modifiedClineMessages.length - 1]
  1157. if (last.type === "say" && last.say === "reasoning") {
  1158. modifiedClineMessages.pop()
  1159. } else {
  1160. break
  1161. }
  1162. }
1163. // Since we don't use `api_req_finished` anymore, we need to check if the
1164. // last `api_req_started` has a cost value. If it doesn't, and there is no
1165. // cancellation reason to present, we remove it, since it indicates
1166. // an API request without any partial content streamed.
  1167. const lastApiReqStartedIndex = findLastIndex(
  1168. modifiedClineMessages,
  1169. (m) => m.type === "say" && m.say === "api_req_started",
  1170. )
  1171. if (lastApiReqStartedIndex !== -1) {
  1172. const lastApiReqStarted = modifiedClineMessages[lastApiReqStartedIndex]
  1173. const { cost, cancelReason }: ClineApiReqInfo = JSON.parse(lastApiReqStarted.text || "{}")
  1174. if (cost === undefined && cancelReason === undefined) {
  1175. modifiedClineMessages.splice(lastApiReqStartedIndex, 1)
  1176. }
  1177. }
  1178. await this.overwriteClineMessages(modifiedClineMessages)
  1179. this.clineMessages = await this.getSavedClineMessages()
  1180. // Now present the cline messages to the user and ask if they want to
  1181. // resume (NOTE: we ran into a bug before where the
1182. // apiConversationHistory wouldn't be initialized when opening an old
  1183. // task, and it was because we were waiting for resume).
  1184. // This is important in case the user deletes messages without resuming
  1185. // the task first.
  1186. this.apiConversationHistory = await this.getSavedApiConversationHistory()
  1187. const lastClineMessage = this.clineMessages
  1188. .slice()
  1189. .reverse()
  1190. .find((m) => !(m.ask === "resume_task" || m.ask === "resume_completed_task")) // Could be multiple resume tasks.
  1191. let askType: ClineAsk
  1192. if (lastClineMessage?.ask === "completion_result") {
  1193. askType = "resume_completed_task"
  1194. } else {
  1195. askType = "resume_task"
  1196. }
  1197. this.isInitialized = true
  1198. const { response, text, images } = await this.ask(askType) // Calls `postStateToWebview`.
  1199. let responseText: string | undefined
  1200. let responseImages: string[] | undefined
  1201. if (response === "messageResponse") {
  1202. await this.say("user_feedback", text, images)
  1203. responseText = text
  1204. responseImages = images
  1205. }
  1206. // Make sure that the api conversation history can be resumed by the API,
  1207. // even if it goes out of sync with cline messages.
  1208. let existingApiConversationHistory: ApiMessage[] = await this.getSavedApiConversationHistory()
  1209. // v2.0 xml tags refactor caveat: since we don't use tools anymore, we need to replace all tool use blocks with a text block since the API disallows conversations with tool uses and no tool schema
  1210. const conversationWithoutToolBlocks = existingApiConversationHistory.map((message) => {
  1211. if (Array.isArray(message.content)) {
  1212. const newContent = message.content.map((block) => {
  1213. if (block.type === "tool_use") {
  1214. // It's important we convert to the new tool schema
  1215. // format so the model doesn't get confused about how to
  1216. // invoke tools.
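// Illustrative example of this conversion: a tool_use block named
// "read_file" with input { path: "src/app.ts" } becomes a text block:
//   <read_file>
//   <path>
//   src/app.ts
//   </path>
//   </read_file>
// (tool name and input are hypothetical; the shape follows the mapping below.)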
  1217. const inputAsXml = Object.entries(block.input as Record<string, string>)
  1218. .map(([key, value]) => `<${key}>\n${value}\n</${key}>`)
  1219. .join("\n")
  1220. return {
  1221. type: "text",
  1222. text: `<${block.name}>\n${inputAsXml}\n</${block.name}>`,
  1223. } as Anthropic.Messages.TextBlockParam
  1224. } else if (block.type === "tool_result") {
  1225. // Convert block.content to text block array, removing images
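// Illustrative: a tool_result whose text content is "file contents here",
// for a tool named read_file, becomes a text block like
//   "[read_file Result]\n\nfile contents here"
// (name and content are hypothetical; see the mapping below.)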
  1226. const contentAsTextBlocks = Array.isArray(block.content)
  1227. ? block.content.filter((item) => item.type === "text")
  1228. : [{ type: "text", text: block.content }]
  1229. const textContent = contentAsTextBlocks.map((item) => item.text).join("\n\n")
  1230. const toolName = findToolName(block.tool_use_id, existingApiConversationHistory)
  1231. return {
  1232. type: "text",
  1233. text: `[${toolName} Result]\n\n${textContent}`,
  1234. } as Anthropic.Messages.TextBlockParam
  1235. }
  1236. return block
  1237. })
  1238. return { ...message, content: newContent }
  1239. }
  1240. return message
  1241. })
  1242. existingApiConversationHistory = conversationWithoutToolBlocks
  1243. // FIXME: remove tool use blocks altogether
  1244. // if the last message is an assistant message, we need to check if there's tool use since every tool use has to have a tool response
  1245. // if there's no tool use and only a text block, then we can just add a user message
  1246. // (note this isn't relevant anymore since we use custom tool prompts instead of tool use blocks, but this is here for legacy purposes in case users resume old tasks)
1247. // if the last message is a user message, we need to get the assistant message before it to see if it made tool calls, and if so, fill in the remaining tool responses with 'interrupted'
  1248. let modifiedOldUserContent: Anthropic.Messages.ContentBlockParam[] // either the last message if its user message, or the user message before the last (assistant) message
  1249. let modifiedApiConversationHistory: ApiMessage[] // need to remove the last user message to replace with new modified user message
  1250. if (existingApiConversationHistory.length > 0) {
  1251. const lastMessage = existingApiConversationHistory[existingApiConversationHistory.length - 1]
  1252. if (lastMessage.role === "assistant") {
  1253. const content = Array.isArray(lastMessage.content)
  1254. ? lastMessage.content
  1255. : [{ type: "text", text: lastMessage.content }]
  1256. const hasToolUse = content.some((block) => block.type === "tool_use")
  1257. if (hasToolUse) {
  1258. const toolUseBlocks = content.filter(
  1259. (block) => block.type === "tool_use",
  1260. ) as Anthropic.Messages.ToolUseBlock[]
  1261. const toolResponses: Anthropic.ToolResultBlockParam[] = toolUseBlocks.map((block) => ({
  1262. type: "tool_result",
  1263. tool_use_id: block.id,
  1264. content: "Task was interrupted before this tool call could be completed.",
  1265. }))
  1266. modifiedApiConversationHistory = [...existingApiConversationHistory] // no changes
  1267. modifiedOldUserContent = [...toolResponses]
  1268. } else {
  1269. modifiedApiConversationHistory = [...existingApiConversationHistory]
  1270. modifiedOldUserContent = []
  1271. }
  1272. } else if (lastMessage.role === "user") {
  1273. const previousAssistantMessage: ApiMessage | undefined =
  1274. existingApiConversationHistory[existingApiConversationHistory.length - 2]
  1275. const existingUserContent: Anthropic.Messages.ContentBlockParam[] = Array.isArray(lastMessage.content)
  1276. ? lastMessage.content
  1277. : [{ type: "text", text: lastMessage.content }]
  1278. if (previousAssistantMessage && previousAssistantMessage.role === "assistant") {
  1279. const assistantContent = Array.isArray(previousAssistantMessage.content)
  1280. ? previousAssistantMessage.content
  1281. : [{ type: "text", text: previousAssistantMessage.content }]
  1282. const toolUseBlocks = assistantContent.filter(
  1283. (block) => block.type === "tool_use",
  1284. ) as Anthropic.Messages.ToolUseBlock[]
  1285. if (toolUseBlocks.length > 0) {
  1286. const existingToolResults = existingUserContent.filter(
  1287. (block) => block.type === "tool_result",
  1288. ) as Anthropic.ToolResultBlockParam[]
  1289. const missingToolResponses: Anthropic.ToolResultBlockParam[] = toolUseBlocks
  1290. .filter(
  1291. (toolUse) => !existingToolResults.some((result) => result.tool_use_id === toolUse.id),
  1292. )
  1293. .map((toolUse) => ({
  1294. type: "tool_result",
  1295. tool_use_id: toolUse.id,
  1296. content: "Task was interrupted before this tool call could be completed.",
  1297. }))
  1298. modifiedApiConversationHistory = existingApiConversationHistory.slice(0, -1) // removes the last user message
  1299. modifiedOldUserContent = [...existingUserContent, ...missingToolResponses]
  1300. } else {
  1301. modifiedApiConversationHistory = existingApiConversationHistory.slice(0, -1)
  1302. modifiedOldUserContent = [...existingUserContent]
  1303. }
  1304. } else {
  1305. modifiedApiConversationHistory = existingApiConversationHistory.slice(0, -1)
  1306. modifiedOldUserContent = [...existingUserContent]
  1307. }
  1308. } else {
  1309. throw new Error("Unexpected: Last message is not a user or assistant message")
  1310. }
  1311. } else {
  1312. throw new Error("Unexpected: No existing API conversation history")
  1313. }
  1314. let newUserContent: Anthropic.Messages.ContentBlockParam[] = [...modifiedOldUserContent]
  1315. const agoText = ((): string => {
  1316. const timestamp = lastClineMessage?.ts ?? Date.now()
  1317. const now = Date.now()
  1318. const diff = now - timestamp
  1319. const minutes = Math.floor(diff / 60000)
  1320. const hours = Math.floor(minutes / 60)
  1321. const days = Math.floor(hours / 24)
  1322. if (days > 0) {
  1323. return `${days} day${days > 1 ? "s" : ""} ago`
  1324. }
  1325. if (hours > 0) {
  1326. return `${hours} hour${hours > 1 ? "s" : ""} ago`
  1327. }
  1328. if (minutes > 0) {
  1329. return `${minutes} minute${minutes > 1 ? "s" : ""} ago`
  1330. }
  1331. return "just now"
  1332. })()
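// Illustrative: a 90-minute gap yields "1 hour ago", a 30-hour gap yields
// "1 day ago", and anything under a minute yields "just now".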
  1333. if (responseText) {
  1334. newUserContent.push({
  1335. type: "text",
  1336. text: `\n\nNew instructions for task continuation:\n<user_message>\n${responseText}\n</user_message>`,
  1337. })
  1338. }
  1339. if (responseImages && responseImages.length > 0) {
  1340. newUserContent.push(...formatResponse.imageBlocks(responseImages))
  1341. }
  1342. // Ensure we have at least some content to send to the API.
  1343. // If newUserContent is empty, add a minimal resumption message.
  1344. if (newUserContent.length === 0) {
  1345. newUserContent.push({
  1346. type: "text",
  1347. text: "[TASK RESUMPTION] Resuming task...",
  1348. })
  1349. }
  1350. await this.overwriteApiConversationHistory(modifiedApiConversationHistory)
  1351. // Task resuming from history item.
  1352. await this.initiateTaskLoop(newUserContent)
  1353. }
  1354. public async abortTask(isAbandoned = false) {
  1355. // Aborting task
  1356. // Will stop any autonomously running promises.
  1357. if (isAbandoned) {
  1358. this.abandoned = true
  1359. }
  1360. this.abort = true
  1361. this.emit(RooCodeEventName.TaskAborted)
  1362. try {
  1363. this.dispose() // Call the centralized dispose method
  1364. } catch (error) {
  1365. console.error(`Error during task ${this.taskId}.${this.instanceId} disposal:`, error)
  1366. // Don't rethrow - we want abort to always succeed
  1367. }
1368. // Save the automatic-retry countdown message or any other pending content.
1369. try {
  1371. await this.saveClineMessages()
  1372. } catch (error) {
  1373. console.error(`Error saving messages during abort for task ${this.taskId}.${this.instanceId}:`, error)
  1374. }
  1375. }
  1376. public dispose(): void {
  1377. console.log(`[Task#dispose] disposing task ${this.taskId}.${this.instanceId}`)
  1378. // Dispose message queue and remove event listeners.
  1379. try {
  1380. if (this.messageQueueStateChangedHandler) {
  1381. this.messageQueueService.removeListener("stateChanged", this.messageQueueStateChangedHandler)
  1382. this.messageQueueStateChangedHandler = undefined
  1383. }
  1384. this.messageQueueService.dispose()
  1385. } catch (error) {
  1386. console.error("Error disposing message queue:", error)
  1387. }
  1388. // Remove all event listeners to prevent memory leaks.
  1389. try {
  1390. this.removeAllListeners()
  1391. } catch (error) {
  1392. console.error("Error removing event listeners:", error)
  1393. }
  1394. // Stop waiting for child task completion.
  1395. if (this.pauseInterval) {
  1396. clearInterval(this.pauseInterval)
  1397. this.pauseInterval = undefined
  1398. }
  1399. if (this.enableBridge) {
  1400. BridgeOrchestrator.getInstance()
  1401. ?.unsubscribeFromTask(this.taskId)
  1402. .catch((error) =>
  1403. console.error(
  1404. `[Task#dispose] BridgeOrchestrator#unsubscribeFromTask() failed: ${error instanceof Error ? error.message : String(error)}`,
  1405. ),
  1406. )
  1407. }
1408. // Release any terminals associated with this task.
1409. try {
  1411. TerminalRegistry.releaseTerminalsForTask(this.taskId)
  1412. } catch (error) {
  1413. console.error("Error releasing terminals:", error)
  1414. }
  1415. try {
  1416. this.urlContentFetcher.closeBrowser()
  1417. } catch (error) {
  1418. console.error("Error closing URL content fetcher browser:", error)
  1419. }
  1420. try {
  1421. this.browserSession.closeBrowser()
  1422. } catch (error) {
  1423. console.error("Error closing browser session:", error)
  1424. }
  1425. try {
  1426. if (this.rooIgnoreController) {
  1427. this.rooIgnoreController.dispose()
  1428. this.rooIgnoreController = undefined
  1429. }
  1430. } catch (error) {
  1431. console.error("Error disposing RooIgnoreController:", error)
  1432. // This is the critical one for the leak fix.
  1433. }
  1434. try {
  1435. this.fileContextTracker.dispose()
  1436. } catch (error) {
  1437. console.error("Error disposing file context tracker:", error)
  1438. }
  1439. try {
  1440. // If we're not streaming then `abortStream` won't be called.
  1441. if (this.isStreaming && this.diffViewProvider.isEditing) {
  1442. this.diffViewProvider.revertChanges().catch(console.error)
  1443. }
  1444. } catch (error) {
  1445. console.error("Error reverting diff changes:", error)
  1446. }
  1447. }
  1448. // Subtasks
  1449. // Spawn / Wait / Complete
  1450. public async startSubtask(message: string, initialTodos: TodoItem[], mode: string) {
  1451. const provider = this.providerRef.deref()
  1452. if (!provider) {
  1453. throw new Error("Provider not available")
  1454. }
  1455. const newTask = await provider.createTask(message, undefined, this, { initialTodos })
  1456. if (newTask) {
  1457. this.isPaused = true // Pause parent.
  1458. this.childTaskId = newTask.taskId
  1459. await provider.handleModeSwitch(mode) // Set child's mode.
  1460. await delay(500) // Allow mode change to take effect.
  1461. this.emit(RooCodeEventName.TaskPaused, this.taskId)
  1462. this.emit(RooCodeEventName.TaskSpawned, newTask.taskId)
  1463. }
  1464. return newTask
  1465. }
  1466. // Used when a sub-task is launched and the parent task is waiting for it to
  1467. // finish.
  1468. // TBD: Add a timeout to prevent infinite waiting.
  1469. public async waitForSubtask() {
  1470. await new Promise<void>((resolve) => {
  1471. this.pauseInterval = setInterval(() => {
  1472. if (!this.isPaused) {
  1473. clearInterval(this.pauseInterval)
  1474. this.pauseInterval = undefined
  1475. resolve()
  1476. }
  1477. }, 1000)
  1478. })
  1479. }
  1480. public async completeSubtask(lastMessage: string) {
  1481. this.isPaused = false
  1482. this.childTaskId = undefined
  1483. this.emit(RooCodeEventName.TaskUnpaused, this.taskId)
1484. // Fake an answer from the subtask saying that it has completed running
1485. // and that this is the result of what it has done. Add the message to
1486. // the chat history and to the webview UI.
  1487. try {
  1488. await this.say("subtask_result", lastMessage)
  1489. await this.addToApiConversationHistory({
  1490. role: "user",
  1491. content: [{ type: "text", text: `[new_task completed] Result: ${lastMessage}` }],
  1492. })
  1493. } catch (error) {
  1494. this.providerRef
  1495. .deref()
1496. ?.log(`Failed to add reply from subtask to the parent task's conversation: ${error}`)
  1497. throw error
  1498. }
  1499. }
  1500. // Task Loop
  1501. private async initiateTaskLoop(userContent: Anthropic.Messages.ContentBlockParam[]): Promise<void> {
  1502. // Kicks off the checkpoints initialization process in the background.
  1503. getCheckpointService(this)
  1504. let nextUserContent = userContent
  1505. let includeFileDetails = true
  1506. this.emit(RooCodeEventName.TaskStarted)
  1507. while (!this.abort) {
  1508. const didEndLoop = await this.recursivelyMakeClineRequests(nextUserContent, includeFileDetails)
  1509. includeFileDetails = false // We only need file details the first time.
1510. // The way this agentic loop works is that Cline is given a
1511. // task that he then calls tools to complete. Unless there's an
1512. // attempt_completion call, we keep responding back to him with his
1513. // tools' responses until he either calls attempt_completion or stops
1514. // using tools. If he does not use any more tools, we ask him
1515. // to consider whether he's completed the task and then call
1516. // attempt_completion; otherwise he proceeds with completing the task.
1517. // There is a MAX_REQUESTS_PER_TASK limit to prevent infinite
1518. // requests, but Cline is prompted to finish the task as efficiently
1519. // as he can.
  1520. if (didEndLoop) {
  1521. // For now a task never 'completes'. This will only happen if
  1522. // the user hits max requests and denies resetting the count.
  1523. break
  1524. } else {
  1525. nextUserContent = [{ type: "text", text: formatResponse.noToolsUsed() }]
  1526. this.consecutiveMistakeCount++
  1527. }
  1528. }
  1529. }
  1530. public async recursivelyMakeClineRequests(
  1531. userContent: Anthropic.Messages.ContentBlockParam[],
  1532. includeFileDetails: boolean = false,
  1533. ): Promise<boolean> {
  1534. interface StackItem {
  1535. userContent: Anthropic.Messages.ContentBlockParam[]
  1536. includeFileDetails: boolean
  1537. retryAttempt?: number
  1538. userMessageWasRemoved?: boolean // Track if user message was removed due to empty response
  1539. }
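// The explicit stack lets the loop below iterate instead of recursing: each
// follow-up request (tool results, retries) is pushed as a new StackItem.
// For example, a mid-stream retry pushes
//   { userContent: currentUserContent, includeFileDetails: false, retryAttempt: (retryAttempt ?? 0) + 1 }
// so the same request is re-attempted without growing the call stack.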
  1540. const stack: StackItem[] = [{ userContent, includeFileDetails, retryAttempt: 0 }]
  1541. while (stack.length > 0) {
  1542. const currentItem = stack.pop()!
  1543. const currentUserContent = currentItem.userContent
  1544. const currentIncludeFileDetails = currentItem.includeFileDetails
  1545. if (this.abort) {
  1546. throw new Error(`[RooCode#recursivelyMakeRooRequests] task ${this.taskId}.${this.instanceId} aborted`)
  1547. }
  1548. if (this.consecutiveMistakeLimit > 0 && this.consecutiveMistakeCount >= this.consecutiveMistakeLimit) {
  1549. const { response, text, images } = await this.ask(
  1550. "mistake_limit_reached",
  1551. t("common:errors.mistake_limit_guidance"),
  1552. )
  1553. if (response === "messageResponse") {
  1554. currentUserContent.push(
  1555. ...[
  1556. { type: "text" as const, text: formatResponse.tooManyMistakes(text) },
  1557. ...formatResponse.imageBlocks(images),
  1558. ],
  1559. )
  1560. await this.say("user_feedback", text, images)
  1561. // Track consecutive mistake errors in telemetry.
  1562. TelemetryService.instance.captureConsecutiveMistakeError(this.taskId)
  1563. }
  1564. this.consecutiveMistakeCount = 0
  1565. }
  1566. // In this Cline request loop, we need to check if this task instance
  1567. // has been asked to wait for a subtask to finish before continuing.
  1568. const provider = this.providerRef.deref()
  1569. if (this.isPaused && provider) {
  1570. provider.log(`[subtasks] paused ${this.taskId}.${this.instanceId}`)
  1571. await this.waitForSubtask()
  1572. provider.log(`[subtasks] resumed ${this.taskId}.${this.instanceId}`)
  1573. const currentMode = (await provider.getState())?.mode ?? defaultModeSlug
  1574. if (currentMode !== this.pausedModeSlug) {
  1575. // The mode has changed, we need to switch back to the paused mode.
  1576. await provider.handleModeSwitch(this.pausedModeSlug)
  1577. // Delay to allow mode change to take effect before next tool is executed.
  1578. await delay(500)
  1579. provider.log(
  1580. `[subtasks] task ${this.taskId}.${this.instanceId} has switched back to '${this.pausedModeSlug}' from '${currentMode}'`,
  1581. )
  1582. }
  1583. }
1584. // Getting verbose details is an expensive operation: it uses ripgrep to
1585. // build a top-down file structure of the project, which for large projects
1586. // can take a few seconds. For the best UX we show a placeholder api_req_started
1587. // message with a loading spinner while this happens.
  1588. // Determine API protocol based on provider and model
  1589. const modelId = getModelId(this.apiConfiguration)
  1590. const apiProtocol = getApiProtocol(this.apiConfiguration.apiProvider, modelId)
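// Illustrative note: getApiProtocol selects between the "anthropic" and the
// OpenAI-style handling used below for cost calculation; the exact
// provider/model mapping lives in getApiProtocol itself.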
  1591. await this.say(
  1592. "api_req_started",
  1593. JSON.stringify({
  1594. apiProtocol,
  1595. }),
  1596. )
  1597. const {
  1598. showRooIgnoredFiles = false,
  1599. includeDiagnosticMessages = true,
  1600. maxDiagnosticMessages = 50,
  1601. maxReadFileLine = -1,
  1602. } = (await this.providerRef.deref()?.getState()) ?? {}
  1603. const parsedUserContent = await processUserContentMentions({
  1604. userContent: currentUserContent,
  1605. cwd: this.cwd,
  1606. urlContentFetcher: this.urlContentFetcher,
  1607. fileContextTracker: this.fileContextTracker,
  1608. rooIgnoreController: this.rooIgnoreController,
  1609. showRooIgnoredFiles,
  1610. includeDiagnosticMessages,
  1611. maxDiagnosticMessages,
  1612. maxReadFileLine,
  1613. })
  1614. const environmentDetails = await getEnvironmentDetails(this, currentIncludeFileDetails)
  1615. // Add environment details as its own text block, separate from tool
  1616. // results.
  1617. const finalUserContent = [...parsedUserContent, { type: "text" as const, text: environmentDetails }]
  1618. // Only add user message to conversation history if:
  1619. // 1. This is the first attempt (retryAttempt === 0), OR
  1620. // 2. The message was removed in a previous iteration (userMessageWasRemoved === true)
  1621. // This prevents consecutive user messages while allowing re-add when needed
  1622. if ((currentItem.retryAttempt ?? 0) === 0 || currentItem.userMessageWasRemoved) {
  1623. await this.addToApiConversationHistory({ role: "user", content: finalUserContent })
  1624. TelemetryService.instance.captureConversationMessage(this.taskId, "user")
  1625. }
  1626. // Since we sent off a placeholder api_req_started message to update the
  1627. // webview while waiting to actually start the API request (to load
  1628. // potential details for example), we need to update the text of that
  1629. // message.
  1630. const lastApiReqIndex = findLastIndex(this.clineMessages, (m) => m.say === "api_req_started")
  1631. this.clineMessages[lastApiReqIndex].text = JSON.stringify({
  1632. apiProtocol,
  1633. } satisfies ClineApiReqInfo)
  1634. await this.saveClineMessages()
  1635. await provider?.postStateToWebview()
  1636. try {
  1637. let cacheWriteTokens = 0
  1638. let cacheReadTokens = 0
  1639. let inputTokens = 0
  1640. let outputTokens = 0
  1641. let totalCost: number | undefined
  1642. // We can't use `api_req_finished` anymore since it's a unique case
  1643. // where it could come after a streaming message (i.e. in the middle
  1644. // of being updated or executed).
  1645. // Fortunately `api_req_finished` was always parsed out for the GUI
  1646. // anyways, so it remains solely for legacy purposes to keep track
  1647. // of prices in tasks from history (it's worth removing a few months
  1648. // from now).
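// Illustrative shape of the api_req_started payload after updateApiReqMsg runs
// (values are hypothetical):
//   { "apiProtocol": "anthropic", "tokensIn": 1200, "tokensOut": 350,
//     "cacheWrites": 0, "cacheReads": 0, "cost": 0.0123 }
// plus cancelReason/streamingFailedMessage when a stream is aborted.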
  1649. const updateApiReqMsg = (cancelReason?: ClineApiReqCancelReason, streamingFailedMessage?: string) => {
  1650. if (lastApiReqIndex < 0 || !this.clineMessages[lastApiReqIndex]) {
  1651. return
  1652. }
  1653. const existingData = JSON.parse(this.clineMessages[lastApiReqIndex].text || "{}")
  1654. // Calculate total tokens and cost using provider-aware function
  1655. const modelId = getModelId(this.apiConfiguration)
  1656. const apiProtocol = getApiProtocol(this.apiConfiguration.apiProvider, modelId)
  1657. const costResult =
  1658. apiProtocol === "anthropic"
  1659. ? calculateApiCostAnthropic(
  1660. this.api.getModel().info,
  1661. inputTokens,
  1662. outputTokens,
  1663. cacheWriteTokens,
  1664. cacheReadTokens,
  1665. )
  1666. : calculateApiCostOpenAI(
  1667. this.api.getModel().info,
  1668. inputTokens,
  1669. outputTokens,
  1670. cacheWriteTokens,
  1671. cacheReadTokens,
  1672. )
  1673. this.clineMessages[lastApiReqIndex].text = JSON.stringify({
  1674. ...existingData,
  1675. tokensIn: costResult.totalInputTokens,
  1676. tokensOut: costResult.totalOutputTokens,
  1677. cacheWrites: cacheWriteTokens,
  1678. cacheReads: cacheReadTokens,
  1679. cost: totalCost ?? costResult.totalCost,
  1680. cancelReason,
  1681. streamingFailedMessage,
  1682. } satisfies ClineApiReqInfo)
  1683. }
  1684. const abortStream = async (cancelReason: ClineApiReqCancelReason, streamingFailedMessage?: string) => {
  1685. if (this.diffViewProvider.isEditing) {
  1686. await this.diffViewProvider.revertChanges() // closes diff view
  1687. }
  1688. // if last message is a partial we need to update and save it
  1689. const lastMessage = this.clineMessages.at(-1)
  1690. if (lastMessage && lastMessage.partial) {
  1691. // lastMessage.ts = Date.now() DO NOT update ts since it is used as a key for virtuoso list
  1692. lastMessage.partial = false
  1693. // instead of streaming partialMessage events, we do a save and post like normal to persist to disk
  1694. console.log("updating partial message", lastMessage)
  1695. }
1696. // Update `api_req_started` with the cancellation reason and cost, so that
1697. // we can display the cost of the partial stream and why it was cancelled.
  1698. updateApiReqMsg(cancelReason, streamingFailedMessage)
  1699. await this.saveClineMessages()
1700. // Signals to the provider that it can retrieve the saved messages
1701. // from disk, since abortTask cannot be awaited.
  1702. this.didFinishAbortingStream = true
  1703. }
  1704. // Reset streaming state for each new API request
  1705. this.currentStreamingContentIndex = 0
  1706. this.currentStreamingDidCheckpoint = false
  1707. this.assistantMessageContent = []
  1708. this.didCompleteReadingStream = false
  1709. this.userMessageContent = []
  1710. this.userMessageContentReady = false
  1711. this.didRejectTool = false
  1712. this.didAlreadyUseTool = false
  1713. this.presentAssistantMessageLocked = false
  1714. this.presentAssistantMessageHasPendingUpdates = false
  1715. this.assistantMessageParser.reset()
  1716. await this.diffViewProvider.reset()
  1717. // Yields only if the first chunk is successful, otherwise will
  1718. // allow the user to retry the request (most likely due to rate
  1719. // limit error, which gets thrown on the first chunk).
  1720. const stream = this.attemptApiRequest()
  1721. let assistantMessage = ""
  1722. let reasoningMessage = ""
  1723. let pendingGroundingSources: GroundingSource[] = []
  1724. this.isStreaming = true
  1725. try {
  1726. const iterator = stream[Symbol.asyncIterator]()
  1727. let item = await iterator.next()
  1728. while (!item.done) {
  1729. const chunk = item.value
  1730. item = await iterator.next()
  1731. if (!chunk) {
1732. // Sometimes chunk is undefined; no idea what can cause
1733. // it, but this workaround seems to fix it.
  1734. continue
  1735. }
  1736. switch (chunk.type) {
  1737. case "reasoning": {
  1738. reasoningMessage += chunk.text
  1739. // Only apply formatting if the message contains sentence-ending punctuation followed by **
  1740. let formattedReasoning = reasoningMessage
  1741. if (reasoningMessage.includes("**")) {
  1742. // Add line breaks before **Title** patterns that appear after sentence endings
  1743. // This targets section headers like "...end of sentence.**Title Here**"
  1744. // Handles periods, exclamation marks, and question marks
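// Illustrative: "Checked the tests.**Next Steps**" becomes
// "Checked the tests.\n\n**Next Steps**" (the example text is made up).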
  1745. formattedReasoning = reasoningMessage.replace(
  1746. /([.!?])\*\*([^*\n]+)\*\*/g,
  1747. "$1\n\n**$2**",
  1748. )
  1749. }
  1750. await this.say("reasoning", formattedReasoning, undefined, true)
  1751. break
  1752. }
  1753. case "usage":
  1754. inputTokens += chunk.inputTokens
  1755. outputTokens += chunk.outputTokens
  1756. cacheWriteTokens += chunk.cacheWriteTokens ?? 0
  1757. cacheReadTokens += chunk.cacheReadTokens ?? 0
  1758. totalCost = chunk.totalCost
  1759. break
  1760. case "grounding":
1761. // Handle grounding sources separately from regular content
1762. // to prevent state persistence issues; store them until the stream ends.
  1763. if (chunk.sources && chunk.sources.length > 0) {
  1764. pendingGroundingSources.push(...chunk.sources)
  1765. }
  1766. break
  1767. case "tool_call": {
  1768. // Convert native tool call to ToolUse format
  1769. const toolUse = NativeToolCallParser.parseToolCall({
  1770. id: chunk.id,
  1771. name: chunk.name as ToolName,
  1772. arguments: chunk.arguments,
  1773. })
  1774. if (!toolUse) {
  1775. console.error(`Failed to parse tool call for task ${this.taskId}:`, chunk)
  1776. break
  1777. }
  1778. // Store the tool call ID on the ToolUse object for later reference
  1779. // This is needed to create tool_result blocks that reference the correct tool_use_id
  1780. toolUse.id = chunk.id
  1781. // Add the tool use to assistant message content
  1782. this.assistantMessageContent.push(toolUse)
  1783. // Mark that we have new content to process
  1784. this.userMessageContentReady = false
  1785. // Present the tool call to user
  1786. presentAssistantMessage(this)
  1787. break
  1788. }
  1789. case "text": {
  1790. assistantMessage += chunk.text
  1791. // Parse raw assistant message chunk into content blocks.
  1792. const prevLength = this.assistantMessageContent.length
  1793. this.assistantMessageContent = this.assistantMessageParser.processChunk(chunk.text)
  1794. if (this.assistantMessageContent.length > prevLength) {
  1795. // New content we need to present, reset to
  1796. // false in case previous content set this to true.
  1797. this.userMessageContentReady = false
  1798. }
  1799. // Present content to user.
  1800. presentAssistantMessage(this)
  1801. break
  1802. }
  1803. }
  1804. if (this.abort) {
  1805. console.log(`aborting stream, this.abandoned = ${this.abandoned}`)
  1806. if (!this.abandoned) {
  1807. // Only need to gracefully abort if this instance
  1808. // isn't abandoned (sometimes OpenRouter stream
  1809. // hangs, in which case this would affect future
  1810. // instances of Cline).
  1811. await abortStream("user_cancelled")
  1812. }
  1813. break // Aborts the stream.
  1814. }
  1815. if (this.didRejectTool) {
  1816. // `userContent` has a tool rejection, so interrupt the
  1817. // assistant's response to present the user's feedback.
  1818. assistantMessage += "\n\n[Response interrupted by user feedback]"
  1819. // Instead of setting this preemptively, we allow the
  1820. // present iterator to finish and set
  1821. // userMessageContentReady when its ready.
  1822. // this.userMessageContentReady = true
  1823. break
  1824. }
  1825. if (this.didAlreadyUseTool) {
  1826. assistantMessage +=
  1827. "\n\n[Response interrupted by a tool use result. Only one tool may be used at a time and should be placed at the end of the message.]"
  1828. break
  1829. }
  1830. }
  1831. // Create a copy of current token values to avoid race conditions
  1832. const currentTokens = {
  1833. input: inputTokens,
  1834. output: outputTokens,
  1835. cacheWrite: cacheWriteTokens,
  1836. cacheRead: cacheReadTokens,
  1837. total: totalCost,
  1838. }
  1839. const drainStreamInBackgroundToFindAllUsage = async (apiReqIndex: number) => {
  1840. const timeoutMs = DEFAULT_USAGE_COLLECTION_TIMEOUT_MS
  1841. const startTime = performance.now()
  1842. const modelId = getModelId(this.apiConfiguration)
  1843. // Local variables to accumulate usage data without affecting the main flow
  1844. let bgInputTokens = currentTokens.input
  1845. let bgOutputTokens = currentTokens.output
  1846. let bgCacheWriteTokens = currentTokens.cacheWrite
  1847. let bgCacheReadTokens = currentTokens.cacheRead
  1848. let bgTotalCost = currentTokens.total
  1849. // Helper function to capture telemetry and update messages
  1850. const captureUsageData = async (
  1851. tokens: {
  1852. input: number
  1853. output: number
  1854. cacheWrite: number
  1855. cacheRead: number
  1856. total?: number
  1857. },
  1858. messageIndex: number = apiReqIndex,
  1859. ) => {
  1860. if (
  1861. tokens.input > 0 ||
  1862. tokens.output > 0 ||
  1863. tokens.cacheWrite > 0 ||
  1864. tokens.cacheRead > 0
  1865. ) {
  1866. // Update the shared variables atomically
  1867. inputTokens = tokens.input
  1868. outputTokens = tokens.output
  1869. cacheWriteTokens = tokens.cacheWrite
  1870. cacheReadTokens = tokens.cacheRead
  1871. totalCost = tokens.total
  1872. // Update the API request message with the latest usage data
  1873. updateApiReqMsg()
  1874. await this.saveClineMessages()
  1875. // Update the specific message in the webview
  1876. const apiReqMessage = this.clineMessages[messageIndex]
  1877. if (apiReqMessage) {
  1878. await this.updateClineMessage(apiReqMessage)
  1879. }
  1880. // Capture telemetry with provider-aware cost calculation
  1881. const modelId = getModelId(this.apiConfiguration)
  1882. const apiProtocol = getApiProtocol(this.apiConfiguration.apiProvider, modelId)
  1883. // Use the appropriate cost function based on the API protocol
  1884. const costResult =
  1885. apiProtocol === "anthropic"
  1886. ? calculateApiCostAnthropic(
  1887. this.api.getModel().info,
  1888. tokens.input,
  1889. tokens.output,
  1890. tokens.cacheWrite,
  1891. tokens.cacheRead,
  1892. )
  1893. : calculateApiCostOpenAI(
  1894. this.api.getModel().info,
  1895. tokens.input,
  1896. tokens.output,
  1897. tokens.cacheWrite,
  1898. tokens.cacheRead,
  1899. )
  1900. TelemetryService.instance.captureLlmCompletion(this.taskId, {
  1901. inputTokens: costResult.totalInputTokens,
  1902. outputTokens: costResult.totalOutputTokens,
  1903. cacheWriteTokens: tokens.cacheWrite,
  1904. cacheReadTokens: tokens.cacheRead,
  1905. cost: tokens.total ?? costResult.totalCost,
  1906. })
  1907. }
  1908. }
  1909. try {
  1910. // Continue processing the original stream from where the main loop left off
  1911. let usageFound = false
  1912. let chunkCount = 0
  1913. // Use the same iterator that the main loop was using
  1914. while (!item.done) {
  1915. // Check for timeout
  1916. if (performance.now() - startTime > timeoutMs) {
  1917. console.warn(
  1918. `[Background Usage Collection] Timed out after ${timeoutMs}ms for model: ${modelId}, processed ${chunkCount} chunks`,
  1919. )
  1920. // Clean up the iterator before breaking
  1921. if (iterator.return) {
  1922. await iterator.return(undefined)
  1923. }
  1924. break
  1925. }
  1926. const chunk = item.value
  1927. item = await iterator.next()
  1928. chunkCount++
  1929. if (chunk && chunk.type === "usage") {
  1930. usageFound = true
  1931. bgInputTokens += chunk.inputTokens
  1932. bgOutputTokens += chunk.outputTokens
  1933. bgCacheWriteTokens += chunk.cacheWriteTokens ?? 0
  1934. bgCacheReadTokens += chunk.cacheReadTokens ?? 0
  1935. bgTotalCost = chunk.totalCost
  1936. }
  1937. }
  1938. if (
  1939. usageFound ||
  1940. bgInputTokens > 0 ||
  1941. bgOutputTokens > 0 ||
  1942. bgCacheWriteTokens > 0 ||
  1943. bgCacheReadTokens > 0
  1944. ) {
  1945. // We have usage data either from a usage chunk or accumulated tokens
  1946. await captureUsageData(
  1947. {
  1948. input: bgInputTokens,
  1949. output: bgOutputTokens,
  1950. cacheWrite: bgCacheWriteTokens,
  1951. cacheRead: bgCacheReadTokens,
  1952. total: bgTotalCost,
  1953. },
  1954. lastApiReqIndex,
  1955. )
  1956. } else {
  1957. console.warn(
  1958. `[Background Usage Collection] Suspicious: request ${apiReqIndex} is complete, but no usage info was found. Model: ${modelId}`,
  1959. )
  1960. }
  1961. } catch (error) {
  1962. console.error("Error draining stream for usage data:", error)
  1963. // Still try to capture whatever usage data we have collected so far
  1964. if (
  1965. bgInputTokens > 0 ||
  1966. bgOutputTokens > 0 ||
  1967. bgCacheWriteTokens > 0 ||
  1968. bgCacheReadTokens > 0
  1969. ) {
  1970. await captureUsageData(
  1971. {
  1972. input: bgInputTokens,
  1973. output: bgOutputTokens,
  1974. cacheWrite: bgCacheWriteTokens,
  1975. cacheRead: bgCacheReadTokens,
  1976. total: bgTotalCost,
  1977. },
  1978. lastApiReqIndex,
  1979. )
  1980. }
  1981. }
  1982. }
  1983. // Start the background task and handle any errors
  1984. drainStreamInBackgroundToFindAllUsage(lastApiReqIndex).catch((error) => {
  1985. console.error("Background usage collection failed:", error)
  1986. })
  1987. } catch (error) {
1988. // Abandoned happens when the extension is no longer waiting for the
1989. // Cline instance to finish aborting (an error is thrown here when
1990. // any function in the loop throws due to this.abort).
  1991. if (!this.abandoned) {
  1992. // Determine cancellation reason
  1993. const cancelReason: ClineApiReqCancelReason = this.abort ? "user_cancelled" : "streaming_failed"
  1994. const streamingFailedMessage = this.abort
  1995. ? undefined
  1996. : (error.message ?? JSON.stringify(serializeError(error), null, 2))
  1997. // Clean up partial state
  1998. await abortStream(cancelReason, streamingFailedMessage)
  1999. if (this.abort) {
  2000. // User cancelled - abort the entire task
  2001. this.abortReason = cancelReason
  2002. await this.abortTask()
  2003. } else {
  2004. // Stream failed - log the error and retry with the same content
  2005. // The existing rate limiting will prevent rapid retries
  2006. console.error(
  2007. `[Task#${this.taskId}.${this.instanceId}] Stream failed, will retry: ${streamingFailedMessage}`,
  2008. )
  2009. // Apply exponential backoff similar to first-chunk errors when auto-resubmit is enabled
  2010. const stateForBackoff = await this.providerRef.deref()?.getState()
  2011. if (stateForBackoff?.autoApprovalEnabled && stateForBackoff?.alwaysApproveResubmit) {
  2012. await this.backoffAndAnnounce(
  2013. currentItem.retryAttempt ?? 0,
  2014. error,
  2015. streamingFailedMessage,
  2016. )
  2017. // Check if task was aborted during the backoff
  2018. if (this.abort) {
  2019. console.log(
  2020. `[Task#${this.taskId}.${this.instanceId}] Task aborted during mid-stream retry backoff`,
  2021. )
  2022. // Abort the entire task
  2023. this.abortReason = "user_cancelled"
  2024. await this.abortTask()
  2025. break
  2026. }
  2027. }
  2028. // Push the same content back onto the stack to retry, incrementing the retry attempt counter
  2029. stack.push({
  2030. userContent: currentUserContent,
  2031. includeFileDetails: false,
  2032. retryAttempt: (currentItem.retryAttempt ?? 0) + 1,
  2033. })
  2034. // Continue to retry the request
  2035. continue
  2036. }
  2037. }
  2038. } finally {
  2039. this.isStreaming = false
  2040. }
2041. // Need to check here in case the stream was aborted.
  2042. if (this.abort || this.abandoned) {
  2043. throw new Error(
  2044. `[RooCode#recursivelyMakeRooRequests] task ${this.taskId}.${this.instanceId} aborted`,
  2045. )
  2046. }
  2047. this.didCompleteReadingStream = true
  2048. // Set any blocks to be complete to allow `presentAssistantMessage`
  2049. // to finish and set `userMessageContentReady` to true.
  2050. // (Could be a text block that had no subsequent tool uses, or a
  2051. // text block at the very end, or an invalid tool use, etc. Whatever
  2052. // the case, `presentAssistantMessage` relies on these blocks either
  2053. // to be completed or the user to reject a block in order to proceed
  2054. // and eventually set userMessageContentReady to true.)
  2055. const partialBlocks = this.assistantMessageContent.filter((block) => block.partial)
  2056. partialBlocks.forEach((block) => (block.partial = false))
  2057. // Can't just do this b/c a tool could be in the middle of executing.
  2058. // this.assistantMessageContent.forEach((e) => (e.partial = false))
  2059. // Now that the stream is complete, finalize any remaining partial content blocks
  2060. this.assistantMessageParser.finalizeContentBlocks()
  2061. // Preserve tool_use blocks that were added via native protocol (not parsed from text)
  2062. // These come from tool_call chunks and are added directly to assistantMessageContent
  2063. const nativeToolBlocks = this.assistantMessageContent.filter((block) => block.type === "tool_use")
  2064. const parsedBlocks = this.assistantMessageParser.getContentBlocks()
  2065. // Merge: parser blocks + native tool blocks that aren't in parser
  2066. this.assistantMessageContent = [...parsedBlocks, ...nativeToolBlocks]
  2067. if (partialBlocks.length > 0) {
  2068. // If there is content to update then it will complete and
  2069. // update `this.userMessageContentReady` to true, which we
  2070. // `pWaitFor` before making the next request. All this is really
  2071. // doing is presenting the last partial message that we just set
  2072. // to complete.
  2073. presentAssistantMessage(this)
  2074. }
  2075. // Note: updateApiReqMsg() is now called from within drainStreamInBackgroundToFindAllUsage
  2076. // to ensure usage data is captured even when the stream is interrupted. The background task
  2077. // uses local variables to accumulate usage data before atomically updating the shared state.
  2078. // Complete the reasoning message if it exists
  2079. // We can't use say() here because the reasoning message may not be the last message
  2080. // (other messages like text blocks or tool uses may have been added after it during streaming)
  2081. if (reasoningMessage) {
  2082. const lastReasoningIndex = findLastIndex(
  2083. this.clineMessages,
  2084. (m) => m.type === "say" && m.say === "reasoning",
  2085. )
  2086. if (lastReasoningIndex !== -1 && this.clineMessages[lastReasoningIndex].partial) {
  2087. this.clineMessages[lastReasoningIndex].partial = false
  2088. await this.updateClineMessage(this.clineMessages[lastReasoningIndex])
  2089. }
  2090. }
  2091. await this.saveClineMessages()
  2092. await this.providerRef.deref()?.postStateToWebview()
  2093. // Reset parser after each complete conversation round
  2094. this.assistantMessageParser.reset()
  2095. // Now add to apiConversationHistory.
  2096. // Need to save assistant responses to file before proceeding to
  2097. // tool use since user can exit at any moment and we wouldn't be
  2098. // able to save the assistant's response.
  2099. let didEndLoop = false
  2100. // Check if we have any content to process (text or tool uses)
  2101. const hasTextContent = assistantMessage.length > 0
  2102. const hasToolUses = this.assistantMessageContent.some((block) => block.type === "tool_use")
  2103. if (hasTextContent || hasToolUses) {
  2104. // Display grounding sources to the user if they exist
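// Illustrative: two grounding sources render as
//   `${t("common:gemini.sources")} [1](https://example.com/a), [2](https://example.com/b)`
// (the URLs are hypothetical).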
  2105. if (pendingGroundingSources.length > 0) {
  2106. const citationLinks = pendingGroundingSources.map((source, i) => `[${i + 1}](${source.url})`)
  2107. const sourcesText = `${t("common:gemini.sources")} ${citationLinks.join(", ")}`
  2108. await this.say("text", sourcesText, undefined, false, undefined, undefined, {
  2109. isNonInteractive: true,
  2110. })
  2111. }
  2112. // Check if we should preserve reasoning in the assistant message
  2113. let finalAssistantMessage = assistantMessage
  2114. if (reasoningMessage && this.api.getModel().info.preserveReasoning) {
  2115. // Prepend reasoning in XML tags to the assistant message so it's included in API history
  2116. finalAssistantMessage = `<think>${reasoningMessage}</think>\n${assistantMessage}`
  2117. }
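// Illustrative: with preserveReasoning enabled, the stored assistant message
// looks like "<think>reasoning text</think>\nfinal answer text" (placeholder text).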
  2118. // Build the assistant message content array
  2119. const assistantContent: Array<Anthropic.TextBlockParam | Anthropic.ToolUseBlockParam> = []
  2120. // Add text content if present
  2121. if (finalAssistantMessage) {
  2122. assistantContent.push({
  2123. type: "text" as const,
  2124. text: finalAssistantMessage,
  2125. })
  2126. }
  2127. // Add tool_use blocks with their IDs for native protocol
  2128. const toolUseBlocks = this.assistantMessageContent.filter((block) => block.type === "tool_use")
  2129. for (const toolUse of toolUseBlocks) {
  2130. // Get the tool call ID that was stored during parsing
  2131. const toolCallId = (toolUse as any).id
  2132. if (toolCallId) {
  2133. // nativeArgs is already in the correct API format for all tools
  2134. const input = toolUse.nativeArgs || toolUse.params
  2135. assistantContent.push({
  2136. type: "tool_use" as const,
  2137. id: toolCallId,
  2138. name: toolUse.name,
  2139. input,
  2140. })
  2141. }
  2142. }
  2143. await this.addToApiConversationHistory({
  2144. role: "assistant",
  2145. content: assistantContent,
  2146. })
  2147. TelemetryService.instance.captureConversationMessage(this.taskId, "assistant")
  2148. // NOTE: This comment is here for future reference - this was a
  2149. // workaround for `userMessageContent` not getting set to true.
  2150. // It was due to it not recursively calling for partial blocks
  2151. // when `didRejectTool`, so it would get stuck waiting for a
  2152. // partial block to complete before it could continue.
2153. // In case the content blocks finished, it may be that the API stream
2154. // finished after the last parsed content block was executed, so
2155. // we are able to detect out of bounds and set
2156. // `userMessageContentReady` to true (note you should not call
2157. // `presentAssistantMessage` since if the last block is
2158. // completed it will be presented again).
  2159. // const completeBlocks = this.assistantMessageContent.filter((block) => !block.partial) // If there are any partial blocks after the stream ended we can consider them invalid.
  2160. // if (this.currentStreamingContentIndex >= completeBlocks.length) {
  2161. // this.userMessageContentReady = true
  2162. // }
  2163. await pWaitFor(() => this.userMessageContentReady)
2164. // If the model did not use a tool, then we need to tell it to
2165. // either use a tool or call attempt_completion.
  2166. const didToolUse = this.assistantMessageContent.some((block) => block.type === "tool_use")
  2167. if (!didToolUse) {
  2168. this.userMessageContent.push({ type: "text", text: formatResponse.noToolsUsed() })
  2169. this.consecutiveMistakeCount++
  2170. }
  2171. if (this.userMessageContent.length > 0) {
  2172. stack.push({
  2173. userContent: [...this.userMessageContent], // Create a copy to avoid mutation issues
  2174. includeFileDetails: false, // Subsequent iterations don't need file details
  2175. })
  2176. // Add periodic yielding to prevent blocking
  2177. await new Promise((resolve) => setImmediate(resolve))
  2178. }
  2179. // Continue to next iteration instead of setting didEndLoop from recursive call
  2180. continue
  2181. } else {
2182. // If there are no assistant responses, that means we got no text
2183. // or tool_use content blocks from the API, which we should assume
2184. // is an error.
2185. // IMPORTANT: For the native tool protocol, we already added the user message to
2186. // apiConversationHistory earlier in this iteration. Since the assistant failed to
2187. // respond, we need to remove that message before retrying to avoid having two
2188. // consecutive user messages (which would cause tool_result validation errors).
  2189. const toolProtocol = vscode.workspace
  2190. .getConfiguration(Package.name)
  2191. .get<ToolProtocol>("toolProtocol", "xml")
  2192. const isNativeProtocol = toolProtocol === TOOL_PROTOCOL.NATIVE
  2193. if (isNativeProtocol && this.apiConversationHistory.length > 0) {
  2194. const lastMessage = this.apiConversationHistory[this.apiConversationHistory.length - 1]
  2195. if (lastMessage.role === "user") {
  2196. // Remove the last user message that we added earlier
  2197. this.apiConversationHistory.pop()
  2198. }
  2199. }
  2200. // Check if we should auto-retry or prompt the user
  2201. const state = await this.providerRef.deref()?.getState()
  2202. if (state?.autoApprovalEnabled && state?.alwaysApproveResubmit) {
  2203. // Auto-retry with backoff - don't persist failure message when retrying
  2204. const errorMsg =
  2205. "Unexpected API Response: The language model did not provide any assistant messages. This may indicate an issue with the API or the model's output."
  2206. await this.backoffAndAnnounce(
  2207. currentItem.retryAttempt ?? 0,
  2208. new Error("Empty assistant response"),
  2209. errorMsg,
  2210. )
  2211. // Check if task was aborted during the backoff
  2212. if (this.abort) {
  2213. console.log(
  2214. `[Task#${this.taskId}.${this.instanceId}] Task aborted during empty-assistant retry backoff`,
  2215. )
  2216. break
  2217. }
  2218. // Push the same content back onto the stack to retry, incrementing the retry attempt counter
  2219. // Mark that user message was removed so it gets re-added on retry
  2220. stack.push({
  2221. userContent: currentUserContent,
  2222. includeFileDetails: false,
  2223. retryAttempt: (currentItem.retryAttempt ?? 0) + 1,
  2224. userMessageWasRemoved: true,
  2225. })
  2226. // Continue to retry the request
  2227. continue
  2228. } else {
  2229. // Prompt the user for retry decision
  2230. const { response } = await this.ask(
  2231. "api_req_failed",
  2232. "The model returned no assistant messages. This may indicate an issue with the API or the model's output.",
  2233. )
  2234. if (response === "yesButtonClicked") {
  2235. await this.say("api_req_retried")
  2236. // Push the same content back to retry
  2237. stack.push({
  2238. userContent: currentUserContent,
  2239. includeFileDetails: false,
  2240. retryAttempt: (currentItem.retryAttempt ?? 0) + 1,
  2241. })
  2242. // Continue to retry the request
  2243. continue
  2244. } else {
  2245. // User declined to retry
  2246. // For native protocol, re-add the user message we removed
  2247. if (isNativeProtocol) {
  2248. await this.addToApiConversationHistory({
  2249. role: "user",
  2250. content: currentUserContent,
  2251. })
  2252. }
  2253. await this.say(
  2254. "error",
  2255. "Unexpected API Response: The language model did not provide any assistant messages. This may indicate an issue with the API or the model's output.",
  2256. )
  2257. await this.addToApiConversationHistory({
  2258. role: "assistant",
  2259. content: [{ type: "text", text: "Failure: I did not provide a response." }],
  2260. })
  2261. }
  2262. }
  2263. }
  2264. // If we reach here without continuing, return false (will always be false for now)
  2265. return false
  2266. } catch (error) {
2267. // This should never happen, since the only thing that can throw an
2268. // error is attemptApiRequest, which is wrapped in a try/catch that
2269. // sends an ask; if noButtonClicked, it will clear the current
2270. // task and destroy this instance. However, to avoid an unhandled
2271. // promise rejection, we end this loop, which ends execution
2272. // of this instance (see `startTask`).
  2273. return true // Needs to be true so parent loop knows to end task.
  2274. }
  2275. }
  2276. // If we exit the while loop normally (stack is empty), return false
  2277. return false
  2278. }
	private async getSystemPrompt(): Promise<string> {
		const { mcpEnabled } = (await this.providerRef.deref()?.getState()) ?? {}
		let mcpHub: McpHub | undefined
		if (mcpEnabled ?? true) {
			const provider = this.providerRef.deref()
			if (!provider) {
				throw new Error("Provider reference lost during view transition")
			}
			// Wait for MCP hub initialization through McpServerManager
			mcpHub = await McpServerManager.getInstance(provider.context, provider)
			if (!mcpHub) {
				throw new Error("Failed to get MCP hub from server manager")
			}
			// Wait for MCP servers to be connected before generating system prompt
			await pWaitFor(() => !mcpHub!.isConnecting, { timeout: 10_000 }).catch(() => {
				console.error("MCP servers failed to connect in time")
			})
		}
		const rooIgnoreInstructions = this.rooIgnoreController?.getInstructions()
		const state = await this.providerRef.deref()?.getState()
		const {
			browserViewportSize,
			mode,
			customModes,
			customModePrompts,
			customInstructions,
			experiments,
			enableMcpServerCreation,
			browserToolEnabled,
			language,
			maxConcurrentFileReads,
			maxReadFileLine,
			apiConfiguration,
		} = state ?? {}
		return await (async () => {
			const provider = this.providerRef.deref()
			if (!provider) {
				throw new Error("Provider not available")
			}
			// Align browser tool enablement with generateSystemPrompt: require model image support,
			// mode to include the browser group, and the user setting to be enabled.
			const modeConfig = getModeBySlug(mode ?? defaultModeSlug, customModes)
			const modeSupportsBrowser = modeConfig?.groups.some((group) => getGroupName(group) === "browser") ?? false
			// Check if model supports browser capability (images)
			const modelInfo = this.api.getModel().info
			const modelSupportsBrowser = (modelInfo as any)?.supportsImages === true
			const canUseBrowserTool = modelSupportsBrowser && modeSupportsBrowser && (browserToolEnabled ?? true)
			return SYSTEM_PROMPT(
				provider.context,
				this.cwd,
				canUseBrowserTool,
				mcpHub,
				this.diffStrategy,
				browserViewportSize ?? "900x600",
				mode ?? defaultModeSlug,
				customModePrompts,
				customModes,
				customInstructions,
				this.diffEnabled,
				experiments,
				enableMcpServerCreation,
				language,
				rooIgnoreInstructions,
				maxReadFileLine !== -1,
				{
					maxConcurrentFileReads: maxConcurrentFileReads ?? 5,
					todoListEnabled: apiConfiguration?.todoListEnabled ?? true,
					browserToolEnabled: browserToolEnabled ?? true,
					useAgentRules:
						vscode.workspace.getConfiguration(Package.name).get<boolean>("useAgentRules") ?? true,
					newTaskRequireTodos: vscode.workspace
						.getConfiguration(Package.name)
						.get<boolean>("newTaskRequireTodos", false),
					toolProtocol: vscode.workspace
						.getConfiguration(Package.name)
						.get<ToolProtocol>("toolProtocol", "xml"),
				},
				undefined, // todoList
				this.api.getModel().id,
			)
		})()
	}
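	/**
	 * Resolves the ID of the currently selected API configuration profile, falling
	 * back to "default" when no matching profile is found in listApiConfigMeta.
	 */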
	private getCurrentProfileId(state: any): string {
		return (
			state?.listApiConfigMeta?.find((profile: any) => profile.name === state?.currentApiConfigName)?.id ??
			"default"
		)
	}
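	/**
	 * Recovers from a context-window-exceeded error by forcing condensation of the
	 * conversation history to FORCED_CONTEXT_REDUCTION_PERCENT of the current context
	 * and reporting the condensation result to the UI.
	 */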
	private async handleContextWindowExceededError(): Promise<void> {
		const state = await this.providerRef.deref()?.getState()
		const { profileThresholds = {} } = state ?? {}
		const { contextTokens } = this.getTokenUsage()
		const modelInfo = this.api.getModel().info
		const maxTokens = getModelMaxOutputTokens({
			modelId: this.api.getModel().id,
			model: modelInfo,
			settings: this.apiConfiguration,
		})
		const contextWindow = modelInfo.contextWindow
		// Get the current profile ID using the helper method
		const currentProfileId = this.getCurrentProfileId(state)
		// Log the context window error for debugging
		console.warn(
			`[Task#${this.taskId}] Context window exceeded for model ${this.api.getModel().id}. ` +
				`Current tokens: ${contextTokens}, Context window: ${contextWindow}. ` +
				`Forcing truncation to ${FORCED_CONTEXT_REDUCTION_PERCENT}% of current context.`,
		)
		// Force aggressive truncation by keeping only 75% of the conversation history
		const truncateResult = await manageContext({
			messages: this.apiConversationHistory,
			totalTokens: contextTokens || 0,
			maxTokens,
			contextWindow,
			apiHandler: this.api,
			autoCondenseContext: true,
			autoCondenseContextPercent: FORCED_CONTEXT_REDUCTION_PERCENT,
			systemPrompt: await this.getSystemPrompt(),
			taskId: this.taskId,
			profileThresholds,
			currentProfileId,
		})
		if (truncateResult.messages !== this.apiConversationHistory) {
			await this.overwriteApiConversationHistory(truncateResult.messages)
		}
		if (truncateResult.summary) {
			const { summary, cost, prevContextTokens, newContextTokens = 0 } = truncateResult
			const contextCondense: ContextCondense = { summary, cost, newContextTokens, prevContextTokens }
			await this.say(
				"condense_context",
				undefined /* text */,
				undefined /* images */,
				false /* partial */,
				undefined /* checkpoint */,
				undefined /* progressStatus */,
				{ isNonInteractive: true } /* options */,
				contextCondense,
			)
		}
	}
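	/**
	 * Streams a single API request: honours the shared rate-limit window, condenses
	 * context when needed, builds the cleaned conversation history (plus native tools
	 * when the native tool protocol is active), and on a first-chunk failure either
	 * auto-retries with exponential backoff or asks the user whether to retry.
	 */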
	public async *attemptApiRequest(retryAttempt: number = 0): ApiStream {
		const state = await this.providerRef.deref()?.getState()
		const {
			apiConfiguration,
			autoApprovalEnabled,
			alwaysApproveResubmit,
			requestDelaySeconds,
			mode,
			autoCondenseContext = true,
			autoCondenseContextPercent = 100,
			profileThresholds = {},
		} = state ?? {}
		// Get condensing configuration for automatic triggers.
		const customCondensingPrompt = state?.customCondensingPrompt
		const condensingApiConfigId = state?.condensingApiConfigId
		const listApiConfigMeta = state?.listApiConfigMeta
		// Determine API handler to use for condensing.
		let condensingApiHandler: ApiHandler | undefined
		if (condensingApiConfigId && listApiConfigMeta && Array.isArray(listApiConfigMeta)) {
			// Find matching config by ID
			const matchingConfig = listApiConfigMeta.find((config) => config.id === condensingApiConfigId)
			if (matchingConfig) {
				const profile = await this.providerRef.deref()?.providerSettingsManager.getProfile({
					id: condensingApiConfigId,
				})
				// Ensure profile and apiProvider exist before trying to build handler.
				if (profile && profile.apiProvider) {
					condensingApiHandler = buildApiHandler(profile)
				}
			}
		}
		let rateLimitDelay = 0
		// Use the shared timestamp so that subtasks respect the same rate-limit
		// window as their parent tasks.
		if (Task.lastGlobalApiRequestTime) {
			const now = performance.now()
			const timeSinceLastRequest = now - Task.lastGlobalApiRequestTime
			const rateLimit = apiConfiguration?.rateLimitSeconds || 0
			rateLimitDelay = Math.ceil(Math.min(rateLimit, Math.max(0, rateLimit * 1000 - timeSinceLastRequest) / 1000))
		}
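		// Illustrative example (assumed values): with rateLimitSeconds = 10 and 4,200 ms elapsed since the
		// last request, the remaining wait is max(0, 10_000 - 4_200) / 1000 = 5.8 s, so
		// rateLimitDelay = ceil(min(10, 5.8)) = 6 seconds.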
		// Only show rate limiting message if we're not retrying. If retrying, we'll include the delay there.
		if (rateLimitDelay > 0 && retryAttempt === 0) {
			// Show countdown timer
			for (let i = rateLimitDelay; i > 0; i--) {
				const delayMessage = `Rate limiting for ${i} seconds...`
				await this.say("api_req_retry_delayed", delayMessage, undefined, true)
				await delay(1000)
			}
		}
		// Update last request time before making the request so that subsequent
		// requests, even from new subtasks, will honour the provider's rate-limit.
		Task.lastGlobalApiRequestTime = performance.now()
		const systemPrompt = await this.getSystemPrompt()
		const { contextTokens } = this.getTokenUsage()
		if (contextTokens) {
			const modelInfo = this.api.getModel().info
			const maxTokens = getModelMaxOutputTokens({
				modelId: this.api.getModel().id,
				model: modelInfo,
				settings: this.apiConfiguration,
			})
			const contextWindow = modelInfo.contextWindow
			// Get the current profile ID using the helper method
			const currentProfileId = this.getCurrentProfileId(state)
			const truncateResult = await manageContext({
				messages: this.apiConversationHistory,
				totalTokens: contextTokens,
				maxTokens,
				contextWindow,
				apiHandler: this.api,
				autoCondenseContext,
				autoCondenseContextPercent,
				systemPrompt,
				taskId: this.taskId,
				customCondensingPrompt,
				condensingApiHandler,
				profileThresholds,
				currentProfileId,
			})
			if (truncateResult.messages !== this.apiConversationHistory) {
				await this.overwriteApiConversationHistory(truncateResult.messages)
			}
			if (truncateResult.error) {
				await this.say("condense_context_error", truncateResult.error)
			} else if (truncateResult.summary) {
				const { summary, cost, prevContextTokens, newContextTokens = 0 } = truncateResult
				const contextCondense: ContextCondense = { summary, cost, newContextTokens, prevContextTokens }
				await this.say(
					"condense_context",
					undefined /* text */,
					undefined /* images */,
					false /* partial */,
					undefined /* checkpoint */,
					undefined /* progressStatus */,
					{ isNonInteractive: true } /* options */,
					contextCondense,
				)
			}
		}
		// Properly type cleaned conversation history to include either standard Anthropic messages
		// or provider-specific reasoning items (for encrypted continuity).
		type ReasoningItemForRequest = {
			type: "reasoning"
			encrypted_content: string
			id?: string
			summary?: any[]
		}
		type CleanConversationMessage = Anthropic.Messages.MessageParam | ReasoningItemForRequest
		const messagesSinceLastSummary = getMessagesSinceLastSummary(this.apiConversationHistory)
		const cleanConversationHistory: CleanConversationMessage[] = maybeRemoveImageBlocks(
			messagesSinceLastSummary,
			this.api,
		).map((msg: ApiMessage): CleanConversationMessage => {
			// Pass through reasoning items as-is (including id if present)
			if (msg.type === "reasoning") {
				return {
					type: "reasoning",
					summary: msg.summary,
					encrypted_content: msg.encrypted_content!,
					...(msg.id ? { id: msg.id } : {}),
				}
			}
			// For regular messages, just return role and content
			return { role: msg.role!, content: msg.content as Anthropic.Messages.ContentBlockParam[] | string }
		})
		// Check auto-approval limits
		const approvalResult = await this.autoApprovalHandler.checkAutoApprovalLimits(
			state,
			this.combineMessages(this.clineMessages.slice(1)),
			async (type, data) => this.ask(type, data),
		)
		if (!approvalResult.shouldProceed) {
			// User did not approve, task should be aborted
			throw new Error("Auto-approval limit reached and user did not approve continuation")
		}
		// Determine if we should include native tools based on:
		// 1. Tool protocol is set to NATIVE
		// 2. Model supports native tools
		const toolProtocol = vscode.workspace.getConfiguration(Package.name).get<ToolProtocol>("toolProtocol", "xml")
		const modelInfo = this.api.getModel().info
		const shouldIncludeTools = toolProtocol === TOOL_PROTOCOL.NATIVE && (modelInfo.supportsNativeTools ?? false)
		// Build complete tools array: native tools + dynamic MCP tools, filtered by mode restrictions
		let allTools: OpenAI.Chat.ChatCompletionTool[] = []
		if (shouldIncludeTools) {
			const provider = this.providerRef.deref()
			const mcpHub = provider?.getMcpHub()
			// Get CodeIndexManager for feature checking
			const { CodeIndexManager } = await import("../../services/code-index/manager")
			const codeIndexManager = CodeIndexManager.getInstance(provider!.context, this.cwd)
			// Build settings object for tool filtering
			// Include browserToolEnabled to filter browser_action when disabled by user
			const filterSettings = {
				todoListEnabled: apiConfiguration?.todoListEnabled ?? true,
				browserToolEnabled: state?.browserToolEnabled ?? true,
			}
			// Filter native tools based on mode restrictions (similar to XML tool filtering)
			const filteredNativeTools = filterNativeToolsForMode(
				nativeTools,
				mode,
				state?.customModes,
				state?.experiments,
				codeIndexManager,
				filterSettings,
			)
			// Filter MCP tools based on mode restrictions
			const mcpTools = getMcpServerTools(mcpHub)
			const filteredMcpTools = filterMcpToolsForMode(mcpTools, mode, state?.customModes, state?.experiments)
			allTools = [...filteredNativeTools, ...filteredMcpTools]
		}
		const metadata: ApiHandlerCreateMessageMetadata = {
			mode: mode,
			taskId: this.taskId,
			// Include tools and tool protocol when using native protocol and model supports it
			...(shouldIncludeTools ? { tools: allTools, tool_choice: "auto", toolProtocol } : {}),
		}
		// The provider accepts reasoning items alongside standard messages; cast to the expected parameter type.
		const stream = this.api.createMessage(
			systemPrompt,
			cleanConversationHistory as unknown as Anthropic.Messages.MessageParam[],
			metadata,
		)
		const iterator = stream[Symbol.asyncIterator]()
		try {
			// Awaiting first chunk to see if it will throw an error.
			this.isWaitingForFirstChunk = true
			const firstChunk = await iterator.next()
			yield firstChunk.value
			this.isWaitingForFirstChunk = false
		} catch (error) {
			this.isWaitingForFirstChunk = false
			const isContextWindowExceededError = checkContextWindowExceededError(error)
			// If it's a context window error and we haven't exceeded max retries for this error type
			if (isContextWindowExceededError && retryAttempt < MAX_CONTEXT_WINDOW_RETRIES) {
				console.warn(
					`[Task#${this.taskId}] Context window exceeded for model ${this.api.getModel().id}. ` +
						`Retry attempt ${retryAttempt + 1}/${MAX_CONTEXT_WINDOW_RETRIES}. ` +
						`Attempting automatic truncation...`,
				)
				await this.handleContextWindowExceededError()
				// Retry the request after handling the context window error
				yield* this.attemptApiRequest(retryAttempt + 1)
				return
			}
			// Note that this api_req_failed ask is unique in that we only present this option if the API hasn't
			// streamed any content yet (i.e., it fails on the first chunk), since that lets the user hit a retry
			// button. If the API fails mid-stream, the task could be in any arbitrary state (some tools may have
			// already executed), so that error is handled differently and requires cancelling the task entirely.
			if (autoApprovalEnabled && alwaysApproveResubmit) {
				let errorMsg
				if (error.error?.metadata?.raw) {
					errorMsg = JSON.stringify(error.error.metadata.raw, null, 2)
				} else if (error.message) {
					errorMsg = error.message
				} else {
					errorMsg = "Unknown error"
				}
				// Apply shared exponential backoff and countdown UX
				await this.backoffAndAnnounce(retryAttempt, error, errorMsg)
				// CRITICAL: Check if the task was aborted during the backoff countdown.
				// This prevents infinite loops when users cancel during auto-retry.
				// Without this check, the recursive call below would continue even after abort.
				if (this.abort) {
					throw new Error(
						`[Task#attemptApiRequest] task ${this.taskId}.${this.instanceId} aborted during retry`,
					)
				}
				// Delegate generator output from the recursive call with
				// incremented retry count.
				yield* this.attemptApiRequest(retryAttempt + 1)
				return
			} else {
				const { response } = await this.ask(
					"api_req_failed",
					error.message ?? JSON.stringify(serializeError(error), null, 2),
				)
				if (response !== "yesButtonClicked") {
					// This will never happen, since if noButtonClicked, we will
					// clear the current task, aborting this instance.
					throw new Error("API request failed")
				}
				await this.say("api_req_retried")
				// Delegate generator output from the recursive call.
				yield* this.attemptApiRequest()
				return
			}
		}
		// No error, so we can continue to yield all remaining chunks.
		// (Needs to be placed outside of try/catch since we want the caller to
		// handle errors rather than api_req_failed, which is reserved for
		// first-chunk failures only.)
		// This delegates to another generator or iterable object. In this case,
		// it's saying "yield all remaining values from this iterator". This
		// effectively passes along all subsequent chunks from the original
		// stream.
		yield* iterator
	}
	// Shared exponential backoff for retries (first-chunk and mid-stream)
	private async backoffAndAnnounce(retryAttempt: number, error: any, header?: string): Promise<void> {
		try {
			const state = await this.providerRef.deref()?.getState()
			const baseDelay = state?.requestDelaySeconds || 5
			let exponentialDelay = Math.min(
				Math.ceil(baseDelay * Math.pow(2, retryAttempt)),
				MAX_EXPONENTIAL_BACKOFF_SECONDS,
			)
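			// Illustrative example (assumed values): with baseDelay = 5 and retryAttempt = 3,
			// exponentialDelay = min(ceil(5 * 2^3), MAX_EXPONENTIAL_BACKOFF_SECONDS) = min(40, cap),
			// i.e. 40 seconds whenever the cap is above 40.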
			// Respect provider rate limit window
			let rateLimitDelay = 0
			const rateLimit = state?.apiConfiguration?.rateLimitSeconds || 0
			if (Task.lastGlobalApiRequestTime && rateLimit > 0) {
				const elapsed = performance.now() - Task.lastGlobalApiRequestTime
				rateLimitDelay = Math.ceil(Math.min(rateLimit, Math.max(0, rateLimit * 1000 - elapsed) / 1000))
			}
			// Prefer RetryInfo on 429 if present
			if (error?.status === 429) {
				const retryInfo = error?.errorDetails?.find(
					(d: any) => d["@type"] === "type.googleapis.com/google.rpc.RetryInfo",
				)
				const match = retryInfo?.retryDelay?.match?.(/^(\d+)s$/)
				if (match) {
					exponentialDelay = Number(match[1]) + 1
				}
			}
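			// Illustrative example (assumed payload): a 429 whose google.rpc.RetryInfo detail reports
			// retryDelay "30s" matches /^(\d+)s$/, so exponentialDelay becomes 30 + 1 = 31 seconds.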
			const finalDelay = Math.max(exponentialDelay, rateLimitDelay)
			if (finalDelay <= 0) return
			// Build header text; fall back to error message if none provided
			let headerText = header
			if (!headerText) {
				if (error?.error?.metadata?.raw) {
					headerText = JSON.stringify(error.error.metadata.raw, null, 2)
				} else if (error?.message) {
					headerText = error.message
				} else {
					headerText = "Unknown error"
				}
			}
			headerText = headerText ? `${headerText}\n\n` : ""
			// Show countdown timer with exponential backoff
			for (let i = finalDelay; i > 0; i--) {
				// Check abort flag during countdown to allow early exit
				if (this.abort) {
					throw new Error(`[Task#${this.taskId}] Aborted during retry countdown`)
				}
				await this.say(
					"api_req_retry_delayed",
					`${headerText}Retry attempt ${retryAttempt + 1}\nRetrying in ${i} seconds...`,
					undefined,
					true,
				)
				await delay(1000)
			}
			await this.say(
				"api_req_retry_delayed",
				`${headerText}Retry attempt ${retryAttempt + 1}\nRetrying now...`,
				undefined,
				false,
			)
		} catch (err) {
			console.error("Exponential backoff failed:", err)
		}
	}
	// Checkpoints
	public async checkpointSave(force: boolean = false, suppressMessage: boolean = false) {
		return checkpointSave(this, force, suppressMessage)
	}
	public async checkpointRestore(options: CheckpointRestoreOptions) {
		return checkpointRestore(this, options)
	}
	public async checkpointDiff(options: CheckpointDiffOptions) {
		return checkpointDiff(this, options)
	}
	// Metrics
	public combineMessages(messages: ClineMessage[]) {
		return combineApiRequests(combineCommandSequences(messages))
	}
	public getTokenUsage(): TokenUsage {
		return getApiMetrics(this.combineMessages(this.clineMessages.slice(1)))
	}
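	// Tool-usage metrics: attempts and failures are tallied per tool name; a recorded
	// failure that carries an error message additionally emits a TaskToolFailed event.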
	public recordToolUsage(toolName: ToolName) {
		if (!this.toolUsage[toolName]) {
			this.toolUsage[toolName] = { attempts: 0, failures: 0 }
		}
		this.toolUsage[toolName].attempts++
	}
	public recordToolError(toolName: ToolName, error?: string) {
		if (!this.toolUsage[toolName]) {
			this.toolUsage[toolName] = { attempts: 0, failures: 0 }
		}
		this.toolUsage[toolName].failures++
		if (error) {
			this.emit(RooCodeEventName.TaskToolFailed, this.taskId, toolName, error)
		}
	}
	// Getters
	public get taskStatus(): TaskStatus {
		if (this.interactiveAsk) {
			return TaskStatus.Interactive
		}
		if (this.resumableAsk) {
			return TaskStatus.Resumable
		}
		if (this.idleAsk) {
			return TaskStatus.Idle
		}
		return TaskStatus.Running
	}
	public get taskAsk(): ClineMessage | undefined {
		return this.idleAsk || this.resumableAsk || this.interactiveAsk
	}
	public get queuedMessages(): QueuedMessage[] {
		return this.messageQueueService.messages
	}
	public get tokenUsage(): TokenUsage | undefined {
		if (this.tokenUsageSnapshot && this.tokenUsageSnapshotAt) {
			return this.tokenUsageSnapshot
		}
		this.tokenUsageSnapshot = this.getTokenUsage()
		this.tokenUsageSnapshotAt = this.clineMessages.at(-1)?.ts
		return this.tokenUsageSnapshot
	}
	public get cwd() {
		return this.workspacePath
	}
	/**
	 * Process any queued messages by dequeuing and submitting them.
	 * This ensures that queued user messages are sent when appropriate,
	 * preventing them from getting stuck in the queue.
	 */
	public processQueuedMessages(): void {
		try {
			if (!this.messageQueueService.isEmpty()) {
				const queued = this.messageQueueService.dequeueMessage()
				if (queued) {
					setTimeout(() => {
						this.submitUserMessage(queued.text, queued.images).catch((err) =>
							console.error(`[Task] Failed to submit queued message:`, err),
						)
					}, 0)
				}
			}
		} catch (e) {
			console.error(`[Task] Queue processing error:`, e)
		}
	}
}