Task.ts

import * as path from "path"
import * as vscode from "vscode"
import os from "os"
import crypto from "crypto"
import EventEmitter from "events"
import { Anthropic } from "@anthropic-ai/sdk"
import OpenAI from "openai"
import delay from "delay"
import pWaitFor from "p-wait-for"
import { serializeError } from "serialize-error"
import { Package } from "../../shared/package"
import { formatToolInvocation } from "../tools/helpers/toolResultFormatting"
import {
	type TaskLike,
	type TaskMetadata,
	type TaskEvents,
	type ProviderSettings,
	type TokenUsage,
	type ToolUsage,
	type ToolName,
	type ContextCondense,
	type ClineMessage,
	type ClineSay,
	type ClineAsk,
	type ToolProgressStatus,
	type HistoryItem,
	type CreateTaskOptions,
	type ModelInfo,
	RooCodeEventName,
	TelemetryEventName,
	TaskStatus,
	TodoItem,
	getApiProtocol,
	getModelId,
	isIdleAsk,
	isInteractiveAsk,
	isResumableAsk,
	isNativeProtocol,
	QueuedMessage,
	DEFAULT_CONSECUTIVE_MISTAKE_LIMIT,
	DEFAULT_CHECKPOINT_TIMEOUT_SECONDS,
	MAX_CHECKPOINT_TIMEOUT_SECONDS,
	MIN_CHECKPOINT_TIMEOUT_SECONDS,
	TOOL_PROTOCOL,
} from "@roo-code/types"
import { TelemetryService } from "@roo-code/telemetry"
import { CloudService, BridgeOrchestrator } from "@roo-code/cloud"
import { resolveToolProtocol } from "../../utils/resolveToolProtocol"
// api
import { ApiHandler, ApiHandlerCreateMessageMetadata, buildApiHandler } from "../../api"
import { ApiStream, GroundingSource } from "../../api/transform/stream"
import { maybeRemoveImageBlocks } from "../../api/transform/image-cleaning"
// shared
import { findLastIndex } from "../../shared/array"
import { combineApiRequests } from "../../shared/combineApiRequests"
import { combineCommandSequences } from "../../shared/combineCommandSequences"
import { t } from "../../i18n"
import { ClineApiReqCancelReason, ClineApiReqInfo } from "../../shared/ExtensionMessage"
import { getApiMetrics, hasTokenUsageChanged } from "../../shared/getApiMetrics"
import { ClineAskResponse } from "../../shared/WebviewMessage"
import { defaultModeSlug, getModeBySlug, getGroupName } from "../../shared/modes"
import { DiffStrategy, type ToolUse, type ToolParamName, toolParamNames } from "../../shared/tools"
import { EXPERIMENT_IDS, experiments } from "../../shared/experiments"
import { getModelMaxOutputTokens } from "../../shared/api"
// services
import { UrlContentFetcher } from "../../services/browser/UrlContentFetcher"
import { BrowserSession } from "../../services/browser/BrowserSession"
import { McpHub } from "../../services/mcp/McpHub"
import { McpServerManager } from "../../services/mcp/McpServerManager"
import { RepoPerTaskCheckpointService } from "../../services/checkpoints"
// integrations
import { DiffViewProvider } from "../../integrations/editor/DiffViewProvider"
import { findToolName } from "../../integrations/misc/export-markdown"
import { RooTerminalProcess } from "../../integrations/terminal/types"
import { TerminalRegistry } from "../../integrations/terminal/TerminalRegistry"
// utils
import { calculateApiCostAnthropic, calculateApiCostOpenAI } from "../../shared/cost"
import { getWorkspacePath } from "../../utils/path"
// prompts
import { formatResponse } from "../prompts/responses"
import { SYSTEM_PROMPT } from "../prompts/system"
import { buildNativeToolsArray } from "./build-tools"
// core modules
import { ToolRepetitionDetector } from "../tools/ToolRepetitionDetector"
import { restoreTodoListForTask } from "../tools/UpdateTodoListTool"
import { FileContextTracker } from "../context-tracking/FileContextTracker"
import { RooIgnoreController } from "../ignore/RooIgnoreController"
import { RooProtectedController } from "../protect/RooProtectedController"
import { type AssistantMessageContent, presentAssistantMessage } from "../assistant-message"
import { AssistantMessageParser } from "../assistant-message/AssistantMessageParser"
import { NativeToolCallParser } from "../assistant-message/NativeToolCallParser"
import { manageContext } from "../context-management"
import { ClineProvider } from "../webview/ClineProvider"
import { MultiSearchReplaceDiffStrategy } from "../diff/strategies/multi-search-replace"
import { MultiFileSearchReplaceDiffStrategy } from "../diff/strategies/multi-file-search-replace"
import {
	type ApiMessage,
	readApiMessages,
	saveApiMessages,
	readTaskMessages,
	saveTaskMessages,
	taskMetadata,
} from "../task-persistence"
import { getEnvironmentDetails } from "../environment/getEnvironmentDetails"
import { checkContextWindowExceededError } from "../context/context-management/context-error-handling"
import {
	type CheckpointDiffOptions,
	type CheckpointRestoreOptions,
	getCheckpointService,
	checkpointSave,
	checkpointRestore,
	checkpointDiff,
} from "../checkpoints"
import { processUserContentMentions } from "../mentions/processUserContentMentions"
import { getMessagesSinceLastSummary, summarizeConversation } from "../condense"
import { MessageQueueService } from "../message-queue/MessageQueueService"
import { AutoApprovalHandler, checkAutoApproval } from "../auto-approval"

const MAX_EXPONENTIAL_BACKOFF_SECONDS = 600 // 10 minutes
const DEFAULT_USAGE_COLLECTION_TIMEOUT_MS = 5000 // 5 seconds
const FORCED_CONTEXT_REDUCTION_PERCENT = 75 // Keep 75% of context (remove 25%) on context window errors
const MAX_CONTEXT_WINDOW_RETRIES = 3 // Maximum retries for context window errors

export interface TaskOptions extends CreateTaskOptions {
	provider: ClineProvider
	apiConfiguration: ProviderSettings
	enableDiff?: boolean
	enableCheckpoints?: boolean
	checkpointTimeout?: number
	enableBridge?: boolean
	fuzzyMatchThreshold?: number
	consecutiveMistakeLimit?: number
	task?: string
	images?: string[]
	historyItem?: HistoryItem
	experiments?: Record<string, boolean>
	startTask?: boolean
	rootTask?: Task
	parentTask?: Task
	taskNumber?: number
	onCreated?: (task: Task) => void
	initialTodos?: TodoItem[]
	workspacePath?: string
}
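
// Illustrative sketch (not part of the original file): a minimal example of how
// these options might be passed when kicking off a new task. The `provider` and
// `apiConfiguration` values are assumed placeholders supplied by the caller, not
// values defined in this file.
//
//   const task = new Task({
//   	provider,
//   	apiConfiguration,
//   	task: "Summarize the open file",
//   	enableDiff: true,
//   	enableCheckpoints: true,
//   })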
export class Task extends EventEmitter<TaskEvents> implements TaskLike {
	readonly taskId: string
	readonly rootTaskId?: string
	readonly parentTaskId?: string
	childTaskId?: string
	readonly instanceId: string
	readonly metadata: TaskMetadata
	todoList?: TodoItem[]
	readonly rootTask: Task | undefined = undefined
	readonly parentTask: Task | undefined = undefined
	readonly taskNumber: number
	readonly workspacePath: string

	/**
	 * The mode associated with this task. Persisted across sessions
	 * to maintain user context when reopening tasks from history.
	 *
	 * ## Lifecycle
	 *
	 * ### For new tasks:
	 * 1. Initially `undefined` during construction
	 * 2. Asynchronously initialized from provider state via `initializeTaskMode()`
	 * 3. Falls back to `defaultModeSlug` if provider state is unavailable
	 *
	 * ### For history items:
	 * 1. Immediately set from `historyItem.mode` during construction
	 * 2. Falls back to `defaultModeSlug` if mode is not stored in history
	 *
	 * ## Important
	 * This property should NOT be accessed directly until `taskModeReady` promise resolves.
	 * Use `getTaskMode()` for async access or `taskMode` getter for sync access after initialization.
	 *
	 * @private
	 * @see {@link getTaskMode} - For safe async access
	 * @see {@link taskMode} - For sync access after initialization
	 * @see {@link waitForModeInitialization} - To ensure initialization is complete
	 */
	private _taskMode: string | undefined

	/**
	 * Promise that resolves when the task mode has been initialized.
	 * This ensures async mode initialization completes before the task is used.
	 *
	 * ## Purpose
	 * - Prevents race conditions when accessing task mode
	 * - Ensures provider state is properly loaded before mode-dependent operations
	 * - Provides a synchronization point for async initialization
	 *
	 * ## Resolution timing
	 * - For history items: Resolves immediately (sync initialization)
	 * - For new tasks: Resolves after provider state is fetched (async initialization)
	 *
	 * @private
	 * @see {@link waitForModeInitialization} - Public method to await this promise
	 */
	private taskModeReady: Promise<void>

	providerRef: WeakRef<ClineProvider>
	private readonly globalStoragePath: string
	abort: boolean = false
	currentRequestAbortController?: AbortController

	// TaskStatus
	idleAsk?: ClineMessage
	resumableAsk?: ClineMessage
	interactiveAsk?: ClineMessage

	didFinishAbortingStream = false
	abandoned = false
	abortReason?: ClineApiReqCancelReason
	isInitialized = false
	isPaused: boolean = false
	pausedModeSlug: string = defaultModeSlug
	private pauseInterval: NodeJS.Timeout | undefined

	// API
	apiConfiguration: ProviderSettings
	api: ApiHandler
	private static lastGlobalApiRequestTime?: number
	private autoApprovalHandler: AutoApprovalHandler

	/**
	 * Reset the global API request timestamp. This should only be used for testing.
	 * @internal
	 */
	static resetGlobalApiRequestTime(): void {
		Task.lastGlobalApiRequestTime = undefined
	}
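
	// Illustrative sketch (not part of the original file): in a test suite the
	// global rate-limit timestamp could be cleared between cases so one test's
	// request timing does not leak into the next. The test hook shown is an
	// assumed, framework-dependent placeholder.
	//
	//   beforeEach(() => {
	//   	Task.resetGlobalApiRequestTime()
	//   })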
	toolRepetitionDetector: ToolRepetitionDetector
	rooIgnoreController?: RooIgnoreController
	rooProtectedController?: RooProtectedController
	fileContextTracker: FileContextTracker
	urlContentFetcher: UrlContentFetcher
	terminalProcess?: RooTerminalProcess

	// Computer User
	browserSession: BrowserSession

	// Editing
	diffViewProvider: DiffViewProvider
	diffStrategy?: DiffStrategy
	diffEnabled: boolean = false
	fuzzyMatchThreshold: number
	didEditFile: boolean = false

	// LLM Messages & Chat Messages
	apiConversationHistory: ApiMessage[] = []
	clineMessages: ClineMessage[] = []

	// Ask
	private askResponse?: ClineAskResponse
	private askResponseText?: string
	private askResponseImages?: string[]
	public lastMessageTs?: number

	// Tool Use
	consecutiveMistakeCount: number = 0
	consecutiveMistakeLimit: number
	consecutiveMistakeCountForApplyDiff: Map<string, number> = new Map()
	toolUsage: ToolUsage = {}

	// Checkpoints
	enableCheckpoints: boolean
	checkpointTimeout: number
	checkpointService?: RepoPerTaskCheckpointService
	checkpointServiceInitializing = false

	// Task Bridge
	enableBridge: boolean

	// Message Queue Service
	public readonly messageQueueService: MessageQueueService
	private messageQueueStateChangedHandler: (() => void) | undefined

	// Streaming
	isWaitingForFirstChunk = false
	isStreaming = false
	currentStreamingContentIndex = 0
	currentStreamingDidCheckpoint = false
	assistantMessageContent: AssistantMessageContent[] = []
	presentAssistantMessageLocked = false
	presentAssistantMessageHasPendingUpdates = false
	userMessageContent: (Anthropic.TextBlockParam | Anthropic.ImageBlockParam | Anthropic.ToolResultBlockParam)[] = []
	userMessageContentReady = false
	didRejectTool = false
	didAlreadyUseTool = false
	didToolFailInCurrentTurn = false
	didCompleteReadingStream = false
	assistantMessageParser?: AssistantMessageParser
	private providerProfileChangeListener?: (config: { name: string; provider?: string }) => void

	// Native tool call streaming state (track which index each tool is at)
	private streamingToolCallIndices: Map<string, number> = new Map()

	// Cached model info for current streaming session (set at start of each API request)
	// This prevents excessive getModel() calls during tool execution
	cachedStreamingModel?: { id: string; info: ModelInfo }

	// Token Usage Cache
	private tokenUsageSnapshot?: TokenUsage
	private tokenUsageSnapshotAt?: number

	// Cloud Sync Tracking
	private cloudSyncedMessageTimestamps: Set<number> = new Set()
	constructor({
		provider,
		apiConfiguration,
		enableDiff = false,
		enableCheckpoints = true,
		checkpointTimeout = DEFAULT_CHECKPOINT_TIMEOUT_SECONDS,
		enableBridge = false,
		fuzzyMatchThreshold = 1.0,
		consecutiveMistakeLimit = DEFAULT_CONSECUTIVE_MISTAKE_LIMIT,
		task,
		images,
		historyItem,
		experiments: experimentsConfig,
		startTask = true,
		rootTask,
		parentTask,
		taskNumber = -1,
		onCreated,
		initialTodos,
		workspacePath,
	}: TaskOptions) {
		super()

		if (startTask && !task && !images && !historyItem) {
			throw new Error("Either historyItem or task/images must be provided")
		}

		if (
			!checkpointTimeout ||
			checkpointTimeout > MAX_CHECKPOINT_TIMEOUT_SECONDS ||
			checkpointTimeout < MIN_CHECKPOINT_TIMEOUT_SECONDS
		) {
			throw new Error(
				"checkpointTimeout must be between " +
					MIN_CHECKPOINT_TIMEOUT_SECONDS +
					" and " +
					MAX_CHECKPOINT_TIMEOUT_SECONDS +
					" seconds",
			)
		}

		this.taskId = historyItem ? historyItem.id : crypto.randomUUID()
		this.rootTaskId = historyItem ? historyItem.rootTaskId : rootTask?.taskId
		this.parentTaskId = historyItem ? historyItem.parentTaskId : parentTask?.taskId
		this.childTaskId = undefined
		this.metadata = {
			task: historyItem ? historyItem.task : task,
			images: historyItem ? [] : images,
		}
		// The normal use case here is retrying a similar history task in a new workspace.
		this.workspacePath = parentTask
			? parentTask.workspacePath
			: (workspacePath ?? getWorkspacePath(path.join(os.homedir(), "Desktop")))
		this.instanceId = crypto.randomUUID().slice(0, 8)
		this.taskNumber = -1
		this.rooIgnoreController = new RooIgnoreController(this.cwd)
		this.rooProtectedController = new RooProtectedController(this.cwd)
		this.fileContextTracker = new FileContextTracker(provider, this.taskId)

		this.rooIgnoreController.initialize().catch((error) => {
			console.error("Failed to initialize RooIgnoreController:", error)
		})

		this.apiConfiguration = apiConfiguration
		this.api = buildApiHandler(apiConfiguration)
		this.autoApprovalHandler = new AutoApprovalHandler()
		this.urlContentFetcher = new UrlContentFetcher(provider.context)

		this.browserSession = new BrowserSession(provider.context, (isActive: boolean) => {
			// Add a message to indicate browser session status change
			this.say("browser_session_status", isActive ? "Browser session opened" : "Browser session closed")

			// Broadcast to browser panel
			this.broadcastBrowserSessionUpdate()

			// When a browser session becomes active, automatically open/reveal the Browser Session tab
			if (isActive) {
				try {
					// Lazy-load to avoid circular imports at module load time
					const { BrowserSessionPanelManager } = require("../webview/BrowserSessionPanelManager")
					const providerRef = this.providerRef.deref()

					if (providerRef) {
						BrowserSessionPanelManager.getInstance(providerRef)
							.show()
							.catch(() => {})
					}
				} catch (err) {
					console.error("[Task] Failed to auto-open Browser Session panel:", err)
				}
			}
		})

		this.diffEnabled = enableDiff
		this.fuzzyMatchThreshold = fuzzyMatchThreshold
		this.consecutiveMistakeLimit = consecutiveMistakeLimit ?? DEFAULT_CONSECUTIVE_MISTAKE_LIMIT
		this.providerRef = new WeakRef(provider)
		this.globalStoragePath = provider.context.globalStorageUri.fsPath
		this.diffViewProvider = new DiffViewProvider(this.cwd, this)
		this.enableCheckpoints = enableCheckpoints
		this.checkpointTimeout = checkpointTimeout
		this.enableBridge = enableBridge
		this.parentTask = parentTask
		this.taskNumber = taskNumber

		// Store the task's mode when it's created.
		// For history items, use the stored mode; for new tasks, we'll set it
		// after getting state.
		if (historyItem) {
			this._taskMode = historyItem.mode || defaultModeSlug
			this.taskModeReady = Promise.resolve()
			TelemetryService.instance.captureTaskRestarted(this.taskId)
		} else {
			// For new tasks, don't set the mode yet - wait for async initialization.
			this._taskMode = undefined
			this.taskModeReady = this.initializeTaskMode(provider)
			TelemetryService.instance.captureTaskCreated(this.taskId)
		}

		// Initialize the assistant message parser only for XML protocol.
		// For native protocol, tool calls come as tool_call chunks, not XML.
		// experiments is always provided via TaskOptions (defaults to experimentDefault in provider)
		const modelInfo = this.api.getModel().info
		const toolProtocol = resolveToolProtocol(this.apiConfiguration, modelInfo)
		this.assistantMessageParser = toolProtocol !== "native" ? new AssistantMessageParser() : undefined

		this.messageQueueService = new MessageQueueService()

		this.messageQueueStateChangedHandler = () => {
			this.emit(RooCodeEventName.TaskUserMessage, this.taskId)
			this.providerRef.deref()?.postStateToWebview()
		}

		this.messageQueueService.on("stateChanged", this.messageQueueStateChangedHandler)

		// Listen for provider profile changes to update parser state
		this.setupProviderProfileChangeListener(provider)

		// Only set up diff strategy if diff is enabled.
		if (this.diffEnabled) {
			// Default to old strategy, will be updated if experiment is enabled.
			this.diffStrategy = new MultiSearchReplaceDiffStrategy(this.fuzzyMatchThreshold)

			// Check experiment asynchronously and update strategy if needed.
			provider.getState().then((state) => {
				const isMultiFileApplyDiffEnabled = experiments.isEnabled(
					state.experiments ?? {},
					EXPERIMENT_IDS.MULTI_FILE_APPLY_DIFF,
				)

				if (isMultiFileApplyDiffEnabled) {
					this.diffStrategy = new MultiFileSearchReplaceDiffStrategy(this.fuzzyMatchThreshold)
				}
			})
		}

		this.toolRepetitionDetector = new ToolRepetitionDetector(this.consecutiveMistakeLimit)

		// Initialize todo list if provided
		if (initialTodos && initialTodos.length > 0) {
			this.todoList = initialTodos
		}

		onCreated?.(this)

		if (startTask) {
			if (task || images) {
				this.startTask(task, images)
			} else if (historyItem) {
				this.resumeTaskFromHistory()
			} else {
				throw new Error("Either historyItem or task/images must be provided")
			}
		}
	}
	/**
	 * Initialize the task mode from the provider state.
	 * This method handles async initialization with proper error handling.
	 *
	 * ## Flow
	 * 1. Attempts to fetch the current mode from provider state
	 * 2. Sets `_taskMode` to the fetched mode or `defaultModeSlug` if unavailable
	 * 3. Handles errors gracefully by falling back to default mode
	 * 4. Logs any initialization errors for debugging
	 *
	 * ## Error handling
	 * - Network failures when fetching provider state
	 * - Provider not yet initialized
	 * - Invalid state structure
	 *
	 * All errors result in fallback to `defaultModeSlug` to ensure task can proceed.
	 *
	 * @private
	 * @param provider - The ClineProvider instance to fetch state from
	 * @returns Promise that resolves when initialization is complete
	 */
	private async initializeTaskMode(provider: ClineProvider): Promise<void> {
		try {
			const state = await provider.getState()
			this._taskMode = state?.mode || defaultModeSlug
		} catch (error) {
			// If there's an error getting state, use the default mode
			this._taskMode = defaultModeSlug

			// Use the provider's log method for better error visibility
			const errorMessage = `Failed to initialize task mode: ${error instanceof Error ? error.message : String(error)}`
			provider.log(errorMessage)
		}
	}

	/**
	 * Sets up a listener for provider profile changes to automatically update the parser state.
	 * This ensures the XML/native protocol parser stays synchronized with the current model.
	 *
	 * @private
	 * @param provider - The ClineProvider instance to listen to
	 */
	private setupProviderProfileChangeListener(provider: ClineProvider): void {
		// Only set up listener if provider has the on method (may not exist in test mocks)
		if (typeof provider.on !== "function") {
			return
		}

		this.providerProfileChangeListener = async () => {
			try {
				const newState = await provider.getState()

				if (newState?.apiConfiguration) {
					this.updateApiConfiguration(newState.apiConfiguration)
				}
			} catch (error) {
				console.error(
					`[Task#${this.taskId}.${this.instanceId}] Failed to update API configuration on profile change:`,
					error,
				)
			}
		}

		provider.on(RooCodeEventName.ProviderProfileChanged, this.providerProfileChangeListener)
	}

	/**
	 * Wait for the task mode to be initialized before proceeding.
	 * This method ensures that any operations depending on the task mode
	 * will have access to the correct mode value.
	 *
	 * ## When to use
	 * - Before accessing mode-specific configurations
	 * - When switching between tasks with different modes
	 * - Before operations that depend on mode-based permissions
	 *
	 * ## Example usage
	 * ```typescript
	 * // Wait for mode initialization before mode-dependent operations
	 * await task.waitForModeInitialization();
	 * const mode = task.taskMode; // Now safe to access synchronously
	 *
	 * // Or use with getTaskMode() for a one-liner
	 * const mode = await task.getTaskMode(); // Internally waits for initialization
	 * ```
	 *
	 * @returns Promise that resolves when the task mode is initialized
	 * @public
	 */
	public async waitForModeInitialization(): Promise<void> {
		return this.taskModeReady
	}

	/**
	 * Get the task mode asynchronously, ensuring it's properly initialized.
	 * This is the recommended way to access the task mode as it guarantees
	 * the mode is available before returning.
	 *
	 * ## Async behavior
	 * - Internally waits for `taskModeReady` promise to resolve
	 * - Returns the initialized mode or `defaultModeSlug` as fallback
	 * - Safe to call multiple times - subsequent calls return immediately if already initialized
	 *
	 * ## Example usage
	 * ```typescript
	 * // Safe async access
	 * const mode = await task.getTaskMode();
	 * console.log(`Task is running in ${mode} mode`);
	 *
	 * // Use in conditional logic
	 * if (await task.getTaskMode() === 'architect') {
	 *   // Perform architect-specific operations
	 * }
	 * ```
	 *
	 * @returns Promise resolving to the task mode string
	 * @public
	 */
	public async getTaskMode(): Promise<string> {
		await this.taskModeReady
		return this._taskMode || defaultModeSlug
	}

	/**
	 * Get the task mode synchronously. This should only be used when you're certain
	 * that the mode has already been initialized (e.g., after waitForModeInitialization).
	 *
	 * ## When to use
	 * - In synchronous contexts where async/await is not available
	 * - After explicitly waiting for initialization via `waitForModeInitialization()`
	 * - In event handlers or callbacks where mode is guaranteed to be initialized
	 *
	 * ## Example usage
	 * ```typescript
	 * // After ensuring initialization
	 * await task.waitForModeInitialization();
	 * const mode = task.taskMode; // Safe synchronous access
	 *
	 * // In an event handler after task is started
	 * task.on('taskStarted', () => {
	 *   console.log(`Task started in ${task.taskMode} mode`); // Safe here
	 * });
	 * ```
	 *
	 * @throws {Error} If the mode hasn't been initialized yet
	 * @returns The task mode string
	 * @public
	 */
	public get taskMode(): string {
		if (this._taskMode === undefined) {
			throw new Error("Task mode accessed before initialization. Use getTaskMode() or wait for taskModeReady.")
		}

		return this._taskMode
	}

	static create(options: TaskOptions): [Task, Promise<void>] {
		const instance = new Task({ ...options, startTask: false })
		const { images, task, historyItem } = options
		let promise

		if (images || task) {
			promise = instance.startTask(task, images)
		} else if (historyItem) {
			promise = instance.resumeTaskFromHistory()
		} else {
			throw new Error("Either historyItem or task/images must be provided")
		}

		return [instance, promise]
	}
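
	// Illustrative sketch (not part of the original file): `create()` separates
	// construction from kickoff, so a caller can grab the Task reference and wire
	// up listeners before awaiting the start promise. The surrounding variables
	// and the task text are assumed placeholders.
	//
	//   const [task, started] = Task.create({ provider, apiConfiguration, task: "Add unit tests" })
	//   task.on(RooCodeEventName.TaskAskResponded, () => console.log("user responded"))
	//   await started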
	// API Messages

	private async getSavedApiConversationHistory(): Promise<ApiMessage[]> {
		return readApiMessages({ taskId: this.taskId, globalStoragePath: this.globalStoragePath })
	}

	private async addToApiConversationHistory(message: Anthropic.MessageParam, reasoning?: string) {
		// Capture the encrypted_content / thought signatures from the provider (e.g., OpenAI Responses API, Google GenAI) if present.
		// We only persist data reported by the current response body.
		const handler = this.api as ApiHandler & {
			getResponseId?: () => string | undefined
			getEncryptedContent?: () => { encrypted_content: string; id?: string } | undefined
			getThoughtSignature?: () => string | undefined
			getSummary?: () => any[] | undefined
			getReasoningDetails?: () => any[] | undefined
		}

		if (message.role === "assistant") {
			const responseId = handler.getResponseId?.()
			const reasoningData = handler.getEncryptedContent?.()
			const thoughtSignature = handler.getThoughtSignature?.()
			const reasoningSummary = handler.getSummary?.()
			const reasoningDetails = handler.getReasoningDetails?.()

			// Start from the original assistant message
			const messageWithTs: any = {
				...message,
				...(responseId ? { id: responseId } : {}),
				ts: Date.now(),
			}

			// Store reasoning_details array if present (for models like Gemini 3)
			if (reasoningDetails) {
				messageWithTs.reasoning_details = reasoningDetails
			}

			// Store reasoning: plain text (most providers) or encrypted (OpenAI Native)
			// Skip if reasoning_details already contains the reasoning (to avoid duplication)
			if (reasoning && !reasoningDetails) {
				const reasoningBlock = {
					type: "reasoning",
					text: reasoning,
					summary: reasoningSummary ?? ([] as any[]),
				}

				if (typeof messageWithTs.content === "string") {
					messageWithTs.content = [
						reasoningBlock,
						{ type: "text", text: messageWithTs.content } satisfies Anthropic.Messages.TextBlockParam,
					]
				} else if (Array.isArray(messageWithTs.content)) {
					messageWithTs.content = [reasoningBlock, ...messageWithTs.content]
				} else if (!messageWithTs.content) {
					messageWithTs.content = [reasoningBlock]
				}
			} else if (reasoningData?.encrypted_content) {
				// OpenAI Native encrypted reasoning
				const reasoningBlock = {
					type: "reasoning",
					summary: [] as any[],
					encrypted_content: reasoningData.encrypted_content,
					...(reasoningData.id ? { id: reasoningData.id } : {}),
				}

				if (typeof messageWithTs.content === "string") {
					messageWithTs.content = [
						reasoningBlock,
						{ type: "text", text: messageWithTs.content } satisfies Anthropic.Messages.TextBlockParam,
					]
				} else if (Array.isArray(messageWithTs.content)) {
					messageWithTs.content = [reasoningBlock, ...messageWithTs.content]
				} else if (!messageWithTs.content) {
					messageWithTs.content = [reasoningBlock]
				}
			}

			// If we have a thought signature, append it as a dedicated content block
			// so it can be round-tripped in api_history.json and re-sent on subsequent calls.
			if (thoughtSignature) {
				const thoughtSignatureBlock = {
					type: "thoughtSignature",
					thoughtSignature,
				}

				if (typeof messageWithTs.content === "string") {
					messageWithTs.content = [
						{ type: "text", text: messageWithTs.content } satisfies Anthropic.Messages.TextBlockParam,
						thoughtSignatureBlock,
					]
				} else if (Array.isArray(messageWithTs.content)) {
					messageWithTs.content = [...messageWithTs.content, thoughtSignatureBlock]
				} else if (!messageWithTs.content) {
					messageWithTs.content = [thoughtSignatureBlock]
				}
			}

			this.apiConversationHistory.push(messageWithTs)
		} else {
			const messageWithTs = { ...message, ts: Date.now() }
			this.apiConversationHistory.push(messageWithTs)
		}

		await this.saveApiConversationHistory()
	}
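
	// Illustrative sketch (not part of the original file): for an assistant turn
	// that arrives with plain-text reasoning and a thought signature, the entry
	// pushed above ends up shaped roughly like this (all field values here are
	// placeholders):
	//
	//   {
	//     role: "assistant",
	//     id: "resp_123",            // only present when the handler reports a response id
	//     ts: 1700000000000,
	//     content: [
	//       { type: "reasoning", text: "…", summary: [] },
	//       { type: "text", text: "Here is the plan…" },
	//       { type: "thoughtSignature", thoughtSignature: "…" },
	//     ],
	//   }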
	async overwriteApiConversationHistory(newHistory: ApiMessage[]) {
		this.apiConversationHistory = newHistory
		await this.saveApiConversationHistory()
	}

	private async saveApiConversationHistory() {
		try {
			await saveApiMessages({
				messages: this.apiConversationHistory,
				taskId: this.taskId,
				globalStoragePath: this.globalStoragePath,
			})
		} catch (error) {
			// On the off chance this fails, we don't want to stop the task.
			console.error("Failed to save API conversation history:", error)
		}
	}

	// Cline Messages

	private async getSavedClineMessages(): Promise<ClineMessage[]> {
		return readTaskMessages({ taskId: this.taskId, globalStoragePath: this.globalStoragePath })
	}

	private async addToClineMessages(message: ClineMessage) {
		this.clineMessages.push(message)
		const provider = this.providerRef.deref()
		await provider?.postStateToWebview()
		this.emit(RooCodeEventName.Message, { action: "created", message })
		await this.saveClineMessages()

		const shouldCaptureMessage = message.partial !== true && CloudService.isEnabled()

		if (shouldCaptureMessage) {
			CloudService.instance.captureEvent({
				event: TelemetryEventName.TASK_MESSAGE,
				properties: { taskId: this.taskId, message },
			})

			// Track that this message has been synced to cloud
			this.cloudSyncedMessageTimestamps.add(message.ts)
		}
	}

	public async overwriteClineMessages(newMessages: ClineMessage[]) {
		this.clineMessages = newMessages
		restoreTodoListForTask(this)
		await this.saveClineMessages()

		// When overwriting messages (e.g., during task resume), repopulate the cloud sync tracking Set
		// with timestamps from all non-partial messages to prevent re-syncing previously synced messages
		this.cloudSyncedMessageTimestamps.clear()

		for (const msg of newMessages) {
			if (msg.partial !== true) {
				this.cloudSyncedMessageTimestamps.add(msg.ts)
			}
		}
	}

	private async updateClineMessage(message: ClineMessage) {
		const provider = this.providerRef.deref()
		await provider?.postMessageToWebview({ type: "messageUpdated", clineMessage: message })
		this.emit(RooCodeEventName.Message, { action: "updated", message })

		// Check if we should sync to cloud and haven't already synced this message
		const shouldCaptureMessage = message.partial !== true && CloudService.isEnabled()
		const hasNotBeenSynced = !this.cloudSyncedMessageTimestamps.has(message.ts)

		if (shouldCaptureMessage && hasNotBeenSynced) {
			CloudService.instance.captureEvent({
				event: TelemetryEventName.TASK_MESSAGE,
				properties: { taskId: this.taskId, message },
			})

			// Track that this message has been synced to cloud
			this.cloudSyncedMessageTimestamps.add(message.ts)
		}
	}

	private async saveClineMessages() {
		try {
			await saveTaskMessages({
				messages: this.clineMessages,
				taskId: this.taskId,
				globalStoragePath: this.globalStoragePath,
			})

			const { historyItem, tokenUsage } = await taskMetadata({
				taskId: this.taskId,
				rootTaskId: this.rootTaskId,
				parentTaskId: this.parentTaskId,
				taskNumber: this.taskNumber,
				messages: this.clineMessages,
				globalStoragePath: this.globalStoragePath,
				workspace: this.cwd,
				mode: this._taskMode || defaultModeSlug, // Use the task's own mode, not the current provider mode.
			})

			if (hasTokenUsageChanged(tokenUsage, this.tokenUsageSnapshot)) {
				this.emit(RooCodeEventName.TaskTokenUsageUpdated, this.taskId, tokenUsage)
				this.tokenUsageSnapshot = undefined
				this.tokenUsageSnapshotAt = undefined
			}

			await this.providerRef.deref()?.updateTaskHistory(historyItem)
		} catch (error) {
			console.error("Failed to save Roo messages:", error)
		}
	}

	private findMessageByTimestamp(ts: number): ClineMessage | undefined {
		for (let i = this.clineMessages.length - 1; i >= 0; i--) {
			if (this.clineMessages[i].ts === ts) {
				return this.clineMessages[i]
			}
		}

		return undefined
	}
	// Note that `partial` has three valid states: true (partial message),
	// false (completion of a partial message), and undefined (an individual,
	// complete message).
	async ask(
		type: ClineAsk,
		text?: string,
		partial?: boolean,
		progressStatus?: ToolProgressStatus,
		isProtected?: boolean,
	): Promise<{ response: ClineAskResponse; text?: string; images?: string[] }> {
		// If this Cline instance was aborted by the provider, then the only
		// thing keeping us alive is a promise still running in the background,
		// in which case we don't want to send its result to the webview as it
		// is attached to a new instance of Cline now. So we can safely ignore
		// the result of any active promises, and this class will be
		// deallocated. (Although we set Cline = undefined in provider, that
		// simply removes the reference to this instance, but the instance is
		// still alive until this promise resolves or rejects.)
		if (this.abort) {
			throw new Error(`[RooCode#ask] task ${this.taskId}.${this.instanceId} aborted`)
		}

		let askTs: number

		if (partial !== undefined) {
			const lastMessage = this.clineMessages.at(-1)

			const isUpdatingPreviousPartial =
				lastMessage && lastMessage.partial && lastMessage.type === "ask" && lastMessage.ask === type

			if (partial) {
				if (isUpdatingPreviousPartial) {
					// Existing partial message, so update it.
					lastMessage.text = text
					lastMessage.partial = partial
					lastMessage.progressStatus = progressStatus
					lastMessage.isProtected = isProtected

					// TODO: Be more efficient about saving and posting only new
					// data or one whole message at a time so ignore partial for
					// saves, and only post parts of partial message instead of
					// whole array in new listener.
					this.updateClineMessage(lastMessage)
					// console.log("Task#ask: current ask promise was ignored (#1)")
					throw new Error("Current ask promise was ignored (#1)")
				} else {
					// This is a new partial message, so add it with partial
					// state.
					askTs = Date.now()
					this.lastMessageTs = askTs
					console.log(`Task#ask: new partial ask -> ${type} @ ${askTs}`)
					await this.addToClineMessages({ ts: askTs, type: "ask", ask: type, text, partial, isProtected })
					// console.log("Task#ask: current ask promise was ignored (#2)")
					throw new Error("Current ask promise was ignored (#2)")
				}
			} else {
				if (isUpdatingPreviousPartial) {
					// This is the complete version of a previously partial
					// message, so replace the partial with the complete version.
					this.askResponse = undefined
					this.askResponseText = undefined
					this.askResponseImages = undefined

					// Bug for the history books:
					// In the webview we use the ts as the chatrow key for the
					// virtuoso list. Since we would update this ts right at the
					// end of streaming, it would cause the view to flicker. The
					// key prop has to be stable otherwise react has trouble
					// reconciling items between renders, causing unmounting and
					// remounting of components (flickering).
					// The lesson here is if you see flickering when rendering
					// lists, it's likely because the key prop is not stable.
					// So in this case we must make sure that the message ts is
					// never altered after first setting it.
					askTs = lastMessage.ts
					console.log(`Task#ask: updating previous partial ask -> ${type} @ ${askTs}`)
					this.lastMessageTs = askTs
					lastMessage.text = text
					lastMessage.partial = false
					lastMessage.progressStatus = progressStatus
					lastMessage.isProtected = isProtected
					await this.saveClineMessages()
					this.updateClineMessage(lastMessage)
				} else {
					// This is a new and complete message, so add it like normal.
					this.askResponse = undefined
					this.askResponseText = undefined
					this.askResponseImages = undefined
					askTs = Date.now()
					console.log(`Task#ask: new complete ask -> ${type} @ ${askTs}`)
					this.lastMessageTs = askTs
					await this.addToClineMessages({ ts: askTs, type: "ask", ask: type, text, isProtected })
				}
			}
		} else {
			// This is a new non-partial message, so add it like normal.
			this.askResponse = undefined
			this.askResponseText = undefined
			this.askResponseImages = undefined
			askTs = Date.now()
			console.log(`Task#ask: new complete ask -> ${type} @ ${askTs}`)
			this.lastMessageTs = askTs
			await this.addToClineMessages({ ts: askTs, type: "ask", ask: type, text, isProtected })
		}

		let timeouts: NodeJS.Timeout[] = []
		// Automatically approve or deny the ask according to the user's auto-approval settings.
  890. const provider = this.providerRef.deref()
  891. const state = provider ? await provider.getState() : undefined
  892. const approval = await checkAutoApproval({ state, ask: type, text, isProtected })
  893. if (approval.decision === "approve") {
  894. this.approveAsk()
  895. } else if (approval.decision === "deny") {
  896. this.denyAsk()
  897. } else if (approval.decision === "timeout") {
  898. timeouts.push(
  899. setTimeout(() => {
  900. const { askResponse, text, images } = approval.fn()
  901. this.handleWebviewAskResponse(askResponse, text, images)
  902. }, approval.timeout),
  903. )
  904. }
  905. // The state is mutable if the message is complete and the task will
  906. // block (via the `pWaitFor`).
  907. const isBlocking = !(this.askResponse !== undefined || this.lastMessageTs !== askTs)
  908. const isMessageQueued = !this.messageQueueService.isEmpty()
  909. const isStatusMutable = !partial && isBlocking && !isMessageQueued && approval.decision === "ask"
  910. if (isBlocking) {
  911. console.log(`Task#ask will block -> type: ${type}`)
  912. }
  913. if (isStatusMutable) {
  914. console.log(`Task#ask: status is mutable -> type: ${type}`)
  915. const statusMutationTimeout = 2_000
  916. if (isInteractiveAsk(type)) {
  917. timeouts.push(
  918. setTimeout(() => {
  919. const message = this.findMessageByTimestamp(askTs)
  920. if (message) {
  921. this.interactiveAsk = message
  922. this.emit(RooCodeEventName.TaskInteractive, this.taskId)
  923. provider?.postMessageToWebview({ type: "interactionRequired" })
  924. }
  925. }, statusMutationTimeout),
  926. )
  927. } else if (isResumableAsk(type)) {
  928. timeouts.push(
  929. setTimeout(() => {
  930. const message = this.findMessageByTimestamp(askTs)
  931. if (message) {
  932. this.resumableAsk = message
  933. this.emit(RooCodeEventName.TaskResumable, this.taskId)
  934. }
  935. }, statusMutationTimeout),
  936. )
  937. } else if (isIdleAsk(type)) {
  938. timeouts.push(
  939. setTimeout(() => {
  940. const message = this.findMessageByTimestamp(askTs)
  941. if (message) {
  942. this.idleAsk = message
  943. this.emit(RooCodeEventName.TaskIdle, this.taskId)
  944. }
  945. }, statusMutationTimeout),
  946. )
  947. }
  948. } else if (isMessageQueued) {
  949. console.log(`Task#ask: will process message queue -> type: ${type}`)
  950. const message = this.messageQueueService.dequeueMessage()
  951. if (message) {
  952. // Check if this is a tool approval ask that needs to be handled.
  953. if (
  954. type === "tool" ||
  955. type === "command" ||
  956. type === "browser_action_launch" ||
  957. type === "use_mcp_server"
  958. ) {
  959. // For tool approvals, we need to approve first, then send
  960. // the message if there's text/images.
  961. this.handleWebviewAskResponse("yesButtonClicked", message.text, message.images)
  962. } else {
  963. // For other ask types (like followup or command_output), fulfill the ask
  964. // directly.
  965. this.handleWebviewAskResponse("messageResponse", message.text, message.images)
  966. }
  967. }
  968. }
  969. // Wait for askResponse to be set
  970. await pWaitFor(() => this.askResponse !== undefined || this.lastMessageTs !== askTs, { interval: 100 })
  971. if (this.lastMessageTs !== askTs) {
972. // Could happen if we send multiple asks in a row, e.g. with
  973. // command_output. It's important that when we know an ask could
  974. // fail, it is handled gracefully.
  975. console.log("Task#ask: current ask promise was ignored")
  976. throw new Error("Current ask promise was ignored")
  977. }
  978. const result = { response: this.askResponse!, text: this.askResponseText, images: this.askResponseImages }
  979. this.askResponse = undefined
  980. this.askResponseText = undefined
  981. this.askResponseImages = undefined
  982. // Cancel the timeouts if they are still running.
  983. timeouts.forEach((timeout) => clearTimeout(timeout))
  984. // Switch back to an active state.
  985. if (this.idleAsk || this.resumableAsk || this.interactiveAsk) {
  986. this.idleAsk = undefined
  987. this.resumableAsk = undefined
  988. this.interactiveAsk = undefined
  989. this.emit(RooCodeEventName.TaskActive, this.taskId)
  990. }
  991. this.emit(RooCodeEventName.TaskAskResponded)
  992. return result
  993. }
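/**
 * Records the webview's response to the pending ask. Setting `askResponse`
 * unblocks the `pWaitFor` inside `ask()`. A "messageResponse" also triggers a
 * checkpoint save, and both "messageResponse" and "yesButtonClicked" mark the
 * last unanswered follow-up question as answered.
 */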
  994. handleWebviewAskResponse(askResponse: ClineAskResponse, text?: string, images?: string[]) {
  995. this.askResponse = askResponse
  996. this.askResponseText = text
  997. this.askResponseImages = images
  998. // Create a checkpoint whenever the user sends a message.
  999. // Use allowEmpty=true to ensure a checkpoint is recorded even if there are no file changes.
  1000. // Suppress the checkpoint_saved chat row for this particular checkpoint to keep the timeline clean.
  1001. if (askResponse === "messageResponse") {
  1002. void this.checkpointSave(false, true)
  1003. }
  1004. // Mark the last follow-up question as answered
  1005. if (askResponse === "messageResponse" || askResponse === "yesButtonClicked") {
  1006. // Find the last unanswered follow-up message using findLastIndex
  1007. const lastFollowUpIndex = findLastIndex(
  1008. this.clineMessages,
  1009. (msg) => msg.type === "ask" && msg.ask === "followup" && !msg.isAnswered,
  1010. )
  1011. if (lastFollowUpIndex !== -1) {
  1012. // Mark this follow-up as answered
  1013. this.clineMessages[lastFollowUpIndex].isAnswered = true
  1014. // Save the updated messages
  1015. this.saveClineMessages().catch((error) => {
  1016. console.error("Failed to save answered follow-up state:", error)
  1017. })
  1018. }
  1019. }
  1020. }
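// Convenience wrappers around handleWebviewAskResponse. Illustrative usage,
// assuming `task` is a Task instance: task.approveAsk({ text: "Proceed" }) to
// accept the pending ask, or task.denyAsk() to reject it.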
  1021. public approveAsk({ text, images }: { text?: string; images?: string[] } = {}) {
  1022. this.handleWebviewAskResponse("yesButtonClicked", text, images)
  1023. }
  1024. public denyAsk({ text, images }: { text?: string; images?: string[] } = {}) {
  1025. this.handleWebviewAskResponse("noButtonClicked", text, images)
  1026. }
  1027. /**
  1028. * Updates the API configuration and reinitializes the parser based on the new tool protocol.
  1029. * This should be called when switching between models/profiles with different tool protocols
  1030. * to prevent the parser from being left in an inconsistent state.
  1031. *
  1032. * @param newApiConfiguration - The new API configuration to use
  1033. */
  1034. public updateApiConfiguration(newApiConfiguration: ProviderSettings): void {
  1035. // Update the configuration and rebuild the API handler
  1036. this.apiConfiguration = newApiConfiguration
  1037. this.api = buildApiHandler(newApiConfiguration)
  1038. // Determine what the tool protocol should be
  1039. const modelInfo = this.api.getModel().info
  1040. const protocol = resolveToolProtocol(this.apiConfiguration, modelInfo)
  1041. const shouldUseXmlParser = protocol === "xml"
  1042. // Ensure parser state matches protocol requirement
  1043. const parserStateCorrect =
  1044. (shouldUseXmlParser && this.assistantMessageParser) || (!shouldUseXmlParser && !this.assistantMessageParser)
  1045. if (parserStateCorrect) {
  1046. return
  1047. }
  1048. // Fix parser state
  1049. if (shouldUseXmlParser && !this.assistantMessageParser) {
  1050. this.assistantMessageParser = new AssistantMessageParser()
  1051. } else if (!shouldUseXmlParser && this.assistantMessageParser) {
  1052. this.assistantMessageParser.reset()
  1053. this.assistantMessageParser = undefined
  1054. }
  1055. }
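/**
 * Submits a user message to this task via the webview, optionally switching
 * the provider's mode and/or provider profile first. When the profile changes,
 * the task's API configuration is refreshed so the parser state stays in sync
 * with the selected model. Empty input (no text and no images) is a no-op.
 * Illustrative call (mode slug assumed): await task.submitUserMessage("Run the tests", [], "code")
 */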
  1056. public async submitUserMessage(
  1057. text: string,
  1058. images?: string[],
  1059. mode?: string,
  1060. providerProfile?: string,
  1061. ): Promise<void> {
  1062. try {
  1063. text = (text ?? "").trim()
  1064. images = images ?? []
  1065. if (text.length === 0 && images.length === 0) {
  1066. return
  1067. }
  1068. const provider = this.providerRef.deref()
  1069. if (provider) {
  1070. if (mode) {
  1071. await provider.setMode(mode)
  1072. }
  1073. if (providerProfile) {
  1074. await provider.setProviderProfile(providerProfile)
  1075. // Update this task's API configuration to match the new profile
  1076. // This ensures the parser state is synchronized with the selected model
  1077. const newState = await provider.getState()
  1078. if (newState?.apiConfiguration) {
  1079. this.updateApiConfiguration(newState.apiConfiguration)
  1080. }
  1081. }
  1082. this.emit(RooCodeEventName.TaskUserMessage, this.taskId)
  1083. provider.postMessageToWebview({ type: "invoke", invoke: "sendMessage", text, images })
  1084. } else {
  1085. console.error("[Task#submitUserMessage] Provider reference lost")
  1086. }
  1087. } catch (error) {
  1088. console.error("[Task#submitUserMessage] Failed to submit user message:", error)
  1089. }
  1090. }
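// Forwards a "continue" or "abort" request to the currently tracked terminal
// process, if any.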
  1091. async handleTerminalOperation(terminalOperation: "continue" | "abort") {
  1092. if (terminalOperation === "continue") {
  1093. this.terminalProcess?.continue()
  1094. } else if (terminalOperation === "abort") {
  1095. this.terminalProcess?.abort()
  1096. }
  1097. }
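/**
 * Manually condenses the conversation context: summarizes the API conversation
 * history (optionally with a custom condensing prompt and a dedicated
 * condensing API profile), overwrites the history with the condensed messages,
 * reports the result via a "condense_context" say (or "condense_context_error"
 * on failure), and finally processes any queued messages.
 */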
  1098. public async condenseContext(): Promise<void> {
  1099. const systemPrompt = await this.getSystemPrompt()
  1100. // Get condensing configuration
  1101. const state = await this.providerRef.deref()?.getState()
  1102. // These properties may not exist in the state type yet, but are used for condensing configuration
  1103. const customCondensingPrompt = state?.customCondensingPrompt
  1104. const condensingApiConfigId = state?.condensingApiConfigId
  1105. const listApiConfigMeta = state?.listApiConfigMeta
  1106. // Determine API handler to use
  1107. let condensingApiHandler: ApiHandler | undefined
  1108. if (condensingApiConfigId && listApiConfigMeta && Array.isArray(listApiConfigMeta)) {
  1109. // Find matching config by ID
  1110. const matchingConfig = listApiConfigMeta.find((config) => config.id === condensingApiConfigId)
  1111. if (matchingConfig) {
  1112. const profile = await this.providerRef.deref()?.providerSettingsManager.getProfile({
  1113. id: condensingApiConfigId,
  1114. })
  1115. // Ensure profile and apiProvider exist before trying to build handler
  1116. if (profile && profile.apiProvider) {
  1117. condensingApiHandler = buildApiHandler(profile)
  1118. }
  1119. }
  1120. }
  1121. const { contextTokens: prevContextTokens } = this.getTokenUsage()
  1122. // Determine if we're using native tool protocol for proper message handling
  1123. const modelInfo = this.api.getModel().info
  1124. const protocol = resolveToolProtocol(this.apiConfiguration, modelInfo)
  1125. const useNativeTools = isNativeProtocol(protocol)
  1126. const {
  1127. messages,
  1128. summary,
  1129. cost,
  1130. newContextTokens = 0,
  1131. error,
  1132. } = await summarizeConversation(
  1133. this.apiConversationHistory,
  1134. this.api, // Main API handler (fallback)
  1135. systemPrompt, // Default summarization prompt (fallback)
  1136. this.taskId,
  1137. prevContextTokens,
  1138. false, // manual trigger
  1139. customCondensingPrompt, // User's custom prompt
  1140. condensingApiHandler, // Specific handler for condensing
  1141. useNativeTools, // Pass native tools flag for proper message handling
  1142. )
  1143. if (error) {
  1144. this.say(
  1145. "condense_context_error",
  1146. error,
  1147. undefined /* images */,
  1148. false /* partial */,
  1149. undefined /* checkpoint */,
  1150. undefined /* progressStatus */,
  1151. { isNonInteractive: true } /* options */,
  1152. )
  1153. return
  1154. }
  1155. await this.overwriteApiConversationHistory(messages)
  1156. const contextCondense: ContextCondense = { summary, cost, newContextTokens, prevContextTokens }
  1157. await this.say(
  1158. "condense_context",
  1159. undefined /* text */,
  1160. undefined /* images */,
  1161. false /* partial */,
  1162. undefined /* checkpoint */,
  1163. undefined /* progressStatus */,
  1164. { isNonInteractive: true } /* options */,
  1165. contextCondense,
  1166. )
  1167. // Process any queued messages after condensing completes
  1168. this.processQueuedMessages()
  1169. }
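/**
 * Appends a "say" message to the chat. When `partial` is provided, streams
 * updates into the last partial message of the same type (or starts a new
 * partial one) and finalizes it when the complete version arrives; otherwise
 * the message is added directly. Non-interactive says do not update
 * `lastMessageTs`, so they cannot interrupt a pending ask. Throws if the task
 * has been aborted.
 */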
  1170. async say(
  1171. type: ClineSay,
  1172. text?: string,
  1173. images?: string[],
  1174. partial?: boolean,
  1175. checkpoint?: Record<string, unknown>,
  1176. progressStatus?: ToolProgressStatus,
  1177. options: {
  1178. isNonInteractive?: boolean
  1179. } = {},
  1180. contextCondense?: ContextCondense,
  1181. ): Promise<undefined> {
  1182. if (this.abort) {
  1183. throw new Error(`[RooCode#say] task ${this.taskId}.${this.instanceId} aborted`)
  1184. }
  1185. if (partial !== undefined) {
  1186. const lastMessage = this.clineMessages.at(-1)
  1187. const isUpdatingPreviousPartial =
  1188. lastMessage && lastMessage.partial && lastMessage.type === "say" && lastMessage.say === type
  1189. if (partial) {
  1190. if (isUpdatingPreviousPartial) {
  1191. // Existing partial message, so update it.
  1192. lastMessage.text = text
  1193. lastMessage.images = images
  1194. lastMessage.partial = partial
  1195. lastMessage.progressStatus = progressStatus
  1196. this.updateClineMessage(lastMessage)
  1197. } else {
  1198. // This is a new partial message, so add it with partial state.
  1199. const sayTs = Date.now()
  1200. if (!options.isNonInteractive) {
  1201. this.lastMessageTs = sayTs
  1202. }
  1203. await this.addToClineMessages({
  1204. ts: sayTs,
  1205. type: "say",
  1206. say: type,
  1207. text,
  1208. images,
  1209. partial,
  1210. contextCondense,
  1211. })
  1212. }
  1213. } else {
1214. // We now have the complete version of a previously
1215. // partial message, so replace the partial message
1216. // with the complete version.
  1217. if (isUpdatingPreviousPartial) {
  1218. if (!options.isNonInteractive) {
  1219. this.lastMessageTs = lastMessage.ts
  1220. }
  1221. lastMessage.text = text
  1222. lastMessage.images = images
  1223. lastMessage.partial = false
  1224. lastMessage.progressStatus = progressStatus
  1225. // Instead of streaming partialMessage events, we do a save
  1226. // and post like normal to persist to disk.
  1227. await this.saveClineMessages()
  1228. // More performant than an entire `postStateToWebview`.
  1229. this.updateClineMessage(lastMessage)
  1230. } else {
  1231. // This is a new and complete message, so add it like normal.
  1232. const sayTs = Date.now()
  1233. if (!options.isNonInteractive) {
  1234. this.lastMessageTs = sayTs
  1235. }
  1236. await this.addToClineMessages({
  1237. ts: sayTs,
  1238. type: "say",
  1239. say: type,
  1240. text,
  1241. images,
  1242. contextCondense,
  1243. })
  1244. }
  1245. }
  1246. } else {
  1247. // This is a new non-partial message, so add it like normal.
  1248. const sayTs = Date.now()
1249. // A "non-interactive" message is one that the user
  1250. // does not need to respond to. We don't want these message types
  1251. // to trigger an update to `lastMessageTs` since they can be created
  1252. // asynchronously and could interrupt a pending ask.
  1253. if (!options.isNonInteractive) {
  1254. this.lastMessageTs = sayTs
  1255. }
  1256. await this.addToClineMessages({
  1257. ts: sayTs,
  1258. type: "say",
  1259. say: type,
  1260. text,
  1261. images,
  1262. checkpoint,
  1263. contextCondense,
  1264. })
  1265. }
  1266. // Broadcast browser session updates to panel when browser-related messages are added
  1267. if (type === "browser_action" || type === "browser_action_result" || type === "browser_session_status") {
  1268. this.broadcastBrowserSessionUpdate()
  1269. }
  1270. }
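// Reports a missing required tool parameter to the user and returns the
// formatted tool error (for the resolved tool protocol) to send back to the
// model.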
  1271. async sayAndCreateMissingParamError(toolName: ToolName, paramName: string, relPath?: string) {
  1272. await this.say(
  1273. "error",
  1274. `Roo tried to use ${toolName}${
  1275. relPath ? ` for '${relPath.toPosix()}'` : ""
  1276. } without value for required parameter '${paramName}'. Retrying...`,
  1277. )
  1278. const modelInfo = this.api.getModel().info
  1279. const state = await this.providerRef.deref()?.getState()
  1280. const toolProtocol = resolveToolProtocol(this.apiConfiguration, modelInfo)
  1281. return formatResponse.toolError(formatResponse.missingToolParameterError(paramName, toolProtocol))
  1282. }
  1283. // Lifecycle
  1284. // Start / Resume / Abort / Dispose
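/**
 * Starts a brand-new task: subscribes to the bridge when enabled, resets
 * `clineMessages` and `apiConversationHistory`, posts the initial "text" say,
 * and kicks off the task loop with the <task> prompt plus any images.
 */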
  1285. private async startTask(task?: string, images?: string[]): Promise<void> {
  1286. if (this.enableBridge) {
  1287. try {
  1288. await BridgeOrchestrator.subscribeToTask(this)
  1289. } catch (error) {
  1290. console.error(
  1291. `[Task#startTask] BridgeOrchestrator.subscribeToTask() failed: ${error instanceof Error ? error.message : String(error)}`,
  1292. )
  1293. }
  1294. }
  1295. // `conversationHistory` (for API) and `clineMessages` (for webview)
  1296. // need to be in sync.
  1297. // If the extension process were killed, then on restart the
  1298. // `clineMessages` might not be empty, so we need to set it to [] when
  1299. // we create a new Cline client (otherwise webview would show stale
  1300. // messages from previous session).
  1301. this.clineMessages = []
  1302. this.apiConversationHistory = []
  1303. // The todo list is already set in the constructor if initialTodos were provided
  1304. // No need to add any messages - the todoList property is already set
  1305. await this.providerRef.deref()?.postStateToWebview()
  1306. await this.say("text", task, images)
  1307. this.isInitialized = true
  1308. let imageBlocks: Anthropic.ImageBlockParam[] = formatResponse.imageBlocks(images)
  1309. // Task starting
  1310. await this.initiateTaskLoop([
  1311. {
  1312. type: "text",
  1313. text: `<task>\n${task}\n</task>`,
  1314. },
  1315. ...imageBlocks,
  1316. ])
  1317. }
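/**
 * Resumes a task from persisted history: prunes stale resume messages,
 * trailing reasoning-only messages, and incomplete api_req_started entries,
 * asks the user whether to resume, and reconciles the saved API conversation
 * (converting tool blocks to text for the XML protocol and filling in
 * interrupted tool results) before re-entering the task loop.
 */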
  1318. private async resumeTaskFromHistory() {
  1319. if (this.enableBridge) {
  1320. try {
  1321. await BridgeOrchestrator.subscribeToTask(this)
  1322. } catch (error) {
  1323. console.error(
  1324. `[Task#resumeTaskFromHistory] BridgeOrchestrator.subscribeToTask() failed: ${error instanceof Error ? error.message : String(error)}`,
  1325. )
  1326. }
  1327. }
  1328. const modifiedClineMessages = await this.getSavedClineMessages()
  1329. // Remove any resume messages that may have been added before.
  1330. const lastRelevantMessageIndex = findLastIndex(
  1331. modifiedClineMessages,
  1332. (m) => !(m.ask === "resume_task" || m.ask === "resume_completed_task"),
  1333. )
  1334. if (lastRelevantMessageIndex !== -1) {
  1335. modifiedClineMessages.splice(lastRelevantMessageIndex + 1)
  1336. }
  1337. // Remove any trailing reasoning-only UI messages that were not part of the persisted API conversation
  1338. while (modifiedClineMessages.length > 0) {
  1339. const last = modifiedClineMessages[modifiedClineMessages.length - 1]
  1340. if (last.type === "say" && last.say === "reasoning") {
  1341. modifiedClineMessages.pop()
  1342. } else {
  1343. break
  1344. }
  1345. }
1346. // Since we don't use `api_req_finished` anymore, we need to check if the
1347. // last `api_req_started` has a cost value. If it doesn't and there is no
1348. // cancellation reason to present, we remove it since it indicates
1349. // an API request without any partial content streamed.
  1350. const lastApiReqStartedIndex = findLastIndex(
  1351. modifiedClineMessages,
  1352. (m) => m.type === "say" && m.say === "api_req_started",
  1353. )
  1354. if (lastApiReqStartedIndex !== -1) {
  1355. const lastApiReqStarted = modifiedClineMessages[lastApiReqStartedIndex]
  1356. const { cost, cancelReason }: ClineApiReqInfo = JSON.parse(lastApiReqStarted.text || "{}")
  1357. if (cost === undefined && cancelReason === undefined) {
  1358. modifiedClineMessages.splice(lastApiReqStartedIndex, 1)
  1359. }
  1360. }
  1361. await this.overwriteClineMessages(modifiedClineMessages)
  1362. this.clineMessages = await this.getSavedClineMessages()
  1363. // Now present the cline messages to the user and ask if they want to
  1364. // resume (NOTE: we ran into a bug before where the
1365. // apiConversationHistory wouldn't be initialized when opening an old
  1366. // task, and it was because we were waiting for resume).
  1367. // This is important in case the user deletes messages without resuming
  1368. // the task first.
  1369. this.apiConversationHistory = await this.getSavedApiConversationHistory()
  1370. const lastClineMessage = this.clineMessages
  1371. .slice()
  1372. .reverse()
  1373. .find((m) => !(m.ask === "resume_task" || m.ask === "resume_completed_task")) // Could be multiple resume tasks.
  1374. let askType: ClineAsk
  1375. if (lastClineMessage?.ask === "completion_result") {
  1376. askType = "resume_completed_task"
  1377. } else {
  1378. askType = "resume_task"
  1379. }
  1380. this.isInitialized = true
  1381. const { response, text, images } = await this.ask(askType) // Calls `postStateToWebview`.
  1382. let responseText: string | undefined
  1383. let responseImages: string[] | undefined
  1384. if (response === "messageResponse") {
  1385. await this.say("user_feedback", text, images)
  1386. responseText = text
  1387. responseImages = images
  1388. }
  1389. // Make sure that the api conversation history can be resumed by the API,
  1390. // even if it goes out of sync with cline messages.
  1391. let existingApiConversationHistory: ApiMessage[] = await this.getSavedApiConversationHistory()
  1392. // v2.0 xml tags refactor caveat: since we don't use tools anymore for XML protocol,
  1393. // we need to replace all tool use blocks with a text block since the API disallows
  1394. // conversations with tool uses and no tool schema.
  1395. // For native protocol, we preserve tool_use and tool_result blocks as they're expected by the API.
  1396. const state = await this.providerRef.deref()?.getState()
  1397. const protocol = resolveToolProtocol(this.apiConfiguration, this.api.getModel().info)
  1398. const useNative = isNativeProtocol(protocol)
  1399. // Only convert tool blocks to text for XML protocol
  1400. // For native protocol, the API expects proper tool_use/tool_result structure
  1401. if (!useNative) {
  1402. const conversationWithoutToolBlocks = existingApiConversationHistory.map((message) => {
  1403. if (Array.isArray(message.content)) {
  1404. const newContent = message.content.map((block) => {
  1405. if (block.type === "tool_use") {
  1406. // Format tool invocation based on protocol
  1407. const params = block.input as Record<string, any>
  1408. const formattedText = formatToolInvocation(block.name, params, protocol)
  1409. return {
  1410. type: "text",
  1411. text: formattedText,
  1412. } as Anthropic.Messages.TextBlockParam
  1413. } else if (block.type === "tool_result") {
  1414. // Convert block.content to text block array, removing images
  1415. const contentAsTextBlocks = Array.isArray(block.content)
  1416. ? block.content.filter((item) => item.type === "text")
  1417. : [{ type: "text", text: block.content }]
  1418. const textContent = contentAsTextBlocks.map((item) => item.text).join("\n\n")
  1419. const toolName = findToolName(block.tool_use_id, existingApiConversationHistory)
  1420. return {
  1421. type: "text",
  1422. text: `[${toolName} Result]\n\n${textContent}`,
  1423. } as Anthropic.Messages.TextBlockParam
  1424. }
  1425. return block
  1426. })
  1427. return { ...message, content: newContent }
  1428. }
  1429. return message
  1430. })
  1431. existingApiConversationHistory = conversationWithoutToolBlocks
  1432. }
  1433. // FIXME: remove tool use blocks altogether
  1434. // if the last message is an assistant message, we need to check if there's tool use since every tool use has to have a tool response
  1435. // if there's no tool use and only a text block, then we can just add a user message
  1436. // (note this isn't relevant anymore since we use custom tool prompts instead of tool use blocks, but this is here for legacy purposes in case users resume old tasks)
1437. // if the last message is a user message, we need to get the assistant message before it to see if it made tool calls, and if so, fill in the remaining tool responses with 'interrupted'
  1438. let modifiedOldUserContent: Anthropic.Messages.ContentBlockParam[] // either the last message if its user message, or the user message before the last (assistant) message
  1439. let modifiedApiConversationHistory: ApiMessage[] // need to remove the last user message to replace with new modified user message
  1440. if (existingApiConversationHistory.length > 0) {
  1441. const lastMessage = existingApiConversationHistory[existingApiConversationHistory.length - 1]
  1442. if (lastMessage.role === "assistant") {
  1443. const content = Array.isArray(lastMessage.content)
  1444. ? lastMessage.content
  1445. : [{ type: "text", text: lastMessage.content }]
  1446. const hasToolUse = content.some((block) => block.type === "tool_use")
  1447. if (hasToolUse) {
  1448. const toolUseBlocks = content.filter(
  1449. (block) => block.type === "tool_use",
  1450. ) as Anthropic.Messages.ToolUseBlock[]
  1451. const toolResponses: Anthropic.ToolResultBlockParam[] = toolUseBlocks.map((block) => ({
  1452. type: "tool_result",
  1453. tool_use_id: block.id,
  1454. content: "Task was interrupted before this tool call could be completed.",
  1455. }))
  1456. modifiedApiConversationHistory = [...existingApiConversationHistory] // no changes
  1457. modifiedOldUserContent = [...toolResponses]
  1458. } else {
  1459. modifiedApiConversationHistory = [...existingApiConversationHistory]
  1460. modifiedOldUserContent = []
  1461. }
  1462. } else if (lastMessage.role === "user") {
  1463. const previousAssistantMessage: ApiMessage | undefined =
  1464. existingApiConversationHistory[existingApiConversationHistory.length - 2]
  1465. const existingUserContent: Anthropic.Messages.ContentBlockParam[] = Array.isArray(lastMessage.content)
  1466. ? lastMessage.content
  1467. : [{ type: "text", text: lastMessage.content }]
  1468. if (previousAssistantMessage && previousAssistantMessage.role === "assistant") {
  1469. const assistantContent = Array.isArray(previousAssistantMessage.content)
  1470. ? previousAssistantMessage.content
  1471. : [{ type: "text", text: previousAssistantMessage.content }]
  1472. const toolUseBlocks = assistantContent.filter(
  1473. (block) => block.type === "tool_use",
  1474. ) as Anthropic.Messages.ToolUseBlock[]
  1475. if (toolUseBlocks.length > 0) {
  1476. const existingToolResults = existingUserContent.filter(
  1477. (block) => block.type === "tool_result",
  1478. ) as Anthropic.ToolResultBlockParam[]
  1479. const missingToolResponses: Anthropic.ToolResultBlockParam[] = toolUseBlocks
  1480. .filter(
  1481. (toolUse) => !existingToolResults.some((result) => result.tool_use_id === toolUse.id),
  1482. )
  1483. .map((toolUse) => ({
  1484. type: "tool_result",
  1485. tool_use_id: toolUse.id,
  1486. content: "Task was interrupted before this tool call could be completed.",
  1487. }))
  1488. modifiedApiConversationHistory = existingApiConversationHistory.slice(0, -1) // removes the last user message
  1489. modifiedOldUserContent = [...existingUserContent, ...missingToolResponses]
  1490. } else {
  1491. modifiedApiConversationHistory = existingApiConversationHistory.slice(0, -1)
  1492. modifiedOldUserContent = [...existingUserContent]
  1493. }
  1494. } else {
  1495. modifiedApiConversationHistory = existingApiConversationHistory.slice(0, -1)
  1496. modifiedOldUserContent = [...existingUserContent]
  1497. }
  1498. } else {
  1499. throw new Error("Unexpected: Last message is not a user or assistant message")
  1500. }
  1501. } else {
  1502. throw new Error("Unexpected: No existing API conversation history")
  1503. }
  1504. let newUserContent: Anthropic.Messages.ContentBlockParam[] = [...modifiedOldUserContent]
  1505. const agoText = ((): string => {
  1506. const timestamp = lastClineMessage?.ts ?? Date.now()
  1507. const now = Date.now()
  1508. const diff = now - timestamp
  1509. const minutes = Math.floor(diff / 60000)
  1510. const hours = Math.floor(minutes / 60)
  1511. const days = Math.floor(hours / 24)
  1512. if (days > 0) {
  1513. return `${days} day${days > 1 ? "s" : ""} ago`
  1514. }
  1515. if (hours > 0) {
  1516. return `${hours} hour${hours > 1 ? "s" : ""} ago`
  1517. }
  1518. if (minutes > 0) {
  1519. return `${minutes} minute${minutes > 1 ? "s" : ""} ago`
  1520. }
  1521. return "just now"
  1522. })()
  1523. if (responseText) {
  1524. newUserContent.push({
  1525. type: "text",
  1526. text: `\n\nNew instructions for task continuation:\n<user_message>\n${responseText}\n</user_message>`,
  1527. })
  1528. }
  1529. if (responseImages && responseImages.length > 0) {
  1530. newUserContent.push(...formatResponse.imageBlocks(responseImages))
  1531. }
  1532. // Ensure we have at least some content to send to the API.
  1533. // If newUserContent is empty, add a minimal resumption message.
  1534. if (newUserContent.length === 0) {
  1535. newUserContent.push({
  1536. type: "text",
  1537. text: "[TASK RESUMPTION] Resuming task...",
  1538. })
  1539. }
  1540. await this.overwriteApiConversationHistory(modifiedApiConversationHistory)
  1541. // Task resuming from history item.
  1542. await this.initiateTaskLoop(newUserContent)
  1543. }
  1544. /**
  1545. * Cancels the current HTTP request if one is in progress.
  1546. * This immediately aborts the underlying stream rather than waiting for the next chunk.
  1547. */
  1548. public cancelCurrentRequest(): void {
  1549. if (this.currentRequestAbortController) {
  1550. console.log(`[Task#${this.taskId}.${this.instanceId}] Aborting current HTTP request`)
  1551. this.currentRequestAbortController.abort()
  1552. this.currentRequestAbortController = undefined
  1553. }
  1554. }
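/**
 * Aborts the task: sets the abort flag, emits TaskAborted, disposes resources,
 * and saves any in-flight messages. Errors during disposal and saving are
 * logged rather than rethrown so abort always succeeds.
 */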
  1555. public async abortTask(isAbandoned = false) {
  1556. // Aborting task
  1557. // Will stop any autonomously running promises.
  1558. if (isAbandoned) {
  1559. this.abandoned = true
  1560. }
  1561. this.abort = true
  1562. this.emit(RooCodeEventName.TaskAborted)
  1563. try {
  1564. this.dispose() // Call the centralized dispose method
  1565. } catch (error) {
  1566. console.error(`Error during task ${this.taskId}.${this.instanceId} disposal:`, error)
  1567. // Don't rethrow - we want abort to always succeed
  1568. }
  1569. // Save the countdown message in the automatic retry or other content.
  1570. try {
  1572. await this.saveClineMessages()
  1573. } catch (error) {
  1574. console.error(`Error saving messages during abort for task ${this.taskId}.${this.instanceId}:`, error)
  1575. }
  1576. }
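/**
 * Centralized teardown: cancels any in-flight request, detaches listeners,
 * disposes the message queue, releases terminals, closes browser sessions and
 * the browser panel, disposes trackers, and reverts any unfinished diff edits.
 * Cleanup steps are individually guarded so a failure in one does not block
 * the rest.
 */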
  1577. public dispose(): void {
  1578. console.log(`[Task#dispose] disposing task ${this.taskId}.${this.instanceId}`)
  1579. // Cancel any in-progress HTTP request
  1580. try {
  1581. this.cancelCurrentRequest()
  1582. } catch (error) {
  1583. console.error("Error cancelling current request:", error)
  1584. }
  1585. // Remove provider profile change listener
  1586. try {
  1587. if (this.providerProfileChangeListener) {
  1588. const provider = this.providerRef.deref()
  1589. if (provider) {
  1590. provider.off(RooCodeEventName.ProviderProfileChanged, this.providerProfileChangeListener)
  1591. }
  1592. this.providerProfileChangeListener = undefined
  1593. }
  1594. } catch (error) {
  1595. console.error("Error removing provider profile change listener:", error)
  1596. }
  1597. // Dispose message queue and remove event listeners.
  1598. try {
  1599. if (this.messageQueueStateChangedHandler) {
  1600. this.messageQueueService.removeListener("stateChanged", this.messageQueueStateChangedHandler)
  1601. this.messageQueueStateChangedHandler = undefined
  1602. }
  1603. this.messageQueueService.dispose()
  1604. } catch (error) {
  1605. console.error("Error disposing message queue:", error)
  1606. }
  1607. // Remove all event listeners to prevent memory leaks.
  1608. try {
  1609. this.removeAllListeners()
  1610. } catch (error) {
  1611. console.error("Error removing event listeners:", error)
  1612. }
  1613. // Stop waiting for child task completion.
  1614. if (this.pauseInterval) {
  1615. clearInterval(this.pauseInterval)
  1616. this.pauseInterval = undefined
  1617. }
  1618. if (this.enableBridge) {
  1619. BridgeOrchestrator.getInstance()
  1620. ?.unsubscribeFromTask(this.taskId)
  1621. .catch((error) =>
  1622. console.error(
  1623. `[Task#dispose] BridgeOrchestrator#unsubscribeFromTask() failed: ${error instanceof Error ? error.message : String(error)}`,
  1624. ),
  1625. )
  1626. }
  1627. // Release any terminals associated with this task.
  1628. try {
  1630. TerminalRegistry.releaseTerminalsForTask(this.taskId)
  1631. } catch (error) {
  1632. console.error("Error releasing terminals:", error)
  1633. }
  1634. try {
  1635. this.urlContentFetcher.closeBrowser()
  1636. } catch (error) {
  1637. console.error("Error closing URL content fetcher browser:", error)
  1638. }
  1639. try {
  1640. this.browserSession.closeBrowser()
  1641. } catch (error) {
  1642. console.error("Error closing browser session:", error)
  1643. }
  1644. // Also close the Browser Session panel when the task is disposed
  1645. try {
  1646. const provider = this.providerRef.deref()
  1647. if (provider) {
  1648. const { BrowserSessionPanelManager } = require("../webview/BrowserSessionPanelManager")
  1649. BrowserSessionPanelManager.getInstance(provider).dispose()
  1650. }
  1651. } catch (error) {
  1652. console.error("Error closing browser session panel:", error)
  1653. }
  1654. try {
  1655. if (this.rooIgnoreController) {
  1656. this.rooIgnoreController.dispose()
  1657. this.rooIgnoreController = undefined
  1658. }
  1659. } catch (error) {
  1660. console.error("Error disposing RooIgnoreController:", error)
  1661. // This is the critical one for the leak fix.
  1662. }
  1663. try {
  1664. this.fileContextTracker.dispose()
  1665. } catch (error) {
  1666. console.error("Error disposing file context tracker:", error)
  1667. }
  1668. try {
  1669. // If we're not streaming then `abortStream` won't be called.
  1670. if (this.isStreaming && this.diffViewProvider.isEditing) {
  1671. this.diffViewProvider.revertChanges().catch(console.error)
  1672. }
  1673. } catch (error) {
  1674. console.error("Error reverting diff changes:", error)
  1675. }
  1676. }
  1677. // Subtasks
  1678. // Spawn / Wait / Complete
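// Spawns a child task via the provider, pauses this (parent) task, records the
// child's id, switches to the requested mode, emits TaskPaused and TaskSpawned,
// and returns the new task.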
  1679. public async startSubtask(message: string, initialTodos: TodoItem[], mode: string) {
  1680. const provider = this.providerRef.deref()
  1681. if (!provider) {
  1682. throw new Error("Provider not available")
  1683. }
  1684. const newTask = await provider.createTask(message, undefined, this, { initialTodos })
  1685. if (newTask) {
  1686. this.isPaused = true // Pause parent.
  1687. this.childTaskId = newTask.taskId
  1688. await provider.handleModeSwitch(mode) // Set child's mode.
  1689. await delay(500) // Allow mode change to take effect.
  1690. this.emit(RooCodeEventName.TaskPaused, this.taskId)
  1691. this.emit(RooCodeEventName.TaskSpawned, newTask.taskId)
  1692. }
  1693. return newTask
  1694. }
  1695. // Used when a sub-task is launched and the parent task is waiting for it to
  1696. // finish.
  1697. // TBD: Add a timeout to prevent infinite waiting.
  1698. public async waitForSubtask() {
  1699. await new Promise<void>((resolve) => {
  1700. this.pauseInterval = setInterval(() => {
  1701. if (!this.isPaused) {
  1702. clearInterval(this.pauseInterval)
  1703. this.pauseInterval = undefined
  1704. resolve()
  1705. }
  1706. }, 1000)
  1707. })
  1708. }
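// Called when a child task finishes: unpauses the parent, clears the child id,
// and injects the subtask's result into both the chat history and the API
// conversation so the parent model can see it.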
  1709. public async completeSubtask(lastMessage: string) {
  1710. this.isPaused = false
  1711. this.childTaskId = undefined
  1712. this.emit(RooCodeEventName.TaskUnpaused, this.taskId)
  1713. // Fake an answer from the subtask that it has completed running and
1714. // this is the result of what it has done. Add the message to the chat
1715. // history and to the webview UI.
  1716. try {
  1717. await this.say("subtask_result", lastMessage)
  1718. await this.addToApiConversationHistory({
  1719. role: "user",
  1720. content: [{ type: "text", text: `[new_task completed] Result: ${lastMessage}` }],
  1721. })
  1722. } catch (error) {
  1723. this.providerRef
  1724. .deref()
  1725. ?.log(`Error failed to add reply from subtask into conversation of parent task, error: ${error}`)
  1726. throw error
  1727. }
  1728. }
  1729. // Task Loop
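// Drives the agentic loop: repeatedly calls recursivelyMakeClineRequests until
// the loop reports completion or the task is aborted. If the model stops using
// tools without attempting completion, a "no tools used" nudge is sent and the
// consecutive-mistake counter is incremented.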
  1730. private async initiateTaskLoop(userContent: Anthropic.Messages.ContentBlockParam[]): Promise<void> {
  1731. // Kicks off the checkpoints initialization process in the background.
  1732. getCheckpointService(this)
  1733. let nextUserContent = userContent
  1734. let includeFileDetails = true
  1735. this.emit(RooCodeEventName.TaskStarted)
  1736. while (!this.abort) {
  1737. const didEndLoop = await this.recursivelyMakeClineRequests(nextUserContent, includeFileDetails)
  1738. includeFileDetails = false // We only need file details the first time.
  1739. // The way this agentic loop works is that cline will be given a
  1740. // task that he then calls tools to complete. Unless there's an
  1741. // attempt_completion call, we keep responding back to him with his
1742. // tool's responses until he either calls attempt_completion or does not
1743. // use any more tools. If he does not use any more tools, we ask him
1744. // to consider whether he's completed the task and then call
1745. // attempt_completion; otherwise he proceeds with completing the task.
  1746. // There is a MAX_REQUESTS_PER_TASK limit to prevent infinite
  1747. // requests, but Cline is prompted to finish the task as efficiently
  1748. // as he can.
  1749. if (didEndLoop) {
  1750. // For now a task never 'completes'. This will only happen if
  1751. // the user hits max requests and denies resetting the count.
  1752. break
  1753. } else {
  1754. const modelInfo = this.api.getModel().info
  1755. const state = await this.providerRef.deref()?.getState()
  1756. const toolProtocol = resolveToolProtocol(this.apiConfiguration, modelInfo)
  1757. nextUserContent = [{ type: "text", text: formatResponse.noToolsUsed(toolProtocol) }]
  1758. this.consecutiveMistakeCount++
  1759. }
  1760. }
  1761. }
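/**
 * Core request loop, implemented iteratively with an explicit stack to avoid
 * deep recursion. Each iteration enforces the consecutive-mistake limit, waits
 * out paused subtasks, resolves mentions in the user content, attaches fresh
 * environment details, sends the API request, and streams the assistant's
 * response (text, reasoning, usage, grounding, and tool calls).
 */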
  1762. public async recursivelyMakeClineRequests(
  1763. userContent: Anthropic.Messages.ContentBlockParam[],
  1764. includeFileDetails: boolean = false,
  1765. ): Promise<boolean> {
  1766. interface StackItem {
  1767. userContent: Anthropic.Messages.ContentBlockParam[]
  1768. includeFileDetails: boolean
  1769. retryAttempt?: number
  1770. userMessageWasRemoved?: boolean // Track if user message was removed due to empty response
  1771. }
  1772. const stack: StackItem[] = [{ userContent, includeFileDetails, retryAttempt: 0 }]
  1773. while (stack.length > 0) {
  1774. const currentItem = stack.pop()!
  1775. const currentUserContent = currentItem.userContent
  1776. const currentIncludeFileDetails = currentItem.includeFileDetails
  1777. if (this.abort) {
  1778. throw new Error(`[RooCode#recursivelyMakeRooRequests] task ${this.taskId}.${this.instanceId} aborted`)
  1779. }
  1780. if (this.consecutiveMistakeLimit > 0 && this.consecutiveMistakeCount >= this.consecutiveMistakeLimit) {
  1781. const { response, text, images } = await this.ask(
  1782. "mistake_limit_reached",
  1783. t("common:errors.mistake_limit_guidance"),
  1784. )
  1785. if (response === "messageResponse") {
  1786. currentUserContent.push(
  1787. ...[
  1788. { type: "text" as const, text: formatResponse.tooManyMistakes(text) },
  1789. ...formatResponse.imageBlocks(images),
  1790. ],
  1791. )
  1792. await this.say("user_feedback", text, images)
  1793. // Track consecutive mistake errors in telemetry.
  1794. TelemetryService.instance.captureConsecutiveMistakeError(this.taskId)
  1795. }
  1796. this.consecutiveMistakeCount = 0
  1797. }
  1798. // In this Cline request loop, we need to check if this task instance
  1799. // has been asked to wait for a subtask to finish before continuing.
  1800. const provider = this.providerRef.deref()
  1801. if (this.isPaused && provider) {
  1802. provider.log(`[subtasks] paused ${this.taskId}.${this.instanceId}`)
  1803. await this.waitForSubtask()
  1804. provider.log(`[subtasks] resumed ${this.taskId}.${this.instanceId}`)
  1805. const currentMode = (await provider.getState())?.mode ?? defaultModeSlug
  1806. if (currentMode !== this.pausedModeSlug) {
  1807. // The mode has changed, we need to switch back to the paused mode.
  1808. await provider.handleModeSwitch(this.pausedModeSlug)
  1809. // Delay to allow mode change to take effect before next tool is executed.
  1810. await delay(500)
  1811. provider.log(
  1812. `[subtasks] task ${this.taskId}.${this.instanceId} has switched back to '${this.pausedModeSlug}' from '${currentMode}'`,
  1813. )
  1814. }
  1815. }
1816. // Getting verbose details is an expensive operation; it uses ripgrep to
1817. // build a top-down file structure of the project, which for large projects
1818. // can take a few seconds. For the best UX we show a placeholder
1819. // api_req_started message with a loading spinner while this happens.
  1820. // Determine API protocol based on provider and model
  1821. const modelId = getModelId(this.apiConfiguration)
  1822. const apiProtocol = getApiProtocol(this.apiConfiguration.apiProvider, modelId)
  1823. await this.say(
  1824. "api_req_started",
  1825. JSON.stringify({
  1826. apiProtocol,
  1827. }),
  1828. )
  1829. const {
  1830. showRooIgnoredFiles = false,
  1831. includeDiagnosticMessages = true,
  1832. maxDiagnosticMessages = 50,
  1833. maxReadFileLine = -1,
  1834. } = (await this.providerRef.deref()?.getState()) ?? {}
  1835. const parsedUserContent = await processUserContentMentions({
  1836. userContent: currentUserContent,
  1837. cwd: this.cwd,
  1838. urlContentFetcher: this.urlContentFetcher,
  1839. fileContextTracker: this.fileContextTracker,
  1840. rooIgnoreController: this.rooIgnoreController,
  1841. showRooIgnoredFiles,
  1842. includeDiagnosticMessages,
  1843. maxDiagnosticMessages,
  1844. maxReadFileLine,
  1845. })
  1846. const environmentDetails = await getEnvironmentDetails(this, currentIncludeFileDetails)
  1847. // Remove any existing environment_details blocks before adding fresh ones.
  1848. // This prevents duplicate environment details when resuming tasks with XML tool calls,
  1849. // where the old user message content may already contain environment details from the previous session.
  1850. // We check for both opening and closing tags to ensure we're matching complete environment detail blocks,
  1851. // not just mentions of the tag in regular content.
  1852. const contentWithoutEnvDetails = parsedUserContent.filter((block) => {
  1853. if (block.type === "text" && typeof block.text === "string") {
  1854. // Check if this text block is a complete environment_details block
  1855. // by verifying it starts with the opening tag and ends with the closing tag
  1856. const isEnvironmentDetailsBlock =
  1857. block.text.trim().startsWith("<environment_details>") &&
  1858. block.text.trim().endsWith("</environment_details>")
  1859. return !isEnvironmentDetailsBlock
  1860. }
  1861. return true
  1862. })
  1863. // Add environment details as its own text block, separate from tool
  1864. // results.
  1865. const finalUserContent = [...contentWithoutEnvDetails, { type: "text" as const, text: environmentDetails }]
  1866. // Only add user message to conversation history if:
  1867. // 1. This is the first attempt (retryAttempt === 0), OR
  1868. // 2. The message was removed in a previous iteration (userMessageWasRemoved === true)
  1869. // This prevents consecutive user messages while allowing re-add when needed
  1870. if ((currentItem.retryAttempt ?? 0) === 0 || currentItem.userMessageWasRemoved) {
  1871. await this.addToApiConversationHistory({ role: "user", content: finalUserContent })
  1872. TelemetryService.instance.captureConversationMessage(this.taskId, "user")
  1873. }
  1874. // Since we sent off a placeholder api_req_started message to update the
  1875. // webview while waiting to actually start the API request (to load
  1876. // potential details for example), we need to update the text of that
  1877. // message.
  1878. const lastApiReqIndex = findLastIndex(this.clineMessages, (m) => m.say === "api_req_started")
  1879. this.clineMessages[lastApiReqIndex].text = JSON.stringify({
  1880. apiProtocol,
  1881. } satisfies ClineApiReqInfo)
  1882. await this.saveClineMessages()
  1883. await provider?.postStateToWebview()
  1884. try {
  1885. let cacheWriteTokens = 0
  1886. let cacheReadTokens = 0
  1887. let inputTokens = 0
  1888. let outputTokens = 0
  1889. let totalCost: number | undefined
  1890. // We can't use `api_req_finished` anymore since it's a unique case
  1891. // where it could come after a streaming message (i.e. in the middle
  1892. // of being updated or executed).
  1893. // Fortunately `api_req_finished` was always parsed out for the GUI
1894. // anyway, so it remains solely for legacy purposes to keep track
  1895. // of prices in tasks from history (it's worth removing a few months
  1896. // from now).
  1897. const updateApiReqMsg = (cancelReason?: ClineApiReqCancelReason, streamingFailedMessage?: string) => {
  1898. if (lastApiReqIndex < 0 || !this.clineMessages[lastApiReqIndex]) {
  1899. return
  1900. }
  1901. const existingData = JSON.parse(this.clineMessages[lastApiReqIndex].text || "{}")
  1902. // Calculate total tokens and cost using provider-aware function
  1903. const modelId = getModelId(this.apiConfiguration)
  1904. const apiProtocol = getApiProtocol(this.apiConfiguration.apiProvider, modelId)
  1905. const costResult =
  1906. apiProtocol === "anthropic"
  1907. ? calculateApiCostAnthropic(
  1908. streamModelInfo,
  1909. inputTokens,
  1910. outputTokens,
  1911. cacheWriteTokens,
  1912. cacheReadTokens,
  1913. )
  1914. : calculateApiCostOpenAI(
  1915. streamModelInfo,
  1916. inputTokens,
  1917. outputTokens,
  1918. cacheWriteTokens,
  1919. cacheReadTokens,
  1920. )
  1921. this.clineMessages[lastApiReqIndex].text = JSON.stringify({
  1922. ...existingData,
  1923. tokensIn: costResult.totalInputTokens,
  1924. tokensOut: costResult.totalOutputTokens,
  1925. cacheWrites: cacheWriteTokens,
  1926. cacheReads: cacheReadTokens,
  1927. cost: totalCost ?? costResult.totalCost,
  1928. cancelReason,
  1929. streamingFailedMessage,
  1930. } satisfies ClineApiReqInfo)
  1931. }
  1932. const abortStream = async (cancelReason: ClineApiReqCancelReason, streamingFailedMessage?: string) => {
  1933. if (this.diffViewProvider.isEditing) {
  1934. await this.diffViewProvider.revertChanges() // closes diff view
  1935. }
  1936. // if last message is a partial we need to update and save it
  1937. const lastMessage = this.clineMessages.at(-1)
  1938. if (lastMessage && lastMessage.partial) {
  1939. // lastMessage.ts = Date.now() DO NOT update ts since it is used as a key for virtuoso list
  1940. lastMessage.partial = false
  1941. // instead of streaming partialMessage events, we do a save and post like normal to persist to disk
  1942. console.log("updating partial message", lastMessage)
  1943. }
  1944. // Update `api_req_started` to have cancelled and cost, so that
  1945. // we can display the cost of the partial stream and the cancellation reason
  1946. updateApiReqMsg(cancelReason, streamingFailedMessage)
  1947. await this.saveClineMessages()
1948. // Signals to the provider that it can retrieve the saved messages
1949. // from disk, since abortTask cannot, by its nature, be awaited.
  1950. this.didFinishAbortingStream = true
  1951. }
  1952. // Reset streaming state for each new API request
  1953. this.currentStreamingContentIndex = 0
  1954. this.currentStreamingDidCheckpoint = false
  1955. this.assistantMessageContent = []
  1956. this.didCompleteReadingStream = false
  1957. this.userMessageContent = []
  1958. this.userMessageContentReady = false
  1959. this.didRejectTool = false
  1960. this.didAlreadyUseTool = false
  1961. // Reset tool failure flag for each new assistant turn - this ensures that tool failures
  1962. // only prevent attempt_completion within the same assistant message, not across turns
  1963. // (e.g., if a tool fails, then user sends a message saying "just complete anyway")
  1964. this.didToolFailInCurrentTurn = false
  1965. this.presentAssistantMessageLocked = false
  1966. this.presentAssistantMessageHasPendingUpdates = false
  1967. this.assistantMessageParser?.reset()
  1968. this.streamingToolCallIndices.clear()
  1969. // Clear any leftover streaming tool call state from previous interrupted streams
  1970. NativeToolCallParser.clearAllStreamingToolCalls()
  1971. NativeToolCallParser.clearRawChunkState()
  1972. await this.diffViewProvider.reset()
  1973. // Cache model info once per API request to avoid repeated calls during streaming
  1974. // This is especially important for tools and background usage collection
  1975. this.cachedStreamingModel = this.api.getModel()
  1976. const streamModelInfo = this.cachedStreamingModel.info
  1977. const cachedModelId = this.cachedStreamingModel.id
  1978. const streamProtocol = resolveToolProtocol(this.apiConfiguration, streamModelInfo)
  1979. const shouldUseXmlParser = streamProtocol === "xml"
  1980. // Yields only if the first chunk is successful, otherwise will
  1981. // allow the user to retry the request (most likely due to rate
  1982. // limit error, which gets thrown on the first chunk).
  1983. const stream = this.attemptApiRequest()
  1984. let assistantMessage = ""
  1985. let reasoningMessage = ""
  1986. let pendingGroundingSources: GroundingSource[] = []
  1987. this.isStreaming = true
  1988. try {
  1989. const iterator = stream[Symbol.asyncIterator]()
  1990. // Helper to race iterator.next() with abort signal
  1991. const nextChunkWithAbort = async () => {
  1992. const nextPromise = iterator.next()
  1993. // If we have an abort controller, race it with the next chunk
  1994. if (this.currentRequestAbortController) {
  1995. const abortPromise = new Promise<never>((_, reject) => {
  1996. const signal = this.currentRequestAbortController!.signal
  1997. if (signal.aborted) {
  1998. reject(new Error("Request cancelled by user"))
  1999. } else {
  2000. signal.addEventListener("abort", () => {
  2001. reject(new Error("Request cancelled by user"))
  2002. })
  2003. }
  2004. })
  2005. return await Promise.race([nextPromise, abortPromise])
  2006. }
  2007. // No abort controller, just return the next chunk normally
  2008. return await nextPromise
  2009. }
  2010. let item = await nextChunkWithAbort()
  2011. while (!item.done) {
  2012. const chunk = item.value
  2013. item = await nextChunkWithAbort()
  2014. if (!chunk) {
2015. // Sometimes chunk is undefined, no idea what can cause
  2016. // it, but this workaround seems to fix it.
  2017. continue
  2018. }
  2019. switch (chunk.type) {
  2020. case "reasoning": {
  2021. reasoningMessage += chunk.text
  2022. // Only apply formatting if the message contains sentence-ending punctuation followed by **
  2023. let formattedReasoning = reasoningMessage
  2024. if (reasoningMessage.includes("**")) {
  2025. // Add line breaks before **Title** patterns that appear after sentence endings
  2026. // This targets section headers like "...end of sentence.**Title Here**"
  2027. // Handles periods, exclamation marks, and question marks
  2028. formattedReasoning = reasoningMessage.replace(
  2029. /([.!?])\*\*([^*\n]+)\*\*/g,
  2030. "$1\n\n**$2**",
  2031. )
  2032. }
  2033. await this.say("reasoning", formattedReasoning, undefined, true)
  2034. break
  2035. }
  2036. case "usage":
  2037. inputTokens += chunk.inputTokens
  2038. outputTokens += chunk.outputTokens
  2039. cacheWriteTokens += chunk.cacheWriteTokens ?? 0
  2040. cacheReadTokens += chunk.cacheReadTokens ?? 0
  2041. totalCost = chunk.totalCost
  2042. break
  2043. case "grounding":
  2044. // Handle grounding sources separately from regular content
  2045. // to prevent state persistence issues - store them separately
  2046. if (chunk.sources && chunk.sources.length > 0) {
  2047. pendingGroundingSources.push(...chunk.sources)
  2048. }
  2049. break
  2050. case "tool_call_partial": {
  2051. // Process raw tool call chunk through NativeToolCallParser
  2052. // which handles tracking, buffering, and emits events
  2053. const events = NativeToolCallParser.processRawChunk({
  2054. index: chunk.index,
  2055. id: chunk.id,
  2056. name: chunk.name,
  2057. arguments: chunk.arguments,
  2058. })
  2059. for (const event of events) {
  2060. if (event.type === "tool_call_start") {
  2061. // Initialize streaming in NativeToolCallParser
  2062. NativeToolCallParser.startStreamingToolCall(event.id, event.name as ToolName)
  2063. // Before adding a new tool, finalize any preceding text block
  2064. // This prevents the text block from blocking tool presentation
  2065. const lastBlock =
  2066. this.assistantMessageContent[this.assistantMessageContent.length - 1]
  2067. if (lastBlock?.type === "text" && lastBlock.partial) {
  2068. lastBlock.partial = false
  2069. }
  2070. // Track the index where this tool will be stored
  2071. const toolUseIndex = this.assistantMessageContent.length
  2072. this.streamingToolCallIndices.set(event.id, toolUseIndex)
  2073. // Create initial partial tool use
  2074. const partialToolUse: ToolUse = {
  2075. type: "tool_use",
  2076. name: event.name as ToolName,
  2077. params: {},
  2078. partial: true,
  2079. }
  2080. // Store the ID for native protocol
  2081. ;(partialToolUse as any).id = event.id
  2082. // Add to content and present
  2083. this.assistantMessageContent.push(partialToolUse)
  2084. this.userMessageContentReady = false
  2085. presentAssistantMessage(this)
  2086. } else if (event.type === "tool_call_delta") {
  2087. // Process chunk using streaming JSON parser
  2088. const partialToolUse = NativeToolCallParser.processStreamingChunk(
  2089. event.id,
  2090. event.delta,
  2091. )
  2092. if (partialToolUse) {
  2093. // Get the index for this tool call
  2094. const toolUseIndex = this.streamingToolCallIndices.get(event.id)
  2095. if (toolUseIndex !== undefined) {
  2096. // Store the ID for native protocol
  2097. ;(partialToolUse as any).id = event.id
  2098. // Update the existing tool use with new partial data
  2099. this.assistantMessageContent[toolUseIndex] = partialToolUse
  2100. // Present updated tool use
  2101. presentAssistantMessage(this)
  2102. }
  2103. }
  2104. } else if (event.type === "tool_call_end") {
  2105. // Finalize the streaming tool call
  2106. const finalToolUse = NativeToolCallParser.finalizeStreamingToolCall(event.id)
  2107. if (finalToolUse) {
  2108. // Store the tool call ID
  2109. ;(finalToolUse as any).id = event.id
  2110. // Get the index and replace partial with final
  2111. const toolUseIndex = this.streamingToolCallIndices.get(event.id)
  2112. if (toolUseIndex !== undefined) {
  2113. this.assistantMessageContent[toolUseIndex] = finalToolUse
  2114. }
  2115. // Clean up tracking
  2116. this.streamingToolCallIndices.delete(event.id)
  2117. // Mark that we have new content to process
  2118. this.userMessageContentReady = false
  2119. // Present the finalized tool call
  2120. presentAssistantMessage(this)
  2121. }
  2122. }
  2123. }
  2124. break
  2125. }
  2126. case "tool_call": {
  2127. // Legacy: Handle complete tool calls (for backward compatibility)
  2128. // Convert native tool call to ToolUse format
  2129. const toolUse = NativeToolCallParser.parseToolCall({
  2130. id: chunk.id,
  2131. name: chunk.name as ToolName,
  2132. arguments: chunk.arguments,
  2133. })
  2134. if (!toolUse) {
  2135. console.error(`Failed to parse tool call for task ${this.taskId}:`, chunk)
  2136. break
  2137. }
  2138. // Store the tool call ID on the ToolUse object for later reference
  2139. // This is needed to create tool_result blocks that reference the correct tool_use_id
  2140. toolUse.id = chunk.id
  2141. // Add the tool use to assistant message content
  2142. this.assistantMessageContent.push(toolUse)
  2143. // Mark that we have new content to process
  2144. this.userMessageContentReady = false
  2145. // Present the tool call to user - presentAssistantMessage will execute
  2146. // tools sequentially and accumulate all results in userMessageContent
  2147. presentAssistantMessage(this)
  2148. break
  2149. }
  2150. case "text": {
  2151. assistantMessage += chunk.text
  2152. // Use the protocol determined at the start of streaming
  2153. // Don't rely solely on parser existence - parser might exist from previous state
  2154. if (shouldUseXmlParser && this.assistantMessageParser) {
  2155. // XML protocol: Parse raw assistant message chunk into content blocks
  2156. const prevLength = this.assistantMessageContent.length
  2157. this.assistantMessageContent = this.assistantMessageParser.processChunk(chunk.text)
  2158. if (this.assistantMessageContent.length > prevLength) {
  2159. // New content we need to present, reset to
  2160. // false in case previous content set this to true.
  2161. this.userMessageContentReady = false
  2162. }
  2163. // Present content to user.
  2164. presentAssistantMessage(this)
  2165. } else {
  2166. // Native protocol: Text chunks are plain text, not XML tool calls
  2167. // Create or update a text content block directly
  2168. const lastBlock =
  2169. this.assistantMessageContent[this.assistantMessageContent.length - 1]
  2170. if (lastBlock?.type === "text" && lastBlock.partial) {
  2171. // Update existing partial text block
  2172. lastBlock.content = assistantMessage
  2173. } else {
  2174. // Create new text block
  2175. this.assistantMessageContent.push({
  2176. type: "text",
  2177. content: assistantMessage,
  2178. partial: true,
  2179. })
  2180. this.userMessageContentReady = false
  2181. }
  2182. // Present content to user
  2183. presentAssistantMessage(this)
  2184. }
  2185. break
  2186. }
  2187. }
  2188. if (this.abort) {
  2189. console.log(`aborting stream, this.abandoned = ${this.abandoned}`)
  2190. if (!this.abandoned) {
  2191. // Only need to gracefully abort if this instance
  2192. // isn't abandoned (sometimes OpenRouter stream
  2193. // hangs, in which case this would affect future
  2194. // instances of Cline).
  2195. await abortStream("user_cancelled")
  2196. }
  2197. break // Aborts the stream.
  2198. }
  2199. if (this.didRejectTool) {
  2200. // `userContent` has a tool rejection, so interrupt the
  2201. // assistant's response to present the user's feedback.
  2202. assistantMessage += "\n\n[Response interrupted by user feedback]"
  2203. // Instead of setting this preemptively, we allow the
  2204. // present iterator to finish and set
2205. // userMessageContentReady when it's ready.
  2206. // this.userMessageContentReady = true
  2207. break
  2208. }
  2209. if (this.didAlreadyUseTool) {
  2210. assistantMessage +=
  2211. "\n\n[Response interrupted by a tool use result. Only one tool may be used at a time and should be placed at the end of the message.]"
  2212. break
  2213. }
  2214. }
  2215. // Finalize any remaining streaming tool calls that weren't explicitly ended
  2216. // This is critical for MCP tools which need tool_call_end events to be properly
  2217. // converted from ToolUse to McpToolUse via finalizeStreamingToolCall()
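// Illustrative (hypothetical id) finalize event for a tool call that never received an
// explicit end marker while streaming:
//   { type: "tool_call_end", id: "call_abc123" }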
  2218. const finalizeEvents = NativeToolCallParser.finalizeRawChunks()
  2219. for (const event of finalizeEvents) {
  2220. if (event.type === "tool_call_end") {
  2221. // Finalize the streaming tool call
  2222. const finalToolUse = NativeToolCallParser.finalizeStreamingToolCall(event.id)
  2223. if (finalToolUse) {
  2224. // Store the tool call ID
  2225. ;(finalToolUse as any).id = event.id
  2226. // Get the index and replace partial with final
  2227. const toolUseIndex = this.streamingToolCallIndices.get(event.id)
  2228. if (toolUseIndex !== undefined) {
  2229. this.assistantMessageContent[toolUseIndex] = finalToolUse
  2230. }
  2231. // Clean up tracking
  2232. this.streamingToolCallIndices.delete(event.id)
  2233. // Mark that we have new content to process
  2234. this.userMessageContentReady = false
  2235. // Present the finalized tool call
  2236. presentAssistantMessage(this)
  2237. }
  2238. }
  2239. }
  2240. // Create a copy of current token values to avoid race conditions
  2241. const currentTokens = {
  2242. input: inputTokens,
  2243. output: outputTokens,
  2244. cacheWrite: cacheWriteTokens,
  2245. cacheRead: cacheReadTokens,
  2246. total: totalCost,
  2247. }
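// The snapshot above seeds the background drain task below, so any usage chunks that arrive
// after the main loop exits are added on top of what has already been counted, and the shared
// counters are only written back once complete totals are known (see captureUsageData).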
  2248. const drainStreamInBackgroundToFindAllUsage = async (apiReqIndex: number) => {
  2249. const timeoutMs = DEFAULT_USAGE_COLLECTION_TIMEOUT_MS
  2250. const startTime = performance.now()
  2251. const modelId = getModelId(this.apiConfiguration)
  2252. // Local variables to accumulate usage data without affecting the main flow
  2253. let bgInputTokens = currentTokens.input
  2254. let bgOutputTokens = currentTokens.output
  2255. let bgCacheWriteTokens = currentTokens.cacheWrite
  2256. let bgCacheReadTokens = currentTokens.cacheRead
  2257. let bgTotalCost = currentTokens.total
  2258. // Helper function to capture telemetry and update messages
  2259. const captureUsageData = async (
  2260. tokens: {
  2261. input: number
  2262. output: number
  2263. cacheWrite: number
  2264. cacheRead: number
  2265. total?: number
  2266. },
  2267. messageIndex: number = apiReqIndex,
  2268. ) => {
  2269. if (
  2270. tokens.input > 0 ||
  2271. tokens.output > 0 ||
  2272. tokens.cacheWrite > 0 ||
  2273. tokens.cacheRead > 0
  2274. ) {
  2275. // Update the shared variables atomically
  2276. inputTokens = tokens.input
  2277. outputTokens = tokens.output
  2278. cacheWriteTokens = tokens.cacheWrite
  2279. cacheReadTokens = tokens.cacheRead
  2280. totalCost = tokens.total
  2281. // Update the API request message with the latest usage data
  2282. updateApiReqMsg()
  2283. await this.saveClineMessages()
  2284. // Update the specific message in the webview
  2285. const apiReqMessage = this.clineMessages[messageIndex]
  2286. if (apiReqMessage) {
  2287. await this.updateClineMessage(apiReqMessage)
  2288. }
  2289. // Capture telemetry with provider-aware cost calculation
  2290. const modelId = getModelId(this.apiConfiguration)
  2291. const apiProtocol = getApiProtocol(this.apiConfiguration.apiProvider, modelId)
  2292. // Use the appropriate cost function based on the API protocol
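// The split exists because the two usage formats count cache tokens differently (Anthropic-style
// usage reports input tokens excluding cache reads/writes, while OpenAI-style usage generally
// includes them), so each helper normalizes to comparable totals - hence costResult.totalInputTokens below.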
  2293. const costResult =
  2294. apiProtocol === "anthropic"
  2295. ? calculateApiCostAnthropic(
  2296. streamModelInfo,
  2297. tokens.input,
  2298. tokens.output,
  2299. tokens.cacheWrite,
  2300. tokens.cacheRead,
  2301. )
  2302. : calculateApiCostOpenAI(
  2303. streamModelInfo,
  2304. tokens.input,
  2305. tokens.output,
  2306. tokens.cacheWrite,
  2307. tokens.cacheRead,
  2308. )
  2309. TelemetryService.instance.captureLlmCompletion(this.taskId, {
  2310. inputTokens: costResult.totalInputTokens,
  2311. outputTokens: costResult.totalOutputTokens,
  2312. cacheWriteTokens: tokens.cacheWrite,
  2313. cacheReadTokens: tokens.cacheRead,
  2314. cost: tokens.total ?? costResult.totalCost,
  2315. })
  2316. }
  2317. }
  2318. try {
  2319. // Continue processing the original stream from where the main loop left off
  2320. let usageFound = false
  2321. let chunkCount = 0
  2322. // Use the same iterator that the main loop was using
  2323. while (!item.done) {
  2324. // Check for timeout
  2325. if (performance.now() - startTime > timeoutMs) {
  2326. console.warn(
  2327. `[Background Usage Collection] Timed out after ${timeoutMs}ms for model: ${modelId}, processed ${chunkCount} chunks`,
  2328. )
  2329. // Clean up the iterator before breaking
  2330. if (iterator.return) {
  2331. await iterator.return(undefined)
  2332. }
  2333. break
  2334. }
  2335. const chunk = item.value
  2336. item = await iterator.next()
  2337. chunkCount++
  2338. if (chunk && chunk.type === "usage") {
  2339. usageFound = true
  2340. bgInputTokens += chunk.inputTokens
  2341. bgOutputTokens += chunk.outputTokens
  2342. bgCacheWriteTokens += chunk.cacheWriteTokens ?? 0
  2343. bgCacheReadTokens += chunk.cacheReadTokens ?? 0
  2344. bgTotalCost = chunk.totalCost
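// Note: bgTotalCost is assigned rather than accumulated - the assumption is that providers
// which emit usage chunks report a cumulative running cost, so the latest value wins.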
  2345. }
  2346. }
  2347. if (
  2348. usageFound ||
  2349. bgInputTokens > 0 ||
  2350. bgOutputTokens > 0 ||
  2351. bgCacheWriteTokens > 0 ||
  2352. bgCacheReadTokens > 0
  2353. ) {
  2354. // We have usage data either from a usage chunk or accumulated tokens
  2355. await captureUsageData(
  2356. {
  2357. input: bgInputTokens,
  2358. output: bgOutputTokens,
  2359. cacheWrite: bgCacheWriteTokens,
  2360. cacheRead: bgCacheReadTokens,
  2361. total: bgTotalCost,
  2362. },
  2363. lastApiReqIndex,
  2364. )
  2365. } else {
  2366. console.warn(
  2367. `[Background Usage Collection] Suspicious: request ${apiReqIndex} is complete, but no usage info was found. Model: ${modelId}`,
  2368. )
  2369. }
  2370. } catch (error) {
  2371. console.error("Error draining stream for usage data:", error)
  2372. // Still try to capture whatever usage data we have collected so far
  2373. if (
  2374. bgInputTokens > 0 ||
  2375. bgOutputTokens > 0 ||
  2376. bgCacheWriteTokens > 0 ||
  2377. bgCacheReadTokens > 0
  2378. ) {
  2379. await captureUsageData(
  2380. {
  2381. input: bgInputTokens,
  2382. output: bgOutputTokens,
  2383. cacheWrite: bgCacheWriteTokens,
  2384. cacheRead: bgCacheReadTokens,
  2385. total: bgTotalCost,
  2386. },
  2387. lastApiReqIndex,
  2388. )
  2389. }
  2390. }
  2391. }
  2392. // Start the background task and handle any errors
  2393. drainStreamInBackgroundToFindAllUsage(lastApiReqIndex).catch((error) => {
  2394. console.error("Background usage collection failed:", error)
  2395. })
  2396. } catch (error) {
2397. // Abandoned happens when the extension is no longer waiting for the
  2398. // Cline instance to finish aborting (error is thrown here when
  2399. // any function in the for loop throws due to this.abort).
  2400. if (!this.abandoned) {
  2401. // Determine cancellation reason
  2402. const cancelReason: ClineApiReqCancelReason = this.abort ? "user_cancelled" : "streaming_failed"
  2403. const streamingFailedMessage = this.abort
  2404. ? undefined
  2405. : (error.message ?? JSON.stringify(serializeError(error), null, 2))
  2406. // Clean up partial state
  2407. await abortStream(cancelReason, streamingFailedMessage)
  2408. if (this.abort) {
  2409. // User cancelled - abort the entire task
  2410. this.abortReason = cancelReason
  2411. await this.abortTask()
  2412. } else {
  2413. // Stream failed - log the error and retry with the same content
  2414. // The existing rate limiting will prevent rapid retries
  2415. console.error(
  2416. `[Task#${this.taskId}.${this.instanceId}] Stream failed, will retry: ${streamingFailedMessage}`,
  2417. )
  2418. // Apply exponential backoff similar to first-chunk errors when auto-resubmit is enabled
  2419. const stateForBackoff = await this.providerRef.deref()?.getState()
  2420. if (stateForBackoff?.autoApprovalEnabled && stateForBackoff?.alwaysApproveResubmit) {
  2421. await this.backoffAndAnnounce(
  2422. currentItem.retryAttempt ?? 0,
  2423. error,
  2424. streamingFailedMessage,
  2425. )
  2426. // Check if task was aborted during the backoff
  2427. if (this.abort) {
  2428. console.log(
  2429. `[Task#${this.taskId}.${this.instanceId}] Task aborted during mid-stream retry backoff`,
  2430. )
  2431. // Abort the entire task
  2432. this.abortReason = "user_cancelled"
  2433. await this.abortTask()
  2434. break
  2435. }
  2436. }
  2437. // Push the same content back onto the stack to retry, incrementing the retry attempt counter
  2438. stack.push({
  2439. userContent: currentUserContent,
  2440. includeFileDetails: false,
  2441. retryAttempt: (currentItem.retryAttempt ?? 0) + 1,
  2442. })
  2443. // Continue to retry the request
  2444. continue
  2445. }
  2446. }
  2447. } finally {
  2448. this.isStreaming = false
  2449. // Clean up the abort controller when streaming completes
  2450. this.currentRequestAbortController = undefined
  2451. }
2452. // Need to check here in case the stream was aborted.
  2453. if (this.abort || this.abandoned) {
  2454. throw new Error(
  2455. `[RooCode#recursivelyMakeRooRequests] task ${this.taskId}.${this.instanceId} aborted`,
  2456. )
  2457. }
  2458. this.didCompleteReadingStream = true
  2459. // Set any blocks to be complete to allow `presentAssistantMessage`
  2460. // to finish and set `userMessageContentReady` to true.
  2461. // (Could be a text block that had no subsequent tool uses, or a
  2462. // text block at the very end, or an invalid tool use, etc. Whatever
2463. // the case, `presentAssistantMessage` relies on these blocks either
2464. // being completed or the user rejecting a block in order to proceed
2465. // and eventually set `userMessageContentReady` to true.)
  2466. const partialBlocks = this.assistantMessageContent.filter((block) => block.partial)
  2467. partialBlocks.forEach((block) => (block.partial = false))
  2468. // Can't just do this b/c a tool could be in the middle of executing.
  2469. // this.assistantMessageContent.forEach((e) => (e.partial = false))
  2470. // Now that the stream is complete, finalize any remaining partial content blocks (XML protocol only)
  2471. // Use the protocol determined at the start of streaming
  2472. if (shouldUseXmlParser && this.assistantMessageParser) {
  2473. this.assistantMessageParser.finalizeContentBlocks()
  2474. const parsedBlocks = this.assistantMessageParser.getContentBlocks()
  2475. // For XML protocol: Use only parsed blocks (includes both text and tool_use parsed from XML)
  2476. this.assistantMessageContent = parsedBlocks
  2477. }
  2478. // Only present partial blocks that were just completed (from XML parsing)
  2479. // Native tool blocks were already presented during streaming, so don't re-present them
  2480. if (partialBlocks.length > 0 && partialBlocks.some((block) => block.type !== "tool_use")) {
  2481. // If there is content to update then it will complete and
  2482. // update `this.userMessageContentReady` to true, which we
  2483. // `pWaitFor` before making the next request.
  2484. presentAssistantMessage(this)
  2485. }
  2486. // Note: updateApiReqMsg() is now called from within drainStreamInBackgroundToFindAllUsage
  2487. // to ensure usage data is captured even when the stream is interrupted. The background task
  2488. // uses local variables to accumulate usage data before atomically updating the shared state.
  2489. // Complete the reasoning message if it exists
  2490. // We can't use say() here because the reasoning message may not be the last message
  2491. // (other messages like text blocks or tool uses may have been added after it during streaming)
  2492. if (reasoningMessage) {
  2493. const lastReasoningIndex = findLastIndex(
  2494. this.clineMessages,
  2495. (m) => m.type === "say" && m.say === "reasoning",
  2496. )
  2497. if (lastReasoningIndex !== -1 && this.clineMessages[lastReasoningIndex].partial) {
  2498. this.clineMessages[lastReasoningIndex].partial = false
  2499. await this.updateClineMessage(this.clineMessages[lastReasoningIndex])
  2500. }
  2501. }
  2502. await this.saveClineMessages()
  2503. await this.providerRef.deref()?.postStateToWebview()
  2504. // Reset parser after each complete conversation round (XML protocol only)
  2505. this.assistantMessageParser?.reset()
  2506. // Now add to apiConversationHistory.
  2507. // Need to save assistant responses to file before proceeding to
  2508. // tool use since user can exit at any moment and we wouldn't be
  2509. // able to save the assistant's response.
  2510. let didEndLoop = false
  2511. // Check if we have any content to process (text or tool uses)
  2512. const hasTextContent = assistantMessage.length > 0
  2513. const hasToolUses = this.assistantMessageContent.some(
  2514. (block) => block.type === "tool_use" || block.type === "mcp_tool_use",
  2515. )
  2516. if (hasTextContent || hasToolUses) {
  2517. // Display grounding sources to the user if they exist
  2518. if (pendingGroundingSources.length > 0) {
  2519. const citationLinks = pendingGroundingSources.map((source, i) => `[${i + 1}](${source.url})`)
  2520. const sourcesText = `${t("common:gemini.sources")} ${citationLinks.join(", ")}`
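// Illustrative result (hypothetical URLs): "Sources: [1](https://example.com/a), [2](https://example.com/b)"
// where the prefix is the localized common:gemini.sources string (shown as "Sources:" for illustration).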
  2521. await this.say("text", sourcesText, undefined, false, undefined, undefined, {
  2522. isNonInteractive: true,
  2523. })
  2524. }
  2525. // Build the assistant message content array
  2526. const assistantContent: Array<Anthropic.TextBlockParam | Anthropic.ToolUseBlockParam> = []
  2527. // Add text content if present
  2528. if (assistantMessage) {
  2529. assistantContent.push({
  2530. type: "text" as const,
  2531. text: assistantMessage,
  2532. })
  2533. }
  2534. // Add tool_use blocks with their IDs for native protocol
  2535. // This handles both regular ToolUse and McpToolUse types
  2536. const toolUseBlocks = this.assistantMessageContent.filter(
  2537. (block) => block.type === "tool_use" || block.type === "mcp_tool_use",
  2538. )
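// Illustrative (hypothetical values) tool_use block pushed below for the native protocol:
//   { type: "tool_use", id: "call_abc123", name: "read_file", input: { path: "src/index.ts" } }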
  2539. for (const block of toolUseBlocks) {
  2540. if (block.type === "mcp_tool_use") {
  2541. // McpToolUse already has the original tool name (e.g., "mcp_serverName_toolName")
  2542. // The arguments are the raw tool arguments (matching the simplified schema)
  2543. const mcpBlock = block as import("../../shared/tools").McpToolUse
  2544. if (mcpBlock.id) {
  2545. assistantContent.push({
  2546. type: "tool_use" as const,
  2547. id: mcpBlock.id,
  2548. name: mcpBlock.name, // Original dynamic name
  2549. input: mcpBlock.arguments, // Direct tool arguments
  2550. })
  2551. }
  2552. } else {
  2553. // Regular ToolUse
  2554. const toolUse = block as import("../../shared/tools").ToolUse
  2555. const toolCallId = toolUse.id
  2556. if (toolCallId) {
  2557. // nativeArgs is already in the correct API format for all tools
  2558. const input = toolUse.nativeArgs || toolUse.params
  2559. assistantContent.push({
  2560. type: "tool_use" as const,
  2561. id: toolCallId,
  2562. name: toolUse.name,
  2563. input,
  2564. })
  2565. }
  2566. }
  2567. }
  2568. await this.addToApiConversationHistory(
  2569. {
  2570. role: "assistant",
  2571. content: assistantContent,
  2572. },
  2573. reasoningMessage || undefined,
  2574. )
  2575. TelemetryService.instance.captureConversationMessage(this.taskId, "assistant")
2576. // NOTE: This comment is here for future reference - this was a
2577. // workaround for `userMessageContentReady` not getting set to true.
2578. // It was due to it not recursively calling for partial blocks
2579. // when `didRejectTool`, so it would get stuck waiting for a
2580. // partial block to complete before it could continue.
2581. // If the content blocks have finished, it may be that the API stream
2582. // finished after the last parsed content block was executed, so
2583. // we are able to detect the out-of-bounds index and set
2584. // `userMessageContentReady` to true (note that you should not call
2585. // `presentAssistantMessage`, since if the last block is
2586. // completed it will be presented again).
  2587. // const completeBlocks = this.assistantMessageContent.filter((block) => !block.partial) // If there are any partial blocks after the stream ended we can consider them invalid.
  2588. // if (this.currentStreamingContentIndex >= completeBlocks.length) {
  2589. // this.userMessageContentReady = true
  2590. // }
  2591. await pWaitFor(() => this.userMessageContentReady)
2592. // If the model did not use a tool, then we need to tell it to
2593. // either use a tool or attempt_completion.
  2594. const didToolUse = this.assistantMessageContent.some(
  2595. (block) => block.type === "tool_use" || block.type === "mcp_tool_use",
  2596. )
  2597. if (!didToolUse) {
  2598. const modelInfo = this.api.getModel().info
  2599. const state = await this.providerRef.deref()?.getState()
  2600. const toolProtocol = resolveToolProtocol(this.apiConfiguration, modelInfo)
  2601. this.userMessageContent.push({ type: "text", text: formatResponse.noToolsUsed(toolProtocol) })
  2602. this.consecutiveMistakeCount++
  2603. }
  2604. if (this.userMessageContent.length > 0) {
  2605. stack.push({
  2606. userContent: [...this.userMessageContent], // Create a copy to avoid mutation issues
  2607. includeFileDetails: false, // Subsequent iterations don't need file details
  2608. })
  2609. // Add periodic yielding to prevent blocking
  2610. await new Promise((resolve) => setImmediate(resolve))
  2611. }
  2612. // Continue to next iteration instead of setting didEndLoop from recursive call
  2613. continue
  2614. } else {
2615. // If there are no assistant responses, that means we got no text
2616. // or tool_use content blocks from the API, which we should treat as
2617. // an error.
  2618. // IMPORTANT: For native tool protocol, we already added the user message to
  2619. // apiConversationHistory at line 1876. Since the assistant failed to respond,
  2620. // we need to remove that message before retrying to avoid having two consecutive
  2621. // user messages (which would cause tool_result validation errors).
  2622. let state = await this.providerRef.deref()?.getState()
  2623. if (
  2624. isNativeProtocol(resolveToolProtocol(this.apiConfiguration, this.api.getModel().info)) &&
  2625. this.apiConversationHistory.length > 0
  2626. ) {
  2627. const lastMessage = this.apiConversationHistory[this.apiConversationHistory.length - 1]
  2628. if (lastMessage.role === "user") {
  2629. // Remove the last user message that we added earlier
  2630. this.apiConversationHistory.pop()
  2631. }
  2632. }
  2633. // Check if we should auto-retry or prompt the user
  2634. // Reuse the state variable from above
  2635. if (state?.autoApprovalEnabled && state?.alwaysApproveResubmit) {
  2636. // Auto-retry with backoff - don't persist failure message when retrying
  2637. const errorMsg =
  2638. "Unexpected API Response: The language model did not provide any assistant messages. This may indicate an issue with the API or the model's output."
  2639. await this.backoffAndAnnounce(
  2640. currentItem.retryAttempt ?? 0,
  2641. new Error("Empty assistant response"),
  2642. errorMsg,
  2643. )
  2644. // Check if task was aborted during the backoff
  2645. if (this.abort) {
  2646. console.log(
  2647. `[Task#${this.taskId}.${this.instanceId}] Task aborted during empty-assistant retry backoff`,
  2648. )
  2649. break
  2650. }
  2651. // Push the same content back onto the stack to retry, incrementing the retry attempt counter
  2652. // Mark that user message was removed so it gets re-added on retry
  2653. stack.push({
  2654. userContent: currentUserContent,
  2655. includeFileDetails: false,
  2656. retryAttempt: (currentItem.retryAttempt ?? 0) + 1,
  2657. userMessageWasRemoved: true,
  2658. })
  2659. // Continue to retry the request
  2660. continue
  2661. } else {
  2662. // Prompt the user for retry decision
  2663. const { response } = await this.ask(
  2664. "api_req_failed",
  2665. "The model returned no assistant messages. This may indicate an issue with the API or the model's output.",
  2666. )
  2667. if (response === "yesButtonClicked") {
  2668. await this.say("api_req_retried")
  2669. // Push the same content back to retry
  2670. stack.push({
  2671. userContent: currentUserContent,
  2672. includeFileDetails: false,
  2673. retryAttempt: (currentItem.retryAttempt ?? 0) + 1,
  2674. })
  2675. // Continue to retry the request
  2676. continue
  2677. } else {
  2678. // User declined to retry
  2679. // For native protocol, re-add the user message we removed
  2680. // Reuse the state variable from above
  2681. if (
  2682. isNativeProtocol(resolveToolProtocol(this.apiConfiguration, this.api.getModel().info))
  2683. ) {
  2684. await this.addToApiConversationHistory({
  2685. role: "user",
  2686. content: currentUserContent,
  2687. })
  2688. }
  2689. await this.say(
  2690. "error",
  2691. "Unexpected API Response: The language model did not provide any assistant messages. This may indicate an issue with the API or the model's output.",
  2692. )
  2693. await this.addToApiConversationHistory({
  2694. role: "assistant",
  2695. content: [{ type: "text", text: "Failure: I did not provide a response." }],
  2696. })
  2697. }
  2698. }
  2699. }
  2700. // If we reach here without continuing, return false (will always be false for now)
  2701. return false
  2702. } catch (error) {
2703. // This should never happen, since the only thing that can throw an
2704. // error is attemptApiRequest, which is wrapped in a try/catch
2705. // that sends an ask; if noButtonClicked, it will clear the current
2706. // task and destroy this instance. However, to avoid an unhandled
2707. // promise rejection, we end this loop, which ends execution
2708. // of this instance (see `startTask`).
  2709. return true // Needs to be true so parent loop knows to end task.
  2710. }
  2711. }
  2712. // If we exit the while loop normally (stack is empty), return false
  2713. return false
  2714. }
  2715. private async getSystemPrompt(): Promise<string> {
  2716. const { mcpEnabled } = (await this.providerRef.deref()?.getState()) ?? {}
  2717. let mcpHub: McpHub | undefined
  2718. if (mcpEnabled ?? true) {
  2719. const provider = this.providerRef.deref()
  2720. if (!provider) {
  2721. throw new Error("Provider reference lost during view transition")
  2722. }
  2723. // Wait for MCP hub initialization through McpServerManager
  2724. mcpHub = await McpServerManager.getInstance(provider.context, provider)
  2725. if (!mcpHub) {
  2726. throw new Error("Failed to get MCP hub from server manager")
  2727. }
  2728. // Wait for MCP servers to be connected before generating system prompt
  2729. await pWaitFor(() => !mcpHub!.isConnecting, { timeout: 10_000 }).catch(() => {
  2730. console.error("MCP servers failed to connect in time")
  2731. })
  2732. }
  2733. const rooIgnoreInstructions = this.rooIgnoreController?.getInstructions()
  2734. const state = await this.providerRef.deref()?.getState()
  2735. const {
  2736. browserViewportSize,
  2737. mode,
  2738. customModes,
  2739. customModePrompts,
  2740. customInstructions,
  2741. experiments,
  2742. enableMcpServerCreation,
  2743. browserToolEnabled,
  2744. language,
  2745. maxConcurrentFileReads,
  2746. maxReadFileLine,
  2747. apiConfiguration,
  2748. } = state ?? {}
  2749. return await (async () => {
  2750. const provider = this.providerRef.deref()
  2751. if (!provider) {
  2752. throw new Error("Provider not available")
  2753. }
  2754. // Align browser tool enablement with generateSystemPrompt: require model image support,
  2755. // mode to include the browser group, and the user setting to be enabled.
  2756. const modeConfig = getModeBySlug(mode ?? defaultModeSlug, customModes)
  2757. const modeSupportsBrowser = modeConfig?.groups.some((group) => getGroupName(group) === "browser") ?? false
  2758. // Check if model supports browser capability (images)
  2759. const modelInfo = this.api.getModel().info
  2760. const modelSupportsBrowser = (modelInfo as any)?.supportsImages === true
  2761. const canUseBrowserTool = modelSupportsBrowser && modeSupportsBrowser && (browserToolEnabled ?? true)
  2762. // Resolve the tool protocol based on profile, model, and provider settings
  2763. const toolProtocol = resolveToolProtocol(apiConfiguration ?? this.apiConfiguration, modelInfo)
  2764. return SYSTEM_PROMPT(
  2765. provider.context,
  2766. this.cwd,
  2767. canUseBrowserTool,
  2768. mcpHub,
  2769. this.diffStrategy,
  2770. browserViewportSize ?? "900x600",
  2771. mode ?? defaultModeSlug,
  2772. customModePrompts,
  2773. customModes,
  2774. customInstructions,
  2775. this.diffEnabled,
  2776. experiments,
  2777. enableMcpServerCreation,
  2778. language,
  2779. rooIgnoreInstructions,
  2780. maxReadFileLine !== -1,
  2781. {
  2782. maxConcurrentFileReads: maxConcurrentFileReads ?? 5,
  2783. todoListEnabled: apiConfiguration?.todoListEnabled ?? true,
  2784. browserToolEnabled: browserToolEnabled ?? true,
  2785. useAgentRules:
  2786. vscode.workspace.getConfiguration(Package.name).get<boolean>("useAgentRules") ?? true,
  2787. newTaskRequireTodos: vscode.workspace
  2788. .getConfiguration(Package.name)
  2789. .get<boolean>("newTaskRequireTodos", false),
  2790. toolProtocol,
  2791. },
  2792. undefined, // todoList
  2793. this.api.getModel().id,
  2794. )
  2795. })()
  2796. }
  2797. private getCurrentProfileId(state: any): string {
  2798. return (
  2799. state?.listApiConfigMeta?.find((profile: any) => profile.name === state?.currentApiConfigName)?.id ??
  2800. "default"
  2801. )
  2802. }
  2803. private async handleContextWindowExceededError(): Promise<void> {
  2804. const state = await this.providerRef.deref()?.getState()
  2805. const { profileThresholds = {} } = state ?? {}
  2806. const { contextTokens } = this.getTokenUsage()
  2807. const modelInfo = this.api.getModel().info
  2808. const maxTokens = getModelMaxOutputTokens({
  2809. modelId: this.api.getModel().id,
  2810. model: modelInfo,
  2811. settings: this.apiConfiguration,
  2812. })
  2813. const contextWindow = modelInfo.contextWindow
  2814. // Get the current profile ID using the helper method
  2815. const currentProfileId = this.getCurrentProfileId(state)
  2816. // Log the context window error for debugging
  2817. console.warn(
  2818. `[Task#${this.taskId}] Context window exceeded for model ${this.api.getModel().id}. ` +
  2819. `Current tokens: ${contextTokens}, Context window: ${contextWindow}. ` +
  2820. `Forcing truncation to ${FORCED_CONTEXT_REDUCTION_PERCENT}% of current context.`,
  2821. )
  2822. // Determine if we're using native tool protocol for proper message handling
  2823. const protocol = resolveToolProtocol(this.apiConfiguration, modelInfo)
  2824. const useNativeTools = isNativeProtocol(protocol)
  2825. // Force aggressive truncation by keeping only 75% of the conversation history
  2826. const truncateResult = await manageContext({
  2827. messages: this.apiConversationHistory,
  2828. totalTokens: contextTokens || 0,
  2829. maxTokens,
  2830. contextWindow,
  2831. apiHandler: this.api,
  2832. autoCondenseContext: true,
  2833. autoCondenseContextPercent: FORCED_CONTEXT_REDUCTION_PERCENT,
  2834. systemPrompt: await this.getSystemPrompt(),
  2835. taskId: this.taskId,
  2836. profileThresholds,
  2837. currentProfileId,
  2838. useNativeTools,
  2839. })
  2840. if (truncateResult.messages !== this.apiConversationHistory) {
  2841. await this.overwriteApiConversationHistory(truncateResult.messages)
  2842. }
  2843. if (truncateResult.summary) {
  2844. const { summary, cost, prevContextTokens, newContextTokens = 0 } = truncateResult
  2845. const contextCondense: ContextCondense = { summary, cost, newContextTokens, prevContextTokens }
  2846. await this.say(
  2847. "condense_context",
  2848. undefined /* text */,
  2849. undefined /* images */,
  2850. false /* partial */,
  2851. undefined /* checkpoint */,
  2852. undefined /* progressStatus */,
  2853. { isNonInteractive: true } /* options */,
  2854. contextCondense,
  2855. )
  2856. }
  2857. }
  2858. public async *attemptApiRequest(retryAttempt: number = 0): ApiStream {
  2859. const state = await this.providerRef.deref()?.getState()
  2860. const {
  2861. apiConfiguration,
  2862. autoApprovalEnabled,
  2863. alwaysApproveResubmit,
  2864. requestDelaySeconds,
  2865. mode,
  2866. autoCondenseContext = true,
  2867. autoCondenseContextPercent = 100,
  2868. profileThresholds = {},
  2869. } = state ?? {}
  2870. // Get condensing configuration for automatic triggers.
  2871. const customCondensingPrompt = state?.customCondensingPrompt
  2872. const condensingApiConfigId = state?.condensingApiConfigId
  2873. const listApiConfigMeta = state?.listApiConfigMeta
  2874. // Determine API handler to use for condensing.
  2875. let condensingApiHandler: ApiHandler | undefined
  2876. if (condensingApiConfigId && listApiConfigMeta && Array.isArray(listApiConfigMeta)) {
  2877. // Find matching config by ID
  2878. const matchingConfig = listApiConfigMeta.find((config) => config.id === condensingApiConfigId)
  2879. if (matchingConfig) {
  2880. const profile = await this.providerRef.deref()?.providerSettingsManager.getProfile({
  2881. id: condensingApiConfigId,
  2882. })
  2883. // Ensure profile and apiProvider exist before trying to build handler.
  2884. if (profile && profile.apiProvider) {
  2885. condensingApiHandler = buildApiHandler(profile)
  2886. }
  2887. }
  2888. }
  2889. let rateLimitDelay = 0
  2890. // Use the shared timestamp so that subtasks respect the same rate-limit
  2891. // window as their parent tasks.
  2892. if (Task.lastGlobalApiRequestTime) {
  2893. const now = performance.now()
  2894. const timeSinceLastRequest = now - Task.lastGlobalApiRequestTime
  2895. const rateLimit = apiConfiguration?.rateLimitSeconds || 0
  2896. rateLimitDelay = Math.ceil(Math.min(rateLimit, Math.max(0, rateLimit * 1000 - timeSinceLastRequest) / 1000))
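// Worked example: with rateLimitSeconds = 10 and 6500ms elapsed since the last request,
// remaining = max(0, 10 * 1000 - 6500) / 1000 = 3.5s, so rateLimitDelay = ceil(min(10, 3.5)) = 4.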
  2897. }
  2898. // Only show rate limiting message if we're not retrying. If retrying, we'll include the delay there.
  2899. if (rateLimitDelay > 0 && retryAttempt === 0) {
  2900. // Show countdown timer
  2901. for (let i = rateLimitDelay; i > 0; i--) {
  2902. const delayMessage = `Rate limiting for ${i} seconds...`
  2903. await this.say("api_req_retry_delayed", delayMessage, undefined, true)
  2904. await delay(1000)
  2905. }
  2906. }
  2907. // Update last request time before making the request so that subsequent
  2908. // requests — even from new subtasks — will honour the provider's rate-limit.
  2909. Task.lastGlobalApiRequestTime = performance.now()
  2910. const systemPrompt = await this.getSystemPrompt()
  2911. const { contextTokens } = this.getTokenUsage()
  2912. if (contextTokens) {
  2913. const modelInfo = this.api.getModel().info
  2914. const maxTokens = getModelMaxOutputTokens({
  2915. modelId: this.api.getModel().id,
  2916. model: modelInfo,
  2917. settings: this.apiConfiguration,
  2918. })
  2919. const contextWindow = modelInfo.contextWindow
  2920. // Get the current profile ID using the helper method
  2921. const currentProfileId = this.getCurrentProfileId(state)
  2922. // Determine if we're using native tool protocol for proper message handling
  2923. const modelInfoForProtocol = this.api.getModel().info
  2924. const protocol = resolveToolProtocol(this.apiConfiguration, modelInfoForProtocol)
  2925. const useNativeTools = isNativeProtocol(protocol)
  2926. const truncateResult = await manageContext({
  2927. messages: this.apiConversationHistory,
  2928. totalTokens: contextTokens,
  2929. maxTokens,
  2930. contextWindow,
  2931. apiHandler: this.api,
  2932. autoCondenseContext,
  2933. autoCondenseContextPercent,
  2934. systemPrompt,
  2935. taskId: this.taskId,
  2936. customCondensingPrompt,
  2937. condensingApiHandler,
  2938. profileThresholds,
  2939. currentProfileId,
  2940. useNativeTools,
  2941. })
  2942. if (truncateResult.messages !== this.apiConversationHistory) {
  2943. await this.overwriteApiConversationHistory(truncateResult.messages)
  2944. }
  2945. if (truncateResult.error) {
  2946. await this.say("condense_context_error", truncateResult.error)
  2947. } else if (truncateResult.summary) {
  2948. const { summary, cost, prevContextTokens, newContextTokens = 0 } = truncateResult
  2949. const contextCondense: ContextCondense = { summary, cost, newContextTokens, prevContextTokens }
  2950. await this.say(
  2951. "condense_context",
  2952. undefined /* text */,
  2953. undefined /* images */,
  2954. false /* partial */,
  2955. undefined /* checkpoint */,
  2956. undefined /* progressStatus */,
  2957. { isNonInteractive: true } /* options */,
  2958. contextCondense,
  2959. )
  2960. }
  2961. }
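// The next three steps shape the outbound history: keep only messages since the last condense
// summary, strip image blocks for handlers/models that can't accept them (roughly what
// maybeRemoveImageBlocks does), then normalize reasoning blocks into the wire format via
// buildCleanConversationHistory (defined further below).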
  2962. const messagesSinceLastSummary = getMessagesSinceLastSummary(this.apiConversationHistory)
  2963. const messagesWithoutImages = maybeRemoveImageBlocks(messagesSinceLastSummary, this.api)
  2964. const cleanConversationHistory = this.buildCleanConversationHistory(messagesWithoutImages as ApiMessage[])
  2965. // Check auto-approval limits
  2966. const approvalResult = await this.autoApprovalHandler.checkAutoApprovalLimits(
  2967. state,
  2968. this.combineMessages(this.clineMessages.slice(1)),
  2969. async (type, data) => this.ask(type, data),
  2970. )
  2971. if (!approvalResult.shouldProceed) {
  2972. // User did not approve, task should be aborted
  2973. throw new Error("Auto-approval limit reached and user did not approve continuation")
  2974. }
  2975. // Determine if we should include native tools based on:
  2976. // 1. Tool protocol is set to NATIVE
  2977. // 2. Model supports native tools
  2978. const modelInfo = this.api.getModel().info
  2979. const toolProtocol = resolveToolProtocol(this.apiConfiguration, modelInfo)
  2980. const shouldIncludeTools = toolProtocol === TOOL_PROTOCOL.NATIVE && (modelInfo.supportsNativeTools ?? false)
  2981. // Build complete tools array: native tools + dynamic MCP tools, filtered by mode restrictions
  2982. let allTools: OpenAI.Chat.ChatCompletionTool[] = []
  2983. if (shouldIncludeTools) {
  2984. const provider = this.providerRef.deref()
  2985. if (!provider) {
  2986. throw new Error("Provider reference lost during tool building")
  2987. }
  2988. allTools = await buildNativeToolsArray({
  2989. provider,
  2990. cwd: this.cwd,
  2991. mode,
  2992. customModes: state?.customModes,
  2993. experiments: state?.experiments,
  2994. apiConfiguration,
  2995. maxReadFileLine: state?.maxReadFileLine ?? -1,
  2996. browserToolEnabled: state?.browserToolEnabled ?? true,
  2997. })
  2998. }
  2999. // Resolve parallel tool calls setting from experiment (will move to per-API-profile setting later)
  3000. const parallelToolCallsEnabled = experiments.isEnabled(
  3001. state?.experiments ?? {},
  3002. EXPERIMENT_IDS.MULTIPLE_NATIVE_TOOL_CALLS,
  3003. )
  3004. const metadata: ApiHandlerCreateMessageMetadata = {
  3005. mode: mode,
  3006. taskId: this.taskId,
  3007. // Include tools and tool protocol when using native protocol and model supports it
  3008. ...(shouldIncludeTools
  3009. ? { tools: allTools, tool_choice: "auto", toolProtocol, parallelToolCalls: parallelToolCallsEnabled }
  3010. : {}),
  3011. }
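// Illustrative (hypothetical values) metadata when native tools are included:
//   { mode: "code", taskId: "task-123", tools: [...], tool_choice: "auto",
//     toolProtocol: TOOL_PROTOCOL.NATIVE, parallelToolCalls: false }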
  3012. // Create an AbortController to allow cancelling the request mid-stream
  3013. this.currentRequestAbortController = new AbortController()
  3014. const abortSignal = this.currentRequestAbortController.signal
  3015. // The provider accepts reasoning items alongside standard messages; cast to the expected parameter type.
  3016. const stream = this.api.createMessage(
  3017. systemPrompt,
  3018. cleanConversationHistory as unknown as Anthropic.Messages.MessageParam[],
  3019. metadata,
  3020. )
  3021. const iterator = stream[Symbol.asyncIterator]()
  3022. // Set up abort handling - when the signal is aborted, clean up the controller reference
  3023. abortSignal.addEventListener("abort", () => {
  3024. console.log(`[Task#${this.taskId}.${this.instanceId}] AbortSignal triggered for current request`)
  3025. this.currentRequestAbortController = undefined
  3026. })
  3027. try {
  3028. // Awaiting first chunk to see if it will throw an error.
  3029. this.isWaitingForFirstChunk = true
  3030. // Race between the first chunk and the abort signal
  3031. const firstChunkPromise = iterator.next()
  3032. const abortPromise = new Promise<never>((_, reject) => {
  3033. if (abortSignal.aborted) {
  3034. reject(new Error("Request cancelled by user"))
  3035. } else {
  3036. abortSignal.addEventListener("abort", () => {
  3037. reject(new Error("Request cancelled by user"))
  3038. })
  3039. }
  3040. })
  3041. const firstChunk = await Promise.race([firstChunkPromise, abortPromise])
  3042. yield firstChunk.value
  3043. this.isWaitingForFirstChunk = false
  3044. } catch (error) {
  3045. this.isWaitingForFirstChunk = false
  3046. this.currentRequestAbortController = undefined
  3047. const isContextWindowExceededError = checkContextWindowExceededError(error)
  3048. // If it's a context window error and we haven't exceeded max retries for this error type
  3049. if (isContextWindowExceededError && retryAttempt < MAX_CONTEXT_WINDOW_RETRIES) {
  3050. console.warn(
  3051. `[Task#${this.taskId}] Context window exceeded for model ${this.api.getModel().id}. ` +
  3052. `Retry attempt ${retryAttempt + 1}/${MAX_CONTEXT_WINDOW_RETRIES}. ` +
  3053. `Attempting automatic truncation...`,
  3054. )
  3055. await this.handleContextWindowExceededError()
  3056. // Retry the request after handling the context window error
  3057. yield* this.attemptApiRequest(retryAttempt + 1)
  3058. return
  3059. }
3060. // Note that this api_req_failed ask is unique in that we only present this option if the API hasn't streamed any content yet (i.e. it fails on the first chunk), since that allows the user to hit a retry button. However, if the API failed mid-stream, it could be in any arbitrary state where some tools may have executed, so that error is handled differently and requires cancelling the task entirely.
  3061. if (autoApprovalEnabled && alwaysApproveResubmit) {
  3062. let errorMsg
  3063. if (error.error?.metadata?.raw) {
  3064. errorMsg = JSON.stringify(error.error.metadata.raw, null, 2)
  3065. } else if (error.message) {
  3066. errorMsg = error.message
  3067. } else {
  3068. errorMsg = "Unknown error"
  3069. }
  3070. // Apply shared exponential backoff and countdown UX
  3071. await this.backoffAndAnnounce(retryAttempt, error, errorMsg)
  3072. // CRITICAL: Check if task was aborted during the backoff countdown
  3073. // This prevents infinite loops when users cancel during auto-retry
  3074. // Without this check, the recursive call below would continue even after abort
  3075. if (this.abort) {
  3076. throw new Error(
  3077. `[Task#attemptApiRequest] task ${this.taskId}.${this.instanceId} aborted during retry`,
  3078. )
  3079. }
  3080. // Delegate generator output from the recursive call with
  3081. // incremented retry count.
  3082. yield* this.attemptApiRequest(retryAttempt + 1)
  3083. return
  3084. } else {
  3085. const { response } = await this.ask(
  3086. "api_req_failed",
  3087. error.message ?? JSON.stringify(serializeError(error), null, 2),
  3088. )
  3089. if (response !== "yesButtonClicked") {
3090. // This will never happen, since if noButtonClicked we will
3091. // clear the current task, aborting this instance.
  3092. throw new Error("API request failed")
  3093. }
  3094. await this.say("api_req_retried")
  3095. // Delegate generator output from the recursive call.
  3096. yield* this.attemptApiRequest()
  3097. return
  3098. }
  3099. }
  3100. // No error, so we can continue to yield all remaining chunks.
3101. // (Needs to be placed outside of the try/catch since we want the caller to
3102. // handle errors, not api_req_failed, as that is reserved for
3103. // first-chunk failures only.)
  3104. // This delegates to another generator or iterable object. In this case,
  3105. // it's saying "yield all remaining values from this iterator". This
  3106. // effectively passes along all subsequent chunks from the original
  3107. // stream.
  3108. yield* iterator
  3109. }
  3110. // Shared exponential backoff for retries (first-chunk and mid-stream)
  3111. private async backoffAndAnnounce(retryAttempt: number, error: any, header?: string): Promise<void> {
  3112. try {
  3113. const state = await this.providerRef.deref()?.getState()
  3114. const baseDelay = state?.requestDelaySeconds || 5
  3115. let exponentialDelay = Math.min(
  3116. Math.ceil(baseDelay * Math.pow(2, retryAttempt)),
  3117. MAX_EXPONENTIAL_BACKOFF_SECONDS,
  3118. )
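// Worked example: with requestDelaySeconds = 5 and retryAttempt = 3,
// exponentialDelay = min(ceil(5 * 2^3), MAX_EXPONENTIAL_BACKOFF_SECONDS) = 40s
// (assuming the cap is at least 40).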
  3119. // Respect provider rate limit window
  3120. let rateLimitDelay = 0
  3121. const rateLimit = state?.apiConfiguration?.rateLimitSeconds || 0
  3122. if (Task.lastGlobalApiRequestTime && rateLimit > 0) {
  3123. const elapsed = performance.now() - Task.lastGlobalApiRequestTime
  3124. rateLimitDelay = Math.ceil(Math.min(rateLimit, Math.max(0, rateLimit * 1000 - elapsed) / 1000))
  3125. }
  3126. // Prefer RetryInfo on 429 if present
  3127. if (error?.status === 429) {
  3128. const retryInfo = error?.errorDetails?.find(
  3129. (d: any) => d["@type"] === "type.googleapis.com/google.rpc.RetryInfo",
  3130. )
  3131. const match = retryInfo?.retryDelay?.match?.(/^(\d+)s$/)
  3132. if (match) {
  3133. exponentialDelay = Number(match[1]) + 1
  3134. }
  3135. }
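// Illustrative (hypothetical) RetryInfo detail honoured above:
//   { "@type": "type.googleapis.com/google.rpc.RetryInfo", retryDelay: "14s" }
//   -> exponentialDelay becomes 14 + 1 = 15 seconds.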
  3136. const finalDelay = Math.max(exponentialDelay, rateLimitDelay)
  3137. if (finalDelay <= 0) return
  3138. // Build header text; fall back to error message if none provided
  3139. let headerText = header
  3140. if (!headerText) {
  3141. if (error?.error?.metadata?.raw) {
  3142. headerText = JSON.stringify(error.error.metadata.raw, null, 2)
  3143. } else if (error?.message) {
  3144. headerText = error.message
  3145. } else {
  3146. headerText = "Unknown error"
  3147. }
  3148. }
  3149. headerText = headerText ? `${headerText}\n\n` : ""
  3150. // Show countdown timer with exponential backoff
  3151. for (let i = finalDelay; i > 0; i--) {
  3152. // Check abort flag during countdown to allow early exit
  3153. if (this.abort) {
  3154. throw new Error(`[Task#${this.taskId}] Aborted during retry countdown`)
  3155. }
  3156. await this.say(
  3157. "api_req_retry_delayed",
  3158. `${headerText}Retry attempt ${retryAttempt + 1}\nRetrying in ${i} seconds...`,
  3159. undefined,
  3160. true,
  3161. )
  3162. await delay(1000)
  3163. }
  3164. await this.say(
  3165. "api_req_retry_delayed",
  3166. `${headerText}Retry attempt ${retryAttempt + 1}\nRetrying now...`,
  3167. undefined,
  3168. false,
  3169. )
  3170. } catch (err) {
  3171. console.error("Exponential backoff failed:", err)
  3172. }
  3173. }
  3174. // Checkpoints
  3175. public async checkpointSave(force: boolean = false, suppressMessage: boolean = false) {
  3176. return checkpointSave(this, force, suppressMessage)
  3177. }
  3178. private buildCleanConversationHistory(
  3179. messages: ApiMessage[],
  3180. ): Array<
  3181. Anthropic.Messages.MessageParam | { type: "reasoning"; encrypted_content: string; id?: string; summary?: any[] }
  3182. > {
  3183. type ReasoningItemForRequest = {
  3184. type: "reasoning"
  3185. encrypted_content: string
  3186. id?: string
  3187. summary?: any[]
  3188. }
  3189. const cleanConversationHistory: (Anthropic.Messages.MessageParam | ReasoningItemForRequest)[] = []
  3190. for (const msg of messages) {
  3191. // Standalone reasoning: send encrypted, skip plain text
  3192. if (msg.type === "reasoning") {
  3193. if (msg.encrypted_content) {
  3194. cleanConversationHistory.push({
  3195. type: "reasoning",
  3196. summary: msg.summary,
  3197. encrypted_content: msg.encrypted_content!,
  3198. ...(msg.id ? { id: msg.id } : {}),
  3199. })
  3200. }
  3201. continue
  3202. }
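// Illustrative (hypothetical values) standalone reasoning entry and how it is forwarded:
//   { type: "reasoning", encrypted_content: "gAAAA...", id: "rs_123", summary: [] }
//   is pushed as a reasoning item; entries without encrypted_content are skipped here.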
  3203. // Preferred path: assistant message with embedded reasoning as first content block
  3204. if (msg.role === "assistant") {
  3205. const rawContent = msg.content
  3206. const contentArray: Anthropic.Messages.ContentBlockParam[] = Array.isArray(rawContent)
  3207. ? (rawContent as Anthropic.Messages.ContentBlockParam[])
  3208. : rawContent !== undefined
  3209. ? ([
  3210. { type: "text", text: rawContent } satisfies Anthropic.Messages.TextBlockParam,
  3211. ] as Anthropic.Messages.ContentBlockParam[])
  3212. : []
  3213. const [first, ...rest] = contentArray
  3214. // Check if this message has reasoning_details (OpenRouter format for Gemini 3, etc.)
  3215. const msgWithDetails = msg
  3216. if (msgWithDetails.reasoning_details && Array.isArray(msgWithDetails.reasoning_details)) {
  3217. // Build the assistant message with reasoning_details
  3218. let assistantContent: Anthropic.Messages.MessageParam["content"]
  3219. if (contentArray.length === 0) {
  3220. assistantContent = ""
  3221. } else if (contentArray.length === 1 && contentArray[0].type === "text") {
  3222. assistantContent = (contentArray[0] as Anthropic.Messages.TextBlockParam).text
  3223. } else {
  3224. assistantContent = contentArray
  3225. }
  3226. // Create message with reasoning_details property
  3227. cleanConversationHistory.push({
  3228. role: "assistant",
  3229. content: assistantContent,
  3230. reasoning_details: msgWithDetails.reasoning_details,
  3231. } as any)
  3232. continue
  3233. }
  3234. // Embedded reasoning: encrypted (send) or plain text (skip)
  3235. const hasEncryptedReasoning =
  3236. first && (first as any).type === "reasoning" && typeof (first as any).encrypted_content === "string"
  3237. const hasPlainTextReasoning =
  3238. first && (first as any).type === "reasoning" && typeof (first as any).text === "string"
  3239. if (hasEncryptedReasoning) {
  3240. const reasoningBlock = first as any
  3241. // Send as separate reasoning item (OpenAI Native)
  3242. cleanConversationHistory.push({
  3243. type: "reasoning",
  3244. summary: reasoningBlock.summary ?? [],
  3245. encrypted_content: reasoningBlock.encrypted_content,
  3246. ...(reasoningBlock.id ? { id: reasoningBlock.id } : {}),
  3247. })
  3248. // Send assistant message without reasoning
  3249. let assistantContent: Anthropic.Messages.MessageParam["content"]
  3250. if (rest.length === 0) {
  3251. assistantContent = ""
  3252. } else if (rest.length === 1 && rest[0].type === "text") {
  3253. assistantContent = (rest[0] as Anthropic.Messages.TextBlockParam).text
  3254. } else {
  3255. assistantContent = rest
  3256. }
  3257. cleanConversationHistory.push({
  3258. role: "assistant",
  3259. content: assistantContent,
  3260. } satisfies Anthropic.Messages.MessageParam)
  3261. continue
  3262. } else if (hasPlainTextReasoning) {
  3263. // Check if the model's preserveReasoning flag is set
  3264. // If true, include the reasoning block in API requests
  3265. // If false/undefined, strip it out (stored for history only, not sent back to API)
  3266. const shouldPreserveForApi = this.api.getModel().info.preserveReasoning === true
  3267. let assistantContent: Anthropic.Messages.MessageParam["content"]
  3268. if (shouldPreserveForApi) {
  3269. // Include reasoning block in the content sent to API
  3270. assistantContent = contentArray
  3271. } else {
  3272. // Strip reasoning out - stored for history only, not sent back to API
  3273. if (rest.length === 0) {
  3274. assistantContent = ""
  3275. } else if (rest.length === 1 && rest[0].type === "text") {
  3276. assistantContent = (rest[0] as Anthropic.Messages.TextBlockParam).text
  3277. } else {
  3278. assistantContent = rest
  3279. }
  3280. }
  3281. cleanConversationHistory.push({
  3282. role: "assistant",
  3283. content: assistantContent,
  3284. } satisfies Anthropic.Messages.MessageParam)
  3285. continue
  3286. }
  3287. }
  3288. // Default path for regular messages (no embedded reasoning)
  3289. if (msg.role) {
  3290. cleanConversationHistory.push({
  3291. role: msg.role,
  3292. content: msg.content as Anthropic.Messages.ContentBlockParam[] | string,
  3293. })
  3294. }
  3295. }
  3296. return cleanConversationHistory
  3297. }
  3298. public async checkpointRestore(options: CheckpointRestoreOptions) {
  3299. return checkpointRestore(this, options)
  3300. }
  3301. public async checkpointDiff(options: CheckpointDiffOptions) {
  3302. return checkpointDiff(this, options)
  3303. }
  3304. // Metrics
  3305. public combineMessages(messages: ClineMessage[]) {
  3306. return combineApiRequests(combineCommandSequences(messages))
  3307. }
  3308. public getTokenUsage(): TokenUsage {
  3309. return getApiMetrics(this.combineMessages(this.clineMessages.slice(1)))
  3310. }
  3311. public recordToolUsage(toolName: ToolName) {
  3312. if (!this.toolUsage[toolName]) {
  3313. this.toolUsage[toolName] = { attempts: 0, failures: 0 }
  3314. }
  3315. this.toolUsage[toolName].attempts++
  3316. }
  3317. public recordToolError(toolName: ToolName, error?: string) {
  3318. if (!this.toolUsage[toolName]) {
  3319. this.toolUsage[toolName] = { attempts: 0, failures: 0 }
  3320. }
  3321. this.toolUsage[toolName].failures++
  3322. if (error) {
  3323. this.emit(RooCodeEventName.TaskToolFailed, this.taskId, toolName, error)
  3324. }
  3325. }
  3326. // Getters
  3327. public get taskStatus(): TaskStatus {
  3328. if (this.interactiveAsk) {
  3329. return TaskStatus.Interactive
  3330. }
  3331. if (this.resumableAsk) {
  3332. return TaskStatus.Resumable
  3333. }
  3334. if (this.idleAsk) {
  3335. return TaskStatus.Idle
  3336. }
  3337. return TaskStatus.Running
  3338. }
  3339. public get taskAsk(): ClineMessage | undefined {
  3340. return this.idleAsk || this.resumableAsk || this.interactiveAsk
  3341. }
  3342. public get queuedMessages(): QueuedMessage[] {
  3343. return this.messageQueueService.messages
  3344. }
  3345. public get tokenUsage(): TokenUsage | undefined {
  3346. if (this.tokenUsageSnapshot && this.tokenUsageSnapshotAt) {
  3347. return this.tokenUsageSnapshot
  3348. }
  3349. this.tokenUsageSnapshot = this.getTokenUsage()
  3350. this.tokenUsageSnapshotAt = this.clineMessages.at(-1)?.ts
  3351. return this.tokenUsageSnapshot
  3352. }
  3353. public get cwd() {
  3354. return this.workspacePath
  3355. }
  3356. /**
  3357. * Broadcast browser session updates to the browser panel (if open)
  3358. */
  3359. private broadcastBrowserSessionUpdate(): void {
  3360. const provider = this.providerRef.deref()
  3361. if (!provider) {
  3362. return
  3363. }
  3364. try {
  3365. const { BrowserSessionPanelManager } = require("../webview/BrowserSessionPanelManager")
  3366. const panelManager = BrowserSessionPanelManager.getInstance(provider)
  3367. // Get browser session messages
  3368. const browserSessionStartIndex = this.clineMessages.findIndex(
  3369. (m) =>
  3370. m.ask === "browser_action_launch" ||
  3371. (m.say === "browser_session_status" && m.text?.includes("opened")),
  3372. )
  3373. const browserSessionMessages =
  3374. browserSessionStartIndex !== -1 ? this.clineMessages.slice(browserSessionStartIndex) : []
  3375. const isBrowserSessionActive = this.browserSession?.isSessionActive() ?? false
  3376. // Update the panel asynchronously
  3377. panelManager.updateBrowserSession(browserSessionMessages, isBrowserSessionActive).catch((error: Error) => {
  3378. console.error("Failed to broadcast browser session update:", error)
  3379. })
  3380. } catch (error) {
  3381. // Silently fail if panel manager is not available
  3382. console.debug("Browser panel not available for update:", error)
  3383. }
  3384. }
  3385. /**
  3386. * Process any queued messages by dequeuing and submitting them.
  3387. * This ensures that queued user messages are sent when appropriate,
  3388. * preventing them from getting stuck in the queue.
  3391. */
  3392. public processQueuedMessages(): void {
  3393. try {
  3394. if (!this.messageQueueService.isEmpty()) {
  3395. const queued = this.messageQueueService.dequeueMessage()
  3396. if (queued) {
  3397. setTimeout(() => {
  3398. this.submitUserMessage(queued.text, queued.images).catch((err) =>
  3399. console.error(`[Task] Failed to submit queued message:`, err),
  3400. )
  3401. }, 0)
  3402. }
  3403. }
  3404. } catch (e) {
  3405. console.error(`[Task] Queue processing error:`, e)
  3406. }
  3407. }
  3408. }