// Task.ts
  1. import * as path from "path"
  2. import * as vscode from "vscode"
  3. import os from "os"
  4. import crypto from "crypto"
  5. import EventEmitter from "events"
  6. import { Anthropic } from "@anthropic-ai/sdk"
  7. import OpenAI from "openai"
  8. import delay from "delay"
  9. import pWaitFor from "p-wait-for"
  10. import { serializeError } from "serialize-error"
  11. import { Package } from "../../shared/package"
  12. import { formatToolInvocation } from "../tools/helpers/toolResultFormatting"
  13. import {
  14. type TaskLike,
  15. type TaskMetadata,
  16. type TaskEvents,
  17. type ProviderSettings,
  18. type TokenUsage,
  19. type ToolUsage,
  20. type ToolName,
  21. type ContextCondense,
  22. type ClineMessage,
  23. type ClineSay,
  24. type ClineAsk,
  25. type ToolProgressStatus,
  26. type HistoryItem,
  27. type CreateTaskOptions,
  28. type ModelInfo,
  29. RooCodeEventName,
  30. TelemetryEventName,
  31. TaskStatus,
  32. TodoItem,
  33. getApiProtocol,
  34. getModelId,
  35. isIdleAsk,
  36. isInteractiveAsk,
  37. isResumableAsk,
  38. isNativeProtocol,
  39. QueuedMessage,
  40. DEFAULT_CONSECUTIVE_MISTAKE_LIMIT,
  41. DEFAULT_CHECKPOINT_TIMEOUT_SECONDS,
  42. MAX_CHECKPOINT_TIMEOUT_SECONDS,
  43. MIN_CHECKPOINT_TIMEOUT_SECONDS,
  44. TOOL_PROTOCOL,
  45. } from "@roo-code/types"
  46. import { TelemetryService } from "@roo-code/telemetry"
  47. import { CloudService, BridgeOrchestrator } from "@roo-code/cloud"
  48. import { resolveToolProtocol } from "../../utils/resolveToolProtocol"
  49. // api
  50. import { ApiHandler, ApiHandlerCreateMessageMetadata, buildApiHandler } from "../../api"
  51. import { ApiStream, GroundingSource } from "../../api/transform/stream"
  52. import { maybeRemoveImageBlocks } from "../../api/transform/image-cleaning"
  53. // shared
  54. import { findLastIndex } from "../../shared/array"
  55. import { combineApiRequests } from "../../shared/combineApiRequests"
  56. import { combineCommandSequences } from "../../shared/combineCommandSequences"
  57. import { t } from "../../i18n"
  58. import { ClineApiReqCancelReason, ClineApiReqInfo } from "../../shared/ExtensionMessage"
  59. import { getApiMetrics, hasTokenUsageChanged } from "../../shared/getApiMetrics"
  60. import { ClineAskResponse } from "../../shared/WebviewMessage"
  61. import { defaultModeSlug, getModeBySlug, getGroupName } from "../../shared/modes"
  62. import { DiffStrategy, type ToolUse, type ToolParamName, toolParamNames } from "../../shared/tools"
  63. import { EXPERIMENT_IDS, experiments } from "../../shared/experiments"
  64. import { getModelMaxOutputTokens } from "../../shared/api"
  65. // services
  66. import { UrlContentFetcher } from "../../services/browser/UrlContentFetcher"
  67. import { BrowserSession } from "../../services/browser/BrowserSession"
  68. import { McpHub } from "../../services/mcp/McpHub"
  69. import { McpServerManager } from "../../services/mcp/McpServerManager"
  70. import { RepoPerTaskCheckpointService } from "../../services/checkpoints"
  71. // integrations
  72. import { DiffViewProvider } from "../../integrations/editor/DiffViewProvider"
  73. import { findToolName } from "../../integrations/misc/export-markdown"
  74. import { RooTerminalProcess } from "../../integrations/terminal/types"
  75. import { TerminalRegistry } from "../../integrations/terminal/TerminalRegistry"
  76. // utils
  77. import { calculateApiCostAnthropic, calculateApiCostOpenAI } from "../../shared/cost"
  78. import { getWorkspacePath } from "../../utils/path"
  79. // prompts
  80. import { formatResponse } from "../prompts/responses"
  81. import { SYSTEM_PROMPT } from "../prompts/system"
  82. import { buildNativeToolsArray } from "./build-tools"
  83. // core modules
  84. import { ToolRepetitionDetector } from "../tools/ToolRepetitionDetector"
  85. import { restoreTodoListForTask } from "../tools/UpdateTodoListTool"
  86. import { FileContextTracker } from "../context-tracking/FileContextTracker"
  87. import { RooIgnoreController } from "../ignore/RooIgnoreController"
  88. import { RooProtectedController } from "../protect/RooProtectedController"
  89. import { type AssistantMessageContent, presentAssistantMessage } from "../assistant-message"
  90. import { AssistantMessageParser } from "../assistant-message/AssistantMessageParser"
  91. import { NativeToolCallParser } from "../assistant-message/NativeToolCallParser"
  92. import { manageContext } from "../context-management"
  93. import { ClineProvider } from "../webview/ClineProvider"
  94. import { MultiSearchReplaceDiffStrategy } from "../diff/strategies/multi-search-replace"
  95. import { MultiFileSearchReplaceDiffStrategy } from "../diff/strategies/multi-file-search-replace"
  96. import {
  97. type ApiMessage,
  98. readApiMessages,
  99. saveApiMessages,
  100. readTaskMessages,
  101. saveTaskMessages,
  102. taskMetadata,
  103. } from "../task-persistence"
  104. import { getEnvironmentDetails } from "../environment/getEnvironmentDetails"
  105. import { checkContextWindowExceededError } from "../context/context-management/context-error-handling"
  106. import {
  107. type CheckpointDiffOptions,
  108. type CheckpointRestoreOptions,
  109. getCheckpointService,
  110. checkpointSave,
  111. checkpointRestore,
  112. checkpointDiff,
  113. } from "../checkpoints"
  114. import { processUserContentMentions } from "../mentions/processUserContentMentions"
  115. import { getMessagesSinceLastSummary, summarizeConversation } from "../condense"
  116. import { MessageQueueService } from "../message-queue/MessageQueueService"
  117. import { AutoApprovalHandler, checkAutoApproval } from "../auto-approval"
  118. const MAX_EXPONENTIAL_BACKOFF_SECONDS = 600 // 10 minutes
  119. const DEFAULT_USAGE_COLLECTION_TIMEOUT_MS = 5000 // 5 seconds
  120. const FORCED_CONTEXT_REDUCTION_PERCENT = 75 // Keep 75% of context (remove 25%) on context window errors
  121. const MAX_CONTEXT_WINDOW_RETRIES = 3 // Maximum retries for context window errors
/**
 * Options accepted by the `Task` constructor.
 *
 * Extends the shared `CreateTaskOptions` with extension-host specifics:
 * the owning provider, API configuration, feature flags, and sub-task wiring.
 * When `startTask` is true, either `historyItem` or `task`/`images` must be
 * provided (the constructor throws otherwise).
 */
export interface TaskOptions extends CreateTaskOptions {
	/** Owning webview provider; the task holds it via a WeakRef. */
	provider: ClineProvider
	/** Provider settings used to build this task's API handler. */
	apiConfiguration: ProviderSettings
	/** Enables the diff view / diff strategies. Defaults to false. */
	enableDiff?: boolean
	/** Enables checkpoint snapshots. Defaults to true. */
	enableCheckpoints?: boolean
	/**
	 * Checkpoint timeout in seconds. Defaults to DEFAULT_CHECKPOINT_TIMEOUT_SECONDS;
	 * must lie within [MIN_CHECKPOINT_TIMEOUT_SECONDS, MAX_CHECKPOINT_TIMEOUT_SECONDS]
	 * or the constructor throws.
	 */
	checkpointTimeout?: number
	/** Enables the cloud task bridge. Defaults to false. */
	enableBridge?: boolean
	/** Fuzzy-match threshold for diff strategies. Defaults to 1.0 (exact match). */
	fuzzyMatchThreshold?: number
	/** Consecutive-mistake cap before intervention. Defaults to DEFAULT_CONSECUTIVE_MISTAKE_LIMIT. */
	consecutiveMistakeLimit?: number
	/** Initial task prompt text (for new tasks; ignored when resuming from historyItem). */
	task?: string
	/** Initial image attachments (for new tasks; ignored when resuming from historyItem). */
	images?: string[]
	/** When set, the task is reconstructed from this persisted history entry. */
	historyItem?: HistoryItem
	/** Experiment flags keyed by experiment id. */
	experiments?: Record<string, boolean>
	/** Whether to begin running immediately after construction. Defaults to true. */
	startTask?: boolean
	/** Root task of the sub-task chain, if this is a nested task. */
	rootTask?: Task
	/** Direct parent task, if this is a sub-task; its workspacePath is inherited. */
	parentTask?: Task
	/** Ordinal of this task within its chain. Defaults to -1 (unset). */
	taskNumber?: number
	/** Callback invoked with the new instance once constructed. */
	onCreated?: (task: Task) => void
	/** Todo items to seed the task's todo list with. */
	initialTodos?: TodoItem[]
	/** Working directory; falls back to the parent task's path, then the detected workspace. */
	workspacePath?: string
}
  143. export class Task extends EventEmitter<TaskEvents> implements TaskLike {
  144. readonly taskId: string
  145. readonly rootTaskId?: string
  146. readonly parentTaskId?: string
  147. childTaskId?: string
  148. pendingNewTaskToolCallId?: string
  149. readonly instanceId: string
  150. readonly metadata: TaskMetadata
  151. todoList?: TodoItem[]
  152. readonly rootTask: Task | undefined = undefined
  153. readonly parentTask: Task | undefined = undefined
  154. readonly taskNumber: number
  155. readonly workspacePath: string
  156. /**
  157. * The mode associated with this task. Persisted across sessions
  158. * to maintain user context when reopening tasks from history.
  159. *
  160. * ## Lifecycle
  161. *
  162. * ### For new tasks:
  163. * 1. Initially `undefined` during construction
  164. * 2. Asynchronously initialized from provider state via `initializeTaskMode()`
  165. * 3. Falls back to `defaultModeSlug` if provider state is unavailable
  166. *
  167. * ### For history items:
  168. * 1. Immediately set from `historyItem.mode` during construction
  169. * 2. Falls back to `defaultModeSlug` if mode is not stored in history
  170. *
  171. * ## Important
  172. * This property should NOT be accessed directly until `taskModeReady` promise resolves.
  173. * Use `getTaskMode()` for async access or `taskMode` getter for sync access after initialization.
  174. *
  175. * @private
  176. * @see {@link getTaskMode} - For safe async access
  177. * @see {@link taskMode} - For sync access after initialization
  178. * @see {@link waitForModeInitialization} - To ensure initialization is complete
  179. */
  180. private _taskMode: string | undefined
  181. /**
  182. * Promise that resolves when the task mode has been initialized.
  183. * This ensures async mode initialization completes before the task is used.
  184. *
  185. * ## Purpose
  186. * - Prevents race conditions when accessing task mode
  187. * - Ensures provider state is properly loaded before mode-dependent operations
  188. * - Provides a synchronization point for async initialization
  189. *
  190. * ## Resolution timing
  191. * - For history items: Resolves immediately (sync initialization)
  192. * - For new tasks: Resolves after provider state is fetched (async initialization)
  193. *
  194. * @private
  195. * @see {@link waitForModeInitialization} - Public method to await this promise
  196. */
  197. private taskModeReady: Promise<void>
  198. providerRef: WeakRef<ClineProvider>
  199. private readonly globalStoragePath: string
  200. abort: boolean = false
  201. currentRequestAbortController?: AbortController
  202. // TaskStatus
  203. idleAsk?: ClineMessage
  204. resumableAsk?: ClineMessage
  205. interactiveAsk?: ClineMessage
  206. didFinishAbortingStream = false
  207. abandoned = false
  208. abortReason?: ClineApiReqCancelReason
  209. isInitialized = false
  210. isPaused: boolean = false
  211. pausedModeSlug: string = defaultModeSlug
  212. private pauseInterval: NodeJS.Timeout | undefined
  213. // API
  214. apiConfiguration: ProviderSettings
  215. api: ApiHandler
  216. private static lastGlobalApiRequestTime?: number
  217. private autoApprovalHandler: AutoApprovalHandler
	/**
	 * Reset the global API request timestamp shared across all Task instances
	 * (used for global rate limiting). This should only be used for testing,
	 * so that one test's request timing does not leak into the next.
	 * @internal
	 */
	static resetGlobalApiRequestTime(): void {
		Task.lastGlobalApiRequestTime = undefined
	}
  225. toolRepetitionDetector: ToolRepetitionDetector
  226. rooIgnoreController?: RooIgnoreController
  227. rooProtectedController?: RooProtectedController
  228. fileContextTracker: FileContextTracker
  229. urlContentFetcher: UrlContentFetcher
  230. terminalProcess?: RooTerminalProcess
  231. // Computer User
  232. browserSession: BrowserSession
  233. // Editing
  234. diffViewProvider: DiffViewProvider
  235. diffStrategy?: DiffStrategy
  236. diffEnabled: boolean = false
  237. fuzzyMatchThreshold: number
  238. didEditFile: boolean = false
  239. // LLM Messages & Chat Messages
  240. apiConversationHistory: ApiMessage[] = []
  241. clineMessages: ClineMessage[] = []
  242. // Ask
  243. private askResponse?: ClineAskResponse
  244. private askResponseText?: string
  245. private askResponseImages?: string[]
  246. public lastMessageTs?: number
  247. // Tool Use
  248. consecutiveMistakeCount: number = 0
  249. consecutiveMistakeLimit: number
  250. consecutiveMistakeCountForApplyDiff: Map<string, number> = new Map()
  251. toolUsage: ToolUsage = {}
  252. // Checkpoints
  253. enableCheckpoints: boolean
  254. checkpointTimeout: number
  255. checkpointService?: RepoPerTaskCheckpointService
  256. checkpointServiceInitializing = false
  257. // Task Bridge
  258. enableBridge: boolean
  259. // Message Queue Service
  260. public readonly messageQueueService: MessageQueueService
  261. private messageQueueStateChangedHandler: (() => void) | undefined
  262. // Streaming
  263. isWaitingForFirstChunk = false
  264. isStreaming = false
  265. currentStreamingContentIndex = 0
  266. currentStreamingDidCheckpoint = false
  267. assistantMessageContent: AssistantMessageContent[] = []
  268. presentAssistantMessageLocked = false
  269. presentAssistantMessageHasPendingUpdates = false
  270. userMessageContent: (Anthropic.TextBlockParam | Anthropic.ImageBlockParam | Anthropic.ToolResultBlockParam)[] = []
  271. userMessageContentReady = false
  272. didRejectTool = false
  273. didAlreadyUseTool = false
  274. didToolFailInCurrentTurn = false
  275. didCompleteReadingStream = false
  276. assistantMessageParser?: AssistantMessageParser
  277. private providerProfileChangeListener?: (config: { name: string; provider?: string }) => void
  278. // Native tool call streaming state (track which index each tool is at)
  279. private streamingToolCallIndices: Map<string, number> = new Map()
  280. // Cached model info for current streaming session (set at start of each API request)
  281. // This prevents excessive getModel() calls during tool execution
  282. cachedStreamingModel?: { id: string; info: ModelInfo }
  283. // Token Usage Cache
  284. private tokenUsageSnapshot?: TokenUsage
  285. private tokenUsageSnapshotAt?: number
  286. // Cloud Sync Tracking
  287. private cloudSyncedMessageTimestamps: Set<number> = new Set()
  288. constructor({
  289. provider,
  290. apiConfiguration,
  291. enableDiff = false,
  292. enableCheckpoints = true,
  293. checkpointTimeout = DEFAULT_CHECKPOINT_TIMEOUT_SECONDS,
  294. enableBridge = false,
  295. fuzzyMatchThreshold = 1.0,
  296. consecutiveMistakeLimit = DEFAULT_CONSECUTIVE_MISTAKE_LIMIT,
  297. task,
  298. images,
  299. historyItem,
  300. experiments: experimentsConfig,
  301. startTask = true,
  302. rootTask,
  303. parentTask,
  304. taskNumber = -1,
  305. onCreated,
  306. initialTodos,
  307. workspacePath,
  308. }: TaskOptions) {
  309. super()
  310. if (startTask && !task && !images && !historyItem) {
  311. throw new Error("Either historyItem or task/images must be provided")
  312. }
  313. if (
  314. !checkpointTimeout ||
  315. checkpointTimeout > MAX_CHECKPOINT_TIMEOUT_SECONDS ||
  316. checkpointTimeout < MIN_CHECKPOINT_TIMEOUT_SECONDS
  317. ) {
  318. throw new Error(
  319. "checkpointTimeout must be between " +
  320. MIN_CHECKPOINT_TIMEOUT_SECONDS +
  321. " and " +
  322. MAX_CHECKPOINT_TIMEOUT_SECONDS +
  323. " seconds",
  324. )
  325. }
  326. this.taskId = historyItem ? historyItem.id : crypto.randomUUID()
  327. this.rootTaskId = historyItem ? historyItem.rootTaskId : rootTask?.taskId
  328. this.parentTaskId = historyItem ? historyItem.parentTaskId : parentTask?.taskId
  329. this.childTaskId = undefined
  330. this.metadata = {
  331. task: historyItem ? historyItem.task : task,
  332. images: historyItem ? [] : images,
  333. }
  334. // Normal use-case is usually retry similar history task with new workspace.
  335. this.workspacePath = parentTask
  336. ? parentTask.workspacePath
  337. : (workspacePath ?? getWorkspacePath(path.join(os.homedir(), "Desktop")))
  338. this.instanceId = crypto.randomUUID().slice(0, 8)
  339. this.taskNumber = -1
  340. this.rooIgnoreController = new RooIgnoreController(this.cwd)
  341. this.rooProtectedController = new RooProtectedController(this.cwd)
  342. this.fileContextTracker = new FileContextTracker(provider, this.taskId)
  343. this.rooIgnoreController.initialize().catch((error) => {
  344. console.error("Failed to initialize RooIgnoreController:", error)
  345. })
  346. this.apiConfiguration = apiConfiguration
  347. this.api = buildApiHandler(apiConfiguration)
  348. this.autoApprovalHandler = new AutoApprovalHandler()
  349. this.urlContentFetcher = new UrlContentFetcher(provider.context)
  350. this.browserSession = new BrowserSession(provider.context, (isActive: boolean) => {
  351. // Add a message to indicate browser session status change
  352. this.say("browser_session_status", isActive ? "Browser session opened" : "Browser session closed")
  353. // Broadcast to browser panel
  354. this.broadcastBrowserSessionUpdate()
  355. // When a browser session becomes active, automatically open/reveal the Browser Session tab
  356. if (isActive) {
  357. try {
  358. // Lazy-load to avoid circular imports at module load time
  359. const { BrowserSessionPanelManager } = require("../webview/BrowserSessionPanelManager")
  360. const providerRef = this.providerRef.deref()
  361. if (providerRef) {
  362. BrowserSessionPanelManager.getInstance(providerRef)
  363. .show()
  364. .catch(() => {})
  365. }
  366. } catch (err) {
  367. console.error("[Task] Failed to auto-open Browser Session panel:", err)
  368. }
  369. }
  370. })
  371. this.diffEnabled = enableDiff
  372. this.fuzzyMatchThreshold = fuzzyMatchThreshold
  373. this.consecutiveMistakeLimit = consecutiveMistakeLimit ?? DEFAULT_CONSECUTIVE_MISTAKE_LIMIT
  374. this.providerRef = new WeakRef(provider)
  375. this.globalStoragePath = provider.context.globalStorageUri.fsPath
  376. this.diffViewProvider = new DiffViewProvider(this.cwd, this)
  377. this.enableCheckpoints = enableCheckpoints
  378. this.checkpointTimeout = checkpointTimeout
  379. this.enableBridge = enableBridge
  380. this.parentTask = parentTask
  381. this.taskNumber = taskNumber
  382. // Store the task's mode when it's created.
  383. // For history items, use the stored mode; for new tasks, we'll set it
  384. // after getting state.
  385. if (historyItem) {
  386. this._taskMode = historyItem.mode || defaultModeSlug
  387. this.taskModeReady = Promise.resolve()
  388. TelemetryService.instance.captureTaskRestarted(this.taskId)
  389. } else {
  390. // For new tasks, don't set the mode yet - wait for async initialization.
  391. this._taskMode = undefined
  392. this.taskModeReady = this.initializeTaskMode(provider)
  393. TelemetryService.instance.captureTaskCreated(this.taskId)
  394. }
  395. // Initialize the assistant message parser only for XML protocol.
  396. // For native protocol, tool calls come as tool_call chunks, not XML.
  397. // experiments is always provided via TaskOptions (defaults to experimentDefault in provider)
  398. const modelInfo = this.api.getModel().info
  399. const toolProtocol = resolveToolProtocol(this.apiConfiguration, modelInfo)
  400. this.assistantMessageParser = toolProtocol !== "native" ? new AssistantMessageParser() : undefined
  401. this.messageQueueService = new MessageQueueService()
  402. this.messageQueueStateChangedHandler = () => {
  403. this.emit(RooCodeEventName.TaskUserMessage, this.taskId)
  404. this.providerRef.deref()?.postStateToWebview()
  405. }
  406. this.messageQueueService.on("stateChanged", this.messageQueueStateChangedHandler)
  407. // Listen for provider profile changes to update parser state
  408. this.setupProviderProfileChangeListener(provider)
  409. // Only set up diff strategy if diff is enabled.
  410. if (this.diffEnabled) {
  411. // Default to old strategy, will be updated if experiment is enabled.
  412. this.diffStrategy = new MultiSearchReplaceDiffStrategy(this.fuzzyMatchThreshold)
  413. // Check experiment asynchronously and update strategy if needed.
  414. provider.getState().then((state) => {
  415. const isMultiFileApplyDiffEnabled = experiments.isEnabled(
  416. state.experiments ?? {},
  417. EXPERIMENT_IDS.MULTI_FILE_APPLY_DIFF,
  418. )
  419. if (isMultiFileApplyDiffEnabled) {
  420. this.diffStrategy = new MultiFileSearchReplaceDiffStrategy(this.fuzzyMatchThreshold)
  421. }
  422. })
  423. }
  424. this.toolRepetitionDetector = new ToolRepetitionDetector(this.consecutiveMistakeLimit)
  425. // Initialize todo list if provided
  426. if (initialTodos && initialTodos.length > 0) {
  427. this.todoList = initialTodos
  428. }
  429. onCreated?.(this)
  430. if (startTask) {
  431. if (task || images) {
  432. this.startTask(task, images)
  433. } else if (historyItem) {
  434. this.resumeTaskFromHistory()
  435. } else {
  436. throw new Error("Either historyItem or task/images must be provided")
  437. }
  438. }
  439. }
  440. /**
  441. * Initialize the task mode from the provider state.
  442. * This method handles async initialization with proper error handling.
  443. *
  444. * ## Flow
  445. * 1. Attempts to fetch the current mode from provider state
  446. * 2. Sets `_taskMode` to the fetched mode or `defaultModeSlug` if unavailable
  447. * 3. Handles errors gracefully by falling back to default mode
  448. * 4. Logs any initialization errors for debugging
  449. *
  450. * ## Error handling
  451. * - Network failures when fetching provider state
  452. * - Provider not yet initialized
  453. * - Invalid state structure
  454. *
  455. * All errors result in fallback to `defaultModeSlug` to ensure task can proceed.
  456. *
  457. * @private
  458. * @param provider - The ClineProvider instance to fetch state from
  459. * @returns Promise that resolves when initialization is complete
  460. */
  461. private async initializeTaskMode(provider: ClineProvider): Promise<void> {
  462. try {
  463. const state = await provider.getState()
  464. this._taskMode = state?.mode || defaultModeSlug
  465. } catch (error) {
  466. // If there's an error getting state, use the default mode
  467. this._taskMode = defaultModeSlug
  468. // Use the provider's log method for better error visibility
  469. const errorMessage = `Failed to initialize task mode: ${error instanceof Error ? error.message : String(error)}`
  470. provider.log(errorMessage)
  471. }
  472. }
  473. /**
  474. * Sets up a listener for provider profile changes to automatically update the parser state.
  475. * This ensures the XML/native protocol parser stays synchronized with the current model.
  476. *
  477. * @private
  478. * @param provider - The ClineProvider instance to listen to
  479. */
  480. private setupProviderProfileChangeListener(provider: ClineProvider): void {
  481. // Only set up listener if provider has the on method (may not exist in test mocks)
  482. if (typeof provider.on !== "function") {
  483. return
  484. }
  485. this.providerProfileChangeListener = async () => {
  486. try {
  487. const newState = await provider.getState()
  488. if (newState?.apiConfiguration) {
  489. this.updateApiConfiguration(newState.apiConfiguration)
  490. }
  491. } catch (error) {
  492. console.error(
  493. `[Task#${this.taskId}.${this.instanceId}] Failed to update API configuration on profile change:`,
  494. error,
  495. )
  496. }
  497. }
  498. provider.on(RooCodeEventName.ProviderProfileChanged, this.providerProfileChangeListener)
  499. }
  500. /**
  501. * Wait for the task mode to be initialized before proceeding.
  502. * This method ensures that any operations depending on the task mode
  503. * will have access to the correct mode value.
  504. *
  505. * ## When to use
  506. * - Before accessing mode-specific configurations
  507. * - When switching between tasks with different modes
  508. * - Before operations that depend on mode-based permissions
  509. *
  510. * ## Example usage
  511. * ```typescript
  512. * // Wait for mode initialization before mode-dependent operations
  513. * await task.waitForModeInitialization();
  514. * const mode = task.taskMode; // Now safe to access synchronously
  515. *
  516. * // Or use with getTaskMode() for a one-liner
  517. * const mode = await task.getTaskMode(); // Internally waits for initialization
  518. * ```
  519. *
  520. * @returns Promise that resolves when the task mode is initialized
  521. * @public
  522. */
  523. public async waitForModeInitialization(): Promise<void> {
  524. return this.taskModeReady
  525. }
  526. /**
  527. * Get the task mode asynchronously, ensuring it's properly initialized.
  528. * This is the recommended way to access the task mode as it guarantees
  529. * the mode is available before returning.
  530. *
  531. * ## Async behavior
  532. * - Internally waits for `taskModeReady` promise to resolve
  533. * - Returns the initialized mode or `defaultModeSlug` as fallback
  534. * - Safe to call multiple times - subsequent calls return immediately if already initialized
  535. *
  536. * ## Example usage
  537. * ```typescript
  538. * // Safe async access
  539. * const mode = await task.getTaskMode();
  540. * console.log(`Task is running in ${mode} mode`);
  541. *
  542. * // Use in conditional logic
  543. * if (await task.getTaskMode() === 'architect') {
  544. * // Perform architect-specific operations
  545. * }
  546. * ```
  547. *
  548. * @returns Promise resolving to the task mode string
  549. * @public
  550. */
  551. public async getTaskMode(): Promise<string> {
  552. await this.taskModeReady
  553. return this._taskMode || defaultModeSlug
  554. }
  555. /**
  556. * Get the task mode synchronously. This should only be used when you're certain
  557. * that the mode has already been initialized (e.g., after waitForModeInitialization).
  558. *
  559. * ## When to use
  560. * - In synchronous contexts where async/await is not available
  561. * - After explicitly waiting for initialization via `waitForModeInitialization()`
  562. * - In event handlers or callbacks where mode is guaranteed to be initialized
  563. *
  564. * ## Example usage
  565. * ```typescript
  566. * // After ensuring initialization
  567. * await task.waitForModeInitialization();
  568. * const mode = task.taskMode; // Safe synchronous access
  569. *
  570. * // In an event handler after task is started
  571. * task.on('taskStarted', () => {
  572. * console.log(`Task started in ${task.taskMode} mode`); // Safe here
  573. * });
  574. * ```
  575. *
  576. * @throws {Error} If the mode hasn't been initialized yet
  577. * @returns The task mode string
  578. * @public
  579. */
  580. public get taskMode(): string {
  581. if (this._taskMode === undefined) {
  582. throw new Error("Task mode accessed before initialization. Use getTaskMode() or wait for taskModeReady.")
  583. }
  584. return this._taskMode
  585. }
  586. static create(options: TaskOptions): [Task, Promise<void>] {
  587. const instance = new Task({ ...options, startTask: false })
  588. const { images, task, historyItem } = options
  589. let promise
  590. if (images || task) {
  591. promise = instance.startTask(task, images)
  592. } else if (historyItem) {
  593. promise = instance.resumeTaskFromHistory()
  594. } else {
  595. throw new Error("Either historyItem or task/images must be provided")
  596. }
  597. return [instance, promise]
  598. }
  599. // API Messages
  600. private async getSavedApiConversationHistory(): Promise<ApiMessage[]> {
  601. return readApiMessages({ taskId: this.taskId, globalStoragePath: this.globalStoragePath })
  602. }
  603. private async addToApiConversationHistory(message: Anthropic.MessageParam, reasoning?: string) {
  604. // Capture the encrypted_content / thought signatures from the provider (e.g., OpenAI Responses API, Google GenAI) if present.
  605. // We only persist data reported by the current response body.
  606. const handler = this.api as ApiHandler & {
  607. getResponseId?: () => string | undefined
  608. getEncryptedContent?: () => { encrypted_content: string; id?: string } | undefined
  609. getThoughtSignature?: () => string | undefined
  610. getSummary?: () => any[] | undefined
  611. getReasoningDetails?: () => any[] | undefined
  612. }
  613. if (message.role === "assistant") {
  614. const responseId = handler.getResponseId?.()
  615. const reasoningData = handler.getEncryptedContent?.()
  616. const thoughtSignature = handler.getThoughtSignature?.()
  617. const reasoningSummary = handler.getSummary?.()
  618. const reasoningDetails = handler.getReasoningDetails?.()
  619. // Start from the original assistant message
  620. const messageWithTs: any = {
  621. ...message,
  622. ...(responseId ? { id: responseId } : {}),
  623. ts: Date.now(),
  624. }
  625. // Store reasoning_details array if present (for models like Gemini 3)
  626. if (reasoningDetails) {
  627. messageWithTs.reasoning_details = reasoningDetails
  628. }
  629. // Store reasoning: plain text (most providers) or encrypted (OpenAI Native)
  630. // Skip if reasoning_details already contains the reasoning (to avoid duplication)
  631. if (reasoning && !reasoningDetails) {
  632. const reasoningBlock = {
  633. type: "reasoning",
  634. text: reasoning,
  635. summary: reasoningSummary ?? ([] as any[]),
  636. }
  637. if (typeof messageWithTs.content === "string") {
  638. messageWithTs.content = [
  639. reasoningBlock,
  640. { type: "text", text: messageWithTs.content } satisfies Anthropic.Messages.TextBlockParam,
  641. ]
  642. } else if (Array.isArray(messageWithTs.content)) {
  643. messageWithTs.content = [reasoningBlock, ...messageWithTs.content]
  644. } else if (!messageWithTs.content) {
  645. messageWithTs.content = [reasoningBlock]
  646. }
  647. } else if (reasoningData?.encrypted_content) {
  648. // OpenAI Native encrypted reasoning
  649. const reasoningBlock = {
  650. type: "reasoning",
  651. summary: [] as any[],
  652. encrypted_content: reasoningData.encrypted_content,
  653. ...(reasoningData.id ? { id: reasoningData.id } : {}),
  654. }
  655. if (typeof messageWithTs.content === "string") {
  656. messageWithTs.content = [
  657. reasoningBlock,
  658. { type: "text", text: messageWithTs.content } satisfies Anthropic.Messages.TextBlockParam,
  659. ]
  660. } else if (Array.isArray(messageWithTs.content)) {
  661. messageWithTs.content = [reasoningBlock, ...messageWithTs.content]
  662. } else if (!messageWithTs.content) {
  663. messageWithTs.content = [reasoningBlock]
  664. }
  665. }
  666. // If we have a thought signature, append it as a dedicated content block
  667. // so it can be round-tripped in api_history.json and re-sent on subsequent calls.
  668. if (thoughtSignature) {
  669. const thoughtSignatureBlock = {
  670. type: "thoughtSignature",
  671. thoughtSignature,
  672. }
  673. if (typeof messageWithTs.content === "string") {
  674. messageWithTs.content = [
  675. { type: "text", text: messageWithTs.content } satisfies Anthropic.Messages.TextBlockParam,
  676. thoughtSignatureBlock,
  677. ]
  678. } else if (Array.isArray(messageWithTs.content)) {
  679. messageWithTs.content = [...messageWithTs.content, thoughtSignatureBlock]
  680. } else if (!messageWithTs.content) {
  681. messageWithTs.content = [thoughtSignatureBlock]
  682. }
  683. }
  684. this.apiConversationHistory.push(messageWithTs)
  685. } else {
  686. const messageWithTs = { ...message, ts: Date.now() }
  687. this.apiConversationHistory.push(messageWithTs)
  688. }
  689. await this.saveApiConversationHistory()
  690. }
  691. async overwriteApiConversationHistory(newHistory: ApiMessage[]) {
  692. this.apiConversationHistory = newHistory
  693. await this.saveApiConversationHistory()
  694. }
  695. private async saveApiConversationHistory() {
  696. try {
  697. await saveApiMessages({
  698. messages: this.apiConversationHistory,
  699. taskId: this.taskId,
  700. globalStoragePath: this.globalStoragePath,
  701. })
  702. } catch (error) {
  703. // In the off chance this fails, we don't want to stop the task.
  704. console.error("Failed to save API conversation history:", error)
  705. }
  706. }
  707. // Cline Messages
  708. private async getSavedClineMessages(): Promise<ClineMessage[]> {
  709. return readTaskMessages({ taskId: this.taskId, globalStoragePath: this.globalStoragePath })
  710. }
  711. private async addToClineMessages(message: ClineMessage) {
  712. this.clineMessages.push(message)
  713. const provider = this.providerRef.deref()
  714. await provider?.postStateToWebview()
  715. this.emit(RooCodeEventName.Message, { action: "created", message })
  716. await this.saveClineMessages()
  717. const shouldCaptureMessage = message.partial !== true && CloudService.isEnabled()
  718. if (shouldCaptureMessage) {
  719. CloudService.instance.captureEvent({
  720. event: TelemetryEventName.TASK_MESSAGE,
  721. properties: { taskId: this.taskId, message },
  722. })
  723. // Track that this message has been synced to cloud
  724. this.cloudSyncedMessageTimestamps.add(message.ts)
  725. }
  726. }
  727. public async overwriteClineMessages(newMessages: ClineMessage[]) {
  728. this.clineMessages = newMessages
  729. restoreTodoListForTask(this)
  730. await this.saveClineMessages()
  731. // When overwriting messages (e.g., during task resume), repopulate the cloud sync tracking Set
  732. // with timestamps from all non-partial messages to prevent re-syncing previously synced messages
  733. this.cloudSyncedMessageTimestamps.clear()
  734. for (const msg of newMessages) {
  735. if (msg.partial !== true) {
  736. this.cloudSyncedMessageTimestamps.add(msg.ts)
  737. }
  738. }
  739. }
  740. private async updateClineMessage(message: ClineMessage) {
  741. const provider = this.providerRef.deref()
  742. await provider?.postMessageToWebview({ type: "messageUpdated", clineMessage: message })
  743. this.emit(RooCodeEventName.Message, { action: "updated", message })
  744. // Check if we should sync to cloud and haven't already synced this message
  745. const shouldCaptureMessage = message.partial !== true && CloudService.isEnabled()
  746. const hasNotBeenSynced = !this.cloudSyncedMessageTimestamps.has(message.ts)
  747. if (shouldCaptureMessage && hasNotBeenSynced) {
  748. CloudService.instance.captureEvent({
  749. event: TelemetryEventName.TASK_MESSAGE,
  750. properties: { taskId: this.taskId, message },
  751. })
  752. // Track that this message has been synced to cloud
  753. this.cloudSyncedMessageTimestamps.add(message.ts)
  754. }
  755. }
/**
 * Persist the chat messages, refresh derived task metadata (history item and
 * token usage), and notify listeners/provider of any changes.
 * Failures are logged but never propagated — saving is best-effort.
 */
private async saveClineMessages() {
	try {
		await saveTaskMessages({
			messages: this.clineMessages,
			taskId: this.taskId,
			globalStoragePath: this.globalStoragePath,
		})
		// Recompute the history entry and token usage from the current messages.
		const { historyItem, tokenUsage } = await taskMetadata({
			taskId: this.taskId,
			rootTaskId: this.rootTaskId,
			parentTaskId: this.parentTaskId,
			taskNumber: this.taskNumber,
			messages: this.clineMessages,
			globalStoragePath: this.globalStoragePath,
			workspace: this.cwd,
			mode: this._taskMode || defaultModeSlug, // Use the task's own mode, not the current provider mode.
		})
		// Emit only when usage moved relative to the snapshot, then clear the
		// snapshot so the next comparison re-baselines.
		if (hasTokenUsageChanged(tokenUsage, this.tokenUsageSnapshot)) {
			this.emit(RooCodeEventName.TaskTokenUsageUpdated, this.taskId, tokenUsage)
			this.tokenUsageSnapshot = undefined
			this.tokenUsageSnapshotAt = undefined
		}
		await this.providerRef.deref()?.updateTaskHistory(historyItem)
	} catch (error) {
		// In the off chance this fails, we don't want to stop the task.
		console.error("Failed to save Roo messages:", error)
	}
}
  783. private findMessageByTimestamp(ts: number): ClineMessage | undefined {
  784. for (let i = this.clineMessages.length - 1; i >= 0; i--) {
  785. if (this.clineMessages[i].ts === ts) {
  786. return this.clineMessages[i]
  787. }
  788. }
  789. return undefined
  790. }
// Note that `partial` has three valid states true (partial message),
// false (completion of partial message), undefined (individual complete
// message).
/**
 * Post an "ask" message to the chat and suspend until it is answered.
 *
 * The answer may come from the user (via the webview), from auto-approval
 * settings, or from a queued message. Partial asks (streaming) never resolve:
 * they update/append the message and then throw "Current ask promise was
 * ignored" as a control signal — callers are expected to tolerate that.
 *
 * @param type - The kind of ask (tool, command, followup, ...)
 * @param text - Optional message text shown with the ask
 * @param partial - true: streaming chunk; false: completion of a streamed ask; undefined: single complete ask
 * @param progressStatus - Optional progress indicator carried on the message
 * @param isProtected - Marks the ask as protected from auto-deny paths
 * @returns The response kind plus any text/images supplied with it
 * @throws If the task was aborted, or the ask was superseded by a newer message
 */
async ask(
	type: ClineAsk,
	text?: string,
	partial?: boolean,
	progressStatus?: ToolProgressStatus,
	isProtected?: boolean,
): Promise<{ response: ClineAskResponse; text?: string; images?: string[] }> {
	// If this Cline instance was aborted by the provider, then the only
	// thing keeping us alive is a promise still running in the background,
	// in which case we don't want to send its result to the webview as it
	// is attached to a new instance of Cline now. So we can safely ignore
	// the result of any active promises, and this class will be
	// deallocated. (Although we set Cline = undefined in provider, that
	// simply removes the reference to this instance, but the instance is
	// still alive until this promise resolves or rejects.)
	if (this.abort) {
		throw new Error(`[RooCode#ask] task ${this.taskId}.${this.instanceId} aborted`)
	}
	let askTs: number
	if (partial !== undefined) {
		const lastMessage = this.clineMessages.at(-1)
		const isUpdatingPreviousPartial =
			lastMessage && lastMessage.partial && lastMessage.type === "ask" && lastMessage.ask === type
		if (partial) {
			if (isUpdatingPreviousPartial) {
				// Existing partial message, so update it.
				lastMessage.text = text
				lastMessage.partial = partial
				lastMessage.progressStatus = progressStatus
				lastMessage.isProtected = isProtected
				// TODO: Be more efficient about saving and posting only new
				// data or one whole message at a time so ignore partial for
				// saves, and only post parts of partial message instead of
				// whole array in new listener.
				// Not awaited: the UI update is fire-and-forget before the throw below.
				this.updateClineMessage(lastMessage)
				// console.log("Task#ask: current ask promise was ignored (#1)")
				throw new Error("Current ask promise was ignored (#1)")
			} else {
				// This is a new partial message, so add it with partial
				// state.
				askTs = Date.now()
				this.lastMessageTs = askTs
				console.log(`Task#ask: new partial ask -> ${type} @ ${askTs}`)
				await this.addToClineMessages({ ts: askTs, type: "ask", ask: type, text, partial, isProtected })
				// console.log("Task#ask: current ask promise was ignored (#2)")
				throw new Error("Current ask promise was ignored (#2)")
			}
		} else {
			if (isUpdatingPreviousPartial) {
				// This is the complete version of a previously partial
				// message, so replace the partial with the complete version.
				this.askResponse = undefined
				this.askResponseText = undefined
				this.askResponseImages = undefined
				// Bug for the history books:
				// In the webview we use the ts as the chatrow key for the
				// virtuoso list. Since we would update this ts right at the
				// end of streaming, it would cause the view to flicker. The
				// key prop has to be stable otherwise react has trouble
				// reconciling items between renders, causing unmounting and
				// remounting of components (flickering).
				// The lesson here is if you see flickering when rendering
				// lists, it's likely because the key prop is not stable.
				// So in this case we must make sure that the message ts is
				// never altered after first setting it.
				askTs = lastMessage.ts
				console.log(`Task#ask: updating previous partial ask -> ${type} @ ${askTs}`)
				this.lastMessageTs = askTs
				lastMessage.text = text
				lastMessage.partial = false
				lastMessage.progressStatus = progressStatus
				lastMessage.isProtected = isProtected
				await this.saveClineMessages()
				// Not awaited: fire-and-forget webview refresh.
				this.updateClineMessage(lastMessage)
			} else {
				// This is a new and complete message, so add it like normal.
				this.askResponse = undefined
				this.askResponseText = undefined
				this.askResponseImages = undefined
				askTs = Date.now()
				console.log(`Task#ask: new complete ask -> ${type} @ ${askTs}`)
				this.lastMessageTs = askTs
				await this.addToClineMessages({ ts: askTs, type: "ask", ask: type, text, isProtected })
			}
		}
	} else {
		// This is a new non-partial message, so add it like normal.
		this.askResponse = undefined
		this.askResponseText = undefined
		this.askResponseImages = undefined
		askTs = Date.now()
		console.log(`Task#ask: new complete ask -> ${type} @ ${askTs}`)
		this.lastMessageTs = askTs
		await this.addToClineMessages({ ts: askTs, type: "ask", ask: type, text, isProtected })
	}
	// Timers created below; all are cleared once the ask resolves.
	let timeouts: NodeJS.Timeout[] = []
	// Automatically approve if the ask according to the user's settings.
	const provider = this.providerRef.deref()
	const state = provider ? await provider.getState() : undefined
	const approval = await checkAutoApproval({ state, ask: type, text, isProtected })
	if (approval.decision === "approve") {
		this.approveAsk()
	} else if (approval.decision === "deny") {
		this.denyAsk()
	} else if (approval.decision === "timeout") {
		// Deferred decision: the approval callback supplies the response after its timeout.
		timeouts.push(
			setTimeout(() => {
				const { askResponse, text, images } = approval.fn()
				this.handleWebviewAskResponse(askResponse, text, images)
			}, approval.timeout),
		)
	}
	// The state is mutable if the message is complete and the task will
	// block (via the `pWaitFor`).
	const isBlocking = !(this.askResponse !== undefined || this.lastMessageTs !== askTs)
	const isMessageQueued = !this.messageQueueService.isEmpty()
	const isStatusMutable = !partial && isBlocking && !isMessageQueued && approval.decision === "ask"
	if (isBlocking) {
		console.log(`Task#ask will block -> type: ${type}`)
	}
	if (isStatusMutable) {
		// After a short delay, flag the task as interactive/resumable/idle
		// (depending on the ask type) so the UI can surface the waiting state.
		console.log(`Task#ask: status is mutable -> type: ${type}`)
		const statusMutationTimeout = 2_000
		if (isInteractiveAsk(type)) {
			timeouts.push(
				setTimeout(() => {
					const message = this.findMessageByTimestamp(askTs)
					if (message) {
						this.interactiveAsk = message
						this.emit(RooCodeEventName.TaskInteractive, this.taskId)
						provider?.postMessageToWebview({ type: "interactionRequired" })
					}
				}, statusMutationTimeout),
			)
		} else if (isResumableAsk(type)) {
			timeouts.push(
				setTimeout(() => {
					const message = this.findMessageByTimestamp(askTs)
					if (message) {
						this.resumableAsk = message
						this.emit(RooCodeEventName.TaskResumable, this.taskId)
					}
				}, statusMutationTimeout),
			)
		} else if (isIdleAsk(type)) {
			timeouts.push(
				setTimeout(() => {
					const message = this.findMessageByTimestamp(askTs)
					if (message) {
						this.idleAsk = message
						this.emit(RooCodeEventName.TaskIdle, this.taskId)
					}
				}, statusMutationTimeout),
			)
		}
	} else if (isMessageQueued) {
		// A queued user message answers this ask immediately instead of blocking.
		console.log(`Task#ask: will process message queue -> type: ${type}`)
		const message = this.messageQueueService.dequeueMessage()
		if (message) {
			// Check if this is a tool approval ask that needs to be handled.
			if (
				type === "tool" ||
				type === "command" ||
				type === "browser_action_launch" ||
				type === "use_mcp_server"
			) {
				// For tool approvals, we need to approve first, then send
				// the message if there's text/images.
				this.handleWebviewAskResponse("yesButtonClicked", message.text, message.images)
			} else {
				// For other ask types (like followup or command_output), fulfill the ask
				// directly.
				this.handleWebviewAskResponse("messageResponse", message.text, message.images)
			}
		}
	}
	// Wait for askResponse to be set
	await pWaitFor(() => this.askResponse !== undefined || this.lastMessageTs !== askTs, { interval: 100 })
	if (this.lastMessageTs !== askTs) {
		// Could happen if we send multiple asks in a row i.e. with
		// command_output. It's important that when we know an ask could
		// fail, it is handled gracefully.
		console.log("Task#ask: current ask promise was ignored")
		throw new Error("Current ask promise was ignored")
	}
	const result = { response: this.askResponse!, text: this.askResponseText, images: this.askResponseImages }
	this.askResponse = undefined
	this.askResponseText = undefined
	this.askResponseImages = undefined
	// Cancel the timeouts if they are still running.
	timeouts.forEach((timeout) => clearTimeout(timeout))
	// Switch back to an active state.
	if (this.idleAsk || this.resumableAsk || this.interactiveAsk) {
		this.idleAsk = undefined
		this.resumableAsk = undefined
		this.interactiveAsk = undefined
		this.emit(RooCodeEventName.TaskActive, this.taskId)
	}
	this.emit(RooCodeEventName.TaskAskResponded)
	return result
}
  995. handleWebviewAskResponse(askResponse: ClineAskResponse, text?: string, images?: string[]) {
  996. this.askResponse = askResponse
  997. this.askResponseText = text
  998. this.askResponseImages = images
  999. // Create a checkpoint whenever the user sends a message.
  1000. // Use allowEmpty=true to ensure a checkpoint is recorded even if there are no file changes.
  1001. // Suppress the checkpoint_saved chat row for this particular checkpoint to keep the timeline clean.
  1002. if (askResponse === "messageResponse") {
  1003. void this.checkpointSave(false, true)
  1004. }
  1005. // Mark the last follow-up question as answered
  1006. if (askResponse === "messageResponse" || askResponse === "yesButtonClicked") {
  1007. // Find the last unanswered follow-up message using findLastIndex
  1008. const lastFollowUpIndex = findLastIndex(
  1009. this.clineMessages,
  1010. (msg) => msg.type === "ask" && msg.ask === "followup" && !msg.isAnswered,
  1011. )
  1012. if (lastFollowUpIndex !== -1) {
  1013. // Mark this follow-up as answered
  1014. this.clineMessages[lastFollowUpIndex].isAnswered = true
  1015. // Save the updated messages
  1016. this.saveClineMessages().catch((error) => {
  1017. console.error("Failed to save answered follow-up state:", error)
  1018. })
  1019. }
  1020. }
  1021. }
  1022. public approveAsk({ text, images }: { text?: string; images?: string[] } = {}) {
  1023. this.handleWebviewAskResponse("yesButtonClicked", text, images)
  1024. }
  1025. public denyAsk({ text, images }: { text?: string; images?: string[] } = {}) {
  1026. this.handleWebviewAskResponse("noButtonClicked", text, images)
  1027. }
  1028. /**
  1029. * Updates the API configuration and reinitializes the parser based on the new tool protocol.
  1030. * This should be called when switching between models/profiles with different tool protocols
  1031. * to prevent the parser from being left in an inconsistent state.
  1032. *
  1033. * @param newApiConfiguration - The new API configuration to use
  1034. */
  1035. public updateApiConfiguration(newApiConfiguration: ProviderSettings): void {
  1036. // Update the configuration and rebuild the API handler
  1037. this.apiConfiguration = newApiConfiguration
  1038. this.api = buildApiHandler(newApiConfiguration)
  1039. // Determine what the tool protocol should be
  1040. const modelInfo = this.api.getModel().info
  1041. const protocol = resolveToolProtocol(this.apiConfiguration, modelInfo)
  1042. const shouldUseXmlParser = protocol === "xml"
  1043. // Ensure parser state matches protocol requirement
  1044. const parserStateCorrect =
  1045. (shouldUseXmlParser && this.assistantMessageParser) || (!shouldUseXmlParser && !this.assistantMessageParser)
  1046. if (parserStateCorrect) {
  1047. return
  1048. }
  1049. // Fix parser state
  1050. if (shouldUseXmlParser && !this.assistantMessageParser) {
  1051. this.assistantMessageParser = new AssistantMessageParser()
  1052. } else if (!shouldUseXmlParser && this.assistantMessageParser) {
  1053. this.assistantMessageParser.reset()
  1054. this.assistantMessageParser = undefined
  1055. }
  1056. }
  1057. public async submitUserMessage(
  1058. text: string,
  1059. images?: string[],
  1060. mode?: string,
  1061. providerProfile?: string,
  1062. ): Promise<void> {
  1063. try {
  1064. text = (text ?? "").trim()
  1065. images = images ?? []
  1066. if (text.length === 0 && images.length === 0) {
  1067. return
  1068. }
  1069. const provider = this.providerRef.deref()
  1070. if (provider) {
  1071. if (mode) {
  1072. await provider.setMode(mode)
  1073. }
  1074. if (providerProfile) {
  1075. await provider.setProviderProfile(providerProfile)
  1076. // Update this task's API configuration to match the new profile
  1077. // This ensures the parser state is synchronized with the selected model
  1078. const newState = await provider.getState()
  1079. if (newState?.apiConfiguration) {
  1080. this.updateApiConfiguration(newState.apiConfiguration)
  1081. }
  1082. }
  1083. this.emit(RooCodeEventName.TaskUserMessage, this.taskId)
  1084. provider.postMessageToWebview({ type: "invoke", invoke: "sendMessage", text, images })
  1085. } else {
  1086. console.error("[Task#submitUserMessage] Provider reference lost")
  1087. }
  1088. } catch (error) {
  1089. console.error("[Task#submitUserMessage] Failed to submit user message:", error)
  1090. }
  1091. }
  1092. async handleTerminalOperation(terminalOperation: "continue" | "abort") {
  1093. if (terminalOperation === "continue") {
  1094. this.terminalProcess?.continue()
  1095. } else if (terminalOperation === "abort") {
  1096. this.terminalProcess?.abort()
  1097. }
  1098. }
  1099. public async condenseContext(): Promise<void> {
  1100. const systemPrompt = await this.getSystemPrompt()
  1101. // Get condensing configuration
  1102. const state = await this.providerRef.deref()?.getState()
  1103. // These properties may not exist in the state type yet, but are used for condensing configuration
  1104. const customCondensingPrompt = state?.customCondensingPrompt
  1105. const condensingApiConfigId = state?.condensingApiConfigId
  1106. const listApiConfigMeta = state?.listApiConfigMeta
  1107. // Determine API handler to use
  1108. let condensingApiHandler: ApiHandler | undefined
  1109. if (condensingApiConfigId && listApiConfigMeta && Array.isArray(listApiConfigMeta)) {
  1110. // Find matching config by ID
  1111. const matchingConfig = listApiConfigMeta.find((config) => config.id === condensingApiConfigId)
  1112. if (matchingConfig) {
  1113. const profile = await this.providerRef.deref()?.providerSettingsManager.getProfile({
  1114. id: condensingApiConfigId,
  1115. })
  1116. // Ensure profile and apiProvider exist before trying to build handler
  1117. if (profile && profile.apiProvider) {
  1118. condensingApiHandler = buildApiHandler(profile)
  1119. }
  1120. }
  1121. }
  1122. const { contextTokens: prevContextTokens } = this.getTokenUsage()
  1123. // Determine if we're using native tool protocol for proper message handling
  1124. const modelInfo = this.api.getModel().info
  1125. const protocol = resolveToolProtocol(this.apiConfiguration, modelInfo)
  1126. const useNativeTools = isNativeProtocol(protocol)
  1127. const {
  1128. messages,
  1129. summary,
  1130. cost,
  1131. newContextTokens = 0,
  1132. error,
  1133. } = await summarizeConversation(
  1134. this.apiConversationHistory,
  1135. this.api, // Main API handler (fallback)
  1136. systemPrompt, // Default summarization prompt (fallback)
  1137. this.taskId,
  1138. prevContextTokens,
  1139. false, // manual trigger
  1140. customCondensingPrompt, // User's custom prompt
  1141. condensingApiHandler, // Specific handler for condensing
  1142. useNativeTools, // Pass native tools flag for proper message handling
  1143. )
  1144. if (error) {
  1145. this.say(
  1146. "condense_context_error",
  1147. error,
  1148. undefined /* images */,
  1149. false /* partial */,
  1150. undefined /* checkpoint */,
  1151. undefined /* progressStatus */,
  1152. { isNonInteractive: true } /* options */,
  1153. )
  1154. return
  1155. }
  1156. await this.overwriteApiConversationHistory(messages)
  1157. const contextCondense: ContextCondense = { summary, cost, newContextTokens, prevContextTokens }
  1158. await this.say(
  1159. "condense_context",
  1160. undefined /* text */,
  1161. undefined /* images */,
  1162. false /* partial */,
  1163. undefined /* checkpoint */,
  1164. undefined /* progressStatus */,
  1165. { isNonInteractive: true } /* options */,
  1166. contextCondense,
  1167. )
  1168. // Process any queued messages after condensing completes
  1169. this.processQueuedMessages()
  1170. }
/**
 * Adds a "say" message (a non-question message from the agent) to the
 * webview conversation (`clineMessages`), with support for streamed
 * partial updates.
 *
 * Streaming protocol:
 * - `partial === true`: update the trailing partial message of the same
 *   `type`, or start a new partial message.
 * - `partial === false`: finalize the trailing partial message (persisting
 *   it to disk), or add a brand-new complete message.
 * - `partial === undefined`: plain, non-streamed message.
 *
 * @param type - Category of the message.
 * @param text - Message body, if any.
 * @param images - Attached images, if any.
 * @param partial - Streaming state; see protocol above.
 * @param checkpoint - Optional checkpoint payload (non-streamed path only).
 * @param progressStatus - Optional tool progress indicator for partial updates.
 * @param options - `isNonInteractive` messages do not bump `lastMessageTs`,
 *   so they cannot interrupt a pending ask.
 * @param contextCondense - Optional condensation summary attached to the message.
 * @throws Error if the task has been aborted.
 */
async say(
	type: ClineSay,
	text?: string,
	images?: string[],
	partial?: boolean,
	checkpoint?: Record<string, unknown>,
	progressStatus?: ToolProgressStatus,
	options: {
		isNonInteractive?: boolean
	} = {},
	contextCondense?: ContextCondense,
): Promise<undefined> {
	if (this.abort) {
		throw new Error(`[RooCode#say] task ${this.taskId}.${this.instanceId} aborted`)
	}

	if (partial !== undefined) {
		// Streaming mode: decide whether we are continuing the trailing
		// partial message of the same type or starting/finishing one.
		const lastMessage = this.clineMessages.at(-1)
		const isUpdatingPreviousPartial =
			lastMessage && lastMessage.partial && lastMessage.type === "say" && lastMessage.say === type

		if (partial) {
			if (isUpdatingPreviousPartial) {
				// Existing partial message, so update it.
				lastMessage.text = text
				lastMessage.images = images
				lastMessage.partial = partial
				lastMessage.progressStatus = progressStatus
				this.updateClineMessage(lastMessage)
			} else {
				// This is a new partial message, so add it with partial state.
				const sayTs = Date.now()
				if (!options.isNonInteractive) {
					this.lastMessageTs = sayTs
				}
				await this.addToClineMessages({
					ts: sayTs,
					type: "say",
					say: type,
					text,
					images,
					partial,
					contextCondense,
				})
			}
		} else {
			// We now have the complete version of a previously partial
			// message, so replace the partial with the complete version.
			if (isUpdatingPreviousPartial) {
				if (!options.isNonInteractive) {
					this.lastMessageTs = lastMessage.ts
				}
				lastMessage.text = text
				lastMessage.images = images
				lastMessage.partial = false
				lastMessage.progressStatus = progressStatus
				// Instead of streaming partialMessage events, we do a save
				// and post like normal to persist to disk.
				await this.saveClineMessages()
				// More performant than an entire `postStateToWebview`.
				this.updateClineMessage(lastMessage)
			} else {
				// This is a new and complete message, so add it like normal.
				const sayTs = Date.now()
				if (!options.isNonInteractive) {
					this.lastMessageTs = sayTs
				}
				await this.addToClineMessages({
					ts: sayTs,
					type: "say",
					say: type,
					text,
					images,
					contextCondense,
				})
			}
		}
	} else {
		// This is a new non-partial message, so add it like normal.
		const sayTs = Date.now()
		// A "non-interactive" message is one that the user does not need to
		// respond to. We don't want these message types to trigger an update
		// to `lastMessageTs` since they can be created asynchronously and
		// could interrupt a pending ask.
		if (!options.isNonInteractive) {
			this.lastMessageTs = sayTs
		}
		await this.addToClineMessages({
			ts: sayTs,
			type: "say",
			say: type,
			text,
			images,
			checkpoint,
			contextCondense,
		})
	}

	// Broadcast browser session updates to the panel when browser-related
	// messages are added.
	if (type === "browser_action" || type === "browser_action_result" || type === "browser_session_status") {
		this.broadcastBrowserSessionUpdate()
	}
}
  1272. async sayAndCreateMissingParamError(toolName: ToolName, paramName: string, relPath?: string) {
  1273. await this.say(
  1274. "error",
  1275. `Roo tried to use ${toolName}${
  1276. relPath ? ` for '${relPath.toPosix()}'` : ""
  1277. } without value for required parameter '${paramName}'. Retrying...`,
  1278. )
  1279. const modelInfo = this.api.getModel().info
  1280. const state = await this.providerRef.deref()?.getState()
  1281. const toolProtocol = resolveToolProtocol(this.apiConfiguration, modelInfo)
  1282. return formatResponse.toolError(formatResponse.missingToolParameterError(paramName, toolProtocol))
  1283. }
// Lifecycle
// Start / Resume / Abort / Dispose

/**
 * Starts a brand-new task: resets both conversation stores, shows the
 * initial user message in the webview, and enters the agentic task loop.
 *
 * @param task - The user's task prompt (wrapped in `<task>` tags for the API).
 * @param images - Optional images attached to the initial message.
 */
private async startTask(task?: string, images?: string[]): Promise<void> {
	if (this.enableBridge) {
		try {
			await BridgeOrchestrator.subscribeToTask(this)
		} catch (error) {
			// Bridge subscription is best-effort; the task proceeds without it.
			console.error(
				`[Task#startTask] BridgeOrchestrator.subscribeToTask() failed: ${error instanceof Error ? error.message : String(error)}`,
			)
		}
	}
	// `conversationHistory` (for API) and `clineMessages` (for webview)
	// need to be in sync.
	// If the extension process were killed, then on restart the
	// `clineMessages` might not be empty, so we need to set it to [] when
	// we create a new Cline client (otherwise webview would show stale
	// messages from previous session).
	this.clineMessages = []
	this.apiConversationHistory = []
	// The todo list is already set in the constructor if initialTodos were
	// provided, so no messages need to be added here.
	await this.providerRef.deref()?.postStateToWebview()
	await this.say("text", task, images)
	this.isInitialized = true
	let imageBlocks: Anthropic.ImageBlockParam[] = formatResponse.imageBlocks(images)
	// Kick off the agentic loop with the task text plus any images.
	await this.initiateTaskLoop([
		{
			type: "text",
			text: `<task>\n${task}\n</task>`,
		},
		...imageBlocks,
	])
}
/**
 * Resumes a task persisted from a previous session: reconciles the saved
 * webview messages with the saved API conversation history, asks the user
 * whether to resume, repairs dangling tool calls, then re-enters the task
 * loop with the reconstructed user content.
 */
private async resumeTaskFromHistory() {
	if (this.enableBridge) {
		try {
			await BridgeOrchestrator.subscribeToTask(this)
		} catch (error) {
			// Bridge subscription is best-effort; resumption proceeds without it.
			console.error(
				`[Task#resumeTaskFromHistory] BridgeOrchestrator.subscribeToTask() failed: ${error instanceof Error ? error.message : String(error)}`,
			)
		}
	}
	const modifiedClineMessages = await this.getSavedClineMessages()
	// Remove any resume messages that may have been added before.
	const lastRelevantMessageIndex = findLastIndex(
		modifiedClineMessages,
		(m) => !(m.ask === "resume_task" || m.ask === "resume_completed_task"),
	)
	if (lastRelevantMessageIndex !== -1) {
		modifiedClineMessages.splice(lastRelevantMessageIndex + 1)
	}
	// Remove any trailing reasoning-only UI messages that were not part of
	// the persisted API conversation.
	while (modifiedClineMessages.length > 0) {
		const last = modifiedClineMessages[modifiedClineMessages.length - 1]
		if (last.type === "say" && last.say === "reasoning") {
			modifiedClineMessages.pop()
		} else {
			break
		}
	}
	// Since we don't use `api_req_finished` anymore, we need to check if the
	// last `api_req_started` has a cost value, if it doesn't and no
	// cancellation reason to present, then we remove it since it indicates
	// an api request without any partial content streamed.
	const lastApiReqStartedIndex = findLastIndex(
		modifiedClineMessages,
		(m) => m.type === "say" && m.say === "api_req_started",
	)
	if (lastApiReqStartedIndex !== -1) {
		const lastApiReqStarted = modifiedClineMessages[lastApiReqStartedIndex]
		const { cost, cancelReason }: ClineApiReqInfo = JSON.parse(lastApiReqStarted.text || "{}")
		if (cost === undefined && cancelReason === undefined) {
			modifiedClineMessages.splice(lastApiReqStartedIndex, 1)
		}
	}
	await this.overwriteClineMessages(modifiedClineMessages)
	this.clineMessages = await this.getSavedClineMessages()
	// Now present the cline messages to the user and ask if they want to
	// resume (NOTE: we ran into a bug before where the
	// apiConversationHistory wouldn't be initialized when opening a old
	// task, and it was because we were waiting for resume).
	// This is important in case the user deletes messages without resuming
	// the task first.
	this.apiConversationHistory = await this.getSavedApiConversationHistory()
	const lastClineMessage = this.clineMessages
		.slice()
		.reverse()
		.find((m) => !(m.ask === "resume_task" || m.ask === "resume_completed_task")) // Could be multiple resume tasks.
	// Ask differently depending on whether the task had already completed.
	let askType: ClineAsk
	if (lastClineMessage?.ask === "completion_result") {
		askType = "resume_completed_task"
	} else {
		askType = "resume_task"
	}
	this.isInitialized = true
	const { response, text, images } = await this.ask(askType) // Calls `postStateToWebview`.
	let responseText: string | undefined
	let responseImages: string[] | undefined
	if (response === "messageResponse") {
		await this.say("user_feedback", text, images)
		responseText = text
		responseImages = images
	}
	// Make sure that the api conversation history can be resumed by the API,
	// even if it goes out of sync with cline messages.
	let existingApiConversationHistory: ApiMessage[] = await this.getSavedApiConversationHistory()
	// v2.0 xml tags refactor caveat: since we don't use tools anymore for XML protocol,
	// we need to replace all tool use blocks with a text block since the API disallows
	// conversations with tool uses and no tool schema.
	// For native protocol, we preserve tool_use and tool_result blocks as they're expected by the API.
	const state = await this.providerRef.deref()?.getState()
	// NOTE(review): `state` is not read anywhere below — candidate for removal.
	const protocol = resolveToolProtocol(this.apiConfiguration, this.api.getModel().info)
	const useNative = isNativeProtocol(protocol)
	// Only convert tool blocks to text for XML protocol.
	// For native protocol, the API expects proper tool_use/tool_result structure.
	if (!useNative) {
		const conversationWithoutToolBlocks = existingApiConversationHistory.map((message) => {
			if (Array.isArray(message.content)) {
				const newContent = message.content.map((block) => {
					if (block.type === "tool_use") {
						// Format tool invocation based on protocol.
						const params = block.input as Record<string, any>
						const formattedText = formatToolInvocation(block.name, params, protocol)
						return {
							type: "text",
							text: formattedText,
						} as Anthropic.Messages.TextBlockParam
					} else if (block.type === "tool_result") {
						// Convert block.content to text block array, removing images.
						const contentAsTextBlocks = Array.isArray(block.content)
							? block.content.filter((item) => item.type === "text")
							: [{ type: "text", text: block.content }]
						const textContent = contentAsTextBlocks.map((item) => item.text).join("\n\n")
						const toolName = findToolName(block.tool_use_id, existingApiConversationHistory)
						return {
							type: "text",
							text: `[${toolName} Result]\n\n${textContent}`,
						} as Anthropic.Messages.TextBlockParam
					}
					return block
				})
				return { ...message, content: newContent }
			}
			return message
		})
		existingApiConversationHistory = conversationWithoutToolBlocks
	}
	// FIXME: remove tool use blocks altogether
	// If the last message is an assistant message, we need to check if there's
	// tool use since every tool use has to have a tool response. If there's no
	// tool use and only a text block, then we can just add a user message.
	// (Note this isn't relevant anymore since we use custom tool prompts
	// instead of tool use blocks, but this is here for legacy purposes in case
	// users resume old tasks.)
	// If the last message is a user message, we need to get the assistant
	// message before it to see if it made tool calls, and if so, fill in the
	// remaining tool responses with 'interrupted'.
	let modifiedOldUserContent: Anthropic.Messages.ContentBlockParam[] // either the last message if its user message, or the user message before the last (assistant) message
	let modifiedApiConversationHistory: ApiMessage[] // need to remove the last user message to replace with new modified user message
	if (existingApiConversationHistory.length > 0) {
		const lastMessage = existingApiConversationHistory[existingApiConversationHistory.length - 1]
		if (lastMessage.role === "assistant") {
			const content = Array.isArray(lastMessage.content)
				? lastMessage.content
				: [{ type: "text", text: lastMessage.content }]
			const hasToolUse = content.some((block) => block.type === "tool_use")
			if (hasToolUse) {
				// Synthesize "interrupted" results for every dangling tool call.
				const toolUseBlocks = content.filter(
					(block) => block.type === "tool_use",
				) as Anthropic.Messages.ToolUseBlock[]
				const toolResponses: Anthropic.ToolResultBlockParam[] = toolUseBlocks.map((block) => ({
					type: "tool_result",
					tool_use_id: block.id,
					content: "Task was interrupted before this tool call could be completed.",
				}))
				modifiedApiConversationHistory = [...existingApiConversationHistory] // no changes
				modifiedOldUserContent = [...toolResponses]
			} else {
				modifiedApiConversationHistory = [...existingApiConversationHistory]
				modifiedOldUserContent = []
			}
		} else if (lastMessage.role === "user") {
			const previousAssistantMessage: ApiMessage | undefined =
				existingApiConversationHistory[existingApiConversationHistory.length - 2]
			const existingUserContent: Anthropic.Messages.ContentBlockParam[] = Array.isArray(lastMessage.content)
				? lastMessage.content
				: [{ type: "text", text: lastMessage.content }]
			if (previousAssistantMessage && previousAssistantMessage.role === "assistant") {
				const assistantContent = Array.isArray(previousAssistantMessage.content)
					? previousAssistantMessage.content
					: [{ type: "text", text: previousAssistantMessage.content }]
				const toolUseBlocks = assistantContent.filter(
					(block) => block.type === "tool_use",
				) as Anthropic.Messages.ToolUseBlock[]
				if (toolUseBlocks.length > 0) {
					// Only synthesize results for tool calls that do not already
					// have a matching tool_result in the user message.
					const existingToolResults = existingUserContent.filter(
						(block) => block.type === "tool_result",
					) as Anthropic.ToolResultBlockParam[]
					const missingToolResponses: Anthropic.ToolResultBlockParam[] = toolUseBlocks
						.filter(
							(toolUse) => !existingToolResults.some((result) => result.tool_use_id === toolUse.id),
						)
						.map((toolUse) => ({
							type: "tool_result",
							tool_use_id: toolUse.id,
							content: "Task was interrupted before this tool call could be completed.",
						}))
					modifiedApiConversationHistory = existingApiConversationHistory.slice(0, -1) // removes the last user message
					modifiedOldUserContent = [...existingUserContent, ...missingToolResponses]
				} else {
					modifiedApiConversationHistory = existingApiConversationHistory.slice(0, -1)
					modifiedOldUserContent = [...existingUserContent]
				}
			} else {
				modifiedApiConversationHistory = existingApiConversationHistory.slice(0, -1)
				modifiedOldUserContent = [...existingUserContent]
			}
		} else {
			throw new Error("Unexpected: Last message is not a user or assistant message")
		}
	} else {
		throw new Error("Unexpected: No existing API conversation history")
	}
	let newUserContent: Anthropic.Messages.ContentBlockParam[] = [...modifiedOldUserContent]
	// Human-readable elapsed time since the last saved message.
	const agoText = ((): string => {
		const timestamp = lastClineMessage?.ts ?? Date.now()
		const now = Date.now()
		const diff = now - timestamp
		const minutes = Math.floor(diff / 60000)
		const hours = Math.floor(minutes / 60)
		const days = Math.floor(hours / 24)
		if (days > 0) {
			return `${days} day${days > 1 ? "s" : ""} ago`
		}
		if (hours > 0) {
			return `${hours} hour${hours > 1 ? "s" : ""} ago`
		}
		if (minutes > 0) {
			return `${minutes} minute${minutes > 1 ? "s" : ""} ago`
		}
		return "just now"
	})()
	// NOTE(review): `agoText` is never used in the code below — confirm whether
	// it should appear in the resumption message or be removed.
	if (responseText) {
		newUserContent.push({
			type: "text",
			text: `\n\nNew instructions for task continuation:\n<user_message>\n${responseText}\n</user_message>`,
		})
	}
	if (responseImages && responseImages.length > 0) {
		newUserContent.push(...formatResponse.imageBlocks(responseImages))
	}
	// Ensure we have at least some content to send to the API.
	// If newUserContent is empty, add a minimal resumption message.
	if (newUserContent.length === 0) {
		newUserContent.push({
			type: "text",
			text: "[TASK RESUMPTION] Resuming task...",
		})
	}
	await this.overwriteApiConversationHistory(modifiedApiConversationHistory)
	// Task resuming from history item.
	await this.initiateTaskLoop(newUserContent)
}
  1545. /**
  1546. * Cancels the current HTTP request if one is in progress.
  1547. * This immediately aborts the underlying stream rather than waiting for the next chunk.
  1548. */
  1549. public cancelCurrentRequest(): void {
  1550. if (this.currentRequestAbortController) {
  1551. console.log(`[Task#${this.taskId}.${this.instanceId}] Aborting current HTTP request`)
  1552. this.currentRequestAbortController.abort()
  1553. this.currentRequestAbortController = undefined
  1554. }
  1555. }
  1556. public async abortTask(isAbandoned = false) {
  1557. // Aborting task
  1558. // Will stop any autonomously running promises.
  1559. if (isAbandoned) {
  1560. this.abandoned = true
  1561. }
  1562. this.abort = true
  1563. this.emit(RooCodeEventName.TaskAborted)
  1564. try {
  1565. this.dispose() // Call the centralized dispose method
  1566. } catch (error) {
  1567. console.error(`Error during task ${this.taskId}.${this.instanceId} disposal:`, error)
  1568. // Don't rethrow - we want abort to always succeed
  1569. }
  1570. // Save the countdown message in the automatic retry or other content.
  1571. try {
  1572. // Save the countdown message in the automatic retry or other content.
  1573. await this.saveClineMessages()
  1574. } catch (error) {
  1575. console.error(`Error saving messages during abort for task ${this.taskId}.${this.instanceId}:`, error)
  1576. }
  1577. }
/**
 * Centralized teardown for the task: cancels in-flight requests, detaches
 * listeners, releases terminals and browser sessions, and disposes owned
 * services. Each step is wrapped in its own try/catch so that one failure
 * never prevents the remaining cleanup from running.
 */
public dispose(): void {
	console.log(`[Task#dispose] disposing task ${this.taskId}.${this.instanceId}`)
	// Cancel any in-progress HTTP request.
	try {
		this.cancelCurrentRequest()
	} catch (error) {
		console.error("Error cancelling current request:", error)
	}
	// Remove provider profile change listener.
	try {
		if (this.providerProfileChangeListener) {
			const provider = this.providerRef.deref()
			if (provider) {
				provider.off(RooCodeEventName.ProviderProfileChanged, this.providerProfileChangeListener)
			}
			this.providerProfileChangeListener = undefined
		}
	} catch (error) {
		console.error("Error removing provider profile change listener:", error)
	}
	// Dispose message queue and remove event listeners.
	try {
		if (this.messageQueueStateChangedHandler) {
			this.messageQueueService.removeListener("stateChanged", this.messageQueueStateChangedHandler)
			this.messageQueueStateChangedHandler = undefined
		}
		this.messageQueueService.dispose()
	} catch (error) {
		console.error("Error disposing message queue:", error)
	}
	// Remove all event listeners to prevent memory leaks.
	try {
		this.removeAllListeners()
	} catch (error) {
		console.error("Error removing event listeners:", error)
	}
	// Stop waiting for child task completion.
	if (this.pauseInterval) {
		clearInterval(this.pauseInterval)
		this.pauseInterval = undefined
	}
	if (this.enableBridge) {
		// Fire-and-forget: unsubscription failures are only logged.
		BridgeOrchestrator.getInstance()
			?.unsubscribeFromTask(this.taskId)
			.catch((error) =>
				console.error(
					`[Task#dispose] BridgeOrchestrator#unsubscribeFromTask() failed: ${error instanceof Error ? error.message : String(error)}`,
				),
			)
	}
	// Release any terminals associated with this task.
	try {
		TerminalRegistry.releaseTerminalsForTask(this.taskId)
	} catch (error) {
		console.error("Error releasing terminals:", error)
	}
	// NOTE(review): if closeBrowser() is async, these try/catch blocks only
	// cover synchronous throws — confirm rejections are handled elsewhere.
	try {
		this.urlContentFetcher.closeBrowser()
	} catch (error) {
		console.error("Error closing URL content fetcher browser:", error)
	}
	try {
		this.browserSession.closeBrowser()
	} catch (error) {
		console.error("Error closing browser session:", error)
	}
	// Also close the Browser Session panel when the task is disposed.
	try {
		const provider = this.providerRef.deref()
		if (provider) {
			// Lazy require — presumably to avoid a circular import at module
			// load time; TODO confirm.
			const { BrowserSessionPanelManager } = require("../webview/BrowserSessionPanelManager")
			BrowserSessionPanelManager.getInstance(provider).dispose()
		}
	} catch (error) {
		console.error("Error closing browser session panel:", error)
	}
	try {
		if (this.rooIgnoreController) {
			this.rooIgnoreController.dispose()
			this.rooIgnoreController = undefined
		}
	} catch (error) {
		console.error("Error disposing RooIgnoreController:", error)
		// This is the critical one for the leak fix.
	}
	try {
		this.fileContextTracker.dispose()
	} catch (error) {
		console.error("Error disposing file context tracker:", error)
	}
	try {
		// If we're not streaming then `abortStream` won't be called.
		if (this.isStreaming && this.diffViewProvider.isEditing) {
			this.diffViewProvider.revertChanges().catch(console.error)
		}
	} catch (error) {
		console.error("Error reverting diff changes:", error)
	}
}
  1678. // Subtasks
  1679. // Spawn / Wait / Complete
  1680. public async startSubtask(message: string, initialTodos: TodoItem[], mode: string) {
  1681. const provider = this.providerRef.deref()
  1682. if (!provider) {
  1683. throw new Error("Provider not available")
  1684. }
  1685. const newTask = await provider.createTask(message, undefined, this, { initialTodos })
  1686. if (newTask) {
  1687. this.isPaused = true // Pause parent.
  1688. this.childTaskId = newTask.taskId
  1689. await provider.handleModeSwitch(mode) // Set child's mode.
  1690. await delay(500) // Allow mode change to take effect.
  1691. this.emit(RooCodeEventName.TaskPaused, this.taskId)
  1692. this.emit(RooCodeEventName.TaskSpawned, newTask.taskId)
  1693. }
  1694. return newTask
  1695. }
  1696. // Used when a sub-task is launched and the parent task is waiting for it to
  1697. // finish.
  1698. // TBD: Add a timeout to prevent infinite waiting.
  1699. public async waitForSubtask() {
  1700. await new Promise<void>((resolve) => {
  1701. this.pauseInterval = setInterval(() => {
  1702. if (!this.isPaused) {
  1703. clearInterval(this.pauseInterval)
  1704. this.pauseInterval = undefined
  1705. resolve()
  1706. }
  1707. }, 1000)
  1708. })
  1709. }
  1710. public async completeSubtask(lastMessage: string) {
  1711. this.isPaused = false
  1712. this.childTaskId = undefined
  1713. this.emit(RooCodeEventName.TaskUnpaused, this.taskId)
  1714. // Fake an answer from the subtask that it has completed running and
  1715. // this is the result of what it has done add the message to the chat
  1716. // history and to the webview ui.
  1717. try {
  1718. await this.say("subtask_result", lastMessage)
  1719. // Check if using native protocol to determine how to add the subtask result
  1720. const modelInfo = this.api.getModel().info
  1721. const toolProtocol = resolveToolProtocol(this.apiConfiguration, modelInfo)
  1722. if (toolProtocol === "native" && this.pendingNewTaskToolCallId) {
  1723. // For native protocol, push the actual tool_result with the subtask's real result.
  1724. // NewTaskTool deferred pushing the tool_result until now so that the parent task
  1725. // gets useful information about what the subtask actually accomplished.
  1726. this.userMessageContent.push({
  1727. type: "tool_result",
  1728. tool_use_id: this.pendingNewTaskToolCallId,
  1729. content: `[new_task completed] Result: ${lastMessage}`,
  1730. } as Anthropic.ToolResultBlockParam)
  1731. // Clear the pending tool call ID
  1732. this.pendingNewTaskToolCallId = undefined
  1733. } else {
  1734. // For XML protocol (or if no pending tool call ID), add as a separate user message
  1735. await this.addToApiConversationHistory({
  1736. role: "user",
  1737. content: [{ type: "text", text: `[new_task completed] Result: ${lastMessage}` }],
  1738. })
  1739. }
  1740. } catch (error) {
  1741. this.providerRef
  1742. .deref()
  1743. ?.log(`Error failed to add reply from subtask into conversation of parent task, error: ${error}`)
  1744. throw error
  1745. }
  1746. }
  1747. // Task Loop
  1748. private async initiateTaskLoop(userContent: Anthropic.Messages.ContentBlockParam[]): Promise<void> {
  1749. // Kicks off the checkpoints initialization process in the background.
  1750. getCheckpointService(this)
  1751. let nextUserContent = userContent
  1752. let includeFileDetails = true
  1753. this.emit(RooCodeEventName.TaskStarted)
  1754. while (!this.abort) {
  1755. const didEndLoop = await this.recursivelyMakeClineRequests(nextUserContent, includeFileDetails)
  1756. includeFileDetails = false // We only need file details the first time.
  1757. // The way this agentic loop works is that cline will be given a
  1758. // task that he then calls tools to complete. Unless there's an
  1759. // attempt_completion call, we keep responding back to him with his
  1760. // tool's responses until he either attempt_completion or does not
  1761. // use anymore tools. If he does not use anymore tools, we ask him
  1762. // to consider if he's completed the task and then call
  1763. // attempt_completion, otherwise proceed with completing the task.
  1764. // There is a MAX_REQUESTS_PER_TASK limit to prevent infinite
  1765. // requests, but Cline is prompted to finish the task as efficiently
  1766. // as he can.
  1767. if (didEndLoop) {
  1768. // For now a task never 'completes'. This will only happen if
  1769. // the user hits max requests and denies resetting the count.
  1770. break
  1771. } else {
  1772. const modelInfo = this.api.getModel().info
  1773. const state = await this.providerRef.deref()?.getState()
  1774. const toolProtocol = resolveToolProtocol(this.apiConfiguration, modelInfo)
  1775. nextUserContent = [{ type: "text", text: formatResponse.noToolsUsed(toolProtocol) }]
  1776. this.consecutiveMistakeCount++
  1777. }
  1778. }
  1779. }
  1780. public async recursivelyMakeClineRequests(
  1781. userContent: Anthropic.Messages.ContentBlockParam[],
  1782. includeFileDetails: boolean = false,
  1783. ): Promise<boolean> {
  1784. interface StackItem {
  1785. userContent: Anthropic.Messages.ContentBlockParam[]
  1786. includeFileDetails: boolean
  1787. retryAttempt?: number
  1788. userMessageWasRemoved?: boolean // Track if user message was removed due to empty response
  1789. }
  1790. const stack: StackItem[] = [{ userContent, includeFileDetails, retryAttempt: 0 }]
  1791. while (stack.length > 0) {
  1792. const currentItem = stack.pop()!
  1793. const currentUserContent = currentItem.userContent
  1794. const currentIncludeFileDetails = currentItem.includeFileDetails
  1795. if (this.abort) {
  1796. throw new Error(`[RooCode#recursivelyMakeRooRequests] task ${this.taskId}.${this.instanceId} aborted`)
  1797. }
  1798. if (this.consecutiveMistakeLimit > 0 && this.consecutiveMistakeCount >= this.consecutiveMistakeLimit) {
  1799. const { response, text, images } = await this.ask(
  1800. "mistake_limit_reached",
  1801. t("common:errors.mistake_limit_guidance"),
  1802. )
  1803. if (response === "messageResponse") {
  1804. currentUserContent.push(
  1805. ...[
  1806. { type: "text" as const, text: formatResponse.tooManyMistakes(text) },
  1807. ...formatResponse.imageBlocks(images),
  1808. ],
  1809. )
  1810. await this.say("user_feedback", text, images)
  1811. // Track consecutive mistake errors in telemetry.
  1812. TelemetryService.instance.captureConsecutiveMistakeError(this.taskId)
  1813. }
  1814. this.consecutiveMistakeCount = 0
  1815. }
  1816. // In this Cline request loop, we need to check if this task instance
  1817. // has been asked to wait for a subtask to finish before continuing.
  1818. const provider = this.providerRef.deref()
  1819. if (this.isPaused && provider) {
  1820. provider.log(`[subtasks] paused ${this.taskId}.${this.instanceId}`)
  1821. await this.waitForSubtask()
  1822. provider.log(`[subtasks] resumed ${this.taskId}.${this.instanceId}`)
  1823. // After subtask completes, completeSubtask has pushed content to userMessageContent.
  1824. // Copy it to currentUserContent so it gets sent to the API in this iteration.
  1825. if (this.userMessageContent.length > 0) {
  1826. currentUserContent.push(...this.userMessageContent)
  1827. this.userMessageContent = []
  1828. }
  1829. const currentMode = (await provider.getState())?.mode ?? defaultModeSlug
  1830. if (currentMode !== this.pausedModeSlug) {
  1831. // The mode has changed, we need to switch back to the paused mode.
  1832. await provider.handleModeSwitch(this.pausedModeSlug)
  1833. // Delay to allow mode change to take effect before next tool is executed.
  1834. await delay(500)
  1835. provider.log(
  1836. `[subtasks] task ${this.taskId}.${this.instanceId} has switched back to '${this.pausedModeSlug}' from '${currentMode}'`,
  1837. )
  1838. }
  1839. }
  1840. // Getting verbose details is an expensive operation, it uses ripgrep to
  1841. // top-down build file structure of project which for large projects can
  1842. // take a few seconds. For the best UX we show a placeholder api_req_started
  1843. // message with a loading spinner as this happens.
  1844. // Determine API protocol based on provider and model
  1845. const modelId = getModelId(this.apiConfiguration)
  1846. const apiProtocol = getApiProtocol(this.apiConfiguration.apiProvider, modelId)
  1847. await this.say(
  1848. "api_req_started",
  1849. JSON.stringify({
  1850. apiProtocol,
  1851. }),
  1852. )
  1853. const {
  1854. showRooIgnoredFiles = false,
  1855. includeDiagnosticMessages = true,
  1856. maxDiagnosticMessages = 50,
  1857. maxReadFileLine = -1,
  1858. } = (await this.providerRef.deref()?.getState()) ?? {}
  1859. const parsedUserContent = await processUserContentMentions({
  1860. userContent: currentUserContent,
  1861. cwd: this.cwd,
  1862. urlContentFetcher: this.urlContentFetcher,
  1863. fileContextTracker: this.fileContextTracker,
  1864. rooIgnoreController: this.rooIgnoreController,
  1865. showRooIgnoredFiles,
  1866. includeDiagnosticMessages,
  1867. maxDiagnosticMessages,
  1868. maxReadFileLine,
  1869. })
  1870. const environmentDetails = await getEnvironmentDetails(this, currentIncludeFileDetails)
  1871. // Remove any existing environment_details blocks before adding fresh ones.
  1872. // This prevents duplicate environment details when resuming tasks with XML tool calls,
  1873. // where the old user message content may already contain environment details from the previous session.
  1874. // We check for both opening and closing tags to ensure we're matching complete environment detail blocks,
  1875. // not just mentions of the tag in regular content.
  1876. const contentWithoutEnvDetails = parsedUserContent.filter((block) => {
  1877. if (block.type === "text" && typeof block.text === "string") {
  1878. // Check if this text block is a complete environment_details block
  1879. // by verifying it starts with the opening tag and ends with the closing tag
  1880. const isEnvironmentDetailsBlock =
  1881. block.text.trim().startsWith("<environment_details>") &&
  1882. block.text.trim().endsWith("</environment_details>")
  1883. return !isEnvironmentDetailsBlock
  1884. }
  1885. return true
  1886. })
  1887. // Add environment details as its own text block, separate from tool
  1888. // results.
  1889. const finalUserContent = [...contentWithoutEnvDetails, { type: "text" as const, text: environmentDetails }]
  1890. // Only add user message to conversation history if:
  1891. // 1. This is the first attempt (retryAttempt === 0), OR
  1892. // 2. The message was removed in a previous iteration (userMessageWasRemoved === true)
  1893. // This prevents consecutive user messages while allowing re-add when needed
  1894. if ((currentItem.retryAttempt ?? 0) === 0 || currentItem.userMessageWasRemoved) {
  1895. await this.addToApiConversationHistory({ role: "user", content: finalUserContent })
  1896. TelemetryService.instance.captureConversationMessage(this.taskId, "user")
  1897. }
  1898. // Since we sent off a placeholder api_req_started message to update the
  1899. // webview while waiting to actually start the API request (to load
  1900. // potential details for example), we need to update the text of that
  1901. // message.
  1902. const lastApiReqIndex = findLastIndex(this.clineMessages, (m) => m.say === "api_req_started")
  1903. this.clineMessages[lastApiReqIndex].text = JSON.stringify({
  1904. apiProtocol,
  1905. } satisfies ClineApiReqInfo)
  1906. await this.saveClineMessages()
  1907. await provider?.postStateToWebview()
  1908. try {
  1909. let cacheWriteTokens = 0
  1910. let cacheReadTokens = 0
  1911. let inputTokens = 0
  1912. let outputTokens = 0
  1913. let totalCost: number | undefined
  1914. // We can't use `api_req_finished` anymore since it's a unique case
  1915. // where it could come after a streaming message (i.e. in the middle
  1916. // of being updated or executed).
  1917. // Fortunately `api_req_finished` was always parsed out for the GUI
  1918. // anyways, so it remains solely for legacy purposes to keep track
  1919. // of prices in tasks from history (it's worth removing a few months
  1920. // from now).
  1921. const updateApiReqMsg = (cancelReason?: ClineApiReqCancelReason, streamingFailedMessage?: string) => {
  1922. if (lastApiReqIndex < 0 || !this.clineMessages[lastApiReqIndex]) {
  1923. return
  1924. }
  1925. const existingData = JSON.parse(this.clineMessages[lastApiReqIndex].text || "{}")
  1926. // Calculate total tokens and cost using provider-aware function
  1927. const modelId = getModelId(this.apiConfiguration)
  1928. const apiProtocol = getApiProtocol(this.apiConfiguration.apiProvider, modelId)
  1929. const costResult =
  1930. apiProtocol === "anthropic"
  1931. ? calculateApiCostAnthropic(
  1932. streamModelInfo,
  1933. inputTokens,
  1934. outputTokens,
  1935. cacheWriteTokens,
  1936. cacheReadTokens,
  1937. )
  1938. : calculateApiCostOpenAI(
  1939. streamModelInfo,
  1940. inputTokens,
  1941. outputTokens,
  1942. cacheWriteTokens,
  1943. cacheReadTokens,
  1944. )
  1945. this.clineMessages[lastApiReqIndex].text = JSON.stringify({
  1946. ...existingData,
  1947. tokensIn: costResult.totalInputTokens,
  1948. tokensOut: costResult.totalOutputTokens,
  1949. cacheWrites: cacheWriteTokens,
  1950. cacheReads: cacheReadTokens,
  1951. cost: totalCost ?? costResult.totalCost,
  1952. cancelReason,
  1953. streamingFailedMessage,
  1954. } satisfies ClineApiReqInfo)
  1955. }
// Gracefully shuts down an in-flight streaming response: reverts any open
// diff-view edit, finalizes a trailing partial UI message, records the
// cancellation reason and partial-stream cost on the `api_req_started`
// message, and persists everything to disk.
// @param cancelReason why the stream was stopped (user cancel vs. failure)
// @param streamingFailedMessage optional error text shown for failed streams
const abortStream = async (cancelReason: ClineApiReqCancelReason, streamingFailedMessage?: string) => {
	if (this.diffViewProvider.isEditing) {
		await this.diffViewProvider.revertChanges() // closes diff view
	}
	// if last message is a partial we need to update and save it
	const lastMessage = this.clineMessages.at(-1)
	if (lastMessage && lastMessage.partial) {
		// lastMessage.ts = Date.now() DO NOT update ts since it is used as a key for virtuoso list
		lastMessage.partial = false
		// instead of streaming partialMessage events, we do a save and post like normal to persist to disk
		console.log("updating partial message", lastMessage)
	}
	// Update `api_req_started` to have cancelled and cost, so that
	// we can display the cost of the partial stream and the cancellation reason
	updateApiReqMsg(cancelReason, streamingFailedMessage)
	await this.saveClineMessages()
	// Signals to provider that it can retrieve the saved messages
	// from disk, as abortTask can not be awaited on in nature.
	this.didFinishAbortingStream = true
}
  1976. // Reset streaming state for each new API request
  1977. this.currentStreamingContentIndex = 0
  1978. this.currentStreamingDidCheckpoint = false
  1979. this.assistantMessageContent = []
  1980. this.didCompleteReadingStream = false
  1981. this.userMessageContent = []
  1982. this.userMessageContentReady = false
  1983. this.didRejectTool = false
  1984. this.didAlreadyUseTool = false
  1985. // Reset tool failure flag for each new assistant turn - this ensures that tool failures
  1986. // only prevent attempt_completion within the same assistant message, not across turns
  1987. // (e.g., if a tool fails, then user sends a message saying "just complete anyway")
  1988. this.didToolFailInCurrentTurn = false
  1989. this.presentAssistantMessageLocked = false
  1990. this.presentAssistantMessageHasPendingUpdates = false
  1991. this.assistantMessageParser?.reset()
  1992. this.streamingToolCallIndices.clear()
  1993. // Clear any leftover streaming tool call state from previous interrupted streams
  1994. NativeToolCallParser.clearAllStreamingToolCalls()
  1995. NativeToolCallParser.clearRawChunkState()
  1996. await this.diffViewProvider.reset()
  1997. // Cache model info once per API request to avoid repeated calls during streaming
  1998. // This is especially important for tools and background usage collection
  1999. this.cachedStreamingModel = this.api.getModel()
  2000. const streamModelInfo = this.cachedStreamingModel.info
  2001. const cachedModelId = this.cachedStreamingModel.id
  2002. const streamProtocol = resolveToolProtocol(this.apiConfiguration, streamModelInfo)
  2003. const shouldUseXmlParser = streamProtocol === "xml"
  2004. // Yields only if the first chunk is successful, otherwise will
  2005. // allow the user to retry the request (most likely due to rate
  2006. // limit error, which gets thrown on the first chunk).
  2007. const stream = this.attemptApiRequest()
  2008. let assistantMessage = ""
  2009. let reasoningMessage = ""
  2010. let pendingGroundingSources: GroundingSource[] = []
  2011. this.isStreaming = true
  2012. try {
  2013. const iterator = stream[Symbol.asyncIterator]()
  2014. // Helper to race iterator.next() with abort signal
  2015. const nextChunkWithAbort = async () => {
  2016. const nextPromise = iterator.next()
  2017. // If we have an abort controller, race it with the next chunk
  2018. if (this.currentRequestAbortController) {
  2019. const abortPromise = new Promise<never>((_, reject) => {
  2020. const signal = this.currentRequestAbortController!.signal
  2021. if (signal.aborted) {
  2022. reject(new Error("Request cancelled by user"))
  2023. } else {
  2024. signal.addEventListener("abort", () => {
  2025. reject(new Error("Request cancelled by user"))
  2026. })
  2027. }
  2028. })
  2029. return await Promise.race([nextPromise, abortPromise])
  2030. }
  2031. // No abort controller, just return the next chunk normally
  2032. return await nextPromise
  2033. }
  2034. let item = await nextChunkWithAbort()
  2035. while (!item.done) {
  2036. const chunk = item.value
  2037. item = await nextChunkWithAbort()
  2038. if (!chunk) {
  2039. // Sometimes chunk is undefined, no idea that can cause
  2040. // it, but this workaround seems to fix it.
  2041. continue
  2042. }
  2043. switch (chunk.type) {
  2044. case "reasoning": {
  2045. reasoningMessage += chunk.text
  2046. // Only apply formatting if the message contains sentence-ending punctuation followed by **
  2047. let formattedReasoning = reasoningMessage
  2048. if (reasoningMessage.includes("**")) {
  2049. // Add line breaks before **Title** patterns that appear after sentence endings
  2050. // This targets section headers like "...end of sentence.**Title Here**"
  2051. // Handles periods, exclamation marks, and question marks
  2052. formattedReasoning = reasoningMessage.replace(
  2053. /([.!?])\*\*([^*\n]+)\*\*/g,
  2054. "$1\n\n**$2**",
  2055. )
  2056. }
  2057. await this.say("reasoning", formattedReasoning, undefined, true)
  2058. break
  2059. }
  2060. case "usage":
  2061. inputTokens += chunk.inputTokens
  2062. outputTokens += chunk.outputTokens
  2063. cacheWriteTokens += chunk.cacheWriteTokens ?? 0
  2064. cacheReadTokens += chunk.cacheReadTokens ?? 0
  2065. totalCost = chunk.totalCost
  2066. break
  2067. case "grounding":
  2068. // Handle grounding sources separately from regular content
  2069. // to prevent state persistence issues - store them separately
  2070. if (chunk.sources && chunk.sources.length > 0) {
  2071. pendingGroundingSources.push(...chunk.sources)
  2072. }
  2073. break
  2074. case "tool_call_partial": {
  2075. // Process raw tool call chunk through NativeToolCallParser
  2076. // which handles tracking, buffering, and emits events
  2077. const events = NativeToolCallParser.processRawChunk({
  2078. index: chunk.index,
  2079. id: chunk.id,
  2080. name: chunk.name,
  2081. arguments: chunk.arguments,
  2082. })
  2083. for (const event of events) {
  2084. if (event.type === "tool_call_start") {
  2085. // Initialize streaming in NativeToolCallParser
  2086. NativeToolCallParser.startStreamingToolCall(event.id, event.name as ToolName)
  2087. // Before adding a new tool, finalize any preceding text block
  2088. // This prevents the text block from blocking tool presentation
  2089. const lastBlock =
  2090. this.assistantMessageContent[this.assistantMessageContent.length - 1]
  2091. if (lastBlock?.type === "text" && lastBlock.partial) {
  2092. lastBlock.partial = false
  2093. }
  2094. // Track the index where this tool will be stored
  2095. const toolUseIndex = this.assistantMessageContent.length
  2096. this.streamingToolCallIndices.set(event.id, toolUseIndex)
  2097. // Create initial partial tool use
  2098. const partialToolUse: ToolUse = {
  2099. type: "tool_use",
  2100. name: event.name as ToolName,
  2101. params: {},
  2102. partial: true,
  2103. }
  2104. // Store the ID for native protocol
  2105. ;(partialToolUse as any).id = event.id
  2106. // Add to content and present
  2107. this.assistantMessageContent.push(partialToolUse)
  2108. this.userMessageContentReady = false
  2109. presentAssistantMessage(this)
  2110. } else if (event.type === "tool_call_delta") {
  2111. // Process chunk using streaming JSON parser
  2112. const partialToolUse = NativeToolCallParser.processStreamingChunk(
  2113. event.id,
  2114. event.delta,
  2115. )
  2116. if (partialToolUse) {
  2117. // Get the index for this tool call
  2118. const toolUseIndex = this.streamingToolCallIndices.get(event.id)
  2119. if (toolUseIndex !== undefined) {
  2120. // Store the ID for native protocol
  2121. ;(partialToolUse as any).id = event.id
  2122. // Update the existing tool use with new partial data
  2123. this.assistantMessageContent[toolUseIndex] = partialToolUse
  2124. // Present updated tool use
  2125. presentAssistantMessage(this)
  2126. }
  2127. }
  2128. } else if (event.type === "tool_call_end") {
  2129. // Finalize the streaming tool call
  2130. const finalToolUse = NativeToolCallParser.finalizeStreamingToolCall(event.id)
  2131. if (finalToolUse) {
  2132. // Store the tool call ID
  2133. ;(finalToolUse as any).id = event.id
  2134. // Get the index and replace partial with final
  2135. const toolUseIndex = this.streamingToolCallIndices.get(event.id)
  2136. if (toolUseIndex !== undefined) {
  2137. this.assistantMessageContent[toolUseIndex] = finalToolUse
  2138. }
  2139. // Clean up tracking
  2140. this.streamingToolCallIndices.delete(event.id)
  2141. // Mark that we have new content to process
  2142. this.userMessageContentReady = false
  2143. // Present the finalized tool call
  2144. presentAssistantMessage(this)
  2145. }
  2146. }
  2147. }
  2148. break
  2149. }
  2150. case "tool_call": {
  2151. // Legacy: Handle complete tool calls (for backward compatibility)
  2152. // Convert native tool call to ToolUse format
  2153. const toolUse = NativeToolCallParser.parseToolCall({
  2154. id: chunk.id,
  2155. name: chunk.name as ToolName,
  2156. arguments: chunk.arguments,
  2157. })
  2158. if (!toolUse) {
  2159. console.error(`Failed to parse tool call for task ${this.taskId}:`, chunk)
  2160. break
  2161. }
  2162. // Store the tool call ID on the ToolUse object for later reference
  2163. // This is needed to create tool_result blocks that reference the correct tool_use_id
  2164. toolUse.id = chunk.id
  2165. // Add the tool use to assistant message content
  2166. this.assistantMessageContent.push(toolUse)
  2167. // Mark that we have new content to process
  2168. this.userMessageContentReady = false
  2169. // Present the tool call to user - presentAssistantMessage will execute
  2170. // tools sequentially and accumulate all results in userMessageContent
  2171. presentAssistantMessage(this)
  2172. break
  2173. }
  2174. case "text": {
  2175. assistantMessage += chunk.text
  2176. // Use the protocol determined at the start of streaming
  2177. // Don't rely solely on parser existence - parser might exist from previous state
  2178. if (shouldUseXmlParser && this.assistantMessageParser) {
  2179. // XML protocol: Parse raw assistant message chunk into content blocks
  2180. const prevLength = this.assistantMessageContent.length
  2181. this.assistantMessageContent = this.assistantMessageParser.processChunk(chunk.text)
  2182. if (this.assistantMessageContent.length > prevLength) {
  2183. // New content we need to present, reset to
  2184. // false in case previous content set this to true.
  2185. this.userMessageContentReady = false
  2186. }
  2187. // Present content to user.
  2188. presentAssistantMessage(this)
  2189. } else {
  2190. // Native protocol: Text chunks are plain text, not XML tool calls
  2191. // Create or update a text content block directly
  2192. const lastBlock =
  2193. this.assistantMessageContent[this.assistantMessageContent.length - 1]
  2194. if (lastBlock?.type === "text" && lastBlock.partial) {
  2195. // Update existing partial text block
  2196. lastBlock.content = assistantMessage
  2197. } else {
  2198. // Create new text block
  2199. this.assistantMessageContent.push({
  2200. type: "text",
  2201. content: assistantMessage,
  2202. partial: true,
  2203. })
  2204. this.userMessageContentReady = false
  2205. }
  2206. // Present content to user
  2207. presentAssistantMessage(this)
  2208. }
  2209. break
  2210. }
  2211. }
  2212. if (this.abort) {
  2213. console.log(`aborting stream, this.abandoned = ${this.abandoned}`)
  2214. if (!this.abandoned) {
  2215. // Only need to gracefully abort if this instance
  2216. // isn't abandoned (sometimes OpenRouter stream
  2217. // hangs, in which case this would affect future
  2218. // instances of Cline).
  2219. await abortStream("user_cancelled")
  2220. }
  2221. break // Aborts the stream.
  2222. }
  2223. if (this.didRejectTool) {
  2224. // `userContent` has a tool rejection, so interrupt the
  2225. // assistant's response to present the user's feedback.
  2226. assistantMessage += "\n\n[Response interrupted by user feedback]"
  2227. // Instead of setting this preemptively, we allow the
  2228. // present iterator to finish and set
  2229. // userMessageContentReady when its ready.
  2230. // this.userMessageContentReady = true
  2231. break
  2232. }
  2233. if (this.didAlreadyUseTool) {
  2234. assistantMessage +=
  2235. "\n\n[Response interrupted by a tool use result. Only one tool may be used at a time and should be placed at the end of the message.]"
  2236. break
  2237. }
  2238. }
  2239. // Finalize any remaining streaming tool calls that weren't explicitly ended
  2240. // This is critical for MCP tools which need tool_call_end events to be properly
  2241. // converted from ToolUse to McpToolUse via finalizeStreamingToolCall()
  2242. const finalizeEvents = NativeToolCallParser.finalizeRawChunks()
  2243. for (const event of finalizeEvents) {
  2244. if (event.type === "tool_call_end") {
  2245. // Finalize the streaming tool call
  2246. const finalToolUse = NativeToolCallParser.finalizeStreamingToolCall(event.id)
  2247. if (finalToolUse) {
  2248. // Store the tool call ID
  2249. ;(finalToolUse as any).id = event.id
  2250. // Get the index and replace partial with final
  2251. const toolUseIndex = this.streamingToolCallIndices.get(event.id)
  2252. if (toolUseIndex !== undefined) {
  2253. this.assistantMessageContent[toolUseIndex] = finalToolUse
  2254. }
  2255. // Clean up tracking
  2256. this.streamingToolCallIndices.delete(event.id)
  2257. // Mark that we have new content to process
  2258. this.userMessageContentReady = false
  2259. // Present the finalized tool call
  2260. presentAssistantMessage(this)
  2261. }
  2262. }
  2263. }
  2264. // Create a copy of current token values to avoid race conditions
  2265. const currentTokens = {
  2266. input: inputTokens,
  2267. output: outputTokens,
  2268. cacheWrite: cacheWriteTokens,
  2269. cacheRead: cacheReadTokens,
  2270. total: totalCost,
  2271. }
// Background task: after the main loop stops consuming the stream (finished,
// tool interruption, or rejection), keep draining the SAME iterator to pick
// up trailing `usage` chunks that some providers emit after the content, then
// write the final token/cost numbers back onto the `api_req_started` message
// and into telemetry. Invoked fire-and-forget; the caller attaches a .catch.
// Assumes it is the sole consumer of `iterator`/`item` from this point on.
// @param apiReqIndex index of the `api_req_started` message in clineMessages
const drainStreamInBackgroundToFindAllUsage = async (apiReqIndex: number) => {
	const timeoutMs = DEFAULT_USAGE_COLLECTION_TIMEOUT_MS
	const startTime = performance.now()
	const modelId = getModelId(this.apiConfiguration)
	// Local variables to accumulate usage data without affecting the main flow
	let bgInputTokens = currentTokens.input
	let bgOutputTokens = currentTokens.output
	let bgCacheWriteTokens = currentTokens.cacheWrite
	let bgCacheReadTokens = currentTokens.cacheRead
	let bgTotalCost = currentTokens.total
	// Helper function to capture telemetry and update messages.
	// Publishes the accumulated totals to the shared outer counters, refreshes
	// the api_req message/webview, and emits a provider-aware cost telemetry
	// event. Skips entirely when every counter is zero.
	// NOTE(review): both call sites below pass `lastApiReqIndex` explicitly,
	// so the `apiReqIndex` default appears to be unused in practice — confirm.
	const captureUsageData = async (
		tokens: {
			input: number
			output: number
			cacheWrite: number
			cacheRead: number
			total?: number
		},
		messageIndex: number = apiReqIndex,
	) => {
		if (
			tokens.input > 0 ||
			tokens.output > 0 ||
			tokens.cacheWrite > 0 ||
			tokens.cacheRead > 0
		) {
			// Update the shared variables atomically
			inputTokens = tokens.input
			outputTokens = tokens.output
			cacheWriteTokens = tokens.cacheWrite
			cacheReadTokens = tokens.cacheRead
			totalCost = tokens.total
			// Update the API request message with the latest usage data
			updateApiReqMsg()
			await this.saveClineMessages()
			// Update the specific message in the webview
			const apiReqMessage = this.clineMessages[messageIndex]
			if (apiReqMessage) {
				await this.updateClineMessage(apiReqMessage)
			}
			// Capture telemetry with provider-aware cost calculation
			const modelId = getModelId(this.apiConfiguration)
			const apiProtocol = getApiProtocol(this.apiConfiguration.apiProvider, modelId)
			// Use the appropriate cost function based on the API protocol
			const costResult =
				apiProtocol === "anthropic"
					? calculateApiCostAnthropic(
							streamModelInfo,
							tokens.input,
							tokens.output,
							tokens.cacheWrite,
							tokens.cacheRead,
						)
					: calculateApiCostOpenAI(
							streamModelInfo,
							tokens.input,
							tokens.output,
							tokens.cacheWrite,
							tokens.cacheRead,
						)
			TelemetryService.instance.captureLlmCompletion(this.taskId, {
				inputTokens: costResult.totalInputTokens,
				outputTokens: costResult.totalOutputTokens,
				cacheWriteTokens: tokens.cacheWrite,
				cacheReadTokens: tokens.cacheRead,
				cost: tokens.total ?? costResult.totalCost,
			})
		}
	}
	try {
		// Continue processing the original stream from where the main loop left off
		let usageFound = false
		let chunkCount = 0
		// Use the same iterator that the main loop was using
		while (!item.done) {
			// Check for timeout so a hung provider stream can't pin this task forever
			if (performance.now() - startTime > timeoutMs) {
				console.warn(
					`[Background Usage Collection] Timed out after ${timeoutMs}ms for model: ${modelId}, processed ${chunkCount} chunks`,
				)
				// Clean up the iterator before breaking
				if (iterator.return) {
					await iterator.return(undefined)
				}
				break
			}
			const chunk = item.value
			item = await iterator.next()
			chunkCount++
			// Only `usage` chunks matter here; all other trailing content is discarded.
			if (chunk && chunk.type === "usage") {
				usageFound = true
				bgInputTokens += chunk.inputTokens
				bgOutputTokens += chunk.outputTokens
				bgCacheWriteTokens += chunk.cacheWriteTokens ?? 0
				bgCacheReadTokens += chunk.cacheReadTokens ?? 0
				// Total cost is a snapshot, not additive — last chunk wins.
				bgTotalCost = chunk.totalCost
			}
		}
		if (
			usageFound ||
			bgInputTokens > 0 ||
			bgOutputTokens > 0 ||
			bgCacheWriteTokens > 0 ||
			bgCacheReadTokens > 0
		) {
			// We have usage data either from a usage chunk or accumulated tokens
			await captureUsageData(
				{
					input: bgInputTokens,
					output: bgOutputTokens,
					cacheWrite: bgCacheWriteTokens,
					cacheRead: bgCacheReadTokens,
					total: bgTotalCost,
				},
				lastApiReqIndex,
			)
		} else {
			console.warn(
				`[Background Usage Collection] Suspicious: request ${apiReqIndex} is complete, but no usage info was found. Model: ${modelId}`,
			)
		}
	} catch (error) {
		console.error("Error draining stream for usage data:", error)
		// Still try to capture whatever usage data we have collected so far
		if (
			bgInputTokens > 0 ||
			bgOutputTokens > 0 ||
			bgCacheWriteTokens > 0 ||
			bgCacheReadTokens > 0
		) {
			await captureUsageData(
				{
					input: bgInputTokens,
					output: bgOutputTokens,
					cacheWrite: bgCacheWriteTokens,
					cacheRead: bgCacheReadTokens,
					total: bgTotalCost,
				},
				lastApiReqIndex,
			)
		}
	}
}
  2416. // Start the background task and handle any errors
  2417. drainStreamInBackgroundToFindAllUsage(lastApiReqIndex).catch((error) => {
  2418. console.error("Background usage collection failed:", error)
  2419. })
  2420. } catch (error) {
  2421. // Abandoned happens when extension is no longer waiting for the
  2422. // Cline instance to finish aborting (error is thrown here when
  2423. // any function in the for loop throws due to this.abort).
  2424. if (!this.abandoned) {
  2425. // Determine cancellation reason
  2426. const cancelReason: ClineApiReqCancelReason = this.abort ? "user_cancelled" : "streaming_failed"
  2427. const streamingFailedMessage = this.abort
  2428. ? undefined
  2429. : (error.message ?? JSON.stringify(serializeError(error), null, 2))
  2430. // Clean up partial state
  2431. await abortStream(cancelReason, streamingFailedMessage)
  2432. if (this.abort) {
  2433. // User cancelled - abort the entire task
  2434. this.abortReason = cancelReason
  2435. await this.abortTask()
  2436. } else {
  2437. // Stream failed - log the error and retry with the same content
  2438. // The existing rate limiting will prevent rapid retries
  2439. console.error(
  2440. `[Task#${this.taskId}.${this.instanceId}] Stream failed, will retry: ${streamingFailedMessage}`,
  2441. )
  2442. // Apply exponential backoff similar to first-chunk errors when auto-resubmit is enabled
  2443. const stateForBackoff = await this.providerRef.deref()?.getState()
  2444. if (stateForBackoff?.autoApprovalEnabled && stateForBackoff?.alwaysApproveResubmit) {
  2445. await this.backoffAndAnnounce(
  2446. currentItem.retryAttempt ?? 0,
  2447. error,
  2448. streamingFailedMessage,
  2449. )
  2450. // Check if task was aborted during the backoff
  2451. if (this.abort) {
  2452. console.log(
  2453. `[Task#${this.taskId}.${this.instanceId}] Task aborted during mid-stream retry backoff`,
  2454. )
  2455. // Abort the entire task
  2456. this.abortReason = "user_cancelled"
  2457. await this.abortTask()
  2458. break
  2459. }
  2460. }
  2461. // Push the same content back onto the stack to retry, incrementing the retry attempt counter
  2462. stack.push({
  2463. userContent: currentUserContent,
  2464. includeFileDetails: false,
  2465. retryAttempt: (currentItem.retryAttempt ?? 0) + 1,
  2466. })
  2467. // Continue to retry the request
  2468. continue
  2469. }
  2470. }
  2471. } finally {
  2472. this.isStreaming = false
  2473. // Clean up the abort controller when streaming completes
  2474. this.currentRequestAbortController = undefined
  2475. }
  2476. // Need to call here in case the stream was aborted.
  2477. if (this.abort || this.abandoned) {
  2478. throw new Error(
  2479. `[RooCode#recursivelyMakeRooRequests] task ${this.taskId}.${this.instanceId} aborted`,
  2480. )
  2481. }
  2482. this.didCompleteReadingStream = true
  2483. // Set any blocks to be complete to allow `presentAssistantMessage`
  2484. // to finish and set `userMessageContentReady` to true.
  2485. // (Could be a text block that had no subsequent tool uses, or a
  2486. // text block at the very end, or an invalid tool use, etc. Whatever
  2487. // the case, `presentAssistantMessage` relies on these blocks either
  2488. // to be completed or the user to reject a block in order to proceed
  2489. // and eventually set userMessageContentReady to true.)
  2490. const partialBlocks = this.assistantMessageContent.filter((block) => block.partial)
  2491. partialBlocks.forEach((block) => (block.partial = false))
  2492. // Can't just do this b/c a tool could be in the middle of executing.
  2493. // this.assistantMessageContent.forEach((e) => (e.partial = false))
  2494. // Now that the stream is complete, finalize any remaining partial content blocks (XML protocol only)
  2495. // Use the protocol determined at the start of streaming
  2496. if (shouldUseXmlParser && this.assistantMessageParser) {
  2497. this.assistantMessageParser.finalizeContentBlocks()
  2498. const parsedBlocks = this.assistantMessageParser.getContentBlocks()
  2499. // For XML protocol: Use only parsed blocks (includes both text and tool_use parsed from XML)
  2500. this.assistantMessageContent = parsedBlocks
  2501. }
  2502. // Only present partial blocks that were just completed (from XML parsing)
  2503. // Native tool blocks were already presented during streaming, so don't re-present them
  2504. if (partialBlocks.length > 0 && partialBlocks.some((block) => block.type !== "tool_use")) {
  2505. // If there is content to update then it will complete and
  2506. // update `this.userMessageContentReady` to true, which we
  2507. // `pWaitFor` before making the next request.
  2508. presentAssistantMessage(this)
  2509. }
  2510. // Note: updateApiReqMsg() is now called from within drainStreamInBackgroundToFindAllUsage
  2511. // to ensure usage data is captured even when the stream is interrupted. The background task
  2512. // uses local variables to accumulate usage data before atomically updating the shared state.
  2513. // Complete the reasoning message if it exists
  2514. // We can't use say() here because the reasoning message may not be the last message
  2515. // (other messages like text blocks or tool uses may have been added after it during streaming)
  2516. if (reasoningMessage) {
  2517. const lastReasoningIndex = findLastIndex(
  2518. this.clineMessages,
  2519. (m) => m.type === "say" && m.say === "reasoning",
  2520. )
  2521. if (lastReasoningIndex !== -1 && this.clineMessages[lastReasoningIndex].partial) {
  2522. this.clineMessages[lastReasoningIndex].partial = false
  2523. await this.updateClineMessage(this.clineMessages[lastReasoningIndex])
  2524. }
  2525. }
  2526. await this.saveClineMessages()
  2527. await this.providerRef.deref()?.postStateToWebview()
  2528. // Reset parser after each complete conversation round (XML protocol only)
  2529. this.assistantMessageParser?.reset()
  2530. // Now add to apiConversationHistory.
  2531. // Need to save assistant responses to file before proceeding to
  2532. // tool use since user can exit at any moment and we wouldn't be
  2533. // able to save the assistant's response.
  2534. let didEndLoop = false
  2535. // Check if we have any content to process (text or tool uses)
  2536. const hasTextContent = assistantMessage.length > 0
  2537. const hasToolUses = this.assistantMessageContent.some(
  2538. (block) => block.type === "tool_use" || block.type === "mcp_tool_use",
  2539. )
  2540. if (hasTextContent || hasToolUses) {
  2541. // Display grounding sources to the user if they exist
  2542. if (pendingGroundingSources.length > 0) {
  2543. const citationLinks = pendingGroundingSources.map((source, i) => `[${i + 1}](${source.url})`)
  2544. const sourcesText = `${t("common:gemini.sources")} ${citationLinks.join(", ")}`
  2545. await this.say("text", sourcesText, undefined, false, undefined, undefined, {
  2546. isNonInteractive: true,
  2547. })
  2548. }
  2549. // Build the assistant message content array
  2550. const assistantContent: Array<Anthropic.TextBlockParam | Anthropic.ToolUseBlockParam> = []
  2551. // Add text content if present
  2552. if (assistantMessage) {
  2553. assistantContent.push({
  2554. type: "text" as const,
  2555. text: assistantMessage,
  2556. })
  2557. }
  2558. // Add tool_use blocks with their IDs for native protocol
  2559. // This handles both regular ToolUse and McpToolUse types
  2560. const toolUseBlocks = this.assistantMessageContent.filter(
  2561. (block) => block.type === "tool_use" || block.type === "mcp_tool_use",
  2562. )
  2563. for (const block of toolUseBlocks) {
  2564. if (block.type === "mcp_tool_use") {
  2565. // McpToolUse already has the original tool name (e.g., "mcp_serverName_toolName")
  2566. // The arguments are the raw tool arguments (matching the simplified schema)
  2567. const mcpBlock = block as import("../../shared/tools").McpToolUse
  2568. if (mcpBlock.id) {
  2569. assistantContent.push({
  2570. type: "tool_use" as const,
  2571. id: mcpBlock.id,
  2572. name: mcpBlock.name, // Original dynamic name
  2573. input: mcpBlock.arguments, // Direct tool arguments
  2574. })
  2575. }
  2576. } else {
  2577. // Regular ToolUse
  2578. const toolUse = block as import("../../shared/tools").ToolUse
  2579. const toolCallId = toolUse.id
  2580. if (toolCallId) {
  2581. // nativeArgs is already in the correct API format for all tools
  2582. const input = toolUse.nativeArgs || toolUse.params
  2583. assistantContent.push({
  2584. type: "tool_use" as const,
  2585. id: toolCallId,
  2586. name: toolUse.name,
  2587. input,
  2588. })
  2589. }
  2590. }
  2591. }
  2592. await this.addToApiConversationHistory(
  2593. {
  2594. role: "assistant",
  2595. content: assistantContent,
  2596. },
  2597. reasoningMessage || undefined,
  2598. )
  2599. TelemetryService.instance.captureConversationMessage(this.taskId, "assistant")
  2600. // NOTE: This comment is here for future reference - this was a
  2601. // workaround for `userMessageContent` not getting set to true.
  2602. // It was due to it not recursively calling for partial blocks
  2603. // when `didRejectTool`, so it would get stuck waiting for a
  2604. // partial block to complete before it could continue.
  2605. // In case the content blocks finished it may be the api stream
  2606. // finished after the last parsed content block was executed, so
  2607. // we are able to detect out of bounds and set
  2608. // `userMessageContentReady` to true (note you should not call
  2609. // `presentAssistantMessage` since if the last block i
  2610. // completed it will be presented again).
  2611. // const completeBlocks = this.assistantMessageContent.filter((block) => !block.partial) // If there are any partial blocks after the stream ended we can consider them invalid.
  2612. // if (this.currentStreamingContentIndex >= completeBlocks.length) {
  2613. // this.userMessageContentReady = true
  2614. // }
  2615. await pWaitFor(() => this.userMessageContentReady)
  2616. // If the model did not tool use, then we need to tell it to
  2617. // either use a tool or attempt_completion.
  2618. const didToolUse = this.assistantMessageContent.some(
  2619. (block) => block.type === "tool_use" || block.type === "mcp_tool_use",
  2620. )
  2621. if (!didToolUse) {
  2622. const modelInfo = this.api.getModel().info
  2623. const state = await this.providerRef.deref()?.getState()
  2624. const toolProtocol = resolveToolProtocol(this.apiConfiguration, modelInfo)
  2625. this.userMessageContent.push({ type: "text", text: formatResponse.noToolsUsed(toolProtocol) })
  2626. this.consecutiveMistakeCount++
  2627. }
  2628. // Push to stack if there's content OR if we're paused waiting for a subtask.
  2629. // When paused, we push an empty item so the loop continues to the pause check.
  2630. if (this.userMessageContent.length > 0 || this.isPaused) {
  2631. stack.push({
  2632. userContent: [...this.userMessageContent], // Create a copy to avoid mutation issues
  2633. includeFileDetails: false, // Subsequent iterations don't need file details
  2634. })
  2635. // Add periodic yielding to prevent blocking
  2636. await new Promise((resolve) => setImmediate(resolve))
  2637. }
  2638. // Continue to next iteration instead of setting didEndLoop from recursive call
  2639. continue
  2640. } else {
  2641. // If there's no assistant_responses, that means we got no text
  2642. // or tool_use content blocks from API which we should assume is
  2643. // an error.
  2644. // IMPORTANT: For native tool protocol, we already added the user message to
  2645. // apiConversationHistory at line 1876. Since the assistant failed to respond,
  2646. // we need to remove that message before retrying to avoid having two consecutive
  2647. // user messages (which would cause tool_result validation errors).
  2648. let state = await this.providerRef.deref()?.getState()
  2649. if (
  2650. isNativeProtocol(resolveToolProtocol(this.apiConfiguration, this.api.getModel().info)) &&
  2651. this.apiConversationHistory.length > 0
  2652. ) {
  2653. const lastMessage = this.apiConversationHistory[this.apiConversationHistory.length - 1]
  2654. if (lastMessage.role === "user") {
  2655. // Remove the last user message that we added earlier
  2656. this.apiConversationHistory.pop()
  2657. }
  2658. }
  2659. // Check if we should auto-retry or prompt the user
  2660. // Reuse the state variable from above
  2661. if (state?.autoApprovalEnabled && state?.alwaysApproveResubmit) {
  2662. // Auto-retry with backoff - don't persist failure message when retrying
  2663. const errorMsg =
  2664. "Unexpected API Response: The language model did not provide any assistant messages. This may indicate an issue with the API or the model's output."
  2665. await this.backoffAndAnnounce(
  2666. currentItem.retryAttempt ?? 0,
  2667. new Error("Empty assistant response"),
  2668. errorMsg,
  2669. )
  2670. // Check if task was aborted during the backoff
  2671. if (this.abort) {
  2672. console.log(
  2673. `[Task#${this.taskId}.${this.instanceId}] Task aborted during empty-assistant retry backoff`,
  2674. )
  2675. break
  2676. }
  2677. // Push the same content back onto the stack to retry, incrementing the retry attempt counter
  2678. // Mark that user message was removed so it gets re-added on retry
  2679. stack.push({
  2680. userContent: currentUserContent,
  2681. includeFileDetails: false,
  2682. retryAttempt: (currentItem.retryAttempt ?? 0) + 1,
  2683. userMessageWasRemoved: true,
  2684. })
  2685. // Continue to retry the request
  2686. continue
  2687. } else {
  2688. // Prompt the user for retry decision
  2689. const { response } = await this.ask(
  2690. "api_req_failed",
  2691. "The model returned no assistant messages. This may indicate an issue with the API or the model's output.",
  2692. )
  2693. if (response === "yesButtonClicked") {
  2694. await this.say("api_req_retried")
  2695. // Push the same content back to retry
  2696. stack.push({
  2697. userContent: currentUserContent,
  2698. includeFileDetails: false,
  2699. retryAttempt: (currentItem.retryAttempt ?? 0) + 1,
  2700. })
  2701. // Continue to retry the request
  2702. continue
  2703. } else {
  2704. // User declined to retry
  2705. // For native protocol, re-add the user message we removed
  2706. // Reuse the state variable from above
  2707. if (
  2708. isNativeProtocol(resolveToolProtocol(this.apiConfiguration, this.api.getModel().info))
  2709. ) {
  2710. await this.addToApiConversationHistory({
  2711. role: "user",
  2712. content: currentUserContent,
  2713. })
  2714. }
  2715. await this.say(
  2716. "error",
  2717. "Unexpected API Response: The language model did not provide any assistant messages. This may indicate an issue with the API or the model's output.",
  2718. )
  2719. await this.addToApiConversationHistory({
  2720. role: "assistant",
  2721. content: [{ type: "text", text: "Failure: I did not provide a response." }],
  2722. })
  2723. }
  2724. }
  2725. }
  2726. // If we reach here without continuing, return false (will always be false for now)
  2727. return false
  2728. } catch (error) {
  2729. // This should never happen since the only thing that can throw an
  2730. // error is the attemptApiRequest, which is wrapped in a try catch
  2731. // that sends an ask where if noButtonClicked, will clear current
  2732. // task and destroy this instance. However to avoid unhandled
  2733. // promise rejection, we will end this loop which will end execution
  2734. // of this instance (see `startTask`).
  2735. return true // Needs to be true so parent loop knows to end task.
  2736. }
  2737. }
  2738. // If we exit the while loop normally (stack is empty), return false
  2739. return false
  2740. }
  2741. private async getSystemPrompt(): Promise<string> {
  2742. const { mcpEnabled } = (await this.providerRef.deref()?.getState()) ?? {}
  2743. let mcpHub: McpHub | undefined
  2744. if (mcpEnabled ?? true) {
  2745. const provider = this.providerRef.deref()
  2746. if (!provider) {
  2747. throw new Error("Provider reference lost during view transition")
  2748. }
  2749. // Wait for MCP hub initialization through McpServerManager
  2750. mcpHub = await McpServerManager.getInstance(provider.context, provider)
  2751. if (!mcpHub) {
  2752. throw new Error("Failed to get MCP hub from server manager")
  2753. }
  2754. // Wait for MCP servers to be connected before generating system prompt
  2755. await pWaitFor(() => !mcpHub!.isConnecting, { timeout: 10_000 }).catch(() => {
  2756. console.error("MCP servers failed to connect in time")
  2757. })
  2758. }
  2759. const rooIgnoreInstructions = this.rooIgnoreController?.getInstructions()
  2760. const state = await this.providerRef.deref()?.getState()
  2761. const {
  2762. browserViewportSize,
  2763. mode,
  2764. customModes,
  2765. customModePrompts,
  2766. customInstructions,
  2767. experiments,
  2768. enableMcpServerCreation,
  2769. browserToolEnabled,
  2770. language,
  2771. maxConcurrentFileReads,
  2772. maxReadFileLine,
  2773. apiConfiguration,
  2774. } = state ?? {}
  2775. return await (async () => {
  2776. const provider = this.providerRef.deref()
  2777. if (!provider) {
  2778. throw new Error("Provider not available")
  2779. }
  2780. // Align browser tool enablement with generateSystemPrompt: require model image support,
  2781. // mode to include the browser group, and the user setting to be enabled.
  2782. const modeConfig = getModeBySlug(mode ?? defaultModeSlug, customModes)
  2783. const modeSupportsBrowser = modeConfig?.groups.some((group) => getGroupName(group) === "browser") ?? false
  2784. // Check if model supports browser capability (images)
  2785. const modelInfo = this.api.getModel().info
  2786. const modelSupportsBrowser = (modelInfo as any)?.supportsImages === true
  2787. const canUseBrowserTool = modelSupportsBrowser && modeSupportsBrowser && (browserToolEnabled ?? true)
  2788. // Resolve the tool protocol based on profile, model, and provider settings
  2789. const toolProtocol = resolveToolProtocol(apiConfiguration ?? this.apiConfiguration, modelInfo)
  2790. return SYSTEM_PROMPT(
  2791. provider.context,
  2792. this.cwd,
  2793. canUseBrowserTool,
  2794. mcpHub,
  2795. this.diffStrategy,
  2796. browserViewportSize ?? "900x600",
  2797. mode ?? defaultModeSlug,
  2798. customModePrompts,
  2799. customModes,
  2800. customInstructions,
  2801. this.diffEnabled,
  2802. experiments,
  2803. enableMcpServerCreation,
  2804. language,
  2805. rooIgnoreInstructions,
  2806. maxReadFileLine !== -1,
  2807. {
  2808. maxConcurrentFileReads: maxConcurrentFileReads ?? 5,
  2809. todoListEnabled: apiConfiguration?.todoListEnabled ?? true,
  2810. browserToolEnabled: browserToolEnabled ?? true,
  2811. useAgentRules:
  2812. vscode.workspace.getConfiguration(Package.name).get<boolean>("useAgentRules") ?? true,
  2813. newTaskRequireTodos: vscode.workspace
  2814. .getConfiguration(Package.name)
  2815. .get<boolean>("newTaskRequireTodos", false),
  2816. toolProtocol,
  2817. },
  2818. undefined, // todoList
  2819. this.api.getModel().id,
  2820. )
  2821. })()
  2822. }
  2823. private getCurrentProfileId(state: any): string {
  2824. return (
  2825. state?.listApiConfigMeta?.find((profile: any) => profile.name === state?.currentApiConfigName)?.id ??
  2826. "default"
  2827. )
  2828. }
  2829. private async handleContextWindowExceededError(): Promise<void> {
  2830. const state = await this.providerRef.deref()?.getState()
  2831. const { profileThresholds = {} } = state ?? {}
  2832. const { contextTokens } = this.getTokenUsage()
  2833. const modelInfo = this.api.getModel().info
  2834. const maxTokens = getModelMaxOutputTokens({
  2835. modelId: this.api.getModel().id,
  2836. model: modelInfo,
  2837. settings: this.apiConfiguration,
  2838. })
  2839. const contextWindow = modelInfo.contextWindow
  2840. // Get the current profile ID using the helper method
  2841. const currentProfileId = this.getCurrentProfileId(state)
  2842. // Log the context window error for debugging
  2843. console.warn(
  2844. `[Task#${this.taskId}] Context window exceeded for model ${this.api.getModel().id}. ` +
  2845. `Current tokens: ${contextTokens}, Context window: ${contextWindow}. ` +
  2846. `Forcing truncation to ${FORCED_CONTEXT_REDUCTION_PERCENT}% of current context.`,
  2847. )
  2848. // Determine if we're using native tool protocol for proper message handling
  2849. const protocol = resolveToolProtocol(this.apiConfiguration, modelInfo)
  2850. const useNativeTools = isNativeProtocol(protocol)
  2851. // Force aggressive truncation by keeping only 75% of the conversation history
  2852. const truncateResult = await manageContext({
  2853. messages: this.apiConversationHistory,
  2854. totalTokens: contextTokens || 0,
  2855. maxTokens,
  2856. contextWindow,
  2857. apiHandler: this.api,
  2858. autoCondenseContext: true,
  2859. autoCondenseContextPercent: FORCED_CONTEXT_REDUCTION_PERCENT,
  2860. systemPrompt: await this.getSystemPrompt(),
  2861. taskId: this.taskId,
  2862. profileThresholds,
  2863. currentProfileId,
  2864. useNativeTools,
  2865. })
  2866. if (truncateResult.messages !== this.apiConversationHistory) {
  2867. await this.overwriteApiConversationHistory(truncateResult.messages)
  2868. }
  2869. if (truncateResult.summary) {
  2870. const { summary, cost, prevContextTokens, newContextTokens = 0 } = truncateResult
  2871. const contextCondense: ContextCondense = { summary, cost, newContextTokens, prevContextTokens }
  2872. await this.say(
  2873. "condense_context",
  2874. undefined /* text */,
  2875. undefined /* images */,
  2876. false /* partial */,
  2877. undefined /* checkpoint */,
  2878. undefined /* progressStatus */,
  2879. { isNonInteractive: true } /* options */,
  2880. contextCondense,
  2881. )
  2882. }
  2883. }
/**
 * Starts a single API request and yields its response stream.
 *
 * Responsibilities, in order:
 * 1. Read provider state (auto-approval, condensing, rate-limit settings).
 * 2. Resolve an optional dedicated API handler for context condensing.
 * 3. Honour the global rate-limit window shared across tasks and subtasks.
 * 4. Condense/truncate conversation history when context usage warrants it.
 * 5. Enforce auto-approval limits (may ask the user to continue).
 * 6. Build native tool definitions when the NATIVE tool protocol applies.
 * 7. Create the message stream and yield its chunks, retrying first-chunk
 *    failures via recursion with an incremented `retryAttempt`.
 *
 * Only first-chunk failures are retried here; once streaming has begun,
 * errors are propagated to the caller (which must cancel the task) because
 * tools may already have executed.
 *
 * @param retryAttempt - Zero-based retry counter used for backoff scaling.
 */
public async *attemptApiRequest(retryAttempt: number = 0): ApiStream {
	const state = await this.providerRef.deref()?.getState()
	const {
		apiConfiguration,
		autoApprovalEnabled,
		alwaysApproveResubmit,
		requestDelaySeconds,
		mode,
		autoCondenseContext = true,
		autoCondenseContextPercent = 100,
		profileThresholds = {},
	} = state ?? {}
	// Get condensing configuration for automatic triggers.
	const customCondensingPrompt = state?.customCondensingPrompt
	const condensingApiConfigId = state?.condensingApiConfigId
	const listApiConfigMeta = state?.listApiConfigMeta
	// Determine API handler to use for condensing. When a dedicated
	// condensing profile is configured and resolvable, condensing runs on
	// that handler instead of the task's main one.
	let condensingApiHandler: ApiHandler | undefined
	if (condensingApiConfigId && listApiConfigMeta && Array.isArray(listApiConfigMeta)) {
		// Find matching config by ID
		const matchingConfig = listApiConfigMeta.find((config) => config.id === condensingApiConfigId)
		if (matchingConfig) {
			const profile = await this.providerRef.deref()?.providerSettingsManager.getProfile({
				id: condensingApiConfigId,
			})
			// Ensure profile and apiProvider exist before trying to build handler.
			if (profile && profile.apiProvider) {
				condensingApiHandler = buildApiHandler(profile)
			}
		}
	}
	let rateLimitDelay = 0
	// Use the shared timestamp so that subtasks respect the same rate-limit
	// window as their parent tasks.
	if (Task.lastGlobalApiRequestTime) {
		const now = performance.now()
		const timeSinceLastRequest = now - Task.lastGlobalApiRequestTime
		const rateLimit = apiConfiguration?.rateLimitSeconds || 0
		// Remaining wait (seconds) = configured limit minus elapsed time,
		// clamped to [0, rateLimit] and rounded up to whole seconds.
		rateLimitDelay = Math.ceil(Math.min(rateLimit, Math.max(0, rateLimit * 1000 - timeSinceLastRequest) / 1000))
	}
	// Only show rate limiting message if we're not retrying. If retrying, we'll include the delay there.
	if (rateLimitDelay > 0 && retryAttempt === 0) {
		// Show countdown timer, updating the same message each second.
		for (let i = rateLimitDelay; i > 0; i--) {
			const delayMessage = `Rate limiting for ${i} seconds...`
			await this.say("api_req_retry_delayed", delayMessage, undefined, true)
			await delay(1000)
		}
	}
	// Update last request time before making the request so that subsequent
	// requests — even from new subtasks — will honour the provider's rate-limit.
	Task.lastGlobalApiRequestTime = performance.now()
	const systemPrompt = await this.getSystemPrompt()
	// Condense/truncate conversation history if we already have a token count.
	const { contextTokens } = this.getTokenUsage()
	if (contextTokens) {
		const modelInfo = this.api.getModel().info
		const maxTokens = getModelMaxOutputTokens({
			modelId: this.api.getModel().id,
			model: modelInfo,
			settings: this.apiConfiguration,
		})
		const contextWindow = modelInfo.contextWindow
		// Get the current profile ID using the helper method
		const currentProfileId = this.getCurrentProfileId(state)
		// Determine if we're using native tool protocol for proper message handling
		const modelInfoForProtocol = this.api.getModel().info
		const protocol = resolveToolProtocol(this.apiConfiguration, modelInfoForProtocol)
		const useNativeTools = isNativeProtocol(protocol)
		const truncateResult = await manageContext({
			messages: this.apiConversationHistory,
			totalTokens: contextTokens,
			maxTokens,
			contextWindow,
			apiHandler: this.api,
			autoCondenseContext,
			autoCondenseContextPercent,
			systemPrompt,
			taskId: this.taskId,
			customCondensingPrompt,
			condensingApiHandler,
			profileThresholds,
			currentProfileId,
			useNativeTools,
		})
		// manageContext returns the same array instance when nothing changed.
		if (truncateResult.messages !== this.apiConversationHistory) {
			await this.overwriteApiConversationHistory(truncateResult.messages)
		}
		if (truncateResult.error) {
			await this.say("condense_context_error", truncateResult.error)
		} else if (truncateResult.summary) {
			const { summary, cost, prevContextTokens, newContextTokens = 0 } = truncateResult
			const contextCondense: ContextCondense = { summary, cost, newContextTokens, prevContextTokens }
			await this.say(
				"condense_context",
				undefined /* text */,
				undefined /* images */,
				false /* partial */,
				undefined /* checkpoint */,
				undefined /* progressStatus */,
				{ isNonInteractive: true } /* options */,
				contextCondense,
			)
		}
	}
	// Build the outgoing history: drop anything before the last summary,
	// strip images for models that don't support them, then normalize.
	const messagesSinceLastSummary = getMessagesSinceLastSummary(this.apiConversationHistory)
	const messagesWithoutImages = maybeRemoveImageBlocks(messagesSinceLastSummary, this.api)
	const cleanConversationHistory = this.buildCleanConversationHistory(messagesWithoutImages as ApiMessage[])
	// Check auto-approval limits
	const approvalResult = await this.autoApprovalHandler.checkAutoApprovalLimits(
		state,
		this.combineMessages(this.clineMessages.slice(1)),
		async (type, data) => this.ask(type, data),
	)
	if (!approvalResult.shouldProceed) {
		// User did not approve, task should be aborted
		throw new Error("Auto-approval limit reached and user did not approve continuation")
	}
	// Determine if we should include native tools based on:
	// 1. Tool protocol is set to NATIVE
	// 2. Model supports native tools
	const modelInfo = this.api.getModel().info
	const toolProtocol = resolveToolProtocol(this.apiConfiguration, modelInfo)
	const shouldIncludeTools = toolProtocol === TOOL_PROTOCOL.NATIVE && (modelInfo.supportsNativeTools ?? false)
	// Build complete tools array: native tools + dynamic MCP tools, filtered by mode restrictions
	let allTools: OpenAI.Chat.ChatCompletionTool[] = []
	if (shouldIncludeTools) {
		const provider = this.providerRef.deref()
		if (!provider) {
			throw new Error("Provider reference lost during tool building")
		}
		allTools = await buildNativeToolsArray({
			provider,
			cwd: this.cwd,
			mode,
			customModes: state?.customModes,
			experiments: state?.experiments,
			apiConfiguration,
			maxReadFileLine: state?.maxReadFileLine ?? -1,
			browserToolEnabled: state?.browserToolEnabled ?? true,
			modelInfo,
		})
	}
	// Resolve parallel tool calls setting from experiment (will move to per-API-profile setting later)
	const parallelToolCallsEnabled = experiments.isEnabled(
		state?.experiments ?? {},
		EXPERIMENT_IDS.MULTIPLE_NATIVE_TOOL_CALLS,
	)
	const metadata: ApiHandlerCreateMessageMetadata = {
		mode: mode,
		taskId: this.taskId,
		// Include tools and tool protocol when using native protocol and model supports it
		...(shouldIncludeTools
			? { tools: allTools, tool_choice: "auto", toolProtocol, parallelToolCalls: parallelToolCallsEnabled }
			: {}),
	}
	// Create an AbortController to allow cancelling the request mid-stream
	this.currentRequestAbortController = new AbortController()
	const abortSignal = this.currentRequestAbortController.signal
	// The provider accepts reasoning items alongside standard messages; cast to the expected parameter type.
	const stream = this.api.createMessage(
		systemPrompt,
		cleanConversationHistory as unknown as Anthropic.Messages.MessageParam[],
		metadata,
	)
	const iterator = stream[Symbol.asyncIterator]()
	// Set up abort handling - when the signal is aborted, clean up the controller reference
	abortSignal.addEventListener("abort", () => {
		console.log(`[Task#${this.taskId}.${this.instanceId}] AbortSignal triggered for current request`)
		this.currentRequestAbortController = undefined
	})
	try {
		// Awaiting first chunk to see if it will throw an error.
		this.isWaitingForFirstChunk = true
		// Race between the first chunk and the abort signal
		const firstChunkPromise = iterator.next()
		const abortPromise = new Promise<never>((_, reject) => {
			if (abortSignal.aborted) {
				reject(new Error("Request cancelled by user"))
			} else {
				abortSignal.addEventListener("abort", () => {
					reject(new Error("Request cancelled by user"))
				})
			}
		})
		const firstChunk = await Promise.race([firstChunkPromise, abortPromise])
		// NOTE(review): if the stream completes immediately, `firstChunk.done`
		// is true and `firstChunk.value` is undefined — this would yield
		// undefined to the consumer. Confirm downstream handling.
		yield firstChunk.value
		this.isWaitingForFirstChunk = false
	} catch (error) {
		this.isWaitingForFirstChunk = false
		this.currentRequestAbortController = undefined
		const isContextWindowExceededError = checkContextWindowExceededError(error)
		// If it's a context window error and we haven't exceeded max retries for this error type
		if (isContextWindowExceededError && retryAttempt < MAX_CONTEXT_WINDOW_RETRIES) {
			console.warn(
				`[Task#${this.taskId}] Context window exceeded for model ${this.api.getModel().id}. ` +
					`Retry attempt ${retryAttempt + 1}/${MAX_CONTEXT_WINDOW_RETRIES}. ` +
					`Attempting automatic truncation...`,
			)
			await this.handleContextWindowExceededError()
			// Retry the request after handling the context window error
			yield* this.attemptApiRequest(retryAttempt + 1)
			return
		}
		// Note that this api_req_failed ask is unique in that we only present
		// this option if the api hasn't streamed any content yet (i.e. it
		// fails on the first chunk), as it allows the user to hit a retry
		// button. If the api failed mid-stream, it could be in any arbitrary
		// state where some tools may have executed, so that error is handled
		// differently and requires cancelling the task entirely.
		if (autoApprovalEnabled && alwaysApproveResubmit) {
			let errorMsg
			if (error.error?.metadata?.raw) {
				errorMsg = JSON.stringify(error.error.metadata.raw, null, 2)
			} else if (error.message) {
				errorMsg = error.message
			} else {
				errorMsg = "Unknown error"
			}
			// Apply shared exponential backoff and countdown UX
			await this.backoffAndAnnounce(retryAttempt, error, errorMsg)
			// CRITICAL: Check if task was aborted during the backoff countdown
			// This prevents infinite loops when users cancel during auto-retry
			// Without this check, the recursive call below would continue even after abort
			if (this.abort) {
				throw new Error(
					`[Task#attemptApiRequest] task ${this.taskId}.${this.instanceId} aborted during retry`,
				)
			}
			// Delegate generator output from the recursive call with
			// incremented retry count.
			yield* this.attemptApiRequest(retryAttempt + 1)
			return
		} else {
			const { response } = await this.ask(
				"api_req_failed",
				error.message ?? JSON.stringify(serializeError(error), null, 2),
			)
			if (response !== "yesButtonClicked") {
				// This will never happen since if noButtonClicked, we will
				// clear current task, aborting this instance.
				throw new Error("API request failed")
			}
			await this.say("api_req_retried")
			// Delegate generator output from the recursive call.
			yield* this.attemptApiRequest()
			return
		}
	}
	// No error, so we can continue to yield all remaining chunks.
	// (Needs to be placed outside of try/catch since we want the caller to
	// handle errors — api_req_failed is reserved for first-chunk failures
	// only.)
	// This delegates to the original stream's iterator, passing along all
	// subsequent chunks unchanged.
	yield* iterator
}
  3137. // Shared exponential backoff for retries (first-chunk and mid-stream)
  3138. private async backoffAndAnnounce(retryAttempt: number, error: any, header?: string): Promise<void> {
  3139. try {
  3140. const state = await this.providerRef.deref()?.getState()
  3141. const baseDelay = state?.requestDelaySeconds || 5
  3142. let exponentialDelay = Math.min(
  3143. Math.ceil(baseDelay * Math.pow(2, retryAttempt)),
  3144. MAX_EXPONENTIAL_BACKOFF_SECONDS,
  3145. )
  3146. // Respect provider rate limit window
  3147. let rateLimitDelay = 0
  3148. const rateLimit = state?.apiConfiguration?.rateLimitSeconds || 0
  3149. if (Task.lastGlobalApiRequestTime && rateLimit > 0) {
  3150. const elapsed = performance.now() - Task.lastGlobalApiRequestTime
  3151. rateLimitDelay = Math.ceil(Math.min(rateLimit, Math.max(0, rateLimit * 1000 - elapsed) / 1000))
  3152. }
  3153. // Prefer RetryInfo on 429 if present
  3154. if (error?.status === 429) {
  3155. const retryInfo = error?.errorDetails?.find(
  3156. (d: any) => d["@type"] === "type.googleapis.com/google.rpc.RetryInfo",
  3157. )
  3158. const match = retryInfo?.retryDelay?.match?.(/^(\d+)s$/)
  3159. if (match) {
  3160. exponentialDelay = Number(match[1]) + 1
  3161. }
  3162. }
  3163. const finalDelay = Math.max(exponentialDelay, rateLimitDelay)
  3164. if (finalDelay <= 0) return
  3165. // Build header text; fall back to error message if none provided
  3166. let headerText = header
  3167. if (!headerText) {
  3168. if (error?.error?.metadata?.raw) {
  3169. headerText = JSON.stringify(error.error.metadata.raw, null, 2)
  3170. } else if (error?.message) {
  3171. headerText = error.message
  3172. } else {
  3173. headerText = "Unknown error"
  3174. }
  3175. }
  3176. headerText = headerText ? `${headerText}\n\n` : ""
  3177. // Show countdown timer with exponential backoff
  3178. for (let i = finalDelay; i > 0; i--) {
  3179. // Check abort flag during countdown to allow early exit
  3180. if (this.abort) {
  3181. throw new Error(`[Task#${this.taskId}] Aborted during retry countdown`)
  3182. }
  3183. await this.say(
  3184. "api_req_retry_delayed",
  3185. `${headerText}Retry attempt ${retryAttempt + 1}\nRetrying in ${i} seconds...`,
  3186. undefined,
  3187. true,
  3188. )
  3189. await delay(1000)
  3190. }
  3191. await this.say(
  3192. "api_req_retry_delayed",
  3193. `${headerText}Retry attempt ${retryAttempt + 1}\nRetrying now...`,
  3194. undefined,
  3195. false,
  3196. )
  3197. } catch (err) {
  3198. console.error("Exponential backoff failed:", err)
  3199. }
  3200. }
  3201. // Checkpoints
/**
 * Saves a checkpoint of this task's state.
 * Thin wrapper that delegates to the module-level `checkpointSave` helper,
 * passing this task instance as context.
 *
 * @param force - Forwarded to the helper; presumably forces a save that would
 *   otherwise be skipped — confirm against the helper's implementation.
 * @param suppressMessage - Forwarded to the helper; presumably suppresses the
 *   user-facing checkpoint message — confirm against the helper.
 */
public async checkpointSave(force: boolean = false, suppressMessage: boolean = false) {
	return checkpointSave(this, force, suppressMessage)
}
  3205. private buildCleanConversationHistory(
  3206. messages: ApiMessage[],
  3207. ): Array<
  3208. Anthropic.Messages.MessageParam | { type: "reasoning"; encrypted_content: string; id?: string; summary?: any[] }
  3209. > {
  3210. type ReasoningItemForRequest = {
  3211. type: "reasoning"
  3212. encrypted_content: string
  3213. id?: string
  3214. summary?: any[]
  3215. }
  3216. const cleanConversationHistory: (Anthropic.Messages.MessageParam | ReasoningItemForRequest)[] = []
  3217. for (const msg of messages) {
  3218. // Standalone reasoning: send encrypted, skip plain text
  3219. if (msg.type === "reasoning") {
  3220. if (msg.encrypted_content) {
  3221. cleanConversationHistory.push({
  3222. type: "reasoning",
  3223. summary: msg.summary,
  3224. encrypted_content: msg.encrypted_content!,
  3225. ...(msg.id ? { id: msg.id } : {}),
  3226. })
  3227. }
  3228. continue
  3229. }
  3230. // Preferred path: assistant message with embedded reasoning as first content block
  3231. if (msg.role === "assistant") {
  3232. const rawContent = msg.content
  3233. const contentArray: Anthropic.Messages.ContentBlockParam[] = Array.isArray(rawContent)
  3234. ? (rawContent as Anthropic.Messages.ContentBlockParam[])
  3235. : rawContent !== undefined
  3236. ? ([
  3237. { type: "text", text: rawContent } satisfies Anthropic.Messages.TextBlockParam,
  3238. ] as Anthropic.Messages.ContentBlockParam[])
  3239. : []
  3240. const [first, ...rest] = contentArray
  3241. // Check if this message has reasoning_details (OpenRouter format for Gemini 3, etc.)
  3242. const msgWithDetails = msg
  3243. if (msgWithDetails.reasoning_details && Array.isArray(msgWithDetails.reasoning_details)) {
  3244. // Build the assistant message with reasoning_details
  3245. let assistantContent: Anthropic.Messages.MessageParam["content"]
  3246. if (contentArray.length === 0) {
  3247. assistantContent = ""
  3248. } else if (contentArray.length === 1 && contentArray[0].type === "text") {
  3249. assistantContent = (contentArray[0] as Anthropic.Messages.TextBlockParam).text
  3250. } else {
  3251. assistantContent = contentArray
  3252. }
  3253. // Create message with reasoning_details property
  3254. cleanConversationHistory.push({
  3255. role: "assistant",
  3256. content: assistantContent,
  3257. reasoning_details: msgWithDetails.reasoning_details,
  3258. } as any)
  3259. continue
  3260. }
  3261. // Embedded reasoning: encrypted (send) or plain text (skip)
  3262. const hasEncryptedReasoning =
  3263. first && (first as any).type === "reasoning" && typeof (first as any).encrypted_content === "string"
  3264. const hasPlainTextReasoning =
  3265. first && (first as any).type === "reasoning" && typeof (first as any).text === "string"
  3266. if (hasEncryptedReasoning) {
  3267. const reasoningBlock = first as any
  3268. // Send as separate reasoning item (OpenAI Native)
  3269. cleanConversationHistory.push({
  3270. type: "reasoning",
  3271. summary: reasoningBlock.summary ?? [],
  3272. encrypted_content: reasoningBlock.encrypted_content,
  3273. ...(reasoningBlock.id ? { id: reasoningBlock.id } : {}),
  3274. })
  3275. // Send assistant message without reasoning
  3276. let assistantContent: Anthropic.Messages.MessageParam["content"]
  3277. if (rest.length === 0) {
  3278. assistantContent = ""
  3279. } else if (rest.length === 1 && rest[0].type === "text") {
  3280. assistantContent = (rest[0] as Anthropic.Messages.TextBlockParam).text
  3281. } else {
  3282. assistantContent = rest
  3283. }
  3284. cleanConversationHistory.push({
  3285. role: "assistant",
  3286. content: assistantContent,
  3287. } satisfies Anthropic.Messages.MessageParam)
  3288. continue
  3289. } else if (hasPlainTextReasoning) {
  3290. // Check if the model's preserveReasoning flag is set
  3291. // If true, include the reasoning block in API requests
  3292. // If false/undefined, strip it out (stored for history only, not sent back to API)
  3293. const shouldPreserveForApi = this.api.getModel().info.preserveReasoning === true
  3294. let assistantContent: Anthropic.Messages.MessageParam["content"]
  3295. if (shouldPreserveForApi) {
  3296. // Include reasoning block in the content sent to API
  3297. assistantContent = contentArray
  3298. } else {
  3299. // Strip reasoning out - stored for history only, not sent back to API
  3300. if (rest.length === 0) {
  3301. assistantContent = ""
  3302. } else if (rest.length === 1 && rest[0].type === "text") {
  3303. assistantContent = (rest[0] as Anthropic.Messages.TextBlockParam).text
  3304. } else {
  3305. assistantContent = rest
  3306. }
  3307. }
  3308. cleanConversationHistory.push({
  3309. role: "assistant",
  3310. content: assistantContent,
  3311. } satisfies Anthropic.Messages.MessageParam)
  3312. continue
  3313. }
  3314. }
  3315. // Default path for regular messages (no embedded reasoning)
  3316. if (msg.role) {
  3317. cleanConversationHistory.push({
  3318. role: msg.role,
  3319. content: msg.content as Anthropic.Messages.ContentBlockParam[] | string,
  3320. })
  3321. }
  3322. }
  3323. return cleanConversationHistory
  3324. }
  3325. public async checkpointRestore(options: CheckpointRestoreOptions) {
  3326. return checkpointRestore(this, options)
  3327. }
  3328. public async checkpointDiff(options: CheckpointDiffOptions) {
  3329. return checkpointDiff(this, options)
  3330. }
  3331. // Metrics
  3332. public combineMessages(messages: ClineMessage[]) {
  3333. return combineApiRequests(combineCommandSequences(messages))
  3334. }
  3335. public getTokenUsage(): TokenUsage {
  3336. return getApiMetrics(this.combineMessages(this.clineMessages.slice(1)))
  3337. }
  3338. public recordToolUsage(toolName: ToolName) {
  3339. if (!this.toolUsage[toolName]) {
  3340. this.toolUsage[toolName] = { attempts: 0, failures: 0 }
  3341. }
  3342. this.toolUsage[toolName].attempts++
  3343. }
  3344. public recordToolError(toolName: ToolName, error?: string) {
  3345. if (!this.toolUsage[toolName]) {
  3346. this.toolUsage[toolName] = { attempts: 0, failures: 0 }
  3347. }
  3348. this.toolUsage[toolName].failures++
  3349. if (error) {
  3350. this.emit(RooCodeEventName.TaskToolFailed, this.taskId, toolName, error)
  3351. }
  3352. }
  3353. // Getters
  3354. public get taskStatus(): TaskStatus {
  3355. if (this.interactiveAsk) {
  3356. return TaskStatus.Interactive
  3357. }
  3358. if (this.resumableAsk) {
  3359. return TaskStatus.Resumable
  3360. }
  3361. if (this.idleAsk) {
  3362. return TaskStatus.Idle
  3363. }
  3364. return TaskStatus.Running
  3365. }
  3366. public get taskAsk(): ClineMessage | undefined {
  3367. return this.idleAsk || this.resumableAsk || this.interactiveAsk
  3368. }
  3369. public get queuedMessages(): QueuedMessage[] {
  3370. return this.messageQueueService.messages
  3371. }
  3372. public get tokenUsage(): TokenUsage | undefined {
  3373. if (this.tokenUsageSnapshot && this.tokenUsageSnapshotAt) {
  3374. return this.tokenUsageSnapshot
  3375. }
  3376. this.tokenUsageSnapshot = this.getTokenUsage()
  3377. this.tokenUsageSnapshotAt = this.clineMessages.at(-1)?.ts
  3378. return this.tokenUsageSnapshot
  3379. }
  3380. public get cwd() {
  3381. return this.workspacePath
  3382. }
  3383. /**
  3384. * Broadcast browser session updates to the browser panel (if open)
  3385. */
  3386. private broadcastBrowserSessionUpdate(): void {
  3387. const provider = this.providerRef.deref()
  3388. if (!provider) {
  3389. return
  3390. }
  3391. try {
  3392. const { BrowserSessionPanelManager } = require("../webview/BrowserSessionPanelManager")
  3393. const panelManager = BrowserSessionPanelManager.getInstance(provider)
  3394. // Get browser session messages
  3395. const browserSessionStartIndex = this.clineMessages.findIndex(
  3396. (m) =>
  3397. m.ask === "browser_action_launch" ||
  3398. (m.say === "browser_session_status" && m.text?.includes("opened")),
  3399. )
  3400. const browserSessionMessages =
  3401. browserSessionStartIndex !== -1 ? this.clineMessages.slice(browserSessionStartIndex) : []
  3402. const isBrowserSessionActive = this.browserSession?.isSessionActive() ?? false
  3403. // Update the panel asynchronously
  3404. panelManager.updateBrowserSession(browserSessionMessages, isBrowserSessionActive).catch((error: Error) => {
  3405. console.error("Failed to broadcast browser session update:", error)
  3406. })
  3407. } catch (error) {
  3408. // Silently fail if panel manager is not available
  3409. console.debug("Browser panel not available for update:", error)
  3410. }
  3411. }
  3412. /**
  3413. * Process any queued messages by dequeuing and submitting them.
  3414. * This ensures that queued user messages are sent when appropriate,
  3415. * preventing them from getting stuck in the queue.
  3416. *
  3417. * @param context - Context string for logging (e.g., the calling tool name)
  3418. */
  3419. public processQueuedMessages(): void {
  3420. try {
  3421. if (!this.messageQueueService.isEmpty()) {
  3422. const queued = this.messageQueueService.dequeueMessage()
  3423. if (queued) {
  3424. setTimeout(() => {
  3425. this.submitUserMessage(queued.text, queued.images).catch((err) =>
  3426. console.error(`[Task] Failed to submit queued message:`, err),
  3427. )
  3428. }, 0)
  3429. }
  3430. }
  3431. } catch (e) {
  3432. console.error(`[Task] Queue processing error:`, e)
  3433. }
  3434. }
  3435. }