
// Task.ts (155 KB)

import * as path from "path"
import * as vscode from "vscode"
import os from "os"
import crypto from "crypto"
import EventEmitter from "events"
import { AskIgnoredError } from "./AskIgnoredError"
import { Anthropic } from "@anthropic-ai/sdk"
import OpenAI from "openai"
import debounce from "lodash.debounce"
import delay from "delay"
import pWaitFor from "p-wait-for"
import { serializeError } from "serialize-error"
import { Package } from "../../shared/package"
import { formatToolInvocation } from "../tools/helpers/toolResultFormatting"
import {
	type TaskLike,
	type TaskMetadata,
	type TaskEvents,
	type ProviderSettings,
	type TokenUsage,
	type ToolUsage,
	type ToolName,
	type ContextCondense,
	type ContextTruncation,
	type ClineMessage,
	type ClineSay,
	type ClineAsk,
	type ToolProgressStatus,
	type HistoryItem,
	type CreateTaskOptions,
	type ModelInfo,
	type ToolProtocol,
	RooCodeEventName,
	TelemetryEventName,
	TaskStatus,
	TodoItem,
	getApiProtocol,
	getModelId,
	isIdleAsk,
	isInteractiveAsk,
	isResumableAsk,
	isNativeProtocol,
	QueuedMessage,
	DEFAULT_CONSECUTIVE_MISTAKE_LIMIT,
	DEFAULT_CHECKPOINT_TIMEOUT_SECONDS,
	MAX_CHECKPOINT_TIMEOUT_SECONDS,
	MIN_CHECKPOINT_TIMEOUT_SECONDS,
	TOOL_PROTOCOL,
	ConsecutiveMistakeError,
} from "@roo-code/types"
import { TelemetryService } from "@roo-code/telemetry"
import { CloudService, BridgeOrchestrator } from "@roo-code/cloud"
import { resolveToolProtocol, detectToolProtocolFromHistory } from "../../utils/resolveToolProtocol"
// api
import { ApiHandler, ApiHandlerCreateMessageMetadata, buildApiHandler } from "../../api"
import { ApiStream, GroundingSource } from "../../api/transform/stream"
import { maybeRemoveImageBlocks } from "../../api/transform/image-cleaning"
// shared
import { findLastIndex } from "../../shared/array"
import { combineApiRequests } from "../../shared/combineApiRequests"
import { combineCommandSequences } from "../../shared/combineCommandSequences"
import { t } from "../../i18n"
import { ClineApiReqCancelReason, ClineApiReqInfo } from "../../shared/ExtensionMessage"
import { getApiMetrics, hasTokenUsageChanged, hasToolUsageChanged } from "../../shared/getApiMetrics"
import { ClineAskResponse } from "../../shared/WebviewMessage"
import { defaultModeSlug, getModeBySlug, getGroupName } from "../../shared/modes"
import { DiffStrategy, type ToolUse, type ToolParamName, toolParamNames } from "../../shared/tools"
import { EXPERIMENT_IDS, experiments } from "../../shared/experiments"
import { getModelMaxOutputTokens } from "../../shared/api"
// services
import { UrlContentFetcher } from "../../services/browser/UrlContentFetcher"
import { BrowserSession } from "../../services/browser/BrowserSession"
import { McpHub } from "../../services/mcp/McpHub"
import { McpServerManager } from "../../services/mcp/McpServerManager"
import { RepoPerTaskCheckpointService } from "../../services/checkpoints"
// integrations
import { DiffViewProvider } from "../../integrations/editor/DiffViewProvider"
import { findToolName } from "../../integrations/misc/export-markdown"
import { RooTerminalProcess } from "../../integrations/terminal/types"
import { TerminalRegistry } from "../../integrations/terminal/TerminalRegistry"
// utils
import { calculateApiCostAnthropic, calculateApiCostOpenAI } from "../../shared/cost"
import { getWorkspacePath } from "../../utils/path"
// prompts
import { formatResponse } from "../prompts/responses"
import { SYSTEM_PROMPT } from "../prompts/system"
import { buildNativeToolsArray } from "./build-tools"
// core modules
import { ToolRepetitionDetector } from "../tools/ToolRepetitionDetector"
import { restoreTodoListForTask } from "../tools/UpdateTodoListTool"
import { FileContextTracker } from "../context-tracking/FileContextTracker"
import { RooIgnoreController } from "../ignore/RooIgnoreController"
import { RooProtectedController } from "../protect/RooProtectedController"
import { type AssistantMessageContent, presentAssistantMessage } from "../assistant-message"
import { AssistantMessageParser } from "../assistant-message/AssistantMessageParser"
import { NativeToolCallParser } from "../assistant-message/NativeToolCallParser"
import { manageContext, willManageContext } from "../context-management"
import { ClineProvider } from "../webview/ClineProvider"
import { MultiSearchReplaceDiffStrategy } from "../diff/strategies/multi-search-replace"
import { MultiFileSearchReplaceDiffStrategy } from "../diff/strategies/multi-file-search-replace"
import {
	type ApiMessage,
	readApiMessages,
	saveApiMessages,
	readTaskMessages,
	saveTaskMessages,
	taskMetadata,
} from "../task-persistence"
import { getEnvironmentDetails } from "../environment/getEnvironmentDetails"
import { checkContextWindowExceededError } from "../context/context-management/context-error-handling"
import {
	type CheckpointDiffOptions,
	type CheckpointRestoreOptions,
	getCheckpointService,
	checkpointSave,
	checkpointRestore,
	checkpointDiff,
} from "../checkpoints"
import { processUserContentMentions } from "../mentions/processUserContentMentions"
import { getMessagesSinceLastSummary, summarizeConversation, getEffectiveApiHistory } from "../condense"
import { MessageQueueService } from "../message-queue/MessageQueueService"
import { AutoApprovalHandler, checkAutoApproval } from "../auto-approval"
import { MessageManager } from "../message-manager"
import { validateAndFixToolResultIds } from "./validateToolResultIds"

const MAX_EXPONENTIAL_BACKOFF_SECONDS = 600 // 10 minutes
const DEFAULT_USAGE_COLLECTION_TIMEOUT_MS = 5000 // 5 seconds
const FORCED_CONTEXT_REDUCTION_PERCENT = 75 // Keep 75% of context (remove 25%) on context window errors
const MAX_CONTEXT_WINDOW_RETRIES = 3 // Maximum retries for context window errors

export interface TaskOptions extends CreateTaskOptions {
	provider: ClineProvider
	apiConfiguration: ProviderSettings
	enableDiff?: boolean
	enableCheckpoints?: boolean
	checkpointTimeout?: number
	enableBridge?: boolean
	fuzzyMatchThreshold?: number
	consecutiveMistakeLimit?: number
	task?: string
	images?: string[]
	historyItem?: HistoryItem
	experiments?: Record<string, boolean>
	startTask?: boolean
	rootTask?: Task
	parentTask?: Task
	taskNumber?: number
	onCreated?: (task: Task) => void
	initialTodos?: TodoItem[]
	workspacePath?: string
	/** Initial status for the task's history item (e.g., "active" for child tasks) */
	initialStatus?: "active" | "delegated" | "completed"
}
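
// Illustrative only (not part of the original file): a minimal `TaskOptions` shape for
// starting a fresh task. `provider` and `apiConfiguration` are assumed to come from the
// host extension (the active ClineProvider and its ProviderSettings); every other field
// is optional and falls back to the defaults applied in the constructor below.
//
//   const options: TaskOptions = {
//       provider,
//       apiConfiguration,
//       task: "Summarize the repository README",
//       enableDiff: true,
//       enableCheckpoints: true,
//   }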
export class Task extends EventEmitter<TaskEvents> implements TaskLike {
	readonly taskId: string
	readonly rootTaskId?: string
	readonly parentTaskId?: string
	childTaskId?: string
	pendingNewTaskToolCallId?: string
	readonly instanceId: string
	readonly metadata: TaskMetadata
	todoList?: TodoItem[]
	readonly rootTask: Task | undefined = undefined
	readonly parentTask: Task | undefined = undefined
	readonly taskNumber: number
	readonly workspacePath: string

	/**
	 * The mode associated with this task. Persisted across sessions
	 * to maintain user context when reopening tasks from history.
	 *
	 * ## Lifecycle
	 *
	 * ### For new tasks:
	 * 1. Initially `undefined` during construction
	 * 2. Asynchronously initialized from provider state via `initializeTaskMode()`
	 * 3. Falls back to `defaultModeSlug` if provider state is unavailable
	 *
	 * ### For history items:
	 * 1. Immediately set from `historyItem.mode` during construction
	 * 2. Falls back to `defaultModeSlug` if mode is not stored in history
	 *
	 * ## Important
	 * This property should NOT be accessed directly until the `taskModeReady` promise resolves.
	 * Use `getTaskMode()` for async access or the `taskMode` getter for sync access after initialization.
	 *
	 * @private
	 * @see {@link getTaskMode} - For safe async access
	 * @see {@link taskMode} - For sync access after initialization
	 * @see {@link waitForModeInitialization} - To ensure initialization is complete
	 */
	private _taskMode: string | undefined

	/**
	 * The tool protocol locked to this task. Once set, the task will continue
	 * using this protocol even if user settings change.
	 *
	 * ## Why This Matters
	 * When NTC (Native Tool Calling) is enabled, XML parsing does NOT occur.
	 * If a task previously used XML tools, resuming it with NTC enabled would
	 * break because the tool calls in the history would not be parseable.
	 *
	 * ## Lifecycle
	 *
	 * ### For new tasks:
	 * 1. Set immediately in constructor via `resolveToolProtocol()`
	 * 2. Locked for the lifetime of the task
	 *
	 * ### For history items:
	 * 1. If `historyItem.toolProtocol` exists, use it
	 * 2. Otherwise, detect from API history via `detectToolProtocolFromHistory()`
	 * 3. If no tools in history, use `resolveToolProtocol()` from current settings
	 *
	 * @private
	 */
	private _taskToolProtocol: ToolProtocol | undefined

	/**
	 * Promise that resolves when the task mode has been initialized.
	 * This ensures async mode initialization completes before the task is used.
	 *
	 * ## Purpose
	 * - Prevents race conditions when accessing task mode
	 * - Ensures provider state is properly loaded before mode-dependent operations
	 * - Provides a synchronization point for async initialization
	 *
	 * ## Resolution timing
	 * - For history items: Resolves immediately (sync initialization)
	 * - For new tasks: Resolves after provider state is fetched (async initialization)
	 *
	 * @private
	 * @see {@link waitForModeInitialization} - Public method to await this promise
	 */
	private taskModeReady: Promise<void>

	providerRef: WeakRef<ClineProvider>
	private readonly globalStoragePath: string
	abort: boolean = false
	currentRequestAbortController?: AbortController
	skipPrevResponseIdOnce: boolean = false

	// TaskStatus
	idleAsk?: ClineMessage
	resumableAsk?: ClineMessage
	interactiveAsk?: ClineMessage

	didFinishAbortingStream = false
	abandoned = false
	abortReason?: ClineApiReqCancelReason
	isInitialized = false
	isPaused: boolean = false

	// API
	apiConfiguration: ProviderSettings
	api: ApiHandler
	private static lastGlobalApiRequestTime?: number
	private autoApprovalHandler: AutoApprovalHandler

	/**
	 * Reset the global API request timestamp. This should only be used for testing.
	 * @internal
	 */
	static resetGlobalApiRequestTime(): void {
		Task.lastGlobalApiRequestTime = undefined
	}

	toolRepetitionDetector: ToolRepetitionDetector
	rooIgnoreController?: RooIgnoreController
	rooProtectedController?: RooProtectedController
	fileContextTracker: FileContextTracker
	urlContentFetcher: UrlContentFetcher
	terminalProcess?: RooTerminalProcess

	// Computer Use
	browserSession: BrowserSession

	// Editing
	diffViewProvider: DiffViewProvider
	diffStrategy?: DiffStrategy
	diffEnabled: boolean = false
	fuzzyMatchThreshold: number
	didEditFile: boolean = false

	// LLM Messages & Chat Messages
	apiConversationHistory: ApiMessage[] = []
	clineMessages: ClineMessage[] = []

	// Ask
	private askResponse?: ClineAskResponse
	private askResponseText?: string
	private askResponseImages?: string[]
	public lastMessageTs?: number
	private autoApprovalTimeoutRef?: NodeJS.Timeout

	// Tool Use
	consecutiveMistakeCount: number = 0
	consecutiveMistakeLimit: number
	consecutiveMistakeCountForApplyDiff: Map<string, number> = new Map()
	consecutiveNoToolUseCount: number = 0
	toolUsage: ToolUsage = {}

	// Checkpoints
	enableCheckpoints: boolean
	checkpointTimeout: number
	checkpointService?: RepoPerTaskCheckpointService
	checkpointServiceInitializing = false

	// Task Bridge
	enableBridge: boolean

	// Message Queue Service
	public readonly messageQueueService: MessageQueueService
	private messageQueueStateChangedHandler: (() => void) | undefined

	// Streaming
	isWaitingForFirstChunk = false
	isStreaming = false
	currentStreamingContentIndex = 0
	currentStreamingDidCheckpoint = false
	assistantMessageContent: AssistantMessageContent[] = []
	presentAssistantMessageLocked = false
	presentAssistantMessageHasPendingUpdates = false
	userMessageContent: (Anthropic.TextBlockParam | Anthropic.ImageBlockParam | Anthropic.ToolResultBlockParam)[] = []
	userMessageContentReady = false
	didRejectTool = false
	didAlreadyUseTool = false
	didToolFailInCurrentTurn = false
	didCompleteReadingStream = false
	assistantMessageParser?: AssistantMessageParser
	private providerProfileChangeListener?: (config: { name: string; provider?: string }) => void

	// Native tool call streaming state (track which index each tool is at)
	private streamingToolCallIndices: Map<string, number> = new Map()

	// Cached model info for current streaming session (set at start of each API request).
	// This prevents excessive getModel() calls during tool execution.
	cachedStreamingModel?: { id: string; info: ModelInfo }

	// Token Usage Cache
	private tokenUsageSnapshot?: TokenUsage
	private tokenUsageSnapshotAt?: number

	// Tool Usage Cache
	private toolUsageSnapshot?: ToolUsage

	// Token Usage Throttling - Debounced emit function
	private readonly TOKEN_USAGE_EMIT_INTERVAL_MS = 2000 // 2 seconds
	private debouncedEmitTokenUsage: ReturnType<typeof debounce>

	// Cloud Sync Tracking
	private cloudSyncedMessageTimestamps: Set<number> = new Set()

	// Initial status for the task's history item (set at creation time to avoid race conditions)
	private readonly initialStatus?: "active" | "delegated" | "completed"

	// MessageManager for high-level message operations (lazily initialized)
	private _messageManager?: MessageManager
	constructor({
		provider,
		apiConfiguration,
		enableDiff = false,
		enableCheckpoints = true,
		checkpointTimeout = DEFAULT_CHECKPOINT_TIMEOUT_SECONDS,
		enableBridge = false,
		fuzzyMatchThreshold = 1.0,
		consecutiveMistakeLimit = DEFAULT_CONSECUTIVE_MISTAKE_LIMIT,
		task,
		images,
		historyItem,
		experiments: experimentsConfig,
		startTask = true,
		rootTask,
		parentTask,
		taskNumber = -1,
		onCreated,
		initialTodos,
		workspacePath,
		initialStatus,
	}: TaskOptions) {
		super()

		if (startTask && !task && !images && !historyItem) {
			throw new Error("Either historyItem or task/images must be provided")
		}

		if (
			!checkpointTimeout ||
			checkpointTimeout > MAX_CHECKPOINT_TIMEOUT_SECONDS ||
			checkpointTimeout < MIN_CHECKPOINT_TIMEOUT_SECONDS
		) {
			throw new Error(
				"checkpointTimeout must be between " +
					MIN_CHECKPOINT_TIMEOUT_SECONDS +
					" and " +
					MAX_CHECKPOINT_TIMEOUT_SECONDS +
					" seconds",
			)
		}

		this.taskId = historyItem ? historyItem.id : crypto.randomUUID()
		this.rootTaskId = historyItem ? historyItem.rootTaskId : rootTask?.taskId
		this.parentTaskId = historyItem ? historyItem.parentTaskId : parentTask?.taskId
		this.childTaskId = undefined
		this.metadata = {
			task: historyItem ? historyItem.task : task,
			images: historyItem ? [] : images,
		}

		// The normal use case is retrying a similar history task with a new workspace.
		this.workspacePath = parentTask
			? parentTask.workspacePath
			: (workspacePath ?? getWorkspacePath(path.join(os.homedir(), "Desktop")))
		this.instanceId = crypto.randomUUID().slice(0, 8)
		this.taskNumber = -1
		this.rooIgnoreController = new RooIgnoreController(this.cwd)
		this.rooProtectedController = new RooProtectedController(this.cwd)
		this.fileContextTracker = new FileContextTracker(provider, this.taskId)

		this.rooIgnoreController.initialize().catch((error) => {
			console.error("Failed to initialize RooIgnoreController:", error)
		})

		this.apiConfiguration = apiConfiguration
		this.api = buildApiHandler(apiConfiguration)
		this.autoApprovalHandler = new AutoApprovalHandler()
		this.urlContentFetcher = new UrlContentFetcher(provider.context)
		this.browserSession = new BrowserSession(provider.context, (isActive: boolean) => {
			// Add a message to indicate browser session status change
			this.say("browser_session_status", isActive ? "Browser session opened" : "Browser session closed")

			// Broadcast to browser panel
			this.broadcastBrowserSessionUpdate()

			// When a browser session becomes active, automatically open/reveal the Browser Session tab
			if (isActive) {
				try {
					// Lazy-load to avoid circular imports at module load time
					const { BrowserSessionPanelManager } = require("../webview/BrowserSessionPanelManager")
					const providerRef = this.providerRef.deref()

					if (providerRef) {
						BrowserSessionPanelManager.getInstance(providerRef)
							.show()
							.catch(() => {})
					}
				} catch (err) {
					console.error("[Task] Failed to auto-open Browser Session panel:", err)
				}
			}
		})
		this.diffEnabled = enableDiff
		this.fuzzyMatchThreshold = fuzzyMatchThreshold
		this.consecutiveMistakeLimit = consecutiveMistakeLimit ?? DEFAULT_CONSECUTIVE_MISTAKE_LIMIT
		this.providerRef = new WeakRef(provider)
		this.globalStoragePath = provider.context.globalStorageUri.fsPath
		this.diffViewProvider = new DiffViewProvider(this.cwd, this)
		this.enableCheckpoints = enableCheckpoints
		this.checkpointTimeout = checkpointTimeout
		this.enableBridge = enableBridge
		this.parentTask = parentTask
		this.taskNumber = taskNumber
		this.initialStatus = initialStatus

		// Store the task's mode when it's created.
		// For history items, use the stored mode; for new tasks, we'll set it
		// after getting state.
		if (historyItem) {
			this._taskMode = historyItem.mode || defaultModeSlug
			this.taskModeReady = Promise.resolve()
			TelemetryService.instance.captureTaskRestarted(this.taskId)

			// For history items, use the persisted tool protocol if available.
			// If not available (old tasks), it will be detected in resumeTaskFromHistory.
			this._taskToolProtocol = historyItem.toolProtocol
		} else {
			// For new tasks, don't set the mode yet - wait for async initialization.
			this._taskMode = undefined
			this.taskModeReady = this.initializeTaskMode(provider)
			TelemetryService.instance.captureTaskCreated(this.taskId)

			// For new tasks, resolve and lock the tool protocol immediately.
			// This ensures the task will continue using this protocol even if
			// user settings change.
			const modelInfo = this.api.getModel().info
			this._taskToolProtocol = resolveToolProtocol(this.apiConfiguration, modelInfo)
		}

		// Initialize the assistant message parser based on the locked tool protocol.
		// For native protocol, tool calls come as tool_call chunks, not XML.
		// For history items without a persisted protocol, we default to the XML parser
		// and will update it in resumeTaskFromHistory after detection.
		const effectiveProtocol = this._taskToolProtocol || "xml"
		this.assistantMessageParser = effectiveProtocol !== "native" ? new AssistantMessageParser() : undefined

		this.messageQueueService = new MessageQueueService()

		this.messageQueueStateChangedHandler = () => {
			this.emit(RooCodeEventName.TaskUserMessage, this.taskId)
			this.providerRef.deref()?.postStateToWebview()
		}

		this.messageQueueService.on("stateChanged", this.messageQueueStateChangedHandler)

		// Listen for provider profile changes to update parser state
		this.setupProviderProfileChangeListener(provider)

		// Only set up diff strategy if diff is enabled.
		if (this.diffEnabled) {
			// Default to the old strategy; it will be updated if the experiment is enabled.
			this.diffStrategy = new MultiSearchReplaceDiffStrategy(this.fuzzyMatchThreshold)

			// Check the experiment asynchronously and update the strategy if needed.
			provider.getState().then((state) => {
				const isMultiFileApplyDiffEnabled = experiments.isEnabled(
					state.experiments ?? {},
					EXPERIMENT_IDS.MULTI_FILE_APPLY_DIFF,
				)

				if (isMultiFileApplyDiffEnabled) {
					this.diffStrategy = new MultiFileSearchReplaceDiffStrategy(this.fuzzyMatchThreshold)
				}
			})
		}

		this.toolRepetitionDetector = new ToolRepetitionDetector(this.consecutiveMistakeLimit)

		// Initialize the todo list if provided
		if (initialTodos && initialTodos.length > 0) {
			this.todoList = initialTodos
		}

		// Initialize the debounced token usage emit function.
		// Uses debounce with maxWait to achieve throttle-like behavior:
		// - leading: true - Emit immediately on first call
		// - trailing: true - Emit final state when updates stop
		// - maxWait - Ensures at most one emit per interval during rapid updates (throttle behavior)
		this.debouncedEmitTokenUsage = debounce(
			(tokenUsage: TokenUsage, toolUsage: ToolUsage) => {
				const tokenChanged = hasTokenUsageChanged(tokenUsage, this.tokenUsageSnapshot)
				const toolChanged = hasToolUsageChanged(toolUsage, this.toolUsageSnapshot)

				if (tokenChanged || toolChanged) {
					this.emit(RooCodeEventName.TaskTokenUsageUpdated, this.taskId, tokenUsage, toolUsage)
					this.tokenUsageSnapshot = tokenUsage
					this.tokenUsageSnapshotAt = this.clineMessages.at(-1)?.ts
					// Deep copy tool usage for snapshot
					this.toolUsageSnapshot = JSON.parse(JSON.stringify(toolUsage))
				}
			},
			this.TOKEN_USAGE_EMIT_INTERVAL_MS,
			{ leading: true, trailing: true, maxWait: this.TOKEN_USAGE_EMIT_INTERVAL_MS },
		)

		onCreated?.(this)

		if (startTask) {
			if (task || images) {
				this.startTask(task, images)
			} else if (historyItem) {
				this.resumeTaskFromHistory()
			} else {
				throw new Error("Either historyItem or task/images must be provided")
			}
		}
	}
	/**
	 * Initialize the task mode from the provider state.
	 * This method handles async initialization with proper error handling.
	 *
	 * ## Flow
	 * 1. Attempts to fetch the current mode from provider state
	 * 2. Sets `_taskMode` to the fetched mode or `defaultModeSlug` if unavailable
	 * 3. Handles errors gracefully by falling back to default mode
	 * 4. Logs any initialization errors for debugging
	 *
	 * ## Error handling
	 * - Network failures when fetching provider state
	 * - Provider not yet initialized
	 * - Invalid state structure
	 *
	 * All errors result in fallback to `defaultModeSlug` to ensure task can proceed.
	 *
	 * @private
	 * @param provider - The ClineProvider instance to fetch state from
	 * @returns Promise that resolves when initialization is complete
	 */
	private async initializeTaskMode(provider: ClineProvider): Promise<void> {
		try {
			const state = await provider.getState()
			this._taskMode = state?.mode || defaultModeSlug
		} catch (error) {
			// If there's an error getting state, use the default mode
			this._taskMode = defaultModeSlug

			// Use the provider's log method for better error visibility
			const errorMessage = `Failed to initialize task mode: ${error instanceof Error ? error.message : String(error)}`
			provider.log(errorMessage)
		}
	}

	/**
	 * Sets up a listener for provider profile changes to automatically update the parser state.
	 * This ensures the XML/native protocol parser stays synchronized with the current model.
	 *
	 * @private
	 * @param provider - The ClineProvider instance to listen to
	 */
	private setupProviderProfileChangeListener(provider: ClineProvider): void {
		// Only set up listener if provider has the on method (may not exist in test mocks)
		if (typeof provider.on !== "function") {
			return
		}

		this.providerProfileChangeListener = async () => {
			try {
				const newState = await provider.getState()

				if (newState?.apiConfiguration) {
					this.updateApiConfiguration(newState.apiConfiguration)
				}
			} catch (error) {
				console.error(
					`[Task#${this.taskId}.${this.instanceId}] Failed to update API configuration on profile change:`,
					error,
				)
			}
		}

		provider.on(RooCodeEventName.ProviderProfileChanged, this.providerProfileChangeListener)
	}

	/**
	 * Wait for the task mode to be initialized before proceeding.
	 * This method ensures that any operations depending on the task mode
	 * will have access to the correct mode value.
	 *
	 * ## When to use
	 * - Before accessing mode-specific configurations
	 * - When switching between tasks with different modes
	 * - Before operations that depend on mode-based permissions
	 *
	 * ## Example usage
	 * ```typescript
	 * // Wait for mode initialization before mode-dependent operations
	 * await task.waitForModeInitialization();
	 * const mode = task.taskMode; // Now safe to access synchronously
	 *
	 * // Or use with getTaskMode() for a one-liner
	 * const mode = await task.getTaskMode(); // Internally waits for initialization
	 * ```
	 *
	 * @returns Promise that resolves when the task mode is initialized
	 * @public
	 */
	public async waitForModeInitialization(): Promise<void> {
		return this.taskModeReady
	}
	/**
	 * Get the task mode asynchronously, ensuring it's properly initialized.
	 * This is the recommended way to access the task mode as it guarantees
	 * the mode is available before returning.
	 *
	 * ## Async behavior
	 * - Internally waits for `taskModeReady` promise to resolve
	 * - Returns the initialized mode or `defaultModeSlug` as fallback
	 * - Safe to call multiple times - subsequent calls return immediately if already initialized
	 *
	 * ## Example usage
	 * ```typescript
	 * // Safe async access
	 * const mode = await task.getTaskMode();
	 * console.log(`Task is running in ${mode} mode`);
	 *
	 * // Use in conditional logic
	 * if (await task.getTaskMode() === 'architect') {
	 *   // Perform architect-specific operations
	 * }
	 * ```
	 *
	 * @returns Promise resolving to the task mode string
	 * @public
	 */
	public async getTaskMode(): Promise<string> {
		await this.taskModeReady
		return this._taskMode || defaultModeSlug
	}

	/**
	 * Get the task mode synchronously. This should only be used when you're certain
	 * that the mode has already been initialized (e.g., after waitForModeInitialization).
	 *
	 * ## When to use
	 * - In synchronous contexts where async/await is not available
	 * - After explicitly waiting for initialization via `waitForModeInitialization()`
	 * - In event handlers or callbacks where mode is guaranteed to be initialized
	 *
	 * ## Example usage
	 * ```typescript
	 * // After ensuring initialization
	 * await task.waitForModeInitialization();
	 * const mode = task.taskMode; // Safe synchronous access
	 *
	 * // In an event handler after task is started
	 * task.on('taskStarted', () => {
	 *   console.log(`Task started in ${task.taskMode} mode`); // Safe here
	 * });
	 * ```
	 *
	 * @throws {Error} If the mode hasn't been initialized yet
	 * @returns The task mode string
	 * @public
	 */
	public get taskMode(): string {
		if (this._taskMode === undefined) {
			throw new Error("Task mode accessed before initialization. Use getTaskMode() or wait for taskModeReady.")
		}

		return this._taskMode
	}

	static create(options: TaskOptions): [Task, Promise<void>] {
		const instance = new Task({ ...options, startTask: false })
		const { images, task, historyItem } = options
		let promise

		if (images || task) {
			promise = instance.startTask(task, images)
		} else if (historyItem) {
			promise = instance.resumeTaskFromHistory()
		} else {
			throw new Error("Either historyItem or task/images must be provided")
		}

		return [instance, promise]
	}
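
	// Illustrative usage of the factory above (not part of the original file; assumes a
	// fully-populated `options: TaskOptions` from the host extension). `Task.create` returns
	// the instance plus the start/resume promise, so callers can attach listeners before
	// awaiting it and surface any startup errors:
	//
	//   const [task, started] = Task.create(options)
	//   task.on(RooCodeEventName.Message, ({ action, message }) => {
	//       // forward "created"/"updated" messages to the UI layer
	//   })
	//   await started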
	// API Messages

	private async getSavedApiConversationHistory(): Promise<ApiMessage[]> {
		return readApiMessages({ taskId: this.taskId, globalStoragePath: this.globalStoragePath })
	}

	private async addToApiConversationHistory(message: Anthropic.MessageParam, reasoning?: string) {
		// Capture the encrypted_content / thought signatures from the provider (e.g., OpenAI Responses API, Google GenAI) if present.
		// We only persist data reported by the current response body.
		const handler = this.api as ApiHandler & {
			getResponseId?: () => string | undefined
			getEncryptedContent?: () => { encrypted_content: string; id?: string } | undefined
			getThoughtSignature?: () => string | undefined
			getSummary?: () => any[] | undefined
			getReasoningDetails?: () => any[] | undefined
		}

		if (message.role === "assistant") {
			const responseId = handler.getResponseId?.()
			const reasoningData = handler.getEncryptedContent?.()
			const thoughtSignature = handler.getThoughtSignature?.()
			const reasoningSummary = handler.getSummary?.()
			const reasoningDetails = handler.getReasoningDetails?.()

			// Start from the original assistant message
			const messageWithTs: any = {
				...message,
				...(responseId ? { id: responseId } : {}),
				ts: Date.now(),
			}

			// Store reasoning_details array if present (for models like Gemini 3)
			if (reasoningDetails) {
				messageWithTs.reasoning_details = reasoningDetails
			}

			// Store reasoning: Anthropic thinking (with signature), plain text (most providers), or encrypted (OpenAI Native).
			// Skip if reasoning_details already contains the reasoning (to avoid duplication).
			if (reasoning && thoughtSignature && !reasoningDetails) {
				// Anthropic provider with extended thinking: Store as proper `thinking` block.
				// This format passes through anthropic-filter.ts and is properly round-tripped
				// for interleaved thinking with tool use (required by Anthropic API).
				const thinkingBlock = {
					type: "thinking",
					thinking: reasoning,
					signature: thoughtSignature,
				}

				if (typeof messageWithTs.content === "string") {
					messageWithTs.content = [
						thinkingBlock,
						{ type: "text", text: messageWithTs.content } satisfies Anthropic.Messages.TextBlockParam,
					]
				} else if (Array.isArray(messageWithTs.content)) {
					messageWithTs.content = [thinkingBlock, ...messageWithTs.content]
				} else if (!messageWithTs.content) {
					messageWithTs.content = [thinkingBlock]
				}
			} else if (reasoning && !reasoningDetails) {
				// Other providers (non-Anthropic): Store as generic reasoning block
				const reasoningBlock = {
					type: "reasoning",
					text: reasoning,
					summary: reasoningSummary ?? ([] as any[]),
				}

				if (typeof messageWithTs.content === "string") {
					messageWithTs.content = [
						reasoningBlock,
						{ type: "text", text: messageWithTs.content } satisfies Anthropic.Messages.TextBlockParam,
					]
				} else if (Array.isArray(messageWithTs.content)) {
					messageWithTs.content = [reasoningBlock, ...messageWithTs.content]
				} else if (!messageWithTs.content) {
					messageWithTs.content = [reasoningBlock]
				}
			} else if (reasoningData?.encrypted_content) {
				// OpenAI Native encrypted reasoning
				const reasoningBlock = {
					type: "reasoning",
					summary: [] as any[],
					encrypted_content: reasoningData.encrypted_content,
					...(reasoningData.id ? { id: reasoningData.id } : {}),
				}

				if (typeof messageWithTs.content === "string") {
					messageWithTs.content = [
						reasoningBlock,
						{ type: "text", text: messageWithTs.content } satisfies Anthropic.Messages.TextBlockParam,
					]
				} else if (Array.isArray(messageWithTs.content)) {
					messageWithTs.content = [reasoningBlock, ...messageWithTs.content]
				} else if (!messageWithTs.content) {
					messageWithTs.content = [reasoningBlock]
				}
			}

			// If we have a thought signature WITHOUT reasoning text (edge case),
			// append it as a dedicated content block for non-Anthropic providers (e.g., Gemini).
			// Note: For Anthropic, the signature is already included in the thinking block above.
			if (thoughtSignature && !reasoning) {
				const thoughtSignatureBlock = {
					type: "thoughtSignature",
					thoughtSignature,
				}

				if (typeof messageWithTs.content === "string") {
					messageWithTs.content = [
						{ type: "text", text: messageWithTs.content } satisfies Anthropic.Messages.TextBlockParam,
						thoughtSignatureBlock,
					]
				} else if (Array.isArray(messageWithTs.content)) {
					messageWithTs.content = [...messageWithTs.content, thoughtSignatureBlock]
				} else if (!messageWithTs.content) {
					messageWithTs.content = [thoughtSignatureBlock]
				}
			}

			this.apiConversationHistory.push(messageWithTs)
		} else {
			// For user messages, validate and fix tool_result IDs against the previous assistant message
			const validatedMessage = validateAndFixToolResultIds(message, this.apiConversationHistory)
			const messageWithTs = { ...validatedMessage, ts: Date.now() }
			this.apiConversationHistory.push(messageWithTs)
		}

		await this.saveApiConversationHistory()
	}
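
	// Illustrative shape (not from the original file) of an assistant entry persisted by the
	// method above when Anthropic extended thinking is active, i.e. `reasoning` and a thought
	// signature are present and no `reasoning_details` were reported. Values are placeholders:
	//
	//   {
	//       role: "assistant",
	//       id: "resp_123",          // only present if the handler reported a response id
	//       ts: 1700000000000,
	//       content: [
	//           { type: "thinking", thinking: "<reasoning text>", signature: "<signature>" },
	//           { type: "text", text: "<assistant reply>" },
	//       ],
	//   }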
	async overwriteApiConversationHistory(newHistory: ApiMessage[]) {
		this.apiConversationHistory = newHistory
		await this.saveApiConversationHistory()
	}

	/**
	 * Flush any pending tool results to the API conversation history.
	 *
	 * This is critical for native tool protocol when the task is about to be
	 * delegated (e.g., via new_task). Before delegation, if other tools were
	 * called in the same turn before new_task, their tool_result blocks are
	 * accumulated in `userMessageContent` but haven't been saved to the API
	 * history yet. If we don't flush them before the parent is disposed,
	 * the API conversation will be incomplete and cause 400 errors when
	 * the parent resumes (missing tool_result for tool_use blocks).
	 *
	 * NOTE: The assistant message is typically already in history by the time
	 * tools execute (added in recursivelyMakeClineRequests after streaming completes).
	 * So we usually only need to flush the pending user message with tool_results.
	 */
	public async flushPendingToolResultsToHistory(): Promise<void> {
		// Only flush if there's actually pending content to save
		if (this.userMessageContent.length === 0) {
			return
		}

		// Save the user message with tool_result blocks
		const userMessage: Anthropic.MessageParam = {
			role: "user",
			content: this.userMessageContent,
		}

		// Validate and fix tool_result IDs against the previous assistant message
		const validatedMessage = validateAndFixToolResultIds(userMessage, this.apiConversationHistory)
		const userMessageWithTs = { ...validatedMessage, ts: Date.now() }
		this.apiConversationHistory.push(userMessageWithTs as ApiMessage)
		await this.saveApiConversationHistory()

		// Clear the pending content since it's now saved
		this.userMessageContent = []
	}
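
	// Illustrative call site (an assumption based on the doc comment above, not shown in this
	// excerpt): the new_task delegation path would flush before handing off, so every tool_use
	// in the last assistant message has a persisted tool_result when the parent later resumes:
	//
	//   await parentTask.flushPendingToolResultsToHistory()
	//   // ...now it is safe to delegate to the child task and park the parent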
  823. private async saveApiConversationHistory() {
  824. try {
  825. await saveApiMessages({
  826. messages: this.apiConversationHistory,
  827. taskId: this.taskId,
  828. globalStoragePath: this.globalStoragePath,
  829. })
  830. } catch (error) {
  831. // In the off chance this fails, we don't want to stop the task.
  832. console.error("Failed to save API conversation history:", error)
  833. }
  834. }
  835. // Cline Messages
  836. private async getSavedClineMessages(): Promise<ClineMessage[]> {
  837. return readTaskMessages({ taskId: this.taskId, globalStoragePath: this.globalStoragePath })
  838. }
  839. private async addToClineMessages(message: ClineMessage) {
  840. this.clineMessages.push(message)
  841. const provider = this.providerRef.deref()
  842. await provider?.postStateToWebview()
  843. this.emit(RooCodeEventName.Message, { action: "created", message })
  844. await this.saveClineMessages()
  845. const shouldCaptureMessage = message.partial !== true && CloudService.isEnabled()
  846. if (shouldCaptureMessage) {
  847. CloudService.instance.captureEvent({
  848. event: TelemetryEventName.TASK_MESSAGE,
  849. properties: { taskId: this.taskId, message },
  850. })
  851. // Track that this message has been synced to cloud
  852. this.cloudSyncedMessageTimestamps.add(message.ts)
  853. }
  854. }
  855. public async overwriteClineMessages(newMessages: ClineMessage[]) {
  856. this.clineMessages = newMessages
  857. restoreTodoListForTask(this)
  858. await this.saveClineMessages()
  859. // When overwriting messages (e.g., during task resume), repopulate the cloud sync tracking Set
  860. // with timestamps from all non-partial messages to prevent re-syncing previously synced messages
  861. this.cloudSyncedMessageTimestamps.clear()
  862. for (const msg of newMessages) {
  863. if (msg.partial !== true) {
  864. this.cloudSyncedMessageTimestamps.add(msg.ts)
  865. }
  866. }
  867. }
  868. private async updateClineMessage(message: ClineMessage) {
  869. const provider = this.providerRef.deref()
  870. await provider?.postMessageToWebview({ type: "messageUpdated", clineMessage: message })
  871. this.emit(RooCodeEventName.Message, { action: "updated", message })
  872. // Check if we should sync to cloud and haven't already synced this message
  873. const shouldCaptureMessage = message.partial !== true && CloudService.isEnabled()
  874. const hasNotBeenSynced = !this.cloudSyncedMessageTimestamps.has(message.ts)
  875. if (shouldCaptureMessage && hasNotBeenSynced) {
  876. CloudService.instance.captureEvent({
  877. event: TelemetryEventName.TASK_MESSAGE,
  878. properties: { taskId: this.taskId, message },
  879. })
  880. // Track that this message has been synced to cloud
  881. this.cloudSyncedMessageTimestamps.add(message.ts)
  882. }
  883. }
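// Persist webview messages to disk, refresh the task's history item and metadata,
// and emit a (debounced) token/tool usage update.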
  884. private async saveClineMessages() {
  885. try {
  886. await saveTaskMessages({
  887. messages: this.clineMessages,
  888. taskId: this.taskId,
  889. globalStoragePath: this.globalStoragePath,
  890. })
  891. const { historyItem, tokenUsage } = await taskMetadata({
  892. taskId: this.taskId,
  893. rootTaskId: this.rootTaskId,
  894. parentTaskId: this.parentTaskId,
  895. taskNumber: this.taskNumber,
  896. messages: this.clineMessages,
  897. globalStoragePath: this.globalStoragePath,
  898. workspace: this.cwd,
  899. mode: this._taskMode || defaultModeSlug, // Use the task's own mode, not the current provider mode.
  900. initialStatus: this.initialStatus,
  901. toolProtocol: this._taskToolProtocol, // Persist the locked tool protocol.
  902. })
  903. // Emit token/tool usage updates using debounced function
  904. // The debounce with maxWait ensures:
  905. // - Immediate first emit (leading: true)
  906. // - At most one emit per interval during rapid updates (maxWait)
  907. // - Final state is emitted when updates stop (trailing: true)
  908. this.debouncedEmitTokenUsage(tokenUsage, this.toolUsage)
  909. await this.providerRef.deref()?.updateTaskHistory(historyItem)
  910. } catch (error) {
  911. console.error("Failed to save Roo messages:", error)
  912. }
  913. }
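// Look up a message by its timestamp, scanning from the newest message backwards.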
  914. private findMessageByTimestamp(ts: number): ClineMessage | undefined {
  915. for (let i = this.clineMessages.length - 1; i >= 0; i--) {
  916. if (this.clineMessages[i].ts === ts) {
  917. return this.clineMessages[i]
  918. }
  919. }
  920. return undefined
  921. }
922. // Note that `partial` has three valid states: true (partial message),
923. // false (completion of a partial message), and undefined (an individual
924. // complete message).
  925. async ask(
  926. type: ClineAsk,
  927. text?: string,
  928. partial?: boolean,
  929. progressStatus?: ToolProgressStatus,
  930. isProtected?: boolean,
  931. ): Promise<{ response: ClineAskResponse; text?: string; images?: string[] }> {
  932. // If this Cline instance was aborted by the provider, then the only
  933. // thing keeping us alive is a promise still running in the background,
  934. // in which case we don't want to send its result to the webview as it
  935. // is attached to a new instance of Cline now. So we can safely ignore
  936. // the result of any active promises, and this class will be
  937. // deallocated. (Although we set Cline = undefined in provider, that
  938. // simply removes the reference to this instance, but the instance is
  939. // still alive until this promise resolves or rejects.)
  940. if (this.abort) {
  941. throw new Error(`[RooCode#ask] task ${this.taskId}.${this.instanceId} aborted`)
  942. }
  943. let askTs: number
  944. if (partial !== undefined) {
  945. const lastMessage = this.clineMessages.at(-1)
  946. const isUpdatingPreviousPartial =
  947. lastMessage && lastMessage.partial && lastMessage.type === "ask" && lastMessage.ask === type
  948. if (partial) {
  949. if (isUpdatingPreviousPartial) {
  950. // Existing partial message, so update it.
  951. lastMessage.text = text
  952. lastMessage.partial = partial
  953. lastMessage.progressStatus = progressStatus
  954. lastMessage.isProtected = isProtected
  955. // TODO: Be more efficient about saving and posting only new
  956. // data or one whole message at a time so ignore partial for
  957. // saves, and only post parts of partial message instead of
  958. // whole array in new listener.
  959. this.updateClineMessage(lastMessage)
  960. // console.log("Task#ask: current ask promise was ignored (#1)")
  961. throw new AskIgnoredError("updating existing partial")
  962. } else {
  963. // This is a new partial message, so add it with partial
  964. // state.
  965. askTs = Date.now()
  966. this.lastMessageTs = askTs
  967. console.log(`Task#ask: new partial ask -> ${type} @ ${askTs}`)
  968. await this.addToClineMessages({ ts: askTs, type: "ask", ask: type, text, partial, isProtected })
  969. // console.log("Task#ask: current ask promise was ignored (#2)")
  970. throw new AskIgnoredError("new partial")
  971. }
  972. } else {
  973. if (isUpdatingPreviousPartial) {
  974. // This is the complete version of a previously partial
  975. // message, so replace the partial with the complete version.
  976. this.askResponse = undefined
  977. this.askResponseText = undefined
  978. this.askResponseImages = undefined
  979. // Bug for the history books:
  980. // In the webview we use the ts as the chatrow key for the
  981. // virtuoso list. Since we would update this ts right at the
  982. // end of streaming, it would cause the view to flicker. The
  983. // key prop has to be stable otherwise react has trouble
  984. // reconciling items between renders, causing unmounting and
  985. // remounting of components (flickering).
  986. // The lesson here is if you see flickering when rendering
  987. // lists, it's likely because the key prop is not stable.
  988. // So in this case we must make sure that the message ts is
  989. // never altered after first setting it.
  990. askTs = lastMessage.ts
  991. console.log(`Task#ask: updating previous partial ask -> ${type} @ ${askTs}`)
  992. this.lastMessageTs = askTs
  993. lastMessage.text = text
  994. lastMessage.partial = false
  995. lastMessage.progressStatus = progressStatus
  996. lastMessage.isProtected = isProtected
  997. await this.saveClineMessages()
  998. this.updateClineMessage(lastMessage)
  999. } else {
  1000. // This is a new and complete message, so add it like normal.
  1001. this.askResponse = undefined
  1002. this.askResponseText = undefined
  1003. this.askResponseImages = undefined
  1004. askTs = Date.now()
  1005. console.log(`Task#ask: new complete ask -> ${type} @ ${askTs}`)
  1006. this.lastMessageTs = askTs
  1007. await this.addToClineMessages({ ts: askTs, type: "ask", ask: type, text, isProtected })
  1008. }
  1009. }
  1010. } else {
  1011. // This is a new non-partial message, so add it like normal.
  1012. this.askResponse = undefined
  1013. this.askResponseText = undefined
  1014. this.askResponseImages = undefined
  1015. askTs = Date.now()
  1016. console.log(`Task#ask: new complete ask -> ${type} @ ${askTs}`)
  1017. this.lastMessageTs = askTs
  1018. await this.addToClineMessages({ ts: askTs, type: "ask", ask: type, text, isProtected })
  1019. }
  1020. let timeouts: NodeJS.Timeout[] = []
1021. // Automatically resolve the ask according to the user's auto-approval settings.
  1022. const provider = this.providerRef.deref()
  1023. const state = provider ? await provider.getState() : undefined
  1024. const approval = await checkAutoApproval({ state, ask: type, text, isProtected })
  1025. if (approval.decision === "approve") {
  1026. this.approveAsk()
  1027. } else if (approval.decision === "deny") {
  1028. this.denyAsk()
  1029. } else if (approval.decision === "timeout") {
  1030. // Store the auto-approval timeout so it can be cancelled if user interacts
  1031. this.autoApprovalTimeoutRef = setTimeout(() => {
  1032. const { askResponse, text, images } = approval.fn()
  1033. this.handleWebviewAskResponse(askResponse, text, images)
  1034. this.autoApprovalTimeoutRef = undefined
  1035. }, approval.timeout)
  1036. timeouts.push(this.autoApprovalTimeoutRef)
  1037. }
  1038. // The state is mutable if the message is complete and the task will
  1039. // block (via the `pWaitFor`).
  1040. const isBlocking = !(this.askResponse !== undefined || this.lastMessageTs !== askTs)
  1041. const isMessageQueued = !this.messageQueueService.isEmpty()
  1042. const isStatusMutable = !partial && isBlocking && !isMessageQueued && approval.decision === "ask"
  1043. if (isBlocking) {
  1044. console.log(`Task#ask will block -> type: ${type}`)
  1045. }
  1046. if (isStatusMutable) {
  1047. console.log(`Task#ask: status is mutable -> type: ${type}`)
  1048. const statusMutationTimeout = 2_000
  1049. if (isInteractiveAsk(type)) {
  1050. timeouts.push(
  1051. setTimeout(() => {
  1052. const message = this.findMessageByTimestamp(askTs)
  1053. if (message) {
  1054. this.interactiveAsk = message
  1055. this.emit(RooCodeEventName.TaskInteractive, this.taskId)
  1056. provider?.postMessageToWebview({ type: "interactionRequired" })
  1057. }
  1058. }, statusMutationTimeout),
  1059. )
  1060. } else if (isResumableAsk(type)) {
  1061. timeouts.push(
  1062. setTimeout(() => {
  1063. const message = this.findMessageByTimestamp(askTs)
  1064. if (message) {
  1065. this.resumableAsk = message
  1066. this.emit(RooCodeEventName.TaskResumable, this.taskId)
  1067. }
  1068. }, statusMutationTimeout),
  1069. )
  1070. } else if (isIdleAsk(type)) {
  1071. timeouts.push(
  1072. setTimeout(() => {
  1073. const message = this.findMessageByTimestamp(askTs)
  1074. if (message) {
  1075. this.idleAsk = message
  1076. this.emit(RooCodeEventName.TaskIdle, this.taskId)
  1077. }
  1078. }, statusMutationTimeout),
  1079. )
  1080. }
  1081. } else if (isMessageQueued) {
  1082. console.log(`Task#ask: will process message queue -> type: ${type}`)
  1083. const message = this.messageQueueService.dequeueMessage()
  1084. if (message) {
  1085. // Check if this is a tool approval ask that needs to be handled.
  1086. if (
  1087. type === "tool" ||
  1088. type === "command" ||
  1089. type === "browser_action_launch" ||
  1090. type === "use_mcp_server"
  1091. ) {
  1092. // For tool approvals, we need to approve first, then send
  1093. // the message if there's text/images.
  1094. this.handleWebviewAskResponse("yesButtonClicked", message.text, message.images)
  1095. } else {
  1096. // For other ask types (like followup or command_output), fulfill the ask
  1097. // directly.
  1098. this.handleWebviewAskResponse("messageResponse", message.text, message.images)
  1099. }
  1100. }
  1101. }
  1102. // Wait for askResponse to be set
  1103. await pWaitFor(() => this.askResponse !== undefined || this.lastMessageTs !== askTs, { interval: 100 })
  1104. if (this.lastMessageTs !== askTs) {
1105. // Could happen if we send multiple asks in a row, e.g. with
1106. // command_output. It's important that when we know an ask could
1107. // fail, it is handled gracefully.
  1108. console.log("Task#ask: current ask promise was ignored")
  1109. throw new AskIgnoredError("superseded")
  1110. }
  1111. const result = { response: this.askResponse!, text: this.askResponseText, images: this.askResponseImages }
  1112. this.askResponse = undefined
  1113. this.askResponseText = undefined
  1114. this.askResponseImages = undefined
  1115. // Cancel the timeouts if they are still running.
  1116. timeouts.forEach((timeout) => clearTimeout(timeout))
  1117. // Switch back to an active state.
  1118. if (this.idleAsk || this.resumableAsk || this.interactiveAsk) {
  1119. this.idleAsk = undefined
  1120. this.resumableAsk = undefined
  1121. this.interactiveAsk = undefined
  1122. this.emit(RooCodeEventName.TaskActive, this.taskId)
  1123. }
  1124. this.emit(RooCodeEventName.TaskAskResponded)
  1125. return result
  1126. }
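// Record the user's (or auto-approval's) answer to the pending ask and perform
// related bookkeeping (checkpointing, marking follow-ups as answered).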
  1127. handleWebviewAskResponse(askResponse: ClineAskResponse, text?: string, images?: string[]) {
  1128. // Clear any pending auto-approval timeout when user responds
  1129. this.cancelAutoApprovalTimeout()
  1130. this.askResponse = askResponse
  1131. this.askResponseText = text
  1132. this.askResponseImages = images
  1133. // Create a checkpoint whenever the user sends a message.
  1134. // Use allowEmpty=true to ensure a checkpoint is recorded even if there are no file changes.
  1135. // Suppress the checkpoint_saved chat row for this particular checkpoint to keep the timeline clean.
  1136. if (askResponse === "messageResponse") {
  1137. void this.checkpointSave(false, true)
  1138. }
  1139. // Mark the last follow-up question as answered
  1140. if (askResponse === "messageResponse" || askResponse === "yesButtonClicked") {
  1141. // Find the last unanswered follow-up message using findLastIndex
  1142. const lastFollowUpIndex = findLastIndex(
  1143. this.clineMessages,
  1144. (msg) => msg.type === "ask" && msg.ask === "followup" && !msg.isAnswered,
  1145. )
  1146. if (lastFollowUpIndex !== -1) {
  1147. // Mark this follow-up as answered
  1148. this.clineMessages[lastFollowUpIndex].isAnswered = true
  1149. // Save the updated messages
  1150. this.saveClineMessages().catch((error) => {
  1151. console.error("Failed to save answered follow-up state:", error)
  1152. })
  1153. }
  1154. }
  1155. }
  1156. /**
  1157. * Cancel any pending auto-approval timeout.
  1158. * Called when user interacts (types, clicks buttons, etc.) to prevent the timeout from firing.
  1159. */
  1160. public cancelAutoApprovalTimeout(): void {
  1161. if (this.autoApprovalTimeoutRef) {
  1162. clearTimeout(this.autoApprovalTimeoutRef)
  1163. this.autoApprovalTimeoutRef = undefined
  1164. }
  1165. }
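// Convenience wrappers for approving or denying the pending ask programmatically.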
  1166. public approveAsk({ text, images }: { text?: string; images?: string[] } = {}) {
  1167. this.handleWebviewAskResponse("yesButtonClicked", text, images)
  1168. }
  1169. public denyAsk({ text, images }: { text?: string; images?: string[] } = {}) {
  1170. this.handleWebviewAskResponse("noButtonClicked", text, images)
  1171. }
  1172. /**
  1173. * Updates the API configuration but preserves the locked tool protocol.
  1174. * The task's tool protocol is locked at creation time and should NOT change
  1175. * even when switching between models/profiles with different settings.
  1176. *
  1177. * @param newApiConfiguration - The new API configuration to use
  1178. */
  1179. public updateApiConfiguration(newApiConfiguration: ProviderSettings): void {
  1180. // Update the configuration and rebuild the API handler
  1181. this.apiConfiguration = newApiConfiguration
  1182. this.api = buildApiHandler(newApiConfiguration)
  1183. // IMPORTANT: Do NOT change the parser based on the new configuration!
  1184. // The task's tool protocol is locked at creation time and must remain
  1185. // consistent throughout the task's lifetime to ensure history can be
  1186. // properly resumed.
  1187. }
  1188. public async submitUserMessage(
  1189. text: string,
  1190. images?: string[],
  1191. mode?: string,
  1192. providerProfile?: string,
  1193. ): Promise<void> {
  1194. try {
  1195. text = (text ?? "").trim()
  1196. images = images ?? []
  1197. if (text.length === 0 && images.length === 0) {
  1198. return
  1199. }
  1200. const provider = this.providerRef.deref()
  1201. if (provider) {
  1202. if (mode) {
  1203. await provider.setMode(mode)
  1204. }
  1205. if (providerProfile) {
  1206. await provider.setProviderProfile(providerProfile)
  1207. // Update this task's API configuration to match the new profile
  1208. // This ensures the parser state is synchronized with the selected model
  1209. const newState = await provider.getState()
  1210. if (newState?.apiConfiguration) {
  1211. this.updateApiConfiguration(newState.apiConfiguration)
  1212. }
  1213. }
  1214. this.emit(RooCodeEventName.TaskUserMessage, this.taskId)
  1215. provider.postMessageToWebview({ type: "invoke", invoke: "sendMessage", text, images })
  1216. } else {
  1217. console.error("[Task#submitUserMessage] Provider reference lost")
  1218. }
  1219. } catch (error) {
  1220. console.error("[Task#submitUserMessage] Failed to submit user message:", error)
  1221. }
  1222. }
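// Forward a continue/abort request to the currently tracked terminal process, if any.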
  1223. async handleTerminalOperation(terminalOperation: "continue" | "abort") {
  1224. if (terminalOperation === "continue") {
  1225. this.terminalProcess?.continue()
  1226. } else if (terminalOperation === "abort") {
  1227. this.terminalProcess?.abort()
  1228. }
  1229. }
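// Manually condense the conversation by summarizing the API history, optionally using
// a dedicated condensing prompt and API profile configured by the user.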
  1230. public async condenseContext(): Promise<void> {
  1231. const systemPrompt = await this.getSystemPrompt()
  1232. // Get condensing configuration
  1233. const state = await this.providerRef.deref()?.getState()
  1234. // These properties may not exist in the state type yet, but are used for condensing configuration
  1235. const customCondensingPrompt = state?.customCondensingPrompt
  1236. const condensingApiConfigId = state?.condensingApiConfigId
  1237. const listApiConfigMeta = state?.listApiConfigMeta
  1238. // Determine API handler to use
  1239. let condensingApiHandler: ApiHandler | undefined
  1240. if (condensingApiConfigId && listApiConfigMeta && Array.isArray(listApiConfigMeta)) {
  1241. // Find matching config by ID
  1242. const matchingConfig = listApiConfigMeta.find((config) => config.id === condensingApiConfigId)
  1243. if (matchingConfig) {
  1244. const profile = await this.providerRef.deref()?.providerSettingsManager.getProfile({
  1245. id: condensingApiConfigId,
  1246. })
  1247. // Ensure profile and apiProvider exist before trying to build handler
  1248. if (profile && profile.apiProvider) {
  1249. condensingApiHandler = buildApiHandler(profile)
  1250. }
  1251. }
  1252. }
  1253. const { contextTokens: prevContextTokens } = this.getTokenUsage()
  1254. // Determine if we're using native tool protocol for proper message handling
  1255. // Use the task's locked protocol, NOT the current settings (fallback to xml if not set)
  1256. const useNativeTools = isNativeProtocol(this._taskToolProtocol ?? "xml")
  1257. const {
  1258. messages,
  1259. summary,
  1260. cost,
  1261. newContextTokens = 0,
  1262. error,
  1263. condenseId,
  1264. } = await summarizeConversation(
  1265. this.apiConversationHistory,
  1266. this.api, // Main API handler (fallback)
  1267. systemPrompt, // Default summarization prompt (fallback)
  1268. this.taskId,
  1269. prevContextTokens,
  1270. false, // manual trigger
  1271. customCondensingPrompt, // User's custom prompt
  1272. condensingApiHandler, // Specific handler for condensing
  1273. useNativeTools, // Pass native tools flag for proper message handling
  1274. )
  1275. if (error) {
  1276. this.say(
  1277. "condense_context_error",
  1278. error,
  1279. undefined /* images */,
  1280. false /* partial */,
  1281. undefined /* checkpoint */,
  1282. undefined /* progressStatus */,
  1283. { isNonInteractive: true } /* options */,
  1284. )
  1285. return
  1286. }
  1287. await this.overwriteApiConversationHistory(messages)
  1288. const contextCondense: ContextCondense = {
  1289. summary,
  1290. cost,
  1291. newContextTokens,
  1292. prevContextTokens,
  1293. condenseId: condenseId!,
  1294. }
  1295. await this.say(
  1296. "condense_context",
  1297. undefined /* text */,
  1298. undefined /* images */,
  1299. false /* partial */,
  1300. undefined /* checkpoint */,
  1301. undefined /* progressStatus */,
  1302. { isNonInteractive: true } /* options */,
  1303. contextCondense,
  1304. )
  1305. // Process any queued messages after condensing completes
  1306. this.processQueuedMessages()
  1307. }
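// Post a "say" message to the webview. As with ask(), `partial` has three states:
// true (streaming chunk), false (completion of a partial message), and undefined
// (an individual complete message).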
  1308. async say(
  1309. type: ClineSay,
  1310. text?: string,
  1311. images?: string[],
  1312. partial?: boolean,
  1313. checkpoint?: Record<string, unknown>,
  1314. progressStatus?: ToolProgressStatus,
  1315. options: {
  1316. isNonInteractive?: boolean
  1317. } = {},
  1318. contextCondense?: ContextCondense,
  1319. contextTruncation?: ContextTruncation,
  1320. ): Promise<undefined> {
  1321. if (this.abort) {
  1322. throw new Error(`[RooCode#say] task ${this.taskId}.${this.instanceId} aborted`)
  1323. }
  1324. if (partial !== undefined) {
  1325. const lastMessage = this.clineMessages.at(-1)
  1326. const isUpdatingPreviousPartial =
  1327. lastMessage && lastMessage.partial && lastMessage.type === "say" && lastMessage.say === type
  1328. if (partial) {
  1329. if (isUpdatingPreviousPartial) {
  1330. // Existing partial message, so update it.
  1331. lastMessage.text = text
  1332. lastMessage.images = images
  1333. lastMessage.partial = partial
  1334. lastMessage.progressStatus = progressStatus
  1335. this.updateClineMessage(lastMessage)
  1336. } else {
  1337. // This is a new partial message, so add it with partial state.
  1338. const sayTs = Date.now()
  1339. if (!options.isNonInteractive) {
  1340. this.lastMessageTs = sayTs
  1341. }
  1342. await this.addToClineMessages({
  1343. ts: sayTs,
  1344. type: "say",
  1345. say: type,
  1346. text,
  1347. images,
  1348. partial,
  1349. contextCondense,
  1350. contextTruncation,
  1351. })
  1352. }
  1353. } else {
1354. // We now have the complete version of a previously partial
1355. // message, so replace the partial with the complete version.
  1357. if (isUpdatingPreviousPartial) {
  1358. if (!options.isNonInteractive) {
  1359. this.lastMessageTs = lastMessage.ts
  1360. }
  1361. lastMessage.text = text
  1362. lastMessage.images = images
  1363. lastMessage.partial = false
  1364. lastMessage.progressStatus = progressStatus
  1365. // Instead of streaming partialMessage events, we do a save
  1366. // and post like normal to persist to disk.
  1367. await this.saveClineMessages()
  1368. // More performant than an entire `postStateToWebview`.
  1369. this.updateClineMessage(lastMessage)
  1370. } else {
  1371. // This is a new and complete message, so add it like normal.
  1372. const sayTs = Date.now()
  1373. if (!options.isNonInteractive) {
  1374. this.lastMessageTs = sayTs
  1375. }
  1376. await this.addToClineMessages({
  1377. ts: sayTs,
  1378. type: "say",
  1379. say: type,
  1380. text,
  1381. images,
  1382. contextCondense,
  1383. contextTruncation,
  1384. })
  1385. }
  1386. }
  1387. } else {
  1388. // This is a new non-partial message, so add it like normal.
  1389. const sayTs = Date.now()
1390. // A "non-interactive" message is one that the user
  1391. // does not need to respond to. We don't want these message types
  1392. // to trigger an update to `lastMessageTs` since they can be created
  1393. // asynchronously and could interrupt a pending ask.
  1394. if (!options.isNonInteractive) {
  1395. this.lastMessageTs = sayTs
  1396. }
  1397. await this.addToClineMessages({
  1398. ts: sayTs,
  1399. type: "say",
  1400. say: type,
  1401. text,
  1402. images,
  1403. checkpoint,
  1404. contextCondense,
  1405. contextTruncation,
  1406. })
  1407. }
  1408. // Broadcast browser session updates to panel when browser-related messages are added
  1409. if (type === "browser_action" || type === "browser_action_result" || type === "browser_session_status") {
  1410. this.broadcastBrowserSessionUpdate()
  1411. }
  1412. }
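// Tell the user a required tool parameter was missing and return a formatted tool
// error that prompts the model to retry.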
  1413. async sayAndCreateMissingParamError(toolName: ToolName, paramName: string, relPath?: string) {
  1414. await this.say(
  1415. "error",
  1416. `Roo tried to use ${toolName}${
  1417. relPath ? ` for '${relPath.toPosix()}'` : ""
  1418. } without value for required parameter '${paramName}'. Retrying...`,
  1419. )
  1420. // Use the task's locked protocol, NOT the current settings (fallback to xml if not set)
  1421. return formatResponse.toolError(
  1422. formatResponse.missingToolParameterError(paramName, this._taskToolProtocol ?? "xml"),
  1423. )
  1424. }
  1425. // Lifecycle
  1426. // Start / Resume / Abort / Dispose
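// Start a brand-new task: reset message state, show the initial user message, and
// kick off the task loop with the <task> prompt.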
  1427. private async startTask(task?: string, images?: string[]): Promise<void> {
  1428. if (this.enableBridge) {
  1429. try {
  1430. await BridgeOrchestrator.subscribeToTask(this)
  1431. } catch (error) {
  1432. console.error(
  1433. `[Task#startTask] BridgeOrchestrator.subscribeToTask() failed: ${error instanceof Error ? error.message : String(error)}`,
  1434. )
  1435. }
  1436. }
  1437. // `conversationHistory` (for API) and `clineMessages` (for webview)
  1438. // need to be in sync.
  1439. // If the extension process were killed, then on restart the
  1440. // `clineMessages` might not be empty, so we need to set it to [] when
  1441. // we create a new Cline client (otherwise webview would show stale
  1442. // messages from previous session).
  1443. this.clineMessages = []
  1444. this.apiConversationHistory = []
  1445. // The todo list is already set in the constructor if initialTodos were provided
  1446. // No need to add any messages - the todoList property is already set
  1447. await this.providerRef.deref()?.postStateToWebview()
  1448. await this.say("text", task, images)
  1449. this.isInitialized = true
  1450. let imageBlocks: Anthropic.ImageBlockParam[] = formatResponse.imageBlocks(images)
  1451. // Task starting
  1452. await this.initiateTaskLoop([
  1453. {
  1454. type: "text",
  1455. text: `<task>\n${task}\n</task>`,
  1456. },
  1457. ...imageBlocks,
  1458. ]).catch((error) => {
  1459. // Swallow loop rejection when the task was intentionally abandoned/aborted
  1460. // during delegation or user cancellation to prevent unhandled rejections.
  1461. if (this.abandoned === true || this.abortReason === "user_cancelled") {
  1462. return
  1463. }
  1464. throw error
  1465. })
  1466. }
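// Rebuild webview and API state from persisted history, ask the user whether to
// resume, patch up any dangling tool calls, and re-enter the task loop.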
  1467. private async resumeTaskFromHistory() {
  1468. if (this.enableBridge) {
  1469. try {
  1470. await BridgeOrchestrator.subscribeToTask(this)
  1471. } catch (error) {
  1472. console.error(
  1473. `[Task#resumeTaskFromHistory] BridgeOrchestrator.subscribeToTask() failed: ${error instanceof Error ? error.message : String(error)}`,
  1474. )
  1475. }
  1476. }
  1477. const modifiedClineMessages = await this.getSavedClineMessages()
  1478. // Remove any resume messages that may have been added before.
  1479. const lastRelevantMessageIndex = findLastIndex(
  1480. modifiedClineMessages,
  1481. (m) => !(m.ask === "resume_task" || m.ask === "resume_completed_task"),
  1482. )
  1483. if (lastRelevantMessageIndex !== -1) {
  1484. modifiedClineMessages.splice(lastRelevantMessageIndex + 1)
  1485. }
  1486. // Remove any trailing reasoning-only UI messages that were not part of the persisted API conversation
  1487. while (modifiedClineMessages.length > 0) {
  1488. const last = modifiedClineMessages[modifiedClineMessages.length - 1]
  1489. if (last.type === "say" && last.say === "reasoning") {
  1490. modifiedClineMessages.pop()
  1491. } else {
  1492. break
  1493. }
  1494. }
1495. // Since we don't use `api_req_finished` anymore, we need to check whether the
1496. // last `api_req_started` has a cost value; if it doesn't and there is no
1497. // cancellation reason to present, we remove it, since it indicates
1498. // an API request without any partial content streamed.
  1499. const lastApiReqStartedIndex = findLastIndex(
  1500. modifiedClineMessages,
  1501. (m) => m.type === "say" && m.say === "api_req_started",
  1502. )
  1503. if (lastApiReqStartedIndex !== -1) {
  1504. const lastApiReqStarted = modifiedClineMessages[lastApiReqStartedIndex]
  1505. const { cost, cancelReason }: ClineApiReqInfo = JSON.parse(lastApiReqStarted.text || "{}")
  1506. if (cost === undefined && cancelReason === undefined) {
  1507. modifiedClineMessages.splice(lastApiReqStartedIndex, 1)
  1508. }
  1509. }
  1510. await this.overwriteClineMessages(modifiedClineMessages)
  1511. this.clineMessages = await this.getSavedClineMessages()
  1512. // Now present the cline messages to the user and ask if they want to
  1513. // resume (NOTE: we ran into a bug before where the
1514. // apiConversationHistory wouldn't be initialized when opening an old
  1515. // task, and it was because we were waiting for resume).
  1516. // This is important in case the user deletes messages without resuming
  1517. // the task first.
  1518. this.apiConversationHistory = await this.getSavedApiConversationHistory()
  1519. // If we don't have a persisted tool protocol (old tasks before this feature),
  1520. // detect it from the API history. This ensures tasks that previously used
  1521. // XML tools will continue using XML even if NTC is now enabled.
  1522. if (!this._taskToolProtocol) {
  1523. const detectedProtocol = detectToolProtocolFromHistory(this.apiConversationHistory)
  1524. if (detectedProtocol) {
  1525. // Found tool calls in history - lock to that protocol
  1526. this._taskToolProtocol = detectedProtocol
  1527. } else {
  1528. // No tool calls in history yet - use current settings
  1529. const modelInfo = this.api.getModel().info
  1530. this._taskToolProtocol = resolveToolProtocol(this.apiConfiguration, modelInfo)
  1531. }
  1532. // Update parser state to match the detected/resolved protocol
  1533. const shouldUseXmlParser = this._taskToolProtocol === "xml"
  1534. if (shouldUseXmlParser && !this.assistantMessageParser) {
  1535. this.assistantMessageParser = new AssistantMessageParser()
  1536. } else if (!shouldUseXmlParser && this.assistantMessageParser) {
  1537. this.assistantMessageParser.reset()
  1538. this.assistantMessageParser = undefined
  1539. }
1540. }
  1542. const lastClineMessage = this.clineMessages
  1543. .slice()
  1544. .reverse()
  1545. .find((m) => !(m.ask === "resume_task" || m.ask === "resume_completed_task")) // Could be multiple resume tasks.
  1546. let askType: ClineAsk
  1547. if (lastClineMessage?.ask === "completion_result") {
  1548. askType = "resume_completed_task"
  1549. } else {
  1550. askType = "resume_task"
  1551. }
  1552. this.isInitialized = true
  1553. const { response, text, images } = await this.ask(askType) // Calls `postStateToWebview`.
  1554. let responseText: string | undefined
  1555. let responseImages: string[] | undefined
  1556. if (response === "messageResponse") {
  1557. await this.say("user_feedback", text, images)
  1558. responseText = text
  1559. responseImages = images
  1560. }
  1561. // Make sure that the api conversation history can be resumed by the API,
  1562. // even if it goes out of sync with cline messages.
  1563. let existingApiConversationHistory: ApiMessage[] = await this.getSavedApiConversationHistory()
  1564. // v2.0 xml tags refactor caveat: since we don't use tools anymore for XML protocol,
  1565. // we need to replace all tool use blocks with a text block since the API disallows
  1566. // conversations with tool uses and no tool schema.
  1567. // For native protocol, we preserve tool_use and tool_result blocks as they're expected by the API.
  1568. // IMPORTANT: Use the task's locked protocol, NOT the current settings!
  1569. const useNative = isNativeProtocol(this._taskToolProtocol)
  1570. // Only convert tool blocks to text for XML protocol
  1571. // For native protocol, the API expects proper tool_use/tool_result structure
  1572. if (!useNative) {
  1573. const conversationWithoutToolBlocks = existingApiConversationHistory.map((message) => {
  1574. if (Array.isArray(message.content)) {
  1575. const newContent = message.content.map((block) => {
  1576. if (block.type === "tool_use") {
  1577. // Format tool invocation based on the task's locked protocol
  1578. const params = block.input as Record<string, any>
  1579. const formattedText = formatToolInvocation(block.name, params, this._taskToolProtocol)
  1580. return {
  1581. type: "text",
  1582. text: formattedText,
  1583. } as Anthropic.Messages.TextBlockParam
  1584. } else if (block.type === "tool_result") {
  1585. // Convert block.content to text block array, removing images
  1586. const contentAsTextBlocks = Array.isArray(block.content)
  1587. ? block.content.filter((item) => item.type === "text")
  1588. : [{ type: "text", text: block.content }]
  1589. const textContent = contentAsTextBlocks.map((item) => item.text).join("\n\n")
  1590. const toolName = findToolName(block.tool_use_id, existingApiConversationHistory)
  1591. return {
  1592. type: "text",
  1593. text: `[${toolName} Result]\n\n${textContent}`,
  1594. } as Anthropic.Messages.TextBlockParam
  1595. }
  1596. return block
  1597. })
  1598. return { ...message, content: newContent }
  1599. }
  1600. return message
  1601. })
  1602. existingApiConversationHistory = conversationWithoutToolBlocks
  1603. }
  1604. // FIXME: remove tool use blocks altogether
  1605. // if the last message is an assistant message, we need to check if there's tool use since every tool use has to have a tool response
  1606. // if there's no tool use and only a text block, then we can just add a user message
  1607. // (note this isn't relevant anymore since we use custom tool prompts instead of tool use blocks, but this is here for legacy purposes in case users resume old tasks)
1608. // if the last message is a user message, we need to get the assistant message before it to see if it made tool calls, and if so, fill in the remaining tool responses with 'interrupted'
  1609. let modifiedOldUserContent: Anthropic.Messages.ContentBlockParam[] // either the last message if its user message, or the user message before the last (assistant) message
  1610. let modifiedApiConversationHistory: ApiMessage[] // need to remove the last user message to replace with new modified user message
  1611. if (existingApiConversationHistory.length > 0) {
  1612. const lastMessage = existingApiConversationHistory[existingApiConversationHistory.length - 1]
  1613. if (lastMessage.role === "assistant") {
  1614. const content = Array.isArray(lastMessage.content)
  1615. ? lastMessage.content
  1616. : [{ type: "text", text: lastMessage.content }]
  1617. const hasToolUse = content.some((block) => block.type === "tool_use")
  1618. if (hasToolUse) {
  1619. const toolUseBlocks = content.filter(
  1620. (block) => block.type === "tool_use",
  1621. ) as Anthropic.Messages.ToolUseBlock[]
  1622. const toolResponses: Anthropic.ToolResultBlockParam[] = toolUseBlocks.map((block) => ({
  1623. type: "tool_result",
  1624. tool_use_id: block.id,
  1625. content: "Task was interrupted before this tool call could be completed.",
  1626. }))
  1627. modifiedApiConversationHistory = [...existingApiConversationHistory] // no changes
  1628. modifiedOldUserContent = [...toolResponses]
  1629. } else {
  1630. modifiedApiConversationHistory = [...existingApiConversationHistory]
  1631. modifiedOldUserContent = []
  1632. }
  1633. } else if (lastMessage.role === "user") {
  1634. const previousAssistantMessage: ApiMessage | undefined =
  1635. existingApiConversationHistory[existingApiConversationHistory.length - 2]
  1636. const existingUserContent: Anthropic.Messages.ContentBlockParam[] = Array.isArray(lastMessage.content)
  1637. ? lastMessage.content
  1638. : [{ type: "text", text: lastMessage.content }]
  1639. if (previousAssistantMessage && previousAssistantMessage.role === "assistant") {
  1640. const assistantContent = Array.isArray(previousAssistantMessage.content)
  1641. ? previousAssistantMessage.content
  1642. : [{ type: "text", text: previousAssistantMessage.content }]
  1643. const toolUseBlocks = assistantContent.filter(
  1644. (block) => block.type === "tool_use",
  1645. ) as Anthropic.Messages.ToolUseBlock[]
  1646. if (toolUseBlocks.length > 0) {
  1647. const existingToolResults = existingUserContent.filter(
  1648. (block) => block.type === "tool_result",
  1649. ) as Anthropic.ToolResultBlockParam[]
  1650. const missingToolResponses: Anthropic.ToolResultBlockParam[] = toolUseBlocks
  1651. .filter(
  1652. (toolUse) => !existingToolResults.some((result) => result.tool_use_id === toolUse.id),
  1653. )
  1654. .map((toolUse) => ({
  1655. type: "tool_result",
  1656. tool_use_id: toolUse.id,
  1657. content: "Task was interrupted before this tool call could be completed.",
  1658. }))
  1659. modifiedApiConversationHistory = existingApiConversationHistory.slice(0, -1) // removes the last user message
  1660. modifiedOldUserContent = [...existingUserContent, ...missingToolResponses]
  1661. } else {
  1662. modifiedApiConversationHistory = existingApiConversationHistory.slice(0, -1)
  1663. modifiedOldUserContent = [...existingUserContent]
  1664. }
  1665. } else {
  1666. modifiedApiConversationHistory = existingApiConversationHistory.slice(0, -1)
  1667. modifiedOldUserContent = [...existingUserContent]
  1668. }
  1669. } else {
  1670. throw new Error("Unexpected: Last message is not a user or assistant message")
  1671. }
  1672. } else {
  1673. throw new Error("Unexpected: No existing API conversation history")
  1674. }
  1675. let newUserContent: Anthropic.Messages.ContentBlockParam[] = [...modifiedOldUserContent]
  1676. const agoText = ((): string => {
  1677. const timestamp = lastClineMessage?.ts ?? Date.now()
  1678. const now = Date.now()
  1679. const diff = now - timestamp
  1680. const minutes = Math.floor(diff / 60000)
  1681. const hours = Math.floor(minutes / 60)
  1682. const days = Math.floor(hours / 24)
  1683. if (days > 0) {
  1684. return `${days} day${days > 1 ? "s" : ""} ago`
  1685. }
  1686. if (hours > 0) {
  1687. return `${hours} hour${hours > 1 ? "s" : ""} ago`
  1688. }
  1689. if (minutes > 0) {
  1690. return `${minutes} minute${minutes > 1 ? "s" : ""} ago`
  1691. }
  1692. return "just now"
  1693. })()
  1694. if (responseText) {
  1695. newUserContent.push({
  1696. type: "text",
  1697. text: `\n\nNew instructions for task continuation:\n<user_message>\n${responseText}\n</user_message>`,
  1698. })
  1699. }
  1700. if (responseImages && responseImages.length > 0) {
  1701. newUserContent.push(...formatResponse.imageBlocks(responseImages))
  1702. }
  1703. // Ensure we have at least some content to send to the API.
  1704. // If newUserContent is empty, add a minimal resumption message.
  1705. if (newUserContent.length === 0) {
  1706. newUserContent.push({
  1707. type: "text",
  1708. text: "[TASK RESUMPTION] Resuming task...",
  1709. })
  1710. }
  1711. await this.overwriteApiConversationHistory(modifiedApiConversationHistory)
  1712. // Task resuming from history item.
  1713. await this.initiateTaskLoop(newUserContent)
  1714. }
  1715. /**
  1716. * Cancels the current HTTP request if one is in progress.
  1717. * This immediately aborts the underlying stream rather than waiting for the next chunk.
  1718. */
  1719. public cancelCurrentRequest(): void {
  1720. if (this.currentRequestAbortController) {
  1721. console.log(`[Task#${this.taskId}.${this.instanceId}] Aborting current HTTP request`)
  1722. this.currentRequestAbortController.abort()
  1723. this.currentRequestAbortController = undefined
  1724. }
  1725. }
  1726. /**
  1727. * Force emit a final token usage update, ignoring throttle.
  1728. * Called before task completion or abort to ensure final stats are captured.
  1729. * Triggers the debounce with current values and immediately flushes to ensure emit.
  1730. */
  1731. public emitFinalTokenUsageUpdate(): void {
  1732. const tokenUsage = this.getTokenUsage()
  1733. this.debouncedEmitTokenUsage(tokenUsage, this.toolUsage)
  1734. this.debouncedEmitTokenUsage.flush()
  1735. }
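// Abort the task: stop any autonomously running promises, emit final usage stats,
// and dispose of held resources.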
  1736. public async abortTask(isAbandoned = false) {
  1737. // Aborting task
  1738. // Will stop any autonomously running promises.
  1739. if (isAbandoned) {
  1740. this.abandoned = true
  1741. }
  1742. this.abort = true
  1743. // Reset consecutive error counters on abort (manual intervention)
  1744. this.consecutiveNoToolUseCount = 0
  1745. // Force final token usage update before abort event
  1746. this.emitFinalTokenUsageUpdate()
  1747. this.emit(RooCodeEventName.TaskAborted)
  1748. try {
  1749. this.dispose() // Call the centralized dispose method
  1750. } catch (error) {
  1751. console.error(`Error during task ${this.taskId}.${this.instanceId} disposal:`, error)
  1752. // Don't rethrow - we want abort to always succeed
  1753. }
1754. // Persist cline messages so content such as the automatic-retry countdown message is saved.
1755. try {
  1757. await this.saveClineMessages()
  1758. } catch (error) {
  1759. console.error(`Error saving messages during abort for task ${this.taskId}.${this.instanceId}:`, error)
  1760. }
  1761. }
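// Centralized cleanup. Each step is wrapped in its own try/catch so that a failure
// in one teardown step doesn't prevent the others from running.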
  1762. public dispose(): void {
  1763. console.log(`[Task#dispose] disposing task ${this.taskId}.${this.instanceId}`)
  1764. // Cancel any in-progress HTTP request
  1765. try {
  1766. this.cancelCurrentRequest()
  1767. } catch (error) {
  1768. console.error("Error cancelling current request:", error)
  1769. }
  1770. // Remove provider profile change listener
  1771. try {
  1772. if (this.providerProfileChangeListener) {
  1773. const provider = this.providerRef.deref()
  1774. if (provider) {
  1775. provider.off(RooCodeEventName.ProviderProfileChanged, this.providerProfileChangeListener)
  1776. }
  1777. this.providerProfileChangeListener = undefined
  1778. }
  1779. } catch (error) {
  1780. console.error("Error removing provider profile change listener:", error)
  1781. }
  1782. // Dispose message queue and remove event listeners.
  1783. try {
  1784. if (this.messageQueueStateChangedHandler) {
  1785. this.messageQueueService.removeListener("stateChanged", this.messageQueueStateChangedHandler)
  1786. this.messageQueueStateChangedHandler = undefined
  1787. }
  1788. this.messageQueueService.dispose()
  1789. } catch (error) {
  1790. console.error("Error disposing message queue:", error)
  1791. }
  1792. // Remove all event listeners to prevent memory leaks.
  1793. try {
  1794. this.removeAllListeners()
  1795. } catch (error) {
  1796. console.error("Error removing event listeners:", error)
  1797. }
  1798. if (this.enableBridge) {
  1799. BridgeOrchestrator.getInstance()
  1800. ?.unsubscribeFromTask(this.taskId)
  1801. .catch((error) =>
  1802. console.error(
  1803. `[Task#dispose] BridgeOrchestrator#unsubscribeFromTask() failed: ${error instanceof Error ? error.message : String(error)}`,
  1804. ),
  1805. )
  1806. }
1807. // Release any terminals associated with this task.
1808. try {
  1810. TerminalRegistry.releaseTerminalsForTask(this.taskId)
  1811. } catch (error) {
  1812. console.error("Error releasing terminals:", error)
  1813. }
  1814. try {
  1815. this.urlContentFetcher.closeBrowser()
  1816. } catch (error) {
  1817. console.error("Error closing URL content fetcher browser:", error)
  1818. }
  1819. try {
  1820. this.browserSession.closeBrowser()
  1821. } catch (error) {
  1822. console.error("Error closing browser session:", error)
  1823. }
  1824. // Also close the Browser Session panel when the task is disposed
  1825. try {
  1826. const provider = this.providerRef.deref()
  1827. if (provider) {
  1828. const { BrowserSessionPanelManager } = require("../webview/BrowserSessionPanelManager")
  1829. BrowserSessionPanelManager.getInstance(provider).dispose()
  1830. }
  1831. } catch (error) {
  1832. console.error("Error closing browser session panel:", error)
  1833. }
  1834. try {
  1835. if (this.rooIgnoreController) {
  1836. this.rooIgnoreController.dispose()
  1837. this.rooIgnoreController = undefined
  1838. }
  1839. } catch (error) {
  1840. console.error("Error disposing RooIgnoreController:", error)
  1841. // This is the critical one for the leak fix.
  1842. }
  1843. try {
  1844. this.fileContextTracker.dispose()
  1845. } catch (error) {
  1846. console.error("Error disposing file context tracker:", error)
  1847. }
  1848. try {
  1849. // If we're not streaming then `abortStream` won't be called.
  1850. if (this.isStreaming && this.diffViewProvider.isEditing) {
  1851. this.diffViewProvider.revertChanges().catch(console.error)
  1852. }
  1853. } catch (error) {
  1854. console.error("Error reverting diff changes:", error)
  1855. }
  1856. }
  1857. // Subtasks
  1858. // Spawn / Wait / Complete
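// Delegate to a child task via the provider and return the newly created child task.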
  1859. public async startSubtask(message: string, initialTodos: TodoItem[], mode: string) {
  1860. const provider = this.providerRef.deref()
  1861. if (!provider) {
  1862. throw new Error("Provider not available")
  1863. }
  1864. const child = await (provider as any).delegateParentAndOpenChild({
  1865. parentTaskId: this.taskId,
  1866. message,
  1867. initialTodos,
  1868. mode,
  1869. })
  1870. return child
  1871. }
  1872. /**
  1873. * Resume parent task after delegation completion without showing resume ask.
  1874. * Used in metadata-driven subtask flow.
  1875. *
  1876. * This method:
  1877. * - Clears any pending ask states
  1878. * - Resets abort and streaming flags
  1879. * - Ensures next API call includes full context
  1880. * - Immediately continues task loop without user interaction
  1881. */
  1882. public async resumeAfterDelegation(): Promise<void> {
  1883. // Clear any ask states that might have been set during history load
  1884. this.idleAsk = undefined
  1885. this.resumableAsk = undefined
  1886. this.interactiveAsk = undefined
  1887. // Reset abort and streaming state to ensure clean continuation
  1888. this.abort = false
  1889. this.abandoned = false
  1890. this.abortReason = undefined
  1891. this.didFinishAbortingStream = false
  1892. this.isStreaming = false
  1893. this.isWaitingForFirstChunk = false
  1894. // Ensure next API call includes full context after delegation
  1895. this.skipPrevResponseIdOnce = true
  1896. // Mark as initialized and active
  1897. this.isInitialized = true
  1898. this.emit(RooCodeEventName.TaskActive, this.taskId)
  1899. // Load conversation history if not already loaded
  1900. if (this.apiConversationHistory.length === 0) {
  1901. this.apiConversationHistory = await this.getSavedApiConversationHistory()
  1902. }
  1903. // Add environment details to the existing last user message (which contains the tool_result)
  1904. // This avoids creating a new user message which would cause consecutive user messages
  1905. const environmentDetails = await getEnvironmentDetails(this, true)
  1906. let lastUserMsgIndex = -1
  1907. for (let i = this.apiConversationHistory.length - 1; i >= 0; i--) {
  1908. if (this.apiConversationHistory[i].role === "user") {
  1909. lastUserMsgIndex = i
  1910. break
  1911. }
  1912. }
  1913. if (lastUserMsgIndex >= 0) {
  1914. const lastUserMsg = this.apiConversationHistory[lastUserMsgIndex]
  1915. if (Array.isArray(lastUserMsg.content)) {
  1916. // Remove any existing environment_details blocks before adding fresh ones
  1917. const contentWithoutEnvDetails = lastUserMsg.content.filter(
  1918. (block: Anthropic.Messages.ContentBlockParam) => {
  1919. if (block.type === "text" && typeof block.text === "string") {
  1920. const isEnvironmentDetailsBlock =
  1921. block.text.trim().startsWith("<environment_details>") &&
  1922. block.text.trim().endsWith("</environment_details>")
  1923. return !isEnvironmentDetailsBlock
  1924. }
  1925. return true
  1926. },
  1927. )
  1928. // Add fresh environment details
  1929. lastUserMsg.content = [...contentWithoutEnvDetails, { type: "text" as const, text: environmentDetails }]
  1930. }
  1931. }
  1932. // Save the updated history
  1933. await this.saveApiConversationHistory()
  1934. // Continue task loop - pass empty array to signal no new user content needed
  1935. // The initiateTaskLoop will handle this by skipping user message addition
  1936. await this.initiateTaskLoop([])
  1937. }
  1938. // Task Loop
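// Drive the agentic loop: keep making requests until the loop ends or the task is
// aborted, prompting the model again whenever it responds without using any tools.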
  1939. private async initiateTaskLoop(userContent: Anthropic.Messages.ContentBlockParam[]): Promise<void> {
  1940. // Kicks off the checkpoints initialization process in the background.
  1941. getCheckpointService(this)
  1942. let nextUserContent = userContent
  1943. let includeFileDetails = true
  1944. this.emit(RooCodeEventName.TaskStarted)
  1945. while (!this.abort) {
  1946. const didEndLoop = await this.recursivelyMakeClineRequests(nextUserContent, includeFileDetails)
  1947. includeFileDetails = false // We only need file details the first time.
1948. // The way this agentic loop works is that Cline is given a
1949. // task that he then completes by calling tools. Unless there's an
1950. // attempt_completion call, we keep responding back to him with his
1951. // tools' responses until he either calls attempt_completion or stops
1952. // using tools. If he stops using tools, we ask him to consider
1953. // whether he's completed the task and, if so, to call
1954. // attempt_completion; otherwise he proceeds with completing the task.
1955. // There is a MAX_REQUESTS_PER_TASK limit to prevent infinite
1956. // requests, but Cline is prompted to finish the task as efficiently
1957. // as he can.
  1958. if (didEndLoop) {
  1959. // For now a task never 'completes'. This will only happen if
  1960. // the user hits max requests and denies resetting the count.
  1961. break
  1962. } else {
  1963. // Use the task's locked protocol, NOT the current settings (fallback to xml if not set)
  1964. nextUserContent = [{ type: "text", text: formatResponse.noToolsUsed(this._taskToolProtocol ?? "xml") }]
  1965. }
  1966. }
  1967. }
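// Core request loop. Despite the name, it is implemented iteratively with an
// explicit stack rather than actual recursion.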
  1968. public async recursivelyMakeClineRequests(
  1969. userContent: Anthropic.Messages.ContentBlockParam[],
  1970. includeFileDetails: boolean = false,
  1971. ): Promise<boolean> {
  1972. interface StackItem {
  1973. userContent: Anthropic.Messages.ContentBlockParam[]
  1974. includeFileDetails: boolean
  1975. retryAttempt?: number
  1976. userMessageWasRemoved?: boolean // Track if user message was removed due to empty response
  1977. }
  1978. const stack: StackItem[] = [{ userContent, includeFileDetails, retryAttempt: 0 }]
  1979. while (stack.length > 0) {
  1980. const currentItem = stack.pop()!
  1981. const currentUserContent = currentItem.userContent
  1982. const currentIncludeFileDetails = currentItem.includeFileDetails
  1983. if (this.abort) {
  1984. throw new Error(`[RooCode#recursivelyMakeRooRequests] task ${this.taskId}.${this.instanceId} aborted`)
  1985. }
  1986. if (this.consecutiveMistakeLimit > 0 && this.consecutiveMistakeCount >= this.consecutiveMistakeLimit) {
  1987. // Track consecutive mistake errors in telemetry via event and PostHog exception tracking.
  1988. // The reason is "no_tools_used" because this limit is reached via initiateTaskLoop
  1989. // which increments consecutiveMistakeCount when the model doesn't use any tools.
  1990. TelemetryService.instance.captureConsecutiveMistakeError(this.taskId)
  1991. TelemetryService.instance.captureException(
  1992. new ConsecutiveMistakeError(
  1993. `Task reached consecutive mistake limit (${this.consecutiveMistakeLimit})`,
  1994. this.taskId,
  1995. this.consecutiveMistakeCount,
  1996. this.consecutiveMistakeLimit,
  1997. "no_tools_used",
  1998. this.apiConfiguration.apiProvider,
  1999. getModelId(this.apiConfiguration),
  2000. ),
  2001. )
  2002. const { response, text, images } = await this.ask(
  2003. "mistake_limit_reached",
  2004. t("common:errors.mistake_limit_guidance"),
  2005. )
  2006. if (response === "messageResponse") {
  2007. currentUserContent.push(
  2008. ...[
  2009. { type: "text" as const, text: formatResponse.tooManyMistakes(text) },
  2010. ...formatResponse.imageBlocks(images),
  2011. ],
  2012. )
  2013. await this.say("user_feedback", text, images)
  2014. }
  2015. this.consecutiveMistakeCount = 0
  2016. }
2017. // Getting verbose details is an expensive operation: it uses ripgrep to
2018. // build a top-down file structure of the project, which for large projects
2019. // can take a few seconds. For the best UX we show a placeholder
2020. // api_req_started message with a loading spinner while this happens.
  2021. // Determine API protocol based on provider and model
  2022. const modelId = getModelId(this.apiConfiguration)
  2023. const apiProtocol = getApiProtocol(this.apiConfiguration.apiProvider, modelId)
  2024. await this.say(
  2025. "api_req_started",
  2026. JSON.stringify({
  2027. apiProtocol,
  2028. }),
  2029. )
  2030. const {
  2031. showRooIgnoredFiles = false,
  2032. includeDiagnosticMessages = true,
  2033. maxDiagnosticMessages = 50,
  2034. maxReadFileLine = -1,
  2035. } = (await this.providerRef.deref()?.getState()) ?? {}
  2036. const parsedUserContent = await processUserContentMentions({
  2037. userContent: currentUserContent,
  2038. cwd: this.cwd,
  2039. urlContentFetcher: this.urlContentFetcher,
  2040. fileContextTracker: this.fileContextTracker,
  2041. rooIgnoreController: this.rooIgnoreController,
  2042. showRooIgnoredFiles,
  2043. includeDiagnosticMessages,
  2044. maxDiagnosticMessages,
  2045. maxReadFileLine,
  2046. })
  2047. const environmentDetails = await getEnvironmentDetails(this, currentIncludeFileDetails)
  2048. // Remove any existing environment_details blocks before adding fresh ones.
  2049. // This prevents duplicate environment details when resuming tasks with XML tool calls,
  2050. // where the old user message content may already contain environment details from the previous session.
  2051. // We check for both opening and closing tags to ensure we're matching complete environment detail blocks,
  2052. // not just mentions of the tag in regular content.
  2053. const contentWithoutEnvDetails = parsedUserContent.filter((block) => {
  2054. if (block.type === "text" && typeof block.text === "string") {
  2055. // Check if this text block is a complete environment_details block
  2056. // by verifying it starts with the opening tag and ends with the closing tag
  2057. const isEnvironmentDetailsBlock =
  2058. block.text.trim().startsWith("<environment_details>") &&
  2059. block.text.trim().endsWith("</environment_details>")
  2060. return !isEnvironmentDetailsBlock
  2061. }
  2062. return true
  2063. })
  2064. // Add environment details as its own text block, separate from tool
  2065. // results.
  2066. let finalUserContent = [...contentWithoutEnvDetails, { type: "text" as const, text: environmentDetails }]
  2067. // Only add user message to conversation history if:
  2068. // 1. This is the first attempt (retryAttempt === 0), AND
  2069. // 2. The original userContent was not empty (empty signals delegation resume where
  2070. // the user message with tool_result and env details is already in history), OR
  2071. // 3. The message was removed in a previous iteration (userMessageWasRemoved === true)
  2072. // This prevents consecutive user messages while allowing re-add when needed
  2073. const isEmptyUserContent = currentUserContent.length === 0
  2074. const shouldAddUserMessage =
  2075. ((currentItem.retryAttempt ?? 0) === 0 && !isEmptyUserContent) || currentItem.userMessageWasRemoved
  2076. if (shouldAddUserMessage) {
  2077. await this.addToApiConversationHistory({ role: "user", content: finalUserContent })
  2078. TelemetryService.instance.captureConversationMessage(this.taskId, "user")
  2079. }
  2080. // Since we sent off a placeholder api_req_started message to update the
  2081. // webview while waiting to actually start the API request (to load
  2082. // potential details for example), we need to update the text of that
  2083. // message.
  2084. const lastApiReqIndex = findLastIndex(this.clineMessages, (m) => m.say === "api_req_started")
  2085. this.clineMessages[lastApiReqIndex].text = JSON.stringify({
  2086. apiProtocol,
  2087. } satisfies ClineApiReqInfo)
  2088. await this.saveClineMessages()
  2089. await this.providerRef.deref()?.postStateToWebview()
  2090. try {
  2091. let cacheWriteTokens = 0
  2092. let cacheReadTokens = 0
  2093. let inputTokens = 0
  2094. let outputTokens = 0
  2095. let totalCost: number | undefined
  2096. // We can't use `api_req_finished` anymore since it's a unique case
  2097. // where it could come after a streaming message (i.e. in the middle
  2098. // of being updated or executed).
  2099. // Fortunately `api_req_finished` was always parsed out for the GUI
2100. // anyway, so it remains solely for legacy purposes to keep track
  2101. // of prices in tasks from history (it's worth removing a few months
  2102. // from now).
  2103. const updateApiReqMsg = (cancelReason?: ClineApiReqCancelReason, streamingFailedMessage?: string) => {
  2104. if (lastApiReqIndex < 0 || !this.clineMessages[lastApiReqIndex]) {
  2105. return
  2106. }
  2107. const existingData = JSON.parse(this.clineMessages[lastApiReqIndex].text || "{}")
  2108. // Calculate total tokens and cost using provider-aware function
  2109. const modelId = getModelId(this.apiConfiguration)
  2110. const apiProtocol = getApiProtocol(this.apiConfiguration.apiProvider, modelId)
  2111. const costResult =
  2112. apiProtocol === "anthropic"
  2113. ? calculateApiCostAnthropic(
  2114. streamModelInfo,
  2115. inputTokens,
  2116. outputTokens,
  2117. cacheWriteTokens,
  2118. cacheReadTokens,
  2119. )
  2120. : calculateApiCostOpenAI(
  2121. streamModelInfo,
  2122. inputTokens,
  2123. outputTokens,
  2124. cacheWriteTokens,
  2125. cacheReadTokens,
  2126. )
  2127. this.clineMessages[lastApiReqIndex].text = JSON.stringify({
  2128. ...existingData,
  2129. tokensIn: costResult.totalInputTokens,
  2130. tokensOut: costResult.totalOutputTokens,
  2131. cacheWrites: cacheWriteTokens,
  2132. cacheReads: cacheReadTokens,
  2133. cost: totalCost ?? costResult.totalCost,
  2134. cancelReason,
  2135. streamingFailedMessage,
  2136. } satisfies ClineApiReqInfo)
  2137. }
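  // Gracefully winds down an interrupted stream: reverts any in-progress diff edit, finalizes a
  // dangling partial message, records the cancellation reason and partial cost on `api_req_started`,
  // and flags didFinishAbortingStream so the provider knows it can reload messages from disk.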
  2138. const abortStream = async (cancelReason: ClineApiReqCancelReason, streamingFailedMessage?: string) => {
  2139. if (this.diffViewProvider.isEditing) {
  2140. await this.diffViewProvider.revertChanges() // closes diff view
  2141. }
  2142. // if last message is a partial we need to update and save it
  2143. const lastMessage = this.clineMessages.at(-1)
  2144. if (lastMessage && lastMessage.partial) {
  2145. // lastMessage.ts = Date.now() DO NOT update ts since it is used as a key for virtuoso list
  2146. lastMessage.partial = false
  2147. // instead of streaming partialMessage events, we do a save and post like normal to persist to disk
  2148. console.log("updating partial message", lastMessage)
  2149. }
  2150. // Update `api_req_started` with the cancellation reason and cost, so that
  2151. // we can display the cost of the partial stream and why it was cancelled.
  2152. updateApiReqMsg(cancelReason, streamingFailedMessage)
  2153. await this.saveClineMessages()
  2154. // Signals to the provider that it can retrieve the saved messages
  2155. // from disk, since abortTask cannot be awaited.
  2156. this.didFinishAbortingStream = true
  2157. }
  2158. // Reset streaming state for each new API request
  2159. this.currentStreamingContentIndex = 0
  2160. this.currentStreamingDidCheckpoint = false
  2161. this.assistantMessageContent = []
  2162. this.didCompleteReadingStream = false
  2163. this.userMessageContent = []
  2164. this.userMessageContentReady = false
  2165. this.didRejectTool = false
  2166. this.didAlreadyUseTool = false
  2167. // Reset tool failure flag for each new assistant turn - this ensures that tool failures
  2168. // only prevent attempt_completion within the same assistant message, not across turns
  2169. // (e.g., if a tool fails, then user sends a message saying "just complete anyway")
  2170. this.didToolFailInCurrentTurn = false
  2171. this.presentAssistantMessageLocked = false
  2172. this.presentAssistantMessageHasPendingUpdates = false
  2173. this.assistantMessageParser?.reset()
  2174. this.streamingToolCallIndices.clear()
  2175. // Clear any leftover streaming tool call state from previous interrupted streams
  2176. NativeToolCallParser.clearAllStreamingToolCalls()
  2177. NativeToolCallParser.clearRawChunkState()
  2178. await this.diffViewProvider.reset()
  2179. // Cache model info once per API request to avoid repeated calls during streaming
  2180. // This is especially important for tools and background usage collection
  2181. this.cachedStreamingModel = this.api.getModel()
  2182. const streamModelInfo = this.cachedStreamingModel.info
  2183. const cachedModelId = this.cachedStreamingModel.id
  2184. // Use the task's locked protocol instead of resolving fresh.
  2185. // This ensures task resumption works correctly even if NTC settings changed.
  2186. // Fall back to resolving only if _taskToolProtocol is somehow not set (should not happen).
  2187. const streamProtocol = resolveToolProtocol(
  2188. this.apiConfiguration,
  2189. streamModelInfo,
  2190. this._taskToolProtocol,
  2191. )
  2192. const shouldUseXmlParser = streamProtocol === "xml"
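  // With the XML protocol, tool calls arrive embedded in the text stream and are extracted by
  // assistantMessageParser in the "text" case below; with the native protocol they arrive as
  // discrete tool_call / tool_call_partial chunks handled in their own cases.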
  2193. // Yields only if the first chunk is successful; otherwise it will
  2194. // allow the user to retry the request (most likely due to a rate
  2195. // limit error, which gets thrown on the first chunk).
  2196. const stream = this.attemptApiRequest()
  2197. let assistantMessage = ""
  2198. let reasoningMessage = ""
  2199. let pendingGroundingSources: GroundingSource[] = []
  2200. this.isStreaming = true
  2201. try {
  2202. const iterator = stream[Symbol.asyncIterator]()
  2203. // Helper to race iterator.next() with abort signal
  2204. const nextChunkWithAbort = async () => {
  2205. const nextPromise = iterator.next()
  2206. // If we have an abort controller, race it with the next chunk
  2207. if (this.currentRequestAbortController) {
  2208. const abortPromise = new Promise<never>((_, reject) => {
  2209. const signal = this.currentRequestAbortController!.signal
  2210. if (signal.aborted) {
  2211. reject(new Error("Request cancelled by user"))
  2212. } else {
  2213. signal.addEventListener("abort", () => {
  2214. reject(new Error("Request cancelled by user"))
  2215. })
  2216. }
  2217. })
  2218. return await Promise.race([nextPromise, abortPromise])
  2219. }
  2220. // No abort controller, just return the next chunk normally
  2221. return await nextPromise
  2222. }
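  // Read one chunk ahead: `item` always holds the next result, so if the main loop exits early
  // (abort, tool rejection, tool already used) the same iterator can be handed to
  // drainStreamInBackgroundToFindAllUsage below to keep collecting trailing usage chunks.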
  2223. let item = await nextChunkWithAbort()
  2224. while (!item.done) {
  2225. const chunk = item.value
  2226. item = await nextChunkWithAbort()
  2227. if (!chunk) {
  2228. // Sometimes the chunk is undefined; it's unclear what causes
  2229. // this, but the workaround seems to fix it.
  2230. continue
  2231. }
  2232. switch (chunk.type) {
  2233. case "reasoning": {
  2234. reasoningMessage += chunk.text
  2235. // Only apply formatting if the message contains sentence-ending punctuation followed by **
  2236. let formattedReasoning = reasoningMessage
  2237. if (reasoningMessage.includes("**")) {
  2238. // Add line breaks before **Title** patterns that appear after sentence endings
  2239. // This targets section headers like "...end of sentence.**Title Here**"
  2240. // Handles periods, exclamation marks, and question marks
  2241. formattedReasoning = reasoningMessage.replace(
  2242. /([.!?])\*\*([^*\n]+)\*\*/g,
  2243. "$1\n\n**$2**",
  2244. )
  2245. }
  2246. await this.say("reasoning", formattedReasoning, undefined, true)
  2247. break
  2248. }
  2249. case "usage":
  2250. inputTokens += chunk.inputTokens
  2251. outputTokens += chunk.outputTokens
  2252. cacheWriteTokens += chunk.cacheWriteTokens ?? 0
  2253. cacheReadTokens += chunk.cacheReadTokens ?? 0
  2254. totalCost = chunk.totalCost
  2255. break
  2256. case "grounding":
  2257. // Handle grounding sources separately from regular content to prevent
  2258. // state persistence issues; they are accumulated and displayed after the stream completes.
  2259. if (chunk.sources && chunk.sources.length > 0) {
  2260. pendingGroundingSources.push(...chunk.sources)
  2261. }
  2262. break
  2263. case "tool_call_partial": {
  2264. // Process raw tool call chunk through NativeToolCallParser
  2265. // which handles tracking, buffering, and emits events
  2266. const events = NativeToolCallParser.processRawChunk({
  2267. index: chunk.index,
  2268. id: chunk.id,
  2269. name: chunk.name,
  2270. arguments: chunk.arguments,
  2271. })
  2272. for (const event of events) {
  2273. if (event.type === "tool_call_start") {
  2274. // Initialize streaming in NativeToolCallParser
  2275. NativeToolCallParser.startStreamingToolCall(event.id, event.name as ToolName)
  2276. // Before adding a new tool, finalize any preceding text block.
  2277. // This prevents the text block from blocking tool presentation.
  2278. const lastBlock =
  2279. this.assistantMessageContent[this.assistantMessageContent.length - 1]
  2280. if (lastBlock?.type === "text" && lastBlock.partial) {
  2281. lastBlock.partial = false
  2282. }
  2283. // Track the index where this tool will be stored
  2284. const toolUseIndex = this.assistantMessageContent.length
  2285. this.streamingToolCallIndices.set(event.id, toolUseIndex)
  2286. // Create initial partial tool use
  2287. const partialToolUse: ToolUse = {
  2288. type: "tool_use",
  2289. name: event.name as ToolName,
  2290. params: {},
  2291. partial: true,
  2292. }
  2293. // Store the ID for native protocol
  2294. ;(partialToolUse as any).id = event.id
  2295. // Add to content and present
  2296. this.assistantMessageContent.push(partialToolUse)
  2297. this.userMessageContentReady = false
  2298. presentAssistantMessage(this)
  2299. } else if (event.type === "tool_call_delta") {
  2300. // Process chunk using streaming JSON parser
  2301. const partialToolUse = NativeToolCallParser.processStreamingChunk(
  2302. event.id,
  2303. event.delta,
  2304. )
  2305. if (partialToolUse) {
  2306. // Get the index for this tool call
  2307. const toolUseIndex = this.streamingToolCallIndices.get(event.id)
  2308. if (toolUseIndex !== undefined) {
  2309. // Store the ID for native protocol
  2310. ;(partialToolUse as any).id = event.id
  2311. // Update the existing tool use with new partial data
  2312. this.assistantMessageContent[toolUseIndex] = partialToolUse
  2313. // Present updated tool use
  2314. presentAssistantMessage(this)
  2315. }
  2316. }
  2317. } else if (event.type === "tool_call_end") {
  2318. // Finalize the streaming tool call
  2319. const finalToolUse = NativeToolCallParser.finalizeStreamingToolCall(event.id)
  2320. // Get the index for this tool call
  2321. const toolUseIndex = this.streamingToolCallIndices.get(event.id)
  2322. if (finalToolUse) {
  2323. // Store the tool call ID
  2324. ;(finalToolUse as any).id = event.id
  2325. // Get the index and replace partial with final
  2326. if (toolUseIndex !== undefined) {
  2327. this.assistantMessageContent[toolUseIndex] = finalToolUse
  2328. }
  2329. // Clean up tracking
  2330. this.streamingToolCallIndices.delete(event.id)
  2331. // Mark that we have new content to process
  2332. this.userMessageContentReady = false
  2333. // Present the finalized tool call
  2334. presentAssistantMessage(this)
  2335. } else if (toolUseIndex !== undefined) {
  2336. // finalizeStreamingToolCall returned null (malformed JSON or missing args)
  2337. // We still need to mark the tool as non-partial so it gets executed
  2338. // The tool's validation will catch any missing required parameters
  2339. const existingToolUse = this.assistantMessageContent[toolUseIndex]
  2340. if (existingToolUse && existingToolUse.type === "tool_use") {
  2341. existingToolUse.partial = false
  2342. // Ensure it has the ID for native protocol
  2343. ;(existingToolUse as any).id = event.id
  2344. }
  2345. // Clean up tracking
  2346. this.streamingToolCallIndices.delete(event.id)
  2347. // Mark that we have new content to process
  2348. this.userMessageContentReady = false
  2349. // Present the tool call - validation will handle missing params
  2350. presentAssistantMessage(this)
  2351. }
  2352. }
  2353. }
  2354. break
  2355. }
  2356. case "tool_call": {
  2357. // Legacy: Handle complete tool calls (for backward compatibility)
  2358. // Convert native tool call to ToolUse format
  2359. const toolUse = NativeToolCallParser.parseToolCall({
  2360. id: chunk.id,
  2361. name: chunk.name as ToolName,
  2362. arguments: chunk.arguments,
  2363. })
  2364. if (!toolUse) {
  2365. console.error(`Failed to parse tool call for task ${this.taskId}:`, chunk)
  2366. break
  2367. }
  2368. // Store the tool call ID on the ToolUse object for later reference
  2369. // This is needed to create tool_result blocks that reference the correct tool_use_id
  2370. toolUse.id = chunk.id
  2371. // Add the tool use to assistant message content
  2372. this.assistantMessageContent.push(toolUse)
  2373. // Mark that we have new content to process
  2374. this.userMessageContentReady = false
  2375. // Present the tool call to user - presentAssistantMessage will execute
  2376. // tools sequentially and accumulate all results in userMessageContent
  2377. presentAssistantMessage(this)
  2378. break
  2379. }
  2380. case "text": {
  2381. assistantMessage += chunk.text
  2382. // Use the protocol determined at the start of streaming
  2383. // Don't rely solely on parser existence - parser might exist from previous state
  2384. if (shouldUseXmlParser && this.assistantMessageParser) {
  2385. // XML protocol: Parse raw assistant message chunk into content blocks
  2386. const prevLength = this.assistantMessageContent.length
  2387. this.assistantMessageContent = this.assistantMessageParser.processChunk(chunk.text)
  2388. if (this.assistantMessageContent.length > prevLength) {
  2389. // New content that we need to present; reset to false in
  2390. // case previous content set this to true.
  2391. this.userMessageContentReady = false
  2392. }
  2393. // Present content to user.
  2394. presentAssistantMessage(this)
  2395. } else {
  2396. // Native protocol: Text chunks are plain text, not XML tool calls
  2397. // Create or update a text content block directly
  2398. const lastBlock =
  2399. this.assistantMessageContent[this.assistantMessageContent.length - 1]
  2400. if (lastBlock?.type === "text" && lastBlock.partial) {
  2401. // Update existing partial text block
  2402. lastBlock.content = assistantMessage
  2403. } else {
  2404. // Create new text block
  2405. this.assistantMessageContent.push({
  2406. type: "text",
  2407. content: assistantMessage,
  2408. partial: true,
  2409. })
  2410. this.userMessageContentReady = false
  2411. }
  2412. // Present content to user
  2413. presentAssistantMessage(this)
  2414. }
  2415. break
  2416. }
  2417. }
  2418. if (this.abort) {
  2419. console.log(`aborting stream, this.abandoned = ${this.abandoned}`)
  2420. if (!this.abandoned) {
  2421. // Only need to gracefully abort if this instance
  2422. // isn't abandoned (sometimes OpenRouter stream
  2423. // hangs, in which case this would affect future
  2424. // instances of Cline).
  2425. await abortStream("user_cancelled")
  2426. }
  2427. break // Aborts the stream.
  2428. }
  2429. if (this.didRejectTool) {
  2430. // `userContent` has a tool rejection, so interrupt the
  2431. // assistant's response to present the user's feedback.
  2432. assistantMessage += "\n\n[Response interrupted by user feedback]"
  2433. // Instead of setting this preemptively, we allow the
  2434. // present iterator to finish and set
  2435. // userMessageContentReady when it's ready.
  2436. // this.userMessageContentReady = true
  2437. break
  2438. }
  2439. if (this.didAlreadyUseTool) {
  2440. assistantMessage +=
  2441. "\n\n[Response interrupted by a tool use result. Only one tool may be used at a time and should be placed at the end of the message.]"
  2442. break
  2443. }
  2444. }
  2445. // Finalize any remaining streaming tool calls that weren't explicitly ended
  2446. // This is critical for MCP tools which need tool_call_end events to be properly
  2447. // converted from ToolUse to McpToolUse via finalizeStreamingToolCall()
  2448. const finalizeEvents = NativeToolCallParser.finalizeRawChunks()
  2449. for (const event of finalizeEvents) {
  2450. if (event.type === "tool_call_end") {
  2451. // Finalize the streaming tool call
  2452. const finalToolUse = NativeToolCallParser.finalizeStreamingToolCall(event.id)
  2453. // Get the index for this tool call
  2454. const toolUseIndex = this.streamingToolCallIndices.get(event.id)
  2455. if (finalToolUse) {
  2456. // Store the tool call ID
  2457. ;(finalToolUse as any).id = event.id
  2458. // Get the index and replace partial with final
  2459. if (toolUseIndex !== undefined) {
  2460. this.assistantMessageContent[toolUseIndex] = finalToolUse
  2461. }
  2462. // Clean up tracking
  2463. this.streamingToolCallIndices.delete(event.id)
  2464. // Mark that we have new content to process
  2465. this.userMessageContentReady = false
  2466. // Present the finalized tool call
  2467. presentAssistantMessage(this)
  2468. } else if (toolUseIndex !== undefined) {
  2469. // finalizeStreamingToolCall returned null (malformed JSON or missing args)
  2470. // We still need to mark the tool as non-partial so it gets executed
  2471. // The tool's validation will catch any missing required parameters
  2472. const existingToolUse = this.assistantMessageContent[toolUseIndex]
  2473. if (existingToolUse && existingToolUse.type === "tool_use") {
  2474. existingToolUse.partial = false
  2475. // Ensure it has the ID for native protocol
  2476. ;(existingToolUse as any).id = event.id
  2477. }
  2478. // Clean up tracking
  2479. this.streamingToolCallIndices.delete(event.id)
  2480. // Mark that we have new content to process
  2481. this.userMessageContentReady = false
  2482. // Present the tool call - validation will handle missing params
  2483. presentAssistantMessage(this)
  2484. }
  2485. }
  2486. }
  2487. // Create a copy of current token values to avoid race conditions
  2488. const currentTokens = {
  2489. input: inputTokens,
  2490. output: outputTokens,
  2491. cacheWrite: cacheWriteTokens,
  2492. cacheRead: cacheReadTokens,
  2493. total: totalCost,
  2494. }
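  // Usage chunks may only arrive at the very end of the stream, after the main loop has already
  // broken out (e.g. following a tool use). This background drain keeps consuming the same
  // iterator, bounded by a timeout, so trailing usage data is still captured without blocking
  // the next request.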
  2495. const drainStreamInBackgroundToFindAllUsage = async (apiReqIndex: number) => {
  2496. const timeoutMs = DEFAULT_USAGE_COLLECTION_TIMEOUT_MS
  2497. const startTime = performance.now()
  2498. const modelId = getModelId(this.apiConfiguration)
  2499. // Local variables to accumulate usage data without affecting the main flow
  2500. let bgInputTokens = currentTokens.input
  2501. let bgOutputTokens = currentTokens.output
  2502. let bgCacheWriteTokens = currentTokens.cacheWrite
  2503. let bgCacheReadTokens = currentTokens.cacheRead
  2504. let bgTotalCost = currentTokens.total
  2505. // Helper function to capture telemetry and update messages
  2506. const captureUsageData = async (
  2507. tokens: {
  2508. input: number
  2509. output: number
  2510. cacheWrite: number
  2511. cacheRead: number
  2512. total?: number
  2513. },
  2514. messageIndex: number = apiReqIndex,
  2515. ) => {
  2516. if (
  2517. tokens.input > 0 ||
  2518. tokens.output > 0 ||
  2519. tokens.cacheWrite > 0 ||
  2520. tokens.cacheRead > 0
  2521. ) {
  2522. // Update the shared variables atomically
  2523. inputTokens = tokens.input
  2524. outputTokens = tokens.output
  2525. cacheWriteTokens = tokens.cacheWrite
  2526. cacheReadTokens = tokens.cacheRead
  2527. totalCost = tokens.total
  2528. // Update the API request message with the latest usage data
  2529. updateApiReqMsg()
  2530. await this.saveClineMessages()
  2531. // Update the specific message in the webview
  2532. const apiReqMessage = this.clineMessages[messageIndex]
  2533. if (apiReqMessage) {
  2534. await this.updateClineMessage(apiReqMessage)
  2535. }
  2536. // Capture telemetry with provider-aware cost calculation
  2537. const modelId = getModelId(this.apiConfiguration)
  2538. const apiProtocol = getApiProtocol(this.apiConfiguration.apiProvider, modelId)
  2539. // Use the appropriate cost function based on the API protocol
  2540. const costResult =
  2541. apiProtocol === "anthropic"
  2542. ? calculateApiCostAnthropic(
  2543. streamModelInfo,
  2544. tokens.input,
  2545. tokens.output,
  2546. tokens.cacheWrite,
  2547. tokens.cacheRead,
  2548. )
  2549. : calculateApiCostOpenAI(
  2550. streamModelInfo,
  2551. tokens.input,
  2552. tokens.output,
  2553. tokens.cacheWrite,
  2554. tokens.cacheRead,
  2555. )
  2556. TelemetryService.instance.captureLlmCompletion(this.taskId, {
  2557. inputTokens: costResult.totalInputTokens,
  2558. outputTokens: costResult.totalOutputTokens,
  2559. cacheWriteTokens: tokens.cacheWrite,
  2560. cacheReadTokens: tokens.cacheRead,
  2561. cost: tokens.total ?? costResult.totalCost,
  2562. })
  2563. }
  2564. }
  2565. try {
  2566. // Continue processing the original stream from where the main loop left off
  2567. let usageFound = false
  2568. let chunkCount = 0
  2569. // Use the same iterator that the main loop was using
  2570. while (!item.done) {
  2571. // Check for timeout
  2572. if (performance.now() - startTime > timeoutMs) {
  2573. console.warn(
  2574. `[Background Usage Collection] Timed out after ${timeoutMs}ms for model: ${modelId}, processed ${chunkCount} chunks`,
  2575. )
  2576. // Clean up the iterator before breaking
  2577. if (iterator.return) {
  2578. await iterator.return(undefined)
  2579. }
  2580. break
  2581. }
  2582. const chunk = item.value
  2583. item = await iterator.next()
  2584. chunkCount++
  2585. if (chunk && chunk.type === "usage") {
  2586. usageFound = true
  2587. bgInputTokens += chunk.inputTokens
  2588. bgOutputTokens += chunk.outputTokens
  2589. bgCacheWriteTokens += chunk.cacheWriteTokens ?? 0
  2590. bgCacheReadTokens += chunk.cacheReadTokens ?? 0
  2591. bgTotalCost = chunk.totalCost
  2592. }
  2593. }
  2594. if (
  2595. usageFound ||
  2596. bgInputTokens > 0 ||
  2597. bgOutputTokens > 0 ||
  2598. bgCacheWriteTokens > 0 ||
  2599. bgCacheReadTokens > 0
  2600. ) {
  2601. // We have usage data either from a usage chunk or accumulated tokens
  2602. await captureUsageData(
  2603. {
  2604. input: bgInputTokens,
  2605. output: bgOutputTokens,
  2606. cacheWrite: bgCacheWriteTokens,
  2607. cacheRead: bgCacheReadTokens,
  2608. total: bgTotalCost,
  2609. },
  2610. lastApiReqIndex,
  2611. )
  2612. } else {
  2613. console.warn(
  2614. `[Background Usage Collection] Suspicious: request ${apiReqIndex} is complete, but no usage info was found. Model: ${modelId}`,
  2615. )
  2616. }
  2617. } catch (error) {
  2618. console.error("Error draining stream for usage data:", error)
  2619. // Still try to capture whatever usage data we have collected so far
  2620. if (
  2621. bgInputTokens > 0 ||
  2622. bgOutputTokens > 0 ||
  2623. bgCacheWriteTokens > 0 ||
  2624. bgCacheReadTokens > 0
  2625. ) {
  2626. await captureUsageData(
  2627. {
  2628. input: bgInputTokens,
  2629. output: bgOutputTokens,
  2630. cacheWrite: bgCacheWriteTokens,
  2631. cacheRead: bgCacheReadTokens,
  2632. total: bgTotalCost,
  2633. },
  2634. lastApiReqIndex,
  2635. )
  2636. }
  2637. }
  2638. }
  2639. // Start the background task and handle any errors
  2640. drainStreamInBackgroundToFindAllUsage(lastApiReqIndex).catch((error) => {
  2641. console.error("Background usage collection failed:", error)
  2642. })
  2643. } catch (error) {
  2644. // Abandoned happens when the extension is no longer waiting for the
  2645. // Cline instance to finish aborting (the error is thrown here when
  2646. // any function in the streaming loop throws due to this.abort).
  2647. if (!this.abandoned) {
  2648. // Determine cancellation reason
  2649. const cancelReason: ClineApiReqCancelReason = this.abort ? "user_cancelled" : "streaming_failed"
  2650. const streamingFailedMessage = this.abort
  2651. ? undefined
  2652. : (error.message ?? JSON.stringify(serializeError(error), null, 2))
  2653. // Clean up partial state
  2654. await abortStream(cancelReason, streamingFailedMessage)
  2655. if (this.abort) {
  2656. // User cancelled - abort the entire task
  2657. this.abortReason = cancelReason
  2658. await this.abortTask()
  2659. } else {
  2660. // Stream failed - log the error and retry with the same content
  2661. // The existing rate limiting will prevent rapid retries
  2662. console.error(
  2663. `[Task#${this.taskId}.${this.instanceId}] Stream failed, will retry: ${streamingFailedMessage}`,
  2664. )
  2665. // Apply exponential backoff similar to first-chunk errors when auto-resubmit is enabled
  2666. const stateForBackoff = await this.providerRef.deref()?.getState()
  2667. if (stateForBackoff?.autoApprovalEnabled) {
  2668. await this.backoffAndAnnounce(currentItem.retryAttempt ?? 0, error)
  2669. // Check if task was aborted during the backoff
  2670. if (this.abort) {
  2671. console.log(
  2672. `[Task#${this.taskId}.${this.instanceId}] Task aborted during mid-stream retry backoff`,
  2673. )
  2674. // Abort the entire task
  2675. this.abortReason = "user_cancelled"
  2676. await this.abortTask()
  2677. break
  2678. }
  2679. }
  2680. // Push the same content back onto the stack to retry, incrementing the retry attempt counter
  2681. stack.push({
  2682. userContent: currentUserContent,
  2683. includeFileDetails: false,
  2684. retryAttempt: (currentItem.retryAttempt ?? 0) + 1,
  2685. })
  2686. // Continue to retry the request
  2687. continue
  2688. }
  2689. }
  2690. } finally {
  2691. this.isStreaming = false
  2692. // Clean up the abort controller when streaming completes
  2693. this.currentRequestAbortController = undefined
  2694. }
  2695. // Need to check here in case the stream was aborted.
  2696. if (this.abort || this.abandoned) {
  2697. throw new Error(
  2698. `[RooCode#recursivelyMakeRooRequests] task ${this.taskId}.${this.instanceId} aborted`,
  2699. )
  2700. }
  2701. this.didCompleteReadingStream = true
  2702. // Set any blocks to be complete to allow `presentAssistantMessage`
  2703. // to finish and set `userMessageContentReady` to true.
  2704. // (Could be a text block that had no subsequent tool uses, or a
  2705. // text block at the very end, or an invalid tool use, etc. Whatever
  2706. // the case, `presentAssistantMessage` relies on these blocks either
  2707. // to be completed or the user to reject a block in order to proceed
  2708. // and eventually set userMessageContentReady to true.)
  2709. const partialBlocks = this.assistantMessageContent.filter((block) => block.partial)
  2710. partialBlocks.forEach((block) => (block.partial = false))
  2711. // Can't just do this b/c a tool could be in the middle of executing.
  2712. // this.assistantMessageContent.forEach((e) => (e.partial = false))
  2713. // Now that the stream is complete, finalize any remaining partial content blocks (XML protocol only)
  2714. // Use the protocol determined at the start of streaming
  2715. if (shouldUseXmlParser && this.assistantMessageParser) {
  2716. this.assistantMessageParser.finalizeContentBlocks()
  2717. const parsedBlocks = this.assistantMessageParser.getContentBlocks()
  2718. // For XML protocol: Use only parsed blocks (includes both text and tool_use parsed from XML)
  2719. this.assistantMessageContent = parsedBlocks
  2720. }
  2721. // Present any partial blocks that were just completed
  2722. // For XML protocol: includes both text and tool_use blocks parsed from the text stream
  2723. // For native protocol: tool_use blocks were already presented during streaming via
  2724. // tool_call_partial events, but we still need to present them if they exist (e.g., malformed)
  2725. if (partialBlocks.length > 0) {
  2726. // If there is content to update then it will complete and
  2727. // update `this.userMessageContentReady` to true, which we
  2728. // `pWaitFor` before making the next request.
  2729. presentAssistantMessage(this)
  2730. }
  2731. // Note: updateApiReqMsg() is now called from within drainStreamInBackgroundToFindAllUsage
  2732. // to ensure usage data is captured even when the stream is interrupted. The background task
  2733. // uses local variables to accumulate usage data before atomically updating the shared state.
  2734. // Complete the reasoning message if it exists
  2735. // We can't use say() here because the reasoning message may not be the last message
  2736. // (other messages like text blocks or tool uses may have been added after it during streaming)
  2737. if (reasoningMessage) {
  2738. const lastReasoningIndex = findLastIndex(
  2739. this.clineMessages,
  2740. (m) => m.type === "say" && m.say === "reasoning",
  2741. )
  2742. if (lastReasoningIndex !== -1 && this.clineMessages[lastReasoningIndex].partial) {
  2743. this.clineMessages[lastReasoningIndex].partial = false
  2744. await this.updateClineMessage(this.clineMessages[lastReasoningIndex])
  2745. }
  2746. }
  2747. await this.saveClineMessages()
  2748. await this.providerRef.deref()?.postStateToWebview()
  2749. // Reset parser after each complete conversation round (XML protocol only)
  2750. this.assistantMessageParser?.reset()
  2751. // Now add to apiConversationHistory.
  2752. // Need to save assistant responses to file before proceeding to
  2753. // tool use since user can exit at any moment and we wouldn't be
  2754. // able to save the assistant's response.
  2755. // Check if we have any content to process (text or tool uses)
  2756. const hasTextContent = assistantMessage.length > 0
  2757. const hasToolUses = this.assistantMessageContent.some(
  2758. (block) => block.type === "tool_use" || block.type === "mcp_tool_use",
  2759. )
  2760. if (hasTextContent || hasToolUses) {
  2761. // Display grounding sources to the user if they exist
  2762. if (pendingGroundingSources.length > 0) {
  2763. const citationLinks = pendingGroundingSources.map((source, i) => `[${i + 1}](${source.url})`)
  2764. const sourcesText = `${t("common:gemini.sources")} ${citationLinks.join(", ")}`
  2765. await this.say("text", sourcesText, undefined, false, undefined, undefined, {
  2766. isNonInteractive: true,
  2767. })
  2768. }
  2769. // Build the assistant message content array
  2770. const assistantContent: Array<Anthropic.TextBlockParam | Anthropic.ToolUseBlockParam> = []
  2771. // Add text content if present
  2772. if (assistantMessage) {
  2773. assistantContent.push({
  2774. type: "text" as const,
  2775. text: assistantMessage,
  2776. })
  2777. }
  2778. // Add tool_use blocks with their IDs for native protocol
  2779. // This handles both regular ToolUse and McpToolUse types
  2780. const toolUseBlocks = this.assistantMessageContent.filter(
  2781. (block) => block.type === "tool_use" || block.type === "mcp_tool_use",
  2782. )
  2783. for (const block of toolUseBlocks) {
  2784. if (block.type === "mcp_tool_use") {
  2785. // McpToolUse already has the original tool name (e.g., "mcp_serverName_toolName")
  2786. // The arguments are the raw tool arguments (matching the simplified schema)
  2787. const mcpBlock = block as import("../../shared/tools").McpToolUse
  2788. if (mcpBlock.id) {
  2789. assistantContent.push({
  2790. type: "tool_use" as const,
  2791. id: mcpBlock.id,
  2792. name: mcpBlock.name, // Original dynamic name
  2793. input: mcpBlock.arguments, // Direct tool arguments
  2794. })
  2795. }
  2796. } else {
  2797. // Regular ToolUse
  2798. const toolUse = block as import("../../shared/tools").ToolUse
  2799. const toolCallId = toolUse.id
  2800. if (toolCallId) {
  2801. // nativeArgs is already in the correct API format for all tools
  2802. const input = toolUse.nativeArgs || toolUse.params
  2803. // Use originalName (alias) if present for API history consistency.
  2804. // When tool aliases are used (e.g., "edit_file" -> "search_and_replace"),
  2805. // we want the alias name in the conversation history to match what the model
  2806. // was told the tool was named, preventing confusion in multi-turn conversations.
  2807. const toolNameForHistory = toolUse.originalName ?? toolUse.name
  2808. assistantContent.push({
  2809. type: "tool_use" as const,
  2810. id: toolCallId,
  2811. name: toolNameForHistory,
  2812. input,
  2813. })
  2814. }
  2815. }
  2816. }
  2817. await this.addToApiConversationHistory(
  2818. { role: "assistant", content: assistantContent },
  2819. reasoningMessage || undefined,
  2820. )
  2821. TelemetryService.instance.captureConversationMessage(this.taskId, "assistant")
  2822. // NOTE: This comment is here for future reference - this was a
  2823. // workaround for `userMessageContentReady` not getting set to true.
  2824. // It was caused by not recursively calling for partial blocks
  2825. // when `didRejectTool`, so it would get stuck waiting for a
  2826. // partial block to complete before it could continue.
  2827. // In case the content blocks finished, it may be that the api stream
  2828. // finished after the last parsed content block was executed, so we
  2829. // are able to detect out of bounds and set
  2830. // `userMessageContentReady` to true (note you should not call
  2831. // `presentAssistantMessage` since if the last block is
  2832. // completed it will be presented again).
  2833. // const completeBlocks = this.assistantMessageContent.filter((block) => !block.partial) // If there are any partial blocks after the stream ended we can consider them invalid.
  2834. // if (this.currentStreamingContentIndex >= completeBlocks.length) {
  2835. // this.userMessageContentReady = true
  2836. // }
  2837. await pWaitFor(() => this.userMessageContentReady)
  2838. // If the model did not use a tool, then we need to tell it to
  2839. // either use a tool or attempt_completion.
  2840. const didToolUse = this.assistantMessageContent.some(
  2841. (block) => block.type === "tool_use" || block.type === "mcp_tool_use",
  2842. )
  2843. if (!didToolUse) {
  2844. // Increment consecutive no-tool-use counter
  2845. this.consecutiveNoToolUseCount++
  2846. // Only show error and count toward mistake limit after 2 consecutive failures
  2847. if (this.consecutiveNoToolUseCount >= 2) {
  2848. await this.say("error", "MODEL_NO_TOOLS_USED")
  2849. // Only count toward mistake limit after second consecutive failure
  2850. this.consecutiveMistakeCount++
  2851. }
  2852. // Use the task's locked protocol for consistent behavior
  2853. this.userMessageContent.push({
  2854. type: "text",
  2855. text: formatResponse.noToolsUsed(this._taskToolProtocol ?? "xml"),
  2856. })
  2857. } else {
  2858. // Reset counter when tools are used successfully
  2859. this.consecutiveNoToolUseCount = 0
  2860. }
  2861. // Push to stack if there's content OR if we're paused waiting for a subtask.
  2862. // When paused, we push an empty item so the loop continues to the pause check.
  2863. if (this.userMessageContent.length > 0 || this.isPaused) {
  2864. stack.push({
  2865. userContent: [...this.userMessageContent], // Create a copy to avoid mutation issues
  2866. includeFileDetails: false, // Subsequent iterations don't need file details
  2867. })
  2868. // Add periodic yielding to prevent blocking
  2869. await new Promise((resolve) => setImmediate(resolve))
  2870. }
  2871. continue
  2872. } else {
  2873. // If there are no assistant responses, that means we got no text
  2874. // or tool_use content blocks from the API, which we should treat as
  2875. // an error.
  2876. // IMPORTANT: For native tool protocol, we already added the user message to
  2877. // apiConversationHistory at line 1876. Since the assistant failed to respond,
  2878. // we need to remove that message before retrying to avoid having two consecutive
  2879. // user messages (which would cause tool_result validation errors).
  2880. let state = await this.providerRef.deref()?.getState()
  2881. // Use the task's locked protocol, NOT current settings
  2882. if (isNativeProtocol(this._taskToolProtocol ?? "xml") && this.apiConversationHistory.length > 0) {
  2883. const lastMessage = this.apiConversationHistory[this.apiConversationHistory.length - 1]
  2884. if (lastMessage.role === "user") {
  2885. // Remove the last user message that we added earlier
  2886. this.apiConversationHistory.pop()
  2887. }
  2888. }
  2889. // Check if we should auto-retry or prompt the user
  2890. // Reuse the state variable from above
  2891. if (state?.autoApprovalEnabled) {
  2892. // Auto-retry with backoff - don't persist failure message when retrying
  2893. await this.backoffAndAnnounce(
  2894. currentItem.retryAttempt ?? 0,
  2895. new Error(
  2896. "Unexpected API Response: The language model did not provide any assistant messages. This may indicate an issue with the API or the model's output.",
  2897. ),
  2898. )
  2899. // Check if task was aborted during the backoff
  2900. if (this.abort) {
  2901. console.log(
  2902. `[Task#${this.taskId}.${this.instanceId}] Task aborted during empty-assistant retry backoff`,
  2903. )
  2904. break
  2905. }
  2906. // Push the same content back onto the stack to retry, incrementing the retry attempt counter
  2907. // Mark that user message was removed so it gets re-added on retry
  2908. stack.push({
  2909. userContent: currentUserContent,
  2910. includeFileDetails: false,
  2911. retryAttempt: (currentItem.retryAttempt ?? 0) + 1,
  2912. userMessageWasRemoved: true,
  2913. })
  2914. // Continue to retry the request
  2915. continue
  2916. } else {
  2917. // Prompt the user for retry decision
  2918. const { response } = await this.ask(
  2919. "api_req_failed",
  2920. "The model returned no assistant messages. This may indicate an issue with the API or the model's output.",
  2921. )
  2922. if (response === "yesButtonClicked") {
  2923. await this.say("api_req_retried")
  2924. // Push the same content back to retry
  2925. stack.push({
  2926. userContent: currentUserContent,
  2927. includeFileDetails: false,
  2928. retryAttempt: (currentItem.retryAttempt ?? 0) + 1,
  2929. })
  2930. // Continue to retry the request
  2931. continue
  2932. } else {
  2933. // User declined to retry
  2934. // For native protocol, re-add the user message we removed
  2935. // Use the task's locked protocol, NOT current settings
  2936. if (isNativeProtocol(this._taskToolProtocol ?? "xml")) {
  2937. await this.addToApiConversationHistory({
  2938. role: "user",
  2939. content: currentUserContent,
  2940. })
  2941. }
  2942. await this.say(
  2943. "error",
  2944. "Unexpected API Response: The language model did not provide any assistant messages. This may indicate an issue with the API or the model's output.",
  2945. )
  2946. await this.addToApiConversationHistory({
  2947. role: "assistant",
  2948. content: [{ type: "text", text: "Failure: I did not provide a response." }],
  2949. })
  2950. }
  2951. }
  2952. }
  2953. // If we reach here without continuing, return false (will always be false for now)
  2954. return false
  2955. } catch (error) {
  2956. // This should never happen, since the only thing that can throw an
  2957. // error is attemptApiRequest, which is wrapped in a try/catch that
  2958. // sends an ask; if noButtonClicked, it will clear the current
  2959. // task and destroy this instance. However, to avoid an unhandled
  2960. // promise rejection, we end this loop, which ends execution
  2961. // of this instance (see `startTask`).
  2962. return true // Needs to be true so parent loop knows to end task.
  2963. }
  2964. }
  2965. // If we exit the while loop normally (stack is empty), return false
  2966. return false
  2967. }
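  // Builds the system prompt for the current mode and model: waits for the MCP hub to finish
  // connecting (when MCP is enabled), determines browser-tool availability, and uses the task's
  // locked tool protocol so the prompt matches how the task was started.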
  2968. private async getSystemPrompt(): Promise<string> {
  2969. const { mcpEnabled } = (await this.providerRef.deref()?.getState()) ?? {}
  2970. let mcpHub: McpHub | undefined
  2971. if (mcpEnabled ?? true) {
  2972. const provider = this.providerRef.deref()
  2973. if (!provider) {
  2974. throw new Error("Provider reference lost during view transition")
  2975. }
  2976. // Wait for MCP hub initialization through McpServerManager
  2977. mcpHub = await McpServerManager.getInstance(provider.context, provider)
  2978. if (!mcpHub) {
  2979. throw new Error("Failed to get MCP hub from server manager")
  2980. }
  2981. // Wait for MCP servers to be connected before generating system prompt
  2982. await pWaitFor(() => !mcpHub!.isConnecting, { timeout: 10_000 }).catch(() => {
  2983. console.error("MCP servers failed to connect in time")
  2984. })
  2985. }
  2986. const rooIgnoreInstructions = this.rooIgnoreController?.getInstructions()
  2987. const state = await this.providerRef.deref()?.getState()
  2988. const {
  2989. browserViewportSize,
  2990. mode,
  2991. customModes,
  2992. customModePrompts,
  2993. customInstructions,
  2994. experiments,
  2995. enableMcpServerCreation,
  2996. browserToolEnabled,
  2997. language,
  2998. maxConcurrentFileReads,
  2999. maxReadFileLine,
  3000. apiConfiguration,
  3001. } = state ?? {}
  3002. return await (async () => {
  3003. const provider = this.providerRef.deref()
  3004. if (!provider) {
  3005. throw new Error("Provider not available")
  3006. }
  3007. // Align browser tool enablement with generateSystemPrompt: require model image support,
  3008. // mode to include the browser group, and the user setting to be enabled.
  3009. const modeConfig = getModeBySlug(mode ?? defaultModeSlug, customModes)
  3010. const modeSupportsBrowser = modeConfig?.groups.some((group) => getGroupName(group) === "browser") ?? false
  3011. // Check if model supports browser capability (images)
  3012. const modelInfo = this.api.getModel().info
  3013. const modelSupportsBrowser = (modelInfo as any)?.supportsImages === true
  3014. const canUseBrowserTool = modelSupportsBrowser && modeSupportsBrowser && (browserToolEnabled ?? true)
  3015. // Use the task's locked protocol for system prompt consistency.
  3016. // This ensures the system prompt matches the protocol the task was started with,
  3017. // even if user settings have changed since then.
  3018. const toolProtocol = resolveToolProtocol(
  3019. apiConfiguration ?? this.apiConfiguration,
  3020. modelInfo,
  3021. this._taskToolProtocol,
  3022. )
  3023. return SYSTEM_PROMPT(
  3024. provider.context,
  3025. this.cwd,
  3026. canUseBrowserTool,
  3027. mcpHub,
  3028. this.diffStrategy,
  3029. browserViewportSize ?? "900x600",
  3030. mode ?? defaultModeSlug,
  3031. customModePrompts,
  3032. customModes,
  3033. customInstructions,
  3034. this.diffEnabled,
  3035. experiments,
  3036. enableMcpServerCreation,
  3037. language,
  3038. rooIgnoreInstructions,
  3039. maxReadFileLine !== -1,
  3040. {
  3041. maxConcurrentFileReads: maxConcurrentFileReads ?? 5,
  3042. todoListEnabled: apiConfiguration?.todoListEnabled ?? true,
  3043. browserToolEnabled: browserToolEnabled ?? true,
  3044. useAgentRules:
  3045. vscode.workspace.getConfiguration(Package.name).get<boolean>("useAgentRules") ?? true,
  3046. newTaskRequireTodos: vscode.workspace
  3047. .getConfiguration(Package.name)
  3048. .get<boolean>("newTaskRequireTodos", false),
  3049. toolProtocol,
  3050. isStealthModel: modelInfo?.isStealthModel,
  3051. },
  3052. undefined, // todoList
  3053. this.api.getModel().id,
  3054. )
  3055. })()
  3056. }
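  // Resolves the id of the currently selected API configuration profile, falling back to
  // "default" when no profile matches.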
  3057. private getCurrentProfileId(state: any): string {
  3058. return (
  3059. state?.listApiConfigMeta?.find((profile: any) => profile.name === state?.currentApiConfigName)?.id ??
  3060. "default"
  3061. )
  3062. }
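  // Handles a provider "context window exceeded" failure by forcing context management down to
  // FORCED_CONTEXT_REDUCTION_PERCENT (condensing when possible, otherwise sliding-window
  // truncation) and reporting progress to the webview.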
  3063. private async handleContextWindowExceededError(): Promise<void> {
  3064. const state = await this.providerRef.deref()?.getState()
  3065. const { profileThresholds = {} } = state ?? {}
  3066. const { contextTokens } = this.getTokenUsage()
  3067. const modelInfo = this.api.getModel().info
  3068. const maxTokens = getModelMaxOutputTokens({
  3069. modelId: this.api.getModel().id,
  3070. model: modelInfo,
  3071. settings: this.apiConfiguration,
  3072. })
  3073. const contextWindow = modelInfo.contextWindow
  3074. // Get the current profile ID using the helper method
  3075. const currentProfileId = this.getCurrentProfileId(state)
  3076. // Log the context window error for debugging
  3077. console.warn(
  3078. `[Task#${this.taskId}] Context window exceeded for model ${this.api.getModel().id}. ` +
  3079. `Current tokens: ${contextTokens}, Context window: ${contextWindow}. ` +
  3080. `Forcing truncation to ${FORCED_CONTEXT_REDUCTION_PERCENT}% of current context.`,
  3081. )
  3082. // Determine if we're using native tool protocol for proper message handling
  3083. // Use the task's locked protocol, NOT the current settings
  3084. const useNativeTools = isNativeProtocol(this._taskToolProtocol ?? "xml")
  3085. // Send condenseTaskContextStarted to show in-progress indicator
  3086. await this.providerRef.deref()?.postMessageToWebview({ type: "condenseTaskContextStarted", text: this.taskId })
  3087. // Force aggressive truncation by keeping only 75% of the conversation history
  3088. const truncateResult = await manageContext({
  3089. messages: this.apiConversationHistory,
  3090. totalTokens: contextTokens || 0,
  3091. maxTokens,
  3092. contextWindow,
  3093. apiHandler: this.api,
  3094. autoCondenseContext: true,
  3095. autoCondenseContextPercent: FORCED_CONTEXT_REDUCTION_PERCENT,
  3096. systemPrompt: await this.getSystemPrompt(),
  3097. taskId: this.taskId,
  3098. profileThresholds,
  3099. currentProfileId,
  3100. useNativeTools,
  3101. })
  3102. if (truncateResult.messages !== this.apiConversationHistory) {
  3103. await this.overwriteApiConversationHistory(truncateResult.messages)
  3104. }
  3105. if (truncateResult.summary) {
  3106. const { summary, cost, prevContextTokens, newContextTokens = 0 } = truncateResult
  3107. const contextCondense: ContextCondense = { summary, cost, newContextTokens, prevContextTokens }
  3108. await this.say(
  3109. "condense_context",
  3110. undefined /* text */,
  3111. undefined /* images */,
  3112. false /* partial */,
  3113. undefined /* checkpoint */,
  3114. undefined /* progressStatus */,
  3115. { isNonInteractive: true } /* options */,
  3116. contextCondense,
  3117. )
  3118. } else if (truncateResult.truncationId) {
  3119. // Sliding window truncation occurred (fallback when condensing fails or is disabled)
  3120. const contextTruncation: ContextTruncation = {
  3121. truncationId: truncateResult.truncationId,
  3122. messagesRemoved: truncateResult.messagesRemoved ?? 0,
  3123. prevContextTokens: truncateResult.prevContextTokens,
  3124. newContextTokens: truncateResult.newContextTokensAfterTruncation ?? 0,
  3125. }
  3126. await this.say(
  3127. "sliding_window_truncation",
  3128. undefined /* text */,
  3129. undefined /* images */,
  3130. false /* partial */,
  3131. undefined /* checkpoint */,
  3132. undefined /* progressStatus */,
  3133. { isNonInteractive: true } /* options */,
  3134. undefined /* contextCondense */,
  3135. contextTruncation,
  3136. )
  3137. }
  3138. // Notify webview that context management is complete (removes in-progress spinner)
  3139. await this.providerRef.deref()?.postMessageToWebview({ type: "condenseTaskContextResponse", text: this.taskId })
  3140. }
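  // Performs one API request as an async generator: honours the shared rate-limit window, runs
  // context management when the context grows too large, assembles the cleaned conversation
  // history and (for the native protocol) the tools array, then yields the response stream.
  // The first chunk is awaited eagerly so first-chunk failures (e.g. rate limits or context
  // overflow) can be retried here rather than mid-stream.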
  3141. public async *attemptApiRequest(retryAttempt: number = 0): ApiStream {
  3142. const state = await this.providerRef.deref()?.getState()
  3143. const {
  3144. apiConfiguration,
  3145. autoApprovalEnabled,
  3146. requestDelaySeconds,
  3147. mode,
  3148. autoCondenseContext = true,
  3149. autoCondenseContextPercent = 100,
  3150. profileThresholds = {},
  3151. } = state ?? {}
  3152. // Get condensing configuration for automatic triggers.
  3153. const customCondensingPrompt = state?.customCondensingPrompt
  3154. const condensingApiConfigId = state?.condensingApiConfigId
  3155. const listApiConfigMeta = state?.listApiConfigMeta
  3156. // Determine API handler to use for condensing.
  3157. let condensingApiHandler: ApiHandler | undefined
  3158. if (condensingApiConfigId && listApiConfigMeta && Array.isArray(listApiConfigMeta)) {
  3159. // Find matching config by ID
  3160. const matchingConfig = listApiConfigMeta.find((config) => config.id === condensingApiConfigId)
  3161. if (matchingConfig) {
  3162. const profile = await this.providerRef.deref()?.providerSettingsManager.getProfile({
  3163. id: condensingApiConfigId,
  3164. })
  3165. // Ensure profile and apiProvider exist before trying to build handler.
  3166. if (profile && profile.apiProvider) {
  3167. condensingApiHandler = buildApiHandler(profile)
  3168. }
  3169. }
  3170. }
  3171. let rateLimitDelay = 0
  3172. // Use the shared timestamp so that subtasks respect the same rate-limit
  3173. // window as their parent tasks.
  3174. if (Task.lastGlobalApiRequestTime) {
  3175. const now = performance.now()
  3176. const timeSinceLastRequest = now - Task.lastGlobalApiRequestTime
  3177. const rateLimit = apiConfiguration?.rateLimitSeconds || 0
  3178. rateLimitDelay = Math.ceil(Math.min(rateLimit, Math.max(0, rateLimit * 1000 - timeSinceLastRequest) / 1000))
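  // e.g. with rateLimitSeconds = 10 and 3.5s elapsed since the last request, this yields
  // ceil(min(10, 6.5)) = 7 seconds of delay.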
  3179. }
  3180. // Only show rate limiting message if we're not retrying. If retrying, we'll include the delay there.
  3181. if (rateLimitDelay > 0 && retryAttempt === 0) {
  3182. // Show countdown timer
  3183. for (let i = rateLimitDelay; i > 0; i--) {
  3184. const delayMessage = `Rate limiting for ${i} seconds...`
  3185. await this.say("api_req_retry_delayed", delayMessage, undefined, true)
  3186. await delay(1000)
  3187. }
  3188. }
  3189. // Update last request time before making the request so that subsequent
  3190. // requests — even from new subtasks — will honour the provider's rate-limit.
  3191. Task.lastGlobalApiRequestTime = performance.now()
  3192. const systemPrompt = await this.getSystemPrompt()
  3193. const { contextTokens } = this.getTokenUsage()
  3194. if (contextTokens) {
  3195. const modelInfo = this.api.getModel().info
  3196. const maxTokens = getModelMaxOutputTokens({
  3197. modelId: this.api.getModel().id,
  3198. model: modelInfo,
  3199. settings: this.apiConfiguration,
  3200. })
  3201. const contextWindow = modelInfo.contextWindow
  3202. // Get the current profile ID using the helper method
  3203. const currentProfileId = this.getCurrentProfileId(state)
  3204. // Determine if we're using native tool protocol for proper message handling
  3205. // Use the task's locked protocol, NOT the current settings
  3206. const useNativeTools = isNativeProtocol(this._taskToolProtocol ?? "xml")
  3207. // Check if context management will likely run (threshold check)
  3208. // This allows us to show an in-progress indicator to the user
  3209. // We use the centralized willManageContext helper to avoid duplicating threshold logic
  3210. const lastMessage = this.apiConversationHistory[this.apiConversationHistory.length - 1]
  3211. const lastMessageContent = lastMessage?.content
  3212. let lastMessageTokens = 0
  3213. if (lastMessageContent) {
  3214. lastMessageTokens = Array.isArray(lastMessageContent)
  3215. ? await this.api.countTokens(lastMessageContent)
  3216. : await this.api.countTokens([{ type: "text", text: lastMessageContent as string }])
  3217. }
  3218. const contextManagementWillRun = willManageContext({
  3219. totalTokens: contextTokens,
  3220. contextWindow,
  3221. maxTokens,
  3222. autoCondenseContext,
  3223. autoCondenseContextPercent,
  3224. profileThresholds,
  3225. currentProfileId,
  3226. lastMessageTokens,
  3227. })
  3228. // Send condenseTaskContextStarted BEFORE manageContext to show in-progress indicator
  3229. // This notification must be sent here (not earlier) because the early check uses stale token count
  3230. // (before user message is added to history), which could incorrectly skip showing the indicator
  3231. if (contextManagementWillRun && autoCondenseContext) {
  3232. await this.providerRef
  3233. .deref()
  3234. ?.postMessageToWebview({ type: "condenseTaskContextStarted", text: this.taskId })
  3235. }
  3236. const truncateResult = await manageContext({
  3237. messages: this.apiConversationHistory,
  3238. totalTokens: contextTokens,
  3239. maxTokens,
  3240. contextWindow,
  3241. apiHandler: this.api,
  3242. autoCondenseContext,
  3243. autoCondenseContextPercent,
  3244. systemPrompt,
  3245. taskId: this.taskId,
  3246. customCondensingPrompt,
  3247. condensingApiHandler,
  3248. profileThresholds,
  3249. currentProfileId,
  3250. useNativeTools,
  3251. })
  3252. if (truncateResult.messages !== this.apiConversationHistory) {
  3253. await this.overwriteApiConversationHistory(truncateResult.messages)
  3254. }
  3255. if (truncateResult.error) {
  3256. await this.say("condense_context_error", truncateResult.error)
  3257. } else if (truncateResult.summary) {
  3258. const { summary, cost, prevContextTokens, newContextTokens = 0, condenseId } = truncateResult
  3259. const contextCondense: ContextCondense = {
  3260. summary,
  3261. cost,
  3262. newContextTokens,
  3263. prevContextTokens,
  3264. condenseId,
  3265. }
  3266. await this.say(
  3267. "condense_context",
  3268. undefined /* text */,
  3269. undefined /* images */,
  3270. false /* partial */,
  3271. undefined /* checkpoint */,
  3272. undefined /* progressStatus */,
  3273. { isNonInteractive: true } /* options */,
  3274. contextCondense,
  3275. )
  3276. } else if (truncateResult.truncationId) {
  3277. // Sliding window truncation occurred (fallback when condensing fails or is disabled)
  3278. const contextTruncation: ContextTruncation = {
  3279. truncationId: truncateResult.truncationId,
  3280. messagesRemoved: truncateResult.messagesRemoved ?? 0,
  3281. prevContextTokens: truncateResult.prevContextTokens,
  3282. newContextTokens: truncateResult.newContextTokensAfterTruncation ?? 0,
  3283. }
  3284. await this.say(
  3285. "sliding_window_truncation",
  3286. undefined /* text */,
  3287. undefined /* images */,
  3288. false /* partial */,
  3289. undefined /* checkpoint */,
  3290. undefined /* progressStatus */,
  3291. { isNonInteractive: true } /* options */,
  3292. undefined /* contextCondense */,
  3293. contextTruncation,
  3294. )
  3295. }
  3296. // Notify webview that context management is complete (sets isCondensing = false)
  3297. // This removes the in-progress spinner and allows the completed result to show
  3298. if (contextManagementWillRun && autoCondenseContext) {
  3299. await this.providerRef
  3300. .deref()
  3301. ?.postMessageToWebview({ type: "condenseTaskContextResponse", text: this.taskId })
  3302. }
  3303. }
  3304. // Get the effective API history by filtering out condensed messages
  3305. // This allows non-destructive condensing where messages are tagged but not deleted,
  3306. // enabling accurate rewind operations while still sending condensed history to the API.
  3307. const effectiveHistory = getEffectiveApiHistory(this.apiConversationHistory)
  3308. const messagesSinceLastSummary = getMessagesSinceLastSummary(effectiveHistory)
  3309. const messagesWithoutImages = maybeRemoveImageBlocks(messagesSinceLastSummary, this.api)
  3310. const cleanConversationHistory = this.buildCleanConversationHistory(messagesWithoutImages as ApiMessage[])
  3311. // Check auto-approval limits
  3312. const approvalResult = await this.autoApprovalHandler.checkAutoApprovalLimits(
  3313. state,
  3314. this.combineMessages(this.clineMessages.slice(1)),
  3315. async (type, data) => this.ask(type, data),
  3316. )
  3317. if (!approvalResult.shouldProceed) {
  3318. // User did not approve, task should be aborted
  3319. throw new Error("Auto-approval limit reached and user did not approve continuation")
  3320. }
  3321. // Determine if we should include native tools based on:
  3322. // 1. Task's locked tool protocol is set to NATIVE
  3323. // 2. Model supports native tools
  3324. // CRITICAL: Use the task's locked protocol to ensure tasks that started with XML
  3325. // tools continue using XML even if NTC settings have since changed.
  3326. const modelInfo = this.api.getModel().info
  3327. const taskProtocol = this._taskToolProtocol ?? "xml"
  3328. const shouldIncludeTools = taskProtocol === TOOL_PROTOCOL.NATIVE && (modelInfo.supportsNativeTools ?? false)
  3329. // Build complete tools array: native tools + dynamic MCP tools, filtered by mode restrictions
  3330. let allTools: OpenAI.Chat.ChatCompletionTool[] = []
  3331. if (shouldIncludeTools) {
  3332. const provider = this.providerRef.deref()
  3333. if (!provider) {
  3334. throw new Error("Provider reference lost during tool building")
  3335. }
  3336. allTools = await buildNativeToolsArray({
  3337. provider,
  3338. cwd: this.cwd,
  3339. mode,
  3340. customModes: state?.customModes,
  3341. experiments: state?.experiments,
  3342. apiConfiguration,
  3343. maxReadFileLine: state?.maxReadFileLine ?? -1,
  3344. browserToolEnabled: state?.browserToolEnabled ?? true,
  3345. modelInfo,
  3346. diffEnabled: this.diffEnabled,
  3347. })
  3348. }
  3349. // Parallel tool calls are disabled - feature is on hold
  3350. // Previously resolved from experiments.isEnabled(..., EXPERIMENT_IDS.MULTIPLE_NATIVE_TOOL_CALLS)
  3351. const parallelToolCallsEnabled = false
  3352. const metadata: ApiHandlerCreateMessageMetadata = {
  3353. mode: mode,
  3354. taskId: this.taskId,
  3355. suppressPreviousResponseId: this.skipPrevResponseIdOnce,
  3356. // Include tools and tool protocol when using native protocol and model supports it
  3357. ...(shouldIncludeTools
  3358. ? {
  3359. tools: allTools,
  3360. tool_choice: "auto",
  3361. toolProtocol: taskProtocol,
  3362. parallelToolCalls: parallelToolCallsEnabled,
  3363. }
  3364. : {}),
  3365. }
		// Create an AbortController to allow cancelling the request mid-stream
		this.currentRequestAbortController = new AbortController()
		const abortSignal = this.currentRequestAbortController.signal

		// Reset the flag after using it
		this.skipPrevResponseIdOnce = false

		// The provider accepts reasoning items alongside standard messages; cast to the expected parameter type.
		const stream = this.api.createMessage(
			systemPrompt,
			cleanConversationHistory as unknown as Anthropic.Messages.MessageParam[],
			metadata,
		)

		const iterator = stream[Symbol.asyncIterator]()

		// Set up abort handling - when the signal is aborted, clean up the controller reference
		abortSignal.addEventListener("abort", () => {
			console.log(`[Task#${this.taskId}.${this.instanceId}] AbortSignal triggered for current request`)
			this.currentRequestAbortController = undefined
		})

		try {
			// Awaiting first chunk to see if it will throw an error.
			this.isWaitingForFirstChunk = true

			// Race between the first chunk and the abort signal
			const firstChunkPromise = iterator.next()
			const abortPromise = new Promise<never>((_, reject) => {
				if (abortSignal.aborted) {
					reject(new Error("Request cancelled by user"))
				} else {
					abortSignal.addEventListener("abort", () => {
						reject(new Error("Request cancelled by user"))
					})
				}
			})

			const firstChunk = await Promise.race([firstChunkPromise, abortPromise])
			yield firstChunk.value
			this.isWaitingForFirstChunk = false
		} catch (error) {
			this.isWaitingForFirstChunk = false
			this.currentRequestAbortController = undefined

			const isContextWindowExceededError = checkContextWindowExceededError(error)

			// If it's a context window error and we haven't exceeded max retries for this error type
			if (isContextWindowExceededError && retryAttempt < MAX_CONTEXT_WINDOW_RETRIES) {
				console.warn(
					`[Task#${this.taskId}] Context window exceeded for model ${this.api.getModel().id}. ` +
						`Retry attempt ${retryAttempt + 1}/${MAX_CONTEXT_WINDOW_RETRIES}. ` +
						`Attempting automatic truncation...`,
				)
				await this.handleContextWindowExceededError()

				// Retry the request after handling the context window error
				yield* this.attemptApiRequest(retryAttempt + 1)
				return
			}
			// Note that this api_req_failed ask is unique: we only present this option if the API
			// hasn't streamed any content yet (i.e. it failed on the first chunk), since that allows
			// the user to hit a retry button. If the API fails mid-stream, however, it could be in an
			// arbitrary state where some tools may already have executed, so that error is handled
			// differently and requires cancelling the task entirely.
			if (autoApprovalEnabled) {
				// Apply shared exponential backoff and countdown UX
				await this.backoffAndAnnounce(retryAttempt, error)

				// CRITICAL: Check if the task was aborted during the backoff countdown.
				// This prevents infinite loops when users cancel during auto-retry:
				// without this check, the recursive call below would continue even after abort.
				if (this.abort) {
					throw new Error(
						`[Task#attemptApiRequest] task ${this.taskId}.${this.instanceId} aborted during retry`,
					)
				}

				// Delegate generator output from the recursive call with an incremented retry count.
				yield* this.attemptApiRequest(retryAttempt + 1)
				return
			} else {
				const { response } = await this.ask(
					"api_req_failed",
					error.message ?? JSON.stringify(serializeError(error), null, 2),
				)

				if (response !== "yesButtonClicked") {
					// This will never happen, because if noButtonClicked is returned we
					// clear the current task, aborting this instance.
					throw new Error("API request failed")
				}

				await this.say("api_req_retried")

				// Delegate generator output from the recursive call.
				yield* this.attemptApiRequest()
				return
			}
		}

		// No error, so we can continue to yield all remaining chunks.
		// (This needs to sit outside the try/catch because we want the caller to handle
		// errors rather than api_req_failed, which is reserved for first-chunk failures only.)
		// `yield*` delegates to another generator or iterable; here it means "yield all
		// remaining values from this iterator", effectively passing along every subsequent
		// chunk from the original stream.
		yield* iterator
	}
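	// Illustrative sketch (not part of the original file): callers inside this class would
	// typically consume the generator above with `for await`, so that first-chunk failures
	// surface through the api_req_failed flow while mid-stream errors propagate to the caller:
	//
	//   for await (const chunk of this.attemptApiRequest()) {
	//     // process each streamed chunk here
	//   }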
	// Shared exponential backoff for retries (first-chunk and mid-stream)
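	// Worked example of the delay math below (numbers assumed for illustration only):
	// with requestDelaySeconds = 5 and retryAttempt = 3,
	//   exponentialDelay = min(ceil(5 * 2^3), MAX_EXPONENTIAL_BACKOFF_SECONDS) = min(40, cap)
	// A 429 response carrying a google.rpc.RetryInfo delay of "30s" would override that value
	// to 31 seconds, and the final delay is the larger of this and any remaining rate-limit window.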
	private async backoffAndAnnounce(retryAttempt: number, error: any): Promise<void> {
		try {
			const state = await this.providerRef.deref()?.getState()
			const baseDelay = state?.requestDelaySeconds || 5

			let exponentialDelay = Math.min(
				Math.ceil(baseDelay * Math.pow(2, retryAttempt)),
				MAX_EXPONENTIAL_BACKOFF_SECONDS,
			)

			// Respect provider rate limit window
			let rateLimitDelay = 0
			const rateLimit = state?.apiConfiguration?.rateLimitSeconds || 0

			if (Task.lastGlobalApiRequestTime && rateLimit > 0) {
				const elapsed = performance.now() - Task.lastGlobalApiRequestTime
				rateLimitDelay = Math.ceil(Math.min(rateLimit, Math.max(0, rateLimit * 1000 - elapsed) / 1000))
			}

			// Prefer RetryInfo on 429 if present
			if (error?.status === 429) {
				const retryInfo = error?.errorDetails?.find(
					(d: any) => d["@type"] === "type.googleapis.com/google.rpc.RetryInfo",
				)
				const match = retryInfo?.retryDelay?.match?.(/^(\d+)s$/)

				if (match) {
					exponentialDelay = Number(match[1]) + 1
				}
			}

			const finalDelay = Math.max(exponentialDelay, rateLimitDelay)

			if (finalDelay <= 0) {
				return
			}

			// Build header text; fall back to error message if none provided
			let headerText

			if (error.status) {
				// Set the message to just the error status code; ChatRow knows how to
				// handle that and render an i18n'd error string.
				// (In development, you can hardcode headerText to an HTTP status code to check it.)
				headerText = error.status
			} else if (error?.message) {
				headerText = error.message
			} else {
				headerText = "Unknown error"
			}

			headerText = headerText ? `${headerText}\n` : ""

			// Show countdown timer with exponential backoff
			for (let i = finalDelay; i > 0; i--) {
				// Check abort flag during countdown to allow early exit
				if (this.abort) {
					throw new Error(`[Task#${this.taskId}] Aborted during retry countdown`)
				}

				await this.say("api_req_retry_delayed", `${headerText}<retry_timer>${i}</retry_timer>`, undefined, true)
				await delay(1000)
			}

			await this.say("api_req_retry_delayed", headerText, undefined, false)
		} catch (err) {
			console.error("Exponential backoff failed:", err)
		}
	}
	// Checkpoints

	public async checkpointSave(force: boolean = false, suppressMessage: boolean = false) {
		return checkpointSave(this, force, suppressMessage)
	}
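	/**
	 * Converts stored ApiMessages into the payload sent to the provider: standalone and
	 * embedded reasoning blocks are forwarded only in encrypted form (or kept verbatim when
	 * the model's preserveReasoning flag is set), reasoning_details are passed through for
	 * providers that expect them, and plain-text reasoning is otherwise stripped.
	 *
	 * Illustrative example (shape assumed for documentation only): an assistant message whose
	 * first content block is `{ type: "reasoning", encrypted_content: "...", id: "rs_1" }`
	 * followed by a text block becomes two items - a standalone reasoning item plus an
	 * assistant message containing just the text.
	 */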
	private buildCleanConversationHistory(
		messages: ApiMessage[],
	): Array<
		Anthropic.Messages.MessageParam | { type: "reasoning"; encrypted_content: string; id?: string; summary?: any[] }
	> {
		type ReasoningItemForRequest = {
			type: "reasoning"
			encrypted_content: string
			id?: string
			summary?: any[]
		}

		const cleanConversationHistory: (Anthropic.Messages.MessageParam | ReasoningItemForRequest)[] = []

		for (const msg of messages) {
			// Standalone reasoning: send encrypted, skip plain text
			if (msg.type === "reasoning") {
				if (msg.encrypted_content) {
					cleanConversationHistory.push({
						type: "reasoning",
						summary: msg.summary,
						encrypted_content: msg.encrypted_content!,
						...(msg.id ? { id: msg.id } : {}),
					})
				}

				continue
			}

			// Preferred path: assistant message with embedded reasoning as first content block
			if (msg.role === "assistant") {
				const rawContent = msg.content
				const contentArray: Anthropic.Messages.ContentBlockParam[] = Array.isArray(rawContent)
					? (rawContent as Anthropic.Messages.ContentBlockParam[])
					: rawContent !== undefined
						? ([
								{ type: "text", text: rawContent } satisfies Anthropic.Messages.TextBlockParam,
							] as Anthropic.Messages.ContentBlockParam[])
						: []

				const [first, ...rest] = contentArray

				// Check if this message has reasoning_details (OpenRouter format for Gemini 3, etc.)
				const msgWithDetails = msg

				if (msgWithDetails.reasoning_details && Array.isArray(msgWithDetails.reasoning_details)) {
					// Build the assistant message with reasoning_details
					let assistantContent: Anthropic.Messages.MessageParam["content"]

					if (contentArray.length === 0) {
						assistantContent = ""
					} else if (contentArray.length === 1 && contentArray[0].type === "text") {
						assistantContent = (contentArray[0] as Anthropic.Messages.TextBlockParam).text
					} else {
						assistantContent = contentArray
					}

					// Create message with reasoning_details property
					cleanConversationHistory.push({
						role: "assistant",
						content: assistantContent,
						reasoning_details: msgWithDetails.reasoning_details,
					} as any)

					continue
				}

				// Embedded reasoning: encrypted (send) or plain text (skip)
				const hasEncryptedReasoning =
					first && (first as any).type === "reasoning" && typeof (first as any).encrypted_content === "string"
				const hasPlainTextReasoning =
					first && (first as any).type === "reasoning" && typeof (first as any).text === "string"

				if (hasEncryptedReasoning) {
					const reasoningBlock = first as any

					// Send as separate reasoning item (OpenAI Native)
					cleanConversationHistory.push({
						type: "reasoning",
						summary: reasoningBlock.summary ?? [],
						encrypted_content: reasoningBlock.encrypted_content,
						...(reasoningBlock.id ? { id: reasoningBlock.id } : {}),
					})

					// Send assistant message without reasoning
					let assistantContent: Anthropic.Messages.MessageParam["content"]

					if (rest.length === 0) {
						assistantContent = ""
					} else if (rest.length === 1 && rest[0].type === "text") {
						assistantContent = (rest[0] as Anthropic.Messages.TextBlockParam).text
					} else {
						assistantContent = rest
					}

					cleanConversationHistory.push({
						role: "assistant",
						content: assistantContent,
					} satisfies Anthropic.Messages.MessageParam)

					continue
				} else if (hasPlainTextReasoning) {
					// Check if the model's preserveReasoning flag is set
					// If true, include the reasoning block in API requests
					// If false/undefined, strip it out (stored for history only, not sent back to API)
					const shouldPreserveForApi = this.api.getModel().info.preserveReasoning === true

					let assistantContent: Anthropic.Messages.MessageParam["content"]

					if (shouldPreserveForApi) {
						// Include reasoning block in the content sent to API
						assistantContent = contentArray
					} else {
						// Strip reasoning out - stored for history only, not sent back to API
						if (rest.length === 0) {
							assistantContent = ""
						} else if (rest.length === 1 && rest[0].type === "text") {
							assistantContent = (rest[0] as Anthropic.Messages.TextBlockParam).text
						} else {
							assistantContent = rest
						}
					}

					cleanConversationHistory.push({
						role: "assistant",
						content: assistantContent,
					} satisfies Anthropic.Messages.MessageParam)

					continue
				}
			}

			// Default path for regular messages (no embedded reasoning)
			if (msg.role) {
				cleanConversationHistory.push({
					role: msg.role,
					content: msg.content as Anthropic.Messages.ContentBlockParam[] | string,
				})
			}
		}

		return cleanConversationHistory
	}
	public async checkpointRestore(options: CheckpointRestoreOptions) {
		return checkpointRestore(this, options)
	}

	public async checkpointDiff(options: CheckpointDiffOptions) {
		return checkpointDiff(this, options)
	}

	// Metrics

	public combineMessages(messages: ClineMessage[]) {
		return combineApiRequests(combineCommandSequences(messages))
	}

	public getTokenUsage(): TokenUsage {
		return getApiMetrics(this.combineMessages(this.clineMessages.slice(1)))
	}
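	// Tool usage tracking: recordToolUsage increments a tool's attempt count, while
	// recordToolError increments its failure count and, when an error message is provided,
	// also emits a TaskToolFailed event for listeners.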
	public recordToolUsage(toolName: ToolName) {
		if (!this.toolUsage[toolName]) {
			this.toolUsage[toolName] = { attempts: 0, failures: 0 }
		}

		this.toolUsage[toolName].attempts++
	}

	public recordToolError(toolName: ToolName, error?: string) {
		if (!this.toolUsage[toolName]) {
			this.toolUsage[toolName] = { attempts: 0, failures: 0 }
		}

		this.toolUsage[toolName].failures++

		if (error) {
			this.emit(RooCodeEventName.TaskToolFailed, this.taskId, toolName, error)
		}
	}

	// Getters
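	/**
	 * Derives the task status from the pending ask state, checked in priority order:
	 * an interactive ask wins over a resumable ask, which wins over an idle ask;
	 * with no pending ask the task is considered Running.
	 */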
	public get taskStatus(): TaskStatus {
		if (this.interactiveAsk) {
			return TaskStatus.Interactive
		}

		if (this.resumableAsk) {
			return TaskStatus.Resumable
		}

		if (this.idleAsk) {
			return TaskStatus.Idle
		}

		return TaskStatus.Running
	}

	public get taskAsk(): ClineMessage | undefined {
		return this.idleAsk || this.resumableAsk || this.interactiveAsk
	}

	public get queuedMessages(): QueuedMessage[] {
		return this.messageQueueService.messages
	}
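	/**
	 * Returns the cached token usage snapshot when one has been taken; otherwise computes
	 * usage from the current messages, caches it along with the timestamp of the latest
	 * message, and returns the fresh snapshot.
	 */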
	public get tokenUsage(): TokenUsage | undefined {
		if (this.tokenUsageSnapshot && this.tokenUsageSnapshotAt) {
			return this.tokenUsageSnapshot
		}

		this.tokenUsageSnapshot = this.getTokenUsage()
		this.tokenUsageSnapshotAt = this.clineMessages.at(-1)?.ts

		return this.tokenUsageSnapshot
	}

	public get cwd() {
		return this.workspacePath
	}

	/**
	 * Get the tool protocol locked to this task.
	 * Returns undefined only if the task hasn't been fully initialized yet.
	 *
	 * @see {@link _taskToolProtocol} for lifecycle details
	 */
	public get taskToolProtocol() {
		return this._taskToolProtocol
	}

	/**
	 * Provides convenient access to high-level message operations.
	 * Uses lazy initialization - the MessageManager is only created when first accessed.
	 * Subsequent accesses return the same cached instance.
	 *
	 * ## Important: Single Coordination Point
	 *
	 * **All MessageManager operations must go through this getter** rather than
	 * instantiating `new MessageManager(task)` directly. This ensures:
	 * - A single shared instance for consistent behavior
	 * - Centralized coordination of all rewind/message operations
	 * - Ability to add internal state or instrumentation in the future
	 *
	 * @example
	 * ```typescript
	 * // Correct: Use the getter
	 * await task.messageManager.rewindToTimestamp(ts)
	 *
	 * // Incorrect: Do NOT create new instances directly
	 * // const manager = new MessageManager(task) // Don't do this!
	 * ```
	 */
	get messageManager(): MessageManager {
		if (!this._messageManager) {
			this._messageManager = new MessageManager(this)
		}

		return this._messageManager
	}

	/**
	 * Broadcast browser session updates to the browser panel (if open)
	 */
	private broadcastBrowserSessionUpdate(): void {
		const provider = this.providerRef.deref()

		if (!provider) {
			return
		}

		try {
			const { BrowserSessionPanelManager } = require("../webview/BrowserSessionPanelManager")
			const panelManager = BrowserSessionPanelManager.getInstance(provider)

			// Get browser session messages
			const browserSessionStartIndex = this.clineMessages.findIndex(
				(m) =>
					m.ask === "browser_action_launch" ||
					(m.say === "browser_session_status" && m.text?.includes("opened")),
			)
			const browserSessionMessages =
				browserSessionStartIndex !== -1 ? this.clineMessages.slice(browserSessionStartIndex) : []
			const isBrowserSessionActive = this.browserSession?.isSessionActive() ?? false

			// Update the panel asynchronously
			panelManager.updateBrowserSession(browserSessionMessages, isBrowserSessionActive).catch((error: Error) => {
				console.error("Failed to broadcast browser session update:", error)
			})
		} catch (error) {
			// Silently fail if panel manager is not available
			console.debug("Browser panel not available for update:", error)
		}
	}
	/**
	 * Process any queued messages by dequeuing and submitting them.
	 * This ensures that queued user messages are sent when appropriate,
	 * preventing them from getting stuck in the queue.
	 */
	public processQueuedMessages(): void {
		try {
			if (!this.messageQueueService.isEmpty()) {
				const queued = this.messageQueueService.dequeueMessage()

				if (queued) {
					setTimeout(() => {
						this.submitUserMessage(queued.text, queued.images).catch((err) =>
							console.error(`[Task] Failed to submit queued message:`, err),
						)
					}, 0)
				}
			}
		} catch (e) {
			console.error(`[Task] Queue processing error:`, e)
		}
	}
}