
fix: use safeWriteJson for all JSON file writes with race condition fix (#4733)

Co-authored-by: Eric Wheeler <[email protected]>
Co-authored-by: Daniel Riccio <[email protected]>
KJ7LNW 6 months ago
commit 8455909809

+ 6 - 0
.roo/rules-code/use-safeWriteJson.md

@@ -0,0 +1,6 @@
+# JSON File Writing Must Be Atomic
+
+- You MUST use `safeWriteJson(filePath: string, data: any): Promise<void>` from `src/utils/safeWriteJson.ts` instead of `JSON.stringify` combined with file-write calls such as `fs.writeFile`
+- `safeWriteJson` creates parent directories if necessary, so do not call `mkdir` before calling it
+- `safeWriteJson` prevents data corruption through locked, atomic writes, and it streams the serialization to minimize memory footprint
+- Test files are exempt from this rule
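
A minimal before/after sketch of the rule above (the `settingsPath` and `settings` values here are hypothetical, loosely modeled on the export-settings call site in this commit):

	import { safeWriteJson } from "../utils/safeWriteJson"

	const settingsPath = "/mock/path/roo-code-settings.json" // hypothetical path, for illustration only
	const settings = { providerProfiles: {}, globalSettings: {} }

	// Before (non-atomic; a crash or a concurrent writer can leave a corrupt file):
	//   await fs.writeFile(settingsPath, JSON.stringify(settings, null, 2), "utf-8")

	// After (locked, atomic, streamed; parent directories are created automatically):
	await safeWriteJson(settingsPath, settings)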

+ 66 - 1
pnpm-lock.yaml

@@ -699,6 +699,9 @@ importers:
       pretty-bytes:
         specifier: ^7.0.0
         version: 7.0.0
+      proper-lockfile:
+        specifier: ^4.1.2
+        version: 4.1.2
       ps-tree:
         specifier: ^1.2.0
         version: 1.2.0
@@ -726,6 +729,9 @@ importers:
       sound-play:
         specifier: ^1.1.0
         version: 1.1.0
+      stream-json:
+        specifier: ^1.8.0
+        version: 1.9.1
       string-similarity:
         specifier: ^4.0.4
         version: 4.0.4
@@ -802,9 +808,15 @@ importers:
       '@types/node-ipc':
         specifier: ^9.2.3
         version: 9.2.3
+      '@types/proper-lockfile':
+        specifier: ^4.1.4
+        version: 4.1.4
       '@types/ps-tree':
         specifier: ^1.1.6
         version: 1.1.6
+      '@types/stream-json':
+        specifier: ^1.7.8
+        version: 1.7.8
       '@types/string-similarity':
         specifier: ^4.0.2
         version: 4.0.2
@@ -3850,6 +3862,9 @@ packages:
   '@types/[email protected]':
     resolution: {integrity: sha512-gNMvNH49DJ7OJYv+KAKn0Xp45p8PLl6zo2YnvDIbTd4J6MER2BmWN49TG7n9LvkyihINxeKW8+3bfS2yDC9dzQ==}
 
+  '@types/[email protected]':
+    resolution: {integrity: sha512-uo2ABllncSqg9F1D4nugVl9v93RmjxF6LJzQLMLDdPaXCUIDPeOJ21Gbqi43xNKzBi/WQ0Q0dICqufzQbMjipQ==}
+
   '@types/[email protected]':
     resolution: {integrity: sha512-PtrlVaOaI44/3pl3cvnlK+GxOM3re2526TJvPvh7W+keHIXdV4TE0ylpPBAcvFQCbGitaTXwL9u+RF7qtVeazQ==}
 
@@ -3861,12 +3876,21 @@ packages:
   '@types/[email protected]':
     resolution: {integrity: sha512-/LDXMQh55EzZQ0uVAZmKKhfENivEvWz6E+EYzh+/MCjMhNsotd+ZHhBGIjFDTi6+fz0OhQQQLbTgdQIxxCsC0w==}
 
+  '@types/[email protected]':
+    resolution: {integrity: sha512-3xSjTp3v03X/lSQLkczaN9UIEwJMoMCA1+Nb5HfbJEQWogdeQIyVtTvxPXDQjZ5zws8rFQfVfRdz03ARihPJgw==}
+
   '@types/[email protected]':
     resolution: {integrity: sha512-+UE8GAGRPbJVQDdxi16dgadcBfQ+KG2vgZhV1+3A1XmHbmwcdwhCUwIdy+d3pAGrbvgRoVSjeI9vOWyq376Yzw==}
 
   '@types/[email protected]':
     resolution: {integrity: sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==}
 
+  '@types/[email protected]':
+    resolution: {integrity: sha512-guDyAl6s/CAzXUOWpGK2bHvdiopLIwpGu8v10+lb9hnQOyo4oj/ZUQFOvqFjKGsE3wJP1fpIesCcMvbXuWsqOg==}
+
+  '@types/[email protected]':
+    resolution: {integrity: sha512-MU1OB1eFLcYWd1LjwKXrxdoPtXSRzRmAnnxs4Js/ayB5O/NvHraWwuOaqMWIebpYwM6khFlsJOHEhI9xK/ab4Q==}
+
   '@types/[email protected]':
     resolution: {integrity: sha512-LkJQ/jsXtCVMK+sKYAmX/8zEq+/46f1PTQw7YtmQwb74jemS1SlNLmARM2Zml9DgdDTWKAtc5L13WorpHPDjDA==}
 
@@ -7943,6 +7967,9 @@ packages:
     resolution: {integrity: sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag==}
     engines: {node: '>= 8'}
 
+  [email protected]:
+    resolution: {integrity: sha512-TjNPblN4BwAWMXU8s9AEz4JmQxnD1NNL7bNOY/AKUzyamc379FWASUhc/K1pL2noVb+XmZKLL68cjzLsiOAMaA==}
+
   [email protected]:
     resolution: {integrity: sha512-YUHSPk+A30YPv+0Qf8i9Mbfe/C0hdPXk1s1jPVToV8pk8BQtpw10ct89Eo7OWkutrwqvT0eicAxlOg3dOAu8JA==}
 
@@ -8278,6 +8305,10 @@ packages:
     resolution: {integrity: sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA==}
     engines: {node: '>=18'}
 
+  [email protected]:
+    resolution: {integrity: sha512-9LkiTwjUh6rT555DtE9rTX+BKByPfrMzEAtnlEtdEwr3Nkffwiihqe2bWADg+OQRjt9gl6ICdmB/ZFDCGAtSow==}
+    engines: {node: '>= 4'}
+
   [email protected]:
     resolution: {integrity: sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==}
     engines: {iojs: '>=1.0.0', node: '>=0.10.0'}
@@ -8609,9 +8640,15 @@ packages:
     resolution: {integrity: sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ==}
     engines: {node: '>=18'}
 
+  [email protected]:
+    resolution: {integrity: sha512-1TJmBx6aSWqZ4tx7aTpBDXK0/e2hhcNSTV8+CbFJtDjbb+I1mZ8lHit0Grw9GRT+6JbIrrDd8esncgBi8aBXGA==}
+
   [email protected]:
     resolution: {integrity: sha512-rT00SPnTVyRsaSz5zgSPma/aHSOic5U1prhYdRy5HS2kTZviFpmDgzilbtsJsxiroqACmayynDN/9VzIbX5DOw==}
 
+  [email protected]:
+    resolution: {integrity: sha512-uWkjJ+2Nt/LO9Z/JyKZbMusL8Dkh97uUBTv3AJQ74y07lVahLY4eEFsPsE97pxYBwr8nnjMAIch5eqI0gPShyw==}
+
   [email protected]:
     resolution: {integrity: sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg==}
     engines: {node: '>=10.0.0'}
@@ -13022,7 +13059,6 @@ snapshots:
   '@types/[email protected]':
     dependencies:
       undici-types: 6.21.0
-    optional: true
 
   '@types/[email protected]':
     dependencies:
@@ -13030,6 +13066,10 @@ snapshots:
 
   '@types/[email protected]': {}
 
+  '@types/[email protected]':
+    dependencies:
+      '@types/retry': 0.12.5
+
   '@types/[email protected]': {}
 
   '@types/[email protected](@types/[email protected])':
@@ -13041,10 +13081,21 @@ snapshots:
       '@types/prop-types': 15.7.14
       csstype: 3.1.3
 
+  '@types/[email protected]': {}
+
   '@types/[email protected]': {}
 
   '@types/[email protected]': {}
 
+  '@types/[email protected]':
+    dependencies:
+      '@types/node': 20.19.1
+
+  '@types/[email protected]':
+    dependencies:
+      '@types/node': 20.19.1
+      '@types/stream-chain': 2.1.0
+
   '@types/[email protected]': {}
 
   '@types/[email protected]': {}
@@ -17757,6 +17808,12 @@ snapshots:
 
   [email protected]: {}
 
+  [email protected]:
+    dependencies:
+      graceful-fs: 4.2.11
+      retry: 0.12.0
+      signal-exit: 3.0.7
+
   [email protected]:
     dependencies:
       xtend: 4.0.2
@@ -18231,6 +18288,8 @@ snapshots:
       onetime: 7.0.0
       signal-exit: 4.1.0
 
+  [email protected]: {}
+
   [email protected]: {}
 
   [email protected]: {}
@@ -18640,10 +18699,16 @@ snapshots:
 
   [email protected]: {}
 
+  [email protected]: {}
+
   [email protected]:
     dependencies:
       duplexer: 0.1.2
 
+  [email protected]:
+    dependencies:
+      stream-chain: 2.2.5
+
   [email protected]: {}
 
   [email protected]:

+ 2 - 1
src/api/providers/fetchers/modelCache.ts

@@ -2,6 +2,7 @@ import * as path from "path"
 import fs from "fs/promises"
 
 import NodeCache from "node-cache"
+import { safeWriteJson } from "../../../utils/safeWriteJson"
 
 import { ContextProxy } from "../../../core/config/ContextProxy"
 import { getCacheDirectoryPath } from "../../../utils/storage"
@@ -22,7 +23,7 @@ const memoryCache = new NodeCache({ stdTTL: 5 * 60, checkperiod: 5 * 60 })
 async function writeModels(router: RouterName, data: ModelRecord) {
 	const filename = `${router}_models.json`
 	const cacheDir = await getCacheDirectoryPath(ContextProxy.instance.globalStorageUri.fsPath)
-	await fs.writeFile(path.join(cacheDir, filename), JSON.stringify(data))
+	await safeWriteJson(path.join(cacheDir, filename), data)
 }
 
 async function readModels(router: RouterName): Promise<ModelRecord | undefined> {

+ 2 - 1
src/api/providers/fetchers/modelEndpointCache.ts

@@ -2,6 +2,7 @@ import * as path from "path"
 import fs from "fs/promises"
 
 import NodeCache from "node-cache"
+import { safeWriteJson } from "../../../utils/safeWriteJson"
 import sanitize from "sanitize-filename"
 
 import { ContextProxy } from "../../../core/config/ContextProxy"
@@ -18,7 +19,7 @@ const getCacheKey = (router: RouterName, modelId: string) => sanitize(`${router}
 async function writeModelEndpoints(key: string, data: ModelRecord) {
 	const filename = `${key}_endpoints.json`
 	const cacheDir = await getCacheDirectoryPath(ContextProxy.instance.globalStorageUri.fsPath)
-	await fs.writeFile(path.join(cacheDir, filename), JSON.stringify(data, null, 2))
+	await safeWriteJson(path.join(cacheDir, filename), data)
 }
 
 async function readModelEndpoints(key: string): Promise<ModelRecord | undefined> {

+ 17 - 13
src/core/config/__tests__/importExport.spec.ts

@@ -12,6 +12,7 @@ import { importSettings, exportSettings } from "../importExport"
 import { ProviderSettingsManager } from "../ProviderSettingsManager"
 import { ContextProxy } from "../ContextProxy"
 import { CustomModesManager } from "../CustomModesManager"
+import { safeWriteJson } from "../../../utils/safeWriteJson"
 
 import type { Mock } from "vitest"
 
@@ -43,6 +44,8 @@ vi.mock("os", () => ({
 	homedir: vi.fn(() => "/mock/home"),
 }))
 
+vi.mock("../../../utils/safeWriteJson")
+
 describe("importExport", () => {
 	let mockProviderSettingsManager: ReturnType<typeof vi.mocked<ProviderSettingsManager>>
 	let mockContextProxy: ReturnType<typeof vi.mocked<ContextProxy>>
@@ -384,11 +387,10 @@ describe("importExport", () => {
 			expect(mockContextProxy.export).toHaveBeenCalled()
 			expect(fs.mkdir).toHaveBeenCalledWith("/mock/path", { recursive: true })
 
-			expect(fs.writeFile).toHaveBeenCalledWith(
-				"/mock/path/roo-code-settings.json",
-				JSON.stringify({ providerProfiles: mockProviderProfiles, globalSettings: mockGlobalSettings }, null, 2),
-				"utf-8",
-			)
+			expect(safeWriteJson).toHaveBeenCalledWith("/mock/path/roo-code-settings.json", {
+				providerProfiles: mockProviderProfiles,
+				globalSettings: mockGlobalSettings,
+			})
 		})
 
 		it("should include globalSettings when allowedMaxRequests is null", async () => {
@@ -417,11 +419,10 @@ describe("importExport", () => {
 				contextProxy: mockContextProxy,
 			})
 
-			expect(fs.writeFile).toHaveBeenCalledWith(
-				"/mock/path/roo-code-settings.json",
-				JSON.stringify({ providerProfiles: mockProviderProfiles, globalSettings: mockGlobalSettings }, null, 2),
-				"utf-8",
-			)
+			expect(safeWriteJson).toHaveBeenCalledWith("/mock/path/roo-code-settings.json", {
+				providerProfiles: mockProviderProfiles,
+				globalSettings: mockGlobalSettings,
+			})
 		})
 
 		it("should handle errors during the export process", async () => {
@@ -436,7 +437,8 @@ describe("importExport", () => {
 			})
 
 			mockContextProxy.export.mockResolvedValue({ mode: "code" })
-			;(fs.writeFile as Mock).mockRejectedValue(new Error("Write error"))
+			// Simulate an error during the safeWriteJson operation
+			;(safeWriteJson as Mock).mockRejectedValueOnce(new Error("Safe write error"))
 
 			await exportSettings({
 				providerSettingsManager: mockProviderSettingsManager,
@@ -447,8 +449,10 @@ describe("importExport", () => {
 			expect(mockProviderSettingsManager.export).toHaveBeenCalled()
 			expect(mockContextProxy.export).toHaveBeenCalled()
 			expect(fs.mkdir).toHaveBeenCalledWith("/mock/path", { recursive: true })
-			expect(fs.writeFile).toHaveBeenCalled()
+			expect(safeWriteJson).toHaveBeenCalled() // safeWriteJson is called, but it will throw
 			// The error is caught and the function exits silently.
+			// Optionally, ensure no error message was shown if that's part of "silent"
+			// expect(vscode.window.showErrorMessage).not.toHaveBeenCalled();
 		})
 
 		it("should handle errors during directory creation", async () => {
@@ -474,7 +478,7 @@ describe("importExport", () => {
 			expect(mockProviderSettingsManager.export).toHaveBeenCalled()
 			expect(mockContextProxy.export).toHaveBeenCalled()
 			expect(fs.mkdir).toHaveBeenCalled()
-			expect(fs.writeFile).not.toHaveBeenCalled() // Should not be called since mkdir failed.
+			expect(safeWriteJson).not.toHaveBeenCalled() // Should not be called since mkdir failed.
 		})
 
 		it("should use the correct default save location", async () => {

+ 2 - 1
src/core/config/importExport.ts

@@ -1,3 +1,4 @@
+import { safeWriteJson } from "../../utils/safeWriteJson"
 import os from "os"
 import * as path from "path"
 import fs from "fs/promises"
@@ -116,6 +117,6 @@ export const exportSettings = async ({ providerSettingsManager, contextProxy }:
 
 		const dirname = path.dirname(uri.fsPath)
 		await fs.mkdir(dirname, { recursive: true })
-		await fs.writeFile(uri.fsPath, JSON.stringify({ providerProfiles, globalSettings }, null, 2), "utf-8")
+		await safeWriteJson(uri.fsPath, { providerProfiles, globalSettings })
 	} catch (e) {}
 }

+ 2 - 1
src/core/context-tracking/FileContextTracker.ts

@@ -1,3 +1,4 @@
+import { safeWriteJson } from "../../utils/safeWriteJson"
 import * as path from "path"
 import * as vscode from "vscode"
 import { getTaskDirectoryPath } from "../../utils/storage"
@@ -130,7 +131,7 @@ export class FileContextTracker {
 			const globalStoragePath = this.getContextProxy()!.globalStorageUri.fsPath
 			const taskDir = await getTaskDirectoryPath(globalStoragePath, taskId)
 			const filePath = path.join(taskDir, GlobalFileNames.taskMetadata)
-			await fs.writeFile(filePath, JSON.stringify(metadata, null, 2))
+			await safeWriteJson(filePath, metadata)
 		} catch (error) {
 			console.error("Failed to save task metadata:", error)
 		}

+ 2 - 1
src/core/task-persistence/apiMessages.ts

@@ -1,3 +1,4 @@
+import { safeWriteJson } from "../../utils/safeWriteJson"
 import * as path from "path"
 import * as fs from "fs/promises"
 
@@ -78,5 +79,5 @@ export async function saveApiMessages({
 }) {
 	const taskDir = await getTaskDirectoryPath(globalStoragePath, taskId)
 	const filePath = path.join(taskDir, GlobalFileNames.apiConversationHistory)
-	await fs.writeFile(filePath, JSON.stringify(messages))
+	await safeWriteJson(filePath, messages)
 }

+ 2 - 1
src/core/task-persistence/taskMessages.ts

@@ -1,3 +1,4 @@
+import { safeWriteJson } from "../../utils/safeWriteJson"
 import * as path from "path"
 import * as fs from "fs/promises"
 
@@ -37,5 +38,5 @@ export type SaveTaskMessagesOptions = {
 export async function saveTaskMessages({ messages, taskId, globalStoragePath }: SaveTaskMessagesOptions) {
 	const taskDir = await getTaskDirectoryPath(globalStoragePath, taskId)
 	const filePath = path.join(taskDir, GlobalFileNames.uiMessages)
-	await fs.writeFile(filePath, JSON.stringify(messages))
+	await safeWriteJson(filePath, messages)
 }

+ 5 - 5
src/core/webview/__tests__/ClineProvider.spec.ts

@@ -13,6 +13,7 @@ import { experimentDefault } from "../../../shared/experiments"
 import { setTtsEnabled } from "../../../utils/tts"
 import { ContextProxy } from "../../config/ContextProxy"
 import { Task, TaskOptions } from "../../task/Task"
+import { safeWriteJson } from "../../../utils/safeWriteJson"
 
 import { ClineProvider } from "../ClineProvider"
 
@@ -43,6 +44,8 @@ vi.mock("axios", () => ({
 	post: vi.fn(),
 }))
 
+vi.mock("../../../utils/safeWriteJson")
+
 vi.mock("@modelcontextprotocol/sdk/types.js", () => ({
 	CallToolResultSchema: {},
 	ListResourcesResultSchema: {},
@@ -1989,11 +1992,8 @@ describe("Project MCP Settings", () => {
 		// Check that fs.mkdir was called with the correct path
 		expect(mockedFs.mkdir).toHaveBeenCalledWith("/test/workspace/.roo", { recursive: true })
 
-		// Check that fs.writeFile was called with default content
-		expect(mockedFs.writeFile).toHaveBeenCalledWith(
-			"/test/workspace/.roo/mcp.json",
-			JSON.stringify({ mcpServers: {} }, null, 2),
-		)
+		// Verify file was created with default content
+		expect(safeWriteJson).toHaveBeenCalledWith("/test/workspace/.roo/mcp.json", { mcpServers: {} })
 
 		// Check that openFile was called
 		expect(openFileSpy).toHaveBeenCalledWith("/test/workspace/.roo/mcp.json")

+ 2 - 1
src/core/webview/webviewMessageHandler.ts

@@ -1,3 +1,4 @@
+import { safeWriteJson } from "../../utils/safeWriteJson"
 import * as path from "path"
 import fs from "fs/promises"
 import pWaitFor from "p-wait-for"
@@ -615,7 +616,7 @@ export const webviewMessageHandler = async (
 				const exists = await fileExistsAtPath(mcpPath)
 
 				if (!exists) {
-					await fs.writeFile(mcpPath, JSON.stringify({ mcpServers: {} }, null, 2))
+					await safeWriteJson(mcpPath, { mcpServers: {} })
 				}
 
 				await openFile(mcpPath)

+ 4 - 0
src/package.json

@@ -394,6 +394,7 @@
 		"pdf-parse": "^1.1.1",
 		"pkce-challenge": "^5.0.0",
 		"pretty-bytes": "^7.0.0",
+		"proper-lockfile": "^4.1.2",
 		"ps-tree": "^1.2.0",
 		"puppeteer-chromium-resolver": "^24.0.0",
 		"puppeteer-core": "^23.4.0",
@@ -403,6 +404,7 @@
 		"serialize-error": "^12.0.0",
 		"simple-git": "^3.27.0",
 		"sound-play": "^1.1.0",
+		"stream-json": "^1.8.0",
 		"string-similarity": "^4.0.4",
 		"strip-ansi": "^7.1.0",
 		"strip-bom": "^5.0.0",
@@ -430,7 +432,9 @@
 		"@types/node": "20.x",
 		"@types/node-cache": "^4.1.3",
 		"@types/node-ipc": "^9.2.3",
+		"@types/proper-lockfile": "^4.1.4",
 		"@types/ps-tree": "^1.1.6",
+		"@types/stream-json": "^1.7.8",
 		"@types/string-similarity": "^4.0.2",
 		"@types/tmp": "^0.2.6",
 		"@types/turndown": "^5.0.5",

+ 18 - 12
src/services/code-index/__tests__/cache-manager.spec.ts

@@ -4,6 +4,14 @@ import { createHash } from "crypto"
 import debounce from "lodash.debounce"
 import { CacheManager } from "../cache-manager"
 
+// Mock safeWriteJson utility
+vitest.mock("../../../utils/safeWriteJson", () => ({
+	safeWriteJson: vitest.fn().mockResolvedValue(undefined),
+}))
+
+// Import the mocked version
+import { safeWriteJson } from "../../../utils/safeWriteJson"
+
 // Mock vscode
 vitest.mock("vscode", () => ({
 	Uri: {
@@ -89,7 +97,7 @@ describe("CacheManager", () => {
 			cacheManager.updateHash(filePath, hash)
 
 			expect(cacheManager.getHash(filePath)).toBe(hash)
-			expect(vscode.workspace.fs.writeFile).toHaveBeenCalled()
+			expect(safeWriteJson).toHaveBeenCalled()
 		})
 
 		it("should delete hash and trigger save", () => {
@@ -100,7 +108,7 @@ describe("CacheManager", () => {
 			cacheManager.deleteHash(filePath)
 
 			expect(cacheManager.getHash(filePath)).toBeUndefined()
-			expect(vscode.workspace.fs.writeFile).toHaveBeenCalled()
+			expect(safeWriteJson).toHaveBeenCalled()
 		})
 
 		it("should return shallow copy of hashes", () => {
@@ -125,18 +133,16 @@ describe("CacheManager", () => {
 
 			cacheManager.updateHash(filePath, hash)
 
-			expect(vscode.workspace.fs.writeFile).toHaveBeenCalledWith(mockCachePath, expect.any(Uint8Array))
+			expect(safeWriteJson).toHaveBeenCalledWith(mockCachePath.fsPath, expect.any(Object))
 
 			// Verify the saved data
-			const savedData = JSON.parse(
-				Buffer.from((vscode.workspace.fs.writeFile as Mock).mock.calls[0][1]).toString(),
-			)
+			const savedData = (safeWriteJson as Mock).mock.calls[0][1]
 			expect(savedData).toEqual({ [filePath]: hash })
 		})
 
 		it("should handle save errors gracefully", async () => {
 			const consoleErrorSpy = vitest.spyOn(console, "error").mockImplementation(() => {})
-			;(vscode.workspace.fs.writeFile as Mock).mockRejectedValue(new Error("Save failed"))
+			;(safeWriteJson as Mock).mockRejectedValue(new Error("Save failed"))
 
 			cacheManager.updateHash("test.ts", "hash")
 
@@ -153,19 +159,19 @@ describe("CacheManager", () => {
 		it("should clear cache file and reset state", async () => {
 			cacheManager.updateHash("test.ts", "hash")
 
-			// Reset the mock to ensure writeFile succeeds for clearCacheFile
-			;(vscode.workspace.fs.writeFile as Mock).mockClear()
-			;(vscode.workspace.fs.writeFile as Mock).mockResolvedValue(undefined)
+			// Reset the mock to ensure safeWriteJson succeeds for clearCacheFile
+			;(safeWriteJson as Mock).mockClear()
+			;(safeWriteJson as Mock).mockResolvedValue(undefined)
 
 			await cacheManager.clearCacheFile()
 
-			expect(vscode.workspace.fs.writeFile).toHaveBeenCalledWith(mockCachePath, Buffer.from("{}"))
+			expect(safeWriteJson).toHaveBeenCalledWith(mockCachePath.fsPath, {})
 			expect(cacheManager.getAllHashes()).toEqual({})
 		})
 
 		it("should handle clear errors gracefully", async () => {
 			const consoleErrorSpy = vitest.spyOn(console, "error").mockImplementation(() => {})
-			;(vscode.workspace.fs.writeFile as Mock).mockRejectedValue(new Error("Save failed"))
+			;(safeWriteJson as Mock).mockRejectedValue(new Error("Save failed"))
 
 			await cacheManager.clearCacheFile()
 

+ 3 - 2
src/services/code-index/cache-manager.ts

@@ -2,6 +2,7 @@ import * as vscode from "vscode"
 import { createHash } from "crypto"
 import { ICacheManager } from "./interfaces/cache"
 import debounce from "lodash.debounce"
+import { safeWriteJson } from "../../utils/safeWriteJson"
 
 /**
  * Manages the cache for code indexing
@@ -46,7 +47,7 @@ export class CacheManager implements ICacheManager {
 	 */
 	private async _performSave(): Promise<void> {
 		try {
-			await vscode.workspace.fs.writeFile(this.cachePath, Buffer.from(JSON.stringify(this.fileHashes, null, 2)))
+			await safeWriteJson(this.cachePath.fsPath, this.fileHashes)
 		} catch (error) {
 			console.error("Failed to save cache:", error)
 		}
@@ -57,7 +58,7 @@ export class CacheManager implements ICacheManager {
 	 */
 	async clearCacheFile(): Promise<void> {
 		try {
-			await vscode.workspace.fs.writeFile(this.cachePath, Buffer.from("{}"))
+			await safeWriteJson(this.cachePath.fsPath, {})
 			this.fileHashes = {}
 		} catch (error) {
 			console.error("Failed to clear cache file:", error, this.cachePath)

+ 38 - 0
src/services/mcp/__tests__/McpHub.spec.ts

@@ -5,6 +5,43 @@ import { ServerConfigSchema, McpHub } from "../McpHub"
 import fs from "fs/promises"
 import { vi, Mock } from "vitest"
 
+// Mock fs/promises before importing anything that uses it
+vi.mock("fs/promises", () => ({
+	default: {
+		access: vi.fn().mockResolvedValue(undefined),
+		writeFile: vi.fn().mockResolvedValue(undefined),
+		readFile: vi.fn().mockResolvedValue("{}"),
+		unlink: vi.fn().mockResolvedValue(undefined),
+		rename: vi.fn().mockResolvedValue(undefined),
+		lstat: vi.fn().mockImplementation(() =>
+			Promise.resolve({
+				isDirectory: () => true,
+			}),
+		),
+		mkdir: vi.fn().mockResolvedValue(undefined),
+	},
+	access: vi.fn().mockResolvedValue(undefined),
+	writeFile: vi.fn().mockResolvedValue(undefined),
+	readFile: vi.fn().mockResolvedValue("{}"),
+	unlink: vi.fn().mockResolvedValue(undefined),
+	rename: vi.fn().mockResolvedValue(undefined),
+	lstat: vi.fn().mockImplementation(() =>
+		Promise.resolve({
+			isDirectory: () => true,
+		}),
+	),
+	mkdir: vi.fn().mockResolvedValue(undefined),
+}))
+
+// Mock safeWriteJson
+vi.mock("../../../utils/safeWriteJson", () => ({
+	safeWriteJson: vi.fn(async (filePath, data) => {
+		// Instead of writing to the real file system, just call the fs.writeFile mock.
+		// This avoids the locking and temp-file machinery of the real implementation.
+		return fs.writeFile(filePath, JSON.stringify(data), "utf8")
+	}),
+}))
+
 vi.mock("vscode", () => ({
 	workspace: {
 		createFileSystemWatcher: vi.fn().mockReturnValue({
@@ -56,6 +93,7 @@ describe("McpHub", () => {
 		// Mock console.error to suppress error messages during tests
 		console.error = vi.fn()
 
+
 		const mockUri: Uri = {
 			scheme: "file",
 			authority: "",

+ 480 - 0
src/utils/__tests__/safeWriteJson.test.ts

@@ -0,0 +1,480 @@
+import { vi, describe, test, expect, beforeEach, afterEach, beforeAll, afterAll } from "vitest"
+import * as actualFsPromises from "fs/promises"
+import * as fsSyncActual from "fs"
+import { Writable } from "stream"
+import { safeWriteJson } from "../safeWriteJson"
+import * as path from "path"
+import * as os from "os"
+
+const originalFsPromisesRename = actualFsPromises.rename
+const originalFsPromisesUnlink = actualFsPromises.unlink
+const originalFsPromisesWriteFile = actualFsPromises.writeFile
+const _originalFsPromisesAccess = actualFsPromises.access
+const originalFsPromisesMkdir = actualFsPromises.mkdir
+
+vi.mock("fs/promises", async () => {
+	const actual = await vi.importActual<typeof import("fs/promises")>("fs/promises")
+	// Start with all actual implementations.
+	const mockedFs = { ...actual }
+	// Selectively wrap functions with vi.fn() if they are spied on
+	// or have their implementations changed in tests.
+	// This ensures that other fs.promises functions used by the SUT
+	// (like proper-lockfile's internals) will use their actual implementations.
+	mockedFs.writeFile = vi.fn(actual.writeFile) as any
+	mockedFs.readFile = vi.fn(actual.readFile) as any
+	mockedFs.rename = vi.fn(actual.rename) as any
+	mockedFs.unlink = vi.fn(actual.unlink) as any
+	mockedFs.access = vi.fn(actual.access) as any
+	mockedFs.mkdtemp = vi.fn(actual.mkdtemp) as any
+	mockedFs.rm = vi.fn(actual.rm) as any
+	mockedFs.readdir = vi.fn(actual.readdir) as any
+	mockedFs.mkdir = vi.fn(actual.mkdir) as any
+	// fs.stat and fs.lstat will be available via { ...actual }
+
+	return mockedFs
+})
+
+// Mock the 'fs' module for fsSync.createWriteStream
+vi.mock("fs", async () => {
+	const actualFs = await vi.importActual<typeof import("fs")>("fs")
+	return {
+		...actualFs, // Spread actual implementations
+		createWriteStream: vi.fn(actualFs.createWriteStream) as any, // Default to actual, but mockable
+	}
+})
+
+import * as fs from "fs/promises" // This will now be the mocked version
+
+describe("safeWriteJson", () => {
+	let originalConsoleError: typeof console.error
+
+	beforeAll(() => {
+		// Store original console.error
+		originalConsoleError = console.error
+	})
+
+	afterAll(() => {
+		// Restore original console.error
+		console.error = originalConsoleError
+	})
+
+	let tempDir: string
+	let currentTestFilePath: string
+
+	beforeEach(async () => {
+		// Create a temporary directory for each test
+		tempDir = await fs.mkdtemp(path.join(os.tmpdir(), "safeWriteJson-test-"))
+
+		// Create a unique file path for each test
+		currentTestFilePath = path.join(tempDir, "test-file.json")
+
+		// Pre-create the file with initial content to ensure it exists
+		// This allows proper-lockfile to acquire a lock on an existing file.
+		await fs.writeFile(currentTestFilePath, JSON.stringify({ initial: "content" }))
+	})
+
+	afterEach(async () => {
+		// Clean up the temporary directory after each test
+		await fs.rm(tempDir, { recursive: true, force: true })
+
+		// Reset all mocks to their actual implementations
+		vi.restoreAllMocks()
+	})
+
+	// Helper function to read file content
+	async function readFileContent(filePath: string): Promise<any> {
+		const readContent = await fs.readFile(filePath, "utf-8")
+		return JSON.parse(readContent)
+	}
+
+	// Helper function to check if a file exists
+	async function fileExists(filePath: string): Promise<boolean> {
+		try {
+			await fs.access(filePath)
+			return true
+		} catch {
+			return false
+		}
+	}
+
+	// Success Scenarios
+	// Note: Since we pre-create the file in beforeEach, this test will overwrite it.
+	// If "creation from non-existence" is critical and locking prevents it, safeWriteJson or its locking strategy needs review.
+	test("should successfully write a new file (overwriting initial content from beforeEach)", async () => {
+		const data = { message: "Hello, new world!" }
+
+		await safeWriteJson(currentTestFilePath, data)
+
+		const content = await readFileContent(currentTestFilePath)
+		expect(content).toEqual(data)
+	})
+
+	test("should successfully overwrite an existing file", async () => {
+		const initialData = { message: "Initial content" }
+		const newData = { message: "Updated content" }
+
+		// Write initial data (overwriting the pre-created file from beforeEach)
+		await originalFsPromisesWriteFile(currentTestFilePath, JSON.stringify(initialData))
+
+		await safeWriteJson(currentTestFilePath, newData)
+
+		const content = await readFileContent(currentTestFilePath)
+		expect(content).toEqual(newData)
+	})
+
+	// Failure Scenarios
+	test("should handle failure when writing to tempNewFilePath", async () => {
+		// currentTestFilePath exists due to beforeEach, allowing lock acquisition.
+		const data = { message: "test write failure" }
+
+		const mockErrorStream = new Writable() as any
+		mockErrorStream._write = (_chunk: any, _encoding: any, callback: any) => {
+			callback(new Error("Write stream error"))
+		}
+		// Add missing WriteStream properties
+		mockErrorStream.close = vi.fn()
+		mockErrorStream.bytesWritten = 0
+		mockErrorStream.path = ""
+		mockErrorStream.pending = false
+
+		// Mock createWriteStream to return a stream that errors on write
+		;(fsSyncActual.createWriteStream as any).mockImplementationOnce((_path: any, _options: any) => {
+			return mockErrorStream
+		})
+
+		await expect(safeWriteJson(currentTestFilePath, data)).rejects.toThrow("Write stream error")
+
+		// Verify the original file still exists and is unchanged
+		const exists = await fileExists(currentTestFilePath)
+		expect(exists).toBe(true)
+
+		// Verify content is unchanged (should still have the initial content from beforeEach)
+		const content = await readFileContent(currentTestFilePath)
+		expect(content).toEqual({ initial: "content" })
+	})
+
+	test("should handle failure when renaming filePath to tempBackupFilePath (filePath exists)", async () => {
+		const initialData = { message: "Initial content, should remain" }
+		const newData = { message: "New content, should not be written" }
+
+		// Overwrite the pre-created file with specific initial data
+		await originalFsPromisesWriteFile(currentTestFilePath, JSON.stringify(initialData))
+
+		const renameSpy = vi.spyOn(fs, "rename")
+
+		// Mock rename to fail on the first call (filePath -> tempBackupFilePath)
+		renameSpy.mockImplementationOnce(async () => {
+			throw new Error("Rename to backup failed")
+		})
+
+		await expect(safeWriteJson(currentTestFilePath, newData)).rejects.toThrow("Rename to backup failed")
+
+		// Verify the original file still exists with initial content
+		const content = await readFileContent(currentTestFilePath)
+		expect(content).toEqual(initialData)
+	})
+
+	test("should handle failure when renaming tempNewFilePath to filePath (filePath exists, backup succeeded)", async () => {
+		const initialData = { message: "Initial content, should be restored" }
+		const newData = { message: "New content" }
+
+		// Overwrite the pre-created file with specific initial data
+		await originalFsPromisesWriteFile(currentTestFilePath, JSON.stringify(initialData))
+
+		const renameSpy = vi.spyOn(fs, "rename")
+
+		// Track rename calls
+		let renameCallCount = 0
+
+		// Mock rename to succeed on first call (filePath -> tempBackupFilePath)
+		// and fail on second call (tempNewFilePath -> filePath)
+		renameSpy.mockImplementation(async (oldPath, newPath) => {
+			renameCallCount++
+			if (renameCallCount === 1) {
+				// First call: filePath -> tempBackupFilePath (should succeed)
+				return originalFsPromisesRename(oldPath, newPath)
+			} else if (renameCallCount === 2) {
+				// Second call: tempNewFilePath -> filePath (should fail)
+				throw new Error("Rename from temp to final failed")
+			} else if (renameCallCount === 3) {
+				// Third call: tempBackupFilePath -> filePath (rollback, should succeed)
+				return originalFsPromisesRename(oldPath, newPath)
+			}
+			// Default: use original implementation
+			return originalFsPromisesRename(oldPath, newPath)
+		})
+
+		await expect(safeWriteJson(currentTestFilePath, newData)).rejects.toThrow("Rename from temp to final failed")
+
+		// Verify the file was restored to initial content
+		const content = await readFileContent(currentTestFilePath)
+		expect(content).toEqual(initialData)
+	})
+
+	// Tests for directory creation functionality
+	test("should create parent directory if it doesn't exist", async () => {
+		// Create a path in a non-existent subdirectory of the temp dir
+		const subDir = path.join(tempDir, "new-subdir")
+		const filePath = path.join(subDir, "file.json")
+		const data = { test: "directory creation" }
+
+		// Verify directory doesn't exist
+		await expect(fs.access(subDir)).rejects.toThrow()
+
+		// Write file
+		await safeWriteJson(filePath, data)
+
+		// Verify directory was created
+		await expect(fs.access(subDir)).resolves.toBeUndefined()
+
+		// Verify file was written
+		const content = await readFileContent(filePath)
+		expect(content).toEqual(data)
+	})
+
+	test("should handle multi-level directory creation", async () => {
+		// Create a new non-existent subdirectory path with multiple levels
+		const deepDir = path.join(tempDir, "level1", "level2", "level3")
+		const filePath = path.join(deepDir, "deep-file.json")
+		const data = { nested: "deeply" }
+
+		// Verify none of the directories exist
+		await expect(fs.access(path.join(tempDir, "level1"))).rejects.toThrow()
+
+		// Write file
+		await safeWriteJson(filePath, data)
+
+		// Verify all directories were created
+		await expect(fs.access(path.join(tempDir, "level1"))).resolves.toBeUndefined()
+		await expect(fs.access(path.join(tempDir, "level1", "level2"))).resolves.toBeUndefined()
+		await expect(fs.access(deepDir)).resolves.toBeUndefined()
+
+		// Verify file was written
+		const content = await readFileContent(filePath)
+		expect(content).toEqual(data)
+	})
+
+	test("should handle directory creation permission errors", async () => {
+		// Mock mkdir to simulate a permission error
+		const mkdirSpy = vi.spyOn(fs, "mkdir")
+		mkdirSpy.mockImplementationOnce(async () => {
+			const error = new Error("EACCES: permission denied") as any
+			error.code = "EACCES"
+			throw error
+		})
+
+		const subDir = path.join(tempDir, "forbidden-dir")
+		const filePath = path.join(subDir, "file.json")
+		const data = { test: "permission error" }
+
+		// Should throw the permission error
+		await expect(safeWriteJson(filePath, data)).rejects.toThrow("EACCES: permission denied")
+
+		// Verify directory was not created
+		await expect(fs.access(subDir)).rejects.toThrow()
+	})
+
+	test("should successfully write to a non-existent file in an existing directory", async () => {
+		// Create directory but not the file
+		const subDir = path.join(tempDir, "existing-dir")
+		await fs.mkdir(subDir)
+
+		const filePath = path.join(subDir, "new-file.json")
+		const data = { fresh: "file" }
+
+		// Verify file doesn't exist yet
+		await expect(fs.access(filePath)).rejects.toThrow()
+
+		// Write file
+		await safeWriteJson(filePath, data)
+
+		// Verify file was created with correct content
+		const content = await readFileContent(filePath)
+		expect(content).toEqual(data)
+	})
+
+	test("should handle failure when deleting tempBackupFilePath (filePath exists, all renames succeed)", async () => {
+		const initialData = { message: "Initial content" }
+		const newData = { message: "Successfully written new content" }
+
+		// Overwrite the pre-created file with specific initial data
+		await originalFsPromisesWriteFile(currentTestFilePath, JSON.stringify(initialData))
+
+		const unlinkSpy = vi.spyOn(fs, "unlink")
+
+		// Mock unlink to fail when trying to delete the backup file
+		unlinkSpy.mockImplementationOnce(async () => {
+			throw new Error("Failed to delete backup file")
+		})
+
+		// The write should succeed even if backup deletion fails
+		await safeWriteJson(currentTestFilePath, newData)
+
+		// Verify the new content was written successfully
+		const content = await readFileContent(currentTestFilePath)
+		expect(content).toEqual(newData)
+	})
+
+	// Test for console error suppression during backup deletion
+	test("should suppress console.error when backup deletion fails", async () => {
+		const consoleErrorSpy = vi.spyOn(console, "error").mockImplementation(() => {}) // Suppress console.error
+		const initialData = { message: "Initial" }
+		const newData = { message: "New" }
+
+		await originalFsPromisesWriteFile(currentTestFilePath, JSON.stringify(initialData))
+
+		// Mock unlink to fail when deleting backup files
+		const unlinkSpy = vi.spyOn(fs, "unlink")
+		unlinkSpy.mockImplementation(async (filePath: any) => {
+			if (filePath.toString().includes(".bak_")) {
+				throw new Error("Backup deletion failed")
+			}
+			return originalFsPromisesUnlink(filePath)
+		})
+
+		await safeWriteJson(currentTestFilePath, newData)
+
+		// Verify console.error was called with the expected message
+		expect(consoleErrorSpy).toHaveBeenCalledWith(expect.stringContaining("Successfully wrote"), expect.any(Error))
+
+		consoleErrorSpy.mockRestore()
+		unlinkSpy.mockRestore()
+	})
+
+	// The expected error message might need to change if the mock behaves differently.
+	test("should handle failure when renaming tempNewFilePath to filePath (filePath initially exists)", async () => {
+		// currentTestFilePath exists due to beforeEach.
+		const initialData = { message: "Initial content" }
+		const newData = { message: "New content" }
+
+		await originalFsPromisesWriteFile(currentTestFilePath, JSON.stringify(initialData))
+
+		const renameSpy = vi.spyOn(fs, "rename")
+		// Mock rename to fail on the second call (tempNewFilePath -> filePath)
+		// This test assumes that the first rename (filePath -> tempBackupFilePath) succeeds,
+		// which is the expected behavior when the file exists.
+		// The existing complex mock in `test("should handle failure when renaming tempNewFilePath to filePath (filePath exists, backup succeeded)"`
+		// might be more relevant or adaptable here.
+
+		let renameCallCount = 0
+		renameSpy.mockImplementation(async (oldPath, newPath) => {
+			renameCallCount++
+			if (renameCallCount === 2) {
+				// Second call: tempNewFilePath -> filePath (should fail)
+				throw new Error("Rename failed")
+			}
+			// For all other calls, use the original implementation
+			return originalFsPromisesRename(oldPath, newPath)
+		})
+
+		await expect(safeWriteJson(currentTestFilePath, newData)).rejects.toThrow("Rename failed")
+
+		// The file should be restored to its initial content
+		const content = await readFileContent(currentTestFilePath)
+		expect(content).toEqual(initialData)
+	})
+
+	test("should throw an error if an inter-process lock is already held for the filePath", async () => {
+		vi.resetModules() // Clear module cache to ensure fresh imports for this test
+
+		const data = { message: "test lock failure" }
+
+		// Create a new file path for this specific test to avoid conflicts
+		const lockTestFilePath = path.join(tempDir, "lock-test-file.json")
+		await fs.writeFile(lockTestFilePath, JSON.stringify({ initial: "lock test content" }))
+
+		vi.doMock("proper-lockfile", async () => ({
+			...(await vi.importActual<typeof import("proper-lockfile")>("proper-lockfile")),
+			lock: vi.fn().mockRejectedValueOnce(new Error("Failed to get lock.")),
+		}))
+
+		// Re-import safeWriteJson to use the mocked proper-lockfile
+		const { safeWriteJson: mockedSafeWriteJson } = await import("../safeWriteJson")
+
+		await expect(mockedSafeWriteJson(lockTestFilePath, data)).rejects.toThrow("Failed to get lock.")
+
+		// Clean up
+		await fs.unlink(lockTestFilePath).catch(() => {}) // Ignore errors if file doesn't exist
+		vi.unmock("proper-lockfile") // Ensure the mock is removed after this test
+	})
+	test("should release lock even if an error occurs mid-operation", async () => {
+		const data = { message: "test lock release on error" }
+
+		// Mock createWriteStream to throw an error
+		const createWriteStreamSpy = vi.spyOn(fsSyncActual, "createWriteStream")
+		createWriteStreamSpy.mockImplementationOnce((_path: any, _options: any) => {
+			const errorStream = new Writable() as any
+			errorStream._write = (_chunk: any, _encoding: any, callback: any) => {
+				callback(new Error("Stream write error"))
+			}
+			// Add missing WriteStream properties
+			errorStream.close = vi.fn()
+			errorStream.bytesWritten = 0
+			errorStream.path = _path
+			errorStream.pending = false
+			return errorStream
+		})
+
+		// This should throw but still release the lock
+		await expect(safeWriteJson(currentTestFilePath, data)).rejects.toThrow("Stream write error")
+
+		// Reset the mock to allow the second call to work normally
+		createWriteStreamSpy.mockRestore()
+
+		// If the lock wasn't released, this second attempt would fail with a lock error
+		// Instead, it should succeed (proving the lock was released)
+		await expect(safeWriteJson(currentTestFilePath, data)).resolves.toBeUndefined()
+	})
+
+	test("should handle fs.access error that is not ENOENT", async () => {
+		const data = { message: "access error test" }
+		const accessSpy = vi.spyOn(fs, "access").mockImplementationOnce(async () => {
+			const error = new Error("EACCES: permission denied") as any
+			error.code = "EACCES"
+			throw error
+		})
+
+		// Create a path that will trigger the access check
+		const testPath = path.join(tempDir, "access-error-test.json")
+
+		await expect(safeWriteJson(testPath, data)).rejects.toThrow("EACCES: permission denied")
+
+		// Verify access was called
+		expect(accessSpy).toHaveBeenCalled()
+	})
+
+	// Test for rollback failure scenario
+	test("should log error and re-throw original if rollback fails", async () => {
+		const initialData = { message: "Initial, should be lost if rollback fails" }
+		const newData = { message: "New content" }
+
+		await originalFsPromisesWriteFile(currentTestFilePath, JSON.stringify(initialData))
+
+		const renameSpy = vi.spyOn(fs, "rename")
+		const consoleErrorSpy = vi.spyOn(console, "error").mockImplementation(() => {}) // Suppress console.error
+
+		let renameCallCount = 0
+		renameSpy.mockImplementation(async (oldPath, newPath) => {
+			renameCallCount++
+			if (renameCallCount === 2) {
+				// Second call: tempNewFilePath -> filePath (fail)
+				throw new Error("Primary rename failed")
+			} else if (renameCallCount === 3) {
+				// Third call: tempBackupFilePath -> filePath (rollback, also fail)
+				throw new Error("Rollback rename failed")
+			}
+			return originalFsPromisesRename(oldPath, newPath)
+		})
+
+		// Should throw the original error, not the rollback error
+		await expect(safeWriteJson(currentTestFilePath, newData)).rejects.toThrow("Primary rename failed")
+
+		// Verify console.error was called for the rollback failure
+		expect(consoleErrorSpy).toHaveBeenCalledWith(
+			expect.stringContaining("Failed to restore backup"),
+			expect.objectContaining({ message: "Rollback rename failed" }),
+		)
+
+		consoleErrorSpy.mockRestore()
+	})
+})

+ 235 - 0
src/utils/safeWriteJson.ts

@@ -0,0 +1,235 @@
+import * as fs from "fs/promises"
+import * as fsSync from "fs"
+import * as path from "path"
+import * as lockfile from "proper-lockfile"
+import Disassembler from "stream-json/Disassembler"
+import Stringer from "stream-json/Stringer"
+
+/**
+ * Safely writes JSON data to a file.
+ * - Creates parent directories if they don't exist
+ * - Uses 'proper-lockfile' for inter-process advisory locking to prevent concurrent writes to the same path.
+ * - Writes to a temporary file first.
+ * - If the target file exists, it's backed up before being replaced.
+ * - Attempts to roll back and clean up in case of errors.
+ *
+ * @param {string} filePath - The absolute path to the target file.
+ * @param {any} data - The data to serialize to JSON and write.
+ * @returns {Promise<void>}
+ */
+
+async function safeWriteJson(filePath: string, data: any): Promise<void> {
+	const absoluteFilePath = path.resolve(filePath)
+	let releaseLock = async () => {} // Initialized to a no-op
+
+	// For directory creation
+	const dirPath = path.dirname(absoluteFilePath)
+
+	// Ensure directory structure exists with improved reliability
+	try {
+		// Create directory with recursive option
+		await fs.mkdir(dirPath, { recursive: true })
+
+		// Verify directory exists after creation attempt
+		await fs.access(dirPath)
+	} catch (dirError: any) {
+		console.error(`Failed to create or access directory for ${absoluteFilePath}:`, dirError)
+		throw dirError
+	}
+
+	// Acquire the lock before any file operations
+	try {
+		releaseLock = await lockfile.lock(absoluteFilePath, {
+			stale: 31000, // Stale after 31 seconds
+			update: 10000, // Update mtime every 10 seconds to prevent staleness if operation is long
+			realpath: false, // the file may not exist yet, which is acceptable
+			retries: {
+				// Configuration for retrying lock acquisition
+				retries: 5, // Number of retries after the initial attempt
+				factor: 2, // Exponential backoff factor (e.g., 100ms, 200ms, 400ms, ...)
+				minTimeout: 100, // Minimum time to wait before the first retry (in ms)
+				maxTimeout: 1000, // Maximum time to wait for any single retry (in ms)
+			},
+			onCompromised: (err) => {
+				console.error(`Lock at ${absoluteFilePath} was compromised:`, err)
+				throw err
+			},
+		})
+	} catch (lockError) {
+		// If lock acquisition fails, we throw immediately.
+		// The releaseLock remains a no-op, so the finally block in the main file operations
+		// try-catch-finally won't try to release an unacquired lock if this path is taken.
+		console.error(`Failed to acquire lock for ${absoluteFilePath}:`, lockError)
+		// Propagate the lock acquisition error
+		throw lockError
+	}
+
+	// Variables to hold the actual paths of temp files if they are created.
+	let actualTempNewFilePath: string | null = null
+	let actualTempBackupFilePath: string | null = null
+
+	try {
+		// Step 1: Write data to a new temporary file.
+		actualTempNewFilePath = path.join(
+			path.dirname(absoluteFilePath),
+			`.${path.basename(absoluteFilePath)}.new_${Date.now()}_${Math.random().toString(36).substring(2)}.tmp`,
+		)
+
+		await _streamDataToFile(actualTempNewFilePath, data)
+
+		// Step 2: Check if the target file exists. If so, rename it to a backup path.
+		try {
+			// Check for target file existence
+			await fs.access(absoluteFilePath)
+			// Target exists, create a backup path and rename.
+			actualTempBackupFilePath = path.join(
+				path.dirname(absoluteFilePath),
+				`.${path.basename(absoluteFilePath)}.bak_${Date.now()}_${Math.random().toString(36).substring(2)}.tmp`,
+			)
+			await fs.rename(absoluteFilePath, actualTempBackupFilePath)
+		} catch (accessError: any) {
+			// Explicitly type accessError
+			if (accessError.code !== "ENOENT") {
+				// An error other than "file not found" occurred during access check.
+				throw accessError
+			}
+			// Target file does not exist, so no backup is made. actualTempBackupFilePath remains null.
+		}
+
+		// Step 3: Rename the new temporary file to the target file path.
+		// This is the main "commit" step.
+		await fs.rename(actualTempNewFilePath, absoluteFilePath)
+
+		// If we reach here, the new file is successfully in place.
+		// The original actualTempNewFilePath is now the main file, so we shouldn't try to clean it up as "temp".
+		// Mark as "used" or "committed"
+		actualTempNewFilePath = null
+
+		// Step 4: If a backup was created, attempt to delete it.
+		if (actualTempBackupFilePath) {
+			try {
+				await fs.unlink(actualTempBackupFilePath)
+				// Mark backup as handled
+				actualTempBackupFilePath = null
+			} catch (unlinkBackupError) {
+				// Log this error, but do not re-throw. The main operation was successful.
+				// actualTempBackupFilePath remains set, indicating an orphaned backup.
+				console.error(
+					`Successfully wrote ${absoluteFilePath}, but failed to clean up backup ${actualTempBackupFilePath}:`,
+					unlinkBackupError,
+				)
+			}
+		}
+	} catch (originalError) {
+		console.error(`Operation failed for ${absoluteFilePath}: [Original Error Caught]`, originalError)
+
+		const newFileToCleanupWithinCatch = actualTempNewFilePath
+		const backupFileToRollbackOrCleanupWithinCatch = actualTempBackupFilePath
+
+		// Attempt rollback if a backup was made
+		if (backupFileToRollbackOrCleanupWithinCatch) {
+			try {
+				await fs.rename(backupFileToRollbackOrCleanupWithinCatch, absoluteFilePath)
+				// Mark as handled, prevent later unlink of this path
+				actualTempBackupFilePath = null
+			} catch (rollbackError) {
+				// actualTempBackupFilePath (outer scope) remains pointing to backupFileToRollbackOrCleanupWithinCatch
+				console.error(
+					`[Catch] Failed to restore backup ${backupFileToRollbackOrCleanupWithinCatch} to ${absoluteFilePath}:`,
+					rollbackError,
+				)
+			}
+		}
+
+		// Cleanup the .new file if it exists
+		if (newFileToCleanupWithinCatch) {
+			try {
+				await fs.unlink(newFileToCleanupWithinCatch)
+			} catch (cleanupError) {
+				console.error(
+					`[Catch] Failed to clean up temporary new file ${newFileToCleanupWithinCatch}:`,
+					cleanupError,
+				)
+			}
+		}
+
+		// Cleanup the .bak file if it still needs to be (i.e., wasn't successfully restored)
+		if (actualTempBackupFilePath) {
+			try {
+				await fs.unlink(actualTempBackupFilePath)
+			} catch (cleanupError) {
+				console.error(
+					`[Catch] Failed to clean up temporary backup file ${actualTempBackupFilePath}:`,
+					cleanupError,
+				)
+			}
+		}
+		throw originalError // This MUST be the error that rejects the promise.
+	} finally {
+		// Release the lock in the main finally block.
+		try {
+			// releaseLock will be the actual unlock function if lock was acquired,
+			// or the initial no-op if acquisition failed.
+			await releaseLock()
+		} catch (unlockError) {
+			// Do not re-throw here, as the originalError from the try/catch (if any) is more important.
+			console.error(`Failed to release lock for ${absoluteFilePath}:`, unlockError)
+		}
+	}
+}
+
+/**
+ * Helper function to stream JSON data to a file.
+ * @param targetPath The path to write the stream to.
+ * @param data The data to stream.
+ * @returns Promise<void>
+ */
+async function _streamDataToFile(targetPath: string, data: any): Promise<void> {
+	// Stream data to avoid high memory usage for large JSON objects.
+	const fileWriteStream = fsSync.createWriteStream(targetPath, { encoding: "utf8" })
+	const disassembler = Disassembler.disassembler()
+	// Output will be compact JSON as standard Stringer is used.
+	const stringer = Stringer.stringer()
+
+	return new Promise<void>((resolve, reject) => {
+		let errorOccurred = false
+		const handleError = (_streamName: string) => (err: Error) => {
+			if (!errorOccurred) {
+				errorOccurred = true
+				if (!fileWriteStream.destroyed) {
+					fileWriteStream.destroy(err)
+				}
+				reject(err)
+			}
+		}
+
+		disassembler.on("error", handleError("Disassembler"))
+		stringer.on("error", handleError("Stringer"))
+		fileWriteStream.on("error", (err: Error) => {
+			if (!errorOccurred) {
+				errorOccurred = true
+				reject(err)
+			}
+		})
+
+		fileWriteStream.on("finish", () => {
+			if (!errorOccurred) {
+				resolve()
+			}
+		})
+
+		disassembler.pipe(stringer).pipe(fileWriteStream)
+
+		// stream-json's Disassembler might error if `data` is undefined.
+		// JSON.stringify(undefined) would produce the string "undefined" if it's the root value.
+		// Writing 'null' is a safer JSON representation for a root undefined value.
+		if (data === undefined) {
+			disassembler.write(null)
+		} else {
+			disassembler.write(data)
+		}
+		disassembler.end()
+	})
+}
+
+export { safeWriteJson }
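
As an end-to-end illustration of the guarantees above, a minimal usage sketch (the target path and payloads are hypothetical):

	import * as os from "os"
	import * as path from "path"
	import { safeWriteJson } from "./safeWriteJson"

	const cachePath = path.join(os.tmpdir(), "models.json") // hypothetical target

	// Concurrent writers contend on the proper-lockfile advisory lock and retry with
	// backoff; each write is staged in a hidden .tmp file and committed via rename,
	// so a reader never observes a truncated or interleaved models.json.
	await Promise.all([safeWriteJson(cachePath, { version: 1 }), safeWriteJson(cachePath, { version: 2 })])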