
updater: Migrate to nlohmann JSON

derrod committed 2 years ago (commit d1dd7dd6d3)
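
In short, this commit swaps json11's dynamically typed Json objects (and their per-field is_string()/int_value() checks) for the typed structs declared in the new manifest.hpp, whose JSON conversion code is generated by NLOHMANN_DEFINE_TYPE_INTRUSIVE. A minimal self-contained sketch of that pattern, not taken from the commit and assuming a nlohmann_json version that provides the macro (3.9 or later):

    #include <nlohmann/json.hpp>
    #include <iostream>
    #include <string>

    // Toy type, mirroring how manifest.hpp declares its structs.
    struct Example {
        std::string name;
        size_t size = 0;

        NLOHMANN_DEFINE_TYPE_INTRUSIVE(Example, name, size)
    };

    int main()
    {
        // to_json: struct -> JSON object with one key per listed member.
        nlohmann::json j = Example{"core/obs64.exe", 123};
        std::cout << j.dump() << "\n"; // {"name":"core/obs64.exe","size":123}

        // from_json: JSON -> struct, no manual field/type checks needed.
        Example e = nlohmann::json::parse(R"({"name":"data/x.dat","size":42})").get<Example>();
        std::cout << e.name << " " << e.size << "\n";
    }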

UI/win-update/updater/CMakeLists.txt (+4, -5)

@@ -3,10 +3,7 @@ cmake_minimum_required(VERSION 3.24...3.25)
 legacy_check()
 
 find_package(zstd)
-
-if(NOT TARGET OBS::json11)
-  add_subdirectory("${CMAKE_SOURCE_DIR}/deps/json11" "${CMAKE_BINARY_DIR}/deps/json11")
-endif()
+find_package(nlohmann_json 3 REQUIRED)
 
 add_executable(updater WIN32)
 
@@ -17,6 +14,7 @@ target_sources(
           helpers.hpp
           http.cpp
           init-hook-files.c
+          manifest.hpp
           patch.cpp
           resource.h
           updater.cpp
@@ -29,7 +27,8 @@ target_compile_definitions(updater PRIVATE NOMINMAX "PSAPI_VERSION=2")
 
 target_include_directories(updater PRIVATE "${CMAKE_SOURCE_DIR}/libobs" "${CMAKE_SOURCE_DIR}/UI/win-update")
 
-target_link_libraries(updater PRIVATE OBS::blake2 OBS::json11 zstd::libzstd_static comctl32 shell32 winhttp)
+target_link_libraries(updater PRIVATE OBS::blake2 nlohmann_json::nlohmann_json zstd::libzstd_static comctl32 shell32
+                                      winhttp)
 target_link_options(updater PRIVATE /IGNORE:4098)
 
 set_target_properties(updater PROPERTIES FOLDER frontend OUTPUT_NAME updater)

UI/win-update/updater/cmake/legacy.cmake (+5, -5)

@@ -8,6 +8,7 @@ if(NOT ENABLE_UPDATER)
 endif()
 
 find_package(zstd)
+find_package(nlohmann_json 3 REQUIRED)
 
 add_executable(updater WIN32)
 
@@ -23,11 +24,9 @@ target_sources(
           init-hook-files.c
           updater.manifest
           helpers.cpp
-          helpers.hpp
-          ${CMAKE_SOURCE_DIR}/deps/json11/json11.hpp
-          ${CMAKE_SOURCE_DIR}/deps/json11/json11.cpp)
+          helpers.hpp)
 
-target_include_directories(updater PRIVATE ${CMAKE_SOURCE_DIR}/libobs ${CMAKE_SOURCE_DIR}/deps/json11)
+target_include_directories(updater PRIVATE ${CMAKE_SOURCE_DIR}/libobs)
 
 target_compile_definitions(updater PRIVATE NOMINMAX "PSAPI_VERSION=2")
 
@@ -37,6 +36,7 @@ if(MSVC)
   target_link_options(updater PRIVATE "LINKER:/IGNORE:4098")
 endif()
 
-target_link_libraries(updater PRIVATE OBS::blake2 zstd::libzstd_static comctl32 shell32 winhttp)
+target_link_libraries(updater PRIVATE OBS::blake2 nlohmann_json::nlohmann_json zstd::libzstd_static comctl32 shell32
+                                      winhttp)
 
 set_target_properties(updater PROPERTIES FOLDER "frontend")

UI/win-update/updater/manifest.hpp (+94, -0)

@@ -0,0 +1,94 @@
+/*
+ * Copyright (c) 2023 Dennis Sädtler <[email protected]>
+ *
+ * Permission to use, copy, modify, and distribute this software for any
+ * purpose with or without fee is hereby granted, provided that the above
+ * copyright notice and this permission notice appear in all copies.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ */
+
+#pragma once
+
+#include <string>
+#include <nlohmann/json.hpp>
+
+using json = nlohmann::json;
+
+namespace updater {
+struct File {
+	/* 160-bit blake2b hashes of zstd compressed and raw file (hex string) */
+	std::string compressed_hash;
+	std::string hash;
+	/* Relative path */
+	std::string name;
+	/* Uncompressed size in bytes */
+	size_t size = 0;
+
+	NLOHMANN_DEFINE_TYPE_INTRUSIVE(File, compressed_hash, hash, name, size)
+};
+
+struct Package {
+	std::string name;
+	std::vector<File> files = {};
+	/* Relative paths of files to remove */
+	std::vector<std::string> removed_files = {};
+
+	NLOHMANN_DEFINE_TYPE_INTRUSIVE(Package, name, files, removed_files)
+};
+
+struct Manifest {
+	/* Update packages */
+	std::vector<Package> packages = {};
+
+	/* Version information */
+	uint8_t version_major = 0;
+	uint8_t version_minor = 0;
+	uint8_t version_patch = 0;
+	uint8_t beta = 0;
+	uint8_t rc = 0;
+
+	/* Hash of VC redist file */
+	std::string vc2019_redist_x64;
+
+	/* Unused until UI is migrated to nlohmann_json */
+	// std::string commit;
+	// std::string notes;
+
+	NLOHMANN_DEFINE_TYPE_INTRUSIVE(Manifest, packages, version_major,
+				       version_minor, version_patch, beta, rc,
+				       vc2019_redist_x64)
+};
+
+struct PatchRequest {
+	/* Relative path of file */
+	std::string name;
+	/* 160-bit blake2b hash of existing file */
+	std::string hash;
+
+	NLOHMANN_DEFINE_TYPE_INTRUSIVE(PatchRequest, name, hash)
+};
+
+using PatchesRequest = std::vector<PatchRequest>;
+
+struct PatchResponse {
+	/* Relative path of file */
+	std::string name;
+	/* 160-bit blake2b hash of patch file */
+	std::string hash;
+	/* Download URL for patch file */
+	std::string source;
+	/* Size of patch file */
+	size_t size = 0;
+
+	NLOHMANN_DEFINE_TYPE_INTRUSIVE(PatchResponse, name, hash, size, source)
+};
+
+using PatchesResponse = std::vector<PatchResponse>;
+}
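
A note on the structs above (not from the commit): NLOHMANN_DEFINE_TYPE_INTRUSIVE generates both to_json and from_json for the listed members, and the generated from_json reads every key with at(), so a field that is missing or has the wrong type makes get<T>() throw a json::exception instead of being silently skipped. A small sketch of handling that, mirroring the try/catch added to updater.cpp below; it assumes manifest.hpp is on the include path:

    #include "manifest.hpp"

    static bool TryParseManifest(const std::string &text, updater::Manifest &out)
    {
        try {
            out = json::parse(text).get<updater::Manifest>();
            return true;
        } catch (const json::exception &) {
            // Malformed JSON (json::parse_error) and missing or mistyped
            // fields (json::out_of_range / json::type_error) all land here,
            // matching the Status(...) error path in updater.cpp.
            return false;
        }
    }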

UI/win-update/updater/updater.cpp (+72, -159)

@@ -15,6 +15,7 @@
  */
 
 #include "updater.hpp"
+#include "manifest.hpp"
 
 #include <psapi.h>
 
@@ -28,7 +29,7 @@
 #include <queue>
 
 using namespace std;
-using namespace json11;
+using namespace updater;
 
 /* ----------------------------------------------------------------------- */
 
@@ -52,8 +53,8 @@ static bool updateFailed = false;
 
 static bool downloadThreadFailure = false;
 
-int totalFileSize = 0;
-int completedFileSize = 0;
+size_t totalFileSize = 0;
+size_t completedFileSize = 0;
 static int completedUpdates = 0;
 
 static wchar_t tempPath[MAX_PATH];
@@ -297,7 +298,7 @@ struct update_t {
 	B2Hash my_hash;
 	B2Hash downloadHash;
 
-	DWORD fileSize = 0;
+	size_t fileSize = 0;
 	state_t state = STATE_INVALID;
 	bool has_hash = false;
 	bool patchable = false;
@@ -702,14 +703,12 @@ void HasherThread()
 	}
 }
 
-static void RunHasherWorkers(int num, const Json &packages)
+static void RunHasherWorkers(int num, const vector<Package> &packages)
 try {
 
-	for (const Json &package : packages.array_items()) {
-		for (const Json &file : package["files"].array_items()) {
-			if (!file["name"].is_string())
-				continue;
-			hashQueue.push(file["name"].string_value());
+	for (const Package &package : packages) {
+		for (const File &file : package.files) {
+			hashQueue.push(file.name);
 		}
 	}
 
@@ -747,54 +746,26 @@ static bool NonCorePackageInstalled(const char *name)
 	return false;
 }
 
-static bool AddPackageUpdateFiles(const Json &package, const wchar_t *tempPath,
+static bool AddPackageUpdateFiles(const Package &package,
+				  const wchar_t *tempPath,
 				  const wchar_t *branch)
 {
-	const Json &name = package["name"];
-	const Json &files = package["files"];
-
-	if (!files.is_array())
-		return true;
-	if (!name.is_string())
-		return true;
-
 	wchar_t wPackageName[512];
-	const string &packageName = name.string_value();
-	size_t fileCount = files.array_items().size();
-
-	if (!UTF8ToWideBuf(wPackageName, packageName.c_str()))
+	if (!UTF8ToWideBuf(wPackageName, package.name.c_str()))
 		return false;
 
-	if (packageName != "core" &&
-	    !NonCorePackageInstalled(packageName.c_str()))
+	if (package.name != "core" &&
+	    !NonCorePackageInstalled(package.name.c_str()))
 		return true;
 
-	for (size_t j = 0; j < fileCount; j++) {
-		const Json &file = files[j];
-		const Json &fileName = file["name"];
-		const Json &hash = file["hash"];
-		const Json &dlHash = file["compressed_hash"];
-		const Json &size = file["size"];
-
-		if (!fileName.is_string())
-			continue;
-		if (!hash.is_string())
-			continue;
-		if (!size.is_number())
-			continue;
-
-		const string &fileUTF8 = fileName.string_value();
-		const string &hashUTF8 = hash.string_value();
-		const string &dlHashUTF8 = dlHash.string_value();
-		int fileSize = size.int_value();
-
-		if (hashUTF8.size() != kBlake2StrLength)
+	for (const File &file : package.files) {
+		if (file.hash.size() != kBlake2StrLength)
 			continue;
 
 		/* The download hash may not exist if a file is uncompressed */
 
 		bool compressed = false;
-		if (dlHashUTF8.size() == kBlake2StrLength)
+		if (file.compressed_hash.size() == kBlake2StrLength)
 			compressed = true;
 
 		/* convert strings to wide */
@@ -802,7 +773,7 @@ static bool AddPackageUpdateFiles(const Json &package, const wchar_t *tempPath,
 		wchar_t sourceURL[1024];
 		wchar_t updateFileName[MAX_PATH];
 
-		if (!UTF8ToWideBuf(updateFileName, fileUTF8.c_str()))
+		if (!UTF8ToWideBuf(updateFileName, file.name.c_str()))
 			continue;
 
 		/* make sure paths are safe */
@@ -820,15 +791,15 @@ static bool AddPackageUpdateFiles(const Json &package, const wchar_t *tempPath,
 
 		/* Convert hashes */
 		B2Hash updateHash;
-		StringToHash(hashUTF8, updateHash);
+		StringToHash(file.hash, updateHash);
 
 		/* We don't really care if this fails, it's just to avoid
 		 * wasting bandwidth by downloading unmodified files */
 		B2Hash localFileHash;
 		bool has_hash = false;
 
-		if (hashes.count(fileUTF8)) {
-			localFileHash = hashes[fileUTF8];
+		if (hashes.count(file.name)) {
+			localFileHash = hashes[file.name];
 			if (localFileHash == updateHash)
 				continue;
 
@@ -837,10 +808,10 @@ static bool AddPackageUpdateFiles(const Json &package, const wchar_t *tempPath,
 
 		/* Add update file */
 		update_t update;
-		update.fileSize = fileSize;
+		update.fileSize = file.size;
 		update.outputPath = updateFileName;
 		update.sourceURL = sourceURL;
-		update.packageName = packageName;
+		update.packageName = package.name;
 		update.state = STATE_PENDING_DOWNLOAD;
 		update.patchable = false;
 		update.compressed = compressed;
@@ -848,7 +819,7 @@ static bool AddPackageUpdateFiles(const Json &package, const wchar_t *tempPath,
 
 		if (compressed) {
 			update.sourceURL += L".zst";
-			StringToHash(dlHashUTF8, update.downloadHash);
+			StringToHash(file.compressed_hash, update.downloadHash);
 		} else {
 			update.downloadHash = updateHash;
 		}
@@ -859,25 +830,17 @@ static bool AddPackageUpdateFiles(const Json &package, const wchar_t *tempPath,
 
 		updates.push_back(move(update));
 
-		totalFileSize += fileSize;
+		totalFileSize += file.size;
 	}
 
 	return true;
 }
 
-static void AddPackageRemovedFiles(const Json &package)
+static void AddPackageRemovedFiles(const Package &package)
 {
-	const Json &removed_files = package["removed_files"];
-	if (!removed_files.is_array())
-		return;
-
-	for (auto &item : removed_files.array_items()) {
-		if (!item.is_string())
-			continue;
-
+	for (const string &filename : package.removed_files) {
 		wchar_t removedFileName[MAX_PATH];
-		if (!UTF8ToWideBuf(removedFileName,
-				   item.string_value().c_str()))
+		if (!UTF8ToWideBuf(removedFileName, filename.c_str()))
 			continue;
 
 		/* Ensure paths are safe, also check if file exists */
@@ -934,42 +897,23 @@ static bool RenameRemovedFile(deletion_t &deletion)
 	return false;
 }
 
-static void UpdateWithPatchIfAvailable(const Json &patch)
+static void UpdateWithPatchIfAvailable(const PatchResponse &patch)
 {
-	const Json &name_json = patch["name"];
-	const Json &hash_json = patch["hash"];
-	const Json &source_json = patch["source"];
-	const Json &size_json = patch["size"];
-
-	if (!name_json.is_string())
-		return;
-	if (!hash_json.is_string())
-		return;
-	if (!source_json.is_string())
-		return;
-	if (!size_json.is_number())
-		return;
-
-	const string &name = name_json.string_value();
-	const string &hash = hash_json.string_value();
-	const string &source = source_json.string_value();
-	int size = size_json.int_value();
-
 	wchar_t widePatchableFilename[MAX_PATH];
 	wchar_t sourceURL[1024];
 
-	if (source.compare(0, kCDNUrl.size(), kCDNUrl) != 0)
+	if (patch.source.compare(0, kCDNUrl.size(), kCDNUrl) != 0)
 		return;
 
-	if (name.find("/") == string::npos)
+	if (patch.name.find("/") == string::npos)
 		return;
 
-	string patchPackageName(name, 0, name.find("/"));
-	string fileName(name, name.find("/") + 1);
+	string patchPackageName(patch.name, 0, patch.name.find("/"));
+	string fileName(patch.name, patch.name.find("/") + 1);
 
 	if (!UTF8ToWideBuf(widePatchableFilename, fileName.c_str()))
 		return;
-	if (!UTF8ToWideBuf(sourceURL, source.c_str()))
+	if (!UTF8ToWideBuf(sourceURL, patch.source.c_str()))
 		return;
 
 	for (update_t &update : updates) {
@@ -982,10 +926,10 @@ static void UpdateWithPatchIfAvailable(const Json &patch)
 
 		/* Replace the source URL with the patch file, update
 	         * the download hash, and re-calculate download size */
-		StringToHash(hash, update.downloadHash);
+		StringToHash(patch.hash, update.downloadHash);
 		update.sourceURL = sourceURL;
-		totalFileSize -= (update.fileSize - size);
-		update.fileSize = size;
+		totalFileSize -= (update.fileSize - patch.size);
+		update.fileSize = patch.size;
 
 		break;
 	}
@@ -1251,7 +1195,7 @@ try {
 	return false;
 }
 
-static bool UpdateVS2019Redists(const Json &root)
+static bool UpdateVS2019Redists(const string &vc_redist_hash)
 {
 	/* ------------------------------------------ *
 	 * Initialize session                         */
@@ -1314,16 +1258,8 @@ static bool UpdateVS2019Redists(const Json &root)
 	/* ------------------------------------------ *
 	 * Get expected hash                          */
 
-	const Json &redistJson = root["vc2019_redist_x64"];
-	if (!redistJson.is_string()) {
-		Status(L"Update failed: Could not parse VC2019 redist json");
-		return false;
-	}
-
-	const string &expectedHashUTF8 = redistJson.string_value();
 	B2Hash expectedHash;
-
-	StringToHash(expectedHashUTF8, expectedHash);
+	StringToHash(vc_redist_hash, expectedHash);
 
 	/* ------------------------------------------ *
 	 * Get download hash                          */
@@ -1382,7 +1318,7 @@ static bool UpdateVS2019Redists(const Json &root)
 	return success;
 }
 
-static void UpdateRegistryVersion(const Json &manifest)
+static void UpdateRegistryVersion(const Manifest &manifest)
 {
 	const char *regKey =
 		R"(Software\Microsoft\Windows\CurrentVersion\Uninstall\OBS Studio)";
@@ -1392,20 +1328,17 @@ static void UpdateRegistryVersion(const Json &manifest)
 	int formattedLen;
 
 	/* The manifest does not store a version string, so we gotta make one ourselves. */
-	int beta = manifest["beta"].int_value();
-	int rc = manifest["rc"].int_value();
-	int major = manifest["version_major"].int_value();
-	int minor = manifest["version_minor"].int_value();
-	int patch = manifest["version_patch"].int_value();
-
-	if (beta || rc) {
-		formattedLen = sprintf_s(version, sizeof(version),
-					 "%d.%d.%d-%s%d", major, minor, patch,
-					 beta ? "beta" : "rc",
-					 beta ? beta : rc);
+	if (manifest.beta || manifest.rc) {
+		formattedLen = sprintf_s(
+			version, sizeof(version), "%d.%d.%d-%s%d",
+			manifest.version_major, manifest.version_minor,
+			manifest.version_patch, manifest.beta ? "beta" : "rc",
+			manifest.beta ? manifest.beta : manifest.rc);
 	} else {
 		formattedLen = sprintf_s(version, sizeof(version), "%d.%d.%d",
-					 major, minor, patch);
+					 manifest.version_major,
+					 manifest.version_minor,
+					 manifest.version_patch);
 	}
 
 	if (formattedLen <= 0)
@@ -1577,7 +1510,7 @@ static bool Update(wchar_t *cmdLine)
 	/* ------------------------------------- *
 	 * Load manifest file                    */
 
-	Json root;
+	Manifest manifest;
 	{
 		string manifestFile = QuickReadFile(manifestPath);
 		if (manifestFile.empty()) {
@@ -1585,32 +1518,26 @@ static bool Update(wchar_t *cmdLine)
 			return false;
 		}
 
-		string error;
-		root = Json::parse(manifestFile, error);
-
-		if (!error.empty()) {
+		try {
+			json manifestContents = json::parse(manifestFile);
+			manifest = manifestContents.get<Manifest>();
+		} catch (json::exception &e) {
 			Status(L"Update failed: Couldn't parse update "
 			       L"manifest: %S",
-			       error.c_str());
+			       e.what());
 			return false;
 		}
 	}
 
-	if (!root.is_object()) {
-		Status(L"Update failed: Invalid update manifest");
-		return false;
-	}
-
 	/* ------------------------------------- *
 	 * Hash local files listed in manifest   */
 
-	RunHasherWorkers(4, root["packages"]);
+	RunHasherWorkers(4, manifest.packages);
 
 	/* ------------------------------------- *
 	 * Parse current manifest update files   */
 
-	const Json::array &packages = root["packages"].array_items();
-	for (const Json &package : packages) {
+	for (const Package &package : manifest.packages) {
 		if (!AddPackageUpdateFiles(package, tempPath, branch.c_str())) {
 			Status(L"Update failed: Failed to process update packages");
 			return false;
@@ -1636,7 +1563,7 @@ static bool Update(wchar_t *cmdLine)
 	 * Check for VS2019 redistributables     */
 
 	if (!HasVS2019Redist()) {
-		if (!UpdateVS2019Redists(root)) {
+		if (!UpdateVS2019Redists(manifest.vc2019_redist_x64)) {
 			return false;
 		}
 	}
@@ -1644,8 +1571,7 @@ static bool Update(wchar_t *cmdLine)
 	/* ------------------------------------- *
 	 * Generate file hash json               */
 
-	Json::array files;
-
+	PatchesRequest files;
 	for (update_t &update : updates) {
 		if (!update.has_hash)
 			continue;
@@ -1662,22 +1588,16 @@ static bool Update(wchar_t *cmdLine)
 		package_path += "/";
 		package_path += outputPath;
 
-		files.emplace_back(Json::object{
-			{"name", package_path},
-			{"hash", hash_string},
-		});
+		files.push_back({package_path, hash_string});
 	}
 
 	/* ------------------------------------- *
 	 * Send file hashes                      */
 
 	string newManifest;
-
-	if (files.size() > 0) {
-		string post_body;
-		Json(files).dump(post_body);
-
-		int responseCode;
+	if (!files.empty()) {
+		json request = files;
+		string post_body = request.dump();
 
 		int len = (int)post_body.size();
 		size_t compressSize = ZSTD_compressBound(len);
@@ -1699,6 +1619,7 @@ static bool Update(wchar_t *cmdLine)
 		if (branch != L"stable")
 			manifestUrl += L"?branch=" + branch;
 
+		int responseCode;
 		bool success = !!HTTPPostData(manifestUrl.c_str(),
 					      (BYTE *)&compressedJson[0],
 					      (int)compressedJson.size(),
@@ -1721,26 +1642,18 @@ static bool Update(wchar_t *cmdLine)
 	/* ------------------------------------- *
 	 * Parse new manifest                    */
 
-	string error;
-	Json patchRoot = Json::parse(newManifest, error);
-	if (!error.empty()) {
+	PatchesResponse patches;
+	try {
+		json patchManifest = json::parse(newManifest);
+		patches = patchManifest.get<PatchesResponse>();
+	} catch (json::exception &e) {
 		Status(L"Update failed: Couldn't parse patch manifest: %S",
-		       error.c_str());
-		return false;
-	}
-
-	if (!patchRoot.is_array()) {
-		Status(L"Update failed: Invalid patch manifest");
+		       e.what());
 		return false;
 	}
 
 	/* Update updates with patch information. */
-	for (const Json &patch : patchRoot.array_items()) {
-		if (!patch.is_object()) {
-			Status(L"Update failed: Invalid patch manifest");
-			return false;
-		}
-
+	for (const PatchResponse &patch : patches) {
 		UpdateWithPatchIfAvailable(patch);
 	}
 
@@ -1849,7 +1762,7 @@ static bool Update(wchar_t *cmdLine)
 
 	if (!bIsPortable) {
 		Status(L"Updating version information...");
-		UpdateRegistryVersion(root);
+		UpdateRegistryVersion(manifest);
 	}
 
 	/* ------------------------------------- *
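
For reference (not part of the commit): the file-hash request body that Update() now builds relies on nlohmann's container support; because PatchRequest uses the intrusive macro, a PatchesRequest (std::vector<PatchRequest>) converts directly to a JSON array, replacing the old Json::array/Json::object construction. A minimal sketch with an invented hash value, assuming manifest.hpp is on the include path:

    #include "manifest.hpp"

    static std::string BuildPatchesRequestBody()
    {
        updater::PatchesRequest files;
        files.push_back({"core/bin/64bit/obs64.exe", "ab12..."}); // {name, hash}

        json request = files; // vector<PatchRequest> -> JSON array
        return request.dump();
        // -> [{"hash":"ab12...","name":"core/bin/64bit/obs64.exe"}]
    }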

UI/win-update/updater/updater.hpp (+2, -3)

@@ -76,7 +76,6 @@ using B2Hash = std::array<std::byte, kBlake2HashLength>;
 #endif
 
 #include <util/windows/WinHandle.hpp>
-#include <json11.hpp>
 #include "resource.h"
 
 bool HTTPGetFile(HINTERNET hConnect, const wchar_t *url,
@@ -99,8 +98,8 @@ int ApplyPatch(ZSTD_DCtx *zstdCtx, std::byte *patch_data,
 
 extern HWND hwndMain;
 extern HCRYPTPROV hProvider;
-extern int totalFileSize;
-extern int completedFileSize;
+extern size_t totalFileSize;
+extern size_t completedFileSize;
 extern HANDLE cancelRequested;
 
 #pragma pack(push, r1, 1)