From dc5a11c632a462a8c785c29475a378661b45489a Mon Sep 17 00:00:00 2001 From: Refringe Date: Wed, 1 Jan 2025 21:53:18 -0500 Subject: [PATCH] File System Classes This adds the `FileSystem` and `FileSystemSync` classes to replace the VFS class. These classes handle file system operations using `fs-extra` for most tasks, except where the `atomically` package can be used to improve reads and writes. The goal is to ensure that file operations are as safe as possible while still providing a comfortable API. File operation atomicity is focused on single files, as there's no trivial, strict way to ensure atomicity for directory operations. ## Changes - Adds `FileSystem` class for asynchronous file operations - Adds `FileSystemSync` class for synchronous file operations - Updates `atomically` to `2.0.3` - Removes `VFS` class - Removes `AsyncQueue` class # TODO - Test with mods (transpiles) - Test compiled build --- project/package.json | 6 +- project/src/ErrorHandler.ts | 7 +- project/src/di/Container.ts | 9 +- project/src/loaders/BundleLoader.ts | 10 +- project/src/loaders/PreSptModLoader.ts | 51 +-- project/src/models/spt/utils/IAsyncQueue.ts | 5 - project/src/models/spt/utils/ICommand.ts | 4 - project/src/routers/ImageRouter.ts | 5 +- project/src/servers/ConfigServer.ts | 32 +- project/src/servers/SaveServer.ts | 26 +- project/src/services/BackupService.ts | 40 +- .../services/BotEquipmentModPoolService.ts | 2 - project/src/services/ModCompilerService.ts | 29 +- .../services/cache/BundleHashCacheService.ts | 15 +- .../src/services/cache/ModHashCacheService.ts | 12 +- .../HideoutCustomisationGen.ts | 6 +- .../ItemTplGenerator/ItemTplGenerator.ts | 7 +- .../ProductionQuestsGen.ts | 6 +- project/src/utils/AsyncQueue.ts | 26 -- project/src/utils/DatabaseImporter.ts | 16 +- project/src/utils/FileSystem.ts | 348 ++++++++++++++++++ project/src/utils/FileSystemSync.ts | 342 +++++++++++++++++ project/src/utils/HashUtil.ts | 9 +- project/src/utils/HttpFileUtil.ts | 6 +- project/src/utils/ImporterUtil.ts | 67 +--- project/src/utils/JsonUtil.ts | 12 +- project/src/utils/VFS.ts | 283 -------------- .../utils/logging/AbstractWinstonLogger.ts | 70 ++-- .../src/utils/logging/WinstonMainLogger.ts | 10 +- .../src/utils/logging/WinstonRequestLogger.ts | 10 +- 30 files changed, 878 insertions(+), 593 deletions(-) delete mode 100644 project/src/models/spt/utils/IAsyncQueue.ts delete mode 100644 project/src/models/spt/utils/ICommand.ts delete mode 100644 project/src/utils/AsyncQueue.ts create mode 100644 project/src/utils/FileSystem.ts create mode 100644 project/src/utils/FileSystemSync.ts delete mode 100644 project/src/utils/VFS.ts diff --git a/project/package.json b/project/package.json index 4b652d6cd..96682abf0 100644 --- a/project/package.json +++ b/project/package.json @@ -36,18 +36,17 @@ "gen:customisationstorage": "tsx ./src/tools/HideoutCustomisation/HideoutCustomisationProgram.ts" }, "dependencies": { - "atomically": "~1.7", + "atomically": "2.0.3", "buffer-crc32": "~1.0", "date-fns": "~3.6", "date-fns-tz": "~3.1", - "fs-extra": "^11.2.0", + "fs-extra": "11.2.0", "i18n": "~0.15", "json-fixer": "~1.6", "json5": "~2.2", "jsonc": "~2.0", "logform": "~2.6", "mongoid-js": "~1.3", - "proper-lockfile": "~4.1", "reflect-metadata": "~0.2", "semver": "~7.6", "source-map-support": "~0.5", @@ -65,7 +64,6 @@ "@types/fs-extra": "11.0.4", "@types/i18n": "~0.13", "@types/node": "22.10.2", - "@types/proper-lockfile": "~4.1", "@types/semver": "~7.5", "@types/ws": "~8.5", "@vitest/coverage-istanbul": "^2.1.8", diff --git 
a/project/src/ErrorHandler.ts b/project/src/ErrorHandler.ts index 17ea9babd..9302af3f0 100644 --- a/project/src/ErrorHandler.ts +++ b/project/src/ErrorHandler.ts @@ -1,14 +1,17 @@ import readline from "node:readline"; import { ILogger } from "@spt/models/spt/utils/ILogger"; -import { AsyncQueue } from "@spt/utils/AsyncQueue"; import { WinstonMainLogger } from "@spt/utils/logging/WinstonMainLogger"; +import { FileSystem } from "./utils/FileSystem"; +import { FileSystemSync } from "./utils/FileSystemSync"; export class ErrorHandler { private logger: ILogger; private readLine: readline.Interface; constructor() { - this.logger = new WinstonMainLogger(new AsyncQueue()); + const fileSystem = new FileSystem(); + const fileSystemSync = new FileSystemSync(); + this.logger = new WinstonMainLogger(fileSystem, fileSystemSync); this.readLine = readline.createInterface({ input: process.stdin, output: process.stdout }); } diff --git a/project/src/di/Container.ts b/project/src/di/Container.ts index b981eaa2c..7a735297e 100644 --- a/project/src/di/Container.ts +++ b/project/src/di/Container.ts @@ -134,7 +134,6 @@ import { ModTypeCheck } from "@spt/loaders/ModTypeCheck"; import { PostDBModLoader } from "@spt/loaders/PostDBModLoader"; import { PostSptModLoader } from "@spt/loaders/PostSptModLoader"; import { PreSptModLoader } from "@spt/loaders/PreSptModLoader"; -import { IAsyncQueue } from "@spt/models/spt/utils/IAsyncQueue"; import { ILogger } from "@spt/models/spt/utils/ILogger"; import { EventOutputHolder } from "@spt/routers/EventOutputHolder"; import { HttpRouter } from "@spt/routers/HttpRouter"; @@ -256,10 +255,11 @@ import { OnLoadModService } from "@spt/services/mod/onLoad/OnLoadModService"; import { OnUpdateModService } from "@spt/services/mod/onUpdate/OnUpdateModService"; import { StaticRouterModService } from "@spt/services/mod/staticRouter/StaticRouterModService"; import { App } from "@spt/utils/App"; -import { AsyncQueue } from "@spt/utils/AsyncQueue"; import { CompareUtil } from "@spt/utils/CompareUtil"; import { DatabaseImporter } from "@spt/utils/DatabaseImporter"; import { EncodingUtil } from "@spt/utils/EncodingUtil"; +import { FileSystem } from "@spt/utils/FileSystem"; +import { FileSystemSync } from "@spt/utils/FileSystemSync"; import { HashUtil } from "@spt/utils/HashUtil"; import { HttpFileUtil } from "@spt/utils/HttpFileUtil"; import { HttpResponseUtil } from "@spt/utils/HttpResponseUtil"; @@ -269,7 +269,6 @@ import { MathUtil } from "@spt/utils/MathUtil"; import { ObjectId } from "@spt/utils/ObjectId"; import { RandomUtil } from "@spt/utils/RandomUtil"; import { TimeUtil } from "@spt/utils/TimeUtil"; -import { VFS } from "@spt/utils/VFS"; import { Watermark, WatermarkLocale } from "@spt/utils/Watermark"; import type { ICloner } from "@spt/utils/cloners/ICloner"; import { JsonCloner } from "@spt/utils/cloners/JsonCloner"; @@ -443,10 +442,10 @@ export class Container { depContainer.register("ObjectId", ObjectId); depContainer.register("RandomUtil", RandomUtil, { lifecycle: Lifecycle.Singleton }); depContainer.register("TimeUtil", TimeUtil, { lifecycle: Lifecycle.Singleton }); - depContainer.register("VFS", VFS, { lifecycle: Lifecycle.Singleton }); + depContainer.register("FileSystem", FileSystem, { lifecycle: Lifecycle.Singleton }); + depContainer.register("FileSystemSync", FileSystemSync, { lifecycle: Lifecycle.Singleton }); depContainer.register("WatermarkLocale", WatermarkLocale, { lifecycle: Lifecycle.Singleton }); depContainer.register("Watermark", Watermark, { lifecycle: 
Lifecycle.Singleton }); - depContainer.register("AsyncQueue", AsyncQueue, { lifecycle: Lifecycle.Singleton }); depContainer.register("HttpFileUtil", HttpFileUtil, { lifecycle: Lifecycle.Singleton }); depContainer.register("ModLoadOrder", ModLoadOrder, { lifecycle: Lifecycle.Singleton }); depContainer.register("ModTypeCheck", ModTypeCheck, { lifecycle: Lifecycle.Singleton }); diff --git a/project/src/loaders/BundleLoader.ts b/project/src/loaders/BundleLoader.ts index 44240d944..4b806e87d 100644 --- a/project/src/loaders/BundleLoader.ts +++ b/project/src/loaders/BundleLoader.ts @@ -1,8 +1,7 @@ -import path from "node:path"; import { HttpServerHelper } from "@spt/helpers/HttpServerHelper"; import { BundleHashCacheService } from "@spt/services/cache/BundleHashCacheService"; +import { FileSystemSync } from "@spt/utils/FileSystemSync"; import { JsonUtil } from "@spt/utils/JsonUtil"; -import { VFS } from "@spt/utils/VFS"; import type { ICloner } from "@spt/utils/cloners/ICloner"; import { inject, injectable } from "tsyringe"; @@ -26,7 +25,7 @@ export class BundleLoader { constructor( @inject("HttpServerHelper") protected httpServerHelper: HttpServerHelper, - @inject("VFS") protected vfs: VFS, + @inject("FileSystemSync") protected fileSystemSync: FileSystemSync, @inject("JsonUtil") protected jsonUtil: JsonUtil, @inject("BundleHashCacheService") protected bundleHashCacheService: BundleHashCacheService, @inject("PrimaryCloner") protected cloner: ICloner, @@ -50,9 +49,8 @@ export class BundleLoader { } public addBundles(modpath: string): void { - const bundleManifestArr = this.jsonUtil.deserialize( - this.vfs.readFile(`${modpath}bundles.json`), - ).manifest; + const bundles = this.fileSystemSync.readJson(`${modpath}bundles.json`) as IBundleManifest; + const bundleManifestArr = bundles?.manifest; for (const bundleManifest of bundleManifestArr) { const relativeModPath = modpath.slice(0, -1).replace(/\\/g, "/"); diff --git a/project/src/loaders/PreSptModLoader.ts b/project/src/loaders/PreSptModLoader.ts index 007734adb..05f3d3315 100644 --- a/project/src/loaders/PreSptModLoader.ts +++ b/project/src/loaders/PreSptModLoader.ts @@ -15,8 +15,8 @@ import type { ILogger } from "@spt/models/spt/utils/ILogger"; import { ConfigServer } from "@spt/servers/ConfigServer"; import { LocalisationService } from "@spt/services/LocalisationService"; import { ModCompilerService } from "@spt/services/ModCompilerService"; +import { FileSystemSync } from "@spt/utils/FileSystemSync"; import { JsonUtil } from "@spt/utils/JsonUtil"; -import { VFS } from "@spt/utils/VFS"; import { maxSatisfying, satisfies, valid, validRange } from "semver"; import { DependencyContainer, inject, injectable } from "tsyringe"; @@ -34,7 +34,7 @@ export class PreSptModLoader implements IModLoader { constructor( @inject("PrimaryLogger") protected logger: ILogger, - @inject("VFS") protected vfs: VFS, + @inject("FileSystemSync") protected fileSystemSync: FileSystemSync, @inject("JsonUtil") protected jsonUtil: JsonUtil, @inject("ModCompilerService") protected modCompilerService: ModCompilerService, @inject("LocalisationService") protected localisationService: LocalisationService, @@ -45,7 +45,7 @@ export class PreSptModLoader implements IModLoader { this.sptConfig = this.configServer.getConfig(ConfigTypes.CORE); const packageJsonPath: string = path.join(__dirname, "../../package.json"); - this.serverDependencies = JSON.parse(this.vfs.readFile(packageJsonPath)).dependencies; + this.serverDependencies = 
this.fileSystemSync.readJson(packageJsonPath)?.dependencies; this.skippedMods = new Set(); } @@ -103,28 +103,28 @@ export class PreSptModLoader implements IModLoader { } protected async importModsAsync(): Promise { - if (!this.vfs.exists(this.basepath)) { + if (!this.fileSystemSync.exists(this.basepath)) { // no mods folder found this.logger.info(this.localisationService.getText("modloader-user_mod_folder_missing")); - this.vfs.createDir(this.basepath); + this.fileSystemSync.ensureDir(this.basepath); return; } /** * array of mod folder names */ - const mods: string[] = this.vfs.getDirs(this.basepath); + const mods: string[] = this.fileSystemSync.getDirectories(this.basepath); this.logger.info(this.localisationService.getText("modloader-loading_mods", mods.length)); // Mod order - if (!this.vfs.exists(this.modOrderPath)) { + if (!this.fileSystemSync.exists(this.modOrderPath)) { this.logger.info(this.localisationService.getText("modloader-mod_order_missing")); // Write file with empty order array to disk - this.vfs.writeFile(this.modOrderPath, this.jsonUtil.serializeAdvanced({ order: [] }, undefined, 4)); + this.fileSystemSync.writeJson(this.modOrderPath, { order: [] }); } else { - const modOrder = this.vfs.readFile(this.modOrderPath, { encoding: "utf8" }); + const modOrder = this.fileSystemSync.read(this.modOrderPath); try { const modOrderArray = this.jsonUtil.deserialize(modOrder, this.modOrderPath).order; for (const [index, mod] of modOrderArray.entries()) { @@ -154,7 +154,7 @@ export class PreSptModLoader implements IModLoader { if ( modToValidate.dependencies && Object.keys(modToValidate.dependencies).length > 0 && - !this.vfs.exists(`${this.basepath}${modFolderName}/node_modules`) + !this.fileSystemSync.exists(`${this.basepath}${modFolderName}/node_modules`) ) { this.autoInstallDependencies(`${this.basepath}${modFolderName}`, modToValidate); } @@ -274,7 +274,7 @@ export class PreSptModLoader implements IModLoader { const loadedMods = new Map(); for (const mod of mods) { - loadedMods.set(mod, this.jsonUtil.deserialize(this.vfs.readFile(`${this.getModPath(mod)}/package.json`))); + loadedMods.set(mod, this.fileSystemSync.readJson(`${this.getModPath(mod)}/package.json`)); } return loadedMods; @@ -380,8 +380,8 @@ export class PreSptModLoader implements IModLoader { public sortModsLoadOrder(): string[] { // if loadorder.json exists: load it, otherwise generate load order const loadOrderPath = `${this.basepath}loadorder.json`; - if (this.vfs.exists(loadOrderPath)) { - return this.jsonUtil.deserialize(this.vfs.readFile(loadOrderPath), loadOrderPath); + if (this.fileSystemSync.exists(loadOrderPath)) { + return this.fileSystemSync.readJson(loadOrderPath); } return this.modLoadOrder.getLoadOrder(); @@ -394,7 +394,7 @@ export class PreSptModLoader implements IModLoader { protected async addModAsync(mod: string, pkg: IPackageJsonData): Promise { const modPath = this.getModPath(mod); - const typeScriptFiles = this.vfs.getFilesOfType(`${modPath}src`, ".ts"); + const typeScriptFiles = this.fileSystemSync.getFiles(`${modPath}src`, true, ["ts"]); if (typeScriptFiles.length > 0) { if (ProgramStatics.COMPILED) { @@ -468,9 +468,10 @@ export class PreSptModLoader implements IModLoader { return; } - // Temporarily rename package.json because otherwise npm, pnpm and any other package manager will forcefully download all packages in dependencies without any way of disabling this behavior - this.vfs.rename(`${modPath}/package.json`, `${modPath}/package.json.bak`); - this.vfs.writeFile(`${modPath}/package.json`, 
"{}"); + // Temporarily rename package.json because otherwise npm, pnpm and any other package manager will forcefully + // download all packages in dependencies without any way of disabling this behavior + this.fileSystemSync.rename(`${modPath}/package.json`, `${modPath}/package.json.bak`); + this.fileSystemSync.writeJson(`${modPath}/package.json`, {}); this.logger.info( this.localisationService.getText("modloader-installing_external_dependencies", { @@ -494,8 +495,8 @@ export class PreSptModLoader implements IModLoader { execSync(command, { cwd: modPath }); // Delete the new blank package.json then rename the backup back to the original name - this.vfs.removeFile(`${modPath}/package.json`); - this.vfs.rename(`${modPath}/package.json.bak`, `${modPath}/package.json`); + this.fileSystemSync.remove(`${modPath}/package.json`); + this.fileSystemSync.rename(`${modPath}/package.json.bak`, `${modPath}/package.json`); } protected areModDependenciesFulfilled(pkg: IPackageJsonData, loadedMods: Map): boolean { @@ -568,8 +569,8 @@ export class PreSptModLoader implements IModLoader { const modIsCalledUser = modName.toLowerCase() === "user"; const modIsCalledSrc = modName.toLowerCase() === "src"; const modIsCalledDb = modName.toLowerCase() === "db"; - const hasBepinExFolderStructure = this.vfs.exists(`${modPath}/plugins`); - const containsDll = this.vfs.getFiles(`${modPath}`).find((x) => x.includes(".dll")); + const hasBepinExFolderStructure = this.fileSystemSync.exists(`${modPath}/plugins`); + const containsDll = this.fileSystemSync.getFiles(`${modPath}`, true, ["dll"]).length > 0; if (modIsCalledSrc || modIsCalledDb || modIsCalledUser) { this.logger.error(this.localisationService.getText("modloader-not_correct_mod_folder", modName)); @@ -583,13 +584,13 @@ export class PreSptModLoader implements IModLoader { // Check if config exists const modPackagePath = `${modPath}/package.json`; - if (!this.vfs.exists(modPackagePath)) { + if (!this.fileSystemSync.exists(modPackagePath)) { this.logger.error(this.localisationService.getText("modloader-missing_package_json", modName)); return false; } // Validate mod - const config = this.jsonUtil.deserialize(this.vfs.readFile(modPackagePath), modPackagePath); + const config = this.fileSystemSync.readJson(modPackagePath) as IPackageJsonData; const checks = ["name", "author", "version", "license"]; let issue = false; @@ -617,10 +618,10 @@ export class PreSptModLoader implements IModLoader { issue = true; } - if (!this.vfs.exists(`${modPath}/${config.main}`)) { + if (!this.fileSystemSync.exists(`${modPath}/${config.main}`)) { // If TS file exists with same name, dont perform check as we'll generate JS from TS file const tsFileName = config.main.replace(".js", ".ts"); - const tsFileExists = this.vfs.exists(`${modPath}/${tsFileName}`); + const tsFileExists = this.fileSystemSync.exists(`${modPath}/${tsFileName}`); if (!tsFileExists) { this.logger.error( diff --git a/project/src/models/spt/utils/IAsyncQueue.ts b/project/src/models/spt/utils/IAsyncQueue.ts deleted file mode 100644 index 79872ac6a..000000000 --- a/project/src/models/spt/utils/IAsyncQueue.ts +++ /dev/null @@ -1,5 +0,0 @@ -import { ICommand } from "@spt/models/spt/utils/ICommand"; - -export interface IAsyncQueue { - waitFor(command: ICommand): Promise; -} diff --git a/project/src/models/spt/utils/ICommand.ts b/project/src/models/spt/utils/ICommand.ts deleted file mode 100644 index 696bb8396..000000000 --- a/project/src/models/spt/utils/ICommand.ts +++ /dev/null @@ -1,4 +0,0 @@ -export interface ICommand { - 
uuid: string; - cmd: () => Promise; -} diff --git a/project/src/routers/ImageRouter.ts b/project/src/routers/ImageRouter.ts index 911ca3e03..d5ce247db 100644 --- a/project/src/routers/ImageRouter.ts +++ b/project/src/routers/ImageRouter.ts @@ -1,13 +1,12 @@ import { IncomingMessage, ServerResponse } from "node:http"; import { ImageRouteService } from "@spt/services/mod/image/ImageRouteService"; +import { FileSystemSync } from "@spt/utils/FileSystemSync"; import { HttpFileUtil } from "@spt/utils/HttpFileUtil"; -import { VFS } from "@spt/utils/VFS"; import { inject, injectable } from "tsyringe"; @injectable() export class ImageRouter { constructor( - @inject("VFS") protected vfs: VFS, @inject("ImageRouteService") protected imageRouteService: ImageRouteService, @inject("HttpFileUtil") protected httpFileUtil: HttpFileUtil, ) {} @@ -18,7 +17,7 @@ export class ImageRouter { public async sendImage(sessionID: string, req: IncomingMessage, resp: ServerResponse, body: any): Promise { // remove file extension - const url = this.vfs.stripExtension(req.url); + const url = req.url ? FileSystemSync.stripExtension(req.url) : ""; // send image if (this.imageRouteService.existsByKey(url)) { diff --git a/project/src/servers/ConfigServer.ts b/project/src/servers/ConfigServer.ts index 1d6953cff..a42a23868 100644 --- a/project/src/servers/ConfigServer.ts +++ b/project/src/servers/ConfigServer.ts @@ -1,8 +1,8 @@ import { ProgramStatics } from "@spt/ProgramStatics"; import { ConfigTypes } from "@spt/models/enums/ConfigTypes"; import type { ILogger } from "@spt/models/spt/utils/ILogger"; +import { FileSystemSync } from "@spt/utils/FileSystemSync"; import { JsonUtil } from "@spt/utils/JsonUtil"; -import { VFS } from "@spt/utils/VFS"; import { inject, injectable } from "tsyringe"; @injectable() @@ -12,7 +12,7 @@ export class ConfigServer { constructor( @inject("PrimaryLogger") protected logger: ILogger, - @inject("VFS") protected vfs: VFS, + @inject("FileSystemSync") protected fileSystemSync: FileSystemSync, @inject("JsonUtil") protected jsonUtil: JsonUtil, ) { this.initialize(); @@ -35,29 +35,21 @@ export class ConfigServer { // Get all filepaths const filepath = ProgramStatics.COMPILED ? "SPT_Data/Server/configs/" : "./assets/configs/"; - const files = this.vfs.getFiles(filepath); + const files = this.fileSystemSync.getFiles(filepath, true, this.acceptableFileExtensions); // Add file content to result for (const file of files) { - if (this.acceptableFileExtensions.includes(this.vfs.getFileExtension(file.toLowerCase()))) { - const fileName = this.vfs.stripExtension(file); - const filePathAndName = `${filepath}${file}`; - const deserialsiedJson = this.jsonUtil.deserializeJsonC( - this.vfs.readFile(filePathAndName), - filePathAndName, - ); - - if (!deserialsiedJson) { - this.logger.error( - `Config file: ${filePathAndName} is corrupt. Use a site like: https://jsonlint.com to find the issue.`, - ); - throw new Error( - `Server will not run until the: ${filePathAndName} config error mentioned above is fixed`, - ); - } + const fileName = FileSystemSync.getFileName(file); + const deserialsiedJson = this.jsonUtil.deserializeJsonC(this.fileSystemSync.read(file), fileName); - this.configs[`spt-${fileName}`] = deserialsiedJson; + if (!deserialsiedJson) { + this.logger.error( + `Config file: ${fileName} is corrupt. 
Use a site like: https://jsonlint.com to find the issue.`, + ); + throw new Error(`Server will not run until the: ${fileName} config error mentioned above is fixed`); } + + this.configs[`spt-${fileName}`] = deserialsiedJson; } this.logger.info(`Commit hash: ${ProgramStatics.COMMIT || "DEBUG"}`); diff --git a/project/src/servers/SaveServer.ts b/project/src/servers/SaveServer.ts index d259a279d..198c70283 100644 --- a/project/src/servers/SaveServer.ts +++ b/project/src/servers/SaveServer.ts @@ -5,9 +5,9 @@ import { ICoreConfig } from "@spt/models/spt/config/ICoreConfig"; import type { ILogger } from "@spt/models/spt/utils/ILogger"; import { ConfigServer } from "@spt/servers/ConfigServer"; import { LocalisationService } from "@spt/services/LocalisationService"; +import { FileSystemSync } from "@spt/utils/FileSystemSync"; import { HashUtil } from "@spt/utils/HashUtil"; import { JsonUtil } from "@spt/utils/JsonUtil"; -import { VFS } from "@spt/utils/VFS"; import { inject, injectAll, injectable } from "tsyringe"; @injectable() @@ -19,7 +19,7 @@ export class SaveServer { protected saveMd5 = {}; constructor( - @inject("VFS") protected vfs: VFS, + @inject("FileSystemSync") protected fileSystemSync: FileSystemSync, @injectAll("SaveLoadRouter") protected saveLoadRouters: SaveLoadRouter[], @inject("JsonUtil") protected jsonUtil: JsonUtil, @inject("HashUtil") protected hashUtil: HashUtil, @@ -49,20 +49,16 @@ export class SaveServer { * Load all profiles in /user/profiles folder into memory (this.profiles) */ public load(): void { - // get files to load - if (!this.vfs.exists(this.profileFilepath)) { - this.vfs.createDir(this.profileFilepath); - } + this.fileSystemSync.ensureDir(this.profileFilepath); - const files = this.vfs.getFiles(this.profileFilepath).filter((item) => { - return this.vfs.getFileExtension(item) === "json"; - }); + // get files to load + const files = this.fileSystemSync.getFiles(this.profileFilepath, false, ["json"]); // load profiles const start = performance.now(); let loadTimeCount = 0; for (const file of files) { - this.loadProfile(this.vfs.stripExtension(file)); + this.loadProfile(FileSystemSync.getFileName(file)); loadTimeCount += performance.now() - start; } @@ -160,9 +156,9 @@ export class SaveServer { public loadProfile(sessionID: string): void { const filename = `${sessionID}.json`; const filePath = `${this.profileFilepath}${filename}`; - if (this.vfs.exists(filePath)) { + if (this.fileSystemSync.exists(filePath)) { // File found, store in profiles[] - this.profiles[sessionID] = this.jsonUtil.deserialize(this.vfs.readFile(filePath), filename); + this.profiles[sessionID] = this.fileSystemSync.readJson(filePath); } // Run callbacks @@ -200,7 +196,7 @@ export class SaveServer { if (typeof this.saveMd5[sessionID] !== "string" || this.saveMd5[sessionID] !== fmd5) { this.saveMd5[sessionID] = String(fmd5); // save profile to disk - this.vfs.writeFile(filePath, jsonProfile); + this.fileSystemSync.write(filePath, jsonProfile); } return Number(performance.now() - start); @@ -216,8 +212,8 @@ export class SaveServer { delete this.profiles[sessionID]; - this.vfs.removeFile(file); + this.fileSystemSync.remove(file); - return !this.vfs.exists(file); + return !this.fileSystemSync.exists(file); } } diff --git a/project/src/services/BackupService.ts b/project/src/services/BackupService.ts index 43eea5f76..af9bc7cc9 100644 --- a/project/src/services/BackupService.ts +++ b/project/src/services/BackupService.ts @@ -4,7 +4,7 @@ import { ConfigTypes } from "@spt/models/enums/ConfigTypes"; import 
{ IBackupConfig } from "@spt/models/spt/config/IBackupConfig"; import type { ILogger } from "@spt/models/spt/utils/ILogger"; import { ConfigServer } from "@spt/servers/ConfigServer"; -import fs from "fs-extra"; +import { FileSystem } from "@spt/utils/FileSystem"; import { inject, injectable } from "tsyringe"; @injectable() @@ -17,6 +17,7 @@ export class BackupService { @inject("PrimaryLogger") protected logger: ILogger, @inject("PreSptModLoader") protected preSptModLoader: PreSptModLoader, @inject("ConfigServer") protected configServer: ConfigServer, + @inject("FileSystem") protected fileSystem: FileSystem, ) { this.backupConfig = this.configServer.getConfig(ConfigTypes.BACKUP); this.activeServerMods = this.getActiveServerMods(); @@ -41,7 +42,7 @@ export class BackupService { // Fetch all profiles in the profile directory. let currentProfiles: string[] = []; try { - currentProfiles = await this.fetchProfileFiles(); + currentProfiles = await this.fileSystem.getFiles(this.profileDir, false, ["json"]); } catch (error) { this.logger.debug("Skipping profile backup: Unable to read profiles directory"); return; @@ -53,15 +54,15 @@ export class BackupService { } try { - await fs.ensureDir(targetDir); + await this.fileSystem.ensureDir(targetDir); // Track write promises. const writes: Promise[] = currentProfiles.map((profile) => - fs.copy(path.join(this.profileDir, profile), path.join(targetDir, profile)), + this.fileSystem.copy(path.normalize(profile), path.join(targetDir, path.basename(profile))), ); // Write a copy of active mods. - writes.push(fs.writeJson(path.join(targetDir, "activeMods.json"), this.activeServerMods)); + writes.push(this.fileSystem.writeJson(path.join(targetDir, "activeMods.json"), this.activeServerMods)); await Promise.all(writes); // Wait for all writes to complete. } catch (error) { @@ -74,25 +75,6 @@ export class BackupService { this.cleanBackups(); } - /** - * Fetches the names of all JSON files in the profile directory. - * - * This method normalizes the profile directory path and reads all files within it. It then filters the files to - * include only those with a `.json` extension and returns their names. - * - * @returns A promise that resolves to an array of JSON file names. - */ - protected async fetchProfileFiles(): Promise { - const normalizedProfileDir = path.normalize(this.profileDir); - - try { - const allFiles = await fs.readdir(normalizedProfileDir); - return allFiles.filter((file) => path.extname(file).toLowerCase() === ".json"); - } catch (error) { - return Promise.reject(error); - } - } - /** * Check to see if the backup service is enabled via the config. * @@ -165,8 +147,8 @@ export class BackupService { * @returns A promise that resolves to an array of sorted backup file paths. */ private async getBackupPaths(dir: string): Promise { - const backups = await fs.readdir(dir); - return backups.filter((backup) => path.join(dir, backup)).sort(this.compareBackupDates.bind(this)); + const backups = await this.fileSystem.getFiles(dir, false, ["json"]); + return backups.sort(this.compareBackupDates.bind(this)); } /** @@ -176,12 +158,12 @@ export class BackupService { * @param b - The name of the second backup folder. * @returns The difference in time between the two dates in milliseconds, or `null` if either date is invalid. 
*/ - private compareBackupDates(a: string, b: string): number | null { + private compareBackupDates(a: string, b: string): number { const dateA = this.extractDateFromFolderName(a); const dateB = this.extractDateFromFolderName(b); if (!dateA || !dateB) { - return null; // Skip comparison if either date is invalid. + return 0; // Skip comparison if either date is invalid. } return dateA.getTime() - dateB.getTime(); @@ -213,7 +195,7 @@ export class BackupService { */ private async removeExcessBackups(backups: string[]): Promise { const removePromises = backups.map((backupPath) => - fs.remove(path.join(this.backupConfig.directory, backupPath)), + this.fileSystem.remove(path.join(this.backupConfig.directory, backupPath)), ); await Promise.all(removePromises); diff --git a/project/src/services/BotEquipmentModPoolService.ts b/project/src/services/BotEquipmentModPoolService.ts index 247d3fc50..95b8a129e 100644 --- a/project/src/services/BotEquipmentModPoolService.ts +++ b/project/src/services/BotEquipmentModPoolService.ts @@ -8,7 +8,6 @@ import type { ILogger } from "@spt/models/spt/utils/ILogger"; import { ConfigServer } from "@spt/servers/ConfigServer"; import { DatabaseService } from "@spt/services/DatabaseService"; import { LocalisationService } from "@spt/services/LocalisationService"; -import { VFS } from "@spt/utils/VFS"; import { inject, injectable } from "tsyringe"; /** Store a mapping between weapons, their slots and the items that fit those slots */ @@ -22,7 +21,6 @@ export class BotEquipmentModPoolService { constructor( @inject("PrimaryLogger") protected logger: ILogger, - @inject("VFS") protected vfs: VFS, @inject("ItemHelper") protected itemHelper: ItemHelper, @inject("DatabaseService") protected databaseService: DatabaseService, @inject("LocalisationService") protected localisationService: LocalisationService, diff --git a/project/src/services/ModCompilerService.ts b/project/src/services/ModCompilerService.ts index 454225c6a..8dcb93788 100644 --- a/project/src/services/ModCompilerService.ts +++ b/project/src/services/ModCompilerService.ts @@ -1,9 +1,9 @@ -import fs from "node:fs"; import path from "node:path"; import { ProgramStatics } from "@spt/ProgramStatics"; import type { ILogger } from "@spt/models/spt/utils/ILogger"; import { ModHashCacheService } from "@spt/services/cache/ModHashCacheService"; -import { VFS } from "@spt/utils/VFS"; +import { FileSystem } from "@spt/utils/FileSystem"; +import { FileSystemSync } from "@spt/utils/FileSystemSync"; import { inject, injectable } from "tsyringe"; import { CompilerOptions, ModuleKind, ModuleResolutionKind, ScriptTarget, transpileModule } from "typescript"; @@ -14,10 +14,11 @@ export class ModCompilerService { constructor( @inject("PrimaryLogger") protected logger: ILogger, @inject("ModHashCacheService") protected modHashCacheService: ModHashCacheService, - @inject("VFS") protected vfs: VFS, + @inject("FileSystem") protected fileSystem: FileSystem, + @inject("FileSystemSync") protected fileSystemSync: FileSystemSync, ) { const packageJsonPath: string = path.join(__dirname, "../../package.json"); - this.serverDependencies = Object.keys(JSON.parse(this.vfs.readFile(packageJsonPath)).dependencies); + this.serverDependencies = Object.keys(this.fileSystemSync.readJson(packageJsonPath).dependencies); } /** @@ -32,11 +33,11 @@ export class ModCompilerService { let tsFileContents = ""; let fileExists = true; // does every js file exist (been compiled before) for (const file of modTypeScriptFiles) { - const fileContent = 
this.vfs.readFile(file); + const fileContent = await this.fileSystem.read(file); tsFileContents += fileContent; // Does equivalent .js file exist - if (!this.vfs.exists(file.replace(".ts", ".js"))) { + if (!(await this.fileSystem.exists(file.replace(".ts", ".js")))) { fileExists = false; } } @@ -83,7 +84,7 @@ export class ModCompilerService { const destPath = filePath.replace(".ts", ".js"); const parsedPath = path.parse(filePath); const parsedDestPath = path.parse(destPath); - const text = fs.readFileSync(filePath).toString(); + const text = await this.fileSystem.read(filePath); let replacedText: string; if (ProgramStatics.COMPILED) { @@ -108,12 +109,12 @@ export class ModCompilerService { sourceMap.file = parsedDestPath.base; sourceMap.sources = [parsedPath.base]; - fs.writeFileSync(`${destPath}.map`, JSON.stringify(sourceMap)); + await this.fileSystem.writeJson(`${destPath}.map`, sourceMap); } - fs.writeFileSync(destPath, output.outputText); + await this.fileSystem.write(destPath, output.outputText); } - while (!this.areFilesReady(fileNames)) { + while (!(await this.areFilesReady(fileNames))) { await this.delay(200); } } @@ -123,8 +124,10 @@ export class ModCompilerService { * @param fileNames * @returns */ - protected areFilesReady(fileNames: string[]): boolean { - return fileNames.filter((x) => !this.vfs.exists(x.replace(".ts", ".js"))).length === 0; + protected async areFilesReady(fileNames: string[]): Promise { + const fileExistencePromises = fileNames.map(async (x) => await this.fileSystem.exists(x.replace(".ts", ".js"))); + const fileExistenceResults = await Promise.all(fileExistencePromises); + return fileExistenceResults.every((exists) => exists); } /** @@ -132,7 +135,7 @@ export class ModCompilerService { * @param ms Milliseconds * @returns */ - protected delay(ms: number): Promise { + protected async delay(ms: number): Promise { return new Promise((resolve) => setTimeout(resolve, ms)); } } diff --git a/project/src/services/cache/BundleHashCacheService.ts b/project/src/services/cache/BundleHashCacheService.ts index bd2d53a09..a16d034b4 100644 --- a/project/src/services/cache/BundleHashCacheService.ts +++ b/project/src/services/cache/BundleHashCacheService.ts @@ -1,7 +1,7 @@ import type { ILogger } from "@spt/models/spt/utils/ILogger"; +import { FileSystemSync } from "@spt/utils/FileSystemSync"; import { HashUtil } from "@spt/utils/HashUtil"; import { JsonUtil } from "@spt/utils/JsonUtil"; -import { VFS } from "@spt/utils/VFS"; import { inject, injectable } from "tsyringe"; @injectable() @@ -10,19 +10,16 @@ export class BundleHashCacheService { protected readonly bundleHashCachePath = "./user/cache/bundleHashCache.json"; constructor( - @inject("VFS") protected vfs: VFS, + @inject("FileSystemSync") protected fileSystemSync: FileSystemSync, @inject("HashUtil") protected hashUtil: HashUtil, @inject("JsonUtil") protected jsonUtil: JsonUtil, @inject("PrimaryLogger") protected logger: ILogger, ) { - if (!this.vfs.exists(this.bundleHashCachePath)) { - this.vfs.writeFile(this.bundleHashCachePath, "{}"); + if (!this.fileSystemSync.exists(this.bundleHashCachePath)) { + this.fileSystemSync.writeJson(this.bundleHashCachePath, {}); } - this.bundleHashes = this.jsonUtil.deserialize( - this.vfs.readFile(this.bundleHashCachePath), - this.bundleHashCachePath, - ); + this.bundleHashes = this.fileSystemSync.readJson(this.bundleHashCachePath); } public getStoredValue(key: string): number { @@ -32,7 +29,7 @@ export class BundleHashCacheService { public storeValue(key: string, value: number): void { 
this.bundleHashes[key] = value; - this.vfs.writeFile(this.bundleHashCachePath, this.jsonUtil.serialize(this.bundleHashes)); + this.fileSystemSync.writeJson(this.bundleHashCachePath, this.bundleHashes); this.logger.debug(`Bundle ${key} hash stored in ${this.bundleHashCachePath}`); } diff --git a/project/src/services/cache/ModHashCacheService.ts b/project/src/services/cache/ModHashCacheService.ts index 6b13d3faa..388baee5a 100644 --- a/project/src/services/cache/ModHashCacheService.ts +++ b/project/src/services/cache/ModHashCacheService.ts @@ -1,7 +1,7 @@ import type { ILogger } from "@spt/models/spt/utils/ILogger"; +import { FileSystemSync } from "@spt/utils/FileSystemSync"; import { HashUtil } from "@spt/utils/HashUtil"; import { JsonUtil } from "@spt/utils/JsonUtil"; -import { VFS } from "@spt/utils/VFS"; import { inject, injectable } from "tsyringe"; @injectable() @@ -10,16 +10,16 @@ export class ModHashCacheService { protected readonly modCachePath = "./user/cache/modCache.json"; constructor( - @inject("VFS") protected vfs: VFS, + @inject("FileSystemSync") protected fileSystemSync: FileSystemSync, @inject("HashUtil") protected hashUtil: HashUtil, @inject("JsonUtil") protected jsonUtil: JsonUtil, @inject("PrimaryLogger") protected logger: ILogger, ) { - if (!this.vfs.exists(this.modCachePath)) { - this.vfs.writeFile(this.modCachePath, "{}"); + if (!this.fileSystemSync.exists(this.modCachePath)) { + this.fileSystemSync.writeJson(this.modCachePath, {}); } - this.modHashes = this.jsonUtil.deserialize(this.vfs.readFile(this.modCachePath), this.modCachePath); + this.modHashes = this.fileSystemSync.readJson(this.modCachePath); } public getStoredValue(key: string): string { @@ -29,7 +29,7 @@ export class ModHashCacheService { public storeValue(key: string, value: string): void { this.modHashes[key] = value; - this.vfs.writeFile(this.modCachePath, this.jsonUtil.serialize(this.modHashes)); + this.fileSystemSync.writeJson(this.modCachePath, this.modHashes); this.logger.debug(`Mod ${key} hash stored in ${this.modCachePath}`); } diff --git a/project/src/tools/HideoutCustomisation/HideoutCustomisationGen.ts b/project/src/tools/HideoutCustomisation/HideoutCustomisationGen.ts index e4041283c..2fc53de9f 100644 --- a/project/src/tools/HideoutCustomisation/HideoutCustomisationGen.ts +++ b/project/src/tools/HideoutCustomisation/HideoutCustomisationGen.ts @@ -5,12 +5,12 @@ * - Run this script using npm: `npm run gen:customisationstorage` * */ -import { writeFileSync } from "node:fs"; import { dirname, join, resolve } from "node:path"; import { OnLoad } from "@spt/di/OnLoad"; import { IQuestReward } from "@spt/models/eft/common/tables/IQuest"; import type { ILogger } from "@spt/models/spt/utils/ILogger"; import { DatabaseServer } from "@spt/servers/DatabaseServer"; +import { FileSystem } from "@spt/utils/FileSystem"; import { inject, injectAll, injectable } from "tsyringe"; @injectable() @@ -21,6 +21,7 @@ export class HideoutCustomisationGen { constructor( @inject("DatabaseServer") protected databaseServer: DatabaseServer, @inject("PrimaryLogger") protected logger: ILogger, + @inject("FileSystem") protected fileSystem: FileSystem, @injectAll("OnLoad") protected onLoadComponents: OnLoad[], ) {} @@ -40,10 +41,9 @@ export class HideoutCustomisationGen { const projectDir = resolve(currentDir, "..", "..", ".."); const templatesDir = join(projectDir, "assets", "database", "templates"); const customisationStorageOutPath = join(templatesDir, "customisationStorage.json"); - writeFileSync( + await 
this.fileSystem.write( customisationStorageOutPath, JSON.stringify(this.databaseServer.getTables().templates?.customisationStorage, null, 2), - "utf-8", ); } diff --git a/project/src/tools/ItemTplGenerator/ItemTplGenerator.ts b/project/src/tools/ItemTplGenerator/ItemTplGenerator.ts index bb59645f7..085d77590 100644 --- a/project/src/tools/ItemTplGenerator/ItemTplGenerator.ts +++ b/project/src/tools/ItemTplGenerator/ItemTplGenerator.ts @@ -23,8 +23,7 @@ * - Finalized enum names are created as a combination of the parent name, prefix, item name, and suffix */ -import * as fs from "node:fs"; -import * as path from "node:path"; +import path from "node:path"; import { OnLoad } from "@spt/di/OnLoad"; import { ItemHelper } from "@spt/helpers/ItemHelper"; import { ITemplateItem } from "@spt/models/eft/common/tables/ITemplateItem"; @@ -35,6 +34,7 @@ import type { ILogger } from "@spt/models/spt/utils/ILogger"; import { DatabaseServer } from "@spt/servers/DatabaseServer"; import { LocaleService } from "@spt/services/LocaleService"; import * as itemTplOverrides from "@spt/tools/ItemTplGenerator/itemOverrides"; +import { FileSystemSync } from "@spt/utils/FileSystemSync"; import { inject, injectAll, injectable } from "tsyringe"; @injectable() @@ -49,6 +49,7 @@ export class ItemTplGenerator { @inject("LocaleService") protected localeService: LocaleService, @inject("PrimaryLogger") protected logger: ILogger, @inject("ItemHelper") protected itemHelper: ItemHelper, + @inject("FileSystemSync") protected fileSystemSync: FileSystemSync, @injectAll("OnLoad") protected onLoadComponents: OnLoad[], ) {} @@ -494,6 +495,6 @@ export class ItemTplGenerator { enumFileData += "}\n"; } - fs.writeFileSync(outputPath, enumFileData, "utf-8"); + this.fileSystemSync.write(outputPath, enumFileData); } } diff --git a/project/src/tools/ProductionQuestsGen/ProductionQuestsGen.ts b/project/src/tools/ProductionQuestsGen/ProductionQuestsGen.ts index 7f3f52cbe..aa99a35fe 100644 --- a/project/src/tools/ProductionQuestsGen/ProductionQuestsGen.ts +++ b/project/src/tools/ProductionQuestsGen/ProductionQuestsGen.ts @@ -10,13 +10,13 @@ * - Some productions may output "Quest ... 
is already associated" if a quest unlocks multiple assorts, this can be ignored * - The list of "blacklistedProductions" is to stop spurious errors when we know a production is no longer necessary (Old events) */ -import * as fs from "node:fs"; import * as path from "node:path"; import { OnLoad } from "@spt/di/OnLoad"; import { IHideoutProduction, IRequirement } from "@spt/models/eft/hideout/IHideoutProduction"; import { QuestRewardType } from "@spt/models/enums/QuestRewardType"; import type { ILogger } from "@spt/models/spt/utils/ILogger"; import { DatabaseServer } from "@spt/servers/DatabaseServer"; +import { FileSystemSync } from "@spt/utils/FileSystemSync"; import { inject, injectAll, injectable } from "tsyringe"; @injectable() @@ -35,6 +35,7 @@ export class ProductionQuestsGen { constructor( @inject("DatabaseServer") protected databaseServer: DatabaseServer, @inject("PrimaryLogger") protected logger: ILogger, + @inject("FileSystemSync") protected fileSystemSync: FileSystemSync, @injectAll("OnLoad") protected onLoadComponents: OnLoad[], ) {} @@ -53,10 +54,9 @@ export class ProductionQuestsGen { const projectDir = path.resolve(currentDir, "..", "..", ".."); const hideoutDir = path.join(projectDir, "assets", "database", "hideout"); const productionOutPath = path.join(hideoutDir, "production.json"); - fs.writeFileSync( + this.fileSystemSync.write( productionOutPath, JSON.stringify(this.databaseServer.getTables().hideout.production, null, 2), - "utf-8", ); } diff --git a/project/src/utils/AsyncQueue.ts b/project/src/utils/AsyncQueue.ts deleted file mode 100644 index e58f4a16e..000000000 --- a/project/src/utils/AsyncQueue.ts +++ /dev/null @@ -1,26 +0,0 @@ -import { IAsyncQueue } from "@spt/models/spt/utils/IAsyncQueue"; -import { ICommand } from "@spt/models/spt/utils/ICommand"; - -export class AsyncQueue implements IAsyncQueue { - protected commandsQueue: ICommand[]; - - constructor() { - this.commandsQueue = []; - } - - // Wait for the right command to execute - // This ensures that the commands execute in the right order, thus no data corruption - public async waitFor(command: ICommand): Promise { - // Add to the queue - this.commandsQueue.push(command); - - while (this.commandsQueue[0].uuid !== command.uuid) { - await new Promise((resolve) => { - setTimeout(resolve, 100); - }); - } - - // When the command is ready, execute it - return this.commandsQueue.shift().cmd(); - } -} diff --git a/project/src/utils/DatabaseImporter.ts b/project/src/utils/DatabaseImporter.ts index 3eebf4235..ab2399305 100644 --- a/project/src/utils/DatabaseImporter.ts +++ b/project/src/utils/DatabaseImporter.ts @@ -9,10 +9,10 @@ import { ConfigServer } from "@spt/servers/ConfigServer"; import { DatabaseServer } from "@spt/servers/DatabaseServer"; import { LocalisationService } from "@spt/services/LocalisationService"; import { EncodingUtil } from "@spt/utils/EncodingUtil"; +import { FileSystem } from "@spt/utils/FileSystem"; import { HashUtil } from "@spt/utils/HashUtil"; import { ImporterUtil } from "@spt/utils/ImporterUtil"; import { JsonUtil } from "@spt/utils/JsonUtil"; -import { VFS } from "@spt/utils/VFS"; import { inject, injectable } from "tsyringe"; @injectable() @@ -24,7 +24,7 @@ export class DatabaseImporter implements OnLoad { constructor( @inject("PrimaryLogger") protected logger: ILogger, - @inject("VFS") protected vfs: VFS, + @inject("FileSystem") protected fileSystem: FileSystem, @inject("JsonUtil") protected jsonUtil: JsonUtil, @inject("LocalisationService") protected localisationService: 
LocalisationService, @inject("DatabaseServer") protected databaseServer: DatabaseServer, @@ -42,7 +42,7 @@ export class DatabaseImporter implements OnLoad { * @returns path to data */ public getSptDataPath(): string { - return ProgramStatics.COMPILED ? "SPT_Data/Server/" : "./assets/"; + return ProgramStatics.COMPILED ? "SPT_Data/Server/" : "assets/"; } public async onLoad(): Promise { @@ -53,9 +53,9 @@ export class DatabaseImporter implements OnLoad { // Reading the dynamic SHA1 file const file = "checks.dat"; const fileWithPath = `${this.filepath}${file}`; - if (this.vfs.exists(fileWithPath)) { + if (await this.fileSystem.exists(fileWithPath)) { this.hashedFile = this.jsonUtil.deserialize( - this.encodingUtil.fromBase64(this.vfs.readFile(fileWithPath)), + this.encodingUtil.fromBase64(await this.fileSystem.read(fileWithPath)), file, ); } else { @@ -71,7 +71,7 @@ export class DatabaseImporter implements OnLoad { await this.hydrateDatabase(this.filepath); const imageFilePath = `${this.filepath}images/`; - const directories = await this.vfs.getDirsAsync(imageFilePath); + const directories = await this.fileSystem.getDirectories(imageFilePath, true); await this.loadImagesAsync(imageFilePath, directories, [ "/files/achievement/", "/files/CONTENT/banners/", @@ -145,10 +145,10 @@ export class DatabaseImporter implements OnLoad { public async loadImagesAsync(filepath: string, directories: string[], routes: string[]): Promise { for (const directoryIndex in directories) { // Get all files in directory - const filesInDirectory = await this.vfs.getFilesAsync(`${filepath}${directories[directoryIndex]}`); + const filesInDirectory = await this.fileSystem.getFiles(`${filepath}${directories[directoryIndex]}`, true); for (const file of filesInDirectory) { // Register each file in image router - const filename = this.vfs.stripExtension(file); + const filename = FileSystem.stripExtension(file); const routeKey = `${routes[directoryIndex]}${filename}`; let imagePath = `${filepath}${directories[directoryIndex]}/${file}`; diff --git a/project/src/utils/FileSystem.ts b/project/src/utils/FileSystem.ts new file mode 100644 index 000000000..566111128 --- /dev/null +++ b/project/src/utils/FileSystem.ts @@ -0,0 +1,348 @@ +import path from "node:path"; +import { readFile as atomicallyRead, writeFile as atomicallyWrite } from "atomically"; +import fsExtra from "fs-extra"; +import type { Data, Path } from "node_modules/atomically/dist/types"; +import { injectable } from "tsyringe"; + +/** + * This class handles file system operations, using `fs-extra` for most tasks except where the `atomically` package can + * be used to improve reads and writes. The goal is to ensure that file operations are as safe as possible while still + * providing a comfortable API. + * + * In this class, atomicity is focused on single files, as there's no trivial way to ensure atomicity for directories. + * + * This class' API matches that of the FileSystemSync class, but with async methods. If you can, use this class. + */ +@injectable() +export class FileSystem { + /** + * Copy a file or directory. The directory can have contents. + * + * This is file-atomic, but not directory-atomic. If the process crashes mid-operation, you may end up with some + * files removed and some not, but not a partial file. + * + * @param src The source file or directory. + * @param dest The destination file or directory. + * @param extensionsWhitelist An optional array of file extensions to copy. If empty, all files are copied. 
+ * @returns A promise that resolves when the copy operation is complete. + */ + public async copy(src: string, dest: string, extensionsWhitelist: string[] = []): Promise { + const stat = await fsExtra.stat(src); + if (!stat.isDirectory()) { + return this.copyFile(src, dest, extensionsWhitelist); + } + + const dirents = await fsExtra.readdir(src, { withFileTypes: true, recursive: true }); + if (dirents.length === 0) { + return fsExtra.ensureDir(dest); // Ensures that an empty directory is created at the destination. + } + + const tasks: Promise[] = []; + + for (const dirent of dirents) { + const srcItem = path.join(src, dirent.name); + const destItem = path.join(dest, dirent.name); + + if (!dirent.isDirectory()) { + tasks.push(this.copyFile(srcItem, destItem, extensionsWhitelist)); + } else { + tasks.push(fsExtra.ensureDir(destItem)); // Ensures that empty subdirectories are copied. + } + } + + await Promise.all(tasks); + } + + /** + * Atomically copy a file. If the destination file exists, it will be overwritten. + * + * This is atomic. If the process crashes mid-write, you'll never end up with a partial file. + * + * @param src The source file path. + * @param dest The destination file path. + * @param extensionsWhitelist An optional array of file extensions to copy. If empty, all files are copied. + * @returns A promise that resolves when the copy operation is complete. + */ + private async copyFile(src: string, dest: string, extensionsWhitelist: string[] = []): Promise { + const ext = FileSystem.getFileExtension(src); + if (extensionsWhitelist.length === 0 || extensionsWhitelist.map((e) => e.toLowerCase()).includes(ext)) { + const data = await this.read(src); + return this.write(dest, data); + } + } + + /** + * Ensures that a directory is empty. Deletes directory contents if the directory is not empty. If the directory + * does not exist, it is created. The directory itself is not deleted. + * + * This is not atomic. If the process crashes mid-operation, you may end up with a partially empty directory. + * + * @param dirPath The directory to empty. + * @returns A promise that resolves when the directory is empty. + */ + public async emptyDir(dirPath: string): Promise { + return fsExtra.emptyDir(dirPath); + } + + /** + * Ensures that the directory exists. If the directory structure does not exist, it is created. + * + * @param dirPath The directory to ensure exists. + * @returns A promise that resolves when the directory exists. + */ + public async ensureDir(dirPath: string): Promise { + return fsExtra.ensureDir(dirPath); + } + + /** + * Ensures that the file exists. If the file that is requested to be created is in directories that do not exist, + * these directories are created. If the file already exists, it is NOT MODIFIED. + * + * @param file The file path to ensure exists. + * @returns A promise that resolves when the file exists. + */ + public async ensureFile(file: string): Promise { + return fsExtra.ensureFile(file); + } + + /** + * Moves a file or directory, even across devices. Overwrites by default. + * + * Note: When `src` is a file, `dest` must be a file and when `src` is a directory, `dest` must be a directory. + * + * This is atomic for same-device single file operations, but not as a whole operation. + * + * @param src The source file path or directory. + * @param dest The destination file path or directory. + * @param overwriteDest Whether to overwrite the destination if it already exists. + * @returns A promise that resolves when the move operation is complete. 
+ */ + public async move(src: string, dest: string, overwriteDest = true): Promise { + return fsExtra.move(src, dest, { overwrite: overwriteDest, dereference: true }); + } + + /** + * Change the name or location of a file or directory. + * + * This is atomic for same-device single file operations, but not as a whole operation. + * + * @param currentPath The current file or directory path. + * @param newPath The new file or directory path. + * @returns A promise that resolves when the rename operation is complete. + */ + public async rename(currentPath: string, newPath: string): Promise { + return fsExtra.rename(currentPath, newPath); + } + + /** + * Reads a file and returns the contents as a string. + * + * @param file The file path to read. + * @returns A promise that resolves with the file data. + */ + public async read(file: string): Promise { + return atomicallyRead(file, { encoding: "utf8" }); + } + + /** + * Writes data to a file, overwriting if the file already exists. If the parent directory does not exist, it's + * created. File must be a file path (a buffer or a file descriptor is not allowed). + * + * This is atomic. If the process crashes mid-write, you'll never end up with a partial file. + * + * @param file The file path to write to. + * @param data The data to write to the file. + * @returns A promise that resolves when the write operation is complete. + */ + public async write(file: string, data: Data): Promise { + return atomicallyWrite(file, data); + } + + /** + * Writes an object to a JSON file, overwriting if the file already exists. If the parent directory does not exist, + * it's created. File must be a file path (a buffer or a file descriptor is not allowed). + * + * This is atomic. If the process crashes mid-write, you'll never end up with a partial file. + * + * @param file The file path to write to. + * @param jsonObject The object to write to the file. + * @param indentationSpaces The number of spaces to use for indentation. + * @returns A promise that resolves when the write operation is complete. + */ + public async writeJson(file: string, jsonObject: object, indentationSpaces?: 4): Promise { + const jsonString = JSON.stringify(jsonObject, null, indentationSpaces); + return this.write(file, jsonString); + } + + /** + * Appends a string to the bottom of a file. If the file does not exist, it is created. + * + * This is atomic. If the process crashes mid-write, you'll never end up with a partial file. + * + * @param file The file path to append to. + * @param data The string to append to the file. + * @returns A promise that resolves when the append operation is complete. + */ + public async append(file: string, data: string): Promise { + await this.ensureFile(file); + const existingData = await this.read(file); + const newData = existingData + data; + return this.write(file, newData); + } + + /** + * Test whether the given path exists. + * + * @param fileOrDirPath The path to test. + * @returns A promise that resolves with a boolean indicating whether the path exists. + */ + public async exists(fileOrDirPath: string): Promise { + return fsExtra.pathExists(fileOrDirPath); + } + + /** + * Reads a JSON file and then parses it into an object. + * + * @param file The file path to read. + * @returns A promise that resolves with the parsed JSON object. 
+ */ + // biome-ignore lint/suspicious/noExplicitAny: JSON.parse returns any + public async readJson(file: Path): Promise { + const data = await this.read(file); + return JSON.parse(data); + } + + /** + * Removes a file or directory. The directory can have contents. If the path does not exist, silently does nothing. + * + * This is file-atomic, but not directory-atomic. If the process crashes mid-operation, you may end up with some + * files removed and some not, but not a partial file. + * + * @param dir The file path or directory to remove. + * @returns A promise that resolves when the removal operation is complete. + */ + public async remove(dir: string): Promise { + return fsExtra.remove(dir); + } + + /** + * Get the extension of a file without the dot in lowercase. + * + * @param filepath The file path to get the extension of. + * @returns The file extension without the dot in lowercase. + */ + public static getFileExtension(filepath: string): string { + return path.extname(filepath).replace(".", "").toLowerCase(); + } + + /** + * Get the filename without its extension. + * + * @param filepath The file path to get the filename of. + * @returns The filename without its extension. + */ + public static stripExtension(filepath: string): string { + return filepath.slice(0, -path.extname(filepath).length); + } + + /** + * Get the file name without its extension from a file path. + * + * @param filepath The file path to get the file name from. + * @returns The file name without its extension. + */ + public static getFileName(filepath: string): string { + const baseName = path.basename(filepath); + return FileSystem.stripExtension(baseName); + } + + /** + * Minify a JSON file by reading, parsing, and then stringifying it with no indentation. + * + * This is atomic. If the process crashes mid-write, you'll never end up with a partial file. + * + * @param filePath The file path to minify. + * @returns A promise that resolves when the minify operation is complete. + */ + public async minifyJson(filePath: string): Promise { + const originalData = await this.read(filePath); + const parsed = JSON.parse(originalData); + const minified = JSON.stringify(parsed, null, 0); + return this.write(filePath, minified); + } + + /** + * Minify all JSON files in a directory by recursively finding all JSON files and minifying them. + * + * This is atomic for single files, but not as a whole operation. You'll never end up with a partial file, but you + * may end up with a partial directory if the process crashes mid-minify. + * + * @param dir The directory to minify JSON files in. + * @returns A promise that resolves when the minify operation is complete. + */ + public async minifyJsonInDir(dir: string): Promise { + const dirents = await fsExtra.readdir(dir, { withFileTypes: true, recursive: true }); + const tasks: Promise[] = []; + + for (const dirent of dirents) { + if (dirent.isFile() && FileSystem.getFileExtension(dirent.name) === "json") { + const fullPath = path.join(dir, dirent.name); + tasks.push(this.minifyJson(fullPath)); + } + } + + await Promise.all(tasks); + } + + /** + * Get all files in a directory, optionally filtering by file type. + * + * Will always return paths with forward slashes. + * + * @param directory The directory to get files from. + * @param searchRecursive Whether to search recursively. + * @param fileTypes An optional array of file extensions to filter by (without the dot). + * @returns A promise that resolves with an array of file paths. 
+
+    /**
+     * Get all files in a directory, optionally filtering by file type.
+     *
+     * Will always return paths with forward slashes.
+     *
+     * @param directory The directory to get files from.
+     * @param searchRecursive Whether to search recursively.
+     * @param fileTypes An optional array of file extensions to filter by (without the dot).
+     * @returns A promise that resolves with an array of file paths.
+     */
+    public async getFiles(directory: string, searchRecursive = false, fileTypes?: string[]): Promise<string[]> {
+        if (!(await fsExtra.pathExists(directory))) {
+            return [];
+        }
+        const dirents = await fsExtra.readdir(directory, { withFileTypes: true, recursive: searchRecursive });
+        return (
+            dirents
+                // Filter out anything that isn't a file.
+                .filter((dirent) => dirent.isFile())
+                // Filter by file types, if specified.
+                .filter((dirent) => {
+                    const extension = FileSystem.getFileExtension(dirent.name);
+                    return !fileTypes || fileTypes.includes(extension);
+                })
+                // Join and normalize the input directory and dirent.name to use forward slashes.
+                .map((dirent) => path.join(dirent.parentPath, dirent.name).replace(/\\/g, "/"))
+        );
+    }
+
+    /**
+     * Get all directories in a directory.
+     *
+     * Will always return paths with forward slashes.
+     *
+     * @param directory The directory to get directories from.
+     * @param searchRecursive Whether to search recursively.
+     * @returns A promise that resolves with an array of directory paths.
+     */
+    public async getDirectories(directory: string, searchRecursive = false): Promise<string[]> {
+        if (!(await fsExtra.pathExists(directory))) {
+            return [];
+        }
+        const dirents = await fsExtra.readdir(directory, { withFileTypes: true, recursive: searchRecursive });
+        return (
+            dirents
+                // Filter out anything that isn't a directory.
+                .filter((dirent) => dirent.isDirectory())
+                // Join and normalize the input directory and dirent.name to use forward slashes.
+                .map((dirent) => path.join(directory, dirent.name).replace(/\\/g, "/"))
+        );
+    }
+}
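Usage sketch (editorial note, not part of this patch): recursive `getFiles` with an extension whitelist is the pattern `ImporterUtil` switches to later in this diff. A minimal sketch with a hypothetical directory:

```ts
// Illustrative only: collecting every JSON file under a directory tree with getFiles().
import { container } from "tsyringe";
import { FileSystem } from "@spt/utils/FileSystem";

async function listModConfigs(): Promise<string[]> {
    const fileSystem = container.resolve<FileSystem>("FileSystem");

    // Recursive search, filtered to the "json" extension; paths come back with forward slashes
    // on every platform, so they are safe to compare or store as cache keys.
    return fileSystem.getFiles("./user/mods", true, ["json"]); // hypothetical directory
}
```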
diff --git a/project/src/utils/FileSystemSync.ts b/project/src/utils/FileSystemSync.ts
new file mode 100644
index 000000000..a052747a2
--- /dev/null
+++ b/project/src/utils/FileSystemSync.ts
@@ -0,0 +1,342 @@
+import path from "node:path";
+import { readFileSync as atomicallyReadSync, writeFileSync as atomicallyWriteSync } from "atomically";
+import fsExtra from "fs-extra";
+import type { Data, Path } from "node_modules/atomically/dist/types";
+import { injectable } from "tsyringe";
+
+/**
+ * This class handles file system operations, using `fs-extra` for most tasks except where the `atomically` package can
+ * be used to improve reads and writes. The goal is to ensure that file operations are as safe as possible while still
+ * providing a comfortable API.
+ *
+ * In this class, atomicity is focused on single files, as there's no trivial way to ensure atomicity for directories.
+ *
+ * This class' API matches that of the FileSystem class, but with sync methods. If you can, use the async version.
+ */
+@injectable()
+export class FileSystemSync {
+    /**
+     * Copy a file or directory. The directory can have contents.
+     *
+     * This is file-atomic, but not directory-atomic. If the process crashes mid-operation, you may end up with some
+     * files copied and some not, but never a partial file. The copy runs to completion before returning.
+     *
+     * @param src The source file or directory.
+     * @param dest The destination file or directory.
+     * @param extensionsWhitelist An optional array of file extensions to copy. If empty, all files are copied.
+     * @returns void
+     */
+    public copy(src: string, dest: string, extensionsWhitelist: string[] = []): void {
+        const stat = fsExtra.statSync(src);
+        if (!stat.isDirectory()) {
+            this.copyFile(src, dest, extensionsWhitelist);
+            return;
+        }
+
+        const dirents = fsExtra.readdirSync(src, { withFileTypes: true, recursive: true });
+        if (dirents.length === 0) {
+            fsExtra.ensureDirSync(dest); // Ensures that an empty directory is created at the destination.
+            return;
+        }
+
+        for (const dirent of dirents) {
+            const srcItem = path.join(src, dirent.name);
+            const destItem = path.join(dest, dirent.name);
+
+            if (!dirent.isDirectory()) {
+                this.copyFile(srcItem, destItem, extensionsWhitelist);
+            } else {
+                fsExtra.ensureDirSync(destItem); // Ensures that empty subdirectories are copied.
+            }
+        }
+    }
+
+    /**
+     * Atomically copy a file. If the destination file exists, it will be overwritten.
+     *
+     * This is atomic. If the process crashes mid-write, you'll never end up with a partial file.
+     *
+     * @param src The source file path.
+     * @param dest The destination file path.
+     * @param extensionsWhitelist An optional array of file extensions to copy. If empty, all files are copied.
+     * @returns void
+     */
+    private copyFile(src: string, dest: string, extensionsWhitelist: string[] = []): void {
+        const ext = FileSystemSync.getFileExtension(src);
+        if (extensionsWhitelist.length === 0 || extensionsWhitelist.map((e) => e.toLowerCase()).includes(ext)) {
+            const data = this.read(src);
+            this.write(dest, data);
+        }
+    }
+
+    /**
+     * Ensures that a directory is empty. Deletes directory contents if the directory is not empty. If the directory
+     * does not exist, it is created. The directory itself is not deleted.
+     *
+     * This is not atomic. If the process crashes mid-operation, you may end up with a partially empty directory.
+     *
+     * @param dirPath The directory to empty.
+     * @returns void
+     */
+    public emptyDir(dirPath: string): void {
+        fsExtra.emptyDirSync(dirPath);
+    }
+
+    /**
+     * Ensures that the directory exists. If the directory structure does not exist, it is created.
+     *
+     * @param dirPath The directory to ensure exists.
+     * @returns void
+     */
+    public ensureDir(dirPath: string): void {
+        fsExtra.ensureDirSync(dirPath);
+    }
+
+    /**
+     * Ensures that the file exists. If the file that is requested to be created is in directories that do not exist,
+     * these directories are created. If the file already exists, it is NOT MODIFIED.
+     *
+     * @param file The file path to ensure exists.
+     * @returns void
+     */
+    public ensureFile(file: string): void {
+        fsExtra.ensureFileSync(file);
+    }
+
+    /**
+     * Moves a file or directory, even across devices. Overwrites by default.
+     *
+     * Note: When `src` is a file, `dest` must be a file, and when `src` is a directory, `dest` must be a directory.
+     *
+     * This is atomic for same-device single file operations, but not as a whole operation.
+     *
+     * @param src The source file path or directory.
+     * @param dest The destination file path or directory.
+     * @param overwriteDest Whether to overwrite the destination if it already exists.
+     * @returns void
+     */
+    public move(src: string, dest: string, overwriteDest = true): void {
+        fsExtra.moveSync(src, dest, { overwrite: overwriteDest, dereference: true });
+    }
+
+    /**
+     * Change the name or location of a file or directory.
+     *
+     * This is atomic for same-device single file operations, but not as a whole operation.
+     *
+     * @param currentPath The current file or directory path.
+     * @param newPath The new file or directory path.
+     * @returns void
+     */
+    public rename(currentPath: string, newPath: string): void {
+        fsExtra.renameSync(currentPath, newPath);
+    }
+
+    /**
+     * Reads a file and returns the contents as a string.
+     *
+     * @param file The file path to read.
+     * @returns The file contents as a string.
+     */
+    public read(file: string): string {
+        return atomicallyReadSync(file, { encoding: "utf8" });
+    }
+
+    /**
+     * Writes data to a file, overwriting if the file already exists. If the parent directory does not exist, it's
+     * created. File must be a file path (a buffer or a file descriptor is not allowed).
+     *
+     * This is atomic. If the process crashes mid-write, you'll never end up with a partial file.
+     *
+     * @param file The file path to write to.
+     * @param data The data to write to the file.
+     * @returns void
+     */
+    public write(file: string, data: Data): void {
+        atomicallyWriteSync(file, data);
+    }
+
+    /**
+     * Writes an object to a JSON file, overwriting if the file already exists. If the parent directory does not exist,
+     * it's created. File must be a file path (a buffer or a file descriptor is not allowed).
+     *
+     * This is atomic. If the process crashes mid-write, you'll never end up with a partial file.
+     *
+     * @param file The file path to write to.
+     * @param jsonObject The object to write to the file.
+     * @param indentationSpaces The number of spaces to use for indentation.
+     * @returns void
+     */
+    public writeJson(file: string, jsonObject: object, indentationSpaces?: 4): void {
+        const jsonString = JSON.stringify(jsonObject, null, indentationSpaces);
+        this.write(file, jsonString);
+    }
+
+    /**
+     * Appends a string to the bottom of a file. If the file does not exist, it is created.
+     *
+     * This is atomic. If the process crashes mid-write, you'll never end up with a partial file.
+     *
+     * @param file The file path to append to.
+     * @param data The string to append to the file.
+     * @returns void
+     */
+    public append(file: string, data: string): void {
+        this.ensureFile(file);
+        const existingData = this.read(file);
+        const newData = existingData + data;
+        this.write(file, newData);
+    }
+
+    /**
+     * Test whether the given path exists.
+     *
+     * @param fileOrDirPath The path to test.
+     * @returns True if the path exists, false otherwise.
+     */
+    public exists(fileOrDirPath: string): boolean {
+        return fsExtra.pathExistsSync(fileOrDirPath);
+    }
+
+    /**
+     * Reads a JSON file and then parses it into an object.
+     *
+     * @param file The file path to read.
+     * @returns The object parsed from the JSON file.
+     */
+    // biome-ignore lint/suspicious/noExplicitAny: JSON.parse returns any
+    public readJson(file: Path): any {
+        const data = this.read(file as string);
+        return JSON.parse(data);
+    }
+
+    /**
+     * Removes a file or directory. The directory can have contents. If the path does not exist, silently does nothing.
+     *
+     * This is file-atomic, but not directory-atomic. If the process crashes mid-operation, you may end up with some
+     * files removed and some not, but not a partial file.
+     *
+     * @param dir The file path or directory to remove.
+     * @returns void
+     */
+    public remove(dir: string): void {
+        fsExtra.removeSync(dir);
+    }
+
+    /**
+     * Get the extension of a file without the dot in lowercase.
+     *
+     * @param filepath The file path to get the extension of.
+     * @returns The file extension without the dot in lowercase.
+     */
+    public static getFileExtension(filepath: string): string {
+        return path.extname(filepath).replace(".", "").toLowerCase();
+    }
+
+    /**
+     * Get the filename without its extension.
+     *
+     * @param filepath The file path to get the filename of.
+     * @returns The filename without its extension.
+     */
+    public static stripExtension(filepath: string): string {
+        return filepath.slice(0, -path.extname(filepath).length);
+    }
+
+    /**
+     * Get the file name without its extension from a file path.
+     *
+     * @param filepath The file path to get the file name from.
+     * @returns The file name without its extension.
+     */
+    public static getFileName(filepath: string): string {
+        const baseName = path.basename(filepath);
+        return FileSystemSync.stripExtension(baseName);
+    }
+
+    /**
+     * Minify a JSON file by reading, parsing, and then stringifying it with no indentation.
+     *
+     * This is atomic. If the process crashes mid-write, you'll never end up with a partial file.
+     *
+     * @param filePath The file path to minify.
+     * @returns void
+     */
+    public minifyJson(filePath: string): void {
+        const originalData = this.read(filePath);
+        const parsed = JSON.parse(originalData);
+        const minified = JSON.stringify(parsed, null, 0);
+        this.write(filePath, minified);
+    }
+
+    /**
+     * Minify all JSON files in a directory by recursively finding all JSON files and minifying them.
+     *
+     * This is atomic for single files, but not as a whole operation. You'll never end up with a partial file, but you
+     * may end up with a partial directory if the process crashes mid-minify.
+     *
+     * @param dir The directory to minify JSON files in.
+     * @returns void
+     */
+    public minifyJsonInDir(dir: string): void {
+        const dirents = fsExtra.readdirSync(dir, { withFileTypes: true, recursive: true });
+        for (const dirent of dirents) {
+            if (dirent.isFile() && FileSystemSync.getFileExtension(dirent.name) === "json") {
+                const fullPath = path.join(dir, dirent.name);
+                this.minifyJson(fullPath);
+            }
+        }
+    }
+
+    /**
+     * Get all files in a directory, optionally filtering by file type.
+     *
+     * Will always return paths with forward slashes.
+     *
+     * @param directory The directory to get files from.
+     * @param searchRecursive Whether to search recursively.
+     * @param fileTypes An optional array of file extensions to filter by (without the dot).
+     * @returns An array of file paths.
+     */
+    public getFiles(directory: string, searchRecursive = false, fileTypes?: string[]): string[] {
+        if (!fsExtra.pathExistsSync(directory)) {
+            return [];
+        }
+        const dirents = fsExtra.readdirSync(directory, { withFileTypes: true, recursive: searchRecursive });
+        return (
+            dirents
+                // Filter out anything that isn't a file.
+                .filter((dirent) => dirent.isFile())
+                // Filter by file types, if specified.
+                .filter((dirent) => {
+                    const extension = FileSystemSync.getFileExtension(dirent.name);
+                    return !fileTypes || fileTypes.includes(extension);
+                })
+                // Join and normalize the input directory and dirent.name to use forward slashes.
+                .map((dirent) => path.join(dirent.parentPath, dirent.name).replace(/\\/g, "/"))
+        );
+    }
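Usage sketch (editorial note, not part of this patch): the sync class mirrors the async API, which is what lets synchronous call sites such as `JsonUtil` (later in this diff) keep their cache handling. The helper below is hypothetical; the cache-file pattern follows the `JsonUtil` changes.

```ts
// Illustrative only: the synchronous API is handy where async/await is not available,
// e.g. priming a cache file from a constructor before anything else runs.
import { container } from "tsyringe";
import { FileSystemSync } from "@spt/utils/FileSystemSync";

function primeCacheFile(cachePath: string): Record<string, string> {
    const fileSystemSync = container.resolve<FileSystemSync>("FileSystemSync");

    // writeJson stays atomic in the sync variant, so a crash here never truncates the cache.
    if (!fileSystemSync.exists(cachePath)) {
        fileSystemSync.writeJson(cachePath, {});
    }

    return fileSystemSync.readJson(cachePath);
}

// const hashes = primeCacheFile("./user/cache/jsonCache.json"); // path used by JsonUtil below
```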
+
+    /**
+     * Get all directories in a directory.
+     *
+     * Will always return paths with forward slashes.
+     *
+     * @param directory The directory to get directories from.
+     * @param searchRecursive Whether to search recursively.
+     * @returns An array of directory paths.
+     */
+    public getDirectories(directory: string, searchRecursive = false): string[] {
+        if (!fsExtra.pathExistsSync(directory)) {
+            return [];
+        }
+        const dirents = fsExtra.readdirSync(directory, { withFileTypes: true, recursive: searchRecursive });
+        return (
+            dirents
+                // Filter out anything that isn't a directory.
+                .filter((dirent) => dirent.isDirectory())
+                // Join and normalize the input directory and dirent.name to use forward slashes.
+                .map((dirent) => path.join(directory, dirent.name).replace(/\\/g, "/"))
+        );
+    }
+}
diff --git a/project/src/utils/HashUtil.ts b/project/src/utils/HashUtil.ts
index 0553ad306..bd1c4056f 100644
--- a/project/src/utils/HashUtil.ts
+++ b/project/src/utils/HashUtil.ts
@@ -1,13 +1,16 @@
 import crypto from "node:crypto";
-import fs from "node:fs";
 import { TimeUtil } from "@spt/utils/TimeUtil";
 import crc32 from "buffer-crc32";
 import { mongoid } from "mongoid-js";
 import { inject, injectable } from "tsyringe";
+import { FileSystemSync } from "./FileSystemSync";
 
 @injectable()
 export class HashUtil {
-    constructor(@inject("TimeUtil") protected timeUtil: TimeUtil) {}
+    constructor(
+        @inject("TimeUtil") protected timeUtil: TimeUtil,
+        @inject("FileSystemSync") protected fileSystemSync: FileSystemSync,
+    ) {}
 
     /**
      * Create a 24 character id using the sha256 algorithm + current timestamp
@@ -35,7 +38,7 @@ export class HashUtil {
     }
 
     public generateCRC32ForFile(filePath: fs.PathLike): number {
-        return crc32.unsigned(fs.readFileSync(filePath));
+        return crc32.unsigned(this.fileSystemSync.read(filePath));
     }
 
     /**
diff --git a/project/src/utils/HttpFileUtil.ts b/project/src/utils/HttpFileUtil.ts
index 41130c9a2..777090943 100644
--- a/project/src/utils/HttpFileUtil.ts
+++ b/project/src/utils/HttpFileUtil.ts
@@ -1,8 +1,8 @@
-import fs from "node:fs";
+import { createReadStream } from "node:fs";
 import { ServerResponse } from "node:http";
+import { pipeline } from "node:stream/promises";
 import { HttpServerHelper } from "@spt/helpers/HttpServerHelper";
 import { inject, injectable } from "tsyringe";
-import { pipeline } from "stream/promises";
 
 @injectable()
 export class HttpFileUtil {
@@ -16,6 +16,6 @@ export class HttpFileUtil {
 
         resp.setHeader("Content-Type", type);
 
-        await pipeline(fs.createReadStream(filePath), resp);
+        await pipeline(createReadStream(filePath), resp);
     }
 }
diff --git a/project/src/utils/ImporterUtil.ts b/project/src/utils/ImporterUtil.ts
index 18172ddbf..130ffb138 100644
--- a/project/src/utils/ImporterUtil.ts
+++ b/project/src/utils/ImporterUtil.ts
@@ -1,12 +1,12 @@
+import { FileSystem } from "@spt/utils/FileSystem";
 import { JsonUtil } from "@spt/utils/JsonUtil";
 import { ProgressWriter } from "@spt/utils/ProgressWriter";
-import { VFS } from "@spt/utils/VFS";
 import { inject, injectable } from "tsyringe";
 
 @injectable()
 export class ImporterUtil {
     constructor(
-        @inject("VFS") protected vfs: VFS,
+        @inject("FileSystem") protected fileSystem: FileSystem,
         @inject("JsonUtil") protected jsonUtil: JsonUtil,
     ) {}
 
@@ -18,65 +18,22 @@
     ): Promise<T> {
         const result = {} as T;
 
-        // Fetch files and directories concurrently for the root path
-        const [files, directories] = await Promise.all([
-            this.vfs.getFilesAsync(filepath),
-            this.vfs.getDirsAsync(filepath),
-        ]);
-
-        // Queue to process files and directories for the root path first.
- const filesToProcess = files.map((f) => new VisitNode(filepath, f)); - const directoriesToRead = directories.map((d) => `${filepath}${d}`); - - const allFiles = [...filesToProcess]; - - // Method to traverse directories and collect all files recursively - const traverseDirectories = async (directory: string) => { - const [directoryFiles, subDirectories] = await Promise.all([ - this.vfs.getFilesAsync(directory), - this.vfs.getDirsAsync(directory), - ]); - - // Add the files from this directory to the processing queue - const fileNodes = directoryFiles.map((f) => new VisitNode(directory, f)); - allFiles.push(...fileNodes); - - // Recurse into subdirectories - for (const subDirectory of subDirectories) { - await traverseDirectories(`${directory}/${subDirectory}`); - } - }; - - // Start recursive directory traversal - const traversalPromises = directoriesToRead.map((dir) => traverseDirectories(dir)); - await Promise.all(traversalPromises); // Ensure all directories are processed - - // Setup the progress writer with the total amount of files to load - const progressWriter = new ProgressWriter(allFiles.length); - - const fileProcessingPromises = allFiles.map(async (fileNode) => { - if (this.vfs.getFileExtension(fileNode.fileName) !== "json") { - return Promise.resolve(); // Skip non-JSON files - } - - // Ensure we're attempting to read the correct file path - const filePathAndName = `${fileNode.filePath}${fileNode.filePath.endsWith("/") ? "" : "/"}${fileNode.fileName}`; - + const allFiles = await this.fileSystem.getFiles(filepath, true, ["json"]); + const progressWriter = new ProgressWriter(allFiles.length); // Progress bar initialization + const fileProcessingPromises = allFiles.map(async (file) => { try { - const fileData = await this.vfs.readFileAsync(filePathAndName); - onReadCallback(filePathAndName, fileData); - const fileDeserialized = await this.jsonUtil.deserializeWithCacheCheckAsync(fileData, filePathAndName); - onObjectDeserialized(filePathAndName, fileDeserialized); - const strippedFilePath = this.vfs.stripExtension(filePathAndName).replace(filepath, ""); + const fileData = await this.fileSystem.read(file); + onReadCallback(file, fileData); + const fileDeserialized = await this.jsonUtil.deserializeWithCacheCheckAsync(fileData, file); + onObjectDeserialized(file, fileDeserialized); + const strippedFilePath = FileSystem.stripExtension(file).replace(filepath, ""); this.placeObject(fileDeserialized, strippedFilePath, result, strippablePath); } finally { - return progressWriter.increment(); // Update progress after each file + progressWriter.increment(); // Update progress bar after each file is processed } }); - // Wait for all file processing to complete - await Promise.all(fileProcessingPromises).catch((e) => console.error(e)); - + await Promise.all(fileProcessingPromises).catch((e) => console.error(e)); // Wait for promises to resolve return result; } diff --git a/project/src/utils/JsonUtil.ts b/project/src/utils/JsonUtil.ts index 5ddc816cf..a50f1b243 100644 --- a/project/src/utils/JsonUtil.ts +++ b/project/src/utils/JsonUtil.ts @@ -1,6 +1,6 @@ import type { ILogger } from "@spt/models/spt/utils/ILogger"; +import { FileSystemSync } from "@spt/utils/FileSystemSync"; import { HashUtil } from "@spt/utils/HashUtil"; -import { VFS } from "@spt/utils/VFS"; import fixJson from "json-fixer"; import { parse, stringify } from "json5"; import { jsonc } from "jsonc"; @@ -14,7 +14,7 @@ export class JsonUtil { protected jsonCachePath = "./user/cache/jsonCache.json"; constructor( - @inject("VFS") 
protected vfs: VFS, + @inject("FileSystemSync") protected fileSystemSync: FileSystemSync, @inject("HashUtil") protected hashUtil: HashUtil, @inject("PrimaryLogger") protected logger: ILogger, ) {} @@ -160,7 +160,7 @@ export class JsonUtil { } else { // data valid, save hash and call function again this.fileHashes[filePath] = generatedHash; - this.vfs.writeFile(this.jsonCachePath, this.serialize(this.fileHashes, true)); + this.fileSystemSync.write(this.jsonCachePath, this.serialize(this.fileHashes, true)); savedHash = generatedHash; } return data as T; @@ -186,9 +186,9 @@ export class JsonUtil { */ protected ensureJsonCacheExists(jsonCachePath: string): void { if (!this.jsonCacheExists) { - if (!this.vfs.exists(jsonCachePath)) { + if (!this.fileSystemSync.exists(jsonCachePath)) { // Create empty object at path - this.vfs.writeFile(jsonCachePath, "{}"); + this.fileSystemSync.writeJson(jsonCachePath, {}); } this.jsonCacheExists = true; } @@ -201,7 +201,7 @@ export class JsonUtil { protected hydrateJsonCache(jsonCachePath: string): void { // Get all file hashes if (!this.fileHashes) { - this.fileHashes = this.deserialize(this.vfs.readFile(`${jsonCachePath}`)); + this.fileHashes = this.deserialize(this.fileSystemSync.read(`${jsonCachePath}`)); } } diff --git a/project/src/utils/VFS.ts b/project/src/utils/VFS.ts deleted file mode 100644 index 67732ec76..000000000 --- a/project/src/utils/VFS.ts +++ /dev/null @@ -1,283 +0,0 @@ -import "reflect-metadata"; - -import crypto from "node:crypto"; -import fs from "node:fs"; -import * as fsPromises from "node:fs/promises"; -import path, { resolve } from "node:path"; -import type { IAsyncQueue } from "@spt/models/spt/utils/IAsyncQueue"; -import { writeFileSync } from "atomically"; -import { checkSync, lockSync, unlockSync } from "proper-lockfile"; -import { inject, injectable } from "tsyringe"; - -@injectable() -export class VFS { - constructor(@inject("AsyncQueue") protected asyncQueue: IAsyncQueue) {} - - public exists(filepath: fs.PathLike): boolean { - return fs.existsSync(filepath); - } - - public async existsAsync(filepath: fs.PathLike): Promise { - try { - await fsPromises.access(filepath); - - // If no Exception, the file exists - return true; - } catch { - // If Exception, the file does not exist - return false; - } - } - - public copyFile(filepath: fs.PathLike, target: fs.PathLike): void { - fs.copyFileSync(filepath, target); - } - - public async copyAsync(filepath: fs.PathLike, target: fs.PathLike): Promise { - await fsPromises.copyFile(filepath, target); - } - - public createDir(filepath: string): void { - fs.mkdirSync(filepath.substr(0, filepath.lastIndexOf("/")), { recursive: true }); - } - - public async createDirAsync(filepath: string): Promise { - await fsPromises.mkdir(filepath.slice(0, filepath.lastIndexOf("/")), { recursive: true }); - } - - public copyDir(filepath: string, target: string, fileExtensions?: string | string[]): void { - const files = this.getFiles(filepath); - const dirs = this.getDirs(filepath); - - if (!this.exists(target)) { - this.createDir(`${target}/`); - } - - for (const dir of dirs) { - this.copyDir(path.join(filepath, dir), path.join(target, dir), fileExtensions); - } - - for (const file of files) { - // copy all if fileExtension is not set, copy only those with fileExtension if set - if (!fileExtensions || fileExtensions.includes(file.split(".").pop() ?? 
"")) { - this.copyFile(path.join(filepath, file), path.join(target, file)); - } - } - } - - public async copyDirAsync(filepath: string, target: string, fileExtensions: string | string[]): Promise { - const files = this.getFiles(filepath); - const dirs = this.getDirs(filepath); - - if (!(await this.existsAsync(target))) { - await this.createDirAsync(`${target}/`); - } - - for (const dir of dirs) { - await this.copyDirAsync(path.join(filepath, dir), path.join(target, dir), fileExtensions); - } - - for (const file of files) { - // copy all if fileExtension is not set, copy only those with fileExtension if set - if (!fileExtensions || fileExtensions.includes(file.split(".").pop() ?? "")) { - await this.copyAsync(path.join(filepath, file), path.join(target, file)); - } - } - } - - public readFile(...args: Parameters): string { - const read = fs.readFileSync(...args); - if (this.isBuffer(read)) { - return read.toString(); - } - return read; - } - - public async readFileAsync(path: fs.PathLike): Promise { - const read = await fsPromises.readFile(path); - if (this.isBuffer(read)) { - return read.toString(); - } - return read; - } - - private isBuffer(value: Buffer | string): value is Buffer { - return Buffer.isBuffer(value); - } - - public writeFile(filepath: string, data = "", append = false, atomic = true): void { - const options = append ? { flag: "a" } : { flag: "w" }; - - if (!this.exists(filepath)) { - this.createDir(filepath); - fs.writeFileSync(filepath, ""); - } - - const releaseCallback = this.lockFileSync(filepath); - - if (!append && atomic) { - writeFileSync(filepath, data); - } else { - fs.writeFileSync(filepath, data, options); - } - - releaseCallback(); - } - - public async writeFileAsync(filepath: string, data = "", append = false, atomic = true): Promise { - const options = append ? 
{ flag: "a" } : { flag: "w" }; - - if (!(await this.existsAsync(filepath))) { - await this.createDirAsync(filepath); - await fsPromises.writeFile(filepath, ""); - } - - if (!append && atomic) { - await fsPromises.writeFile(filepath, data); - } else { - await fsPromises.writeFile(filepath, data, options); - } - } - - public getFiles(filepath: string): string[] { - return fs.readdirSync(filepath).filter((item) => { - return fs.statSync(path.join(filepath, item)).isFile(); - }); - } - - public async getFilesAsync(filepath: string): Promise { - const entries = await fsPromises.readdir(filepath, { withFileTypes: true }); - return entries.filter((entry) => entry.isFile()).map((entry) => entry.name); - } - - public getDirs(filepath: string): string[] { - return fs.readdirSync(filepath).filter((item) => { - return fs.statSync(path.join(filepath, item)).isDirectory(); - }); - } - - public async getDirsAsync(filepath: string): Promise { - const entries = await fsPromises.readdir(filepath, { withFileTypes: true }); - return entries.filter((entry) => entry.isDirectory()).map((entry) => entry.name); - } - - public removeFile(filepath: string): void { - fs.unlinkSync(filepath); - } - - public async removeFileAsync(filepath: string): Promise { - await fsPromises.unlink(filepath); - } - - public removeDir(filepath: string): void { - const files = this.getFiles(filepath); - const dirs = this.getDirs(filepath); - - for (const dir of dirs) { - this.removeDir(path.join(filepath, dir)); - } - - for (const file of files) { - this.removeFile(path.join(filepath, file)); - } - - fs.rmdirSync(filepath); - } - - public async removeDirAsync(filepath: string): Promise { - const files = this.getFiles(filepath); - const dirs = this.getDirs(filepath); - - const promises: Promise[] = []; - - for (const dir of dirs) { - promises.push(this.removeDirAsync(path.join(filepath, dir))); - } - - for (const file of files) { - promises.push(this.removeFileAsync(path.join(filepath, file))); - } - - await Promise.all(promises); - await fsPromises.rmdir(filepath); - } - - public rename(oldPath: string, newPath: string): void { - fs.renameSync(oldPath, newPath); - } - - public async renameAsync(oldPath: string, newPath: string): Promise { - await fsPromises.rename(oldPath, newPath); - } - - protected lockFileSync(filepath: string): () => void { - return lockSync(filepath); - } - - protected checkFileSync(filepath: string): boolean { - return checkSync(filepath); - } - - protected unlockFileSync(filepath: string): void { - unlockSync(filepath); - } - - public getFileExtension(filepath: string): string | undefined { - return filepath.split(".").pop(); - } - - public stripExtension(filepath: string): string { - return filepath.split(".").slice(0, -1).join("."); - } - - public async minifyAllJsonInDirRecursive(filepath: string): Promise { - const files = this.getFiles(filepath).filter((item) => this.getFileExtension(item) === "json"); - for (const file of files) { - const filePathAndName = path.join(filepath, file); - const minified = JSON.stringify(JSON.parse(this.readFile(filePathAndName))); - this.writeFile(filePathAndName, minified); - } - - const dirs = this.getDirs(filepath); - for (const dir of dirs) { - this.minifyAllJsonInDirRecursive(path.join(filepath, dir)); - } - } - - public async minifyAllJsonInDirRecursiveAsync(filepath: string): Promise { - const files = this.getFiles(filepath).filter((item) => this.getFileExtension(item) === "json"); - for (const file of files) { - const filePathAndName = path.join(filepath, file); - 
const minified = JSON.stringify(JSON.parse(await this.readFile(filePathAndName))); - await this.writeFile(filePathAndName, minified); - } - - const dirs = this.getDirs(filepath); - const promises: Promise[] = []; - for (const dir of dirs) { - promises.push(this.minifyAllJsonInDirRecursive(path.join(filepath, dir))); - } - await Promise.all(promises); - } - - public getFilesOfType(directory: string, fileType: string, files: string[] = []): string[] { - // no dir so exit early - if (!fs.existsSync(directory)) { - return files; - } - - const dirents = fs.readdirSync(directory, { encoding: "utf-8", withFileTypes: true }); - for (const dirent of dirents) { - const res = resolve(directory, dirent.name); - if (dirent.isDirectory()) { - this.getFilesOfType(res, fileType, files); - } else { - if (res.endsWith(fileType)) { - files.push(res); - } - } - } - - return files; - } -} diff --git a/project/src/utils/logging/AbstractWinstonLogger.ts b/project/src/utils/logging/AbstractWinstonLogger.ts index ea79d0cbb..fa1cb8741 100644 --- a/project/src/utils/logging/AbstractWinstonLogger.ts +++ b/project/src/utils/logging/AbstractWinstonLogger.ts @@ -1,21 +1,20 @@ -import crypto from "node:crypto"; -import fs from "node:fs"; import path from "node:path"; -import { promisify } from "node:util"; import { ProgramStatics } from "@spt/ProgramStatics"; import { IDaum } from "@spt/models/eft/itemEvent/IItemEventRouterRequest"; import { LogBackgroundColor } from "@spt/models/spt/logging/LogBackgroundColor"; import { LogTextColor } from "@spt/models/spt/logging/LogTextColor"; import { SptLogger } from "@spt/models/spt/logging/SptLogger"; -import { IAsyncQueue } from "@spt/models/spt/utils/IAsyncQueue"; -import { ICommand } from "@spt/models/spt/utils/ICommand"; import { ILogger } from "@spt/models/spt/utils/ILogger"; +import { FileSystem } from "@spt/utils/FileSystem"; +import { FileSystemSync } from "@spt/utils/FileSystemSync"; import winston, { createLogger, format, transports, addColors } from "winston"; import DailyRotateFile from "winston-daily-rotate-file"; export abstract class AbstractWinstonLogger implements ILogger { protected showDebugInConsole = false; protected filePath: string; + protected fileSystem: FileSystem; + protected fileSystemSync: FileSystemSync; protected logLevels = { levels: { error: 0, warn: 1, succ: 2, info: 3, custom: 4, debug: 5 }, colors: { error: "red", warn: "yellow", succ: "green", info: "white", custom: "black", debug: "gray" }, @@ -31,17 +30,15 @@ export abstract class AbstractWinstonLogger implements ILogger { whiteBG: "whiteBG", }, }; - protected logger: winston.Logger & SptLogger; - protected writeFilePromisify: (path: fs.PathLike, data: string, options?: any) => Promise; - constructor(protected asyncQueue: IAsyncQueue) { + constructor(fileSystem: FileSystem, fileSystemSync: FileSystemSync) { + this.fileSystem = fileSystem; + this.fileSystemSync = fileSystemSync; this.filePath = path.join(this.getFilePath(), this.getFileName()); - this.writeFilePromisify = promisify(fs.writeFile); this.showDebugInConsole = ProgramStatics.DEBUG; - if (!fs.existsSync(this.getFilePath())) { - fs.mkdirSync(this.getFilePath(), { recursive: true }); - } + + this.fileSystemSync.ensureDir(this.getFilePath()); const transportsList: winston.transport[] = []; @@ -58,6 +55,7 @@ export abstract class AbstractWinstonLogger implements ILogger { }), ); } + if (this.isLogToFile()) { transportsList.push( new DailyRotateFile({ @@ -114,11 +112,11 @@ export abstract class AbstractWinstonLogger implements ILogger { 
} public async writeToLogFile(data: string | IDaum): Promise { - const command: ICommand = { - uuid: crypto.randomUUID(), - cmd: async () => await this.writeFilePromisify(this.filePath, `${data}\n`, true), - }; - await this.asyncQueue.waitFor(command); + try { + this.fileSystem.append(this.filePath, `${data}\n`); + } catch (error) { + this.error(`Failed to write to log file: ${error}`); + } } public async log( @@ -140,38 +138,27 @@ export abstract class AbstractWinstonLogger implements ILogger { ], }); - let command: ICommand; - if (typeof data === "string") { - command = { uuid: crypto.randomUUID(), cmd: async () => await tmpLogger.log("custom", data) }; + tmpLogger.log("custom", data); } else { - command = { - uuid: crypto.randomUUID(), - cmd: async () => await tmpLogger.log("custom", JSON.stringify(data, undefined, 4)), - }; + tmpLogger.log("custom", JSON.stringify(data, undefined, 4)); } - - await this.asyncQueue.waitFor(command); } public async error(data: string | Record): Promise { - const command: ICommand = { uuid: crypto.randomUUID(), cmd: async () => await this.logger.error(data) }; - await this.asyncQueue.waitFor(command); + this.logger.error(data); } public async warning(data: string | Record): Promise { - const command: ICommand = { uuid: crypto.randomUUID(), cmd: async () => await this.logger.warn(data) }; - await this.asyncQueue.waitFor(command); + this.logger.warn(data); } public async success(data: string | Record): Promise { - const command: ICommand = { uuid: crypto.randomUUID(), cmd: async () => await this.logger.succ(data) }; - await this.asyncQueue.waitFor(command); + this.logger.succ(data); } public async info(data: string | Record): Promise { - const command: ICommand = { uuid: crypto.randomUUID(), cmd: async () => await this.logger.info(data) }; - await this.asyncQueue.waitFor(command); + this.logger.info(data); } /** @@ -185,23 +172,14 @@ export abstract class AbstractWinstonLogger implements ILogger { textColor: LogTextColor, backgroundColor = LogBackgroundColor.DEFAULT, ): Promise { - const command: ICommand = { - uuid: crypto.randomUUID(), - cmd: async () => await this.log(data, textColor.toString(), backgroundColor.toString()), - }; - - await this.asyncQueue.waitFor(command); + this.log(data, textColor.toString(), backgroundColor.toString()); } public async debug(data: string | Record, onlyShowInConsole = false): Promise { - let command: ICommand; - if (onlyShowInConsole) { - command = { uuid: crypto.randomUUID(), cmd: async () => await this.log(data, this.logLevels.colors.debug) }; + this.log(data, this.logLevels.colors.debug); } else { - command = { uuid: crypto.randomUUID(), cmd: async () => await this.logger.debug(data) }; + this.logger.debug(data); } - - await this.asyncQueue.waitFor(command); } } diff --git a/project/src/utils/logging/WinstonMainLogger.ts b/project/src/utils/logging/WinstonMainLogger.ts index 1ec28a38e..49836b249 100644 --- a/project/src/utils/logging/WinstonMainLogger.ts +++ b/project/src/utils/logging/WinstonMainLogger.ts @@ -1,12 +1,16 @@ import path from "node:path"; -import type { IAsyncQueue } from "@spt/models/spt/utils/IAsyncQueue"; +import { FileSystem } from "@spt/utils/FileSystem"; +import { FileSystemSync } from "@spt/utils/FileSystemSync"; import { AbstractWinstonLogger } from "@spt/utils/logging/AbstractWinstonLogger"; import { inject, injectable } from "tsyringe"; @injectable() export class WinstonMainLogger extends AbstractWinstonLogger { - constructor(@inject("AsyncQueue") protected asyncQueue: IAsyncQueue) { - 
super(asyncQueue); + constructor( + @inject("FileSystem") fileSystem: FileSystem, + @inject("FileSystemSync") fileSystemSync: FileSystemSync, + ) { + super(fileSystem, fileSystemSync); } protected isLogExceptions(): boolean { diff --git a/project/src/utils/logging/WinstonRequestLogger.ts b/project/src/utils/logging/WinstonRequestLogger.ts index 65276e391..b3e6b6d7f 100644 --- a/project/src/utils/logging/WinstonRequestLogger.ts +++ b/project/src/utils/logging/WinstonRequestLogger.ts @@ -1,12 +1,16 @@ import path from "node:path"; -import type { IAsyncQueue } from "@spt/models/spt/utils/IAsyncQueue"; +import { FileSystem } from "@spt/utils/FileSystem"; +import { FileSystemSync } from "@spt/utils/FileSystemSync"; import { AbstractWinstonLogger } from "@spt/utils/logging/AbstractWinstonLogger"; import { inject, injectable } from "tsyringe"; @injectable() export class WinstonRequestLogger extends AbstractWinstonLogger { - constructor(@inject("AsyncQueue") protected asyncQueue: IAsyncQueue) { - super(asyncQueue); + constructor( + @inject("FileSystem") fileSystem: FileSystem, + @inject("FileSystemSync") fileSystemSync: FileSystemSync, + ) { + super(fileSystem, fileSystemSync); } protected isLogExceptions(): boolean {