From 1819e1b0962e5426e458a5444d24eb07d7b65f37 Mon Sep 17 00:00:00 2001
From: Refringe
Date: Tue, 3 Dec 2024 20:15:34 -0500
Subject: [PATCH] Reverts Location Compression Updates

This commit reverts the compression code that was added to lower the amount of
LFS bandwidth that is used. We have spun up a custom LFS server, so we can
safely bring back the raw JSON files as we're not as concerned about bandwidth.
---
 .gitattributes                                |  22 ++-
 .gitignore                                    |   1 -
 .lfsconfig                                    |   3 +
 README.md                                     |   2 -
 .../assets/compressed/database/locations.7z   |   3 -
 project/gulpfile.mjs                          |  66 --------
 project/package.json                          |   9 +-
 project/scripts/databaseCompress.js           |  28 ----
 project/scripts/databaseDecompress.js         |  47 ------
 project/scripts/fix-7za-permissions.js        |  30 ----
 project/src/Program.ts                        |   5 -
 project/src/di/Container.ts                   |   4 -
 .../src/utils/DatabaseDecompressionUtil.ts    | 143 ------------------
 13 files changed, 25 insertions(+), 338 deletions(-)
 create mode 100644 .lfsconfig
 delete mode 100644 project/assets/compressed/database/locations.7z
 delete mode 100644 project/scripts/databaseCompress.js
 delete mode 100644 project/scripts/databaseDecompress.js
 delete mode 100644 project/scripts/fix-7za-permissions.js
 delete mode 100644 project/src/utils/DatabaseDecompressionUtil.ts

diff --git a/.gitattributes b/.gitattributes
index d96d6be44..15a2230dc 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -1,4 +1,22 @@
-# Normalize Line Endings
+# Normalize Line Endings.
 * text=auto eol=lf
 
-project/assets/compressed/database/locations.7z filter=lfs diff=lfs merge=lfs -text
+# LFS Tracking for large (10MB+) JSON files.
+#
+# Can be found with the following command:
+# find project/assets/database -type f -name "*.json" -size +5M | while read file; do
+#     git lfs track "$file"
+# done
+project/assets/database/locations/bigmap/looseLoot.json filter=lfs diff=lfs merge=lfs -text
+project/assets/database/locations/laboratory/looseLoot.json filter=lfs diff=lfs merge=lfs -text
+project/assets/database/locations/interchange/looseLoot.json filter=lfs diff=lfs merge=lfs -text
+project/assets/database/locations/factory4_night/looseLoot.json filter=lfs diff=lfs merge=lfs -text
+project/assets/database/locations/shoreline/looseLoot.json filter=lfs diff=lfs merge=lfs -text
+project/assets/database/locations/tarkovstreets/looseLoot.json filter=lfs diff=lfs merge=lfs -text
+project/assets/database/locations/sandbox/looseLoot.json filter=lfs diff=lfs merge=lfs -text
+project/assets/database/locations/sandbox_high/looseLoot.json filter=lfs diff=lfs merge=lfs -text
+project/assets/database/locations/lighthouse/looseLoot.json filter=lfs diff=lfs merge=lfs -text
+project/assets/database/locations/rezervbase/looseLoot.json filter=lfs diff=lfs merge=lfs -text
+project/assets/database/locations/woods/looseLoot.json filter=lfs diff=lfs merge=lfs -text
+project/assets/database/locations/factory4_day/looseLoot.json filter=lfs diff=lfs merge=lfs -text
+project/assets/database/templates/items.json filter=lfs diff=lfs merge=lfs -text
diff --git a/.gitignore b/.gitignore
index 16b66c3af..f3d39d296 100644
--- a/.gitignore
+++ b/.gitignore
@@ -2,7 +2,6 @@
 *.exe
 *.zip
 *.d.ts
-project/assets/database/locations/
 project/build/
 project/obj/
 project/dist/
diff --git a/.lfsconfig b/.lfsconfig
new file mode 100644
index 000000000..0590e8277
--- /dev/null
+++ b/.lfsconfig
@@ -0,0 +1,3 @@
+[lfs]
+    url = https://lfs.sp-tarkov.com/sp-tarkov/server
+    locksverify = false
diff --git a/README.md b/README.md
index 19690f53e..440a5b7a4 100644
--- a/README.md
+++ b/README.md
@@ -72,8 +72,6 @@ The following commands are available after the initial setup. Run them with `npm
 | `run:profiler`        | Run the project in profiler mode.                             |
 | `gen:types`           | Generate types for the project.                               |
 | `gen:docs`            | Generate documentation for the project.                       |
-| `database:compress`   | Compress database files from working directory into archive.  |
-| `database:decompress` | Decompress database archives into working directory.          |
 
 ### Debugging
 
diff --git a/project/assets/compressed/database/locations.7z b/project/assets/compressed/database/locations.7z
deleted file mode 100644
index 79491b6f4..000000000
--- a/project/assets/compressed/database/locations.7z
+++ /dev/null
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:7d6083cc4da7fe86bdc16a309b0620e7c37b2d2eef06845041483a4b070c4835
-size 22287650
diff --git a/project/gulpfile.mjs b/project/gulpfile.mjs
index 2bc8e02b7..a09fc72b1 100644
--- a/project/gulpfile.mjs
+++ b/project/gulpfile.mjs
@@ -1,6 +1,5 @@
 import crypto from "node:crypto";
 import path from "node:path";
-import { path7za } from "7zip-bin";
 import pkg from "@yao-pkg/pkg";
 import pkgfetch from "@yao-pkg/pkg-fetch";
 import fs from "fs-extra";
@@ -10,7 +9,6 @@ import download from "gulp-download";
 import { exec } from "gulp-execa";
 import rename from "gulp-rename";
 import minimist from "minimist";
-import Seven from "node-7z";
 import * as ResEdit from "resedit";
 import manifest from "./package.json" assert { type: "json" };
 
@@ -36,69 +34,6 @@ const entries = {
 };
 const licenseFile = "../LICENSE.md";
 
-/**
- * Decompresses the database archives from the assets/compressed/database directory into the assets/database directory.
- */
-const decompressArchives = async () => {
-    const compressedDir = path.resolve("assets", "compressed", "database");
-    const assetsDir = path.resolve("assets", "database");
-
-    // Read the compressed directory and filter out only the 7z files.
-    let compressedFiles = [];
-    try {
-        const files = await fs.readdir(compressedDir);
-        compressedFiles = files.filter((file) => file.endsWith(".7z"));
-    } catch (error) {
-        console.error(`Error reading compressed directory: ${error}`);
-        return;
-    }
-
-    if (compressedFiles.length === 0) {
-        console.log("No compressed database archives found.");
-        return;
-    }
-
-    for (const compressedFile of compressedFiles) {
-        const compressedFilePath = path.join(compressedDir, compressedFile);
-        const relativeTargetPath = compressedFile.replace(".7z", "");
-        const targetDir = path.join(assetsDir, relativeTargetPath);
-
-        console.log(`Processing archive: ${compressedFile}`);
-
-        // Clean the target directory before extracting the archive.
-        try {
-            await fs.remove(targetDir);
-            console.log(`Cleaned target directory: ${targetDir}`);
-        } catch (error) {
-            console.error(`Error cleaning target directory ${targetDir}: ${error}`);
-            continue;
-        }
-
-        // Extract the archive.
-        await new Promise((resolve, reject) => {
-            const stream = Seven.extractFull(compressedFilePath, targetDir, {
-                $bin: path7za,
-                overwrite: "a",
-            });
-
-            let hadError = false;
-
-            stream.on("end", () => {
-                if (!hadError) {
-                    console.log(`Successfully decompressed: ${compressedFile}`);
-                    resolve();
-                }
-            });
-
-            stream.on("error", (err) => {
-                hadError = true;
-                console.error(`Error decompressing ${compressedFile}: ${err}`);
-                reject(err);
-            });
-        });
-    }
-};
-
 /**
  * Transpile src files into Javascript with SWC
  */
@@ -255,7 +190,6 @@ const createHashFile = async () => {
 
 // Combine all tasks into addAssets
 const addAssets = gulp.series(
-    decompressArchives,
     copyAssets,
     downloadPnpm,
     copyLicense,
diff --git a/project/package.json b/project/package.json
index 019392686..686621bb4 100644
--- a/project/package.json
+++ b/project/package.json
@@ -1,6 +1,6 @@
 {
     "name": "spt-server",
-    "version": "3.10.0",
+    "version": "3.10.2",
     "author": "SPT Server",
     "license": "NCSA",
     "main": "obj/bundle.js",
@@ -11,7 +11,6 @@
         "node": "20.11.1"
     },
     "scripts": {
-        "postinstall": "node scripts/fix-7za-permissions.js",
         "check:circular": "madge --circular --ts-config tsconfig.json --extensions ts ./src/",
         "lint": "npx @biomejs/biome lint ./",
         "lint:fix": "npx @biomejs/biome lint --write ./",
@@ -32,12 +31,9 @@
         "gen:types": "tsc -p tsconfig.typedef.json --resolveJsonModule",
         "gen:docs": "typedoc --options ./typedoc.json --entryPointStrategy expand ./src",
         "gen:items": "ts-node -r tsconfig-paths/register ./src/tools/ItemTplGenerator/ItemTplGeneratorProgram.ts",
-        "gen:productionquests": "ts-node -r tsconfig-paths/register ./src/tools/ProductionQuestsGen/ProductionQuestsGenProgram.ts",
-        "database:compress": "node scripts/databaseCompress.js",
-        "database:decompress": "node scripts/databaseDecompress.js"
+        "gen:productionquests": "ts-node -r tsconfig-paths/register ./src/tools/ProductionQuestsGen/ProductionQuestsGenProgram.ts"
     },
     "dependencies": {
-        "7zip-bin": "^5.2.0",
         "atomically": "~1.7",
         "buffer-crc32": "~1.0",
         "date-fns": "~3.6",
@@ -47,7 +43,6 @@
         "json5": "~2.2",
         "jsonc": "~2.0",
         "mongoid-js": "~1.3",
-        "node-7z": "^3.0.0",
         "proper-lockfile": "~4.1",
         "reflect-metadata": "~0.2",
         "semver": "~7.6",
diff --git a/project/scripts/databaseCompress.js b/project/scripts/databaseCompress.js
deleted file mode 100644
index ea27f262a..000000000
--- a/project/scripts/databaseCompress.js
+++ /dev/null
@@ -1,28 +0,0 @@
-// This script compresses the locations database into a 7z archive.
-
-const Seven = require("node-7z");
-const path = require("node:path");
-const { path7za } = require("7zip-bin");
-
-const archivePath = path.resolve(__dirname, "../assets/compressed/database/locations.7z");
-const locationsDir = path.resolve(__dirname, "../assets/database/locations/*.json");
-
-let hadError = false;
-
-const myStream = Seven.add(archivePath, locationsDir, {
-    recursive: true,
-    $bin: path7za,
-    method: ["0=LZMA2"],
-    compressionLevel: 9,
-});
-
-myStream.on("end", () => {
-    if (!hadError) {
-        console.log("Compression completed successfully.");
-    }
-});
-
-myStream.on("error", (err) => {
-    hadError = true;
-    console.error(`Error compressing locations: ${err}`);
-});
diff --git a/project/scripts/databaseDecompress.js b/project/scripts/databaseDecompress.js
deleted file mode 100644
index 030d9b6a8..000000000
--- a/project/scripts/databaseDecompress.js
+++ /dev/null
@@ -1,47 +0,0 @@
-// This script removes the contents of the locations directory and then decompresses
-// the locations database from a 7z archive.
-
-const Seven = require("node-7z");
-const path = require("node:path");
-const fs = require("fs-extra");
-const { path7za } = require("7zip-bin");
-
-const archivePath = path.resolve(__dirname, "../assets/compressed/database/locations.7z");
-const databaseDir = path.resolve(__dirname, "../assets/database/locations");
-
-(async () => {
-    try {
-        const archiveExists = await fs.pathExists(archivePath);
-        if (!archiveExists) {
-            console.error("Error: Archive file does not exist:", archivePath);
-            process.exit(1);
-        }
-
-        const locationsDir = path.join(databaseDir, "locations");
-        if (await fs.pathExists(locationsDir)) {
-            await fs.remove(locationsDir);
-            console.log("Existing locations directory removed.");
-        }
-
-        let hadError = false;
-
-        const myStream = Seven.extractFull(archivePath, databaseDir, {
-            $bin: path7za,
-            overwrite: "a",
-        });
-
-        myStream.on("end", () => {
-            if (!hadError) {
-                console.log("Decompression completed successfully.");
-            }
-        });
-
-        myStream.on("error", (err) => {
-            hadError = true;
-            console.error(`Error decompressing locations: ${err}`);
-        });
-    } catch (err) {
-        console.error(`Error during decompression: ${err}`);
-        process.exit(1);
-    }
-})();
diff --git a/project/scripts/fix-7za-permissions.js b/project/scripts/fix-7za-permissions.js
deleted file mode 100644
index e166bbc90..000000000
--- a/project/scripts/fix-7za-permissions.js
+++ /dev/null
@@ -1,30 +0,0 @@
-// This script sets the execute permission on the 7za binary if you're on macOS or Linux.
-
-const fs = require("node:fs");
-const path = require("node:path");
-const os = require("node:os");
-
-const platform = os.platform();
-const arch = os.arch();
-
-let sevenZipPath;
-
-if (platform === "darwin") {
-    // macOS
-    sevenZipPath = path.join(__dirname, "..", "node_modules", "7zip-bin", "mac", arch, "7za");
-} else if (platform === "linux") {
-    // Linux
-    sevenZipPath = path.join(__dirname, "..", "node_modules", "7zip-bin", "linux", arch, "7za");
-} else {
-    // Windows (or other)
-    process.exit(0);
-}
-
-fs.chmod(sevenZipPath, 0o755, (err) => {
-    if (err) {
-        console.error("Failed to set execute permission on 7za:", err);
-        process.exit(1);
-    } else {
-        console.log("Execute permission set on 7za.");
-    }
-});
diff --git a/project/src/Program.ts b/project/src/Program.ts
index a90294a7c..167cdc94a 100644
--- a/project/src/Program.ts
+++ b/project/src/Program.ts
@@ -2,7 +2,6 @@ import { ErrorHandler } from "@spt/ErrorHandler";
 import { Container } from "@spt/di/Container";
 import type { PreSptModLoader } from "@spt/loaders/PreSptModLoader";
 import { App } from "@spt/utils/App";
-import { DatabaseDecompressionUtil } from "@spt/utils/DatabaseDecompressionUtil";
 import { Watermark } from "@spt/utils/Watermark";
 import { container } from "tsyringe";
 
@@ -22,10 +21,6 @@ export class Program {
         const watermark = childContainer.resolve<Watermark>("Watermark");
         watermark.initialize();
 
-        const databaseDecompressionUtil =
-            childContainer.resolve<DatabaseDecompressionUtil>("DatabaseDecompressionUtil");
-        await databaseDecompressionUtil.initialize();
-
         const preSptModLoader = childContainer.resolve<PreSptModLoader>("PreSptModLoader");
         Container.registerListTypes(childContainer);
         await preSptModLoader.load(childContainer);
diff --git a/project/src/di/Container.ts b/project/src/di/Container.ts
index d66367ab2..4433426c4 100644
--- a/project/src/di/Container.ts
+++ b/project/src/di/Container.ts
@@ -253,7 +253,6 @@ import { StaticRouterModService } from "@spt/services/mod/staticRouter/StaticRou
 import { App } from "@spt/utils/App";
 import { AsyncQueue } from "@spt/utils/AsyncQueue";
 import { CompareUtil } from "@spt/utils/CompareUtil";
-import { DatabaseDecompressionUtil } from "@spt/utils/DatabaseDecompressionUtil";
 import { DatabaseImporter } from "@spt/utils/DatabaseImporter";
 import { EncodingUtil } from "@spt/utils/EncodingUtil";
 import { HashUtil } from "@spt/utils/HashUtil";
@@ -420,9 +419,6 @@ export class Container {
     private static registerUtils(depContainer: DependencyContainer): void {
         // Utils
         depContainer.register<App>("App", App, { lifecycle: Lifecycle.Singleton });
-        depContainer.register<DatabaseDecompressionUtil>("DatabaseDecompressionUtil", DatabaseDecompressionUtil, {
-            lifecycle: Lifecycle.Singleton,
-        });
         depContainer.register<DatabaseImporter>("DatabaseImporter", DatabaseImporter, {
             lifecycle: Lifecycle.Singleton,
         });
diff --git a/project/src/utils/DatabaseDecompressionUtil.ts b/project/src/utils/DatabaseDecompressionUtil.ts
deleted file mode 100644
index 17ca893df..000000000
--- a/project/src/utils/DatabaseDecompressionUtil.ts
+++ /dev/null
@@ -1,143 +0,0 @@
-import * as path from "node:path";
-import { path7za } from "7zip-bin";
-import { ILogger } from "@spt/models/spt/utils/ILogger";
-import * as fs from "fs-extra";
-import * as Seven from "node-7z";
-import { inject, injectable } from "tsyringe";
-
-@injectable()
-export class DatabaseDecompressionUtil {
-    private compressedDir: string;
-    private assetsDir: string;
-    private compiled: boolean;
-
-    constructor(@inject("PrimaryLogger") protected logger: ILogger) {
-        this.compressedDir = path.normalize("./assets/compressed/database");
-        this.assetsDir = path.normalize("./assets/database");
-        this.compiled = this.isCompiled();
-    }
-
-    /**
-     * Checks if the application is running in a compiled environment. A simple check is done to see if the relative
-     * assets directory exists. If it does not, the application is assumed to be running in a compiled environment. All
-     * relative asset paths are different within a compiled environment, so this simple check is sufficient.
-     */
-    private isCompiled(): boolean {
-        const assetsDir = path.normalize("./assets");
-        return !fs.existsSync(assetsDir);
-    }
-
-    /**
-     * Initializes the database compression utility.
-     *
-     * This method will decompress all 7-zip archives within the compressed database directory. The decompressed files
-     * are placed in their respective directories based on the name and location of the compressed file.
-     */
-    public async initialize(): Promise<void> {
-        if (this.compiled) {
-            this.logger.debug("Skipping database decompression in compiled environment");
-            return;
-        }
-
-        try {
-            const compressedFiles = await this.getCompressedFiles();
-            if (compressedFiles.length === 0) {
-                this.logger.debug("No database archives found");
-                return;
-            }
-
-            for (const compressedFile of compressedFiles) {
-                await this.processCompressedFile(compressedFile);
-            }
-            this.logger.info("Database archives processed");
-        } catch (error) {
-            this.logger.error(`Error handling database archives: ${error}`);
-        }
-    }
-
-    /**
-     * Retrieves a list of all 7-zip archives within the compressed database directory.
-     */
-    private async getCompressedFiles(): Promise<string[]> {
-        try {
-            const files = await fs.readdir(this.compressedDir);
-            const compressedFiles = files.filter((file) => file.endsWith(".7z"));
-            return compressedFiles;
-        } catch (error) {
-            this.logger.error(`Error reading database archive directory: ${error}`);
-            return [];
-        }
-    }
-
-    /**
-     * Processes a compressed file by checking if the target directory is empty, and if so, decompressing the file into
-     * the target directory.
-     */
-    private async processCompressedFile(compressedFileName: string): Promise<void> {
-        this.logger.info("Processing database archives...");
-
-        const compressedFilePath = path.join(this.compressedDir, compressedFileName);
-        const relativeTargetPath = compressedFileName.replace(".7z", "");
-        const targetDir = path.join(this.assetsDir, relativeTargetPath);
-
-        try {
-            this.logger.debug(`Processing: ${compressedFileName}`);
-
-            const isTargetDirEmpty = await this.isDirectoryEmpty(targetDir);
-            if (!isTargetDirEmpty) {
-                this.logger.debug(`Archive target directory not empty, skipping: ${targetDir}`);
-                return;
-            }
-
-            await this.decompressFile(compressedFilePath, targetDir);
-
-            this.logger.debug(`Successfully processed: ${compressedFileName}`);
-        } catch (error) {
-            this.logger.error(`Error processing ${compressedFileName}: ${error}`);
-        }
-    }
-
-    /**
-     * Checks if a directory exists and is empty.
-     */
-    private async isDirectoryEmpty(directoryPath: string): Promise<boolean> {
-        try {
-            const exists = await fs.pathExists(directoryPath);
-            if (!exists) {
-                return true; // Directory doesn't exist, consider it empty.
-            }
-            const files = await fs.readdir(directoryPath);
-            return files.length === 0;
-        } catch (error) {
-            this.logger.error(`Error checking if directory is empty ${directoryPath}: ${error}`);
-            throw error;
-        }
-    }
-
-    /**
-     * Decompresses a 7-zip archive to the target directory.
-     */
-    private decompressFile(archivePath: string, destinationPath: string): Promise<void> {
-        return new Promise((resolve, reject) => {
-            const myStream = Seven.extractFull(archivePath, destinationPath, {
-                $bin: path7za,
-                overwrite: "a",
-            });
-
-            let hadError = false;
-
-            myStream.on("end", () => {
-                if (!hadError) {
-                    this.logger.debug(`Decompressed ${archivePath} to ${destinationPath}`);
-                    resolve();
-                }
-            });
-
-            myStream.on("error", (err) => {
-                hadError = true;
-                this.logger.error(`Error decompressing ${archivePath}: ${err}`);
-                reject(err);
-            });
-        });
-    }
-}
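
A minimal way to sanity-check the restored LFS setup after this revert, assuming Git LFS is installed locally and the endpoint declared in .lfsconfig (https://lfs.sp-tarkov.com/sp-tarkov/server) is reachable; these are standard git-lfs commands offered only as a sketch, not something this commit adds:

    git lfs install          # set up the Git LFS filters for this clone (safe to re-run)
    git lfs pull             # download the LFS objects for the current checkout, replacing pointer files with the real JSON
    git lfs ls-files --size  # list the files tracked via .gitattributes along with their object sizes

If `git lfs ls-files` shows all thirteen tracked paths (the twelve looseLoot.json files plus templates/items.json) and the working copies are no longer small pointer stubs, the server endpoint and tracking rules are working as intended.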