diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..c2658d7
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1 @@
+node_modules/
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..7298b43
--- /dev/null
+++ b/README.md
@@ -0,0 +1,67 @@
+# rsc-archiver
+compress and decompress runescape classic .jag/.mem cache files. these files
+contain a proprietary header describing the size of the archive and of each
+individual entry (file). filenames are stored as a "hash", so the original
+names can't be recovered without brute-forcing (unless they're under
+~5 characters).
+
+## install
+
+    $ npm install @2003scape/rsc-archiver
+
+## usage
+### cli
+```
+rsc-archiver <command>
+
+Commands:
+  rsc-archiver x <archive> <file> [<out>]  extract a file from an archive
+                                                            [aliases: extract]
+  rsc-archiver a <archive> <file> [-g]     add a file to an archive
+                                                                [aliases: add]
+  rsc-archiver d <archive> <file>          remove a file from an archive
+                                                             [aliases: delete]
+  rsc-archiver l <archive>                 list hashes and file sizes in an
+                                           archive            [aliases: list]
+
+Options:
+  --help     Show help                                               [boolean]
+  --version  Show version number                                     [boolean]
+```
+
+### api
+```javascript
+const fs = require('fs');
+const { JagArchive } = require('@2003scape/rsc-archiver');
+
+let rawJag = fs.readFileSync('./data204/sounds1.mem');
+let archive = new JagArchive();
+archive.readArchive(rawJag);
+console.log(`cache has ${archive.entries.size} files`);
+fs.writeFileSync('death.pcm', archive.getEntry('death.pcm'));
+
+const testArchive = new JagArchive();
+testArchive.putEntry('test.txt', Buffer.from('test string'));
+fs.writeFileSync('./data204/test.jag', testArchive.toArchive(true));
+
+rawJag = fs.readFileSync('./data204/test.jag');
+archive = new JagArchive();
+archive.readArchive(rawJag);
+console.log(`cache has ${archive.entries.size} files`);
+console.log(archive.getEntry('test.txt').toString());
+```
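+
+### hashes
+entry names are stored as 32-bit hashes (via the exported `hashFilename`), so
+listing an archive only shows numbers. below is a minimal brute-force sketch
+for recovering short names; `findFilename`, its alphabet and the length limit
+are illustrative assumptions, not part of the library:
+```javascript
+const { hashFilename } = require('@2003scape/rsc-archiver');
+
+// hypothetical helper: depth-first search over every candidate name up to
+// maxLength characters, returning the first one whose hash matches
+function findFilename(hash, alphabet = 'abcdefghijklmnopqrstuvwxyz0123456789.',
+    maxLength = 5) {
+    const search = prefix => {
+        if (prefix.length && hashFilename(prefix) === hash) {
+            return prefix;
+        }
+
+        if (prefix.length >= maxLength) {
+            return null;
+        }
+
+        for (const character of alphabet) {
+            const match = search(prefix + character);
+
+            if (match) {
+                return match;
+            }
+        }
+
+        return null;
+    };
+
+    return search('');
+}
+
+console.log(findFilename(hashFilename('logo'))); // logo (or a colliding name)
+```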
+
+## license
+Copyright 2019 2003Scape Team
+
+This program is free software: you can redistribute it and/or modify it under
+the terms of the GNU Affero General Public License as published by the
+Free Software Foundation, either version 3 of the License, or (at your option)
+any later version.
+
+This program is distributed in the hope that it will be useful, but WITHOUT ANY
+WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A
+PARTICULAR PURPOSE. See the GNU Affero General Public License for more details.
+
+You should have received a copy of the GNU Affero General Public License along
+with this program. If not, see http://www.gnu.org/licenses/.
diff --git a/example.js b/example.js
new file mode 100644
index 0000000..4c01a67
--- /dev/null
+++ b/example.js
@@ -0,0 +1,18 @@
+const fs = require('fs');
+const { JagArchive } = require('./src');
+
+let rawJag = fs.readFileSync('./data204/sounds1.mem');
+let archive = new JagArchive();
+archive.readArchive(rawJag);
+console.log(`cache has ${archive.entries.size} files`);
+fs.writeFileSync('death.pcm', archive.getEntry('death.pcm'));
+
+const testArchive = new JagArchive();
+testArchive.putEntry('test.txt', Buffer.from('test string'));
+fs.writeFileSync('./data204/test.jag', testArchive.toArchive(true));
+
+rawJag = fs.readFileSync('./data204/test.jag');
+archive = new JagArchive();
+archive.readArchive(rawJag);
+console.log(`cache has ${archive.entries.size} files`);
+console.log(archive.getEntry('test.txt').toString());
diff --git a/package-lock.json b/package-lock.json
new file mode 100644
index 0000000..4bfd89e
--- /dev/null
+++ b/package-lock.json
@@ -0,0 +1,210 @@
+{
+  "name": "rsc-archiver",
+  "version": "1.0.0",
+  "lockfileVersion": 1,
+  "requires": true,
+  "dependencies": {
+    "@ledgerhq/compressjs": {
+      "version": "1.3.2",
+      "resolved": "https://registry.npmjs.org/@ledgerhq/compressjs/-/compressjs-1.3.2.tgz",
+      "integrity": "sha512-gonFwAifRkSYDO7rt3NIBlvjvY8Nw+NM6LT1SuOBppuvoKbYtBViNh3EBPbP86+3Y4ux7DLUsNiUlqOgubJsdA==",
+      "requires": {
+        "commander": "^2.20.0"
+      }
+    },
+    "ansi-regex": {
+      "version": "4.1.0",
+      "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-4.1.0.tgz",
+      "integrity": "sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg=="
+    },
+    "ansi-styles": {
+      "version": "3.2.1",
+      "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
+      "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
+      "requires": {
+        "color-convert": "^1.9.0"
+      }
+    },
+    "camelcase": {
+      "version": "5.3.1",
+      "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz",
+      "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg=="
+    },
+    "cliui": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/cliui/-/cliui-5.0.0.tgz",
+      "integrity": "sha512-PYeGSEmmHM6zvoef2w8TPzlrnNpXIjTipYK780YswmIP9vjxmd6Y2a3CB2Ks6/AU8NHjZugXvo8w3oWM2qnwXA==",
+      "requires": {
+        "string-width": "^3.1.0",
+        "strip-ansi": "^5.2.0",
+        "wrap-ansi": "^5.1.0"
+      }
+    },
+    "color-convert": {
+      "version": "1.9.3",
+      "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz",
+      "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==",
+      "requires": {
+        "color-name": "1.1.3"
+      }
+    },
+    "color-name": {
+      "version": "1.1.3",
+      "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz",
+      "integrity": "sha1-p9BVi9icQveV3UIyj3QIMcpTvCU="
+    },
+    "commander": {
+      "version": "2.20.3",
+      "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz",
+      "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ=="
+    },
+    "decamelize": {
+      "version": "1.2.0",
+      "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
+      "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA="
+    },
+    "emoji-regex": {
+      "version": "7.0.3",
+      "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz",
+      "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA=="
+    },
+    "find-up": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/find-up/-/find-up-3.0.0.tgz",
+      "integrity": "sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg==",
+      "requires": {
+        "locate-path": "^3.0.0"
+      }
+    },
+    "get-caller-file": {
+      "version": "2.0.5",
+      "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
+      "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="
+    },
+    "is-fullwidth-code-point": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz",
+      "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8="
+    },
+    "locate-path": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-3.0.0.tgz",
+      "integrity": "sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A==",
+      "requires": {
+        "p-locate": "^3.0.0",
+        "path-exists": "^3.0.0"
+      }
+    },
+    "p-limit": {
+      "version": "2.2.1",
+      "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.1.tgz",
+      "integrity": "sha512-85Tk+90UCVWvbDavCLKPOLC9vvY8OwEX/RtKF+/1OADJMVlFfEHOiMTPVyxg7mk/dKa+ipdHm0OUkTvCpMTuwg==",
+      "requires": {
+        "p-try": "^2.0.0"
+      }
+    },
+    "p-locate": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-3.0.0.tgz",
+      "integrity": "sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ==",
+      "requires": {
+        "p-limit": "^2.0.0"
+      }
+    },
+    "p-try": {
+      "version": "2.2.0",
+      "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz",
+      "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ=="
+    },
+    "path-exists": {
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-3.0.0.tgz",
+      "integrity": "sha1-zg6+ql94yxiSXqfYENe1mwEP1RU="
+    },
+    "pretty-bytes": {
+      "version": "5.3.0",
+      "resolved": "https://registry.npmjs.org/pretty-bytes/-/pretty-bytes-5.3.0.tgz",
+      "integrity": "sha512-hjGrh+P926p4R4WbaB6OckyRtO0F0/lQBiT+0gnxjV+5kjPBrfVBFCsCLbMqVQeydvIoouYTCmmEURiH3R1Bdg=="
+    },
+    "require-directory": {
+      "version": "2.1.1",
+      "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
+      "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I="
+    },
+    "require-main-filename": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz",
+      "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg=="
+    },
+    "set-blocking": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
+      "integrity": "sha1-BF+XgtARrppoA93TgrJDkrPYkPc="
+    },
+    "string-width": {
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz",
+      "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==",
+      "requires": {
+        "emoji-regex": "^7.0.1",
+        "is-fullwidth-code-point": "^2.0.0",
+        "strip-ansi": "^5.1.0"
+      }
+    },
"sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA==", + "requires": { + "ansi-regex": "^4.1.0" + } + }, + "which-module": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz", + "integrity": "sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho=" + }, + "wrap-ansi": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-5.1.0.tgz", + "integrity": "sha512-QC1/iN/2/RPVJ5jYK8BGttj5z83LmSKmvbvrXPNCLZSEb32KKVDJDl/MOt2N01qU2H/FkzEa9PKto1BqDjtd7Q==", + "requires": { + "ansi-styles": "^3.2.0", + "string-width": "^3.0.0", + "strip-ansi": "^5.0.0" + } + }, + "y18n": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz", + "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==" + }, + "yargs": { + "version": "14.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-14.2.0.tgz", + "integrity": "sha512-/is78VKbKs70bVZH7w4YaZea6xcJWOAwkhbR0CFuZBmYtfTYF0xjGJF43AYd8g2Uii1yJwmS5GR2vBmrc32sbg==", + "requires": { + "cliui": "^5.0.0", + "decamelize": "^1.2.0", + "find-up": "^3.0.0", + "get-caller-file": "^2.0.1", + "require-directory": "^2.1.1", + "require-main-filename": "^2.0.0", + "set-blocking": "^2.0.0", + "string-width": "^3.0.0", + "which-module": "^2.0.0", + "y18n": "^4.0.0", + "yargs-parser": "^15.0.0" + } + }, + "yargs-parser": { + "version": "15.0.0", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-15.0.0.tgz", + "integrity": "sha512-xLTUnCMc4JhxrPEPUYD5IBR1mWCK/aT6+RJ/K29JY2y1vD+FhtgKK0AXRWvI262q3QSffAQuTouFIKUuHX89wQ==", + "requires": { + "camelcase": "^5.0.0", + "decamelize": "^1.2.0" + } + } + } +} diff --git a/package.json b/package.json new file mode 100644 index 0000000..ff2846e --- /dev/null +++ b/package.json @@ -0,0 +1,29 @@ +{ + "name": "rsc-archiver", + "version": "1.0.0", + "description": "compress and decompress runescape classic cache archives", + "main": "index.js", + "bin": "./src/bin.js", + "repository": { + "type": "git", + "url": "git+https://github.com/2003scape/rsc-archiver.git" + }, + "keywords": [ + "runescape", + "rsc", + "cache", + "archive", + "bzip" + ], + "author": "2003Scape Team", + "license": "AGPL-3.0+", + "bugs": { + "url": "https://github.com/2003scape/rsc-archiver/issues" + }, + "homepage": "https://github.com/2003scape/rsc-archiver#readme", + "dependencies": { + "@ledgerhq/compressjs": "^1.3.2", + "pretty-bytes": "^5.3.0", + "yargs": "^14.2.0" + } +} diff --git a/src/bin.js b/src/bin.js new file mode 100755 index 0000000..35a95b5 --- /dev/null +++ b/src/bin.js @@ -0,0 +1,158 @@ +#!/usr/bin/env node + +const fs = require('fs').promises; +const path = require('path'); +const pkg = require('../package'); +const prettyBytes = require('pretty-bytes'); +const yargs = require('yargs'); +const { JagArchive, hashFilename } = require('./'); + +yargs + .scriptName(pkg.name) + .version(pkg.version) + .command( + ['x []', 'extract'], + 'extract a file from an archive', + yargs => { + yargs.positional('archive', { + description: 'the .jag or .mem archive file', + type: 'string' + }); + + yargs.positional('file', { + description: 'the filename or hash within the archive', + type: 'string' + }); + + yargs.positional('out', { + description: 'the filename to write to', + type: 'string', + optional: true + }); + }, + async argv => { + const archive = new JagArchive(); + + try { + archive.readArchive(await fs.readFile(argv.archive)); + + const file = 
+
+yargs
+    .scriptName(pkg.name)
+    .version(pkg.version)
+    .command(
+        ['x <archive> <file> [<out>]', 'extract'],
+        'extract a file from an archive',
+        yargs => {
+            yargs.positional('archive', {
+                description: 'the .jag or .mem archive file',
+                type: 'string'
+            });
+
+            yargs.positional('file', {
+                description: 'the filename or hash within the archive',
+                type: 'string'
+            });
+
+            yargs.positional('out', {
+                description: 'the filename to write to',
+                type: 'string',
+                optional: true
+            });
+        },
+        async argv => {
+            const archive = new JagArchive();
+
+            try {
+                archive.readArchive(await fs.readFile(argv.archive));
+
+                const file =
+                    archive.entries.get(+argv.file) ||
+                    archive.entries.get(hashFilename(argv.file));
+
+                if (!file) {
+                    process.exitCode = 1;
+                    console.error(`file/hash "${argv.file}" not found in "` +
+                        `${argv.archive}"`);
+                    return;
+                }
+
+                argv.out = argv.out || argv.file;
+
+                await fs.writeFile(path.join('.', argv.out), file);
+            } catch (e) {
+                process.exitCode = 1;
+                console.error(e);
+            }
+        })
+    .command(
+        ['a <archive> <file> [-g]', 'add'],
+        'add a file to an archive',
+        yargs => {
+            yargs.option('g', {
+                alias: 'group',
+                description: 'compress files in one round instead of ' +
+                    'individually',
+                type: 'boolean',
+                default: false
+            });
+
+            yargs.positional('archive', {
+                description: 'the .jag or .mem archive file',
+                type: 'string'
+            });
+
+            yargs.positional('file', {
+                description: 'the file you would like to add',
+                type: 'string'
+            });
+        },
+        async argv => {
+            const archive = new JagArchive();
+
+            try {
+                archive.readArchive(await fs.readFile(argv.archive));
+            } catch (e) {
+                // file doesn't exist, but writeFile will create it
+            }
+
+            try {
+                const filename = path.basename(argv.file);
+                archive.putEntry(filename, await fs.readFile(argv.file));
+                await fs.writeFile(argv.archive, archive.toArchive(!argv.g));
+            } catch (e) {
+                process.exitCode = 1;
+                console.error(e);
+            }
+        })
+    .command(
+        ['d <archive> <file>', 'delete'],
+        'remove a file from an archive',
+        yargs => {
+            yargs.positional('archive', {
+                description: 'the .jag or .mem archive file',
+                type: 'string'
+            });
+
+            yargs.positional('file', {
+                description: 'the filename or hash you would like to remove',
+                type: 'string'
+            });
+        },
+        async argv => {
+            const archive = new JagArchive();
+
+            try {
+                archive.readArchive(await fs.readFile(argv.archive));
+
+                // only report "not found" if neither the raw hash nor the
+                // hashed filename matched an entry
+                if (!archive.entries.delete(+argv.file) &&
+                    !archive.entries.delete(hashFilename(argv.file))) {
+                    console.log(`"${argv.file}" not found in "` +
+                        `${argv.archive}"`);
+                    return;
+                }
+
+                await fs.writeFile(argv.archive, archive.toArchive());
+            } catch (e) {
+                process.exitCode = 1;
+                console.error(e);
+            }
+        })
+    .command(
+        ['l <archive>', 'list'],
+        'list hashes and file sizes in an archive',
+        yargs => {
+            yargs.positional('archive', {
+                description: 'the .jag or .mem archive file',
+                type: 'string'
+            });
+        },
+        async argv => {
+            const archive = new JagArchive();
+
+            try {
+                archive.readArchive(await fs.readFile(argv.archive));
+            } catch (e) {
+                process.exitCode = 1;
+                console.error(e);
+                return;
+            }
+
+            console.log('hash\t\tsize');
+
+            for (const [hash, entry] of archive.entries) {
+                console.log(`${hash}\t${entry.length} (` +
+                    `${prettyBytes(entry.length)})`);
+            }
+        })
+    .demandCommand()
+    .argv;
diff --git a/src/index.js b/src/index.js
new file mode 100644
index 0000000..5cc1c59
--- /dev/null
+++ b/src/index.js
@@ -0,0 +1,203 @@
+const JagBuffer = require('./jag-buffer');
+// we have to use this fork of compressjs as the main one has issues compiling
+// with browserify and webpack
+const { Bzip2 } = require('@ledgerhq/compressjs');
+
+// "BZ" is the magic signature, "h" is for huffman and "1" is the compression
+// level (from 1-9)
+const BZIP_HEADER = Buffer.from(['B', 'Z', 'h', '1'].map(c => c.charCodeAt(0)));
+
+// maximum number of files one archive can hold
+const MAX_ENTRIES = 65535;
+
+// maximum file size for the overall cache or individual entries
+const MAX_FILE_SIZE = 16777215;
+
+// convert a file name to the hash used in the archive index
+function hashFilename(filename) {
+    filename = filename.toUpperCase();
+
+    let hash = 0;
+
+    for (let i = 0; i < filename.length; i += 1) {
+        hash = (((hash * 61) | 0) +
+            filename.charCodeAt(i)) - 32;
+    }
+
+    return hash;
+}
+
+// add the bzip magic header and decompress it
+function bzipDecompress(compressed) {
+    return Buffer.from(
+        Bzip2.decompressFile(Buffer.concat([BZIP_HEADER, compressed])));
+}
+
+// compress data and remove the bzip magic header
+function bzipCompress(data) {
+    return Buffer.from(Bzip2.compressFile(data, undefined, 1))
+        .slice(BZIP_HEADER.length);
+}
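+
+// on-disk archive layout, as read and written below:
+//   header (6 bytes): uint24 decompressed size, uint24 compressed size
+//   entry table: uint16 entry count, then 10 bytes per entry:
+//     int32 filename hash, uint24 decompressed size, uint24 compressed size
+//   entry data: each entry's bytes (bzipped when its two sizes differ),
+//     concatenated in table order
+// when the archive is written with "group" compression, the entry table and
+// data are bzipped as a whole instead of per-entry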
+
+class JagArchive {
+    constructor() {
+        // { fileHash: Buffer }
+        this.entries = new Map();
+    }
+
+    // read the archive sizes
+    readHeader() {
+        // the decompressed size of the archive
+        this.size = this.header.getUInt3();
+
+        // the size of the compressed buffer (the archive minus its 6 header
+        // bytes)
+        this.compressedSize = this.header.getUInt3();
+    }
+
+    // check if the entire archive needs to be decompressed
+    decompress() {
+        if (!this.zippedBuffer || !this.zippedBuffer.length) {
+            throw new Error('no archive to decompress');
+        }
+
+        if (this.size !== this.compressedSize) {
+            this.unzippedBuffer = new JagBuffer(
+                bzipDecompress(this.zippedBuffer));
+        } else {
+            this.unzippedBuffer = new JagBuffer(this.zippedBuffer);
+        }
+    }
+
+    // populate the entries table
+    readEntries() {
+        const totalEntries = this.unzippedBuffer.getUShort();
+
+        let offset = 2 + totalEntries * 10;
+
+        for (let i = 0; i < totalEntries; i += 1) {
+            const hash = this.unzippedBuffer.getInt4();
+            const size = this.unzippedBuffer.getUInt3();
+            const compressedSize = this.unzippedBuffer.getUInt3();
+            const compressed = this.unzippedBuffer.data.slice(
+                offset, offset + compressedSize);
+
+            let decompressed;
+
+            if (size !== compressedSize) {
+                decompressed = bzipDecompress(compressed);
+            } else {
+                decompressed = compressed;
+            }
+
+            this.entries.set(hash, decompressed);
+            offset += compressedSize;
+        }
+    }
+
+    // decompress the archive and populate our entries
+    readArchive(buffer) {
+        this.header = new JagBuffer(buffer.slice(0, 6));
+        this.zippedBuffer = buffer.slice(6, buffer.length);
+
+        this.readHeader();
+        this.decompress();
+        this.readEntries();
+    }
+
+    // read a file from the decompressed archive
+    getEntry(filename) {
+        const hash = hashFilename(filename);
+
+        if (!this.unzippedBuffer || !this.unzippedBuffer.data.length) {
+            throw new Error('no decompressed data found');
+        }
+
+        if (!this.entries.has(hash)) {
+            throw new Error(`entry ${filename} (${hash}) not found`);
+        }
+
+        return this.entries.get(hash);
+    }
+
+    // add an entry to be compressed
+    putEntry(filename, entry) {
+        const hash = hashFilename(filename);
+        this.entries.set(hash, entry);
+    }
+
+    // write the archive sizes
+    writeHeader() {
+        this.header = new JagBuffer(Buffer.alloc(6));
+        this.header.writeUInt3(this.unzippedBuffer.size);
+        this.header.writeUInt3(this.zippedBuffer.length);
+    }
+
+    // compress the entire archive (if we didn't compress each file
+    // individually)
+    compress(individualCompress = true) {
+        if (!individualCompress) {
+            this.zippedBuffer = bzipCompress(this.unzippedBuffer.data);
+        } else {
+            this.zippedBuffer = this.unzippedBuffer.data;
+        }
+    }
+
+    // add each of the entries to the unzipped buffer
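+    // (e.g. an archive with 3 entries reserves 2 + 3 * 10 = 32 bytes for the
+    // count and entry table, so the first entry's data starts at offset 32)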
+    writeEntries(individualCompress = true) {
+        if (this.entries.size > MAX_ENTRIES) {
+            throw new RangeError(`too many entries (${this.entries.size})`);
+        }
+
+        const compressedEntries = new Map();
+
+        // the size of the concatenated compressed entries
+        let compressedSize = 0;
+
+        for (const [hash, entry] of this.entries) {
+            const compressed = individualCompress ?
+                bzipCompress(entry) : entry;
+            compressedEntries.set(hash, compressed);
+
+            if (entry.length > MAX_FILE_SIZE ||
+                compressed.length > MAX_FILE_SIZE) {
+                throw new RangeError(`entry ${hash} is too big for archive (` +
+                    `${entry.length} bytes)`);
+            }
+
+            compressedSize += compressed.length;
+        }
+
+        // where we start storing the files
+        let entryOffset = 2 + this.entries.size * 10;
+
+        this.unzippedBuffer = new JagBuffer(
+            Buffer.alloc(entryOffset + compressedSize));
+
+        this.unzippedBuffer.writeUShort(this.entries.size);
+
+        for (const [hash, compressedEntry] of compressedEntries) {
+            this.unzippedBuffer.writeInt4(hash);
+            this.unzippedBuffer.writeUInt3(this.entries.get(hash).length);
+            this.unzippedBuffer.writeUInt3(compressedEntry.length);
+
+            compressedEntry.copy(this.unzippedBuffer.data, entryOffset);
+            entryOffset += compressedEntry.length;
+        }
+    }
+
+    // compress the entries to a properly formatted jagex archive. if
+    // `individualCompress` is enabled, each entry is compressed individually
+    // rather than compressing them together
+    toArchive(individualCompress = true) {
+        this.writeEntries(individualCompress);
+        this.compress(individualCompress);
+        this.writeHeader();
+
+        return Buffer.concat([this.header.data, this.zippedBuffer]);
+    }
+
+    toString() {
+        return `[object ${this.constructor.name} (${this.entries.size})]`;
+    }
+}
+
+module.exports.hashFilename = hashFilename;
+module.exports.JagArchive = JagArchive;
diff --git a/src/jag-buffer.js b/src/jag-buffer.js
new file mode 100644
index 0000000..a6d5f33
--- /dev/null
+++ b/src/jag-buffer.js
@@ -0,0 +1,109 @@
+function oobError() {
+    return new RangeError('out of bounds');
+}
+
+class JagBuffer {
+    constructor(data) {
+        this.data = data;
+        this.caret = 0;
+    }
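+
+    // all multi-byte reads and writes are big-endian, e.g. a buffer holding
+    // [0x01, 0x02, 0x03] yields getUInt3() === 0x010203 (66051)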
+
+    getUByte() {
+        if (this.caret + 1 > this.size) {
+            throw oobError();
+        }
+
+        const out = this.data[this.caret] >>> 0;
+
+        this.caret += 1;
+
+        return out;
+    }
+
+    getUShort() {
+        if (this.caret + 2 > this.size) {
+            throw oobError();
+        }
+
+        let out = ((this.data[this.caret] & 0xff) << 8) >>> 0;
+        out = (out | (this.data[this.caret + 1] & 0xff)) >>> 0;
+
+        this.caret += 2;
+
+        return out;
+    }
+
+    getUInt3() {
+        if (this.caret + 3 > this.size) {
+            throw oobError();
+        }
+
+        let out = ((this.data[this.caret] & 0xff) << 16) >>> 0;
+        out = (out | ((this.data[this.caret + 1] & 0xff) << 8)) >>> 0;
+        out = (out | (this.data[this.caret + 2] & 0xff)) >>> 0;
+
+        this.caret += 3;
+
+        return out;
+    }
+
+    getInt4() {
+        if (this.caret + 4 > this.size) {
+            throw oobError();
+        }
+
+        let out = ((this.data[this.caret] & 0xff) << 24);
+        out = (out | ((this.data[this.caret + 1] & 0xff) << 16));
+        out = (out | ((this.data[this.caret + 2] & 0xff) << 8));
+        out = (out | (this.data[this.caret + 3] & 0xff));
+
+        this.caret += 4;
+
+        return out;
+    }
+
+    getBytes(length, start) {
+        start = isNaN(+start) ? this.caret : start;
+
+        const bytes = this.data.slice(start, start + length);
+        this.caret += length;
+
+        return bytes;
+    }
+
+    writeUByte(value) {
+        this.data[this.caret] = value & 0xff;
+        this.caret += 1;
+    }
+
+    writeUShort(value) {
+        this.data[this.caret] = (value >> 8) & 0xff;
+        this.data[this.caret + 1] = value & 0xff;
+        this.caret += 2;
+    }
+
+    writeUInt3(value) {
+        this.data[this.caret] = (value >> 16) & 0xff;
+        this.data[this.caret + 1] = (value >> 8) & 0xff;
+        this.data[this.caret + 2] = value & 0xff;
+        this.caret += 3;
+    }
+
+    writeInt4(value) {
+        this.data[this.caret] = (value >> 24) & 0xff;
+        this.data[this.caret + 1] = (value >> 16) & 0xff;
+        this.data[this.caret + 2] = (value >> 8) & 0xff;
+        this.data[this.caret + 3] = value & 0xff;
+        this.caret += 4;
+    }
+
+    // copy `bytes` into the buffer at `start` (or at the caret if no start is
+    // given) and advance the caret
+    writeBytes(bytes, start) {
+        start = isNaN(+start) ? this.caret : start;
+
+        bytes.copy(this.data, start);
+        this.caret += bytes.length;
+    }
+
+    get size() {
+        return this.data.byteLength;
+    }
+}
+
+module.exports = JagBuffer;
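+
+// round-trip sketch (illustrative usage only, mirroring how JagArchive uses
+// this class):
+//   const buf = new JagBuffer(Buffer.alloc(3));
+//   buf.writeUInt3(66051); // writes 0x01 0x02 0x03
+//   buf.caret = 0;
+//   buf.getUInt3();        // 66051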