diff --git a/changelog.md b/changelog.md index 46162cc..fbb6dae 100644 --- a/changelog.md +++ b/changelog.md @@ -49,6 +49,7 @@ - `addFile` is added; you can use this method whenever you use `addFileFromPath`, `addFileFromBuffer` or `addFileFromReadable` - `getConfig()` and `getType()` are implemented as getter as well, resp.: `storage.config` and `storage.type` - The configuration object are no longer extensible; if you want to provide extra parameters you can use the `options` object, for instance: +- removed adapter config types ```typescript const conf: ConfigAmazonS3 = { diff --git a/package.json b/package.json index e49e7d2..98d438b 100644 --- a/package.json +++ b/package.json @@ -49,6 +49,8 @@ "test": "ts-node ./tests/test.ts", "test-mode": "ts-node ./tests/test-mode.ts", "testB2": "ts-node ./tests/testB2.ts", + "testS3": "ts-node ./tests/testS3.ts", + "testGCS": "ts-node ./tests/testGCS.ts", "testLocal": "ts-node ./tests/testLocal.ts", "ts": "ts-node", "tsc": "node_modules/.bin/tsc", diff --git a/src/AdapterAmazonS3.ts b/src/AdapterAmazonS3.ts index b2f7f21..dd276b3 100644 --- a/src/AdapterAmazonS3.ts +++ b/src/AdapterAmazonS3.ts @@ -19,10 +19,8 @@ import { } from "@aws-sdk/client-s3"; import { getSignedUrl } from "@aws-sdk/s3-request-presigner"; import { - ConfigAmazonS3, AdapterConfig, StorageType, - S3Compatible, ResultObjectStream, ResultObject, ResultObjectBuckets, @@ -37,98 +35,54 @@ import { parseUrl } from "./util"; export class AdapterAmazonS3 extends AbstractAdapter { protected _type = StorageType.S3; - protected _config: ConfigAmazonS3; + protected _config: AdapterConfig; private configError: string | null = null; private storage: S3Client; - private s3Compatible: S3Compatible = S3Compatible.Amazon; - constructor(config: string | AdapterConfig) { + constructor(config?: string | AdapterConfig) { super(); - this._config = this.parseConfig(config as ConfigAmazonS3); - - // handle small differences in supported S3 compatible storages - if (typeof (this._config as ConfigAmazonS3).region === "undefined") { - if (this.s3Compatible === S3Compatible.R2) { - this._config.region = "auto"; - } else if (this.s3Compatible === S3Compatible.Backblaze) { - let ep = this._config.endpoint; - ep = ep.substring(ep.indexOf("s3.") + 3); - this._config.region = ep.substring(0, ep.indexOf(".")); - } - } - if (typeof this._config.endpoint === "undefined") { - this.storage = new S3Client({ region: this._config.region }); - } else { - this.storage = new S3Client({ - region: this._config.region, - endpoint: this._config.endpoint, - credentials: { - accessKeyId: this._config.accessKeyId, - secretAccessKey: this._config.secretAccessKey, - }, - }); - } - } - - private parseConfig(config: string | ConfigAmazonS3): ConfigAmazonS3 | null { - let cfg: ConfigAmazonS3; if (typeof config === "string") { - const { value, error } = parseUrl(config); - if (error) { - this.configError = error; - return null; - } - const { - type, - part1: accessKeyId, - part2: secretAccessKey, - part3: region, - bucketName, - queryString: options, - } = value; - cfg = { - type, - accessKeyId, - secretAccessKey, - region, - bucketName, - ...options, - }; + this._config = this.parseConfig(config); } else { - if (typeof config.options !== "undefined") { - cfg = { ...config, ...config.options }; - delete cfg.options; - } else { - cfg = { ...config }; - } + this._config = config; } - if (cfg.skipCheck === true) { - return cfg; + if (this._config === null) { + return; } - if (!cfg.accessKeyId || !cfg.secretAccessKey) { - 
this.configError = - "You must specify a value for both 'applicationKeyId' and 'applicationKey' for storage type 's3'"; - return null; - } + this.storage = new S3Client(this.config); + console.log(this.storage.config); + } - if (typeof cfg.endpoint !== "undefined") { - if (cfg.endpoint.indexOf("r2.cloudflarestorage.com") !== -1) { - this.s3Compatible = S3Compatible.R2; - } else if (cfg.endpoint.indexOf("backblazeb2.com") !== -1) { - this.s3Compatible = S3Compatible.Backblaze; - } - } - if (!cfg.region && this.s3Compatible === S3Compatible.Amazon) { - this.configError = "You must specify a default region for storage type 's3'"; + private parseConfig(config: string): AdapterConfig | null { + const { value, error } = parseUrl(config); + if (error) { + this.configError = error; return null; } - - return cfg; + const { + type, + part1: accessKeyId, + part2: secretAccessKey, + part3: region, + bucketName, + queryString: options, + } = value; + + return { + type, + accessKeyId, + secretAccessKey, + region, + bucketName, + ...options, + }; } - async getFileAsStream( + // Public API + + public async getFileAsStream( bucketName: string, fileName: string, options: { start?: number; end?: number } = { start: 0 } @@ -152,7 +106,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { } } - async removeFile(bucketName: string, fileName: string): Promise { + public async removeFile(bucketName: string, fileName: string): Promise { if (this.configError !== null) { return { value: null, error: this.configError }; } @@ -170,7 +124,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { } } - async createBucket(name: string, options: object = {}): Promise { + public async createBucket(name: string, options: object = {}): Promise { if (this.configError !== null) { return { value: null, error: this.configError }; } @@ -195,7 +149,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { ...options, }; // see issue: https://github.com/aws/aws-sdk-js/issues/3647 - if (typeof this._config.region !== "undefined" && this._config.region !== "us-east-1") { + if (typeof this._config.region === "string" && this._config.region !== "us-east-1") { input.CreateBucketConfiguration = { LocationConstraint: BucketLocationConstraint[this._config.region.replace("-", "_")], }; @@ -217,7 +171,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { } } - async clearBucket(name: string): Promise { + public async clearBucket(name: string): Promise { if (this.configError !== null) { return { value: null, error: this.configError }; } @@ -256,7 +210,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { } } - async deleteBucket(name: string): Promise { + public async deleteBucket(name: string): Promise { if (this.configError !== null) { return { value: null, error: this.configError }; } @@ -277,7 +231,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { } } - async listBuckets(): Promise { + public async listBuckets(): Promise { if (this.configError !== null) { return { value: null, error: this.configError }; } @@ -335,7 +289,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { } } - async getFileAsURL(bucketName: string, fileName: string): Promise { + public async getFileAsURL(bucketName: string, fileName: string): Promise { return getSignedUrl( this.storage, new GetObjectCommand({ @@ -352,7 +306,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { }); } - async listFiles(bucketName: string, maxFiles: number = 1000): Promise { + public async listFiles(bucketName: string, maxFiles: number = 1000): 
Promise { if (this.configError !== null) { return { value: null, error: this.configError }; } @@ -373,7 +327,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { } } - async sizeOf(bucketName: string, fileName: string): Promise { + public async sizeOf(bucketName: string, fileName: string): Promise { if (this.configError !== null) { return { value: null, error: this.configError }; } @@ -391,7 +345,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { } } - async bucketExists(bucketName: string): Promise { + public async bucketExists(bucketName: string): Promise { if (this.configError !== null) { return { value: null, error: this.configError }; } @@ -410,7 +364,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { }); } - async fileExists(bucketName: string, fileName: string): Promise { + public async fileExists(bucketName: string, fileName: string): Promise { if (this.configError !== null) { return { value: null, error: this.configError }; } diff --git a/src/AdapterAzureStorageBlob.ts b/src/AdapterAzureStorageBlob.ts index cd21417..1274396 100644 --- a/src/AdapterAzureStorageBlob.ts +++ b/src/AdapterAzureStorageBlob.ts @@ -40,7 +40,8 @@ export class AdapterAzureStorageBlob extends AbstractAdapter { ); this.storage = new BlobServiceClient( `https://${(this._config as ConfigAzureStorageBlob).storageAccount}.blob.core.windows.net`, - this.sharedKeyCredential + this.sharedKeyCredential, + this._config.options ); } @@ -65,15 +66,10 @@ export class AdapterAzureStorageBlob extends AbstractAdapter { storageAccount, accessKey, bucketName, - ...options, + options, }; } else { - if (typeof config.options !== "undefined") { - cfg = { ...config, ...config.options }; - delete cfg.options; - } else { - cfg = { ...config }; - } + cfg = { ...config }; } if (cfg.skipCheck === true) { diff --git a/src/AdapterBackblazeB2.ts b/src/AdapterBackblazeB2.ts index c004d9c..f40b317 100644 --- a/src/AdapterBackblazeB2.ts +++ b/src/AdapterBackblazeB2.ts @@ -3,7 +3,6 @@ import { Readable } from "stream"; import { AbstractAdapter } from "./AbstractAdapter"; import { StorageType, - ConfigBackblazeB2, BackblazeB2File, ResultObjectBoolean, ResultObject, @@ -21,6 +20,7 @@ import { ResultObjectNumber, BackblazeAxiosResponse, BackblazeBucketOptions, + AdapterConfig, } from "./types"; import { parseUrl, validateName } from "./util"; @@ -28,17 +28,20 @@ require("@gideo-llc/backblaze-b2-upload-any").install(B2); export class AdapterBackblazeB2 extends AbstractAdapter { protected _type = StorageType.B2; - protected _config: ConfigBackblazeB2; + protected _config: AdapterConfig; private storage: B2; private authorized: boolean = false; private configError: string | null = null; - constructor(config: string | ConfigBackblazeB2) { + constructor(config?: string | AdapterConfig) { super(); this._config = this.parseConfig(config); if (this._config !== null) { try { - this.storage = new B2(this._config); + const c = { ...this._config, ...(this._config.options as object) }; + delete c.options; + this.storage = new B2(c); + console.log(this.storage.config); } catch (e) { this.configError = e.message; } @@ -47,8 +50,8 @@ export class AdapterBackblazeB2 extends AbstractAdapter { // util members - private parseConfig(config: string | ConfigBackblazeB2): ConfigBackblazeB2 | null { - let cfg: ConfigBackblazeB2; + private parseConfig(config: string | AdapterConfig): AdapterConfig | null { + let cfg: AdapterConfig; if (typeof config === "string") { const { error, value } = parseUrl(config); if (error !== null) { @@ -70,12 +73,7 
@@ export class AdapterBackblazeB2 extends AbstractAdapter { ...options, }; } else { - if (typeof config.options !== "undefined") { - cfg = { ...config, ...config.options }; - delete cfg.options; - } else { - cfg = { ...config }; - } + cfg = { ...config }; } if (cfg.skipCheck === true) { diff --git a/src/AdapterGoogleCloudStorage.ts b/src/AdapterGoogleCloudStorage.ts index 28cb401..8c180d1 100644 --- a/src/AdapterGoogleCloudStorage.ts +++ b/src/AdapterGoogleCloudStorage.ts @@ -8,7 +8,6 @@ import { import { AbstractAdapter } from "./AbstractAdapter"; import { StorageType, - ConfigGoogleCloud, ResultObject, ResultObjectStream, FileBufferParams, @@ -20,17 +19,24 @@ import { ResultObjectBoolean, } from "./types"; import { parseUrl } from "./util"; +import { AdapterConfig } from "@tweedegolf/storage-abstraction"; export class AdapterGoogleCloudStorage extends AbstractAdapter { protected _type = StorageType.GCS; - protected _config: ConfigGoogleCloud; + protected _config: AdapterConfig; private configError: string | null = null; private storage: GoogleCloudStorage; - constructor(config: string | ConfigGoogleCloud) { + constructor(config?: string | AdapterConfig) { super(); - this._config = this.parseConfig(config); - this.storage = new GoogleCloudStorage(this._config as ConfigGoogleCloud); + // this._config = this.parseConfig(config); + // const c = { + // ...this._config, + // ...this._config.options, + // }; + // delete c.options; + // this.storage = new GoogleCloudStorage(c); + this.storage = new GoogleCloudStorage(config as object); } /** @@ -45,8 +51,8 @@ export class AdapterGoogleCloudStorage extends AbstractAdapter { return json.project_id; } - private parseConfig(config: string | ConfigGoogleCloud): ConfigGoogleCloud { - let cfg: ConfigGoogleCloud; + private parseConfig(config: string | AdapterConfig): AdapterConfig { + let cfg: AdapterConfig; if (typeof config === "string") { const { value, error } = parseUrl(config); if (error) { @@ -66,15 +72,10 @@ export class AdapterGoogleCloudStorage extends AbstractAdapter { keyFilename, projectId, bucketName, - ...options, + options, }; } else { - if (typeof config.options !== "undefined") { - cfg = { ...config, ...config.options }; - delete cfg.options; - } else { - cfg = { ...config }; - } + cfg = { ...config }; } if (cfg.skipCheck === true) { diff --git a/src/AdapterLocal.ts b/src/AdapterLocal.ts index 7929b44..c416b4e 100644 --- a/src/AdapterLocal.ts +++ b/src/AdapterLocal.ts @@ -57,15 +57,10 @@ export class AdapterLocal extends AbstractAdapter { directory, bucketName, mode: this.mode, - ...options, + options, }; } else { - if (typeof config.options !== "undefined") { - cfg = { ...config, ...config.options }; - delete cfg.options; - } else { - cfg = { ...config }; - } + cfg = { ...config }; if (!cfg.directory) { this.configError = "You must specify a value for 'directory' for storage type 'local'"; diff --git a/src/types.ts b/src/types.ts index 986393b..f368ac8 100644 --- a/src/types.ts +++ b/src/types.ts @@ -20,7 +20,7 @@ export interface IStorage { /** * Same as `getType` but implemented as getter - * @returns adapter tyoe + * @returns adapter type, e.g. 'gcs', 'b2', 'local' etc. 
*/ type: string; @@ -217,56 +217,13 @@ export type JSON = { export type GenericKey = number | string | boolean | number[] | string[] | boolean[]; -export interface IAdapterConfig { +export interface AdapterConfig { // type: StorageType; type: string; - skipCheck?: boolean; - bucketName?: string; - options?: { - [id: string]: GenericKey; - }; -} - -export interface ConfigAmazonS3 extends IAdapterConfig { - accessKeyId?: string; - secretAccessKey?: string; - region?: string; - endpoint?: string; -} - -export interface ConfigAzureStorageBlob extends IAdapterConfig { - storageAccount?: string; - accessKey?: string; -} - -export interface ConfigBackblazeB2 extends IAdapterConfig { - applicationKeyId?: string; - applicationKey?: string; -} - -export interface ConfigGoogleCloud extends IAdapterConfig { - keyFilename?: string; - projectId?: string; -} - -export interface ConfigLocal extends IAdapterConfig { - directory: string; - mode?: number | string; -} - -export interface ConfigTemplate extends IAdapterConfig { - someKey: string; - someOtherKey: string; // [id: string]: GenericKey; + [id: string]: number | string | boolean | number[] | string[] | boolean[]; } -export type AdapterConfig = - | ConfigLocal - | ConfigAmazonS3 - | ConfigGoogleCloud - | ConfigBackblazeB2 - | ConfigTemplate; - export type BackblazeAxiosResponse = { response: { data: { diff --git a/tests/test-config-s3.jasmine.ts b/tests/test-config-s3.jasmine.ts index b0ad7eb..242ba1f 100644 --- a/tests/test-config-s3.jasmine.ts +++ b/tests/test-config-s3.jasmine.ts @@ -1,49 +1,48 @@ import "jasmine"; import { Storage } from "../src/Storage"; -import { ConfigAmazonS3, StorageType } from "../src/types"; +import { AdapterAmazonS3 } from "../src/AdapterAmazonS3"; +import { StorageType } from "../src/types"; describe(`testing Amazon urls`, () => { // it("[0] no options", () => { // const storage = new Storage("s3://key:secret/can/contain/slashes"); // expect(storage.getType()).toBe(StorageType.S3); // expect(storage.getSelectedBucket()).toBe(""); - // expect((storage.config as ConfigAmazonS3).accessKeyId).toBe("key"); - // expect((storage.config as ConfigAmazonS3).secretAccessKey).toBe( + // expect(storage.config.accessKeyId).toBe("key"); + // expect(storage.config.secretAccessKey).toBe( // "secret/can/contain/slashes" // ); - // expect((storage.config as ConfigAmazonS3).region).toBe(""); + // expect(storage.config.region).toBe(""); // }); it("[1] parameter string", () => { - const storage = new Storage( + const storage = new AdapterAmazonS3( "s3://key:secret/can/contain/slashes@eu-west-2/the-buck?sslEnabled=true" ); expect(storage.getType()).toBe(StorageType.S3); expect(storage.config.bucketName).toBe("the-buck"); - expect((storage.config as ConfigAmazonS3).accessKeyId).toBe("key"); - expect((storage.config as ConfigAmazonS3).secretAccessKey).toBe("secret/can/contain/slashes"); - expect((storage.config as ConfigAmazonS3).region).toBe("eu-west-2"); - expect((storage.config as ConfigAmazonS3).options?.sslEnabled as unknown as string).toBe( - "true" - ); + expect(storage.config.accessKeyId).toBe("key"); + expect(storage.config.secretAccessKey).toBe("secret/can/contain/slashes"); + expect(storage.config.region).toBe("eu-west-2"); + expect(storage.config.options?.sslEnabled as unknown as string).toBe("true"); }); // it("[2a] no region", () => { // const storage = new Storage("s3://key:secret/can/contain/slashes@the-buck"); // expect(storage.getSelectedBucket()).toBe("the-buck"); - // expect((storage.config as 
ConfigAmazonS3).region).toBe(""); - // expect((storage.config as ConfigAmazonS3).bucketName).toBe("the-buck"); + // expect(storage.config.region).toBe(""); + // expect(storage.config.bucketName).toBe("the-buck"); // }); // it("[2b] no region 2", () => { // const storage = new Storage("s3://key:secret/can/contain/slashes@/the-buck"); // expect(storage.getSelectedBucket()).toBe("the-buck"); - // expect((storage.config as ConfigAmazonS3).region).toBe(""); - // expect((storage.config as ConfigAmazonS3).bucketName).toBe("the-buck"); + // expect(storage.config.region).toBe(""); + // expect(storage.config.bucketName).toBe("the-buck"); // }); it("[3] non-existent keys will not be filtered anymore, nor will invalid typed values (e.g. a numeric value for useDualStack)", () => { - const storage = new Storage( + const storage = new AdapterAmazonS3( [ "s3://key:secret/can/contain/slashes@eu-west-2/the-buck", "?sslEnabled=true", @@ -54,37 +53,37 @@ describe(`testing Amazon urls`, () => { ); expect(storage.getType()).toBe(StorageType.S3); expect(storage.config.bucketName).toBe("the-buck"); - expect((storage.config as ConfigAmazonS3).accessKeyId).toBe("key"); - expect((storage.config as ConfigAmazonS3).secretAccessKey).toBe("secret/can/contain/slashes"); - expect((storage.config as ConfigAmazonS3).region).toBe("eu-west-2"); + expect(storage.config.accessKeyId).toBe("key"); + expect(storage.config.secretAccessKey).toBe("secret/can/contain/slashes"); + expect(storage.config.region).toBe("eu-west-2"); expect(storage.config.options?.sslEnabled as unknown as string).toBe("true"); expect(storage.config.options?.useDualStack as unknown).toBe(undefined); expect(storage.config.options?.otherExistentKey as string).toBe("true"); - expect((storage.config as ConfigAmazonS3).endpoint).toBe(undefined); - expect((storage.config as ConfigAmazonS3).endPoint).toBe( - "https://kms-fips.us-west-2.amazonaws.com" - ); + expect(storage.config.options?.endPoint).toBe("https://kms-fips.us-west-2.amazonaws.com"); + expect(storage.config.endpoint).toBe(undefined); }); it("[4] object", () => { - const storage = new Storage({ + const storage = new AdapterAmazonS3({ type: "s3", accessKeyId: "key", secretAccessKey: "secret/can/contain/slashes", region: "eu-west-2", bucketName: "the-buck", - sslEnabled: true, + options: { + sslEnabled: true, + }, }); expect(storage.getType()).toBe(StorageType.S3); - expect(storage.getSelectedBucket()).toBe("the-buck"); - expect((storage.config as ConfigAmazonS3).accessKeyId).toBe("key"); - expect((storage.config as ConfigAmazonS3).secretAccessKey).toBe("secret/can/contain/slashes"); - expect((storage.config as ConfigAmazonS3).region).toBe("eu-west-2"); - expect((storage.config as ConfigAmazonS3).sslEnabled).toBe(true); + expect(storage.config.bucketName).toBe("the-buck"); + expect(storage.config.accessKeyId).toBe("key"); + expect(storage.config.secretAccessKey).toBe("secret/can/contain/slashes"); + expect(storage.config.region).toBe("eu-west-2"); + expect(storage.config.options?.sslEnabled).toBe(true); }); it("[5] no bucket", () => { - const storage = new Storage({ + const storage = new AdapterAmazonS3({ type: "s3", region: "eu-west-2", accessKeyId: "key", @@ -96,31 +95,33 @@ describe(`testing Amazon urls`, () => { // it("[5a] no bucket URL", () => { // const storage = new Storage("s3://key:secret/can/contain/slashes@eu-west-2"); // expect(storage.getSelectedBucket()).toBe("eu-west-2"); - // expect((storage.config as ConfigAmazonS3).region).not.toBe("eu-west-2"); + // 
expect(storage.config.region).not.toBe("eu-west-2"); // }); it("[5a1] no bucket URL", () => { - const storage = new Storage("s3://key:secret/can/contain/slashes@eu-west-2/"); + const storage = new AdapterAmazonS3("s3://key:secret/can/contain/slashes@eu-west-2/"); expect(storage.getSelectedBucket()).toBe(""); - expect((storage.config as ConfigAmazonS3).region).toBe("eu-west-2"); + expect(storage.config.region).toBe("eu-west-2"); }); it("[5b] no bucket URL plus queryString", () => { - const storage = new Storage("s3://key:secret/can/contain/slashes@eu-west-2/?sslEnabled=true"); + const storage = new AdapterAmazonS3( + "s3://key:secret/can/contain/slashes@eu-west-2/?sslEnabled=true" + ); expect(storage.getSelectedBucket()).toBe(""); - expect((storage.config as ConfigAmazonS3).region).toBe("eu-west-2"); - expect((storage.config as ConfigAmazonS3).sslEnabled as unknown as string).toBe("true"); + expect(storage.config.region).toBe("eu-west-2"); + expect(storage.config.sslEnabled as unknown as string).toBe("true"); }); // it("[5b1] no bucket URL plus queryString", () => { - // const storage = new Storage("s3://key:secret/can/contain/slashes@eu-west-2?sslEnabled=true"); + // const storage = new AdapterAmazonS3("s3://key:secret/can/contain/slashes@eu-west-2?sslEnabled=true"); // expect(storage.getSelectedBucket()).toBe("eu-west-2"); - // expect((storage.config as ConfigAmazonS3).region).not.toBe("eu-west-2"); - // expect((storage.config as ConfigAmazonS3).sslEnabled).toBe(true); + // expect(storage.config.region).not.toBe("eu-west-2"); + // expect(storage.config.sslEnabled).toBe(true); // }); it("[6] number and boolean in config object keep their original type", () => { - const storage = new Storage({ + const storage = new AdapterAmazonS3({ type: "s3", region: "eu-west-2", accessKeyId: "key", @@ -128,12 +129,12 @@ describe(`testing Amazon urls`, () => { optionNumber: 42, optionBoolean: true, }); - expect((storage.config as ConfigAmazonS3).optionNumber).toBe(42); - expect((storage.config as ConfigAmazonS3).optionBoolean).toBe(true); + expect(storage.config.optionNumber).toBe(42); + expect(storage.config.optionBoolean).toBe(true); }); it("[7] number and boolean used in config will stay string types", () => { - const storage = new Storage( + const storage = new AdapterAmazonS3( [ "s3://key:secret/can/contain/slashes", "@eu-west-2/", @@ -141,7 +142,7 @@ describe(`testing Amazon urls`, () => { "&optionBoolean=true", ].join("") ); - expect((storage.config as ConfigAmazonS3).optionNumber).toBe("42"); - expect((storage.config as ConfigAmazonS3).optionBoolean).toBe("true"); + expect(storage.config.optionNumber).toBe("42"); + expect(storage.config.optionBoolean).toBe("true"); }); }); diff --git a/tests/testB2.ts b/tests/testB2.ts index aeb648a..e56ffe3 100644 --- a/tests/testB2.ts +++ b/tests/testB2.ts @@ -77,7 +77,7 @@ async function testB2() { // targetPath: "test/image1.jpg", // }); // console.timeEnd("addFileFromPath"); - + /* console.time("addFileFromStream"); const data4 = await storage.addFileFromStream({ bucketName: "the-buck", @@ -90,7 +90,7 @@ async function testB2() { const response = await storage.clearBucket("the-buck"); console.log(response); console.timeEnd("clearBucket"); - +*/ // console.time("listFiles"); // const data2 = await storage.listFiles("the-buck"); // console.log(data2); diff --git a/tests/testGCS.ts b/tests/testGCS.ts new file mode 100644 index 0000000..a87d7c1 --- /dev/null +++ b/tests/testGCS.ts @@ -0,0 +1,14 @@ +import dotenv from "dotenv"; +import { StorageType } from 
"../src/types"; +import { AdapterGoogleCloudStorage } from "../src/AdapterGoogleCloudStorage"; +import { parseMode, parseUrl } from "../src/util"; + +dotenv.config(); + +async function test() { + const a = new AdapterGoogleCloudStorage(); + const b = await a.listBuckets(); + console.log(b); +} + +test(); diff --git a/tests/testLocal.ts b/tests/testLocal.ts new file mode 100644 index 0000000..f19983e --- /dev/null +++ b/tests/testLocal.ts @@ -0,0 +1,35 @@ +import dotenv from "dotenv"; +import { StorageType } from "../src/types"; +import { ConfigBackblazeB2 } from "@tweedegolf/storage-abstraction"; +import { parseMode, parseUrl } from "../src/util"; + +dotenv.config(); + +const applicationKeyId = process.env.B2_APPLICATION_KEY_ID; +const applicationKey = process.env.B2_APPLICATION_KEY; +const configBackblaze: ConfigBackblazeB2 = { + type: StorageType.B2, + applicationKeyId, + applicationKey, + bucketName: process.env.BUCKET_NAME, + versioning: true, +}; + +function test() { + // const config = "local://tests/tmp/@the-buck?param=value"; + const config = "s3://key:secret/can/contain/slashes@eu-west-2/the-buck"; + const { value, error } = parseUrl(config); + if (error) { + this.configError = error; + return null; + } + + console.log(value); + + console.log(parseMode("0o777")); + console.log(parseMode("511")); + console.log(parseMode(0o777)); + console.log(parseMode(511)); +} + +test(); diff --git a/tests/testS3.ts b/tests/testS3.ts new file mode 100644 index 0000000..8f8d801 --- /dev/null +++ b/tests/testS3.ts @@ -0,0 +1,85 @@ +import dotenv from "dotenv"; +import { StorageType } from "../src/types"; +import { AdapterAmazonS3 } from "../src/AdapterAmazonS3"; +// import { parseMode, parseUrl } from "../src/util"; +import { ListBucketsCommand, S3Client } from "@aws-sdk/client-s3"; +import { Storage } from "../src/Storage"; + +dotenv.config(); + +// const accessKeyId = process.env["AWS_ACCESS_KEY_ID"]; +// const secretAccessKey = process.env["AWS_SECRET_ACCESS_KEY"]; + +// const configS3: ConfigAmazonS3 = { +// type: StorageType.S3, +// region: "us-east-1", +// skipCheck: true, +// // accessKeyId, +// // secretAccessKey, +// bucketName: process.env.BUCKET_NAME, +// }; + +async function test() { + // const s = new AdapterAmazonS3({ region: "eu-west-1" }); + const s = new Storage({ type: StorageType.S3, region: "eu-west-1" }); + console.log(s.config); + const b = await s.listBuckets(); + console.log(b); + + // const s3 = new S3Client({ region: "us-east-1" }); + // const command = new ListBucketsCommand({}); + // s3.send(command) + // .then((response) => { + // const bucketNames = response.Buckets?.map((d) => d?.Name); + // console.log(bucketNames); + // }) + // .catch((e) => { + // console.log(e); + // }); +} + +test(); + +/* +this.storage = new S3Client({ + region: this._config.region, + endpoint: this._config.endpoint, + credentials: { + accessKeyId: this._config.accessKeyId, + secretAccessKey: this._config.secretAccessKey, + }, +}); + + +if (typeof this._config.region === "undefined") { + if (this.s3Compatible === S3Compatible.R2) { + this._config.region = "auto"; + } else if (this.s3Compatible === S3Compatible.Backblaze) { + let ep = this._config.endpoint as string; + ep = ep.substring(ep.indexOf("s3.") + 3); + this._config.region = ep.substring(0, ep.indexOf(".")); + } +} +if (typeof this._config.endpoint === "undefined") { + // this.storage = new S3Client({ region: this._config.region, ...this._config.options }); + this.storage = new S3Client({ region: "us-east-1" }); + 
console.log(this.storage.config); +} else { + this.storage = new S3Client(config); +} + + + if (typeof cfg.endpoint !== "undefined") { + if (cfg.endpoint.indexOf("r2.cloudflarestorage.com") !== -1) { + this.s3Compatible = S3Compatible.R2; + } else if (cfg.endpoint.indexOf("backblazeb2.com") !== -1) { + this.s3Compatible = S3Compatible.Backblaze; + } + } + if (!cfg.region && this.s3Compatible === S3Compatible.Amazon) { + this.configError = "You must specify a default region for storage type 's3'"; + return null; + } + + +*/
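
For reference, a minimal usage sketch of the configuration style this diff moves to: the per-adapter config interfaces (`ConfigAmazonS3`, `ConfigBackblazeB2`, `ConfigGoogleCloud`, ...) are removed and every adapter accepts the generic `AdapterConfig`, either as an object or as a configuration URL, with provider-specific extras under `options`. The import paths mirror the test files above and are an assumption about how the adapters are exposed; this is a sketch of intended usage, not part of the patch.

```typescript
// Sketch only: assumes AdapterAmazonS3 and StorageType are exported as in the
// test files added by this diff (tests/testS3.ts, tests/test-config-s3.jasmine.ts).
import { AdapterAmazonS3 } from "../src/AdapterAmazonS3";
import { StorageType } from "../src/types";

// Object-style configuration: provider-specific extras go under `options`
const s3 = new AdapterAmazonS3({
  type: StorageType.S3,
  accessKeyId: "key",
  secretAccessKey: "secret",
  region: "eu-west-2",
  bucketName: "the-buck",
  options: {
    sslEnabled: true,
  },
});

// URL-style configuration is also supported; query-string values stay strings
const s3FromUrl = new AdapterAmazonS3(
  "s3://key:secret@eu-west-2/the-buck?sslEnabled=true"
);

async function run() {
  // listBuckets() resolves to a result object of the shape { value, error }
  const buckets = await s3.listBuckets();
  console.log(buckets);
}

run();
```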