From 0f1061a8fe141da047c0a96922b3dfc8536934f4 Mon Sep 17 00:00:00 2001 From: abudaan Date: Thu, 16 Nov 2023 19:06:30 +0100 Subject: [PATCH 01/26] wip --- tests/test_all.ts | 21 ++++++++ tests/units/get_configuration.ts | 90 ++++++++++++++++++++++++++++++++ tests/units/init.ts | 39 ++++++++++++++ 3 files changed, 150 insertions(+) create mode 100644 tests/test_all.ts create mode 100644 tests/units/get_configuration.ts create mode 100644 tests/units/init.ts diff --git a/tests/test_all.ts b/tests/test_all.ts new file mode 100644 index 0000000..299a97f --- /dev/null +++ b/tests/test_all.ts @@ -0,0 +1,21 @@ +import { init, initSkipCheck } from "./units/init"; + +async function testAll() { + const storage = await init(); + // const storage = await initSkipCheck(); + try { + const msg = await storage.createBucket("the-buck8"); + console.log(msg); + } catch (e) { + console.error(e); + } + storage.createBucket("the-buck8").then().catch(); + + const msg = await storage.createBucket("the-buck8"); + // if(msg.error) { + + // } + //console.log(storage.getSelectedBucket()); +} + +testAll(); diff --git a/tests/units/get_configuration.ts b/tests/units/get_configuration.ts new file mode 100644 index 0000000..77566d8 --- /dev/null +++ b/tests/units/get_configuration.ts @@ -0,0 +1,90 @@ +import dotenv from "dotenv"; +import { AdapterConfig, StorageType } from "../../src/types"; + +dotenv.config(); + +const debug = false; +const type = process.env["TYPE"]; +const skipCheck = process.env["SKIP_CHECK"] === "true" ? 
true : false; +const configUrl = process.env["CONFIG_URL"]; +const bucketName = process.env["BUCKET_NAME"]; +const directory = process.env["LOCAL_DIRECTORY"]; +const projectId = process.env["GOOGLE_CLOUD_PROJECT_ID"]; +const keyFilename = process.env["GOOGLE_CLOUD_KEYFILE"]; +const accessKeyId = process.env["AWS_ACCESS_KEY_ID"]; +const secretAccessKey = process.env["AWS_SECRET_ACCESS_KEY"]; +const region = process.env["AWS_REGION"]; +const applicationKeyId = process.env["B2_APPLICATION_KEY_ID"]; +const applicationKey = process.env["B2_APPLICATION_KEY"]; +const storageAccount = process.env["AZURE_STORAGE_ACCOUNT"]; +const accessKey = process.env["AZURE_STORAGE_ACCESS_KEY"]; + +if (debug) { + console.group(".env"); + console.log({ + type, + skipCheck, + configUrl, + bucketName, + directory, + projectId, + keyFilename, + accessKeyId, + secretAccessKey, + storageAccount, + accessKey, + }); + console.groupEnd(); +} + +export function getConfiguration(): string | AdapterConfig { + let config: AdapterConfig | string = ""; + if (type === StorageType.LOCAL) { + config = { + type, + skipCheck, + bucketName, + directory, + }; + } else if (type === StorageType.GCS) { + config = { + type, + skipCheck, + bucketName, + projectId, + keyFilename, + }; + } else if (type === StorageType.S3) { + config = { + type, + skipCheck, + bucketName, + accessKeyId, + secretAccessKey, + region, + }; + } else if (type === StorageType.B2) { + config = { + type, + skipCheck, + bucketName, + applicationKeyId, + applicationKey, + }; + } else if (type === StorageType.AZURE) { + config = { + type, + skipCheck, + storageAccount, + accessKey, + bucketName, + }; + } else { + if (!configUrl) { + config = `local://${process.cwd()}/the-buck`; + } else { + config = configUrl; + } + } + return config; +} diff --git a/tests/units/init.ts b/tests/units/init.ts new file mode 100644 index 0000000..90e9128 --- /dev/null +++ b/tests/units/init.ts @@ -0,0 +1,39 @@ +import { Storage } from "../../src/Storage"; +import 
{ ConfigAmazonS3, IAdapterConfig } from "../../src/types"; +import { getConfiguration } from "./get_configuration"; + +const debug = false; + +export async function init(): Promise { + const config = getConfiguration(); + try { + const storage = new Storage(config); + await storage.init(); + if (debug) { + console.log(storage.getConfiguration()); + } + return storage; + } catch (e) { + console.error(`\x1b[31m[init] ${e.message}`); + process.exit(0); + } +} + +export async function initSkipCheck(): Promise { + const config = getConfiguration() as ConfigAmazonS3; + try { + const storage = new Storage({ + skipCheck: true, + type: config.type, + region: config.region, + } as IAdapterConfig); + await storage.init(); + if (debug) { + console.log(storage.getConfiguration()); + } + return storage; + } catch (e) { + console.error(`\x1b[31m[initSkipCheck] ${e.message}`); + process.exit(0); + } +} From bf34a0da4cc5ea9e94e1cf485a40947ffe7c0e3e Mon Sep 17 00:00:00 2001 From: abudaan Date: Thu, 16 Nov 2023 23:36:38 +0100 Subject: [PATCH 02/26] drafted new API --- changelog.md | 143 +++++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 143 insertions(+) diff --git a/changelog.md b/changelog.md index 3d77c3c..7ff7026 100644 --- a/changelog.md +++ b/changelog.md @@ -1,3 +1,146 @@ +# 2.0.0 + +- Every API method that needs access to the abstracted cloud storage service returns a Promise that resolves to an object: + ```typescript + type ReturnObject = { + error: string | null; + value: string | number | Array<[string, number]> | Readable; // depends on method + }; + ``` +- `init` will automatically select (and if necessary create) the bucket if your configuration object or url has a value set for `bucketName` +- The storage instance will no longer hold a reference to the last used or selected bucket in its local state; you will have to provide a bucket name for every bucket operation, for instance `clearBucket`, but also `removeFile`. 
+- The storage instance will also no longer hold a reference to all available buckets; a call to `listBuckets` will access the cloud storage service every time it is called; this is handy in case another process or user has created a new bucket. +- `validateName` will not only perform a local check, it will also check if the name is valid and/or not taken at the cloud storage service. + +### Old API (1.5.x) compared to new API (2.x) + +#### init + +`init(config):Promise`
+`init(config):Promise` + +#### test + +`test():Promise`
+`test():Promise` + +#### selectBucket + +`selectBucket(name: string | null): Promise`
+`N/A` + +#### getSelectedBucket + +`getSelectedBucket(): string`
+`N/A` + +#### createBucket + +`createBucket(name: string, options?: object): Promise`
+`createBucket(name: string, options?: object): Promise` + +#### clearBucket + +`clearBucket(name?: string): Promise`
+`clearBucket(name: string): Promise` + +#### deleteBucket + +`deleteBucket(name?: string): Promise`
+`deleteBucket(name: string): Promise` + +#### removeFile + +`removeFile(fileName: string): Promise`
+`removeFile(bucketName: string, fileName: string): Promise` + +#### listFiles + +`listFiles(): Promise<[string, number][]>`
+`listFiles(bucketName: string): Promise` + +#### listBuckets + +`listBuckets(): Promise`
+`listBuckets(): Promise` + +#### sizeOf + +`sizeOf(name: string): Promise`
+`sizeOf(bucketName: string, fileName: string): Promise` + +#### fileExists + +`fileExists(name: string): Promise`
+`fileExists(bucketName: string, fileName: string): Promise` + +#### validateName + +`validateName(name: string): string`
+`validateName(name: string): Promise` + +#### getFileAsReadable + +```typescript +getFileAsReadable( + name: string, + options?: { start?: number; end?: number } + ): Promise +``` + +```typescript +getFileAsReadable( + bucketName: string, + fileName: string, + options?: { start?: number; end?: number } + ): Promise +``` + +#### addFileFromPath + +```typescript +addFileFromPath(origPath: string, targetPath: string, options: object = {}): Promise +``` + +```typescript +addFileFromPath({ + bucketName: string, + origPath: string, + targetPath: string, + options: object = {} + }): Promise +``` + +#### addFileFromBuffer + +```typescript +addFileFromBuffer(buffer: Buffer, targetPath: string, options: object = {}): Promise +``` + +```typescript +addFileFromBuffer({ + bucketName: string, + buffer: Buffer, + targetPath: string, + options: object = {} + }): Promise +``` + +#### addFileFromReadable + +```typescript +addFileFromReadable(stream: Readable, targetPath: string, options: object = {}): Promise +``` + +```typescript +addFileFromReadable({ + bucketName: string, + stream: Readable, + targetPath: string, + options: object = {} + }): Promise +``` + # 1.4.7 - 1.5.2 - Added support for Azure → all credits: [tesirm99](https://github.com/tesirm99) From 8286e6f1cfb589f396dea038e74e072567b85228 Mon Sep 17 00:00:00 2001 From: abudaan Date: Fri, 17 Nov 2023 10:38:24 +0100 Subject: [PATCH 03/26] updated proposal --- changelog.md | 80 +++++++++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 79 insertions(+), 1 deletion(-) diff --git a/changelog.md b/changelog.md index 7ff7026..e44b9dc 100644 --- a/changelog.md +++ b/changelog.md @@ -4,9 +4,39 @@ ```typescript type ReturnObject = { error: string | null; - value: string | number | Array<[string, number]> | Readable; // depends on method + value: string | number | Array<[string, number]> | Array | Readable; // depends on method }; ``` +- Perhaps a type for all possible return values: + + ```typescript + // most common 
type + type ReturnObject = { + error: string | null; + value: string | null; + }; + + type ReturnObjectNumber = { + error: string | null; + value: number | null; + }; + + type ReturnObjectFiles = { + error: string | null; + value: Array<[string, number]> | null; + }; + + type ReturnObjectBuckets = { + error: string | null; + value: Array | null; + }; + + type ReturnObjectReadable = { + error: string | null; + value: Readable | null; + }; + ``` + - `init` will automatically select (and if necessary create) the bucket if your configuration object or url has a value set for `bucketName` - The storage instance will no longer hold a reference to the last used or selected bucket in its local state; you will have to provide a bucket name for every bucket operation, for instance `clearBucket`, but also `removeFile`. - The storage instance will also no longer hold a reference to all available buckets; a call to `listBuckets` will access the cloud storage service every time it is called; this is handy in case another process or user has created a new bucket. @@ -141,6 +171,54 @@ addFileFromReadable({ }): Promise ``` +### Some other ideas + +Maybe merge all `addFileFrom*` methods into a single `addFile` method that behaves differently dependent on the given argument/parameter: + +```typescript +type FilePath ={ + bucketName: string, + origPath: string, + targetPath: string, + options: object = {} +} + +type FileBuffer ={ + bucketName: string, + buffer: Buffer, + targetPath: string, + options: object = {} +} + +type FileStream ={ + bucketName: string, + stream: Readable, + targetPath: string, + options: object = {} +} + +addFile(FilePath | FileBuffer | FileStream): Promise +``` + +And analogue to this: + +```typescript +enum FileReturnType = { + Stream, + SomethingElse, + ... 
+} + +type GetFile = { + bucketName: string, + fileName: string, + type: FileReturnType, + options?: { start?: number; end?: number } +} + +getFile(GetFile): Promise +``` + # 1.4.7 - 1.5.2 - Added support for Azure → all credits: [tesirm99](https://github.com/tesirm99) From 6062c6fad6ff31f5cbd6961cc535419deba91530 Mon Sep 17 00:00:00 2001 From: abudaan Date: Fri, 17 Nov 2023 15:13:43 +0100 Subject: [PATCH 04/26] updated AbstractAdapter --- changelog.md | 60 +++++++-------- src/AbstractAdapter.ts | 148 +++++++++++++++++++++---------------- src/AdapterLocal.ts | 29 +++----- src/types.ts | 163 ++++++++++++++++++++++++++++------------- 4 files changed, 235 insertions(+), 165 deletions(-) diff --git a/changelog.md b/changelog.md index e44b9dc..3ad6313 100644 --- a/changelog.md +++ b/changelog.md @@ -2,7 +2,7 @@ - Every API method that needs access to the abstracted cloud storage service returns a Promise that resolves to an object: ```typescript - type ReturnObject = { + type ResultObject = { error: string | null; value: string | number | Array<[string, number]> | Array | Readable; // depends on method }; @@ -11,27 +11,27 @@ ```typescript // most common type - type ReturnObject = { + type ResultObject = { error: string | null; value: string | null; }; - type ReturnObjectNumber = { + type ResultObjectNumber = { error: string | null; value: number | null; }; - type ReturnObjectFiles = { + type ResultObjectFiles = { error: string | null; value: Array<[string, number]> | null; }; - type ReturnObjectBuckets = { + type ResultObjectBuckets = { error: string | null; value: Array | null; }; - type ReturnObjectReadable = { + type ResultObjectReadable = { error: string | null; value: Readable | null; }; @@ -47,12 +47,12 @@ #### init `init(config):Promise`
-`init(config):Promise` +`init(config):Promise` #### test `test():Promise`
-`test():Promise` +`test():Promise` #### selectBucket @@ -67,47 +67,47 @@ #### createBucket `createBucket(name: string, options?: object): Promise`
-`createBucket(name: string, options?: object): Promise` +`createBucket(name: string, options?: object): Promise` #### clearBucket `clearBucket(name?: string): Promise`
-`clearBucket(name: string): Promise` +`clearBucket(name: string): Promise` #### deleteBucket `deleteBucket(name?: string): Promise`
-`deleteBucket(name: string): Promise` +`deleteBucket(name: string): Promise` #### removeFile `removeFile(fileName: string): Promise`
-`removeFile(bucketName: string, fileName: string): Promise` +`removeFile(bucketName: string, fileName: string): Promise` #### listFiles `listFiles(): Promise<[string, number][]>`
-`listFiles(bucketName: string): Promise` +`listFiles(bucketName: string): Promise` #### listBuckets `listBuckets(): Promise`
-`listBuckets(): Promise` +`listBuckets(): Promise` #### sizeOf `sizeOf(name: string): Promise`
-`sizeOf(bucketName: string, fileName: string): Promise` +`sizeOf(bucketName: string, fileName: string): Promise` #### fileExists `fileExists(name: string): Promise`
-`fileExists(bucketName: string, fileName: string): Promise` +`fileExists(bucketName: string, fileName: string): Promise` #### validateName `validateName(name: string): string`
-`validateName(name: string): Promise` +`validateName(name: string): Promise` #### getFileAsReadable @@ -123,7 +123,7 @@ getFileAsReadable( bucketName: string, fileName: string, options?: { start?: number; end?: number } - ): Promise + ): Promise ``` #### addFileFromPath @@ -138,7 +138,7 @@ addFileFromPath({ origPath: string, targetPath: string, options: object = {} - }): Promise + }): Promise ``` #### addFileFromBuffer @@ -153,7 +153,7 @@ addFileFromBuffer({ buffer: Buffer, targetPath: string, options: object = {} - }): Promise + }): Promise ``` #### addFileFromReadable @@ -167,8 +167,8 @@ addFileFromReadable({ bucketName: string, stream: Readable, targetPath: string, - options: object = {} - }): Promise + options: object = {}, + }): Promise ``` ### Some other ideas @@ -176,28 +176,28 @@ addFileFromReadable({ Maybe merge all `addFileFrom*` methods into a single `addFile` method that behaves differently dependent on the given argument/parameter: ```typescript -type FilePath ={ +type FilePath = { bucketName: string, origPath: string, targetPath: string, - options: object = {} + options?: object, } -type FileBuffer ={ +type FileBuffer = { bucketName: string, buffer: Buffer, targetPath: string, - options: object = {} + options?: object, } -type FileStream ={ +type FileStream = { bucketName: string, stream: Readable, targetPath: string, - options: object = {} + options?: object, } -addFile(FilePath | FileBuffer | FileStream): Promise +addFile(FilePath | FileBuffer | FileStream): Promise ``` And analogue to this: @@ -216,7 +216,7 @@ type GetFile = { options?: { start?: number; end?: number } } -getFile(GetFile): Promise +getFile(GetFile): Promise ``` # 1.4.7 - 1.5.2 diff --git a/src/AbstractAdapter.ts b/src/AbstractAdapter.ts index 11a7076..2194de9 100644 --- a/src/AbstractAdapter.ts +++ b/src/AbstractAdapter.ts @@ -1,6 +1,16 @@ -import { Readable } from "stream"; -import { validateName } from "./util"; -import { AdapterConfig, IStorage } from "./types"; +import { + 
AdapterConfig, + FileBuffer, + FilePath, + FileStream, + IStorage, + ResultObject, + ResultObjectBoolean, + ResultObjectBuckets, + ResultObjectFiles, + ResultObjectNumber, + ResultObjectReadable, +} from "./types"; export abstract class AbstractAdapter implements IStorage { // protected type: StorageType; @@ -17,100 +27,108 @@ export abstract class AbstractAdapter implements IStorage { return this.config; } - protected validateName(name: string): string { - return validateName(name); - } - - async test(): Promise { + async test(): Promise { if (this.initialized === false) { - return Promise.reject("storage has not been initialized yet; call Storage.init() first"); + return Promise.resolve({ + value: null, + error: "storage has not been initialized yet; call Storage.init() first", + }); } + if (this.bucketName) { + let result: ResultObject; try { - await this.listFiles(); - return Promise.resolve("ok"); + const { error } = await this.bucketExists(this.bucketName); + if (error === null) { + result = { value: "ok", error }; + } else { + result = { value: null, error }; + } } catch (e) { - throw new Error(`Looks like the storage configuration is not correct (${e.message})`); + result = { + value: null, + error: `Looks like the storage configuration is not correct (${e.message})`, + }; } + return Promise.resolve(result); } + + let result: ResultObject; try { - await this.listBuckets(); - return Promise.resolve("ok"); + const { error } = await this.listBuckets(); + if (error === null) { + result = { value: "ok", error }; + } else { + result = { value: null, error }; + } } catch (e) { - throw new Error(`Looks like the storage configuration is not correct (${e.message})`); + result = { + value: null, + error: `Looks like the storage configuration is not correct (${e.message})`, + }; } + return Promise.resolve(result); } - async addFileFromPath( - origPath: string, - targetPath: string, - options: object = {} - ): Promise { - return await this.store(origPath, targetPath, 
options); - } + async addFileFromPath(params: FilePath): Promise { + if (this.initialized === false) { + return Promise.resolve({ + value: null, + error: "storage has not been initialized yet; call Storage.init() first", + }); + } - async addFileFromBuffer( - buffer: Buffer, - targetPath: string, - options: object = {} - ): Promise { - return await this.store(buffer, targetPath, options); + return await this.store(params); } - async addFileFromReadable( - stream: Readable, - targetPath: string, - options: object = {} - ): Promise { - return await this.store(stream, targetPath, options); + async addFileFromBuffer(params: FileBuffer): Promise { + if (this.initialized === false) { + return Promise.resolve({ + value: null, + error: "storage has not been initialized yet; call Storage.init() first", + }); + } + return await this.store(params); } - public getSelectedBucket(): string { - return this.bucketName; + async addFileFromReadable(params: FileStream): Promise { + if (this.initialized === false) { + return Promise.resolve({ + value: null, + error: "storage has not been initialized yet; call Storage.init() first", + }); + } + return await this.store(params); } // stubs - protected abstract store( - filePath: string, - targetFileName: string, - options: object - ): Promise; + protected abstract store(param: FilePath): Promise; + protected abstract store(param: FileBuffer): Promise; + protected abstract store(param: FileStream): Promise; - protected abstract store( - buffer: Buffer, - targetFileName: string, - options: object - ): Promise; + abstract init(): Promise; - protected abstract store( - stream: Readable, - targetFileName: string, - options: object - ): Promise; + abstract createBucket(name: string, options?: object): Promise; - abstract init(): Promise; + abstract clearBucket(name: string): Promise; - abstract selectBucket(name: string | null): Promise; + abstract deleteBucket(name: string): Promise; - abstract createBucket(name: string, options?: object): 
Promise; - - abstract clearBucket(name?: string): Promise; - - abstract deleteBucket(name?: string): Promise; - - abstract listBuckets(): Promise; + abstract listBuckets(): Promise; abstract getFileAsReadable( name: string, options?: { start?: number; end?: number } - ): Promise; + ): Promise; + + abstract removeFile(bucketName: string, fileName: string): Promise; - abstract removeFile(fileName: string): Promise; + abstract listFiles(bucketName: string): Promise; - abstract listFiles(): Promise<[string, number][]>; + abstract sizeOf(bucketName: string, fileName: string): Promise; - abstract sizeOf(name: string): Promise; + abstract fileExists(bucketName: string, fileName: string): Promise; - abstract fileExists(name: string): Promise; + abstract bucketExists(name: string): Promise; } diff --git a/src/AdapterLocal.ts b/src/AdapterLocal.ts index 7036c7b..8baccfe 100644 --- a/src/AdapterLocal.ts +++ b/src/AdapterLocal.ts @@ -9,29 +9,17 @@ import { parseQuerystring, parseMode } from "./util"; export class AdapterLocal extends AbstractAdapter { protected type = StorageType.LOCAL; - // protected bucketName: string; - private directory: string; - private buckets: string[] = []; - private mode: number | string = 0o777; constructor(config: ConfigLocal) { super(); this.config = this.parseConfig(config); // console.log(config); // console.log(this.config); - if (typeof this.config.bucketName !== "undefined" && this.config.bucketName !== "") { - const msg = this.validateName(this.config.bucketName); - if (msg !== null) { - throw new Error(msg); - } - this.bucketName = this.config.bucketName; - } const mode = (this.config as ConfigLocal).mode; - if (typeof mode !== "undefined") { - this.mode = mode; + if (typeof mode === "undefined") { + (this.config as ConfigLocal).mode = 0o777; } const directory = (this.config as ConfigLocal).directory; - this.directory = directory; } private parseConfig(config: string | ConfigLocal): ConfigLocal { @@ -86,11 +74,6 @@ export class AdapterLocal 
extends AbstractAdapter { // cfg.directory = process.cwd(); // } } - if (cfg.mode) { - this.mode = cfg.mode; - } - // console.log(cfg); - if (cfg.skipCheck === true) { return cfg; } @@ -103,6 +86,14 @@ export class AdapterLocal extends AbstractAdapter { return Promise.resolve(true); } + if (typeof this.config.bucketName !== "undefined" && this.config.bucketName !== "") { + const { error } = await this.validateName(this.config.bucketName); + if (error !== null) { + Promise.resolve({ error, value: null }); + return; + } + } + if (typeof this.bucketName !== "undefined") { await this.createDirectory(path.join(this.directory, this.bucketName)); } diff --git a/src/types.ts b/src/types.ts index 536f172..3327e62 100644 --- a/src/types.ts +++ b/src/types.ts @@ -1,4 +1,3 @@ -import { BucketLocationConstraint } from "@aws-sdk/client-s3"; import { Readable } from "stream"; // import { ConfigLocal } from "../adapters/local/types"; // import { ConfigBackblazeB2 } from "../adapters/backblaze/types"; @@ -20,7 +19,7 @@ export interface IStorage { * want to) handle async action in the constructor all storage types have an init() method * that needs to be called before any other API method call */ - init(): Promise; + init(): Promise; /** * Returns the storage type, e.g. 'gcs', 'b2', 'local' etc. @@ -50,72 +49,53 @@ export interface IStorage { * Runs a simple test to test the storage configuration: calls `listBuckets` only to check * if it fails and if so, it throws an error. */ - test(): Promise; + test(): Promise; /** - * @param name: name of the bucket to create, returns true once the bucket has been created but + * @param name name of the bucket to create, returns true once the bucket has been created but * also when the bucket already exists. Note that you have to use `selectBucket` to start using * the newly created bucket. 
* @param: options: additional options for creating a bucket such as access rights */ - createBucket(name: string, options?: object): Promise; + createBucket(name: string, options?: object): Promise; /** - * @param name: name of the bucket that will be used to store files, if the bucket does not exist it - * will be created. If you pass null, "" or no value the currently selected bucket will be deselected. + * @param name: deletes all file in the bucket. */ - selectBucket(name?: string | null): Promise; + clearBucket(name: string): Promise; /** - * @param name?: deletes all file in the bucket. If no name is provided the currently selected bucket - * of the storage will be emptied. If no bucket is selected an error will be thrown. + * @param name: deletes the bucket with this name. */ - clearBucket(name?: string): Promise; + deleteBucket(name: string): Promise; /** - * @param name?: deletes the bucket with this name. If no name is provided the currently selected bucket - * of the storage will be deleted. If no bucket is selected an error will be thrown. 
+ * Retrieves an array of the names of the buckets in this storage */ - deleteBucket(name?: string): Promise; + listBuckets(): Promise; /** - * Retrieves a list of the names of the buckets in this storage - */ - listBuckets(): Promise; - - /** - * Returns the name of the currently selected bucket or an empty string ("") if no bucket has been selected yet - */ - getSelectedBucket(): string; - - /** - * @param origPath: path of the file to be copied - * @param targetPath: path to copy the file to, folders will be created automatically - * @param options: additional option such as access rights + * @paramObject data about the file to be added * @returns the public url to the file */ - addFileFromPath(origPath: string, targetPath: string, options?: object): Promise; + addFileFromPath(paramObject: FilePath): Promise; /** - * @param buffer: file as buffer - * @param targetPath: path to the file to save the buffer to, folders will be created automatically - * @param options: additional option such as access rights + * @paramObject data about the file to be added * @returns the public url to the file */ - addFileFromBuffer(buffer: Buffer, targetPath: string, options?: object): Promise; + addFileFromBuffer(paramObject: FileBuffer): Promise; /** - * @param stream: a read stream - * @param targetPath: path to the file to save the stream to, folders will be created automatically - * @param options: additional option such as access rights + * @paramObject data about the file to be added * @returns the public url to the file */ - addFileFromReadable(stream: Readable, targetPath: string, options?: object): Promise; + addFileFromReadable(paramObject: FileStream): Promise; /** - * @param name: name of the file to be returned as a readable stream - * @param start?: the byte of the file where the stream starts (default: 0) - * @param end?: the byte in the file where the stream ends (default: last byte of file) + * @param name name of the file to be returned as a readable stream + * 
@param start? the byte of the file where the stream starts (default: 0) + * @param end? the byte in the file where the stream ends (default: last byte of file) */ getFileAsReadable( name: string, @@ -123,32 +103,44 @@ export interface IStorage { start?: number; end?: number; } - ): Promise; + ): Promise; /** - * @param name: name of the file to be removed + * @param bucketName name of the bucket where the file is stored + * @param fileName name of the file to be removed */ - removeFile(name: string): Promise; + removeFile(bucketName: string, fileName: string): Promise; /** - * Returns an array of tuples containing the file path and the file size of all files in the currently - * selected bucket. If no bucket is selected an error will be thrown. + * @param bucketName name of the bucket + * @param numFiles optional, only works for S3 compatible storages: the maximal number of files to retrieve + * Returns an array of tuples containing the file path and the file size of all files in the bucket. 
*/ - listFiles(numFiles?: number): Promise<[string, number][]>; + listFiles(bucketName: string, numFiles?: number): Promise; /** * Returns the size in bytes of the file - * @param name + * @param bucketName name of the bucket where the file is stored + * @param fileName name of the file */ - sizeOf(name: string): Promise; + sizeOf(bucketName: string, fileName: string): Promise; /** - * Check if a file with the provided name exists - * @param name + * @param bucketName name of the bucket where the file is stored + * @param fileName name of the file */ - fileExists(name: string): Promise; + fileExists(bucketName: string, fileName: string): Promise; - getFileAsURL?(name: string): Promise; + /** + * @param bucketName name of the bucket + */ + bucketExists(bucketName: string): Promise; + + /** + * @param bucketName name of the bucket where the file is stored + * @param fileName name of the file + */ + getFileAsURL?(bucketName: string, fileName: string): Promise; } export enum StorageType { @@ -257,3 +249,72 @@ export enum S3Compatible { R2, Backblaze, } + +export type ResultObject = { + error: string | null; + value: string | null; +}; + +export type ResultObjectNumber = { + error: string | null; + value: number | null; +}; + +export type ResultObjectBoolean = { + error: string | null; + value: boolean | null; +}; + +export type ResultObjectFiles = { + error: string | null; + value: Array<[string, number]> | null; +}; + +export type ResultObjectBuckets = { + error: string | null; + value: Array | null; +}; + +export type ResultObjectReadable = { + error: string | null; + value: Readable | null; +}; + +/** + * @param bucketName name of the bucket you want to use + * @param origPath path of the file to be copied + * @param targetPath path to copy the file to, folders will be created automatically + * @param options additional option such as access rights + **/ +export type FilePath = { + bucketName: string; + origPath: string; + targetPath: string; + options?: object; +}; 
+ +/** + * @param bucketName name of the bucket you want to use + * @param buffer file as buffer + * @param targetPath path to the file to save the buffer to, folders will be created automatically + * @param options additional option such as access rights + **/ +export type FileBuffer = { + bucketName: string; + buffer: Buffer; + targetPath: string; + options?: object; +}; + +/** + * @param bucketName name of the bucket you want to use + * @param stream a read stream + * @param targetPath path to the file to save the stream to, folders will be created automatically + * @param options additional option such as access rights + **/ +export type FileStream = { + bucketName: string; + stream: Readable; + targetPath: string; + options?: object; +}; From c883e5eab7c8384b41d3cf2cf0de1f38b38231d9 Mon Sep 17 00:00:00 2001 From: abudaan Date: Fri, 17 Nov 2023 21:58:24 +0100 Subject: [PATCH 05/26] improved API --- changelog.md | 27 +++++++++++++-- src/AbstractAdapter.ts | 78 ++++++------------------------------------ src/types.ts | 47 +++++++++++++------------ 3 files changed, 61 insertions(+), 91 deletions(-) diff --git a/changelog.md b/changelog.md index 3ad6313..1a92870 100644 --- a/changelog.md +++ b/changelog.md @@ -37,7 +37,8 @@ }; ``` -- `init` will automatically select (and if necessary create) the bucket if your configuration object or url has a value set for `bucketName` +- ~~`init` will automatically select (and if necessary create) the bucket if your configuration object or url has a value set for `bucketName`~~ +- Backblaze B2 native API storage requires initial authorization (by calling the async `authorize` function) so `init` will only be implemented for this type of storage. For other storage type `init` will be a stub. - The storage instance will no longer hold a reference to the last used or selected bucket in its local state; you will have to provide a bucket name for every bucket operation, for instance `clearBucket`, but also `removeFile`. 
- The storage instance will also no longer hold a reference to all available buckets; a call to `listBuckets` will access the cloud storage service every time it is called; this is handy in case another process or user has created a new bucket. - `validateName` will not only perform a local check, it will also check if the name is valid and/or not taken at the cloud storage service. @@ -52,7 +53,7 @@ #### test `test():Promise`
-`test():Promise` +`N/A` #### selectBucket @@ -219,6 +220,28 @@ type GetFile = { getFile(GetFile): Promise ``` +### The `init` function is not required anymore + +Only Backblaze B2 Native API storage requires initial authorization by calling the async `authorize` function. So only for this type of storage it is required to call `init` after instantiating and before you call any API method. Although it is implemented (as a stub) in all storage types, for other storage types you don't need to call this method: + +```typescript +const b2 = new Storage("b2://applicationKeyId:applicationKey"); +await b2.init(); // mandatory +await b2.listBuckets(); + +const gcs = new Storage("gcs://keyFile.json"); +await gcs.listBuckets(); +``` + +### The bucket in the config is not automatically selected or created + +The bucket name that you've provided with the configuration url or object is available by calling `getConfig`: + +```typescript +const s3 = new Storage("s3://key:secret@eu-west-2/bucketName"); +await s3.listFiles(s3.getConfig().bucketName, "your-file.jpg') +``` + # 1.4.7 - 1.5.2 - Added support for Azure → all credits: [tesirm99](https://github.com/tesirm99) diff --git a/src/AbstractAdapter.ts b/src/AbstractAdapter.ts index 2194de9..2985a4d 100644 --- a/src/AbstractAdapter.ts +++ b/src/AbstractAdapter.ts @@ -16,88 +16,31 @@ export abstract class AbstractAdapter implements IStorage { // protected type: StorageType; protected type: string; protected config: AdapterConfig; - protected bucketName: string = ""; - protected initialized: boolean = false; getType(): string { return this.type; } - public getConfiguration(): AdapterConfig { - return this.config; + public async init(): Promise { + // Except for Backblaze B2 Native API this method doesn't have to do anything + // so it doesn't have to be overridden at all, it is only here to make the + // API consistent. 
+ return Promise.resolve({ error: null, value: "ok" }); } - async test(): Promise { - if (this.initialized === false) { - return Promise.resolve({ - value: null, - error: "storage has not been initialized yet; call Storage.init() first", - }); - } - - if (this.bucketName) { - let result: ResultObject; - try { - const { error } = await this.bucketExists(this.bucketName); - if (error === null) { - result = { value: "ok", error }; - } else { - result = { value: null, error }; - } - } catch (e) { - result = { - value: null, - error: `Looks like the storage configuration is not correct (${e.message})`, - }; - } - return Promise.resolve(result); - } - - let result: ResultObject; - try { - const { error } = await this.listBuckets(); - if (error === null) { - result = { value: "ok", error }; - } else { - result = { value: null, error }; - } - } catch (e) { - result = { - value: null, - error: `Looks like the storage configuration is not correct (${e.message})`, - }; - } - return Promise.resolve(result); + public getConfiguration(): AdapterConfig { + return this.config; } async addFileFromPath(params: FilePath): Promise { - if (this.initialized === false) { - return Promise.resolve({ - value: null, - error: "storage has not been initialized yet; call Storage.init() first", - }); - } - return await this.store(params); } async addFileFromBuffer(params: FileBuffer): Promise { - if (this.initialized === false) { - return Promise.resolve({ - value: null, - error: "storage has not been initialized yet; call Storage.init() first", - }); - } return await this.store(params); } async addFileFromReadable(params: FileStream): Promise { - if (this.initialized === false) { - return Promise.resolve({ - value: null, - error: "storage has not been initialized yet; call Storage.init() first", - }); - } return await this.store(params); } @@ -107,8 +50,6 @@ export abstract class AbstractAdapter implements IStorage { protected abstract store(param: FileBuffer): Promise; protected abstract 
store(param: FileStream): Promise; - abstract init(): Promise; - abstract createBucket(name: string, options?: object): Promise; abstract clearBucket(name: string): Promise; @@ -118,10 +59,13 @@ export abstract class AbstractAdapter implements IStorage { abstract listBuckets(): Promise; abstract getFileAsReadable( - name: string, + bucketName: string, + fileName: string, options?: { start?: number; end?: number } ): Promise; + abstract getFileAsURL(): Promise; + abstract removeFile(bucketName: string, fileName: string): Promise; abstract listFiles(bucketName: string): Promise; diff --git a/src/types.ts b/src/types.ts index 3327e62..4f0ec6d 100644 --- a/src/types.ts +++ b/src/types.ts @@ -14,10 +14,13 @@ import { Readable } from "stream"; export interface IStorage { /** - * Initializes the storage. Some storage types don't need any initialization, others - * may require async actions such as an initial authorization. Because you can't (and don't - * want to) handle async action in the constructor all storage types have an init() method - * that needs to be called before any other API method call + * This method is only implemented for Backblaze B2 native API because this type of storage + * requires an async authorization step; because a constructor function can not be async, + * you need to call this method right after instantiation and before you can use the other API + * methods. + * + * For all other storage types this method is only a stub: you don't need to call it and if you do, + * it does noting. 
*/ init(): Promise; @@ -36,6 +39,8 @@ export interface IStorage { * * The object also contains the key `options` which are only the options passed in during * initialization; if you want all options, including the default options use `getOptions()` + * + * @returns adapter configuration as object */ getConfiguration(): AdapterConfig; @@ -45,17 +50,12 @@ export interface IStorage { */ // getOptions(): JSON; - /** - * Runs a simple test to test the storage configuration: calls `listBuckets` only to check - * if it fails and if so, it throws an error. - */ - test(): Promise; - /** * @param name name of the bucket to create, returns true once the bucket has been created but * also when the bucket already exists. Note that you have to use `selectBucket` to start using * the newly created bucket. - * @param: options: additional options for creating a bucket such as access rights + * @param options: additional options for creating a bucket such as access rights + * @returns string or error */ createBucket(name: string, options?: object): Promise; @@ -70,7 +70,7 @@ export interface IStorage { deleteBucket(name: string): Promise; /** - * Retrieves an array of the names of the buckets in this storage + * @returns an array of the names of the buckets in this storage */ listBuckets(): Promise; @@ -93,18 +93,26 @@ export interface IStorage { addFileFromReadable(paramObject: FileStream): Promise; /** + * @param bucketName name of the bucket where the file is stored * @param name name of the file to be returned as a readable stream * @param start? the byte of the file where the stream starts (default: 0) * @param end? 
the byte in the file where the stream ends (default: last byte of file) */ getFileAsReadable( - name: string, + bucketName: string, + fileName: string, options?: { start?: number; end?: number; } ): Promise; + /** + * @param bucketName name of the bucket where the file is stored + * @param fileName name of the file + */ + getFileAsURL(bucketName: string, fileName: string): Promise; + /** * @param bucketName name of the bucket where the file is stored * @param fileName name of the file to be removed @@ -114,25 +122,20 @@ export interface IStorage { /** * @param bucketName name of the bucket * @param numFiles optional, only works for S3 compatible storages: the maximal number of files to retrieve - * Returns an array of tuples containing the file path and the file size of all files in the bucket. + * @returns an array of tuples containing the file path and the file size of all files in the bucket. */ listFiles(bucketName: string, numFiles?: number): Promise; /** - * Returns the size in bytes of the file * @param bucketName name of the bucket where the file is stored * @param fileName name of the file + * @returns the size of the file in bytes */ sizeOf(bucketName: string, fileName: string): Promise; - /** - * @param bucketName name of the bucket where the file is stored - * @param fileName name of the file - */ - fileExists(bucketName: string, fileName: string): Promise; - /** * @param bucketName name of the bucket + * @returns boolean */ bucketExists(bucketName: string): Promise; @@ -140,7 +143,7 @@ export interface IStorage { * @param bucketName name of the bucket where the file is stored * @param fileName name of the file */ - getFileAsURL?(bucketName: string, fileName: string): Promise; + fileExists(bucketName: string, fileName: string): Promise; } export enum StorageType { From a9bda41a196c7e478b8148a82a316a94c686584d Mon Sep 17 00:00:00 2001 From: abudaan Date: Fri, 17 Nov 2023 23:44:33 +0100 Subject: [PATCH 06/26] wip backblaze --- package.json | 1 + 
src/AbstractAdapter.ts | 12 ++--- src/AdapterBackblazeB2.ts | 98 ++++++++++++--------------------------- src/types.ts | 2 +- tests/testB2.ts | 40 ++++++++++++++++ 5 files changed, 77 insertions(+), 76 deletions(-) create mode 100644 tests/testB2.ts diff --git a/package.json b/package.json index cbbc5d4..a5f0f2b 100644 --- a/package.json +++ b/package.json @@ -49,6 +49,7 @@ "test-azure": "TYPE='azure' ts-node ./node_modules/.bin/jasmine ./tests/test.jasmine.ts", "test": "ts-node ./tests/test.ts", "test-mode": "ts-node ./tests/test-mode.ts", + "testB2": "ts-node ./tests/testB2.ts", "ts": "ts-node", "tsc": "node_modules/.bin/tsc", "copy-readme": "cp ./README.md ./publish", diff --git a/src/AbstractAdapter.ts b/src/AbstractAdapter.ts index 2985a4d..5ae0918 100644 --- a/src/AbstractAdapter.ts +++ b/src/AbstractAdapter.ts @@ -21,12 +21,12 @@ export abstract class AbstractAdapter implements IStorage { return this.type; } - public async init(): Promise { - // Except for Backblaze B2 Native API this method doesn't have to do anything - // so it doesn't have to be overridden at all, it is only here to make the - // API consistent. - return Promise.resolve({ error: null, value: "ok" }); - } + // public async init(): Promise { + // // Except for Backblaze B2 Native API this method doesn't have to do anything + // // so it doesn't have to be overridden at all, it is only here to make the + // // API consistent. 
+ // return Promise.resolve({ error: null, value: "ok" }); + // } public getConfiguration(): AdapterConfig { return this.config; diff --git a/src/AdapterBackblazeB2.ts b/src/AdapterBackblazeB2.ts index 96f47ce..62357a6 100644 --- a/src/AdapterBackblazeB2.ts +++ b/src/AdapterBackblazeB2.ts @@ -7,6 +7,8 @@ import { BackblazeB2Bucket, BackblazeB2File, IStorage, + ResultObjectBoolean, + ResultObject, } from "./types"; import { parseUrl } from "./util"; @@ -18,18 +20,11 @@ export class AdapterBackblazeB2 extends AbstractAdapter { private storage: B2; private buckets: BackblazeB2Bucket[] = []; private files: BackblazeB2File[] = []; - private nextFileName: string; + private initialized: boolean = false; constructor(config: string | ConfigBackblazeB2) { super(); this.config = this.parseConfig(config); - if (typeof this.config.bucketName !== "undefined" && this.config.bucketName !== "") { - const msg = this.validateName(this.config.bucketName); - if (msg !== null) { - throw new Error(msg); - } - this.bucketName = this.config.bucketName; - } this.storage = new B2(this.config); } @@ -66,38 +61,42 @@ export class AdapterBackblazeB2 extends AbstractAdapter { return cfg; } - public async init(): Promise { - // console.log("init()", this.initialized, this.bucketName); + private async init(): Promise { if (this.initialized) { - return Promise.resolve(true); + return Promise.resolve({ value: "ok", error: null }); } try { await this.storage.authorize(); + this.initialized = true; + return Promise.resolve({ value: "ok", error: null }); } catch (e) { - throw new Error(e.message); - } - // check if the bucket already exists - if (this.bucketName) { - // create new bucket if it doesn't exist - await this.createBucket(this.bucketName); - this.bucketId = this.getBucketId(); + return Promise.resolve({ value: null, error: e.message }); } - this.initialized = true; - return true; } - private getBucketId(): string { - // console.log(this.buckets); - const index = this.buckets.findIndex( - (b: 
BackblazeB2Bucket) => b.bucketName === this.bucketName - ); - if (index !== -1) { - return this.buckets[index].bucketId; + private async getBucketId(name: string) { + const { error } = await this.init(); + if (error !== null) { + return Promise.resolve({ error, value: null }); } + const { + data: { buckets }, + } = await this.storage.listBuckets(); + + let id = null; + for (let i = 0; i < buckets.length; i++) { + const { bucketId, bucketName } = buckets[i]; + if (bucketName === name) { + id = bucketId; + return Promise.resolve({ value: bucketId, error: null }); + } + } + return Promise.resolve({ value: null, error: `could not find bucket ${name}` }); } - async getFileAsReadable( - name: string, + public async getFileAsReadable( + bucketName: string, + fileName: string, options: { start?: number; end?: number } = { start: 0 } ): Promise { const file = await this.findFile(name); @@ -117,16 +116,7 @@ export class AdapterBackblazeB2 extends AbstractAdapter { return d.data; } - async removeFile(name: string): Promise { - if (!this.bucketName) { - throw new Error("no bucket selected"); - } - - const file = await this.findFile(name); - if (file === null) { - return "file not found"; - } - + async removeFile(bucketName: string, fileName: string): Promise { const { data: { files }, } = await this.storage.listFileVersions({ @@ -169,10 +159,6 @@ export class AdapterBackblazeB2 extends AbstractAdapter { return this.findBucketLocal(name); } - public getSelectedBucket(): string | null { - return this.bucketName; - } - // util members protected async store(buffer: Buffer, targetPath: string, options: object): Promise; @@ -231,32 +217,6 @@ export class AdapterBackblazeB2 extends AbstractAdapter { return "bucket created"; } - async selectBucket(name: string): Promise { - if (!name) { - this.bucketName = ""; - return `bucket '${name}' deselected`; - } - - if (name === this.bucketName) { - return `bucket '${name}' selected`; - } - - const b = await this.findBucket(name); - if (b !== 
null) { - this.bucketName = name; - this.bucketId = b.bucketId; - this.files = []; - return `bucket '${name}' selected`; - } - - // return `bucket ${name} not found`; - await this.createBucket(name); - this.bucketName = name; - this.bucketId = this.getBucketId(); - this.files = []; - return `bucket '${name}' selected`; - } - async clearBucket(name?: string): Promise { const n = name || this.bucketName; diff --git a/src/types.ts b/src/types.ts index 4f0ec6d..4a91444 100644 --- a/src/types.ts +++ b/src/types.ts @@ -22,7 +22,7 @@ export interface IStorage { * For all other storage types this method is only a stub: you don't need to call it and if you do, * it does noting. */ - init(): Promise; + // init(): Promise; /** * Returns the storage type, e.g. 'gcs', 'b2', 'local' etc. diff --git a/tests/testB2.ts b/tests/testB2.ts new file mode 100644 index 0000000..2db9682 --- /dev/null +++ b/tests/testB2.ts @@ -0,0 +1,40 @@ +import B2 from "backblaze-b2"; +import dotenv from "dotenv"; + +import { StorageType } from "@tweedegolf/storage-abstraction"; + +dotenv.config(); + +const configBackblaze = { + type: StorageType.B2, + applicationKeyId: process.env.B2_APPLICATION_KEY_ID, + applicationKey: process.env.B2_APPLICATION_KEY, + // bucketName: process.env.BUCKET_NAME, +}; + +async function testB2() { + const storage = new B2(configBackblaze); + + let s = new Date().getTime(); + await storage.authorize(); + console.log(1, new Date().getTime() - s); + + s = new Date().getTime(); + const { + data: { buckets }, + } = await storage.listBuckets(); + + const n = "the-buck"; + let id = null; + for (let i = 0; i < buckets.length; i++) { + const { bucketId, bucketName } = buckets[i]; + if (bucketName === n) { + id = bucketId; + break; + } + } + console.log(2, new Date().getTime() - s); + console.log("B2", id); +} + +testB2(); From 97a34dc286dab727065f2dfd57a2403adb5106f4 Mon Sep 17 00:00:00 2001 From: abudaan Date: Sat, 18 Nov 2023 15:46:28 +0100 Subject: [PATCH 07/26] wip remove init 
--- changelog.md | 25 ++++--- src/AbstractAdapter.ts | 9 +-- src/AdapterBackblazeB2.ts | 146 ++++++++++++++++++++++++++++++-------- src/types.ts | 43 ++++++++--- tests/testB2.ts | 6 ++ tsconfig.json | 2 +- 6 files changed, 170 insertions(+), 61 deletions(-) diff --git a/changelog.md b/changelog.md index 1a92870..8e95f8a 100644 --- a/changelog.md +++ b/changelog.md @@ -38,8 +38,9 @@ ``` - ~~`init` will automatically select (and if necessary create) the bucket if your configuration object or url has a value set for `bucketName`~~ -- Backblaze B2 native API storage requires initial authorization (by calling the async `authorize` function) so `init` will only be implemented for this type of storage. For other storage type `init` will be a stub. -- The storage instance will no longer hold a reference to the last used or selected bucket in its local state; you will have to provide a bucket name for every bucket operation, for instance `clearBucket`, but also `removeFile`. +- ~~Backblaze B2 native API storage requires initial authorization (by calling the async `authorize` function) so `init` will only be implemented for this type of storage. For other storage type `init` will be a stub.~~ +- No more magic behind the screen; `init` and `selectBucket` have been removed. +- No more local state: the storage instance will no longer hold a reference to the last used or selected bucket in its local state; you will have to provide a bucket name for every bucket operation, for instance `clearBucket`, but also `removeFile`. - The storage instance will also no longer hold a reference to all available buckets; a call to `listBuckets` will access the cloud storage service every time it is called; this is handy in case another process or user has created a new bucket. - `validateName` will not only perform a local check, it will also check if the name is valid and/or not taken at the cloud storage service. @@ -48,7 +49,7 @@ #### init `init(config):Promise`
-`init(config):Promise` +`N/A` #### test @@ -222,20 +223,24 @@ getFile(GetFile): Promise ### The `init` function is not required anymore -Only Backblaze B2 Native API storage requires initial authorization by calling the async `authorize` function. So only for this type of storage it is required to call `init` after instantiating and before you call any API method. Although it is implemented (as a stub) in all storage types, for other storage types you don't need to call this method: +Only Backblaze B2 Native API storage requires initial authorization by calling the async `authorize` function. This authorization step was performed once by calling the `init` method. Although it would yield an error, it was still possible to call API methods without calling `init` prior to that. In the new version every API call checks if the initial authorization has been performed. + +Other storage services do not require initial authorization but their `init` method was used to select and/or create the bucket that was provided in the config. + +Because in the new API seeks to be more transparent, there will be no more 'magic behind the screen'. So if you want to create a bucket (provided you have the access rights to do so) you have to call `createBucket` explicitly. + +Also the new version tries to keep as little local state as possible so `selectBucket` and `getSelectedBucket` have been removed. + +Because of all aforementioned changes the `init` is no longer required! 
You can start calling API methods right after instantiating a storage: ```typescript const b2 = new Storage("b2://applicationKeyId:applicationKey"); -await b2.init(); // mandatory await b2.listBuckets(); - -const gcs = new Storage("gcs://keyFile.json"); -await gcs.listBuckets(); ``` -### The bucket in the config is not automatically selected or created +### The bucket in the config is no longer automatically selected or created -The bucket name that you've provided with the configuration url or object is available by calling `getConfig`: +However, the bucket name that you've provided with the configuration url or object is available by calling `getConfig`: ```typescript const s3 = new Storage("s3://key:secret@eu-west-2/bucketName"); diff --git a/src/AbstractAdapter.ts b/src/AbstractAdapter.ts index 5ae0918..93467f2 100644 --- a/src/AbstractAdapter.ts +++ b/src/AbstractAdapter.ts @@ -21,13 +21,6 @@ export abstract class AbstractAdapter implements IStorage { return this.type; } - // public async init(): Promise { - // // Except for Backblaze B2 Native API this method doesn't have to do anything - // // so it doesn't have to be overridden at all, it is only here to make the - // // API consistent. 
- // return Promise.resolve({ error: null, value: "ok" }); - // } - public getConfiguration(): AdapterConfig { return this.config; } @@ -64,7 +57,7 @@ export abstract class AbstractAdapter implements IStorage { options?: { start?: number; end?: number } ): Promise; - abstract getFileAsURL(): Promise; + abstract getFileAsURL(bucketName: string, fileName: string): Promise; abstract removeFile(bucketName: string, fileName: string): Promise; diff --git a/src/AdapterBackblazeB2.ts b/src/AdapterBackblazeB2.ts index 62357a6..d1104b9 100644 --- a/src/AdapterBackblazeB2.ts +++ b/src/AdapterBackblazeB2.ts @@ -9,6 +9,13 @@ import { IStorage, ResultObjectBoolean, ResultObject, + ResultObjectReadable, + ResultObjectBucketsB2, + ResultObjectFilesB2, + BucketB2, + ResultObjectBucketB2, + ResultObjectFileB2, + FileB2, } from "./types"; import { parseUrl } from "./util"; @@ -20,7 +27,7 @@ export class AdapterBackblazeB2 extends AbstractAdapter { private storage: B2; private buckets: BackblazeB2Bucket[] = []; private files: BackblazeB2File[] = []; - private initialized: boolean = false; + private authorized: boolean = false; constructor(config: string | ConfigBackblazeB2) { super(); @@ -61,50 +68,109 @@ export class AdapterBackblazeB2 extends AbstractAdapter { return cfg; } - private async init(): Promise { - if (this.initialized) { + private async authorize(): Promise { + if (this.authorized) { return Promise.resolve({ value: "ok", error: null }); } try { await this.storage.authorize(); - this.initialized = true; + this.authorized = true; return Promise.resolve({ value: "ok", error: null }); } catch (e) { return Promise.resolve({ value: null, error: e.message }); } } - private async getBucketId(name: string) { - const { error } = await this.init(); + private async getBuckets(): Promise { + return this.storage + .listBuckets() + .then(({ data: { buckets } }) => { + const value = buckets.map(({ bucketId, bucketName }) => { + return { + bucketId, + bucketName, + }; + }); + return 
Promise.resolve({ value, error: null }); + }) + .catch((e: Error) => { + return Promise.resolve({ value: null, error: e.message }); + }); + } + + private async getFiles(bucketName: string): Promise { + const { value: bucket, error } = await this.getBucket(bucketName); if (error !== null) { return Promise.resolve({ error, value: null }); } - const { - data: { buckets }, - } = await this.storage.listBuckets(); - let id = null; + return this.storage + .listFileVersions({ + bucketId: bucket.id, + }) + .then(({ data: { files } }) => { + const value = files.map(({ fileId, fileName, contentType, contentLength }) => { + return { + fileId, + fileName, + contentType, + contentLength, + }; + }); + return Promise.resolve({ value, error: null }); + }) + .catch((e: Error) => { + return Promise.resolve({ value: null, error: e.message }); + }); + } + + private async getBucket(name: string): Promise { + const { value: buckets, error } = await this.getBuckets(); + if (error !== null) { + return Promise.resolve({ value: null, error }); + } + for (let i = 0; i < buckets.length; i++) { - const { bucketId, bucketName } = buckets[i]; - if (bucketName === name) { - id = bucketId; - return Promise.resolve({ value: bucketId, error: null }); + const bucket = buckets[i]; + if (bucket.name === name) { + return Promise.resolve({ value: bucket, error: null }); } } return Promise.resolve({ value: null, error: `could not find bucket ${name}` }); } + private async getFile(bucketName: string, name: string): Promise { + const { value: files, error } = await this.getFiles(bucketName); + if (error !== null) { + return Promise.resolve({ error, value: null }); + } + + for (let i = 0; i < files.length; i++) { + const file = files[i]; + if (file.name === name) { + return Promise.resolve({ value: file, error: null }); + } + } + return Promise.resolve({ value: null, error: `could not find file ${name}` }); + } + public async getFileAsReadable( bucketName: string, fileName: string, options: { start?: number; 
end?: number } = { start: 0 } - ): Promise { - const file = await this.findFile(name); - if (file === null) { - throw new Error("file not found"); + ): Promise { + const { error } = await this.authorize(); + if (error !== null) { + return Promise.resolve({ error, value: null }); } + + const data = await this.getFile(bucketName, fileName); + if (data.error !== null) { + return Promise.resolve({ error: data.error, value: null }); + } + const { value: file } = data; const d = await this.storage.downloadFileById({ - fileId: file.fileId, + fileId: file.id, responseType: "stream", axios: { headers: { @@ -116,25 +182,43 @@ export class AdapterBackblazeB2 extends AbstractAdapter { return d.data; } - async removeFile(bucketName: string, fileName: string): Promise { - const { - data: { files }, - } = await this.storage.listFileVersions({ - bucketId: this.bucketId, - }); + public async getFileAsURL(bucketName: string, fileName: string): Promise { + const { error } = await this.authorize(); + if (error !== null) { + return Promise.resolve({ error, value: null }); + } - Promise.all( + return Promise.resolve({ value: "ok", error: null }); + } + + public async removeFile(bucketName: string, fileName: string): Promise { + const { error } = await this.authorize(); + if (error !== null) { + return Promise.resolve({ error, value: null }); + } + + const data = await this.getFiles(bucketName); + if (error !== null) { + return Promise.resolve({ error, value: null }); + } + const { value: files } = data; + + return Promise.all( files - .filter((f: BackblazeB2File) => f.fileName === name) - .map(({ fileId, fileName }) => + .filter((f: FileB2) => f.name === fileName) + .map(({ id: fileId, name: fileName }) => this.storage.deleteFileVersion({ fileId, fileName, }) ) - ); - this.files = this.files.filter((file) => file.fileName !== name); - return "file removed"; + ) + .then(() => { + return Promise.resolve({ error: null, value: "ok" }); + }) + .catch((e: Error) => { + return 
Promise.resolve({ error: e.message, value: null }); + }); } // util function for findBucket diff --git a/src/types.ts b/src/types.ts index 4a91444..46ca0a2 100644 --- a/src/types.ts +++ b/src/types.ts @@ -13,17 +13,6 @@ import { Readable } from "stream"; // }; export interface IStorage { - /** - * This method is only implemented for Backblaze B2 native API because this type of storage - * requires an async authorization step; because a constructor function can not be async, - * you need to call this method right after instantiation and before you can use the other API - * methods. - * - * For all other storage types this method is only a stub: you don't need to call it and if you do, - * it does noting. - */ - // init(): Promise; - /** * Returns the storage type, e.g. 'gcs', 'b2', 'local' etc. */ @@ -247,6 +236,18 @@ export type BackblazeB2File = { uploadTimestamp: number; }; +export type BucketB2 = { + id: string; + name: string; +}; + +export type FileB2 = { + id: string; + name: string; + contentType: string; + contentLength: number; +}; + export enum S3Compatible { Amazon, R2, @@ -278,6 +279,26 @@ export type ResultObjectBuckets = { value: Array | null; }; +export type ResultObjectBucketsB2 = { + error: string | null; + value: Array | null; +}; + +export type ResultObjectBucketB2 = { + error: string | null; + value: BucketB2 | null; +}; + +export type ResultObjectFileB2 = { + error: string | null; + value: FileB2 | null; +}; + +export type ResultObjectFilesB2 = { + error: string | null; + value: Array | null; +}; + export type ResultObjectReadable = { error: string | null; value: Readable | null; diff --git a/tests/testB2.ts b/tests/testB2.ts index 2db9682..51aed2c 100644 --- a/tests/testB2.ts +++ b/tests/testB2.ts @@ -35,6 +35,12 @@ async function testB2() { } console.log(2, new Date().getTime() - s); console.log("B2", id); + + s = new Date().getTime(); + const { + data: { files }, + } = await storage.listFileVersions({ bucketId: id }); + console.log("B2", 
files); } testB2(); diff --git a/tsconfig.json b/tsconfig.json index 46b1580..b94ecb6 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -15,7 +15,7 @@ "declaration": true, "allowSyntheticDefaultImports": true }, - "include": ["src/*.ts"], + "include": ["src/*.ts", "tests/testB2.ts"], // "include": ["src/index.ts"], "exclude": [ "node_modules", From 5839f27e61a7a54499832c2fe2bf496c72347b6a Mon Sep 17 00:00:00 2001 From: abudaan Date: Sat, 18 Nov 2023 18:24:12 +0100 Subject: [PATCH 08/26] wip backblaze --- src/AdapterBackblazeB2.ts | 203 +++++++++++++++++++------------------- 1 file changed, 102 insertions(+), 101 deletions(-) diff --git a/src/AdapterBackblazeB2.ts b/src/AdapterBackblazeB2.ts index d1104b9..13fc80a 100644 --- a/src/AdapterBackblazeB2.ts +++ b/src/AdapterBackblazeB2.ts @@ -16,8 +16,11 @@ import { ResultObjectBucketB2, ResultObjectFileB2, FileB2, + FileBuffer, + FileStream, + FilePath, } from "./types"; -import { parseUrl } from "./util"; +import { parseUrl, validateName } from "./util"; require("@gideo-llc/backblaze-b2-upload-any").install(B2); @@ -72,13 +75,16 @@ export class AdapterBackblazeB2 extends AbstractAdapter { if (this.authorized) { return Promise.resolve({ value: "ok", error: null }); } - try { - await this.storage.authorize(); - this.authorized = true; - return Promise.resolve({ value: "ok", error: null }); - } catch (e) { - return Promise.resolve({ value: null, error: e.message }); - } + + return this.storage + .authorize() + .then(() => { + this.authorized = true; + return { value: "ok", error: null }; + }) + .catch((e: Error) => { + return { value: null, error: e.message }; + }); } private async getBuckets(): Promise { @@ -91,13 +97,28 @@ export class AdapterBackblazeB2 extends AbstractAdapter { bucketName, }; }); - return Promise.resolve({ value, error: null }); + return { value, error: null }; }) .catch((e: Error) => { - return Promise.resolve({ value: null, error: e.message }); + return { value: null, error: e.message }; }); } + 
private async getBucket(name: string): Promise { + const { value: buckets, error } = await this.getBuckets(); + if (error !== null) { + return Promise.resolve({ value: null, error }); + } + + for (let i = 0; i < buckets.length; i++) { + const bucket = buckets[i]; + if (bucket.name === name) { + return Promise.resolve({ value: bucket, error: null }); + } + } + return Promise.resolve({ value: null, error: `could not find bucket ${name}` }); + } + private async getFiles(bucketName: string): Promise { const { value: bucket, error } = await this.getBucket(bucketName); if (error !== null) { @@ -117,28 +138,13 @@ export class AdapterBackblazeB2 extends AbstractAdapter { contentLength, }; }); - return Promise.resolve({ value, error: null }); + return { value, error: null }; }) .catch((e: Error) => { - return Promise.resolve({ value: null, error: e.message }); + return { value: null, error: e.message }; }); } - private async getBucket(name: string): Promise { - const { value: buckets, error } = await this.getBuckets(); - if (error !== null) { - return Promise.resolve({ value: null, error }); - } - - for (let i = 0; i < buckets.length; i++) { - const bucket = buckets[i]; - if (bucket.name === name) { - return Promise.resolve({ value: bucket, error: null }); - } - } - return Promise.resolve({ value: null, error: `could not find bucket ${name}` }); - } - private async getFile(bucketName: string, name: string): Promise { const { value: files, error } = await this.getFiles(bucketName); if (error !== null) { @@ -214,120 +220,115 @@ export class AdapterBackblazeB2 extends AbstractAdapter { ) ) .then(() => { - return Promise.resolve({ error: null, value: "ok" }); + return { error: null, value: "ok" }; }) .catch((e: Error) => { - return Promise.resolve({ error: e.message, value: null }); + return { error: e.message, value: null }; }); } - // util function for findBucket - private findBucketLocal(name: string): BackblazeB2Bucket | null { - if (this.buckets.length === 0) { - return 
null; - } - const index = this.buckets.findIndex((b) => b.bucketName === name); - if (index !== -1) { - return this.buckets[index]; - } - return null; - } + // util members - // check if we have accessed and stored the bucket earlier - private async findBucket(name: string): Promise { - const b = this.findBucketLocal(name); - if (b !== null) { - return b; + protected async store(params: FilePath): Promise; + protected async store(params: FileBuffer): Promise; + protected async store(params: FileStream): Promise; + protected async store(params: FilePath | FileBuffer | FileStream): Promise { + const { error } = await this.authorize(); + if (error !== null) { + return Promise.resolve({ error, value: null }); } - await this.listBuckets(); - return this.findBucketLocal(name); - } - // util members + const { bucketName, targetPath } = params; - protected async store(buffer: Buffer, targetPath: string, options: object): Promise; - protected async store(stream: Readable, targetPath: string, options: object): Promise; - protected async store(origPath: string, targetPath: string, options: object): Promise; - protected async store( - arg: string | Buffer | Readable, - targetPath: string, - options: object - ): Promise { - if (!this.bucketName) { - throw new Error("no bucket selected"); + let { options } = params; + if (typeof options === "undefined") { + options = {}; } - await this.createBucket(this.bucketName); - return await this.storage + + let data: string | Buffer | Readable; + if (typeof (params as FilePath).origPath !== "undefined") { + data = (params as FilePath).origPath; + } else if (typeof (params as FileBuffer).buffer !== "undefined") { + data = (params as FileBuffer).buffer; + } else if (typeof (params as FileStream).stream !== "undefined") { + data = (params as FileStream).stream; + } + + return this.storage .uploadAny({ ...options, bucketId: this.bucketId, fileName: targetPath, - data: arg, + data, }) .then((file: BackblazeB2File) => { - this.files.push(file); 
- // console.log("FILE", file); - return `${this.storage.downloadUrl}/file/${this.bucketName}/${targetPath}`; + console.log(file); + return { + error: null, + value: `${this.storage.downloadUrl}/file/${bucketName}/${targetPath}`, + }; }) - .catch((err: Error) => { - // console.log("ERROR", err); - return Promise.reject(err); + .catch((e: Error) => { + return { error: e.message, value: null }; }); } - async createBucket(name: string, options: object = {}): Promise { - const msg = this.validateName(name); - if (msg !== null) { - return Promise.reject(msg); + async createBucket(name: string, options: object = {}): Promise { + const { error } = await this.authorize(); + if (error !== null) { + return Promise.resolve({ error, value: null }); } - const b = await this.findBucket(name); - if (b !== null) { - return; + const msg = validateName(name); + if (msg !== null) { + return Promise.reject({ error: msg, value: null }); } - const d = await this.storage + return this.storage .createBucket({ ...options, bucketName: name, bucketType: "allPrivate", // should be a config option! 
}) - .catch((e) => { - throw new Error(e.response.data.message); + .then((what) => { + console.log(what); + return Promise.reject({ error: null, value: "ok" }); + }) + .catch((e: Error) => { + return Promise.reject({ error: e.message, value: null }); }); - - this.buckets.push(d.data); - // console.log("createBucket", this.buckets, d.data); - return "bucket created"; } - async clearBucket(name?: string): Promise { - const n = name || this.bucketName; - - const b = await this.findBucket(n); - if (b === null) { - throw new Error("bucket not found"); + async clearBucket(name: string): Promise { + const { error } = await this.authorize(); + if (error !== null) { + return Promise.resolve({ error, value: null }); } - const { - data: { files }, - } = await this.storage.listFileVersions({ - bucketId: b.bucketId, - }); + const data = await this.getFiles(name); + if (data.error !== null) { + return Promise.resolve({ error: data.error, value: null }); + } - await Promise.all( - files.map((file: BackblazeB2File) => + const { value: files } = data; + return Promise.all( + files.map((file: FileB2) => this.storage.deleteFileVersion({ - fileId: file.fileId, - fileName: file.fileName, + fileId: file.id, + fileName: file.name, }) ) - ); - - return "bucket cleared"; + ) + .then((what) => { + console.log(what); + return { error: null, value: "ok" }; + }) + .catch((e: Error) => { + return { error: e.message, value: null }; + }); } - async deleteBucket(name?: string): Promise { + async deleteBucket(name: string): Promise { const n = name || this.bucketName; const b = await this.findBucket(n); From f1098f725d0f6a22fb5e2ab94231a31a8169c97d Mon Sep 17 00:00:00 2001 From: abudaan Date: Sat, 18 Nov 2023 20:34:02 +0100 Subject: [PATCH 09/26] wip backblaze --- src/AdapterBackblazeB2.ts | 102 +++++++++++++++++++------------------- 1 file changed, 51 insertions(+), 51 deletions(-) diff --git a/src/AdapterBackblazeB2.ts b/src/AdapterBackblazeB2.ts index 13fc80a..2f64162 100644 --- 
a/src/AdapterBackblazeB2.ts +++ b/src/AdapterBackblazeB2.ts @@ -19,6 +19,8 @@ import { FileBuffer, FileStream, FilePath, + ResultObjectBuckets, + ResultObjectFiles, } from "./types"; import { parseUrl, validateName } from "./util"; @@ -329,67 +331,65 @@ export class AdapterBackblazeB2 extends AbstractAdapter { } async deleteBucket(name: string): Promise { - const n = name || this.bucketName; - - const b = await this.findBucket(n); - if (b === null) { - throw new Error("bucket not found"); - } - - try { - await this.clearBucket(n); - } catch (e) { - return e.response.data.message; + const data = await this.clearBucket(name); + if (data.error !== null) { + return Promise.resolve({ error: data.error, value: null }); } - const { bucketId } = b; - try { - await this.storage.deleteBucket({ bucketId }); - } catch (e) { - return e.response.data.message; - } - this.buckets = this.buckets.filter((b) => b.bucketName !== n); - if (n === this.bucketName) { - this.bucketId = ""; - this.bucketName = ""; + const { error, value: bucket } = await this.getBucket(name); + if (error !== null) { + return Promise.resolve({ error: error, value: null }); } - return "bucket deleted"; - } - async listBuckets(): Promise { - const { - data: { buckets }, - } = await this.storage.listBuckets(); - // this.bucketsById = buckets.reduce((acc: { [id: string]: string }, val: BackBlazeB2Bucket) => { - // acc[val.bucketId] = val.bucketName; - // return acc; - // }, {}); - this.buckets = buckets; - const names = this.buckets.map((b) => b.bucketName); - return names; + return this.storage + .deleteBucket({ bucketId: bucket.id }) + .then(() => { + return { error: null, value: "ok" }; + }) + .catch((e: Error) => { + return { error: e.message, value: null }; + }); } - async listFiles(numFiles: number = 1000): Promise<[string, number][]> { - // console.log("ID", this.bucketId); - if (!this.bucketName) { - throw new Error("no bucket selected"); + async listBuckets(): Promise { + const { error } = await 
this.authorize(); + if (error !== null) { + return Promise.resolve({ error, value: null }); } - const { - data: { files, nextFileName }, - } = await this.storage.listFileNames({ - bucketId: this.bucketId, - maxFileCount: numFiles, - }); - // console.log(files); - this.files = [...files]; + return this.getBuckets() + .then(({ value: buckets }) => { + return { + error: null, + value: buckets.map((b) => { + return b.name; + }), + }; + }) + .catch((e: Error) => { + return { error: e.message, value: null }; + }); + } - // @TODO; should loop this until all files are listed - if (nextFileName !== null) { - // console.log(nextFileName); - this.nextFileName = nextFileName; + async listFiles(bucketName: string, numFiles: number = 1000): Promise { + const { error } = await this.authorize(); + if (error !== null) { + return Promise.resolve({ error, value: null }); } - return this.files.map((f) => [f.fileName, f.contentLength]); + + return this.getFiles(bucketName) + .then(({ value: files }) => { + const f: Array<[string, number]> = files.map((f) => { + return [f.name, f.contentLength]; + }); + return { + error: null, + value: f, + }; + }) + .catch((e: Error) => { + return { error: e.message, value: null }; + }); } private async findFile(name: string): Promise { From 87d0fdbc5fcda81301971e25e1722873bf0c6e7f Mon Sep 17 00:00:00 2001 From: abudaan Date: Sat, 18 Nov 2023 22:18:19 +0100 Subject: [PATCH 10/26] backblaze: first version of api 2.0 --- src/AdapterBackblazeB2.ts | 90 +++++++++++++++++++++++++++------------ 1 file changed, 62 insertions(+), 28 deletions(-) diff --git a/src/AdapterBackblazeB2.ts b/src/AdapterBackblazeB2.ts index 2f64162..049b00c 100644 --- a/src/AdapterBackblazeB2.ts +++ b/src/AdapterBackblazeB2.ts @@ -21,6 +21,7 @@ import { FilePath, ResultObjectBuckets, ResultObjectFiles, + ResultObjectNumber, } from "./types"; import { parseUrl, validateName } from "./util"; @@ -392,41 +393,74 @@ export class AdapterBackblazeB2 extends AbstractAdapter { }); } - private 
async findFile(name: string): Promise { - let i = this.files.findIndex((file: BackblazeB2File) => file?.fileName === name); - if (i > -1) { - return this.files[i]; + // probably not necessary + private async listFileNames(bucketName: string): Promise { + const { error } = await this.authorize(); + if (error !== null) { + return Promise.resolve({ error, value: null }); } - const { - data: { files }, - } = await this.storage.listFileNames({ bucketId: this.bucketId }); - this.files = files; - i = this.files.findIndex((file: BackblazeB2File) => file.fileName === name); - if (i > -1) { - return this.files[i]; + + const data = await this.getBucket(bucketName); + if (data.error !== null) { + return Promise.resolve({ error: data.error, value: null }); } - return null; + + const { value: bucketId } = data; + return this.storage + .listFileNames({ bucketId: bucketId }) + .then(({ data: { files } }) => { + return { + error: null, + value: files.map(({ fileName }) => { + return fileName; + }), + }; + }) + .catch((e: Error) => { + return { + error: e.message, + value: null, + }; + }); } - async sizeOf(name: string): Promise { - if (!this.bucketName) { - throw new Error("no bucket selected"); - } - const file = await this.findFile(name); - if (file === null) { - throw new Error("File not found"); + public async sizeOf(bucketName: string, fileName: string): Promise { + const { error } = await this.authorize(); + if (error !== null) { + return Promise.resolve({ error, value: null }); } - return file.contentLength; + + return this.getFile(bucketName, fileName) + .then(({ value: file }) => { + return { error: null, value: file.contentLength }; + }) + .catch((e: Error) => { + return { error: e.message, value: null }; + }); } - async fileExists(name: string): Promise { - if (!this.bucketName) { - throw new Error("no bucket selected"); - } - const file = await this.findFile(name); - if (file === null) { - return false; + async bucketExists(bucketName: string): Promise { + const { error 
} = await this.authorize(); + if (error !== null) { + return Promise.resolve({ error, value: null }); } - return true; + + return this.getBucket(bucketName) + .then(() => { + return { error: null, value: true }; + }) + .catch(() => { + return { error: null, value: false }; + }); + } + + async fileExists(bucketName: string, fileName: string): Promise { + return this.sizeOf(bucketName, fileName) + .then(() => { + return { error: null, value: true }; + }) + .catch(() => { + return { error: null, value: false }; + }); } } From 10b8a2a6a675aa8b3d58f30e538b0fc86ebcbc9b Mon Sep 17 00:00:00 2001 From: abudaan Date: Sun, 19 Nov 2023 00:23:25 +0100 Subject: [PATCH 11/26] fixed issues in backblaze --- src/AbstractAdapter.ts | 8 +- src/AdapterAmazonS3.ts | 42 ++++---- src/AdapterAzureStorageBlob.ts | 30 ++++-- src/AdapterBackblazeB2.ts | 175 ++++++++++++++++--------------- src/AdapterGoogleCloudStorage.ts | 13 ++- src/AdapterLocal.ts | 15 +-- tests/testB2.ts | 48 +++++++-- 7 files changed, 195 insertions(+), 136 deletions(-) diff --git a/src/AbstractAdapter.ts b/src/AbstractAdapter.ts index 93467f2..04b45b6 100644 --- a/src/AbstractAdapter.ts +++ b/src/AbstractAdapter.ts @@ -15,14 +15,18 @@ import { export abstract class AbstractAdapter implements IStorage { // protected type: StorageType; protected type: string; - protected config: AdapterConfig; + protected configuration: AdapterConfig; getType(): string { return this.type; } + public get config(): AdapterConfig { + return this.configuration; + } + public getConfiguration(): AdapterConfig { - return this.config; + return this.configuration; } async addFileFromPath(params: FilePath): Promise { diff --git a/src/AdapterAmazonS3.ts b/src/AdapterAmazonS3.ts index 005a3fb..ff90a7c 100644 --- a/src/AdapterAmazonS3.ts +++ b/src/AdapterAmazonS3.ts @@ -28,42 +28,45 @@ export class AdapterAmazonS3 extends AbstractAdapter { private bucketNames: string[] = []; private region: string = ""; private s3Compatible: S3Compatible = 
S3Compatible.Amazon; - protected config: ConfigAmazonS3; + protected configuration: ConfigAmazonS3; constructor(config: string | AdapterConfig) { super(); - this.config = this.parseConfig(config as ConfigAmazonS3); - if (typeof this.config.bucketName !== "undefined" && this.config.bucketName !== "") { - const msg = this.validateName(this.config.bucketName); + this.configuration = this.parseConfig(config as ConfigAmazonS3); + if ( + typeof this.configuration.bucketName !== "undefined" && + this.configuration.bucketName !== "" + ) { + const msg = this.validateName(this.configuration.bucketName); if (msg !== null) { throw new Error(msg); } - this.bucketName = this.config.bucketName; + this.bucketName = this.configuration.bucketName; } - if (typeof (this.config as ConfigAmazonS3).region === "undefined") { + if (typeof (this.configuration as ConfigAmazonS3).region === "undefined") { if (this.s3Compatible === S3Compatible.R2) { - this.config.region = "auto"; - this.region = this.config.region; + this.configuration.region = "auto"; + this.region = this.configuration.region; } else if (this.s3Compatible === S3Compatible.Backblaze) { - let ep = this.config.endpoint; + let ep = this.configuration.endpoint; ep = ep.substring(ep.indexOf("s3.") + 3); - this.config.region = ep.substring(0, ep.indexOf(".")); + this.configuration.region = ep.substring(0, ep.indexOf(".")); // console.log(this.config.region); - this.region = this.config.region; + this.region = this.configuration.region; } } else { - this.region = (this.config as ConfigAmazonS3).region; + this.region = (this.configuration as ConfigAmazonS3).region; } - if (typeof this.config.endpoint === "undefined") { + if (typeof this.configuration.endpoint === "undefined") { this.storage = new S3Client({ region: this.region }); } else { this.storage = new S3Client({ region: this.region, - endpoint: this.config.endpoint, + endpoint: this.configuration.endpoint, credentials: { - accessKeyId: this.config.accessKeyId, - 
secretAccessKey: this.config.secretAccessKey, + accessKeyId: this.configuration.accessKeyId, + secretAccessKey: this.configuration.secretAccessKey, }, }); } @@ -196,9 +199,12 @@ export class AdapterAmazonS3 extends AbstractAdapter { ...options, }; // see issue: https://github.com/aws/aws-sdk-js/issues/3647 - if (typeof this.config.region !== "undefined" && this.config.region !== "us-east-1") { + if ( + typeof this.configuration.region !== "undefined" && + this.configuration.region !== "us-east-1" + ) { input.CreateBucketConfiguration = { - LocationConstraint: BucketLocationConstraint[this.config.region.replace("-", "_")], + LocationConstraint: BucketLocationConstraint[this.configuration.region.replace("-", "_")], }; } diff --git a/src/AdapterAzureStorageBlob.ts b/src/AdapterAzureStorageBlob.ts index 7fdca30..a73b30c 100644 --- a/src/AdapterAzureStorageBlob.ts +++ b/src/AdapterAzureStorageBlob.ts @@ -20,22 +20,27 @@ export class AdapterAzureStorageBlob extends AbstractAdapter { constructor(config: string | ConfigAzureStorageBlob) { super(); - this.config = this.parseConfig(config as ConfigAzureStorageBlob); + this.configuration = this.parseConfig(config as ConfigAzureStorageBlob); // console.log(this.config); - if (typeof this.config.bucketName !== "undefined" && this.config.bucketName !== "") { - const msg = this.validateName(this.config.bucketName); + if ( + typeof this.configuration.bucketName !== "undefined" && + this.configuration.bucketName !== "" + ) { + const msg = this.validateName(this.configuration.bucketName); if (msg !== null) { throw new Error(msg); } - this.bucketName = this.config.bucketName; + this.bucketName = this.configuration.bucketName; } this.sharedKeyCredential = new StorageSharedKeyCredential( - (this.config as ConfigAzureStorageBlob).storageAccount, - (this.config as ConfigAzureStorageBlob).accessKey + (this.configuration as ConfigAzureStorageBlob).storageAccount, + (this.configuration as ConfigAzureStorageBlob).accessKey ); this.storage = 
new BlobServiceClient( - `https://${(this.config as ConfigAzureStorageBlob).storageAccount}.blob.core.windows.net`, + `https://${ + (this.configuration as ConfigAzureStorageBlob).storageAccount + }.blob.core.windows.net`, this.sharedKeyCredential ); } @@ -82,13 +87,16 @@ export class AdapterAzureStorageBlob extends AbstractAdapter { if (this.initialized) { return Promise.resolve(true); } - if (typeof this.config.bucketName !== "undefined" && this.config.bucketName !== "") { - const msg = this.validateName(this.config.bucketName); + if ( + typeof this.configuration.bucketName !== "undefined" && + this.configuration.bucketName !== "" + ) { + const msg = this.validateName(this.configuration.bucketName); if (msg !== null) { throw new Error(msg); } - await this.createBucket(this.config.bucketName).then(() => { - this.bucketName = this.config.bucketName; + await this.createBucket(this.configuration.bucketName).then(() => { + this.bucketName = this.configuration.bucketName; this.bucketNames.push(this.bucketName); }); } diff --git a/src/AdapterBackblazeB2.ts b/src/AdapterBackblazeB2.ts index 049b00c..9f57a59 100644 --- a/src/AdapterBackblazeB2.ts +++ b/src/AdapterBackblazeB2.ts @@ -4,15 +4,12 @@ import { AbstractAdapter } from "./AbstractAdapter"; import { StorageType, ConfigBackblazeB2, - BackblazeB2Bucket, BackblazeB2File, - IStorage, ResultObjectBoolean, ResultObject, ResultObjectReadable, ResultObjectBucketsB2, ResultObjectFilesB2, - BucketB2, ResultObjectBucketB2, ResultObjectFileB2, FileB2, @@ -29,16 +26,14 @@ require("@gideo-llc/backblaze-b2-upload-any").install(B2); export class AdapterBackblazeB2 extends AbstractAdapter { protected type = StorageType.B2; - private bucketId: string; private storage: B2; - private buckets: BackblazeB2Bucket[] = []; - private files: BackblazeB2File[] = []; private authorized: boolean = false; + private configError: string | null = null; constructor(config: string | ConfigBackblazeB2) { super(); - this.config = 
this.parseConfig(config); - this.storage = new B2(this.config); + this.configuration = this.parseConfig(config); + this.storage = new B2(this.configuration); } private parseConfig(config: string | ConfigBackblazeB2): ConfigBackblazeB2 { @@ -67,16 +62,18 @@ export class AdapterBackblazeB2 extends AbstractAdapter { } if (!cfg.applicationKey || !cfg.applicationKeyId) { - throw new Error( - "You must specify a value for both 'applicationKeyId' and 'applicationKey' for storage type 'b2'" - ); + this.configError = + "You must specify a value for both 'applicationKeyId' and 'applicationKey' for storage type 'b2'"; } return cfg; } private async authorize(): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } if (this.authorized) { - return Promise.resolve({ value: "ok", error: null }); + return { value: "ok", error: null }; } return this.storage @@ -96,8 +93,8 @@ export class AdapterBackblazeB2 extends AbstractAdapter { .then(({ data: { buckets } }) => { const value = buckets.map(({ bucketId, bucketName }) => { return { - bucketId, - bucketName, + id: bucketId, + name: bucketName, }; }); return { value, error: null }; @@ -110,33 +107,35 @@ export class AdapterBackblazeB2 extends AbstractAdapter { private async getBucket(name: string): Promise { const { value: buckets, error } = await this.getBuckets(); if (error !== null) { - return Promise.resolve({ value: null, error }); + return { value: null, error }; } for (let i = 0; i < buckets.length; i++) { const bucket = buckets[i]; if (bucket.name === name) { - return Promise.resolve({ value: bucket, error: null }); + return { value: bucket, error: null }; } } - return Promise.resolve({ value: null, error: `could not find bucket ${name}` }); + return { value: null, error: `could not find bucket ${name}` }; } private async getFiles(bucketName: string): Promise { const { value: bucket, error } = await this.getBucket(bucketName); if (error !== null) { - return Promise.resolve({ error, 
value: null }); + return { error, value: null }; } return this.storage .listFileVersions({ bucketId: bucket.id, + maxFileCount: 1000, }) .then(({ data: { files } }) => { + // console.log(files); const value = files.map(({ fileId, fileName, contentType, contentLength }) => { return { - fileId, - fileName, + id: fileId, + name: fileName, contentType, contentLength, }; @@ -151,16 +150,49 @@ export class AdapterBackblazeB2 extends AbstractAdapter { private async getFile(bucketName: string, name: string): Promise { const { value: files, error } = await this.getFiles(bucketName); if (error !== null) { - return Promise.resolve({ error, value: null }); + return { error, value: null }; } for (let i = 0; i < files.length; i++) { const file = files[i]; if (file.name === name) { - return Promise.resolve({ value: file, error: null }); + return { value: file, error: null }; } } - return Promise.resolve({ value: null, error: `could not find file ${name}` }); + return { value: null, error: `could not find file ${name}` }; + } + + // probably not necessary; may be a little bit more lightweight compared to listFileVersions + // if you don't have file versions + public async listFileNames(bucketName: string): Promise { + const { error } = await this.authorize(); + if (error !== null) { + return { error, value: null }; + } + + const data = await this.getBucket(bucketName); + if (data.error !== null) { + return { error: data.error, value: null }; + } + + const { value: bucket } = data; + return this.storage + .listFileNames({ bucketId: bucket.id }) + .then(({ data: { files } }) => { + // console.log(files); + return { + error: null, + value: files.map(({ fileName }) => { + return fileName; + }), + }; + }) + .catch((e: Error) => { + return { + error: e.message, + value: null, + }; + }); } public async getFileAsReadable( @@ -170,12 +202,12 @@ export class AdapterBackblazeB2 extends AbstractAdapter { ): Promise { const { error } = await this.authorize(); if (error !== null) { - return 
Promise.resolve({ error, value: null }); + return { error, value: null }; } const data = await this.getFile(bucketName, fileName); if (data.error !== null) { - return Promise.resolve({ error: data.error, value: null }); + return { error: data.error, value: null }; } const { value: file } = data; const d = await this.storage.downloadFileById({ @@ -194,21 +226,22 @@ export class AdapterBackblazeB2 extends AbstractAdapter { public async getFileAsURL(bucketName: string, fileName: string): Promise { const { error } = await this.authorize(); if (error !== null) { - return Promise.resolve({ error, value: null }); + return { error, value: null }; } - return Promise.resolve({ value: "ok", error: null }); + // return Promise.resolve({ value: "ok", error: null }); + return { value: "ok", error: null }; } public async removeFile(bucketName: string, fileName: string): Promise { const { error } = await this.authorize(); if (error !== null) { - return Promise.resolve({ error, value: null }); + return { error, value: null }; } const data = await this.getFiles(bucketName); if (error !== null) { - return Promise.resolve({ error, value: null }); + return { error, value: null }; } const { value: files } = data; @@ -238,31 +271,36 @@ export class AdapterBackblazeB2 extends AbstractAdapter { protected async store(params: FilePath | FileBuffer | FileStream): Promise { const { error } = await this.authorize(); if (error !== null) { - return Promise.resolve({ error, value: null }); + return { error, value: null }; } const { bucketName, targetPath } = params; + const data = await this.getBucket(bucketName); + if (data.error !== null) { + return { error: data.error, value: null }; + } + const { value: bucket } = data; let { options } = params; if (typeof options === "undefined") { options = {}; } - let data: string | Buffer | Readable; + let fileData: string | Buffer | Readable; if (typeof (params as FilePath).origPath !== "undefined") { - data = (params as FilePath).origPath; + fileData = 
(params as FilePath).origPath; } else if (typeof (params as FileBuffer).buffer !== "undefined") { - data = (params as FileBuffer).buffer; + fileData = (params as FileBuffer).buffer; } else if (typeof (params as FileStream).stream !== "undefined") { - data = (params as FileStream).stream; + fileData = (params as FileStream).stream; } return this.storage .uploadAny({ ...options, - bucketId: this.bucketId, + bucketId: bucket.id, fileName: targetPath, - data, + data: fileData, }) .then((file: BackblazeB2File) => { console.log(file); @@ -276,15 +314,15 @@ export class AdapterBackblazeB2 extends AbstractAdapter { }); } - async createBucket(name: string, options: object = {}): Promise { + public async createBucket(name: string, options: object = {}): Promise { const { error } = await this.authorize(); if (error !== null) { - return Promise.resolve({ error, value: null }); + return { error, value: null }; } const msg = validateName(name); if (msg !== null) { - return Promise.reject({ error: msg, value: null }); + return { error: msg, value: null }; } return this.storage @@ -295,22 +333,22 @@ export class AdapterBackblazeB2 extends AbstractAdapter { }) .then((what) => { console.log(what); - return Promise.reject({ error: null, value: "ok" }); + return { error: null, value: "ok" }; }) .catch((e: Error) => { - return Promise.reject({ error: e.message, value: null }); + return { error: e.message, value: null }; }); } - async clearBucket(name: string): Promise { + public async clearBucket(name: string): Promise { const { error } = await this.authorize(); if (error !== null) { - return Promise.resolve({ error, value: null }); + return { error, value: null }; } const data = await this.getFiles(name); if (data.error !== null) { - return Promise.resolve({ error: data.error, value: null }); + return { error: data.error, value: null }; } const { value: files } = data; @@ -331,15 +369,15 @@ export class AdapterBackblazeB2 extends AbstractAdapter { }); } - async deleteBucket(name: 
string): Promise { + public async deleteBucket(name: string): Promise { const data = await this.clearBucket(name); if (data.error !== null) { - return Promise.resolve({ error: data.error, value: null }); + return { error: data.error, value: null }; } const { error, value: bucket } = await this.getBucket(name); if (error !== null) { - return Promise.resolve({ error: error, value: null }); + return { error: error, value: null }; } return this.storage @@ -352,10 +390,10 @@ export class AdapterBackblazeB2 extends AbstractAdapter { }); } - async listBuckets(): Promise { + public async listBuckets(): Promise { const { error } = await this.authorize(); if (error !== null) { - return Promise.resolve({ error, value: null }); + return { error, value: null }; } return this.getBuckets() @@ -372,10 +410,10 @@ export class AdapterBackblazeB2 extends AbstractAdapter { }); } - async listFiles(bucketName: string, numFiles: number = 1000): Promise { + public async listFiles(bucketName: string, numFiles: number = 1000): Promise { const { error } = await this.authorize(); if (error !== null) { - return Promise.resolve({ error, value: null }); + return { error, value: null }; } return this.getFiles(bucketName) @@ -393,41 +431,10 @@ export class AdapterBackblazeB2 extends AbstractAdapter { }); } - // probably not necessary - private async listFileNames(bucketName: string): Promise { - const { error } = await this.authorize(); - if (error !== null) { - return Promise.resolve({ error, value: null }); - } - - const data = await this.getBucket(bucketName); - if (data.error !== null) { - return Promise.resolve({ error: data.error, value: null }); - } - - const { value: bucketId } = data; - return this.storage - .listFileNames({ bucketId: bucketId }) - .then(({ data: { files } }) => { - return { - error: null, - value: files.map(({ fileName }) => { - return fileName; - }), - }; - }) - .catch((e: Error) => { - return { - error: e.message, - value: null, - }; - }); - } - public async 
sizeOf(bucketName: string, fileName: string): Promise { const { error } = await this.authorize(); if (error !== null) { - return Promise.resolve({ error, value: null }); + return { error, value: null }; } return this.getFile(bucketName, fileName) @@ -442,7 +449,7 @@ export class AdapterBackblazeB2 extends AbstractAdapter { async bucketExists(bucketName: string): Promise { const { error } = await this.authorize(); if (error !== null) { - return Promise.resolve({ error, value: null }); + return { error, value: null }; } return this.getBucket(bucketName) diff --git a/src/AdapterGoogleCloudStorage.ts b/src/AdapterGoogleCloudStorage.ts index 01a5a93..14c72ba 100644 --- a/src/AdapterGoogleCloudStorage.ts +++ b/src/AdapterGoogleCloudStorage.ts @@ -18,15 +18,18 @@ export class AdapterGoogleCloudStorage extends AbstractAdapter { constructor(config: string | ConfigGoogleCloud) { super(); - this.config = this.parseConfig(config); - if (typeof this.config.bucketName !== "undefined" && this.config.bucketName !== "") { - const msg = this.validateName(this.config.bucketName); + this.configuration = this.parseConfig(config); + if ( + typeof this.configuration.bucketName !== "undefined" && + this.configuration.bucketName !== "" + ) { + const msg = this.validateName(this.configuration.bucketName); if (msg !== null) { throw new Error(msg); } - this.bucketName = this.config.bucketName; + this.bucketName = this.configuration.bucketName; } - this.storage = new GoogleCloudStorage(this.config as ConfigGoogleCloud); + this.storage = new GoogleCloudStorage(this.configuration as ConfigGoogleCloud); } /** diff --git a/src/AdapterLocal.ts b/src/AdapterLocal.ts index 8baccfe..8a726b5 100644 --- a/src/AdapterLocal.ts +++ b/src/AdapterLocal.ts @@ -12,14 +12,14 @@ export class AdapterLocal extends AbstractAdapter { constructor(config: ConfigLocal) { super(); - this.config = this.parseConfig(config); + this.configuration = this.parseConfig(config); // console.log(config); // 
console.log(this.config); - const mode = (this.config as ConfigLocal).mode; + const mode = (this.configuration as ConfigLocal).mode; if (typeof mode === "undefined") { - (this.config as ConfigLocal).mode = 0o777; + (this.configuration as ConfigLocal).mode = 0o777; } - const directory = (this.config as ConfigLocal).directory; + const directory = (this.configuration as ConfigLocal).directory; } private parseConfig(config: string | ConfigLocal): ConfigLocal { @@ -86,8 +86,11 @@ export class AdapterLocal extends AbstractAdapter { return Promise.resolve(true); } - if (typeof this.config.bucketName !== "undefined" && this.config.bucketName !== "") { - const { error } = await this.validateName(this.config.bucketName); + if ( + typeof this.configuration.bucketName !== "undefined" && + this.configuration.bucketName !== "" + ) { + const { error } = await this.validateName(this.configuration.bucketName); if (error !== null) { Promise.resolve({ error, value: null }); return; diff --git a/tests/testB2.ts b/tests/testB2.ts index 51aed2c..6238402 100644 --- a/tests/testB2.ts +++ b/tests/testB2.ts @@ -1,6 +1,6 @@ import B2 from "backblaze-b2"; import dotenv from "dotenv"; - +import { AdapterBackblazeB2 } from "../src/AdapterBackblazeB2"; import { StorageType } from "@tweedegolf/storage-abstraction"; dotenv.config(); @@ -9,13 +9,34 @@ const configBackblaze = { type: StorageType.B2, applicationKeyId: process.env.B2_APPLICATION_KEY_ID, applicationKey: process.env.B2_APPLICATION_KEY, - // bucketName: process.env.BUCKET_NAME, + bucketName: process.env.BUCKET_NAME, }; async function testB2() { - const storage = new B2(configBackblaze); + const storage = new AdapterBackblazeB2(configBackblaze); + // console.log(storage.config); + // console.log(storage.getConfiguration()); + + const type = storage.getType(); + let s: number; + + s = new Date().getTime(); + const data = await storage.listBuckets(); + console.log(1, new Date().getTime() - s, data); - let s = new Date().getTime(); + s = new 
Date().getTime(); + const data2 = await storage.listFiles("the-buck"); + console.log(1, new Date().getTime() - s, data2); + + s = new Date().getTime(); + const data3 = await storage.listFileNames("the-buck"); + console.log(3, new Date().getTime() - s, data3); +} + +async function testB2_2() { + const storage = new B2(configBackblaze); + let s; + s = new Date().getTime(); await storage.authorize(); console.log(1, new Date().getTime() - s); @@ -23,7 +44,10 @@ async function testB2() { const { data: { buckets }, } = await storage.listBuckets(); + console.log(2, new Date().getTime() - s); + // console.log(buckets); + s = new Date().getTime(); const n = "the-buck"; let id = null; for (let i = 0; i < buckets.length; i++) { @@ -33,14 +57,18 @@ async function testB2() { break; } } - console.log(2, new Date().getTime() - s); - console.log("B2", id); + // console.log(2, new Date().getTime() - s); + // console.log("B2", id); + + const r = await storage.listFileVersions({ bucketId: id }); + console.log(3, new Date().getTime() - s); + // console.log("listFileVersions", r.data.files); s = new Date().getTime(); - const { - data: { files }, - } = await storage.listFileVersions({ bucketId: id }); - console.log("B2", files); + const r2 = await storage.listFileNames({ bucketId: id }); + console.log(4, new Date().getTime() - s); + // console.log("listFileNames", r2.data.files); } testB2(); +// testB2_2(); From d39404422f819eb896dbf29337847b10562fb5c5 Mon Sep 17 00:00:00 2001 From: abudaan Date: Mon, 20 Nov 2023 23:52:48 +0100 Subject: [PATCH 12/26] wip error handling --- README.md | 82 ++++----- changelog.md | 1 + src/AbstractAdapter.ts | 6 +- src/AdapterAmazonS3.ts | 42 ++--- src/AdapterAzureStorageBlob.ts | 30 ++-- src/AdapterBackblazeB2.ts | 294 +++++++++++++++++-------------- src/AdapterGoogleCloudStorage.ts | 13 +- src/AdapterLocal.ts | 15 +- src/types.ts | 40 ++++- src/util.ts | 22 +-- tests/test-config-b2.jasmine.ts | 26 +-- tests/test.jasmine.ts | 30 ---- tests/testB2.ts | 93 
+++++++--- 13 files changed, 362 insertions(+), 332 deletions(-) diff --git a/README.md b/README.md index f301355..7aa2992 100644 --- a/README.md +++ b/README.md @@ -11,43 +11,43 @@ Because the API only provides basic storage operations (see [below](#api-methods - [Instantiate a storage](#instantiate-a-storage) - * [Configuration object](#configuration-object) - * [Configuration URL](#configuration-url) + - [Configuration object](#configuration-object) + - [Configuration URL](#configuration-url) - [Adapters](#adapters) - * [Local storage](#local-storage) - * [Google Cloud](#google-cloud) - * [Amazon S3](#amazon-s3) - + [S3 Compatible Storage](#s3-compatible-storage) - + [Cloudflare R2](#cloudflare-r2) - + [Backblaze S3](#backblaze-s3) - * [Backblaze B2](#backblaze-b2) - * [Azure Blob Storage](#azure-blob-storage) + - [Local storage](#local-storage) + - [Google Cloud](#google-cloud) + - [Amazon S3](#amazon-s3) + - [S3 Compatible Storage](#s3-compatible-storage) + - [Cloudflare R2](#cloudflare-r2) + - [Backblaze S3](#backblaze-s3) + - [Backblaze B2](#backblaze-b2) + - [Azure Blob Storage](#azure-blob-storage) - [API methods](#api-methods) - * [init](#init) - * [test](#test) - * [createBucket](#createbucket) - * [selectBucket](#selectbucket) - * [clearBucket](#clearbucket) - * [deleteBucket](#deletebucket) - * [listBuckets](#listbuckets) - * [getSelectedBucket](#getselectedbucket) - * [addFileFromPath](#addfilefrompath) - * [addFileFromBuffer](#addfilefrombuffer) - * [addFileFromReadable](#addfilefromreadable) - * [getFileAsReadable](#getfileasreadable) - * [removeFile](#removefile) - * [sizeOf](#sizeof) - * [fileExists](#fileexists) - * [listFiles](#listfiles) - * [getType](#gettype) - * [getConfiguration](#getconfiguration) - * [switchAdapter](#switchadapter) + - [init](#init) + - [test](#test) + - [createBucket](#createbucket) + - [selectBucket](#selectbucket) + - [clearBucket](#clearbucket) + - [deleteBucket](#deletebucket) + - [listBuckets](#listbuckets) + - 
[getSelectedBucket](#getselectedbucket) + - [addFileFromPath](#addfilefrompath) + - [addFileFromBuffer](#addfilefrombuffer) + - [addFileFromReadable](#addfilefromreadable) + - [getFileAsReadable](#getfileasreadable) + - [removeFile](#removefile) + - [sizeOf](#sizeof) + - [fileExists](#fileexists) + - [listFiles](#listfiles) + - [getType](#gettype) + - [getConfiguration](#getconfiguration) + - [switchAdapter](#switchadapter) - [How it works](#how-it-works) - [Adding more adapters](#adding-more-adapters) - * [Define your configuration](#define-your-configuration) - * [Adapter class](#adapter-class) - * [Adapter function](#adapter-function) - * [Register your adapter](#register-your-adapter) + - [Define your configuration](#define-your-configuration) + - [Adapter class](#adapter-class) + - [Adapter function](#adapter-function) + - [Register your adapter](#register-your-adapter) - [Tests](#tests) - [Example application](#example-application) - [Questions and requests](#questions-and-requests) @@ -486,22 +486,10 @@ Runs a simple test to test the storage configuration. The test is a call to `lis ### createBucket ```typescript -createBucket(name: string, options?: object): Promise; +createBucket(name: string, options?: object): Promise; ``` -Creates a new bucket, does not fail if the bucket already exists. If the bucket was created successfully it returns "bucket created" or if already existed "bucket exists", else it will reject with an error message. You can provide extra storage-specific settings such as access rights using the `options` object. - -> Note: dependent on the type of storage and the credentials used, you may need extra access rights for this action. E.g.: sometimes a user may only access the contents of one single bucket. - -### selectBucket - -```typescript -selectBucket(name: string | null): Promise; -``` - -Selects a or another bucket for storing files, the bucket will be created automatically if it doesn't exist. 
If you pass `null` an empty string or nothing at all the currently selected bucket will be deselected. - -Returns "bucket selected" or "bucket deselected". +Creates a new bucket. If the bucket was created successfully it resolves to "ok". If the bucket exists or the creating the bucket fails for another reason it resolves to an error message. You can provide extra storage-specific settings such as access rights using the `options` object. > Note: dependent on the type of storage and the credentials used, you may need extra access rights for this action. E.g.: sometimes a user may only access the contents of one single bucket. diff --git a/changelog.md b/changelog.md index 8e95f8a..7d9edb0 100644 --- a/changelog.md +++ b/changelog.md @@ -43,6 +43,7 @@ - No more local state: the storage instance will no longer hold a reference to the last used or selected bucket in its local state; you will have to provide a bucket name for every bucket operation, for instance `clearBucket`, but also `removeFile`. - The storage instance will also no longer hold a reference to all available buckets; a call to `listBuckets` will access the cloud storage service every time it is called; this is handy in case another process or user has created a new bucket. - `validateName` will not only perform a local check, it will also check if the name is valid and/or not taken at the cloud storage service. 
+- `createBucket` resolves with an error when that bucket already exists ### Old API (1.5.x) compared to new API (2.x) diff --git a/src/AbstractAdapter.ts b/src/AbstractAdapter.ts index 04b45b6..d4969e3 100644 --- a/src/AbstractAdapter.ts +++ b/src/AbstractAdapter.ts @@ -15,18 +15,18 @@ import { export abstract class AbstractAdapter implements IStorage { // protected type: StorageType; protected type: string; - protected configuration: AdapterConfig; + protected conf: AdapterConfig; getType(): string { return this.type; } public get config(): AdapterConfig { - return this.configuration; + return this.conf; } public getConfiguration(): AdapterConfig { - return this.configuration; + return this.conf; } async addFileFromPath(params: FilePath): Promise { diff --git a/src/AdapterAmazonS3.ts b/src/AdapterAmazonS3.ts index ff90a7c..c279f81 100644 --- a/src/AdapterAmazonS3.ts +++ b/src/AdapterAmazonS3.ts @@ -28,45 +28,42 @@ export class AdapterAmazonS3 extends AbstractAdapter { private bucketNames: string[] = []; private region: string = ""; private s3Compatible: S3Compatible = S3Compatible.Amazon; - protected configuration: ConfigAmazonS3; + protected conf: ConfigAmazonS3; constructor(config: string | AdapterConfig) { super(); - this.configuration = this.parseConfig(config as ConfigAmazonS3); - if ( - typeof this.configuration.bucketName !== "undefined" && - this.configuration.bucketName !== "" - ) { - const msg = this.validateName(this.configuration.bucketName); + this.conf = this.parseConfig(config as ConfigAmazonS3); + if (typeof this.conf.bucketName !== "undefined" && this.conf.bucketName !== "") { + const msg = this.validateName(this.conf.bucketName); if (msg !== null) { throw new Error(msg); } - this.bucketName = this.configuration.bucketName; + this.bucketName = this.conf.bucketName; } - if (typeof (this.configuration as ConfigAmazonS3).region === "undefined") { + if (typeof (this.conf as ConfigAmazonS3).region === "undefined") { if (this.s3Compatible === 
S3Compatible.R2) { - this.configuration.region = "auto"; - this.region = this.configuration.region; + this.conf.region = "auto"; + this.region = this.conf.region; } else if (this.s3Compatible === S3Compatible.Backblaze) { - let ep = this.configuration.endpoint; + let ep = this.conf.endpoint; ep = ep.substring(ep.indexOf("s3.") + 3); - this.configuration.region = ep.substring(0, ep.indexOf(".")); + this.conf.region = ep.substring(0, ep.indexOf(".")); // console.log(this.config.region); - this.region = this.configuration.region; + this.region = this.conf.region; } } else { - this.region = (this.configuration as ConfigAmazonS3).region; + this.region = (this.conf as ConfigAmazonS3).region; } - if (typeof this.configuration.endpoint === "undefined") { + if (typeof this.conf.endpoint === "undefined") { this.storage = new S3Client({ region: this.region }); } else { this.storage = new S3Client({ region: this.region, - endpoint: this.configuration.endpoint, + endpoint: this.conf.endpoint, credentials: { - accessKeyId: this.configuration.accessKeyId, - secretAccessKey: this.configuration.secretAccessKey, + accessKeyId: this.conf.accessKeyId, + secretAccessKey: this.conf.secretAccessKey, }, }); } @@ -199,12 +196,9 @@ export class AdapterAmazonS3 extends AbstractAdapter { ...options, }; // see issue: https://github.com/aws/aws-sdk-js/issues/3647 - if ( - typeof this.configuration.region !== "undefined" && - this.configuration.region !== "us-east-1" - ) { + if (typeof this.conf.region !== "undefined" && this.conf.region !== "us-east-1") { input.CreateBucketConfiguration = { - LocationConstraint: BucketLocationConstraint[this.configuration.region.replace("-", "_")], + LocationConstraint: BucketLocationConstraint[this.conf.region.replace("-", "_")], }; } diff --git a/src/AdapterAzureStorageBlob.ts b/src/AdapterAzureStorageBlob.ts index a73b30c..4705b04 100644 --- a/src/AdapterAzureStorageBlob.ts +++ b/src/AdapterAzureStorageBlob.ts @@ -20,27 +20,22 @@ export class 
AdapterAzureStorageBlob extends AbstractAdapter { constructor(config: string | ConfigAzureStorageBlob) { super(); - this.configuration = this.parseConfig(config as ConfigAzureStorageBlob); + this.conf = this.parseConfig(config as ConfigAzureStorageBlob); // console.log(this.config); - if ( - typeof this.configuration.bucketName !== "undefined" && - this.configuration.bucketName !== "" - ) { - const msg = this.validateName(this.configuration.bucketName); + if (typeof this.conf.bucketName !== "undefined" && this.conf.bucketName !== "") { + const msg = this.validateName(this.conf.bucketName); if (msg !== null) { throw new Error(msg); } - this.bucketName = this.configuration.bucketName; + this.bucketName = this.conf.bucketName; } this.sharedKeyCredential = new StorageSharedKeyCredential( - (this.configuration as ConfigAzureStorageBlob).storageAccount, - (this.configuration as ConfigAzureStorageBlob).accessKey + (this.conf as ConfigAzureStorageBlob).storageAccount, + (this.conf as ConfigAzureStorageBlob).accessKey ); this.storage = new BlobServiceClient( - `https://${ - (this.configuration as ConfigAzureStorageBlob).storageAccount - }.blob.core.windows.net`, + `https://${(this.conf as ConfigAzureStorageBlob).storageAccount}.blob.core.windows.net`, this.sharedKeyCredential ); } @@ -87,16 +82,13 @@ export class AdapterAzureStorageBlob extends AbstractAdapter { if (this.initialized) { return Promise.resolve(true); } - if ( - typeof this.configuration.bucketName !== "undefined" && - this.configuration.bucketName !== "" - ) { - const msg = this.validateName(this.configuration.bucketName); + if (typeof this.conf.bucketName !== "undefined" && this.conf.bucketName !== "") { + const msg = this.validateName(this.conf.bucketName); if (msg !== null) { throw new Error(msg); } - await this.createBucket(this.configuration.bucketName).then(() => { - this.bucketName = this.configuration.bucketName; + await this.createBucket(this.conf.bucketName).then(() => { + this.bucketName = 
this.conf.bucketName; this.bucketNames.push(this.bucketName); }); } diff --git a/src/AdapterBackblazeB2.ts b/src/AdapterBackblazeB2.ts index 9f57a59..8dbdbab 100644 --- a/src/AdapterBackblazeB2.ts +++ b/src/AdapterBackblazeB2.ts @@ -19,6 +19,7 @@ import { ResultObjectBuckets, ResultObjectFiles, ResultObjectNumber, + BackblazeAxiosResponse, } from "./types"; import { parseUrl, validateName } from "./util"; @@ -32,20 +33,33 @@ export class AdapterBackblazeB2 extends AbstractAdapter { constructor(config: string | ConfigBackblazeB2) { super(); - this.configuration = this.parseConfig(config); - this.storage = new B2(this.configuration); + this.conf = this.parseConfig(config); + if (this.conf !== null) { + try { + this.storage = new B2(this.conf); + } catch (e) { + this.configError = e.message; + } + } } - private parseConfig(config: string | ConfigBackblazeB2): ConfigBackblazeB2 { + // util members + + private parseConfig(config: string | ConfigBackblazeB2): ConfigBackblazeB2 | null { let cfg: ConfigBackblazeB2; if (typeof config === "string") { + const { error, value } = parseUrl(config); + if (error !== null) { + this.configError = error; + return null; + } const { type, part1: applicationKeyId, part2: applicationKey, bucketName, queryString, - } = parseUrl(config); + } = value; cfg = { type, applicationKeyId, @@ -64,6 +78,7 @@ export class AdapterBackblazeB2 extends AbstractAdapter { if (!cfg.applicationKey || !cfg.applicationKeyId) { this.configError = "You must specify a value for both 'applicationKeyId' and 'applicationKey' for storage type 'b2'"; + return null; } return cfg; } @@ -78,12 +93,13 @@ export class AdapterBackblazeB2 extends AbstractAdapter { return this.storage .authorize() - .then(() => { + .then((r: BackblazeAxiosResponse) => { + // console.log(r.data.allowed.capabilities); this.authorized = true; return { value: "ok", error: null }; }) - .catch((e: Error) => { - return { value: null, error: e.message }; + .catch((r: BackblazeAxiosResponse) => { + 
return { value: null, error: r.response.data.message }; }); } @@ -99,8 +115,8 @@ export class AdapterBackblazeB2 extends AbstractAdapter { }); return { value, error: null }; }) - .catch((e: Error) => { - return { value: null, error: e.message }; + .catch((r: BackblazeAxiosResponse) => { + return { value: null, error: r.response.data.message }; }); } @@ -142,8 +158,8 @@ export class AdapterBackblazeB2 extends AbstractAdapter { }); return { value, error: null }; }) - .catch((e: Error) => { - return { value: null, error: e.message }; + .catch((r: BackblazeAxiosResponse) => { + return { value: null, error: r.response.data.message }; }); } @@ -162,6 +178,55 @@ export class AdapterBackblazeB2 extends AbstractAdapter { return { value: null, error: `could not find file ${name}` }; } + protected async store(params: FilePath): Promise; + protected async store(params: FileBuffer): Promise; + protected async store(params: FileStream): Promise; + protected async store(params: FilePath | FileBuffer | FileStream): Promise { + const { error } = await this.authorize(); + if (error !== null) { + return { error, value: null }; + } + + const { bucketName, targetPath } = params; + const data = await this.getBucket(bucketName); + if (data.error !== null) { + return { error: data.error, value: null }; + } + const { value: bucket } = data; + + let { options } = params; + if (typeof options === "undefined") { + options = {}; + } + + let fileData: string | Buffer | Readable; + if (typeof (params as FilePath).origPath !== "undefined") { + fileData = (params as FilePath).origPath; + } else if (typeof (params as FileBuffer).buffer !== "undefined") { + fileData = (params as FileBuffer).buffer; + } else if (typeof (params as FileStream).stream !== "undefined") { + fileData = (params as FileStream).stream; + } + + return this.storage + .uploadAny({ + ...options, + bucketId: bucket.id, + fileName: targetPath, + data: fileData, + }) + .then((file: BackblazeB2File) => { + console.log(file); + return 
{ + error: null, + value: `${this.storage.downloadUrl}/file/${bucketName}/${targetPath}`, + }; + }) + .catch((r: BackblazeAxiosResponse) => { + return { error: r.response.data.message, value: null }; + }); + } + // probably not necessary; may be a little bit more lightweight compared to listFileVersions // if you don't have file versions public async listFileNames(bucketName: string): Promise { @@ -187,14 +252,16 @@ export class AdapterBackblazeB2 extends AbstractAdapter { }), }; }) - .catch((e: Error) => { + .catch((e: BackblazeAxiosResponse) => { return { - error: e.message, + error: e.response.data.message, value: null, }; }); } + // public API + public async getFileAsReadable( bucketName: string, fileName: string, @@ -210,17 +277,20 @@ export class AdapterBackblazeB2 extends AbstractAdapter { return { error: data.error, value: null }; } const { value: file } = data; - const d = await this.storage.downloadFileById({ - fileId: file.id, - responseType: "stream", - axios: { - headers: { - "Content-Type": file.contentType, - Range: `bytes=${options.start}-${options.end || ""}`, + return this.storage + .downloadFileById({ + fileId: file.id, + responseType: "stream", + axios: { + headers: { + "Content-Type": file.contentType, + Range: `bytes=${options.start}-${options.end || ""}`, + }, }, - }, - }); - return d.data; + }) + .then((r: BackblazeAxiosResponse) => { + return { error: null, value: r.response.data }; + }); } public async getFileAsURL(bucketName: string, fileName: string): Promise { @@ -229,8 +299,8 @@ export class AdapterBackblazeB2 extends AbstractAdapter { return { error, value: null }; } - // return Promise.resolve({ value: "ok", error: null }); - return { value: "ok", error: null }; + const url = `${this.storage.downloadUrl}/file/${bucketName}/${fileName}`; + return { value: url, error: null }; } public async removeFile(bucketName: string, fileName: string): Promise { @@ -245,6 +315,18 @@ export class AdapterBackblazeB2 extends AbstractAdapter { } const 
{ value: files } = data; + // return this.storage + // .deleteFileVersion({ + // fileId: "adadadad", + // fileName: "adasdadad", + // }) + + // .then(() => { + // return { error: null, value: "ok" }; + // }) + // .catch((r: BackblazeAxiosResponse) => { + // return { error: r.response.data.message, value: null }; + // }); return Promise.all( files .filter((f: FileB2) => f.name === fileName) @@ -258,59 +340,8 @@ export class AdapterBackblazeB2 extends AbstractAdapter { .then(() => { return { error: null, value: "ok" }; }) - .catch((e: Error) => { - return { error: e.message, value: null }; - }); - } - - // util members - - protected async store(params: FilePath): Promise; - protected async store(params: FileBuffer): Promise; - protected async store(params: FileStream): Promise; - protected async store(params: FilePath | FileBuffer | FileStream): Promise { - const { error } = await this.authorize(); - if (error !== null) { - return { error, value: null }; - } - - const { bucketName, targetPath } = params; - const data = await this.getBucket(bucketName); - if (data.error !== null) { - return { error: data.error, value: null }; - } - const { value: bucket } = data; - - let { options } = params; - if (typeof options === "undefined") { - options = {}; - } - - let fileData: string | Buffer | Readable; - if (typeof (params as FilePath).origPath !== "undefined") { - fileData = (params as FilePath).origPath; - } else if (typeof (params as FileBuffer).buffer !== "undefined") { - fileData = (params as FileBuffer).buffer; - } else if (typeof (params as FileStream).stream !== "undefined") { - fileData = (params as FileStream).stream; - } - - return this.storage - .uploadAny({ - ...options, - bucketId: bucket.id, - fileName: targetPath, - data: fileData, - }) - .then((file: BackblazeB2File) => { - console.log(file); - return { - error: null, - value: `${this.storage.downloadUrl}/file/${bucketName}/${targetPath}`, - }; - }) - .catch((e: Error) => { - return { error: e.message, 
value: null }; + .catch((r: BackblazeAxiosResponse) => { + return { error: r.response.data.message, value: null }; }); } @@ -331,12 +362,14 @@ export class AdapterBackblazeB2 extends AbstractAdapter { bucketName: name, bucketType: "allPrivate", // should be a config option! }) - .then((what) => { - console.log(what); + .then((response: { data: { bucketType: string } }) => { + const { + data: { bucketType }, + } = response; return { error: null, value: "ok" }; }) - .catch((e: Error) => { - return { error: e.message, value: null }; + .catch((r: BackblazeAxiosResponse) => { + return { error: r.response.data.message, value: null }; }); } @@ -355,7 +388,7 @@ export class AdapterBackblazeB2 extends AbstractAdapter { return Promise.all( files.map((file: FileB2) => this.storage.deleteFileVersion({ - fileId: file.id, + fileId: "file.id", fileName: file.name, }) ) @@ -364,8 +397,8 @@ export class AdapterBackblazeB2 extends AbstractAdapter { console.log(what); return { error: null, value: "ok" }; }) - .catch((e: Error) => { - return { error: e.message, value: null }; + .catch((r: BackblazeAxiosResponse) => { + return { error: r.response.data.message, value: null }; }); } @@ -385,8 +418,8 @@ export class AdapterBackblazeB2 extends AbstractAdapter { .then(() => { return { error: null, value: "ok" }; }) - .catch((e: Error) => { - return { error: e.message, value: null }; + .catch((r: BackblazeAxiosResponse) => { + return { error: r.response.data.message, value: null }; }); } @@ -396,18 +429,18 @@ export class AdapterBackblazeB2 extends AbstractAdapter { return { error, value: null }; } - return this.getBuckets() - .then(({ value: buckets }) => { - return { - error: null, - value: buckets.map((b) => { - return b.name; - }), - }; - }) - .catch((e: Error) => { - return { error: e.message, value: null }; - }); + const data = await this.getBuckets(); + if (data.error === null) { + const { value: buckets } = data; + return { + error: null, + value: buckets.map((b) => { + return 
b.name; + }), + }; + } else { + return { error: data.error, value: null }; + } } public async listFiles(bucketName: string, numFiles: number = 1000): Promise { @@ -416,19 +449,18 @@ export class AdapterBackblazeB2 extends AbstractAdapter { return { error, value: null }; } - return this.getFiles(bucketName) - .then(({ value: files }) => { - const f: Array<[string, number]> = files.map((f) => { + const data = await this.getFiles(bucketName); + if (data.error === null) { + const { value: files } = data; + return { + error: null, + value: files.map((f) => { return [f.name, f.contentLength]; - }); - return { - error: null, - value: f, - }; - }) - .catch((e: Error) => { - return { error: e.message, value: null }; - }); + }), + }; + } else { + return { error: data.error, value: null }; + } } public async sizeOf(bucketName: string, fileName: string): Promise { @@ -437,13 +469,13 @@ export class AdapterBackblazeB2 extends AbstractAdapter { return { error, value: null }; } - return this.getFile(bucketName, fileName) - .then(({ value: file }) => { - return { error: null, value: file.contentLength }; - }) - .catch((e: Error) => { - return { error: e.message, value: null }; - }); + const data = await this.getFile(bucketName, fileName); + if (data.error === null) { + const { value: file } = data; + return { error: null, value: file.contentLength }; + } else { + return { error: data.error, value: null }; + } } async bucketExists(bucketName: string): Promise { @@ -452,22 +484,20 @@ export class AdapterBackblazeB2 extends AbstractAdapter { return { error, value: null }; } - return this.getBucket(bucketName) - .then(() => { - return { error: null, value: true }; - }) - .catch(() => { - return { error: null, value: false }; - }); + const data = await this.getBucket(bucketName); + if (data.error === null) { + return { error: null, value: true }; + } else { + return { error: data.error, value: null }; + } } async fileExists(bucketName: string, fileName: string): Promise { - return 
this.sizeOf(bucketName, fileName) - .then(() => { - return { error: null, value: true }; - }) - .catch(() => { - return { error: null, value: false }; - }); + const { error, value } = await this.sizeOf(bucketName, fileName); + if (error === null) { + return { error: null, value: true }; + } else { + return { error: null, value: false }; + } } } diff --git a/src/AdapterGoogleCloudStorage.ts b/src/AdapterGoogleCloudStorage.ts index 14c72ba..deee0b9 100644 --- a/src/AdapterGoogleCloudStorage.ts +++ b/src/AdapterGoogleCloudStorage.ts @@ -18,18 +18,15 @@ export class AdapterGoogleCloudStorage extends AbstractAdapter { constructor(config: string | ConfigGoogleCloud) { super(); - this.configuration = this.parseConfig(config); - if ( - typeof this.configuration.bucketName !== "undefined" && - this.configuration.bucketName !== "" - ) { - const msg = this.validateName(this.configuration.bucketName); + this.conf = this.parseConfig(config); + if (typeof this.conf.bucketName !== "undefined" && this.conf.bucketName !== "") { + const msg = this.validateName(this.conf.bucketName); if (msg !== null) { throw new Error(msg); } - this.bucketName = this.configuration.bucketName; + this.bucketName = this.conf.bucketName; } - this.storage = new GoogleCloudStorage(this.configuration as ConfigGoogleCloud); + this.storage = new GoogleCloudStorage(this.conf as ConfigGoogleCloud); } /** diff --git a/src/AdapterLocal.ts b/src/AdapterLocal.ts index 8a726b5..7a025c7 100644 --- a/src/AdapterLocal.ts +++ b/src/AdapterLocal.ts @@ -12,14 +12,14 @@ export class AdapterLocal extends AbstractAdapter { constructor(config: ConfigLocal) { super(); - this.configuration = this.parseConfig(config); + this.conf = this.parseConfig(config); // console.log(config); // console.log(this.config); - const mode = (this.configuration as ConfigLocal).mode; + const mode = (this.conf as ConfigLocal).mode; if (typeof mode === "undefined") { - (this.configuration as ConfigLocal).mode = 0o777; + (this.conf as 
ConfigLocal).mode = 0o777; } - const directory = (this.configuration as ConfigLocal).directory; + const directory = (this.conf as ConfigLocal).directory; } private parseConfig(config: string | ConfigLocal): ConfigLocal { @@ -86,11 +86,8 @@ export class AdapterLocal extends AbstractAdapter { return Promise.resolve(true); } - if ( - typeof this.configuration.bucketName !== "undefined" && - this.configuration.bucketName !== "" - ) { - const { error } = await this.validateName(this.configuration.bucketName); + if (typeof this.conf.bucketName !== "undefined" && this.conf.bucketName !== "") { + const { error } = await this.validateName(this.conf.bucketName); if (error !== null) { Promise.resolve({ error, value: null }); return; diff --git a/src/types.ts b/src/types.ts index 46ca0a2..528e387 100644 --- a/src/types.ts +++ b/src/types.ts @@ -33,6 +33,12 @@ export interface IStorage { */ getConfiguration(): AdapterConfig; + /** + * Same as `getConfiguration` but implemented as getter + * @returns adapter configuration as object + */ + config: AdapterConfig; + /** * Returns an object that contains both the options passed with the configuration and the * default options of the storage type if not overruled by the options you passed in. @@ -40,9 +46,8 @@ export interface IStorage { // getOptions(): JSON; /** - * @param name name of the bucket to create, returns true once the bucket has been created but - * also when the bucket already exists. Note that you have to use `selectBucket` to start using - * the newly created bucket. + * @param name name of the bucket to create, returns "ok" once the bucket has been created but + * also when the bucket already exists. 
* @param options: additional options for creating a bucket such as access rights * @returns string or error */ @@ -210,6 +215,19 @@ export type AdapterConfig = | ConfigBackblazeB2 | ConfigTemplate; +export type BackblazeAxiosResponse = { + response: { + data: { + code: string; + message: string; + status: number; + allowed?: { + capabilities: Array; + }; + }; + }; +}; + export type BackblazeB2Bucket = { accountId: "string"; bucketId: "string"; @@ -254,11 +272,23 @@ export enum S3Compatible { Backblaze, } -export type ResultObject = { +export type ParseUrlResult = { error: string | null; - value: string | null; + value: { + type: string; + part1: string; + part2: string; + part3: string; + bucketName: string; + queryString: { [key: string]: string }; + }; }; +export interface ResultObject { + error: string | null; + value: string | null; +} + export type ResultObjectNumber = { error: string | null; value: number | null; diff --git a/src/util.ts b/src/util.ts index 852d34a..ad18832 100644 --- a/src/util.ts +++ b/src/util.ts @@ -1,4 +1,6 @@ import { BucketLocationConstraint } from "@aws-sdk/client-s3"; +import { ParseUrlResult } from "./types"; +import { StorageType } from "@tweedegolf/storage-abstraction"; /** * @param: url @@ -27,20 +29,14 @@ export const parseQuerystring = (url: string): { [id: string]: string } => { * Parses a url string into fragments and parses the query string into a * key-value object. 
*/ -export const parseUrl = ( - url: string -): { - type: string; - part1: string; - part2: string; - part3: string; - bucketName: string; - queryString: { [key: string]: string }; -} => { - if (url === "" || typeof url === "undefined") { - throw new Error("please provide a configuration url"); +export const parseUrl = (url: string): ParseUrlResult => { + if (url.indexOf("://") === -1) { + return { value: null, error: "Please provide a valid configuration url" }; } const type = url.substring(0, url.indexOf("://")); + if (Object.values(StorageType).includes(type as StorageType) === false) { + return { value: null, error: `"${type}" is not a valid storage type` }; + } let config = url.substring(url.indexOf("://") + 3); const at = config.indexOf("@"); const questionMark = config.indexOf("?"); @@ -81,7 +77,7 @@ export const parseUrl = ( } // console.log(type, part1, part2, region, bucketName, queryString); - return { type, part1, part2, part3, bucketName, queryString }; + return { error: null, value: { type, part1, part2, part3, bucketName, queryString } }; }; /** diff --git a/tests/test-config-b2.jasmine.ts b/tests/test-config-b2.jasmine.ts index bfd509d..7fe09d8 100644 --- a/tests/test-config-b2.jasmine.ts +++ b/tests/test-config-b2.jasmine.ts @@ -1,47 +1,47 @@ import "jasmine"; -import { Storage } from "../src/Storage"; import { ConfigBackblazeB2, StorageType } from "../src/types"; +import { AdapterBackblazeB2 } from "../src/AdapterBackblazeB2"; describe(`testing backblaze b2 urls`, () => { it("[0]", () => { - const storage = new Storage("b2://application-key-id:application-key/can/contain/slashes"); - expect(storage.getType()).toBe(StorageType.B2); - expect(storage.getSelectedBucket()).toBe(""); - expect((storage.getConfiguration() as ConfigBackblazeB2).applicationKeyId).toBe( - "application-key-id" + const storage = new AdapterBackblazeB2( + "b2://application-key-id:application-key/can/contain/slashes" ); - expect((storage.getConfiguration() as 
ConfigBackblazeB2).applicationKey).toBe( + expect(storage.getType()).toBe(StorageType.B2); + expect(storage.config.bucketName).toBe(""); + expect((storage.config as ConfigBackblazeB2).applicationKeyId).toBe("application-key-id"); + expect((storage.config as ConfigBackblazeB2).applicationKey).toBe( "application-key/can/contain/slashes" ); }); it("[1]", () => { - const storage = new Storage( + const storage = new AdapterBackblazeB2( "b2://application-key-id:application-key/can/contain/slashes@the-buck" ); expect(storage.getType()).toBe(StorageType.B2); - expect(storage.getSelectedBucket()).toBe("the-buck"); + expect(storage.config.bucketName).toBe("the-buck"); }); it("[2a] object", () => { - const storage = new Storage({ + const storage = new AdapterBackblazeB2({ type: StorageType.B2, applicationKeyId: "keyId", applicationKey: "key", }); expect(storage.getType()).toBe(StorageType.B2); - expect(storage.getSelectedBucket()).toBe(""); + expect(storage.config.bucketName).toBe(undefined); }); it("[2b] object", () => { - const storage = new Storage({ + const storage = new AdapterBackblazeB2({ type: StorageType.B2, applicationKeyId: "keyId", applicationKey: "key", bucketName: "bucket", }); expect(storage.getType()).toBe(StorageType.B2); - expect(storage.getSelectedBucket()).toBe("bucket"); + expect(storage.config.bucketName).toBe("bucket"); expect((storage.getConfiguration() as ConfigBackblazeB2).applicationKeyId).toBe("keyId"); expect((storage.getConfiguration() as ConfigBackblazeB2).applicationKey).toBe("key"); }); diff --git a/tests/test.jasmine.ts b/tests/test.jasmine.ts index 2066cd3..b1ad815 100644 --- a/tests/test.jasmine.ts +++ b/tests/test.jasmine.ts @@ -94,17 +94,6 @@ const newBucketName3 = `bucket-${uniquid()}-${new Date().getTime()}`; // console.log("newBucketName:", newBucketName, "\n"); let storage: Storage; -const test = async () => { - try { - storage = new Storage(config); - // console.log(storage); - await storage.init(); - } catch (e) { - 
console.error(`\x1b[31m${e.message}`); - process.exit(0); - } -}; -// test(); const waitABit = async (millis = 100): Promise => new Promise((resolve) => { @@ -187,25 +176,6 @@ describe(`[testing ${type} storage]`, async () => { // } // }); - it("init", async () => { - try { - storage = new Storage(config); - await storage.init(); - } catch (e) { - console.error(e); - return; - } - }); - - it("test", async () => { - try { - await storage.test(); - } catch (e) { - console.error(e); - return; - } - }); - it("create bucket", async () => { const bucketName = storage.getSelectedBucket(); if (bucketName === "") { diff --git a/tests/testB2.ts b/tests/testB2.ts index 6238402..0d05f36 100644 --- a/tests/testB2.ts +++ b/tests/testB2.ts @@ -5,49 +5,84 @@ import { StorageType } from "@tweedegolf/storage-abstraction"; dotenv.config(); +const applicationKeyId = process.env.B2_APPLICATION_KEY_ID; +const applicationKey = process.env.B2_APPLICATION_KEY; const configBackblaze = { - type: StorageType.B2, - applicationKeyId: process.env.B2_APPLICATION_KEY_ID, - applicationKey: process.env.B2_APPLICATION_KEY, + type: "null", //StorageType.B2, + applicationKeyId, + applicationKey, bucketName: process.env.BUCKET_NAME, }; async function testB2() { const storage = new AdapterBackblazeB2(configBackblaze); - // console.log(storage.config); + // const storage = new AdapterBackblazeB2("opt://configBackblaze"); + // const storage = new AdapterBackblazeB2(`b2://${applicationKeyId}:${applicationKey}`); + console.log(storage.config); // console.log(storage.getConfiguration()); const type = storage.getType(); - let s: number; - s = new Date().getTime(); - const data = await storage.listBuckets(); - console.log(1, new Date().getTime() - s, data); + // console.time("createBucket"); + // const response = await storage.createBucket("the-buck-2023/:{{{"); + // console.log(response); + // console.timeEnd("createBucket"); - s = new Date().getTime(); - const data2 = await storage.listFiles("the-buck"); - 
console.log(1, new Date().getTime() - s, data2); + // console.time("removeFile"); + // const response = await storage.removeFile("the-buck", "-2023/:{{{"); + // console.log(response); + // console.timeEnd("removeFile"); - s = new Date().getTime(); - const data3 = await storage.listFileNames("the-buck"); - console.log(3, new Date().getTime() - s, data3); + console.time("clearBucket"); + const response = await storage.clearBucket("the-buck"); + console.log(response); + console.timeEnd("clearBucket"); + + // console.time("fileExists"); + // response = await storage.fileExists("the-buck", "input.txt"); + // console.timeEnd("fileExists"); + // console.log(response); + + // console.time("sizeOf"); + // response = await storage.sizeOf("the-buck", "inputsss.txt"); + // console.timeEnd("sizeOf"); + // console.log(response); + + // console.time("listBuckets"); + // const data = await storage.listBuckets(); + // console.timeEnd("listBuckets"); + + // console.time("listFiles"); + // const data2 = await storage.listFiles("the-buck"); + // console.timeEnd("listFiles"); + + // console.time("listFileNames"); + // const data3 = await storage.listFileNames("the-buck"); + // console.timeEnd("listFileNames"); + + // const url = await storage.getFileAsURL("the-buck", "input.txt"); + // console.log(url); } async function testB2_2() { const storage = new B2(configBackblaze); - let s; - s = new Date().getTime(); + + console.time("authorize"); await storage.authorize(); - console.log(1, new Date().getTime() - s); + console.timeEnd("authorize"); - s = new Date().getTime(); + const bucketName = configBackblaze.bucketName; + const targetPath = "input.txt"; + const s = `${storage.downloadUrl}/file/${bucketName}/${targetPath}`; + console.log(s); + + console.time("buckets"); const { data: { buckets }, } = await storage.listBuckets(); - console.log(2, new Date().getTime() - s); - // console.log(buckets); + console.timeEnd("buckets"); - s = new Date().getTime(); + console.time("listFileVersions"); 
const n = "the-buck"; let id = null; for (let i = 0; i < buckets.length; i++) { @@ -57,18 +92,18 @@ async function testB2_2() { break; } } - // console.log(2, new Date().getTime() - s); - // console.log("B2", id); const r = await storage.listFileVersions({ bucketId: id }); - console.log(3, new Date().getTime() - s); - // console.log("listFileVersions", r.data.files); + console.timeEnd("listFileVersions"); - s = new Date().getTime(); + console.time("listFileNames"); const r2 = await storage.listFileNames({ bucketId: id }); - console.log(4, new Date().getTime() - s); + console.timeEnd("listFileNames"); // console.log("listFileNames", r2.data.files); } -testB2(); -// testB2_2(); +(async function run() { + await testB2(); + + // testB2_2(); +})(); From 67c347df082a589777bf2033de59f5755b48124d Mon Sep 17 00:00:00 2001 From: abudaan Date: Tue, 21 Nov 2023 21:46:50 +0100 Subject: [PATCH 13/26] wip addFile abstraction --- README.md | 111 ++++++++++++++++++-------------------- changelog.md | 2 + src/AbstractAdapter.ts | 37 ++++++++----- src/AdapterBackblazeB2.ts | 81 ++++++++++++++++------------ src/Storage.ts | 95 +++++++++++++++++--------------- src/types.ts | 41 +++++++++----- src/util.ts | 13 +++-- tests/testB2.ts | 37 ++++++++----- 8 files changed, 236 insertions(+), 181 deletions(-) diff --git a/README.md b/README.md index 7aa2992..e5c01fd 100644 --- a/README.md +++ b/README.md @@ -465,24 +465,6 @@ const s = new Storage("azure://storage1:accessKey1@container1"); ## API methods -### init - -```typescript -init():Promise; -``` - -Some cloud storage services need some initial setup that can't be handled in the constructor before they can be used, for instance an async authorization. Also if your storage is set to use a previously non-existing bucket, this bucket will be created in this method. - -If initial setup is required it is handled in this method, if no setup is required this method simply returns true. 
Note that you need to call this method even it the storage type doesn't need any setup; this is done to abstract away the differences between all types of storage. - -### test - -```typescript -test():Promise; -``` - -Runs a simple test to test the storage configuration. The test is a call to `listFiles` and if it fails it throws an error. - ### createBucket ```typescript @@ -496,57 +478,45 @@ Creates a new bucket. If the bucket was created successfully it resolves to "ok" ### clearBucket ```typescript -clearBucket(name?: string): Promise; +clearBucket(name: string): Promise; ``` -Removes all files in the bucket. If you omit the `name` parameter all files in the currently selected bucket will be removed. If no bucket is selected an error will be thrown. - -Returns "bucket cleared". +Removes all files in the bucket. > Note: dependent on the type of storage and the credentials used, you may need extra access rights for this action. ### deleteBucket ```typescript -deleteBucket(name?: string): Promise; +deleteBucket(name: string): Promise; ``` -Deletes the bucket and all files in it. If you omit the `name` parameter the currently selected bucket will be deleted. If no bucket is selected an error will be thrown. - -Returns "bucket deleted" +Deletes the bucket and all files in it. > Note: dependent on the type of storage and the credentials used, you may need extra access rights for this action. ### listBuckets ```typescript -listBuckets(): Promise +listBuckets(): Promise ``` -Returns a list with the names of all buckets in the storage. +Returns an array with the names of all buckets in the storage. > Note: dependent on the type of storage and the credentials used, you may need extra access rights for this action. E.g.: sometimes a user may only access the contents of one single bucket. -### getSelectedBucket - -```typescript -getSelectedBucket(): string -``` - -Returns the name of the currently selected bucket or an empty string ("") if no bucket has been selected. 
- ### addFileFromPath ```typescript -addFileFromPath(filePath: string, targetPath: string, options?: object): Promise; +addFileFromPath({filePath: string, targetPath: string, options?: object}: FilePathParams): Promise; ``` -Copies a file from a local path to the provided path in the storage. The value for `targetPath` needs to include at least a file name. You can provide extra storage-specific settings such as access rights using the `options` object. Returns the public url to the file. +Copies a file from a local path to the provided path in the storage. The value for `targetPath` needs to include at least a file name. You can provide extra storage-specific settings such as access rights using the `options` object. Returns the public url to the file (if the bucket is publicly accessible). ### addFileFromBuffer ```typescript -addFileFromBuffer(buffer: Buffer, targetPath: string, options?: object): Promise; +addFileFromBuffer({buffer: Buffer, targetPath: string, options?: object}: FileBufferParams): Promise; ``` Copies a buffer to a file in the storage. The value for `targetPath` needs to include at least a file name. You can provide extra storage-specific settings such as access rights using the `options` object. This method is particularly handy when you want to move uploaded files to the storage, for instance when you use Express.Multer with [MemoryStorage](https://github.com/expressjs/multer#memorystorage). Returns the public url to the file. @@ -554,55 +524,71 @@ Copies a buffer to a file in the storage. The value for `targetPath` needs to in ### addFileFromReadable ```typescript -addFileFromReadable(stream: Readable, targetPath: string, options?: object): Promise; +addFileFromReadable({stream: Readable, targetPath: string, options?: object}: FileStreamParams): Promise; ``` Allows you to stream a file directly to the storage. The value for `targetPath` needs to include at least a file name. 
You can provide extra storage-specific settings such as access rights using the `options` object. This method is particularly handy when you want to store files while they are being processed; for instance if a user has uploaded a full-size image and you want to store resized versions of this image in the storage; you can pipe the output stream of the resizing process directly to the storage. Returns the public url to the file. +### addFile + +```typescript +addFile(params: FilePathParams | FileBufferParams | FileStreamParams): Promise; +``` + +Generic method for adding a file to the storage; this method is actually called if you use one of the three aforementioned methods. + ### getFileAsReadable ```typescript -getFileAsReadable(name: string, options?: {start?: number, end?: number}): Promise; +getFileAsReadable(bucketName: string, fileName: string, options?: {start?: number, end?: number}): Promise; ``` Returns a file in the storage as a readable stream. You can specify a byte range by using the extra range argument, see these examples: ```typescript -getFileAsReadable("image.png"); // → reads whole file +getFileAsReadable("bucket-name", "image.png"); // → reads whole file -getFileAsReadable("image.png", {}); // → reads whole file +getFileAsReadable("bucket-name", "image.png", {}); // → reads whole file -getFileAsReadable("image.png", { start: 0 }); // → reads whole file +getFileAsReadable("bucket-name", "image.png", { start: 0 }); // → reads whole file -getFileAsReadable("image.png", { start: 0, end: 1999 }); // → reads first 2000 bytes +getFileAsReadable("bucket-name", "image.png", { start: 0, end: 1999 }); // → reads first 2000 bytes -getFileAsReadable("image.png", { end: 1999 }); // → reads first 2000 bytes +getFileAsReadable("bucket-name", "image.png", { end: 1999 }); // → reads first 2000 bytes -getFileAsReadable("image.png", { start: 2000 }); // → reads file from byte 2000 +getFileAsReadable("bucket-name", "image.png", { start: 2000 }); // → reads file 
from byte 2000 ``` ### removeFile ```typescript -removeFile(name: string): Promise; +removeFile(bucketName: string, fileName: string, allVersions: boolean = false): Promise; ``` Removes a file from the bucket. Does not fail if the file doesn't exist. -Returns "file removed" or "file not found". +Returns "ok" or "file not found". ### sizeOf ```typescript -sizeOf(name: string): number; +sizeOf(bucketName: string, fileName: string): Promise; +``` + +Returns the size of a file. + +### bucketExists + +```typescript +bucketExists(name: string): Promise; ``` -Returns the size of a file in the currently selected bucket and throws an error if no bucket has been selected. +Returns whether a bucket exists or not. ### fileExists ```typescript -fileExists(name: string): Promise; +fileExists(bucketName: string, fileName: string): Promise; ``` Returns whether a file exists or not. @@ -610,10 +596,10 @@ Returns whether a file exists or not. ### listFiles ```typescript -listFiles(): Promise<[string, number][]>; +listFiles(bucketName: string): Promise; ``` -Returns a list of all files in the currently selected bucket; for each file a tuple is returned containing the path and the size of the file. If no bucket is selected an error will be thrown. +Returns a list of all files in the bucket; for each file a tuple is returned containing the path and the size of the file. ### getType @@ -627,12 +613,15 @@ Returns the type of storage, value is one of the enum `StorageType`. ```typescript getConfiguration(): AdapterConfig + +// also implemented as getter: + +const storage = new Storage(config); +console.log(storage.conf) ``` Retrieves the configuration as provided during instantiation. If you have provided the configuration in url form, the function will return it as an configuration object. 
-Note that in this configuration object the value of the key `bucketName` will not change if you have selected a different bucket after initialization, the key `bucketName` will still hold the value of the initially set bucket. Use `getSelectedBucket()` to retrieve the actual value of `bucketName`. - ### switchAdapter ```typescript @@ -647,7 +636,7 @@ A `Storage` instance is actually a thin wrapper around one of the available adap ```typescript // member function of class Storage -async createBucket(name?: string): Promise { +async createBucket(name: string): Promise { return this.adapter.createBucket(name); }; ``` @@ -656,7 +645,9 @@ The class `Storage` implements the interface `IStorage` and this interface decla The adapter subsequently takes care of translating the generic API to storage specific functions. Therefor, dependent on what definitions you use, this library could be seen as a wrapper or a shim. -The method `switchAdapter` is not declared in `IStorage` but in the `Storage` class itself; this method parses the configuration and creates the appropriate adapter instance. This is done by a lookup table that maps a storage type to a path to an adapter module; the module will be loaded in runtime using `require()`. +The method `switchAdapter` is not declared in `IStorage` but in the `Storage` class itself; this is because the adapter have to implement `IStorage` and an adapter cannot (and should not) switch itself into another adapter + +`switchAdapter` parses the configuration and creates the appropriate adapter instance. This is done by a lookup table that maps a storage type to a path to an adapter module; the module will be loaded in runtime using `require()`. More adapter classes can be added for different storage types, note however that there are many cloud storage providers that keep their API compliant with Amazon S3, for instance [Wasabi](https://wasabi.com/). 
@@ -691,6 +682,8 @@ enum StorageType { GCS = "gcs", // Google Cloud Storage S3 = "s3", // Amazon S3 B2 = "b2", // BackBlaze B2 + AZURE = "azure", // Microsoft Azure Blob + MINIO = 'minio", YOUR_TYPE = "yourtype", } // your configuration URL @@ -707,7 +700,7 @@ You can format the configuration URL completely as you like as long as your adap ### Adapter class -You could choose to let your adapter class extend the class `AbstractStorage`. If you look at the [code](https://github.com/tweedegolf/storage-abstraction/blob/master/src/AbstractAdapter.ts) you can see that it only implements small parts of the API such as the `test` method. Also it performs some sanity checking of parameters of a few API functions; this way you don't have to implement these checks in all derived classes. +You could choose to let your adapter class extend the class `AbstractStorage`. If you look at the [code](https://github.com/tweedegolf/storage-abstraction/blob/master/src/AbstractAdapter.ts) you can see that it only implements small parts of the API such as the `getType` method. Also it performs some sanity checking of parameters of a few API functions; this way you don't have to implement these checks in all derived classes. One thing to note is the way `addFileFromPath`, `addFileFromBuffer` and `addFileFromReadable` are implemented; these are all forwarded to the non-API function `store`. This function stores files in the storage using 3 different types of origin; a path, a buffer and a stream. Because these ways of storing have a lot in common they are grouped together in a single overloaded method. diff --git a/changelog.md b/changelog.md index 7d9edb0..8d13e4a 100644 --- a/changelog.md +++ b/changelog.md @@ -44,6 +44,8 @@ - The storage instance will also no longer hold a reference to all available buckets; a call to `listBuckets` will access the cloud storage service every time it is called; this is handy in case another process or user has created a new bucket. 
- `validateName` will not only perform a local check, it will also check if the name is valid and/or not taken at the cloud storage service. - `createBucket` resolves with an error when that bucket already exists +- `removeFile` has an additional optional boolean argument `allVersions`; if set to true all version of the specified file will be removed. Default: false +- `addFile` is added; you can use this method whenever you use `addFileFromPath`, `addFileFromBuffer` or `addFileFromReadable` ### Old API (1.5.x) compared to new API (2.x) diff --git a/src/AbstractAdapter.ts b/src/AbstractAdapter.ts index d4969e3..163b26c 100644 --- a/src/AbstractAdapter.ts +++ b/src/AbstractAdapter.ts @@ -1,8 +1,8 @@ import { AdapterConfig, - FileBuffer, - FilePath, - FileStream, + FileBufferParams, + FilePathParams, + FileStreamParams, IStorage, ResultObject, ResultObjectBoolean, @@ -29,23 +29,30 @@ export abstract class AbstractAdapter implements IStorage { return this.conf; } - async addFileFromPath(params: FilePath): Promise { - return await this.store(params); + /** + * @param FilePath + * @param {string} FilePath.bucketName + * @param {string} FilePath.origPath - path to the file that you want to add, e.g. 
/home/user/Pictures/image1.jpg + * @param {string} FilePath.targetPath - path on the storage, you can add a path or only provide name of the file + * @param {object} FilePath.options + */ + async addFileFromPath(params: FilePathParams): Promise { + return await this.addFile(params); } - async addFileFromBuffer(params: FileBuffer): Promise { - return await this.store(params); + async addFileFromBuffer(params: FileBufferParams): Promise { + return await this.addFile(params); } - async addFileFromReadable(params: FileStream): Promise { - return await this.store(params); + async addFileFromReadable(params: FileStreamParams): Promise { + return await this.addFile(params); } // stubs - protected abstract store(param: FilePath): Promise; - protected abstract store(param: FileBuffer): Promise; - protected abstract store(param: FileStream): Promise; + abstract addFile(param: FilePathParams): Promise; + abstract addFile(param: FileBufferParams): Promise; + abstract addFile(param: FileStreamParams): Promise; abstract createBucket(name: string, options?: object): Promise; @@ -63,7 +70,11 @@ export abstract class AbstractAdapter implements IStorage { abstract getFileAsURL(bucketName: string, fileName: string): Promise; - abstract removeFile(bucketName: string, fileName: string): Promise; + abstract removeFile( + bucketName: string, + fileName: string, + allVersions?: boolean + ): Promise; abstract listFiles(bucketName: string): Promise; diff --git a/src/AdapterBackblazeB2.ts b/src/AdapterBackblazeB2.ts index 8dbdbab..01abab7 100644 --- a/src/AdapterBackblazeB2.ts +++ b/src/AdapterBackblazeB2.ts @@ -13,13 +13,14 @@ import { ResultObjectBucketB2, ResultObjectFileB2, FileB2, - FileBuffer, - FileStream, - FilePath, + FileBufferParams, + FileStreamParams, + FilePathParams, ResultObjectBuckets, ResultObjectFiles, ResultObjectNumber, BackblazeAxiosResponse, + BackblazeBucketOptions, } from "./types"; import { parseUrl, validateName } from "./util"; @@ -132,7 +133,7 @@ export class 
AdapterBackblazeB2 extends AbstractAdapter { return { value: bucket, error: null }; } } - return { value: null, error: `could not find bucket ${name}` }; + return { value: null, error: `Could not find bucket "${name}"` }; } private async getFiles(bucketName: string): Promise { @@ -175,13 +176,18 @@ export class AdapterBackblazeB2 extends AbstractAdapter { return { value: file, error: null }; } } - return { value: null, error: `could not find file ${name}` }; + return { value: null, error: `Could not find file "${name}" in bucket "${bucketName}".` }; } - protected async store(params: FilePath): Promise; - protected async store(params: FileBuffer): Promise; - protected async store(params: FileStream): Promise; - protected async store(params: FilePath | FileBuffer | FileStream): Promise { + /** + * Called by addFileFromPath, addFileFromBuffer and addFileFromReadable + */ + // public async addFile(param: FilePathParams): Promise; + // public async addFile(param: FileBufferParams): Promise; + // public async addFile(param: FileStreamParams): Promise; + public async addFile( + params: FilePathParams | FileBufferParams | FileStreamParams + ): Promise { const { error } = await this.authorize(); if (error !== null) { return { error, value: null }; @@ -200,12 +206,12 @@ export class AdapterBackblazeB2 extends AbstractAdapter { } let fileData: string | Buffer | Readable; - if (typeof (params as FilePath).origPath !== "undefined") { - fileData = (params as FilePath).origPath; - } else if (typeof (params as FileBuffer).buffer !== "undefined") { - fileData = (params as FileBuffer).buffer; - } else if (typeof (params as FileStream).stream !== "undefined") { - fileData = (params as FileStream).stream; + if (typeof (params as FilePathParams).origPath !== "undefined") { + fileData = (params as FilePathParams).origPath; + } else if (typeof (params as FileBufferParams).buffer !== "undefined") { + fileData = (params as FileBufferParams).buffer; + } else if (typeof (params as 
FileStreamParams).stream !== "undefined") { + fileData = (params as FileStreamParams).stream; } return this.storage @@ -216,7 +222,7 @@ export class AdapterBackblazeB2 extends AbstractAdapter { data: fileData, }) .then((file: BackblazeB2File) => { - console.log(file); + // console.log(file); return { error: null, value: `${this.storage.downloadUrl}/file/${bucketName}/${targetPath}`, @@ -252,9 +258,9 @@ export class AdapterBackblazeB2 extends AbstractAdapter { }), }; }) - .catch((e: BackblazeAxiosResponse) => { + .catch((r: BackblazeAxiosResponse) => { return { - error: e.response.data.message, + error: r.response.data.message, value: null, }; }); @@ -313,20 +319,14 @@ export class AdapterBackblazeB2 extends AbstractAdapter { if (error !== null) { return { error, value: null }; } + const { value: files } = data; + const index = files.findIndex(({ name }) => name === fileName); + if (index === -1) { + return { error: `Could not find file "${fileName}"`, value: null }; + } + const file = files[index]; - // return this.storage - // .deleteFileVersion({ - // fileId: "adadadad", - // fileName: "adasdadad", - // }) - - // .then(() => { - // return { error: null, value: "ok" }; - // }) - // .catch((r: BackblazeAxiosResponse) => { - // return { error: r.response.data.message, value: null }; - // }); return Promise.all( files .filter((f: FileB2) => f.name === fileName) @@ -343,9 +343,24 @@ export class AdapterBackblazeB2 extends AbstractAdapter { .catch((r: BackblazeAxiosResponse) => { return { error: r.response.data.message, value: null }; }); + + return this.storage + .deleteFileVersion({ + fileId: file.id, + fileName: file.name, + }) + .then(() => { + return { error: null, value: "ok" }; + }) + .catch((r: BackblazeAxiosResponse) => { + return { error: r.response.data.message, value: null }; + }); } - public async createBucket(name: string, options: object = {}): Promise { + public async createBucket( + name: string, + options: BackblazeBucketOptions = { bucketType: 
"allPrivate" } + ): Promise { const { error } = await this.authorize(); if (error !== null) { return { error, value: null }; @@ -360,7 +375,7 @@ export class AdapterBackblazeB2 extends AbstractAdapter { .createBucket({ ...options, bucketName: name, - bucketType: "allPrivate", // should be a config option! + bucketType: options.bucketType, }) .then((response: { data: { bucketType: string } }) => { const { @@ -388,7 +403,7 @@ export class AdapterBackblazeB2 extends AbstractAdapter { return Promise.all( files.map((file: FileB2) => this.storage.deleteFileVersion({ - fileId: "file.id", + fileId: file.id, fileName: file.name, }) ) diff --git a/src/Storage.ts b/src/Storage.ts index 5e03984..9cc8d41 100644 --- a/src/Storage.ts +++ b/src/Storage.ts @@ -1,6 +1,17 @@ import path from "path"; -import { Readable } from "stream"; -import { IStorage, AdapterConfig } from "./types"; +import { + IStorage, + AdapterConfig, + FileBufferParams, + ResultObject, + FilePathParams, + FileStreamParams, + ResultObjectBuckets, + ResultObjectReadable, + ResultObjectFiles, + ResultObjectNumber, + ResultObjectBoolean, +} from "./types"; // add new storage adapters here const adapterClasses = { @@ -9,6 +20,7 @@ const adapterClasses = { gcs: "AdapterGoogleCloudStorage", local: "AdapterLocal", azure: "AdapterAzureStorageBlob", + minio: "AdapterMinIO", }; // or here for functional adapters @@ -38,9 +50,9 @@ export class Storage implements IStorage { return this.adapter.getType(); } - // public getOptions(): TypeJSON { - // return this.adapter.getOptions(); - // } + get config(): AdapterConfig { + return this.adapter.getConfiguration(); + } public getConfiguration(): AdapterConfig { return this.adapter.getConfiguration(); @@ -72,80 +84,75 @@ export class Storage implements IStorage { // all methods below are implementing IStorage - async init(): Promise { - return this.adapter.init(); - } - - async test(): Promise { - return this.adapter.test(); + public async addFile( + paramObject: FilePathParams 
| FileBufferParams | FileStreamParams + ): Promise { + return this.adapter.addFile(paramObject); } - async addFileFromBuffer(buffer: Buffer, targetPath: string, options?: object): Promise { - return this.adapter.addFileFromBuffer(buffer, targetPath, options); + async addFileFromPath(params: FilePathParams): Promise { + return this.adapter.addFileFromPath(params); } - async addFileFromPath(origPath: string, targetPath: string, options?: object): Promise { - return this.adapter.addFileFromPath(origPath, targetPath, options); + async addFileFromBuffer(params: FileBufferParams): Promise { + return this.adapter.addFileFromBuffer(params); } - async addFileFromReadable( - stream: Readable, - targetPath: string, - options?: object - ): Promise { - return this.adapter.addFileFromReadable(stream, targetPath, options); + async addFileFromReadable(params: FileStreamParams): Promise { + return this.adapter.addFileFromReadable(params); } - async createBucket(name?: string, options?: object): Promise { + async createBucket(name: string, options?: object): Promise { return this.adapter.createBucket(name, options); } - async clearBucket(name?: string): Promise { + async clearBucket(name: string): Promise { return this.adapter.clearBucket(name); } - async deleteBucket(name?: string): Promise { + async deleteBucket(name: string): Promise { return this.adapter.deleteBucket(name); } - async listBuckets(): Promise { + async listBuckets(): Promise { return this.adapter.listBuckets(); } - public getSelectedBucket(): string { - return this.adapter.getSelectedBucket(); - } - async getFileAsReadable( - name: string, + bucketName: string, + fileName: string, options: { start?: number; end?: number } = {} - ): Promise { + ): Promise { const { start = 0, end } = options; // console.log(start, end, options); - return this.adapter.getFileAsReadable(name, { start, end }); + return this.adapter.getFileAsReadable(bucketName, fileName, { start, end }); } - async getFileAsURL(name: string): Promise { 
- return this.adapter.getFileAsURL(name); + async getFileAsURL(bucketName: string, fileName: string): Promise { + return this.adapter.getFileAsURL(bucketName, fileName); } - async removeFile(fileName: string): Promise { - return this.adapter.removeFile(fileName); + async removeFile( + bucketName: string, + fileName: string, + allVersions = false + ): Promise { + return this.adapter.removeFile(bucketName, fileName, allVersions); } - async listFiles(numFiles?: number): Promise<[string, number][]> { - return this.adapter.listFiles(numFiles); + async listFiles(bucketName: string, numFiles?: number): Promise { + return this.adapter.listFiles(bucketName, numFiles); } - async selectBucket(name?: string): Promise { - return this.adapter.selectBucket(name); + async sizeOf(bucketName: string, fileName: string): Promise { + return this.adapter.sizeOf(bucketName, fileName); } - async sizeOf(name: string): Promise { - return this.adapter.sizeOf(name); + async bucketExists(bucketName: string): Promise { + return this.adapter.bucketExists(bucketName); } - async fileExists(name: string): Promise { - return this.adapter.fileExists(name); + async fileExists(bucketName: string, fileName: string): Promise { + return this.adapter.fileExists(bucketName, fileName); } } diff --git a/src/types.ts b/src/types.ts index 528e387..4196b85 100644 --- a/src/types.ts +++ b/src/types.ts @@ -69,22 +69,31 @@ export interface IStorage { listBuckets(): Promise; /** - * @paramObject data about the file to be added + * @paramObject {filePathParams | FileBufferParams | FileStreamParams} - params related to the file to be added * @returns the public url to the file */ - addFileFromPath(paramObject: FilePath): Promise; + addFile(paramObject: FilePathParams): Promise; + addFile(paramObject: FileBufferParams): Promise; + addFile(paramObject: FileStreamParams): Promise; + addFile(paramObject: FilePathParams | FileBufferParams | FileStreamParams): Promise; /** - * @paramObject data about the file to be added + 
* @paramObject params related to the file to be added * @returns the public url to the file */ - addFileFromBuffer(paramObject: FileBuffer): Promise; + addFileFromPath(paramObject: FilePathParams): Promise; /** - * @paramObject data about the file to be added + * @paramObject params related to the file to be added * @returns the public url to the file */ - addFileFromReadable(paramObject: FileStream): Promise; + addFileFromBuffer(paramObject: FileBufferParams): Promise; + + /** + * @paramObject params related to the file to be added + * @returns the public url to the file + */ + addFileFromReadable(paramObject: FileStreamParams): Promise; /** * @param bucketName name of the bucket where the file is stored @@ -108,10 +117,13 @@ export interface IStorage { getFileAsURL(bucketName: string, fileName: string): Promise; /** - * @param bucketName name of the bucket where the file is stored - * @param fileName name of the file to be removed + * @param {string} bucketName name of the bucket where the file is stored + * @param {string} fileName name of the file to be removed + * @param {boolean} [allVersions = true] in case there are more versions of this file you can choose to remove + * all of them in one go or delete only the latest version (only if applicable such as with Backblaze B2 and S3 + * when you've enabled versioning) */ - removeFile(bucketName: string, fileName: string): Promise; + removeFile(bucketName: string, fileName: string, allVersions?: boolean): Promise; /** * @param bucketName name of the bucket @@ -146,6 +158,7 @@ export enum StorageType { S3 = "s3", // Amazon S3 B2 = "b2", // BackBlaze B2 AZURE = "azure", // Azure Storage Blob + MINIO = "minio", } export type JSON = { @@ -266,6 +279,10 @@ export type FileB2 = { contentLength: number; }; +export type BackblazeBucketOptions = { + bucketType: string; +}; + export enum S3Compatible { Amazon, R2, @@ -340,7 +357,7 @@ export type ResultObjectReadable = { * @param targetPath path to copy the file to, folders 
will be created automatically * @param options additional option such as access rights **/ -export type FilePath = { +export type FilePathParams = { bucketName: string; origPath: string; targetPath: string; @@ -353,7 +370,7 @@ export type FilePath = { * @param targetPath path to the file to save the buffer to, folders will be created automatically * @param options additional option such as access rights **/ -export type FileBuffer = { +export type FileBufferParams = { bucketName: string; buffer: Buffer; targetPath: string; @@ -366,7 +383,7 @@ export type FileBuffer = { * @param targetPath path to the file to save the stream to, folders will be created automatically * @param options additional option such as access rights **/ -export type FileStream = { +export type FileStreamParams = { bucketName: string; stream: Readable; targetPath: string; diff --git a/src/util.ts b/src/util.ts index ad18832..37f96db 100644 --- a/src/util.ts +++ b/src/util.ts @@ -1,6 +1,5 @@ import { BucketLocationConstraint } from "@aws-sdk/client-s3"; import { ParseUrlResult } from "./types"; -import { StorageType } from "@tweedegolf/storage-abstraction"; /** * @param: url @@ -34,9 +33,9 @@ export const parseUrl = (url: string): ParseUrlResult => { return { value: null, error: "Please provide a valid configuration url" }; } const type = url.substring(0, url.indexOf("://")); - if (Object.values(StorageType).includes(type as StorageType) === false) { - return { value: null, error: `"${type}" is not a valid storage type` }; - } + // if (Object.values(StorageType).includes(type as StorageType) === false) { + // return { value: null, error: `"${type}" is not a valid storage type` }; + // } let config = url.substring(url.indexOf("://") + 3); const at = config.indexOf("@"); const questionMark = config.indexOf("?"); @@ -81,7 +80,7 @@ export const parseUrl = (url: string): ParseUrlResult => { }; /** - * @param s + * @param {string} s * * Parses a string that contains a radix prefix to a number * @@ 
-123,7 +122,7 @@ export const parseMode = (s: number | string): string | number => { }; /** - * @param: url + * @param {string} url * * strips off the protocol of an url and returns it */ @@ -132,7 +131,7 @@ export const getProtocol = (url: string): string => { }; /** - * @param name + * @param {string} name * * Checks if the value of the name is not null or undefined */ diff --git a/tests/testB2.ts b/tests/testB2.ts index 0d05f36..bde9bf2 100644 --- a/tests/testB2.ts +++ b/tests/testB2.ts @@ -1,24 +1,25 @@ import B2 from "backblaze-b2"; import dotenv from "dotenv"; import { AdapterBackblazeB2 } from "../src/AdapterBackblazeB2"; -import { StorageType } from "@tweedegolf/storage-abstraction"; +import { StorageType } from "../src/types"; +import { Storage } from "../src/Storage"; dotenv.config(); const applicationKeyId = process.env.B2_APPLICATION_KEY_ID; const applicationKey = process.env.B2_APPLICATION_KEY; const configBackblaze = { - type: "null", //StorageType.B2, + type: StorageType.B2, applicationKeyId, applicationKey, bucketName: process.env.BUCKET_NAME, }; async function testB2() { - const storage = new AdapterBackblazeB2(configBackblaze); - // const storage = new AdapterBackblazeB2("opt://configBackblaze"); + const storage = new Storage(configBackblaze); + // const storage = new Storage("opt://configBackblaze"); // const storage = new AdapterBackblazeB2(`b2://${applicationKeyId}:${applicationKey}`); - console.log(storage.config); + // console.log(storage.config); // console.log(storage.getConfiguration()); const type = storage.getType(); @@ -33,13 +34,13 @@ async function testB2() { // console.log(response); // console.timeEnd("removeFile"); - console.time("clearBucket"); - const response = await storage.clearBucket("the-buck"); - console.log(response); - console.timeEnd("clearBucket"); + // console.time("clearBucket"); + // const response = await storage.clearBucket("the-buck"); + // console.log(response); + // console.timeEnd("clearBucket"); // 
console.time("fileExists"); - // response = await storage.fileExists("the-buck", "input.txt"); + // const response = await storage.fileExists("the-buck", "input.txt"); // console.timeEnd("fileExists"); // console.log(response); @@ -52,9 +53,9 @@ async function testB2() { // const data = await storage.listBuckets(); // console.timeEnd("listBuckets"); - // console.time("listFiles"); - // const data2 = await storage.listFiles("the-buck"); - // console.timeEnd("listFiles"); + console.time("listFiles"); + const data2 = await storage.listFiles("the-buck"); + console.timeEnd("listFiles"); // console.time("listFileNames"); // const data3 = await storage.listFileNames("the-buck"); @@ -62,6 +63,14 @@ async function testB2() { // const url = await storage.getFileAsURL("the-buck", "input.txt"); // console.log(url); + + console.time("addFileFromPath"); + const data3 = await storage.addFile({ + bucketName: "the-buck", + origPath: `${process.cwd()}/tests/data/image2.jpg`, + targetPath: "test/image1.jpg", + }); + console.timeEnd("addFileFromPath"); } async function testB2_2() { @@ -71,6 +80,7 @@ async function testB2_2() { await storage.authorize(); console.timeEnd("authorize"); + /* const bucketName = configBackblaze.bucketName; const targetPath = "input.txt"; const s = `${storage.downloadUrl}/file/${bucketName}/${targetPath}`; @@ -100,6 +110,7 @@ async function testB2_2() { const r2 = await storage.listFileNames({ bucketId: id }); console.timeEnd("listFileNames"); // console.log("listFileNames", r2.data.files); +*/ } (async function run() { From 1907949192adedfc1c117b0aba43e18972f536f6 Mon Sep 17 00:00:00 2001 From: abudaan Date: Tue, 21 Nov 2023 23:13:40 +0100 Subject: [PATCH 14/26] removed overloading --- src/AbstractAdapter.ts | 29 +++++++++++++++++++++++++---- src/AdapterBackblazeB2.ts | 1 + src/Storage.ts | 21 +++++++++++++++++++++ src/types.ts | 14 +++++++++++--- tests/testB2.ts | 4 ++++ 5 files changed, 62 insertions(+), 7 deletions(-) diff --git a/src/AbstractAdapter.ts 
b/src/AbstractAdapter.ts index 163b26c..dc37731 100644 --- a/src/AbstractAdapter.ts +++ b/src/AbstractAdapter.ts @@ -30,7 +30,7 @@ export abstract class AbstractAdapter implements IStorage { } /** - * @param FilePath + * @paramObject FilePath * @param {string} FilePath.bucketName * @param {string} FilePath.origPath - path to the file that you want to add, e.g. /home/user/Pictures/image1.jpg * @param {string} FilePath.targetPath - path on the storage, you can add a path or only provide name of the file @@ -40,24 +40,45 @@ export abstract class AbstractAdapter implements IStorage { return await this.addFile(params); } + /** + * @paramObject FileBufferParams + * @param {string} FilePath.bucketName + * @param {Buffer} FilePath.buffer - buffer + * @param {string} FilePath.targetPath - path on the storage, you can add a path or only provide name of the file + * @param {object} FilePath.options + */ async addFileFromBuffer(params: FileBufferParams): Promise { return await this.addFile(params); } + /** + * @paramObject FileStreamParams + * @param {string} FilePath.bucketName + * @param {Readable} FilePath.readable - stream + * @param {string} FilePath.targetPath - path on the storage, you can add a path or only provide name of the file + * @param {object} FilePath.options + */ async addFileFromReadable(params: FileStreamParams): Promise { return await this.addFile(params); } // stubs + /* no need to overload method anymore */ + // abstract addFile(param: FilePathParams): Promise; + // abstract addFile(param: FileBufferParams): Promise; + // abstract addFile(param: FileStreamParams): Promise; - abstract addFile(param: FilePathParams): Promise; - abstract addFile(param: FileBufferParams): Promise; - abstract addFile(param: FileStreamParams): Promise; + abstract addFile( + paramObject: FilePathParams | FileBufferParams | FileStreamParams + ): Promise; abstract createBucket(name: string, options?: object): Promise; abstract clearBucket(name: string): Promise; + /** + * @param 
name: deletes the bucket with this name. + */ abstract deleteBucket(name: string): Promise; abstract listBuckets(): Promise; diff --git a/src/AdapterBackblazeB2.ts b/src/AdapterBackblazeB2.ts index 01abab7..37ff819 100644 --- a/src/AdapterBackblazeB2.ts +++ b/src/AdapterBackblazeB2.ts @@ -182,6 +182,7 @@ export class AdapterBackblazeB2 extends AbstractAdapter { /** * Called by addFileFromPath, addFileFromBuffer and addFileFromReadable */ + /* no need to overload method anymore */ // public async addFile(param: FilePathParams): Promise; // public async addFile(param: FileBufferParams): Promise; // public async addFile(param: FileStreamParams): Promise; diff --git a/src/Storage.ts b/src/Storage.ts index 9cc8d41..1bb2cb2 100644 --- a/src/Storage.ts +++ b/src/Storage.ts @@ -84,6 +84,27 @@ export class Storage implements IStorage { // all methods below are implementing IStorage + /** + * @paramObject FilePath + * @param {string} FilePath.bucketName + * @param {string} FilePath.origPath - path to the file that you want to add, e.g. 
/home/user/Pictures/image1.jpg + * @param {string} FilePath.targetPath - path on the storage, you can add a path or only provide name of the file + * @param {object} FilePath.options + * + * @paramObject FileBufferParams + * @param {string} FilePath.bucketName + * @param {Buffer} FilePath.buffer - buffer + * @param {string} FilePath.targetPath - path on the storage, you can add a path or only provide name of the file + * @param {object} FilePath.options + * + * @paramObject FileStreamParams + * @param {string} FilePath.bucketName + * @param {Readable} FilePath.readable - stream + * @param {string} FilePath.targetPath - path on the storage, you can add a path or only provide name of the file + * @param {object} FilePath.options + * + * @returns {ResultObject} + */ public async addFile( paramObject: FilePathParams | FileBufferParams | FileStreamParams ): Promise { diff --git a/src/types.ts b/src/types.ts index 4196b85..4aef3bc 100644 --- a/src/types.ts +++ b/src/types.ts @@ -72,11 +72,19 @@ export interface IStorage { * @paramObject {filePathParams | FileBufferParams | FileStreamParams} - params related to the file to be added * @returns the public url to the file */ - addFile(paramObject: FilePathParams): Promise; - addFile(paramObject: FileBufferParams): Promise; - addFile(paramObject: FileStreamParams): Promise; + /* no need to overload method anymore */ + // addFile(paramObject: FilePathParams): Promise; + // addFile(paramObject: FileBufferParams): Promise; + // addFile(paramObject: FileStreamParams): Promise; addFile(paramObject: FilePathParams | FileBufferParams | FileStreamParams): Promise; + /** + * @paramObject {filePathParams} - params related to the file to be added + * @returns the public url to the file + */ + // no need to overload method anymore + addFile(paramObject: FilePathParams): Promise; + /** * @paramObject params related to the file to be added * @returns the public url to the file diff --git a/tests/testB2.ts b/tests/testB2.ts index 
bde9bf2..5825bda 100644 --- a/tests/testB2.ts +++ b/tests/testB2.ts @@ -71,6 +71,10 @@ async function testB2() { targetPath: "test/image1.jpg", }); console.timeEnd("addFileFromPath"); + + console.time("deleteBucket"); + const r2 = await storage.deleteBucket("the-buck"); + console.timeEnd("deleteBucket"); } async function testB2_2() { From 4da4a5872da8c7f2a9d9d1c8e79e941807572cf0 Mon Sep 17 00:00:00 2001 From: abudaan Date: Wed, 22 Nov 2023 12:27:49 +0100 Subject: [PATCH 15/26] wip stream and js doc --- src/AbstractAdapter.ts | 23 ++------ src/AdapterAmazonS3.ts | 2 +- src/AdapterAzureStorageBlob.ts | 2 +- src/AdapterBackblazeB2.ts | 14 ++--- src/AdapterBackblazeB2F.ts | 4 +- src/AdapterGoogleCloudStorage.ts | 2 +- src/AdapterLocal.ts | 2 +- src/Storage.ts | 33 ++--------- src/types.ts | 99 ++++++++++++++++++++++++++++---- tests/testB2.ts | 59 +++++++++++++++---- 10 files changed, 158 insertions(+), 82 deletions(-) diff --git a/src/AbstractAdapter.ts b/src/AbstractAdapter.ts index dc37731..e6dae77 100644 --- a/src/AbstractAdapter.ts +++ b/src/AbstractAdapter.ts @@ -9,7 +9,7 @@ import { ResultObjectBuckets, ResultObjectFiles, ResultObjectNumber, - ResultObjectReadable, + ResultObjectStream, } from "./types"; export abstract class AbstractAdapter implements IStorage { @@ -36,6 +36,7 @@ export abstract class AbstractAdapter implements IStorage { * @param {string} FilePath.targetPath - path on the storage, you can add a path or only provide name of the file * @param {object} FilePath.options */ + async addFileFromPath(params: FilePathParams): Promise { return await this.addFile(params); } @@ -51,22 +52,11 @@ export abstract class AbstractAdapter implements IStorage { return await this.addFile(params); } - /** - * @paramObject FileStreamParams - * @param {string} FilePath.bucketName - * @param {Readable} FilePath.readable - stream - * @param {string} FilePath.targetPath - path on the storage, you can add a path or only provide name of the file - * @param {object} 
FilePath.options - */ - async addFileFromReadable(params: FileStreamParams): Promise { + async addFileFromStream(params: FileStreamParams): Promise { return await this.addFile(params); } // stubs - /* no need to overload method anymore */ - // abstract addFile(param: FilePathParams): Promise; - // abstract addFile(param: FileBufferParams): Promise; - // abstract addFile(param: FileStreamParams): Promise; abstract addFile( paramObject: FilePathParams | FileBufferParams | FileStreamParams @@ -76,18 +66,15 @@ export abstract class AbstractAdapter implements IStorage { abstract clearBucket(name: string): Promise; - /** - * @param name: deletes the bucket with this name. - */ abstract deleteBucket(name: string): Promise; abstract listBuckets(): Promise; - abstract getFileAsReadable( + abstract getFileAsStream( bucketName: string, fileName: string, options?: { start?: number; end?: number } - ): Promise; + ): Promise; abstract getFileAsURL(bucketName: string, fileName: string): Promise; diff --git a/src/AdapterAmazonS3.ts b/src/AdapterAmazonS3.ts index c279f81..0c207ef 100644 --- a/src/AdapterAmazonS3.ts +++ b/src/AdapterAmazonS3.ts @@ -134,7 +134,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { return cfg; } - async getFileAsReadable( + async getFileAsStream( fileName: string, options: { start?: number; end?: number } = { start: 0 } ): Promise { diff --git a/src/AdapterAzureStorageBlob.ts b/src/AdapterAzureStorageBlob.ts index 4705b04..96f6c72 100644 --- a/src/AdapterAzureStorageBlob.ts +++ b/src/AdapterAzureStorageBlob.ts @@ -97,7 +97,7 @@ export class AdapterAzureStorageBlob extends AbstractAdapter { return Promise.resolve(true); } - async getFileAsReadable( + async getFileAsStream( fileName: string, options: CreateReadStreamOptions = { start: 0 } ): Promise { diff --git a/src/AdapterBackblazeB2.ts b/src/AdapterBackblazeB2.ts index 37ff819..dc2d781 100644 --- a/src/AdapterBackblazeB2.ts +++ b/src/AdapterBackblazeB2.ts @@ -7,7 +7,7 @@ import { 
BackblazeB2File, ResultObjectBoolean, ResultObject, - ResultObjectReadable, + ResultObjectStream, ResultObjectBucketsB2, ResultObjectFilesB2, ResultObjectBucketB2, @@ -182,10 +182,6 @@ export class AdapterBackblazeB2 extends AbstractAdapter { /** * Called by addFileFromPath, addFileFromBuffer and addFileFromReadable */ - /* no need to overload method anymore */ - // public async addFile(param: FilePathParams): Promise; - // public async addFile(param: FileBufferParams): Promise; - // public async addFile(param: FileStreamParams): Promise; public async addFile( params: FilePathParams | FileBufferParams | FileStreamParams ): Promise { @@ -269,11 +265,11 @@ export class AdapterBackblazeB2 extends AbstractAdapter { // public API - public async getFileAsReadable( + public async getFileAsStream( bucketName: string, fileName: string, options: { start?: number; end?: number } = { start: 0 } - ): Promise { + ): Promise { const { error } = await this.authorize(); if (error !== null) { return { error, value: null }; @@ -295,8 +291,8 @@ export class AdapterBackblazeB2 extends AbstractAdapter { }, }, }) - .then((r: BackblazeAxiosResponse) => { - return { error: null, value: r.response.data }; + .then((r) => { + return { error: null, value: r.data }; }); } diff --git a/src/AdapterBackblazeB2F.ts b/src/AdapterBackblazeB2F.ts index 5360b80..952b42f 100644 --- a/src/AdapterBackblazeB2F.ts +++ b/src/AdapterBackblazeB2F.ts @@ -62,8 +62,8 @@ const adapter: IStorage = { getSelectedBucket, addFileFromPath, addFileFromBuffer, - addFileFromReadable, - getFileAsReadable, + addFileFromStream: addFileFromReadable, + getFileAsStream: getFileAsReadable, removeFile, listFiles, sizeOf, diff --git a/src/AdapterGoogleCloudStorage.ts b/src/AdapterGoogleCloudStorage.ts index deee0b9..5013369 100644 --- a/src/AdapterGoogleCloudStorage.ts +++ b/src/AdapterGoogleCloudStorage.ts @@ -108,7 +108,7 @@ export class AdapterGoogleCloudStorage extends AbstractAdapter { return file; } - async getFileAsReadable( 
+ async getFileAsStream( fileName: string, options: CreateReadStreamOptions = { start: 0 } ): Promise { diff --git a/src/AdapterLocal.ts b/src/AdapterLocal.ts index 7a025c7..1c49657 100644 --- a/src/AdapterLocal.ts +++ b/src/AdapterLocal.ts @@ -255,7 +255,7 @@ export class AdapterLocal extends AbstractAdapter { return result; } - async getFileAsReadable( + async getFileAsStream( name: string, options: { start?: number; end?: number } = { start: 0 } ): Promise { diff --git a/src/Storage.ts b/src/Storage.ts index 1bb2cb2..7eb37f2 100644 --- a/src/Storage.ts +++ b/src/Storage.ts @@ -7,7 +7,7 @@ import { FilePathParams, FileStreamParams, ResultObjectBuckets, - ResultObjectReadable, + ResultObjectStream, ResultObjectFiles, ResultObjectNumber, ResultObjectBoolean, @@ -84,27 +84,6 @@ export class Storage implements IStorage { // all methods below are implementing IStorage - /** - * @paramObject FilePath - * @param {string} FilePath.bucketName - * @param {string} FilePath.origPath - path to the file that you want to add, e.g. 
/home/user/Pictures/image1.jpg - * @param {string} FilePath.targetPath - path on the storage, you can add a path or only provide name of the file - * @param {object} FilePath.options - * - * @paramObject FileBufferParams - * @param {string} FilePath.bucketName - * @param {Buffer} FilePath.buffer - buffer - * @param {string} FilePath.targetPath - path on the storage, you can add a path or only provide name of the file - * @param {object} FilePath.options - * - * @paramObject FileStreamParams - * @param {string} FilePath.bucketName - * @param {Readable} FilePath.readable - stream - * @param {string} FilePath.targetPath - path on the storage, you can add a path or only provide name of the file - * @param {object} FilePath.options - * - * @returns {ResultObject} - */ public async addFile( paramObject: FilePathParams | FileBufferParams | FileStreamParams ): Promise { @@ -119,8 +98,8 @@ export class Storage implements IStorage { return this.adapter.addFileFromBuffer(params); } - async addFileFromReadable(params: FileStreamParams): Promise { - return this.adapter.addFileFromReadable(params); + async addFileFromStream(params: FileStreamParams): Promise { + return this.adapter.addFileFromStream(params); } async createBucket(name: string, options?: object): Promise { @@ -139,14 +118,14 @@ export class Storage implements IStorage { return this.adapter.listBuckets(); } - async getFileAsReadable( + async getFileAsStream( bucketName: string, fileName: string, options: { start?: number; end?: number } = {} - ): Promise { + ): Promise { const { start = 0, end } = options; // console.log(start, end, options); - return this.adapter.getFileAsReadable(bucketName, fileName, { start, end }); + return this.adapter.getFileAsStream(bucketName, fileName, { start, end }); } async getFileAsURL(bucketName: string, fileName: string): Promise { diff --git a/src/types.ts b/src/types.ts index 4aef3bc..16a2a28 100644 --- a/src/types.ts +++ b/src/types.ts @@ -59,7 +59,12 @@ export interface IStorage 
{ clearBucket(name: string): Promise; /** - * @param name: deletes the bucket with this name. + * deletes the bucket with the provided name + * @param {string} name name of the bucket + * @returns {Promise} a promise that always resolves in a ResultObject: + * ```typescript + * { error: null | string, value: null | string } + * ``` */ deleteBucket(name: string): Promise; @@ -86,10 +91,24 @@ export interface IStorage { addFile(paramObject: FilePathParams): Promise; /** - * @paramObject params related to the file to be added - * @returns the public url to the file + * @param {FilePathParams} params object that has the following keys: + * ```typescript + * { + * bucketName: string + * origPath: string //path to the file that you want to add, e.g. /home/user/Pictures/image1.jpg + * targetPath: string //path on the storage, you can add a path or only provide name of the file + * options?: object + * } + * ``` + * @returns {ResultObject} a promise that always resolves in a ResultObject: + * ```typescript + * { + * value: string | null + * error: string | null + * } + * ``` */ - addFileFromPath(paramObject: FilePathParams): Promise; + addFileFromPath(params: FilePathParams): Promise; /** * @paramObject params related to the file to be added @@ -98,10 +117,24 @@ export interface IStorage { addFileFromBuffer(paramObject: FileBufferParams): Promise; /** - * @paramObject params related to the file to be added - * @returns the public url to the file + * @param {FileStreamParams} params object that contains the following keys: + * ```typescript + * { + * bucketName: string + * readable: Readable // stream from the local file, e.g. 
fs.createReadStream(path) + * targetPath: string // path on the storage, you can add a path or only provide name of the file + * options?: object + * } + * ``` + * @returns {ResultObject} a promise that always resolves in a ResultObject + * ```typescript + * { + * value: string | null // if success value is the public url to the file + * error: string | null // if fails error is the error message + * } + * ``` */ - addFileFromReadable(paramObject: FileStreamParams): Promise; + addFileFromStream(params: FileStreamParams): Promise; /** * @param bucketName name of the bucket where the file is stored @@ -109,14 +142,14 @@ export interface IStorage { * @param start? the byte of the file where the stream starts (default: 0) * @param end? the byte in the file where the stream ends (default: last byte of file) */ - getFileAsReadable( + getFileAsStream( bucketName: string, fileName: string, options?: { start?: number; end?: number; } - ): Promise; + ): Promise; /** * @param bucketName name of the bucket where the file is stored @@ -354,7 +387,7 @@ export type ResultObjectFilesB2 = { value: Array | null; }; -export type ResultObjectReadable = { +export type ResultObjectStream = { error: string | null; value: Readable | null; }; @@ -397,3 +430,49 @@ export type FileStreamParams = { targetPath: string; options?: object; }; + +/** + * @paramObject FilePath + * @param {string} FilePath.bucketName + * @param {string} FilePath.origPath - path to the file that you want to add, e.g. 
/home/user/Pictures/image1.jpg + * @param {string} FilePath.targetPath - path on the storage, you can add a path or only provide name of the file + * @param {object} FilePath.options + * @returns {ResultObject} + */ + +/** + * @paramObject FileBufferParams + * @param {string} FilePath.bucketName + * @param {Buffer} FilePath.buffer - buffer + * @param {string} FilePath.targetPath - path on the storage, you can add a path or only provide name of the file + * @param {object} FilePath.options + * @returns {ResultObject} + */ + +/** + * @typedef {Object} FilePathParams + * @property {string} bucketName + * @property {string} origPath - path to the file that you want to add, e.g. /home/user/Pictures/image1.jpg + * @property {string} targetPath - path on the storage, you can add a path or only provide name of the file + * @property {Object} options + */ + +/** + * @typedef {Object} ResultObject + * @property {string | null} value + * @property {string | null} error + */ + +/** + * @param {FilePathParams} params + * @returns {ResultObject} result + */ + +/** + * Params for adding a file to the storage + * @typedef {Object} FilePathParams + * @property {string} bucketName + * @property {string} origPath - path to the file that you want to add, e.g. 
/home/user/Pictures/image1.jpg + * @property {string} argetPath - path on the storage, you can add a path or only provide name of the file + * @property {Object} options + */ diff --git a/tests/testB2.ts b/tests/testB2.ts index 5825bda..a2166ff 100644 --- a/tests/testB2.ts +++ b/tests/testB2.ts @@ -1,8 +1,12 @@ +import fs from "fs"; import B2 from "backblaze-b2"; import dotenv from "dotenv"; import { AdapterBackblazeB2 } from "../src/AdapterBackblazeB2"; import { StorageType } from "../src/types"; import { Storage } from "../src/Storage"; +import { Readable } from "stream"; +import { copyFile } from "./util"; +import path from "path"; dotenv.config(); @@ -15,6 +19,15 @@ const configBackblaze = { bucketName: process.env.BUCKET_NAME, }; +function streamToString(stream: Readable) { + const chunks: Array = []; + return new Promise((resolve, reject) => { + stream.on("data", (chunk) => chunks.push(Buffer.from(chunk))); + stream.on("error", (err) => reject(err)); + stream.on("end", () => resolve(Buffer.concat(chunks).toString("utf8"))); + }); +} + async function testB2() { const storage = new Storage(configBackblaze); // const storage = new Storage("opt://configBackblaze"); @@ -53,10 +66,6 @@ async function testB2() { // const data = await storage.listBuckets(); // console.timeEnd("listBuckets"); - console.time("listFiles"); - const data2 = await storage.listFiles("the-buck"); - console.timeEnd("listFiles"); - // console.time("listFileNames"); // const data3 = await storage.listFileNames("the-buck"); // console.timeEnd("listFileNames"); @@ -64,17 +73,43 @@ async function testB2() { // const url = await storage.getFileAsURL("the-buck", "input.txt"); // console.log(url); - console.time("addFileFromPath"); - const data3 = await storage.addFile({ + // console.time("addFileFromPath"); + // const data3 = await storage.addFileFromPath({ + // bucketName: "the-buck", + // origPath: `${process.cwd()}/tests/data/image2.jpg`, + // targetPath: "test/image1.jpg", + // }); + // 
console.timeEnd("addFileFromPath"); + + console.time("addFileFromStream"); + const data4 = await storage.addFileFromStream({ bucketName: "the-buck", - origPath: `${process.cwd()}/tests/data/image2.jpg`, - targetPath: "test/image1.jpg", + stream: fs.createReadStream("./tests/data/image2.jpg"), + targetPath: "test/image2.jpg", }); - console.timeEnd("addFileFromPath"); + console.timeEnd("addFileFromStream"); + + console.time("listFiles"); + const data2 = await storage.listFiles("the-buck"); + console.log(data2); + console.timeEnd("listFiles"); + + console.time("getFileAsStream"); + const data = await storage.getFileAsStream("the-buck", "test/image2.jpg"); + const filePath = path.join(process.cwd(), "tests", `test-${storage.getType()}.jpg`); + const writeStream = fs.createWriteStream(filePath); + if (data.value !== null) { + const { value: readStream } = data; + await copyFile(readStream, writeStream); + } + + // fs.createWriteStream(filePath); + // console.log(data5); + console.timeEnd("getFileAsStream"); - console.time("deleteBucket"); - const r2 = await storage.deleteBucket("the-buck"); - console.timeEnd("deleteBucket"); + // console.time("deleteBucket"); + // const r2 = await storage.deleteBucket("the-buck"); + // console.timeEnd("deleteBucket"); } async function testB2_2() { From 38039394a5c6124b91c2836956b947f9f1d65662 Mon Sep 17 00:00:00 2001 From: abudaan Date: Fri, 24 Nov 2023 00:08:20 +0100 Subject: [PATCH 16/26] wip adapter S3 --- changelog.md | 2 + src/AbstractAdapter.ts | 30 +- src/AdapterAmazonS3.ts | 433 ++++++++++++++--------------- src/AdapterBackblazeB2.ts | 77 +++-- src/Storage.ts | 6 + src/types.ts | 52 ++-- tests/test-config-local.jasmine.ts | 53 ++-- tests/testB2.ts | 42 +-- 8 files changed, 332 insertions(+), 363 deletions(-) diff --git a/changelog.md b/changelog.md index 8d13e4a..582b007 100644 --- a/changelog.md +++ b/changelog.md @@ -44,8 +44,10 @@ - The storage instance will also no longer hold a reference to all available buckets; a call to 
`listBuckets` will access the cloud storage service every time it is called; this is handy in case another process or user has created a new bucket. - `validateName` will not only perform a local check, it will also check if the name is valid and/or not taken at the cloud storage service. - `createBucket` resolves with an error when that bucket already exists +- ~~`deleteBucket` has been renamed to `removeBucket` (analogue to `removeFile`)~~ - `removeFile` has an additional optional boolean argument `allVersions`; if set to true all version of the specified file will be removed. Default: false - `addFile` is added; you can use this method whenever you use `addFileFromPath`, `addFileFromBuffer` or `addFileFromReadable` +- `getConfig()` and `getType()` are implemented as getter as well, resp.: `storage.config` and `storage.type` ### Old API (1.5.x) compared to new API (2.x) diff --git a/src/AbstractAdapter.ts b/src/AbstractAdapter.ts index e6dae77..333a27f 100644 --- a/src/AbstractAdapter.ts +++ b/src/AbstractAdapter.ts @@ -13,41 +13,29 @@ import { } from "./types"; export abstract class AbstractAdapter implements IStorage { - // protected type: StorageType; - protected type: string; + protected _type = "abstract-adapter"; protected conf: AdapterConfig; - getType(): string { - return this.type; + get type(): string { + return this._type; } - public get config(): AdapterConfig { + get config(): AdapterConfig { return this.conf; } - public getConfiguration(): AdapterConfig { - return this.conf; + getType(): string { + return this._type; } - /** - * @paramObject FilePath - * @param {string} FilePath.bucketName - * @param {string} FilePath.origPath - path to the file that you want to add, e.g. 
/home/user/Pictures/image1.jpg - * @param {string} FilePath.targetPath - path on the storage, you can add a path or only provide name of the file - * @param {object} FilePath.options - */ + getConfiguration(): AdapterConfig { + return this.conf; + } async addFileFromPath(params: FilePathParams): Promise { return await this.addFile(params); } - /** - * @paramObject FileBufferParams - * @param {string} FilePath.bucketName - * @param {Buffer} FilePath.buffer - buffer - * @param {string} FilePath.targetPath - path on the storage, you can add a path or only provide name of the file - * @param {object} FilePath.options - */ async addFileFromBuffer(params: FileBufferParams): Promise { return await this.addFile(params); } diff --git a/src/AdapterAmazonS3.ts b/src/AdapterAmazonS3.ts index 0c207ef..fdec933 100644 --- a/src/AdapterAmazonS3.ts +++ b/src/AdapterAmazonS3.ts @@ -13,53 +13,56 @@ import { HeadBucketCommand, HeadObjectCommand, ListBucketsCommand, + ListObjectVersionsCommand, // ListObjectVersionsCommand, ListObjectsCommand, PutObjectCommand, S3Client, } from "@aws-sdk/client-s3"; import { getSignedUrl } from "@aws-sdk/s3-request-presigner"; -import { ConfigAmazonS3, AdapterConfig, StorageType, S3Compatible } from "./types"; +import { + ConfigAmazonS3, + AdapterConfig, + StorageType, + S3Compatible, + ResultObjectStream, + ResultObject, + ResultObjectBuckets, + FileBufferParams, + FilePathParams, + FileStreamParams, + ResultObjectFiles, + ResultObjectNumber, + ResultObjectBoolean, +} from "./types"; import { parseUrl } from "./util"; export class AdapterAmazonS3 extends AbstractAdapter { - protected type = StorageType.S3; + protected _type = StorageType.S3; + protected conf: ConfigAmazonS3; + private configError: string | null = null; private storage: S3Client; - private bucketNames: string[] = []; - private region: string = ""; private s3Compatible: S3Compatible = S3Compatible.Amazon; - protected conf: ConfigAmazonS3; constructor(config: string | AdapterConfig) { 
super(); this.conf = this.parseConfig(config as ConfigAmazonS3); - if (typeof this.conf.bucketName !== "undefined" && this.conf.bucketName !== "") { - const msg = this.validateName(this.conf.bucketName); - if (msg !== null) { - throw new Error(msg); - } - this.bucketName = this.conf.bucketName; - } + // handle small differences in supported S3 compatible storages if (typeof (this.conf as ConfigAmazonS3).region === "undefined") { if (this.s3Compatible === S3Compatible.R2) { this.conf.region = "auto"; - this.region = this.conf.region; } else if (this.s3Compatible === S3Compatible.Backblaze) { let ep = this.conf.endpoint; ep = ep.substring(ep.indexOf("s3.") + 3); this.conf.region = ep.substring(0, ep.indexOf(".")); - // console.log(this.config.region); - this.region = this.conf.region; } - } else { - this.region = (this.conf as ConfigAmazonS3).region; } if (typeof this.conf.endpoint === "undefined") { - this.storage = new S3Client({ region: this.region }); + this.storage = new S3Client({ region: this.conf.region }); } else { this.storage = new S3Client({ - region: this.region, + region: this.conf.region, endpoint: this.conf.endpoint, credentials: { accessKeyId: this.conf.accessKeyId, @@ -69,27 +72,14 @@ export class AdapterAmazonS3 extends AbstractAdapter { } } - async init(): Promise { - if (this.initialized) { - return Promise.resolve(true); - } - if (this.bucketName) { - await this.createBucket(this.bucketName) - .then((_data) => { - this.bucketNames.push(this.bucketName); - }) - .catch((message: string) => { - return Promise.reject(message); - }); - } - // no further initialization required - this.initialized = true; - return Promise.resolve(true); - } - - private parseConfig(config: string | ConfigAmazonS3): ConfigAmazonS3 { + private parseConfig(config: string | ConfigAmazonS3): ConfigAmazonS3 | null { let cfg: ConfigAmazonS3; if (typeof config === "string") { + const { value, error } = parseUrl(config); + if (error) { + this.configError = error; + return null; 
+ } const { type, part1: accessKeyId, @@ -97,7 +87,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { part3: region, bucketName, queryString, - } = parseUrl(config); + } = value; cfg = { type, accessKeyId, @@ -115,9 +105,9 @@ export class AdapterAmazonS3 extends AbstractAdapter { } if (!cfg.accessKeyId || !cfg.secretAccessKey) { - throw new Error( - "You must specify a value for both 'applicationKeyId' and 'applicationKey' for storage type 's3'" - ); + this.configError = + "You must specify a value for both 'applicationKeyId' and 'applicationKey' for storage type 's3'"; + return null; } if (typeof cfg.endpoint !== "undefined") { @@ -128,49 +118,58 @@ export class AdapterAmazonS3 extends AbstractAdapter { } } if (!cfg.region && this.s3Compatible === S3Compatible.Amazon) { - throw new Error("You must specify a default region for storage type 's3'"); + this.configError = "You must specify a default region for storage type 's3'"; + return null; } return cfg; } async getFileAsStream( + bucketName: string, fileName: string, options: { start?: number; end?: number } = { start: 0 } - ): Promise { + ): Promise { + if (this.configError !== null) { + return { error: this.configError, value: null }; + } + const params = { - Bucket: this.bucketName, + Bucket: bucketName, Key: fileName, Range: `bytes=${options.start}-${options.end || ""}`, }; const command = new GetObjectCommand(params); - const response = await this.storage.send(command); - return response.Body as Readable; + try { + const response = await this.storage.send(command); + return { value: response.Body as Readable, error: null }; + } catch (e) { + return { value: null, error: e.code }; + } } - async removeFile(fileName: string): Promise { + async removeFile(bucketName: string, fileName: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } + const input = { - Bucket: this.bucketName, + Bucket: bucketName, Key: fileName, }; const command = new 
DeleteObjectCommand(input); - const response = await this.storage.send(command); - // console.log(response); - return "file removed"; - } - - // util members - - async createBucket(name: string, options: object = {}): Promise { - // return Promise.reject("oops"); - const msg = this.validateName(name); - if (msg !== null) { - return Promise.reject(msg); + try { + const response = await this.storage.send(command); + return { value: "ok", error: null }; + } catch (e) { + return { value: null, error: e.code }; } + } - if (this.bucketNames.findIndex((b) => b === name) !== -1) { - return "bucket exists"; + async createBucket(name: string, options: object = {}): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } try { @@ -180,10 +179,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { const command = new HeadBucketCommand(input); const response = await this.storage.send(command); if (response.$metadata.httpStatusCode === 200) { - // console.log("response", response); - this.bucketNames.push(name); - this.bucketName = name; - return "bucket exists"; + return { error: "bucket exists", value: null }; } } catch (_e) { // this error simply means that the bucket doesn't exist yet @@ -206,227 +202,228 @@ export class AdapterAmazonS3 extends AbstractAdapter { const response = await this.storage.send(command); // console.log("response", response); if (response.$metadata.httpStatusCode === 200) { - this.bucketNames.push(name); - this.bucketName = name; - return "bucket created"; + return { value: "ok", error: null }; + } else { + return { + error: `Error http status code ${response.$metadata.httpStatusCode}`, + value: null, + }; } - } catch (e) { - return Promise.reject(e.message); - } - } - - async selectBucket(name: string | null): Promise { - // add check if bucket exists! 
- if (!name) { - this.bucketName = ""; - return `bucket '${name}' deselected`; + } catch (error) { + return { error, value: null }; } - await this.createBucket(name); - this.bucketName = name; - return `bucket '${name}' selected`; } - async clearBucket(name?: string): Promise { - const n = name || this.bucketName; - - if (!n) { - throw new Error("no bucket selected"); + async clearBucket(name: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } - /* - const input1 = { - Bucket: n, - MaxKeys: 1000, - }; - const command = new ListObjectVersionsCommand(input1); - const { Versions } = await this.storage.send(command); - // console.log("Versions", Versions); - if (typeof Versions === "undefined") { - return "bucket is empty"; - } - const input2 = { - Bucket: n, - Delete: { - Objects: Versions.map((value) => ({ - Key: value.Key, - VersionId: value.VersionId, - })), - Quiet: false, - }, - }; - const command2 = new DeleteObjectsCommand(input2); - const response = await this.storage.send(command2); - return "bucket cleared"; - */ - - const input1 = { - Bucket: n, - MaxKeys: 1000, - }; - const command1 = new ListObjectsCommand(input1); - const response1 = await this.storage.send(command1); - const Contents = response1.Contents; + try { + const input1 = { + Bucket: name, + MaxKeys: 1000, + }; + const command = new ListObjectVersionsCommand(input1); + const { Versions } = await this.storage.send(command); + // console.log("Versions", Versions); + if (typeof Versions === "undefined") { + return { value: "bucket is empty", error: null }; + } - if (!Contents || Contents.length === 0) { - return; + try { + const input2 = { + Bucket: name, + Delete: { + Objects: Versions.map((value) => ({ + Key: value.Key, + VersionId: value.VersionId, + })), + Quiet: false, + }, + }; + const command2 = new DeleteObjectsCommand(input2); + await this.storage.send(command2); + return { value: "ok", error: null }; + } catch (e) { + return { value: 
null, error: e.code }; + } + } catch (e) { + return { value: null, error: e.code }; } - // console.log(Contents); - const input2 = { - Bucket: n, - Delete: { - Objects: Contents.map((value) => ({ Key: value.Key })), - Quiet: false, - }, - }; - const command2 = new DeleteObjectsCommand(input2); - const response = await this.storage.send(command2); - // console.log(response); - return "bucket cleared"; } - async deleteBucket(name?: string): Promise { - const n = name || this.bucketName; - // console.log("deleteBucket", n); - - if (n === "") { - throw new Error("deleteBucket: no bucket selected"); + async deleteBucket(name: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } + try { const input = { - Bucket: n, + Bucket: name, }; const command = new DeleteBucketCommand(input); const response = await this.storage.send(command); // console.log(response); - - if (n === this.bucketName) { - this.bucketName = ""; - } - // console.log("selected bucket", this.bucketName); - this.bucketNames = this.bucketNames.filter((b) => b !== n); - - return "bucket deleted"; + return { value: "ok", error: null }; } catch (e) { if (e.code === "NoSuchBucket") { - throw new Error("bucket not found"); + return { value: "bucket not found", error: null }; } - throw e; + return { value: "bucket not found", error: e.code }; } } - async listBuckets(): Promise { + async listBuckets(): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } + const input = {}; const command = new ListBucketsCommand(input); return this.storage .send(command) .then((response) => { - this.bucketNames = response.Buckets?.map((d) => d?.Name); - return this.bucketNames; + const bucketNames = response.Buckets?.map((d) => d?.Name); + return { value: bucketNames, error: null }; }) .catch((e) => { - console.log("[ERROR listBuckets]", e); - return []; + return { value: null, error: e.code }; }); } - protected async store(buffer: 
Buffer, targetPath: string, options: object): Promise; - protected async store(stream: Readable, targetPath: string, options: object): Promise; - protected async store(origPath: string, targetPath: string, options: object): Promise; - protected async store( - arg: string | Buffer | Readable, - targetPath: string, - options: object - ): Promise { - if (!this.bucketName) { - throw new Error("no bucket selected"); + public async addFile( + params: FilePathParams | FileBufferParams | FileStreamParams + ): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } + + let { options } = params; if (typeof options !== "object") { options = {}; } - await this.createBucket(this.bucketName); - - const input = { - ...options, - Bucket: this.bucketName, - Key: targetPath, - Body: arg, - }; - if (typeof arg === "string") { - if (!fs.existsSync(arg)) { - throw new Error(`File with given path: ${arg}, was not found`); + let fileData: Readable | Buffer; + if (typeof (params as FilePathParams).origPath !== "undefined") { + const f = (params as FilePathParams).origPath; + if (!fs.existsSync(f)) { + return { value: null, error: `File with given path: ${f}, was not found` }; } - input.Body = fs.createReadStream(arg); + fileData = fs.createReadStream(f); + } else if (typeof (params as FileBufferParams).buffer !== "undefined") { + fileData = (params as FileBufferParams).buffer; + } else if (typeof (params as FileStreamParams).stream !== "undefined") { + fileData = (params as FileStreamParams).stream; } - const command = new PutObjectCommand(input); - const response = await this.storage.send(command); - - if (this.region !== "") { + try { const input = { - Bucket: this.bucketName, + Bucket: params.bucketName, + Key: params.targetPath, + Body: fileData, + ...options, }; - const command = new GetBucketLocationCommand(input); + const command = new PutObjectCommand(input); const response = await this.storage.send(command); - this.region = 
response.LocationConstraint; + return this.getFileAsURL(params.bucketName, params.targetPath); + } catch (e) { + return { value: null, error: e.code }; } - // return `https://${this.bucketName}.s3.${this.region}.amazonaws.com/${targetPath}`; - return await getSignedUrl( + } + + async getFileAsURL(bucketName: string, fileName: string): Promise { + return getSignedUrl( this.storage, - new GetObjectCommand({ Bucket: this.bucketName, Key: targetPath }) + new GetObjectCommand({ + Bucket: bucketName, + Key: fileName, + }) // { expiresIn: 3600 } - ); + ) + .then((url: string) => { + return { value: url, error: null }; + }) + .catch((e) => { + return { value: null, error: e.code }; + }); } - async listFiles(maxFiles: number = 1000): Promise<[string, number][]> { - if (!this.bucketName) { - throw new Error("no bucket selected"); + async listFiles(bucketName: string, maxFiles: number = 1000): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } + try { + const input = { + Bucket: bucketName, + MaxKeys: maxFiles, + }; + const command = new ListObjectsCommand(input); + const response = await this.storage.send(command); + const { Contents } = response; + if (!Contents) { + return { value: [], error: null }; + } + return { value: Contents.map((o) => [o.Key, o.Size]), error: null }; + } catch (e) { + return { value: null, error: e.code }; + } + } - const input = { - Bucket: this.bucketName, - MaxKeys: maxFiles, - }; - const command = new ListObjectsCommand(input); - const response = await this.storage.send(command); - const { Contents } = response; - if (!Contents) { - return []; + async sizeOf(bucketName: string, fileName: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } + + try { + const input = { + Bucket: bucketName, + Key: fileName, + }; + const command = new HeadObjectCommand(input); + const response = await this.storage.send(command); + return { value: 
response.ContentLength, error: null }; + } catch (e) { + return { value: null, error: e.code }; } - return Contents.map((o) => [o.Key, o.Size]) as [string, number][]; } - async sizeOf(name: string): Promise { - if (!this.bucketName) { - throw new Error("no bucket selected"); + async bucketExists(bucketName: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } const input = { - Bucket: this.bucketName, - Key: name, + Bucket: bucketName, }; - const command = new HeadObjectCommand(input); - const response = await this.storage.send(command); - return response.ContentLength; + const command = new HeadBucketCommand(input); + return this.storage + .send(command) + .then(() => { + return { value: true, error: null }; + }) + .catch(() => { + return { value: false, error: null }; + }); } - async fileExists(name: string): Promise { - if (!this.bucketName) { - throw new Error("no bucket selected"); + async fileExists(bucketName: string, fileName: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } const input = { - Bucket: this.bucketName, - Key: name, + Bucket: bucketName, + Key: fileName, }; const command = new HeadObjectCommand(input); return this.storage .send(command) .then(() => { - return true; + return { value: true, error: null }; }) .catch(() => { - return false; + return { value: false, error: null }; }); } } diff --git a/src/AdapterBackblazeB2.ts b/src/AdapterBackblazeB2.ts index dc2d781..75055fb 100644 --- a/src/AdapterBackblazeB2.ts +++ b/src/AdapterBackblazeB2.ts @@ -27,7 +27,7 @@ import { parseUrl, validateName } from "./util"; require("@gideo-llc/backblaze-b2-upload-any").install(B2); export class AdapterBackblazeB2 extends AbstractAdapter { - protected type = StorageType.B2; + protected _type = StorageType.B2; private storage: B2; private authorized: boolean = false; private configError: string | null = null; @@ -179,9 +179,8 @@ export class 
AdapterBackblazeB2 extends AbstractAdapter { return { value: null, error: `Could not find file "${name}" in bucket "${bucketName}".` }; } - /** - * Called by addFileFromPath, addFileFromBuffer and addFileFromReadable - */ + // public API + public async addFile( params: FilePathParams | FileBufferParams | FileStreamParams ): Promise { @@ -230,41 +229,6 @@ export class AdapterBackblazeB2 extends AbstractAdapter { }); } - // probably not necessary; may be a little bit more lightweight compared to listFileVersions - // if you don't have file versions - public async listFileNames(bucketName: string): Promise { - const { error } = await this.authorize(); - if (error !== null) { - return { error, value: null }; - } - - const data = await this.getBucket(bucketName); - if (data.error !== null) { - return { error: data.error, value: null }; - } - - const { value: bucket } = data; - return this.storage - .listFileNames({ bucketId: bucket.id }) - .then(({ data: { files } }) => { - // console.log(files); - return { - error: null, - value: files.map(({ fileName }) => { - return fileName; - }), - }; - }) - .catch((r: BackblazeAxiosResponse) => { - return { - error: r.response.data.message, - value: null, - }; - }); - } - - // public API - public async getFileAsStream( bucketName: string, fileName: string, @@ -291,7 +255,7 @@ export class AdapterBackblazeB2 extends AbstractAdapter { }, }, }) - .then((r) => { + .then((r: { data: Readable }) => { return { error: null, value: r.data }; }); } @@ -512,4 +476,37 @@ export class AdapterBackblazeB2 extends AbstractAdapter { return { error: null, value: false }; } } + + // probably not necessary; may be a little bit more lightweight compared to listFileVersions + // if you don't have file versions + public async listFileNames(bucketName: string): Promise { + const { error } = await this.authorize(); + if (error !== null) { + return { error, value: null }; + } + + const data = await this.getBucket(bucketName); + if (data.error !== null) { + 
return { error: data.error, value: null }; + } + + const { value: bucket } = data; + return this.storage + .listFileNames({ bucketId: bucket.id }) + .then(({ data: { files } }) => { + // console.log(files); + return { + error: null, + value: files.map(({ fileName }) => { + return fileName; + }), + }; + }) + .catch((r: BackblazeAxiosResponse) => { + return { + error: r.response.data.message, + value: null, + }; + }); + } } diff --git a/src/Storage.ts b/src/Storage.ts index 7eb37f2..6365794 100644 --- a/src/Storage.ts +++ b/src/Storage.ts @@ -12,6 +12,8 @@ import { ResultObjectNumber, ResultObjectBoolean, } from "./types"; +import { AdapterGoogleCloudStorage } from "./AdapterGoogleCloudStorage"; +import { AdapterLocal } from "./AdapterLocal"; // add new storage adapters here const adapterClasses = { @@ -46,6 +48,10 @@ export class Storage implements IStorage { this.switchAdapter(config); } + get type(): string { + return this.adapter.getType(); + } + public getType(): string { return this.adapter.getType(); } diff --git a/src/types.ts b/src/types.ts index 16a2a28..86e9ddc 100644 --- a/src/types.ts +++ b/src/types.ts @@ -18,6 +18,12 @@ export interface IStorage { */ getType(): string; + /** + * Same as `getType` but implemented as getter + * @returns adapter tyoe + */ + type: string; + /** * Returns configuration settings that you've provided when instantiating as an object. * Use this only for debugging and with great care as it may expose sensitive information. 
@@ -74,21 +80,11 @@ export interface IStorage { listBuckets(): Promise; /** - * @paramObject {filePathParams | FileBufferParams | FileStreamParams} - params related to the file to be added + * @param {filePathParams | FileBufferParams | FileStreamParams} params related to the file to be added * @returns the public url to the file + * Called internally by addFileFromPath, addFileFromBuffer and addFileFromReadable */ - /* no need to overload method anymore */ - // addFile(paramObject: FilePathParams): Promise; - // addFile(paramObject: FileBufferParams): Promise; - // addFile(paramObject: FileStreamParams): Promise; - addFile(paramObject: FilePathParams | FileBufferParams | FileStreamParams): Promise; - - /** - * @paramObject {filePathParams} - params related to the file to be added - * @returns the public url to the file - */ - // no need to overload method anymore - addFile(paramObject: FilePathParams): Promise; + addFile(params: FilePathParams | FileBufferParams | FileStreamParams): Promise; /** * @param {FilePathParams} params object that has the following keys: @@ -111,10 +107,13 @@ export interface IStorage { addFileFromPath(params: FilePathParams): Promise; /** - * @paramObject params related to the file to be added - * @returns the public url to the file + * @param {FileBufferParams} params + * @property {string} FilePath.bucketName + * @property {Buffer} FilePath.buffer - buffer + * @property {string} FilePath.targetPath - path on the storage, you can add a path or only provide name of the file + * @property {object} FilePath.options */ - addFileFromBuffer(paramObject: FileBufferParams): Promise; + addFileFromBuffer(params: FileBufferParams): Promise; /** * @param {FileStreamParams} params object that contains the following keys: @@ -212,25 +211,26 @@ export type JSON = { | boolean[] | { [id: string]: JSON }; }; + +// export interface Options { +// [key: string]: string | number | boolean; +// } + +export type GenericKey = undefined | number | string | 
boolean | number[] | string[] | boolean[]; + export interface IAdapterConfig { // type: StorageType; type: string; skipCheck?: boolean; bucketName?: string; + // [id: string]: GenericKey; } -export type GenericKey = number | string | boolean | number[] | string[] | boolean[]; - export interface ConfigAmazonS3 extends IAdapterConfig { accessKeyId?: string; secretAccessKey?: string; region?: string; endpoint?: string; - useDualstack?: boolean; - maxRetries?: number; - maxRedirects?: number; - sslEnabled?: boolean; - [id: string]: GenericKey; } export interface ConfigAzureStorageBlob extends IAdapterConfig { @@ -241,7 +241,6 @@ export interface ConfigAzureStorageBlob extends IAdapterConfig { export interface ConfigBackblazeB2 extends IAdapterConfig { applicationKeyId?: string; applicationKey?: string; - // [id: string]: GenericKey; } export interface ConfigGoogleCloud extends IAdapterConfig { @@ -463,11 +462,6 @@ export type FileStreamParams = { * @property {string | null} error */ -/** - * @param {FilePathParams} params - * @returns {ResultObject} result f - */ - /** * Params for adding a file to the storage * @typedef {Object} FilePathParams diff --git a/tests/test-config-local.jasmine.ts b/tests/test-config-local.jasmine.ts index 47a7609..f2a3450 100644 --- a/tests/test-config-local.jasmine.ts +++ b/tests/test-config-local.jasmine.ts @@ -5,13 +5,6 @@ import { rimraf } from "rimraf"; import { Storage } from "../src/Storage"; import { StorageType, ConfigLocal } from "../src/types"; -// describe("test jasmine", () => { -// it("weird", () => { -// expect("false").toBeTruthy(); -// expect("0").toBeTruthy(); -// }); -// }); - describe(`testing local urls`, () => { beforeAll(async () => { // override Linux's umask 0o002 @@ -20,37 +13,36 @@ describe(`testing local urls`, () => { it("[0]", async () => { const storage = new Storage("local://tests/tmp/the-buck?param=value"); - await storage.init(); - expect(storage.getType()).toBe(StorageType.LOCAL); - 
expect(storage.getSelectedBucket()).toBe("the-buck"); + expect(storage.type).toBe(StorageType.LOCAL); + expect((storage.config as ConfigLocal).directory).toBe("the-tests/tmp/"); + expect(storage.config.bucketName).toBe("the-buck"); }); it("[0a]", async () => { const storage = new Storage("local://the-buck?param=value"); - await storage.init(); - expect(storage.getType()).toBe(StorageType.LOCAL); - expect(storage.getSelectedBucket()).toBe("the-buck"); + expect(storage.type).toBe(StorageType.LOCAL); + expect(storage.config.bucketName).toBe("the-buck"); + expect(storage.config["param"]).toBe("value"); }); it("[1]", async () => { const storage = new Storage("local://tests/tmp"); - await storage.init(); - expect(storage.getSelectedBucket()).toBe("tmp"); + expect(storage.config.bucketName).toBe("tmp"); }); it("[2] store in folder where process runs", async () => { const storage = new Storage(`local://${process.cwd()}/the-buck`); - await storage.init(); - expect(storage.getSelectedBucket()).toBe("the-buck"); + expect((storage.config as ConfigLocal).directory).toBe(process.cwd()); }); it("[3]", async () => { const storage = new Storage({ type: StorageType.LOCAL, directory: "tests/tmp/the-buck", + // versioning: true, }); - await storage.init(); expect(storage.getType()).toBe(StorageType.LOCAL); - expect(storage.getSelectedBucket()).toBe(""); + expect(storage.config.bucketName).toBe(undefined); + // expect(storage.config.versioning).toBe(undefined); }); it("[4]", async () => { @@ -59,9 +51,8 @@ describe(`testing local urls`, () => { directory: "tests/tmp", bucketName: "the-buck", }); - await storage.init(); expect(storage.getType()).toBe(StorageType.LOCAL); - expect(storage.getSelectedBucket()).toBe("the-buck"); + expect(storage.config.bucketName).toBe("the-buck"); }); it("[5] numeric values in options stay numeric and keep their radix (8)", async () => { @@ -70,7 +61,6 @@ describe(`testing local urls`, () => { directory: "tests/tmp", mode: 0o777, }); - await 
storage.init(); expect((storage.getConfiguration() as ConfigLocal).mode).toBe(0o777); }); @@ -80,14 +70,12 @@ describe(`testing local urls`, () => { directory: "tests/tmp", mode: 511, }); - await storage.init(); expect((storage.getConfiguration() as ConfigLocal).mode).toBe(511); }); it("[6] string values in options stay string values (will be converted when used in code when necessary)", async () => { const storage = new Storage("local://tests/tmp?mode=0o777"); - await storage.init(); - expect(storage.getSelectedBucket()).toBe("tmp"); + expect(storage.config.bucketName).toBe("tmp"); expect((storage.getConfiguration() as ConfigLocal).mode).toBe("0o777"); const mode = (await fs.promises.stat(path.join(process.cwd(), "tests", "tmp"))).mode; expect(mode.toString(8)).toBe("40777"); @@ -96,8 +84,7 @@ describe(`testing local urls`, () => { it("[6a]", async () => { const storage = new Storage("local://tests/tmp?mode=777"); - await storage.init(); - expect(storage.getSelectedBucket()).toBe("tmp"); + expect(storage.config.bucketName).toBe("tmp"); expect((storage.getConfiguration() as ConfigLocal).mode).toBe("777"); const mode = (await fs.promises.stat(path.join(process.cwd(), "tests", "tmp"))).mode; expect(mode.toString(8)).toBe("40777"); @@ -111,8 +98,7 @@ describe(`testing local urls`, () => { bucketName: "the-buck", mode: 0o777, }); - await storage.init(); - expect(storage.getSelectedBucket()).toBe("the-buck"); + expect(storage.config.bucketName).toBe("the-buck"); expect((storage.getConfiguration() as ConfigLocal).mode).toBe(511); const mode = (await fs.promises.stat(path.join(process.cwd(), "tests", "tmp", "the-buck"))) .mode; @@ -127,8 +113,7 @@ describe(`testing local urls`, () => { bucketName: "the-buck", mode: 511, }); - await storage.init(); - expect(storage.getSelectedBucket()).toBe("the-buck"); + expect(storage.config.bucketName).toBe("the-buck"); expect((storage.getConfiguration() as ConfigLocal).mode).toBe(511); const mode = (await 
fs.promises.stat(path.join(process.cwd(), "tests", "tmp", "the-buck"))) .mode; @@ -138,8 +123,7 @@ describe(`testing local urls`, () => { it("[6e]", async () => { const storage = new Storage("local://tests/tmp?mode=0o777"); - await storage.init(); - expect(storage.getSelectedBucket()).toBe("tmp"); + expect(storage.config.bucketName).toBe("tmp"); expect((storage.getConfiguration() as ConfigLocal).mode).toBe("0o777"); const mode = (await fs.promises.stat(path.join(process.cwd(), "tests", "tmp"))).mode; expect(mode.toString(8)).toBe("40777"); @@ -154,8 +138,7 @@ describe(`testing local urls`, () => { bucketName: "tmp", mode: "755", // this is an error! the parseMode function will return the default value 0o777 }); - await storage.init(); - expect(storage.getSelectedBucket()).toBe("tmp"); + expect(storage.config.bucketName).toBe("tmp"); expect((storage.getConfiguration() as ConfigLocal).mode).toBe("755"); const mode = (await fs.promises.stat(path.join(process.cwd(), "tests", "tmp"))).mode; expect(mode.toString(8)).toBe("40777"); diff --git a/tests/testB2.ts b/tests/testB2.ts index a2166ff..aeb648a 100644 --- a/tests/testB2.ts +++ b/tests/testB2.ts @@ -7,16 +7,18 @@ import { Storage } from "../src/Storage"; import { Readable } from "stream"; import { copyFile } from "./util"; import path from "path"; +import { ConfigBackblazeB2 } from "@tweedegolf/storage-abstraction"; dotenv.config(); const applicationKeyId = process.env.B2_APPLICATION_KEY_ID; const applicationKey = process.env.B2_APPLICATION_KEY; -const configBackblaze = { +const configBackblaze: ConfigBackblazeB2 = { type: StorageType.B2, applicationKeyId, applicationKey, bucketName: process.env.BUCKET_NAME, + versioning: true, }; function streamToString(stream: Readable) { @@ -47,11 +49,6 @@ async function testB2() { // console.log(response); // console.timeEnd("removeFile"); - // console.time("clearBucket"); - // const response = await storage.clearBucket("the-buck"); - // console.log(response); - // 
console.timeEnd("clearBucket"); - // console.time("fileExists"); // const response = await storage.fileExists("the-buck", "input.txt"); // console.timeEnd("fileExists"); @@ -89,23 +86,28 @@ async function testB2() { }); console.timeEnd("addFileFromStream"); - console.time("listFiles"); - const data2 = await storage.listFiles("the-buck"); - console.log(data2); - console.timeEnd("listFiles"); - - console.time("getFileAsStream"); - const data = await storage.getFileAsStream("the-buck", "test/image2.jpg"); - const filePath = path.join(process.cwd(), "tests", `test-${storage.getType()}.jpg`); - const writeStream = fs.createWriteStream(filePath); - if (data.value !== null) { - const { value: readStream } = data; - await copyFile(readStream, writeStream); - } + console.time("clearBucket"); + const response = await storage.clearBucket("the-buck"); + console.log(response); + console.timeEnd("clearBucket"); + + // console.time("listFiles"); + // const data2 = await storage.listFiles("the-buck"); + // console.log(data2); + // console.timeEnd("listFiles"); + + // console.time("getFileAsStream"); + // const data = await storage.getFileAsStream("the-buck", "test/image2.jpg"); + // const filePath = path.join(process.cwd(), "tests", `test-${storage.getType()}.jpg`); + // const writeStream = fs.createWriteStream(filePath); + // if (data.value !== null) { + // const { value: readStream } = data; + // await copyFile(readStream, writeStream); + // } // fs.createWriteStream(filePath); // console.log(data5); - console.timeEnd("getFileAsStream"); + // console.timeEnd("getFileAsStream"); // console.time("deleteBucket"); // const r2 = await storage.deleteBucket("the-buck"); From c3efd3dca2e41f7ee726c4626ad3c5a959fae7dc Mon Sep 17 00:00:00 2001 From: abudaan Date: Fri, 24 Nov 2023 09:59:51 +0100 Subject: [PATCH 17/26] wip Azure --- src/AbstractAdapter.ts | 8 +- src/AdapterAmazonS3.ts | 30 ++- src/AdapterAzureStorageBlob.ts | 412 ++++++++++++++++--------------- src/AdapterBackblazeB2.ts | 6 
+- src/AdapterGoogleCloudStorage.ts | 10 +- src/AdapterLocal.ts | 12 +- 6 files changed, 239 insertions(+), 239 deletions(-) diff --git a/src/AbstractAdapter.ts b/src/AbstractAdapter.ts index 333a27f..26dc291 100644 --- a/src/AbstractAdapter.ts +++ b/src/AbstractAdapter.ts @@ -14,22 +14,22 @@ import { export abstract class AbstractAdapter implements IStorage { protected _type = "abstract-adapter"; - protected conf: AdapterConfig; + protected _config: AdapterConfig; get type(): string { return this._type; } get config(): AdapterConfig { - return this.conf; + return this._config; } getType(): string { - return this._type; + return this.type; } getConfiguration(): AdapterConfig { - return this.conf; + return this.config; } async addFileFromPath(params: FilePathParams): Promise { diff --git a/src/AdapterAmazonS3.ts b/src/AdapterAmazonS3.ts index fdec933..feed396 100644 --- a/src/AdapterAmazonS3.ts +++ b/src/AdapterAmazonS3.ts @@ -8,13 +8,11 @@ import { DeleteBucketCommand, DeleteObjectCommand, DeleteObjectsCommand, - GetBucketLocationCommand, GetObjectCommand, HeadBucketCommand, HeadObjectCommand, ListBucketsCommand, ListObjectVersionsCommand, - // ListObjectVersionsCommand, ListObjectsCommand, PutObjectCommand, S3Client, @@ -39,34 +37,34 @@ import { parseUrl } from "./util"; export class AdapterAmazonS3 extends AbstractAdapter { protected _type = StorageType.S3; - protected conf: ConfigAmazonS3; + protected _config: ConfigAmazonS3; private configError: string | null = null; private storage: S3Client; private s3Compatible: S3Compatible = S3Compatible.Amazon; constructor(config: string | AdapterConfig) { super(); - this.conf = this.parseConfig(config as ConfigAmazonS3); + this._config = this.parseConfig(config as ConfigAmazonS3); // handle small differences in supported S3 compatible storages - if (typeof (this.conf as ConfigAmazonS3).region === "undefined") { + if (typeof (this._config as ConfigAmazonS3).region === "undefined") { if (this.s3Compatible === 
S3Compatible.R2) { - this.conf.region = "auto"; + this._config.region = "auto"; } else if (this.s3Compatible === S3Compatible.Backblaze) { - let ep = this.conf.endpoint; + let ep = this._config.endpoint; ep = ep.substring(ep.indexOf("s3.") + 3); - this.conf.region = ep.substring(0, ep.indexOf(".")); + this._config.region = ep.substring(0, ep.indexOf(".")); } } - if (typeof this.conf.endpoint === "undefined") { - this.storage = new S3Client({ region: this.conf.region }); + if (typeof this._config.endpoint === "undefined") { + this.storage = new S3Client({ region: this._config.region }); } else { this.storage = new S3Client({ - region: this.conf.region, - endpoint: this.conf.endpoint, + region: this._config.region, + endpoint: this._config.endpoint, credentials: { - accessKeyId: this.conf.accessKeyId, - secretAccessKey: this.conf.secretAccessKey, + accessKeyId: this._config.accessKeyId, + secretAccessKey: this._config.secretAccessKey, }, }); } @@ -192,9 +190,9 @@ export class AdapterAmazonS3 extends AbstractAdapter { ...options, }; // see issue: https://github.com/aws/aws-sdk-js/issues/3647 - if (typeof this.conf.region !== "undefined" && this.conf.region !== "us-east-1") { + if (typeof this._config.region !== "undefined" && this._config.region !== "us-east-1") { input.CreateBucketConfiguration = { - LocationConstraint: BucketLocationConstraint[this.conf.region.replace("-", "_")], + LocationConstraint: BucketLocationConstraint[this._config.region.replace("-", "_")], }; } diff --git a/src/AdapterAzureStorageBlob.ts b/src/AdapterAzureStorageBlob.ts index 96f6c72..df1567e 100644 --- a/src/AdapterAzureStorageBlob.ts +++ b/src/AdapterAzureStorageBlob.ts @@ -5,37 +5,40 @@ import { BlobGenerateSasUrlOptions, BlobSASPermissions, BlobServiceClient, - ContainerCreateOptions, StorageSharedKeyCredential, } from "@azure/storage-blob"; -import { ConfigAzureStorageBlob, AdapterConfig, StorageType } from "./types"; +import { + ConfigAzureStorageBlob, + StorageType, + 
ResultObjectStream, + ResultObject, + ResultObjectBuckets, + ResultObjectFiles, + ResultObjectNumber, + ResultObjectBoolean, + FileBufferParams, + FilePathParams, + FileStreamParams, +} from "./types"; import { parseUrl } from "./util"; import { CreateReadStreamOptions } from "@google-cloud/storage"; export class AdapterAzureStorageBlob extends AbstractAdapter { - protected type = StorageType.AZURE; - private storage: BlobServiceClient; - private bucketNames: string[] = []; + protected _type = StorageType.AZURE; private sharedKeyCredential: StorageSharedKeyCredential; + private configError: string | null = null; + private storage: BlobServiceClient; constructor(config: string | ConfigAzureStorageBlob) { super(); - this.conf = this.parseConfig(config as ConfigAzureStorageBlob); - // console.log(this.config); + this._config = this.parseConfig(config as ConfigAzureStorageBlob); - if (typeof this.conf.bucketName !== "undefined" && this.conf.bucketName !== "") { - const msg = this.validateName(this.conf.bucketName); - if (msg !== null) { - throw new Error(msg); - } - this.bucketName = this.conf.bucketName; - } this.sharedKeyCredential = new StorageSharedKeyCredential( - (this.conf as ConfigAzureStorageBlob).storageAccount, - (this.conf as ConfigAzureStorageBlob).accessKey + (this._config as ConfigAzureStorageBlob).storageAccount, + (this._config as ConfigAzureStorageBlob).accessKey ); this.storage = new BlobServiceClient( - `https://${(this.conf as ConfigAzureStorageBlob).storageAccount}.blob.core.windows.net`, + `https://${(this._config as ConfigAzureStorageBlob).storageAccount}.blob.core.windows.net`, this.sharedKeyCredential ); } @@ -43,13 +46,13 @@ export class AdapterAzureStorageBlob extends AbstractAdapter { private parseConfig(config: string | ConfigAzureStorageBlob): ConfigAzureStorageBlob { let cfg: ConfigAzureStorageBlob; if (typeof config === "string") { - const { - type, - part1: storageAccount, - part2: accessKey, - bucketName, - queryString, - } = 
parseUrl(config); + const { value, error } = parseUrl(config); + if (error) { + this.configError = error; + return null; + } + + const { type, part1: storageAccount, part2: accessKey, bucketName, queryString } = value; cfg = { type, storageAccount, @@ -66,127 +69,108 @@ export class AdapterAzureStorageBlob extends AbstractAdapter { } if (!cfg.storageAccount) { - throw new Error( - "You must specify a value for 'storageAccount' for storage type 'azurestorageblob'" - ); + this.configError = + "You must specify a value for 'storageAccount' for storage type 'azurestorageblob'"; + return null; } if (!cfg.accessKey) { - throw new Error( - "You must specify a value for 'accessKey' for storage type 'azurestorageblob'" - ); + this.configError = + "You must specify a value for 'accessKey' for storage type 'azurestorageblob'"; + return null; } return cfg; } - async init(): Promise { - if (this.initialized) { - return Promise.resolve(true); - } - if (typeof this.conf.bucketName !== "undefined" && this.conf.bucketName !== "") { - const msg = this.validateName(this.conf.bucketName); - if (msg !== null) { - throw new Error(msg); - } - await this.createBucket(this.conf.bucketName).then(() => { - this.bucketName = this.conf.bucketName; - this.bucketNames.push(this.bucketName); - }); - } - // no further initialization required - this.initialized = true; - return Promise.resolve(true); - } - async getFileAsStream( + bucketName: string, fileName: string, options: CreateReadStreamOptions = { start: 0 } - ): Promise { - const file = this.storage.getContainerClient(this.bucketName).getBlobClient(fileName); - const exists = await file.exists(); - if (!exists) { - throw new Error(`File ${fileName} could not be retrieved from bucket ${this.bucketName}`); + ): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } - if (options.end !== undefined) { - options.end = options.end + 1; - } - return (await file.download(options.start, 
options.end)).readableStreamBody as Readable; - } - async getFileAsURL(fileName: string): Promise { - const file = this.storage.getContainerClient(this.bucketName).getBlobClient(fileName); - - const exists = await file.exists(); + try { + const file = this.storage.getContainerClient(bucketName).getBlobClient(fileName); + const exists = await file.exists(); + if (!exists) { + return { + value: null, + error: `File ${fileName} could not be retrieved from bucket ${bucketName}`, + }; + } + if (options.end !== undefined) { + options.end = options.end + 1; + } - if (!exists) { - throw new Error(`File ${fileName} could not be retrieved from bucket ${this.bucketName}`); + try { + const stream = await file.download(options.start, options.end); + return { value: stream.readableStreamBody as Readable, error: null }; + } catch (e) { + return { value: null, error: JSON.stringify(e) }; + } + } catch (e) { + return { value: null, error: JSON.stringify(e) }; } - - const options: BlobGenerateSasUrlOptions = { - permissions: BlobSASPermissions.parse("r"), - expiresOn: new Date(new Date().valueOf() + 86400), - }; - - return file.generateSasUrl(options); } - async selectBucket(name: string | null): Promise { - if (name === null) { - this.bucketName = ""; - return `bucket '${name}' deselected`; + async getFileAsURL(bucketName: string, fileName: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } - return await this.createBucket(name) - .then(() => { - this.bucketName = name; - return `bucket '${name}' selected`; - }) - .catch((e) => { - throw e; - }); - } - - async createBucket(name: string, options?: object): Promise { - const msg = this.validateName(name); - if (msg !== null) { - return Promise.reject(msg); - } - if (this.bucketNames.findIndex((b) => b === name) !== -1) { - return "bucket already exists"; - } try { - const cont = this.storage.getContainerClient(name); - const exists = await cont.exists(); - if (exists) { - return 
"container already exists"; + const file = this.storage.getContainerClient(bucketName).getBlobClient(fileName); + const exists = await file.exists(); + + if (!exists) { + return { + value: null, + error: `File ${fileName} could not be retrieved from bucket ${bucketName}`, + }; + } + + try { + const options: BlobGenerateSasUrlOptions = { + permissions: BlobSASPermissions.parse("r"), + expiresOn: new Date(new Date().valueOf() + 86400), + }; + const url = await file.generateSasUrl(options); + return { value: url, error: null }; + } catch (e) { + return { value: null, error: JSON.stringify(e) }; } } catch (e) { - // console.log(e); - return `error creating container ${e.message}`; + return { value: null, error: JSON.stringify(e) }; + } + } + + async createBucket(name: string, options?: object): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } + try { - const res = await this.storage.createContainer(name); - this.bucketNames.push(res.containerClient.containerName); - return "container created"; + const res = await this.storage.createContainer(name, options); + return { value: "ok", error: null }; } catch (e) { - // console.log("error creating container: ", e); - return `error creating container ${e.message}`; + return { value: null, error: JSON.stringify(e) }; } } - async clearBucket(name?: string): Promise { - const n = name || this.bucketName; - if (!n) { - return Promise.reject("no bucket selected"); + async clearBucket(name: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } try { - // const containerClient = this.storage.getContainerClient(n); + // const containerClient = this.storage.getContainerClient(name); // const blobs = containerClient.listBlobsFlat(); // for await (const blob of blobs) { // console.log(blob.name); // await containerClient.deleteBlob(blob.name); // } - const containerClient = this.storage.getContainerClient(n); + const containerClient = 
this.storage.getContainerClient(name); const blobs = containerClient.listBlobsByHierarchy("/"); for await (const blob of blobs) { if (blob.kind === "prefix") { @@ -195,138 +179,156 @@ export class AdapterAzureStorageBlob extends AbstractAdapter { await containerClient.deleteBlob(blob.name); } } - return "bucket cleared"; + return { value: "ok", error: null }; } catch (e) { - return Promise.reject(e); + return { value: null, error: JSON.stringify(e) }; } } - async deleteBucket(name?: string): Promise { - const n = name || this.bucketName; - if (!n) { - return Promise.reject("no bucket selected"); - } - + async deleteBucket(name: string): Promise { try { - await this.clearBucket(n); - const del = await this.storage.deleteContainer(n); + await this.clearBucket(name); + const del = await this.storage.deleteContainer(name); //console.log('deleting container: ', del); - if (n === this.bucketName) { - this.bucketName = ""; - } - this.bucketNames = this.bucketNames.filter((b) => b !== n); - return "bucket deleted"; + return { value: "ok", error: null }; } catch (e) { - return Promise.reject(e); + return { value: null, error: JSON.stringify(e) }; } } - async listBuckets(): Promise { - this.bucketNames = []; + async listBuckets(): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } // let i = 0; - for await (const container of this.storage.listContainers()) { - // console.log(`${i++} ${container.name}`); - this.bucketNames.push(container.name); + try { + const bucketNames = []; + for await (const container of this.storage.listContainers()) { + // console.log(`${i++} ${container.name}`); + bucketNames.push(container.name); + } + return { value: bucketNames, error: null }; + } catch (e) { + return { value: null, error: JSON.stringify(e) }; } - return this.bucketNames; } - async listFiles(): Promise<[string, number][]> { - if (!this.bucketName) { - return Promise.reject("no bucket selected"); + async listFiles(bucketName: string): 
Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } - const files: [string, number][] = []; - const data = this.storage.getContainerClient(this.bucketName).listBlobsFlat(); - for await (const blob of data) { - if (blob.properties["ResourceType"] !== "directory") { - files.push([blob.name, blob.properties.contentLength]); + + try { + const files: [string, number][] = []; + const data = this.storage.getContainerClient(bucketName).listBlobsFlat(); + for await (const blob of data) { + if (blob.properties["ResourceType"] !== "directory") { + files.push([blob.name, blob.properties.contentLength]); + } } + return { value: files, error: null }; + } catch (e) { + return { value: null, error: JSON.stringify(e) }; } - - return files; } - removeFile(fileName: string): Promise { + async removeFile(bucketName: string, fileName: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } + try { - const container = this.storage.getContainerClient(this.bucketName); - const file = container.getBlobClient(fileName).deleteIfExists(); - /*if(file.()) { - file.delete(); - return Promise.resolve("file deleted"); - } else { - return Promise.resolve("file does not exist"); - }*/ - return Promise.resolve("file deleted"); + const container = this.storage.getContainerClient(bucketName); + const file = await container.getBlobClient(fileName).deleteIfExists(); + return { value: "ok", error: null }; } catch (e) { - console.log("error deleting file: ", e); - - return Promise.resolve(e); + return { value: null, error: JSON.stringify(e) }; } } - async sizeOf(name: string): Promise { - if (!this.bucketName) { - return Promise.reject("no bucket selected"); + async sizeOf(bucketName: string, fileName: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } try { - const blob = this.storage.getContainerClient(this.bucketName).getBlobClient(name); - return 
Promise.resolve((await blob.getProperties()).contentLength); + const blob = this.storage.getContainerClient(bucketName).getBlobClient(fileName); + const length = (await blob.getProperties()).contentLength; + return { value: length, error: null }; } catch (e) { - return Promise.reject(e); + return { value: null, error: JSON.stringify(e) }; } } - async fileExists(name: string): Promise { - if (!this.bucketName) { - return Promise.reject("no bucket selected"); + async bucketExists(name: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } + + try { + const cont = this.storage.getContainerClient(name); + const exists = await cont.exists(); + if (exists) { + return { value: null, error: "container already exists" }; + } + } catch (e) { + return { value: null, error: JSON.stringify(e) }; } - const data = await this.storage - .getContainerClient(this.bucketName) - .getBlobClient(name) - .exists(); - return data; } - protected async store(buffer: Buffer, targetPath: string, options: object): Promise; - protected async store(stream: Readable, targetPath: string, options: object): Promise; - protected async store(origPath: string, targetPath: string, options: object): Promise; - protected async store( - arg: string | Buffer | Readable, - targetPath: string, - options: object - ): Promise { - if (!this.bucketName) { - throw new Error("no bucket selected"); + async fileExists(bucketName: string, fileName: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } - await this.createBucket(this.bucketName); - - let readStream: Readable; - if (typeof arg === "string") { - await fs.promises.stat(arg); // throws error if path doesn't exist - readStream = fs.createReadStream(arg); - } else if (arg instanceof Buffer) { - readStream = new Readable(); - readStream._read = (): void => {}; // _read is required but you can noop it - readStream.push(arg); - readStream.push(null); - } 
else if (arg instanceof Readable) { - readStream = arg; + + try { + const data = await this.storage + .getContainerClient(bucketName) + .getBlobClient(fileName) + .exists(); + return { value: data, error: null }; + } catch (e) { + return { value: null, error: JSON.stringify(e) }; } - const file = this.storage - .getContainerClient(this.bucketName) - .getBlobClient(targetPath) - .getBlockBlobClient(); - const writeStream = await file.uploadStream(readStream, 64000, 20, { - onProgress: (ev) => null, - }); - - return new Promise((resolve, reject) => { + } + + public async addFile( + params: FilePathParams | FileBufferParams | FileStreamParams + ): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } + + try { + let readStream: Readable; + if (typeof (params as FilePathParams).origPath === "string") { + const f = (params as FilePathParams).origPath; + if (!fs.existsSync(f)) { + return { value: null, error: `File with given path: ${f}, was not found` }; + } + readStream = fs.createReadStream(f); + } else if (typeof (params as FileBufferParams).buffer !== "undefined") { + readStream = new Readable(); + readStream._read = (): void => {}; // _read is required but you can noop it + readStream.push((params as FileBufferParams).buffer); + readStream.push(null); + } else if (typeof (params as FileStreamParams).stream !== "undefined") { + readStream = (params as FileStreamParams).stream; + } + const file = this.storage + .getContainerClient(params.bucketName) + .getBlobClient(params.targetPath) + .getBlockBlobClient(); + const writeStream = await file.uploadStream(readStream, 64000, 20, { + onProgress: (ev) => null, + }); if (writeStream.errorCode) { - reject(writeStream.errorCode); + return { value: null, error: writeStream.errorCode }; } else { - resolve("file stored"); + return this.getFileAsURL(params.bucketName, params.targetPath); } - }); + } catch (e) { + return { value: null, error: JSON.stringify(e) }; + } } } diff --git 
a/src/AdapterBackblazeB2.ts b/src/AdapterBackblazeB2.ts index 75055fb..257728c 100644 --- a/src/AdapterBackblazeB2.ts +++ b/src/AdapterBackblazeB2.ts @@ -34,10 +34,10 @@ export class AdapterBackblazeB2 extends AbstractAdapter { constructor(config: string | ConfigBackblazeB2) { super(); - this.conf = this.parseConfig(config); - if (this.conf !== null) { + this._config = this.parseConfig(config); + if (this._config !== null) { try { - this.storage = new B2(this.conf); + this.storage = new B2(this._config); } catch (e) { this.configError = e.message; } diff --git a/src/AdapterGoogleCloudStorage.ts b/src/AdapterGoogleCloudStorage.ts index 5013369..6ad07b3 100644 --- a/src/AdapterGoogleCloudStorage.ts +++ b/src/AdapterGoogleCloudStorage.ts @@ -18,15 +18,15 @@ export class AdapterGoogleCloudStorage extends AbstractAdapter { constructor(config: string | ConfigGoogleCloud) { super(); - this.conf = this.parseConfig(config); - if (typeof this.conf.bucketName !== "undefined" && this.conf.bucketName !== "") { - const msg = this.validateName(this.conf.bucketName); + this._config = this.parseConfig(config); + if (typeof this._config.bucketName !== "undefined" && this._config.bucketName !== "") { + const msg = this.validateName(this._config.bucketName); if (msg !== null) { throw new Error(msg); } - this.bucketName = this.conf.bucketName; + this.bucketName = this._config.bucketName; } - this.storage = new GoogleCloudStorage(this.conf as ConfigGoogleCloud); + this.storage = new GoogleCloudStorage(this._config as ConfigGoogleCloud); } /** diff --git a/src/AdapterLocal.ts b/src/AdapterLocal.ts index 1c49657..85f3e43 100644 --- a/src/AdapterLocal.ts +++ b/src/AdapterLocal.ts @@ -12,14 +12,14 @@ export class AdapterLocal extends AbstractAdapter { constructor(config: ConfigLocal) { super(); - this.conf = this.parseConfig(config); + this._config = this.parseConfig(config); // console.log(config); // console.log(this.config); - const mode = (this.conf as ConfigLocal).mode; + const mode = 
(this._config as ConfigLocal).mode; if (typeof mode === "undefined") { - (this.conf as ConfigLocal).mode = 0o777; + (this._config as ConfigLocal).mode = 0o777; } - const directory = (this.conf as ConfigLocal).directory; + const directory = (this._config as ConfigLocal).directory; } private parseConfig(config: string | ConfigLocal): ConfigLocal { @@ -86,8 +86,8 @@ export class AdapterLocal extends AbstractAdapter { return Promise.resolve(true); } - if (typeof this.conf.bucketName !== "undefined" && this.conf.bucketName !== "") { - const { error } = await this.validateName(this.conf.bucketName); + if (typeof this._config.bucketName !== "undefined" && this._config.bucketName !== "") { + const { error } = await this.validateName(this._config.bucketName); if (error !== null) { Promise.resolve({ error, value: null }); return; From 35b1ffea127e834cf1660516a833cfdf20a7b1a2 Mon Sep 17 00:00:00 2001 From: abudaan Date: Fri, 24 Nov 2023 16:35:13 +0100 Subject: [PATCH 18/26] wip adapter GCS --- package.json | 1 - src/AdapterAzureStorageBlob.ts | 9 +- src/AdapterGoogleCloudStorage.ts | 386 +++++++++++++++---------------- 3 files changed, 191 insertions(+), 205 deletions(-) diff --git a/package.json b/package.json index a5f0f2b..68fe734 100644 --- a/package.json +++ b/package.json @@ -12,7 +12,6 @@ "@azure/storage-blob": "^12.17.0", "@gideo-llc/backblaze-b2-upload-any": "^0.1.4", "@google-cloud/storage": "^7.6.0", - "@ramda/zip": "^0.26.1", "backblaze-b2": "^1.7.0", "glob": "^10.3.10", "rimraf": "^5.0.5", diff --git a/src/AdapterAzureStorageBlob.ts b/src/AdapterAzureStorageBlob.ts index df1567e..57c08d9 100644 --- a/src/AdapterAzureStorageBlob.ts +++ b/src/AdapterAzureStorageBlob.ts @@ -299,6 +299,11 @@ export class AdapterAzureStorageBlob extends AbstractAdapter { return { value: null, error: this.configError }; } + let { options } = params; + if (typeof options !== "object") { + options = {}; + } + try { let readStream: Readable; if (typeof (params as FilePathParams).origPath 
=== "string") { @@ -319,9 +324,7 @@ export class AdapterAzureStorageBlob extends AbstractAdapter { .getContainerClient(params.bucketName) .getBlobClient(params.targetPath) .getBlockBlobClient(); - const writeStream = await file.uploadStream(readStream, 64000, 20, { - onProgress: (ev) => null, - }); + const writeStream = await file.uploadStream(readStream, 64000, 20, options); if (writeStream.errorCode) { return { value: null, error: writeStream.errorCode }; } else { diff --git a/src/AdapterGoogleCloudStorage.ts b/src/AdapterGoogleCloudStorage.ts index 6ad07b3..dd25eab 100644 --- a/src/AdapterGoogleCloudStorage.ts +++ b/src/AdapterGoogleCloudStorage.ts @@ -1,6 +1,4 @@ import fs from "fs"; -import path from "path"; -import zip from "@ramda/zip"; import { Readable } from "stream"; import { Storage as GoogleCloudStorage, @@ -8,24 +6,29 @@ import { CreateReadStreamOptions, } from "@google-cloud/storage"; import { AbstractAdapter } from "./AbstractAdapter"; -import { StorageType, ConfigGoogleCloud } from "./types"; +import { + StorageType, + ConfigGoogleCloud, + ResultObject, + ResultObjectStream, + FileBufferParams, + FilePathParams, + FileStreamParams, + ResultObjectBuckets, + ResultObjectFiles, + ResultObjectNumber, + ResultObjectBoolean, +} from "./types"; import { parseUrl } from "./util"; export class AdapterGoogleCloudStorage extends AbstractAdapter { - protected type = StorageType.GCS; - private bucketNames: string[] = []; + protected _type = StorageType.GCS; + private configError: string | null = null; private storage: GoogleCloudStorage; constructor(config: string | ConfigGoogleCloud) { super(); this._config = this.parseConfig(config); - if (typeof this._config.bucketName !== "undefined" && this._config.bucketName !== "") { - const msg = this.validateName(this._config.bucketName); - if (msg !== null) { - throw new Error(msg); - } - this.bucketName = this._config.bucketName; - } this.storage = new GoogleCloudStorage(this._config as ConfigGoogleCloud); } @@ 
-44,13 +47,13 @@ export class AdapterGoogleCloudStorage extends AbstractAdapter { private parseConfig(config: string | ConfigGoogleCloud): ConfigGoogleCloud { let cfg: ConfigGoogleCloud; if (typeof config === "string") { - const { - type, - part1: keyFilename, - part2: projectId, - bucketName, - queryString, - } = parseUrl(config); + const { value, error } = parseUrl(config); + if (error) { + this.configError = error; + return null; + } + + const { type, part1: keyFilename, part2: projectId, bucketName, queryString } = value; cfg = { type, keyFilename, @@ -66,9 +69,6 @@ export class AdapterGoogleCloudStorage extends AbstractAdapter { return cfg; } - // if (!cfg.keyFilename) { - // throw new Error("You must specify a value for 'keyFilename' for storage type 'gcs'"); - // } if (cfg.projectId === "" && cfg.keyFilename !== "") { cfg.projectId = this.getGCSProjectId(cfg.keyFilename); } @@ -76,125 +76,112 @@ export class AdapterGoogleCloudStorage extends AbstractAdapter { return cfg; } - async init(): Promise { - if (this.initialized) { - return Promise.resolve(true); + async getFileAsURL(bucketName: string, fileName: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } - if (this.bucketName) { - await this.createBucket(this.bucketName); - this.bucketNames.push(this.bucketName); - } - // no further initialization required - this.initialized = true; - return Promise.resolve(true); - } - // After uploading a file to Google Storage it may take a while before the file - // can be discovered and downloaded; this function adds a little delay - async getFile(fileName: string, retries: number = 5): Promise { - const file = this.storage.bucket(this.bucketName).file(fileName); - const [exists] = await file.exists(); - if (!exists && retries !== 0) { - const r = retries - 1; - await new Promise((res) => { - setTimeout(res, 250); - }); - // console.log('RETRY', r, fileName); - return this.getFile(fileName, r); - } - if (!exists) 
{ - throw new Error(`File ${fileName} could not be retrieved from bucket ${this.bucketName}`); + try { + const file = this.storage.bucket(bucketName).file(fileName); + return { value: file.publicUrl(), error: null }; + } catch (e) { + return { value: null, error: e.message }; } - return file; } async getFileAsStream( + bucketName: string, fileName: string, options: CreateReadStreamOptions = { start: 0 } - ): Promise { - const file = this.storage.bucket(this.bucketName).file(fileName); - const [exists] = await file.exists(); - if (exists) { - return file.createReadStream(options); + ): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } - throw new Error(`File ${fileName} could not be retrieved from bucket ${this.bucketName}`); - } - // not in use - async downloadFile(fileName: string, downloadPath: string): Promise { - const file = this.storage.bucket(this.bucketName).file(fileName); - const localFilename = path.join(downloadPath, fileName); - await file.download({ destination: localFilename }); + try { + const file = this.storage.bucket(bucketName).file(fileName); + const [exists] = await file.exists(); + if (exists) { + return { value: file.createReadStream(options), error: null }; + } + } catch (e) { + return { + value: null, + error: `File ${fileName} could not be retrieved from bucket ${bucketName}`, + }; + } } - async removeFile(fileName: string): Promise { + async removeFile(bucketName: string, fileName: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } + try { - await this.storage.bucket(this.bucketName).file(fileName).delete(); - return "file deleted"; + await this.storage.bucket(bucketName).file(fileName).delete(); + return { value: "ok", error: null }; } catch (e) { - if (e.message.indexOf("No such object") !== -1) { - return "file deleted"; - } - // console.log(e.message); - throw e; + return { value: null, error: e.message }; } } - // util members - 
- protected async store(buffer: Buffer, targetPath: string, options: object): Promise; - protected async store(stream: Readable, targetPath: string, options: object): Promise; - protected async store(origPath: string, targetPath: string, options: object): Promise; - protected async store( - arg: string | Buffer | Readable, - targetPath: string, - options: object - ): Promise { - if (!this.bucketName) { - throw new Error("no bucket selected"); + public async addFile( + params: FilePathParams | FileBufferParams | FileStreamParams + ): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } - await this.createBucket(this.bucketName); - - let readStream: Readable; - if (typeof arg === "string") { - await fs.promises.stat(arg); // throws error if path doesn't exist - readStream = fs.createReadStream(arg); - } else if (arg instanceof Buffer) { - readStream = new Readable(); - readStream._read = (): void => {}; // _read is required but you can noop it - readStream.push(arg); - readStream.push(null); - } else if (arg instanceof Readable) { - readStream = arg; + + let { options } = params; + if (typeof options !== "object") { + options = {}; } - const file = this.storage.bucket(this.bucketName).file(targetPath, options); - const writeStream = file.createWriteStream(); - return new Promise((resolve, reject) => { + + try { + let readStream: Readable; + if (typeof (params as FilePathParams).origPath === "string") { + const f = (params as FilePathParams).origPath; + if (!fs.existsSync(f)) { + return { value: null, error: `File with given path: ${f}, was not found` }; + } + readStream = fs.createReadStream(f); + } else if (typeof (params as FileBufferParams).buffer !== "undefined") { + readStream = new Readable(); + readStream._read = (): void => {}; // _read is required but you can noop it + readStream.push((params as FileBufferParams).buffer); + readStream.push(null); + } else if (typeof (params as FileStreamParams).stream !== 
"undefined") { + readStream = (params as FileStreamParams).stream; + } + + const file = this.storage.bucket(params.bucketName).file(params.targetPath, options); + const writeStream = file.createWriteStream(options); readStream .pipe(writeStream) - .on("error", reject) + .on("error", (e) => { + return { value: null, error: e.message }; + }) .on("finish", () => { - resolve(file.publicUrl()); + return { value: file.publicUrl(), error: null }; }); - writeStream.on("error", reject); - }); - } - - async createBucket(name: string, options: object = {}): Promise { - const msg = this.validateName(name); - if (msg !== null) { - return Promise.reject(msg); + writeStream.on("error", (e) => { + return { value: null, error: e.message }; + }); + } catch (e) { + return { value: null, error: e.message }; } + } - if (this.bucketNames.findIndex((b) => b === name) !== -1) { - return "bucket exists"; + async createBucket(name: string, options: object = {}): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } try { const bucket = this.storage.bucket(name, options); const [exists] = await bucket.exists(); if (exists) { - return "bucket exists"; + return { value: null, error: "bucket exists" }; } } catch (e) { // console.log(e.message); @@ -203,122 +190,119 @@ export class AdapterGoogleCloudStorage extends AbstractAdapter { try { await this.storage.createBucket(name, options); - this.bucketNames.push(name); - return "bucket created"; + return { value: "ok", error: null }; } catch (e) { - // console.log("ERROR", e.message, e.code); - if ( - e.code === 409 && - e.message === "You already own this bucket. Please select another name." - ) { - // error code 409 can have messages like: - // "You already own this bucket. Please select another name." (bucket exists!) - // "Sorry, that name is not available. Please try a different one." 
(notably bucket name "new-bucket") - // So in some cases we can safely ignore this error, in some case we can't - return; - } - throw new Error(e.message); + return { value: null, error: e.message }; } - - // ossia: - // await this.storage - // .createBucket(n) - // .then(() => { - // this.bucketNames.push(n); - // return "bucket created"; - // }) - // .catch(e => { - // if (e.code === 409) { - // // error code 409 is 'You already own this bucket. Please select another name.' - // // so we can safely return true if this error occurs - // return; - // } - // throw new Error(e.message); - // }); } - async selectBucket(name: string | null): Promise { - if (name === null) { - this.bucketName = ""; - return `bucket '${name}' deselected`; + async clearBucket(name: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } - // const [error] = await to(this.createBucket(name)); - // if (error !== null) { - // throw error; - // } - return await this.createBucket(name) - .then(() => { - this.bucketName = name; - return `bucket '${name}' selected`; - }) - .catch((e) => { - throw e; - }); + try { + await this.storage.bucket(name).deleteFiles({ force: true }); + return { value: "ok", error: null }; + } catch (e) { + return { value: null, error: e.message }; + } } - async clearBucket(name?: string): Promise { - let n = name; - if (typeof n === "undefined" || n === null || n === "") { - n = this.bucketName; + async deleteBucket(name: string): Promise { + try { + await this.clearBucket(name); + } catch (e) { + return { value: null, error: e.message }; + } + try { + await this.storage.bucket(name).delete(); + return { value: "ok", error: null }; + } catch (e) { + return { value: null, error: e.message }; } - await this.storage.bucket(n).deleteFiles({ force: true }); - return "bucket cleared"; } - async deleteBucket(name?: string): Promise { - const n = name || this.bucketName; - await this.clearBucket(n); - const data = await 
this.storage.bucket(n).delete(); - // console.log(data); - if (n === this.bucketName) { - this.bucketName = ""; + async listBuckets(): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } + + try { + const [buckets] = await this.storage.getBuckets(); + return { value: buckets.map((b) => b.name), error: null }; + } catch (e) { + return { value: null, error: e.message }; } - this.bucketNames = this.bucketNames.filter((b) => b !== n); - // console.log(this.bucketName, this.bucketNames); - return "bucket deleted"; } - async listBuckets(): Promise { - const [buckets] = await this.storage.getBuckets(); - this.bucketNames = buckets.map((b) => b.metadata.id); - return this.bucketNames; + private async getFileSize(bucketName: string, fileNames: string[]): Promise { + const result: Array<[string, number]> = []; + for (let i = 0; i < fileNames.length; i += 1) { + const file = this.storage.bucket(bucketName).file(fileNames[i]); + try { + const [metadata] = await file.getMetadata(); + result.push([file.name, parseInt(metadata.size as string, 10)]); + } catch (e) { + return { value: null, error: e.message }; + } + } + return { value: result, error: null }; } - private async getMetaData(files: string[]): Promise { - const sizes: number[] = []; - for (let i = 0; i < files.length; i += 1) { - const file = this.storage.bucket(this.bucketName).file(files[i]); - const [metadata] = await file.getMetadata(); - // console.log(metadata); - sizes.push(parseInt(metadata.size as string, 10)); + async listFiles(bucketName: string, numFiles: number = 1000): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } + + try { + const data = await this.storage.bucket(bucketName).getFiles(); + const names = data[0].map((f) => f.name); + return this.getFileSize(bucketName, names); + } catch (e) { + return { value: null, error: e.message }; } - return sizes; } - async listFiles(numFiles: number = 1000): 
Promise<[string, number][]> { - if (!this.bucketName) { - throw new Error("no bucket selected"); + async sizeOf(bucketName: string, fileName: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } + + try { + const file = this.storage.bucket(bucketName).file(fileName); + const [metadata] = await file.getMetadata(); + return { value: parseInt(metadata.size as string, 10), error: null }; + } catch (e) { + return { value: null, error: e.message }; } - const data = await this.storage.bucket(this.bucketName).getFiles(); - const names = data[0].map((f) => f.name); - const sizes = await this.getMetaData(names); - return zip(names, sizes) as [string, number][]; } - async sizeOf(name: string): Promise { - if (!this.bucketName) { - throw new Error("no bucket selected"); + async bucketExists(name: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } + + try { + const data = await this.storage.bucket(name).exists(); + // console.log(data); + return { value: data[0], error: null }; + } catch (e) { + return { value: null, error: e.message }; } - const file = this.storage.bucket(this.bucketName).file(name); - const [metadata] = await file.getMetadata(); - return parseInt(metadata.size as string, 10); } - async fileExists(name: string): Promise { - const data = await this.storage.bucket(this.bucketName).file(name).exists(); + async fileExists(bucketName: string, fileName: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } - // console.log(data); - return data[0]; + try { + const data = await this.storage.bucket(bucketName).file(fileName).exists(); + // console.log(data); + return { value: data[0], error: null }; + } catch (e) { + return { value: null, error: e.message }; + } } } From 004ff29929c377ac5dd4038763bc26002de1ed24 Mon Sep 17 00:00:00 2001 From: abudaan Date: Fri, 24 Nov 2023 19:14:23 +0100 Subject: [PATCH 
19/26] wip adapter local --- README.md | 14 +- package.json | 1 + src/AdapterAzureStorageBlob.ts | 1 + src/AdapterBackblazeB2.ts | 1 + src/AdapterGoogleCloudStorage.ts | 1 + src/AdapterLocal.ts | 339 +++++++++++++++---------------- src/types.ts | 6 +- src/util.ts | 28 ++- 8 files changed, 188 insertions(+), 203 deletions(-) diff --git a/README.md b/README.md index e5c01fd..fdfa3d3 100644 --- a/README.md +++ b/README.md @@ -300,16 +300,14 @@ Config object: ```typescript type ConfigAmazonS3 = { type: StorageType; - accessKeyId: string; - secretAccessKey: string; - region: string; bucketName?: string; + accessKeyId?: string; + secretAccessKey?: string; + region?: string; endpoint?: string; - useDualstack?: boolean; - maxRetries?: number; - maxRedirects?: number; - sslEnabled?: boolean; - [id: string]: boolean | string | number; // configuration is extensible + options: { + [id: string]: boolean | number | string; + }; }; ``` diff --git a/package.json b/package.json index 68fe734..e49e7d2 100644 --- a/package.json +++ b/package.json @@ -49,6 +49,7 @@ "test": "ts-node ./tests/test.ts", "test-mode": "ts-node ./tests/test-mode.ts", "testB2": "ts-node ./tests/testB2.ts", + "testLocal": "ts-node ./tests/testLocal.ts", "ts": "ts-node", "tsc": "node_modules/.bin/tsc", "copy-readme": "cp ./README.md ./publish", diff --git a/src/AdapterAzureStorageBlob.ts b/src/AdapterAzureStorageBlob.ts index 57c08d9..8136e0d 100644 --- a/src/AdapterAzureStorageBlob.ts +++ b/src/AdapterAzureStorageBlob.ts @@ -25,6 +25,7 @@ import { CreateReadStreamOptions } from "@google-cloud/storage"; export class AdapterAzureStorageBlob extends AbstractAdapter { protected _type = StorageType.AZURE; + protected _config: ConfigAzureStorageBlob; private sharedKeyCredential: StorageSharedKeyCredential; private configError: string | null = null; private storage: BlobServiceClient; diff --git a/src/AdapterBackblazeB2.ts b/src/AdapterBackblazeB2.ts index 257728c..0ff0c8b 100644 --- a/src/AdapterBackblazeB2.ts +++ 
b/src/AdapterBackblazeB2.ts @@ -28,6 +28,7 @@ require("@gideo-llc/backblaze-b2-upload-any").install(B2); export class AdapterBackblazeB2 extends AbstractAdapter { protected _type = StorageType.B2; + protected _config: ConfigBackblazeB2; private storage: B2; private authorized: boolean = false; private configError: string | null = null; diff --git a/src/AdapterGoogleCloudStorage.ts b/src/AdapterGoogleCloudStorage.ts index dd25eab..f4d988c 100644 --- a/src/AdapterGoogleCloudStorage.ts +++ b/src/AdapterGoogleCloudStorage.ts @@ -23,6 +23,7 @@ import { parseUrl } from "./util"; export class AdapterGoogleCloudStorage extends AbstractAdapter { protected _type = StorageType.GCS; + protected _config: ConfigGoogleCloud; private configError: string | null = null; private storage: GoogleCloudStorage; diff --git a/src/AdapterLocal.ts b/src/AdapterLocal.ts index 85f3e43..a7d2902 100644 --- a/src/AdapterLocal.ts +++ b/src/AdapterLocal.ts @@ -3,76 +3,67 @@ import path from "path"; import { glob } from "glob"; import { rimraf } from "rimraf"; import { Readable } from "stream"; -import { StorageType, ConfigLocal } from "./types"; +import { + StorageType, + ConfigLocal, + ResultObjectBoolean, + FileBufferParams, + FilePathParams, + FileStreamParams, + ResultObject, + ResultObjectBuckets, + ResultObjectFiles, +} from "./types"; import { AbstractAdapter } from "./AbstractAdapter"; -import { parseQuerystring, parseMode } from "./util"; +import { parseQuerystring, parseMode, parseUrl, validateName } from "./util"; export class AdapterLocal extends AbstractAdapter { - protected type = StorageType.LOCAL; + protected _type = StorageType.LOCAL; + protected _config: ConfigLocal; + private mode: number = 0o777; + private configError: string | null = null; constructor(config: ConfigLocal) { super(); this._config = this.parseConfig(config); - // console.log(config); - // console.log(this.config); - const mode = (this._config as ConfigLocal).mode; - if (typeof mode === "undefined") { - 
(this._config as ConfigLocal).mode = 0o777; - } - const directory = (this._config as ConfigLocal).directory; + const directory = this._config.directory; } - private parseConfig(config: string | ConfigLocal): ConfigLocal { + private parseConfig(config: string | ConfigLocal): ConfigLocal | null { let cfg: ConfigLocal; if (typeof config === "string") { - const qm = config.indexOf("?"); - const sep = config.indexOf("://"); - const type = config.substring(0, sep); - // const { mode } = parseQuerystring(config); - const querystring = parseQuerystring(config); - const end = qm !== -1 ? qm : config.length; - const lastSlash = config.lastIndexOf("/"); - // console.log(end, lastSlash); - let directory = config.substring(sep + 3, end); - let bucketName: string; - if (lastSlash !== -1) { - if (lastSlash > sep + 3) { - directory = config.substring(sep + 3, lastSlash); + const { value, error } = parseUrl(config); + if (error) { + this.configError = error; + return null; + } + + const { type, part1: directory, bucketName, queryString: options } = value; + + if (typeof options.mode !== "undefined") { + const { value, error } = parseMode(options.mode); + if (error !== null) { + this.configError = error; + return null; + } else { + this.mode = value; } - bucketName = config.substring(lastSlash + 1, end); } - // console.log("DIR", sep, directory, end, lastSlash, qm); - // console.log("DIR", config, directory, bucketName, lastSlash); + cfg = { type, directory, bucketName, - ...querystring, - // mode: mode as string, + mode: this.mode, + ...options, }; } else { cfg = { ...config }; if (!cfg.directory) { - throw new Error("You must specify a value for 'directory' for storage type 'local'"); + this.configError = "You must specify a value for 'directory' for storage type 'local'"; + return null; } - - // retrieve bucketName from directory - // if (!cfg.bucketName) { - // const lastSlash = cfg.directory.lastIndexOf("/"); - // if (lastSlash === -1) { - // cfg.bucketName = cfg.directory; - 
// cfg.directory = ""; - // } else { - // const dir = cfg.directory; - // cfg.directory = dir.substring(0, lastSlash); - // cfg.bucketName = dir.substring(lastSlash + 1); - // } - // } - - // if (cfg.directory === "") { - // cfg.directory = process.cwd(); - // } } if (cfg.skipCheck === true) { return cfg; @@ -81,168 +72,162 @@ export class AdapterLocal extends AbstractAdapter { return cfg; } - async init(): Promise { - if (this.initialized) { - return Promise.resolve(true); - } - - if (typeof this._config.bucketName !== "undefined" && this._config.bucketName !== "") { - const { error } = await this.validateName(this._config.bucketName); - if (error !== null) { - Promise.resolve({ error, value: null }); - return; - } - } - - if (typeof this.bucketName !== "undefined") { - await this.createDirectory(path.join(this.directory, this.bucketName)); - } - this.initialized = true; - return Promise.resolve(true); - } - /** * @param path * creates a directory if it doesn't exist */ - private async createDirectory(path: string): Promise { + private async createDirectory(path: string): Promise { try { - await fs.promises.access(path); - return true; + await fs.promises.access(path, this.mode); + return { value: true, error: null }; // directory exists already } catch (e) { - await fs.promises - .mkdir(path, { + try { + await fs.promises.mkdir(path, { recursive: true, - mode: parseMode(this.mode), - }) - .catch((e) => { - throw e; - // console.error(`\x1b[31m${e.message}`); - // return false; + mode: this._config.mode, }); - // const m = (await fs.promises.stat(path)).mode; - // console.log(m, this.options.mode); - return true; + // const m = (await fs.promises.stat(path)).mode; + // console.log(m, this.options.mode); + return { value: true, error: null }; + } catch (e) { + return { value: null, error: e.message }; + } } } - protected async store(buffer: Buffer, targetPath: string): Promise; - protected async store(stream: Readable, targetPath: string): Promise; - protected 
async store(filePath: string, targetPath: string): Promise; - protected async store(arg: string | Buffer | Readable, targetPath: string): Promise { - const dest = path.join(this.directory, this.bucketName, targetPath); - await this.createDirectory(path.dirname(dest)); - if (typeof arg === "string") { - await fs.promises.copyFile(arg, dest); - return dest; + private async globFiles(folder: string): Promise { + return glob(`${folder}/**/*.*`, {}) + .then((files) => { + return Promise.resolve(files); + }) + .catch((err) => { + return Promise.reject(err); + }); + } + + // Public API + + public async addFile( + params: FilePathParams | FileBufferParams | FileStreamParams + ): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } + + let { options } = params; + if (typeof options !== "object") { + options = {}; } - const writeStream = fs.createWriteStream(dest); - let readStream: Readable = null; - if (arg instanceof Buffer) { - readStream = new Readable(); - readStream._read = (): void => {}; // _read is required but you can noop it - readStream.push(arg); - readStream.push(null); // close stream - } else if (arg instanceof Readable) { - readStream = arg; + + const dest = path.join(this._config.directory, params.bucketName, params.targetPath); + + const { error } = await this.createDirectory(path.dirname(dest)); + if (error !== null) { + return { value: null, error }; + } + + try { + let readStream: Readable; + if (typeof (params as FilePathParams).origPath === "string") { + await fs.promises.copyFile((params as FilePathParams).origPath, dest); + } else if (typeof (params as FileBufferParams).buffer !== "undefined") { + readStream = new Readable(); + readStream._read = (): void => {}; // _read is required but you can noop it + readStream.push((params as FileBufferParams).buffer); + readStream.push(null); + } else if (typeof (params as FileStreamParams).stream !== "undefined") { + readStream = (params as 
FileStreamParams).stream; + } + + const writeStream = fs.createWriteStream(dest); + return new Promise((resolve, reject) => { + readStream + .pipe(writeStream) + .on("error", (e) => { + return { value: null, error: e.message }; + }) + .on("finish", () => { + return { value: dest, error: null }; + }); + writeStream.on("error", reject); + }); + } catch (e) { + return { value: null, error: e.message }; } - return new Promise((resolve, reject) => { - readStream - .pipe(writeStream) - .on("error", reject) - .on("finish", () => { - resolve(dest); - }); - writeStream.on("error", reject); - }); } - async createBucket(name: string): Promise { - const msg = this.validateName(name); + async createBucket(name: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } + + const msg = validateName(name); if (msg !== null) { - return Promise.reject(msg); + return { value: null, error: msg }; } - // console.log(bn, name); - const created = await this.createDirectory(path.join(this.directory, name)); - if (created) { - this.buckets.push(name); - return "ok"; + try { + const p = path.join(this._config.directory, name); + const created = await this.createDirectory(p); + if (created) { + return { value: "ok", error: null }; + } else { + return { value: null, error: `Could not create bucket ${p}` }; + } + } catch (e) { + return { value: null, error: e.message }; } } - async clearBucket(name?: string): Promise { - const n = name || this.bucketName; - if (!n) { - return; + async clearBucket(name: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } - // remove all files and folders inside bucket directory, but not the directory itself - const p = path.join(this.directory, n, "*"); - return rimraf(p) - .then(() => { - return ""; - }) - .catch((e: Error) => { - throw e; - }); - } - async deleteBucket(name?: string): Promise { - const n = name || this.bucketName; - if (!n) { - return 
Promise.resolve(""); + try { + // remove all files and folders inside bucket directory, but not the directory itself + const p = path.join(this._config.directory, name, "*"); + await rimraf(p); + return { value: "ok", error: null }; + } catch (e) { + return { value: null, error: e.message }; } - const p = path.join(this.directory, n); - return rimraf(p) - .then(() => { - if (n === this.bucketName) { - this.bucketName = ""; - } - return ""; - }) - .catch((e: Error) => { - if (n === this.bucketName) { - this.bucketName = ""; - } - if (e !== null) { - return Promise.reject(e); - } - }); } - async selectBucket(name?: string | null): Promise { - if (!name) { - this.bucketName = ""; - return `bucket '${name}' deselected`; + async deleteBucket(name: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } - await this.createBucket(name); - this.bucketName = name; - return `bucket '${name}' selected`; - } - async listBuckets(): Promise { - // console.log(this.directory); - const files = await fs.promises.readdir(this.directory); - const stats = await Promise.all( - files.map((f) => fs.promises.stat(path.join(this.directory, f))) - ); - this.buckets = files.filter((_, i) => stats[i].isDirectory()); - return this.buckets; + try { + const p = path.join(this._config.directory, name); + await rimraf(p); + return { value: "ok", error: null }; + } catch (e) { + return { value: null, error: e.message }; + } } - private async globFiles(folder: string): Promise { - return glob(`${folder}/**/*.*`, {}) - .then((files) => { - return Promise.resolve(files); - }) - .catch((err) => { - return Promise.reject(err); - }); + async listBuckets(): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } + + try { + const files = await fs.promises.readdir(this._config.directory); + // const stats = await Promise.all( + // files.map((f) => fs.promises.stat(path.join(this._config.directory, f))) + // ); + 
return { value: files, error: null }; + } catch (e) { + return { value: null, error: e.message }; + } } - async listFiles(): Promise<[string, number][]> { - if (!this.bucketName) { - throw new Error("no bucket selected"); + async listFiles(): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; } + const storagePath = path.join(this.directory, this.bucketName); const files = await this.globFiles(storagePath); const result: [string, number][] = []; diff --git a/src/types.ts b/src/types.ts index 86e9ddc..ad93460 100644 --- a/src/types.ts +++ b/src/types.ts @@ -223,7 +223,9 @@ export interface IAdapterConfig { type: string; skipCheck?: boolean; bucketName?: string; - // [id: string]: GenericKey; + options?: { + [id: string]: GenericKey; + }; } export interface ConfigAmazonS3 extends IAdapterConfig { @@ -246,13 +248,11 @@ export interface ConfigBackblazeB2 extends IAdapterConfig { export interface ConfigGoogleCloud extends IAdapterConfig { keyFilename?: string; projectId?: string; - // [id: string]: GenericKey; } export interface ConfigLocal extends IAdapterConfig { directory: string; mode?: number | string; - // [id: string]: GenericKey; } export interface ConfigTemplate extends IAdapterConfig { diff --git a/src/util.ts b/src/util.ts index 37f96db..4fedff8 100644 --- a/src/util.ts +++ b/src/util.ts @@ -1,5 +1,5 @@ import { BucketLocationConstraint } from "@aws-sdk/client-s3"; -import { ParseUrlResult } from "./types"; +import { ParseUrlResult, ResultObjectNumber } from "./types"; /** * @param: url @@ -98,27 +98,28 @@ export const parseIntFromString = (s: string): number => { return parseInt(s); }; -export const parseMode = (s: number | string): string | number => { +export const parseMode = (mode: number | string): ResultObjectNumber => { // if mode is a number, parseMode assumes it is a decimal number - if (typeof s === "number") { - if (s < 0) { - throw new Error( - `The argument 'mode' must be a 32-bit unsigned integer or 
an octal string. Received ${s}` - ); + if (typeof mode === "number") { + if (mode < 0) { + return { + value: null, + error: `The argument 'mode' must be a 32-bit unsigned integer or an octal string. Received ${mode}`, + }; } - return s; + return { value: mode, error: null }; } // mode is a string // e.g "0x755" (octal) - if (s.startsWith("0o")) { - return parseInt(s.substring(2), 8).toString(8); + if (mode.startsWith("0o")) { + return { value: parseInt(mode.substring(2), 8), error: null }; } // e.g '511' (decimal) - const i = parseInt(s, 10); + const i = parseInt(mode, 10); // quick fix for erroneously passed octal number as string (without 0o prefix) - return i > 511 ? 511 : i; + return { value: i > 511 ? 511 : i, error: null }; }; /** @@ -137,7 +138,6 @@ export const getProtocol = (url: string): string => { */ export const validateName = (name: string): string => { if (name === null) { - // throw new Error("Can not use `null` as bucket name"); return "Can not use `null` as bucket name"; } if (name === "null") { @@ -147,11 +147,9 @@ export const validateName = (name: string): string => { return 'Can not use "undefined" as bucket name'; } if (name === "" || typeof name === "undefined") { - // throw new Error("Please provide a bucket name"); return "Please provide a bucket name"; } if (name.indexOf(" ") !== -1) { - // throw new Error("Please provide a bucket name"); return "Please provide a valid bucket name"; } return null; From 92759bbdc8b0002ff3698ea54adada0813439115 Mon Sep 17 00:00:00 2001 From: abudaan Date: Fri, 24 Nov 2023 20:03:28 +0100 Subject: [PATCH 20/26] wip adapter local --- src/AdapterLocal.ts | 135 +++++++++++++++++++++++++++++++------------- 1 file changed, 95 insertions(+), 40 deletions(-) diff --git a/src/AdapterLocal.ts b/src/AdapterLocal.ts index a7d2902..95302e9 100644 --- a/src/AdapterLocal.ts +++ b/src/AdapterLocal.ts @@ -13,6 +13,8 @@ import { ResultObject, ResultObjectBuckets, ResultObjectFiles, + ResultObjectStream, + ResultObjectNumber, 
} from "./types"; import { AbstractAdapter } from "./AbstractAdapter"; import { parseQuerystring, parseMode, parseUrl, validateName } from "./util"; @@ -140,16 +142,18 @@ export class AdapterLocal extends AbstractAdapter { } const writeStream = fs.createWriteStream(dest); - return new Promise((resolve, reject) => { + return new Promise((resolve) => { readStream .pipe(writeStream) .on("error", (e) => { - return { value: null, error: e.message }; + resolve({ value: null, error: e.message }); }) .on("finish", () => { - return { value: dest, error: null }; + resolve({ value: dest, error: null }); }); - writeStream.on("error", reject); + writeStream.on("error", (e) => { + resolve({ value: null, error: e.message }); + }); }); } catch (e) { return { value: null, error: e.message }; @@ -223,64 +227,115 @@ export class AdapterLocal extends AbstractAdapter { } } - async listFiles(): Promise { + async listFiles(bucketName: string): Promise { if (this.configError !== null) { return { value: null, error: this.configError }; } - - const storagePath = path.join(this.directory, this.bucketName); - const files = await this.globFiles(storagePath); - const result: [string, number][] = []; - for (let i = 0; i < files.length; i += 1) { - const f = files[i]; - const stat = await fs.promises.stat(f); - // result.push([path.basename(f), stat.size]) - result.push([f.replace(`${storagePath}/`, ""), stat.size]); + try { + const storagePath = path.join(this._config.directory, bucketName); + const files = await this.globFiles(storagePath); + const result: [string, number][] = []; + for (let i = 0; i < files.length; i += 1) { + const f = files[i]; + const stat = await fs.promises.stat(f); + // result.push([path.basename(f), stat.size]) + result.push([f.replace(`${storagePath}/`, ""), stat.size]); + } + return { value: result, error: null }; + } catch (e) { + return { value: null, error: e.message }; } - return result; } async getFileAsStream( - name: string, + bucketName: string, + fileName: 
string, options: { start?: number; end?: number } = { start: 0 } - ): Promise { - const p = path.join(this.directory, this.bucketName, name); - const s = (await fs.promises.stat(p)).size; - // console.log(p, s, options); - return fs.createReadStream(p, options); + ): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } + + try { + const p = path.join(this._config.directory, bucketName, fileName); + // const { size } = await fs.promises.stat(p); + // console.log(p, size, options); + return { value: fs.createReadStream(p, options), error: null }; + } catch (e) { + return { value: null, error: e.message }; + } + } + + async getFileAsURL(bucketName: string, fileName: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } + + try { + const p = path.join(this._config.directory, bucketName, fileName); + await fs.promises.access(p); + return { value: p, error: null }; + } catch (e) { + return { value: null, error: e.message }; + } } - async removeFile(fileName: string): Promise { - const p = path.join(this.directory, this.bucketName, fileName); - return fs.promises + async removeFile(bucketName: string, fileName: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } + + const p = path.join(this._config.directory, bucketName, fileName); + await fs.promises .unlink(p) .then(() => { - return ""; + return { value: "ok", error: null }; }) .catch((err) => { - // don't throw an error if the file has already been removed (or didn't exist at all) - if (err.message.indexOf("no such file or directory") !== -1) { - return ""; - } - throw new Error(err.message); + // if (err.message.indexOf("no such file or directory") !== -1) { + // return { value: "file doesn't exist", error: null }; + // } + return { value: null, error: err.message }; }); } - async sizeOf(name: string): Promise { - if (!this.bucketName) { - throw new Error("no 
bucket selected"); + async sizeOf(bucketName: string, fileName: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } + + try { + const p = path.join(this._config.directory, bucketName, fileName); + const { size } = await fs.promises.stat(p); + return { value: size, error: null }; + } catch (e) { + return { value: null, error: e.message }; } - const p = path.join(this.directory, this.bucketName, name); - const stat = await fs.promises.stat(p); - return stat.size; } - async fileExists(name: string): Promise { + async bucketExists(bucketName: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } + + try { + await fs.promises.access(path.join(this._config.directory, bucketName)); + return { value: true, error: null }; + } catch (e) { + return { value: false, error: null }; + } + } + + async fileExists(bucketName: string, fileName: string): Promise { + if (this.configError !== null) { + return { value: null, error: this.configError }; + } + try { - await fs.promises.access(path.join(this.directory, this.bucketName, name)); - return true; + await fs.promises.access(path.join(this._config.directory, bucketName, fileName)); + return { value: true, error: null }; } catch (e) { - return false; + return { value: false, error: null }; } } } From 37efc17c8b83645ca844a86c146486071beb0206 Mon Sep 17 00:00:00 2001 From: abudaan Date: Sat, 25 Nov 2023 17:50:45 +0100 Subject: [PATCH 21/26] functional stub Backblaze --- src/AdapterBackblazeB2F.ts | 129 ++++++++++++++++++++++++++----------- 1 file changed, 91 insertions(+), 38 deletions(-) diff --git a/src/AdapterBackblazeB2F.ts b/src/AdapterBackblazeB2F.ts index 952b42f..e4b9e65 100644 --- a/src/AdapterBackblazeB2F.ts +++ b/src/AdapterBackblazeB2F.ts @@ -1,14 +1,22 @@ -import fs, { ReadStream } from "fs"; -import { Readable } from "stream"; +import fs from "fs"; import B2 from "backblaze-b2"; 
require("@gideo-llc/backblaze-b2-upload-any").install(B2); -import { StorageType, IStorage, ConfigBackblazeB2, AdapterConfig, JSON as TypeJSON } from "./types"; -import { parseUrl } from "./util"; - -const init = async (): Promise => { - return true; -}; +import { + StorageType, + IStorage, + ConfigBackblazeB2, + ResultObject, + ResultObjectBuckets, + FileBufferParams, + FilePathParams, + FileStreamParams, + ResultObjectStream, + ResultObjectFiles, + ResultObjectNumber, + ResultObjectBoolean, +} from "./types"; +// import { parseUrl } from "./util"; const getConfiguration = (): ConfigBackblazeB2 => { return { @@ -18,55 +26,100 @@ const getConfiguration = (): ConfigBackblazeB2 => { }; }; -// const init = (): Promise => Promise.resolve(true); const getType = (): string => "string"; -// const getConfiguration = (): AdapterConfig => ({} as AdapterConfig); -const test = (): Promise => Promise.resolve("ok"); -const createBucket = (name: string): Promise => Promise.resolve("ok"); -const selectBucket = (name?: string | null): Promise => Promise.resolve("ok"); -const clearBucket = (name?: string): Promise => Promise.resolve("ok"); -const deleteBucket = (name?: string): Promise => Promise.resolve("ok"); -const listBuckets = (): Promise => Promise.resolve(["string", "string"]); -const getSelectedBucket = (): string => "string"; -const addFileFromPath = (origPath: string, targetPath: string, options?: object): Promise => - Promise.resolve("public url"); -const addFileFromBuffer = (buffer: Buffer, targetPath: string, options?: object): Promise => - Promise.resolve("public url"); -const addFileFromReadable = ( - stream: Readable, - targetPath: string, - options?: object -): Promise => Promise.resolve("public url"); -const getFileAsReadable = ( - name: string, + +const createBucket = async (name: string): Promise => { + return { value: "ok", error: null }; +}; + +const clearBucket = async (name: string): Promise => { + return { value: "ok", error: null }; +}; + +const 
deleteBucket = async (name: string): Promise => { + return { value: "ok", error: null }; +}; + +const listBuckets = async (): Promise => { + return { value: ["string", "string"], error: null }; +}; + +const addFileFromPath = async (params: FilePathParams): Promise => { + return { value: "public url", error: null }; +}; + +const addFileFromBuffer = async (params: FileBufferParams): Promise => { + return { value: "public url", error: null }; +}; + +const addFileFromReadable = async (params: FileStreamParams): Promise => { + return { value: "public url", error: null }; +}; + +const addFile = async ( + params: FilePathParams | FileBufferParams | FileStreamParams +): Promise => { + return { value: "public url", error: null }; +}; + +const getFileAsReadable = async ( + bucketName: string, + fileName: string, options?: { start?: number; end?: number; } -): Promise => Promise.resolve(fs.createReadStream("")); -const removeFile = (name: string): Promise => Promise.resolve("ok"); -const listFiles = (numFiles?: number): Promise<[string, number][]> => Promise.resolve([["s", 0]]); -const sizeOf = (name: string): Promise => Promise.resolve(42); -const fileExists = (name: string): Promise => Promise.resolve(true); +): Promise => { + return { value: fs.createReadStream(""), error: null }; +}; + +const getFileAsURL = async (bucketName: string, fileName: string): Promise => { + return { value: "url", error: null }; +}; + +const removeFile = async (bucketName: string, fileName: string): Promise => { + return { value: "ok", error: null }; +}; + +const listFiles = async (bucketName: string, numFiles?: number): Promise => { + return { value: [["s", 0]], error: null }; +}; + +const sizeOf = async (bucketName: string, fileName: string): Promise => { + return { value: 42, error: null }; +}; + +const fileExists = async (bucketName: string, fileName: string): Promise => { + return { value: true, error: null }; +}; + +const bucketExists = async (bucketName: string): Promise => { + return { 
value: true, error: null }; +}; const adapter: IStorage = { - init, - getType: () => StorageType.B2, + get type() { + return getType(); + }, + get config() { + return getConfiguration(); + }, + getType, getConfiguration, - test, createBucket, - selectBucket, clearBucket, deleteBucket, listBuckets, - getSelectedBucket, + addFile, addFileFromPath, addFileFromBuffer, addFileFromStream: addFileFromReadable, getFileAsStream: getFileAsReadable, + getFileAsURL, removeFile, listFiles, sizeOf, + bucketExists, fileExists, }; From bc1df5b36385481ec8b2701c6d6420a1e9e180b2 Mon Sep 17 00:00:00 2001 From: abudaan Date: Sat, 25 Nov 2023 22:36:13 +0100 Subject: [PATCH 22/26] wip options --- README.md | 29 +++++++++-- changelog.md | 14 ++++++ src/AdapterAmazonS3.ts | 11 +++-- src/AdapterAzureStorageBlob.ts | 17 +++++-- src/AdapterBackblazeB2.ts | 11 +++-- src/AdapterGoogleCloudStorage.ts | 17 +++++-- src/AdapterLocal.ts | 7 ++- src/types.ts | 5 +- tests/test-config-s3.jasmine.ts | 83 ++++++++++++++------------------ 9 files changed, 127 insertions(+), 67 deletions(-) diff --git a/README.md b/README.md index fdfa3d3..730890f 100644 --- a/README.md +++ b/README.md @@ -65,7 +65,7 @@ When instantiating a new `Storage` the argument `config` is used to create an ad 1. using a configuration object (js: `typeof === "object"` ts: `AdapterConfig`) 2. using a configuration URL (`typeof === "string"`) -Internally configuration URL will be converted to a configuration object so any rule that applies to a configuration object also applies to configuration URLs. +Internally the configuration URL will be converted to a configuration object so any rule that applies to a configuration object also applies to configuration URLs. The configuration must specify a type; the type is used to create the appropriate adapter. 
The value of the type is one of the enum members of `StorageType`: @@ -88,14 +88,35 @@ interface IAdapterConfig { type: string; skipCheck?: boolean; bucketName?: string; + options?: { + [id: string]: number | string | boolean | number[] | string[] | boolean[]; + }; } ``` -Besides the mandatory key `type` one or more keys may be mandatory or optional dependent on the type of storage; for instance keys for passing credentials such as `keyFilename` for Google Storage or `accessKeyId` and `secretAccessKey` for Amazon S3, and keys for further configuring the storage service such as `sslEnabled` for Amazon S3. +Besides the mandatory key `type` one or more keys may be mandatory or optional dependent on the type of storage; for instance keys for passing credentials such as `keyFilename` for Google Storage or `accessKeyId` and `secretAccessKey` for Amazon S3, and keys for further configuring the storage service such as `systemClockOffset` for Amazon S3. + +When your create a storage instance a check is performed if the mandatory keys are set in the configuration object. You can skip this check by setting `skipCheck` to `true`. + +Another optional key is `bucketName`. -When your create a storage instance a check is performed if the storage-specific mandatory keys are set in the configuration object. You can skip this check by setting `skipCheck` to `true`. +Note that the `options` object and the query string will be flattened in the config object of the instantiated storage: -Another optional key is `bucketName`; for most cloud storage services it is required to select a bucket after a connection to the service has been made. If you don't want or can't provide a bucket name on initialization you can use `selectBucket` to do so afterwards. 
+```typescript +const conf = { + accessKeyId: "yourKeyId"; + secretAccessKey: "yourAccessKey"; + region: "yourRegion"; + options: { + systemClockOffset: 40000, + useArnRegion: true, + } +} + +const storage = new Storage(conf); +console.log(storage.conf. + +``` ### Configuration URL diff --git a/changelog.md b/changelog.md index 582b007..46162cc 100644 --- a/changelog.md +++ b/changelog.md @@ -48,6 +48,20 @@ - `removeFile` has an additional optional boolean argument `allVersions`; if set to true all version of the specified file will be removed. Default: false - `addFile` is added; you can use this method whenever you use `addFileFromPath`, `addFileFromBuffer` or `addFileFromReadable` - `getConfig()` and `getType()` are implemented as getter as well, resp.: `storage.config` and `storage.type` +- The configuration object are no longer extensible; if you want to provide extra parameters you can use the `options` object, for instance: + +```typescript + const conf: ConfigAmazonS3 = { + accessKeyId: "yourKeyId"; + secretAccessKey?: "yourAccessKey"; + region: "us-east-2"; + endpoint: "yourEndpoint"; + options: { + systemClockOffset: 40000, + useArnRegion: true, + } + } +``` ### Old API (1.5.x) compared to new API (2.x) diff --git a/src/AdapterAmazonS3.ts b/src/AdapterAmazonS3.ts index feed396..b2f7f21 100644 --- a/src/AdapterAmazonS3.ts +++ b/src/AdapterAmazonS3.ts @@ -84,7 +84,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { part2: secretAccessKey, part3: region, bucketName, - queryString, + queryString: options, } = value; cfg = { type, @@ -92,10 +92,15 @@ export class AdapterAmazonS3 extends AbstractAdapter { secretAccessKey, region, bucketName, - ...queryString, + ...options, }; } else { - cfg = { ...config }; + if (typeof config.options !== "undefined") { + cfg = { ...config, ...config.options }; + delete cfg.options; + } else { + cfg = { ...config }; + } } if (cfg.skipCheck === true) { diff --git a/src/AdapterAzureStorageBlob.ts 
b/src/AdapterAzureStorageBlob.ts index 8136e0d..cd21417 100644 --- a/src/AdapterAzureStorageBlob.ts +++ b/src/AdapterAzureStorageBlob.ts @@ -53,16 +53,27 @@ export class AdapterAzureStorageBlob extends AbstractAdapter { return null; } - const { type, part1: storageAccount, part2: accessKey, bucketName, queryString } = value; + const { + type, + part1: storageAccount, + part2: accessKey, + bucketName, + queryString: options, + } = value; cfg = { type, storageAccount, accessKey, bucketName, - ...queryString, + ...options, }; } else { - cfg = { ...config }; + if (typeof config.options !== "undefined") { + cfg = { ...config, ...config.options }; + delete cfg.options; + } else { + cfg = { ...config }; + } } if (cfg.skipCheck === true) { diff --git a/src/AdapterBackblazeB2.ts b/src/AdapterBackblazeB2.ts index 0ff0c8b..c004d9c 100644 --- a/src/AdapterBackblazeB2.ts +++ b/src/AdapterBackblazeB2.ts @@ -60,17 +60,22 @@ export class AdapterBackblazeB2 extends AbstractAdapter { part1: applicationKeyId, part2: applicationKey, bucketName, - queryString, + queryString: options, } = value; cfg = { type, applicationKeyId, applicationKey, bucketName, - ...queryString, + ...options, }; } else { - cfg = { ...config }; + if (typeof config.options !== "undefined") { + cfg = { ...config, ...config.options }; + delete cfg.options; + } else { + cfg = { ...config }; + } } if (cfg.skipCheck === true) { diff --git a/src/AdapterGoogleCloudStorage.ts b/src/AdapterGoogleCloudStorage.ts index f4d988c..28cb401 100644 --- a/src/AdapterGoogleCloudStorage.ts +++ b/src/AdapterGoogleCloudStorage.ts @@ -54,16 +54,27 @@ export class AdapterGoogleCloudStorage extends AbstractAdapter { return null; } - const { type, part1: keyFilename, part2: projectId, bucketName, queryString } = value; + const { + type, + part1: keyFilename, + part2: projectId, + bucketName, + queryString: options, + } = value; cfg = { type, keyFilename, projectId, bucketName, - ...queryString, + ...options, }; } else { - cfg = { 
...config }; + if (typeof config.options !== "undefined") { + cfg = { ...config, ...config.options }; + delete cfg.options; + } else { + cfg = { ...config }; + } } if (cfg.skipCheck === true) { diff --git a/src/AdapterLocal.ts b/src/AdapterLocal.ts index 95302e9..7929b44 100644 --- a/src/AdapterLocal.ts +++ b/src/AdapterLocal.ts @@ -60,7 +60,12 @@ export class AdapterLocal extends AbstractAdapter { ...options, }; } else { - cfg = { ...config }; + if (typeof config.options !== "undefined") { + cfg = { ...config, ...config.options }; + delete cfg.options; + } else { + cfg = { ...config }; + } if (!cfg.directory) { this.configError = "You must specify a value for 'directory' for storage type 'local'"; diff --git a/src/types.ts b/src/types.ts index ad93460..986393b 100644 --- a/src/types.ts +++ b/src/types.ts @@ -29,8 +29,7 @@ export interface IStorage { * Use this only for debugging and with great care as it may expose sensitive information. * * The object contains the key `bucketName` which is the initial value that you've set during - * initialization; if you have selected another bucket after initialization it will still show - * the original value. Use `getSelectedBucket()` to retrieve the current value. + * initialization. 
* * The object also contains the key `options` which are only the options passed in during * initialization; if you want all options, including the default options use `getOptions()` @@ -216,7 +215,7 @@ export type JSON = { // [key: string]: string | number | boolean; // } -export type GenericKey = undefined | number | string | boolean | number[] | string[] | boolean[]; +export type GenericKey = number | string | boolean | number[] | string[] | boolean[]; export interface IAdapterConfig { // type: StorageType; diff --git a/tests/test-config-s3.jasmine.ts b/tests/test-config-s3.jasmine.ts index 1a6637f..b0ad7eb 100644 --- a/tests/test-config-s3.jasmine.ts +++ b/tests/test-config-s3.jasmine.ts @@ -7,26 +7,23 @@ describe(`testing Amazon urls`, () => { // const storage = new Storage("s3://key:secret/can/contain/slashes"); // expect(storage.getType()).toBe(StorageType.S3); // expect(storage.getSelectedBucket()).toBe(""); - // expect((storage.getConfiguration() as ConfigAmazonS3).accessKeyId).toBe("key"); - // expect((storage.getConfiguration() as ConfigAmazonS3).secretAccessKey).toBe( + // expect((storage.config as ConfigAmazonS3).accessKeyId).toBe("key"); + // expect((storage.config as ConfigAmazonS3).secretAccessKey).toBe( // "secret/can/contain/slashes" // ); - // expect((storage.getConfiguration() as ConfigAmazonS3).region).toBe(""); + // expect((storage.config as ConfigAmazonS3).region).toBe(""); // }); it("[1] parameter string", () => { const storage = new Storage( "s3://key:secret/can/contain/slashes@eu-west-2/the-buck?sslEnabled=true" ); - // console.log((storage.getConfiguration() as ConfigAmazonS3)); expect(storage.getType()).toBe(StorageType.S3); - expect(storage.getSelectedBucket()).toBe("the-buck"); - expect((storage.getConfiguration() as ConfigAmazonS3).accessKeyId).toBe("key"); - expect((storage.getConfiguration() as ConfigAmazonS3).secretAccessKey).toBe( - "secret/can/contain/slashes" - ); - expect((storage.getConfiguration() as 
ConfigAmazonS3).region).toBe("eu-west-2"); - expect((storage.getConfiguration() as ConfigAmazonS3).sslEnabled as unknown as string).toBe( + expect(storage.config.bucketName).toBe("the-buck"); + expect((storage.config as ConfigAmazonS3).accessKeyId).toBe("key"); + expect((storage.config as ConfigAmazonS3).secretAccessKey).toBe("secret/can/contain/slashes"); + expect((storage.config as ConfigAmazonS3).region).toBe("eu-west-2"); + expect((storage.config as ConfigAmazonS3).options?.sslEnabled as unknown as string).toBe( "true" ); }); @@ -34,15 +31,15 @@ describe(`testing Amazon urls`, () => { // it("[2a] no region", () => { // const storage = new Storage("s3://key:secret/can/contain/slashes@the-buck"); // expect(storage.getSelectedBucket()).toBe("the-buck"); - // expect((storage.getConfiguration() as ConfigAmazonS3).region).toBe(""); - // expect((storage.getConfiguration() as ConfigAmazonS3).bucketName).toBe("the-buck"); + // expect((storage.config as ConfigAmazonS3).region).toBe(""); + // expect((storage.config as ConfigAmazonS3).bucketName).toBe("the-buck"); // }); // it("[2b] no region 2", () => { // const storage = new Storage("s3://key:secret/can/contain/slashes@/the-buck"); // expect(storage.getSelectedBucket()).toBe("the-buck"); - // expect((storage.getConfiguration() as ConfigAmazonS3).region).toBe(""); - // expect((storage.getConfiguration() as ConfigAmazonS3).bucketName).toBe("the-buck"); + // expect((storage.config as ConfigAmazonS3).region).toBe(""); + // expect((storage.config as ConfigAmazonS3).bucketName).toBe("the-buck"); // }); it("[3] non-existent keys will not be filtered anymore, nor will invalid typed values (e.g. 
a numeric value for useDualStack)", () => { @@ -51,24 +48,20 @@ describe(`testing Amazon urls`, () => { "s3://key:secret/can/contain/slashes@eu-west-2/the-buck", "?sslEnabled=true", "&useDualstack=23", - "&nonExistentKey=true", + "&otherExistentKey=true", "&endPoint=https://kms-fips.us-west-2.amazonaws.com", // note: endpoint should not be camel cased ].join("") ); expect(storage.getType()).toBe(StorageType.S3); - expect(storage.getSelectedBucket()).toBe("the-buck"); - expect((storage.getConfiguration() as ConfigAmazonS3).accessKeyId).toBe("key"); - expect((storage.getConfiguration() as ConfigAmazonS3).secretAccessKey).toBe( - "secret/can/contain/slashes" - ); - expect((storage.getConfiguration() as ConfigAmazonS3).region).toBe("eu-west-2"); - expect((storage.getConfiguration() as ConfigAmazonS3).sslEnabled as unknown as string).toBe( - "true" - ); - expect((storage.getConfiguration() as ConfigAmazonS3).useDualStack as unknown).toBe(undefined); - expect((storage.getConfiguration() as ConfigAmazonS3).nonExistentKey as string).toBe("true"); - expect((storage.getConfiguration() as ConfigAmazonS3).endpoint).toBe(undefined); - expect((storage.getConfiguration() as ConfigAmazonS3).endPoint).toBe( + expect(storage.config.bucketName).toBe("the-buck"); + expect((storage.config as ConfigAmazonS3).accessKeyId).toBe("key"); + expect((storage.config as ConfigAmazonS3).secretAccessKey).toBe("secret/can/contain/slashes"); + expect((storage.config as ConfigAmazonS3).region).toBe("eu-west-2"); + expect(storage.config.options?.sslEnabled as unknown as string).toBe("true"); + expect(storage.config.options?.useDualStack as unknown).toBe(undefined); + expect(storage.config.options?.otherExistentKey as string).toBe("true"); + expect((storage.config as ConfigAmazonS3).endpoint).toBe(undefined); + expect((storage.config as ConfigAmazonS3).endPoint).toBe( "https://kms-fips.us-west-2.amazonaws.com" ); }); @@ -84,12 +77,10 @@ describe(`testing Amazon urls`, () => { }); 
expect(storage.getType()).toBe(StorageType.S3); expect(storage.getSelectedBucket()).toBe("the-buck"); - expect((storage.getConfiguration() as ConfigAmazonS3).accessKeyId).toBe("key"); - expect((storage.getConfiguration() as ConfigAmazonS3).secretAccessKey).toBe( - "secret/can/contain/slashes" - ); - expect((storage.getConfiguration() as ConfigAmazonS3).region).toBe("eu-west-2"); - expect((storage.getConfiguration() as ConfigAmazonS3).sslEnabled).toBe(true); + expect((storage.config as ConfigAmazonS3).accessKeyId).toBe("key"); + expect((storage.config as ConfigAmazonS3).secretAccessKey).toBe("secret/can/contain/slashes"); + expect((storage.config as ConfigAmazonS3).region).toBe("eu-west-2"); + expect((storage.config as ConfigAmazonS3).sslEnabled).toBe(true); }); it("[5] no bucket", () => { @@ -105,29 +96,27 @@ describe(`testing Amazon urls`, () => { // it("[5a] no bucket URL", () => { // const storage = new Storage("s3://key:secret/can/contain/slashes@eu-west-2"); // expect(storage.getSelectedBucket()).toBe("eu-west-2"); - // expect((storage.getConfiguration() as ConfigAmazonS3).region).not.toBe("eu-west-2"); + // expect((storage.config as ConfigAmazonS3).region).not.toBe("eu-west-2"); // }); it("[5a1] no bucket URL", () => { const storage = new Storage("s3://key:secret/can/contain/slashes@eu-west-2/"); expect(storage.getSelectedBucket()).toBe(""); - expect((storage.getConfiguration() as ConfigAmazonS3).region).toBe("eu-west-2"); + expect((storage.config as ConfigAmazonS3).region).toBe("eu-west-2"); }); it("[5b] no bucket URL plus queryString", () => { const storage = new Storage("s3://key:secret/can/contain/slashes@eu-west-2/?sslEnabled=true"); expect(storage.getSelectedBucket()).toBe(""); - expect((storage.getConfiguration() as ConfigAmazonS3).region).toBe("eu-west-2"); - expect((storage.getConfiguration() as ConfigAmazonS3).sslEnabled as unknown as string).toBe( - "true" - ); + expect((storage.config as ConfigAmazonS3).region).toBe("eu-west-2"); + 
expect((storage.config as ConfigAmazonS3).sslEnabled as unknown as string).toBe("true"); }); // it("[5b1] no bucket URL plus queryString", () => { // const storage = new Storage("s3://key:secret/can/contain/slashes@eu-west-2?sslEnabled=true"); // expect(storage.getSelectedBucket()).toBe("eu-west-2"); - // expect((storage.getConfiguration() as ConfigAmazonS3).region).not.toBe("eu-west-2"); - // expect((storage.getConfiguration() as ConfigAmazonS3).sslEnabled).toBe(true); + // expect((storage.config as ConfigAmazonS3).region).not.toBe("eu-west-2"); + // expect((storage.config as ConfigAmazonS3).sslEnabled).toBe(true); // }); it("[6] number and boolean in config object keep their original type", () => { @@ -139,8 +128,8 @@ describe(`testing Amazon urls`, () => { optionNumber: 42, optionBoolean: true, }); - expect((storage.getConfiguration() as ConfigAmazonS3).optionNumber).toBe(42); - expect((storage.getConfiguration() as ConfigAmazonS3).optionBoolean).toBe(true); + expect((storage.config as ConfigAmazonS3).optionNumber).toBe(42); + expect((storage.config as ConfigAmazonS3).optionBoolean).toBe(true); }); it("[7] number and boolean used in config will stay string types", () => { @@ -152,7 +141,7 @@ describe(`testing Amazon urls`, () => { "&optionBoolean=true", ].join("") ); - expect((storage.getConfiguration() as ConfigAmazonS3).optionNumber).toBe("42"); - expect((storage.getConfiguration() as ConfigAmazonS3).optionBoolean).toBe("true"); + expect((storage.config as ConfigAmazonS3).optionNumber).toBe("42"); + expect((storage.config as ConfigAmazonS3).optionBoolean).toBe("true"); }); }); From f0fbe380a5ad356643b4240ce2906c05e2d1f203 Mon Sep 17 00:00:00 2001 From: abudaan Date: Sun, 26 Nov 2023 20:44:35 +0100 Subject: [PATCH 23/26] wip configuration --- changelog.md | 1 + package.json | 2 + src/AdapterAmazonS3.ts | 134 ++++++++++--------------------- src/AdapterAzureStorageBlob.ts | 12 +-- src/AdapterBackblazeB2.ts | 22 +++-- src/AdapterGoogleCloudStorage.ts | 29 +++---- 
src/AdapterLocal.ts | 9 +-- src/types.ts | 49 +---------- tests/test-config-s3.jasmine.ts | 93 ++++++++++----------- tests/testB2.ts | 4 +- tests/testGCS.ts | 14 ++++ tests/testLocal.ts | 35 ++++++++ tests/testS3.ts | 85 ++++++++++++++++++++ 13 files changed, 264 insertions(+), 225 deletions(-) create mode 100644 tests/testGCS.ts create mode 100644 tests/testLocal.ts create mode 100644 tests/testS3.ts diff --git a/changelog.md b/changelog.md index 46162cc..fbb6dae 100644 --- a/changelog.md +++ b/changelog.md @@ -49,6 +49,7 @@ - `addFile` is added; you can use this method whenever you use `addFileFromPath`, `addFileFromBuffer` or `addFileFromReadable` - `getConfig()` and `getType()` are implemented as getter as well, resp.: `storage.config` and `storage.type` - The configuration object are no longer extensible; if you want to provide extra parameters you can use the `options` object, for instance: +- removed adapter config types ```typescript const conf: ConfigAmazonS3 = { diff --git a/package.json b/package.json index e49e7d2..98d438b 100644 --- a/package.json +++ b/package.json @@ -49,6 +49,8 @@ "test": "ts-node ./tests/test.ts", "test-mode": "ts-node ./tests/test-mode.ts", "testB2": "ts-node ./tests/testB2.ts", + "testS3": "ts-node ./tests/testS3.ts", + "testGCS": "ts-node ./tests/testGCS.ts", "testLocal": "ts-node ./tests/testLocal.ts", "ts": "ts-node", "tsc": "node_modules/.bin/tsc", diff --git a/src/AdapterAmazonS3.ts b/src/AdapterAmazonS3.ts index b2f7f21..dd276b3 100644 --- a/src/AdapterAmazonS3.ts +++ b/src/AdapterAmazonS3.ts @@ -19,10 +19,8 @@ import { } from "@aws-sdk/client-s3"; import { getSignedUrl } from "@aws-sdk/s3-request-presigner"; import { - ConfigAmazonS3, AdapterConfig, StorageType, - S3Compatible, ResultObjectStream, ResultObject, ResultObjectBuckets, @@ -37,98 +35,54 @@ import { parseUrl } from "./util"; export class AdapterAmazonS3 extends AbstractAdapter { protected _type = StorageType.S3; - protected _config: ConfigAmazonS3; + protected 
_config: AdapterConfig; private configError: string | null = null; private storage: S3Client; - private s3Compatible: S3Compatible = S3Compatible.Amazon; - constructor(config: string | AdapterConfig) { + constructor(config?: string | AdapterConfig) { super(); - this._config = this.parseConfig(config as ConfigAmazonS3); - - // handle small differences in supported S3 compatible storages - if (typeof (this._config as ConfigAmazonS3).region === "undefined") { - if (this.s3Compatible === S3Compatible.R2) { - this._config.region = "auto"; - } else if (this.s3Compatible === S3Compatible.Backblaze) { - let ep = this._config.endpoint; - ep = ep.substring(ep.indexOf("s3.") + 3); - this._config.region = ep.substring(0, ep.indexOf(".")); - } - } - if (typeof this._config.endpoint === "undefined") { - this.storage = new S3Client({ region: this._config.region }); - } else { - this.storage = new S3Client({ - region: this._config.region, - endpoint: this._config.endpoint, - credentials: { - accessKeyId: this._config.accessKeyId, - secretAccessKey: this._config.secretAccessKey, - }, - }); - } - } - - private parseConfig(config: string | ConfigAmazonS3): ConfigAmazonS3 | null { - let cfg: ConfigAmazonS3; if (typeof config === "string") { - const { value, error } = parseUrl(config); - if (error) { - this.configError = error; - return null; - } - const { - type, - part1: accessKeyId, - part2: secretAccessKey, - part3: region, - bucketName, - queryString: options, - } = value; - cfg = { - type, - accessKeyId, - secretAccessKey, - region, - bucketName, - ...options, - }; + this._config = this.parseConfig(config); } else { - if (typeof config.options !== "undefined") { - cfg = { ...config, ...config.options }; - delete cfg.options; - } else { - cfg = { ...config }; - } + this._config = config; } - if (cfg.skipCheck === true) { - return cfg; + if (this._config === null) { + return; } - if (!cfg.accessKeyId || !cfg.secretAccessKey) { - this.configError = - "You must specify a value for 
both 'applicationKeyId' and 'applicationKey' for storage type 's3'"; - return null; - } + this.storage = new S3Client(this.config); + console.log(this.storage.config); + } - if (typeof cfg.endpoint !== "undefined") { - if (cfg.endpoint.indexOf("r2.cloudflarestorage.com") !== -1) { - this.s3Compatible = S3Compatible.R2; - } else if (cfg.endpoint.indexOf("backblazeb2.com") !== -1) { - this.s3Compatible = S3Compatible.Backblaze; - } - } - if (!cfg.region && this.s3Compatible === S3Compatible.Amazon) { - this.configError = "You must specify a default region for storage type 's3'"; + private parseConfig(config: string): AdapterConfig | null { + const { value, error } = parseUrl(config); + if (error) { + this.configError = error; return null; } - - return cfg; + const { + type, + part1: accessKeyId, + part2: secretAccessKey, + part3: region, + bucketName, + queryString: options, + } = value; + + return { + type, + accessKeyId, + secretAccessKey, + region, + bucketName, + ...options, + }; } - async getFileAsStream( + // Public API + + public async getFileAsStream( bucketName: string, fileName: string, options: { start?: number; end?: number } = { start: 0 } @@ -152,7 +106,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { } } - async removeFile(bucketName: string, fileName: string): Promise { + public async removeFile(bucketName: string, fileName: string): Promise { if (this.configError !== null) { return { value: null, error: this.configError }; } @@ -170,7 +124,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { } } - async createBucket(name: string, options: object = {}): Promise { + public async createBucket(name: string, options: object = {}): Promise { if (this.configError !== null) { return { value: null, error: this.configError }; } @@ -195,7 +149,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { ...options, }; // see issue: https://github.com/aws/aws-sdk-js/issues/3647 - if (typeof this._config.region !== "undefined" && 
this._config.region !== "us-east-1") { + if (typeof this._config.region === "string" && this._config.region !== "us-east-1") { input.CreateBucketConfiguration = { LocationConstraint: BucketLocationConstraint[this._config.region.replace("-", "_")], }; @@ -217,7 +171,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { } } - async clearBucket(name: string): Promise { + public async clearBucket(name: string): Promise { if (this.configError !== null) { return { value: null, error: this.configError }; } @@ -256,7 +210,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { } } - async deleteBucket(name: string): Promise { + public async deleteBucket(name: string): Promise { if (this.configError !== null) { return { value: null, error: this.configError }; } @@ -277,7 +231,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { } } - async listBuckets(): Promise { + public async listBuckets(): Promise { if (this.configError !== null) { return { value: null, error: this.configError }; } @@ -335,7 +289,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { } } - async getFileAsURL(bucketName: string, fileName: string): Promise { + public async getFileAsURL(bucketName: string, fileName: string): Promise { return getSignedUrl( this.storage, new GetObjectCommand({ @@ -352,7 +306,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { }); } - async listFiles(bucketName: string, maxFiles: number = 1000): Promise { + public async listFiles(bucketName: string, maxFiles: number = 1000): Promise { if (this.configError !== null) { return { value: null, error: this.configError }; } @@ -373,7 +327,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { } } - async sizeOf(bucketName: string, fileName: string): Promise { + public async sizeOf(bucketName: string, fileName: string): Promise { if (this.configError !== null) { return { value: null, error: this.configError }; } @@ -391,7 +345,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { } } - async 
bucketExists(bucketName: string): Promise { + public async bucketExists(bucketName: string): Promise { if (this.configError !== null) { return { value: null, error: this.configError }; } @@ -410,7 +364,7 @@ export class AdapterAmazonS3 extends AbstractAdapter { }); } - async fileExists(bucketName: string, fileName: string): Promise { + public async fileExists(bucketName: string, fileName: string): Promise { if (this.configError !== null) { return { value: null, error: this.configError }; } diff --git a/src/AdapterAzureStorageBlob.ts b/src/AdapterAzureStorageBlob.ts index cd21417..1274396 100644 --- a/src/AdapterAzureStorageBlob.ts +++ b/src/AdapterAzureStorageBlob.ts @@ -40,7 +40,8 @@ export class AdapterAzureStorageBlob extends AbstractAdapter { ); this.storage = new BlobServiceClient( `https://${(this._config as ConfigAzureStorageBlob).storageAccount}.blob.core.windows.net`, - this.sharedKeyCredential + this.sharedKeyCredential, + this._config.options ); } @@ -65,15 +66,10 @@ export class AdapterAzureStorageBlob extends AbstractAdapter { storageAccount, accessKey, bucketName, - ...options, + options, }; } else { - if (typeof config.options !== "undefined") { - cfg = { ...config, ...config.options }; - delete cfg.options; - } else { - cfg = { ...config }; - } + cfg = { ...config }; } if (cfg.skipCheck === true) { diff --git a/src/AdapterBackblazeB2.ts b/src/AdapterBackblazeB2.ts index c004d9c..f40b317 100644 --- a/src/AdapterBackblazeB2.ts +++ b/src/AdapterBackblazeB2.ts @@ -3,7 +3,6 @@ import { Readable } from "stream"; import { AbstractAdapter } from "./AbstractAdapter"; import { StorageType, - ConfigBackblazeB2, BackblazeB2File, ResultObjectBoolean, ResultObject, @@ -21,6 +20,7 @@ import { ResultObjectNumber, BackblazeAxiosResponse, BackblazeBucketOptions, + AdapterConfig, } from "./types"; import { parseUrl, validateName } from "./util"; @@ -28,17 +28,20 @@ require("@gideo-llc/backblaze-b2-upload-any").install(B2); export class AdapterBackblazeB2 extends 
AbstractAdapter { protected _type = StorageType.B2; - protected _config: ConfigBackblazeB2; + protected _config: AdapterConfig; private storage: B2; private authorized: boolean = false; private configError: string | null = null; - constructor(config: string | ConfigBackblazeB2) { + constructor(config?: string | AdapterConfig) { super(); this._config = this.parseConfig(config); if (this._config !== null) { try { - this.storage = new B2(this._config); + const c = { ...this._config, ...(this._config.options as object) }; + delete c.options; + this.storage = new B2(c); + console.log(this.storage.config); } catch (e) { this.configError = e.message; } @@ -47,8 +50,8 @@ export class AdapterBackblazeB2 extends AbstractAdapter { // util members - private parseConfig(config: string | ConfigBackblazeB2): ConfigBackblazeB2 | null { - let cfg: ConfigBackblazeB2; + private parseConfig(config: string | AdapterConfig): AdapterConfig | null { + let cfg: AdapterConfig; if (typeof config === "string") { const { error, value } = parseUrl(config); if (error !== null) { @@ -70,12 +73,7 @@ export class AdapterBackblazeB2 extends AbstractAdapter { ...options, }; } else { - if (typeof config.options !== "undefined") { - cfg = { ...config, ...config.options }; - delete cfg.options; - } else { - cfg = { ...config }; - } + cfg = { ...config }; } if (cfg.skipCheck === true) { diff --git a/src/AdapterGoogleCloudStorage.ts b/src/AdapterGoogleCloudStorage.ts index 28cb401..8c180d1 100644 --- a/src/AdapterGoogleCloudStorage.ts +++ b/src/AdapterGoogleCloudStorage.ts @@ -8,7 +8,6 @@ import { import { AbstractAdapter } from "./AbstractAdapter"; import { StorageType, - ConfigGoogleCloud, ResultObject, ResultObjectStream, FileBufferParams, @@ -20,17 +19,24 @@ import { ResultObjectBoolean, } from "./types"; import { parseUrl } from "./util"; +import { AdapterConfig } from "@tweedegolf/storage-abstraction"; export class AdapterGoogleCloudStorage extends AbstractAdapter { protected _type = 
StorageType.GCS; - protected _config: ConfigGoogleCloud; + protected _config: AdapterConfig; private configError: string | null = null; private storage: GoogleCloudStorage; - constructor(config: string | ConfigGoogleCloud) { + constructor(config?: string | AdapterConfig) { super(); - this._config = this.parseConfig(config); - this.storage = new GoogleCloudStorage(this._config as ConfigGoogleCloud); + // this._config = this.parseConfig(config); + // const c = { + // ...this._config, + // ...this._config.options, + // }; + // delete c.options; + // this.storage = new GoogleCloudStorage(c); + this.storage = new GoogleCloudStorage(config as object); } /** @@ -45,8 +51,8 @@ export class AdapterGoogleCloudStorage extends AbstractAdapter { return json.project_id; } - private parseConfig(config: string | ConfigGoogleCloud): ConfigGoogleCloud { - let cfg: ConfigGoogleCloud; + private parseConfig(config: string | AdapterConfig): AdapterConfig { + let cfg: AdapterConfig; if (typeof config === "string") { const { value, error } = parseUrl(config); if (error) { @@ -66,15 +72,10 @@ export class AdapterGoogleCloudStorage extends AbstractAdapter { keyFilename, projectId, bucketName, - ...options, + options, }; } else { - if (typeof config.options !== "undefined") { - cfg = { ...config, ...config.options }; - delete cfg.options; - } else { - cfg = { ...config }; - } + cfg = { ...config }; } if (cfg.skipCheck === true) { diff --git a/src/AdapterLocal.ts b/src/AdapterLocal.ts index 7929b44..c416b4e 100644 --- a/src/AdapterLocal.ts +++ b/src/AdapterLocal.ts @@ -57,15 +57,10 @@ export class AdapterLocal extends AbstractAdapter { directory, bucketName, mode: this.mode, - ...options, + options, }; } else { - if (typeof config.options !== "undefined") { - cfg = { ...config, ...config.options }; - delete cfg.options; - } else { - cfg = { ...config }; - } + cfg = { ...config }; if (!cfg.directory) { this.configError = "You must specify a value for 'directory' for storage type 'local'"; diff 
--git a/src/types.ts b/src/types.ts index 986393b..f368ac8 100644 --- a/src/types.ts +++ b/src/types.ts @@ -20,7 +20,7 @@ export interface IStorage { /** * Same as `getType` but implemented as getter - * @returns adapter tyoe + * @returns adapter type, e.g. 'gcs', 'b2', 'local' etc. */ type: string; @@ -217,56 +217,13 @@ export type JSON = { export type GenericKey = number | string | boolean | number[] | string[] | boolean[]; -export interface IAdapterConfig { +export interface AdapterConfig { // type: StorageType; type: string; - skipCheck?: boolean; - bucketName?: string; - options?: { - [id: string]: GenericKey; - }; -} - -export interface ConfigAmazonS3 extends IAdapterConfig { - accessKeyId?: string; - secretAccessKey?: string; - region?: string; - endpoint?: string; -} - -export interface ConfigAzureStorageBlob extends IAdapterConfig { - storageAccount?: string; - accessKey?: string; -} - -export interface ConfigBackblazeB2 extends IAdapterConfig { - applicationKeyId?: string; - applicationKey?: string; -} - -export interface ConfigGoogleCloud extends IAdapterConfig { - keyFilename?: string; - projectId?: string; -} - -export interface ConfigLocal extends IAdapterConfig { - directory: string; - mode?: number | string; -} - -export interface ConfigTemplate extends IAdapterConfig { - someKey: string; - someOtherKey: string; // [id: string]: GenericKey; + [id: string]: number | string | boolean | number[] | string[] | boolean[]; } -export type AdapterConfig = - | ConfigLocal - | ConfigAmazonS3 - | ConfigGoogleCloud - | ConfigBackblazeB2 - | ConfigTemplate; - export type BackblazeAxiosResponse = { response: { data: { diff --git a/tests/test-config-s3.jasmine.ts b/tests/test-config-s3.jasmine.ts index b0ad7eb..242ba1f 100644 --- a/tests/test-config-s3.jasmine.ts +++ b/tests/test-config-s3.jasmine.ts @@ -1,49 +1,48 @@ import "jasmine"; import { Storage } from "../src/Storage"; -import { ConfigAmazonS3, StorageType } from "../src/types"; +import { AdapterAmazonS3 } 
from "../src/AdapterAmazonS3"; +import { StorageType } from "../src/types"; describe(`testing Amazon urls`, () => { // it("[0] no options", () => { // const storage = new Storage("s3://key:secret/can/contain/slashes"); // expect(storage.getType()).toBe(StorageType.S3); // expect(storage.getSelectedBucket()).toBe(""); - // expect((storage.config as ConfigAmazonS3).accessKeyId).toBe("key"); - // expect((storage.config as ConfigAmazonS3).secretAccessKey).toBe( + // expect(storage.config.accessKeyId).toBe("key"); + // expect(storage.config.secretAccessKey).toBe( // "secret/can/contain/slashes" // ); - // expect((storage.config as ConfigAmazonS3).region).toBe(""); + // expect(storage.config.region).toBe(""); // }); it("[1] parameter string", () => { - const storage = new Storage( + const storage = new AdapterAmazonS3( "s3://key:secret/can/contain/slashes@eu-west-2/the-buck?sslEnabled=true" ); expect(storage.getType()).toBe(StorageType.S3); expect(storage.config.bucketName).toBe("the-buck"); - expect((storage.config as ConfigAmazonS3).accessKeyId).toBe("key"); - expect((storage.config as ConfigAmazonS3).secretAccessKey).toBe("secret/can/contain/slashes"); - expect((storage.config as ConfigAmazonS3).region).toBe("eu-west-2"); - expect((storage.config as ConfigAmazonS3).options?.sslEnabled as unknown as string).toBe( - "true" - ); + expect(storage.config.accessKeyId).toBe("key"); + expect(storage.config.secretAccessKey).toBe("secret/can/contain/slashes"); + expect(storage.config.region).toBe("eu-west-2"); + expect(storage.config.options?.sslEnabled as unknown as string).toBe("true"); }); // it("[2a] no region", () => { // const storage = new Storage("s3://key:secret/can/contain/slashes@the-buck"); // expect(storage.getSelectedBucket()).toBe("the-buck"); - // expect((storage.config as ConfigAmazonS3).region).toBe(""); - // expect((storage.config as ConfigAmazonS3).bucketName).toBe("the-buck"); + // expect(storage.config.region).toBe(""); + // 
expect(storage.config.bucketName).toBe("the-buck"); // }); // it("[2b] no region 2", () => { // const storage = new Storage("s3://key:secret/can/contain/slashes@/the-buck"); // expect(storage.getSelectedBucket()).toBe("the-buck"); - // expect((storage.config as ConfigAmazonS3).region).toBe(""); - // expect((storage.config as ConfigAmazonS3).bucketName).toBe("the-buck"); + // expect(storage.config.region).toBe(""); + // expect(storage.config.bucketName).toBe("the-buck"); // }); it("[3] non-existent keys will not be filtered anymore, nor will invalid typed values (e.g. a numeric value for useDualStack)", () => { - const storage = new Storage( + const storage = new AdapterAmazonS3( [ "s3://key:secret/can/contain/slashes@eu-west-2/the-buck", "?sslEnabled=true", @@ -54,37 +53,37 @@ describe(`testing Amazon urls`, () => { ); expect(storage.getType()).toBe(StorageType.S3); expect(storage.config.bucketName).toBe("the-buck"); - expect((storage.config as ConfigAmazonS3).accessKeyId).toBe("key"); - expect((storage.config as ConfigAmazonS3).secretAccessKey).toBe("secret/can/contain/slashes"); - expect((storage.config as ConfigAmazonS3).region).toBe("eu-west-2"); + expect(storage.config.accessKeyId).toBe("key"); + expect(storage.config.secretAccessKey).toBe("secret/can/contain/slashes"); + expect(storage.config.region).toBe("eu-west-2"); expect(storage.config.options?.sslEnabled as unknown as string).toBe("true"); expect(storage.config.options?.useDualStack as unknown).toBe(undefined); expect(storage.config.options?.otherExistentKey as string).toBe("true"); - expect((storage.config as ConfigAmazonS3).endpoint).toBe(undefined); - expect((storage.config as ConfigAmazonS3).endPoint).toBe( - "https://kms-fips.us-west-2.amazonaws.com" - ); + expect(storage.config.options?.endPoint).toBe("https://kms-fips.us-west-2.amazonaws.com"); + expect(storage.config.endpoint).toBe(undefined); }); it("[4] object", () => { - const storage = new Storage({ + const storage = new AdapterAmazonS3({ 
type: "s3", accessKeyId: "key", secretAccessKey: "secret/can/contain/slashes", region: "eu-west-2", bucketName: "the-buck", - sslEnabled: true, + options: { + sslEnabled: true, + }, }); expect(storage.getType()).toBe(StorageType.S3); - expect(storage.getSelectedBucket()).toBe("the-buck"); - expect((storage.config as ConfigAmazonS3).accessKeyId).toBe("key"); - expect((storage.config as ConfigAmazonS3).secretAccessKey).toBe("secret/can/contain/slashes"); - expect((storage.config as ConfigAmazonS3).region).toBe("eu-west-2"); - expect((storage.config as ConfigAmazonS3).sslEnabled).toBe(true); + expect(storage.config.bucketName).toBe("the-buck"); + expect(storage.config.accessKeyId).toBe("key"); + expect(storage.config.secretAccessKey).toBe("secret/can/contain/slashes"); + expect(storage.config.region).toBe("eu-west-2"); + expect(storage.config.options?.sslEnabled).toBe(true); }); it("[5] no bucket", () => { - const storage = new Storage({ + const storage = new AdapterAmazonS3({ type: "s3", region: "eu-west-2", accessKeyId: "key", @@ -96,31 +95,33 @@ describe(`testing Amazon urls`, () => { // it("[5a] no bucket URL", () => { // const storage = new Storage("s3://key:secret/can/contain/slashes@eu-west-2"); // expect(storage.getSelectedBucket()).toBe("eu-west-2"); - // expect((storage.config as ConfigAmazonS3).region).not.toBe("eu-west-2"); + // expect(storage.config.region).not.toBe("eu-west-2"); // }); it("[5a1] no bucket URL", () => { - const storage = new Storage("s3://key:secret/can/contain/slashes@eu-west-2/"); + const storage = new AdapterAmazonS3("s3://key:secret/can/contain/slashes@eu-west-2/"); expect(storage.getSelectedBucket()).toBe(""); - expect((storage.config as ConfigAmazonS3).region).toBe("eu-west-2"); + expect(storage.config.region).toBe("eu-west-2"); }); it("[5b] no bucket URL plus queryString", () => { - const storage = new Storage("s3://key:secret/can/contain/slashes@eu-west-2/?sslEnabled=true"); + const storage = new AdapterAmazonS3( + 
"s3://key:secret/can/contain/slashes@eu-west-2/?sslEnabled=true" + ); expect(storage.getSelectedBucket()).toBe(""); - expect((storage.config as ConfigAmazonS3).region).toBe("eu-west-2"); - expect((storage.config as ConfigAmazonS3).sslEnabled as unknown as string).toBe("true"); + expect(storage.config.region).toBe("eu-west-2"); + expect(storage.config.sslEnabled as unknown as string).toBe("true"); }); // it("[5b1] no bucket URL plus queryString", () => { - // const storage = new Storage("s3://key:secret/can/contain/slashes@eu-west-2?sslEnabled=true"); + // const storage = new AdapterAmazonS3("s3://key:secret/can/contain/slashes@eu-west-2?sslEnabled=true"); // expect(storage.getSelectedBucket()).toBe("eu-west-2"); - // expect((storage.config as ConfigAmazonS3).region).not.toBe("eu-west-2"); - // expect((storage.config as ConfigAmazonS3).sslEnabled).toBe(true); + // expect(storage.config.region).not.toBe("eu-west-2"); + // expect(storage.config.sslEnabled).toBe(true); // }); it("[6] number and boolean in config object keep their original type", () => { - const storage = new Storage({ + const storage = new AdapterAmazonS3({ type: "s3", region: "eu-west-2", accessKeyId: "key", @@ -128,12 +129,12 @@ describe(`testing Amazon urls`, () => { optionNumber: 42, optionBoolean: true, }); - expect((storage.config as ConfigAmazonS3).optionNumber).toBe(42); - expect((storage.config as ConfigAmazonS3).optionBoolean).toBe(true); + expect(storage.config.optionNumber).toBe(42); + expect(storage.config.optionBoolean).toBe(true); }); it("[7] number and boolean used in config will stay string types", () => { - const storage = new Storage( + const storage = new AdapterAmazonS3( [ "s3://key:secret/can/contain/slashes", "@eu-west-2/", @@ -141,7 +142,7 @@ describe(`testing Amazon urls`, () => { "&optionBoolean=true", ].join("") ); - expect((storage.config as ConfigAmazonS3).optionNumber).toBe("42"); - expect((storage.config as ConfigAmazonS3).optionBoolean).toBe("true"); + 
expect(storage.config.optionNumber).toBe("42"); + expect(storage.config.optionBoolean).toBe("true"); }); }); diff --git a/tests/testB2.ts b/tests/testB2.ts index aeb648a..e56ffe3 100644 --- a/tests/testB2.ts +++ b/tests/testB2.ts @@ -77,7 +77,7 @@ async function testB2() { // targetPath: "test/image1.jpg", // }); // console.timeEnd("addFileFromPath"); - + /* console.time("addFileFromStream"); const data4 = await storage.addFileFromStream({ bucketName: "the-buck", @@ -90,7 +90,7 @@ async function testB2() { const response = await storage.clearBucket("the-buck"); console.log(response); console.timeEnd("clearBucket"); - +*/ // console.time("listFiles"); // const data2 = await storage.listFiles("the-buck"); // console.log(data2); diff --git a/tests/testGCS.ts b/tests/testGCS.ts new file mode 100644 index 0000000..a87d7c1 --- /dev/null +++ b/tests/testGCS.ts @@ -0,0 +1,14 @@ +import dotenv from "dotenv"; +import { StorageType } from "../src/types"; +import { AdapterGoogleCloudStorage } from "../src/AdapterGoogleCloudStorage"; +import { parseMode, parseUrl } from "../src/util"; + +dotenv.config(); + +async function test() { + const a = new AdapterGoogleCloudStorage(); + const b = await a.listBuckets(); + console.log(b); +} + +test(); diff --git a/tests/testLocal.ts b/tests/testLocal.ts new file mode 100644 index 0000000..f19983e --- /dev/null +++ b/tests/testLocal.ts @@ -0,0 +1,35 @@ +import dotenv from "dotenv"; +import { StorageType } from "../src/types"; +import { ConfigBackblazeB2 } from "@tweedegolf/storage-abstraction"; +import { parseMode, parseUrl } from "../src/util"; + +dotenv.config(); + +const applicationKeyId = process.env.B2_APPLICATION_KEY_ID; +const applicationKey = process.env.B2_APPLICATION_KEY; +const configBackblaze: ConfigBackblazeB2 = { + type: StorageType.B2, + applicationKeyId, + applicationKey, + bucketName: process.env.BUCKET_NAME, + versioning: true, +}; + +function test() { + // const config = "local://tests/tmp/@the-buck?param=value"; + const 
config = "s3://key:secret/can/contain/slashes@eu-west-2/the-buck"; + const { value, error } = parseUrl(config); + if (error) { + this.configError = error; + return null; + } + + console.log(value); + + console.log(parseMode("0o777")); + console.log(parseMode("511")); + console.log(parseMode(0o777)); + console.log(parseMode(511)); +} + +test(); diff --git a/tests/testS3.ts b/tests/testS3.ts new file mode 100644 index 0000000..8f8d801 --- /dev/null +++ b/tests/testS3.ts @@ -0,0 +1,85 @@ +import dotenv from "dotenv"; +import { StorageType } from "../src/types"; +import { AdapterAmazonS3 } from "../src/AdapterAmazonS3"; +// import { parseMode, parseUrl } from "../src/util"; +import { ListBucketsCommand, S3Client } from "@aws-sdk/client-s3"; +import { Storage } from "../src/Storage"; + +dotenv.config(); + +// const accessKeyId = process.env["AWS_ACCESS_KEY_ID"]; +// const secretAccessKey = process.env["AWS_SECRET_ACCESS_KEY"]; + +// const configS3: ConfigAmazonS3 = { +// type: StorageType.S3, +// region: "us-east-1", +// skipCheck: true, +// // accessKeyId, +// // secretAccessKey, +// bucketName: process.env.BUCKET_NAME, +// }; + +async function test() { + // const s = new AdapterAmazonS3({ region: "eu-west-1" }); + const s = new Storage({ type: StorageType.S3, region: "eu-west-1" }); + console.log(s.config); + const b = await s.listBuckets(); + console.log(b); + + // const s3 = new S3Client({ region: "us-east-1" }); + // const command = new ListBucketsCommand({}); + // s3.send(command) + // .then((response) => { + // const bucketNames = response.Buckets?.map((d) => d?.Name); + // console.log(bucketNames); + // }) + // .catch((e) => { + // console.log(e); + // }); +} + +test(); + +/* +this.storage = new S3Client({ + region: this._config.region, + endpoint: this._config.endpoint, + credentials: { + accessKeyId: this._config.accessKeyId, + secretAccessKey: this._config.secretAccessKey, + }, +}); + + +if (typeof this._config.region === "undefined") { + if 
(this.s3Compatible === S3Compatible.R2) { + this._config.region = "auto"; + } else if (this.s3Compatible === S3Compatible.Backblaze) { + let ep = this._config.endpoint as string; + ep = ep.substring(ep.indexOf("s3.") + 3); + this._config.region = ep.substring(0, ep.indexOf(".")); + } +} +if (typeof this._config.endpoint === "undefined") { + // this.storage = new S3Client({ region: this._config.region, ...this._config.options }); + this.storage = new S3Client({ region: "us-east-1" }); + console.log(this.storage.config); +} else { + this.storage = new S3Client(config); +} + + + if (typeof cfg.endpoint !== "undefined") { + if (cfg.endpoint.indexOf("r2.cloudflarestorage.com") !== -1) { + this.s3Compatible = S3Compatible.R2; + } else if (cfg.endpoint.indexOf("backblazeb2.com") !== -1) { + this.s3Compatible = S3Compatible.Backblaze; + } + } + if (!cfg.region && this.s3Compatible === S3Compatible.Amazon) { + this.configError = "You must specify a default region for storage type 's3'"; + return null; + } + + +*/ From cd03183ada501acab1755c7253a1e68d51579e20 Mon Sep 17 00:00:00 2001 From: abudaan Date: Mon, 27 Nov 2023 23:08:44 +0100 Subject: [PATCH 24/26] wip config --- changelog.md | 1 + package.json | 1 + src/AbstractAdapter.ts | 24 ++++++- src/AdapterAmazonS3.ts | 43 ++---------- src/AdapterAzureStorageBlob.ts | 78 +++++---------------- src/AdapterBackblazeB2.ts | 56 ++------------- src/AdapterBackblazeB2F.ts | 13 ++-- src/AdapterGoogleCloudStorage.ts | 70 ++---------------- src/AdapterLocal.ts | 4 +- src/Storage.ts | 18 +++-- src/types.ts | 34 ++------- src/util.ts | 117 +++++++++++++++++++------------ tests/testAzure.ts | 12 ++++ 13 files changed, 174 insertions(+), 297 deletions(-) create mode 100644 tests/testAzure.ts diff --git a/changelog.md b/changelog.md index fbb6dae..29e2bd7 100644 --- a/changelog.md +++ b/changelog.md @@ -50,6 +50,7 @@ - `getConfig()` and `getType()` are implemented as getter as well, resp.: `storage.config` and `storage.type` - The 
configuration object are no longer extensible; if you want to provide extra parameters you can use the `options` object, for instance: - removed adapter config types +- added a member `configError` in storage that can be queried with a getter e.g. `storage.configError` ```typescript const conf: ConfigAmazonS3 = { diff --git a/package.json b/package.json index 98d438b..bcab06a 100644 --- a/package.json +++ b/package.json @@ -51,6 +51,7 @@ "testB2": "ts-node ./tests/testB2.ts", "testS3": "ts-node ./tests/testS3.ts", "testGCS": "ts-node ./tests/testGCS.ts", + "testAzure": "ts-node ./tests/testAzure.ts", "testLocal": "ts-node ./tests/testLocal.ts", "ts": "ts-node", "tsc": "node_modules/.bin/tsc", diff --git a/src/AbstractAdapter.ts b/src/AbstractAdapter.ts index 26dc291..7562602 100644 --- a/src/AbstractAdapter.ts +++ b/src/AbstractAdapter.ts @@ -11,10 +11,24 @@ import { ResultObjectNumber, ResultObjectStream, } from "./types"; +import { parseUrl } from "./util"; export abstract class AbstractAdapter implements IStorage { protected _type = "abstract-adapter"; - protected _config: AdapterConfig; + protected _config: AdapterConfig | null; + protected _configError: string | null = null; + + constructor(config?: string | AdapterConfig) { + if (typeof config === "string") { + const { value, error } = parseUrl(config); + if (error) { + this._configError = error; + } + this._config = value; + } else { + this._config = { ...config }; + } + } get type(): string { return this._type; @@ -24,10 +38,18 @@ export abstract class AbstractAdapter implements IStorage { return this._config; } + get configError(): string { + return this._configError; + } + getType(): string { return this.type; } + getConfigError(): string { + return this.configError; + } + getConfiguration(): AdapterConfig { return this.config; } diff --git a/src/AdapterAmazonS3.ts b/src/AdapterAmazonS3.ts index dd276b3..e2fe3a0 100644 --- a/src/AdapterAmazonS3.ts +++ b/src/AdapterAmazonS3.ts @@ -36,48 +36,15 @@ import { 
parseUrl } from "./util"; export class AdapterAmazonS3 extends AbstractAdapter { protected _type = StorageType.S3; protected _config: AdapterConfig; - private configError: string | null = null; + protected _configError: string | null = null; private storage: S3Client; constructor(config?: string | AdapterConfig) { - super(); - if (typeof config === "string") { - this._config = this.parseConfig(config); - } else { - this._config = config; + super(config); + if (this._configError === null) { + this.storage = new S3Client(this.config); + console.log(this.storage.config); } - - if (this._config === null) { - return; - } - - this.storage = new S3Client(this.config); - console.log(this.storage.config); - } - - private parseConfig(config: string): AdapterConfig | null { - const { value, error } = parseUrl(config); - if (error) { - this.configError = error; - return null; - } - const { - type, - part1: accessKeyId, - part2: secretAccessKey, - part3: region, - bucketName, - queryString: options, - } = value; - - return { - type, - accessKeyId, - secretAccessKey, - region, - bucketName, - ...options, - }; } // Public API diff --git a/src/AdapterAzureStorageBlob.ts b/src/AdapterAzureStorageBlob.ts index 1274396..d2b9183 100644 --- a/src/AdapterAzureStorageBlob.ts +++ b/src/AdapterAzureStorageBlob.ts @@ -8,7 +8,6 @@ import { StorageSharedKeyCredential, } from "@azure/storage-blob"; import { - ConfigAzureStorageBlob, StorageType, ResultObjectStream, ResultObject, @@ -19,74 +18,33 @@ import { FileBufferParams, FilePathParams, FileStreamParams, + AdapterConfig, } from "./types"; -import { parseUrl } from "./util"; import { CreateReadStreamOptions } from "@google-cloud/storage"; export class AdapterAzureStorageBlob extends AbstractAdapter { protected _type = StorageType.AZURE; - protected _config: ConfigAzureStorageBlob; + protected _config: AdapterConfig; + protected _configError: string | null = null; private sharedKeyCredential: StorageSharedKeyCredential; - private 
configError: string | null = null; private storage: BlobServiceClient; - constructor(config: string | ConfigAzureStorageBlob) { - super(); - this._config = this.parseConfig(config as ConfigAzureStorageBlob); - - this.sharedKeyCredential = new StorageSharedKeyCredential( - (this._config as ConfigAzureStorageBlob).storageAccount, - (this._config as ConfigAzureStorageBlob).accessKey - ); - this.storage = new BlobServiceClient( - `https://${(this._config as ConfigAzureStorageBlob).storageAccount}.blob.core.windows.net`, - this.sharedKeyCredential, - this._config.options - ); - } - - private parseConfig(config: string | ConfigAzureStorageBlob): ConfigAzureStorageBlob { - let cfg: ConfigAzureStorageBlob; - if (typeof config === "string") { - const { value, error } = parseUrl(config); - if (error) { - this.configError = error; - return null; - } - - const { - type, - part1: storageAccount, - part2: accessKey, - bucketName, - queryString: options, - } = value; - cfg = { - type, - storageAccount, - accessKey, - bucketName, - options, - }; - } else { - cfg = { ...config }; - } - - if (cfg.skipCheck === true) { - return cfg; - } - - if (!cfg.storageAccount) { - this.configError = - "You must specify a value for 'storageAccount' for storage type 'azurestorageblob'"; - return null; - } - if (!cfg.accessKey) { - this.configError = - "You must specify a value for 'accessKey' for storage type 'azurestorageblob'"; - return null; + constructor(config?: string | AdapterConfig) { + super(config); + if (this._configError === null) { + this.sharedKeyCredential = new StorageSharedKeyCredential( + this._config.storageAccount as string, + this._config.accessKey as string + ); + // this.storage = new BlobServiceClient( + // `https://${this._config.storageAccount as string}.blob.core.windows.net`, + // this.sharedKeyCredential, + // this._config.options as object + // ); + this.storage = new BlobServiceClient( + `https://${this._config.storageAccount as string}.blob.core.windows.net` + ); } 
- return cfg; } async getFileAsStream( diff --git a/src/AdapterBackblazeB2.ts b/src/AdapterBackblazeB2.ts index f40b317..cf5e86e 100644 --- a/src/AdapterBackblazeB2.ts +++ b/src/AdapterBackblazeB2.ts @@ -22,72 +22,30 @@ import { BackblazeBucketOptions, AdapterConfig, } from "./types"; -import { parseUrl, validateName } from "./util"; +import { validateName } from "./util"; require("@gideo-llc/backblaze-b2-upload-any").install(B2); export class AdapterBackblazeB2 extends AbstractAdapter { protected _type = StorageType.B2; protected _config: AdapterConfig; - private storage: B2; + protected _configError: string | null = null; + private storage: B2 = null; private authorized: boolean = false; - private configError: string | null = null; constructor(config?: string | AdapterConfig) { - super(); - this._config = this.parseConfig(config); - if (this._config !== null) { + super(config); + if (this._configError === null) { try { - const c = { ...this._config, ...(this._config.options as object) }; - delete c.options; - this.storage = new B2(c); - console.log(this.storage.config); + this.storage = new B2(this._config); } catch (e) { - this.configError = e.message; + this._configError = e.message; } } } // util members - private parseConfig(config: string | AdapterConfig): AdapterConfig | null { - let cfg: AdapterConfig; - if (typeof config === "string") { - const { error, value } = parseUrl(config); - if (error !== null) { - this.configError = error; - return null; - } - const { - type, - part1: applicationKeyId, - part2: applicationKey, - bucketName, - queryString: options, - } = value; - cfg = { - type, - applicationKeyId, - applicationKey, - bucketName, - ...options, - }; - } else { - cfg = { ...config }; - } - - if (cfg.skipCheck === true) { - return cfg; - } - - if (!cfg.applicationKey || !cfg.applicationKeyId) { - this.configError = - "You must specify a value for both 'applicationKeyId' and 'applicationKey' for storage type 'b2'"; - return null; - } - return cfg; - } 
- private async authorize(): Promise { if (this.configError !== null) { return { value: null, error: this.configError }; diff --git a/src/AdapterBackblazeB2F.ts b/src/AdapterBackblazeB2F.ts index e4b9e65..c263db8 100644 --- a/src/AdapterBackblazeB2F.ts +++ b/src/AdapterBackblazeB2F.ts @@ -5,7 +5,6 @@ require("@gideo-llc/backblaze-b2-upload-any").install(B2); import { StorageType, IStorage, - ConfigBackblazeB2, ResultObject, ResultObjectBuckets, FileBufferParams, @@ -15,10 +14,10 @@ import { ResultObjectFiles, ResultObjectNumber, ResultObjectBoolean, + AdapterConfig, } from "./types"; -// import { parseUrl } from "./util"; -const getConfiguration = (): ConfigBackblazeB2 => { +const getConfiguration = (): AdapterConfig => { return { type: StorageType.B2, applicationKeyId: "", @@ -28,6 +27,8 @@ const getConfiguration = (): ConfigBackblazeB2 => { const getType = (): string => "string"; +const getConfigError = (): string => "string"; + const createBucket = async (name: string): Promise => { return { value: "ok", error: null }; }; @@ -104,7 +105,11 @@ const adapter: IStorage = { get config() { return getConfiguration(); }, + get configError() { + return getConfigError(); + }, getType, + getConfigError, getConfiguration, createBucket, clearBucket, @@ -123,7 +128,7 @@ const adapter: IStorage = { fileExists, }; -const createAdapter = (config: ConfigBackblazeB2): IStorage => { +const createAdapter = (config: AdapterConfig): IStorage => { console.log("create functional adapter"); const state = { applicationKeyId: config.applicationKeyId, diff --git a/src/AdapterGoogleCloudStorage.ts b/src/AdapterGoogleCloudStorage.ts index 8c180d1..61a5298 100644 --- a/src/AdapterGoogleCloudStorage.ts +++ b/src/AdapterGoogleCloudStorage.ts @@ -1,10 +1,6 @@ import fs from "fs"; import { Readable } from "stream"; -import { - Storage as GoogleCloudStorage, - File, - CreateReadStreamOptions, -} from "@google-cloud/storage"; +import { Storage as GoogleCloudStorage, CreateReadStreamOptions } from 
"@google-cloud/storage"; import { AbstractAdapter } from "./AbstractAdapter"; import { StorageType, @@ -18,75 +14,19 @@ import { ResultObjectNumber, ResultObjectBoolean, } from "./types"; -import { parseUrl } from "./util"; import { AdapterConfig } from "@tweedegolf/storage-abstraction"; export class AdapterGoogleCloudStorage extends AbstractAdapter { protected _type = StorageType.GCS; protected _config: AdapterConfig; - private configError: string | null = null; + protected _configError: string | null = null; private storage: GoogleCloudStorage; constructor(config?: string | AdapterConfig) { - super(); - // this._config = this.parseConfig(config); - // const c = { - // ...this._config, - // ...this._config.options, - // }; - // delete c.options; - // this.storage = new GoogleCloudStorage(c); - this.storage = new GoogleCloudStorage(config as object); - } - - /** - * @param {string} keyFile - path to the keyFile - * - * Read in the keyFile and retrieve the projectId, this is function - * is called when the user did not provide a projectId - */ - private getGCSProjectId(keyFile: string): string { - const data = fs.readFileSync(keyFile).toString("utf-8"); - const json = JSON.parse(data); - return json.project_id; - } - - private parseConfig(config: string | AdapterConfig): AdapterConfig { - let cfg: AdapterConfig; - if (typeof config === "string") { - const { value, error } = parseUrl(config); - if (error) { - this.configError = error; - return null; - } - - const { - type, - part1: keyFilename, - part2: projectId, - bucketName, - queryString: options, - } = value; - cfg = { - type, - keyFilename, - projectId, - bucketName, - options, - }; - } else { - cfg = { ...config }; - } - - if (cfg.skipCheck === true) { - return cfg; + super(config); + if (this._configError === null) { + this.storage = new GoogleCloudStorage(this._config as object); } - - if (cfg.projectId === "" && cfg.keyFilename !== "") { - cfg.projectId = this.getGCSProjectId(cfg.keyFilename); - } - - 
return cfg; } async getFileAsURL(bucketName: string, fileName: string): Promise { diff --git a/src/AdapterLocal.ts b/src/AdapterLocal.ts index c416b4e..aa55c89 100644 --- a/src/AdapterLocal.ts +++ b/src/AdapterLocal.ts @@ -22,8 +22,8 @@ import { parseQuerystring, parseMode, parseUrl, validateName } from "./util"; export class AdapterLocal extends AbstractAdapter { protected _type = StorageType.LOCAL; protected _config: ConfigLocal; + protected _configError: string | null = null; private mode: number = 0o777; - private configError: string | null = null; constructor(config: ConfigLocal) { super(); @@ -36,7 +36,7 @@ export class AdapterLocal extends AbstractAdapter { if (typeof config === "string") { const { value, error } = parseUrl(config); if (error) { - this.configError = error; + this._configError = error; return null; } diff --git a/src/Storage.ts b/src/Storage.ts index 6365794..220e8f5 100644 --- a/src/Storage.ts +++ b/src/Storage.ts @@ -12,8 +12,6 @@ import { ResultObjectNumber, ResultObjectBoolean, } from "./types"; -import { AdapterGoogleCloudStorage } from "./AdapterGoogleCloudStorage"; -import { AdapterLocal } from "./AdapterLocal"; // add new storage adapters here const adapterClasses = { @@ -49,19 +47,27 @@ export class Storage implements IStorage { } get type(): string { - return this.adapter.getType(); + return this.adapter.type; } public getType(): string { - return this.adapter.getType(); + return this.adapter.type; } get config(): AdapterConfig { - return this.adapter.getConfiguration(); + return this.adapter.config; } public getConfiguration(): AdapterConfig { - return this.adapter.getConfiguration(); + return this.adapter.config; + } + + get configError(): string { + return this.adapter.configError; + } + + public getConfigError(): string { + return this.adapter.configError; } public switchAdapter(args: string | AdapterConfig): void { diff --git a/src/types.ts b/src/types.ts index f368ac8..ac54153 100644 --- a/src/types.ts +++ b/src/types.ts @@ 
-44,6 +44,10 @@ export interface IStorage { */ config: AdapterConfig; + getConfigError(): string; + + configError: string; + /** * Returns an object that contains both the options passed with the configuration and the * default options of the storage type if not overruled by the options you passed in. @@ -200,28 +204,9 @@ export enum StorageType { MINIO = "minio", } -export type JSON = { - [id: string]: - | string - | number - | boolean - | string[] - | number[] - | boolean[] - | { [id: string]: JSON }; -}; - -// export interface Options { -// [key: string]: string | number | boolean; -// } - -export type GenericKey = number | string | boolean | number[] | string[] | boolean[]; - export interface AdapterConfig { - // type: StorageType; type: string; - // [id: string]: GenericKey; - [id: string]: number | string | boolean | number[] | string[] | boolean[]; + [id: string]: number | string | boolean | number[] | string[] | boolean[] | object; } export type BackblazeAxiosResponse = { @@ -287,14 +272,7 @@ export enum S3Compatible { export type ParseUrlResult = { error: string | null; - value: { - type: string; - part1: string; - part2: string; - part3: string; - bucketName: string; - queryString: { [key: string]: string }; - }; + value: AdapterConfig; }; export interface ResultObject { diff --git a/src/util.ts b/src/util.ts index 4fedff8..480889f 100644 --- a/src/util.ts +++ b/src/util.ts @@ -1,5 +1,6 @@ import { BucketLocationConstraint } from "@aws-sdk/client-s3"; import { ParseUrlResult, ResultObjectNumber } from "./types"; +import { AdapterConfig } from "@tweedegolf/storage-abstraction"; /** * @param: url @@ -25,8 +26,7 @@ export const parseQuerystring = (url: string): { [id: string]: string } => { /** * @param url - * Parses a url string into fragments and parses the query string into a - * key-value object. + * Parses a url into a key-value object. 
*/ export const parseUrl = (url: string): ParseUrlResult => { if (url.indexOf("://") === -1) { @@ -36,47 +36,19 @@ export const parseUrl = (url: string): ParseUrlResult => { // if (Object.values(StorageType).includes(type as StorageType) === false) { // return { value: null, error: `"${type}" is not a valid storage type` }; // } - let config = url.substring(url.indexOf("://") + 3); - const at = config.indexOf("@"); - const questionMark = config.indexOf("?"); - const colon = config.indexOf(":"); - let part1 = ""; - let part2 = ""; - let part3 = ""; - let bucketName = ""; - - // parse options - const queryString: { [key: string]: string } = parseQuerystring(url); - if (questionMark !== -1) { - config = config.substring(0, questionMark); - } - // console.log("config", config); - - // get bucket name and region - let bucketString = ""; - if (at !== -1) { - bucketString = config.substring(at + 1); - const slash = bucketString.indexOf("/"); - if (slash !== -1) { - // Amazon S3 @region/bucket - bucketName = bucketString.substring(slash + 1); - part3 = bucketString.substring(0, slash); - } else { - bucketName = bucketString; - } - // console.log(bucketName, bucketString, slash); - config = config.substring(0, at); - } - // get credentials - if (colon !== -1) { - [part1, part2] = config.split(":"); - } else { - part1 = config; - } - - // console.log(type, part1, part2, region, bucketName, queryString); - return { error: null, value: { type, part1, part2, part3, bucketName, queryString } }; + const config: AdapterConfig = url + .substring(url.indexOf("://") + 3) + .split("&") + .map((pair) => pair.split("=")) + .reduce( + (acc, val) => { + acc[val[0]] = val[1]; + return acc; + }, + { type } + ); + return { value: config, error: null }; }; /** @@ -176,8 +148,65 @@ export const readFilePromise = (path: string): Promise => } }); }); + + export const BucketLocationConstraintAsString = (c: BucketLocationConstraint): string => { + return; + }; */ -export const 
BucketLocationConstraintAsString = (c: BucketLocationConstraint): string => { - return; +/** + * @param url + * Parses a url string into fragments and parses the query string into a + * key-value object. + +export const parseUrl = (url: string): ParseUrlResult => { + if (url.indexOf("://") === -1) { + return { value: null, error: "Please provide a valid configuration url" }; + } + const type = url.substring(0, url.indexOf("://")); + // if (Object.values(StorageType).includes(type as StorageType) === false) { + // return { value: null, error: `"${type}" is not a valid storage type` }; + // } + let config = url.substring(url.indexOf("://") + 3); + const at = config.indexOf("@"); + const questionMark = config.indexOf("?"); + const colon = config.indexOf(":"); + let part1 = ""; + let part2 = ""; + let part3 = ""; + let bucketName = ""; + + // parse options + const queryString: { [key: string]: string } = parseQuerystring(url); + if (questionMark !== -1) { + config = config.substring(0, questionMark); + } + // console.log("config", config); + + // get bucket name and region + let bucketString = ""; + if (at !== -1) { + bucketString = config.substring(at + 1); + const slash = bucketString.indexOf("/"); + if (slash !== -1) { + // Amazon S3 @region/bucket + bucketName = bucketString.substring(slash + 1); + part3 = bucketString.substring(0, slash); + } else { + bucketName = bucketString; + } + // console.log(bucketName, bucketString, slash); + config = config.substring(0, at); + } + + // get credentials + if (colon !== -1) { + [part1, part2] = config.split(":"); + } else { + part1 = config; + } + + // console.log(type, part1, part2, region, bucketName, queryString); + return { error: null, value: { type, part1, part2, part3, bucketName, queryString } }; }; +*/ diff --git a/tests/testAzure.ts b/tests/testAzure.ts new file mode 100644 index 0000000..8ba17f2 --- /dev/null +++ b/tests/testAzure.ts @@ -0,0 +1,12 @@ +import dotenv from "dotenv"; +import { AdapterAzureStorageBlob 
} from "../src/AdapterAzureStorageBlob"; + +dotenv.config(); + +async function test() { + const a = new AdapterAzureStorageBlob(); + const b = await a.listBuckets(); + console.log(b); +} + +test(); From 3b930b0dddd22444e191df46a3bd5f5c8876b2e3 Mon Sep 17 00:00:00 2001 From: abudaan Date: Tue, 28 Nov 2023 14:02:03 +0100 Subject: [PATCH 25/26] wip auth Azure --- package-lock.json | 245 +++++++++++++++++++++++++++++++-- package.json | 1 + src/AdapterAzureStorageBlob.ts | 55 +++++--- src/types.ts | 9 +- tests/testAzure.ts | 3 +- tests/testGCS.ts | 16 ++- 6 files changed, 298 insertions(+), 31 deletions(-) diff --git a/package-lock.json b/package-lock.json index aa648c7..5a748f5 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,20 +1,20 @@ { "name": "@tweedegolf/storage-abstraction", - "version": "1.4.7", + "version": "1.5.2", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "@tweedegolf/storage-abstraction", - "version": "1.4.7", + "version": "1.5.2", "license": "MIT", "dependencies": { "@aws-sdk/client-s3": "^3.445.0", "@aws-sdk/s3-request-presigner": "^3.445.0", + "@azure/identity": "^4.0.0", "@azure/storage-blob": "^12.17.0", "@gideo-llc/backblaze-b2-upload-any": "^0.1.4", "@google-cloud/storage": "^7.6.0", - "@ramda/zip": "^0.26.1", "backblaze-b2": "^1.7.0", "glob": "^10.3.10", "rimraf": "^5.0.5", @@ -804,6 +804,34 @@ "node": ">=14.0.0" } }, + "node_modules/@azure/core-client": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/@azure/core-client/-/core-client-1.7.3.tgz", + "integrity": "sha512-kleJ1iUTxcO32Y06dH9Pfi9K4U+Tlb111WXEnbt7R/ne+NLRwppZiTGJuTD5VVoxTMK5NTbEtm5t2vcdNCFe2g==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-auth": "^1.4.0", + "@azure/core-rest-pipeline": "^1.9.1", + "@azure/core-tracing": "^1.0.0", + "@azure/core-util": "^1.0.0", + "@azure/logger": "^1.0.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + 
"node_modules/@azure/core-client/node_modules/@azure/core-tracing": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.0.1.tgz", + "integrity": "sha512-I5CGMoLtX+pI17ZdiFJZgxMJApsK6jjfm85hpgp3oazCdq5Wxgh4wMr7ge/TTWW1B5WBuvIOI1fMU/FrOAMKrw==", + "dependencies": { + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, "node_modules/@azure/core-http": { "version": "3.0.3", "license": "MIT", @@ -850,6 +878,36 @@ "node": ">=14.0.0" } }, + "node_modules/@azure/core-rest-pipeline": { + "version": "1.12.2", + "resolved": "https://registry.npmjs.org/@azure/core-rest-pipeline/-/core-rest-pipeline-1.12.2.tgz", + "integrity": "sha512-wLLJQdL4v1yoqYtEtjKNjf8pJ/G/BqVomAWxcKOR1KbZJyCEnCv04yks7Y1NhJ3JzxbDs307W67uX0JzklFdCg==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-auth": "^1.4.0", + "@azure/core-tracing": "^1.0.1", + "@azure/core-util": "^1.3.0", + "@azure/logger": "^1.0.0", + "form-data": "^4.0.0", + "http-proxy-agent": "^5.0.0", + "https-proxy-agent": "^5.0.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=16.0.0" + } + }, + "node_modules/@azure/core-rest-pipeline/node_modules/@azure/core-tracing": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.0.1.tgz", + "integrity": "sha512-I5CGMoLtX+pI17ZdiFJZgxMJApsK6jjfm85hpgp3oazCdq5Wxgh4wMr7ge/TTWW1B5WBuvIOI1fMU/FrOAMKrw==", + "dependencies": { + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, "node_modules/@azure/core-tracing": { "version": "1.0.0-preview.13", "license": "MIT", @@ -872,6 +930,65 @@ "node": ">=16.0.0" } }, + "node_modules/@azure/identity": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/@azure/identity/-/identity-4.0.0.tgz", + "integrity": "sha512-gtPYxIL0kI39Dw4t3HvlbfhOdXqKD2MqDgynlklF0j728j51dcKgRo6FLX0QzpBw/1gGfLxjMXqq3nKOSQ2lmA==", + "dependencies": { + "@azure/abort-controller": "^1.0.0", + "@azure/core-auth": 
"^1.5.0", + "@azure/core-client": "^1.4.0", + "@azure/core-rest-pipeline": "^1.1.0", + "@azure/core-tracing": "^1.0.0", + "@azure/core-util": "^1.0.0", + "@azure/logger": "^1.0.0", + "@azure/msal-browser": "^3.5.0", + "@azure/msal-node": "^2.5.1", + "events": "^3.0.0", + "jws": "^4.0.0", + "open": "^8.0.0", + "stoppable": "^1.1.0", + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=18.0.0" + } + }, + "node_modules/@azure/identity/node_modules/@azure/core-tracing": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/@azure/core-tracing/-/core-tracing-1.0.1.tgz", + "integrity": "sha512-I5CGMoLtX+pI17ZdiFJZgxMJApsK6jjfm85hpgp3oazCdq5Wxgh4wMr7ge/TTWW1B5WBuvIOI1fMU/FrOAMKrw==", + "dependencies": { + "tslib": "^2.2.0" + }, + "engines": { + "node": ">=12.0.0" + } + }, + "node_modules/@azure/identity/node_modules/define-lazy-prop": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/define-lazy-prop/-/define-lazy-prop-2.0.0.tgz", + "integrity": "sha512-Ds09qNh8yw3khSjiJjiUInaGX9xlqZDY7JVryGxdxV7NPeuqQfplOpQ66yJFZut3jLa5zOwkXw1g9EI2uKh4Og==", + "engines": { + "node": ">=8" + } + }, + "node_modules/@azure/identity/node_modules/open": { + "version": "8.4.2", + "resolved": "https://registry.npmjs.org/open/-/open-8.4.2.tgz", + "integrity": "sha512-7x81NCL719oNbsq/3mh+hVrAWmFuEYUqrq/Iw3kUzH8ReypT9QQ0BLoJS7/G9k6N81XjW4qHWtjWwe/9eLy1EQ==", + "dependencies": { + "define-lazy-prop": "^2.0.0", + "is-docker": "^2.1.1", + "is-wsl": "^2.2.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/@azure/logger": { "version": "1.0.4", "license": "MIT", @@ -882,6 +999,38 @@ "node": ">=14.0.0" } }, + "node_modules/@azure/msal-browser": { + "version": "3.5.0", + "resolved": "https://registry.npmjs.org/@azure/msal-browser/-/msal-browser-3.5.0.tgz", + "integrity": "sha512-2NtMuel4CI3UEelCPKkNRXgKzpWEX48fvxIvPz7s0/sTcCaI08r05IOkH2GkXW+czUOtuY6+oGafJCpumnjRLg==", + "dependencies": { + 
"@azure/msal-common": "14.4.0" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@azure/msal-common": { + "version": "14.4.0", + "resolved": "https://registry.npmjs.org/@azure/msal-common/-/msal-common-14.4.0.tgz", + "integrity": "sha512-ffCymScQuMKVj+YVfwNI52A5Tu+uiZO2eTf+c+3TXxdAssks4nokJhtr+uOOMxH0zDi6d1OjFKFKeXODK0YLSg==", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/@azure/msal-node": { + "version": "2.5.1", + "resolved": "https://registry.npmjs.org/@azure/msal-node/-/msal-node-2.5.1.tgz", + "integrity": "sha512-PsPRISqCG253HQk1cAS7eJW7NWTbnBGpG+vcGGz5z4JYRdnM2EIXlj1aBpXCdozenEPtXEVvHn2ELleW1w82nQ==", + "dependencies": { + "@azure/msal-common": "14.4.0", + "jsonwebtoken": "^9.0.0", + "uuid": "^8.3.0" + }, + "engines": { + "node": "16|| 18 || 20" + } + }, "node_modules/@azure/storage-blob": { "version": "12.17.0", "license": "MIT", @@ -1249,10 +1398,6 @@ "url": "https://opencollective.com/unts" } }, - "node_modules/@ramda/zip": { - "version": "0.26.1", - "license": "MIT" - }, "node_modules/@ronomon/crypto-async": { "version": "5.0.1", "hasInstallScript": true, @@ -3604,7 +3749,6 @@ }, "node_modules/is-docker": { "version": "2.2.1", - "dev": true, "license": "MIT", "bin": { "is-docker": "cli.js" @@ -3740,7 +3884,6 @@ }, "node_modules/is-wsl": { "version": "2.2.0", - "dev": true, "license": "MIT", "dependencies": { "is-docker": "^2.0.0" @@ -3842,6 +3985,46 @@ "dev": true, "license": "ISC" }, + "node_modules/jsonwebtoken": { + "version": "9.0.2", + "resolved": "https://registry.npmjs.org/jsonwebtoken/-/jsonwebtoken-9.0.2.tgz", + "integrity": "sha512-PRp66vJ865SSqOlgqS8hujT5U4AOgMfhrwYIuIhfKaoSCZcirrmASQr8CX7cUg+RMih+hgznrjp99o+W4pJLHQ==", + "dependencies": { + "jws": "^3.2.2", + "lodash.includes": "^4.3.0", + "lodash.isboolean": "^3.0.3", + "lodash.isinteger": "^4.0.4", + "lodash.isnumber": "^3.0.3", + "lodash.isplainobject": "^4.0.6", + "lodash.isstring": "^4.0.1", + "lodash.once": "^4.0.0", + "ms": "^2.1.1", + "semver": "^7.5.4" + 
}, + "engines": { + "node": ">=12", + "npm": ">=6" + } + }, + "node_modules/jsonwebtoken/node_modules/jwa": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/jwa/-/jwa-1.4.1.tgz", + "integrity": "sha512-qiLX/xhEEFKUAJ6FiBMbes3w9ATzyk5W7Hvzpa/SLYdxNtng+gcurvrI7TbACjIXlsJyr05/S1oUhZrc63evQA==", + "dependencies": { + "buffer-equal-constant-time": "1.0.1", + "ecdsa-sig-formatter": "1.0.11", + "safe-buffer": "^5.0.1" + } + }, + "node_modules/jsonwebtoken/node_modules/jws": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/jws/-/jws-3.2.2.tgz", + "integrity": "sha512-YHlZCB6lMTllWDtSPHz/ZXTsi8S00usEV6v1tjq8tOUZzw7DpSDWVXjXDre6ed1w/pd495ODpHZYSdkRTsa0HA==", + "dependencies": { + "jwa": "^1.4.1", + "safe-buffer": "^5.0.1" + } + }, "node_modules/jwa": { "version": "2.0.0", "license": "MIT", @@ -3938,11 +4121,46 @@ "dev": true, "license": "MIT" }, + "node_modules/lodash.includes": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/lodash.includes/-/lodash.includes-4.3.0.tgz", + "integrity": "sha512-W3Bx6mdkRTGtlJISOvVD/lbqjTlPPUDTMnlXZFnVwi9NKJ6tiAk6LVdlhZMm17VZisqhKcgzpO5Wz91PCt5b0w==" + }, + "node_modules/lodash.isboolean": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isboolean/-/lodash.isboolean-3.0.3.tgz", + "integrity": "sha512-Bz5mupy2SVbPHURB98VAcw+aHh4vRV5IPNhILUCsOzRmsTmSQ17jIuqopAentWoehktxGd9e/hbIXq980/1QJg==" + }, + "node_modules/lodash.isinteger": { + "version": "4.0.4", + "resolved": "https://registry.npmjs.org/lodash.isinteger/-/lodash.isinteger-4.0.4.tgz", + "integrity": "sha512-DBwtEWN2caHQ9/imiNeEA5ys1JoRtRfY3d7V9wkqtbycnAmTvRRmbHKDV4a0EYc678/dia0jrte4tjYwVBaZUA==" + }, + "node_modules/lodash.isnumber": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/lodash.isnumber/-/lodash.isnumber-3.0.3.tgz", + "integrity": "sha512-QYqzpfwO3/CWf3XP+Z+tkQsfaLL/EnUlXWVkIk5FUPc4sBdTehEqZONuyRt2P67PXAk+NXmTBcc97zw9t1FQrw==" + }, + "node_modules/lodash.isplainobject": { + "version": 
"4.0.6", + "resolved": "https://registry.npmjs.org/lodash.isplainobject/-/lodash.isplainobject-4.0.6.tgz", + "integrity": "sha512-oSXzaWypCMHkPC3NvBEaPHf0KsA5mvPrOPgQWDsbg8n7orZ290M0BmC/jgRZ4vcJ6DTAhjrsSYgdsW/F+MFOBA==" + }, + "node_modules/lodash.isstring": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.isstring/-/lodash.isstring-4.0.1.tgz", + "integrity": "sha512-0wJxfxH1wgO3GrbuP+dTTk7op+6L41QCXbGINEmD+ny/G/eCqGzxyCsh7159S+mgDDcoarnBw6PC1PS5+wUGgw==" + }, "node_modules/lodash.merge": { "version": "4.6.2", "dev": true, "license": "MIT" }, + "node_modules/lodash.once": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/lodash.once/-/lodash.once-4.1.1.tgz", + "integrity": "sha512-Sb487aTOCr9drQVL8pIxOzVhafOjZN9UU54hiN8PU3uAiSV7lx1yYNpbNmex2PK6dSJoNTSJUUswT651yww3Mg==" + }, "node_modules/lodash.template": { "version": "4.5.0", "dev": true, @@ -4846,6 +5064,15 @@ "dev": true, "license": "BSD-3-Clause" }, + "node_modules/stoppable": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/stoppable/-/stoppable-1.1.0.tgz", + "integrity": "sha512-KXDYZ9dszj6bzvnEMRYvxgeTHU74QBFL54XKtP3nyMuJ81CFYtABZ3bAzL2EdFUaEwJOBOgENyFj3R7oTzDyyw==", + "engines": { + "node": ">=4", + "npm": ">=6" + } + }, "node_modules/stream-combiner": { "version": "0.0.4", "dev": true, diff --git a/package.json b/package.json index bcab06a..e794403 100644 --- a/package.json +++ b/package.json @@ -9,6 +9,7 @@ "dependencies": { "@aws-sdk/client-s3": "^3.445.0", "@aws-sdk/s3-request-presigner": "^3.445.0", + "@azure/identity": "^4.0.0", "@azure/storage-blob": "^12.17.0", "@gideo-llc/backblaze-b2-upload-any": "^0.1.4", "@google-cloud/storage": "^7.6.0", diff --git a/src/AdapterAzureStorageBlob.ts b/src/AdapterAzureStorageBlob.ts index d2b9183..4f70760 100644 --- a/src/AdapterAzureStorageBlob.ts +++ b/src/AdapterAzureStorageBlob.ts @@ -7,6 +7,7 @@ import { BlobServiceClient, StorageSharedKeyCredential, } from "@azure/storage-blob"; +import { 
DefaultAzureCredential } from "@azure/identity"; import { StorageType, ResultObjectStream, @@ -18,32 +19,54 @@ import { FileBufferParams, FilePathParams, FileStreamParams, - AdapterConfig, + AdapterConfigAzure, } from "./types"; import { CreateReadStreamOptions } from "@google-cloud/storage"; export class AdapterAzureStorageBlob extends AbstractAdapter { protected _type = StorageType.AZURE; - protected _config: AdapterConfig; + protected _config: AdapterConfigAzure; protected _configError: string | null = null; private sharedKeyCredential: StorageSharedKeyCredential; private storage: BlobServiceClient; - constructor(config?: string | AdapterConfig) { + constructor(config?: string | AdapterConfigAzure) { super(config); if (this._configError === null) { - this.sharedKeyCredential = new StorageSharedKeyCredential( - this._config.storageAccount as string, - this._config.accessKey as string - ); - // this.storage = new BlobServiceClient( - // `https://${this._config.storageAccount as string}.blob.core.windows.net`, - // this.sharedKeyCredential, - // this._config.options as object - // ); - this.storage = new BlobServiceClient( - `https://${this._config.storageAccount as string}.blob.core.windows.net` - ); + if (typeof this.config.accountName === "undefined") { + this._configError = 'Please provide a value for "storageAccount"'; + return; + } + // option 1: accountKey + if (typeof this.config.accountKey !== "undefined") { + try { + this.sharedKeyCredential = new StorageSharedKeyCredential( + this.config.accountName as string, + this.config.accountKey as string + ); + } catch (e) { + this._configError = e.message; //JSON.parse(e.message).code; + } + this.storage = new BlobServiceClient( + `https://${this.config.storageAccount as string}.blob.core.windows.net`, + this.sharedKeyCredential, + this.config.options as object + ); + // option 2: sasToken + } else if (typeof this.config.sasToken !== "undefined") { + this.storage = new BlobServiceClient( + 
`https://${this.config.accountName}.blob.core.windows.net?${this.config.sasToken}`, + null, + this.config.options as object + ); + // option 3: passwordless + } else { + this.storage = new BlobServiceClient( + `https://${this.config.accountName as string}.blob.core.windows.net`, + new DefaultAzureCredential(), + this.config.options as object + ); + } } } @@ -175,7 +198,7 @@ export class AdapterAzureStorageBlob extends AbstractAdapter { } return { value: bucketNames, error: null }; } catch (e) { - return { value: null, error: JSON.stringify(e) }; + return { value: null, error: e }; } } diff --git a/src/types.ts b/src/types.ts index ac54153..481500d 100644 --- a/src/types.ts +++ b/src/types.ts @@ -206,7 +206,14 @@ export enum StorageType { export interface AdapterConfig { type: string; - [id: string]: number | string | boolean | number[] | string[] | boolean[] | object; + [id: string]: any; // eslint-disable-line + // [id: string]: number | string | boolean | number[] | string[] | boolean[] | object; +} + +export interface AdapterConfigAzure extends AdapterConfig { + accountName: string; + accountKey?: string; + sasToken?: string; } export type BackblazeAxiosResponse = { diff --git a/tests/testAzure.ts b/tests/testAzure.ts index 8ba17f2..79b03d9 100644 --- a/tests/testAzure.ts +++ b/tests/testAzure.ts @@ -4,9 +4,10 @@ import { AdapterAzureStorageBlob } from "../src/AdapterAzureStorageBlob"; dotenv.config(); async function test() { - const a = new AdapterAzureStorageBlob(); + const a = new AdapterAzureStorageBlob({ type: "azure", accountName: "tweedegolf" }); const b = await a.listBuckets(); console.log(b); + // console.log(a.configError); } test(); diff --git a/tests/testGCS.ts b/tests/testGCS.ts index a87d7c1..109bc72 100644 --- a/tests/testGCS.ts +++ b/tests/testGCS.ts @@ -3,12 +3,20 @@ import { StorageType } from "../src/types"; import { AdapterGoogleCloudStorage } from "../src/AdapterGoogleCloudStorage"; import { parseMode, parseUrl } from "../src/util"; 
-dotenv.config(); +// dotenv.config(); async function test() { - const a = new AdapterGoogleCloudStorage(); - const b = await a.listBuckets(); - console.log(b); + // const a = new AdapterGoogleCloudStorage({ + // type: "gcs", + // projectId: "default-demo-app-35b34", + // keyFilename: "/home/abudaan/Projects/storage-abstraction/gcs.json", + // }); + // const a = new AdapterGoogleCloudStorage( + // "gcs://projectId=default-demo-app-35b34&keyFilename=gcs.json" + // ); + const a = new AdapterGoogleCloudStorage("gcs://projectId=default-demo-app-35b34"); + // const b = await a.listBuckets(); + console.log(a.configError); } test(); From ceb633a14207da7e7ab36f261cb94e5df368b45f Mon Sep 17 00:00:00 2001 From: abudaan Date: Tue, 28 Nov 2023 19:25:06 +0100 Subject: [PATCH 26/26] wip Azure credentials --- src/AdapterAzureStorageBlob.ts | 9 ++--- tests/testAzure.ts | 11 ++++-- tests/testGCS.ts | 62 ++++++++++++++++++++++++++-------- 3 files changed, 60 insertions(+), 22 deletions(-) diff --git a/src/AdapterAzureStorageBlob.ts b/src/AdapterAzureStorageBlob.ts index 4f70760..51a8ca0 100644 --- a/src/AdapterAzureStorageBlob.ts +++ b/src/AdapterAzureStorageBlob.ts @@ -34,10 +34,11 @@ export class AdapterAzureStorageBlob extends AbstractAdapter { super(config); if (this._configError === null) { if (typeof this.config.accountName === "undefined") { - this._configError = 'Please provide a value for "storageAccount"'; + this._configError = '[configError] Please provide a value for "storageAccount"'; return; } // option 1: accountKey + console.log("option 1: accountKey"); if (typeof this.config.accountKey !== "undefined") { try { this.sharedKeyCredential = new StorageSharedKeyCredential( @@ -45,10 +46,10 @@ export class AdapterAzureStorageBlob extends AbstractAdapter { this.config.accountKey as string ); } catch (e) { - this._configError = e.message; //JSON.parse(e.message).code; + this._configError = `[configError] ${JSON.parse(e.message).code}`; } this.storage = new 
BlobServiceClient( - `https://${this.config.storageAccount as string}.blob.core.windows.net`, + `https://${this.config.accountName as string}.blob.core.windows.net`, this.sharedKeyCredential, this.config.options as object ); @@ -198,7 +199,7 @@ export class AdapterAzureStorageBlob extends AbstractAdapter { } return { value: bucketNames, error: null }; } catch (e) { - return { value: null, error: e }; + return { value: null, error: `[listBuckets] ${e}` }; } } diff --git a/tests/testAzure.ts b/tests/testAzure.ts index 79b03d9..6e556a7 100644 --- a/tests/testAzure.ts +++ b/tests/testAzure.ts @@ -1,11 +1,16 @@ import dotenv from "dotenv"; -import { AdapterAzureStorageBlob } from "../src/AdapterAzureStorageBlob"; +import { Storage } from "../src/Storage"; dotenv.config(); async function test() { - const a = new AdapterAzureStorageBlob({ type: "azure", accountName: "tweedegolf" }); - const b = await a.listBuckets(); + const s = new Storage({ + type: "azure", + accountName: "tweedegolf", + accountKey: + "WHtrTUfF3PLc9Dxnua4Dp7hquH6UTGhE93DhVCwBwVeUNnHceLpuV66myHEO89z54yQhKIlYnMhe+AStdvl51A==", + }); + const b = await s.listBuckets(); console.log(b); // console.log(a.configError); } diff --git a/tests/testGCS.ts b/tests/testGCS.ts index 109bc72..a68c039 100644 --- a/tests/testGCS.ts +++ b/tests/testGCS.ts @@ -1,22 +1,54 @@ import dotenv from "dotenv"; -import { StorageType } from "../src/types"; -import { AdapterGoogleCloudStorage } from "../src/AdapterGoogleCloudStorage"; -import { parseMode, parseUrl } from "../src/util"; +import { ResultObjectBuckets } from "../src/types"; +import { Storage } from "../src/Storage"; -// dotenv.config(); +dotenv.config(); async function test() { - // const a = new AdapterGoogleCloudStorage({ - // type: "gcs", - // projectId: "default-demo-app-35b34", - // keyFilename: "/home/abudaan/Projects/storage-abstraction/gcs.json", - // }); - // const a = new AdapterGoogleCloudStorage( - // 
"gcs://projectId=default-demo-app-35b34&keyFilename=gcs.json" - // ); - const a = new AdapterGoogleCloudStorage("gcs://projectId=default-demo-app-35b34"); - // const b = await a.listBuckets(); - console.log(a.configError); + let storage: Storage; + let b: ResultObjectBuckets; + + // all credentials from environment variables + storage = new Storage({ type: "gcs" }); + b = await storage.listBuckets(); + console.log(1, b); + + storage = new Storage({ + type: "gcs", + projectId: "default-demo-app-35b34", + keyFilename: "gcs.json", + }); + b = await storage.listBuckets(); + console.log(2, b); + + storage = new Storage("gcs://projectId=default-demo-app-35b34&keyFilename=gcs.json"); + b = await storage.listBuckets(); + console.log(3, b); + + storage = new Storage("gcs://projectId=default-demo-app-35b34"); + b = await storage.listBuckets(); + console.log(4, b); + + storage = new Storage({ + type: "gcs", + projectId: "default-demo-app-35b34", + credentials: { + type: "service_account", + project_id: "default-demo-app-35b34", + private_key_id: "da2719acad70df59748d60095b94c11a210dba03", + private_key: + "-----BEGIN PRIVATE 
KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQC/+oRZAX2DdLky\niw+kEOkp6uJJdLNxFkef6fRbCCAhuVc4T5LuiEPT0ZJn4HJqLMaFVdbI7iXGEcfT\n6NmahdFKs4hDIUc5bfgC9ma2koihhR7DRL3alUsYaEU1WCDEdy5utrGibWw3LZEf\n4WzeiksuBw+NIMciaG4EVxo6NRu4rTCdrVidIOLZ6wnfTb+ekWO995V6NmUgM6Pv\nKe2PYIOIRVIm1ytsCRX2Ftka/Y2CvqBbYgCFpNi6OaE6D4VnP+Q/TRHsAXTMF2Ir\ncdRKU27M5NhWhTEhAf0IiNp3N/UByzZwblkLEoPV4sVvleH5b+4we0UpMMMKGniS\nVUv0rWthAgMBAAECggEAA4yCgNC1vMXeHJCUh/BmFwfIChqVrJwAU/TztvEWAvmV\noq1Z5n3vXw2LOs9b3hmYy+Mz1sAMQ7aVbpXp2+DoZwydkrwdQHMHeCqM6IgZ7X5z\n/KEbL1y+KCPZT6yi4m7flV1cOEf0jaqSebflltGQpTetMmsuMzUh/4OKsRPFQOsu\nfTOMWZWzel9mjI2ZJGp7Cyvtddk9Ewz0rZmx5cBSLejiAEQdjgiBCgT7SBOfIwWd\nl0TDOm3YWF9dd2t/uq2oj+Q8OieiPDPEkOdA5LboxFwJN0AaKCdYPqJYzuYjRIzC\nANZcG845Qg65ZQULxcwSPgudHcBkK2btl/DBF3RipwKBgQDkX5xvnQreBLfppQ2G\nyVqrr+AP6vQnv1h7/kReny0o/8BMkaM27rqIAqbFvF9A9SKC9YrJm0dD0PFd0qmE\nqoVwMHggxZYHF11SelU2Wt1sQessXgFxERxN2wZNdFJPBrLtzfe/LSksqVr4LYKT\nuZE8yMJSn4HkNKeMGWf10iVy0wKBgQDXM89JwF2jnYuuPItNAoNususefQtQ3P5K\nX1UGIKZR7THkDxpssUa4hkrtbW4uaVEupNdtLmQuEFRykjIYXcTcwCmLvs1kp4T3\nWu+mtiHpw1SfwpBXujYby3GNfNVXpscGQ4JXDvhvhUihdztkeFJ7SRz3vkwZuhBZ\nbfiieovAewKBgQDFtWR4WEvNM+aIduUD+JPvqN5gyXbAZm5TQiishj+RjABOCZN0\nfi6siycYbuFEGjTbjXmu2ihTNLORF0y81ueY3c1SCdy9nOOlANV+riGdlF/l1CiJ\nW1fWqzSEn5yWX+VN7Q97ybwotyFTVAsEmxV6uw9IemD4UQFwT5pp/ZmAEwKBgC1/\nprPRF/sftAiChHEjtuYQreUkSuAt2RWgUjmZbZCktYeiKdQRJwKcYfsQx9rIKYHT\nGDjdNhEHItOWVUERki2Z8y64iHRfdp1VfJWirEjgI2QjnqDtncMCoF9ppTGC8VFB\ntKzzXVM2usRokQYM6mNmcdlvQmeuDwbZWVgA0MtBAoGBANqD/MXcbwbXjxaophqQ\ndVlwx0O8qBj43pDGGCGH9GjtQykSjHImp071ctGGyHMSQXLuMuB/nAsceNSXvAZ9\nSOThHslKGYhIC5iHuN9b3XfUBObWiLtufmOnnmwM91bJh6u2SIq8xXMLraSWd2pI\nZvbJAP835pWsuP3Pwjyrr+Mc\n-----END PRIVATE KEY-----\n", + client_email: "cloud-fs-abstraction@default-demo-app-35b34.iam.gserviceaccount.com", + client_id: "107837355944030382560", + auth_uri: "https://accounts.google.com/o/oauth2/auth", + token_uri: "https://oauth2.googleapis.com/token", + auth_provider_x509_cert_url: 
"https://www.googleapis.com/oauth2/v1/certs", + client_x509_cert_url: + "https://www.googleapis.com/robot/v1/metadata/x509/cloud-fs-abstraction%40default-demo-app-35b34.iam.gserviceaccount.com", + }, + }); + b = await storage.listBuckets(); + console.log(5, b); } test();