From 9bf534db0c50694c3ac42fe56ca22bf3f16a2ea6 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Fr=C3=A9d=C3=A9ric=20Branca?=
Date: Thu, 11 Jan 2024 08:59:39 +0000
Subject: [PATCH 1/6] FT: Provide AWS KMS connector for bucket ciphering

Add a configuration mechanism for the new AWS KMS connector.
Depends on changes in Arsenal that add support for this new connector.
---
 lib/Config.js      | 28 ++++++++++++++++++++++++++++
 lib/kms/wrapper.js |  5 +++++
 2 files changed, 33 insertions(+)

diff --git a/lib/Config.js b/lib/Config.js
index 6e1e853889..361673f227 100644
--- a/lib/Config.js
+++ b/lib/Config.js
@@ -1092,6 +1092,34 @@ class Config extends EventEmitter {
             }
         }
 
+        // Use env variables as default values.
+        // We use the same env variables as the AWS CLI does.
+        // Please note that if no config is specified here, the AWS Client
+        // seems to fallback on the local AWS configuration files
+        // (those contained in ~/.aws directory)
+        this.kms_aws = {
+            region: process.env.AWS_REGION || process.env.AWS_DEFAULT_REGION,
+            endpoint: process.env.AWS_ENDPOINT_URL_KMS || process.env.AWS_ENDPOINT_URL,
+            ak: process.env.AWS_ACCESS_KEY_ID,
+            sk: process.env.AWS_SECRET_ACCESS_KEY
+        };
+        if (config.kms_aws) {
+            const {region, endpoint, ak, sk} = config.kms_aws;
+            if (region) {
+                this.kms_aws.region = region;
+            }
+            if (endpoint) {
+                this.kms_aws.endpoint = endpoint;
+            }
+            /* Configure credentials.
+               Currently only support AK+SK authentication, both must be supplied.
+            */
+            if (ak && sk) {
+                this.kms_aws.ak = ak;
+                this.kms_aws.sk = sk;
+            }
+        }
+
         this.healthChecks = defaultHealthChecks;
         if (config.healthChecks && config.healthChecks.allowFrom) {
             assert(config.healthChecks.allowFrom instanceof Array,
diff --git a/lib/kms/wrapper.js b/lib/kms/wrapper.js
index 4a927f9d94..c2163ca9ae 100644
--- a/lib/kms/wrapper.js
+++ b/lib/kms/wrapper.js
@@ -7,6 +7,7 @@ const logger = require('../utilities/logger');
 const inMemory = require('./in_memory/backend').backend;
 const file = require('./file/backend');
 const KMIPClient = require('arsenal').network.kmipClient;
+const AWSClient = require('arsenal').network.awsClient;
 const Common = require('./common');
 let scalityKMS;
 let scalityKMSImpl;
@@ -42,6 +43,10 @@ if (config.backends.kms === 'mem') {
     }
     client = new KMIPClient(kmipConfig);
     implName = 'kmip';
+} else if (config.backends.kms === 'aws') {
+    const awsConfig = { kms_aws: config.kms_aws };
+    client = new AWSClient(awsConfig);
+    implName = 'aws';
 } else {
     throw new Error('KMS backend is not configured');
 }

From cf7f175b859ee4a2cccb2d22a400457d7255cc61 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Fr=C3=A9d=C3=A9ric=20Branca?=
Date: Thu, 18 Jan 2024 13:33:44 +0000
Subject: [PATCH 2/6] FT: ciphering, use generateDataKey operation of the KMS backend when available

Up to now, the data key was always generated from a locally generated
random number. This commit allows using the "generateDataKey" operation
of the KMS backend when it is implemented, and falls back to local
random number generation when it is not. Generating the data key in the
KMS benefits from a better entropy source, resulting in a stronger data
key.
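
The change in createCipherBundle boils down to the following selection
logic (illustrative sketch only: makeDataKey is not a helper that exists
in the code, and client, cipherBundle, Common and log are the objects
already used in lib/kms/wrapper.js):

    // Sketch: use KMS-side data key generation when the backend offers it,
    // otherwise generate the key locally and only use the KMS to cipher it.
    function makeDataKey(client, cipherBundle, log, next) {
        if (client.generateDataKey) {
            // Preferred: the KMS returns both the plaintext and the ciphered key.
            return client.generateDataKey(cipherBundle.cryptoScheme,
                cipherBundle.masterKeyId, log, next);
        }
        // Fallback: create a local random data key, then ask the KMS to cipher it.
        const dataKey = Common.createDataKey();
        return client.cipherDataKey(cipherBundle.cryptoScheme,
            cipherBundle.masterKeyId, dataKey, log,
            (err, cipheredDataKey) => next(err, dataKey, cipheredDataKey));
    }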
--- lib/kms/wrapper.js | 74 +++++++++++++++++++++++++++------------------- 1 file changed, 43 insertions(+), 31 deletions(-) diff --git a/lib/kms/wrapper.js b/lib/kms/wrapper.js index c2163ca9ae..4778dbeca6 100644 --- a/lib/kms/wrapper.js +++ b/lib/kms/wrapper.js @@ -136,19 +136,6 @@ class KMS { }); } - /** - * - * @param {object} log - logger object - * @returns {buffer} newKey - a data key - */ - static createDataKey(log) { - log.debug('creating a new data key'); - const newKey = Common.createDataKey(); - log.trace('data key created by the kms'); - return newKey; - } - - /** * createCipherBundle * @param {object} serverSideEncryptionInfo - info for encryption @@ -167,8 +154,6 @@ class KMS { */ static createCipherBundle(serverSideEncryptionInfo, log, cb) { - const dataKey = this.createDataKey(log); - const { algorithm, configuredMasterKeyId, masterKeyId: bucketMasterKeyId } = serverSideEncryptionInfo; let masterKeyId = bucketMasterKeyId; @@ -186,27 +171,54 @@ class KMS { }; async.waterfall([ - function cipherDataKey(next) { - log.debug('ciphering a data key'); - return client.cipherDataKey(cipherBundle.cryptoScheme, - cipherBundle.masterKeyId, - dataKey, log, (err, cipheredDataKey) => { - if (err) { - log.debug('error from kms', - { implName, error: err }); - return next(err); - } - log.trace('data key ciphered by the kms'); - return next(null, cipheredDataKey); - }); + function generateDataKey(next) { + /* There are 2 ways of generating a datakey : + - using the generateDataKey of the KMS backend if it exists + (currently only implemented for the AWS KMS backend). This is + the prefered solution since a dedicated KMS should offer a better + entropy for generating random content. + - using local random number generation, and then use the KMS to + encrypt the datakey. This method is used when the KMS backend doesn't + provide the generateDataKey method. 
+ */ + if (client.generateDataKey) { + log.debug('creating a data key using the KMS'); + return client.generateDataKey(cipherBundle.cryptoScheme, + cipherBundle.masterKeyId, + log, (err, plainTextDataKey, cipheredDataKey) => { + if (err) { + log.debug('error from kms', + { implName, error: err }); + return next(err); + } + log.trace('data key generated by the kms'); + return next(null, plainTextDataKey, cipheredDataKey); + }) + } else { + log.debug('creating a new data key'); + const dataKey = Common.createDataKey(); + + log.debug('ciphering the data key'); + return client.cipherDataKey(cipherBundle.cryptoScheme, + cipherBundle.masterKeyId, + dataKey, log, (err, cipheredDataKey) => { + if (err) { + log.debug('error from kms', + { implName, error: err }); + return next(err); + } + log.trace('data key ciphered by the kms'); + return next(null, dataKey, cipheredDataKey); + }); + } }, - function createCipher(cipheredDataKey, next) { + function createCipher(plainTextDataKey, cipheredDataKey, next) { log.debug('creating a cipher'); cipherBundle.cipheredDataKey = cipheredDataKey.toString('base64'); return Common.createCipher(cipherBundle.cryptoScheme, - dataKey, 0, log, (err, cipher) => { - dataKey.fill(0); + plainTextDataKey, 0, log, (err, cipher) => { + plainTextDataKey.fill(0); if (err) { log.debug('error from kms', { implName, error: err }); From db9627927e0479a6233e68c6b1791dac29c030c1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fr=C3=A9d=C3=A9ric=20Branca?= Date: Mon, 24 Jun 2024 14:54:44 +0000 Subject: [PATCH 3/6] FT: AWS KMS, fix lint errors + env var renaming after 1st code review --- lib/Config.js | 25 +++++++++++++------------ lib/kms/wrapper.js | 4 ++-- 2 files changed, 15 insertions(+), 14 deletions(-) diff --git a/lib/Config.js b/lib/Config.js index 361673f227..92de14749f 100644 --- a/lib/Config.js +++ b/lib/Config.js @@ -1093,30 +1093,31 @@ class Config extends EventEmitter { } // Use env variables as default values. - // We use the same env variables as the AWS CLI does. + // We use the same env variables as the AWS CLI does but prefixed with "KMS_", + // allowing distinct endpoints betweens AWS compatibility components. // Please note that if no config is specified here, the AWS Client // seems to fallback on the local AWS configuration files // (those contained in ~/.aws directory) - this.kms_aws = { - region: process.env.AWS_REGION || process.env.AWS_DEFAULT_REGION, - endpoint: process.env.AWS_ENDPOINT_URL_KMS || process.env.AWS_ENDPOINT_URL, - ak: process.env.AWS_ACCESS_KEY_ID, - sk: process.env.AWS_SECRET_ACCESS_KEY + this.kmsAWS = { + region: process.env.KMS_AWS_REGION || process.env.KMS_AWS_DEFAULT_REGION, + endpoint: process.env.KMS_AWS_ENDPOINT_URL_KMS || process.env.KMS_AWS_ENDPOINT_URL, + ak: process.env.KMS_AWS_ACCESS_KEY_ID, + sk: process.env.KMS_AWS_SECRET_ACCESS_KEY, }; - if (config.kms_aws) { - const {region, endpoint, ak, sk} = config.kms_aws; + if (config.kmsAWS) { + const { region, endpoint, ak, sk } = config.kmsAWS; if (region) { - this.kms_aws.region = region; + this.kmsAWS.region = region; } if (endpoint) { - this.kms_aws.endpoint = endpoint; + this.kmsAWS.endpoint = endpoint; } /* Configure credentials. Currently only support AK+SK authentication, both must be supplied. 
*/ if (ak && sk) { - this.kms_aws.ak = ak; - this.kms_aws.sk = sk; + this.kmsAWS.ak = ak; + this.kmsAWS.sk = sk; } } diff --git a/lib/kms/wrapper.js b/lib/kms/wrapper.js index 4778dbeca6..c814b02876 100644 --- a/lib/kms/wrapper.js +++ b/lib/kms/wrapper.js @@ -44,7 +44,7 @@ if (config.backends.kms === 'mem') { client = new KMIPClient(kmipConfig); implName = 'kmip'; } else if (config.backends.kms === 'aws') { - const awsConfig = { kms_aws: config.kms_aws }; + const awsConfig = { kmsAWS: config.kmsAWS }; client = new AWSClient(awsConfig); implName = 'aws'; } else { @@ -193,7 +193,7 @@ class KMS { } log.trace('data key generated by the kms'); return next(null, plainTextDataKey, cipheredDataKey); - }) + }); } else { log.debug('creating a new data key'); const dataKey = Common.createDataKey(); From 853102f48538f5bfc030f95c743eb87f8d667992 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fr=C3=A9d=C3=A9ric=20Branca?= Date: Tue, 30 Jul 2024 09:53:58 +0000 Subject: [PATCH 4/6] AWS KMS: TLS configuration --- lib/Config.js | 46 ++++++++++++++++++++++++++++++++++++++++++++-- lib/kms/wrapper.js | 8 +++++--- 2 files changed, 49 insertions(+), 5 deletions(-) diff --git a/lib/Config.js b/lib/Config.js index 92de14749f..6660a6a50d 100644 --- a/lib/Config.js +++ b/lib/Config.js @@ -491,7 +491,7 @@ class Config extends EventEmitter { } const tlsFilePath = (tlsFileName[0] === '/') ? tlsFileName - : path.join(this._basepath, tlsFileName); + : path.join(this._basePath, tlsFileName); let tlsFileContent; try { tlsFileContent = fs.readFileSync(tlsFilePath); @@ -502,6 +502,19 @@ class Config extends EventEmitter { return tlsFileContent; } + // Load TLS file or array of files + // if tlsFilename is a string, result will be a Buffer containing the file content + // if tlsFilename is an array of string, result will be an array of Buffer + _loadTlsFileArray(tlsFileName) { + let res; + if (Array.isArray(tlsFileName)) { + res = tlsFileName.map(this._loadTlsFile); + } else { + res = this._loadTlsFile(tlsFileName); + } + return res; + } + /** * Parse list of endpoints. 
* @param {string[] | undefined} listenOn - List of string of the form "ip:port" @@ -1105,7 +1118,7 @@ class Config extends EventEmitter { sk: process.env.KMS_AWS_SECRET_ACCESS_KEY, }; if (config.kmsAWS) { - const { region, endpoint, ak, sk } = config.kmsAWS; + const { region, endpoint, ak, sk, tls } = config.kmsAWS; if (region) { this.kmsAWS.region = region; } @@ -1119,6 +1132,35 @@ class Config extends EventEmitter { this.kmsAWS.ak = ak; this.kmsAWS.sk = sk; } + + if (tls) { + this.kmsAWS.tls = {}; + if (tls.rejectUnauthorized !== undefined) { + assert(typeof tls.rejectUnauthorized === 'boolean'); + this.kmsAWS.tls.rejectUnauthorized = tls.rejectUnauthorized; + } + // min & max TLS: One of 'TLSv1.3', 'TLSv1.2', 'TLSv1.1', or 'TLSv1' + // (see https://nodejs.org/api/tls.html#tlscreatesecurecontextoptions) + if (tls.minVersion !== undefined) { + assert(typeof tls.minVersion === 'string', + 'bad config: KMS AWS TLS minVersion must be a string'); + this.kmsAWS.tls.minVersion = tls.minVersion; + } + if (tls.maxVersion !== undefined) { + assert(typeof tls.maxVersion === 'string', + 'bad config: KMS AWS TLS maxVersion must be a string'); + this.kmsAWS.tls.maxVersion = tls.maxVersion; + } + if (tls.ca !== undefined) { + this.kmsAWS.tls.ca = this._loadTlsFileArray(tls.ca); + } + if (tls.cert !== undefined) { + this.kmsAWS.tls.cert = this._loadTlsFileArray(tls.cert); + } + if (tls.key !== undefined) { + this.kmsAWS.tls.key = this._loadTlsFileArray(tls.key); + } + } } this.healthChecks = defaultHealthChecks; diff --git a/lib/kms/wrapper.js b/lib/kms/wrapper.js index c814b02876..b081715798 100644 --- a/lib/kms/wrapper.js +++ b/lib/kms/wrapper.js @@ -181,9 +181,10 @@ class KMS { encrypt the datakey. This method is used when the KMS backend doesn't provide the generateDataKey method. 
*/ + let res; if (client.generateDataKey) { log.debug('creating a data key using the KMS'); - return client.generateDataKey(cipherBundle.cryptoScheme, + res = client.generateDataKey(cipherBundle.cryptoScheme, cipherBundle.masterKeyId, log, (err, plainTextDataKey, cipheredDataKey) => { if (err) { @@ -199,7 +200,7 @@ class KMS { const dataKey = Common.createDataKey(); log.debug('ciphering the data key'); - return client.cipherDataKey(cipherBundle.cryptoScheme, + res = client.cipherDataKey(cipherBundle.cryptoScheme, cipherBundle.masterKeyId, dataKey, log, (err, cipheredDataKey) => { if (err) { @@ -210,7 +211,8 @@ class KMS { log.trace('data key ciphered by the kms'); return next(null, dataKey, cipheredDataKey); }); - } + } + return res; }, function createCipher(plainTextDataKey, cipheredDataKey, next) { log.debug('creating a cipher'); From a63387e786484cdb42d5df520dabfefead2bf6e7 Mon Sep 17 00:00:00 2001 From: Nicolas Humbert Date: Fri, 13 Sep 2024 22:03:20 +0200 Subject: [PATCH 5/6] CLDSRV-559 AWS KMS backend --- config.json | 9 +- lib/Config.js | 117 ++++--- lib/api/apiUtils/bucket/bucketCreation.js | 8 +- lib/api/apiUtils/bucket/bucketDeletion.js | 11 +- lib/api/apiUtils/bucket/bucketEncryption.js | 4 +- lib/api/bucketDeleteEncryption.js | 1 + lib/api/bucketPutEncryption.js | 11 +- lib/kms/file/backend.js | 4 +- lib/kms/in_memory/backend.js | 6 +- lib/kms/wrapper.js | 85 +++-- package.json | 4 +- .../test/object/encryptionHeaders.js | 84 +++-- tests/unit/api/bucketDelete.js | 96 ++++++ tests/unit/api/bucketDeleteEncryption.js | 154 ++++++++- tests/unit/api/bucketPut.js | 250 ++++++++++++++ tests/unit/api/bucketPutEncryption.js | 149 +++++++- tests/unit/encryption/kms.js | 19 +- tests/unit/testConfigs/parseKmsAWS.js | 326 ++++++++++++++++++ yarn.lock | 16 +- 19 files changed, 1193 insertions(+), 161 deletions(-) create mode 100644 tests/unit/testConfigs/parseKmsAWS.js diff --git a/config.json b/config.json index 34194edfc0..565c191226 100644 --- a/config.json +++ b/config.json @@ -86,5 +86,12 @@ "type": "dummy", "host": "localhost:6000" } - ] + ], + "defaultEncryptionKeyPerAccount": true, + "kmsAWS": { + "region": "us-east-1", + "endpoint": "http://127.0.0.1:8080", + "ak": "tbd", + "sk": "tbd" + } } diff --git a/lib/Config.js b/lib/Config.js index 6660a6a50d..c5e056f4f4 100644 --- a/lib/Config.js +++ b/lib/Config.js @@ -453,6 +453,60 @@ class Config extends EventEmitter { this._configureBackends(); } + _parseKmsAWS(config) { + if (!config.kmsAWS) { + return {}; + } + let kmsAWS = {}; + + const { region, endpoint, ak, sk, tls } = config.kmsAWS; + + assert(endpoint, 'Configuration Error: endpoint must be defined in kmsAWS'); + assert(ak, 'Configuration Error: ak must be defined in kmsAWS'); + assert(sk, 'Configuration Error: sk must be defined in kmsAWS'); + + kmsAWS = { + endpoint, + ak, + sk, + }; + + if (region) { + kmsAWS.region = region; + } + + if (tls) { + kmsAWS.tls = {}; + if (tls.rejectUnauthorized !== undefined) { + assert(typeof tls.rejectUnauthorized === 'boolean'); + kmsAWS.tls.rejectUnauthorized = tls.rejectUnauthorized; + } + // min & max TLS: One of 'TLSv1.3', 'TLSv1.2', 'TLSv1.1', or 'TLSv1' + // (see https://nodejs.org/api/tls.html#tlscreatesecurecontextoptions) + if (tls.minVersion !== undefined) { + assert(typeof tls.minVersion === 'string', + 'bad config: KMS AWS TLS minVersion must be a string'); + kmsAWS.tls.minVersion = tls.minVersion; + } + if (tls.maxVersion !== undefined) { + assert(typeof tls.maxVersion === 'string', + 'bad config: KMS AWS TLS maxVersion must be a 
string'); + kmsAWS.tls.maxVersion = tls.maxVersion; + } + if (tls.ca !== undefined) { + kmsAWS.tls.ca = this._loadTlsFileArray(tls.ca); + } + if (tls.cert !== undefined) { + kmsAWS.tls.cert = this._loadTlsFileArray(tls.cert); + } + if (tls.key !== undefined) { + kmsAWS.tls.key = this._loadTlsFileArray(tls.key); + } + } + + return kmsAWS; + } + _getLocationConfig() { let locationConfig; try { @@ -508,7 +562,7 @@ class Config extends EventEmitter { _loadTlsFileArray(tlsFileName) { let res; if (Array.isArray(tlsFileName)) { - res = tlsFileName.map(this._loadTlsFile); + res = tlsFileName.map(tlsFile => this._loadTlsFile(tlsFile)); } else { res = this._loadTlsFile(tlsFileName); } @@ -1105,63 +1159,12 @@ class Config extends EventEmitter { } } - // Use env variables as default values. - // We use the same env variables as the AWS CLI does but prefixed with "KMS_", - // allowing distinct endpoints betweens AWS compatibility components. - // Please note that if no config is specified here, the AWS Client - // seems to fallback on the local AWS configuration files - // (those contained in ~/.aws directory) - this.kmsAWS = { - region: process.env.KMS_AWS_REGION || process.env.KMS_AWS_DEFAULT_REGION, - endpoint: process.env.KMS_AWS_ENDPOINT_URL_KMS || process.env.KMS_AWS_ENDPOINT_URL, - ak: process.env.KMS_AWS_ACCESS_KEY_ID, - sk: process.env.KMS_AWS_SECRET_ACCESS_KEY, - }; - if (config.kmsAWS) { - const { region, endpoint, ak, sk, tls } = config.kmsAWS; - if (region) { - this.kmsAWS.region = region; - } - if (endpoint) { - this.kmsAWS.endpoint = endpoint; - } - /* Configure credentials. - Currently only support AK+SK authentication, both must be supplied. - */ - if (ak && sk) { - this.kmsAWS.ak = ak; - this.kmsAWS.sk = sk; - } + this.kmsAWS = this._parseKmsAWS(config); - if (tls) { - this.kmsAWS.tls = {}; - if (tls.rejectUnauthorized !== undefined) { - assert(typeof tls.rejectUnauthorized === 'boolean'); - this.kmsAWS.tls.rejectUnauthorized = tls.rejectUnauthorized; - } - // min & max TLS: One of 'TLSv1.3', 'TLSv1.2', 'TLSv1.1', or 'TLSv1' - // (see https://nodejs.org/api/tls.html#tlscreatesecurecontextoptions) - if (tls.minVersion !== undefined) { - assert(typeof tls.minVersion === 'string', - 'bad config: KMS AWS TLS minVersion must be a string'); - this.kmsAWS.tls.minVersion = tls.minVersion; - } - if (tls.maxVersion !== undefined) { - assert(typeof tls.maxVersion === 'string', - 'bad config: KMS AWS TLS maxVersion must be a string'); - this.kmsAWS.tls.maxVersion = tls.maxVersion; - } - if (tls.ca !== undefined) { - this.kmsAWS.tls.ca = this._loadTlsFileArray(tls.ca); - } - if (tls.cert !== undefined) { - this.kmsAWS.tls.cert = this._loadTlsFileArray(tls.cert); - } - if (tls.key !== undefined) { - this.kmsAWS.tls.key = this._loadTlsFileArray(tls.key); - } - } - } + const defaultEncryptionKeyPerAccount = config.defaultEncryptionKeyPerAccount; + this.defaultEncryptionKeyPerAccount = defaultEncryptionKeyPerAccount || false; + assert(typeof this.defaultEncryptionKeyPerAccount === 'boolean', + 'config.defaultEncryptionKeyPerAccount must be a boolean'); this.healthChecks = defaultHealthChecks; if (config.healthChecks && config.healthChecks.allowFrom) { diff --git a/lib/api/apiUtils/bucket/bucketCreation.js b/lib/api/apiUtils/bucket/bucketCreation.js index 455261314b..7a3a4cdad4 100644 --- a/lib/api/apiUtils/bucket/bucketCreation.js +++ b/lib/api/apiUtils/bucket/bucketCreation.js @@ -125,16 +125,16 @@ function cleanUpBucket(bucketMD, canonicalID, log, callback) { /** * Manage the server side encryption 
on bucket creation, as a side effect * a bucket key is created in the kms - * @param {string} bucketName - name of bucket + * @param {BucketInfo} bucket - bucket info * @param {object} headers - request headers * @param {function} log - Werelogs logger * @param {function} cb - called on completion * @returns {undefined} * @callback called with (err, sseInfo: object) */ -function bucketLevelServerSideEncryption(bucketName, headers, log, cb) { +function bucketLevelServerSideEncryption(bucket, headers, log, cb) { kms.bucketLevelEncryption( - bucketName, headers, log, (err, sseInfo) => { + bucket, headers, log, (err, sseInfo) => { if (err) { log.debug('error getting bucket encryption info', { error: err, @@ -231,7 +231,7 @@ function createBucket(authInfo, bucketName, headers, if (existingBucketMD === 'NoBucketYet') { const sseConfig = parseBucketEncryptionHeaders(headers); return bucketLevelServerSideEncryption( - bucketName, sseConfig, log, + bucket, sseConfig, log, (err, sseInfo) => { if (err) { return cb(err); diff --git a/lib/api/apiUtils/bucket/bucketDeletion.js b/lib/api/apiUtils/bucket/bucketDeletion.js index 5b8225c136..498398af07 100644 --- a/lib/api/apiUtils/bucket/bucketDeletion.js +++ b/lib/api/apiUtils/bucket/bucketDeletion.js @@ -138,8 +138,15 @@ function deleteBucket(authInfo, bucketMD, bucketName, canonicalID, request, log, return cb(err); } const serverSideEncryption = bucketMD.getServerSideEncryption(); - if (serverSideEncryption && - serverSideEncryption.algorithm === 'AES256') { + const isScalityManagedEncryptionKey = serverSideEncryption && serverSideEncryption.algorithm === 'AES256'; + const isAccountEncryptionEnabled = bucketMD.isAccountEncryptionEnabled(); + + /** + * If all of the following conditions are met, delete the master encryption key: + * - The encryption key is managed by Scality (not externally managed). + * - The encryption is bucket-specific (to prevent deleting default account encryption key). + */ + if (isScalityManagedEncryptionKey && !isAccountEncryptionEnabled) { const masterKeyId = serverSideEncryption.masterKeyId; return kms.destroyBucketKey(masterKeyId, log, cb); } diff --git a/lib/api/apiUtils/bucket/bucketEncryption.js b/lib/api/apiUtils/bucket/bucketEncryption.js index fe45541f29..0657484691 100644 --- a/lib/api/apiUtils/bucket/bucketEncryption.js +++ b/lib/api/apiUtils/bucket/bucketEncryption.js @@ -173,7 +173,7 @@ function parseObjectEncryptionHeaders(headers) { */ function createDefaultBucketEncryptionMetadata(bucket, log, cb) { return kms.bucketLevelEncryption( - bucket.getName(), + bucket, { algorithm: 'AES256', mandatory: false }, log, (error, sseConfig) => { @@ -236,7 +236,7 @@ function getObjectSSEConfiguration(headers, bucket, log, cb) { // If the bucket has a default encryption config, and it is mandatory // (created with putBucketEncryption or legacy headers) - // pass it through + // pass it through. 
if (bucketSSE && bucketSSE.mandatory) { return cb(null, bucketSSE); } diff --git a/lib/api/bucketDeleteEncryption.js b/lib/api/bucketDeleteEncryption.js index 9a3c0f8a90..04c72eecfb 100644 --- a/lib/api/bucketDeleteEncryption.js +++ b/lib/api/bucketDeleteEncryption.js @@ -39,6 +39,7 @@ function bucketDeleteEncryption(authInfo, request, log, callback) { algorithm: sseConfig.algorithm, cryptoScheme: sseConfig.cryptoScheme, masterKeyId: sseConfig.masterKeyId, + configuredMasterKeyId: sseConfig.configuredMasterKeyId, }; bucket.setServerSideEncryption(updatedConfig); diff --git a/lib/api/bucketPutEncryption.js b/lib/api/bucketPutEncryption.js index 5a407f2f63..3c965643d7 100644 --- a/lib/api/bucketPutEncryption.js +++ b/lib/api/bucketPutEncryption.js @@ -41,8 +41,9 @@ function bucketPutEncryption(authInfo, request, log, callback) { }, (bucket, encryptionConfig, next) => { const existingConfig = bucket.getServerSideEncryption(); - if (existingConfig === null) { - return kms.bucketLevelEncryption(bucket.getName(), encryptionConfig, log, + // Check if encryption is not configured or if a default master key has not been created yet. + if (existingConfig === null || !existingConfig.masterKeyId) { + return kms.bucketLevelEncryption(bucket, encryptionConfig, log, (err, updatedConfig) => { if (err) { return next(err); @@ -51,6 +52,11 @@ function bucketPutEncryption(authInfo, request, log, callback) { }); } + // If encryption is already configured and a default master key exists + + // If the request does not specify a custom key, reuse the existing default master key id + // This ensures that a new default master key is not generated every time + // `putBucketEncryption` is called, avoiding unnecessary key creation const updatedConfig = { mandatory: true, algorithm: encryptionConfig.algorithm, @@ -58,6 +64,7 @@ function bucketPutEncryption(authInfo, request, log, callback) { masterKeyId: existingConfig.masterKeyId, }; + // If the request specifies a custom master key id, store it in the updated configuration const { configuredMasterKeyId } = encryptionConfig; if (configuredMasterKeyId) { updatedConfig.configuredMasterKeyId = configuredMasterKeyId; diff --git a/lib/kms/file/backend.js b/lib/kms/file/backend.js index 919d5defe1..c841f9aa82 100644 --- a/lib/kms/file/backend.js +++ b/lib/kms/file/backend.js @@ -8,13 +8,13 @@ const backend = { /** * - * @param {string} bucketName - bucket name + * @param {BucketInfo} bucket - bucket info * @param {object} log - logger object * @param {function} cb - callback * @returns {undefined} * @callback called with (err, masterKeyId: string) */ - createBucketKey: function createBucketKeyMem(bucketName, log, cb) { + createBucketKey: function createBucketKeyMem(bucket, log, cb) { process.nextTick(() => { // Using createDataKey here for purposes of createBucketKeyMem // so that we do not need a separate function. diff --git a/lib/kms/in_memory/backend.js b/lib/kms/in_memory/backend.js index 2a2ced9b80..2a694e28b2 100644 --- a/lib/kms/in_memory/backend.js +++ b/lib/kms/in_memory/backend.js @@ -8,15 +8,17 @@ const backend = { * Target implementation will be async. 
let's mimic it */ + supportsDefaultKeyPerAccount: false, + /** * - * @param {string} bucketName - bucket name + * @param {BucketInfo} bucket - bucket info * @param {object} log - logger object * @param {function} cb - callback * @returns {undefined} * @callback called with (err, masterKeyId: string) */ - createBucketKey: function createBucketKeyMem(bucketName, log, cb) { + createBucketKey: function createBucketKeyMem(bucket, log, cb) { process.nextTick(() => { // Using createDataKey here for purposes of createBucketKeyMem // so that we do not need a separate function. diff --git a/lib/kms/wrapper.js b/lib/kms/wrapper.js index b081715798..a00e52edb8 100644 --- a/lib/kms/wrapper.js +++ b/lib/kms/wrapper.js @@ -7,8 +7,9 @@ const logger = require('../utilities/logger'); const inMemory = require('./in_memory/backend').backend; const file = require('./file/backend'); const KMIPClient = require('arsenal').network.kmipClient; -const AWSClient = require('arsenal').network.awsClient; +const { KmsAWSClient } = require('arsenal').network; const Common = require('./common'); +const vault = require('../auth/vault'); let scalityKMS; let scalityKMSImpl; try { @@ -45,43 +46,68 @@ if (config.backends.kms === 'mem') { implName = 'kmip'; } else if (config.backends.kms === 'aws') { const awsConfig = { kmsAWS: config.kmsAWS }; - client = new AWSClient(awsConfig); + client = new KmsAWSClient(awsConfig); implName = 'aws'; } else { - throw new Error('KMS backend is not configured'); + throw new Error(`KMS backend is not configured: ${config.backends.kms}`); } class KMS { /** + * Create a new bucket encryption key. * - * @param {string} bucketName - bucket name + * This function is responsible for creating an encryption key for a bucket. + * If the client supports using a default master encryption key per account + * and one is configured, the key is managed at the account level by Vault. + * Otherwise, a bucket-level encryption key is created for legacy support. + * + * @param {BucketInfo} bucket - bucket info * @param {object} log - logger object * @param {function} cb - callback * @returns {undefined} - * @callback called with (err, masterKeyId: string) + * @callback called with (err, { masterKeyId: string, isAccountEncryptionEnabled: boolean }) */ - static createBucketKey(bucketName, log, cb) { + static createBucketKey(bucket, log, cb) { log.debug('creating a new bucket key'); - client.createBucketKey(bucketName, log, (err, masterKeyId) => { + // Check if the client supports the use of a default master encryption key per account + // and one is configured. + // If so, retrieve or create the encryption key for the account from Vault. + // Later its id will be stored at the bucket metadata level. + if (client.supportsDefaultKeyPerAccount && config.defaultEncryptionKeyPerAccount) { + return vault.getOrCreateEncryptionKeyId(bucket.getOwner(), log, (err, data) => { + if (err) { + log.debug('error retrieving or creating the default encryption key at the account level from vault', + { implName, error: err }); + return cb(err); + } + + const { encryptionKeyId, action } = data; + log.trace('default encryption key retrieved or created at the account level from vault', + { implName, encryptionKeyId, action }); + return cb(null, { masterKeyId: encryptionKeyId, isAccountEncryptionEnabled: true }); + }); + } + // Otherwise, create a default master encryption key, later its id will be stored at the bucket metadata level. 
+ return client.createBucketKey(bucket.getName(), log, (err, masterKeyId) => { if (err) { log.debug('error from kms', { implName, error: err }); return cb(err); } log.trace('bucket key created in kms'); - return cb(null, masterKeyId); + return cb(null, { masterKeyId }); }); } /** * - * @param {string} bucketName - bucket name + * @param {BucketInfo} bucket - bucket info * @param {object} sseConfig - SSE configuration * @param {object} log - logger object * @param {function} cb - callback * @returns {undefined} * @callback called with (err, serverSideEncryptionInfo: object) */ - static bucketLevelEncryption(bucketName, sseConfig, log, cb) { + static bucketLevelEncryption(bucket, sseConfig, log, cb) { /* The purpose of bucket level encryption is so that the client does not have to send appropriate headers to trigger encryption on each object @@ -92,21 +118,30 @@ class KMS { const { algorithm, configuredMasterKeyId, mandatory } = sseConfig; const _mandatory = mandatory === true; if (algorithm === 'AES256' || algorithm === 'aws:kms') { - return this.createBucketKey(bucketName, log, (err, masterKeyId) => { + const serverSideEncryptionInfo = { + cryptoScheme: 1, + algorithm, + mandatory: _mandatory, + }; + + if (algorithm === 'aws:kms' && configuredMasterKeyId) { + serverSideEncryptionInfo.configuredMasterKeyId = configuredMasterKeyId; + + return process.nextTick(() => cb(null, serverSideEncryptionInfo)); + } + + return this.createBucketKey(bucket, log, (err, data) => { if (err) { return cb(err); } - const serverSideEncryptionInfo = { - cryptoScheme: 1, - algorithm, - masterKeyId, - mandatory: _mandatory, - }; + const { masterKeyId, isAccountEncryptionEnabled } = data; + serverSideEncryptionInfo.masterKeyId = masterKeyId; - if (algorithm === 'aws:kms' && configuredMasterKeyId) { - serverSideEncryptionInfo.configuredMasterKeyId = configuredMasterKeyId; + if (isAccountEncryptionEnabled) { + serverSideEncryptionInfo.isAccountEncryptionEnabled = isAccountEncryptionEnabled; } + return cb(null, serverSideEncryptionInfo); }); } @@ -175,7 +210,7 @@ class KMS { /* There are 2 ways of generating a datakey : - using the generateDataKey of the KMS backend if it exists (currently only implemented for the AWS KMS backend). This is - the prefered solution since a dedicated KMS should offer a better + the preferred solution since a dedicated KMS should offer a better entropy for generating random content. - using local random number generation, and then use the KMS to encrypt the datakey. 
This method is used when the KMS backend doesn't @@ -188,7 +223,7 @@ class KMS { cipherBundle.masterKeyId, log, (err, plainTextDataKey, cipheredDataKey) => { if (err) { - log.debug('error from kms', + log.debug('error generating a new data key from KMS', { implName, error: err }); return next(err); } @@ -197,19 +232,19 @@ class KMS { }); } else { log.debug('creating a new data key'); - const dataKey = Common.createDataKey(); + const plainTextDataKey = Common.createDataKey(); log.debug('ciphering the data key'); res = client.cipherDataKey(cipherBundle.cryptoScheme, cipherBundle.masterKeyId, - dataKey, log, (err, cipheredDataKey) => { + plainTextDataKey, log, (err, cipheredDataKey) => { if (err) { - log.debug('error from kms', + log.debug('error encrypting the data key using KMS', { implName, error: err }); return next(err); } log.trace('data key ciphered by the kms'); - return next(null, dataKey, cipheredDataKey); + return next(null, plainTextDataKey, cipheredDataKey); }); } return res; diff --git a/package.json b/package.json index fabc16378b..6f803e9911 100644 --- a/package.json +++ b/package.json @@ -20,7 +20,7 @@ "homepage": "https://github.com/scality/S3#readme", "dependencies": { "@hapi/joi": "^17.1.0", - "arsenal": "git+https://github.com/scality/arsenal#7.70.29", + "arsenal": "git+https://github.com/scality/arsenal#7.70.35", "async": "~2.5.0", "aws-sdk": "2.905.0", "azure-storage": "^2.1.0", @@ -38,7 +38,7 @@ "utapi": "git+https://github.com/scality/utapi#7.70.5", "utf8": "~2.1.1", "uuid": "^3.0.1", - "vaultclient": "scality/vaultclient#7.10.13", + "vaultclient": "scality/vaultclient#7.10.15", "werelogs": "scality/werelogs#8.1.5", "xml2js": "~0.4.16" }, diff --git a/tests/functional/aws-node-sdk/test/object/encryptionHeaders.js b/tests/functional/aws-node-sdk/test/object/encryptionHeaders.js index eb32019b49..c9c4c41a14 100644 --- a/tests/functional/aws-node-sdk/test/object/encryptionHeaders.js +++ b/tests/functional/aws-node-sdk/test/object/encryptionHeaders.js @@ -1,6 +1,7 @@ const assert = require('assert'); const async = require('async'); const uuid = require('uuid'); +const BucketInfo = require('arsenal').models.BucketInfo; const withV4 = require('../support/withV4'); const BucketUtility = require('../../lib/utility/bucket-util'); const kms = require('../../../../../lib/kms/wrapper'); @@ -87,8 +88,10 @@ describe('per object encryption headers', () => { let kmsKeyId; before(done => { - kms.createBucketKey('enc-bucket-test', log, - (err, keyId) => { + const bucket = new BucketInfo('enc-bucket-test', 'OwnerId', + 'OwnerDisplayName', new Date().toJSON()); + kms.createBucketKey(bucket, log, + (err, { masterKeyId: keyId }) => { assert.ifError(err); kmsKeyId = keyId; done(); @@ -167,46 +170,51 @@ describe('per object encryption headers', () => { )); testCases - .forEach(existing => it('should override default bucket encryption settings', done => { - const _existing = Object.assign({}, existing); - if (existing.masterKeyId) { - _existing.masterKeyId = kmsKeyId; - } - const params = { - Bucket: bucket, - ServerSideEncryptionConfiguration: hydrateSSEConfig(_existing), - }; - // no op putBucketNotification for the unencrypted case - const s3Op = existing.algo ? (...args) => s3.putBucketEncryption(...args) : s3NoOp; - s3Op(params, error => { - assert.ifError(error); - return putEncryptedObject(s3, bucket, object, target, kmsKeyId, error => { + .forEach(existing => { + const hasKey = target.masterKeyId ? 
'a' : 'no'; + const { algo } = target; + it('should override bucket encryption settings with ' + + `algo ${algo || 'none'} with ${hasKey} key id`, done => { + const _existing = Object.assign({}, existing); + if (existing.masterKeyId) { + _existing.masterKeyId = kmsKeyId; + } + const params = { + Bucket: bucket, + ServerSideEncryptionConfiguration: hydrateSSEConfig(_existing), + }; + // no op putBucketNotification for the unencrypted case + const s3Op = existing.algo ? (...args) => s3.putBucketEncryption(...args) : s3NoOp; + s3Op(params, error => { assert.ifError(error); - return getSSEConfig( - s3, - bucket, - object, - (error, sseConfig) => { - assert.ifError(error); - let expected = createExpected(target, kmsKeyId); - // In the null case the expected encryption config is - // the buckets default policy - if (!target.algo) { - expected = createExpected(existing, kmsKeyId); - } - // We differ from aws behavior and always return a - // masterKeyId even when not explicitly configured. - if (expected.algo === 'aws:kms' && !expected.masterKeyId) { - // eslint-disable-next-line no-param-reassign - delete sseConfig.masterKeyId; + return putEncryptedObject(s3, bucket, object, target, kmsKeyId, error => { + assert.ifError(error); + return getSSEConfig( + s3, + bucket, + object, + (error, sseConfig) => { + assert.ifError(error); + let expected = createExpected(target, kmsKeyId); + // In the null case the expected encryption config is + // the buckets default policy + if (!target.algo) { + expected = createExpected(existing, kmsKeyId); + } + // We differ from aws behavior and always return a + // masterKeyId even when not explicitly configured. + if (expected.algo === 'aws:kms' && !expected.masterKeyId) { + // eslint-disable-next-line no-param-reassign + delete sseConfig.masterKeyId; + } + assert.deepStrictEqual(sseConfig, expected); + done(); } - assert.deepStrictEqual(sseConfig, expected); - done(); - } - ); + ); + }); }); }); - })); + }); testCases .forEach(existing => it('should copy an object to an encrypted key overriding bucket settings', diff --git a/tests/unit/api/bucketDelete.js b/tests/unit/api/bucketDelete.js index ab06ae89d8..d718f2f53b 100644 --- a/tests/unit/api/bucketDelete.js +++ b/tests/unit/api/bucketDelete.js @@ -3,9 +3,13 @@ const assert = require('assert'); const async = require('async'); const { parseString } = require('xml2js'); const { errors } = require('arsenal'); +const sinon = require('sinon'); +const inMemory = require('../../../lib/kms/in_memory/backend').backend; const bucketDelete = require('../../../lib/api/bucketDelete'); const { bucketPut } = require('../../../lib/api/bucketPut'); +const bucketPutEncryption = require('../../../lib/api/bucketPutEncryption'); +const { templateSSEConfig, templateRequest } = require('../utils/bucketEncryption'); const constants = require('../../../constants'); const initiateMultipartUpload = require('../../../lib/api/initiateMultipartUpload'); @@ -16,6 +20,7 @@ const objectPutPart = require('../../../lib/api/objectPutPart'); const { cleanup, DummyRequestLogger, makeAuthInfo } = require('../helpers'); const DummyRequest = require('../DummyRequest'); + const log = new DummyRequestLogger(); const canonicalID = 'accessKey1'; const authInfo = makeAuthInfo(canonicalID); @@ -175,4 +180,95 @@ describe('bucketDelete API', () => { done(); }); }); + + describe('with encryption', () => { + let destroyBucketKeySpy; + + beforeEach(() => { + destroyBucketKeySpy = sinon.spy(inMemory, 'destroyBucketKey'); + }); + + afterEach(() => { + sinon.restore(); 
+ }); + + it('should delete the bucket-level encryption key if AES256 algorithm', done => { + bucketPut(authInfo, testRequest, log, () => { + const post = templateSSEConfig({ algorithm: 'AES256' }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => { + assert.ifError(err); + bucketDelete(authInfo, testRequest, log, () => { + metadata.getBucket(bucketName, log, (err, md) => { + assert.strictEqual(err.is.NoSuchBucket, true); + assert.strictEqual(md, undefined); + // delete the default bucket-level master encryption key + sinon.assert.calledOnce(destroyBucketKeySpy); + done(); + }); + }); + }); + }); + }); + + it('should not delete the bucket-level encryption key if aws:kms algorithm', done => { + bucketPut(authInfo, testRequest, log, () => { + const post = templateSSEConfig({ algorithm: 'aws:kms' }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => { + assert.ifError(err); + bucketDelete(authInfo, testRequest, log, () => { + metadata.getBucket(bucketName, log, (err, md) => { + assert.strictEqual(err.is.NoSuchBucket, true); + assert.strictEqual(md, undefined); + // do not delete the default bucket-level master encryption key + sinon.assert.notCalled(destroyBucketKeySpy); + done(); + }); + }); + }); + }); + }); + + it('should not delete the account-level encryption key', done => { + sinon.stub(inMemory, 'supportsDefaultKeyPerAccount').value(true); + bucketPut(authInfo, testRequest, log, () => { + const post = templateSSEConfig({ algorithm: 'AES256' }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => { + assert.ifError(err); + bucketDelete(authInfo, testRequest, log, () => { + metadata.getBucket(bucketName, log, (err, md) => { + assert.strictEqual(err.is.NoSuchBucket, true); + assert.strictEqual(md, undefined); + // do not delete the default bucket-level master encryption key + sinon.assert.notCalled(destroyBucketKeySpy); + done(); + }); + }); + }); + }); + }); + }); + + describe('with failed encryption', () => { + beforeEach(() => { + sinon.stub(inMemory, 'destroyBucketKey').callsFake((bucketKeyId, log, cb) => cb(errors.InternalError)); + }); + + afterEach(() => { + sinon.restore(); + cleanup(); + }); + + it('should fail deleting the bucket-level encryption key', done => { + bucketPut(authInfo, testRequest, log, () => { + const post = templateSSEConfig({ algorithm: 'AES256' }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => { + assert.ifError(err); + bucketDelete(authInfo, testRequest, log, err => { + assert(err && err.InternalError); + done(); + }); + }); + }); + }); + }); }); diff --git a/tests/unit/api/bucketDeleteEncryption.js b/tests/unit/api/bucketDeleteEncryption.js index 8d231ab131..da4fa7b524 100644 --- a/tests/unit/api/bucketDeleteEncryption.js +++ b/tests/unit/api/bucketDeleteEncryption.js @@ -57,11 +57,163 @@ describe('bucketDeleteEncryption API', () => { assert.ifError(err); return getSSEConfig(bucketName, log, (err, sseInfo) => { assert.ifError(err); + assert(!sseInfo.masterKeyId); assert.strictEqual(sseInfo.mandatory, false); - assert.strictEqual(sseInfo.configuredMasterKeyId, undefined); + assert.strictEqual(sseInfo.configuredMasterKeyId, '12345'); done(); }); }); }); }); + + it('should generate a new master key and clear the configured key id', done => { + const keyId = '12345'; + const post = templateSSEConfig({ algorithm: 'aws:kms', keyId }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => { + 
assert.ifError(err); + bucketDeleteEncryption(authInfo, templateRequest(bucketName, {}), log, err => { + assert.ifError(err); + const post2 = templateSSEConfig({ algorithm: 'aws:kms' }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post: post2 }), log, err => { + assert.ifError(err); + return getSSEConfig(bucketName, log, (err, sseInfo) => { + assert.ifError(err); + assert.strictEqual(sseInfo.mandatory, true); + assert.strictEqual(sseInfo.algorithm, 'aws:kms'); + assert(sseInfo.masterKeyId); + assert.notStrictEqual(sseInfo.masterKeyId, keyId, 'masterKeyId should be different from keyId'); + assert(!sseInfo.configuredMasterKeyId); + done(); + }); + }); + }); + }); + }); + + it('should generate a new master key, update the algorithm and clear the configured key id', done => { + const keyId = '12345'; + const post = templateSSEConfig({ algorithm: 'aws:kms', keyId }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => { + assert.ifError(err); + bucketDeleteEncryption(authInfo, templateRequest(bucketName, {}), log, err => { + assert.ifError(err); + const post2 = templateSSEConfig({ algorithm: 'AES256' }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post: post2 }), log, err => { + assert.ifError(err); + return getSSEConfig(bucketName, log, (err, sseInfo) => { + assert.ifError(err); + assert.strictEqual(sseInfo.mandatory, true); + assert.strictEqual(sseInfo.algorithm, 'AES256'); + assert(sseInfo.masterKeyId); + assert.notStrictEqual(sseInfo.masterKeyId, keyId, 'masterKeyId should be different from keyId'); + assert(!sseInfo.configuredMasterKeyId); + done(); + }); + }); + }); + }); + }); + + it('should update the configured key id', done => { + const keyId = '12345'; + const post = templateSSEConfig({ algorithm: 'aws:kms', keyId }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => { + assert.ifError(err); + bucketDeleteEncryption(authInfo, templateRequest(bucketName, {}), log, err => { + assert.ifError(err); + const keyId2 = '12345'; + const post2 = templateSSEConfig({ algorithm: 'aws:kms', keyId: keyId2 }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post: post2 }), log, err => { + assert.ifError(err); + return getSSEConfig(bucketName, log, (err, sseInfo) => { + assert.ifError(err); + assert.strictEqual(sseInfo.mandatory, true); + assert.strictEqual(sseInfo.algorithm, 'aws:kms'); + assert(!sseInfo.masterKeyId); + assert.strictEqual(sseInfo.configuredMasterKeyId, keyId2); + done(); + }); + }); + }); + }); + }); + + it('should add the configured key id and keep the default master key id', done => { + const post = templateSSEConfig({ algorithm: 'AES256' }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => { + assert.ifError(err); + return getSSEConfig(bucketName, log, (err, sseInfo) => { + assert.ifError(err); + const expectedMasterKeyId = sseInfo.masterKeyId; + bucketDeleteEncryption(authInfo, templateRequest(bucketName, {}), log, err => { + assert.ifError(err); + const keyId = '12345'; + const post2 = templateSSEConfig({ algorithm: 'aws:kms', keyId }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post: post2 }), log, err => { + assert.ifError(err); + return getSSEConfig(bucketName, log, (err, sseInfo) => { + assert.ifError(err); + assert.strictEqual(sseInfo.mandatory, true); + assert.strictEqual(sseInfo.algorithm, 'aws:kms'); + assert.strictEqual(sseInfo.masterKeyId, expectedMasterKeyId); + 
assert.strictEqual(sseInfo.configuredMasterKeyId, keyId); + done(); + }); + }); + }); + }); + }); + }); + + it('should use the default master key id', done => { + const post = templateSSEConfig({ algorithm: 'AES256' }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => { + assert.ifError(err); + return getSSEConfig(bucketName, log, (err, sseInfo) => { + assert.ifError(err); + const expectedMasterKeyId = sseInfo.masterKeyId; + bucketDeleteEncryption(authInfo, templateRequest(bucketName, {}), log, err => { + assert.ifError(err); + const post2 = templateSSEConfig({ algorithm: 'AES256' }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post: post2 }), log, err => { + assert.ifError(err); + return getSSEConfig(bucketName, log, (err, sseInfo) => { + assert.ifError(err); + assert.strictEqual(sseInfo.mandatory, true); + assert.strictEqual(sseInfo.algorithm, 'AES256'); + assert.strictEqual(sseInfo.masterKeyId, expectedMasterKeyId); + assert(!sseInfo.configuredMasterKeyId); + done(); + }); + }); + }); + }); + }); + }); + + it('should use the default master key id with aws:kms algorithm', done => { + const post = templateSSEConfig({ algorithm: 'AES256' }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => { + assert.ifError(err); + return getSSEConfig(bucketName, log, (err, sseInfo) => { + assert.ifError(err); + const expectedMasterKeyId = sseInfo.masterKeyId; + bucketDeleteEncryption(authInfo, templateRequest(bucketName, {}), log, err => { + assert.ifError(err); + const post2 = templateSSEConfig({ algorithm: 'aws:kms' }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post: post2 }), log, err => { + assert.ifError(err); + return getSSEConfig(bucketName, log, (err, sseInfo) => { + assert.ifError(err); + assert.strictEqual(sseInfo.mandatory, true); + assert.strictEqual(sseInfo.algorithm, 'aws:kms'); + assert.strictEqual(sseInfo.masterKeyId, expectedMasterKeyId); + assert(!sseInfo.configuredMasterKeyId); + done(); + }); + }); + }); + }); + }); + }); }); diff --git a/tests/unit/api/bucketPut.js b/tests/unit/api/bucketPut.js index 0bee6955ca..309657f743 100644 --- a/tests/unit/api/bucketPut.js +++ b/tests/unit/api/bucketPut.js @@ -1,6 +1,8 @@ const assert = require('assert'); const { errors } = require('arsenal'); const sinon = require('sinon'); +const inMemory = require('../../../lib/kms/in_memory/backend').backend; +const vault = require('../../../lib/auth/vault'); const { checkLocationConstraint, _handleAuthResults } = require('../../../lib/api/bucketPut'); const { bucketPut } = require('../../../lib/api/bucketPut'); @@ -457,3 +459,251 @@ describe('bucketPut API', () => { })); }); }); + +describe('bucketPut API with bucket-level encryption', () => { + let createBucketKeySpy; + + beforeEach(() => { + createBucketKeySpy = sinon.spy(inMemory, 'createBucketKey'); + }); + + afterEach(() => { + cleanup(); + sinon.restore(); + }); + + it('should create a bucket with AES256 algorithm', done => { + const testRequestWithEncryption = { + ...testRequest, + headers: { + 'host': `${bucketName}.s3.amazonaws.com`, + 'x-amz-scal-server-side-encryption': 'AES256', + }, + }; + bucketPut(authInfo, testRequestWithEncryption, log, err => { + assert.ifError(err); + sinon.assert.calledOnce(createBucketKeySpy); + return metadata.getBucket(bucketName, log, (err, md) => { + assert.ifError(err); + const serverSideEncryption = md.getServerSideEncryption(); + assert.strictEqual(serverSideEncryption.algorithm, 'AES256'); + 
assert.strictEqual(serverSideEncryption.mandatory, true); + assert(serverSideEncryption.masterKeyId); + assert(!serverSideEncryption.isAccountEncryptionEnabled); + done(); + }); + }); + }); + + it('should create a bucket with aws:kms algorithm', done => { + const testRequestWithEncryption = { + ...testRequest, + headers: { + 'host': `${bucketName}.s3.amazonaws.com`, + 'x-amz-scal-server-side-encryption': 'aws:kms', + }, + }; + bucketPut(authInfo, testRequestWithEncryption, log, err => { + assert.ifError(err); + sinon.assert.calledOnce(createBucketKeySpy); + return metadata.getBucket(bucketName, log, (err, md) => { + assert.ifError(err); + const serverSideEncryption = md.getServerSideEncryption(); + assert.strictEqual(serverSideEncryption.algorithm, 'aws:kms'); + assert.strictEqual(serverSideEncryption.mandatory, true); + assert(serverSideEncryption.masterKeyId); + assert(!serverSideEncryption.isAccountEncryptionEnabled); + done(); + }); + }); + }); + + it('should create a bucket with aws:kms algorithm and configured key id', done => { + const keyId = '12345'; + const testRequestWithEncryption = { + ...testRequest, + headers: { + 'host': `${bucketName}.s3.amazonaws.com`, + 'x-amz-scal-server-side-encryption': 'aws:kms', + 'x-amz-scal-server-side-encryption-aws-kms-key-id': keyId, + }, + }; + bucketPut(authInfo, testRequestWithEncryption, log, err => { + assert.ifError(err); + sinon.assert.notCalled(createBucketKeySpy); + return metadata.getBucket(bucketName, log, (err, md) => { + assert.ifError(err); + assert.deepStrictEqual(md.getServerSideEncryption(), { + cryptoScheme: 1, + algorithm: 'aws:kms', + mandatory: true, + configuredMasterKeyId: keyId, + }); + done(); + }); + }); + }); + + // TODO: Currently, the operation does not fail when both the AES256 algorithm + // and a KMS key ID are specified. Modify the behavior to ensure that bucket + // creation fails in this case. 
+ it.skip('should fail creating a bucket with AES256 algorithm and configured key id', done => { + const keyId = '12345'; + const testRequestWithEncryption = { + ...testRequest, + headers: { + 'host': `${bucketName}.s3.amazonaws.com`, + 'x-amz-scal-server-side-encryption': 'AES256', + 'x-amz-scal-server-side-encryption-aws-kms-key-id': keyId, + }, + }; + bucketPut(authInfo, testRequestWithEncryption, log, err => { + assert(err); + done(); + }); + }); +}); + +describe('bucketPut API with account level encryption', () => { + let getOrCreateEncryptionKeyIdSpy; + const accountLevelMasterKeyId = 'account-level-master-encryption-key'; + + beforeEach(() => { + sinon.stub(inMemory, 'supportsDefaultKeyPerAccount').value(true); + getOrCreateEncryptionKeyIdSpy = sinon.spy(vault, 'getOrCreateEncryptionKeyId'); + }); + + afterEach(() => { + cleanup(); + sinon.restore(); + }); + + it('should create a bucket with AES256 algorithm', done => { + const testRequestWithEncryption = { + ...testRequest, + headers: { + 'host': `${bucketName}.s3.amazonaws.com`, + 'x-amz-scal-server-side-encryption': 'AES256', + }, + }; + bucketPut(authInfo, testRequestWithEncryption, log, err => { + assert.ifError(err); + sinon.assert.calledOnce(getOrCreateEncryptionKeyIdSpy); + return metadata.getBucket(bucketName, log, (err, md) => { + assert.ifError(err); + assert.deepStrictEqual(md.getServerSideEncryption(), { + cryptoScheme: 1, + algorithm: 'AES256', + mandatory: true, + masterKeyId: accountLevelMasterKeyId, + isAccountEncryptionEnabled: true, + }); + done(); + }); + }); + }); + + it('should create a bucket with aws:kms algorithm', done => { + const testRequestWithEncryption = { + ...testRequest, + headers: { + 'host': `${bucketName}.s3.amazonaws.com`, + 'x-amz-scal-server-side-encryption': 'aws:kms', + }, + }; + bucketPut(authInfo, testRequestWithEncryption, log, err => { + assert.ifError(err); + sinon.assert.calledOnce(getOrCreateEncryptionKeyIdSpy); + return metadata.getBucket(bucketName, log, (err, md) => { + assert.ifError(err); + assert.deepStrictEqual(md.getServerSideEncryption(), { + cryptoScheme: 1, + algorithm: 'aws:kms', + mandatory: true, + masterKeyId: accountLevelMasterKeyId, + isAccountEncryptionEnabled: true, + }); + done(); + }); + }); + }); + + it('should create a bucket with aws:kms algorithm and configured key id', done => { + const keyId = '12345'; + const testRequestWithEncryption = { + ...testRequest, + headers: { + 'host': `${bucketName}.s3.amazonaws.com`, + 'x-amz-scal-server-side-encryption': 'aws:kms', + 'x-amz-scal-server-side-encryption-aws-kms-key-id': keyId, + }, + }; + bucketPut(authInfo, testRequestWithEncryption, log, err => { + assert.ifError(err); + return metadata.getBucket(bucketName, log, (err, md) => { + assert.ifError(err); + sinon.assert.notCalled(getOrCreateEncryptionKeyIdSpy); + assert.deepStrictEqual(md.getServerSideEncryption(), { + cryptoScheme: 1, + algorithm: 'aws:kms', + mandatory: true, + configuredMasterKeyId: keyId, + }); + done(); + }); + }); + }); +}); + +describe('bucketPut API with failed encryption service', () => { + beforeEach(() => { + sinon.stub(inMemory, 'createBucketKey').callsFake((bucketName, log, cb) => cb(errors.InternalError)); + }); + + afterEach(() => { + sinon.restore(); + cleanup(); + }); + + it('should fail creating bucket', done => { + const testRequestWithEncryption = { + ...testRequest, + headers: { + 'host': `${bucketName}.s3.amazonaws.com`, + 'x-amz-scal-server-side-encryption': 'AES256', + }, + }; + bucketPut(authInfo, testRequestWithEncryption, 
log, err => { + assert(err && err.InternalError); + done(); + }); + }); +}); + +describe('bucketPut API with failed vault service', () => { + beforeEach(() => { + sinon.stub(inMemory, 'supportsDefaultKeyPerAccount').value(true); + sinon.stub(vault, 'getOrCreateEncryptionKeyId').callsFake((accountCanonicalId, log, cb) => + cb(errors.ServiceFailure)); + }); + + afterEach(() => { + sinon.restore(); + cleanup(); + }); + + it('should fail putting bucket encryption', done => { + const testRequestWithEncryption = { + ...testRequest, + headers: { + 'host': `${bucketName}.s3.amazonaws.com`, + 'x-amz-scal-server-side-encryption': 'AES256', + }, + }; + bucketPut(authInfo, testRequestWithEncryption, log, err => { + assert(err && err.ServiceFailure); + done(); + }); + }); +}); + diff --git a/tests/unit/api/bucketPutEncryption.js b/tests/unit/api/bucketPutEncryption.js index 592b25008e..1b66f40462 100644 --- a/tests/unit/api/bucketPutEncryption.js +++ b/tests/unit/api/bucketPutEncryption.js @@ -1,4 +1,8 @@ const assert = require('assert'); +const sinon = require('sinon'); +const { errors } = require('arsenal'); +const inMemory = require('../../../lib/kms/in_memory/backend').backend; +const vault = require('../../../lib/auth/vault'); const { bucketPut } = require('../../../lib/api/bucketPut'); const bucketPutEncryption = require('../../../lib/api/bucketPutEncryption'); @@ -18,10 +22,17 @@ const bucketPutRequest = { }; describe('bucketPutEncryption API', () => { - before(() => cleanup()); + let createBucketKeySpy; - beforeEach(done => bucketPut(authInfo, bucketPutRequest, log, done)); - afterEach(() => cleanup()); + beforeEach(done => { + createBucketKeySpy = sinon.spy(inMemory, 'createBucketKey'); + bucketPut(authInfo, bucketPutRequest, log, done); + }); + + afterEach(() => { + sinon.restore(); + cleanup(); + }); describe('test invalid sse configs', () => { it('should reject an empty config', done => { @@ -84,6 +95,7 @@ describe('bucketPutEncryption API', () => { const post = templateSSEConfig({ algorithm: 'AES256' }); bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => { assert.ifError(err); + sinon.assert.calledOnce(createBucketKeySpy); return getSSEConfig(bucketName, log, (err, sseInfo) => { assert.ifError(err); assert.deepStrictEqual(sseInfo, { @@ -101,6 +113,7 @@ describe('bucketPutEncryption API', () => { const post = templateSSEConfig({ algorithm: 'aws:kms' }); bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => { assert.ifError(err); + sinon.assert.calledOnce(createBucketKeySpy); return getSSEConfig(bucketName, log, (err, sseInfo) => { assert.ifError(err); assert.deepStrictEqual(sseInfo, { @@ -118,13 +131,13 @@ describe('bucketPutEncryption API', () => { const post = templateSSEConfig({ algorithm: 'aws:kms', keyId: '12345' }); bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => { assert.ifError(err); + sinon.assert.notCalled(createBucketKeySpy); return getSSEConfig(bucketName, log, (err, sseInfo) => { assert.ifError(err); assert.deepStrictEqual(sseInfo, { mandatory: true, algorithm: 'aws:kms', cryptoScheme: 1, - masterKeyId: sseInfo.masterKeyId, configuredMasterKeyId: '12345', }); done(); @@ -237,19 +250,16 @@ describe('bucketPutEncryption API', () => { const post = templateSSEConfig({ algorithm: 'aws:kms', keyId: '12345' }); bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => { assert.ifError(err); - return getSSEConfig(bucketName, log, (err, sseInfo) => { + return 
getSSEConfig(bucketName, log, err => { assert.ifError(err); - const { masterKeyId } = sseInfo; const newConf = templateSSEConfig({ algorithm: 'AES256' }); return bucketPutEncryption(authInfo, templateRequest(bucketName, { post: newConf }), log, err => { assert.ifError(err); return getSSEConfig(bucketName, log, (err, updatedSSEInfo) => { - assert.deepStrictEqual(updatedSSEInfo, { - mandatory: true, - algorithm: 'AES256', - cryptoScheme: 1, - masterKeyId, - }); + assert.strictEqual(updatedSSEInfo.mandatory, true); + assert.strictEqual(updatedSSEInfo.algorithm, 'AES256'); + assert.strictEqual(updatedSSEInfo.cryptoScheme, 1); + assert(updatedSSEInfo.masterKeyId); done(); }); }); @@ -258,3 +268,118 @@ describe('bucketPutEncryption API', () => { }); }); }); + +describe('bucketPutEncryption API with failed encryption service', () => { + beforeEach(done => { + sinon.stub(inMemory, 'createBucketKey').callsFake((bucketName, log, cb) => cb(errors.InternalError)); + bucketPut(authInfo, bucketPutRequest, log, done); + }); + + afterEach(() => { + sinon.restore(); + cleanup(); + }); + + it('should fail putting bucket encryption', done => { + const post = templateSSEConfig({ algorithm: 'AES256' }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => { + assert(err && err.InternalError); + done(); + }); + }); +}); + +describe('bucketPutEncryption API with account level encryption', () => { + let getOrCreateEncryptionKeyIdSpy; + const accountLevelMasterKeyId = 'account-level-master-encryption-key'; + + beforeEach(done => { + sinon.stub(inMemory, 'supportsDefaultKeyPerAccount').value(true); + getOrCreateEncryptionKeyIdSpy = sinon.spy(vault, 'getOrCreateEncryptionKeyId'); + bucketPut(authInfo, bucketPutRequest, log, done); + }); + + afterEach(() => { + sinon.restore(); + cleanup(); + }); + + it('should create account level master encryption key with AES256 algorithm', done => { + const post = templateSSEConfig({ algorithm: 'AES256' }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => { + assert.ifError(err); + sinon.assert.calledOnce(getOrCreateEncryptionKeyIdSpy); + return getSSEConfig(bucketName, log, (err, sseInfo) => { + assert.ifError(err); + assert.deepStrictEqual(sseInfo, { + cryptoScheme: 1, + algorithm: 'AES256', + mandatory: true, + masterKeyId: accountLevelMasterKeyId, + isAccountEncryptionEnabled: true, + }); + done(); + }); + }); + }); + + it('should create account level master encryption key with aws:kms algorithm', done => { + const post = templateSSEConfig({ algorithm: 'aws:kms' }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => { + assert.ifError(err); + sinon.assert.calledOnce(getOrCreateEncryptionKeyIdSpy); + return getSSEConfig(bucketName, log, (err, sseInfo) => { + assert.ifError(err); + assert.deepStrictEqual(sseInfo, { + cryptoScheme: 1, + algorithm: 'aws:kms', + mandatory: true, + masterKeyId: accountLevelMasterKeyId, + isAccountEncryptionEnabled: true, + }); + done(); + }); + }); + }); + + it('should not create account level master key if custom master key id is specified', done => { + const keyId = '12345'; + const post = templateSSEConfig({ algorithm: 'aws:kms', keyId }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => { + assert.ifError(err); + sinon.assert.notCalled(getOrCreateEncryptionKeyIdSpy); + return getSSEConfig(bucketName, log, (err, sseInfo) => { + assert.ifError(err); + assert.deepStrictEqual(sseInfo, { + cryptoScheme: 1, + algorithm: 
'aws:kms', + mandatory: true, + configuredMasterKeyId: keyId, + }); + done(); + }); + }); + }); +}); + +describe('bucketPutEncryption API with failed vault service', () => { + beforeEach(done => { + sinon.stub(inMemory, 'supportsDefaultKeyPerAccount').value(true); + sinon.stub(vault, 'getOrCreateEncryptionKeyId').callsFake((accountCanonicalId, log, cb) => + cb(errors.ServiceFailure)); + bucketPut(authInfo, bucketPutRequest, log, done); + }); + + afterEach(() => { + sinon.restore(); + cleanup(); + }); + + it('should fail putting bucket encryption', done => { + const post = templateSSEConfig({ algorithm: 'AES256' }); + bucketPutEncryption(authInfo, templateRequest(bucketName, { post }), log, err => { + assert(err && err.ServiceFailure); + done(); + }); + }); +}); diff --git a/tests/unit/encryption/kms.js b/tests/unit/encryption/kms.js index 0bb1c0315a..7abd22c191 100644 --- a/tests/unit/encryption/kms.js +++ b/tests/unit/encryption/kms.js @@ -1,10 +1,13 @@ const assert = require('assert'); +const BucketInfo = require('arsenal').models.BucketInfo; const KMS = require('../../../lib/kms/wrapper'); const { parseBucketEncryptionHeaders } = require('../../../lib/api/apiUtils/bucket/bucketEncryption'); const Common = require('../../../lib/kms/common'); const { cleanup, DummyRequestLogger } = require('../helpers'); const log = new DummyRequestLogger(); +const dummyBucket = new BucketInfo( + 'dummyBucket', 'dummyOwnerId', 'Joe, John', new Date().toJSON()); describe('KMS unit tests', () => { beforeEach(() => { @@ -18,7 +21,7 @@ describe('KMS unit tests', () => { }; const sseConfig = parseBucketEncryptionHeaders(headers); KMS.bucketLevelEncryption( - 'dummyBucket', sseConfig, log, + dummyBucket, sseConfig, log, (err, sseInfo) => { assert.strictEqual(err, null); assert.strictEqual(sseInfo.cryptoScheme, 1); @@ -39,7 +42,7 @@ describe('KMS unit tests', () => { }; const sseConfig = parseBucketEncryptionHeaders(headers); KMS.bucketLevelEncryption( - 'dummyBucket', sseConfig, log, + dummyBucket, sseConfig, log, (err, sseInfo) => { assert.strictEqual(err, null); assert.strictEqual(sseInfo.cryptoScheme, 1); @@ -61,7 +64,7 @@ describe('KMS unit tests', () => { }; const sseConfig = parseBucketEncryptionHeaders(headers); KMS.bucketLevelEncryption( - 'dummyBucket', sseConfig, log, + dummyBucket, sseConfig, log, (err, sseInfo) => { assert.strictEqual(err, null); assert.strictEqual(sseInfo, null); @@ -74,7 +77,7 @@ describe('KMS unit tests', () => { done => { const sseConfig = parseBucketEncryptionHeaders({}); KMS.bucketLevelEncryption( - 'dummyBucket', sseConfig, log, + dummyBucket, sseConfig, log, (err, sseInfo) => { assert.strictEqual(err, null); assert.strictEqual(sseInfo, null); @@ -89,7 +92,7 @@ describe('KMS unit tests', () => { }; const sseConfig = parseBucketEncryptionHeaders(headers); KMS.bucketLevelEncryption( - 'dummyBucket', sseConfig, log, + dummyBucket, sseConfig, log, (err, sseInfo) => { KMS.createCipherBundle( sseInfo, log, (err, cipherBundle) => { @@ -113,7 +116,7 @@ describe('KMS unit tests', () => { let masterKeyId; let sseConfig = parseBucketEncryptionHeaders(headers); KMS.bucketLevelEncryption( - 'dummyBucket', sseConfig, log, + dummyBucket, sseConfig, log, (err, sseInfo) => { assert.strictEqual(err, null); masterKeyId = sseInfo.bucketKeyId; @@ -124,7 +127,7 @@ describe('KMS unit tests', () => { masterKeyId; sseConfig = parseBucketEncryptionHeaders(headers); KMS.bucketLevelEncryption( - 'dummyBucket', sseConfig, log, + dummyBucket, sseConfig, log, (err, sseInfo) => { KMS.createCipherBundle( 
sseInfo, log, (err, cipherBundle) => { @@ -149,7 +152,7 @@ describe('KMS unit tests', () => { }; const sseConfig = parseBucketEncryptionHeaders(headers); KMS.bucketLevelEncryption( - 'dummyBucket', sseConfig, log, + dummyBucket, sseConfig, log, (err, sseInfo) => { if (err) { cb(err); diff --git a/tests/unit/testConfigs/parseKmsAWS.js b/tests/unit/testConfigs/parseKmsAWS.js new file mode 100644 index 0000000000..d4a327413d --- /dev/null +++ b/tests/unit/testConfigs/parseKmsAWS.js @@ -0,0 +1,326 @@ +const assert = require('assert'); +const sinon = require('sinon'); +const fs = require('fs'); +const path = require('path'); + +const { ConfigObject: Config } = require('../../../lib/Config'); + +describe('parseKmsAWS Function', () => { + let configInstance; + + beforeEach(() => { + configInstance = new Config(); + }); + + it('should return an empty object if no kmsAWS config is provided', () => { + const config = {}; + const result = configInstance._parseKmsAWS(config); + assert.deepStrictEqual(result, {}); + }); + + it('should throw an error if endpoint is not defined in kmsAWS', () => { + const config = { kmsAWS: { ak: 'ak', sk: 'sk' } }; + assert.throws(() => configInstance._parseKmsAWS(config), 'endpoint must be defined'); + }); + + it('should throw an error if ak is not defined in kmsAWS', () => { + const config = { kmsAWS: { endpoint: 'https://example.com', sk: 'sk' } }; + assert.throws(() => configInstance._parseKmsAWS(config), 'ak must be defined'); + }); + + it('should throw an error if sk is not defined in kmsAWS', () => { + const config = { kmsAWS: { endpoint: 'https://example.com', ak: 'ak' } }; + assert.throws(() => configInstance._parseKmsAWS(config), 'sk must be defined'); + }); + + it('should return the expected kmsAWS object when valid config is provided', () => { + const config = { + kmsAWS: { + endpoint: 'https://example.com', + ak: 'accessKey', + sk: 'secretKey', + }, + }; + const result = configInstance._parseKmsAWS(config); + assert.deepStrictEqual(result, { + endpoint: 'https://example.com', + ak: 'accessKey', + sk: 'secretKey', + }); + }); + + it('should include region if provided in the config', () => { + const config = { + kmsAWS: { + endpoint: 'https://example.com', + ak: 'accessKey', + sk: 'secretKey', + region: 'us-west-2', + }, + }; + const result = configInstance._parseKmsAWS(config); + assert.deepStrictEqual(result, { + endpoint: 'https://example.com', + ak: 'accessKey', + sk: 'secretKey', + region: 'us-west-2', + }); + }); + + it('should include tls configuration if provided', () => { + const config = { + kmsAWS: { + endpoint: 'https://example.com', + ak: 'accessKey', + sk: 'secretKey', + tls: { + rejectUnauthorized: true, + minVersion: 'TLSv1.2', + maxVersion: 'TLSv1.3', + }, + }, + }; + const result = configInstance._parseKmsAWS(config); + assert.deepStrictEqual(result, { + endpoint: 'https://example.com', + ak: 'accessKey', + sk: 'secretKey', + tls: { + rejectUnauthorized: true, + minVersion: 'TLSv1.2', + maxVersion: 'TLSv1.3', + }, + }); + }); +}); + +describe('parseKmsAWS TLS section', () => { + let readFileSyncStub; + let configInstance; + + const mockCertifContent = Buffer.from('certificate'); + + beforeEach(() => { + configInstance = new Config(); + readFileSyncStub = sinon.stub(fs, 'readFileSync').returns(mockCertifContent); + }); + + afterEach(() => { + readFileSyncStub.restore(); + }); + + it('should throw an error if tls.rejectUnauthorized is not a boolean', () => { + const config = { + kmsAWS: { + endpoint: 'https://example.com', + ak: 'accessKey', + 
sk: 'secretKey', + tls: { + rejectUnauthorized: 'true', // Invalid type + }, + }, + }; + + assert.throws(() => configInstance._parseKmsAWS(config)); + }); + + it('should throw an error if tls.minVersion is not a string', () => { + const config = { + kmsAWS: { + endpoint: 'https://example.com', + ak: 'accessKey', + sk: 'secretKey', + tls: { + minVersion: 1.2, // Invalid type + }, + }, + }; + + assert.throws(() => configInstance._parseKmsAWS(config), { + message: 'bad config: KMS AWS TLS minVersion must be a string', + }); + }); + + it('should throw an error if tls.maxVersion is not a string', () => { + const config = { + kmsAWS: { + endpoint: 'https://example.com', + ak: 'accessKey', + sk: 'secretKey', + tls: { + maxVersion: 1.3, // Invalid type + }, + }, + }; + + assert.throws(() => configInstance._parseKmsAWS(config), { + message: 'bad config: KMS AWS TLS maxVersion must be a string', + }); + }); + + it('should throw an error if tls.ca is not a string or an array', () => { + const config = { + kmsAWS: { + endpoint: 'https://example.com', + ak: 'accessKey', + sk: 'secretKey', + tls: { + ca: 12345, // Invalid type + }, + }, + }; + + assert.throws(() => configInstance._parseKmsAWS(config), { + message: 'bad config: TLS file specification must be a string', + }); + }); + + it('should return an empty tls object if all tls fields are undefined', () => { + const config = { + kmsAWS: { + endpoint: 'https://example.com', + ak: 'accessKey', + sk: 'secretKey', + tls: {}, + }, + }; + + const result = configInstance._parseKmsAWS(config); + assert.deepStrictEqual(result.tls, {}); + }); + + it('should load tls.ca as an array of files', () => { + const config = { + kmsAWS: { + endpoint: 'http://example.com', + ak: 'accessKey', + sk: 'secretKey', + tls: { + ca: ['/path/to/ca1.pem', '/path/to/ca2.pem'], + }, + }, + }; + + const result = configInstance._parseKmsAWS(config); + + assert.deepStrictEqual(result.tls.ca, [mockCertifContent, mockCertifContent]); + assert(readFileSyncStub.calledTwice); + assert(readFileSyncStub.calledWith('/path/to/ca1.pem')); + assert(readFileSyncStub.calledWith('/path/to/ca2.pem')); + }); + + it('should load tls.cert as a single file', () => { + const config = { + kmsAWS: { + endpoint: 'http://example.com', + ak: 'accessKey', + sk: 'secretKey', + tls: { + cert: '/path/to/cert.pem', + }, + }, + }; + + const result = configInstance._parseKmsAWS(config); + + assert.deepStrictEqual(result.tls.cert, mockCertifContent); + assert(readFileSyncStub.calledOnce); + assert(readFileSyncStub.calledWith('/path/to/cert.pem')); + }); + + it('should load tls.key as a single file', () => { + const config = { + kmsAWS: { + endpoint: 'http://example.com', + ak: 'accessKey', + sk: 'secretKey', + tls: { + key: '/path/to/key.pem', + }, + }, + }; + + const result = configInstance._parseKmsAWS(config); + + assert.deepStrictEqual(result.tls.key, mockCertifContent); + assert(readFileSyncStub.calledOnce); + assert(readFileSyncStub.calledWith('/path/to/key.pem')); + }); + + it('should not load TLS files if tls is undefined', () => { + const config = { + kmsAWS: { + endpoint: 'http://example.com', + ak: 'accessKey', + sk: 'secretKey', + }, + }; + + const result = configInstance._parseKmsAWS(config); + + assert.strictEqual(result.tls, undefined); + assert(readFileSyncStub.notCalled); + }); + + it('should load tls.cert as a single file with relative path', () => { + const certPath = 'path/to/cert.pem'; + const basePath = configInstance._basePath; + const config = { + kmsAWS: { + endpoint: 'http://example.com', + 
ak: 'accessKey', + sk: 'secretKey', + tls: { + cert: certPath, + }, + }, + }; + + const result = configInstance._parseKmsAWS(config); + + assert.deepStrictEqual(result.tls.cert, mockCertifContent); + assert(readFileSyncStub.calledOnce); + assert(readFileSyncStub.calledWith(path.join(basePath, certPath))); + }); + + it('should load tls.key, tls.cert, and tls.ca as arrays of files with relative paths', () => { + const basePath = configInstance._basePath; + + const keyPaths = ['path/to/key1.pem', 'path/to/key2.pem']; + const certPaths = ['path/to/cert1.pem', 'path/to/cert2.pem']; + const caPaths = ['path/to/ca1.pem', 'path/to/ca2.pem']; + + const config = { + kmsAWS: { + endpoint: 'http://example.com', + ak: 'accessKey', + sk: 'secretKey', + tls: { + key: keyPaths, + cert: certPaths, + ca: caPaths, + }, + }, + }; + + const result = configInstance._parseKmsAWS(config); + + assert.deepStrictEqual(result.tls.key, [mockCertifContent, mockCertifContent]); + assert.deepStrictEqual(result.tls.cert, [mockCertifContent, mockCertifContent]); + assert.deepStrictEqual(result.tls.ca, [mockCertifContent, mockCertifContent]); + + keyPaths.forEach((keyPath) => { + assert(readFileSyncStub.calledWith(path.join(basePath, keyPath))); + }); + + certPaths.forEach((certPath) => { + assert(readFileSyncStub.calledWith(path.join(basePath, certPath))); + }); + + caPaths.forEach((caPath) => { + assert(readFileSyncStub.calledWith(path.join(basePath, caPath))); + }); + + assert(readFileSyncStub.callCount === (keyPaths.length + certPaths.length + caPaths.length)); + }); +}); diff --git a/yarn.lock b/yarn.lock index 468dc36174..ba64c16996 100644 --- a/yarn.lock +++ b/yarn.lock @@ -499,9 +499,9 @@ arraybuffer.slice@~0.0.7: optionalDependencies: ioctl "^2.0.2" -"arsenal@git+https://github.com/scality/arsenal#7.70.29": - version "7.70.29" - resolved "git+https://github.com/scality/arsenal#a643a3e6ccbc49327339a285de1d4cb17afcd171" +"arsenal@git+https://github.com/scality/arsenal#7.70.35": + version "7.70.35" + resolved "git+https://github.com/scality/arsenal#3f79edaaa614221c36aa935931ee9806f3c0d102" dependencies: "@js-sdsl/ordered-set" "^4.4.2" "@types/async" "^3.2.12" @@ -5585,6 +5585,16 @@ vaultclient@scality/vaultclient#7.10.13: werelogs "git+https://github.com/scality/werelogs#8.1.0" xml2js "0.4.19" +vaultclient@scality/vaultclient#7.10.15: + version "7.10.15" + resolved "https://codeload.github.com/scality/vaultclient/tar.gz/a4b098e042188948a284f3d67b93e6ca879ffbbf" + dependencies: + agentkeepalive "^4.1.3" + arsenal "git+https://github.com/scality/Arsenal#7.10.46" + commander "2.20.0" + werelogs "git+https://github.com/scality/werelogs#8.1.0" + xml2js "0.4.19" + verror@1.10.0: version "1.10.0" resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" From b7169deb691c7b23c9e7a7f58928f67343032d14 Mon Sep 17 00:00:00 2001 From: Nicolas Humbert Date: Wed, 9 Oct 2024 09:45:42 +0200 Subject: [PATCH 6/6] CLDSRV-559 bump package version --- package.json | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/package.json b/package.json index 6f803e9911..f6e9893869 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "s3", - "version": "7.70.51", + "version": "7.70.52", "description": "S3 connector", "main": "index.js", "engines": {
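
Note: the new tests in tests/unit/testConfigs/parseKmsAWS.js imply the shape of the kmsAWS section that _parseKmsAWS expects in the CloudServer config. The sketch below is illustrative only, assembled from the field names exercised by the tests above (endpoint, ak, sk, optional region and tls with rejectUnauthorized/minVersion/maxVersion/ca/cert/key); the endpoint, credentials and certificate paths are placeholder values, not a recommended production setup.

    "kmsAWS": {
        "endpoint": "https://example.com",
        "ak": "accessKey",
        "sk": "secretKey",
        "region": "us-west-2",
        "tls": {
            "rejectUnauthorized": true,
            "minVersion": "TLSv1.2",
            "maxVersion": "TLSv1.3",
            "ca": ["path/to/ca1.pem", "path/to/ca2.pem"],
            "cert": "path/to/cert.pem",
            "key": "path/to/key.pem"
        }
    }

As the tests assert, endpoint, ak and sk must all be defined, tls.ca/cert/key may each be a single path or an array of paths, and relative certificate paths are resolved against the config base path before being read with fs.readFileSync.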