diff --git a/constants.js b/constants.js index 085103d608..b4178ba42f 100644 --- a/constants.js +++ b/constants.js @@ -215,6 +215,7 @@ const constants = { 'initiateMultipartUpload', 'objectPutPart', 'completeMultipartUpload', + 'objectPost', ], }; diff --git a/lib/api/api.js b/lib/api/api.js index 23039807da..b79cdc4833 100644 --- a/lib/api/api.js +++ b/lib/api/api.js @@ -52,6 +52,7 @@ const objectGetRetention = require('./objectGetRetention'); const objectGetTagging = require('./objectGetTagging'); const objectHead = require('./objectHead'); const objectPut = require('./objectPut'); +const objectPost = require('./objectPost'); const objectPutACL = require('./objectPutACL'); const objectPutLegalHold = require('./objectPutLegalHold'); const objectPutTagging = require('./objectPutTagging'); @@ -67,7 +68,9 @@ const writeContinue = require('../utilities/writeContinue'); const validateQueryAndHeaders = require('../utilities/validateQueryAndHeaders'); const parseCopySource = require('./apiUtils/object/parseCopySource'); const { tagConditionKeyAuth } = require('./apiUtils/authorization/tagConditionKeys'); +const { checkAuthResults } = require('./apiUtils/authorization/permissionChecks'); const checkHttpHeadersSize = require('./apiUtils/object/checkHttpHeadersSize'); +const { processPostForm } = require('./apiUtils/apiCallers/callPostObject'); const monitoringMap = policies.actionMaps.actionMonitoringMapS3; @@ -142,49 +145,6 @@ const api = { // eslint-disable-next-line no-param-reassign request.apiMethods = apiMethods; - function checkAuthResults(authResults) { - let returnTagCount = true; - const isImplicitDeny = {}; - let isOnlyImplicitDeny = true; - if (apiMethod === 'objectGet') { - // first item checks s3:GetObject(Version) action - if (!authResults[0].isAllowed && !authResults[0].isImplicit) { - log.trace('get object authorization denial from Vault'); - return errors.AccessDenied; - } - // TODO add support for returnTagCount in the bucket policy - // checks - isImplicitDeny[authResults[0].action] = authResults[0].isImplicit; - // second item checks s3:GetObject(Version)Tagging action - if (!authResults[1].isAllowed) { - log.trace('get tagging authorization denial ' + - 'from Vault'); - returnTagCount = false; - } - } else { - for (let i = 0; i < authResults.length; i++) { - isImplicitDeny[authResults[i].action] = true; - if (!authResults[i].isAllowed && !authResults[i].isImplicit) { - // Any explicit deny rejects the current API call - log.trace('authorization denial from Vault'); - return errors.AccessDenied; - } - if (authResults[i].isAllowed) { - // If the action is allowed, the result is not implicit - // Deny. - isImplicitDeny[authResults[i].action] = false; - isOnlyImplicitDeny = false; - } - } - } - // These two APIs cannot use ACLs or Bucket Policies, hence, any - // implicit deny from vault must be treated as an explicit deny. 
-        if ((apiMethod === 'bucketPut' || apiMethod === 'serviceGet') && isOnlyImplicitDeny) {
-            return errors.AccessDenied;
-        }
-        return { returnTagCount, isImplicitDeny };
-    }
-
         return async.waterfall([
             next => auth.server.doAuth(
                 request, log, (err, userInfo, authorizationResults, streamingV4Params) => {
@@ -256,7 +216,7 @@ const api = {
                 return callback(err);
             }
             if (authorizationResults) {
-                const checkedResults = checkAuthResults(authorizationResults);
+                const checkedResults = checkAuthResults(apiMethod, authorizationResults, log);
                 if (checkedResults instanceof Error) {
                     return callback(checkedResults);
                 }
@@ -286,6 +246,42 @@ const api = {
             return this[apiMethod](userInfo, request, log, callback);
         });
     },
+    callPostObject(apiMethod, request, response, log, callback) {
+        request.apiMethod = apiMethod;
+
+        const requestContexts = prepareRequestContexts('objectPost', request,
+            undefined, undefined, undefined);
+        // Extract all the _apiMethods and store them in an array
+        const apiMethods = requestContexts ? requestContexts.map(context => context._apiMethod) : [];
+        // Attach the names to the current request
+        // eslint-disable-next-line no-param-reassign
+        request.apiMethods = apiMethods;
+
+        return processPostForm(request, response, requestContexts, log,
+            (err, userInfo, authorizationResults, streamingV4Params) => {
+                if (err) {
+                    return callback(err);
+                }
+                if (authorizationResults) {
+                    const checkedResults = checkAuthResults(apiMethod, authorizationResults, log);
+                    if (checkedResults instanceof Error) {
+                        return callback(checkedResults);
+                    }
+                    request.actionImplicitDenies = checkedResults.isImplicitDeny;
+                } else {
+                    // create an object of keys apiMethods with all values to false:
+                    // for backward compatibility, all apiMethods are allowed by default
+                    // thus it is explicitly allowed, so implicit deny is false
+                    request.actionImplicitDenies = apiMethods.reduce((acc, curr) => {
+                        acc[curr] = false;
+                        return acc;
+                    }, {});
+                }
+                request._response = response;
+                return objectPost(userInfo, request, streamingV4Params,
+                    log, callback);
+            });
+    },
     bucketDelete,
     bucketDeleteCors,
     bucketDeleteEncryption,
@@ -337,6 +333,7 @@ const api = {
     objectCopy,
     objectHead,
     objectPut,
+    objectPost,
     objectPutACL,
     objectPutLegalHold,
     objectPutTagging,
diff --git a/lib/api/apiUtils/apiCallers/callPostObject.js b/lib/api/apiUtils/apiCallers/callPostObject.js
new file mode 100644
index 0000000000..f69053176b
--- /dev/null
+++ b/lib/api/apiUtils/apiCallers/callPostObject.js
@@ -0,0 +1,210 @@
+const { auth, errors } = require('arsenal');
+const busboy = require('@fastify/busboy');
+const writeContinue = require('../../../utilities/writeContinue');
+const fs = require('fs');
+const path = require('path');
+const os = require('os');
+
+/** @see doc: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-HTTPPOSTForms.html#HTTPPOSTFormDeclaration */
+const MAX_FIELD_SIZE = 20 * 1024; // 20KB
+/** @see doc: https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html */
+const MAX_KEY_SIZE = 1024;
+const POST_OBJECT_OPTIONAL_FIELDS = [
+    'acl',
+    'awsaccesskeyid',
+    'bucket',
+    'cache-control',
+    'content-disposition',
+    'content-encoding',
+    'content-type',
+    'expires',
+    'policy',
+    'redirect',
+    'tagging',
+    'success_action_redirect',
+    'success_action_status',
+    'x-amz-meta-',
+    'x-amz-storage-class',
+    'x-amz-security-token',
+    'x-amz-signature',
+    'x-amz-website-redirect-location',
+];
+
+async function authenticateRequest(request, requestContexts, log) {
+    return new Promise(resolve => {
+        // TODO RING-45960 remove ignore auth check for POST object here
+        auth.server.doAuth(request, log, (err, userInfo, authorizationResults, streamingV4Params) =>
+            resolve({ userInfo, authorizationResults, streamingV4Params }), 's3', requestContexts);
+    });
+}
+
+async function parseFormData(request, response, requestContexts, log) {
+    /* eslint-disable no-param-reassign */
+    let formDataParser;
+    try {
+        formDataParser = busboy({ headers: request.headers });
+    } catch (err) {
+        log.trace('Error creating form data parser', { error: err.toString() });
+        return Promise.reject(errors.PreconditionFailed
+            .customizeDescription('Bucket POST must be of the enclosure-type multipart/form-data'));
+    }
+
+    writeContinue(request, response);
+
+    return new Promise((resolve, reject) => {
+        request.formData = {};
+        let totalFieldSize = 0;
+        let fileEventData = null;
+        let tempFileStream;
+        let tempFilePath;
+        let authResponse;
+        let fileWrittenPromiseResolve;
+        let formParserFinishedPromiseResolve;
+
+        const fileWrittenPromise = new Promise((res) => { fileWrittenPromiseResolve = res; });
+        const formParserFinishedPromise = new Promise((res) => { formParserFinishedPromiseResolve = res; });
+
+        formDataParser.on('field', (fieldname, val) => {
+            // Check if we have exceeded the max size allowed for all fields
+            totalFieldSize += Buffer.byteLength(val, 'utf8');
+            if (totalFieldSize > MAX_FIELD_SIZE) {
+                return reject(errors.MaxPostPreDataLengthExceeded);
+            }
+
+            // validate the fieldname
+            const lowerFieldname = fieldname.toLowerCase();
+            // special handling for key field
+            if (lowerFieldname === 'key') {
+                if (val.length > MAX_KEY_SIZE) {
+                    return reject(errors.KeyTooLong);
+                } else if (val.length === 0) {
+                    return reject(errors.InvalidArgument
+                        .customizeDescription('User key must have a length greater than 0.'));
+                }
+                request.formData[lowerFieldname] = val;
+            }
+            // add only the recognized fields to the formData object
+            if (POST_OBJECT_OPTIONAL_FIELDS.some(field => lowerFieldname.startsWith(field))) {
+                request.formData[lowerFieldname] = val;
+            }
+            return undefined;
+        });
+
+        formDataParser.on('file', async (fieldname, file, filename, encoding, mimetype) => {
+            if (fileEventData) {
+                file.resume(); // Resume the stream to drain and discard the file
+                if (tempFilePath) {
+                    fs.unlink(tempFilePath, unlinkErr => {
+                        if (unlinkErr) {
+                            log.error('Failed to delete temp file', { error: unlinkErr });
+                        }
+                    });
+                }
+                return reject(errors.InvalidArgument
+                    .customizeDescription('POST requires exactly one file upload per request.'));
+            }
+
+            fileEventData = { fieldname, file, filename, encoding, mimetype };
+            if (!('key' in request.formData)) {
+                return reject(errors.InvalidArgument
+                    .customizeDescription('Bucket POST must contain a field named ' +
+                        "'key'. If it is specified, please check the order of the fields."));
+            }
+            // Replace `${filename}` with the actual filename
+            request.formData.key = request.formData.key.replace('${filename}', filename);
+            try {
+                // Authenticate request before streaming file
+                // TODO RING-45960 auth to be properly implemented
+                authResponse = await authenticateRequest(request, requestContexts, log);
+
+                // Create a temporary file to stream the file data
+                // This is to finalize validation on form data before storing the file
+                tempFilePath = path.join(os.tmpdir(), filename);
+                tempFileStream = fs.createWriteStream(tempFilePath);
+
+                file.pipe(tempFileStream);
+
+                tempFileStream.on('finish', () => {
+                    request.fileEventData = { ...fileEventData, file: tempFilePath };
+                    fileWrittenPromiseResolve();
+                });
+
+                tempFileStream.on('error', (err) => {
+                    log.trace('Error streaming file to temporary location', { error: err.toString() });
+                    reject(errors.InternalError);
+                });
+
+                // Wait for both file writing and form parsing to finish
+                return Promise.all([fileWrittenPromise, formParserFinishedPromise])
+                    .then(() => resolve(authResponse))
+                    .catch(reject);
+            } catch (err) {
+                return reject(err);
+            }
+        });
+
+        formDataParser.on('finish', () => {
+            if (!fileEventData) {
+                return reject(errors.InvalidArgument
+                    .customizeDescription('POST requires exactly one file upload per request.'));
+            }
+            return formParserFinishedPromiseResolve();
+        });
+
+        formDataParser.on('error', (err) => {
+            log.trace('Error processing form data:', { error: err.toString() });
+            request.unpipe(formDataParser);
+            // Following observed AWS behaviour
+            reject(errors.MalformedPOSTRequest);
+        });
+
+        request.pipe(formDataParser);
+        return undefined;
+    });
+}
+
+function getFileStat(filePath, log) {
+    return new Promise((resolve, reject) => {
+        fs.stat(filePath, (err, stats) => {
+            if (err) {
+                log.trace('Error getting file size', { error: err.toString() });
+                return reject(errors.InternalError);
+            }
+            return resolve(stats);
+        });
+    });
+}
+
+async function processPostForm(request, response, requestContexts, log, callback) {
+    try {
+        const { userInfo, authorizationResults, streamingV4Params } =
+            await parseFormData(request, response, requestContexts, log);
+
+        const fileStat = await getFileStat(request.fileEventData.file, log);
+        request.parsedContentLength = fileStat.size;
+        request.fileEventData.file = fs.createReadStream(request.fileEventData.file);
+        if (request.formData['content-type']) {
+            request.headers['content-type'] = request.formData['content-type'];
+        } else {
+            request.headers['content-type'] = 'binary/octet-stream';
+        }
+
+        const authNames = { accountName: userInfo.getAccountDisplayName() };
+        if (userInfo.isRequesterAnIAMUser()) {
+            authNames.userName = userInfo.getIAMdisplayName();
+        }
+        log.addDefaultFields(authNames);
+
+        return callback(null, userInfo, authorizationResults, streamingV4Params);
+    } catch (err) {
+        return callback(err);
+    }
+}
+
+module.exports = {
+    authenticateRequest,
+    parseFormData,
+    processPostForm,
+    getFileStat,
+};
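The two deferred promises in parseFormData are the subtle part: the outer promise must not settle until both the temp-file write and the form parsing have finished, while any single failure rejects early. A minimal sketch of that pattern in isolation, not part of the diff — parser and storeFile are stand-ins, not APIs from this codebase:

function collectUpload(parser, storeFile) {
    let fileWrittenResolve;
    let parserFinishedResolve;
    const fileWritten = new Promise(res => { fileWrittenResolve = res; });
    const parserFinished = new Promise(res => { parserFinishedResolve = res; });

    return new Promise((resolve, reject) => {
        parser.on('file', (fieldname, stream) => {
            // Store the file, then mark the write as done...
            storeFile(stream).then(fileWrittenResolve, reject);
            // ...but only settle once parsing has ALSO finished, so that
            // trailing fields (and parser errors) are still accounted for.
            Promise.all([fileWritten, parserFinished]).then(resolve, reject);
        });
        parser.on('finish', parserFinishedResolve);
        parser.on('error', reject);
    });
}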
diff --git a/lib/api/apiUtils/authorization/permissionChecks.js b/lib/api/apiUtils/authorization/permissionChecks.js
index 170488b44b..b4e919edee 100644
--- a/lib/api/apiUtils/authorization/permissionChecks.js
+++ b/lib/api/apiUtils/authorization/permissionChecks.js
@@ -576,6 +576,48 @@ function validatePolicyConditions(policy) {
     return null;
 }
 
+function checkAuthResults(apiMethod, authResults, log) {
+    let returnTagCount = true;
+    const isImplicitDeny = {};
+    let isOnlyImplicitDeny = true;
+    if (apiMethod === 'objectGet') {
+        // first item checks s3:GetObject(Version) action
+        if (!authResults[0].isAllowed && !authResults[0].isImplicit) {
+            log.trace('get object authorization denial from Vault');
+            return errors.AccessDenied;
+        }
+        // TODO add support for returnTagCount in the bucket policy checks
+        isImplicitDeny[authResults[0].action] = authResults[0].isImplicit;
+        // second item checks s3:GetObject(Version)Tagging action
+        if (!authResults[1].isAllowed) {
+            log.trace('get tagging authorization denial from Vault');
+            returnTagCount = false;
+        }
+    } else {
+        for (let i = 0; i < authResults.length; i++) {
+            isImplicitDeny[authResults[i].action] = true;
+            if (!authResults[i].isAllowed && !authResults[i].isImplicit) {
+                // Any explicit deny rejects the current API call
+                log.trace('authorization denial from Vault');
+                return errors.AccessDenied;
+            }
+            if (authResults[i].isAllowed) {
+                // If the action is allowed, the result is not an implicit deny.
+                isImplicitDeny[authResults[i].action] = false;
+                isOnlyImplicitDeny = false;
+            }
+        }
+    }
+    // These two APIs cannot use ACLs or Bucket Policies, hence, any
+    // implicit deny from vault must be treated as an explicit deny.
+    if ((apiMethod === 'bucketPut' || apiMethod === 'serviceGet') && isOnlyImplicitDeny) {
+        return errors.AccessDenied;
+    }
+    return { returnTagCount, isImplicitDeny };
+}
+
 /** isLifecycleSession - check if it is the Lifecycle assumed role session arn.
  * @param {string} arn - Amazon resource name - example:
@@ -607,6 +649,7 @@ module.exports = {
     checkObjectAcls,
     validatePolicyResource,
     validatePolicyConditions,
+    checkAuthResults,
     isLifecycleSession,
     evaluateBucketPolicyWithIAM,
 };
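To illustrate the contract (not part of the diff), a hypothetical call with two Vault results — action names and values made up, and assuming a request logger `log`:

const sample = checkAuthResults('objectPost', [
    { action: 'objectPost', isAllowed: false, isImplicit: true },
    { action: 'objectPutTagging', isAllowed: true, isImplicit: false },
], log);
// No explicit deny, so no AccessDenied is returned; the implicit deny is
// surfaced through isImplicitDeny for the later ACL/bucket-policy evaluation:
// sample => { returnTagCount: true,
//             isImplicitDeny: { objectPost: true, objectPutTagging: false } }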
diff --git a/lib/api/apiUtils/object/createAndStoreObject.js b/lib/api/apiUtils/object/createAndStoreObject.js
index 7dc84089bf..62c600d37b 100644
--- a/lib/api/apiUtils/object/createAndStoreObject.js
+++ b/lib/api/apiUtils/object/createAndStoreObject.js
@@ -210,8 +210,18 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
                 metadataStoreParams.contentMD5 = constants.emptyFileMd5;
                 return next(null, null, null);
             }
-            return dataStore(objectKeyContext, cipherBundle, request, size,
-                streamingV4Params, backendInfo, log, next);
+            // An object POST carries its data in a file stream parsed out of
+            // the multipart form, so store that stream rather than the
+            // request stream itself.
+            let stream;
+
+            if (request.apiMethod === 'objectPost') {
+                stream = request.fileEventData ? request.fileEventData.file : undefined;
+            } else {
+                stream = request;
+            }
+
+            return dataStore(objectKeyContext, cipherBundle, stream, size, streamingV4Params, backendInfo, log, next);
         },
         function processDataResult(dataGetInfo, calculatedHash, next) {
             if (dataGetInfo === null || dataGetInfo === undefined) {
diff --git a/lib/api/apiUtils/object/prepareStream.js b/lib/api/apiUtils/object/prepareStream.js
index 7d436dd96b..985ec4efe0 100644
--- a/lib/api/apiUtils/object/prepareStream.js
+++ b/lib/api/apiUtils/object/prepareStream.js
@@ -13,7 +13,7 @@ const V4Transform = require('../../../auth/streamingV4/V4Transform');
  * the type of request requires them
 */
 function prepareStream(stream, streamingV4Params, log, errCb) {
-    if (stream.headers['x-amz-content-sha256'] ===
+    if (stream && stream.headers && stream.headers['x-amz-content-sha256'] ===
         'STREAMING-AWS4-HMAC-SHA256-PAYLOAD') {
         if (typeof streamingV4Params !== 'object') {
             // this might happen if the user provided a valid V2
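A brief illustration of why the prepareStream guard changed (not part of the diff): for POST uploads the data stream is a plain fs.ReadStream, which has no `headers` property, so the old `stream.headers[...]` lookup would throw.

const fs = require('fs');
const dataStream = fs.createReadStream('/tmp/upload'); // hypothetical path
console.log(dataStream.headers); // undefined — hence the `stream && stream.headers` guard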
diff --git a/lib/api/objectPost.js b/lib/api/objectPost.js
new file mode 100644
index 0000000000..399da5e5db
--- /dev/null
+++ b/lib/api/objectPost.js
@@ -0,0 +1,117 @@
+const async = require('async');
+const { errors, versioning } = require('arsenal');
+
+const collectCorsHeaders = require('../utilities/collectCorsHeaders');
+const createAndStoreObject = require('./apiUtils/object/createAndStoreObject');
+const { standardMetadataValidateBucketAndObj } = require('../metadata/metadataUtils');
+const { config } = require('../Config');
+const { setExpirationHeaders } = require('./apiUtils/object/expirationHeaders');
+const monitoring = require('../utilities/metrics');
+const writeContinue = require('../utilities/writeContinue');
+const { overheadField } = require('../../constants');
+
+const versionIdUtils = versioning.VersionID;
+
+/**
+ * POST Object in the requested bucket. Steps include:
+ * validating metadata for authorization, bucket and object existence etc.
+ * store object data in datastore upon successful authorization
+ * store object location returned by datastore and
+ * object's (custom) headers in metadata
+ * return the result in final callback
+ *
+ * @param {AuthInfo} authInfo - Instance of AuthInfo class with requester's info
+ * @param {request} request - request object given by router,
+ * includes normalized headers
+ * @param {object | undefined} streamingV4Params - if v4 auth,
+ * object containing accessKey, signatureFromRequest, region, scopeDate,
+ * timestamp, and credentialScope
+ * (to be used for streaming v4 auth if applicable)
+ * @param {object} log - the request logger
+ * @param {Function} callback - final callback to call with the result
+ * @return {undefined}
+ */
+function objectPost(authInfo, request, streamingV4Params, log, callback) {
+    const {
+        headers,
+        method,
+        formData,
+        bucketName,
+    } = request;
+    const requestType = request.apiMethods || 'objectPost';
+    const valParams = {
+        authInfo,
+        bucketName,
+        objectKey: formData.key,
+        requestType,
+        request,
+    };
+    const canonicalID = authInfo.getCanonicalID();
+
+    log.trace('owner canonicalID to send to data', { canonicalID });
+    return standardMetadataValidateBucketAndObj(valParams, request.actionImplicitDenies, log,
+        (err, bucket, objMD) => {
+            const responseHeaders = collectCorsHeaders(headers.origin,
+                method, bucket);
+
+            // TODO RING-45960 remove accessdenied skip
+            if (err && !err.AccessDenied) {
+                log.trace('error processing request', {
+                    error: err,
+                    method: 'metadataValidateBucketAndObj',
+                });
+                monitoring.promMetrics('POST', request.bucketName, err.code, 'postObject');
+                return callback(err, responseHeaders);
+            }
+            if (bucket.hasDeletedFlag() && canonicalID !== bucket.getOwner()) {
+                log.trace('deleted flag on bucket and request ' +
+                    'from non-owner account');
+                monitoring.promMetrics('POST', request.bucketName, 404, 'postObject');
+                return callback(errors.NoSuchBucket);
+            }
+
+            return async.waterfall([
+                function objectCreateAndStore(next) {
+                    writeContinue(request, request._response);
+                    return createAndStoreObject(request.bucketName,
+                        bucket, request.formData.key, objMD, authInfo, canonicalID, null,
+                        request, false, streamingV4Params, overheadField, log, next);
+                },
+            ], (err, storingResult) => {
+                if (err) {
+                    monitoring.promMetrics('POST', request.bucketName, err.code,
+                        'postObject');
+                    return callback(err, responseHeaders);
+                }
+                setExpirationHeaders(responseHeaders, {
+                    lifecycleConfig: bucket.getLifecycleConfiguration(),
+                    objectParams: {
+                        key: request.formData.key,
+                        date: storingResult.lastModified,
+                        tags: storingResult.tags,
+                    },
+                });
+                if (storingResult) {
+                    // ETag's hex should always be enclosed in quotes
+                    responseHeaders.ETag = `"${storingResult.contentMD5}"`;
+                    responseHeaders.location =
+                        `${request.headers.host}/${bucketName}/${encodeURIComponent(request.formData.key)}`;
+                }
+                const vcfg = bucket.getVersioningConfiguration();
+                const isVersionedObj = vcfg && vcfg.Status === 'Enabled';
+                if (isVersionedObj && storingResult && storingResult.versionId) {
+                    responseHeaders['x-amz-version-id'] =
+                        versionIdUtils.encode(storingResult.versionId,
+                            config.versionIdEncodingType);
+                }
+
+                return callback(null, responseHeaders);
+            });
+        });
+}
+
+module.exports = objectPost;
diff --git a/package.json b/package.json
index 88d99d1dcd..833056a003 100644
--- a/package.json
+++ b/package.json
@@ -19,8 +19,9 @@
     },
     "homepage": "https://github.com/scality/S3#readme",
     "dependencies": {
+        "@fastify/busboy": "^2.1.1",
         "@hapi/joi": "^17.1.0",
-        "arsenal": "git+https://github.com/scality/arsenal#7.70.29",
+        "arsenal": "git+https://github.com/scality/arsenal#4ef5748c028619edff10d6d38b21df43c8d63d88",
         "async": "~2.5.0",
         "aws-sdk": "2.905.0",
         "azure-storage": "^2.1.0",
@@ -60,6 +61,9 @@
     },
     "scripts": {
         "ft_awssdk": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/",
+        "ft_post": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/object/post.js",
+        "ft_post_aws": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/object/post-copy.js",
+        "ft_post_unit": "CI=true S3BACKEND=mem mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json --recursive tests/unit/api/callPostMethod.js",
         "ft_awssdk_aws": "cd tests/functional/aws-node-sdk && AWS_ON_AIR=true mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/",
         "ft_awssdk_buckets": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/bucket",
         "ft_awssdk_objects_misc": "cd tests/functional/aws-node-sdk && mocha --reporter mocha-multi-reporters --reporter-options configFile=$INIT_CWD/tests/reporter-config.json test/legacy test/object test/service test/support",
diff --git a/tests/functional/aws-node-sdk/test/object/post.js b/tests/functional/aws-node-sdk/test/object/post.js
new file mode 100644
index 0000000000..4f3f1908ba
--- /dev/null
+++ b/tests/functional/aws-node-sdk/test/object/post.js
@@ -0,0 +1,1621 @@
+const xml2js = require('xml2js');
+const axios = require('axios');
+const crypto = require('crypto');
+const FormData = require('form-data');
+const assert = require('assert');
+const http = require('http');
+const { URL } = require('url');
+const BucketUtility = require('../../lib/utility/bucket-util');
+const getConfig = require('../support/config');
+
+let bucketName;
+const filename = 'test-file.txt';
+let fileBuffer;
+const region = 'us-east-1';
+let ak;
+let sk;
+let s3;
+
+const generateBucketName = () => `test-bucket-${crypto.randomBytes(8).toString('hex')}`;
+
+const formatDate = (date) => {
+    const year = date.getUTCFullYear();
+    const month = (date.getUTCMonth() + 1).toString().padStart(2, '0');
+    const day = date.getUTCDate().toString().padStart(2, '0');
+    return `${year}${month}${day}`;
+};
+
+const getSignatureKey = (key, dateStamp, regionName, serviceName) => {
+    const kDate = crypto.createHmac('sha256', `AWS4${key}`).update(dateStamp).digest();
+    const kRegion = crypto.createHmac('sha256', kDate).update(regionName).digest();
+    const kService = crypto.createHmac('sha256', kRegion).update(serviceName).digest();
+    const kSigning = crypto.createHmac('sha256', kService).update('aws4_request').digest();
+    return kSigning;
+};
+
+// 'additionalConditions' will also replace existing keys if they are present
+const calculateFields = (ak, sk, additionalConditions, bucket = bucketName, key = filename) => {
+    const service = 's3';
+
+    const now = new Date();
+    const formattedDate = now.toISOString().replace(/[:-]|\.\d{3}/g, '');
+    let shortFormattedDate = formatDate(now);
+
+    const credential = `${ak}/${shortFormattedDate}/${region}/${service}/aws4_request`;
+    const conditionsFields = [
+        { bucket },
+        { key },
+        { 'x-amz-credential': credential },
+        { 'x-amz-algorithm': 'AWS4-HMAC-SHA256' },
+        { 'x-amz-date': formattedDate },
+    ];
+    if (additionalConditions) {
+        additionalConditions.forEach(field => {
+            const key = Object.keys(field)[0];
+            const value = field[key];
+            const index = conditionsFields.findIndex(condition => condition.hasOwnProperty(key));
+            if (index !== -1) {
+                conditionsFields[index][key] = value;
+                if (key === 'x-amz-date') {
+                    shortFormattedDate = value.split('T')[0];
+                }
+            } else {
+                conditionsFields.push({ [key]: value });
+            }
+        });
+    }
+    const policy = {
+        expiration: new Date(new Date().getTime() + 60000).toISOString(),
+        conditions: conditionsFields,
+    };
+    const policyBase64 = Buffer.from(JSON.stringify(policy)).toString('base64');
+
+    const signingKey = getSignatureKey(sk, shortFormattedDate, region, service);
+    const signature = crypto.createHmac('sha256', signingKey).update(policyBase64).digest('hex');
+
+    const returnFields = [
+        { name: 'x-amz-credential', value: credential },
+        { name: 'x-amz-algorithm', value: 'AWS4-HMAC-SHA256' },
+        { name: 'x-amz-signature', value: signature },
+        { name: 'x-amz-date', value: formattedDate },
+        { name: 'policy', value: policyBase64 },
+        { name: 'bucket', value: bucket },
+        { name: 'key', value: key },
+    ];
+    if (additionalConditions) {
+        additionalConditions.forEach(field => {
+            const key = Object.keys(field)[0];
+            const value = field[key];
+            const index = returnFields.findIndex(f => f.name === key);
+            if (index !== -1) {
+                returnFields[index].value = value;
+            } else {
+                returnFields.push({ name: key, value });
+            }
+        });
+    }
+    return returnFields;
+};
+
+describe('POST object', () => {
+    let bucketUtil;
+    let config;
+    const testContext = {};
+
+    before(() => {
+        config = getConfig('default');
+        ak = config.credentials.accessKeyId;
+        sk = config.credentials.secretAccessKey;
+        bucketUtil = new BucketUtility('default');
+        s3 = bucketUtil.s3;
+    });
+
+    beforeEach(done => {
+        bucketName = generateBucketName();
+        const url = `${config.endpoint}/${bucketName}`;
+        testContext.bucketName = bucketName;
+        testContext.url = new URL(url);
+
+        const fileContent = 'This is a test file';
+        fileBuffer = Buffer.from(fileContent);
+
+        // Create the bucket
+        s3.createBucket({ Bucket: bucketName }, err => {
+            if (err) {
+                return done(err);
+            }
+            return done();
+        });
+    });
+
+    afterEach(done => {
+        const { bucketName } = testContext;
+
+        process.stdout.write('Emptying bucket\n');
+        bucketUtil.empty(bucketName)
+            .then(() => {
+                process.stdout.write('Deleting bucket\n');
+                return bucketUtil.deleteOne(bucketName);
+            })
+            .then(() => done())
+            .catch(err => {
+                if (err.code !== 'NoSuchBucket') {
+                    process.stdout.write('Error in afterEach\n');
+                    return done(err);
+                }
+                return done();
+            });
+    });
+
+    it('should successfully upload an object using a POST form', done => {
+        const { url } = testContext;
+        const fields = calculateFields(ak, sk);
+        const formData = new FormData();
+
+        fields.forEach(field => {
+            formData.append(field.name, field.value);
+        });
+
+        formData.append('file', fileBuffer, { filename });
+
+        formData.getLength((err, length) => {
+            if (err) {
+                return done(err);
+            }
+
+            const options = {
+                method: 'POST',
+                hostname: url.hostname,
+                port: url.port,
+                path: url.pathname + url.search,
+                headers: {
+                    ...formData.getHeaders(),
+                    'Content-Length': length,
+                },
+            };
+
+ const req = http.request(options); + + req.on('response', res => { + try { + assert.equal(res.statusCode, 204); + assert.equal(res.headers.location, `${url.hostname}:${url.port}/${bucketName}/${filename}`); + done(); + } catch (err) { + done(err); + } + }); + + req.on('error', err => { + done(err); + }); + + formData.pipe(req); + return undefined; + }); + }); + + it('should handle url invalid characters in keys', done => { + const { url } = testContext; + const fields = calculateFields(ak, sk, [{ key: 'key with spaces' }]); + const formData = new FormData(); + const encodedKey = 'key%20with%20spaces'; // Expected URL-encoded key + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fileBuffer, { filename }); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + const options = { + method: 'POST', + hostname: url.hostname, + port: url.port, + path: url.pathname + url.search, + headers: { + ...formData.getHeaders(), + 'Content-Length': length, + }, + }; + + const req = http.request(options); + + req.on('response', res => { + try { + assert.equal(res.statusCode, 204); + assert.equal(res.headers.location, `${url.hostname}:${url.port}/${bucketName}/${encodedKey}`); + done(); + } catch (err) { + done(err); + } + }); + + req.on('error', err => { + done(err); + }); + + formData.pipe(req); + return undefined; + }); + }); + + it('should handle error when bucket does not exist', done => { + const fakeBucketName = generateBucketName(); + const tempUrl = `${config.endpoint}/${fakeBucketName}`; + const fields = calculateFields(ak, sk, [], fakeBucketName); + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fileBuffer, { filename }); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + const parsedUrl = new URL(tempUrl); + + const options = { + method: 'POST', + hostname: parsedUrl.hostname, + port: parsedUrl.port, + path: parsedUrl.pathname + parsedUrl.search, + headers: { + ...formData.getHeaders(), + 'Content-Length': length, + }, + }; + + const req = http.request(options); + + req.on('response', res => { + if (res.statusCode === 404) { + done(); + } else { + done(new Error('Expected error but got success response')); + } + }); + + req.on('error', done); + + formData.pipe(req); + return undefined; + }); + }); + + it('should successfully upload a larger file to S3 using a POST form', done => { + const { url } = testContext; + const largeFileName = 'large-test-file.txt'; + const largeFileContent = 'This is a larger test file'.repeat(10000); // Simulate a larger file + const largeFileBuffer = Buffer.from(largeFileContent); + + const fields = calculateFields(ak, sk, [{ key: largeFileName }]); + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', largeFileBuffer, { filename: largeFileName }); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + const options = { + method: 'POST', + hostname: url.hostname, + port: url.port, + path: url.pathname + url.search, + headers: { + ...formData.getHeaders(), + 'Content-Length': length, + }, + }; + + const req = http.request(options); + + req.on('response', res => { + if (res.statusCode === 204) { + s3.listObjectsV2({ Bucket: bucketName }, (err, data) => { + if (err) { + return done(err); + } + + const uploadedFile = data.Contents.find(item => 
item.Key === largeFileName); + try { + assert(uploadedFile, 'Uploaded file should exist in the bucket'); + assert.equal(uploadedFile.Size, + Buffer.byteLength(largeFileContent), 'File size should match'); + done(); + } catch (err) { + done(err); + } + return undefined; + }); + } else { + done(new Error(`Expected status 204 but got ${res.statusCode}`)); + } + }); + + req.on('error', done); + + formData.pipe(req); + return undefined; + }); + }); + + it('should be able to post an empty file and verify its existence', done => { + const { url } = testContext; + const fields = calculateFields(ak, sk); + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + const emptyFileBuffer = Buffer.from(''); + + formData.append('file', emptyFileBuffer, { filename }); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + const options = { + method: 'POST', + hostname: url.hostname, + port: url.port, + path: url.pathname + url.search, + headers: { + ...formData.getHeaders(), + 'Content-Length': length, + }, + }; + + const req = http.request(options); + + req.on('response', res => { + if (res.statusCode === 204) { + // Check if the object exists using listObjects + s3.listObjectsV2({ Bucket: bucketName, Prefix: filename }, (err, data) => { + if (err) { + return done(err); + } + + const fileExists = data.Contents.some(item => item.Key === filename); + const file = data.Contents.find(item => item.Key === filename); + + try { + assert(fileExists, 'File should exist in S3'); + assert.equal(file.Size, 0, 'File size should be 0'); + + // Clean up: delete the empty file from S3 + s3.deleteObject({ Bucket: bucketName, Key: filename }, err => { + if (err) { + return done(err); + } + + return done(); + }); + } catch (err) { + return done(err); + } + return undefined; + }); + } else { + done(new Error(`Expected status 204 but got ${res.statusCode}`)); + } + }); + + req.on('error', err => { + done(err); + }); + + formData.pipe(req); + return undefined; + }); + }); + + it('should handle error when file is missing', done => { + const { url } = testContext; + const fields = calculateFields(ak, sk); + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + const options = { + method: 'POST', + hostname: url.hostname, + port: url.port, + path: url.pathname + url.search, + headers: { + ...formData.getHeaders(), + 'Content-Length': length, + }, + }; + + const req = http.request(options); + + req.on('response', res => { + if (res.statusCode !== 400) { + done(new Error('Expected error but got success response')); + return; + } + + let responseData = ''; + res.on('data', chunk => { + responseData += chunk; + }); + + res.on('end', () => { + xml2js.parseString(responseData, (parseErr, result) => { + if (parseErr) { + return done(parseErr); + } + + try { + const error = result.Error; + assert.equal(error.Code[0], 'InvalidArgument'); + assert.equal(error.Message[0], 'POST requires exactly one file upload per request.'); + done(); + } catch (err) { + done(err); + } + return undefined; + }); + }); + }); + + req.on('error', err => { + done(err); + }); + + formData.pipe(req); + return undefined; + }); + }); + + it('should handle error when there are multiple files', done => { + const { url } = testContext; + const fields = calculateFields(ak, sk); + const formData = new FormData(); + + fields.forEach(field => { 
+ formData.append(field.name, field.value); + }); + + // Append the same buffer twice to simulate multiple files + formData.append('file', fileBuffer, { filename }); + formData.append('file', fileBuffer, { filename }); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + const options = { + method: 'POST', + hostname: url.hostname, + port: url.port, + path: url.pathname + url.search, + headers: { + ...formData.getHeaders(), + 'Content-Length': length, + }, + }; + + const req = http.request(options); + + // Handle the response + req.on('response', res => { + if (res.statusCode !== 400) { + done(new Error('Expected error but got success response')); + return; + } + + let responseData = ''; + res.on('data', chunk => { + responseData += chunk; + }); + + res.on('end', () => { + xml2js.parseString(responseData, (parseErr, result) => { + if (parseErr) { + return done(parseErr); + } + + try { + const error = result.Error; + assert.equal(error.Code[0], 'InvalidArgument'); + assert.equal(error.Message[0], 'POST requires exactly one file upload per request.'); + return done(); + } catch (err) { + return done(err); + } + }); + }); + }); + + req.on('error', err => { + done(err); + }); + + formData.pipe(req); + return undefined; + }); + }); + + it('should handle error when key is missing', done => { + const { url } = testContext; + // Prep fields then remove the key field + let fields = calculateFields(ak, sk); + fields = fields.filter(e => e.name !== 'key'); + + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + const fileContent = 'This is a test file'; + const fileBuffer = Buffer.from(fileContent); + + formData.append('file', fileBuffer, { filename }); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + const options = { + method: 'POST', + hostname: url.hostname, + port: url.port, + path: url.pathname + url.search, + headers: { + ...formData.getHeaders(), + 'Content-Length': length, + }, + }; + + const req = http.request(options); + + req.on('response', res => { + if (res.statusCode !== 400) { + done(new Error('Request should not succeed without key field')); + return; + } + + let responseData = ''; + res.on('data', chunk => { + responseData += chunk; + }); + + res.on('end', () => { + xml2js.parseString(responseData, (parseErr, result) => { + if (parseErr) { + return done(parseErr); + } + + try { + const error = result.Error; + assert.equal(error.Code[0], 'InvalidArgument'); + assert.equal(error.Message[0], + "Bucket POST must contain a field named 'key'. 
" + + 'If it is specified, please check the order of the fields.'); + return done(); + } catch (err) { + return done(err); + } + }); + }); + }); + + req.on('error', err => { + done(err); + }); + + formData.pipe(req); + return undefined; + }); + }); + + it('should handle error when content-type is incorrect', done => { + const { url } = testContext; + // Prep fields then remove the key field + const fields = calculateFields(ak, sk); + + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fileBuffer, { filename }); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + const headers = { + ...formData.getHeaders(), + 'Content-Length': length, + 'Content-Type': 'application/json', // Incorrect content type + }; + + const options = { + method: 'POST', + hostname: url.hostname, + port: url.port, + path: url.pathname + url.search, + headers, + }; + + const req = http.request(options); + + req.on('response', res => { + if (res.statusCode !== 412) { // 412 Precondition Failed + done(new Error('Request should not succeed with wrong content-type')); + return; + } + + let responseData = ''; + res.on('data', chunk => { + responseData += chunk; + }); + + res.on('end', () => { + xml2js.parseString(responseData, (parseErr, result) => { + if (parseErr) { + return done(parseErr); + } + + try { + const error = result.Error; + assert.equal(error.Code[0], 'PreconditionFailed'); + assert.equal(error.Message[0], + 'Bucket POST must be of the enclosure-type multipart/form-data'); + return done(); + } catch (err) { + return done(err); + } + }); + }); + }); + + req.on('error', err => { + done(err); + }); + + formData.pipe(req); + return undefined; + }); + }); + + it('should handle error when content-type is "abc multipart/form-data"', done => { + const { url } = testContext; + // Prep fields then remove the key field + const fields = calculateFields(ak, sk); + + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fileBuffer, { filename }); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + const headers = { + ...formData.getHeaders(), + 'Content-Length': length, + 'Content-Type': 'abc multipart/form-data', // Incorrect content type + }; + + const options = { + method: 'POST', + hostname: url.hostname, + port: url.port, + path: url.pathname + url.search, + headers, + }; + + const req = http.request(options); + + req.on('response', res => { + if (res.statusCode !== 412) { // 412 Precondition Failed + done(new Error('Request should not succeed with wrong content-type')); + return; + } + + let responseData = ''; + res.on('data', chunk => { + responseData += chunk; + }); + + res.on('end', () => { + xml2js.parseString(responseData, (parseErr, result) => { + if (parseErr) { + return done(parseErr); + } + + try { + const error = result.Error; + assert.equal(error.Code[0], 'PreconditionFailed'); + assert.equal(error.Message[0], + 'Bucket POST must be of the enclosure-type multipart/form-data'); + return done(); + } catch (err) { + return done(err); + } + }); + }); + }); + + req.on('error', err => { + done(err); + }); + + formData.pipe(req); + return undefined; + }); + }); + + it('should handle error when content-type is "multipart/form-data abc"', done => { + const { url } = testContext; + // Prep fields then remove the key field + const fields = calculateFields(ak, sk); + + const formData 
= new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fileBuffer, { filename }); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + const headers = { + ...formData.getHeaders(), + 'Content-Length': length, + 'Content-Type': 'multipart/form-data abc', // Incorrect content type + }; + + const options = { + method: 'POST', + hostname: url.hostname, + port: url.port, + path: url.pathname + url.search, + headers, + }; + + const req = http.request(options); + + req.on('response', res => { + if (res.statusCode !== 412) { // 412 Precondition Failed + done(new Error('Request should not succeed with wrong content-type')); + return; + } + + let responseData = ''; + res.on('data', chunk => { + responseData += chunk; + }); + + res.on('end', () => { + xml2js.parseString(responseData, (parseErr, result) => { + if (parseErr) { + return done(parseErr); + } + + try { + const error = result.Error; + assert.equal(error.Code[0], 'PreconditionFailed'); + assert.equal(error.Message[0], + 'Bucket POST must be of the enclosure-type multipart/form-data'); + return done(); + } catch (err) { + return done(err); + } + }); + }); + }); + + req.on('error', err => { + done(err); + }); + + formData.pipe(req); + return undefined; + }); + }); + + it('should handle error when content-type is missing', done => { + const { url } = testContext; + const fields = calculateFields(ak, sk); + + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fileBuffer, { filename }); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + const headers = { + ...formData.getHeaders(), + 'Content-Length': length, + }; + delete headers['content-type']; // Ensure content-type is missing + + const options = { + method: 'POST', + hostname: url.hostname, + port: url.port, + path: url.pathname + url.search, + headers, + }; + + const req = http.request(options); + + req.on('response', res => { + if (res.statusCode !== 412) { + done(new Error('Request should not succeed without correct content-type')); + return; + } + + let responseData = ''; + res.on('data', chunk => { + responseData += chunk; + }); + + res.on('end', () => { + xml2js.parseString(responseData, (parseErr, result) => { + if (parseErr) { + return done(parseErr); + } + + try { + const error = result.Error; + assert.equal(error.Code[0], 'PreconditionFailed'); + assert.equal(error.Message[0], + 'Bucket POST must be of the enclosure-type multipart/form-data'); + return done(); + } catch (err) { + return done(err); + } + }); + }); + }); + + req.on('error', done); + + formData.pipe(req); + return undefined; + }); + }); + + it('should upload an object with key slash', done => { + const { url } = testContext; + const slashKey = '/'; + const fields = calculateFields(ak, sk, [{ key: slashKey }]); + + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fileBuffer, { filename }); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + const headers = { + ...formData.getHeaders(), + 'Content-Length': length, + }; + + const options = { + method: 'POST', + hostname: url.hostname, + port: url.port, + path: url.pathname + url.search, + headers, + }; + + const req = http.request(options); + + req.on('response', res => { + if (res.statusCode === 204) { + done(); + } else { + 
done(new Error(`Expected status 204 but got ${res.statusCode}`)); + } + }); + + req.on('error', err => { + done(err); + }); + + formData.pipe(req); + return undefined; + }); + }); + + it('should fail to upload an object with key length of 0', done => { + const { url } = testContext; + const fields = calculateFields(ak, sk, [{ key: '' }]); + + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fileBuffer, { filename }); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + const headers = { + ...formData.getHeaders(), + 'Content-Length': length, + }; + + const options = { + method: 'POST', + hostname: url.hostname, + port: url.port, + path: url.pathname + url.search, + headers, + }; + + const req = http.request(options); + + // Handle the response + req.on('response', res => { + if (res.statusCode !== 400) { + done(new Error('Request should have failed but succeeded')); + return; + } + + let responseData = ''; + res.on('data', chunk => { + responseData += chunk; + }); + + res.on('end', () => { + xml2js.parseString(responseData, (parseErr, result) => { + if (parseErr) { + return done(parseErr); + } + + try { + const error = result.Error; + assert.equal(error.Code[0], 'InvalidArgument'); + assert.equal(error.Message[0], 'User key must have a length greater than 0.'); + return done(); + } catch (err) { + return done(err); + } + }); + }); + }); + + req.on('error', err => { + done(err); + }); + + formData.pipe(req); + return undefined; + }); + }); + + it('should fail to upload an object with key longer than 1024 bytes', done => { + const { url } = testContext; + const fields = calculateFields(ak, sk, [{ key: 'a'.repeat(1025) }]); + + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fileBuffer, { filename }); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + const headers = { + ...formData.getHeaders(), + 'Content-Length': length, + }; + + const options = { + method: 'POST', + hostname: url.hostname, + port: url.port, + path: url.pathname + url.search, + headers, + }; + + const req = http.request(options); + + // Handle the response + req.on('response', res => { + if (res.statusCode !== 400) { + done(new Error('Request should have failed but succeeded')); + return; + } + + let responseData = ''; + res.on('data', chunk => { + responseData += chunk; + }); + + res.on('end', () => { + xml2js.parseString(responseData, (parseErr, result) => { + if (parseErr) { + return done(parseErr); + } + + try { + const error = result.Error; + assert.equal(error.Code[0], 'KeyTooLong'); + assert.equal(error.Message[0], 'Your key is too long.'); + return done(); + } catch (err) { + return done(err); + } + }); + }); + }); + + // Handle any errors during the request + req.on('error', err => { + done(err); + }); + + // Stream the form data into the request + formData.pipe(req); + return undefined; + }); + }); + + it('should replace ${filename} variable in key with the name of the uploaded file', done => { + const { url } = testContext; + const keyTemplate = 'uploads/test/${filename}'; + const fileToUpload = keyTemplate.replace('${filename}', filename); + const fields = calculateFields(ak, sk, [{ key: fileToUpload }]); + const formData = new FormData(); + + fields.forEach(field => { + const value = field.name === 'key' ? 
keyTemplate : field.value; + formData.append(field.name, value); + }); + + formData.append('file', fileBuffer, { filename }); + + formData.getLength((err, length) => { + if (err) return done(err); + + const headers = { + ...formData.getHeaders(), + 'Content-Length': length, + }; + + const options = { + method: 'POST', + hostname: url.hostname, + port: url.port, + path: url.pathname + url.search, + headers, + }; + + const req = http.request(options); + + // Handle the response + req.on('response', res => { + if (res.statusCode !== 204) { + done(new Error(`Expected status 204 but got ${res.statusCode}`)); + return; + } + + const expectedKey = keyTemplate.replace('${filename}', filename); + + const listParams = { Bucket: bucketName, Prefix: expectedKey }; + s3.listObjects(listParams, (err, data) => { + if (err) return done(err); + const objectExists = data.Contents.some(item => item.Key === expectedKey); + assert(objectExists, 'Object was not uploaded with the expected key'); + return done(); + }); + }); + + req.on('error', done); + + formData.pipe(req); + return undefined; + }); + }); + + it('should fail to upload an object with an invalid multipart boundary', done => { + const { url } = testContext; + const fields = calculateFields(ak, sk); + + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fileBuffer, { filename }); + + // Generate the form data with a valid boundary + const validBoundary = formData.getBoundary(); + + // Manually create the payload with an invalid boundary + const invalidBoundary = '----InvalidBoundary'; + const payload = Buffer.concat([ + Buffer.from(`--${invalidBoundary}\r\n`), + Buffer.from(`Content-Disposition: form-data; name="key"\r\n\r\n${filename}\r\n`), + Buffer.from(`--${invalidBoundary}\r\n`), + Buffer.from(`Content-Disposition: form-data; name="file"; filename="${filename}"\r\n`), + Buffer.from('Content-Type: application/octet-stream\r\n\r\n'), + fileBuffer, + Buffer.from(`\r\n--${invalidBoundary}--\r\n`), + ]); + + const headers = { + 'Content-Type': `multipart/form-data; boundary=${validBoundary}`, + 'Content-Length': payload.length, + }; + + const options = { + method: 'POST', + hostname: url.hostname, + port: url.port, + path: url.pathname + url.search, + headers, + }; + + const req = http.request(options); + + req.on('response', res => { + if (res.statusCode !== 400) { + done(new Error('Request should have failed but succeeded')); + return; + } + + assert.equal(res.statusCode, 400); + done(); + }); + + req.on('error', err => { + done(err); + }); + + req.write(payload); + req.end(); + }); + + it('should fail to upload an object with a too small content length header', done => { + const { url } = testContext; + const fields = calculateFields(ak, sk); + + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fileBuffer, { filename }); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + // Use an incorrect content length (e.g., actual length - 20) + const incorrectLength = length - 20; + + const headers = { + ...formData.getHeaders(), + 'Content-Length': incorrectLength, + }; + + const options = { + method: 'POST', + hostname: url.hostname, + port: url.port, + path: url.pathname + url.search, + headers, + }; + + const req = http.request(options); + + // Handle the response + req.on('response', res => { + if (res.statusCode !== 400) { + return done(new 
Error('Request should have failed but succeeded')); + } + + return done(); + }); + + // Handle any errors during the request + req.on('error', done); + + // Stream the form data into the request + formData.pipe(req); + return undefined; + }); + }); + + it.skip('should fail to upload an object with a too big content length header', done => { + const { url } = testContext; + const fields = calculateFields(ak, sk); + + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fileBuffer, filename); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + // Use an incorrect content length (e.g., actual length - 20) + const incorrectLength = length + 2000; + + return axios.post(url, formData, { + headers: { + ...formData.getHeaders(), + 'Content-Length': incorrectLength, + }, + }) + .then(() => done(new Error('Request should have failed but succeeded'))) + .catch(err => { + // Expecting an error response from the API + assert.equal(err.response.status, 400); + done(); + }); + }); + }); + + it('should return an error if form data (excluding file) exceeds 20KB', done => { + const { url } = testContext; + const fields = calculateFields(ak, sk); + + // Add additional fields to make form data exceed 20KB + const largeValue = 'A'.repeat(1024); // 1KB value + for (let i = 0; i < 21; i++) { // Add 21 fields of 1KB each to exceed 20KB + fields.push({ name: `field${i}`, value: largeValue }); + } + + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fileBuffer, { filename }); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + const headers = { + ...formData.getHeaders(), + 'Content-Length': length, + }; + + const options = { + method: 'POST', + hostname: url.hostname, + port: url.port, + path: url.pathname + url.search, + headers, + }; + + const req = http.request(options); + + // Handle the response + req.on('response', res => { + if (res.statusCode !== 400) { + return done(new Error('Request should not succeed with form data exceeding 20KB')); + } + + let responseData = ''; + res.on('data', chunk => { + responseData += chunk; + }); + + res.on('end', () => { + xml2js.parseString(responseData, (parseErr, result) => { + if (parseErr) { + return done(parseErr); + } + + try { + const error = result.Error; + assert.equal(error.Code[0], 'MaxPostPreDataLengthExceeded'); + assert.equal(error.Message[0], + 'Your POST request fields preceeding the upload file was too large.'); + return done(); + } catch (err) { + return done(err); + } + }); + }); + return undefined; + }); + + req.on('error', done); + + formData.pipe(req); + + return undefined; + }); + }); + + it('should return an error if a query parameter is present in the URL', done => { + const { url } = testContext; + const queryParam = '?invalidParam=true'; + const invalidUrl = new URL(url.toString() + queryParam); + const fields = calculateFields(ak, sk); + + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fileBuffer, { filename }); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + const headers = { + ...formData.getHeaders(), + 'Content-Length': length, + }; + + const options = { + method: 'POST', + hostname: invalidUrl.hostname, + port: invalidUrl.port, + path: invalidUrl.pathname + invalidUrl.search, + 
headers, + }; + + const req = http.request(options); + + // Handle the response + req.on('response', res => { + if (res.statusCode !== 400) { + return done(new Error('Request should not succeed with an invalid query parameter')); + } + + let responseData = ''; + res.on('data', chunk => { + responseData += chunk; + }); + + res.on('end', () => { + xml2js.parseString(responseData, (parseErr, result) => { + if (parseErr) { + return done(parseErr); + } + + try { + const error = result.Error; + assert.equal(error.Code[0], 'InvalidArgument'); + assert.equal(error.Message[0], 'Query String Parameters not allowed on POST requests.'); + return done(); + } catch (err) { + return done(err); + } + }); + }); + return undefined; + }); + + req.on('error', done); + + formData.pipe(req); + return undefined; + }); + }); + + it('should return 405 Method Not Allowed if objectKey is present with a non-matching query parameter', done => { + const { url } = testContext; + const objectKey = 'someObjectKey'; + const queryParam = '?nonMatchingParam=true'; + const invalidUrl = new URL(`${url}/${objectKey}${queryParam}`); + const fields = calculateFields(ak, sk); + + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fileBuffer, { filename }); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + const headers = { + ...formData.getHeaders(), + 'Content-Length': length, + }; + + const options = { + method: 'POST', + hostname: invalidUrl.hostname, + port: invalidUrl.port, + path: invalidUrl.pathname + invalidUrl.search, + headers, + }; + + const req = http.request(options); + + // Handle the response + req.on('response', res => { + if (res.statusCode !== 405) { + return done(new Error('Request should not succeed with a non-matching query parameter')); + } + + let responseData = ''; + res.on('data', chunk => { + responseData += chunk; + }); + + res.on('end', () => { + xml2js.parseString(responseData, (parseErr, result) => { + if (parseErr) { + return done(parseErr); + } + + try { + const error = result.Error; + assert.equal(error.Code[0], 'MethodNotAllowed'); + assert.equal(error.Message[0], + 'The specified method is not allowed against this resource.'); + return done(); + } catch (err) { + return done(err); + } + }); + }); + return undefined; + }); + + req.on('error', done); + + formData.pipe(req); + return undefined; + }); + }); + + it('should successfully upload an object with bucket versioning enabled and verify version ID', done => { + const { url } = testContext; + + // Enable versioning on the bucket + const versioningParams = { + Bucket: bucketName, + VersioningConfiguration: { + Status: 'Enabled', + }, + }; + + return s3.putBucketVersioning(versioningParams, (err) => { + if (err) { + return done(err); + } + + const fields = calculateFields(ak, sk, [{ bucket: bucketName }]); + const formData = new FormData(); + + fields.forEach(field => { + formData.append(field.name, field.value); + }); + + formData.append('file', fileBuffer, { filename }); + + formData.getLength((err, length) => { + if (err) { + return done(err); + } + + const headers = { + ...formData.getHeaders(), + 'Content-Length': length, + }; + + const options = { + method: 'POST', + hostname: url.hostname, + port: url.port, + path: url.pathname + url.search, + headers, + }; + + const req = http.request(options); + + req.on('response', res => { + if (res.statusCode !== 204) { + return done(new Error(`Expected status 204 but got 
${res.statusCode}`)); + } + + // Verify version ID is present in the response headers + const versionId = res.headers['x-amz-version-id']; + assert.ok(versionId, 'Version ID should be present in the response headers'); + return done(); + }); + + req.on('error', done); + + formData.pipe(req); + return undefined; + }); + return undefined; + }); + }); +}); + diff --git a/tests/unit/api/objectPost.js b/tests/unit/api/objectPost.js new file mode 100644 index 0000000000..1416558193 --- /dev/null +++ b/tests/unit/api/objectPost.js @@ -0,0 +1,433 @@ +const assert = require('assert'); +const async = require('async'); +const moment = require('moment'); +const { errors } = require('arsenal'); +const sinon = require('sinon'); + +const { bucketPut } = require('../../../lib/api/bucketPut'); +const bucketPutObjectLock = require('../../../lib/api/bucketPutObjectLock'); +const bucketPutVersioning = require('../../../lib/api/bucketPutVersioning'); +const { cleanup, DummyRequestLogger, makeAuthInfo, versioningTestUtils } + = require('../helpers'); +const { ds } = require('arsenal').storage.data.inMemory.datastore; +const metadata = require('../metadataswitch'); +const objectPost = require('../../../lib/api/objectPost'); +const { objectLockTestUtils } = require('../helpers'); +const DummyRequest = require('../DummyRequest'); +const mpuUtils = require('../utils/mpuUtils'); + +const any = sinon.match.any; + +const log = new DummyRequestLogger(); +const canonicalID = 'accessKey1'; +const authInfo = makeAuthInfo(canonicalID); +const bucketName = 'bucketname123'; +const postBody = Buffer.from('I am a body', 'utf8'); +const correctMD5 = 'be747eb4b75517bf6b3cf7c5fbb62f3a'; +const mockDate = new Date(2050, 10, 12); +const testPutBucketRequest = new DummyRequest({ + bucketName, + headers: { host: `${bucketName}.s3.amazonaws.com` }, + url: '/', +}); +const testPutBucketRequestLock = new DummyRequest({ + bucketName, + headers: { + 'host': `${bucketName}.s3.amazonaws.com`, + 'x-amz-bucket-object-lock-enabled': 'true', + }, + url: '/', +}); + +const originalputObjectMD = metadata.putObjectMD; +const objectName = 'objectName'; + +let testPostObjectRequest; +const enableVersioningRequest = + versioningTestUtils.createBucketPutVersioningReq(bucketName, 'Enabled'); +const suspendVersioningRequest = + versioningTestUtils.createBucketPutVersioningReq(bucketName, 'Suspended'); + + +describe('objectPost API', () => { + beforeEach(() => { + cleanup(); + sinon.spy(metadata, 'putObjectMD'); + testPostObjectRequest = new DummyRequest({ + bucketName, + formData: { + key: objectName, + }, + fileEventData: {}, + headers: { host: `${bucketName}.s3.amazonaws.com` }, + url: '/', + }, postBody); + }); + + afterEach(() => { + sinon.restore(); + metadata.putObjectMD = originalputObjectMD; + }); + + it('should return an error if the bucket does not exist', done => { + objectPost(authInfo, testPostObjectRequest, undefined, log, err => { + assert.deepStrictEqual(err, errors.NoSuchBucket); + done(); + }); + }); + + it('should successfully post an object', done => { + const testPostObjectRequest = new DummyRequest({ + bucketName, + formData: { + key: objectName, + }, + fileEventData: {}, + headers: {}, + url: '/', + calculatedHash: 'vnR+tLdVF79rPPfF+7YvOg==', + }, postBody); + + bucketPut(authInfo, testPutBucketRequest, log, () => { + objectPost(authInfo, testPostObjectRequest, undefined, log, + (err, resHeaders) => { + assert.strictEqual(resHeaders.ETag, `"${correctMD5}"`); + metadata.getObjectMD(bucketName, objectName, + {}, log, (err, md) => { 
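+                        // verify the persisted metadata carries the expected content-md5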
+                        assert.ifError(err);
+                        assert(md);
+                        assert.strictEqual(md['content-md5'], correctMD5);
+                        done();
+                    });
+            });
+        });
+    });
+
+    const mockModes = ['GOVERNANCE', 'COMPLIANCE'];
+    mockModes.forEach(mockMode => {
+        it(`should post an object with valid date & ${mockMode} mode`, done => {
+            const testPostObjectRequest = new DummyRequest({
+                bucketName,
+                formData: {
+                    key: objectName,
+                },
+                fileEventData: {},
+                headers: {
+                    'x-amz-object-lock-retain-until-date': mockDate,
+                    'x-amz-object-lock-mode': mockMode,
+                },
+                url: '/',
+                calculatedHash: 'vnR+tLdVF79rPPfF+7YvOg==',
+            }, postBody);
+            bucketPut(authInfo, testPutBucketRequestLock, log, () => {
+                objectPost(authInfo, testPostObjectRequest, undefined, log,
+                    (err, headers) => {
+                        assert.ifError(err);
+                        assert.strictEqual(headers.ETag, `"${correctMD5}"`);
+                        metadata.getObjectMD(bucketName, objectName, {}, log,
+                            (err, md) => {
+                                const mode = md.retentionMode;
+                                const retainUntilDate = md.retentionDate;
+                                assert.ifError(err);
+                                assert(md);
+                                assert.strictEqual(mode, mockMode);
+                                assert.strictEqual(retainUntilDate, mockDate);
+                                done();
+                            });
+                    });
+            });
+        });
+    });
+
+    const formatTime = time => time.slice(0, 20);
+
+    const testObjectLockConfigs = [
+        {
+            testMode: 'COMPLIANCE',
+            val: 30,
+            type: 'Days',
+        },
+        {
+            testMode: 'GOVERNANCE',
+            val: 5,
+            type: 'Years',
+        },
+    ];
+    testObjectLockConfigs.forEach(config => {
+        const { testMode, type, val } = config;
+        it('should post an object with default retention if the object has ' +
+        'no retention configuration but the bucket does', done => {
+            const testPostObjectRequest = new DummyRequest({
+                bucketName,
+                formData: {
+                    key: objectName,
+                },
+                fileEventData: {},
+                headers: {},
+                url: '/',
+                calculatedHash: 'vnR+tLdVF79rPPfF+7YvOg==',
+            }, postBody);
+
+            const testObjLockRequest = {
+                bucketName,
+                headers: { host: `${bucketName}.s3.amazonaws.com` },
+                post: objectLockTestUtils.generateXml(testMode, val, type),
+            };
+
+            bucketPut(authInfo, testPutBucketRequestLock, log, () => {
+                bucketPutObjectLock(authInfo, testObjLockRequest, log, () => {
+                    objectPost(authInfo, testPostObjectRequest, undefined, log,
+                        (err, headers) => {
+                            assert.ifError(err);
+                            assert.strictEqual(headers.ETag, `"${correctMD5}"`);
+                            metadata.getObjectMD(bucketName, objectName, {},
+                                log, (err, md) => {
+                                    const mode = md.retentionMode;
+                                    const retainDate = md.retentionDate;
+                                    const date = moment();
+                                    const days = type === 'Days' ? val : val * 365;
+                                    const expectedDate = date.add(days, 'days');
+                                    assert.ifError(err);
+                                    assert.strictEqual(mode, testMode);
+                                    assert.strictEqual(formatTime(retainDate),
+                                        formatTime(expectedDate.toISOString()));
+                                    done();
+                                });
+                        });
+                });
+            });
+        });
+    });
+
+
+    it('should successfully post an object with legal hold ON', done => {
+        const request = new DummyRequest({
+            bucketName,
+            formData: {
+                key: objectName,
+            },
+            fileEventData: {},
+            headers: {
+                'x-amz-object-lock-legal-hold': 'ON',
+            },
+            url: '/',
+            calculatedHash: 'vnR+tLdVF79rPPfF+7YvOg==',
+        }, postBody);
+
+        bucketPut(authInfo, testPutBucketRequestLock, log, () => {
+            objectPost(authInfo, request, undefined, log, (err, headers) => {
+                assert.ifError(err);
+                assert.strictEqual(headers.ETag, `"${correctMD5}"`);
+                metadata.getObjectMD(bucketName, objectName, {}, log,
+                    (err, md) => {
+                        assert.ifError(err);
+                        assert.strictEqual(md.legalHold, true);
+                        done();
+                    });
+            });
+        });
+    });
+
+    it('should successfully post an object with legal hold OFF', done => {
+        const request = new DummyRequest({
+            bucketName,
+            formData: {
+                key: objectName,
+            },
+            fileEventData: {},
+            headers: {
+                'x-amz-object-lock-legal-hold': 'OFF',
+            },
+            url: '/',
+            calculatedHash: 'vnR+tLdVF79rPPfF+7YvOg==',
+        }, postBody);
+
+        bucketPut(authInfo, testPutBucketRequestLock, log, () => {
+            objectPost(authInfo, request, undefined, log, (err, headers) => {
+                assert.ifError(err);
+                assert.strictEqual(headers.ETag, `"${correctMD5}"`);
+                metadata.getObjectMD(bucketName, objectName, {}, log,
+                    (err, md) => {
+                        assert.ifError(err);
+                        assert(md);
+                        assert.strictEqual(md.legalHold, false);
+                        done();
+                    });
+            });
+        });
+    });
+
+    it('should not leave orphans in data when overwriting an object', done => {
+        const testPostObjectRequest2 = new DummyRequest({
+            bucketName,
+            formData: {
+                key: objectName,
+            },
+            fileEventData: {},
+            headers: {},
+            url: '/',
+        }, Buffer.from('I am another body', 'utf8'));
+
+        bucketPut(authInfo, testPutBucketRequest, log, () => {
+            objectPost(authInfo, testPostObjectRequest,
+                undefined, log, () => {
+                    objectPost(authInfo, testPostObjectRequest2, undefined,
+                        log,
+                        () => {
+                            // orphaned data is not deleted until the next tick
+                            // by the in-memory backend
+                            setImmediate(() => {
+                                // Data store starts at index 1
+                                assert.strictEqual(ds[0], undefined);
+                                assert.strictEqual(ds[1], undefined);
+                                assert.deepStrictEqual(ds[2].value,
+                                    Buffer.from('I am another body', 'utf8'));
+                                done();
+                            });
+                        });
+                });
+        });
+    });
+
+    it('should not leave orphans in data when overwriting a multipart upload object', done => {
+        bucketPut(authInfo, testPutBucketRequest, log, () => {
+            mpuUtils.createMPU('default', bucketName, objectName, log,
+                (err, testUploadId) => {
+                    objectPost(authInfo, testPostObjectRequest, undefined, log, err => {
+                        assert.ifError(err);
+                        sinon.assert.calledWith(metadata.putObjectMD,
+                            any, any, any, sinon.match({ oldReplayId: testUploadId }), any, any);
+                        done();
+                    });
+                });
+        });
+    });
+
+    describe('objectPost API with versioning', () => {
+        beforeEach(() => {
+            cleanup();
+        });
+
+        const objData = ['foo0', 'foo1', 'foo2'].map(str =>
+            Buffer.from(str, 'utf8'));
+        const testPostObjectRequests = objData.map(data => versioningTestUtils
+            .createPostObjectRequest(bucketName, objectName, data));
+
+        it('should delete latest version when creating new null version ' +
+        'if latest version is null version', done => {
+            async.series([
+                callback => bucketPut(authInfo, testPutBucketRequest, log,
+                    callback),
+                // putting null version by putting obj before versioning configured
+                callback => objectPost(authInfo, testPostObjectRequests[0], undefined,
+                    log, err => {
+                        versioningTestUtils.assertDataStoreValues(ds, [objData[0]]);
+                        callback(err);
+                    }),
+                callback => bucketPutVersioning(authInfo, suspendVersioningRequest,
+                    log, callback),
+                // creating new null version by putting obj after ver suspended
+                callback => objectPost(authInfo, testPostObjectRequests[1],
+                    undefined, log, err => {
+                        // wait until next tick since mem backend executes
+                        // deletes in the next tick
+                        setImmediate(() => {
+                            // old null version should be deleted
+                            versioningTestUtils.assertDataStoreValues(ds,
+                                [undefined, objData[1]]);
+                            callback(err);
+                        });
+                    }),
+                // create another null version
+                callback => objectPost(authInfo, testPostObjectRequests[2],
+                    undefined, log, err => {
+                        setImmediate(() => {
+                            // old null version should be deleted
+                            versioningTestUtils.assertDataStoreValues(ds,
+                                [undefined, undefined, objData[2]]);
+                            callback(err);
+                        });
+                    }),
+            ], done);
+        });
+
+        describe('when null version is not the latest version', () => {
+            const objData = ['foo0', 'foo1', 'foo2'].map(str =>
+                Buffer.from(str, 'utf8'));
+            const testPostObjectRequests = objData.map(data => versioningTestUtils
+                .createPostObjectRequest(bucketName, objectName, data));
+            beforeEach(done => {
+                async.series([
+                    callback => bucketPut(authInfo, testPutBucketRequest, log,
+                        callback),
+                    // putting null version: put obj before versioning configured
+                    callback => objectPost(authInfo, testPostObjectRequests[0],
+                        undefined, log, callback),
+                    callback => bucketPutVersioning(authInfo,
+                        enableVersioningRequest, log, callback),
+                    // put another version:
+                    callback => objectPost(authInfo, testPostObjectRequests[1],
+                        undefined, log, callback),
+                    callback => bucketPutVersioning(authInfo,
+                        suspendVersioningRequest, log, callback),
+                ], err => {
+                    if (err) {
+                        return done(err);
+                    }
+                    versioningTestUtils.assertDataStoreValues(ds,
+                        objData.slice(0, 2));
+                    return done();
+                });
+            });
+
+            it('should still delete null version when creating new null version',
+                done => {
+                    objectPost(authInfo, testPostObjectRequests[2], undefined,
+                        log, err => {
+                            assert.ifError(err);
+                            setImmediate(() => {
+                                // old null version should be deleted after putting
+                                // new null version
+                                versioningTestUtils.assertDataStoreValues(ds,
+                                    [undefined, objData[1], objData[2]]);
+                                done();
+                            });
+                        });
+                });
+        });
+
+        it('should return BadDigest error and not leave orphans in data when ' +
+        'contentMD5 and completedHash do not match', done => {
+            const testPostObjectRequest = new DummyRequest({
+                bucketName,
+                formData: {
+                    key: objectName,
+                },
+                fileEventData: {},
+                headers: {},
+                url: '/',
+                contentMD5: 'vnR+tLdVF79rPPfF+7YvOg==',
+            }, Buffer.from('I am another body', 'utf8'));
+
+            bucketPut(authInfo, testPutBucketRequest, log, () => {
+                objectPost(authInfo, testPostObjectRequest, undefined, log,
+                    err => {
+                        assert.deepStrictEqual(err, errors.BadDigest);
+                        // orphaned data is not deleted until the next tick
+                        // by the in-memory backend
+                        setImmediate(() => {
+                            // Data store starts at index 1
+                            assert.strictEqual(ds[0], undefined);
+                            assert.strictEqual(ds[1], undefined);
+                            done();
+                        });
+                    });
+            });
+        });
+    });
+});
+
diff --git a/tests/unit/helpers.js b/tests/unit/helpers.js
index d4c017003d..6f39588048 100644
--- a/tests/unit/helpers.js
+++ b/tests/unit/helpers.js
@@ -374,6 +374,18 @@ const versioningTestUtils = {
         };
         return new DummyRequest(params, body);
     },
+    createPostObjectRequest:
(bucketName, keyName, body) => { + const params = { + bucketName, + formData: { + key: keyName, + }, + fileEventData: {}, + headers: {}, + url: '/', + }; + return new DummyRequest(params, body); + }, createBucketPutVersioningReq: (bucketName, status) => { const request = { bucketName, diff --git a/yarn.lock b/yarn.lock index 57f5a07451..03c97db0c0 100644 --- a/yarn.lock +++ b/yarn.lock @@ -16,6 +16,11 @@ enabled "2.0.x" kuler "^2.0.0" +"@fastify/busboy@^2.1.1": + version "2.1.1" + resolved "https://registry.yarnpkg.com/@fastify/busboy/-/busboy-2.1.1.tgz#b9da6a878a371829a0502c9b6c1c143ef6663f4d" + integrity sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA== + "@gar/promisify@^1.0.1": version "1.1.3" resolved "https://registry.yarnpkg.com/@gar/promisify/-/promisify-1.1.3.tgz#555193ab2e3bb3b6adc3d551c9c030d9e860daf6" @@ -499,9 +504,9 @@ arraybuffer.slice@~0.0.7: optionalDependencies: ioctl "^2.0.2" -"arsenal@git+https://github.com/scality/arsenal#7.70.29": +"arsenal@git+https://github.com/scality/arsenal#4ef5748c028619edff10d6d38b21df43c8d63d88": version "7.70.29" - resolved "git+https://github.com/scality/arsenal#a643a3e6ccbc49327339a285de1d4cb17afcd171" + resolved "git+https://github.com/scality/arsenal#4ef5748c028619edff10d6d38b21df43c8d63d88" dependencies: "@js-sdsl/ordered-set" "^4.4.2" "@types/async" "^3.2.12"