Feature/cldsrv 546 post object #5601

Open
wants to merge 23 commits into base: epic/RING-45960-postObject-api
Changes from 11 commits
1 change: 1 addition & 0 deletions constants.js
@@ -215,6 +215,7 @@ const constants = {
'initiateMultipartUpload',
'objectPutPart',
'completeMultipartUpload',
'objectPost',
],
};

85 changes: 41 additions & 44 deletions lib/api/api.js
@@ -52,6 +52,7 @@ const objectGetRetention = require('./objectGetRetention');
const objectGetTagging = require('./objectGetTagging');
const objectHead = require('./objectHead');
const objectPut = require('./objectPut');
const objectPost = require('./objectPost');
const objectPutACL = require('./objectPutACL');
const objectPutLegalHold = require('./objectPutLegalHold');
const objectPutTagging = require('./objectPutTagging');
@@ -67,7 +68,9 @@ const writeContinue = require('../utilities/writeContinue');
const validateQueryAndHeaders = require('../utilities/validateQueryAndHeaders');
const parseCopySource = require('./apiUtils/object/parseCopySource');
const { tagConditionKeyAuth } = require('./apiUtils/authorization/tagConditionKeys');
const { checkAuthResults } = require('./apiUtils/authorization/permissionChecks');
const checkHttpHeadersSize = require('./apiUtils/object/checkHttpHeadersSize');
const { processPostForm } = require('./apiUtils/apiCallers/callPostObject');

const monitoringMap = policies.actionMaps.actionMonitoringMapS3;

@@ -142,49 +145,6 @@ const api = {
// eslint-disable-next-line no-param-reassign
request.apiMethods = apiMethods;

function checkAuthResults(authResults) {
let returnTagCount = true;
const isImplicitDeny = {};
let isOnlyImplicitDeny = true;
if (apiMethod === 'objectGet') {
// first item checks s3:GetObject(Version) action
if (!authResults[0].isAllowed && !authResults[0].isImplicit) {
log.trace('get object authorization denial from Vault');
return errors.AccessDenied;
}
// TODO add support for returnTagCount in the bucket policy
// checks
isImplicitDeny[authResults[0].action] = authResults[0].isImplicit;
// second item checks s3:GetObject(Version)Tagging action
if (!authResults[1].isAllowed) {
log.trace('get tagging authorization denial ' +
'from Vault');
returnTagCount = false;
}
} else {
for (let i = 0; i < authResults.length; i++) {
isImplicitDeny[authResults[i].action] = true;
if (!authResults[i].isAllowed && !authResults[i].isImplicit) {
// Any explicit deny rejects the current API call
log.trace('authorization denial from Vault');
return errors.AccessDenied;
}
if (authResults[i].isAllowed) {
// If the action is allowed, the result is not implicit
// Deny.
isImplicitDeny[authResults[i].action] = false;
isOnlyImplicitDeny = false;
}
}
}
// These two APIs cannot use ACLs or Bucket Policies, hence, any
// implicit deny from vault must be treated as an explicit deny.
if ((apiMethod === 'bucketPut' || apiMethod === 'serviceGet') && isOnlyImplicitDeny) {
return errors.AccessDenied;
}
return { returnTagCount, isImplicitDeny };
}

return async.waterfall([
next => auth.server.doAuth(
request, log, (err, userInfo, authorizationResults, streamingV4Params) => {
@@ -256,7 +216,7 @@ const api = {
return callback(err);
}
if (authorizationResults) {
const checkedResults = checkAuthResults(authorizationResults);
const checkedResults = checkAuthResults(apiMethod, authorizationResults, log);
if (checkedResults instanceof Error) {
return callback(checkedResults);
}
@@ -286,6 +246,42 @@ const api = {
return this[apiMethod](userInfo, request, log, callback);
});
},
callPostObject(apiMethod, request, response, log, callback) {
request.apiMethod = apiMethod;

const requestContexts = prepareRequestContexts('objectPost', request,
undefined, undefined, undefined);
// Extract all the _apiMethods and store them in an array
const apiMethods = requestContexts ? requestContexts.map(context => context._apiMethod) : [];
// Attach the names to the current request
// eslint-disable-next-line no-param-reassign
request.apiMethods = apiMethods;

return processPostForm(request, response, requestContexts, log,
(err, userInfo, authorizationResults, streamingV4Params) => {
if (err) {
return callback(err);
}
if (authorizationResults) {
const checkedResults = checkAuthResults(apiMethod, authorizationResults, log);
if (checkedResults instanceof Error) {
return callback(checkedResults);
}
request.actionImplicitDenies = checkedResults.isImplicitDeny;
} else {
// Create an object keyed by apiMethods with every value set to false:
// for backward compatibility, all apiMethods are allowed by default,
// i.e. explicitly allowed, so implicit deny is false
request.actionImplicitDenies = apiMethods.reduce((acc, curr) => {
acc[curr] = false;
return acc;
}, {});
}
request._response = response;
return objectPost(userInfo, request, streamingV4Params,
log, callback, authorizationResults);
});
},
bucketDelete,
bucketDeleteCors,
bucketDeleteEncryption,
@@ -337,6 +333,7 @@ const api = {
objectCopy,
objectHead,
objectPut,
objectPost,
objectPutACL,
objectPutLegalHold,
objectPutTagging,
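
For orientation, the sketch below shows how a route handler could dispatch a bucket POST to the new callPostObject entry point. It is illustrative only: the routePOST name and the routesUtils.responseNoBody helper are assumptions, not part of this diff.

// Hypothetical routing sketch (not part of this PR): hand bucket POST
// requests to api.callPostObject the same way other routes use callApiMethod.
const api = require('../api/api');
const routesUtils = require('./routesUtils'); // assumed response helper

function routePOST(request, response, log) {
    // 'objectPost' matches the API method registered in lib/api/api.js
    return api.callPostObject('objectPost', request, response, log,
        (err, resultHeaders) =>
            routesUtils.responseNoBody(err, resultHeaders, response, 204, log));
}

module.exports = routePOST;
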
178 changes: 178 additions & 0 deletions lib/api/apiUtils/apiCallers/callPostObject.js
@@ -0,0 +1,178 @@
const { auth, errors } = require('arsenal');
const busboy = require('@fastify/busboy');
const writeContinue = require('../../../utilities/writeContinue');
const fs = require('fs');
const path = require('path');
const os = require('os');

// per doc: https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-HTTPPOSTForms.html#HTTPPOSTFormDeclaration
const MAX_FIELD_SIZE = 20 * 1024; // 20KB
// per doc: https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-keys.html
const MAX_KEY_SIZE = 1024;

async function authenticateRequest(request, requestContexts, log) {
return new Promise(resolve => {
// TODO RING-45960 remove ignore for POST object here
auth.server.doAuth(request, log, (err, userInfo, authorizationResults, streamingV4Params) =>
resolve({ userInfo, authorizationResults, streamingV4Params }), 's3', requestContexts);
});
}

async function parseFormData(request, response, requestContexts, log) {
/* eslint-disable no-param-reassign */
const formDataParser = busboy({ headers: request.headers });
writeContinue(request, response);

return new Promise((resolve, reject) => {
request.formData = {};
let totalFieldSize = 0;
let fileEventData = null;
let tempFileStream;
let tempFilePath;
let authResponse;
let fileWrittenPromiseResolve;
let formParserFinishedPromiseResolve;

const fileWrittenPromise = new Promise((res) => { fileWrittenPromiseResolve = res; });
const formParserFinishedPromise = new Promise((res) => { formParserFinishedPromiseResolve = res; });

formDataParser.on('field', (fieldname, val) => {
totalFieldSize += Buffer.byteLength(val, 'utf8');
if (totalFieldSize > MAX_FIELD_SIZE) {
return reject(errors.MaxPostPreDataLengthExceeded);
}
const lowerFieldname = fieldname.toLowerCase();
if (lowerFieldname === 'key') {
if (val.length > MAX_KEY_SIZE) {
return reject(errors.KeyTooLongError);
} else if (val.length === 0) {
return reject(errors.InvalidArgument
.customizeDescription('User key must have a length greater than 0.'));
}
}
request.formData[lowerFieldname] = val;
return undefined;
});

formDataParser.on('file', async (fieldname, file, filename, encoding, mimetype) => {
if (fileEventData) {
file.resume(); // Resume the stream to drain and discard the file
if (tempFilePath) {
fs.unlink(tempFilePath, unlinkErr => {
if (unlinkErr) {
log.error('Failed to delete temp file', { error: unlinkErr });
}
});
}
return reject(errors.InvalidArgument
.customizeDescription('POST requires exactly one file upload per request.'));
}

fileEventData = { fieldname, file, filename, encoding, mimetype };
if (!('key' in request.formData)) {
return reject(errors.InvalidArgument
.customizeDescription('Bucket POST must contain a field named '
+ "'key'. If it is specified, please check the order of the fields."));
}
// Replace `${filename}` with the actual filename
request.formData.key = request.formData.key.replace('${filename}', filename);
try {
// Authenticate request before streaming file
// TODO RING-45960 auth to be properly implemented
authResponse = await authenticateRequest(request, requestContexts, log);

// Create a temporary file to stream the file data
// This is to finalize validation on form data before storing the file
tempFilePath = path.join(os.tmpdir(), filename);
tempFileStream = fs.createWriteStream(tempFilePath);

file.pipe(tempFileStream);

tempFileStream.on('finish', () => {
request.fileEventData = { ...fileEventData, file: tempFilePath };
fileWrittenPromiseResolve();
});

tempFileStream.on('error', (err) => {
log.trace('Error streaming file to temporary location', { error: err.message });
reject(errors.InternalError);
});

// Wait for both file writing and form parsing to finish
return Promise.all([fileWrittenPromise, formParserFinishedPromise])
.then(() => resolve(authResponse))
.catch(reject);
} catch (err) {
return reject(err);
}
});

formDataParser.on('finish', () => {
if (!fileEventData) {
return reject(errors.InvalidArgument
.customizeDescription('POST requires exactly one file upload per request.'));
}
return formParserFinishedPromiseResolve();
});

formDataParser.on('error', (err) => {
log.trace('Error processing form data:', { error: err.message });
request.unpipe(formDataParser);
// Following observed AWS behaviour
reject(errors.MalformedPOSTRequest);
});

request.pipe(formDataParser);
return undefined;
});
}

function getFileStat(filePath, log) {
return new Promise((resolve, reject) => {
fs.stat(filePath, (err, stats) => {
if (err) {
log.trace('Error getting file size', { error: err.message });
return reject(errors.InternalError);
}
return resolve(stats);
});
});
}

async function processPostForm(request, response, requestContexts, log, callback) {
if (!request.headers || !request.headers['content-type']
    || !request.headers['content-type'].includes('multipart/form-data')) {
const contentTypeError = errors.PreconditionFailed
.customizeDescription('Bucket POST must be of the enclosure-type multipart/form-data');
return process.nextTick(callback, contentTypeError);
}
try {
const { userInfo, authorizationResults, streamingV4Params } =
await parseFormData(request, response, requestContexts, log);

const fileStat = await getFileStat(request.fileEventData.file, log);
request.parsedContentLength = fileStat.size;
request.fileEventData.file = fs.createReadStream(request.fileEventData.file);
if (request.formData['content-type']) {
request.headers['content-type'] = request.formData['content-type'];
} else {
request.headers['content-type'] = 'binary/octet-stream';
}

const authNames = { accountName: userInfo.getAccountDisplayName() };
if (userInfo.isRequesterAnIAMUser()) {
authNames.userName = userInfo.getIAMdisplayName();
}
log.addDefaultFields(authNames);

return callback(null, userInfo, authorizationResults, streamingV4Params);
} catch (err) {
return callback(err);
}
}

module.exports = {
authenticateRequest,
parseFormData,
processPostForm,
getFileStat,
};
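
To make the expected form layout concrete (form fields before the file part, a key field present, at most one file), here is a hedged client-side sketch using the form-data package; the endpoint, bucket name, and the omitted policy/credential/signature fields are placeholders, not something this PR provides.

// Sketch of a client building the kind of multipart/form-data body that
// parseFormData accepts. Policy, credential and signature fields are omitted
// for brevity; a real S3 POST upload must include them.
const FormData = require('form-data'); // assumed dependency for this example
const fs = require('fs');

const form = new FormData();
// 'key' must be appended before the file part, otherwise the parser rejects
// the request; the literal ${filename} placeholder is replaced server-side.
form.append('key', 'uploads/${filename}');
form.append('Content-Type', 'image/png');
form.append('file', fs.createReadStream('./photo.png'));

form.submit('http://localhost:8000/example-bucket', (err, res) => {
    if (err) {
        throw err;
    }
    console.log('POST object status:', res.statusCode);
});
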
43 changes: 43 additions & 0 deletions lib/api/apiUtils/authorization/permissionChecks.js
@@ -576,6 +576,48 @@ function validatePolicyConditions(policy) {
return null;
}

function checkAuthResults(apiMethod, authResults, log) {
let returnTagCount = true;
const isImplicitDeny = {};
let isOnlyImplicitDeny = true;
if (apiMethod === 'objectGet') {
// first item checks s3:GetObject(Version) action
if (!authResults[0].isAllowed && !authResults[0].isImplicit) {
log.trace('get object authorization denial from Vault');
return errors.AccessDenied;
}
// TODO add support for returnTagCount in the bucket policy
// checks
isImplicitDeny[authResults[0].action] = authResults[0].isImplicit;
// second item checks s3:GetObject(Version)Tagging action
if (!authResults[1].isAllowed) {
log.trace('get tagging authorization denial ' +
'from Vault');
returnTagCount = false;
}
} else {
for (let i = 0; i < authResults.length; i++) {
isImplicitDeny[authResults[i].action] = true;
if (!authResults[i].isAllowed && !authResults[i].isImplicit) {
// Any explicit deny rejects the current API call
log.trace('authorization denial from Vault');
return errors.AccessDenied;
}
if (authResults[i].isAllowed) {
// If the action is allowed, the result is not implicit
// Deny.
isImplicitDeny[authResults[i].action] = false;
isOnlyImplicitDeny = false;
}
}
}
// These two APIs cannot use ACLs or Bucket Policies, hence, any
// implicit deny from vault must be treated as an explicit deny.
if ((apiMethod === 'bucketPut' || apiMethod === 'serviceGet') && isOnlyImplicitDeny) {
return errors.AccessDenied;
}
return { returnTagCount, isImplicitDeny };
}

/** isLifecycleSession - check if it is the Lifecycle assumed role session arn.
* @param {string} arn - Amazon resource name - example:
@@ -607,6 +649,7 @@ module.exports = {
checkObjectAcls,
validatePolicyResource,
validatePolicyConditions,
checkAuthResults,
isLifecycleSession,
evaluateBucketPolicyWithIAM,
};
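
For reference, a minimal sketch of how the relocated checkAuthResults behaves for a non-objectGet action; the authResults array and the logger stub are hand-built for the example.

// Example usage of the exported checkAuthResults with Vault-style results.
const { checkAuthResults } = require('./lib/api/apiUtils/authorization/permissionChecks');

const log = { trace: () => {} }; // minimal logger stub for the example
const authResults = [
    // allowed, so the action is not an implicit deny
    { action: 'objectPost', isAllowed: true, isImplicit: false },
];

const checked = checkAuthResults('objectPost', authResults, log);
if (checked instanceof Error) {
    // An explicit deny (or an implicit-only deny for bucketPut/serviceGet)
    // comes back as errors.AccessDenied.
    throw checked;
}
// checked.isImplicitDeny is { objectPost: false } and checked.returnTagCount is true
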
14 changes: 12 additions & 2 deletions lib/api/apiUtils/object/createAndStoreObject.js
@@ -210,8 +210,18 @@ function createAndStoreObject(bucketName, bucketMD, objectKey, objMD, authInfo,
metadataStoreParams.contentMD5 = constants.emptyFileMd5;
return next(null, null, null);
}
return dataStore(objectKeyContext, cipherBundle, request, size,
streamingV4Params, backendInfo, log, next);
// Object POST receives its data as a file stream parsed from the form;
// that stream is used to store the data instead of the request stream itself.

let stream;

if (request.apiMethod === 'objectPost') {
stream = request.fileEventData ? request.fileEventData.file : undefined;
} else {
stream = request;
}

return dataStore(objectKeyContext, cipherBundle, stream, size, streamingV4Params, backendInfo, log, next);
},
function processDataResult(dataGetInfo, calculatedHash, next) {
if (dataGetInfo === null || dataGetInfo === undefined) {
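
Restated outside the waterfall for clarity, a hedged sketch of the stream selection introduced above; the request object is hand-built and not a real cloudserver request.

// Sketch: choose the data source the way createAndStoreObject now does.
// POST object uploads read from the temp-file stream produced by the form
// parser; every other API keeps streaming the request body itself.
function selectDataStream(request) {
    if (request.apiMethod === 'objectPost') {
        return request.fileEventData ? request.fileEventData.file : undefined;
    }
    return request;
}

// Hand-built example: a POST request yields the parsed file stream.
const fakePostRequest = {
    apiMethod: 'objectPost',
    fileEventData: { file: '<readable stream placeholder>' },
};
selectDataStream(fakePostRequest); // -> '<readable stream placeholder>'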