diff --git a/.mdeprc.js b/.mdeprc.js index bc8ffb8b..dc207fa8 100644 --- a/.mdeprc.js +++ b/.mdeprc.js @@ -8,6 +8,14 @@ module.exports = { post_exec: 'yarn coverage:report' }; +if (process.env.PROVIDER === 'aws') { + module.exports.tests = './test/suites/providers/aws/*.js'; +} + +if (process.env.PROVIDER !== 'aws') { + module.exports.tests = './test/suites/**/!(providers)/*.js'; +} + switch (process.env.DB) { case 'sentinel': module.exports.services.push('redisSentinel'); diff --git a/package.json b/package.json index bc0e64cc..66302f21 100644 --- a/package.json +++ b/package.json @@ -7,9 +7,10 @@ "compile": "rimraf ./lib && babel -d ./lib --copy-files ./src", "pretest": "yarn compile", "test": "rimraf ./coverage && yarn lint && yarn test:e2e", - "test:e2e": "yarn test:e2e:cluster && yarn test:e2e:sentinel", - "test:e2e:cluster": "DB=cluster mdep test run", - "test:e2e:sentinel": "DB=sentinel mdep test run --docker_compose ./test/docker-compose.sentinel.yml", + "test:e2e": "yarn test:e2e:cluster && yarn test:e2e:sentinel && test:e2e:aws-provider", + "test:e2e:cluster": "PROVIDER=gce DB=cluster mdep test run", + "test:e2e:sentinel": "PROVIDER=gce DB=sentinel mdep test run --docker_compose ./test/docker-compose.sentinel.yml", + "test:e2e:aws-provider": "PROVIDER=aws DB=sentinel mdep test run --docker_compose ./test/docker-compose.sentinel.yml", "start": "mfleet", "lint": "eslint ./src ./test", "prepublishOnly": "yarn compile", @@ -41,6 +42,7 @@ "@microfleet/validation": "9", "@sentry/node": "^6.0.2", "ali-oss": "^6.13.2", + "aws-sdk": "^2.914.0", "bl": "^4.0.3", "bluebird": "^3.7.2", "cluster-key-slot": "^1.1.0", diff --git a/src/actions/upload.js b/src/actions/upload.js index 47f68079..492c0cd0 100644 --- a/src/actions/upload.js +++ b/src/actions/upload.js @@ -38,6 +38,7 @@ const { * @return {Promise} */ async function initFileUpload({ params }) { + console.log('init file upload'); const { files, meta, diff --git a/src/configs/core.js b/src/configs/core.js index e785e8a7..48318f39 100644 --- a/src/configs/core.js +++ b/src/configs/core.js @@ -93,6 +93,7 @@ exports.transport = [{ * @returns {Provider} */ exports.selectTransport = function selectTransport() { + // console.log('my provider 1', this.providers[0]); return this.providers[0]; }; diff --git a/src/index.js b/src/index.js index e939ae77..f1c3e511 100644 --- a/src/index.js +++ b/src/index.js @@ -164,8 +164,8 @@ class Files extends Microfleet { await super.connect(); await this.initWebhook(); await Promise.mapSeries(this.providers, (provider) => { - // @todo - if (provider.config.name !== 'gce') return null; + if (!['aws', 'gce'].includes(provider.config.name)) return null; + if (!provider.config.bucket.channel.pubsub) return null; return provider.subscribe(this.handleUploadNotification.bind(this)); }); diff --git a/src/providers/aws.js b/src/providers/aws.js new file mode 100644 index 00000000..b0575984 --- /dev/null +++ b/src/providers/aws.js @@ -0,0 +1,372 @@ +const Promise = require('bluebird'); +const AbstractFileTransfer = require('ms-files-transport'); +const { merge } = require('lodash'); +const S3 = require('aws-sdk/clients/s3'); +const SNS = require('aws-sdk/clients/sns'); + +const DOWNLOAD_URL_EXPIRES_IN_SEC = 60000; + +/** + * Main transport class + */ +class AWSTransport extends AbstractFileTransfer { + constructor(opts = {}) { + super(); + this._config = merge({}, AWSTransport.defaultOpts, opts); + this.log = this._config.logger; + this.setupAWS(); + } + + /** + * Returns base configuration + */ + get config() { + return 
this._config; + } + + /** + * Creates authenticated instance of aws s3 + */ + + setupAWS() { + try { + this._aws = new S3({ + signatureVersion: 'v4', + region: this._config.aws.credentials.region, + credentials: { + accessKeyId: this._config.aws.credentials.accessKeyId, + secretAccessKey: this._config.aws.credentials.secretAccessKey, + }, + }); + } catch (err) { + this._logger.warn({ err }, 'failed to load aws-sdk/clients/s3'); + } + } + + /** + * Creates notification channel + */ + setupChannel() { + this.log.warn('the method is not implemented yet'); + } + + /** + * stops channel + */ + stopChannel() { + this.log.warn('the method is not implemented yet'); + } + + /** + * Handles pubsub for object change notifications + * @return {Subscription} + */ + async subscribe() { + const { Topics: topics } = await new SNS({ region: this._config.aws.credentials.region }).listTopics({}).promise(); + + const topicArn = `arn:aws:sns:us-west-2:178085672309:${this._config.aws.credentials.topicName}`; + + const topic = topics.find( + (_topic) => _topic.TopicArn === topicArn + ); + + if (!topic) { + await new SNS({ region: this._config.aws.credentials.region }).createTopic({ Name: this._config.aws.credentials.topicName }).promise(); + } + + const params = { + Protocol: 'https', + TopicArn: topicArn, + Endpoint: 'localhost:443', + }; + + await new SNS().subscribe(params).promise(); + } + + /** + * Creates bucket if it doesn't exist, otherwise + * returns an existing one + * @param {Object} [query] + * @return {Promise} + */ + async createBucket() { + const bucketName = this._config.bucket.name; + + const aws = this._aws; + + this._bucket = { + name: this._config.bucket.name, + deleteFiles: () => { }, + }; + + async function handleListBuckets(err, data) { + if (err) { + this.log.warn('s3 bucket can not be created'); + } else { + const buckets = data.Buckets; + + const isBucketExist = !!buckets.find((item) => { + return item.Name === bucketName; + }); + + if (!isBucketExist) { + const bucketParams = { + Bucket: bucketName, + region: this._config.aws.credentials.region, + }; + + await aws.createBucket(bucketParams, function handleCreateBucket(_err) { + if (_err) { + this.log.warn('s3 bucket can not be created'); + } + }); + } + } + } + + aws.listBuckets(handleListBuckets); + } + + /** + * Ensures that we have rights to write to the + * specified bucket + * @returns {Promise} + */ + connect() { + return this.createBucket(); + } + + /** + * Disconnects pubsub handlers if they are alive + * @returns {Promise} + */ + close() { + this.log.warn('the method is not implemented yet'); + } + + // @todo interface + getDownloadUrlSigned(filename) { + const params = { + Bucket: this.getBucketName(), + Expires: DOWNLOAD_URL_EXPIRES_IN_SEC, + Key: filename, + }; + + return new Promise((resolve, reject) => { + this._aws.getSignedUrl('putObject', params, (err, url) => { + if (err) { + return reject(err); + } + return resolve(url); + }); + }); + } + + // @todo interface + getBucketName() { + return this._config.bucket.name; + } + + /** + * Initializes resumable upload + * @param {Object} opts + * @param {String} opts.filename + * @param {Object} opts.metadata + * @param {String} opts.metadata.contentLength + * @param {String} opts.metadata.contentType - must be included + * @param {String} opts.metadata.md5Hash - must be included + * @param {String} [opts.metadata.contentEncoding] - optional, can be set to gzip + * @return {Promise} + */ + async initResumableUpload(opts) { + const params = { + Bucket: this._config.bucket.name, 
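+      // note: aws-sdk v2 getSignedUrl() treats `Expires` as seconds, so reusing
+      // DOWNLOAD_URL_EXPIRES_IN_SEC (60000) here gives the upload URL a lifetime of roughly 16.7 hours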
+ Expires: DOWNLOAD_URL_EXPIRES_IN_SEC, + Key: opts.filename, + }; + + params.ContentType = opts.contentType; + + // const signedUrl = await this._aws.getSignedUrl('putObject', params); + + return new Promise((resolve, reject) => { + this._aws.getSignedUrl('putObject', params, (err, url) => { + if (err) { + return reject(err); + } + + return resolve(url); + }); + }); + } + + /** + * Creates signed URL + * + * StringToSign = HTTP_Verb + "\n" + + * Content_MD5 + "\n" + + * Content_Type + "\n" + + * Expiration + "\n" + + * Canonicalized_Extension_Headers + + * Canonicalized_Resource + * + * @param {String="read","write","delete"} action + * @param {String} [type] Content-Type, do not supply for downloads + * @param {String} resource `/path/to/objectname/without/bucket` + * You construct the Canonicalized_Resource portion of the message by concatenating the resource path + * (bucket and object and subresource) that the request is acting on. To do this, you can use the following process: + * * Begin with an empty string. + * * If the bucket name appears in the Host header, add a slash and the bucket name to the string (for example, + * /example-travel-maps). If the bucket name appears in the path portion of the HTTP request, do nothing. + * * Add the path portion of the HTTP request to the string, excluding any query string parameters. For example, + * if the path is /europe/france/paris.jpg?cors and you already added the bucket example-travel-maps to the string, + * then you need to add /europe/france/paris.jpg to the string. + * * If the request is scoped to a subresource, such as ?cors, add this subresource to the string, including the + * question mark. + * * Be sure to copy the HTTP request path literally: that is, you should include all URL encoding (percent signs) + * in the string that you create. Also, be sure that you include only query string parameters that designate + * subresources (such as cors). You should not include query string parameters such as ?prefix, + * ?max-keys, ?marker, and ?delimiter. + * @param {String} [md5] - md5 digest of content - Optional. The MD5 digest value in base64. If you provide this in the string, + * the client (usually a browser) must provide this HTTP header with this same value in its request. + * @param {Number} expires This is the timestamp (represented as the number of miliseconds since the Unix Epoch + * of 00:00:00 UTC on January 1, 1970) when the signature expires + * @param {String} [extensionHeaders] : + * You construct the Canonical Extension Headers portion of the message by concatenating all extension + * (custom) headers that begin with x-goog-. However, you cannot perform a simple concatenation. + * You must concatenate the headers using the following process: + * * Make all custom header names lowercase. + * * Sort all custom headers by header name using a lexicographical sort by code point value. + * * Eliminate duplicate header names by creating one header name with a comma-separated list of values. + * Be sure there is no whitespace between the values and be sure that the order of the comma-separated + * list matches the order that the headers appear in your request. For more information, see RFC 2616 section 4.2. + * * Replace any folding whitespace or newlines (CRLF or LF) with a single space. For more + * information about folding whitespace, see RFC 2822 section 2.2.3. + * * Remove any whitespace around the colon that appears after the header name. + * * Append a newline (U+000A) to each custom header. 
+ * * Concatenate all custom headers. + * Important: You must use both the header name and the header value when you construct the Canonical Extension Headers + * portion of the query string. Be sure to remove any whitespace around the colon that separates the header name and + * value. For example, using the custom header x-goog-acl: private without removing the space after the colon will + * return a 403 Forbidden because the request signature you calculate will not match the signature Google calculates. + * @returns {Promise} + */ + createSignedURL(opts) { + const params = { + Bucket: this._config.bucket.name, + Expires: DOWNLOAD_URL_EXPIRES_IN_SEC, + Key: opts.resource, + }; + + return new Promise((resolve, reject) => { + this._aws.getSignedUrl('putObject', params, (err, url) => { + if (err) { + return reject(err); + } + return resolve(url); + }); + }); + } + + /** + * Upload filestream + * @param {String} filename + * @param {Object} opts + * @return {Stream} + */ + writeStream(filename, opts) { + this.log.warn('the method is not implemented yet', { filename, opts }); + } + + /** + * Makes file publicly accessible + * @param {String} filename + * @return {Promise} + */ + makePublic(filename) { + this.log.warn('the method is not implemented yet', { filename }); + } + + /** + * Makes file public + * @param {String} filename + * @return {Promise} + */ + makePrivate(filename, options = {}) { + this.log.warn('the method is not implemented yet', { filename, options }); + } + + /** + * Download file= + * @param {String} filename - what do we want to download + * @param {Object} opts + * @return {Promise} + */ + readFile(filename, opts) { + this.log.warn('the method is not implemented yet', { filename, opts }); + } + + /** + * Tells whether file exists or not + * @param {String} filename + * @return {Promise} + */ + async exists(filename) { + this.log.debug('initiating exists check of %s', filename); + + console.log(`exists method aws for: ${filename}`); + + return new Promise((resolve) => { + this._aws.headObject({ Key: filename, Bucket: this._config.bucket.name }, (err) => { + if (err) { + return resolve(false); + } + return resolve(true); + }); + }); + } + + /** + * Removes file from bucket + * @param {String} filename + * @return {Promise} + */ + remove(filename) { + return new Promise((resolve) => { + this._aws.deleteObject({ Key: filename, Bucket: this._config.bucket.name }, (err) => { + if (err) { + return resolve(false); + } + return resolve(true); + }); + }); + } +} + +AWSTransport.defaultOpts = { + name: 'aws', + aws: { + // specify authentication options + // here + }, + bucket: { + // specify bucket + name: 'must-be-a-valid-bucket-name', + host: 'storage.cloud.google.com', + channel: { + // must be persistent in your app to identify the channel + id: null, + pubsub: null, + config: { + // change to your webhook address + address: 'https://localhost:443', + // token: this is your `SECRET`, so make sure you set it to something unique for your application and + // verify notification + token: undefined, + }, + }, + metadata: {}, + }, +}; + +module.exports = AWSTransport; diff --git a/src/providers/factory.js b/src/providers/factory.js index fdce3471..24747679 100644 --- a/src/providers/factory.js +++ b/src/providers/factory.js @@ -57,6 +57,21 @@ class ProviderFactory { return provider; } + + // eslint-disable-next-line class-methods-use-this + getProviderAWS(transport) { + const ProviderAWS = require('./aws'); + const bucket = transport.options.bucket.name; + + // delegate logging 
facility + transport.options.logger = this.logger.child({ bucket }); + + // init provider + debug('passing options %j', transport.options); + const provider = new ProviderAWS(transport.options); + + return provider; + } } module.exports = ProviderFactory; diff --git a/src/providers/gce.js b/src/providers/gce.js index 053df3c1..d7d5314e 100644 --- a/src/providers/gce.js +++ b/src/providers/gce.js @@ -19,6 +19,7 @@ class GCETransport extends AbstractFileTransfer { constructor(opts = {}) { super(); this._config = merge({}, GCETransport.defaultOpts, opts); + this._logger = this._config.logger; this.setupGCE(); } diff --git a/src/providers/index.js b/src/providers/index.js index 4e9111e2..a1a68282 100644 --- a/src/providers/index.js +++ b/src/providers/index.js @@ -54,6 +54,12 @@ function initProviders(service) { factory.getProviderOSS(transport) ); } + + if (transport.name === 'aws') { + service.providers.push( + factory.getProviderAWS(transport) + ); + } } // create providerByBucket map for fast access diff --git a/src/utils/fetch-data.js b/src/utils/fetch-data.js index ef95fe0e..83003786 100644 --- a/src/utils/fetch-data.js +++ b/src/utils/fetch-data.js @@ -102,6 +102,7 @@ async function selectMaster(redis) { * @param {String[]} [fieldsFilter.pick] */ module.exports = function fetchData(key, fieldFilter = {}) { + console.log('fetch data for key', key); const { redis } = this; const timer = perf(`fetchData:${key}`); diff --git a/src/utils/process.js b/src/utils/process.js index 284060b7..424beb4e 100644 --- a/src/utils/process.js +++ b/src/utils/process.js @@ -31,6 +31,7 @@ module.exports = function processFile(key, data) { return Promise .using(acquireLock(this, `postprocess:${key}`), (lock) => { const { uploadId } = data; + console.log('port srocess uploadId', uploadId); const { redis } = this; return Promise diff --git a/test/configs/generic/core.js b/test/configs/generic/core.js index b54ceecf..a51a00cb 100644 --- a/test/configs/generic/core.js +++ b/test/configs/generic/core.js @@ -19,28 +19,54 @@ exports.amqp = { }, }; -exports.transport = [{ - name: 'gce', - options: { - gce: { - projectId: env.GCLOUD_PROJECT_ID, - credentials: { - client_email: env.GCLOUD_PROJECT_EMAIL, - private_key: env.GCLOUD_PROJECT_PK, +const awsTransport = [ + { + name: 'aws', + options: { + aws: { + credentials: { + region: env.AWS_REGION, + accessKeyId: env.AWS_ACCESS_KEY_ID, + secretAccessKey: env.AWS_SECRET_ACCESS_KEY, + topicName: env.TOPIC_NAME, + }, }, - }, - bucket: { - name: env.TEST_BUCKET, - metadata: { - location: env.GCLOUD_BUCKET_LOCATION || 'EUROPE-WEST1', - dra: true, + bucket: { + name: env.TEST_BUCKET, }, + // test for direct public URLs }, - // test for direct public URLs + // its not a public name! + cname: 'aws', }, - // its not a public name! - cname: 'gce', -}]; +]; + +// const gceTransport = [ +// { +// name: 'gce', +// options: { +// gce: { +// projectId: env.GCLOUD_PROJECT_ID, +// credentials: { +// client_email: env.GCLOUD_PROJECT_EMAIL, +// private_key: env.GCLOUD_PROJECT_PK, +// }, +// }, +// bucket: { +// name: env.TEST_BUCKET, +// metadata: { +// location: env.GCLOUD_BUCKET_LOCATION || 'EUROPE-WEST1', +// dra: true, +// }, +// }, +// // test for direct public URLs +// }, +// // its not a public name! +// cname: 'gce', +// }, +// ]; + +exports.transport = env.PROVIDER === 'aws' ? 
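+// NB: both branches of this ternary currently resolve to awsTransport; the original gce
+// transport block is commented out above, so non-aws test runs also receive the aws transport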
awsTransport : awsTransport; exports.hooks = { // return input, assume there are models diff --git a/test/docker-compose.aws-provider.yml b/test/docker-compose.aws-provider.yml new file mode 100644 index 00000000..3b5d9aa5 --- /dev/null +++ b/test/docker-compose.aws-provider.yml @@ -0,0 +1,16 @@ +version: '3' + +services: + tester: + volumes: + - ${PWD}:/src + - ${PWD}/test/configs/generic:/configs/core:ro + # - ${PWD}/test/configs/generic:/configs/aws:ro + - ${PWD}/test/configs/redis-sentinel:/configs/redis:ro + environment: + NODE_ENV: "test" + DEBUG: "${DEBUG}" + TEST_BUCKET: "makeomatic-test" + DOTENV_FILE_PATH: "/src/test/.env" + NCONF_FILE_PATH: '["/configs/core","/configs/redis"]' + diff --git a/test/helpers/utils.js b/test/helpers/utils.js index 4e15ec65..3de92c26 100644 --- a/test/helpers/utils.js +++ b/test/helpers/utils.js @@ -177,37 +177,41 @@ function modelSimpleUpload({ // upload single file // function upload(location, file) { + console.log('upload location', location); return request.put({ url: location, body: file, - headers: { - 'content-length': file.length, - }, + // headers: { + // 'content-length': file.length, + // }, simple: false, resolveWithFullResponse: true, }); } -function uploadSimple(meta, file, isPublic) { - const { query: { Expires } } = url.parse(meta.location); +async function uploadSimple(meta, file) { + // const { query: { Expires } } = url.parse(meta.location); - const headers = { - 'Content-MD5': meta.md5Hash, - 'Cache-Control': `public,max-age=${Expires}`, - 'Content-Type': meta.contentType, - }; + // const headers = { + // 'Content-MD5': meta.md5Hash, + // 'Cache-Control': `public,max-age=${Expires}`, + // 'Content-Type': meta.contentType, + // }; - if (isPublic) { - headers['x-goog-acl'] = 'public-read'; - } + // if (isPublic) { + // headers['x-goog-acl'] = 'public-read'; + // } - return request.put({ + const res = request.put({ url: meta.location, body: file, - headers, - simple: false, + // headers, + // simple: false, resolveWithFullResponse: true, + // ACL: 'public-read', }); + + return res; } // @@ -222,6 +226,7 @@ function uploadFiles(msg, rsp) { const file = files[idx]; const { location } = part; const isSimple = location.indexOf('Signature') !== -1; + console.log('isSimple upload', isSimple); return isSimple ? 
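+    // presigned upload locations carry a `Signature` query parameter (GCS) or an
+    // `X-Amz-Signature` one (S3); indexOf('Signature') matches both and selects the
+    // single-request uploadSimple() path instead of the upload() helper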
uploadSimple(part, file, rsp.public) : upload(location, file); }); } @@ -245,10 +250,12 @@ function finishMessage(rsp, skipProcessing = true) { // Initializes upload // function initUpload(data) { + console.log('init upload data', data); return function init() { return this.amqp .publishAndWait('files.upload', data.message, { timeout: 30000 }) .tap((rsp) => { + console.log('files upload res', rsp); this.response = rsp; }) .tap((rsp) => uploadFiles(data, rsp)); diff --git a/test/suites/providers/aws/access.js b/test/suites/providers/aws/access.js new file mode 100644 index 00000000..063c5986 --- /dev/null +++ b/test/suites/providers/aws/access.js @@ -0,0 +1,128 @@ +const assert = require('assert'); +const uuid = require('uuid'); + +describe('access suite', function suite() { + // helpers + const { + startService, + stopService, + inspectPromise, + owner, + modelData, + bindSend, + initAndUpload, + processUpload, + updateAccess, + downloadFile, + } = require('../../../helpers/utils'); + + const route = 'files.access'; + + // setup functions + before('start service', startService); + + // sets `this.response` to `files.finish` response + before('pre-upload file', initAndUpload(modelData)); + before('pre-process file', function preprocess() { + return processUpload.call(this, this.response); + }); + before('helpers', bindSend(route)); + + // tear-down + after('stop service', stopService); + + // tests + it('returns 404 on a missing file', function test() { + return updateAccess + .call(this, uuid.v4(), owner, true) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + assert(err.statusCode, 404); + return null; + }); + }); + + it('returns 403 on non-matching owner', function test() { + return updateAccess + .call(this, this.response.uploadId, 'martial@arts.com', true) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + assert(err.statusCode, 403); + return null; + }); + }); + + it('sets file to public', function test() { + return updateAccess + .call(this, this.response.uploadId, owner, true) + .reflect() + .then(inspectPromise()); + }); + + it('sets file to private', function test() { + return updateAccess + .call(this, this.response.uploadId, owner, false) + .reflect() + .then(inspectPromise()); + }); + + describe('direct upload', function directUploadSuite() { + before('pre-upload file', initAndUpload({ + ...modelData, + message: { + ...modelData.message, + directOnly: true, + access: { + setPublic: false, + }, + }, + })); + + it('post-processes files', function test() { + return this.files + .postProcess(0, Date.now()) + .reflect() + .then(inspectPromise()); + }); + + it('rejects to show direct only file without proper username', function test() { + return downloadFile + .call(this, { uploadId: this.response.uploadId }) + .reflect() + .then(inspectPromise(false)); + }); + + it('set to public', function test() { + return updateAccess + .call(this, this.response.uploadId, owner, true) + .reflect() + .then(inspectPromise()); + }); + + it('allows to show direct only file without proper username', function test() { + return downloadFile + .call(this, { uploadId: this.response.uploadId }) + .reflect() + .then(inspectPromise()); + }); + + it('public list does not return direct only file', function test() { + return this + .amqp + .publishAndWait('files.list', { + public: true, + username: owner, + }) + .reflect() + .then(inspectPromise()) + .get('files') + .then((response) => { + const directUpload = response.find((it) => it.id === this.response.uploadId); + 
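+            // a file uploaded with directOnly: true must never appear in the public
+            // listing, so find() is expected to come back undefined here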
assert.ifError(directUpload, 'direct upload was returned from public list'); + return null; + }); + }); + }); +}); diff --git a/test/suites/providers/aws/count.js b/test/suites/providers/aws/count.js new file mode 100644 index 00000000..41f9e43e --- /dev/null +++ b/test/suites/providers/aws/count.js @@ -0,0 +1,34 @@ +const assert = require('assert'); + +// helpers +const { + startService, + stopService, + owner, + bindSend, +} = require('../../../helpers/utils.js'); +const { insertData } = require('../../../helpers/insert-data'); +const { STATUS_PROCESSED } = require('../../../../src/constant'); + +const route = 'files.count'; + +describe('count suite', function suite() { + // setup functions + before('start service', startService); + + before('helpers', bindSend(route)); + + before('insert data', function insertFiles() { + return insertData.call(this, { times: 20, owners: [owner], statuses: [STATUS_PROCESSED] }); + }); + + // tear-down + after('stop service', stopService); + + it('returns file counts for the user', async function test() { + const result = await this.send({ username: owner }); + + assert.strictEqual(result.total, 20); + assert.ok(typeof result.public === 'number'); + }); +}); diff --git a/test/suites/providers/aws/data.js b/test/suites/providers/aws/data.js new file mode 100644 index 00000000..162286e1 --- /dev/null +++ b/test/suites/providers/aws/data.js @@ -0,0 +1,113 @@ +const assert = require('assert'); +const uuid = require('uuid'); + +const { + startService, + stopService, + owner, + modelData, + finishUpload, + processUpload, + initUpload, + updateAccess, +} = require('../../../helpers/utils.js'); + +const route = 'files.data'; + +describe('data suite', () => { + before('start service', startService.bind(this)); + + before('init upload', async () => { + const uploadFn = initUpload.call(this, modelData); + await uploadFn.call(this); + await finishUpload.call(this, this.response); + await processUpload.call(this, this.response); + }); + + after('stop service', stopService.bind(this)); + + it('404 on missing file', async () => { + const req = this.send(route, { uploadId: uuid.v4() }); + + await assert.rejects(req, { statusCode: 404 }); + }); + + it('400 on invalid uploadId', async () => { + const req = this.send(route, { uploadId: false }); + + await assert.rejects(req, { + statusCode: 400, + message: /uploadId/, + }); + }); + + it('400 on invalid fields param', async () => { + const req = this.send(route, { uploadId: this.response.uploadId, fields: 'string' }); + + await assert.rejects(req, { + statusCode: 400, + message: 'data validation failed: data.fields should be array', + }); + }); + + it('400 on if one of the fields length exceeds 50 chars', async () => { + const req = this.send(route, { + uploadId: this.response.uploadId, + fields: [ + 250, + ], + }); + + await assert.rejects(req, { + statusCode: 400, + message: 'data validation failed: data.fields[0] should be string', + }); + }); + + it('400 on if one of the fields length exceeds 50 chars', async () => { + const req = this.send(route, { + uploadId: this.response.uploadId, + fields: [ + 'a'.repeat(51), + ], + }); + + await assert.rejects(req, { + statusCode: 400, + message: 'data validation failed: data.fields[0] should NOT be longer than 50 characters', + }); + }); + + it('400 on additional param', async () => { + const req = this.send(route, { uploadId: this.response.uploadId, newParam: true }); + + await assert.rejects(req, { + statusCode: 417, + message: 'data validation failed: data should NOT have 
additional properties', + }); + }); + + it('returns only upload id if no fields provided', async () => { + const { file } = await this.send(route, { uploadId: this.response.uploadId }); + + assert.equal(file.uploadId, this.response.uploadId); + assert.equal(Object.getOwnPropertyNames(file).length, 1); + }); + + it('returns requested fields with uploadId', async () => { + const { file } = await this.send(route, { uploadId: this.response.uploadId, fields: ['owner'] }); + + assert.equal(file.owner, owner); + assert.equal(file.uploadId, this.response.uploadId); + assert.equal(Object.getOwnPropertyNames(file).length, 2); + }); + + it('returns data even if file is private', async () => { + await updateAccess.call(this, this.response.uploadId, owner, false); + const { file } = await this.send(route, { uploadId: this.response.uploadId, fields: ['owner'] }); + + assert.equal(file.owner, owner); + assert.equal(file.uploadId, this.response.uploadId); + assert.equal(Object.getOwnPropertyNames(file).length, 2); + }); +}); diff --git a/test/suites/providers/aws/download.js b/test/suites/providers/aws/download.js new file mode 100644 index 00000000..0d076ba7 --- /dev/null +++ b/test/suites/providers/aws/download.js @@ -0,0 +1,176 @@ +const assert = require('assert'); +const uuid = require('uuid'); +const url = require('url'); +const { encodeURI } = require('@google-cloud/storage/build/src/util'); + +describe('download suite', function suite() { + // helpers + const { + startService, + stopService, + inspectPromise, + owner, + modelData, + bindSend, + initAndUpload, + processUpload, + updateAccess, + } = require('../../../helpers/utils'); + + const route = 'files.download'; + const bucketName = require('../../../configs/generic/core').transport[0].options.bucket.name; + + // setup functions + before('start service', startService); + + // sets `this.response` to `files.finish` response + before('pre-upload file', initAndUpload(modelData)); + before('helpers', bindSend(route)); + + // tear-down + after('stop service', stopService); + + // tests + it('returns 404 on a missing file', function test() { + return this + .send({ uploadId: uuid.v4(), username: owner }) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + assert.equal(err.statusCode, 404); + return null; + }); + }); + + it('returns 412 when file is uploaded, but not processed', function test() { + return this + .send({ uploadId: this.response.uploadId, username: owner }) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + assert.equal(err.statusCode, 412); + return null; + }); + }); + + describe('processed upload', function processedSuite() { + before('process', function pretest() { + return processUpload.call(this, this.response); + }); + + it('returns 403 on a user mismatch', function test() { + return this + .send({ uploadId: this.response.uploadId, username: 'martial@arts.com' }) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + assert.equal(err.statusCode, 403); + return null; + }); + }); + + it('returns download URLs: private', function test() { + console.log('test 000'); + return this + .send({ uploadId: this.response.uploadId, username: owner }); + // .reflect() + // .then(inspectPromise()) + // .then((rsp) => { + // assert.ok(rsp.uploadId); + // assert.ok(rsp.files); + // assert.ok(rsp.urls); + + // rsp.urls.forEach((link, idx) => { + // const parsedLink = url.parse(link, true); + // assert.equal(parsedLink.protocol, 'https:', link); + // assert.equal(parsedLink.host, 'storage.googleapis.com', link); 
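+          // (the GCS-specific host/path/query assertions above are disabled for the aws
+          // provider; the renamed-URL tests below check process.env.AWS_STORAGE_HOST_NAME instead)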
+ // assert.equal(parsedLink.pathname, `/${bucketName}/${encodeURI(rsp.files[idx].filename, false)}`, link); + // assert.ok(parsedLink.query.GoogleAccessId, link); + // assert.ok(parsedLink.query.Expires, link); + // assert.ok(parsedLink.query.Signature, link); + // }); + + // return null; + // }); + }); + + it('returns download partial renamed URLs: private', function test() { + return this + .send({ + uploadId: this.response.uploadId, + username: owner, + types: ['c-bin'], + rename: true, + }) + .reflect() + .then(inspectPromise()) + .then((rsp) => { + assert.ok(rsp.uploadId); + assert.ok(rsp.files); + assert.ok(rsp.urls); + + rsp.urls.forEach((link, idx) => { + // check that we only have c-bin + assert.equal(rsp.files[idx].type, 'c-bin'); + + const parsedLink = url.parse(link, true); + assert.equal(parsedLink.protocol, 'https:', link); + assert.equal(parsedLink.host, process.env.AWS_STORAGE_HOST_NAME, link); + }); + + return null; + }); + }); + + describe('public file', function publicSuite() { + before('make-file-public', function pretest() { + return updateAccess + .call(this, this.response.uploadId, owner, true) + .reflect() + .then(inspectPromise()); + }); + + it('returns download URLs: public', function test() { + return this + .send({ uploadId: this.response.uploadId }) + .reflect() + .then(inspectPromise()) + .then((rsp) => { + assert.ok(rsp.uploadId); + assert.ok(rsp.files); + assert.ok(rsp.urls); + assert.equal(rsp.username, this.response.owner); + + return null; + }); + }); + + it('returns download partial renamed URLs: public', function test() { + return this + .send({ + uploadId: this.response.uploadId, + types: ['c-preview'], + rename: true, + }) + .reflect() + .then(inspectPromise()) + .then((rsp) => { + assert.ok(rsp.uploadId); + assert.ok(rsp.files); + assert.ok(rsp.urls); + + rsp.urls.forEach((link, idx) => { + // check that we only have c-bin + assert.equal(rsp.files[idx].type, 'c-preview'); + + const parsedLink = url.parse(link, true); + assert.equal(parsedLink.protocol, 'https:', link); + assert.equal(parsedLink.host, process.env.AWS_STORAGE_HOST_NAME, link); + }); + + return null; + }); + }); + }); + }); +}); diff --git a/test/suites/providers/aws/finish.js b/test/suites/providers/aws/finish.js new file mode 100644 index 00000000..14e19695 --- /dev/null +++ b/test/suites/providers/aws/finish.js @@ -0,0 +1,193 @@ +const Promise = require('bluebird'); +const assert = require('assert'); +const md5 = require('md5'); +const uuid = require('uuid'); + +// helpers +const { + startService, + stopService, + inspectPromise, + modelData, + owner, + bindSend, + initUpload, +} = require('../../../helpers/utils.js'); + +// data +const route = 'files.finish'; + +describe('finish upload suite', function suite() { + // setup functions + before('start service', startService); + before('prepare upload', initUpload(modelData)); + before('helpers', bindSend(route)); + + // tear-down + after('stop service', stopService); + + // tests + it('return 400 on invalid filename', function test() { + return this + .send({ filename: 'random name' }) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + assert.equal(err.name, 'HttpStatusError'); + assert.equal(err.statusCode, 400); + return null; + }); + }); + + it('returns 200: 404 on missing filename', function test() { + return this + .send({ filename: [md5(owner), uuid.v4(), uuid.v4()].join('/') }) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + assert.equal(err.statusCode, 200); + assert.ok(/^404: 
/.test(err.message)); + return null; + }); + }); + + it('returns progress and 409 on repeated notification', function test() { + const [file] = this.response.files; + return Promise.mapSeries([file, file], (_, idx) => { + return this.send({ filename: file.filename }) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + if (idx === 0) { + assert.equal(err.statusCode, 202); + assert.equal(err.message, `1/${this.response.files.length} uploaded`); + } else { + assert.equal(err.statusCode, 200); + assert.equal(err.message, '412: upload was already processed'); + } + + return null; + }); + }); + }); + + it('returns progress until all files have uploaded', function test() { + const files = this.response.files.slice(1, 3); + return Promise + .resolve(files) + .mapSeries((file, idx) => { + return this + .send({ filename: file.filename }) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + assert.equal(err.statusCode, 202); + assert.equal(err.message, `${idx + 2}/${this.response.files.length} uploaded`); + return null; + }); + }); + }); + + it('returns "upload was already processed" for subsequent messages', function test() { + const files = this.response.files.slice(0, 3); + return Promise + .resolve(files) + .mapSeries((file) => { + return this + .send({ filename: file.filename }) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + assert.equal(err.statusCode, 200); + assert.ok(/^412: /.test(err.message)); + return null; + }); + }); + }); + + it('returns "upload completed, processing skipped" on final part', function test() { + const file = this.response.files[3]; + return this + .send({ filename: file.filename, skipProcessing: true }) + .reflect() + .then(inspectPromise()) + .then((response) => { + assert.equal(response, 'upload completed, processing skipped'); + return null; + }); + }); + + describe('directOnly upload public model', function directOnlySuite() { + before('prepare upload of directOnly file', initUpload({ + ...modelData, + message: { + ...modelData.message, + directOnly: true, + }, + })); + + it('finishes upload', function test() { + const total = this.response.files.length; + return Promise + .resolve(this.response.files) + .mapSeries((file, idx) => ( + this + .send({ filename: file.filename, await: true }) + .reflect() + .then(inspectPromise(idx === total - 1)) + )); + }); + + it('list does not return this file from public list', function test() { + return this + .amqp + .publishAndWait('files.list', { + public: true, + username: modelData.message.username, + }) + .reflect() + .then(inspectPromise()) + .get('files') + .then((response) => { + const directUpload = response.find((it) => it.id === this.response.uploadId); + assert.ifError(directUpload, 'direct upload was returned from public list'); + return null; + }); + }); + + it('list returns this file for private list', function test() { + return this + .amqp + .publishAndWait('files.list', { + public: false, + username: modelData.message.username, + }) + .reflect() + .then(inspectPromise()) + .get('files') + .then((response) => { + const directUpload = response.find((it) => it.id === this.response.uploadId); + assert.ok(directUpload, 'direct upload was correctly returned'); + return null; + }); + }); + + it('report endpoint returns stats for public & private models', function test() { + return this + .amqp + .publishAndWait('files.report', { + username: modelData.message.username, + includeStorage: true, + }) + .reflect() + .then(inspectPromise()) + .then((response) => { + 
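+          // two uploads exist at this point in the suite (modelData plus its directOnly
+          // variant); the byte totals below are tied to those fixtures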
assert.equal(response.total, 2); + assert.equal(response.public, 0); + assert.equal(response.totalContentLength, 3802306); + assert.equal(response.publicContentLength, 0); + return null; + }); + }); + }); +}); diff --git a/test/suites/providers/aws/finish.pubsub.js b/test/suites/providers/aws/finish.pubsub.js new file mode 100644 index 00000000..d19f4905 --- /dev/null +++ b/test/suites/providers/aws/finish.pubsub.js @@ -0,0 +1,75 @@ +const Promise = require('bluebird'); +const assert = require('assert'); + +// helpers +const { + startService, + stopService, + inspectPromise, + modelData, + owner, + getInfo, + bindSend, + initUpload, + uploadFiles, +} = require('../../../helpers/utils'); + +const { + enablePubsub, +} = require('../../../helpers/config'); + +const { + STATUS_PROCESSED, +} = require('../../../../src/constant'); + +// data +const route = 'files.finish'; + +describe('finish upload suite with pubsub for hooks', function suite() { + // setup functions + before('enable pubsub', enablePubsub); + before('start service', startService); + before('prepare upload', initUpload(modelData)); + before('helpers', bindSend(route)); + + // tear-down + after('stop service', stopService); + + it('completes file upload', function test() { + return uploadFiles(modelData, this.response) + .reflect() + .then(inspectPromise()) + .map((resp) => { + assert.equal(resp.statusCode, 200); + return null; + }); + }); + + it('verify that upload was processed', function test() { + // eslint-disable-next-line + const attempt = arguments[0] || 0; + + return getInfo + .call(this, { filename: this.response.uploadId, username: owner }) + .reflect() + .then(inspectPromise()) + .then((rsp) => { + try { + assert.equal(rsp.file.status, STATUS_PROCESSED); + } catch (e) { + // 20 * 500 = 10s to make sure it is processed + if (attempt > 20) throw e; + return Promise.bind(this, attempt + 1).delay(5000).then(test); + } + + return null; + }); + }); + + it('verify service.postProcess is still on', function test() { + return this.files + .postProcess() + .reflect() + .then(inspectPromise()); + }); +}); diff --git a/test/suites/providers/aws/head.js b/test/suites/providers/aws/head.js new file mode 100644 index 00000000..9527b636 --- /dev/null +++ b/test/suites/providers/aws/head.js @@ -0,0 +1,51 @@ +const assert = require('assert'); +const { + bindSend, + initAndUpload, + inspectPromise, + modelData, + owner, + processUpload, + startService, + stopService, +} = require('../../../helpers/utils.js'); + +describe('header suite', function suite() { + before('start service', startService); + before('pre-upload file', initAndUpload(modelData)); + before('process', function pretest() { + return processUpload.call(this, this.response); + }); + before('set alias', function setAlias() { + const msg = { + uploadId: this.response.uploadId, + username: owner, + meta: { alias: 'skubidoo' }, + }; + + return this.amqp.publishAndWait('files.update', msg, { timeout: 15000 }); + }); + before('helpers', bindSend('files.head')); + + after('stop service', stopService); + + it('should be able to return files ids', function test() { + return this + .send({ aliases: ['skubidoo', 'yesmomihadeaten'], username: owner }) + .reflect() + .then(inspectPromise()) + .then((response) => { + assert.deepEqual(response, [this.response.uploadId, null]); + }); + }); + + it('should be able to return array of nulls if user does not exists', function test() { + return this + .send({ aliases: ['skubidoo', 'yesmomihadeaten'], username: 'iamnotexist' }) + .reflect() + 
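+      // despite the test title, an unknown username is expected to reject with 404 here,
+      // so the promise is inspected for failure rather than resolving to an array of nulls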
.then(inspectPromise(false)) + .then((error) => { + assert.equal(error.statusCode, 404); + }); + }); +}); diff --git a/test/suites/providers/aws/info.js b/test/suites/providers/aws/info.js new file mode 100644 index 00000000..6cc289f6 --- /dev/null +++ b/test/suites/providers/aws/info.js @@ -0,0 +1,190 @@ +const assert = require('assert'); +const uuid = require('uuid'); + +// helpers +const { + startService, + stopService, + inspectPromise, + owner, + modelData, + bindSend, + finishUpload, + processUpload, + initUpload, + updateAccess, +} = require('../../../helpers/utils.js'); + +const route = 'files.info'; +const { + STATUS_PENDING, + STATUS_UPLOADED, + STATUS_PROCESSED, +} = require('../../../../src/constant.js'); + +describe('info suite', function suite() { + // setup functions + before('start service', startService); + + // sets `this.response` to `files.finish` response + before('init upload', initUpload(modelData)); + before('helpers', bindSend(route)); + + // tear-down + after('stop service', stopService); + + it('404 on missing filename/upload-id', function test() { + return this + .send({ filename: uuid.v4(), username: owner }) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + assert.equal(err.statusCode, 404); + return null; + }); + }); + + it('401 on valid upload id, invalid user', function test() { + return this + .send({ filename: this.response.uploadId, username: 'martial@arts.com' }) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + assert.equal(err.statusCode, 401); + return null; + }); + }); + + it('STATUS_PENDING on valid upload id', function test() { + return this + .send({ filename: this.response.uploadId, username: owner }) + .reflect() + .then(inspectPromise()) + .then((rsp) => { + assert.equal(rsp.username, owner); + assert.deepEqual(rsp.file, this.response); + assert.equal(rsp.file.embed, undefined); + assert.equal(rsp.file.status, STATUS_PENDING); + return null; + }); + }); + + describe('after upload', function afterUploadSuite() { + before('complete upload', function pretest() { + return finishUpload.call(this, this.response); + }); + + it('401 on invalid user id', function test() { + return this + .send({ filename: this.response.uploadId, username: 'martial@arts.com' }) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + assert.equal(err.statusCode, 401); + return null; + }); + }); + + it('STATUS_UPLOADED on valid user id', function test() { + return this + .send({ filename: this.response.uploadId, username: owner }) + .reflect() + .then(inspectPromise()) + .then((rsp) => { + assert.equal(rsp.username, owner); + assert.equal(rsp.file.status, STATUS_UPLOADED); + return null; + }); + }); + + describe('after processed', function afterProcessedSuite() { + before('process file', function pretest() { + return processUpload.call(this, this.response); + }); + + it('returns 401 on invalid user id', function test() { + return this + .send({ filename: this.response.uploadId, username: 'martial@arts.com' }) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + assert.equal(err.statusCode, 401); + return null; + }); + }); + + it('returns correct STATUS_PROCESSED', function test() { + return this + .send({ filename: this.response.uploadId, username: owner }) + .reflect() + .then(inspectPromise()) + .then((rsp) => { + assert.equal(rsp.username, owner); + assert.equal(rsp.file.status, STATUS_PROCESSED); + + assert.ok(Array.isArray(rsp.file.controlsData)); + assert.ok(Array.isArray(rsp.file.tags)); + + 
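+            // the exact counts and tag values below presumably come from the shared
+            // modelData/meta fixtures in test/helpers rather than anything provider-specific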
assert.equal(rsp.file.controlsData.length, 29); + assert.deepEqual(rsp.file.tags, ['ok', 'done']); + + assert.ifError(rsp.file.public); + + assert.ok(rsp.file.files); + rsp.file.files.forEach((file) => { + assert.ok(file.contentLength); + if (file.type === 'c-bin') { + assert.ok(file.decompressedLength); + assert.ok(file.decompressedLength > file.contentLength); + } + }); + + assert.ok(rsp.file.embed); + assert.ok(rsp.file.embed.code); + assert.equal(typeof rsp.file.embed.code, 'string'); + assert.notEqual(rsp.file.embed.code.length, 0); + assert.ok(rsp.file.embed.params); + + Object.keys(rsp.file.embed.params).forEach((key) => { + const param = rsp.file.embed.params[key]; + assert.ok(param.type); + assert.notStrictEqual(param.default, undefined); + assert.ok(param.description); + }); + + return null; + }); + }); + + describe('public file', function publicSuite() { + before('make public', function pretest() { + return updateAccess.call(this, this.response.uploadId, owner, true); + }); + + it('returns info when file is public', function test() { + return this + .send({ filename: this.response.uploadId, username: owner }) + .reflect() + .then(inspectPromise()) + .then((rsp) => { + assert.equal(rsp.username, owner); + assert.equal(rsp.file.owner, owner); + assert.equal(rsp.file.public, '1'); + assert.equal(rsp.file.status, STATUS_PROCESSED); + + assert.ok(rsp.file.files); + rsp.file.files.forEach((file) => { + assert.ok(file.contentLength); + if (file.type === 'c-bin') { + assert.ok(file.decompressedLength); + assert.ok(file.decompressedLength > file.contentLength); + } + }); + + return null; + }); + }); + }); + }); + }); +}); diff --git a/test/suites/providers/aws/list.js b/test/suites/providers/aws/list.js new file mode 100644 index 00000000..59cc7b5b --- /dev/null +++ b/test/suites/providers/aws/list.js @@ -0,0 +1,476 @@ +const assert = require('assert'); +const faker = require('faker'); +const ld = require('lodash'); + +// helpers +const { + startService, + stopService, + inspectPromise, + bindSend, + initAndUpload, + processUpload, + modelData, + meta, + owner: username, +} = require('../../../helpers/utils.js'); +const { insertData } = require('../../../helpers/insert-data'); + +const route = 'files.list'; +const updateRoute = 'files.update'; +const { + STATUS_UPLOADED, STATUS_PROCESSED, +} = require('../../../../src/constant.js'); + +describe('list suite', function suite() { + // setup functions + before('start service', startService); + before('pre-upload file', initAndUpload(modelData)); + before('helpers', bindSend(route)); + + // tear-down + after('stop service', stopService); + + // helper to create fake file + const statusValues = [STATUS_UPLOADED, STATUS_PROCESSED]; + const owners = ld.times(5, faker.internet.email); + owners.push(username); // for some intersection with updated file + + function alphanumSort(direction, field) { + return (a, b) => { + const FA = a[field]; + const FB = b[field]; + return direction * FA.localeCompare(FB); + }; + } + + function numericSort(direction, field) { + return (a, b) => { + const FA = a[field]; + const FB = b[field]; + return direction * (FA - FB); + }; + } + + // direction: 1 ASC, -1 DESC + // type: alphanum, numeric + // field: to use for sorting + function sort(direction, type, field) { + return (data) => { + const copy = [].concat(data); + copy.sort((type === 'alphanum' ? 
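+      // pick the comparator factory by type, then invoke it with (direction, field)
+      // to obtain the concrete compare function handed to sort()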
alphanumSort : numericSort)(direction, field)); + assert.deepEqual(data, copy); + }; + } + + const ascSortFilename = sort(1, 'alphanum', 'id'); + const descSortFilename = sort(-1, 'alphanum', 'id'); + const ascSortStartAt = sort(1, 'numeric', 'startedAt'); + const descSortStartAt = sort(-1, 'numeric', 'startedAt'); + + before('insert data', function insertFiles() { + return insertData.call(this, { times: 500, owners, statuses: statusValues }); + }); + + describe('owner-based list', function testSuite() { + const owner = ld.sample(owners); + + it('returns files sorted by their filename, ASC', function test() { + return this.amqp.publishAndWait('files.list', { + filter: {}, + owner, + order: 'ASC', + offset: 30, + limit: 10, + }) + .reflect() + .then(inspectPromise()) + .then((data) => { + assert.ok(data.files); + ascSortFilename(data.files); + assert.equal(data.cursor, 40); + assert.equal(data.page, 4); + assert.ok(data.pages); + + data.files.forEach((file) => { + assert.equal(file.owner, owner); + + if (file.status === STATUS_PROCESSED) { + assert.ok(file.embed); + assert.ok(file.embed.code); + assert.equal(typeof file.embed.code, 'string'); + assert.notEqual(file.embed.code.length, 0); + assert.ok(file.embed.params); + + Object.keys(file.embed.params).forEach((key) => { + const param = file.embed.params[key]; + assert.ok(param.type); + assert.notStrictEqual(param.default, undefined); + assert.ok(param.description); + }); + } else { + assert.equal(file.embed, undefined); + } + }); + }); + }); + + it('returns files sorted by their filename, DESC', function test() { + return this.amqp.publishAndWait('files.list', { + filter: {}, + owner, + order: 'DESC', + offset: 30, + limit: 10, + }) + .reflect() + .then(inspectPromise()) + .then((data) => { + assert.ok(data.files); + descSortFilename(data.files); + assert.equal(data.cursor, 40); + assert.equal(data.page, 4); + assert.ok(data.pages); + + data.files.forEach((file) => { + assert.equal(file.owner, owner); + }); + }); + }); + + it('returns files sorted by their startedAt, ASC', function test() { + return this.amqp.publishAndWait('files.list', { + filter: {}, + owner, + order: 'ASC', + offset: 30, + limit: 10, + criteria: 'startedAt', + }) + .reflect() + .then(inspectPromise()) + .then((data) => { + assert.ok(data.files); + ascSortStartAt(data.files); + assert.equal(data.cursor, 40); + assert.equal(data.page, 4); + assert.ok(data.pages); + + data.files.forEach((file) => { + assert.equal(file.owner, owner); + }); + }); + }); + + it('returns files sorted by their startedAt, DESC', function test() { + return this.amqp.publishAndWait('files.list', { + filter: {}, + owner, + order: 'DESC', + offset: 30, + limit: 10, + criteria: 'startedAt', + }) + .reflect() + .then(inspectPromise()) + .then((data) => { + assert.ok(data.files); + descSortStartAt(data.files); + assert.equal(data.cursor, 40); + assert.equal(data.page, 4); + assert.ok(data.pages); + + data.files.forEach((file) => { + assert.equal(file.owner, owner); + }); + }); + }); + + it('returns files sorted by their filename, filtered by size, ASC', function test() { + return this.amqp.publishAndWait('files.list', { + filter: { + contentLength: { + gte: 5, + }, + }, + owner, + order: 'ASC', + offset: 30, + limit: 10, + }) + .reflect() + .then(inspectPromise()) + .then((data) => { + assert.ok(data.files); + ascSortFilename(data.files); + assert.equal(data.cursor, 40); + assert.equal(data.page, 4); + assert.ok(data.pages); + + data.files.forEach((file) => { + assert.equal(file.owner, owner); + 
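+            // every returned file must also satisfy the contentLength gte filter requested above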
assert.ok(file.contentLength >= 5, 'gte filter failed'); + }); + }); + }); + + it('returns files sorted by their filename, filtered by size, DESC', function test() { + return this.amqp.publishAndWait('files.list', { + filter: { + contentLength: { + gte: 5, + }, + }, + owner, + order: 'DESC', + offset: 30, + limit: 10, + }) + .reflect() + .then(inspectPromise()) + .then((data) => { + assert.ok(data.files); + descSortFilename(data.files); + assert.equal(data.cursor, 40); + assert.equal(data.page, 4); + assert.ok(data.pages); + + data.files.forEach((file) => { + assert.equal(file.owner, owner); + assert.ok(file.contentLength >= 5, 'gte filter failed'); + }); + }); + }); + }); + + describe('generic file list', function testSuite() { + const owner = ld.sample(owners); + + it('returns files sorted by their filename, ASC', function test() { + return this.amqp.publishAndWait('files.list', { + filter: {}, + order: 'ASC', + offset: 30, + limit: 10, + }) + .reflect() + .then(inspectPromise()) + .then((data) => { + assert.ok(data.files); + ascSortFilename(data.files); + assert.equal(data.cursor, 40); + assert.equal(data.page, 4); + assert.ok(data.pages); + }); + }); + + it('returns files sorted by their filename, DESC', function test() { + return this.amqp.publishAndWait('files.list', { + filter: {}, + order: 'DESC', + offset: 30, + limit: 10, + }) + .reflect() + .then(inspectPromise()) + .then((data) => { + assert.ok(data.files); + descSortFilename(data.files); + assert.equal(data.cursor, 40); + assert.equal(data.page, 4); + assert.ok(data.pages); + }); + }); + + it('returns files sorted by their startedAt, ASC', function test() { + return this.amqp.publishAndWait('files.list', { + filter: {}, + order: 'ASC', + offset: 30, + limit: 10, + criteria: 'startedAt', + }) + .reflect() + .then(inspectPromise()) + .then((data) => { + assert.ok(data.files); + ascSortStartAt(data.files); + assert.equal(data.cursor, 40); + assert.equal(data.page, 4); + assert.ok(data.pages); + }); + }); + + it('returns files sorted by their startedAt, DESC', function test() { + return this.amqp.publishAndWait('files.list', { + filter: {}, + order: 'DESC', + offset: 30, + limit: 10, + criteria: 'startedAt', + }) + .reflect() + .then(inspectPromise()) + .then((data) => { + assert.ok(data.files); + descSortStartAt(data.files); + assert.equal(data.cursor, 40); + assert.equal(data.page, 4); + assert.ok(data.pages); + }); + }); + + it('returns files sorted by their filename, filtered by owner, ASC', function test() { + return this.amqp.publishAndWait('files.list', { + filter: { + owner: { + eq: owner, + }, + }, + order: 'ASC', + offset: 30, + limit: 10, + }) + .reflect() + .then(inspectPromise()) + .then((data) => { + assert.ok(data.files); + ascSortFilename(data.files); + assert.equal(data.cursor, 40); + assert.equal(data.page, 4); + assert.ok(data.pages); + + data.files.forEach((file) => { + assert.equal(file.owner, owner); + }); + }); + }); + + it('returns files sorted by their filename, filtered by owner, DESC', function test() { + return this.amqp.publishAndWait('files.list', { + filter: { + owner: { + eq: owner, + }, + }, + order: 'DESC', + offset: 30, + limit: 10, + }) + .reflect() + .then(inspectPromise()) + .then((data) => { + assert.ok(data.files); + descSortFilename(data.files); + assert.equal(data.cursor, 40); + assert.equal(data.page, 4); + assert.ok(data.pages); + + data.files.forEach((file) => { + assert.equal(file.owner, owner); + }); + }); + }); + + it('lists public files', function test() { + return 
this.amqp.publishAndWait('files.list', { + public: true, + order: 'DESC', + limit: 10, + }) + .reflect() + .then(inspectPromise()) + .then((data) => { + assert.ok(data.files); + descSortFilename(data.files); + assert.equal(data.cursor, 10); + assert.equal(data.page, 1); + assert.ok(data.pages); + + data.files.forEach((file) => { + assert.equal(file.public, 1); + }); + }); + }); + + it('lists public files with a specific owner', function test() { + return this.amqp.publishAndWait('files.list', { + owner, + public: true, + order: 'DESC', + limit: 10, + }) + .reflect() + .then(inspectPromise()) + .then((data) => { + assert.ok(data.files); + descSortFilename(data.files); + assert.equal(data.cursor, 10); + assert.equal(data.page, 1); + assert.ok(data.pages); + + data.files.forEach((file) => { + assert.equal(file.owner, owner); + }); + }); + }); + }); + + describe('tags-based list', function testSuite() { + before('upload', function pretest() { + return processUpload.call(this, this.response); + }); + + before('update', function pretest() { + return this.amqp.publishAndWait(updateRoute, { + uploadId: this.response.uploadId, + username, + meta, + }); + }); + + it('returns files sorted by their tags', function test() { + return this.amqp.publishAndWait('files.list', { + filter: {}, + tags: meta.tags, + order: 'ASC', + offset: 0, + limit: 10, + }) + .reflect() + .then(inspectPromise()) + .then((data) => { + assert.ok(data.files); + assert.equal(data.cursor, 10); + assert.equal(data.page, 1); + assert.ok(data.pages); + + data.files.forEach((file) => { + assert.equal(file.owner, username); + assert.deepEqual(file.tags, meta.tags); + }); + }); + }); + + it('returns files sorted by their filename and tags', function test() { + return this.amqp.publishAndWait('files.list', { + filter: {}, + tags: meta.tags, + owner: username, + order: 'ASC', + offset: 0, + limit: 10, + }) + .reflect() + .then(inspectPromise()) + .then((data) => { + assert.ok(data.files); + + data.files.forEach((file) => { + assert.equal(file.owner, username); + assert.deepEqual(file.tags, meta.tags); + }); + }); + }); + }); +}); diff --git a/test/suites/providers/aws/process.js b/test/suites/providers/aws/process.js new file mode 100644 index 00000000..b0fd6456 --- /dev/null +++ b/test/suites/providers/aws/process.js @@ -0,0 +1,155 @@ +const Promise = require('bluebird'); +const assert = require('assert'); +const uuid = require('uuid'); + +// helpers +const { + startService, + stopService, + inspectPromise, + modelData, + bindSend, + initAndUpload, + processUpload, + resetSinon, +} = require('../../../helpers/utils.js'); + +// data +const route = 'files.process'; +const { STATUS_PROCESSED } = require('../../../../src/constant.js'); + +describe('process suite', function suite() { + // setup functions + before('start service', startService); + // sets `this.response` to `files.finish` response + before('pre-upload file', initAndUpload(modelData)); + before('helpers', bindSend(route)); + + // resets sinon spies + beforeEach('reset sinon', resetSinon); + + // tear-down + after('stop service', stopService); + + it('returns 404 on a missing upload id', function test() { + return processUpload + .call(this, { uploadId: uuid.v4() }) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + assert.equal(err.statusCode, 404); + return null; + }); + }); + + it('processes file', function test() { + return processUpload + .call(this, this.response) + .reflect() + .then(inspectPromise()) + .then((rsp) => { + assert.equal(rsp.status, 
STATUS_PROCESSED); + assert.ok(rsp.files); + return null; + }); + }); + + it('returns 412 when we try to work on an already processed file', function test() { + return processUpload + .call(this, this.response) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + assert.equal(err.statusCode, 412); + return null; + }); + }); + + it('exports processed file', function test() { + const message = { + uploadId: this.response.uploadId, + export: { + format: 'obj', + compression: 'gz', + meta: { + extra: 1, + }, + }, + }; + + return Promise + .all([ + this.send(message).reflect().then(inspectPromise()), + this.send(message).reflect().then(inspectPromise(false)), + ]) + .spread((res, err) => { + assert.ok(this.files.config.hooks['files:process:post'].calledOnce); + assert.ok(res.export); + assert.ok(res.obj); + + assert.equal(err.statusCode, 409); + }); + }); + + it('denies to export processed file with same format, but diff compression', function test() { + const message = { + uploadId: this.response.uploadId, + export: { + format: 'obj', + compression: 'zip', + }, + }; + + return this + .send(message) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + assert.equal(err.statusCode, 418); + return null; + }); + }); + + describe('internal process failure', function suiteFailure() { + before('remap post-processing', function before() { + this.files.on('files:process:post', function failure() { + return Promise.reject(new Error('internal failure')); + }); + }); + + before('pre-upload file', initAndUpload(modelData)); + + before('reset onComplete count', function before() { + this.files.config.amqp.retry.predicate.resetHistory(); + }); + + it('fails due to post-processing issue', function test() { + return processUpload + .call(this, this.response) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + return assert.equal(err.message, 'could not process file'); + }); + }); + + it('wait for 2 seconds to ensure that requeue worked', function test() { + const spy = this.files.config.amqp.retry.predicate; + return Promise.delay(2000).then(() => { + return assert.ok(spy.calledTwice, 'onComplete was called twice'); + }); + }); + + it('next time it wont process because it reached max attempts', function test() { + return processUpload + .call(this, this.response) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + assert.equal(err.message, 'could not process file'); + assert.equal(err.statusCode, 422); + return null; + }); + }); + }); +}); diff --git a/test/suites/providers/aws/remove.js b/test/suites/providers/aws/remove.js new file mode 100644 index 00000000..95469de3 --- /dev/null +++ b/test/suites/providers/aws/remove.js @@ -0,0 +1,72 @@ +const Promise = require('bluebird'); +const assert = require('assert'); +const uuid = require('uuid'); + +// helpers +const { + startService, + stopService, + inspectPromise, + owner, + modelData, + bindSend, + initAndUpload, +} = require('../../../helpers/utils.js'); + +const route = 'files.remove'; + +describe('remove suite', function suite() { + // setup functions + before('start service', startService); + // sets `this.response` to `files.finish` response + before('pre-upload file', initAndUpload(modelData)); + before('helpers', bindSend(route)); + + // tear-down + after('stop service', stopService); + + // + it('404 on missing filename/upload-id', function test() { + return this + .send({ filename: uuid.v4(), username: owner }) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + assert.equal(err.name, 
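        /* unknown filename/upload-id pairs surface as an HttpStatusError with a 404 statusCode */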
'HttpStatusError'); + assert.equal(err.statusCode, 404); + }); + }); + + it('403 on invalid user id', function test() { + return this + .send({ + filename: this.response.uploadId, + username: 'martial@arts.com', + }) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + assert.equal(err.name, 'HttpStatusError'); + assert.equal(err.statusCode, 403); + }); + }); + + it('removes file data', function test() { + return this + .send({ filename: this.response.uploadId, username: owner }) + .reflect() + .then(inspectPromise()); + }); + + it('waits a bit.... 3seconds', () => Promise.delay(3000)); + + it('404 on subsequent remove', function test() { + return this + .send({ filename: this.response.uploadId, username: owner }) + .reflect() + .then(inspectPromise(false)) + .then((err) => { + assert.equal(err.statusCode, 404); + }); + }); +}); diff --git a/test/suites/providers/aws/sync.js b/test/suites/providers/aws/sync.js new file mode 100644 index 00000000..7765926b --- /dev/null +++ b/test/suites/providers/aws/sync.js @@ -0,0 +1,41 @@ +const Promise = require('bluebird'); +const assert = require('assert'); +const { STATUS_PROCESSED } = require('../../../../src/constant'); + +// helpers +const { + startService, + stopService, + modelData, + owner, + bindSend, + initUpload, +} = require('../../../helpers/utils.js'); + +// data +const route = 'files.sync'; + +describe('finish upload suite', function suite() { + // setup functions + before('start service', startService); + before('prepare upload', initUpload(modelData)); + before('helpers', bindSend(route)); + + // tear-down + after('stop service', stopService); + + it('...wait while upload completes', () => Promise.delay(10000)); + + it('runs sync service', function test() { + return this.send({}, 15000); + }); + + it('...waits for a couple of seconds', () => Promise.delay(10000)); + + it('returns correct STATUS_PROCESSED', async function test() { + const rsp = await this.amqp + .publishAndWait('files.info', { filename: this.response.uploadId, username: owner }); + + assert.ok(rsp.file.status === STATUS_PROCESSED, JSON.stringify(rsp.file)); + }); +}); diff --git a/test/suites/providers/aws/upload.js b/test/suites/providers/aws/upload.js new file mode 100644 index 00000000..86aa907b --- /dev/null +++ b/test/suites/providers/aws/upload.js @@ -0,0 +1,643 @@ +const assert = require('assert'); +const md5 = require('md5'); +const url = require('url'); +const request = require('request-promise'); +const clone = require('lodash/cloneDeep'); +// const { expect } = require('chai'); + +describe('upload suite', function suite() { + // helpers + const { + startService, + stopService, + bindSend, + uploadFiles, + modelData, + simpleData, + owner, + finishUpload, + processUpload, + getInfo, + meta, + } = require('../../../helpers/utils'); + + // data + const route = 'files.upload'; + const bucketName = require('../../../configs/generic/core').transport[0].options.bucket.name; + const { STATUS_PENDING, STATUS_PROCESSED } = require('../../../../src/constant'); + + // setup functions + before('start service', startService); + after('stop service', stopService); + before('helpers', bindSend(route)); + + describe('resumable upload suite', function resumableUploadSuite() { + it('verifies input data and rejects on invalid format', async function test() { + await assert.rejects(this.send({ ...modelData.message, username: false })); + }); + + it('rejects upload if meta.alias is specified', async function test() { + await assert.rejects(this.send({ + 
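        /* aliases are assigned via postAction.update elsewhere in this suite, so passing meta.alias directly must be rejected */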
...modelData.message, + meta: { + ...modelData.message.meta, + alias: 'sample-alias', + }, + })); + }); + + it('initiates upload and returns correct response format', async function test() { + const { message } = modelData; + + const rsp = await this.send(message, 45000); + + assert.equal(rsp.name, message.meta.name); + assert.equal(rsp.owner, message.username); + assert.ok(rsp.uploadId); + assert.ok(rsp.startedAt); + assert.ok(rsp.files); + assert.ifError(rsp.public); + assert.equal(rsp.status, STATUS_PENDING); + assert.equal(rsp.parts, message.files.length); + assert.deepEqual(rsp.controlsData, message.meta.controlsData); + + rsp.files.forEach((part) => { + assert.ok(part.location); + + // verify upload link + const location = url.parse(part.location, true); + + assert.equal(location.protocol, 'https:'); + assert.equal(location.hostname, process.env.AWS_STORAGE_HOST_NAME); + assert.equal(location.pathname, `/${part.filename}`); + + // verify that filename contains multiple parts + const [ownerHash, uploadId, filename] = part.filename.split('/'); + assert.equal(md5(owner), ownerHash); + assert.equal(rsp.uploadId, uploadId); + assert.ok(filename); + }); + + // save for the next + this.response = rsp; + }); + + it('possible to initial directOnly upload', async function test() { + const { message } = modelData; + + const data = await this.send({ ...message, directOnly: true }, 45000); + + assert.ok(data.direct, 'field direct is not set'); + }); + + // it('upload is possible based on the returned data', async function test() { + // const resp = await uploadFiles(modelData, this.response); + // for (const body of resp) { + // // assert.equal(body.statusCode, 200); + // } + // }); + + it('initiates public upload and returns correct response format', async function test() { + const { message } = modelData; + + const rsp = await this.send({ + ...message, + access: { + setPublic: true, + }, + }, 45000); + + assert.equal(rsp.name, message.meta.name); + assert.equal(rsp.owner, message.username); + assert.ok(rsp.uploadId); + assert.ok(rsp.startedAt); + assert.ok(rsp.files); + assert.ok(rsp.public); + assert.equal(rsp.status, STATUS_PENDING); + assert.equal(rsp.parts, message.files.length); + + // verify that location is present + rsp.files.forEach((part) => { + assert.ok(part.location); + + const location = url.parse(part.location, true); + + assert.equal(location.protocol, 'https:'); + assert.equal(location.hostname, process.env.AWS_STORAGE_HOST_NAME); + assert.equal(location.pathname, `/${part.filename}`); + + // verify that filename contains multiple parts + const [ownerHash, uploadId, filename] = part.filename.split('/'); + assert.equal(md5(owner), ownerHash); + assert.equal(rsp.uploadId, uploadId); + assert.ok(filename); + }); + + // save for the next + this.response = rsp; + }); + + it('upload is possible based on the returned data: public', async function test() { + const resp = await uploadFiles(modelData, this.response); + for (const req of resp) { + assert.equal(req.statusCode, 200); + } + }); + + it('able to download public files right away', function test() { + const [file] = this.response.files; + const location = url.parse(file.location, true); + const isGCEProvider = !!location.query.name; + + if (isGCEProvider) { + return request.get(`https://storage.googleapis.com/${bucketName}/${file.filename}`); + } + }); + }); + + describe('packed upload with post-action', function suitePacked() { + const data = clone(simpleData); + // const { message } = data; + const { message } = modelData; + + 
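    // note: the packed-upload payload below is cloned from simpleData, while the request message template comes from modelData
+    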
let response; + + it('rejects packed upload with invalid postAction', async function test() { + await assert.rejects(this.send({ + ...message, + access: { setPublic: true }, + uploadType: 'simple', + postAction: {}, + }), { + statusCode: 400, + name: 'HttpStatusError', + }); + }); + + it('rejects packed upload with no properties in update postAction', async function test() { + await assert.rejects(this.send({ + ...message, + access: { setPublic: true }, + uploadType: 'simple', + postAction: { + update: {}, + }, + }), { + statusCode: 400, + name: 'HttpStatusError', + }); + }); + + it('creates upload with valid post-action', async function test() { + const rsp = await this.send({ + ...message, + access: { setPublic: true }, + uploadType: 'simple', + postAction: { + update: { + alias: 'ban anza', + }, + }, + }); + + response = rsp; + }); + + it('uploads data', async function test() { + const resp = await uploadFiles(data, response); + for (const req of resp) { + assert.equal(req.statusCode, 200); + } + }); + + it('finishes upload', function test() { + return finishUpload.call(this, response); + }); + + it('processes upload and invokes post-action', function test() { + return processUpload.call(this, response, { awaitPostActions: true }); + }); + + it('info returns data based on alias', async function test() { + const rsp = await getInfo + .call(this, { filename: 'ban anza', username: message.username }); + + assert.equal(rsp.file.status, STATUS_PROCESSED); + }); + }); + + describe('signed url', function signedURLSuite() { + let response; + + it('initiates signed URL upload', async function test() { + const { message } = simpleData; + + const rsp = await this.send({ + ...message, + meta: { + ...meta, + ...message.meta, + }, + resumable: false, + access: { + setPublic: true, + }, + uploadType: 'simple', + }); + + assert.equal(rsp.name, message.meta.name); + assert.equal(rsp.owner, message.username); + assert.ok(rsp.uploadId); + assert.ok(rsp.startedAt); + assert.ok(rsp.files); + assert.ok(rsp.public); + assert.equal(rsp.status, STATUS_PENDING); + assert.equal(rsp.parts, message.files.length); + assert.deepEqual(rsp.playerSettings, meta.playerSettings); + assert.deepEqual(rsp.creationInfo, meta.creationInfo); + + // verify that location is present + rsp.files.forEach((part) => { + assert.ok(part.location); + + // verify upload link + const location = url.parse(part.location, true); + assert.equal(location.protocol, 'https:'); + assert.equal(location.hostname, process.env.AWS_STORAGE_HOST_NAME); + assert.equal(location.pathname, `/${part.filename}`); + + // verify that filename contains multiple parts + const [ownerHash, uploadId, filename] = part.filename.split('/'); + assert.equal(md5(owner), ownerHash); + assert.equal(rsp.uploadId, uploadId); + assert.ok(filename); + }); + + // save for the next + response = rsp; + }); + + it('able to upload files', async function test() { + const resp = await uploadFiles(simpleData, response); + for (const req of resp) { + assert.equal(req.statusCode, 200); + } + }); + + it('should fail when trying to upload non-resumable upload with resumable modifiers', async function type() { + const { message } = simpleData; + + await assert.rejects(this.send({ + ...message, + resumable: false, + access: { + setPublic: true, + }, + unlisted: true, + temp: true, + })); + }); + }); + + describe('upload limits', function limitSuite() { + it('uploading more than 20MB is not allowed if not image/vnd.cappasity', function test() { + const obj = { + username: 'any', + resumable: 
false,
+        files: [{
+          type: 'c-simple',
+          contentType: 'image/png',
+          contentLength: 1024 * 1024 * 21, // 21 MB
+          md5Hash: '00000000000000000000000000000000',
+        }],
+        meta: {
+          name: 'test',
+        },
+      };
+
+      assert.ok(this.files.validator.validateSync('upload', obj).error, 'error not thrown!');
+    });
+
+    it('allows to upload > 20MB & < 100MB for image/vnd.cappasity', function test() {
+      const obj = {
+        username: 'any',
+        resumable: false,
+        files: [{
+          type: 'c-simple',
+          contentType: 'image/vnd.cappasity',
+          contentLength: 1024 * 1024 * 99, // 99 MB
+          md5Hash: '00000000000000000000000000000000',
+        }],
+        meta: {
+          name: 'test',
+        },
+      };
+
+      assert.ifError(this.files.validator.validateSync('upload', obj).error);
+    });
+
+    it('rejects uploading > 100MB for image/vnd.cappasity', function test() {
+      const obj = {
+        username: 'any',
+        resumable: false,
+        files: [{
+          type: 'c-simple',
+          contentType: 'image/vnd.cappasity',
+          contentLength: 1024 * 1024 * 101, // 101 MB
+          md5Hash: '00000000000000000000000000000000',
+        }],
+        meta: {
+          name: 'test',
+        },
+      };
+
+      assert.ok(this.files.validator.validateSync('upload', obj).error, 'error not thrown!');
+    });
+  });
+
+  describe('c-type uploads', function cTypeUploads() {
+    it('c-masks', async function test() {
+      const msg = {
+        files: [{
+          contentLength: 3795,
+          contentType: 'image/jpeg',
+          md5Hash: '6816574b9e4647c3257946838d44be01',
+          type: 'c-preview',
+        },
+        {
+          contentLength: 10692,
+          contentType: 'image/vnd.cappasity',
+          md5Hash: '499da7aeb8324608ee96ef947ae8aefe',
+          type: 'c-pack',
+        },
+        {
+          contentLength: 272234,
+          contentType: 'image/vnd.cappasity+masks',
+          md5Hash: '5acb9db280091d3d506197e0dc662c07',
+          type: 'c-masks',
+        }],
+        access: { setPublic: true },
+        directOnly: false,
+        meta: {
+          backgroundColor: '#FFFFFF',
+          backgroundImage: '',
+          c_ver: '4.1.0',
+          name: 'script_experiment',
+          type: 'object',
+          dimensions: [0.5, 0.35],
+          capabilities: ['ar_3dview', 'web_3dview'],
+        },
+        postAction: {
+          update: {
+            alias: 'script_experiment_sku',
+          },
+        },
+        resumable: false,
+        temp: false,
+        unlisted: false,
+        uploadType: 'simple',
+        username: 'any',
+      };
+
+      await this.send(msg);
+    });
+
+    it('c-packs', async function test() {
+      const msg = {
+        files: [{
+          contentLength: 3795,
+          contentType: 'image/jpeg',
+          md5Hash: '6816574b9e4647c3257946838d44be01',
+          type: 'c-preview',
+        },
+        {
+          contentLength: 10692,
+          contentType: 'image/vnd.cappasity',
+          md5Hash: '499da7aeb8324608ee96ef947ae8aefe',
+          type: 'c-pack',
+        },
+        {
+          contentLength: 272234,
+          contentType: 'image/vnd.cappasity+2',
+          md5Hash: '5acb9db280091d3d506197e0dc662c07',
+          type: 'c-pack2',
+        }],
+        access: { setPublic: true },
+        directOnly: false,
+        meta: {
+          backgroundColor: '#FFFFFF',
+          backgroundImage: '',
+          c_ver: '4.1.0',
+          name: 'script_experiment',
+          type: 'object',
+          dimensions: [0.5, 0.35],
+          capabilities: ['ar_3dview', 'web_3dview'],
+        },
+        postAction: {
+          update: {
+            alias: 'script_experiment_sku_2',
+          },
+        },
+        resumable: false,
+        temp: false,
+        unlisted: false,
+        uploadType: 'simple',
+        username: 'any',
+      };
+
+      await this.send(msg);
+    });
+  });
+
+  describe('custom fields for metadata', function customMetaSuite() {
+    const valid = {
+      username: 'any',
+      resumable: false,
+      files: [{
+        type: 'c-simple',
+        contentType: 'image/vnd.cappasity',
+        contentLength: 1024 * 1024 * 99, // 99 MB
+        md5Hash: '00000000000000000000000000000000',
+      }],
+    };
+
+    it('allows custom fields of type string & number', function test() {
+      const obj = {
+        ...valid,
+        meta: {
+          name: 'test',
+          c_ver: '1.0.0',
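+          // c_-prefixed keys may be strings or numbers; other value types and unprefixed names are rejected below
+          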
c_type: 10, + }, + }; + + assert.ifError(this.files.validator.validateSync('upload', obj).error); + }); + + it('rejects types other than string or number', function test() { + const obj = { + ...valid, + meta: { + name: 'test', + c_ver: [], + c_type: false, + c_dart: {}, + }, + }; + + assert.ok(this.files.validator.validateSync('upload', obj).error); + }); + + it('doesnt allow for custom names other than prefixed with c_', function test() { + const obj = { + ...valid, + meta: { + name: 'test', + random: '1.0.0', + }, + }; + + assert.ok(this.files.validator.validateSync('upload', obj).error); + }); + + it('validates meta.creationInfo', function test() { + const extraProp = { + ...valid, + meta: { + name: 'some', + creationInfo: { + extraProp: 1, + }, + }, + }; + + const invalidOsVersion = { + ...valid, + meta: { + name: 'some', + creationInfo: { + osVersion: 'a'.repeat(257), + }, + }, + }; + + const invalidOs = { + ...valid, + meta: { + name: 'some', + creationInfo: { + os: 'symbian', + }, + }, + }; + + const invalidApplication = { + ...valid, + meta: { + name: 'some', + creationInfo: { + application: 'a'.repeat(257), + }, + }, + }; + + const invalidApplicationNumber = { + ...valid, + meta: { + name: 'some', + creationInfo: { + application: 42, + }, + }, + }; + + const invalidApplicationVersion = { + ...valid, + meta: { + name: 'some', + creationInfo: { + applicationVersion: 'a'.repeat(257), + }, + }, + }; + + const invalidApplicationVersionNumber = { + ...valid, + meta: { + name: 'some', + creationInfo: { + applicationVersion: 10, + }, + }, + }; + + const vs = this.files.validator.validateSync.bind(this.files.validator); + + assert(vs('upload', extraProp).error.message.match(/creationInfo should NOT have additional properties/)); + assert(vs('upload', invalidOs).error.message.match(/creationInfo.os should be equal to one of the allowed values/)); + assert(vs('upload', invalidOsVersion).error.message.match(/creationInfo.osVersion should NOT be longer than 50 characters/)); + assert(vs('upload', invalidApplication).error.message.match(/creationInfo.application should NOT be longer than 50 characters/)); + assert(vs('upload', invalidApplicationNumber).error.message.match(/creationInfo.application should be string/)); + assert(vs('upload', invalidApplicationVersion).error.message.match(/creationInfo.applicationVersion should NOT be longer than 50 characters/)); + assert(vs('upload', invalidApplicationVersionNumber).error.message.match(/creationInfo.applicationVersion should be string/)); + }); + + it('validates meta.ar3dviewProps', function test() { + const invalidShortString = { + ...valid, + meta: { + ar3dviewProps: { + invalidShortString: '', + }, + }, + }; + + const invalidLongString = { + ...valid, + meta: { + ar3dviewProps: { + invalidLongString: 'a'.repeat(257), + }, + }, + }; + + const invalidArray = { + ...valid, + meta: { + ar3dviewProps: { + invalidArray: new Array(39).fill(20, 0, 39), + }, + }, + }; + + const invalidItemArray = { + ...valid, + meta: { + ar3dviewProps: { + invalidItemArray: [ + { some: 1 }, + ], + }, + }, + }; + + const invalidStringItemArray = { + ...valid, + meta: { + ar3dviewProps: { + invalidStringItemArray: [ + 'a'.repeat(257), + ], + }, + }, + }; + + const vs = this.files.validator.validateSync.bind(this.files.validator); + assert(vs('upload', invalidShortString).error.message.match(/invalidShortString/)); + assert(vs('upload', invalidLongString).error.message.match(/invalidLongString/)); + assert(vs('upload', 
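      /* each invalid fixture key is echoed in the schema validation error message, which is what these regexp matches assert */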
invalidArray).error.message.match(/invalidArray/)); + assert(vs('upload', invalidItemArray).error.message.match(/invalidItemArray/)); + assert(vs('upload', invalidStringItemArray).error.message.match(/invalidStringItemArray/)); + }); + }); +}); diff --git a/test/suites/upload.js b/test/suites/upload.js index eed77a70..614db4f8 100644 --- a/test/suites/upload.js +++ b/test/suites/upload.js @@ -642,5 +642,39 @@ describe('upload suite', function suite() { assert(vs('upload', invalidItemArray).error.message.match(/invalidItemArray/)); assert(vs('upload', invalidStringItemArray).error.message.match(/invalidStringItemArray/)); }); + + it('validates meta.pWidth/pHeight', function test() { + const invalidPwh = { + ...valid, + meta: { + name: 'some', + pWidth: 'foo', + pHeight: 'foo', + }, + }; + + const missingPw = { + ...valid, + meta: { + name: 'some', + pHeight: 10, + }, + }; + + const missingPh = { + ...valid, + meta: { + name: 'some', + pWidth: 10, + }, + }; + + const vs = this.files.validator.validateSync.bind(this.files.validator); + + assert(vs('upload', invalidPwh).error.message.match(/data\.meta\.pWidth should be integer, data\.meta\.pHeight should be integer/)); + assert(vs('upload', missingPh).error + .message.match(/data\.meta should have required property 'pHeight', data\.meta should match "then" schema/)); + assert(vs('upload', missingPw).error.message.match(/data\.meta should have required property 'pWidth', data\.meta should match "then" schema/)); + }); }); }); diff --git a/yarn.lock b/yarn.lock index c76407b9..e4ac5ba4 100644 --- a/yarn.lock +++ b/yarn.lock @@ -2007,6 +2007,21 @@ atomic-sleep@^1.0.0: resolved "https://registry.yarnpkg.com/atomic-sleep/-/atomic-sleep-1.0.0.tgz#eb85b77a601fc932cfe432c5acd364a9e2c9075b" integrity sha512-kNOjDqAh7px0XWNI+4QbzoiR/nTkHAWNud2uvnJquD1/x5a7EQZMJT0AczqK0Qn67oY/TTQ1LbUKajZpp3I9tQ== +aws-sdk@^2.914.0: + version "2.914.0" + resolved "https://registry.yarnpkg.com/aws-sdk/-/aws-sdk-2.914.0.tgz#55b303d8d685dbe13cc3f89887aaccf920e4127f" + integrity sha512-N7UZBvTMPr+lqlOmd/vZ6NMMaw1K2Y94fq3b6roZWouUQgn5unEixVpCuRhcYleOPSnikSCIrrDs2AXq5bXhLg== + dependencies: + buffer "4.9.2" + events "1.1.1" + ieee754 "1.1.13" + jmespath "0.15.0" + querystring "0.2.0" + sax "1.2.1" + url "0.10.3" + uuid "3.3.2" + xml2js "0.4.19" + aws-sign2@~0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" @@ -2045,7 +2060,7 @@ balanced-match@^1.0.0: resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= -base64-js@^1.3.0, base64-js@^1.3.1: +base64-js@^1.0.2, base64-js@^1.3.0, base64-js@^1.3.1: version "1.5.1" resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.5.1.tgz#1b1b440160a5bf7ad40b650f095963481903930a" integrity sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA== @@ -2200,6 +2215,15 @@ buffer-from@^1.0.0: resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== +buffer@4.9.2: + version "4.9.2" + resolved "https://registry.yarnpkg.com/buffer/-/buffer-4.9.2.tgz#230ead344002988644841ab0244af8c44bbe3ef8" + integrity sha512-xq+q3SRMOxGivLhBNaUdC64hDTQwejJ+H0T/NB1XMtTVEwNTrfFF3gAxiyW0Bu/xWEGhjVKgUcMhCrUy2+uCWg== + dependencies: + base64-js "^1.0.2" + ieee754 
"^1.1.4" + isarray "^1.0.0" + buffer@^5.5.0: version "5.7.1" resolved "https://registry.yarnpkg.com/buffer/-/buffer-5.7.1.tgz#ba62e7c13133053582197160851a8f648e99eed0" @@ -3799,6 +3823,11 @@ eventemitter3@^4.0.4, eventemitter3@^4.0.7: resolved "https://registry.yarnpkg.com/eventemitter3/-/eventemitter3-4.0.7.tgz#2de9b68f6528d5644ef5c59526a1b4a07306169f" integrity sha512-8guHBZCwKnFhYdHr2ysuRWErTwhoN2X8XELRlrRwpmfeY2jjuUN4taQMsULKUVo1K4DvZl+0pgfyoysHxvmvEw== +events@1.1.1: + version "1.1.1" + resolved "https://registry.yarnpkg.com/events/-/events-1.1.1.tgz#9ebdb7635ad099c70dcc4c2a1f5004288e8bd924" + integrity sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ= + execa@^0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/execa/-/execa-0.7.0.tgz#944becd34cc41ee32a63a9faf27ad5a65fc59777" @@ -4475,6 +4504,18 @@ glob@7.1.6, glob@^7.0.0, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4, glo once "^1.3.0" path-is-absolute "^1.0.0" +glob@^7.1.7: + version "7.1.7" + resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.7.tgz#3b193e9233f01d42d0b3f78294bbeeb418f94a90" + integrity sha512-OvD9ENzPLbegENnYP5UUfJIirTg4+XwMWGaQfQTY0JenxNvvIKP3U3/tAQSPIu/lHxXYSZmpXlUHeqAIdKzBLQ== + dependencies: + fs.realpath "^1.0.0" + inflight "^1.0.4" + inherits "2" + minimatch "^3.0.4" + once "^1.3.0" + path-is-absolute "^1.0.0" + global-dirs@^0.1.0, global-dirs@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/global-dirs/-/global-dirs-0.1.1.tgz#b319c0dd4607f353f3be9cca4c72fc148c49f445" @@ -4858,7 +4899,12 @@ iconv-lite@^0.6.2: dependencies: safer-buffer ">= 2.1.2 < 3.0.0" -ieee754@^1.1.13: +ieee754@1.1.13: + version "1.1.13" + resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.1.13.tgz#ec168558e95aa181fd87d37f55c32bbcb6708b84" + integrity sha512-4vf7I2LYV/HaWerSo3XmlMkp5eZ83i+/CDluXi/IGTs/O1sejBNhTtnxzmRZfvOUqj7lZjqHkeTvpgSFDlWZTg== + +ieee754@^1.1.13, ieee754@^1.1.4: version "1.2.1" resolved "https://registry.yarnpkg.com/ieee754/-/ieee754-1.2.1.tgz#8eb7a10a63fff25d15a57b001586d177d1b0d352" integrity sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA== @@ -5455,7 +5501,7 @@ java-properties@^1.0.0: resolved "https://registry.yarnpkg.com/java-properties/-/java-properties-1.0.2.tgz#ccd1fa73907438a5b5c38982269d0e771fe78211" integrity sha512-qjdpeo2yKlYTH7nFdK0vbZWuTCesk4o63v5iVOlhMQPfuIZQfW/HI35SjfhA+4qpg36rnFSvUK5b1m+ckIblQQ== -jmespath@^0.15.0: +jmespath@0.15.0, jmespath@^0.15.0: version "0.15.0" resolved "https://registry.yarnpkg.com/jmespath/-/jmespath-0.15.0.tgz#a3f222a9aae9f966f5d27c796510e28091764217" integrity sha1-o/Iiqarp+Wb10nx5ZRDigJF2Qhc= @@ -7789,6 +7835,11 @@ pumpify@^2.0.0: inherits "^2.0.3" pump "^3.0.0" +punycode@1.3.2: + version "1.3.2" + resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.3.2.tgz#9653a036fb7c1ee42342f2325cceefea3926c48d" + integrity sha1-llOgNvt8HuQjQvIyXM7v6jkmxI0= + punycode@^2.1.0, punycode@^2.1.1: version "2.1.1" resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" @@ -7823,6 +7874,11 @@ query-string@^6.8.2: split-on-first "^1.0.0" strict-uri-encode "^2.0.0" +querystring@0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/querystring/-/querystring-0.2.0.tgz#b209849203bb25df820da756e747005878521620" + integrity sha1-sgmEkgO7Jd+CDadW50cAWHhSFiA= + quick-format-unescaped@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/quick-format-unescaped/-/quick-format-unescaped-4.0.1.tgz#437a5ea1a0b61deb7605f8ab6a8fd3858dbeb701" @@ -8322,6 
+8378,11 @@ safe-regex@^2.1.1: resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== +sax@1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.1.tgz#7b8e656190b228e81a66aea748480d828cd2d37a" + integrity sha1-e45lYZCyKOgaZq6nSEgNgozS03o= + sax@>=0.6.0: version "1.2.4" resolved "https://registry.yarnpkg.com/sax/-/sax-1.2.4.tgz#2816234e2378bddc4e5354fab5caa895df7100d9" @@ -9519,6 +9580,14 @@ url-parse-lax@^1.0.0: dependencies: prepend-http "^1.0.1" +url@0.10.3: + version "0.10.3" + resolved "https://registry.yarnpkg.com/url/-/url-0.10.3.tgz#021e4d9c7705f21bbf37d03ceb58767402774c64" + integrity sha1-Ah5NnHcF8hu/N9A861h2dAJ3TGQ= + dependencies: + punycode "1.3.2" + querystring "0.2.0" + urlgrey@0.4.4: version "0.4.4" resolved "https://registry.yarnpkg.com/urlgrey/-/urlgrey-0.4.4.tgz#892fe95960805e85519f1cd4389f2cb4cbb7652f" @@ -9794,6 +9863,14 @@ xdg-basedir@^4.0.0: resolved "https://registry.yarnpkg.com/xdg-basedir/-/xdg-basedir-4.0.0.tgz#4bc8d9984403696225ef83a1573cbbcb4e79db13" integrity sha512-PSNhEJDejZYV7h50BohL09Er9VaIefr2LMAf3OEmpCkjOi34eYyQYAXUTjEQtZJTKcF0E2UKTh+osDLsgNim9Q== +xml2js@0.4.19: + version "0.4.19" + resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.19.tgz#686c20f213209e94abf0d1bcf1efaa291c7827a7" + integrity sha512-esZnJZJOiJR9wWKMyuvSE1y6Dq5LCuJanqhxslH2bxM6duahNZ+HMpCLhBQGZkbX6xRf8x1Y2eJlgt2q3qo49Q== + dependencies: + sax ">=0.6.0" + xmlbuilder "~9.0.1" + xml2js@^0.4.16: version "0.4.23" resolved "https://registry.yarnpkg.com/xml2js/-/xml2js-0.4.23.tgz#a0c69516752421eb2ac758ee4d4ccf58843eac66" @@ -9807,6 +9884,11 @@ xmlbuilder@~11.0.0: resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-11.0.1.tgz#be9bae1c8a046e76b31127726347d0ad7002beb3" integrity sha512-fDlsI/kFEx7gLvbecc0/ohLG50fugQp8ryHzMTuW9vSa1GJ0XYWKnhsUx7oie3G98+r56aTQIUB4kht42R3JvA== +xmlbuilder@~9.0.1: + version "9.0.7" + resolved "https://registry.yarnpkg.com/xmlbuilder/-/xmlbuilder-9.0.7.tgz#132ee63d2ec5565c557e20f4c22df9aca686b10d" + integrity sha1-Ey7mPS7FVlxVfiD0wi35rKaGsQ0= + xorshift@^0.2.0: version "0.2.1" resolved "https://registry.yarnpkg.com/xorshift/-/xorshift-0.2.1.tgz#fcd82267e9351c13f0fb9c73307f25331d29c63a"
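The resumable and signed-url upload tests above repeat the same per-part assertions (a presigned https URL on AWS_STORAGE_HOST_NAME, a pathname equal to the part filename, and the md5(owner)/uploadId/filename key layout). A small helper along the following lines could consolidate them. This is only a sketch and not part of the diff: the assertUploadPart name is hypothetical, and it reuses the assert, url, and md5 modules the suite already requires together with the AWS_STORAGE_HOST_NAME environment variable it already reads.

const assert = require('assert');
const url = require('url');
const md5 = require('md5');

// Hypothetical helper: verifies one entry of rsp.files the way the upload
// suite currently does inline in three separate tests.
function assertUploadPart(part, { uploadId, owner }) {
  assert.ok(part.location);

  // presigned URL shape
  const location = url.parse(part.location, true);
  assert.equal(location.protocol, 'https:');
  assert.equal(location.hostname, process.env.AWS_STORAGE_HOST_NAME);
  assert.equal(location.pathname, `/${part.filename}`);

  // key layout: md5(owner)/uploadId/filename
  const [ownerHash, id, filename] = part.filename.split('/');
  assert.equal(md5(owner), ownerHash);
  assert.equal(id, uploadId);
  assert.ok(filename);
}

// usage inside a test:
// rsp.files.forEach((part) => assertUploadPart(part, { uploadId: rsp.uploadId, owner }));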