From 7c54465f5b99ab85a2dbdb28b31c7af4f30b7b11 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Wed, 24 May 2017 18:00:04 +0100 Subject: [PATCH 01/63] New database structure --- app/db/structure.js | 23 +++- test/utils/data.js | 266 ++++++++++++++++++++++++++++++++++---------- 2 files changed, 227 insertions(+), 62 deletions(-) diff --git a/app/db/structure.js b/app/db/structure.js index 132afad0..f544c8a8 100644 --- a/app/db/structure.js +++ b/app/db/structure.js @@ -24,6 +24,11 @@ export function dropScenariosFiles () { return db.schema.dropTableIfExists('scenarios_files'); } +export function dropScenariosSettings () { + DEBUG && console.log('Dropping table: scenarios_settings'); + return db.schema.dropTableIfExists('scenarios_settings'); +} + export function dropOperations () { DEBUG && console.log('Dropping table: operations'); return db.schema.dropTableIfExists('operations'); @@ -61,8 +66,6 @@ export function createScenariosTable () { .references('projects.id') .onDelete('CASCADE'); table.json('admin_areas'); - // Arbitrary additional json data. - table.json('data'); table.timestamps(); table.unique(['project_id', 'name']); @@ -107,6 +110,20 @@ export function createScenariosFilesTable () { }); } +export function createScenariosSettingsTable () { + DEBUG && console.log('Creating table: scenarios_settings'); + return db.schema.createTable('scenarios_settings', table => { + table.string('key'); + table.string('value'); + table.integer('scenario_id').unsigned(); + table.foreign('scenario_id') + .references('scenarios.id') + .onDelete('CASCADE'); + table.timestamps(); + table.primary(['scenario_id', 'key']); + }); +} + export function createOperationsTable () { DEBUG && console.log('Creating table: operations'); return db.schema.createTable('operations', table => { @@ -144,10 +161,12 @@ export function setupStructure () { .then(() => dropProjectsFiles()) .then(() => dropOperationsLogs()) .then(() => dropOperations()) + .then(() => dropScenariosSettings()) .then(() => dropScenarios()) .then(() => dropProjects()) .then(() => createProjectsTable()) .then(() => createScenariosTable()) + .then(() => createScenariosSettingsTable()) .then(() => createOperationsTable()) .then(() => createOperationsLogsTable()) .then(() => createProjectsFilesTable()) diff --git a/test/utils/data.js b/test/utils/data.js index a80294e3..802805a6 100644 --- a/test/utils/data.js +++ b/test/utils/data.js @@ -73,12 +73,24 @@ export function project1000 () { 'project_id': 1000, 'master': true, 'created_at': '2017-02-01T12:00:01.000Z', - 'updated_at': '2017-02-01T12:00:01.000Z', - 'data': { - 'res_gen_at': 0, - 'rn_updated_at': 0 + 'updated_at': '2017-02-01T12:00:01.000Z' + })) + .then(() => scenarioSettings([ + { + 'scenario_id': 1000, + 'key': 'res_gen_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' + }, + { + 'scenario_id': 1000, + 'key': 'rn_updated_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' } - })); + ])); } // Project 1002 in pending state with one scenario @@ -99,12 +111,24 @@ export function project1002 () { 'project_id': 1002, 'master': true, 'created_at': '2017-02-01T12:00:02.000Z', - 'updated_at': '2017-02-01T12:00:02.000Z', - 'data': { - 'res_gen_at': 0, - 'rn_updated_at': 0 + 'updated_at': '2017-02-01T12:00:02.000Z' + })) + .then(() => scenarioSettings([ + { + 'scenario_id': 1002, + 'key': 'res_gen_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': 
'2017-02-01T12:00:01.000Z' + }, + { + 'scenario_id': 1002, + 'key': 'rn_updated_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' } - })); + ])); } // Project in pending state with one scenario and a profile file @@ -135,12 +159,24 @@ export function project1001 () { 'project_id': 1001, 'master': true, 'created_at': '2017-02-01T12:00:03.000Z', - 'updated_at': '2017-02-01T12:00:03.000Z', - 'data': { - 'res_gen_at': 0, - 'rn_updated_at': 0 - } + 'updated_at': '2017-02-01T12:00:03.000Z' })) + .then(() => scenarioSettings([ + { + 'scenario_id': 1001, + 'key': 'res_gen_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' + }, + { + 'scenario_id': 1001, + 'key': 'rn_updated_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' + } + ])) .then(() => scenarioFile({ 'id': 1001, 'name': 'poi_000000', @@ -182,12 +218,24 @@ export function project1003 () { 'project_id': 1003, 'master': true, 'created_at': '2017-02-01T12:00:04.000Z', - 'updated_at': '2017-02-01T12:00:04.000Z', - 'data': { - 'res_gen_at': 0, - 'rn_updated_at': 0 - } + 'updated_at': '2017-02-01T12:00:04.000Z' })) + .then(() => scenarioSettings([ + { + 'scenario_id': 1003, + 'key': 'res_gen_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' + }, + { + 'scenario_id': 1003, + 'key': 'rn_updated_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' + } + ])) .then(() => scenarioFile({ 'id': 1003, 'name': 'road-network_000000', @@ -251,12 +299,24 @@ export function project1004 () { 'project_id': 1004, 'master': true, 'created_at': '2017-02-01T12:00:05.000Z', - 'updated_at': '2017-02-01T12:00:05.000Z', - 'data': { - 'res_gen_at': 0, - 'rn_updated_at': 0 - } + 'updated_at': '2017-02-01T12:00:05.000Z' })) + .then(() => scenarioSettings([ + { + 'scenario_id': 1004, + 'key': 'res_gen_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' + }, + { + 'scenario_id': 1004, + 'key': 'rn_updated_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' + } + ])) .then(() => scenarioFile([ { 'id': 1004, @@ -335,12 +395,24 @@ export function project1100 () { 'master': true, 'admin_areas': JSON.stringify(ADMIN_AREAS), 'created_at': '2017-02-01T12:00:06.000Z', - 'updated_at': '2017-02-01T12:00:06.000Z', - 'data': { - 'res_gen_at': 0, - 'rn_updated_at': 0 - } + 'updated_at': '2017-02-01T12:00:06.000Z' })) + .then(() => scenarioSettings([ + { + 'scenario_id': 1100, + 'key': 'res_gen_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' + }, + { + 'scenario_id': 1100, + 'key': 'rn_updated_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' + } + ])) .then(() => scenarioFile([ { 'id': 1100, @@ -420,11 +492,7 @@ export function project1200 () { 'master': true, 'admin_areas': JSON.stringify(ADMIN_AREAS), 'created_at': '2017-02-01T12:00:07.000Z', - 'updated_at': '2017-02-01T12:00:07.000Z', - 'data': { - 'res_gen_at': 0, - 'rn_updated_at': 0 - } + 'updated_at': '2017-02-01T12:00:07.000Z' }, { 'id': 1201, @@ -435,11 +503,37 @@ export function project1200 () { 'master': false, 'admin_areas': JSON.stringify(ADMIN_AREAS), 'created_at': '2017-02-01T12:00:07.000Z', - 'updated_at': 
'2017-02-01T12:00:07.000Z', - 'data': { - 'res_gen_at': 0, - 'rn_updated_at': 0 - } + 'updated_at': '2017-02-01T12:00:07.000Z' + } + ])) + .then(() => scenarioSettings([ + { + 'scenario_id': 1200, + 'key': 'res_gen_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' + }, + { + 'scenario_id': 1200, + 'key': 'rn_updated_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' + }, + { + 'scenario_id': 1201, + 'key': 'res_gen_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' + }, + { + 'scenario_id': 1201, + 'key': 'rn_updated_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' } ])) .then(() => scenarioFile([ @@ -543,12 +637,24 @@ export function project2000 () { 'master': true, 'admin_areas': JSON.stringify(ADMIN_AREAS), 'created_at': '2017-02-01T12:00:06.000Z', - 'updated_at': '2017-02-01T12:00:06.000Z', - 'data': { - 'res_gen_at': 0, - 'rn_updated_at': 0 - } + 'updated_at': '2017-02-01T12:00:06.000Z' })) + .then(() => scenarioSettings([ + { + 'scenario_id': 2000, + 'key': 'res_gen_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' + }, + { + 'scenario_id': 2000, + 'key': 'rn_updated_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' + } + ])) .then(() => scenarioFile([ { 'id': 2000, @@ -612,6 +718,10 @@ function scenarioFile (data) { return db.batchInsert('scenarios_files', _.isArray(data) ? data : [data]); } +function scenarioSettings (data) { + return db.batchInsert('scenarios_settings', _.isArray(data) ? data : [data]); +} + // // Functions for project creation. // @@ -634,12 +744,24 @@ export function projectBarebones (id) { 'project_id': id, 'master': true, 'created_at': '2017-02-01T12:00:00.000Z', - 'updated_at': '2017-02-01T12:00:00.000Z', - 'data': { - 'res_gen_at': 0, - 'rn_updated_at': 0 + 'updated_at': '2017-02-01T12:00:00.000Z' + })) + .then(() => scenarioSettings([ + { + 'scenario_id': id, + 'key': 'res_gen_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' + }, + { + 'scenario_id': id, + 'key': 'rn_updated_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' } - })); + ])); } // Insert a project, a scenario, a project file and a scenario file. 
@@ -670,12 +792,24 @@ export function projectPendingWithFiles (id) { 'project_id': id, 'master': true, 'created_at': '2017-02-01T12:00:00.000Z', - 'updated_at': '2017-02-01T12:00:00.000Z', - 'data': { - 'res_gen_at': 0, - 'rn_updated_at': 0 - } + 'updated_at': '2017-02-01T12:00:00.000Z' })) + .then(() => scenarioSettings([ + { + 'scenario_id': id, + 'key': 'res_gen_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' + }, + { + 'scenario_id': id, + 'key': 'rn_updated_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' + } + ])) .then(() => scenarioFile({ 'id': id, 'name': 'poi_000000', @@ -740,12 +874,24 @@ export function projectPendingWithAllFiles (id) { 'project_id': id, 'master': true, 'created_at': '2017-02-01T12:00:00.000Z', - 'updated_at': '2017-02-01T12:00:00.000Z', - 'data': { - 'res_gen_at': 0, - 'rn_updated_at': 0 - } + 'updated_at': '2017-02-01T12:00:00.000Z' })) + .then(() => scenarioSettings([ + { + 'scenario_id': id, + 'key': 'res_gen_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' + }, + { + 'scenario_id': id, + 'key': 'rn_updated_at', + 'value': 0, + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' + } + ])) .then(() => scenarioFile([ { 'id': id, From 9760d0ecaff9793d1e577d28d93cd20848745b19 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Wed, 24 May 2017 18:06:19 +0100 Subject: [PATCH 02/63] Remove arbitrary data column Settigns tables should be used instead --- app/db/structure.js | 4 ---- 1 file changed, 4 deletions(-) diff --git a/app/db/structure.js b/app/db/structure.js index f544c8a8..40292835 100644 --- a/app/db/structure.js +++ b/app/db/structure.js @@ -83,8 +83,6 @@ export function createProjectsFilesTable () { table.foreign('project_id') .references('projects.id') .onDelete('CASCADE'); - // Arbitrary additional json data. - table.json('data'); table.timestamps(); }); } @@ -104,8 +102,6 @@ export function createScenariosFilesTable () { table.foreign('scenario_id') .references('scenarios.id') .onDelete('CASCADE'); - // Arbitrary additional json data. - table.json('data'); table.timestamps(); }); } From ddaf154f5d619d745faa767187c266060cef1d5c Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Wed, 24 May 2017 18:07:24 +0100 Subject: [PATCH 03/63] Implement usage of scenario settings --- app/plugins/rra-osm-p2p-server.js | 18 +++---------- app/routes/projects--create.js | 40 +++++++++++++++++++--------- app/routes/scenarios--create.js | 41 ++++++++++++++++++++++------ app/routes/scenarios--get.js | 44 +++++++++++++++++++++---------- app/routes/scenarios--update.js | 7 ++--- 5 files changed, 99 insertions(+), 51 deletions(-) diff --git a/app/plugins/rra-osm-p2p-server.js b/app/plugins/rra-osm-p2p-server.js index 9b45bc90..434c37c9 100644 --- a/app/plugins/rra-osm-p2p-server.js +++ b/app/plugins/rra-osm-p2p-server.js @@ -48,20 +48,10 @@ const rraOsmRoute = { if (path.match(/changeset\/[0-9]+\/upload/)) { // Update the database with the road generation time. 
- db.transaction(function (trx) { - return trx('scenarios') - .select('*') - .where('id', scId) - .first() - .then(scenario => { - let data = scenario.data; - data.rn_updated_at = (new Date()); - return trx('scenarios') - .update({ data }) - .where('id', scId); - }) - .then(() => trx.commit()); - }) + db('scenarios_settings') + .update({value: (new Date())}) + .where('scenario_id', scId) + .where('key', 'rn_updated_at') .then(() => handleIt()); } else { handleIt(); diff --git a/app/routes/projects--create.js b/app/routes/projects--create.js index 98a70e26..e6005a35 100644 --- a/app/routes/projects--create.js +++ b/app/routes/projects--create.js @@ -17,32 +17,48 @@ module.exports = [ } }, handler: (request, reply) => { + const now = new Date(); const data = request.payload; const base = { status: 'pending', - created_at: (new Date()), - updated_at: (new Date()) + created_at: now, + updated_at: now }; db('projects') .returning('*') .insert(Object.assign({}, data, base)) - .then(res => { - const projectData = res[0]; - + .then(projectData => { + projectData = projectData[0]; // Create first scenario. This is needed to store the related files. - db('scenarios') + return db('scenarios') + .returning('*') .insert({ name: 'Main scenario', project_id: projectData.id, status: 'pending', master: true, - created_at: (new Date()), - updated_at: (new Date()), - data: { - res_gen_at: 0, - rn_updated_at: 0 - } + created_at: now, + updated_at: now + }) + .then(scenarioData => { + scenarioData = scenarioData[0]; + return db.batchInsert('scenarios_settings', [ + { + scenario_id: scenarioData.id, + key: 'res_gen_at', + value: 0, + created_at: now, + updated_at: now + }, + { + scenario_id: scenarioData.id, + key: 'rn_updated_at', + value: 0, + created_at: now, + updated_at: now + } + ]); }) .then(() => reply(projectData)) .catch(err => reply(Boom.badImplementation(err))); diff --git a/app/routes/scenarios--create.js b/app/routes/scenarios--create.js index f3885a5e..b1862771 100644 --- a/app/routes/scenarios--create.js +++ b/app/routes/scenarios--create.js @@ -12,6 +12,7 @@ import { parseFormData } from '../utils/utils'; // import { closeDatabase } from '../services/rra-osm-p2p'; function handler (params, payload, reply) { + const now = new Date(); const name = payload.name; const description = payload.description; const source = payload.roadNetworkSource; @@ -46,23 +47,45 @@ function handler (params, payload, reply) { status: 'pending', master: false, project_id: params.projId, - created_at: (new Date()), - updated_at: (new Date()), - data: { - res_gen_at: 0, - rn_updated_at: 0 - } + created_at: now, + updated_at: now }; return db('scenarios') .returning('*') .insert(info) - .then(res => res[0]) .catch(err => { if (err.constraint === 'scenarios_project_id_name_unique') { throw new DataConflictError(`Scenario name already in use for this project: ${name}`); } throw err; + }) + .then(scenarios => { + let scenario = scenarios[0]; + return db.batchInsert('scenarios_settings', [ + { + scenario_id: scenario.id, + key: 'res_gen_at', + value: 0, + created_at: now, + updated_at: now + }, + { + scenario_id: scenario.id, + key: 'rn_updated_at', + value: 0, + created_at: now, + updated_at: now + } + ]) + .then(() => scenario); + }) + .then(scenario => { + scenario.data = { + res_gen_at: 0, + rn_updated_at: 0 + }; + return scenario; }); }) // Start operation and return data to continue. 
@@ -94,7 +117,7 @@ function handler (params, payload, reply) { }); } -module.exports = [ +export default [ { path: '/projects/{projId}/scenarios', method: 'POST', @@ -166,6 +189,8 @@ module.exports = [ } }, handler: (request, reply) => { + // Recursively search for an available name by appending a (n) suffix + // to the input value. const findName = (name) => { let fn = (no) => { let n = `${name} (${no})`; diff --git a/app/routes/scenarios--get.js b/app/routes/scenarios--get.js index 7e154ffe..6ace819a 100644 --- a/app/routes/scenarios--get.js +++ b/app/routes/scenarios--get.js @@ -15,7 +15,7 @@ const routeSingleScenarioConfig = { } }; -module.exports = [ +export default [ { path: '/projects/{projId}/scenarios', method: 'GET', @@ -32,13 +32,10 @@ module.exports = [ Promise.all([ db('scenarios').where('project_id', request.params.projId).count('id'), - db.select('*').from('scenarios').where('project_id', request.params.projId).orderBy('created_at').offset(offset).limit(limit) + db.select('id').from('scenarios').where('project_id', request.params.projId).orderBy('created_at').offset(offset).limit(limit) ]).then(res => { const [count, scenarios] = res; - return Promise.map(scenarios, s => attachScenarioFiles(s) - .then(s => attachOperation('generate-analysis', 'gen_analysis', s)) - .then(s => attachOperation('scenario-create', 'scen_create', s)) - ) + return Promise.map(scenarios, s => loadScenario(request.params.projId, s.id)) .then(scenarios => { request.count = parseInt(count[0].count); reply(scenarios); @@ -78,19 +75,25 @@ module.exports = [ } ]; -function singleScenarioHandler (request, reply) { - db.select('*') +export function loadScenario (projId, scId) { + return db.select('*') .from('scenarios') - .where('id', request.params.scId) - .where('project_id', request.params.projId) + .where('id', scId) + .where('project_id', projId) .orderBy('created_at') - .then(scenarios => { - if (!scenarios.length) throw new ScenarioNotFoundError(); - return scenarios[0]; + .first() + .then(scenario => { + if (!scenario) throw new ScenarioNotFoundError(); + return scenario; }) + .then(scenario => attachScenarioSettings(scenario)) .then(scenario => attachScenarioFiles(scenario)) .then(scenario => attachOperation('generate-analysis', 'gen_analysis', scenario)) - .then(scenario => attachOperation('scenario-create', 'scen_create', scenario)) + .then(scenario => attachOperation('scenario-create', 'scen_create', scenario)); +} + +function singleScenarioHandler (request, reply) { + loadScenario(request.params.projId, request.params.scId) .then(scenario => reply(scenario)) .catch(ScenarioNotFoundError, e => reply(Boom.notFound(e.message))) .catch(err => { @@ -99,6 +102,19 @@ function singleScenarioHandler (request, reply) { }); } +function attachScenarioSettings (scenario) { + return db.select('key', 'value') + .from('scenarios_settings') + .where('scenario_id', scenario.id) + .then(data => { + scenario.data = {}; + data.forEach(o => { + scenario.data[o.key] = o.value; + }); + return scenario; + }); +} + function attachScenarioFiles (scenario) { return db.select('id', 'name', 'type', 'path', 'created_at') .from('scenarios_files') diff --git a/app/routes/scenarios--update.js b/app/routes/scenarios--update.js index 653dffc4..aec6e71c 100644 --- a/app/routes/scenarios--update.js +++ b/app/routes/scenarios--update.js @@ -4,7 +4,7 @@ import Boom from 'boom'; import Promise from 'bluebird'; import db from '../db/'; - +import { loadScenario } from './scenarios--get'; import { ScenarioNotFoundError, DataConflictError 
} from '../utils/errors'; module.exports = [ @@ -52,7 +52,7 @@ module.exports = [ executor .then(update => db('scenarios') - .returning('*') + .returning('id') .update(update) .where('id', request.params.scId) .where('project_id', request.params.projId) @@ -61,7 +61,8 @@ module.exports = [ if (!scenarios.length) throw new ScenarioNotFoundError(); return scenarios[0]; }) - .then((scenario) => db('projects').update({updated_at: (new Date())}).where('id', request.params.projId).then(() => scenario)) + .then(scenarioId => loadScenario(request.params.projId, scenarioId)) + .then(scenario => db('projects').update({updated_at: (new Date())}).where('id', request.params.projId).then(() => scenario)) .then(scenario => reply(scenario)) .catch(err => { if (err.constraint === 'scenarios_project_id_name_unique') { From f77b9f147a3ed4a6f48e49cb0de153255aa90b98 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Wed, 24 May 2017 18:07:51 +0100 Subject: [PATCH 04/63] Improve tests --- test/test-projects.js | 11 +++++++++++ test/test-scenarios.js | 36 ++++++++++++++++++++++++++++-------- 2 files changed, 39 insertions(+), 8 deletions(-) diff --git a/test/test-projects.js b/test/test-projects.js index aea6b035..419bce50 100644 --- a/test/test-projects.js +++ b/test/test-projects.js @@ -334,6 +334,17 @@ describe('Projects', function () { .where('project_id', result.id) .where('master', true) .first() + .then(scenario => db.select('key', 'value') + .from('scenarios_settings') + .where('scenario_id', scenario.id) + .then(data => { + scenario.data = {}; + data.forEach(o => { + scenario.data[o.key] = o.value; + }); + return scenario; + }) + ) .then(scenario => { assert.equal(scenario.name, 'Main scenario'); assert.equal(scenario.data.res_gen_at, 0); diff --git a/test/test-scenarios.js b/test/test-scenarios.js index aa266729..a729415f 100644 --- a/test/test-scenarios.js +++ b/test/test-scenarios.js @@ -38,7 +38,14 @@ describe('Scenarios', function () { assert.equal(res.statusCode, 200, 'Status code is 200'); var result = res.result; assert.equal(result.meta.found, 2); - assert.equal(result.results[0].name, 'Main scenario 1200'); + let scenario = result.results[0]; + assert.equal(scenario.name, 'Main scenario 1200'); + assert.equal(scenario.description, 'Scenario 1200 created when the project 1200 was created'); + assert.equal(scenario.status, 'active'); + assert.equal(scenario.master, true); + assert.deepEqual(scenario.data, { res_gen_at: '0', rn_updated_at: '0' }); + assert.equal(scenario.gen_analysis, null); + assert.equal(scenario.scen_create, null); }); }); @@ -72,8 +79,15 @@ describe('Scenarios', function () { url: '/projects/1000/scenarios/1000' }).then(res => { assert.equal(res.statusCode, 200, 'Status code is 200'); - assert.equal(res.result.id, 1000); - assert.equal(res.result.name, 'Main scenario'); + let scenario = res.result; + assert.equal(scenario.id, 1000); + assert.equal(scenario.name, 'Main scenario'); + assert.equal(scenario.description, 'Ghost scenario created when the project was created'); + assert.equal(scenario.status, 'pending'); + assert.equal(scenario.master, true); + assert.deepEqual(scenario.data, { res_gen_at: '0', rn_updated_at: '0' }); + assert.equal(scenario.gen_analysis, null); + assert.equal(scenario.scen_create, null); }); }); }); @@ -280,11 +294,17 @@ describe('Scenarios', function () { } }).then(res => { assert.equal(res.statusCode, 200, 'Status code is 200'); - var result = res.result; - assert.equal(result.name, 'updated name'); - assert.equal(result.description, 'updated 
description'); - assert.equal((new Date(result.created_at)).toISOString(), '2017-02-01T12:00:01.000Z'); - assert.notEqual(result.created_at, result.updated_at); + var scenario = res.result; + assert.equal(scenario.name, 'updated name'); + assert.equal(scenario.description, 'updated description'); + assert.equal(scenario.status, 'pending'); + assert.equal(scenario.master, true); + assert.equal(scenario.files.length, 0); + assert.deepEqual(scenario.data, { res_gen_at: '0', rn_updated_at: '0' }); + assert.equal(scenario.gen_analysis, null); + assert.equal(scenario.scen_create, null); + assert.equal((new Date(scenario.created_at)).toISOString(), '2017-02-01T12:00:01.000Z'); + assert.notEqual(scenario.created_at, scenario.updated_at); }); }); From 335f9eff32593daa5953654d6dccad62472f50ca Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Thu, 25 May 2017 18:04:52 +0100 Subject: [PATCH 05/63] Extract admin areas to dedicated table Add fixture data --- app/db/structure.js | 57 +++++++++++++++++--------- test/utils/data.js | 97 ++++++++++++++++++++++++++++----------------- 2 files changed, 99 insertions(+), 55 deletions(-) diff --git a/app/db/structure.js b/app/db/structure.js index 40292835..33558783 100644 --- a/app/db/structure.js +++ b/app/db/structure.js @@ -9,16 +9,21 @@ export function dropProjects () { return db.schema.dropTableIfExists('projects'); } -export function dropScenarios () { - DEBUG && console.log('Dropping table: scenarios'); - return db.schema.dropTableIfExists('scenarios'); -} - export function dropProjectsFiles () { DEBUG && console.log('Dropping table: projects_files'); return db.schema.dropTableIfExists('projects_files'); } +export function dropProjectsAA () { + DEBUG && console.log('Dropping table: projects_aa'); + return db.schema.dropTableIfExists('projects_aa'); +} + +export function dropScenarios () { + DEBUG && console.log('Dropping table: scenarios'); + return db.schema.dropTableIfExists('scenarios'); +} + export function dropScenariosFiles () { DEBUG && console.log('Dropping table: scenarios_files'); return db.schema.dropTableIfExists('scenarios_files'); @@ -53,37 +58,51 @@ export function createProjectsTable () { }); } -export function createScenariosTable () { - DEBUG && console.log('Creating table: scenarios'); - return db.schema.createTable('scenarios', table => { +export function createProjectsFilesTable () { + DEBUG && console.log('Creating table: projects_files'); + return db.schema.createTable('projects_files', table => { table.increments('id').primary(); table.string('name'); - table.text('description'); - table.string('status'); - table.boolean('master').defaultTo(false); + table.string('type'); + table.string('path'); table.integer('project_id').unsigned(); table.foreign('project_id') .references('projects.id') .onDelete('CASCADE'); - table.json('admin_areas'); table.timestamps(); - - table.unique(['project_id', 'name']); }); } -export function createProjectsFilesTable () { - DEBUG && console.log('Creating table: projects_files'); - return db.schema.createTable('projects_files', table => { +export function createProjectsAATable () { + DEBUG && console.log('Creating table: projects_aa'); + return db.schema.createTable('projects_aa', table => { table.increments('id').primary(); table.string('name'); table.string('type'); - table.string('path'); + table.json('geometry'); + table.integer('project_id').unsigned(); + table.foreign('project_id') + .references('projects.id') + .onDelete('CASCADE'); + }); +} + +export function createScenariosTable () { + DEBUG 
&& console.log('Creating table: scenarios'); + return db.schema.createTable('scenarios', table => { + table.increments('id').primary(); + table.string('name'); + table.text('description'); + table.string('status'); + table.boolean('master').defaultTo(false); table.integer('project_id').unsigned(); table.foreign('project_id') .references('projects.id') .onDelete('CASCADE'); + table.json('admin_areas'); table.timestamps(); + + table.unique(['project_id', 'name']); }); } @@ -155,12 +174,14 @@ export function createOperationsLogsTable () { export function setupStructure () { return dropScenariosFiles() .then(() => dropProjectsFiles()) + .then(() => dropProjectsAA()) .then(() => dropOperationsLogs()) .then(() => dropOperations()) .then(() => dropScenariosSettings()) .then(() => dropScenarios()) .then(() => dropProjects()) .then(() => createProjectsTable()) + .then(() => createProjectsAATable()) .then(() => createScenariosTable()) .then(() => createScenariosSettingsTable()) .then(() => createOperationsTable()) diff --git a/test/utils/data.js b/test/utils/data.js index 802805a6..ff6e4c6a 100644 --- a/test/utils/data.js +++ b/test/utils/data.js @@ -21,39 +21,31 @@ const FILE_POI = path.join(__dirname, 'data-sergipe/poi-townhalls.geojson'); const ADMIN_AREAS_BBOX = bbox(readJSONSync(FILE_ADMIN)); -// Admin areas from admin-boundaries.geojson -export const ADMIN_AREAS = [ - {'name': 'Distrito de Abadia', 'selected': false}, - {'name': 'Distrito de Itanhi', 'selected': false}, - {'name': 'Distrito de Conceição de Campinas', 'selected': false}, - {'name': 'Distrito de Sambaíba', 'selected': false}, - {'name': 'Distrito de Buril', 'selected': false}, - {'name': 'Distrito de Itamira', 'selected': false}, - {'name': 'Estância', 'selected': false}, - {'name': 'Itaporanga d\'Ajuda', 'selected': false}, - {'name': 'Salgado', 'selected': true}, - {'name': 'Arauá', 'selected': false}, - {'name': 'Boquim', 'selected': false}, - {'name': 'Cristinápolis', 'selected': false}, - {'name': 'Indiaroba', 'selected': false}, - {'name': 'Itabaianinha', 'selected': false}, - {'name': 'Pedrinhas', 'selected': false}, - {'name': 'Santa Luzia do Itanhy', 'selected': false}, - {'name': 'Tomar do Geru', 'selected': false}, - {'name': 'Umbaúba', 'selected': false}, - {'name': 'Pedra Mole', 'selected': false}, - {'name': 'Campo do Brito', 'selected': false}, - {'name': 'Itabaiana', 'selected': true}, - {'name': 'Lagarto', 'selected': true}, - {'name': 'Macambira', 'selected': false}, - {'name': 'Poço Verde', 'selected': true}, - {'name': 'Simão Dias', 'selected': false}, - {'name': 'São Domingos', 'selected': false}, - {'name': 'Palmares', 'selected': false}, - {'name': 'Riachão do Dantas', 'selected': false}, - {'name': 'Samambaia', 'selected': false}, - {'name': 'Tobias Barreto', 'selected': false} -]; +// Parse admin areas. 
+let adminAreas = readJSONSync(FILE_ADMIN); +adminAreas = _(adminAreas.features) + .filter(o => !!o.properties.name && o.geometry.type !== 'Point') + .sortBy(o => _.kebabCase(o.properties.name)) + .map(o => { + return { + name: o.properties.name, + type: o.properties.type || 'Admin Area', + geometry: JSON.stringify(o.geometry.coordinates) + }; + }) + .value(); + +export function getAdminAreasForProject (projId) { + return _.cloneDeep(adminAreas).map((o, i) => { + o.id = parseInt(`${projId}0${i + 1}`); + o.project_id = projId; + return o; + }); +} + +export function getSelectedAdminAreas (projId) { + return [13, 16, 21, 23].map(o => parseInt(`${projId}0${o}`)); +} // Project in pending state with one scenario. export function project1000 () { @@ -383,6 +375,7 @@ export function project1100 () { 'updated_at': '2017-02-01T12:00:06.000Z' } ])) + .then(() => projectAA(getAdminAreasForProject(1100))) .then(() => putObjectFromFile(bucket, 'project-1100/profile_000000', FILE_PROFILE)) .then(() => putObjectFromFile(bucket, 'project-1100/villages_000000', FILE_VILLAGES)) .then(() => putObjectFromFile(bucket, 'project-1100/admin-bounds_000000', FILE_ADMIN)) @@ -393,7 +386,6 @@ export function project1100 () { 'status': 'active', 'project_id': 1100, 'master': true, - 'admin_areas': JSON.stringify(ADMIN_AREAS), 'created_at': '2017-02-01T12:00:06.000Z', 'updated_at': '2017-02-01T12:00:06.000Z' })) @@ -411,6 +403,13 @@ export function project1100 () { 'value': 0, 'created_at': '2017-02-01T12:00:01.000Z', 'updated_at': '2017-02-01T12:00:01.000Z' + }, + { + 'scenario_id': 1100, + 'key': 'admin_areas', + 'value': JSON.stringify(getSelectedAdminAreas(1100)), + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' } ])) .then(() => scenarioFile([ @@ -479,6 +478,7 @@ export function project1200 () { 'updated_at': '2017-02-01T12:00:07.000Z' } ])) + .then(() => projectAA(getAdminAreasForProject(1200))) .then(() => putObjectFromFile(bucket, 'project-1200/profile_000000', FILE_PROFILE)) .then(() => putObjectFromFile(bucket, 'project-1200/villages_000000', FILE_VILLAGES)) .then(() => putObjectFromFile(bucket, 'project-1200/admin-bounds_000000', FILE_ADMIN)) @@ -490,7 +490,6 @@ export function project1200 () { 'status': 'active', 'project_id': 1200, 'master': true, - 'admin_areas': JSON.stringify(ADMIN_AREAS), 'created_at': '2017-02-01T12:00:07.000Z', 'updated_at': '2017-02-01T12:00:07.000Z' }, @@ -501,7 +500,6 @@ export function project1200 () { 'status': 'active', 'project_id': 1200, 'master': false, - 'admin_areas': JSON.stringify(ADMIN_AREAS), 'created_at': '2017-02-01T12:00:07.000Z', 'updated_at': '2017-02-01T12:00:07.000Z' } @@ -521,6 +519,13 @@ export function project1200 () { 'created_at': '2017-02-01T12:00:01.000Z', 'updated_at': '2017-02-01T12:00:01.000Z' }, + { + 'scenario_id': 1200, + 'key': 'admin_areas', + 'value': JSON.stringify(getSelectedAdminAreas(1200)), + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' + }, { 'scenario_id': 1201, 'key': 'res_gen_at', @@ -534,6 +539,13 @@ export function project1200 () { 'value': 0, 'created_at': '2017-02-01T12:00:01.000Z', 'updated_at': '2017-02-01T12:00:01.000Z' + }, + { + 'scenario_id': 1201, + 'key': 'admin_areas', + 'value': JSON.stringify(getSelectedAdminAreas(1200)), + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' } ])) .then(() => scenarioFile([ @@ -625,6 +637,7 @@ export function project2000 () { 'updated_at': '2017-02-01T12:00:06.000Z' } ])) + 
.then(() => projectAA(getAdminAreasForProject(2000))) .then(() => putObjectFromFile(bucket, 'project-2000/profile_000000', FILE_PROFILE)) .then(() => putObjectFromFile(bucket, 'project-2000/villages_000000', FILE_VILLAGES)) .then(() => putObjectFromFile(bucket, 'project-2000/admin-bounds_000000', FILE_ADMIN)) @@ -635,7 +648,6 @@ export function project2000 () { 'status': 'active', 'project_id': 2000, 'master': true, - 'admin_areas': JSON.stringify(ADMIN_AREAS), 'created_at': '2017-02-01T12:00:06.000Z', 'updated_at': '2017-02-01T12:00:06.000Z' })) @@ -653,6 +665,13 @@ export function project2000 () { 'value': 0, 'created_at': '2017-02-01T12:00:01.000Z', 'updated_at': '2017-02-01T12:00:01.000Z' + }, + { + 'scenario_id': 2000, + 'key': 'admin_areas', + 'value': JSON.stringify(getSelectedAdminAreas(2000)), + 'created_at': '2017-02-01T12:00:01.000Z', + 'updated_at': '2017-02-01T12:00:01.000Z' } ])) .then(() => scenarioFile([ @@ -722,6 +741,10 @@ function scenarioSettings (data) { return db.batchInsert('scenarios_settings', _.isArray(data) ? data : [data]); } +function projectAA (data) { + return db.batchInsert('projects_aa', _.isArray(data) ? data : [data]); +} + // // Functions for project creation. // From eca497683a442dc1650e9bb421114e630248dfea Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Thu, 25 May 2017 18:06:51 +0100 Subject: [PATCH 06/63] Update routes to handle database changes - Parse admin areas form file and store them in the table - Store selected admin areas in the scenario settings table - Update scenario model to include the id of the admin area - Update route to select admin areas --- app/routes/scenarios--gen-results.js | 20 +++++++----- app/routes/scenarios--get.js | 35 +++++++++++++++++++++ app/routes/scenarios--update.js | 26 ++++++++------- app/services/project-setup/project-setup.js | 32 +++++++++++++++---- 4 files changed, 87 insertions(+), 26 deletions(-) diff --git a/app/routes/scenarios--gen-results.js b/app/routes/scenarios--gen-results.js index 51c9f024..f57ff8d5 100644 --- a/app/routes/scenarios--gen-results.js +++ b/app/routes/scenarios--gen-results.js @@ -49,7 +49,6 @@ module.exports = [ return project; }) // Valid scenario ? - // Admin areas selected ? .then(() => db.select('*') .from('scenarios') .where('id', scId) @@ -58,12 +57,17 @@ module.exports = [ if (!scenarios.length) throw new ScenarioNotFoundError(); return scenarios[0]; }) - .then(scenario => { - let hasSelected = scenario.admin_areas.some(o => o.selected); - if (!hasSelected) { - throw new DataConflictError('No admin areas selected'); - } - }) + // Admin areas selected ? + .then(scenario => db('scenarios_settings') + .select('value') + .where('key', 'admin_areas') + .where('scenario_id', scenario.id) + .first() + .then(setting => { + if (setting.value === '[]') { + throw new DataConflictError('No admin areas selected'); + } + }) ) // Good to go. // Delete all existing results. (s3 and database) @@ -79,7 +83,7 @@ module.exports = [ .then(() => db('scenarios_files') .whereIn('id', ids) .del()); - }) + })) ) // Create an operation. 
.then(() => { diff --git a/app/routes/scenarios--get.js b/app/routes/scenarios--get.js index 6ace819a..acd50f0c 100644 --- a/app/routes/scenarios--get.js +++ b/app/routes/scenarios--get.js @@ -86,6 +86,7 @@ export function loadScenario (projId, scId) { if (!scenario) throw new ScenarioNotFoundError(); return scenario; }) + .then(scenario => attachAdminAreas(scenario)) .then(scenario => attachScenarioSettings(scenario)) .then(scenario => attachScenarioFiles(scenario)) .then(scenario => attachOperation('generate-analysis', 'gen_analysis', scenario)) @@ -105,6 +106,7 @@ function singleScenarioHandler (request, reply) { function attachScenarioSettings (scenario) { return db.select('key', 'value') .from('scenarios_settings') + .whereIn('key', ['res_gen_at', 'rn_updated_at']) .where('scenario_id', scenario.id) .then(data => { scenario.data = {}; @@ -125,6 +127,39 @@ function attachScenarioFiles (scenario) { }); } +function attachAdminAreas (scenario) { + return Promise.all([ + // Get admin areas. + db('projects_aa') + .select('id', 'name', 'type') + .where('project_id', scenario.project_id), + // Get selected ids. + db('scenarios_settings') + .select('value') + .where('key', 'admin_areas') + .where('scenario_id', scenario.id) + .first() + ]) + .then(data => { + let [aa, selected] = data; + + if (!aa.length) { + scenario.admin_areas = null; + } else { + selected = selected ? JSON.parse(selected.value) : []; + + // Mark selected as selected. + aa = aa.map(o => { + o.selected = selected.indexOf(o.id) !== -1; + return o; + }); + scenario.admin_areas = aa; + } + + return scenario; + }); +} + function attachOperation (opName, prop, scenario) { return db.select('*') .from('operations') diff --git a/app/routes/scenarios--update.js b/app/routes/scenarios--update.js index aec6e71c..d98f8018 100644 --- a/app/routes/scenarios--update.js +++ b/app/routes/scenarios--update.js @@ -36,22 +36,24 @@ module.exports = [ let executor = Promise.resolve(update); if (typeof data.selectedAdminAreas !== 'undefined') { - executor = db('scenarios') - .select('admin_areas') - .where('id', request.params.scId) + // Get all the admin areas ids to perform some validation. + executor = db('projects_aa') + .select('id') .where('project_id', request.params.projId) - .then(res => { - let adminAreas = res[0].admin_areas.map(o => { - o.selected = data.selectedAdminAreas.indexOf(o.name) !== -1; - return o; - }); - update.admin_areas = JSON.stringify(adminAreas); - return update; - }); + .then(aa => aa.filter(o => data.selectedAdminAreas + .indexOf(o.id) !== -1) + .map(o => o.id) + ) + // Store the selected admin areas in the settings table as an array. 
+ .then(adminAreas => db('scenarios_settings') + .update({ value: JSON.stringify(adminAreas) }) + .where('key', 'admin_areas') + .where('scenario_id', request.params.scId) + ); } executor - .then(update => db('scenarios') + .then(() => db('scenarios') .returning('id') .update(update) .where('id', request.params.scId) diff --git a/app/services/project-setup/project-setup.js b/app/services/project-setup/project-setup.js index b64ea054..5969e8db 100644 --- a/app/services/project-setup/project-setup.js +++ b/app/services/project-setup/project-setup.js @@ -1,6 +1,7 @@ 'use strict'; import path from 'path'; import bbox from '@turf/bbox'; +import _ from 'lodash'; import config from '../../config'; import db from '../../db/'; @@ -56,9 +57,21 @@ export function concludeProjectSetup (e) { let adminAreaTask = () => { return db.transaction(function (trx) { - let adminAreas = adminBoundsFc.features + if (!adminBoundsFc.features) { + throw new Error('Invalid administrative boundaries file'); + } + let adminAreas = _(adminBoundsFc.features) .filter(o => !!o.properties.name && o.geometry.type !== 'Point') - .map(o => ({name: o.properties.name, selected: false})); + .sortBy(o => _.kebabCase(o.properties.name)) + .map(o => { + return { + name: o.properties.name, + type: o.properties.type || 'Admin Area', + geometry: JSON.stringify(o.geometry.coordinates), + project_id: projId + }; + }) + .value(); let adminAreasBbox = bbox(adminBoundsFc); @@ -69,12 +82,19 @@ export function concludeProjectSetup (e) { updated_at: (new Date()) }) .where('id', projId), - trx('scenarios') - .update({ - admin_areas: JSON.stringify(adminAreas), + + trx.batchInsert('projects_aa', adminAreas) + .returning('id'), + + trx('scenarios_settings') + .insert({ + scenario_id: scId, + key: 'admin_areas', + value: '[]', + created_at: (new Date()), updated_at: (new Date()) }) - .where('id', scId) + .where('id', projId) ]); }); }; From 33568730eeaea01004ceb5e86f7ee95f27c4b1fa Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Thu, 25 May 2017 18:09:06 +0100 Subject: [PATCH 07/63] Update tests to handle new database structure --- test/test-result-gen.js | 16 +-- test/test-scenarios.js | 175 ++++++++++++++++++---------- test/test-services-project-setup.js | 78 +++++++------ 3 files changed, 166 insertions(+), 103 deletions(-) diff --git a/test/test-result-gen.js b/test/test-result-gen.js index df119e46..5899d685 100644 --- a/test/test-result-gen.js +++ b/test/test-result-gen.js @@ -4,7 +4,7 @@ import { assert } from 'chai'; import Server from '../app/services/server'; import { setupStructure as setupDdStructure } from '../app/db/structure'; import { setupStructure as setupStorageStructure } from '../app/s3/structure'; -import { fixMeUp, ADMIN_AREAS } from './utils/data'; +import { fixMeUp, getSelectedAdminAreas } from './utils/data'; import db from '../app/db'; var options = { @@ -61,11 +61,12 @@ describe('Result generation', function () { it('should return error when no admin areas are selected', function () { // Modify db entry. - return db('scenarios') + return db('scenarios_settings') .update({ - admin_areas: JSON.stringify([{name: 'test-area', selected: false}]) + value: '[]' }) - .where('id', 2000) + .where('scenario_id', 2000) + .where('key', 'admin_areas') .then(() => instance.injectThen({ method: 'POST', url: '/projects/2000/scenarios/2000/generate' @@ -75,9 +76,10 @@ describe('Result generation', function () { assert.equal(res.result.message, 'No admin areas selected'); }) // Set admin areas back to original. 
- .then(() => db('scenarios') - .update({admin_areas: JSON.stringify(ADMIN_AREAS)}) - .where('id', 2000) + .then(() => db('scenarios_settings') + .update({value: JSON.stringify(getSelectedAdminAreas(2000))}) + .where('scenario_id', 2000) + .where('key', 'admin_areas') ); }); diff --git a/test/test-scenarios.js b/test/test-scenarios.js index a729415f..2f9dead3 100644 --- a/test/test-scenarios.js +++ b/test/test-scenarios.js @@ -85,6 +85,57 @@ describe('Scenarios', function () { assert.equal(scenario.description, 'Ghost scenario created when the project was created'); assert.equal(scenario.status, 'pending'); assert.equal(scenario.master, true); + assert.equal(scenario.admin_areas, null); + assert.deepEqual(scenario.data, { res_gen_at: '0', rn_updated_at: '0' }); + assert.equal(scenario.gen_analysis, null); + assert.equal(scenario.scen_create, null); + }); + }); + + it('should return the correct scenario - active', function () { + return instance.injectThen({ + method: 'GET', + url: '/projects/2000/scenarios/2000' + }).then(res => { + assert.equal(res.statusCode, 200, 'Status code is 200'); + let scenario = res.result; + assert.equal(scenario.id, 2000); + assert.equal(scenario.name, 'Main scenario for Sergipe'); + assert.equal(scenario.description, ''); + assert.equal(scenario.status, 'active'); + assert.equal(scenario.master, true); + assert.deepEqual(scenario.admin_areas, [ + { 'id': 200001, 'name': 'Arauá', 'type': 'boundary', 'selected': false }, + { 'id': 200002, 'name': 'Boquim', 'type': 'boundary', 'selected': false }, + { 'id': 200003, 'name': 'Campo do Brito', 'type': 'boundary', 'selected': false }, + { 'id': 200004, 'name': 'Cristinápolis', 'type': 'boundary', 'selected': false }, + { 'id': 200005, 'name': 'Distrito de Abadia', 'type': 'boundary', 'selected': false }, + { 'id': 200006, 'name': 'Distrito de Buril', 'type': 'boundary', 'selected': false }, + { 'id': 200007, 'name': 'Distrito de Conceição de Campinas', 'type': 'boundary', 'selected': false }, + { 'id': 200008, 'name': 'Distrito de Itamira', 'type': 'boundary', 'selected': false }, + { 'id': 200009, 'name': 'Distrito de Itanhi', 'type': 'boundary', 'selected': false }, + { 'id': 2000010, 'name': 'Distrito de Sambaíba', 'type': 'boundary', 'selected': false }, + { 'id': 2000011, 'name': 'Estância', 'type': 'boundary', 'selected': false }, + { 'id': 2000012, 'name': 'Indiaroba', 'type': 'boundary', 'selected': false }, + { 'id': 2000013, 'name': 'Itabaiana', 'type': 'boundary', 'selected': true }, + { 'id': 2000014, 'name': 'Itabaianinha', 'type': 'boundary', 'selected': false }, + { 'id': 2000015, 'name': 'Itaporanga d\'Ajuda', 'type': 'boundary', 'selected': false }, + { 'id': 2000016, 'name': 'Lagarto', 'type': 'boundary', 'selected': true }, + { 'id': 2000017, 'name': 'Macambira', 'type': 'boundary', 'selected': false }, + { 'id': 2000018, 'name': 'Palmares', 'type': 'boundary', 'selected': false }, + { 'id': 2000019, 'name': 'Pedra Mole', 'type': 'boundary', 'selected': false }, + { 'id': 2000020, 'name': 'Pedrinhas', 'type': 'boundary', 'selected': false }, + { 'id': 2000021, 'name': 'Poço Verde', 'type': 'boundary', 'selected': true }, + { 'id': 2000022, 'name': 'Riachão do Dantas', 'type': 'boundary', 'selected': false }, + { 'id': 2000023, 'name': 'Salgado', 'type': 'boundary', 'selected': true }, + { 'id': 2000024, 'name': 'Samambaia', 'type': 'boundary', 'selected': false }, + { 'id': 2000025, 'name': 'Santa Luzia do Itanhy', 'type': 'boundary', 'selected': false }, + { 'id': 2000026, 'name': 'São Domingos', 
'type': 'boundary', 'selected': false }, + { 'id': 2000027, 'name': 'Simão Dias', 'type': 'boundary', 'selected': false }, + { 'id': 2000028, 'name': 'Tobias Barreto', 'type': 'boundary', 'selected': false }, + { 'id': 2000029, 'name': 'Tomar do Geru', 'type': 'boundary', 'selected': false }, + { 'id': 2000030, 'name': 'Umbaúba', 'type': 'boundary', 'selected': false } + ]); assert.deepEqual(scenario.data, { res_gen_at: '0', rn_updated_at: '0' }); assert.equal(scenario.gen_analysis, null); assert.equal(scenario.scen_create, null); @@ -313,44 +364,42 @@ describe('Scenarios', function () { method: 'PATCH', url: '/projects/2000/scenarios/2000', payload: { - selectedAdminAreas: [ - 'Salgado', 'Itabaiana', 'Lagarto', 'Poço Verde' - ] + selectedAdminAreas: [200001, 200002, 200003, 200004] } }).then(res => { assert.equal(res.statusCode, 200, 'Status code is 200'); var result = res.result; let adminAreas = [ - {'name': 'Distrito de Abadia', 'selected': false}, - {'name': 'Distrito de Itanhi', 'selected': false}, - {'name': 'Distrito de Conceição de Campinas', 'selected': false}, - {'name': 'Distrito de Sambaíba', 'selected': false}, - {'name': 'Distrito de Buril', 'selected': false}, - {'name': 'Distrito de Itamira', 'selected': false}, - {'name': 'Estância', 'selected': false}, - {'name': 'Itaporanga d\'Ajuda', 'selected': false}, - {'name': 'Salgado', 'selected': true}, - {'name': 'Arauá', 'selected': false}, - {'name': 'Boquim', 'selected': false}, - {'name': 'Cristinápolis', 'selected': false}, - {'name': 'Indiaroba', 'selected': false}, - {'name': 'Itabaianinha', 'selected': false}, - {'name': 'Pedrinhas', 'selected': false}, - {'name': 'Santa Luzia do Itanhy', 'selected': false}, - {'name': 'Tomar do Geru', 'selected': false}, - {'name': 'Umbaúba', 'selected': false}, - {'name': 'Pedra Mole', 'selected': false}, - {'name': 'Campo do Brito', 'selected': false}, - {'name': 'Itabaiana', 'selected': true}, - {'name': 'Lagarto', 'selected': true}, - {'name': 'Macambira', 'selected': false}, - {'name': 'Poço Verde', 'selected': true}, - {'name': 'Simão Dias', 'selected': false}, - {'name': 'São Domingos', 'selected': false}, - {'name': 'Palmares', 'selected': false}, - {'name': 'Riachão do Dantas', 'selected': false}, - {'name': 'Samambaia', 'selected': false}, - {'name': 'Tobias Barreto', 'selected': false} + { 'id': 200001, 'name': 'Arauá', 'type': 'boundary', 'selected': true }, + { 'id': 200002, 'name': 'Boquim', 'type': 'boundary', 'selected': true }, + { 'id': 200003, 'name': 'Campo do Brito', 'type': 'boundary', 'selected': true }, + { 'id': 200004, 'name': 'Cristinápolis', 'type': 'boundary', 'selected': true }, + { 'id': 200005, 'name': 'Distrito de Abadia', 'type': 'boundary', 'selected': false }, + { 'id': 200006, 'name': 'Distrito de Buril', 'type': 'boundary', 'selected': false }, + { 'id': 200007, 'name': 'Distrito de Conceição de Campinas', 'type': 'boundary', 'selected': false }, + { 'id': 200008, 'name': 'Distrito de Itamira', 'type': 'boundary', 'selected': false }, + { 'id': 200009, 'name': 'Distrito de Itanhi', 'type': 'boundary', 'selected': false }, + { 'id': 2000010, 'name': 'Distrito de Sambaíba', 'type': 'boundary', 'selected': false }, + { 'id': 2000011, 'name': 'Estância', 'type': 'boundary', 'selected': false }, + { 'id': 2000012, 'name': 'Indiaroba', 'type': 'boundary', 'selected': false }, + { 'id': 2000013, 'name': 'Itabaiana', 'type': 'boundary', 'selected': false }, + { 'id': 2000014, 'name': 'Itabaianinha', 'type': 'boundary', 'selected': false }, + { 'id': 
2000015, 'name': 'Itaporanga d\'Ajuda', 'type': 'boundary', 'selected': false }, + { 'id': 2000016, 'name': 'Lagarto', 'type': 'boundary', 'selected': false }, + { 'id': 2000017, 'name': 'Macambira', 'type': 'boundary', 'selected': false }, + { 'id': 2000018, 'name': 'Palmares', 'type': 'boundary', 'selected': false }, + { 'id': 2000019, 'name': 'Pedra Mole', 'type': 'boundary', 'selected': false }, + { 'id': 2000020, 'name': 'Pedrinhas', 'type': 'boundary', 'selected': false }, + { 'id': 2000021, 'name': 'Poço Verde', 'type': 'boundary', 'selected': false }, + { 'id': 2000022, 'name': 'Riachão do Dantas', 'type': 'boundary', 'selected': false }, + { 'id': 2000023, 'name': 'Salgado', 'type': 'boundary', 'selected': false }, + { 'id': 2000024, 'name': 'Samambaia', 'type': 'boundary', 'selected': false }, + { 'id': 2000025, 'name': 'Santa Luzia do Itanhy', 'type': 'boundary', 'selected': false }, + { 'id': 2000026, 'name': 'São Domingos', 'type': 'boundary', 'selected': false }, + { 'id': 2000027, 'name': 'Simão Dias', 'type': 'boundary', 'selected': false }, + { 'id': 2000028, 'name': 'Tobias Barreto', 'type': 'boundary', 'selected': false }, + { 'id': 2000029, 'name': 'Tomar do Geru', 'type': 'boundary', 'selected': false }, + { 'id': 2000030, 'name': 'Umbaúba', 'type': 'boundary', 'selected': false } ]; assert.deepEqual(result.admin_areas, adminAreas); }); @@ -367,36 +416,36 @@ describe('Scenarios', function () { assert.equal(res.statusCode, 200, 'Status code is 200'); var result = res.result; let adminAreas = [ - {'name': 'Distrito de Abadia', 'selected': false}, - {'name': 'Distrito de Itanhi', 'selected': false}, - {'name': 'Distrito de Conceição de Campinas', 'selected': false}, - {'name': 'Distrito de Sambaíba', 'selected': false}, - {'name': 'Distrito de Buril', 'selected': false}, - {'name': 'Distrito de Itamira', 'selected': false}, - {'name': 'Estância', 'selected': false}, - {'name': 'Itaporanga d\'Ajuda', 'selected': false}, - {'name': 'Salgado', 'selected': false}, - {'name': 'Arauá', 'selected': false}, - {'name': 'Boquim', 'selected': false}, - {'name': 'Cristinápolis', 'selected': false}, - {'name': 'Indiaroba', 'selected': false}, - {'name': 'Itabaianinha', 'selected': false}, - {'name': 'Pedrinhas', 'selected': false}, - {'name': 'Santa Luzia do Itanhy', 'selected': false}, - {'name': 'Tomar do Geru', 'selected': false}, - {'name': 'Umbaúba', 'selected': false}, - {'name': 'Pedra Mole', 'selected': false}, - {'name': 'Campo do Brito', 'selected': false}, - {'name': 'Itabaiana', 'selected': false}, - {'name': 'Lagarto', 'selected': false}, - {'name': 'Macambira', 'selected': false}, - {'name': 'Poço Verde', 'selected': false}, - {'name': 'Simão Dias', 'selected': false}, - {'name': 'São Domingos', 'selected': false}, - {'name': 'Palmares', 'selected': false}, - {'name': 'Riachão do Dantas', 'selected': false}, - {'name': 'Samambaia', 'selected': false}, - {'name': 'Tobias Barreto', 'selected': false} + { 'id': 200001, 'name': 'Arauá', 'type': 'boundary', 'selected': false }, + { 'id': 200002, 'name': 'Boquim', 'type': 'boundary', 'selected': false }, + { 'id': 200003, 'name': 'Campo do Brito', 'type': 'boundary', 'selected': false }, + { 'id': 200004, 'name': 'Cristinápolis', 'type': 'boundary', 'selected': false }, + { 'id': 200005, 'name': 'Distrito de Abadia', 'type': 'boundary', 'selected': false }, + { 'id': 200006, 'name': 'Distrito de Buril', 'type': 'boundary', 'selected': false }, + { 'id': 200007, 'name': 'Distrito de Conceição de Campinas', 'type': 'boundary', 
'selected': false }, + { 'id': 200008, 'name': 'Distrito de Itamira', 'type': 'boundary', 'selected': false }, + { 'id': 200009, 'name': 'Distrito de Itanhi', 'type': 'boundary', 'selected': false }, + { 'id': 2000010, 'name': 'Distrito de Sambaíba', 'type': 'boundary', 'selected': false }, + { 'id': 2000011, 'name': 'Estância', 'type': 'boundary', 'selected': false }, + { 'id': 2000012, 'name': 'Indiaroba', 'type': 'boundary', 'selected': false }, + { 'id': 2000013, 'name': 'Itabaiana', 'type': 'boundary', 'selected': false }, + { 'id': 2000014, 'name': 'Itabaianinha', 'type': 'boundary', 'selected': false }, + { 'id': 2000015, 'name': 'Itaporanga d\'Ajuda', 'type': 'boundary', 'selected': false }, + { 'id': 2000016, 'name': 'Lagarto', 'type': 'boundary', 'selected': false }, + { 'id': 2000017, 'name': 'Macambira', 'type': 'boundary', 'selected': false }, + { 'id': 2000018, 'name': 'Palmares', 'type': 'boundary', 'selected': false }, + { 'id': 2000019, 'name': 'Pedra Mole', 'type': 'boundary', 'selected': false }, + { 'id': 2000020, 'name': 'Pedrinhas', 'type': 'boundary', 'selected': false }, + { 'id': 2000021, 'name': 'Poço Verde', 'type': 'boundary', 'selected': false }, + { 'id': 2000022, 'name': 'Riachão do Dantas', 'type': 'boundary', 'selected': false }, + { 'id': 2000023, 'name': 'Salgado', 'type': 'boundary', 'selected': false }, + { 'id': 2000024, 'name': 'Samambaia', 'type': 'boundary', 'selected': false }, + { 'id': 2000025, 'name': 'Santa Luzia do Itanhy', 'type': 'boundary', 'selected': false }, + { 'id': 2000026, 'name': 'São Domingos', 'type': 'boundary', 'selected': false }, + { 'id': 2000027, 'name': 'Simão Dias', 'type': 'boundary', 'selected': false }, + { 'id': 2000028, 'name': 'Tobias Barreto', 'type': 'boundary', 'selected': false }, + { 'id': 2000029, 'name': 'Tomar do Geru', 'type': 'boundary', 'selected': false }, + { 'id': 2000030, 'name': 'Umbaúba', 'type': 'boundary', 'selected': false } ]; assert.deepEqual(result.admin_areas, adminAreas); }); diff --git a/test/test-services-project-setup.js b/test/test-services-project-setup.js index d40a2ba9..727e2e4b 100644 --- a/test/test-services-project-setup.js +++ b/test/test-services-project-setup.js @@ -61,39 +61,51 @@ describe('Finish Project Setup', function () { .first() .then(scenario => { assert.equal(scenario.status, 'active'); + }), + db('scenarios_settings') + .where('scenario_id', 3000) + .where('key', 'admin_areas') + .first() + .then(setting => { + assert.equal(setting.value, '[]'); + }), + db('projects_aa') + .select('name', 'project_id', 'type') + .where('project_id', 3000) + .then(aa => { let adminAreas = [ - {'name': 'Distrito de Abadia', 'selected': false}, - {'name': 'Distrito de Itanhi', 'selected': false}, - {'name': 'Distrito de Conceição de Campinas', 'selected': false}, - {'name': 'Distrito de Sambaíba', 'selected': false}, - {'name': 'Distrito de Buril', 'selected': false}, - {'name': 'Distrito de Itamira', 'selected': false}, - {'name': 'Estância', 'selected': false}, - {'name': 'Itaporanga d\'Ajuda', 'selected': false}, - {'name': 'Salgado', 'selected': false}, - {'name': 'Arauá', 'selected': false}, - {'name': 'Boquim', 'selected': false}, - {'name': 'Cristinápolis', 'selected': false}, - {'name': 'Indiaroba', 'selected': false}, - {'name': 'Itabaianinha', 'selected': false}, - {'name': 'Pedrinhas', 'selected': false}, - {'name': 'Santa Luzia do Itanhy', 'selected': false}, - {'name': 'Tomar do Geru', 'selected': false}, - {'name': 'Umbaúba', 'selected': false}, - {'name': 'Pedra Mole', 
'selected': false}, - {'name': 'Campo do Brito', 'selected': false}, - {'name': 'Itabaiana', 'selected': false}, - {'name': 'Lagarto', 'selected': false}, - {'name': 'Macambira', 'selected': false}, - {'name': 'Poço Verde', 'selected': false}, - {'name': 'Simão Dias', 'selected': false}, - {'name': 'São Domingos', 'selected': false}, - {'name': 'Palmares', 'selected': false}, - {'name': 'Riachão do Dantas', 'selected': false}, - {'name': 'Samambaia', 'selected': false}, - {'name': 'Tobias Barreto', 'selected': false} + {'name': 'Arauá', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Boquim', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Campo do Brito', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Cristinápolis', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Distrito de Abadia', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Distrito de Buril', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Distrito de Conceição de Campinas', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Distrito de Itamira', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Distrito de Itanhi', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Distrito de Sambaíba', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Estância', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Indiaroba', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Itabaiana', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Itabaianinha', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Itaporanga d\'Ajuda', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Lagarto', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Macambira', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Palmares', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Pedra Mole', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Pedrinhas', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Poço Verde', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Riachão do Dantas', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Salgado', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Samambaia', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Santa Luzia do Itanhy', 'type': 'boundary', 'project_id': 3000}, + {'name': 'São Domingos', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Simão Dias', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Tobias Barreto', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Tomar do Geru', 'type': 'boundary', 'project_id': 3000}, + {'name': 'Umbaúba', 'type': 'boundary', 'project_id': 3000} ]; - assert.deepEqual(scenario.admin_areas, adminAreas); + assert.deepEqual(aa, adminAreas); }), db('operations') .where('id', op.getId()) @@ -160,10 +172,10 @@ describe('Finish Project Setup', function () { .where('operation_id', op.getId()) .orderBy('id', 'desc') .then(logs => { - assert.equal(err, "Cannot read property 'filter' of undefined"); + assert.equal(err, 'Invalid administrative boundaries file'); assert.lengthOf(logs, 3); assert.equal(logs[0].code, 'error'); - assert.equal(logs[0].data.error, "Cannot read property 'filter' of undefined"); + assert.equal(logs[0].data.error, 'Invalid administrative boundaries file'); }) .then(() => done()) .catch(err => done(err)); From 20bd801cfb0983889911cfe9c903479314cb41fa Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Fri, 26 May 2017 19:49:58 +0100 Subject: [PATCH 08/63] Add subtype column to scenario files table. 
Ground work for multiple poi types support --- app/db/structure.js | 1 + app/routes/scenarios--files-upload.js | 12 +++++++++++- app/routes/scenarios--get.js | 2 +- test/utils/data.js | 7 +++++++ 4 files changed, 20 insertions(+), 2 deletions(-) diff --git a/app/db/structure.js b/app/db/structure.js index 33558783..5b8e8b6c 100644 --- a/app/db/structure.js +++ b/app/db/structure.js @@ -112,6 +112,7 @@ export function createScenariosFilesTable () { table.increments('id').primary(); table.string('name'); table.string('type'); + table.string('subtype'); table.string('path'); table.integer('project_id').unsigned(); table.foreign('project_id') diff --git a/app/routes/scenarios--files-upload.js b/app/routes/scenarios--files-upload.js index 006511c0..df056dc6 100644 --- a/app/routes/scenarios--files-upload.js +++ b/app/routes/scenarios--files-upload.js @@ -30,6 +30,7 @@ module.exports = [ const scId = parseInt(request.params.scId); let file; let type; + let subtype; let fileName; let filePath; @@ -50,6 +51,9 @@ module.exports = [ throw new DataValidationError('"file" is required'); } + // TODO: Get subtype from request. + subtype = type === 'poi' ? 'pointOfInterest' : ''; + file = result.files.file[0]; fileName = `${type}_${Date.now()}`; filePath = `scenario-${scId}/${fileName}`; @@ -69,7 +73,8 @@ module.exports = [ }) .leftJoin('scenarios_files', function () { this.on('scenarios.id', '=', 'scenarios_files.scenario_id') - .andOn(db.raw('scenarios_files.type = :type', {type})); + .andOn(db.raw('scenarios_files.type = :type', {type})) + .andOn(db.raw('scenarios_files.subtype = :subtype', {subtype})); }) .where('projects.id', projId) .then(res => { @@ -96,6 +101,11 @@ module.exports = [ updated_at: (new Date()) }; + // TODO: Get subtype from request. + if (type === 'poi') { + data.subtype = subtype; + } + return db('scenarios_files') .returning('*') .insert(data) diff --git a/app/routes/scenarios--get.js b/app/routes/scenarios--get.js index acd50f0c..65bcee07 100644 --- a/app/routes/scenarios--get.js +++ b/app/routes/scenarios--get.js @@ -118,7 +118,7 @@ function attachScenarioSettings (scenario) { } function attachScenarioFiles (scenario) { - return db.select('id', 'name', 'type', 'path', 'created_at') + return db.select('id', 'name', 'type', 'subtype', 'path', 'created_at') .from('scenarios_files') .where('scenario_id', scenario.id) .then(files => { diff --git a/test/utils/data.js b/test/utils/data.js index ff6e4c6a..60c64616 100644 --- a/test/utils/data.js +++ b/test/utils/data.js @@ -173,6 +173,7 @@ export function project1001 () { 'id': 1001, 'name': 'poi_000000', 'type': 'poi', + 'subtype': 'pointOfInterest', 'path': 'scenario-1001/poi_000000', 'project_id': 1001, 'scenario_id': 1001, @@ -324,6 +325,7 @@ export function project1004 () { 'id': 1005, 'name': 'poi_000000', 'type': 'poi', + 'subtype': 'pointOfInterest', 'path': 'scenario-1004/poi_000000', 'project_id': 1004, 'scenario_id': 1004, @@ -427,6 +429,7 @@ export function project1100 () { 'id': 1101, 'name': 'poi_000000', 'type': 'poi', + 'subtype': 'pointOfInterest', 'path': 'scenario-1100/poi_000000', 'project_id': 1100, 'scenario_id': 1100, @@ -563,6 +566,7 @@ export function project1200 () { 'id': 1201, 'name': 'poi_000000', 'type': 'poi', + 'subtype': 'pointOfInterest', 'path': 'scenario-1200/poi_000000', 'project_id': 1200, 'scenario_id': 1200, @@ -583,6 +587,7 @@ export function project1200 () { 'id': 1203, 'name': 'poi_000000', 'type': 'poi', + 'subtype': 'pointOfInterest', 'path': 'scenario-1201/poi_000000', 'project_id': 1200, 
'scenario_id': 1201, @@ -689,6 +694,7 @@ export function project2000 () { 'id': 2001, 'name': 'poi_000000', 'type': 'poi', + 'subtype': 'pointOfInterest', 'path': 'scenario-2000/poi_000000', 'project_id': 2000, 'scenario_id': 2000, @@ -930,6 +936,7 @@ export function projectPendingWithAllFiles (id) { 'id': id + 1, 'name': 'poi_000000', 'type': 'poi', + 'subtype': 'pointOfInterest', 'path': `scenario-${id}/poi_000000`, 'project_id': id, 'scenario_id': id, From d758a7b376780c33909c422f8573d58cbc81b20f Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Mon, 29 May 2017 15:05:58 +0100 Subject: [PATCH 09/63] Change villages file type to origins --- app/routes/projects--files-download.js | 2 +- app/routes/projects--files-upload.js | 2 +- app/routes/projects--get.js | 2 +- test/test-projects-files.js | 16 ++++---- test/utils/data.js | 54 +++++++++++++------------- 5 files changed, 38 insertions(+), 38 deletions(-) diff --git a/app/routes/projects--files-download.js b/app/routes/projects--files-download.js index 3595a1a4..a52db229 100644 --- a/app/routes/projects--files-download.js +++ b/app/routes/projects--files-download.js @@ -42,7 +42,7 @@ module.exports = [ case 'profile': mime = 'text/x-lua'; break; - case 'villages': + case 'origins': case 'admin-bounds': mime = 'application/json'; break; diff --git a/app/routes/projects--files-upload.js b/app/routes/projects--files-upload.js index 0625fb4c..257775a0 100644 --- a/app/routes/projects--files-upload.js +++ b/app/routes/projects--files-upload.js @@ -39,7 +39,7 @@ module.exports = [ type = result.fields.type[0]; - let allowedTypes = ['profile', 'villages', 'admin-bounds']; + let allowedTypes = ['profile', 'origins', 'admin-bounds']; if (allowedTypes.indexOf(type) === -1) { throw new DataValidationError(`"type" must be one of [${allowedTypes.join(', ')}]`); } diff --git a/app/routes/projects--get.js b/app/routes/projects--get.js index e9588212..84d18e3d 100644 --- a/app/routes/projects--get.js +++ b/app/routes/projects--get.js @@ -53,7 +53,7 @@ function attachProjectFiles (project) { return db.select('id', 'name', 'type', 'path', 'created_at') .from('projects_files') .where('project_id', project.id) - .whereIn('type', ['profile', 'villages', 'admin-bounds']) + .whereIn('type', ['profile', 'origins', 'admin-bounds']) .then(files => { project.files = files || []; return project; diff --git a/test/test-projects-files.js b/test/test-projects-files.js index f373d248..45c5d54b 100644 --- a/test/test-projects-files.js +++ b/test/test-projects-files.js @@ -135,7 +135,7 @@ describe('Project files', function () { headers: form.getHeaders() }).then(res => { assert.equal(res.statusCode, 400, 'Status code is 400'); - assert.match(res.result.message, /"type" must be one of \[profile, villages, admin-bounds\]/); + assert.match(res.result.message, /"type" must be one of \[profile, origins, admin-bounds\]/); }); }); }); @@ -195,7 +195,7 @@ describe('Project files', function () { it('should upload the file', function () { let form = new FormData(); - form.append('type', 'villages'); + form.append('type', 'origins'); form.append('file', fs.createReadStream('./test/utils/data-sergipe/villages.geojson')); return streamToPromise(form).then(payload => { @@ -207,18 +207,18 @@ describe('Project files', function () { }) .then(res => { assert.equal(res.statusCode, 200, 'Status code is 200'); - assert.match(res.result.fileName, /^villages_[0-9]+$/); + assert.match(res.result.fileName, /^origins_[0-9]+$/); }) .then(() => { return db.select('*') .from('projects_files') 
.where('project_id', 1000) - .where('type', 'villages') + .where('type', 'origins') .then(files => { assert.equal(files.length, 1); assert.equal(files[0].project_id, 1000); - assert.match(files[0].name, /^villages_[0-9]+$/); - assert.match(files[0].path, /project-1000\/villages_[0-9]+/); + assert.match(files[0].name, /^origins_[0-9]+$/); + assert.match(files[0].path, /project-1000\/origins_[0-9]+/); }); }) // Ensure that the project "updated_at" gets updated. @@ -310,14 +310,14 @@ describe('Project files', function () { }); }); - it('should download a villages file', function () { + it('should download a origins file', function () { return instance.injectThen({ method: 'GET', url: '/projects/1004/files/1005?download=true' }).then(res => { assert.equal(res.statusCode, 200); assert.match(res.headers['content-type'], /application\/json/); - assert.match(res.headers['content-disposition'], /villages_000000/); + assert.match(res.headers['content-disposition'], /origins_000000/); }); }); diff --git a/test/utils/data.js b/test/utils/data.js index 60c64616..2d1d498c 100644 --- a/test/utils/data.js +++ b/test/utils/data.js @@ -14,7 +14,7 @@ function readJSONSync (file) { } const FILE_PROFILE = path.join(__dirname, 'data-sergipe/profile.lua'); -const FILE_VILLAGES = path.join(__dirname, 'data-sergipe/villages.geojson'); +const FILE_ORIGINS = path.join(__dirname, 'data-sergipe/villages.geojson'); const FILE_ADMIN = path.join(__dirname, 'data-sergipe/admin-boundaries.geojson'); const FILE_ROAD_NETWORK = path.join(__dirname, 'data-sergipe/road-network.osm'); const FILE_POI = path.join(__dirname, 'data-sergipe/poi-townhalls.geojson'); @@ -183,26 +183,26 @@ export function project1001 () { .then(() => putObjectFromFile(bucket, 'scenario-1001/poi_000000', FILE_POI)); } -// Project 1003 in pending state with one scenario and a villages file +// Project 1003 in pending state with one scenario and a origins file export function project1003 () { return project({ 'id': 1003, 'name': 'Project 1003', - 'description': 'Project 1003 in pending state with one scenario and a villages file', + 'description': 'Project 1003 in pending state with one scenario and a origins file', 'status': 'pending', 'created_at': '2017-02-01T12:00:04.000Z', 'updated_at': '2017-02-01T12:00:04.000Z' }) .then(() => projectFile({ 'id': 1003, - 'name': 'villages_000000', - 'type': 'villages', - 'path': 'project-1003/villages_000000', + 'name': 'origins_000000', + 'type': 'origins', + 'path': 'project-1003/origins_000000', 'project_id': 1003, 'created_at': '2017-02-01T12:00:04.000Z', 'updated_at': '2017-02-01T12:00:04.000Z' })) - .then(() => putObjectFromFile(bucket, 'project-1003/villages_000000', FILE_VILLAGES)) + .then(() => putObjectFromFile(bucket, 'project-1003/origins_000000', FILE_ORIGINS)) .then(() => scenario({ 'id': 1003, 'name': 'Main scenario 1003', @@ -264,9 +264,9 @@ export function project1004 () { }, { 'id': 1005, - 'name': 'villages_000000', - 'type': 'villages', - 'path': 'project-1004/villages_000000', + 'name': 'origins_000000', + 'type': 'origins', + 'path': 'project-1004/origins_000000', 'project_id': 1004, 'created_at': '2017-02-01T12:00:05.000Z', 'updated_at': '2017-02-01T12:00:05.000Z' @@ -282,7 +282,7 @@ export function project1004 () { } ])) .then(() => putObjectFromFile(bucket, 'project-1004/profile_000000', FILE_PROFILE)) - .then(() => putObjectFromFile(bucket, 'project-1004/villages_000000', FILE_VILLAGES)) + .then(() => putObjectFromFile(bucket, 'project-1004/origins_000000', FILE_ORIGINS)) .then(() => 
putObjectFromFile(bucket, 'project-1004/admin-bounds_000000', FILE_ADMIN)) .then(() => scenario({ 'id': 1004, @@ -360,9 +360,9 @@ export function project1100 () { }, { 'id': 1101, - 'name': 'villages_000000', - 'type': 'villages', - 'path': 'project-1100/villages_000000', + 'name': 'origins_000000', + 'type': 'origins', + 'path': 'project-1100/origins_000000', 'project_id': 1100, 'created_at': '2017-02-01T12:00:06.000Z', 'updated_at': '2017-02-01T12:00:06.000Z' @@ -379,7 +379,7 @@ export function project1100 () { ])) .then(() => projectAA(getAdminAreasForProject(1100))) .then(() => putObjectFromFile(bucket, 'project-1100/profile_000000', FILE_PROFILE)) - .then(() => putObjectFromFile(bucket, 'project-1100/villages_000000', FILE_VILLAGES)) + .then(() => putObjectFromFile(bucket, 'project-1100/origins_000000', FILE_ORIGINS)) .then(() => putObjectFromFile(bucket, 'project-1100/admin-bounds_000000', FILE_ADMIN)) .then(() => scenario({ 'id': 1100, @@ -464,9 +464,9 @@ export function project1200 () { }, { 'id': 1201, - 'name': 'villages_000000', - 'type': 'villages', - 'path': 'project-1200/villages_000000', + 'name': 'origins_000000', + 'type': 'origins', + 'path': 'project-1200/origins_000000', 'project_id': 1200, 'created_at': '2017-02-01T12:00:07.000Z', 'updated_at': '2017-02-01T12:00:07.000Z' @@ -483,7 +483,7 @@ export function project1200 () { ])) .then(() => projectAA(getAdminAreasForProject(1200))) .then(() => putObjectFromFile(bucket, 'project-1200/profile_000000', FILE_PROFILE)) - .then(() => putObjectFromFile(bucket, 'project-1200/villages_000000', FILE_VILLAGES)) + .then(() => putObjectFromFile(bucket, 'project-1200/origins_000000', FILE_ORIGINS)) .then(() => putObjectFromFile(bucket, 'project-1200/admin-bounds_000000', FILE_ADMIN)) .then(() => scenario([ { @@ -625,9 +625,9 @@ export function project2000 () { }, { 'id': 2001, - 'name': 'villages_000000', - 'type': 'villages', - 'path': 'project-2000/villages_000000', + 'name': 'origins_000000', + 'type': 'origins', + 'path': 'project-2000/origins_000000', 'project_id': 2000, 'created_at': '2017-02-01T12:00:06.000Z', 'updated_at': '2017-02-01T12:00:06.000Z' @@ -644,7 +644,7 @@ export function project2000 () { ])) .then(() => projectAA(getAdminAreasForProject(2000))) .then(() => putObjectFromFile(bucket, 'project-2000/profile_000000', FILE_PROFILE)) - .then(() => putObjectFromFile(bucket, 'project-2000/villages_000000', FILE_VILLAGES)) + .then(() => putObjectFromFile(bucket, 'project-2000/origins_000000', FILE_ORIGINS)) .then(() => putObjectFromFile(bucket, 'project-2000/admin-bounds_000000', FILE_ADMIN)) .then(() => scenario({ 'id': 2000, @@ -875,9 +875,9 @@ export function projectPendingWithAllFiles (id) { }, { 'id': id + 1, - 'name': 'villages_000000', - 'type': 'villages', - 'path': `project-${id}/villages_000000`, + 'name': 'origins_000000', + 'type': 'origins', + 'path': `project-${id}/origins_000000`, 'project_id': id, 'created_at': '2017-02-01T12:00:07.000Z', 'updated_at': '2017-02-01T12:00:07.000Z' @@ -893,7 +893,7 @@ export function projectPendingWithAllFiles (id) { } ])) .then(() => putObjectFromFile(bucket, `project-${id}/profile_000000`, FILE_PROFILE)) - .then(() => putObjectFromFile(bucket, `project-${id}/villages_000000`, FILE_VILLAGES)) + .then(() => putObjectFromFile(bucket, `project-${id}/origins_000000`, FILE_ORIGINS)) .then(() => putObjectFromFile(bucket, `project-${id}/admin-bounds_000000`, FILE_ADMIN)) .then(() => scenario({ 'id': id, From d18a1fd6d241e355c0a2857c329c39eb609ccac0 Mon Sep 17 00:00:00 2001 From: 
Daniel da Silva Date: Mon, 29 May 2017 16:00:02 +0100 Subject: [PATCH 10/63] Fix scenario create. Fix bug introduced by new handling of selected admin areas. --- app/routes/scenarios--create.js | 8 ++++++++ .../scenario-create/scenario-create.js | 18 +----------------- test/test-scenarios-create.js | 2 ++ 3 files changed, 11 insertions(+), 17 deletions(-) diff --git a/app/routes/scenarios--create.js b/app/routes/scenarios--create.js index b1862771..356c0e1b 100644 --- a/app/routes/scenarios--create.js +++ b/app/routes/scenarios--create.js @@ -76,6 +76,13 @@ function handler (params, payload, reply) { value: 0, created_at: now, updated_at: now + }, + { + scenario_id: scenario.id, + key: 'admin_areas', + value: '[]', + created_at: now, + updated_at: now } ]) .then(() => scenario); @@ -85,6 +92,7 @@ function handler (params, payload, reply) { res_gen_at: 0, rn_updated_at: 0 }; + scenario.admin_areas = '[]'; return scenario; }); }) diff --git a/app/services/scenario-create/scenario-create.js b/app/services/scenario-create/scenario-create.js index 62e1e426..ea103de7 100644 --- a/app/services/scenario-create/scenario-create.js +++ b/app/services/scenario-create/scenario-create.js @@ -64,23 +64,7 @@ export function scenarioCreate (e) { op.loadById(opId) .then(op => op.log('admin-areas', {message: 'Cloning admin areas'})) .then(() => db.transaction(function (trx) { - // Get the admin areas from the master scenario. - let executor = trx('scenarios') - .select('*') - .where('project_id', projId) - .where('master', true) - .first() - .then(scenario => scenario.admin_areas.map(o => { - o.selected = false; - return o; - })) - .then(adminAreas => trx('scenarios') - .update({ - admin_areas: JSON.stringify(adminAreas), - updated_at: (new Date()) - }) - .where('id', scId) - ); + let executor = Promise.resolve(); if (source === 'clone') { executor = executor diff --git a/test/test-scenarios-create.js b/test/test-scenarios-create.js index 7a1e87bc..d32a22f3 100644 --- a/test/test-scenarios-create.js +++ b/test/test-scenarios-create.js @@ -264,6 +264,7 @@ describe('Scenarios', function () { assert.equal(typeof result.roadNetworkUpload, 'undefined'); assert.equal(result.data.res_gen_at, 0); assert.equal(result.data.rn_updated_at, 0); + assert.equal(result.admin_areas, '[]'); return result; }); @@ -350,6 +351,7 @@ describe('Scenarios', function () { assert.equal(typeof result.roadNetworkUpload, 'undefined'); assert.equal(result.data.res_gen_at, 0); assert.equal(result.data.rn_updated_at, 0); + assert.equal(result.admin_areas, '[]'); return result; }); From 8ed5ba19f9cb900e3039779d309e8a647f4da1d2 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Tue, 30 May 2017 17:13:37 +0100 Subject: [PATCH 11/63] Add tables for results and project origins --- app/db/structure.js | 92 ++++++++++++++++++++++++++++++++++++++++++++- test/utils/data.js | 55 +++++++++++++++++++++++++++ 2 files changed, 146 insertions(+), 1 deletion(-) diff --git a/app/db/structure.js b/app/db/structure.js index 5b8e8b6c..da9640c8 100644 --- a/app/db/structure.js +++ b/app/db/structure.js @@ -44,6 +44,26 @@ export function dropOperationsLogs () { return db.schema.dropTableIfExists('operations_logs'); } +export function dropResults () { + DEBUG && console.log('Dropping table: results'); + return db.schema.dropTableIfExists('results'); +} + +export function dropResultsPoi () { + DEBUG && console.log('Dropping table: results_poi'); + return db.schema.dropTableIfExists('results_poi'); +} + +export function dropProjectsOrigins () { + DEBUG && 
console.log('Dropping table: projects_origins'); + return db.schema.dropTableIfExists('projects_origins'); +} + +export function dropProjectsOriginsIndicators () { + DEBUG && console.log('Dropping table: projects_origins_indicators'); + return db.schema.dropTableIfExists('projects_origins_indicators'); +} + export function createProjectsTable () { DEBUG && console.log('Creating table: projects'); return db.schema.createTable('projects', table => { @@ -69,6 +89,7 @@ export function createProjectsFilesTable () { table.foreign('project_id') .references('projects.id') .onDelete('CASCADE'); + table.json('data'); table.timestamps(); }); } @@ -172,14 +193,79 @@ export function createOperationsLogsTable () { }); } +export function createResultsTable () { + DEBUG && console.log('Creating table: results'); + return db.schema.createTable('results', table => { + table.increments('id').primary(); + table.integer('project_id').unsigned(); + table.foreign('project_id') + .references('projects.id') + .onDelete('CASCADE'); + table.integer('scenario_id').unsigned(); + table.foreign('scenario_id') + .references('scenarios.id') + .onDelete('CASCADE'); + table.integer('origin_id').unsigned(); + table.foreign('origin_id') + .references('projects_origins.id') + .onDelete('CASCADE'); + table.integer('project_aa_id').unsigned(); + table.foreign('project_aa_id') + .references('projects_aa.id') + .onDelete('CASCADE'); + }); +} + +export function createResultsPoiTable () { + DEBUG && console.log('Creating table: results_poi'); + return db.schema.createTable('results_poi', table => { + table.increments('id').primary(); + table.integer('result_id').unsigned(); + table.foreign('result_id') + .references('results.id') + .onDelete('CASCADE'); + table.string('type'); + table.integer('time'); + }); +} + +export function createProjectsOriginsTable () { + DEBUG && console.log('Creating table: projects_origins'); + return db.schema.createTable('projects_origins', table => { + table.increments('id').primary(); + table.integer('project_id').unsigned(); + table.foreign('project_id') + .references('projects.id'); + table.string('name'); + table.json('coordinates'); + }); +} + +export function createProjectsOriginsIndicatorsTable () { + DEBUG && console.log('Creating table: projects_origins_indicators'); + return db.schema.createTable('projects_origins_indicators', table => { + table.increments('id').primary(); + table.integer('origin_id').unsigned(); + table.foreign('origin_id') + .references('projects_origins.id'); + table.string('key'); + table.string('label'); + table.integer('value'); + }); +} + export function setupStructure () { return dropScenariosFiles() .then(() => dropProjectsFiles()) + .then(() => dropResultsPoi()) + .then(() => dropResults()) .then(() => dropProjectsAA()) .then(() => dropOperationsLogs()) .then(() => dropOperations()) .then(() => dropScenariosSettings()) .then(() => dropScenarios()) + .then(() => dropProjectsOriginsIndicators()) + .then(() => dropProjectsOrigins()) .then(() => dropProjects()) .then(() => createProjectsTable()) .then(() => createProjectsAATable()) @@ -188,5 +274,9 @@ export function setupStructure () { .then(() => createOperationsTable()) .then(() => createOperationsLogsTable()) .then(() => createProjectsFilesTable()) - .then(() => createScenariosFilesTable()); + .then(() => createScenariosFilesTable()) + .then(() => createProjectsOriginsTable()) + .then(() => createProjectsOriginsIndicatorsTable()) + .then(() => createResultsTable()) + .then(() => createResultsPoiTable()); } diff --git 
a/test/utils/data.js b/test/utils/data.js index 2d1d498c..afd8e0c2 100644 --- a/test/utils/data.js +++ b/test/utils/data.js @@ -47,6 +47,47 @@ export function getSelectedAdminAreas (projId) { return [13, 16, 21, 23].map(o => parseInt(`${projId}0${o}`)); } +// Parse origins. +let originsFC = readJSONSync(FILE_ORIGINS); +let neededProps = ['name', 'population']; +let originFeatures = originsFC.features.filter(feat => { + let props = Object.keys(feat.properties); + return neededProps.every(o => props.indexOf(o) !== -1); +}); + +export function getOriginsForProject (projId) { + let originsIndicators = []; + let origins = originFeatures.map((feat, idx) => { + let id = parseInt(`${projId}0${idx + 1}`); + + let indicators = [ + { + key: 'population', + label: 'Total population' + } + ]; + let featureIndicators = indicators.map((ind, idx2) => ({ + id: parseInt(`${id}0${idx2 + 1}`), + origin_id: id, + key: ind.key, + label: ind.label, + value: parseInt(feat.properties[ind.key]) + })); + originsIndicators = originsIndicators.concat(featureIndicators); + + return { + id: id, + project_id: projId, + name: feat.properties.name, + coordinates: JSON.stringify(feat.geometry.coordinates) + }; + }); + + return { originsIndicators, origins }; +} + +// ////////////////////////////////////////////////////////////////////////// // + // Project in pending state with one scenario. export function project1000 () { return project({ @@ -199,6 +240,7 @@ export function project1003 () { 'type': 'origins', 'path': 'project-1003/origins_000000', 'project_id': 1003, + 'data': {indicators: [ { key: 'population', label: 'Total population' } ]}, 'created_at': '2017-02-01T12:00:04.000Z', 'updated_at': '2017-02-01T12:00:04.000Z' })) @@ -268,6 +310,7 @@ export function project1004 () { 'type': 'origins', 'path': 'project-1004/origins_000000', 'project_id': 1004, + 'data': {indicators: [ { key: 'population', label: 'Total population' } ]}, 'created_at': '2017-02-01T12:00:05.000Z', 'updated_at': '2017-02-01T12:00:05.000Z' }, @@ -364,6 +407,7 @@ export function project1100 () { 'type': 'origins', 'path': 'project-1100/origins_000000', 'project_id': 1100, + 'data': {indicators: [ { key: 'population', label: 'Total population' } ]}, 'created_at': '2017-02-01T12:00:06.000Z', 'updated_at': '2017-02-01T12:00:06.000Z' }, @@ -378,6 +422,7 @@ export function project1100 () { } ])) .then(() => projectAA(getAdminAreasForProject(1100))) + .then(() => projectOrigins(getOriginsForProject(1100))) .then(() => putObjectFromFile(bucket, 'project-1100/profile_000000', FILE_PROFILE)) .then(() => putObjectFromFile(bucket, 'project-1100/origins_000000', FILE_ORIGINS)) .then(() => putObjectFromFile(bucket, 'project-1100/admin-bounds_000000', FILE_ADMIN)) @@ -468,6 +513,7 @@ export function project1200 () { 'type': 'origins', 'path': 'project-1200/origins_000000', 'project_id': 1200, + 'data': {indicators: [ { key: 'population', label: 'Total population' } ]}, 'created_at': '2017-02-01T12:00:07.000Z', 'updated_at': '2017-02-01T12:00:07.000Z' }, @@ -482,6 +528,7 @@ export function project1200 () { } ])) .then(() => projectAA(getAdminAreasForProject(1200))) + .then(() => projectOrigins(getOriginsForProject(1200))) .then(() => putObjectFromFile(bucket, 'project-1200/profile_000000', FILE_PROFILE)) .then(() => putObjectFromFile(bucket, 'project-1200/origins_000000', FILE_ORIGINS)) .then(() => putObjectFromFile(bucket, 'project-1200/admin-bounds_000000', FILE_ADMIN)) @@ -629,6 +676,7 @@ export function project2000 () { 'type': 'origins', 'path': 
'project-2000/origins_000000', 'project_id': 2000, + 'data': {indicators: [ { key: 'population', label: 'Total population' } ]}, 'created_at': '2017-02-01T12:00:06.000Z', 'updated_at': '2017-02-01T12:00:06.000Z' }, @@ -643,6 +691,7 @@ export function project2000 () { } ])) .then(() => projectAA(getAdminAreasForProject(2000))) + .then(() => projectOrigins(getOriginsForProject(2000))) .then(() => putObjectFromFile(bucket, 'project-2000/profile_000000', FILE_PROFILE)) .then(() => putObjectFromFile(bucket, 'project-2000/origins_000000', FILE_ORIGINS)) .then(() => putObjectFromFile(bucket, 'project-2000/admin-bounds_000000', FILE_ADMIN)) @@ -751,6 +800,11 @@ function projectAA (data) { return db.batchInsert('projects_aa', _.isArray(data) ? data : [data]); } +function projectOrigins ({ originsIndicators, origins }) { + return db.batchInsert('projects_origins', origins) + .then(() => db.batchInsert('projects_origins_indicators', originsIndicators)); +} + // // Functions for project creation. // @@ -879,6 +933,7 @@ export function projectPendingWithAllFiles (id) { 'type': 'origins', 'path': `project-${id}/origins_000000`, 'project_id': id, + 'data': {indicators: [ { key: 'population', label: 'Total population' } ]}, 'created_at': '2017-02-01T12:00:07.000Z', 'updated_at': '2017-02-01T12:00:07.000Z' }, From 0b0bbb1a767d06e8c6740c10a4ba7fad28449497 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Tue, 30 May 2017 17:16:09 +0100 Subject: [PATCH 12/63] Include turf center-of-mass --- package.json | 1 + yarn.lock | 114 +++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 115 insertions(+) diff --git a/package.json b/package.json index c1076f72..7e377a8f 100644 --- a/package.json +++ b/package.json @@ -20,6 +20,7 @@ "homepage": "https://github.com/WorldBank-Transport/Rural-Road-Accessibility", "dependencies": { "@turf/bbox": "^4.0.2", + "@turf/center-of-mass": "^4.3.0", "babel-preset-es2015": "^6.18.0", "babel-register": "^6.23.0", "bluebird": "^3.4.7", diff --git a/yarn.lock b/yarn.lock index bb4436ff..5cd31e74 100644 --- a/yarn.lock +++ b/yarn.lock @@ -8,10 +8,50 @@ dependencies: "@turf/meta" "^4.1.0" +"@turf/center-of-mass@^4.3.0": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@turf/center-of-mass/-/center-of-mass-4.3.0.tgz#4a8c6113f11307e8aabf560888ffca7ef14fdd5c" + dependencies: + "@turf/centroid" "^4.3.0" + "@turf/convex" "^4.3.0" + "@turf/explode" "^4.3.0" + "@turf/helpers" "^4.3.0" + "@turf/meta" "^4.3.0" + +"@turf/centroid@^4.3.0": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@turf/centroid/-/centroid-4.3.0.tgz#086f6f562b934f3a4ccfb9d453c872357462d739" + dependencies: + "@turf/helpers" "^4.3.0" + "@turf/meta" "^4.3.0" + +"@turf/convex@^4.3.0": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@turf/convex/-/convex-4.3.0.tgz#e58efceaa665234d0e630051c1bb4f426bbb3484" + dependencies: + "@turf/helpers" "^4.3.0" + "@turf/meta" "^4.3.0" + convex-hull "^1.0.3" + +"@turf/explode@^4.3.0": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@turf/explode/-/explode-4.3.0.tgz#6f00ab86e8c85eb8b6087649facf492e01da07a7" + dependencies: + "@turf/helpers" "^4.3.0" + "@turf/meta" "^4.3.0" + +"@turf/helpers@^4.3.0": + version "4.3.0" + resolved "https://registry.yarnpkg.com/@turf/helpers/-/helpers-4.3.0.tgz#7b2f733aa0eb3ea1f07d467ac02ede00cc6cde0d" + "@turf/meta@^4.1.0": version "4.1.0" resolved "https://registry.yarnpkg.com/@turf/meta/-/meta-4.1.0.tgz#7b0715832ff483d28d2669051f1e00c6cefcbf75" +"@turf/meta@^4.3.0": + version "4.3.0" + resolved 
"https://registry.yarnpkg.com/@turf/meta/-/meta-4.3.0.tgz#eb11dd2c2511524258123767fe0f5c3bd963e8d7" + JSONStream@^1.1.4: version "1.3.1" resolved "https://registry.yarnpkg.com/JSONStream/-/JSONStream-1.3.1.tgz#707f761e01dae9e16f1bcf93703b78c70966579a" @@ -73,6 +113,12 @@ acorn@^5.0.1: version "5.0.3" resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.0.3.tgz#c460df08491463f028ccb82eab3730bf01087b3d" +affine-hull@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/affine-hull/-/affine-hull-1.0.0.tgz#763ff1d38d063ceb7e272f17ee4d7bbcaf905c5d" + dependencies: + robust-orientation "^1.1.3" + after-all@^2.0.2: version "2.0.2" resolved "https://registry.yarnpkg.com/after-all/-/after-all-2.0.2.tgz#20300298ed6094b4c85c98e7c8ad4dca628f9f73" @@ -643,6 +689,10 @@ bindings@~1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.2.1.tgz#14ad6113812d2d37d72e67b4cacb4bb726505f11" +bit-twiddle@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/bit-twiddle/-/bit-twiddle-1.0.2.tgz#0c6c1fabe2b23d17173d9a61b7b7093eb9e1769e" + bitfield@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/bitfield/-/bitfield-1.1.2.tgz#a5477f00e33f2a76edc209aaf26bf09394a378cf" @@ -926,6 +976,14 @@ convert-source-map@^1.1.0: version "1.5.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.5.0.tgz#9acd70851c6d5dfdd93d9282e5edf94a03ff46b5" +convex-hull@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/convex-hull/-/convex-hull-1.0.3.tgz#20a3aa6ce87f4adea2ff7d17971c9fc1c67e1fff" + dependencies: + affine-hull "^1.0.0" + incremental-convex-hull "^1.0.1" + monotone-convex-hull-2d "^1.0.1" + core-js@^1.1.1: version "1.2.7" resolved "https://registry.yarnpkg.com/core-js/-/core-js-1.2.7.tgz#652294c14651db28fa93bd2d5ff2983a4f08c636" @@ -1924,6 +1982,13 @@ imurmurhash@^0.1.4: version "0.1.4" resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" +incremental-convex-hull@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/incremental-convex-hull/-/incremental-convex-hull-1.0.1.tgz#51428c14cb9d9a6144bfe69b2851fb377334be1e" + dependencies: + robust-orientation "^1.1.2" + simplicial-complex "^1.0.0" + infinity-agent@^2.0.0: version "2.0.3" resolved "https://registry.yarnpkg.com/infinity-agent/-/infinity-agent-2.0.3.tgz#45e0e2ff7a9eb030b27d62b74b3744b7a7ac4216" @@ -2670,6 +2735,12 @@ moment@2.15.x, moment@2.x.x, moment@^2.10.3: version "2.15.2" resolved "https://registry.yarnpkg.com/moment/-/moment-2.15.2.tgz#1bfdedf6a6e345f322fe956d5df5bd08a8ce84dc" +monotone-convex-hull-2d@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/monotone-convex-hull-2d/-/monotone-convex-hull-2d-1.0.1.tgz#47f5daeadf3c4afd37764baa1aa8787a40eee08c" + dependencies: + robust-orientation "^1.1.3" + ms@0.7.2: version "0.7.2" resolved "https://registry.yarnpkg.com/ms/-/ms-0.7.2.tgz#ae25cf2512b3885a1d95d7f037868d8431124765" @@ -3493,6 +3564,30 @@ rimraf@2, rimraf@^2.2.8, rimraf@^2.5.1, rimraf@^2.6.1: dependencies: glob "^7.0.5" +robust-orientation@^1.1.2, robust-orientation@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/robust-orientation/-/robust-orientation-1.1.3.tgz#daff5b00d3be4e60722f0e9c0156ef967f1c2049" + dependencies: + robust-scale "^1.0.2" + robust-subtract "^1.0.0" + robust-sum "^1.0.0" + two-product "^1.0.2" + +robust-scale@^1.0.2: + version "1.0.2" + resolved 
"https://registry.yarnpkg.com/robust-scale/-/robust-scale-1.0.2.tgz#775132ed09542d028e58b2cc79c06290bcf78c32" + dependencies: + two-product "^1.0.2" + two-sum "^1.0.0" + +robust-subtract@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/robust-subtract/-/robust-subtract-1.0.0.tgz#e0b164e1ed8ba4e3a5dda45a12038348dbed3e9a" + +robust-sum@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/robust-sum/-/robust-sum-1.0.0.tgz#16646e525292b4d25d82757a286955e0bbfa53d9" + routes@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/routes/-/routes-2.1.0.tgz#475571192a48f99b6c065dd926bb75e8ae83e8a2" @@ -3586,6 +3681,13 @@ simple-get@^1.4.2: unzip-response "^1.0.0" xtend "^4.0.0" +simplicial-complex@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/simplicial-complex/-/simplicial-complex-1.0.0.tgz#6c33a4ed69fcd4d91b7bcadd3b30b63683eae241" + dependencies: + bit-twiddle "^1.0.0" + union-find "^1.0.0" + slash@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/slash/-/slash-1.0.0.tgz#c41f2f6c39fc16d1cd17ad4b5d896114ae470d55" @@ -3952,6 +4054,14 @@ tweetnacl@^0.14.3, tweetnacl@~0.14.0: version "0.14.5" resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" +two-product@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/two-product/-/two-product-1.0.2.tgz#67d95d4b257a921e2cb4bd7af9511f9088522eaa" + +two-sum@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/two-sum/-/two-sum-1.0.0.tgz#31d3f32239e4f731eca9df9155e2b297f008ab64" + type-check@~0.3.2: version "0.3.2" resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" @@ -3996,6 +4106,10 @@ undefsafe@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/undefsafe/-/undefsafe-0.0.3.tgz#ecca3a03e56b9af17385baac812ac83b994a962f" +union-find@^1.0.0: + version "1.0.2" + resolved "https://registry.yarnpkg.com/union-find/-/union-find-1.0.2.tgz#292bac415e6ad3a89535d237010db4a536284e58" + unzip-response@^1.0.0: version "1.0.2" resolved "https://registry.yarnpkg.com/unzip-response/-/unzip-response-1.0.2.tgz#b984f0877fc0a89c2c773cc1ef7b5b232b5b06fe" From 0c2c3ab7c37dac719056031be9b0cd328b613656 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Tue, 30 May 2017 17:16:52 +0100 Subject: [PATCH 13/63] Store hardcoded origins indicators on file upload --- app/routes/projects--files-upload.js | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/app/routes/projects--files-upload.js b/app/routes/projects--files-upload.js index 257775a0..6059b9b3 100644 --- a/app/routes/projects--files-upload.js +++ b/app/routes/projects--files-upload.js @@ -83,6 +83,22 @@ module.exports = [ updated_at: (new Date()) }; + if (type === 'origins') { + // When uploading an origins file, the user has to specify + // what attributes have population data, and a label for them. + // This will later be used for running analysis on subgroups + // of population. + // TODO: Get the values from the form. 
+ data.data = JSON.stringify({ + indicators: [ + { + key: 'population', + label: 'Total population' + } + ] + }); + } + return db('projects_files') .returning('*') .insert(data) From 7d44353fc0421551bb0a7f1647c010f15458d61a Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Tue, 30 May 2017 17:18:17 +0100 Subject: [PATCH 14/63] Remove old results from table before generating new --- app/routes/scenarios--gen-results.js | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/app/routes/scenarios--gen-results.js b/app/routes/scenarios--gen-results.js index f57ff8d5..58e68622 100644 --- a/app/routes/scenarios--gen-results.js +++ b/app/routes/scenarios--gen-results.js @@ -61,7 +61,7 @@ module.exports = [ .then(scenario => db('scenarios_settings') .select('value') .where('key', 'admin_areas') - .where('scenario_id', scenario.id) + .where('scenario_id', scId) .first() .then(setting => { if (setting.value === '[]') { @@ -82,7 +82,13 @@ module.exports = [ return Promise.all(tasks) .then(() => db('scenarios_files') .whereIn('id', ids) - .del()); + .del() + ) + .then(() => db('results') + .where('project_id', projId) + .where('scenario_id', scId) + .del() + ); })) ) // Create an operation. From 47b895696c54b2ef7c19852451f554de800e1885 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Tue, 30 May 2017 17:19:16 +0100 Subject: [PATCH 15/63] Process origins on project setup --- app/services/project-setup/project-setup.js | 82 +++++++++++++++++++-- 1 file changed, 76 insertions(+), 6 deletions(-) diff --git a/app/services/project-setup/project-setup.js b/app/services/project-setup/project-setup.js index 5969e8db..06b0712c 100644 --- a/app/services/project-setup/project-setup.js +++ b/app/services/project-setup/project-setup.js @@ -1,6 +1,7 @@ 'use strict'; import path from 'path'; import bbox from '@turf/bbox'; +import centerOfMass from '@turf/center-of-mass'; import _ from 'lodash'; import config from '../../config'; @@ -103,6 +104,64 @@ export function concludeProjectSetup (e) { .then(() => adminAreaTask()); } + function processOrigins (originsData) { + logger && logger.log('process origins'); + let indicators = originsData.data.indicators; + let neededProps = indicators.map(o => o.key); + neededProps.push('name'); + + return getJSONFileContents(originsData.path) + .then(originsFC => { + logger && logger.log('origins before filter', originsFC.features.length); + let features = originsFC.features.filter(feat => { + let props = Object.keys(feat.properties); + return neededProps.every(o => props.indexOf(o) !== -1); + }); + + logger && logger.log('origins after filter', features.length); + + let originsIndicators = []; + let origins = features.map(feat => { + let coordinates = feat.geometry.type === 'Point' + ? feat.geometry.coordinates + : centerOfMass(feat).geometry.coordinates; + + // Will be flattened later. + // The array is constructed in this way so we can match the index of the + // results array and attribute the correct id. + let featureIndicators = indicators.map(ind => ({ + key: ind.key, + label: ind.label, + value: parseInt(feat.properties[ind.key]) + })); + originsIndicators.push(featureIndicators); + + return { + project_id: projId, + name: feat.properties.name, + coordinates: JSON.stringify(coordinates) + }; + }); + + return db.transaction(function (trx) { + return trx.batchInsert('projects_origins', origins) + .returning('id') + .then(ids => { + // Add ids to the originsIndicators and flatten the array in the process. 
+ let flat = []; + originsIndicators.forEach((resInd, resIdx) => { + resInd.forEach(ind => { + ind.origin_id = ids[resIdx]; + flat.push(ind); + }); + }); + return flat; + }) + .then(data => trx.batchInsert('projects_origins_indicators', data)); + }); + }); + } + let op = new Operation(db); op.loadById(opId) .then(() => Promise.all([ @@ -115,13 +174,21 @@ export function concludeProjectSetup (e) { db('projects_files') .select('*') .where('project_id', projId) - .where('type', 'admin-bounds') - .first() - .then(file => getJSONFileContents(file.path)) + .whereIn('type', ['admin-bounds', 'origins']) + .orderBy('type') + .then(files => { + // Get the data from the admin bounds file immediately but pass + // the full data for the origins file because other values from the db + // are needed. + let [adminBoundsData, originsData] = files; + return getJSONFileContents(adminBoundsData.path) + .then(adminBoundsContent => ([adminBoundsContent, originsData])); + }) ])) .then(filesContent => { - // let [roadNetwork, adminBoundsFc] = filesContent; - let [adminBoundsFc] = filesContent; + // let [roadNetwork, [adminBoundsFc, originsData]] = filesContent; + let [[adminBoundsFc, originsData]] = filesContent; + // Run the tasks in series rather than in parallel. // This is better for error handling. If they run in parallel and // `processAdminAreas` errors, the script hangs a bit while @@ -129,7 +196,10 @@ export function concludeProjectSetup (e) { // the error is captured by the promise. // Since processing the admin areas is a pretty fast operation, the // performance is not really affected. - return processAdminAreas(adminBoundsFc); + return Promise.all([ + processAdminAreas(adminBoundsFc), + processOrigins(originsData) + ]); // .then(() => { // logger && logger.log('process road network'); // return importRoadNetwork(projId, scId, op, roadNetwork); From f27f911958cef4d0f493bb0ae3d036b9fceb1380 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Thu, 1 Jun 2017 19:25:39 +0100 Subject: [PATCH 16/63] Add endpoints to return results --- app/routes/scenario--results-download.js | 202 ++++++++++++++++++++++- app/services/plugins.js | 3 +- 2 files changed, 203 insertions(+), 2 deletions(-) diff --git a/app/routes/scenario--results-download.js b/app/routes/scenario--results-download.js index b053b611..318f59c0 100644 --- a/app/routes/scenario--results-download.js +++ b/app/routes/scenario--results-download.js @@ -3,12 +3,13 @@ import Joi from 'joi'; import Boom from 'boom'; import Promise from 'bluebird'; import Zip from 'node-zip'; +import _ from 'lodash'; import db from '../db/'; import { getFileContents } from '../s3/utils'; import { FileNotFoundError } from '../utils/errors'; -module.exports = [ +export default [ { path: '/projects/{projId}/scenarios/{scId}/results', method: 'GET', @@ -71,5 +72,204 @@ module.exports = [ reply(Boom.badImplementation(err)); }); } + }, + { + path: '/projects/{projId}/scenarios/{scId}/results/analysis', + method: 'GET', + config: { + validate: { + params: { + projId: Joi.number(), + scId: Joi.number() + } + } + }, + handler: (request, reply) => { + const { projId, scId } = request.params; + + // Future structure. + // let r = { + // accessibilityTime: [ + // { + // poi: 'bank', + // times: [10, 20, 30, 40, 50], + // adminAreas: [ + // { + // name: 'something', + // indicators: [ + // { + // name: 'Total Population', + // data: [0, 0, 0.1, 0.5, 1] + // } + // ] + // } + // ] + // } + // ] + // }; + + // Get all the poi types. 
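+ // POI categories live in the scenarios_files 'subtype' column (e.g. 'pointOfInterest').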
+ let _poi = db('scenarios_files') + .select('subtype') + .where('type', 'poi') + .where('project_id', projId) + .where('scenario_id', scId); + + // Get all the admin areas for which results were generated. + let _aa = db('scenarios_settings') + .select('value') + .where('key', 'admin_areas') + .where('scenario_id', scId) + .first() + .then(aa => JSON.parse(aa.value)) + .then(selectedAA => db('projects_aa') + .select('id', 'name') + .where('project_id', projId) + .whereIn('id', selectedAA) + ); + + // Generate the accessibilityTime array to be used later. + let _accessibilityTime = Promise.all([_poi, _aa]) + .then(data => { + let [poi, aa] = data; + let accessibilityTime = poi.map(p => { + return { + poi: p.subtype, + analysisMins: [10, 20, 30, 60, 90, 120], + adminAreas: aa.map(a => { + return { + id: a.id, + name: a.name + }; + }) + }; + }); + + return accessibilityTime; + }); + + // Get all the results. + let _all = db.raw(` + SELECT + pop.value as pop_value, + pop.key as pop_key, + r.project_aa_id as aa_id, + rp.type as poi_type, + rp.time as time_to_poi, + po.id as origin_id + FROM results r + INNER JOIN results_poi rp ON r.id = rp.result_id + INNER JOIN projects_origins po ON po.id = r.origin_id + INNER JOIN projects_origins_indicators pop ON po.id = pop.origin_id + WHERE pop.key = 'population' + `) + .then(res => res.rows); + + const sumPop = (arr) => arr.reduce((acc, o) => acc + (parseInt(o.pop_value) || 1), 0); + + // Compute the results. + Promise.all([_accessibilityTime, _all]) + .then(data => { + let [accessibilityTime, all] = data; + + accessibilityTime = accessibilityTime.map(poi => { + poi.adminAreas = _(poi.adminAreas).map(aa => { + let filtered = all.filter(r => r.poi_type === poi.poi && r.aa_id === aa.id); + + if (filtered.length) { + let totalPop = sumPop(filtered); + let pop = poi.analysisMins.map(time => sumPop(filtered.filter(o => o.time_to_poi <= time * 60))); + aa.data = pop.map(o => o / totalPop * 100); + } else { + aa.data = []; + } + + return aa; + }) + .sortBy(poi.adminAreas, o => _.deburr(o.name)) + .reverse() + .value(); + + return poi; + }); + return accessibilityTime; + }) + .then(accessibilityTime => { + reply({accessibilityTime}); + }).catch(err => { + console.log('err', err); + reply(Boom.badImplementation(err)); + }); + } + }, + { + path: '/projects/{projId}/scenarios/{scId}/results/raw', + method: 'GET', + config: { + validate: { + params: { + projId: Joi.number(), + scId: Joi.number() + }, + query: { + sortBy: Joi.string(), + sortDir: Joi.string().valid(['asc', 'desc']), + limit: Joi.number().default(50), + page: Joi.number() + } + } + }, + handler: (request, reply) => { + const { projId, scId } = request.params; + const { page, limit } = request; + const offset = (page - 1) * limit; + let { sortBy, sortDir } = request.query; + + sortBy = sortBy || 'origin_name'; + sortDir = sortDir || 'asc'; + + let _count = db('results') + .count('projects_origins.id') + .innerJoin('results_poi', 'results.id', 'results_poi.result_id') + .innerJoin('projects_origins', 'projects_origins.id', 'results.origin_id') + .innerJoin('projects_origins_indicators', 'projects_origins_indicators.origin_id', 'projects_origins.id') + .innerJoin('projects_aa', 'projects_aa.id', 'results.project_aa_id') + .where('results.project_id', projId) + .where('results.scenario_id', scId) + .where('projects_origins_indicators.key', 'population') + .where('results_poi.type', 'pointOfInterest') + .first(); + + let _results = db('results') + .select( + 'projects_origins.id as origin_id', + 
'projects_origins.name as origin_name', + 'results.project_aa_id as aa_id', + 'projects_aa.name as aa_name', + 'projects_origins_indicators.value as pop_value', + 'projects_origins_indicators.key as pop_key', + 'results_poi.type as poi_type', + 'results_poi.time as time_to_poi' + ) + .innerJoin('results_poi', 'results.id', 'results_poi.result_id') + .innerJoin('projects_origins', 'projects_origins.id', 'results.origin_id') + .innerJoin('projects_origins_indicators', 'projects_origins_indicators.origin_id', 'projects_origins.id') + .innerJoin('projects_aa', 'projects_aa.id', 'results.project_aa_id') + .where('results.project_id', projId) + .where('results.scenario_id', scId) + .where('projects_origins_indicators.key', 'population') + .where('results_poi.type', 'pointOfInterest') + .orderBy(sortBy, sortDir) + .offset(offset).limit(limit); + + Promise.all([_count, _results]) + .then(res => { + request.count = parseInt(res[0].count); + reply(res[1]); + }).catch(err => { + console.log('err', err); + reply(Boom.badImplementation(err)); + }); + } } ]; diff --git a/app/services/plugins.js b/app/services/plugins.js index 15999e6a..98d47c97 100644 --- a/app/services/plugins.js +++ b/app/services/plugins.js @@ -43,7 +43,8 @@ module.exports = function (hapiServer, cb) { limit: 100, routes: [ {route: '/projects', methods: 'GET'}, - {route: '/projects/{projId}/scenarios', methods: 'GET'} + {route: '/projects/{projId}/scenarios', methods: 'GET'}, + {route: '/projects/{projId}/scenarios/{scId}/results/raw', methods: 'GET'} ] } } From ae38cdc309aa3878bc42a732f71c56f3ace9b34c Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Thu, 1 Jun 2017 19:27:26 +0100 Subject: [PATCH 17/63] Rename result routes file --- .../{scenario--results-download.js => scenario--results.js} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename app/routes/{scenario--results-download.js => scenario--results.js} (100%) diff --git a/app/routes/scenario--results-download.js b/app/routes/scenario--results.js similarity index 100% rename from app/routes/scenario--results-download.js rename to app/routes/scenario--results.js From f3ec6e6d0e9611147f05b16e4e108f8d1a549bf1 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Thu, 1 Jun 2017 20:04:05 +0100 Subject: [PATCH 18/63] Handle failure on project setup. Clean needed tables on project setup start. The information will be inserted again. There's no risk of loss --- app/db/structure.js | 3 +- app/services/project-setup/project-setup.js | 130 ++++++++++++-------- 2 files changed, 82 insertions(+), 51 deletions(-) diff --git a/app/db/structure.js b/app/db/structure.js index da9640c8..725abf03 100644 --- a/app/db/structure.js +++ b/app/db/structure.js @@ -247,7 +247,8 @@ export function createProjectsOriginsIndicatorsTable () { table.increments('id').primary(); table.integer('origin_id').unsigned(); table.foreign('origin_id') - .references('projects_origins.id'); + .references('projects_origins.id') + .onDelete('CASCADE'); table.string('key'); table.string('label'); table.integer('value'); diff --git a/app/services/project-setup/project-setup.js b/app/services/project-setup/project-setup.js index 06b0712c..d9b6b947 100644 --- a/app/services/project-setup/project-setup.js +++ b/app/services/project-setup/project-setup.js @@ -100,66 +100,96 @@ export function concludeProjectSetup (e) { }); }; + // Clean the tables so any remnants of previous attempts are removed. + // This avoids primary keys collisions. 
+ let cleanAATable = () => { + return Promise.all([ + db('projects_aa') + .where('project_id', projId) + .del(), + db('scenarios_settings') + .where('scenario_id', scId) + .where('key', 'admin_areas') + .del() + ]); + }; + return op.log('process:admin-bounds', {message: 'Processing admin areas'}) + .then(() => cleanAATable()) .then(() => adminAreaTask()); } function processOrigins (originsData) { logger && logger.log('process origins'); - let indicators = originsData.data.indicators; - let neededProps = indicators.map(o => o.key); - neededProps.push('name'); - - return getJSONFileContents(originsData.path) - .then(originsFC => { - logger && logger.log('origins before filter', originsFC.features.length); - let features = originsFC.features.filter(feat => { - let props = Object.keys(feat.properties); - return neededProps.every(o => props.indexOf(o) !== -1); - }); - logger && logger.log('origins after filter', features.length); - - let originsIndicators = []; - let origins = features.map(feat => { - let coordinates = feat.geometry.type === 'Point' - ? feat.geometry.coordinates - : centerOfMass(feat).geometry.coordinates; - - // Will be flattened later. - // The array is constructed in this way so we can match the index of the - // results array and attribute the correct id. - let featureIndicators = indicators.map(ind => ({ - key: ind.key, - label: ind.label, - value: parseInt(feat.properties[ind.key]) - })); - originsIndicators.push(featureIndicators); - - return { - project_id: projId, - name: feat.properties.name, - coordinates: JSON.stringify(coordinates) - }; - }); + let originsTask = () => { + let indicators = originsData.data.indicators; + let neededProps = indicators.map(o => o.key); + neededProps.push('name'); + + return getJSONFileContents(originsData.path) + .then(originsFC => { + logger && logger.log('origins before filter', originsFC.features.length); + let features = originsFC.features.filter(feat => { + let props = Object.keys(feat.properties); + return neededProps.every(o => props.indexOf(o) !== -1); + }); + + logger && logger.log('origins after filter', features.length); + + let originsIndicators = []; + let origins = features.map(feat => { + let coordinates = feat.geometry.type === 'Point' + ? feat.geometry.coordinates + : centerOfMass(feat).geometry.coordinates; + + // Will be flattened later. + // The array is constructed in this way so we can match the index of the + // results array and attribute the correct id. + let featureIndicators = indicators.map(ind => ({ + key: ind.key, + label: ind.label, + value: parseInt(feat.properties[ind.key]) + })); + originsIndicators.push(featureIndicators); - return db.transaction(function (trx) { - return trx.batchInsert('projects_origins', origins) - .returning('id') - .then(ids => { - // Add ids to the originsIndicators and flatten the array in the process. - let flat = []; - originsIndicators.forEach((resInd, resIdx) => { - resInd.forEach(ind => { - ind.origin_id = ids[resIdx]; - flat.push(ind); + return { + project_id: projId, + name: feat.properties.name, + coordinates: JSON.stringify(coordinates) + }; + }); + + return db.transaction(function (trx) { + return trx.batchInsert('projects_origins', origins) + .returning('id') + .then(ids => { + // Add ids to the originsIndicators and flatten the array in the process. 
+ let flat = []; + originsIndicators.forEach((resInd, resIdx) => { + resInd.forEach(ind => { + ind.origin_id = ids[resIdx]; + flat.push(ind); + }); }); - }); - return flat; - }) - .then(data => trx.batchInsert('projects_origins_indicators', data)); + return flat; + }) + .then(data => trx.batchInsert('projects_origins_indicators', data)); + }); }); - }); + }; + + // Clean the tables so any remnants of previous attempts are removed. + // This avoids primary keys collisions. + let cleanOriginsTable = () => { + return db('projects_origins') + .where('project_id', projId) + .del(); + }; + + return op.log('process:origins', {message: 'Processing origins'}) + .then(() => cleanOriginsTable()) + .then(() => originsTask()); } let op = new Operation(db); From 682fbcb62ae6de94ac975a1adb9d159541908bba Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Fri, 2 Jun 2017 11:04:09 +0100 Subject: [PATCH 19/63] Fix tests --- test/test-services-project-setup.js | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/test/test-services-project-setup.js b/test/test-services-project-setup.js index 727e2e4b..068db847 100644 --- a/test/test-services-project-setup.js +++ b/test/test-services-project-setup.js @@ -117,19 +117,21 @@ describe('Finish Project Setup', function () { .where('operation_id', op.getId()) .then(logs => { // assert.lengthOf(logs, 5); - assert.lengthOf(logs, 3); + assert.lengthOf(logs, 4); assert.equal(logs[0].code, 'start'); assert.equal(logs[0].data.message, 'Operation started'); assert.equal(logs[1].code, 'process:admin-bounds'); assert.equal(logs[1].data.message, 'Processing admin areas'); + assert.equal(logs[2].code, 'process:origins'); + assert.equal(logs[2].data.message, 'Processing origins'); // assert.equal(logs[2].code, 'process:road-network'); // assert.equal(logs[2].data.message, 'Road network processing started'); // assert.equal(logs[3].code, 'process:road-network'); // assert.equal(logs[3].data.message, 'Road network processing finished'); // assert.equal(logs[4].code, 'success'); // assert.equal(logs[4].data.message, 'Operation complete'); - assert.equal(logs[2].code, 'success'); - assert.equal(logs[2].data.message, 'Operation complete'); + assert.equal(logs[3].code, 'success'); + assert.equal(logs[3].data.message, 'Operation complete'); }) ]) .then(() => done()) @@ -173,7 +175,7 @@ describe('Finish Project Setup', function () { .orderBy('id', 'desc') .then(logs => { assert.equal(err, 'Invalid administrative boundaries file'); - assert.lengthOf(logs, 3); + assert.lengthOf(logs, 4); assert.equal(logs[0].code, 'error'); assert.equal(logs[0].data.error, 'Invalid administrative boundaries file'); }) From 163a2526b79581c26249ce83f6ed61e189221bd5 Mon Sep 17 00:00:00 2001 From: Olaf Veerman Date: Fri, 2 Jun 2017 08:37:06 -0400 Subject: [PATCH 20/63] Check for latest image rra-analysis. Fix #141 --- app/routes/scenarios--gen-results.js | 60 +++++++++++++++------------- 1 file changed, 32 insertions(+), 28 deletions(-) diff --git a/app/routes/scenarios--gen-results.js b/app/routes/scenarios--gen-results.js index 1d516b85..7cedfc4d 100644 --- a/app/routes/scenarios--gen-results.js +++ b/app/routes/scenarios--gen-results.js @@ -194,35 +194,39 @@ function spawnAnalysisProcess (projId, scId, opId) { // Append the name of the image last args.push(config.analysisProcess.container); - // Spawn the processing script. It will take care of updating - // the database with progress. 
- let analysisProc = cp.spawn(service, args); - analysisProc.stdout.on('data', (data) => { - console.log(`[ANALYSIS P${projId} S${scId}]`, data.toString()); - }); + // Make sure the latest image (dev / stable) is used + let pullImage = cp.spawn(service, ['pull', config.analysisProcess.container]); + pullImage.on('close', () => { + // Spawn the processing script. It will take care of updating + // the database with progress. + let analysisProc = cp.spawn(service, args); + analysisProc.stdout.on('data', (data) => { + console.log(`[ANALYSIS P${projId} S${scId}]`, data.toString()); + }); - let error; - analysisProc.stderr.on('data', (data) => { - error = data.toString(); - console.log(`[ANALYSIS P${projId} S${scId}][ERROR]`, data.toString()); - }); + let error; + analysisProc.stderr.on('data', (data) => { + error = data.toString(); + console.log(`[ANALYSIS P${projId} S${scId}][ERROR]`, data.toString()); + }); - analysisProc.on('close', (code) => { - if (code !== 0) { - // The operation may not have finished if the error took place outside - // the promise, or if the error was due to a wrong db connection. - let op = new Operation(db); - op.loadById(opId) - .then(op => { - if (!op.isCompleted()) { - return op.log('error', {error: error}) - .then(op => op.finish()); - } - }); - } - // Remove the container once the process is finished. Especially important - // for a hosted scenario, in which stopped containers may incur costs. - cp.spawn(service, ['rm', containerName]); - console.log(`[ANALYSIS P${projId} S${scId}][EXIT]`, code.toString()); + analysisProc.on('close', (code) => { + if (code !== 0) { + // The operation may not have finished if the error took place outside + // the promise, or if the error was due to a wrong db connection. + let op = new Operation(db); + op.loadById(opId) + .then(op => { + if (!op.isCompleted()) { + return op.log('error', {error: error}) + .then(op => op.finish()); + } + }); + } + // Remove the container once the process is finished. Especially important + // for a hosted scenario, in which stopped containers may incur costs. 
+ cp.spawn(service, ['rm', containerName]); + console.log(`[ANALYSIS P${projId} S${scId}][EXIT]`, code.toString()); + }); }); } From b68b9dbe7582da1f68b47053905a58722eff60c9 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Tue, 6 Jun 2017 16:25:21 +0100 Subject: [PATCH 21/63] Add cascade delete for project origins --- app/db/structure.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/app/db/structure.js b/app/db/structure.js index 725abf03..2f11c964 100644 --- a/app/db/structure.js +++ b/app/db/structure.js @@ -235,7 +235,8 @@ export function createProjectsOriginsTable () { table.increments('id').primary(); table.integer('project_id').unsigned(); table.foreign('project_id') - .references('projects.id'); + .references('projects.id') + .onDelete('CASCADE'); table.string('name'); table.json('coordinates'); }); From b349f8b69668ccdb68f91f8522988fa0254f5ae7 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Tue, 6 Jun 2017 17:42:24 +0100 Subject: [PATCH 22/63] Change settings value field to text --- app/db/structure.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/db/structure.js b/app/db/structure.js index 2f11c964..fadda73b 100644 --- a/app/db/structure.js +++ b/app/db/structure.js @@ -151,7 +151,7 @@ export function createScenariosSettingsTable () { DEBUG && console.log('Creating table: scenarios_settings'); return db.schema.createTable('scenarios_settings', table => { table.string('key'); - table.string('value'); + table.text('value'); table.integer('scenario_id').unsigned(); table.foreign('scenario_id') .references('scenarios.id') From f18fdac3215993d3d477a9a75e8e8bffb941cc54 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Wed, 7 Jun 2017 14:37:59 +0100 Subject: [PATCH 23/63] Handle nulls in results --- app/routes/scenario--results.js | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/app/routes/scenario--results.js b/app/routes/scenario--results.js index 318f59c0..6f7f705f 100644 --- a/app/routes/scenario--results.js +++ b/app/routes/scenario--results.js @@ -165,7 +165,10 @@ export default [ `) .then(res => res.rows); + // Sum by pop_value. const sumPop = (arr) => arr.reduce((acc, o) => acc + (parseInt(o.pop_value) || 1), 0); + // Check if given time is less that given nimutes accounting for nulls. + const isLessThanMinutes = (time, min) => time === null ? false : time <= min * 60; // Compute the results. Promise.all([_accessibilityTime, _all]) @@ -178,7 +181,7 @@ export default [ if (filtered.length) { let totalPop = sumPop(filtered); - let pop = poi.analysisMins.map(time => sumPop(filtered.filter(o => o.time_to_poi <= time * 60))); + let pop = poi.analysisMins.map(time => sumPop(filtered.filter(o => isLessThanMinutes(o.time_to_poi, time)))); aa.data = pop.map(o => o / totalPop * 100); } else { aa.data = []; From ddbb011a04eb79f8b6f33f84805f26fd3ebc6217 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Wed, 7 Jun 2017 15:14:13 +0100 Subject: [PATCH 24/63] Fix id clash when creating large projects with existing fixtures --- test/utils/data.js | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/test/utils/data.js b/test/utils/data.js index afd8e0c2..f7d8db45 100644 --- a/test/utils/data.js +++ b/test/utils/data.js @@ -769,7 +769,21 @@ export function fixMeUp () { project1100(), project1200(), project2000() - ]); + ]) + // Reset counters. 
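+  // The fixtures above insert rows with hard-coded ids, which does not
+  // advance the underlying Postgres sequences. Setting each sequence to
+  // MAX(id) ensures that subsequent inserts relying on the sequence start
+  // above the fixture ids instead of colliding with them.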
+ .then(() => db.raw(` + select setval('operations_id_seq', (SELECT MAX(id) FROM operations)); + select setval('operations_logs_id_seq', (SELECT MAX(id) FROM operations_logs)); + select setval('projects_aa_id_seq', (SELECT MAX(id) FROM projects_aa)); + select setval('projects_files_id_seq', (SELECT MAX(id) FROM projects_files)); + select setval('projects_id_seq', (SELECT MAX(id) FROM projects)); + select setval('projects_origins_id_seq', (SELECT MAX(id) FROM projects_origins)); + select setval('projects_origins_indicators_id_seq', (SELECT MAX(id) FROM projects_origins_indicators)); + select setval('results_id_seq', (SELECT MAX(id) FROM results)); + select setval('results_poi_id_seq', (SELECT MAX(id) FROM results_poi)); + select setval('scenarios_files_id_seq', (SELECT MAX(id) FROM scenarios_files)); + select setval('scenarios_id_seq', (SELECT MAX(id) FROM scenarios)); + `)); } // From a2d651195625a547e4c58c814dcf6c529d484024 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Wed, 7 Jun 2017 17:20:22 +0100 Subject: [PATCH 25/63] Add endpoint to abort analysis generation. Fix #145 --- app/routes/scenarios--gen-results.js | 77 ++++++++++++++++++++++++++++ test/test-result-gen.js | 69 +++++++++++++++++++++++++ 2 files changed, 146 insertions(+) diff --git a/app/routes/scenarios--gen-results.js b/app/routes/scenarios--gen-results.js index 58e68622..b3d63417 100644 --- a/app/routes/scenarios--gen-results.js +++ b/app/routes/scenarios--gen-results.js @@ -108,6 +108,45 @@ module.exports = [ reply(Boom.badImplementation(err)); }); } + }, + { + path: '/projects/{projId}/scenarios/{scId}/generate', + method: 'DELETE', + config: { + validate: { + params: { + projId: Joi.number(), + scId: Joi.number() + } + } + }, + handler: (request, reply) => { + const { projId, scId } = request.params; + + let op = new Operation(db); + op.loadByData('generate-analysis', projId, scId) + .then(op => { + if (!op.isStarted()) { + throw new DataConflictError('Result generation not running'); + } + }, err => { + // In this case if the operation doesn't exist is not a problem. + if (err.message.match(/not exist/)) { + throw new DataConflictError('Result generation not running'); + } + throw err; + }) + // Send kill signal to generation process. + .then(() => killAnalysisProcess(projId, scId)) + // Abort operation. + .then(() => op.log('error', {error: 'Operation aborted'}).then(op => op.finish())) + .then(() => reply({statusCode: 200, message: 'Result generation aborted'})) + .catch(DataConflictError, e => reply(Boom.conflict(e.message))) + .catch(err => { + console.log('err', err); + reply(Boom.badImplementation(err)); + }); + } } ]; @@ -231,3 +270,41 @@ function spawnAnalysisProcess (projId, scId, opId) { console.log(`[ANALYSIS P${projId} S${scId}][EXIT]`, code.toString()); }); } + +function killAnalysisProcess (projId, scId) { + if (process.env.DS_ENV === 'test') { return Promise.resolve(); } + + return new Promise(resolve => { + let containerName = `analysisp${projId}s${scId}`; + let args = []; + + let service = config.analysisProcess.service; + switch (service) { + case 'hyper': + args.push( + '-e', `HYPER_ACCESS=${config.analysisProcess.hyperAccess}`, + '-e', `HYPER_SECRET=${config.analysisProcess.hyperSecret}` + ); + case 'docker': // eslint-disable-line + args.push('-t', '1'); + break; + default: + throw new Error(`${service} is not a valid option. The analysis should be run on 'docker' or 'hyper'. 
Check your config file or env variables.`); + } + + cp.exec(`${service} stop ${args.join(' ')} ${containerName}`, (errStop) => { + if (errStop) { + console.log(`[ANALYSIS P${projId} S${scId}][ABORT] stop`, errStop); + } + cp.exec(`${service} rm ${containerName}`, (errRm) => { + // This is likely to throw an error because stopping the container + // will trigger the remove action on the close listener of the analysis + // process. In any case better safe than sorry. + if (errRm) { + console.log(`[ANALYSIS P${projId} S${scId}][ABORT] rm`, errRm); + } + resolve(); + }); + }); + }); +} diff --git a/test/test-result-gen.js b/test/test-result-gen.js index 5899d685..eb82e98f 100644 --- a/test/test-result-gen.js +++ b/test/test-result-gen.js @@ -6,6 +6,7 @@ import { setupStructure as setupDdStructure } from '../app/db/structure'; import { setupStructure as setupStorageStructure } from '../app/s3/structure'; import { fixMeUp, getSelectedAdminAreas } from './utils/data'; import db from '../app/db'; +import Operation from '../app/utils/operation'; var options = { connection: {port: 2000, host: '0.0.0.0'} @@ -144,5 +145,73 @@ describe('Result generation', function () { assert.equal(res.result.message, 'Result generation already running'); }); }); + + after(function () { + // Clean operations table for project/scenario 2000 + return db('operations') + .where('project_id', 2000) + .where('scenario_id', 2000) + .del(); + }); + }); + + describe('DELETE /projects/{projId}/scenarios/{scId}/generate', function () { + it('should return conflict when getting non existent project', function () { + return instance.injectThen({ + method: 'DELETE', + url: '/projects/300/scenarios/300/generate' + }).then(res => { + assert.equal(res.statusCode, 409, 'Status code is 409'); + assert.equal(res.result.message, 'Result generation not running'); + }); + }); + + it('should return conflict when getting non existent scenario', function () { + return instance.injectThen({ + method: 'DELETE', + url: '/projects/2000/scenarios/300/generate' + }).then(res => { + assert.equal(res.statusCode, 409, 'Status code is 409'); + assert.equal(res.result.message, 'Result generation not running'); + }); + }); + + it('should stop the operation and add an error log', function () { + // There needs to be an ongoing operation to start the script. + // Operation is fully tested on another file so it's safe to use. + let op = new Operation(db); + return op.start('generate-analysis', 2000, 2000) + .then(() => op.log('start', {message: 'Operation started'})) + .then(() => instance.injectThen({ + method: 'DELETE', + url: '/projects/2000/scenarios/2000/generate' + })) + .then(res => { + assert.equal(res.statusCode, 200, 'Status code is 200'); + assert.equal(res.result.message, 'Result generation aborted'); + }) + // Check the operations table. + .then(() => db('operations') + .select('*') + .where('scenario_id', 2000) + .where('project_id', 2000) + .where('name', 'generate-analysis') + .orderBy('id') + .first() + .then(op => { + assert.equal(op.status, 'complete'); + return op.id; + }) + ) + // Check the operations_logs table. 
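+      // Note: no explicit ordering is applied; the assertions below assume
+      // the logs come back in insertion order.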
+ .then(opId => db('operations_logs') + .select('*') + .where('operation_id', opId) + .then(opLogs => { + assert.deepEqual(opLogs[0].data, { message: 'Operation started' }); + assert.deepEqual(opLogs[1].data, { error: 'Operation aborted' }); + }) + ); + }); }); }); From 4ea671ad97312834d68b9a612f442191fa75bd41 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Thu, 8 Jun 2017 17:11:00 +0100 Subject: [PATCH 26/63] Add break to switch statement --- app/routes/scenarios--gen-results.js | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/app/routes/scenarios--gen-results.js b/app/routes/scenarios--gen-results.js index b3d63417..f0ec1062 100644 --- a/app/routes/scenarios--gen-results.js +++ b/app/routes/scenarios--gen-results.js @@ -283,9 +283,11 @@ function killAnalysisProcess (projId, scId) { case 'hyper': args.push( '-e', `HYPER_ACCESS=${config.analysisProcess.hyperAccess}`, - '-e', `HYPER_SECRET=${config.analysisProcess.hyperSecret}` + '-e', `HYPER_SECRET=${config.analysisProcess.hyperSecret}`, + '-t', '1' ); - case 'docker': // eslint-disable-line + break; + case 'docker': args.push('-t', '1'); break; default: From 148b90a4ca1ecba4831768cd1c2cbdebc105e5d9 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Thu, 8 Jun 2017 21:27:56 +0100 Subject: [PATCH 27/63] Add new tables to handle source data --- app/db/structure.js | 48 ++++++++++++++++++++++++++++++++++++++++++++- 1 file changed, 47 insertions(+), 1 deletion(-) diff --git a/app/db/structure.js b/app/db/structure.js index fadda73b..b645769e 100644 --- a/app/db/structure.js +++ b/app/db/structure.js @@ -64,6 +64,16 @@ export function dropProjectsOriginsIndicators () { return db.schema.dropTableIfExists('projects_origins_indicators'); } +export function dropProjectsSourceData () { + DEBUG && console.log('Dropping table: projects_source_data'); + return db.schema.dropTableIfExists('projects_source_data'); +} + +export function dropScenariosSourceData () { + DEBUG && console.log('Dropping table: scenarios_source_data'); + return db.schema.dropTableIfExists('scenarios_source_data'); +} + export function createProjectsTable () { DEBUG && console.log('Creating table: projects'); return db.schema.createTable('projects', table => { @@ -256,6 +266,38 @@ export function createProjectsOriginsIndicatorsTable () { }); } +export function createProjectsSourceData () { + DEBUG && console.log('Creating table: projects_source_data'); + return db.schema.createTable('projects_source_data', table => { + table.increments('id').primary(); + table.integer('project_id').unsigned(); + table.foreign('project_id') + .references('projects.id') + .onDelete('CASCADE'); + table.string('name'); + table.string('type'); + table.json('data'); + }); +} + +export function createScenariosSourceData () { + DEBUG && console.log('Creating table: scenarios_source_data'); + return db.schema.createTable('scenarios_source_data', table => { + table.increments('id').primary(); + table.integer('project_id').unsigned(); + table.foreign('project_id') + .references('projects.id') + .onDelete('CASCADE'); + table.integer('scenario_id').unsigned(); + table.foreign('scenario_id') + .references('scenarios.id') + .onDelete('CASCADE'); + table.string('name'); + table.string('type'); + table.json('data'); + }); +} + export function setupStructure () { return dropScenariosFiles() .then(() => dropProjectsFiles()) @@ -265,7 +307,9 @@ export function setupStructure () { .then(() => dropOperationsLogs()) .then(() => dropOperations()) .then(() => dropScenariosSettings()) + 
.then(() => dropScenariosSourceData()) .then(() => dropScenarios()) + .then(() => dropProjectsSourceData()) .then(() => dropProjectsOriginsIndicators()) .then(() => dropProjectsOrigins()) .then(() => dropProjects()) @@ -280,5 +324,7 @@ export function setupStructure () { .then(() => createProjectsOriginsTable()) .then(() => createProjectsOriginsIndicatorsTable()) .then(() => createResultsTable()) - .then(() => createResultsPoiTable()); + .then(() => createResultsPoiTable()) + .then(() => createScenariosSourceData()) + .then(() => createProjectsSourceData()); } From 552962b2fc0b20b61a5f23a44d5eaecad592e695 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Thu, 8 Jun 2017 21:28:47 +0100 Subject: [PATCH 28/63] Update fixtures to include new data structure --- test/utils/data.js | 246 ++++++++++++++++++++++++++++++++++++++++++--- 1 file changed, 233 insertions(+), 13 deletions(-) diff --git a/test/utils/data.js b/test/utils/data.js index f7d8db45..0117111d 100644 --- a/test/utils/data.js +++ b/test/utils/data.js @@ -184,6 +184,13 @@ export function project1001 () { 'updated_at': '2017-02-01T12:00:03.000Z' })) .then(() => putObjectFromFile(bucket, 'project-1001/profile_000000', FILE_PROFILE)) + .then(() => projectSourceData({ + 'id': 1001, + 'name': 'profile', + 'type': 'file', + 'project_id': 1001 + // 'data': + })) .then(() => scenario({ 'id': 1001, 'name': 'Main scenario', @@ -221,7 +228,15 @@ export function project1001 () { 'created_at': '2017-02-01T12:00:03.000Z', 'updated_at': '2017-02-01T12:00:03.000Z' })) - .then(() => putObjectFromFile(bucket, 'scenario-1001/poi_000000', FILE_POI)); + .then(() => putObjectFromFile(bucket, 'scenario-1001/poi_000000', FILE_POI)) + .then(() => scenarioSourceData({ + 'id': 1001, + 'name': 'poi', + 'type': 'file', + 'project_id': 1001, + 'scenario_id': 1001 + // 'data': + })); } // Project 1003 in pending state with one scenario and a origins file @@ -245,6 +260,13 @@ export function project1003 () { 'updated_at': '2017-02-01T12:00:04.000Z' })) .then(() => putObjectFromFile(bucket, 'project-1003/origins_000000', FILE_ORIGINS)) + .then(() => projectSourceData({ + 'id': 1003, + 'name': 'origins', + 'type': 'file', + 'project_id': 1003 + // 'data': + })) .then(() => scenario({ 'id': 1003, 'name': 'Main scenario 1003', @@ -281,7 +303,15 @@ export function project1003 () { 'created_at': '2017-02-01T12:00:04.000Z', 'updated_at': '2017-02-01T12:00:04.000Z' })) - .then(() => putObjectFromFile(bucket, 'scenario-1003/road-network_000000', FILE_ROAD_NETWORK)); + .then(() => putObjectFromFile(bucket, 'scenario-1003/road-network_000000', FILE_ROAD_NETWORK)) + .then(() => scenarioSourceData({ + 'id': 1003, + 'name': 'road-network', + 'type': 'file', + 'project_id': 1003, + 'scenario_id': 1003 + // 'data': + })); } // Project 1004 in pending state with one scenarios and all files @@ -327,6 +357,29 @@ export function project1004 () { .then(() => putObjectFromFile(bucket, 'project-1004/profile_000000', FILE_PROFILE)) .then(() => putObjectFromFile(bucket, 'project-1004/origins_000000', FILE_ORIGINS)) .then(() => putObjectFromFile(bucket, 'project-1004/admin-bounds_000000', FILE_ADMIN)) + .then(() => projectSourceData([ + { + 'id': 1004, + 'name': 'profile', + 'type': 'file', + 'project_id': 1004 + // 'data': + }, + { + 'id': 1005, + 'name': 'origins', + 'type': 'file', + 'project_id': 1004 + // 'data': + }, + { + 'id': 1006, + 'name': 'admin-bounds', + 'type': 'file', + 'project_id': 1004 + // 'data': + } + ])) .then(() => scenario({ 'id': 1004, 'name': 'Main scenario 
1004', @@ -377,7 +430,25 @@ export function project1004 () { } ])) .then(() => putObjectFromFile(bucket, 'scenario-1004/road-network_000000', FILE_ROAD_NETWORK)) - .then(() => putObjectFromFile(bucket, 'scenario-1004/poi_000000', FILE_POI)); + .then(() => putObjectFromFile(bucket, 'scenario-1004/poi_000000', FILE_POI)) + .then(() => scenarioSourceData([ + { + 'id': 1004, + 'name': 'road-network', + 'type': 'file', + 'project_id': 1004, + 'scenario_id': 1004 + // 'data': + }, + { + 'id': 1005, + 'name': 'poi', + 'type': 'file', + 'project_id': 1004, + 'scenario_id': 1004 + // 'data': + } + ])); } // Project 1100 in active state with one scenarios and all files @@ -426,6 +497,29 @@ export function project1100 () { .then(() => putObjectFromFile(bucket, 'project-1100/profile_000000', FILE_PROFILE)) .then(() => putObjectFromFile(bucket, 'project-1100/origins_000000', FILE_ORIGINS)) .then(() => putObjectFromFile(bucket, 'project-1100/admin-bounds_000000', FILE_ADMIN)) + .then(() => projectSourceData([ + { + 'id': 1100, + 'name': 'profile', + 'type': 'file', + 'project_id': 1100 + // 'data': + }, + { + 'id': 1101, + 'name': 'origins', + 'type': 'file', + 'project_id': 1100 + // 'data': + }, + { + 'id': 1102, + 'name': 'admin-bounds', + 'type': 'file', + 'project_id': 1100 + // 'data': + } + ])) .then(() => scenario({ 'id': 1100, 'name': 'Main scenario 1100', @@ -483,7 +577,25 @@ export function project1100 () { } ])) .then(() => putObjectFromFile(bucket, 'scenario-1100/road-network_000000', FILE_ROAD_NETWORK)) - .then(() => putObjectFromFile(bucket, 'scenario-1100/poi_000000', FILE_POI)); + .then(() => putObjectFromFile(bucket, 'scenario-1100/poi_000000', FILE_POI)) + .then(() => scenarioSourceData([ + { + 'id': 1100, + 'name': 'road-network', + 'type': 'file', + 'project_id': 1100, + 'scenario_id': 1100 + // 'data': + }, + { + 'id': 1101, + 'name': 'poi', + 'type': 'file', + 'project_id': 1100, + 'scenario_id': 1100 + // 'data': + } + ])); } // Project 1200 in active state with 2 scenarios @@ -532,6 +644,29 @@ export function project1200 () { .then(() => putObjectFromFile(bucket, 'project-1200/profile_000000', FILE_PROFILE)) .then(() => putObjectFromFile(bucket, 'project-1200/origins_000000', FILE_ORIGINS)) .then(() => putObjectFromFile(bucket, 'project-1200/admin-bounds_000000', FILE_ADMIN)) + .then(() => projectSourceData([ + { + 'id': 1200, + 'name': 'profile', + 'type': 'file', + 'project_id': 1200 + // 'data': + }, + { + 'id': 1201, + 'name': 'origins', + 'type': 'file', + 'project_id': 1200 + // 'data': + }, + { + 'id': 1202, + 'name': 'admin-bounds', + 'type': 'file', + 'project_id': 1200 + // 'data': + } + ])) .then(() => scenario([ { 'id': 1200, @@ -645,7 +780,41 @@ export function project1200 () { .then(() => putObjectFromFile(bucket, 'scenario-1200/road-network_000000', FILE_ROAD_NETWORK)) .then(() => putObjectFromFile(bucket, 'scenario-1200/poi_000000', FILE_POI)) .then(() => putObjectFromFile(bucket, 'scenario-1201/road-network_000000', FILE_ROAD_NETWORK)) - .then(() => putObjectFromFile(bucket, 'scenario-1201/poi_000000', FILE_POI)); + .then(() => putObjectFromFile(bucket, 'scenario-1201/poi_000000', FILE_POI)) + .then(() => scenarioSourceData([ + { + 'id': 1200, + 'name': 'road-network', + 'type': 'file', + 'project_id': 1200, + 'scenario_id': 1200 + // 'data': + }, + { + 'id': 1201, + 'name': 'poi', + 'type': 'file', + 'project_id': 1200, + 'scenario_id': 1200 + // 'data': + }, + { + 'id': 1202, + 'name': 'road-network', + 'type': 'file', + 'project_id': 1200, + 'scenario_id': 
1201 + // 'data': + }, + { + 'id': 1203, + 'name': 'poi', + 'type': 'file', + 'project_id': 1200, + 'scenario_id': 1201 + // 'data': + } + ])); } // Project 2000 in active state with one scenarios and all files. @@ -695,6 +864,29 @@ export function project2000 () { .then(() => putObjectFromFile(bucket, 'project-2000/profile_000000', FILE_PROFILE)) .then(() => putObjectFromFile(bucket, 'project-2000/origins_000000', FILE_ORIGINS)) .then(() => putObjectFromFile(bucket, 'project-2000/admin-bounds_000000', FILE_ADMIN)) + .then(() => projectSourceData([ + { + 'id': 2000, + 'name': 'profile', + 'type': 'file', + 'project_id': 2000 + // 'data': + }, + { + 'id': 2001, + 'name': 'origins', + 'type': 'file', + 'project_id': 2000 + // 'data': + }, + { + 'id': 2002, + 'name': 'admin-bounds', + 'type': 'file', + 'project_id': 2000 + // 'data': + } + ])) .then(() => scenario({ 'id': 2000, 'name': 'Main scenario for Sergipe', @@ -752,7 +944,25 @@ export function project2000 () { } ])) .then(() => putObjectFromFile(bucket, 'scenario-2000/road-network_000000', FILE_ROAD_NETWORK)) - .then(() => putObjectFromFile(bucket, 'scenario-2000/poi_000000', FILE_POI)); + .then(() => putObjectFromFile(bucket, 'scenario-2000/poi_000000', FILE_POI)) + .then(() => scenarioSourceData([ + { + 'id': 2000, + 'name': 'road-network', + 'type': 'file', + 'project_id': 2000, + 'scenario_id': 2000 + // 'data': + }, + { + 'id': 2001, + 'name': 'poi', + 'type': 'file', + 'project_id': 2000, + 'scenario_id': 2000 + // 'data': + } + ])); } // @@ -779,10 +989,12 @@ export function fixMeUp () { select setval('projects_id_seq', (SELECT MAX(id) FROM projects)); select setval('projects_origins_id_seq', (SELECT MAX(id) FROM projects_origins)); select setval('projects_origins_indicators_id_seq', (SELECT MAX(id) FROM projects_origins_indicators)); + select setval('projects_source_data_id_seq', (SELECT MAX(id) FROM projects_source_data)); select setval('results_id_seq', (SELECT MAX(id) FROM results)); select setval('results_poi_id_seq', (SELECT MAX(id) FROM results_poi)); select setval('scenarios_files_id_seq', (SELECT MAX(id) FROM scenarios_files)); select setval('scenarios_id_seq', (SELECT MAX(id) FROM scenarios)); + select setval('scenarios_source_data_id_seq', (SELECT MAX(id) FROM scenarios_source_data)); `)); } @@ -798,6 +1010,19 @@ function projectFile (data) { return db.batchInsert('projects_files', _.isArray(data) ? data : [data]); } +function projectAA (data) { + return db.batchInsert('projects_aa', _.isArray(data) ? data : [data]); +} + +function projectOrigins ({ originsIndicators, origins }) { + return db.batchInsert('projects_origins', origins) + .then(() => db.batchInsert('projects_origins_indicators', originsIndicators)); +} + +function projectSourceData (data) { + return db.batchInsert('projects_source_data', _.isArray(data) ? data : [data]); +} + function scenario (data) { return db.batchInsert('scenarios', _.isArray(data) ? data : [data]); } @@ -810,13 +1035,8 @@ function scenarioSettings (data) { return db.batchInsert('scenarios_settings', _.isArray(data) ? data : [data]); } -function projectAA (data) { - return db.batchInsert('projects_aa', _.isArray(data) ? data : [data]); -} - -function projectOrigins ({ originsIndicators, origins }) { - return db.batchInsert('projects_origins', origins) - .then(() => db.batchInsert('projects_origins_indicators', originsIndicators)); +function scenarioSourceData (data) { + return db.batchInsert('scenarios_source_data', _.isArray(data) ? 
data : [data]); } // From cfe62215ed4a78fc6d63ec2773855a58775da181 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Thu, 8 Jun 2017 21:29:49 +0100 Subject: [PATCH 29/63] Include source data in project and scenario responses --- app/routes/projects--get.js | 60 +++++++++++---------- app/routes/scenarios--get.js | 13 +++-- app/utils/utils.js | 102 +++++++++++++++++++++++++++++++++++ 3 files changed, 139 insertions(+), 36 deletions(-) diff --git a/app/routes/projects--get.js b/app/routes/projects--get.js index 84d18e3d..a1eaa99d 100644 --- a/app/routes/projects--get.js +++ b/app/routes/projects--get.js @@ -5,6 +5,7 @@ import Promise from 'bluebird'; import db from '../db/'; import { ProjectNotFoundError } from '../utils/errors'; +import { getSourceData } from '../utils/utils'; module.exports = [ { @@ -19,7 +20,7 @@ module.exports = [ db.select('*').from('projects').orderBy('created_at').offset(offset).limit(limit) ]).then(res => { const [count, projects] = res; - return Promise.map(projects, p => attachProjectFiles(p).then(p => attachScenarioCount(p))) + return Promise.map(projects, p => attachProjectSourceData(p).then(p => attachScenarioCount(p))) .then(projects => { request.count = parseInt(count[0].count); reply(projects); @@ -49,13 +50,10 @@ module.exports = [ } ]; -function attachProjectFiles (project) { - return db.select('id', 'name', 'type', 'path', 'created_at') - .from('projects_files') - .where('project_id', project.id) - .whereIn('type', ['profile', 'origins', 'admin-bounds']) - .then(files => { - project.files = files || []; +function attachProjectSourceData (project) { + return getSourceData(db, 'project', project.id) + .then(sourceData => { + project.sourceData = sourceData; return project; }); } @@ -74,31 +72,35 @@ function getProject (id) { return db.select('*') .from('projects') .where('id', id) - .orderBy('created_at') - .then(projects => { - if (!projects.length) throw new ProjectNotFoundError(); - return projects[0]; + .first() + .then(project => { + if (!project) throw new ProjectNotFoundError(); + return project; }) - .then(project => attachProjectFiles(project)) + .then(project => attachProjectSourceData(project)) .then(project => attachFinishSetupOperation(project)) .then(project => { - // Check if a project is ready to move out of the setup phase. - // Get 1st scenario files. - return db('scenarios_files') + // GetId of first scenario. + return db('scenarios') + .select('id') .where('project_id', project.id) - .whereIn('type', ['road-network', 'poi']) - .where('scenario_id', function () { - this.select('id') - .from('scenarios') - .where('project_id', project.id) - .where('master', true); - }).then(scenarioFiles => { - // For a file to be ready it need 5 files: - // - 3 on the project - // - 2 on the ghost scenario. - // There's no need for file type validation because it's all - // done on file upload. - project.readyToEndSetup = scenarioFiles.length === 2 && project.files.length === 3; + .where('master', true) + .first() + .then(scenario => getSourceData(db, 'scenario', scenario.id)) + .then(scenarioSourceData => { + let sources = Object.assign({}, project.sourceData, scenarioSourceData); + + // Check if all sources are valid. + // If source is osm is OK. + // If is file, there has to be at least one. 
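+        // In other words: every source must have a type set, file-based
+        // sources need at least one uploaded file, and osm-based sources
+        // need nothing further.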
+ project.readyToEndSetup = Object.keys(sources) + .every(k => { + let src = sources[k]; + if (src.type === null) return false; + if (src.type === 'file') return src.files.length >= 1; + return true; + }); + return project; }); }) diff --git a/app/routes/scenarios--get.js b/app/routes/scenarios--get.js index 65bcee07..4e8c8618 100644 --- a/app/routes/scenarios--get.js +++ b/app/routes/scenarios--get.js @@ -5,6 +5,7 @@ import Promise from 'bluebird'; import db from '../db/'; import { ScenarioNotFoundError, ProjectNotFoundError } from '../utils/errors'; +import { getSourceData } from '../utils/utils'; const routeSingleScenarioConfig = { validate: { @@ -88,7 +89,7 @@ export function loadScenario (projId, scId) { }) .then(scenario => attachAdminAreas(scenario)) .then(scenario => attachScenarioSettings(scenario)) - .then(scenario => attachScenarioFiles(scenario)) + .then(scenario => attachScenarioSourceData(scenario)) .then(scenario => attachOperation('generate-analysis', 'gen_analysis', scenario)) .then(scenario => attachOperation('scenario-create', 'scen_create', scenario)); } @@ -117,12 +118,10 @@ function attachScenarioSettings (scenario) { }); } -function attachScenarioFiles (scenario) { - return db.select('id', 'name', 'type', 'subtype', 'path', 'created_at') - .from('scenarios_files') - .where('scenario_id', scenario.id) - .then(files => { - scenario.files = files || []; +function attachScenarioSourceData (scenario) { + return getSourceData(db, 'scenario', scenario.id) + .then(sourceData => { + scenario.sourceData = sourceData; return scenario; }); } diff --git a/app/utils/utils.js b/app/utils/utils.js index cbf28089..12d59a9a 100644 --- a/app/utils/utils.js +++ b/app/utils/utils.js @@ -12,3 +12,105 @@ export function parseFormData (req) { }); }); } + +// Same as an array map, but nulls and undefined are filtered out. +export function mapValid (arr, iterator) { + let holder = []; + arr.forEach((o, i) => { + let r = iterator(o, i, arr); + if (r !== null && typeof r !== undefined) { + holder.push(r); + } + }); + + return holder; +} + +export function getSourceData (db, contentType, id) { + let sourceDataQ; + let filesQ; + let structure; + + switch (contentType) { + case 'project': + sourceDataQ = db('projects_source_data') + .select('*') + .where('project_id', id); + + filesQ = db('projects_files') + .select('id', 'name', 'type', 'path', 'created_at') + .where('project_id', id); + + structure = { + profile: { + type: null, + files: [] + // osmOptions + }, + 'admin-bounds': { + type: null, + files: [] + // osmOptions + }, + origins: { + type: null, + files: [] + // osmOptions + } + }; + break; + case 'scenario': + sourceDataQ = db('scenarios_source_data') + .select('*') + .where('scenario_id', id); + + filesQ = db('scenarios_files') + .select('id', 'name', 'type', 'subtype', 'path', 'created_at') + .where('scenario_id', id); + + structure = { + 'road-network': { + type: null, + files: [] + // osmOptions + }, + poi: { + type: null, + files: [] + // osmOptions + } + }; + break; + default: + throw new Error('Unknown content type: ' + contentType); + } + + return sourceDataQ + .then(sources => { + let filesFetchTypes = []; + + sources.forEach(s => { + if (s.type === 'osm') { + // Never going to happen for projects, just scenarios. 
+ structure[s.name].type = 'osm'; + structure[s.name].osmOptions = s.data; + } else if (s.type === 'file') { + structure[s.name].type = 'file'; + filesFetchTypes.push(s.name); + } else { + throw new Error('Unknown source type: ' + s.type); + } + }); + + if (!filesFetchTypes.length) { + return structure; + } + + return filesQ + .whereIn('type', filesFetchTypes) + .then(files => { + files.forEach(f => { structure[f.type].files.push(f); }); + return structure; + }); + }); +} From 72bd1c5ccab14590fedf023dd98bdce45845fef6 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Thu, 8 Jun 2017 21:50:59 +0100 Subject: [PATCH 30/63] Fix tests --- test/test-projects.js | 72 +++++++++++++++++++++++++++++ test/test-scenarios.js | 60 +++++++++++++++++++++++- test/test-services-project-setup.js | 11 +++-- test/utils/data.js | 60 +++++++++++++++++++++++- 4 files changed, 196 insertions(+), 7 deletions(-) diff --git a/test/test-projects.js b/test/test-projects.js index 419bce50..842d7b90 100644 --- a/test/test-projects.js +++ b/test/test-projects.js @@ -102,6 +102,78 @@ describe('Projects', function () { }); }); + it('should have the correct source data with no files', function () { + return instance.injectThen({ + method: 'GET', + url: '/projects/1000' + }).then(res => { + assert.equal(res.statusCode, 200, 'Status code is 200'); + let project = res.result; + assert.deepEqual(project.sourceData, { + profile: { + type: null, + files: [] + }, + 'admin-bounds': { + type: null, + files: [] + }, + origins: { + type: null, + files: [] + } + }); + }); + }); + + it('should have the correct source data with all files', function () { + return instance.injectThen({ + method: 'GET', + url: '/projects/2000' + }).then(res => { + assert.equal(res.statusCode, 200, 'Status code is 200'); + let project = res.result; + assert.deepEqual(project.sourceData, { + profile: { + type: 'file', + files: [ + { + 'id': 2000, + 'name': 'profile_000000', + 'type': 'profile', + 'path': 'project-2000/profile_000000', + 'created_at': new Date('2017-02-01T12:00:06.000Z') + } + ] + }, + 'admin-bounds': { + type: 'file', + files: [ + { + 'id': 2002, + 'name': 'admin-bounds_000000', + 'type': 'admin-bounds', + 'path': 'project-2000/admin-bounds_000000', + 'created_at': new Date('2017-02-01T12:00:06.000Z') + } + ] + }, + origins: { + type: 'file', + files: [ + { + 'id': 2001, + 'name': 'origins_000000', + 'type': 'origins', + 'path': 'project-2000/origins_000000', + 'created_at': new Date('2017-02-01T12:00:06.000Z') + } + ] + } + }); + }); + }); + it('should include the scenario count for an individual project', function () { return instance.injectThen({ method: 'GET', diff --git a/test/test-scenarios.js b/test/test-scenarios.js index 2f9dead3..6ecd6a41 100644 --- a/test/test-scenarios.js +++ b/test/test-scenarios.js @@ -141,6 +141,64 @@ describe('Scenarios', function () { assert.equal(scenario.scen_create, null); }); }); + + it('should have the correct source data with no files', function () { + return instance.injectThen({ + method: 'GET', + url: '/projects/1000/scenarios/1000' + }).then(res => { + assert.equal(res.statusCode, 200, 'Status code is 200'); + let scenario = res.result; + assert.deepEqual(scenario.sourceData, { + 'road-network': { + type: null, + files: [] + }, + poi: { + type: null, + files: [] + } + }); + }); + }); + + it('should have the correct source data with all files', function () { + return instance.injectThen({ + method: 'GET', + url: '/projects/2000/scenarios/2000' + }).then(res => { + assert.equal(res.statusCode, 200, 
'Status code is 200'); + let scenario = res.result; + assert.deepEqual(scenario.sourceData, { + 'road-network': { + type: 'file', + files: [ + { + 'id': 2000, + 'name': 'road-network_000000', + 'type': 'road-network', + 'subtype': null, + 'path': 'scenario-2000/road-network_000000', + 'created_at': new Date('2017-02-01T12:00:06.000Z') + } + ] + }, + poi: { + type: 'file', + files: [ + { + 'id': 2001, + 'name': 'poi_000000', + 'type': 'poi', + 'subtype': 'pointOfInterest', + 'path': 'scenario-2000/poi_000000', + 'created_at': new Date('2017-02-01T12:00:06.000Z') + } + ] + } + }); + }); + }); }); describe('GET /projects/{projId}/scenarios/0', function () { @@ -350,7 +408,7 @@ describe('Scenarios', function () { assert.equal(scenario.description, 'updated description'); assert.equal(scenario.status, 'pending'); assert.equal(scenario.master, true); - assert.equal(scenario.files.length, 0); + assert.isTrue(typeof scenario.sourceData !== undefined); assert.deepEqual(scenario.data, { res_gen_at: '0', rn_updated_at: '0' }); assert.equal(scenario.gen_analysis, null); assert.equal(scenario.scen_create, null); diff --git a/test/test-services-project-setup.js b/test/test-services-project-setup.js index 068db847..209ae52a 100644 --- a/test/test-services-project-setup.js +++ b/test/test-services-project-setup.js @@ -120,10 +120,13 @@ describe('Finish Project Setup', function () { assert.lengthOf(logs, 4); assert.equal(logs[0].code, 'start'); assert.equal(logs[0].data.message, 'Operation started'); - assert.equal(logs[1].code, 'process:admin-bounds'); - assert.equal(logs[1].data.message, 'Processing admin areas'); - assert.equal(logs[2].code, 'process:origins'); - assert.equal(logs[2].data.message, 'Processing origins'); + + // These actions run in parallel and can actually be switched. + assert.oneOf(logs[1].code, ['process:admin-bounds', 'process:origins']); + assert.oneOf(logs[1].data.message, ['Processing admin areas', 'Processing origins']); + assert.oneOf(logs[2].code, ['process:admin-bounds', 'process:origins']); + assert.oneOf(logs[2].data.message, ['Processing admin areas', 'Processing origins']); + // assert.equal(logs[2].code, 'process:road-network'); // assert.equal(logs[2].data.message, 'Road network processing started'); // assert.equal(logs[3].code, 'process:road-network'); diff --git a/test/utils/data.js b/test/utils/data.js index 0117111d..941bc8ab 100644 --- a/test/utils/data.js +++ b/test/utils/data.js @@ -1101,6 +1101,13 @@ export function projectPendingWithFiles (id) { 'updated_at': '2017-02-01T12:00:00.000Z' })) .then(() => putObjectFromFile(bucket, `project-${id}/profile_000000`, FILE_PROFILE)) + .then(() => projectSourceData({ + 'id': id, + 'name': 'profile', + 'type': 'file', + 'project_id': id + // 'data': + })) .then(() => scenario({ 'id': id, 'name': `Scenario ${id}`, @@ -1137,7 +1144,15 @@ export function projectPendingWithFiles (id) { 'created_at': '2017-02-01T12:00:00.000Z', 'updated_at': '2017-02-01T12:00:00.000Z' })) - .then(() => putObjectFromFile(bucket, `scenario-${id}/poi_000000`, FILE_POI)); + .then(() => putObjectFromFile(bucket, `scenario-${id}/poi_000000`, FILE_POI)) + .then(() => scenarioSourceData({ + 'id': id, + 'name': 'poi', + 'type': 'file', + 'project_id': id, + 'scenario_id': id + // 'data': + })); } // Insert a project, a scenario, and all files. 
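Every source-data fixture above uses type 'file'. If a test ever needs to exercise the osm branch of getSourceData, a row along these lines would be enough (the ids and the option payload below are hypothetical, not part of this patch):

scenarioSourceData({
  'id': 9000,
  'name': 'road-network',
  'type': 'osm',
  'project_id': 9000,
  'scenario_id': 9000,
  // Import options surfaced as osmOptions by getSourceData; the contents here
  // are only a placeholder.
  'data': JSON.stringify({ 'bbox': [0, 0, 1, 1] })
});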
@@ -1184,6 +1199,29 @@ export function projectPendingWithAllFiles (id) { .then(() => putObjectFromFile(bucket, `project-${id}/profile_000000`, FILE_PROFILE)) .then(() => putObjectFromFile(bucket, `project-${id}/origins_000000`, FILE_ORIGINS)) .then(() => putObjectFromFile(bucket, `project-${id}/admin-bounds_000000`, FILE_ADMIN)) + .then(() => projectSourceData([ + { + 'id': id, + 'name': 'profile', + 'type': 'file', + 'project_id': id + // 'data': + }, + { + 'id': id + 1, + 'name': 'origins', + 'type': 'file', + 'project_id': id + // 'data': + }, + { + 'id': id + 2, + 'name': 'admin-bounds', + 'type': 'file', + 'project_id': id + // 'data': + } + ])) .then(() => scenario({ 'id': id, 'name': `Scenario ${id}`, @@ -1234,7 +1272,25 @@ export function projectPendingWithAllFiles (id) { } ])) .then(() => putObjectFromFile(bucket, `scenario-${id}/road-network_000000`, FILE_ROAD_NETWORK)) - .then(() => putObjectFromFile(bucket, `scenario-${id}/poi_000000`, FILE_POI)); + .then(() => putObjectFromFile(bucket, `scenario-${id}/poi_000000`, FILE_POI)) + .then(() => scenarioSourceData([ + { + 'id': id, + 'name': 'road-network', + 'type': 'file', + 'project_id': id, + 'scenario_id': id + // 'data': + }, + { + 'id': id + 1, + 'name': 'poi', + 'type': 'file', + 'project_id': id, + 'scenario_id': id + // 'data': + } + ])); } export function projectPendingWithAllFilesAndOperation (id) { From 64ed387752e86518d1900264eb9af38dc34e43c3 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Fri, 9 Jun 2017 18:36:21 +0100 Subject: [PATCH 31/63] Include data field on project source data information --- app/utils/utils.js | 2 +- test/test-projects.js | 5 +++++ 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/app/utils/utils.js b/app/utils/utils.js index 12d59a9a..bfa97e26 100644 --- a/app/utils/utils.js +++ b/app/utils/utils.js @@ -38,7 +38,7 @@ export function getSourceData (db, contentType, id) { .where('project_id', id); filesQ = db('projects_files') - .select('id', 'name', 'type', 'path', 'created_at') + .select('id', 'name', 'type', 'data', 'path', 'created_at') .where('project_id', id); structure = { diff --git a/test/test-projects.js b/test/test-projects.js index 842d7b90..013ca6d4 100644 --- a/test/test-projects.js +++ b/test/test-projects.js @@ -142,6 +142,7 @@ describe('Projects', function () { 'name': 'profile_000000', 'type': 'profile', 'path': 'project-2000/profile_000000', + 'data': null, 'created_at': new Date('2017-02-01T12:00:06.000Z') } ] @@ -154,6 +155,7 @@ describe('Projects', function () { 'name': 'admin-bounds_000000', 'type': 'admin-bounds', 'path': 'project-2000/admin-bounds_000000', + 'data': null, 'created_at': new Date('2017-02-01T12:00:06.000Z') } ] @@ -166,6 +168,9 @@ describe('Projects', function () { 'name': 'origins_000000', 'type': 'origins', 'path': 'project-2000/origins_000000', + 'data': { + 'indicators': [ { 'key': 'population', 'label': 'Total population' } ] + }, 'created_at': new Date('2017-02-01T12:00:06.000Z') } ] From 7517dc25f8c767c214e9c0460edce49c3d998642 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Fri, 9 Jun 2017 19:54:27 +0100 Subject: [PATCH 32/63] Refactor attach operation function --- app/routes/projects--get.js | 51 +++++++----------------------------- app/routes/scenarios--get.js | 43 ++++-------------------------- app/utils/utils.js | 38 +++++++++++++++++++++++++++ 3 files changed, 52 insertions(+), 80 deletions(-) diff --git a/app/routes/projects--get.js b/app/routes/projects--get.js index a1eaa99d..af79da5d 100644 --- 
a/app/routes/projects--get.js +++ b/app/routes/projects--get.js @@ -5,7 +5,7 @@ import Promise from 'bluebird'; import db from '../db/'; import { ProjectNotFoundError } from '../utils/errors'; -import { getSourceData } from '../utils/utils'; +import { getSourceData, getOperationData } from '../utils/utils'; module.exports = [ { @@ -108,48 +108,15 @@ function getProject (id) { } function attachFinishSetupOperation (project) { - return db.select('*') - .from('operations') - .where('operations.project_id', project.id) - .where('operations.scenario_id', function () { - this.select('id') - .from('scenarios') - .where('project_id', project.id) - .where('master', true); - }) - .where('operations.name', 'project-setup-finish') - .orderBy('created_at', 'desc') - .limit(1) + return db('scenarios') + .select('id') + .where('project_id', project.id) + .where('master', true) .first() - .then(op => { - if (!op) { - project.finish_setup = null; - return project; - } - - return db.select('*') - .from('operations_logs') - .where('operation_id', op.id) - .then(logs => { - let errored = false; - if (logs.length) { - errored = logs[logs.length - 1].code === 'error'; - } - project.finish_setup = { - id: op.id, - status: op.status, - created_at: op.created_at, - updated_at: op.updated_at, - errored, - logs: logs.map(l => ({ - id: l.id, - code: l.code, - data: l.data, - created_at: l.created_at - })) - }; - return project; - }); + .then(scenario => getOperationData(db, 'project-setup-finish', 'finish_setup', scenario.id)) + .then(opData => { + project.finish_setup = opData; + return project; }); } diff --git a/app/routes/scenarios--get.js b/app/routes/scenarios--get.js index 4e8c8618..4bcdfe97 100644 --- a/app/routes/scenarios--get.js +++ b/app/routes/scenarios--get.js @@ -5,7 +5,7 @@ import Promise from 'bluebird'; import db from '../db/'; import { ScenarioNotFoundError, ProjectNotFoundError } from '../utils/errors'; -import { getSourceData } from '../utils/utils'; +import { getSourceData, getOperationData } from '../utils/utils'; const routeSingleScenarioConfig = { validate: { @@ -160,42 +160,9 @@ function attachAdminAreas (scenario) { } function attachOperation (opName, prop, scenario) { - return db.select('*') - .from('operations') - .where('operations.scenario_id', scenario.id) - .where('operations.name', opName) - .orderBy('created_at', 'desc') - .limit(1) - .then(op => { - if (!op.length) { - scenario[prop] = null; - return scenario; - } - op = op[0]; - - return db.select('*') - .from('operations_logs') - .where('operation_id', op.id) - .orderBy('created_at') - .then(logs => { - let errored = false; - if (logs.length) { - errored = logs[logs.length - 1].code === 'error'; - } - scenario[prop] = { - id: op.id, - status: op.status, - created_at: op.created_at, - updated_at: op.updated_at, - errored, - logs: logs.map(l => ({ - id: l.id, - code: l.code, - data: l.data, - created_at: l.created_at - })) - }; - return scenario; - }); + return getOperationData(db, opName, prop, scenario.id) + .then(opData => { + scenario[prop] = opData; + return scenario; }); } diff --git a/app/utils/utils.js b/app/utils/utils.js index bfa97e26..f579a829 100644 --- a/app/utils/utils.js +++ b/app/utils/utils.js @@ -114,3 +114,41 @@ export function getSourceData (db, contentType, id) { }); }); } + +export function getOperationData (db, opName, prop, id) { + return db.select('*') + .from('operations') + .where('operations.scenario_id', id) + .where('operations.name', opName) + .orderBy('created_at', 'desc') + .first() + .then(op => { 
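+      // No operation of this name has been run for this scenario yet.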
+ if (!op) { + return null; + } + + return db.select('*') + .from('operations_logs') + .where('operation_id', op.id) + .orderBy('created_at') + .then(logs => { + let errored = false; + if (logs.length) { + errored = logs[logs.length - 1].code === 'error'; + } + return { + id: op.id, + status: op.status, + created_at: op.created_at, + updated_at: op.updated_at, + errored, + logs: logs.map(l => ({ + id: l.id, + code: l.code, + data: l.data, + created_at: l.created_at + })) + }; + }); + }); +} From 9bbe3e79b981b7c367c5d88577e05f4ff4034f64 Mon Sep 17 00:00:00 2001 From: Olaf Veerman Date: Mon, 12 Jun 2017 23:05:09 -0400 Subject: [PATCH 33/63] Allow download of multiple file types CSV and GeoJSON for now --- app/routes/scenario--results.js | 14 +++++------ test/test-scenarios.js | 42 ++++++++++++++++++++++++++++----- 2 files changed, 42 insertions(+), 14 deletions(-) diff --git a/app/routes/scenario--results.js b/app/routes/scenario--results.js index 6f7f705f..91c87049 100644 --- a/app/routes/scenario--results.js +++ b/app/routes/scenario--results.js @@ -20,22 +20,20 @@ export default [ scId: Joi.number() }, query: { - download: Joi.boolean().truthy('true').falsy('false') + download: Joi.boolean().truthy('true').falsy('false').valid('true').required(), + type: Joi.string().valid(['csv', 'geojson']).required() } } }, handler: (request, reply) => { - if (!request.query.download) { - return reply(Boom.notImplemented('Query parameter "download" missing')); - } - const { projId, scId } = request.params; + const { type } = request.query; db('scenarios_files') .select('*') .where('project_id', projId) .where('scenario_id', scId) - .where('type', 'results') + .where('type', `results-${type}`) .then(files => { if (!files.length) throw new FileNotFoundError('Results not found'); return files; @@ -52,7 +50,7 @@ export default [ .then(files => { let zip = new Zip(); files.forEach(f => { - zip.file(`${f.name}.csv`, f.content); + zip.file(`${f.name}.${type}`, f.content); }); return zip.generate({ base64: false, compression: 'DEFLATE' }); @@ -61,7 +59,7 @@ export default [ .then(data => reply(data) .type('application/zip') .encoding('binary') - .header('Content-Disposition', `attachment; filename=results-p${projId}s${scId}.zip`) + .header('Content-Disposition', `attachment; filename=results-${type}-p${projId}s${scId}.zip`) ) .catch(FileNotFoundError, e => reply(Boom.notFound(e.message))) .catch(err => { diff --git a/test/test-scenarios.js b/test/test-scenarios.js index 2f9dead3..3865e361 100644 --- a/test/test-scenarios.js +++ b/test/test-scenarios.js @@ -542,7 +542,7 @@ describe('Scenarios', function () { db.insert({ id: 10000001, name: 'results_000000', - type: 'results', + type: 'results-csv', path: 'scenario-1000/results_000000', project_id: 1000, scenario_id: 1000 @@ -559,20 +559,50 @@ describe('Scenarios', function () { .then(() => done()); }); + it('should return 400 when download is missing', function () { + return instance.injectThen({ + method: 'GET', + url: '/projects/1000/scenarios/1000/results?type=csv' + }).then(res => { + assert.equal(res.statusCode, 400, 'Status code is 400'); + assert.equal(res.result.message, 'child "download" fails because ["download" is required]'); + }); + }); + + it('should return 400 when type is missing', function () { + return instance.injectThen({ + method: 'GET', + url: '/projects/1000/scenarios/1000/results?download=true' + }).then(res => { + assert.equal(res.statusCode, 400, 'Status code is 400'); + assert.equal(res.result.message, 'child "type" fails because 
["type" is required]'); + }); + }); + it('should return 400 when download flag not true', function () { return instance.injectThen({ method: 'GET', - url: '/projects/1000/scenarios/1000/results?download=false' + url: '/projects/1000/scenarios/1000/results?download=false&type=geojson' + }).then(res => { + assert.equal(res.statusCode, 400, 'Status code is 400'); + assert.equal(res.result.message, 'child "download" fails because ["download" must be one of [true]]'); + }); + }); + + it('should return 400 when type is incorrect', function () { + return instance.injectThen({ + method: 'GET', + url: '/projects/1000/scenarios/1000/results?download=true&type=csvjson ' }).then(res => { - assert.equal(res.statusCode, 501, 'Status code is 404'); - assert.equal(res.result.message, 'Query parameter "download" missing'); + assert.equal(res.statusCode, 400, 'Status code is 400'); + assert.equal(res.result.message, 'child "type" fails because ["type" must be one of [csv, geojson]]'); }); }); it('should return 404 when a file is not found', function () { return instance.injectThen({ method: 'GET', - url: '/projects/8888/scenarios/8888/results?download=true' + url: '/projects/8888/scenarios/8888/results?download=true&type=csv' }).then(res => { assert.equal(res.statusCode, 404, 'Status code is 404'); assert.equal(res.result.message, 'Results not found'); @@ -582,7 +612,7 @@ describe('Scenarios', function () { it('should return 404 when a file is not found on s3', function () { return instance.injectThen({ method: 'GET', - url: '/projects/1000/scenarios/1000/results?download=true' + url: '/projects/1000/scenarios/1000/results?download=true&type=csv' }).then(res => { assert.equal(res.statusCode, 404, 'Status code is 404'); assert.equal(res.result.message, 'File not found in storage bucket'); From 3a827cfe1962fd7be306ef74dfd120a544b80431 Mon Sep 17 00:00:00 2001 From: Olaf Veerman Date: Mon, 12 Jun 2017 23:22:21 -0400 Subject: [PATCH 34/63] Pass credentials to Hyper for pull. Contribute to #146 --- app/routes/scenarios--gen-results.js | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/app/routes/scenarios--gen-results.js b/app/routes/scenarios--gen-results.js index ec3c9463..6af37901 100644 --- a/app/routes/scenarios--gen-results.js +++ b/app/routes/scenarios--gen-results.js @@ -244,7 +244,11 @@ function spawnAnalysisProcess (projId, scId, opId) { args.push(config.analysisProcess.container); // Make sure the latest image (dev / stable) is used - let pullImage = cp.spawn(service, ['pull', config.analysisProcess.container]); + let pullImage = cp.spawn(service, [ + 'pull', config.analysisProcess.container, + '-e', `HYPER_ACCESS=${config.analysisProcess.hyperAccess}`, + '-e', `HYPER_SECRET=${config.analysisProcess.hyperSecret}` + ]); pullImage.on('close', () => { // Spawn the processing script. It will take care of updating // the database with progress. 
From cd025d576e2057f4339db4e8e00a21326314353b Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Wed, 14 Jun 2017 16:44:55 +0100 Subject: [PATCH 35/63] Add endpoint to upload source data --- app/routes/scenarios--source-data.js | 183 +++++++++++++++++++++++++++ 1 file changed, 183 insertions(+) create mode 100644 app/routes/scenarios--source-data.js diff --git a/app/routes/scenarios--source-data.js b/app/routes/scenarios--source-data.js new file mode 100644 index 00000000..24193efd --- /dev/null +++ b/app/routes/scenarios--source-data.js @@ -0,0 +1,183 @@ +'use strict'; +import Joi from 'joi'; +import Boom from 'boom'; + +import db from '../db/'; +import { putFile as putFileToS3, removeLocalFile } from '../s3/utils'; +import { + ProjectNotFoundError, + ScenarioNotFoundError, + FileExistsError, + DataValidationError, + ProjectStatusError +} from '../utils/errors'; +import { parseFormData } from '../utils/utils'; + +export default [ + { + path: '/projects/{projId}/scenarios/{scId}/source-data', + method: 'POST', + config: { + validate: { + params: { + projId: Joi.number(), + scId: Joi.number() + } + }, + payload: { + maxBytes: 1 * Math.pow(1024, 3), // 1GB + output: 'stream', + parse: false, + allow: 'multipart/form-data' + } + }, + handler: (request, reply) => { + const projId = parseInt(request.params.projId); + const scId = parseInt(request.params.scId); + + // Check if project exists and is still in setup phase. + db('projects') + .select('*') + .where('id', projId) + .first() + .then(project => { + if (!project) throw new ProjectNotFoundError(); + if (project.status !== 'pending') throw new ProjectStatusError('Project no longer in the setup phase. Source data can not be uploaded'); + }) + .then(() => db('scenarios') + .select('id') + .where('id', projId) + .first() + .then(scenario => { if (!scenario) throw new ScenarioNotFoundError(); }) + ) + .then(() => parseFormData(request.raw.req)) + .then(result => { + if (!result.fields['source-type']) { + throw new DataValidationError('"source-type" is required'); + } + + if (!result.fields['source-name']) { + throw new DataValidationError('"source-name" is required'); + } + + let sourceType = result.fields['source-type'][0]; + let sourceName = result.fields['source-name'][0]; + + if (['poi', 'road-network'].indexOf(sourceName) === -1) { + throw new DataValidationError(`"source-name" must be one of [poi, road-network]`); + } + + switch (sourceType) { + case 'file': + if (!result.files.file) { + throw new DataValidationError('"file" is required'); + } + + // With poi source the subtype is required. + let subtype = result.fields['subtype'][0]; + if (sourceName === 'poi' && !subtype) { + throw new DataValidationError('"subtype" is required for source "poi"'); + } + + let file = result.files.file[0]; + let fileName; + + if (subtype) { + fileName = `${sourceType}_${subtype}_${Date.now()}`; + } else { + fileName = `${sourceType}_${Date.now()}`; + } + + let filePath = `scenario-${scId}/${fileName}`; + + // Upsert source. + return db('scenarios_source_data') + .select('id') + .where('scenario_id', scId) + .where('name', sourceName) + .first() + .then(source => { + if (source) { + return db('scenarios_source_data') + .update({type: 'file'}) + .where('id', source.id); + } else { + return db('scenarios_source_data') + .insert({ + project_id: projId, + scenario_id: scId, + name: sourceName, + type: 'file' + }); + } + }) + // Check if the file exists. 
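+              // Only one file per source type (and, for poi, per subtype) is
+              // allowed; a duplicate upload is rejected with a 409 conflict.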
+ .then(() => { + let query = db('scenarios_files') + .select('id') + .where('scenario_id', scId) + .where('type', sourceName); + + if (subtype) { + query = query.where('subtype', subtype); + } + + return query; + }) + .then(files => { + if (files.length) { throw new FileExistsError(); } + }) + // Upload to S3. + .then(() => putFileToS3(filePath, file.path)) + // Insert into database. + .then(() => { + let data = { + name: fileName, + type: sourceName, + path: filePath, + project_id: projId, + scenario_id: scId, + created_at: (new Date()), + updated_at: (new Date()) + }; + + if (subtype) { + data.subtype = subtype; + } + + return db('scenarios_files') + .returning(['id', 'name', 'type', 'subtype', 'path', 'created_at']) + .insert(data) + .then(insertResponse => insertResponse[0]) + .then(insertResponse => db('scenarios').update({updated_at: (new Date())}).where('id', scId).then(() => insertResponse)) + .then(insertResponse => db('projects').update({updated_at: (new Date())}).where('id', projId).then(() => insertResponse)); + }) + // Delete temp file. + .then(insertResponse => removeLocalFile(file.path, true).then(() => insertResponse)) + .then(insertResponse => reply(Object.assign({}, insertResponse, { + sourceType, + sourceName + }))) + .catch(err => { + // Delete temp file in case of error. Re-throw error to continue. + file && removeLocalFile(file.path, true); + throw err; + }); + case 'osm': + throw new DataValidationError(`"osm" type not implemented`); + // break; + default: + throw new DataValidationError(`"source-type" must be one of [osm, file]`); + } + }) + .catch(ProjectNotFoundError, e => reply(Boom.notFound(e.message))) + .catch(ScenarioNotFoundError, e => reply(Boom.notFound(e.message))) + .catch(FileExistsError, e => reply(Boom.conflict(e.message))) + .catch(DataValidationError, e => reply(Boom.badRequest(e.message))) + .catch(err => { + console.log('err', err); + reply(Boom.badImplementation(err)); + }); + } + } +]; From e756a092aa8b61fdd380fabfaebd2e7c04e2dbf5 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Wed, 14 Jun 2017 16:45:30 +0100 Subject: [PATCH 36/63] Handle multiple poi types on results --- app/routes/scenario--results.js | 2 -- 1 file changed, 2 deletions(-) diff --git a/app/routes/scenario--results.js b/app/routes/scenario--results.js index 6f7f705f..16adea6d 100644 --- a/app/routes/scenario--results.js +++ b/app/routes/scenario--results.js @@ -240,7 +240,6 @@ export default [ .where('results.project_id', projId) .where('results.scenario_id', scId) .where('projects_origins_indicators.key', 'population') - .where('results_poi.type', 'pointOfInterest') .first(); let _results = db('results') @@ -261,7 +260,6 @@ export default [ .where('results.project_id', projId) .where('results.scenario_id', scId) .where('projects_origins_indicators.key', 'population') - .where('results_poi.type', 'pointOfInterest') .orderBy(sortBy, sortDir) .offset(offset).limit(limit); From c20aa81af2568d09ead09379a525d451ba58c689 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Wed, 14 Jun 2017 16:52:39 +0100 Subject: [PATCH 37/63] Temporarily disable file upload methods --- app/routes/projects--files-upload.js | 5 ++++- app/routes/scenarios--files-upload.js | 5 ++++- test/test-projects-files.js | 2 +- test/test-scenarios-files.js | 2 +- 4 files changed, 10 insertions(+), 4 deletions(-) diff --git a/app/routes/projects--files-upload.js b/app/routes/projects--files-upload.js index 6059b9b3..53a79e70 100644 --- a/app/routes/projects--files-upload.js +++ 
b/app/routes/projects--files-upload.js @@ -18,13 +18,16 @@ module.exports = [ } }, payload: { - maxBytes: 1 * Math.pow(1024, 3), // 1GB + // maxBytes: 1 * Math.pow(1024, 3), // 1GB + maxBytes: 1, output: 'stream', parse: false, allow: 'multipart/form-data' } }, handler: (request, reply) => { + return reply(Boom.notImplemented('This method is deprecated')); + const projId = parseInt(request.params.projId); let file; let type; diff --git a/app/routes/scenarios--files-upload.js b/app/routes/scenarios--files-upload.js index df056dc6..46163da4 100644 --- a/app/routes/scenarios--files-upload.js +++ b/app/routes/scenarios--files-upload.js @@ -19,13 +19,16 @@ module.exports = [ } }, payload: { - maxBytes: 1 * Math.pow(1024, 3), // 1GB + // maxBytes: 1 * Math.pow(1024, 3), // 1GB + maxBytes: 1, output: 'stream', parse: false, allow: 'multipart/form-data' } }, handler: (request, reply) => { + return reply(Boom.notImplemented('This method is deprecated')); + const projId = parseInt(request.params.projId); const scId = parseInt(request.params.scId); let file; diff --git a/test/test-projects-files.js b/test/test-projects-files.js index 45c5d54b..da6fa6c2 100644 --- a/test/test-projects-files.js +++ b/test/test-projects-files.js @@ -95,7 +95,7 @@ describe('Project files', function () { }); }); - describe('POST /projects/{projId}/files', function () { + describe.skip('POST /projects/{projId}/files', function () { it('should error when data format is not multipart/form-data', function () { return instance.injectThen({ method: 'POST', diff --git a/test/test-scenarios-files.js b/test/test-scenarios-files.js index e35baf30..39be84c2 100644 --- a/test/test-scenarios-files.js +++ b/test/test-scenarios-files.js @@ -103,7 +103,7 @@ describe('Scenario files', function () { }); }); - describe('POST /projects/{projId}/scenarios/{scId}/files', function () { + describe.skip('POST /projects/{projId}/scenarios/{scId}/files', function () { it('should error when data format is not multipart/form-data', function () { return instance.injectThen({ method: 'POST', From e2b982276428685d0ce67c18653c48eadf63ebed Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Wed, 14 Jun 2017 17:32:50 +0100 Subject: [PATCH 38/63] Fix filename creation --- app/routes/scenarios--source-data.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/app/routes/scenarios--source-data.js b/app/routes/scenarios--source-data.js index 24193efd..80baeba5 100644 --- a/app/routes/scenarios--source-data.js +++ b/app/routes/scenarios--source-data.js @@ -83,9 +83,9 @@ export default [ let fileName; if (subtype) { - fileName = `${sourceType}_${subtype}_${Date.now()}`; + fileName = `${sourceName}_${subtype}_${Date.now()}`; } else { - fileName = `${sourceType}_${Date.now()}`; + fileName = `${sourceName}_${Date.now()}`; } let filePath = `scenario-${scId}/${fileName}`; From 2b93b570683bc613cda7abd09ebc9f9e524a302b Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Wed, 14 Jun 2017 17:33:07 +0100 Subject: [PATCH 39/63] Add endpoint for project source data --- app/routes/projects--source-data.js | 146 ++++++++++++++++++++++++++++ 1 file changed, 146 insertions(+) create mode 100644 app/routes/projects--source-data.js diff --git a/app/routes/projects--source-data.js b/app/routes/projects--source-data.js new file mode 100644 index 00000000..bbfd7149 --- /dev/null +++ b/app/routes/projects--source-data.js @@ -0,0 +1,146 @@ +'use strict'; +import Joi from 'joi'; +import Boom from 'boom'; + +import db from '../db/'; +import { putFile as putFileToS3, 
removeLocalFile } from '../s3/utils'; +import { + ProjectNotFoundError, + FileExistsError, + DataValidationError, + ProjectStatusError +} from '../utils/errors'; +import { parseFormData } from '../utils/utils'; + +export default [ + { + path: '/projects/{projId}/source-data', + method: 'POST', + config: { + validate: { + params: { + projId: Joi.number() + } + }, + payload: { + maxBytes: 1 * Math.pow(1024, 3), // 1GB + output: 'stream', + parse: false, + allow: 'multipart/form-data' + } + }, + handler: (request, reply) => { + const projId = parseInt(request.params.projId); + + // Check if project exists and is still in setup phase. + db('projects') + .select('*') + .where('id', projId) + .first() + .then(project => { + if (!project) throw new ProjectNotFoundError(); + if (project.status !== 'pending') throw new ProjectStatusError('Project no longer in the setup phase. Source data can not be uploaded'); + }) + .then(() => parseFormData(request.raw.req)) + .then(result => { + if (!result.fields['source-type']) { + throw new DataValidationError('"source-type" is required'); + } + + if (!result.fields['source-name']) { + throw new DataValidationError('"source-name" is required'); + } + + let sourceType = result.fields['source-type'][0]; + let sourceName = result.fields['source-name'][0]; + + if (['admin-bounds', 'profile', 'origins'].indexOf(sourceName) === -1) { + throw new DataValidationError(`"source-name" must be one of [admin-bounds, profile, origins]`); + } + + switch (sourceType) { + case 'file': + if (!result.files.file) { + throw new DataValidationError('"file" is required'); + } + + let file = result.files.file[0]; + let fileName = `${sourceName}_${Date.now()}`; + let filePath = `profile-${projId}/${fileName}`; + + // Upsert source. + return db('projects_source_data') + .select('id') + .where('project_id', projId) + .where('name', sourceName) + .first() + .then(source => { + if (source) { + return db('projects_source_data') + .update({type: 'file'}) + .where('id', source.id); + } else { + return db('projects_source_data') + .insert({ + project_id: projId, + name: sourceName, + type: 'file' + }); + } + }) + // Check if the file exists. + .then(() => db('projects_files') + .select('id') + .where('project_id', projId) + .where('type', sourceName) + ) + .then(files => { + if (files.length) { throw new FileExistsError(); } + }) + // Upload to S3. + .then(() => putFileToS3(filePath, file.path)) + // Insert into database. + .then(() => { + let data = { + name: fileName, + type: sourceName, + path: filePath, + project_id: projId, + created_at: (new Date()), + updated_at: (new Date()) + }; + + return db('projects_files') + .returning(['id', 'name', 'type', 'path', 'created_at']) + .insert(data) + .then(insertResponse => insertResponse[0]) + .then(insertResponse => db('projects').update({updated_at: (new Date())}).where('id', projId).then(() => insertResponse)); + }) + // Delete temp file. + .then(insertResponse => removeLocalFile(file.path, true).then(() => insertResponse)) + .then(insertResponse => reply(Object.assign({}, insertResponse, { + sourceType, + sourceName + }))) + .catch(err => { + // Delete temp file in case of error. Re-throw error to continue. 
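+ // Cleanup only: the re-thrown error is translated into the HTTP
+ // response (404, 409, 400 or 500) by the catch handlers at the end
+ // of the chain.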
+ file && removeLocalFile(file.path, true); + throw err; + }); + case 'osm': + throw new DataValidationError(`"osm" type not implemented`); + // break; + default: + throw new DataValidationError(`"source-type" must be one of [osm, file]`); + } + }) + .catch(ProjectNotFoundError, e => reply(Boom.notFound(e.message))) + .catch(FileExistsError, e => reply(Boom.conflict(e.message))) + .catch(DataValidationError, e => reply(Boom.badRequest(e.message))) + .catch(err => { + console.log('err', err); + reply(Boom.badImplementation(err)); + }); + } + } +]; From 03a6d2b8a912a7b8193d14257b1cc260d3199dd1 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Wed, 14 Jun 2017 18:34:14 +0100 Subject: [PATCH 40/63] Fix subtype param check --- app/routes/scenarios--source-data.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/routes/scenarios--source-data.js b/app/routes/scenarios--source-data.js index 80baeba5..5de7fb11 100644 --- a/app/routes/scenarios--source-data.js +++ b/app/routes/scenarios--source-data.js @@ -74,7 +74,7 @@ export default [ } // With poi source the subtype is required. - let subtype = result.fields['subtype'][0]; + let subtype = result.fields['subtype'] ? result.fields['subtype'][0] : null; if (sourceName === 'poi' && !subtype) { throw new DataValidationError('"subtype" is required for source "poi"'); } From 0354293eb65a97e58caabc72b7e1f08b3a08e4e5 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Wed, 14 Jun 2017 18:48:21 +0100 Subject: [PATCH 41/63] Fix lint problem --- app/routes/projects--files-upload.js | 1 + app/routes/scenarios--files-upload.js | 1 + 2 files changed, 2 insertions(+) diff --git a/app/routes/projects--files-upload.js b/app/routes/projects--files-upload.js index 53a79e70..3c6def23 100644 --- a/app/routes/projects--files-upload.js +++ b/app/routes/projects--files-upload.js @@ -28,6 +28,7 @@ module.exports = [ handler: (request, reply) => { return reply(Boom.notImplemented('This method is deprecated')); + /* eslint-disable */ const projId = parseInt(request.params.projId); let file; let type; diff --git a/app/routes/scenarios--files-upload.js b/app/routes/scenarios--files-upload.js index 46163da4..ecff29a0 100644 --- a/app/routes/scenarios--files-upload.js +++ b/app/routes/scenarios--files-upload.js @@ -29,6 +29,7 @@ module.exports = [ handler: (request, reply) => { return reply(Boom.notImplemented('This method is deprecated')); + /* eslint-disable */ const projId = parseInt(request.params.projId); const scId = parseInt(request.params.scId); let file; From 413822450d53d51716a563b45c24eb75d9f746cf Mon Sep 17 00:00:00 2001 From: Olaf Veerman Date: Wed, 14 Jun 2017 15:30:46 -0400 Subject: [PATCH 42/63] Add GeoJSON endpoint for scenario results As this will mainly power the map, property names are kept as short as possible --- app/routes/scenario--results.js | 85 +++++++++++++++++++++++++++++++++ test/test-scenarios-results.js | 62 ++++++++++++++++++++++++ test/utils/data.js | 50 ++++++++++++++++++- 3 files changed, 196 insertions(+), 1 deletion(-) create mode 100644 test/test-scenarios-results.js diff --git a/app/routes/scenario--results.js b/app/routes/scenario--results.js index 6f7f705f..4fd7ea6c 100644 --- a/app/routes/scenario--results.js +++ b/app/routes/scenario--results.js @@ -274,5 +274,90 @@ export default [ reply(Boom.badImplementation(err)); }); } + }, + { + path: '/projects/{projId}/scenarios/{scId}/results/geojson', + method: 'GET', + config: { + validate: { + params: { + projId: Joi.number(), + scId: Joi.number() + } + } + }, + 
handler: (request, reply) => { + const { projId, scId } = request.params; + + let _results = db('results') + .select( + 'projects_origins.id as origin_id', + 'projects_origins.name as origin_name', + 'projects_origins.coordinates as origin_coords', + 'projects_origins_indicators.value as pop_value', + 'projects_origins_indicators.key as pop_key', + 'results_poi.type as poi_type', + 'results_poi.time as time_to_poi' + ) + .innerJoin('results_poi', 'results.id', 'results_poi.result_id') + .innerJoin('projects_origins', 'projects_origins.id', 'results.origin_id') + .innerJoin('projects_origins_indicators', 'projects_origins_indicators.origin_id', 'projects_origins.id') + .where('results.project_id', projId) + .where('results.scenario_id', scId); + Promise.all(_results) + .then(res => mergeOriginETA(res)) + .then(res => { + reply({ + 'type': 'FeatureCollection', + 'features': res + }); + }).catch(err => { + console.log('err', err); + reply(Boom.badImplementation(err)); + }); + } } ]; + +/** + * Turn an array of results into a proper GeoJSON features. Each feature refers + * to a unique origin, and can have multiple ETA for each POI types. + */ +function mergeOriginETA (results) { + return new Promise((resolve, reject) => { + return resolve(results.reduce((a, b) => { + // Check if the accumulator already has an object for the origin + let match = a.findIndex(o => o.properties.id === b.origin_id); + if (match === -1) { + // Create the feature + a.push({ + 'type': 'Feature', + 'properties': { + 'id': b.origin_id, + 'n': b.origin_name, + 'pk': b.pop_key, + 'pv': b.pop_value, + 'e': [ + { + 't': b.poi_type, + 'v': b.time_to_poi + } + ] + }, + 'geometry': { + 'type': 'Point', + 'coordinates': b.origin_coords + } + }); + } else { + // Update an existing feature with an ETA + a[match].properties.e.push({ + 't': b.poi_type, + 'v': b.time_to_poi + }); + return a; + } + return a; + }, [])); + }); +} diff --git a/test/test-scenarios-results.js b/test/test-scenarios-results.js new file mode 100644 index 00000000..25b1be93 --- /dev/null +++ b/test/test-scenarios-results.js @@ -0,0 +1,62 @@ +'use strict'; +import { assert } from 'chai'; + +import Server from '../app/services/server'; +import { setupStructure as setupDdStructure } from '../app/db/structure'; +import { setupStructure as setupStorageStructure } from '../app/s3/structure'; +import { fixMeUp } from './utils/data'; + +var options = { + connection: {port: 2000, host: '0.0.0.0'} +}; + +var instance; +before(function (done) { + instance = Server(options).hapi; + instance.register(require('inject-then'), function (err) { + if (err) throw err; + done(); + }); +}); + +describe('Scenario results', function () { + before('Before - Scenario results', function () { + this.timeout(5000); + return setupDdStructure() + .then(() => setupStorageStructure()) + .then(() => fixMeUp()); + }); + + describe('GET /projects/{projId}/scenarios/{scId}/results/geojson', function () { + it('should return the correct scenario - active', function () { + return instance.injectThen({ + method: 'GET', + url: '/projects/2000/scenarios/2000/results/geojson' + }).then(res => { + assert.equal(res.statusCode, 200, 'Status code is 200'); + assert.deepEqual(res.result.type, 'FeatureCollection'); + + let ft = res.result.features; + assert.equal(ft.length, 2); + assert.equal(ft[0].type, 'Feature'); + assert.equal(ft[0].properties.id, 200001); + assert.deepEqual(ft[0].properties.e, [ + { + 't': 'school', + 'v': 5000 + }, + { + 't': 'church', + 'v': 3500 + } + ]); + 
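+ // Origin 200002 only gets a school ETA in the fixture data, hence a
+ // single entry is expected here.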
assert.deepEqual(ft[1].properties.e, [ + { + 't': 'school', + 'v': 54700 + } + ]); + }); + }); + }); +}); diff --git a/test/utils/data.js b/test/utils/data.js index f7d8db45..177b27f2 100644 --- a/test/utils/data.js +++ b/test/utils/data.js @@ -752,7 +752,47 @@ export function project2000 () { } ])) .then(() => putObjectFromFile(bucket, 'scenario-2000/road-network_000000', FILE_ROAD_NETWORK)) - .then(() => putObjectFromFile(bucket, 'scenario-2000/poi_000000', FILE_POI)); + .then(() => putObjectFromFile(bucket, 'scenario-2000/poi_000000', FILE_POI)) + .then(() => scenarioResults( + [ + { + 'id': 1, + 'project_id': 2000, + 'scenario_id': 2000, + 'origin_id': 200001, + 'project_aa_id': 200001 + }, + { + 'id': 2, + 'project_id': 2000, + 'scenario_id': 2000, + 'origin_id': 200002, + 'project_aa_id': 200001 + } + ] + )) + .then(() => scenarioResultsPOI( + [ + { + id: 1, + result_id: 1, + type: 'school', + time: 5000 + }, + { + id: 2, + result_id: 2, + type: 'school', + time: 54700 + }, + { + id: 3, + result_id: 1, + type: 'church', + time: 3500 + } + ] + )); } // @@ -819,6 +859,14 @@ function projectOrigins ({ originsIndicators, origins }) { .then(() => db.batchInsert('projects_origins_indicators', originsIndicators)); } +function scenarioResults (data) { + return db.batchInsert('results', _.isArray(data) ? data : [data]); +} + +function scenarioResultsPOI (data) { + return db.batchInsert('results_poi', _.isArray(data) ? data : [data]); +} + // // Functions for project creation. // From 585d3d908ea820ebb209baec2de3a70bdad40ba4 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Mon, 19 Jun 2017 19:35:24 +0100 Subject: [PATCH 43/63] Add fileread methods --- app/s3/utils.js | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/app/s3/utils.js b/app/s3/utils.js index 5705640a..4d9e0565 100644 --- a/app/s3/utils.js +++ b/app/s3/utils.js @@ -1,9 +1,12 @@ 'use strict'; import fs from 'fs'; +import Promise from 'bluebird'; import s3, { bucket } from './'; import { removeObject, putObjectFromFile, listObjects } from './structure'; +const readFile = Promise.promisify(fs.readFile); + export function getPresignedUrl (file) { return new Promise((resolve, reject) => { s3.presignedPutObject(bucket, file, 24 * 60 * 60, (err, presignedUrl) => { @@ -101,6 +104,15 @@ export function removeLocalFile (path, quiet = false) { }); } +export function getLocalFileContents (path) { + return readFile(path, 'utf8'); +} + +export function getLocalJSONFileContents (path) { + return getLocalFileContents(path) + .then(result => JSON.parse(result)); +} + // List files // Proxy of listObjects function, assuming the bucket. 
export function listFiles (namePrefix) { From 7078156a43269c38f782d71ba98f8627adadb7bf Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Mon, 19 Jun 2017 19:35:51 +0100 Subject: [PATCH 44/63] Update fixture data --- test/utils/data.js | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/test/utils/data.js b/test/utils/data.js index 941bc8ab..baff39f2 100644 --- a/test/utils/data.js +++ b/test/utils/data.js @@ -255,7 +255,7 @@ export function project1003 () { 'type': 'origins', 'path': 'project-1003/origins_000000', 'project_id': 1003, - 'data': {indicators: [ { key: 'population', label: 'Total population' } ]}, + 'data': {indicators: [ { key: 'population', label: 'Total population' } ], availableInd: ['population']}, 'created_at': '2017-02-01T12:00:04.000Z', 'updated_at': '2017-02-01T12:00:04.000Z' })) @@ -340,7 +340,7 @@ export function project1004 () { 'type': 'origins', 'path': 'project-1004/origins_000000', 'project_id': 1004, - 'data': {indicators: [ { key: 'population', label: 'Total population' } ]}, + 'data': {indicators: [ { key: 'population', label: 'Total population' } ], availableInd: ['population']}, 'created_at': '2017-02-01T12:00:05.000Z', 'updated_at': '2017-02-01T12:00:05.000Z' }, @@ -478,7 +478,7 @@ export function project1100 () { 'type': 'origins', 'path': 'project-1100/origins_000000', 'project_id': 1100, - 'data': {indicators: [ { key: 'population', label: 'Total population' } ]}, + 'data': {indicators: [ { key: 'population', label: 'Total population' } ], availableInd: ['population']}, 'created_at': '2017-02-01T12:00:06.000Z', 'updated_at': '2017-02-01T12:00:06.000Z' }, @@ -625,7 +625,7 @@ export function project1200 () { 'type': 'origins', 'path': 'project-1200/origins_000000', 'project_id': 1200, - 'data': {indicators: [ { key: 'population', label: 'Total population' } ]}, + 'data': {indicators: [ { key: 'population', label: 'Total population' } ], availableInd: ['population']}, 'created_at': '2017-02-01T12:00:07.000Z', 'updated_at': '2017-02-01T12:00:07.000Z' }, @@ -845,7 +845,7 @@ export function project2000 () { 'type': 'origins', 'path': 'project-2000/origins_000000', 'project_id': 2000, - 'data': {indicators: [ { key: 'population', label: 'Total population' } ]}, + 'data': {indicators: [ { key: 'population', label: 'Total population' } ], availableInd: ['population']}, 'created_at': '2017-02-01T12:00:06.000Z', 'updated_at': '2017-02-01T12:00:06.000Z' }, @@ -1182,7 +1182,7 @@ export function projectPendingWithAllFiles (id) { 'type': 'origins', 'path': `project-${id}/origins_000000`, 'project_id': id, - 'data': {indicators: [ { key: 'population', label: 'Total population' } ]}, + 'data': {indicators: [ { key: 'population', label: 'Total population' } ], availableInd: ['population']}, 'created_at': '2017-02-01T12:00:07.000Z', 'updated_at': '2017-02-01T12:00:07.000Z' }, From e0a02d05e4036affed0b57e6a7588d532633b618 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Mon, 19 Jun 2017 19:36:23 +0100 Subject: [PATCH 45/63] Handle upload of origins data --- app/routes/projects--source-data.js | 282 +++++++++++++++++++++------- 1 file changed, 211 insertions(+), 71 deletions(-) diff --git a/app/routes/projects--source-data.js b/app/routes/projects--source-data.js index bbfd7149..4673c633 100644 --- a/app/routes/projects--source-data.js +++ b/app/routes/projects--source-data.js @@ -1,12 +1,14 @@ 'use strict'; import Joi from 'joi'; import Boom from 'boom'; +import _ from 'lodash'; import db from '../db/'; -import { putFile as putFileToS3, 
removeLocalFile } from '../s3/utils'; +import { putFile as putFileToS3, removeLocalFile, getLocalJSONFileContents } from '../s3/utils'; import { ProjectNotFoundError, FileExistsError, + FileNotFoundError, DataValidationError, ProjectStatusError } from '../utils/errors'; @@ -54,85 +56,74 @@ export default [ let sourceType = result.fields['source-type'][0]; let sourceName = result.fields['source-name'][0]; + if (sourceType !== 'file') { + throw new DataValidationError(`"source-type" must be one of [file]`); + } + if (['admin-bounds', 'profile', 'origins'].indexOf(sourceName) === -1) { throw new DataValidationError(`"source-name" must be one of [admin-bounds, profile, origins]`); } - switch (sourceType) { - case 'file': - if (!result.files.file) { - throw new DataValidationError('"file" is required'); - } + // Store the file if there is one. + // File must exist when the source is not origins, but that's + // checked afterwards. + let file = result.files.file ? result.files.file[0] : null; + let resolver; + // Origins need to be handled differently. + if (sourceName === 'origins') { + resolver = handleOrigins(result, projId); + } else { + if (!result.files.file) { + throw new DataValidationError('"file" is required'); + } - let file = result.files.file[0]; - let fileName = `${sourceName}_${Date.now()}`; - let filePath = `profile-${projId}/${fileName}`; + let fileName = `${sourceName}_${Date.now()}`; + let filePath = `profile-${projId}/${fileName}`; - // Upsert source. - return db('projects_source_data') + // Upsert source. + resolver = upsertSource(sourceName, 'file', projId) + // Check if the file exists. + .then(() => db('projects_files') .select('id') .where('project_id', projId) - .where('name', sourceName) - .first() - .then(source => { - if (source) { - return db('projects_source_data') - .update({type: 'file'}) - .where('id', source.id); - } else { - return db('projects_source_data') - .insert({ - project_id: projId, - name: sourceName, - type: 'file' - }); - } - }) - // Check if the file exists. - .then(() => db('projects_files') - .select('id') - .where('project_id', projId) - .where('type', sourceName) - ) - .then(files => { - if (files.length) { throw new FileExistsError(); } - }) - // Upload to S3. - .then(() => putFileToS3(filePath, file.path)) - // Insert into database. - .then(() => { - let data = { - name: fileName, - type: sourceName, - path: filePath, - project_id: projId, - created_at: (new Date()), - updated_at: (new Date()) - }; - - return db('projects_files') - .returning(['id', 'name', 'type', 'path', 'created_at']) - .insert(data) - .then(insertResponse => insertResponse[0]) - .then(insertResponse => db('projects').update({updated_at: (new Date())}).where('id', projId).then(() => insertResponse)); - }) - // Delete temp file. - .then(insertResponse => removeLocalFile(file.path, true).then(() => insertResponse)) - .then(insertResponse => reply(Object.assign({}, insertResponse, { - sourceType, - sourceName - }))) - .catch(err => { - // Delete temp file in case of error. Re-throw error to continue. - file && removeLocalFile(file.path, true); - throw err; - }); - case 'osm': - throw new DataValidationError(`"osm" type not implemented`); - // break; - default: - throw new DataValidationError(`"source-type" must be one of [osm, file]`); + .where('type', sourceName) + ) + .then(files => { + if (files.length) { throw new FileExistsError(); } + }) + // Upload to S3. + .then(() => putFileToS3(filePath, file.path)) + // Insert into database. 
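+ // Record the file row; the resolver tail shared with the origins
+ // branch below bumps the project's updated_at and builds the response.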
+ .then(() => { + let data = { + name: fileName, + type: sourceName, + path: filePath, + project_id: projId, + created_at: (new Date()), + updated_at: (new Date()) + }; + + return db('projects_files') + .returning(['id', 'name', 'type', 'path', 'created_at']) + .insert(data) + .then(insertResponse => insertResponse[0]); + }) + // Delete temp file. + .then(insertResponse => removeLocalFile(file.path, true).then(() => insertResponse)); } + + return resolver + .then(insertResponse => db('projects').update({updated_at: (new Date())}).where('id', projId).then(() => insertResponse)) + .then(insertResponse => reply(Object.assign({}, insertResponse, { + sourceType, + sourceName + }))) + .catch(err => { + // Delete temp file in case of error. Re-throw error to continue. + file && removeLocalFile(file.path, true); + throw err; + }); }) .catch(ProjectNotFoundError, e => reply(Boom.notFound(e.message))) .catch(FileExistsError, e => reply(Boom.conflict(e.message))) @@ -144,3 +135,152 @@ export default [ } } ]; + +function handleOrigins (result, projId) { + let sourceName = result.fields['source-name'][0]; + + if (!result.fields['available-ind']) { + throw new DataValidationError('"available-ind" is required'); + } + if (!result.fields['indicators[key]']) { + throw new DataValidationError('"indicators[key]" is required'); + } + if (!result.fields['indicators[label]']) { + throw new DataValidationError('"indicators[label]" is required'); + } + + // Data from the stream. + let availableInd = result.fields['available-ind']; + let indicatorKeys = result.fields['indicators[key]']; + let indicatorLabels = result.fields['indicators[label]']; + + // Are the submitted indicatorKeys in the available indicators. + let validKeys = indicatorKeys.every(k => availableInd.indexOf(k) !== -1); + if (!validKeys) { + throw new DataValidationError('Submitted indicator keys are not listed as available'); + } + + let indicators = _.zipWith(indicatorKeys, indicatorLabels, (k, l) => ({key: k, label: l})); + + return upsertSource(sourceName, 'file', projId) + .then(() => { + // Is there a file? + let file = result.files.file ? result.files.file[0] : null; + + // If there is, validate indicators against it. + if (file) { + let fileName = `${sourceName}_${Date.now()}`; + let filePath = `profile-${projId}/${fileName}`; + + // File was submitted. There can't be one in the database. + return db('projects_files') + .select('*') + .where('project_id', projId) + .where('type', sourceName) + .then(files => { + if (files.length) { throw new FileExistsError(); } + }) + .then(() => getLocalJSONFileContents(file.path)) + .then(contents => { + // Get the indicator common to every feature. Number indicators only. + let indicatorsInFile = contents.features.map(o => { + let numberKeys = []; + Object.keys(o.properties).forEach(k => { + if (!isNaN(parseInt(o.properties[k]))) { + numberKeys.push(k); + } + }); + return numberKeys; + }); + let intersect = indicatorsInFile.shift(); + indicatorsInFile.every(o => { + intersect = intersect.filter(i => o.indexOf(i) !== -1); + return !!intersect.length; + }); + indicatorsInFile = intersect; + + // indicatorsInFile must be the same as availableInd. + if (indicatorsInFile.length !== availableInd.length || _.intersection(indicatorsInFile, availableInd).length !== indicatorsInFile.length) { + throw new DataValidationError('Submitted available indicators do not match file attributes'); + } + }) + // Upload to S3. + .then(() => putFileToS3(filePath, file.path)) + // Insert into database. 
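+ // The data column stores the indicator config as JSON, e.g. (matching
+ // the test fixtures):
+ // {"indicators": [{"key": "population", "label": "Total population"}],
+ //  "availableInd": ["population"]}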
+ .then(() => { + let data = { + name: fileName, + type: sourceName, + path: filePath, + data: JSON.stringify({ indicators, availableInd }), + project_id: projId, + created_at: (new Date()), + updated_at: (new Date()) + }; + + return db('projects_files') + .returning(['id', 'name', 'type', 'path', 'data', 'created_at']) + .insert(data) + .then(insertResponse => insertResponse[0]); + }) + // Delete temp file. + .then(insertResponse => removeLocalFile(file.path, true).then(() => insertResponse)); + + // If not, validate against the database. + } else { + // File was not submitted. There HAS to be one in the database. + return db('projects_files') + .select('*') + .where('project_id', projId) + .where('type', sourceName) + .then(files => { + if (!files.length) { throw new FileNotFoundError(); } + return files[0].data; + }) + .then(indicatorData => { + let storedIndicators = indicatorData.availableInd; + // Available indicators must be the same as the ones stores in + // the db. + if (storedIndicators.length !== availableInd.length || _.intersection(storedIndicators, availableInd).length !== storedIndicators.length) { + throw new DataValidationError('Submitted available indicators do not match stored attributes'); + } + }) + // Update database. + .then(() => { + let data = { + data: JSON.stringify({ indicators, availableInd }), + updated_at: (new Date()) + }; + + return db('projects_files') + .update(data, ['id', 'name', 'type', 'path', 'data', 'created_at']) + .where('project_id', projId) + .where('type', sourceName) + .then(insertResponse => insertResponse[0]); + }); + } + }); +} + +function upsertSource (sourceName, type, projId) { + return db('projects_source_data') + .select('id') + .where('project_id', projId) + .where('name', sourceName) + .first() + .then(source => { + if (source) { + // No need. + // return db('projects_source_data') + // .update({type: type}) + // .where('id', source.id); + } else { + return db('projects_source_data') + .insert({ + project_id: projId, + name: sourceName, + type: type + }); + } + }); +} From 73e231d70d11c643285fc8b94199dc7a1595fdb5 Mon Sep 17 00:00:00 2001 From: Olaf Veerman Date: Mon, 19 Jun 2017 14:41:22 -0400 Subject: [PATCH 46/63] Support multiple ETA and pop counts Also, values that are used by mapbox-gl are stored as k:v in the 'properties' object, instead of nesting them further. --- app/routes/scenario--results.js | 29 ++++++++++++++--------------- test/test-scenarios-results.js | 23 ++++++----------------- 2 files changed, 20 insertions(+), 32 deletions(-) diff --git a/app/routes/scenario--results.js b/app/routes/scenario--results.js index 4fd7ea6c..6cda3d10 100644 --- a/app/routes/scenario--results.js +++ b/app/routes/scenario--results.js @@ -322,6 +322,8 @@ export default [ /** * Turn an array of results into a proper GeoJSON features. Each feature refers * to a unique origin, and can have multiple ETA for each POI types. + * This will mostly be used to visualize results on a map. ETA and population + * data are stored flatly in the properties object. 
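+ * e.g. a feature's properties (values taken from the test fixtures):
+ * { id: 200001, n: 'Paripiranga', pop: ['population'], pop0: 29459,
+ *   poi: ['school', 'church'], eta0: 5000, eta1: 3500 }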
*/ function mergeOriginETA (results) { return new Promise((resolve, reject) => { @@ -335,27 +337,24 @@ function mergeOriginETA (results) { 'properties': { 'id': b.origin_id, 'n': b.origin_name, - 'pk': b.pop_key, - 'pv': b.pop_value, - 'e': [ - { - 't': b.poi_type, - 'v': b.time_to_poi - } - ] + 'pop0': b.pop_value, + 'pop': [ b.pop_key ], + 'eta0': b.time_to_poi, + 'poi': [ b.poi_type ] }, 'geometry': { 'type': 'Point', 'coordinates': b.origin_coords } }); - } else { - // Update an existing feature with an ETA - a[match].properties.e.push({ - 't': b.poi_type, - 'v': b.time_to_poi - }); - return a; + } else if (a[match].properties.poi.indexOf(b.poi_type) === -1) { + // Update an existing feature with an ETA for a different POI + a[match].properties[`eta${a[match].properties.poi.length}`] = b.time_to_poi; + a[match].properties.poi.push(b.poi_type); + } else if (a[match].properties.pop.indexOf(b.pop_type) === -1) { + // Update an existing feature with a population for a different sub-set + a[match].properties[`pop${a[match].properties.pop.length}`] = b.pop_value; + a[match].properties.pop.push(b.pop_key); } return a; }, [])); diff --git a/test/test-scenarios-results.js b/test/test-scenarios-results.js index 25b1be93..3c73c246 100644 --- a/test/test-scenarios-results.js +++ b/test/test-scenarios-results.js @@ -28,7 +28,7 @@ describe('Scenario results', function () { }); describe('GET /projects/{projId}/scenarios/{scId}/results/geojson', function () { - it('should return the correct scenario - active', function () { + it.only('should return the correct scenario - active', function () { return instance.injectThen({ method: 'GET', url: '/projects/2000/scenarios/2000/results/geojson' @@ -40,22 +40,11 @@ describe('Scenario results', function () { assert.equal(ft.length, 2); assert.equal(ft[0].type, 'Feature'); assert.equal(ft[0].properties.id, 200001); - assert.deepEqual(ft[0].properties.e, [ - { - 't': 'school', - 'v': 5000 - }, - { - 't': 'church', - 'v': 3500 - } - ]); - assert.deepEqual(ft[1].properties.e, [ - { - 't': 'school', - 'v': 54700 - } - ]); + assert.equal(ft[0].properties.eta0, 5000); + assert.equal(ft[0].properties.eta1, 3500); + assert.deepEqual(ft[0].properties.poi, ['school', 'church']); + assert.equal(ft[1].properties.eta0, 54700); + assert.deepEqual(ft[1].properties.poi, ['school']); }); }); }); From 8e660df12c2fed5dbf7891896ae09e7a2f18222a Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Mon, 19 Jun 2017 19:45:55 +0100 Subject: [PATCH 47/63] Fix failing tests --- app/routes/scenarios--get.js | 2 +- test/test-projects.js | 1 + test/test-scenarios.js | 2 +- 3 files changed, 3 insertions(+), 2 deletions(-) diff --git a/app/routes/scenarios--get.js b/app/routes/scenarios--get.js index 4bcdfe97..636702c8 100644 --- a/app/routes/scenarios--get.js +++ b/app/routes/scenarios--get.js @@ -151,7 +151,7 @@ function attachAdminAreas (scenario) { aa = aa.map(o => { o.selected = selected.indexOf(o.id) !== -1; return o; - }); + }).sort((a, b) => a.id - b.id); scenario.admin_areas = aa; } diff --git a/test/test-projects.js b/test/test-projects.js index 013ca6d4..2aaec151 100644 --- a/test/test-projects.js +++ b/test/test-projects.js @@ -169,6 +169,7 @@ describe('Projects', function () { 'type': 'origins', 'path': 'project-2000/origins_000000', 'data': { + 'availableInd': ['population'], 'indicators': [ { 'key': 'population', 'label': 'Total population' } ] }, 'created_at': new Date('2017-02-01T12:00:06.000Z') diff --git a/test/test-scenarios.js b/test/test-scenarios.js index 
6ecd6a41..b8166bd1 100644 --- a/test/test-scenarios.js +++ b/test/test-scenarios.js @@ -463,7 +463,7 @@ describe('Scenarios', function () { }); }); - it('should update the deselect all admin areas', function () { + it('should deselect all admin areas', function () { return instance.injectThen({ method: 'PATCH', url: '/projects/2000/scenarios/2000', From 27a4ec50882515b9cafc34447db54ece64911616 Mon Sep 17 00:00:00 2001 From: Olaf Veerman Date: Mon, 19 Jun 2017 17:02:58 -0400 Subject: [PATCH 48/63] Ensure that same poi and pop types share the same key --- app/routes/scenario--results.js | 55 ++++++++++++++++++++++++++------- test/test-scenarios-results.js | 14 ++++++--- 2 files changed, 52 insertions(+), 17 deletions(-) diff --git a/app/routes/scenario--results.js b/app/routes/scenario--results.js index 6cda3d10..4c0f5e8a 100644 --- a/app/routes/scenario--results.js +++ b/app/routes/scenario--results.js @@ -323,13 +323,42 @@ export default [ * Turn an array of results into a proper GeoJSON features. Each feature refers * to a unique origin, and can have multiple ETA for each POI types. * This will mostly be used to visualize results on a map. ETA and population - * data are stored flatly in the properties object. + * data are stored flatly in the properties object so mapbox-gl can use them. */ function mergeOriginETA (results) { return new Promise((resolve, reject) => { + // build array of unique POI types in the results + let poiTypes = results.reduce((a, b) => { + if (a.indexOf(b.poi_type) === -1) { + a.push(b.poi_type); + } + return a; + }, []); + + // build array of unique population types in the results + let popTypes = results.reduce((a, b) => { + if (a.indexOf(b.pop_key) === -1) { + a.push(b.pop_key); + } + return a; + }, []); + + // return max population count for each population type + // values are stored in the same order as the types in popTypes + let maxPop = popTypes.map(p => Math.max(...results + .filter(r => r.pop_key === p) + .map(r => { + if (r.pop_key) return r.pop_value; + }) + )); + + // Build a GeoJSON feature array, with each origin in its own feature return resolve(results.reduce((a, b) => { // Check if the accumulator already has an object for the origin let match = a.findIndex(o => o.properties.id === b.origin_id); + let poiIndex = poiTypes.indexOf(b.poi_type); + let popIndex = popTypes.indexOf(b.pop_key); + if (match === -1) { // Create the feature a.push({ @@ -337,25 +366,27 @@ function mergeOriginETA (results) { 'properties': { 'id': b.origin_id, 'n': b.origin_name, - 'pop0': b.pop_value, - 'pop': [ b.pop_key ], - 'eta0': b.time_to_poi, - 'poi': [ b.poi_type ] + 'pop': popTypes, + 'poi': poiTypes, + [`e${poiIndex}`]: b.time_to_poi, + [`p${popIndex}`]: b.pop_value, + [`pn${popIndex}`]: parseInt(b.pop_value / maxPop[popIndex] * 100) / 100 }, 'geometry': { 'type': 'Point', 'coordinates': b.origin_coords } }); - } else if (a[match].properties.poi.indexOf(b.poi_type) === -1) { + } else if (!a[match].properties[`e${poiIndex}`]) { // Update an existing feature with an ETA for a different POI - a[match].properties[`eta${a[match].properties.poi.length}`] = b.time_to_poi; - a[match].properties.poi.push(b.poi_type); - } else if (a[match].properties.pop.indexOf(b.pop_type) === -1) { - // Update an existing feature with a population for a different sub-set - a[match].properties[`pop${a[match].properties.pop.length}`] = b.pop_value; - a[match].properties.pop.push(b.pop_key); + a[match].properties[`e${poiIndex}`] = b.time_to_poi; + } else if 
(!a[match].properties[`e${popIndex}`]) { + // Update an existing feature with a population count for a different + // sub-set + a[match].properties[`p${popIndex}`] = b.pop_value; + a[match].properties[`pn${popIndex}`] = b.pop_value / maxPop[popIndex]; } + return a; }, [])); }); diff --git a/test/test-scenarios-results.js b/test/test-scenarios-results.js index 3c73c246..770b5e6a 100644 --- a/test/test-scenarios-results.js +++ b/test/test-scenarios-results.js @@ -28,7 +28,7 @@ describe('Scenario results', function () { }); describe('GET /projects/{projId}/scenarios/{scId}/results/geojson', function () { - it.only('should return the correct scenario - active', function () { + it('should return the correct scenario - active', function () { return instance.injectThen({ method: 'GET', url: '/projects/2000/scenarios/2000/results/geojson' @@ -40,11 +40,15 @@ describe('Scenario results', function () { assert.equal(ft.length, 2); assert.equal(ft[0].type, 'Feature'); assert.equal(ft[0].properties.id, 200001); - assert.equal(ft[0].properties.eta0, 5000); - assert.equal(ft[0].properties.eta1, 3500); + assert.equal(ft[0].properties.e0, 5000); + assert.equal(ft[0].properties.e1, 3500); assert.deepEqual(ft[0].properties.poi, ['school', 'church']); - assert.equal(ft[1].properties.eta0, 54700); - assert.deepEqual(ft[1].properties.poi, ['school']); + assert.equal(ft[1].properties.e0, 54700); + assert.equal(ft[1].properties.e1, undefined); + assert.deepEqual(ft[1].properties.poi, ['school', 'church']); + assert.equal(ft[0].properties.p0, 29459); + assert.equal(ft[0].properties.pn0, 1); + assert.deepEqual(ft[0].properties.pop, ['population']); }); }); }); From e6a6713efa33c00e37636cf7e66034587dd03ca7 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Tue, 20 Jun 2017 11:53:50 +0100 Subject: [PATCH 49/63] Add endpoints for source data download --- app/routes/projects--source-data.js | 64 +++++++++++++++++++++++++- app/routes/scenarios--source-data.js | 68 +++++++++++++++++++++++++++- 2 files changed, 129 insertions(+), 3 deletions(-) diff --git a/app/routes/projects--source-data.js b/app/routes/projects--source-data.js index 4673c633..f343f283 100644 --- a/app/routes/projects--source-data.js +++ b/app/routes/projects--source-data.js @@ -2,9 +2,11 @@ import Joi from 'joi'; import Boom from 'boom'; import _ from 'lodash'; +import Promise from 'bluebird'; +import Zip from 'node-zip'; import db from '../db/'; -import { putFile as putFileToS3, removeLocalFile, getLocalJSONFileContents } from '../s3/utils'; +import { putFile as putFileToS3, removeLocalFile, getLocalJSONFileContents, getFileContents } from '../s3/utils'; import { ProjectNotFoundError, FileExistsError, @@ -133,6 +135,66 @@ export default [ reply(Boom.badImplementation(err)); }); } + }, + { + path: '/projects/{projId}/source-data', + method: 'GET', + config: { + validate: { + params: { + projId: Joi.number() + }, + query: { + download: Joi.boolean().truthy('true').falsy('false').required(), + type: Joi.string().valid(['profile', 'origins', 'admin-bounds']).required() + } + } + }, + handler: (request, reply) => { + const { projId } = request.params; + + db('projects_files') + .select('*') + .where('type', request.query.type) + .where('project_id', projId) + .then(files => { + if (!files.length) throw new FileNotFoundError(); + return files; + }) + .then(files => { + let zip = new Zip(); + return Promise.map(files, file => getFileContents(file.path) + .then(content => { + let name; + switch (file.type) { + case 'profile': + name = `${file.name}.lua`; + 
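+ // (profiles are stored as Lua scripts; origins and admin-bounds are
+ // served as GeoJSON)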
break; + case 'origins': + case 'admin-bounds': + name = `${file.name}.geojson`; + break; + } + zip.file(name, content); + }) + ) + .then(() => zip.generate({ base64: false, compression: 'DEFLATE' })) + // Send! + .then(data => reply(data) + .type('application/zip') + .encoding('binary') + .header('Content-Disposition', `attachment; filename=${files[0].type}-p${projId}.zip`) + ); + }) + .catch(FileNotFoundError, e => reply(Boom.notFound(e.message))) + .catch(err => { + if (err.code === 'NoSuchKey') { + return reply(Boom.notFound('File not found in storage bucket')); + } + console.log('err', err); + reply(Boom.badImplementation(err)); + }); + } } ]; diff --git a/app/routes/scenarios--source-data.js b/app/routes/scenarios--source-data.js index 5de7fb11..c1682aa6 100644 --- a/app/routes/scenarios--source-data.js +++ b/app/routes/scenarios--source-data.js @@ -1,15 +1,18 @@ 'use strict'; import Joi from 'joi'; import Boom from 'boom'; +import Promise from 'bluebird'; +import Zip from 'node-zip'; import db from '../db/'; -import { putFile as putFileToS3, removeLocalFile } from '../s3/utils'; +import { putFile as putFileToS3, removeLocalFile, getFileContents } from '../s3/utils'; import { ProjectNotFoundError, ScenarioNotFoundError, FileExistsError, DataValidationError, - ProjectStatusError + ProjectStatusError, + FileNotFoundError } from '../utils/errors'; import { parseFormData } from '../utils/utils'; @@ -179,5 +182,66 @@ export default [ reply(Boom.badImplementation(err)); }); } + }, + { + path: '/projects/{projId}/scenarios/{scId}/source-data', + method: 'GET', + config: { + validate: { + params: { + projId: Joi.number(), + scId: Joi.number() + }, + query: { + download: Joi.boolean().truthy('true').falsy('false').required(), + type: Joi.string().valid(['poi', 'road-network']).required() + } + } + }, + handler: (request, reply) => { + const { projId, scId } = request.params; + + db('scenarios_files') + .select('*') + .where('type', request.query.type) + .where('project_id', projId) + .where('scenario_id', scId) + .then(files => { + if (!files.length) throw new FileNotFoundError(); + return files; + }) + .then(files => { + let zip = new Zip(); + return Promise.map(files, file => getFileContents(file.path) + .then(content => { + let name; + switch (file.type) { + case 'poi': + name = `${file.name}.geojson`; + break; + case 'road-network': + name = `${file.name}.osm`; + break; + } + zip.file(name, content); + }) + ) + .then(() => zip.generate({ base64: false, compression: 'DEFLATE' })) + // Send! 
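+ // e.g. GET /projects/12/scenarios/34/source-data?download=true&type=road-network
+ // responds with road-network-p12s34.zip (ids are illustrative).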
+ .then(data => reply(data) + .type('application/zip') + .encoding('binary') + .header('Content-Disposition', `attachment; filename=${files[0].type}-p${projId}s${scId}.zip`) + ); + }) + .catch(FileNotFoundError, e => reply(Boom.notFound(e.message))) + .catch(err => { + if (err.code === 'NoSuchKey') { + return reply(Boom.notFound('File not found in storage bucket')); + } + console.log('err', err); + reply(Boom.badImplementation(err)); + }); + } } ]; From c25228e1d2740ab13afa1b33ce205e8c8a37048c Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Tue, 20 Jun 2017 12:18:50 +0100 Subject: [PATCH 50/63] Add validation for feature name --- app/routes/projects--source-data.js | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/app/routes/projects--source-data.js b/app/routes/projects--source-data.js index f343f283..a9c13743 100644 --- a/app/routes/projects--source-data.js +++ b/app/routes/projects--source-data.js @@ -244,6 +244,12 @@ function handleOrigins (result, projId) { }) .then(() => getLocalJSONFileContents(file.path)) .then(contents => { + // Every feature must have a name attribute. + let hasName = contents.features.every(f => !!f.properties.name); + if (!hasName) { + throw new DataValidationError('All features in submitted file must have a name'); + } + // Get the indicator common to every feature. Number indicators only. let indicatorsInFile = contents.features.map(o => { let numberKeys = []; From 2178ce5340f042bfb47f01b0e7e974d11f00f7e0 Mon Sep 17 00:00:00 2001 From: Olaf Veerman Date: Tue, 20 Jun 2017 07:33:06 -0400 Subject: [PATCH 51/63] Remove old result files when generating new ones --- app/routes/scenarios--gen-results.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app/routes/scenarios--gen-results.js b/app/routes/scenarios--gen-results.js index 6af37901..0e5671c1 100644 --- a/app/routes/scenarios--gen-results.js +++ b/app/routes/scenarios--gen-results.js @@ -75,7 +75,7 @@ module.exports = [ .select('*') .where('scenario_id', scId) .where('project_id', projId) - .whereIn('type', ['results', 'results-all']) + .whereIn('type', ['results-csv', 'results-json', 'results-geojson']) .then(files => { let tasks = files.map(f => removeFile(f.path)); let ids = files.map(f => f.id); From f82302436e7716614d86a8784876af59bf314828 Mon Sep 17 00:00:00 2001 From: Olaf Veerman Date: Tue, 20 Jun 2017 07:43:18 -0400 Subject: [PATCH 52/63] Fix failing test --- test/test-result-gen.js | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/test-result-gen.js b/test/test-result-gen.js index eb82e98f..640b2d97 100644 --- a/test/test-result-gen.js +++ b/test/test-result-gen.js @@ -89,7 +89,7 @@ describe('Result generation', function () { return db.batchInsert('scenarios_files', [ { 'name': 'results', - 'type': 'results', + 'type': 'results-json', 'path': 'scenario-2000/results_000000', 'project_id': 2000, 'scenario_id': 2000, @@ -98,7 +98,7 @@ describe('Result generation', function () { }, { 'name': 'results-all', - 'type': 'results-all', + 'type': 'results-csv', 'path': 'scenario-2000/results-all_000000', 'project_id': 2000, 'scenario_id': 2000, @@ -118,7 +118,7 @@ describe('Result generation', function () { .then(() => db('scenarios_files') .select('*') .where('scenario_id', 2000) - .whereIn('type', ['results', 'results-all']) + .whereIn('type', ['results-json', 'results-csv']) ) .then(files => { assert.lengthOf(files, 0, 'Scenario results is empty'); From 7661df17d5cfd0c7222b9c0a1696c08dac1c37bb Mon Sep 17 00:00:00 2001 From: Olaf Veerman Date: Tue, 20 
Jun 2017 11:28:13 -0400 Subject: [PATCH 53/63] Minimize response to build map GeoJSON will be built client-side --- app/routes/scenario--results.js | 120 ++++++++++++++------------------ test/test-scenarios-results.js | 37 +++++----- test/utils/data.js | 19 +++++ 3 files changed, 92 insertions(+), 84 deletions(-) diff --git a/app/routes/scenario--results.js b/app/routes/scenario--results.js index 4c0f5e8a..0cfb5d88 100644 --- a/app/routes/scenario--results.js +++ b/app/routes/scenario--results.js @@ -276,7 +276,7 @@ export default [ } }, { - path: '/projects/{projId}/scenarios/{scId}/results/geojson', + path: '/projects/{projId}/scenarios/{scId}/results/mini', method: 'GET', config: { validate: { @@ -306,12 +306,8 @@ export default [ .where('results.scenario_id', scId); Promise.all(_results) .then(res => mergeOriginETA(res)) - .then(res => { - reply({ - 'type': 'FeatureCollection', - 'features': res - }); - }).catch(err => { + .then(res => reply(res)) + .catch(err => { console.log('err', err); reply(Boom.badImplementation(err)); }); @@ -320,74 +316,64 @@ export default [ ]; /** - * Turn an array of results into a proper GeoJSON features. Each feature refers - * to a unique origin, and can have multiple ETA for each POI types. - * This will mostly be used to visualize results on a map. ETA and population - * data are stored flatly in the properties object so mapbox-gl can use them. + * Merge result objects from same origin together. This is primarily to support + * visualization of the results on the map. GeoJSON is built client-side, so + * the response is kept to a minimum. + * To support data driven styling with mapbox-gl, properties are stored as flat + * k:v pairs, instead of nesting them in another object. */ function mergeOriginETA (results) { - return new Promise((resolve, reject) => { - // build array of unique POI types in the results - let poiTypes = results.reduce((a, b) => { - if (a.indexOf(b.poi_type) === -1) { - a.push(b.poi_type); - } - return a; - }, []); + let metaData = { + 'poi_type': [], + 'pop_type': [], + 'maxPop': [] + }; - // build array of unique population types in the results - let popTypes = results.reduce((a, b) => { - if (a.indexOf(b.pop_key) === -1) { - a.push(b.pop_key); - } - return a; - }, []); + // build array of unique POI and population types in the results + results.map(r => { + if (metaData.poi_type.indexOf(r.poi_type) === -1) metaData.poi_type.push(r.poi_type); + if (metaData.pop_type.indexOf(r.pop_key) === -1) metaData.pop_type.push(r.pop_key); + }); - // return max population count for each population type - // values are stored in the same order as the types in popTypes - let maxPop = popTypes.map(p => Math.max(...results - .filter(r => r.pop_key === p) + // return max population count for each population type + // values are stored in the same order as the types in popTypes + metaData.maxPop = metaData.pop_type.map(p => + Math.max(...results.filter(r => r.pop_key === p) .map(r => { if (r.pop_key) return r.pop_value; }) - )); + )); - // Build a GeoJSON feature array, with each origin in its own feature - return resolve(results.reduce((a, b) => { - // Check if the accumulator already has an object for the origin - let match = a.findIndex(o => o.properties.id === b.origin_id); - let poiIndex = poiTypes.indexOf(b.poi_type); - let popIndex = popTypes.indexOf(b.pop_key); + // Build a GeoJSON feature array, with each origin in its own feature + let resultData = results.reduce((a, b) => { + // Check if the accumulator already has an object for the origin 
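+ // Each accumulated entry is a flat object, e.g. (from the test fixtures):
+ // { i: 200001, n: 'Paripiranga', 'e-0': 5000, 'e-1': 3500, 'p-0': 29459,
+ //   c: [-37.86215, -10.68289] }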
+ let match = a.findIndex(o => o.i === b.origin_id); + let poiIndex = metaData.poi_type.indexOf(b.poi_type); + let popIndex = metaData.pop_type.indexOf(b.pop_key); - if (match === -1) { - // Create the feature - a.push({ - 'type': 'Feature', - 'properties': { - 'id': b.origin_id, - 'n': b.origin_name, - 'pop': popTypes, - 'poi': poiTypes, - [`e${poiIndex}`]: b.time_to_poi, - [`p${popIndex}`]: b.pop_value, - [`pn${popIndex}`]: parseInt(b.pop_value / maxPop[popIndex] * 100) / 100 - }, - 'geometry': { - 'type': 'Point', - 'coordinates': b.origin_coords - } - }); - } else if (!a[match].properties[`e${poiIndex}`]) { - // Update an existing feature with an ETA for a different POI - a[match].properties[`e${poiIndex}`] = b.time_to_poi; - } else if (!a[match].properties[`e${popIndex}`]) { - // Update an existing feature with a population count for a different - // sub-set - a[match].properties[`p${popIndex}`] = b.pop_value; - a[match].properties[`pn${popIndex}`] = b.pop_value / maxPop[popIndex]; - } + if (match === -1) { + // Create the feature + a.push({ + 'i': b.origin_id, + 'n': b.origin_name, + [`e-${poiIndex}`]: b.time_to_poi, + [`p-${popIndex}`]: b.pop_value, + 'c': [parseInt(b.origin_coords[0] * 100000) / 100000, parseInt(b.origin_coords[1] * 100000) / 100000] + }); + } else if (!a[match][`e-${poiIndex}`]) { + // Update an existing feature with an ETA for a different POI + a[match][`e-${poiIndex}`] = b.time_to_poi; + } else if (!a[match][`p-${popIndex}`]) { + // Update an existing feature with a population count for a different + // sub-set + a[match][`p-${popIndex}`] = b.pop_value; + } - return a; - }, [])); - }); + return a; + }, []); + + return { + 'meta': metaData, + 'results': resultData + }; } diff --git a/test/test-scenarios-results.js b/test/test-scenarios-results.js index 770b5e6a..760ff50d 100644 --- a/test/test-scenarios-results.js +++ b/test/test-scenarios-results.js @@ -27,28 +27,31 @@ describe('Scenario results', function () { .then(() => fixMeUp()); }); - describe('GET /projects/{projId}/scenarios/{scId}/results/geojson', function () { - it('should return the correct scenario - active', function () { + describe('GET /projects/{projId}/scenarios/{scId}/results/mini', function () { + it('should return the correct results for a scenario', function () { return instance.injectThen({ method: 'GET', - url: '/projects/2000/scenarios/2000/results/geojson' + url: '/projects/2000/scenarios/2000/results/mini' }).then(res => { assert.equal(res.statusCode, 200, 'Status code is 200'); - assert.deepEqual(res.result.type, 'FeatureCollection'); + assert.deepEqual(res.result.meta, { + 'poi_type': [ 'school', 'church' ], + 'pop_type': [ 'population' ], + 'maxPop': [ 48733 ] + }); - let ft = res.result.features; - assert.equal(ft.length, 2); - assert.equal(ft[0].type, 'Feature'); - assert.equal(ft[0].properties.id, 200001); - assert.equal(ft[0].properties.e0, 5000); - assert.equal(ft[0].properties.e1, 3500); - assert.deepEqual(ft[0].properties.poi, ['school', 'church']); - assert.equal(ft[1].properties.e0, 54700); - assert.equal(ft[1].properties.e1, undefined); - assert.deepEqual(ft[1].properties.poi, ['school', 'church']); - assert.equal(ft[0].properties.p0, 29459); - assert.equal(ft[0].properties.pn0, 1); - assert.deepEqual(ft[0].properties.pop, ['population']); + let ft = res.result.results; + assert.equal(ft.length, 3); + assert.deepEqual(ft[0], { + 'n': 'Paripiranga', + 'i': 200001, + 'e-0': 5000, + 'e-1': 3500, + 'p-0': 29459, + 'c': [-37.86215, -10.68289] + }); + assert.equal(ft[1]['e-0'], 
54700); + assert.equal(ft[1]['e-1'], undefined); }); }); }); diff --git a/test/utils/data.js b/test/utils/data.js index 177b27f2..9eb48270 100644 --- a/test/utils/data.js +++ b/test/utils/data.js @@ -768,6 +768,13 @@ export function project2000 () { 'scenario_id': 2000, 'origin_id': 200002, 'project_aa_id': 200001 + }, + { + 'id': 3, + 'project_id': 2000, + 'scenario_id': 2000, + 'origin_id': 200003, + 'project_aa_id': 200001 } ] )) @@ -790,6 +797,18 @@ export function project2000 () { result_id: 1, type: 'church', time: 3500 + }, + { + id: 4, + result_id: 3, + type: 'school', + time: 0 + }, + { + id: 5, + result_id: 3, + type: 'church', + time: 350000 } ] )); From f46c8a3fe828cfaf2b467d33d372b0d7a1b29982 Mon Sep 17 00:00:00 2001 From: Olaf Veerman Date: Tue, 20 Jun 2017 14:05:40 -0400 Subject: [PATCH 54/63] Add normalized population to geo results Easier to do on the server than client-side --- app/routes/scenario--results.js | 4 +++- test/test-scenarios-results.js | 5 +++-- 2 files changed, 6 insertions(+), 3 deletions(-) diff --git a/app/routes/scenario--results.js b/app/routes/scenario--results.js index 0cfb5d88..1db43ce2 100644 --- a/app/routes/scenario--results.js +++ b/app/routes/scenario--results.js @@ -276,7 +276,7 @@ export default [ } }, { - path: '/projects/{projId}/scenarios/{scId}/results/mini', + path: '/projects/{projId}/scenarios/{scId}/results/geo', method: 'GET', config: { validate: { @@ -358,6 +358,7 @@ function mergeOriginETA (results) { 'n': b.origin_name, [`e-${poiIndex}`]: b.time_to_poi, [`p-${popIndex}`]: b.pop_value, + [`pn-${popIndex}`]: parseInt(b.pop_value / metaData.maxPop[popIndex] * 100) / 100, 'c': [parseInt(b.origin_coords[0] * 100000) / 100000, parseInt(b.origin_coords[1] * 100000) / 100000] }); } else if (!a[match][`e-${poiIndex}`]) { @@ -367,6 +368,7 @@ function mergeOriginETA (results) { // Update an existing feature with a population count for a different // sub-set a[match][`p-${popIndex}`] = b.pop_value; + a[match][`pn-${popIndex}`] = parseInt(b.pop_value / metaData.maxPop[popIndex] * 100) / 100; } return a; diff --git a/test/test-scenarios-results.js b/test/test-scenarios-results.js index 760ff50d..b2ffaaa7 100644 --- a/test/test-scenarios-results.js +++ b/test/test-scenarios-results.js @@ -27,11 +27,11 @@ describe('Scenario results', function () { .then(() => fixMeUp()); }); - describe('GET /projects/{projId}/scenarios/{scId}/results/mini', function () { + describe('GET /projects/{projId}/scenarios/{scId}/results/geo', function () { it('should return the correct results for a scenario', function () { return instance.injectThen({ method: 'GET', - url: '/projects/2000/scenarios/2000/results/mini' + url: '/projects/2000/scenarios/2000/results/geo' }).then(res => { assert.equal(res.statusCode, 200, 'Status code is 200'); assert.deepEqual(res.result.meta, { @@ -48,6 +48,7 @@ describe('Scenario results', function () { 'e-0': 5000, 'e-1': 3500, 'p-0': 29459, + 'pn-0': 0.6, 'c': [-37.86215, -10.68289] }); assert.equal(ft[1]['e-0'], 54700); From 3f99116e03c823552158ab18f59537a30f68e1c5 Mon Sep 17 00:00:00 2001 From: Olaf Veerman Date: Wed, 21 Jun 2017 12:21:03 -0400 Subject: [PATCH 55/63] Use consistent casing on geo response --- app/routes/scenario--results.js | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/app/routes/scenario--results.js b/app/routes/scenario--results.js index 1db43ce2..21ac1747 100644 --- a/app/routes/scenario--results.js +++ b/app/routes/scenario--results.js @@ -324,20 +324,20 @@ export default [ */ 
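+ // The /results/geo endpoint replies with
+ // { meta: { poiType, popType, maxPop }, results: [...] }, the meta keys
+ // now camelCased to keep the response casing consistent.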
function mergeOriginETA (results) { let metaData = { - 'poi_type': [], - 'pop_type': [], + 'poiType': [], + 'popType': [], 'maxPop': [] }; // build array of unique POI and population types in the results results.map(r => { - if (metaData.poi_type.indexOf(r.poi_type) === -1) metaData.poi_type.push(r.poi_type); - if (metaData.pop_type.indexOf(r.pop_key) === -1) metaData.pop_type.push(r.pop_key); + if (metaData.poiType.indexOf(r.poi_type) === -1) metaData.poiType.push(r.poi_type); + if (metaData.popType.indexOf(r.pop_key) === -1) metaData.popType.push(r.pop_key); }); // return max population count for each population type // values are stored in the same order as the types in popTypes - metaData.maxPop = metaData.pop_type.map(p => + metaData.maxPop = metaData.popType.map(p => Math.max(...results.filter(r => r.pop_key === p) .map(r => { if (r.pop_key) return r.pop_value; @@ -348,8 +348,8 @@ function mergeOriginETA (results) { let resultData = results.reduce((a, b) => { // Check if the accumulator already has an object for the origin let match = a.findIndex(o => o.i === b.origin_id); - let poiIndex = metaData.poi_type.indexOf(b.poi_type); - let popIndex = metaData.pop_type.indexOf(b.pop_key); + let poiIndex = metaData.poiType.indexOf(b.poi_type); + let popIndex = metaData.popType.indexOf(b.pop_key); if (match === -1) { // Create the feature From e36ebf802520e1dec396dba34004ace9af30eb32 Mon Sep 17 00:00:00 2001 From: Olaf Veerman Date: Wed, 21 Jun 2017 14:17:46 -0400 Subject: [PATCH 56/63] Improve tests - use result id's with indication of project - add result file when storing results --- .../data-sergipe/results-p200001-s2000.csv | 4 ++ .../results-p200001-s2000.geojson | 61 +++++++++++++++++++ .../data-sergipe/results-p200001-s2000.json | 40 ++++++++++++ test/utils/data.js | 34 ++++++----- 4 files changed, 125 insertions(+), 14 deletions(-) create mode 100644 test/utils/data-sergipe/results-p200001-s2000.csv create mode 100644 test/utils/data-sergipe/results-p200001-s2000.geojson create mode 100644 test/utils/data-sergipe/results-p200001-s2000.json diff --git a/test/utils/data-sergipe/results-p200001-s2000.csv b/test/utils/data-sergipe/results-p200001-s2000.csv new file mode 100644 index 00000000..a5200345 --- /dev/null +++ b/test/utils/data-sergipe/results-p200001-s2000.csv @@ -0,0 +1,4 @@ +id,name,population,lat,lon,poi.school,poi.church +200001,Paripiranga,29459,-10.6828923,-37.8621523,5000,3500 +200002,Jandaíra,10997,-11.5628009,-37.7820255,54700, +200003,Tobias Barreto,48733,-11.188034,-38.0034554,0,350000 \ No newline at end of file diff --git a/test/utils/data-sergipe/results-p200001-s2000.geojson b/test/utils/data-sergipe/results-p200001-s2000.geojson new file mode 100644 index 00000000..8d1b329e --- /dev/null +++ b/test/utils/data-sergipe/results-p200001-s2000.geojson @@ -0,0 +1,61 @@ +{ + "type": "FeatureCollection", + "features": [ + { + "type": "Feature", + "properties": { + "id": 200001, + "name": "Paripiranga", + "pop": 29459, + "eta": { + "school": 5000, + "church": 3500 + } + }, + "geometry": { + "type": "Point", + "coordinates": [ + -10.6828923, + -37.8621523 + ] + } + }, + { + "type": "Feature", + "properties": { + "id": 200002, + "name": "Jandaíra", + "pop": 10997, + "eta": { + "school": 54700 + } + }, + "geometry": { + "type": "Point", + "coordinates": [ + -11.5628009, + -37.7820255 + ] + } + }, + { + "type": "Feature", + "properties": { + "id": 200003, + "name": "Tobias Barreto", + "pop": 48733, + "eta": { + "school": 0, + "church": 350000 + } + }, + "geometry": { + 
"type": "Point", + "coordinates": [ + -11.188034, + -38.0034554 + ] + } + } + ] +} \ No newline at end of file diff --git a/test/utils/data-sergipe/results-p200001-s2000.json b/test/utils/data-sergipe/results-p200001-s2000.json new file mode 100644 index 00000000..533c23d3 --- /dev/null +++ b/test/utils/data-sergipe/results-p200001-s2000.json @@ -0,0 +1,40 @@ +[ + { + "id": 200001, + "name": "Arauá", + "results": [ + { + "id": 200001, + "name": "Paripiranga", + "population": 29459, + "lat": -10.6828923, + "lon": -37.8621523, + "poi": { + "school": 5000, + "church": 3500 + } + }, + { + "id": 200002, + "name": "Jandaíra", + "population": 10997, + "lat": -11.5628009, + "lon": -37.7820255, + "poi": { + "school": 54700 + } + }, + { + "id": 200003, + "name": "Tobias Barreto", + "population": 48733, + "lat": -11.188034, + "lon": -38.0034554, + "poi": { + "school": 0, + "church": 350000 + } + } + ] + } +] \ No newline at end of file diff --git a/test/utils/data.js b/test/utils/data.js index 9eb48270..5778eb30 100644 --- a/test/utils/data.js +++ b/test/utils/data.js @@ -18,6 +18,9 @@ const FILE_ORIGINS = path.join(__dirname, 'data-sergipe/villages.geojson'); const FILE_ADMIN = path.join(__dirname, 'data-sergipe/admin-boundaries.geojson'); const FILE_ROAD_NETWORK = path.join(__dirname, 'data-sergipe/road-network.osm'); const FILE_POI = path.join(__dirname, 'data-sergipe/poi-townhalls.geojson'); +const FILE_RESULTS_CSV = path.join(__dirname, 'data-sergipe/results-p200001-s2000.csv'); +const FILE_RESULTS_JSON = path.join(__dirname, 'data-sergipe/results-p200001-s2000.json'); +const FILE_RESULTS_GEOJSON = path.join(__dirname, 'data-sergipe/results-p200001-s2000.geojson'); const ADMIN_AREAS_BBOX = bbox(readJSONSync(FILE_ADMIN)); @@ -756,21 +759,21 @@ export function project2000 () { .then(() => scenarioResults( [ { - 'id': 1, + 'id': 200001, 'project_id': 2000, 'scenario_id': 2000, 'origin_id': 200001, 'project_aa_id': 200001 }, { - 'id': 2, + 'id': 200002, 'project_id': 2000, 'scenario_id': 2000, 'origin_id': 200002, 'project_aa_id': 200001 }, { - 'id': 3, + 'id': 200003, 'project_id': 2000, 'scenario_id': 2000, 'origin_id': 200003, @@ -781,37 +784,40 @@ export function project2000 () { .then(() => scenarioResultsPOI( [ { - id: 1, - result_id: 1, + id: 200001, + result_id: 200001, type: 'school', time: 5000 }, { - id: 2, - result_id: 2, + id: 200002, + result_id: 200002, type: 'school', time: 54700 }, { - id: 3, - result_id: 1, + id: 200003, + result_id: 200001, type: 'church', time: 3500 }, { - id: 4, - result_id: 3, + id: 200004, + result_id: 200003, type: 'school', time: 0 }, { - id: 5, - result_id: 3, + id: 200005, + result_id: 200003, type: 'church', time: 350000 } ] - )); + )) + .then(() => putObjectFromFile(bucket, 'scenario-2000/results_200001-araua-csv_000000', FILE_RESULTS_CSV)) + .then(() => putObjectFromFile(bucket, 'scenario-2000/results_all-json_000000', FILE_RESULTS_JSON)) + .then(() => putObjectFromFile(bucket, 'scenario-2000/results_all-geojson_000000', FILE_RESULTS_GEOJSON)); } // From 36bb2295973e8a2f9e03aebac705bb96df0d62e9 Mon Sep 17 00:00:00 2001 From: Olaf Veerman Date: Wed, 21 Jun 2017 15:22:11 -0400 Subject: [PATCH 57/63] Update tests to expect consistent casing --- test/test-scenarios-results.js | 4 ++-- test/utils/data-sergipe/results.csv | 4 ++++ 2 files changed, 6 insertions(+), 2 deletions(-) create mode 100644 test/utils/data-sergipe/results.csv diff --git a/test/test-scenarios-results.js b/test/test-scenarios-results.js index b2ffaaa7..fa3ece1f 100644 --- 
a/test/test-scenarios-results.js +++ b/test/test-scenarios-results.js @@ -35,8 +35,8 @@ describe('Scenario results', function () { }).then(res => { assert.equal(res.statusCode, 200, 'Status code is 200'); assert.deepEqual(res.result.meta, { - 'poi_type': [ 'school', 'church' ], - 'pop_type': [ 'population' ], + 'poiType': [ 'school', 'church' ], + 'popType': [ 'population' ], 'maxPop': [ 48733 ] }); diff --git a/test/utils/data-sergipe/results.csv b/test/utils/data-sergipe/results.csv new file mode 100644 index 00000000..6f1f343d --- /dev/null +++ b/test/utils/data-sergipe/results.csv @@ -0,0 +1,4 @@ +id,name,population,lat,lon,poi.school,poi.church +200001,Paripiranga,29459,-37.8621523,-10.6828923,5000,3500 +200002,Jandaíra,10997,-37.7820255,-11.5628009,54700, +200003,Tobias Barreto,48733,-38.0034554,-11.188034,0,350000 \ No newline at end of file From 85bdf86cd48bc4e6cc2fb56ec025620920e67af7 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Thu, 22 Jun 2017 10:32:55 +0100 Subject: [PATCH 58/63] Rename results files --- .../{results-p200001-s2000.csv => results-p2000-s2000.csv} | 0 ...ts-p200001-s2000.geojson => results-p2000-s2000.geojson} | 0 ...{results-p200001-s2000.json => results-p2000-s2000.json} | 0 test/utils/data.js | 6 +++--- 4 files changed, 3 insertions(+), 3 deletions(-) rename test/utils/data-sergipe/{results-p200001-s2000.csv => results-p2000-s2000.csv} (100%) rename test/utils/data-sergipe/{results-p200001-s2000.geojson => results-p2000-s2000.geojson} (100%) rename test/utils/data-sergipe/{results-p200001-s2000.json => results-p2000-s2000.json} (100%) diff --git a/test/utils/data-sergipe/results-p200001-s2000.csv b/test/utils/data-sergipe/results-p2000-s2000.csv similarity index 100% rename from test/utils/data-sergipe/results-p200001-s2000.csv rename to test/utils/data-sergipe/results-p2000-s2000.csv diff --git a/test/utils/data-sergipe/results-p200001-s2000.geojson b/test/utils/data-sergipe/results-p2000-s2000.geojson similarity index 100% rename from test/utils/data-sergipe/results-p200001-s2000.geojson rename to test/utils/data-sergipe/results-p2000-s2000.geojson diff --git a/test/utils/data-sergipe/results-p200001-s2000.json b/test/utils/data-sergipe/results-p2000-s2000.json similarity index 100% rename from test/utils/data-sergipe/results-p200001-s2000.json rename to test/utils/data-sergipe/results-p2000-s2000.json diff --git a/test/utils/data.js b/test/utils/data.js index 9b278303..5edfec81 100644 --- a/test/utils/data.js +++ b/test/utils/data.js @@ -18,9 +18,9 @@ const FILE_ORIGINS = path.join(__dirname, 'data-sergipe/villages.geojson'); const FILE_ADMIN = path.join(__dirname, 'data-sergipe/admin-boundaries.geojson'); const FILE_ROAD_NETWORK = path.join(__dirname, 'data-sergipe/road-network.osm'); const FILE_POI = path.join(__dirname, 'data-sergipe/poi-townhalls.geojson'); -const FILE_RESULTS_CSV = path.join(__dirname, 'data-sergipe/results-p200001-s2000.csv'); -const FILE_RESULTS_JSON = path.join(__dirname, 'data-sergipe/results-p200001-s2000.json'); -const FILE_RESULTS_GEOJSON = path.join(__dirname, 'data-sergipe/results-p200001-s2000.geojson'); +const FILE_RESULTS_CSV = path.join(__dirname, 'data-sergipe/results-p2000-s2000.csv'); +const FILE_RESULTS_JSON = path.join(__dirname, 'data-sergipe/results-p2000-s2000.json'); +const FILE_RESULTS_GEOJSON = path.join(__dirname, 'data-sergipe/results-p2000-s2000.geojson'); const ADMIN_AREAS_BBOX = bbox(readJSONSync(FILE_ADMIN)); From b73d14954fed73092bc5f8b532f3f05034cd1504 Mon Sep 17 00:00:00 2001 From: Daniel da 
Silva Date: Thu, 22 Jun 2017 17:37:10 +0100 Subject: [PATCH 59/63] Update results endpoint to accept filters --- app/routes/scenario--results.js | 222 +++++++++++++++++--------------- 1 file changed, 121 insertions(+), 101 deletions(-) diff --git a/app/routes/scenario--results.js b/app/routes/scenario--results.js index 8e1009e3..7ff2000b 100644 --- a/app/routes/scenario--results.js +++ b/app/routes/scenario--results.js @@ -7,7 +7,7 @@ import _ from 'lodash'; import db from '../db/'; import { getFileContents } from '../s3/utils'; -import { FileNotFoundError } from '../utils/errors'; +import { FileNotFoundError, DataValidationError } from '../utils/errors'; export default [ { @@ -79,125 +79,112 @@ export default [ params: { projId: Joi.number(), scId: Joi.number() + }, + query: { + poiType: Joi.string().required(), + popInd: Joi.string().required() } } }, handler: (request, reply) => { const { projId, scId } = request.params; + const { poiType, popInd } = request.query; - // Future structure. + // Prepare response. // let r = { - // accessibilityTime: [ - // { - // poi: 'bank', - // times: [10, 20, 30, 40, 50], - // adminAreas: [ - // { - // name: 'something', - // indicators: [ - // { - // name: 'Total Population', - // data: [0, 0, 0.1, 0.5, 1] - // } - // ] - // } - // ] - // } - // ] + // accessibilityTime: { + // poi: 'bank', + // analysisMins: [10, 20, 30, 40, 50], + // adminAreas: [ + // { + // id: 00000, + // name: 'something', + // data: [0, 0, 10, 50, 100] + // } + // ] + // } // }; - - // Get all the poi types. - let _poi = db('scenarios_files') - .select('subtype') - .where('type', 'poi') - .where('project_id', projId) - .where('scenario_id', scId); + let accessibilityTime = { + poi: poiType, + indicator: popInd, + analysisMins: [10, 20, 30, 60, 90, 120] + }; // Get all the admin areas for which results were generated. - let _aa = db('scenarios_settings') - .select('value') - .where('key', 'admin_areas') - .where('scenario_id', scId) - .first() - .then(aa => JSON.parse(aa.value)) - .then(selectedAA => db('projects_aa') - .select('id', 'name') - .where('project_id', projId) - .whereIn('id', selectedAA) - ); - - // Generate the accessibilityTime array to be used later. - let _accessibilityTime = Promise.all([_poi, _aa]) - .then(data => { - let [poi, aa] = data; - let accessibilityTime = poi.map(p => { - return { - poi: p.subtype, - analysisMins: [10, 20, 30, 60, 90, 120], - adminAreas: aa.map(a => { - return { - id: a.id, - name: a.name - }; - }) - }; - }); - - return accessibilityTime; - }); + const getAdminAreas = () => { + return db('scenarios_settings') + .select('value') + .where('key', 'admin_areas') + .where('scenario_id', scId) + .first() + .then(aa => JSON.parse(aa.value)) + .then(selectedAA => db('projects_aa') + .select('id', 'name') + .where('project_id', projId) + .whereIn('id', selectedAA) + ); + }; - // Get all the results. 
- let _all = db.raw(` - SELECT - pop.value as pop_value, - pop.key as pop_key, - r.project_aa_id as aa_id, - rp.type as poi_type, - rp.time as time_to_poi, - po.id as origin_id - FROM results r - INNER JOIN results_poi rp ON r.id = rp.result_id - INNER JOIN projects_origins po ON po.id = r.origin_id - INNER JOIN projects_origins_indicators pop ON po.id = pop.origin_id - WHERE pop.key = 'population' - `) - .then(res => res.rows); + const getResults = () => { + return db.raw(` + SELECT + pop.value as pop_value, + pop.key as pop_key, + r.project_aa_id as aa_id, + rp.type as poi_type, + rp.time as time_to_poi, + po.id as origin_id + FROM results r + INNER JOIN results_poi rp ON r.id = rp.result_id + INNER JOIN projects_origins po ON po.id = r.origin_id + INNER JOIN projects_origins_indicators pop ON po.id = pop.origin_id + WHERE pop.key = :popInd and rp.type = :poiType and r.project_id = :projId and r.scenario_id = :scId + `, { popInd, poiType, projId, scId }) + .then(res => res.rows); + }; // Sum by pop_value. const sumPop = (arr) => arr.reduce((acc, o) => acc + (parseInt(o.pop_value) || 1), 0); // Check if given time is less that given nimutes accounting for nulls. const isLessThanMinutes = (time, min) => time === null ? false : time <= min * 60; - // Compute the results. - Promise.all([_accessibilityTime, _all]) - .then(data => { - let [accessibilityTime, all] = data; + // GO! - accessibilityTime = accessibilityTime.map(poi => { - poi.adminAreas = _(poi.adminAreas).map(aa => { - let filtered = all.filter(r => r.poi_type === poi.poi && r.aa_id === aa.id); + checkPoi(projId, scId, poiType) + .then(() => checkPopInd(projId, popInd)) + .then(() => getAdminAreas()) + .then(aa => { + accessibilityTime.adminAreas = aa.map(a => { + return { + id: a.id, + name: a.name + }; + }); + }) + .then(() => getResults()) + .then(results => { + accessibilityTime.adminAreas = _(accessibilityTime.adminAreas).map(aa => { + let filtered = results.filter(r => r.aa_id === aa.id); - if (filtered.length) { - let totalPop = sumPop(filtered); - let pop = poi.analysisMins.map(time => sumPop(filtered.filter(o => isLessThanMinutes(o.time_to_poi, time)))); - aa.data = pop.map(o => o / totalPop * 100); - } else { - aa.data = []; - } + if (filtered.length) { + let totalPop = sumPop(filtered); + let pop = accessibilityTime.analysisMins.map(time => sumPop(filtered.filter(o => isLessThanMinutes(o.time_to_poi, time)))); + aa.data = pop.map(o => o / totalPop * 100); + } else { + aa.data = []; + } - return aa; - }) - .sortBy(poi.adminAreas, o => _.deburr(o.name)) - .reverse() - .value(); + return aa; + }) + .sortBy(accessibilityTime.adminAreas, o => _.deburr(o.name)) + .reverse() + .value(); - return poi; - }); return accessibilityTime; }) - .then(accessibilityTime => { - reply({accessibilityTime}); - }).catch(err => { + .then(accessibilityTime => reply({accessibilityTime})) + .catch(DataValidationError, e => reply(Boom.badRequest(e.message))) + .catch(err => { console.log('err', err); reply(Boom.badImplementation(err)); }); @@ -213,6 +200,8 @@ export default [ scId: Joi.number() }, query: { + poiType: Joi.string().required(), + popInd: Joi.string().required(), sortBy: Joi.string(), sortDir: Joi.string().valid(['asc', 'desc']), limit: Joi.number().default(50), @@ -224,7 +213,7 @@ export default [ const { projId, scId } = request.params; const { page, limit } = request; const offset = (page - 1) * limit; - let { sortBy, sortDir } = request.query; + let { sortBy, sortDir, poiType, popInd } = request.query; sortBy = sortBy || 
'origin_name'; sortDir = sortDir || 'asc'; @@ -237,7 +226,8 @@ export default [ .innerJoin('projects_aa', 'projects_aa.id', 'results.project_aa_id') .where('results.project_id', projId) .where('results.scenario_id', scId) - .where('projects_origins_indicators.key', 'population') + .where('projects_origins_indicators.key', popInd) + .where('results_poi.type', poiType) .first(); let _results = db('results') @@ -257,11 +247,14 @@ export default [ .innerJoin('projects_aa', 'projects_aa.id', 'results.project_aa_id') .where('results.project_id', projId) .where('results.scenario_id', scId) - .where('projects_origins_indicators.key', 'population') + .where('projects_origins_indicators.key', popInd) + .where('results_poi.type', poiType) .orderBy(sortBy, sortDir) .offset(offset).limit(limit); - Promise.all([_count, _results]) + checkPoi(projId, scId, poiType) + .then(() => checkPopInd(projId, popInd)) + .then(() => Promise.all([_count, _results])) .then(res => { request.count = parseInt(res[0].count); reply(res[1]); @@ -311,6 +304,33 @@ export default [ } ]; +function checkPoi (projId, scId, poiType) { + return db('results') + .distinct('results_poi.type') + .select() + .innerJoin('results_poi', 'results_poi.result_id', 'results.id') + .where('project_id', projId) + .where('scenario_id', scId) + .then(poiTypes => _.map(poiTypes, 'type')) + .then(poiTypes => { + if (!poiTypes.length) throw new DataValidationError(`There are no available poi types to use`); + if (poiTypes.indexOf(poiType) === -1) throw new DataValidationError(`"poiType" must be one of [${poiTypes.join(', ')}]`); + }); +} + +function checkPopInd (projId, popInd) { + return db('projects_files') + .select('data') + .where('project_id', projId) + .where('type', 'origins') + .first() + .then(res => _.map(res.data.indicators, 'key')) + .then(popInds => { + if (!popInds.length) throw new DataValidationError(`There are no available population indicators to use`); + if (popInds.indexOf(popInd) === -1) throw new DataValidationError(`"popInd" must be one of [${popInds.join(', ')}]`); + }); +} + /** * Merge result objects from same origin together. This is primarily to support * visualization of the results on the map. GeoJSON is built client-side, so From 4b4a73b639d08b994c7dbb0e5b4fed213d3df837 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Thu, 22 Jun 2017 17:55:02 +0100 Subject: [PATCH 60/63] Fix scenario create through clone option --- app/services/scenario-create/scenario-create.js | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/app/services/scenario-create/scenario-create.js b/app/services/scenario-create/scenario-create.js index ea103de7..58d53893 100644 --- a/app/services/scenario-create/scenario-create.js +++ b/app/services/scenario-create/scenario-create.js @@ -77,8 +77,21 @@ export function scenarioCreate (e) { .whereIn('type', ['poi', 'road-network']) .then(files => cloneScenarioFiles(trx, files, projId, scId)) ) + .then(() => trx('scenarios_source_data') + .select('project_id', 'name', 'type', 'data') + .where('scenario_id', sourceScenarioId) + .where('project_id', projId) + .then(sourceData => { + // Set new id. + sourceData.forEach(o => { + o.scenario_id = scId; + }); + return sourceData; + }) + ) + .then(sourceData => trx.batchInsert('scenarios_source_data', sourceData)); // Copy the osm-p2p-db. 
- .then(() => op.log('files', {message: 'Cloning road network database'})); + // .then(() => op.log('files', {message: 'Cloning road network database'})); // .then(() => cloneOsmP2Pdb(projId, sourceScenarioId, projId, scId)); // } else if (source === 'new') { @@ -158,6 +171,7 @@ function cloneScenarioFiles (trx, files, projId, scId) { return { name: fileName, type: file.type, + subtype: file.subtype, path: filePath, project_id: projId, scenario_id: scId, From d22285175d93f81f97d77bcbf4adf51add208134 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Thu, 22 Jun 2017 18:34:48 +0100 Subject: [PATCH 61/63] Include source when creating a new scenario with new file --- .../scenario-create/scenario-create.js | 22 ++++++++++++++++--- 1 file changed, 19 insertions(+), 3 deletions(-) diff --git a/app/services/scenario-create/scenario-create.js b/app/services/scenario-create/scenario-create.js index 58d53893..d277716d 100644 --- a/app/services/scenario-create/scenario-create.js +++ b/app/services/scenario-create/scenario-create.js @@ -5,7 +5,7 @@ import Promise from 'bluebird'; import config from '../../config'; // import { cloneDatabase, importRoadNetwork } from '../rra-osm-p2p'; import db from '../../db/'; -import { copyFile, getFileContents } from '../../s3/utils'; +import { copyFile } from '../../s3/utils'; import Operation from '../../utils/operation'; import AppLogger from '../../utils/app-logger'; @@ -109,6 +109,22 @@ export function scenarioCreate (e) { .where('scenarios_files.type', 'poi') .then(files => cloneScenarioFiles(trx, files, projId, scId)) ) + // Insert source info. + // TODO: This needs to be updated once we have osm data. + .then(() => trx.batchInsert('scenarios_source_data', [ + { + project_id: projId, + scenario_id: scId, + name: 'road-network', + type: 'file' + }, + { + project_id: projId, + scenario_id: scId, + name: 'poi', + type: 'file' + } + ])) // Add entry for road network file. .then(() => { let now = new Date(); @@ -126,8 +142,8 @@ export function scenarioCreate (e) { .returning('*') .insert(data) .then(res => res[0]); - }) - .then(file => getFileContents(file.path)); + }); + // .then(file => getFileContents(file.path)) // Import to the osm-p2p-db. // .then(roadNetwork => { // logger && logger.log('process road network'); From 75cbde10762e032bf37f7e6046d2731f290c7887 Mon Sep 17 00:00:00 2001 From: Olaf Veerman Date: Thu, 22 Jun 2017 16:05:55 -0400 Subject: [PATCH 62/63] Refactor results/geo endpoint (#152) Response only contains estimates for a single POI and a single population type. 
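For illustration, a minimal sketch of how a client might consume the refactored
endpoint, based on the query parameters and the flattened keys produced by
prepGeoResponse and asserted in the updated tests; the fetch-based consumer and
the logging are assumptions, not part of this patch, and the project/scenario
ids are the test fixtures.

    // Hypothetical consumer of the single-POI, single-indicator geo results.
    const url = '/projects/2000/scenarios/2000/results/geo' +
      '?poiType=school&popInd=population';

    fetch(url)
      .then(res => res.json())
      .then(origins => {
        // Each entry is one origin with flat keys:
        //   i  - origin id                 n - origin name
        //   e  - time to the nearest POI of the requested poiType
        //   p  - value of the requested population indicator
        //   pn - p normalized against the largest p in the response
        //        (0..1, two decimals)
        //   c  - origin coordinates rounded to 5 decimal places
        origins.forEach(o => console.log(o.n, o.e, o.pn, o.c));
      });

Keeping the keys flat (rather than nesting them per POI or indicator) follows
the earlier geo endpoint's rationale of supporting data-driven styling in
mapbox-gl, while the single poiType/popInd pair keeps the payload small.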
--- app/routes/scenario--results.js | 88 +++++++++------------------------ test/test-scenarios-results.js | 42 ++++++++++------ 2 files changed, 50 insertions(+), 80 deletions(-) diff --git a/app/routes/scenario--results.js b/app/routes/scenario--results.js index 7ff2000b..2b226a37 100644 --- a/app/routes/scenario--results.js +++ b/app/routes/scenario--results.js @@ -272,11 +272,16 @@ export default [ params: { projId: Joi.number(), scId: Joi.number() + }, + query: { + poiType: Joi.string().required(), + popInd: Joi.string().required() } } }, handler: (request, reply) => { const { projId, scId } = request.params; + let { poiType, popInd } = request.query; let _results = db('results') .select( @@ -292,9 +297,14 @@ export default [ .innerJoin('projects_origins', 'projects_origins.id', 'results.origin_id') .innerJoin('projects_origins_indicators', 'projects_origins_indicators.origin_id', 'projects_origins.id') .where('results.project_id', projId) - .where('results.scenario_id', scId); - Promise.all(_results) - .then(res => mergeOriginETA(res)) + .where('results.scenario_id', scId) + .where('projects_origins_indicators.key', popInd) + .where('results_poi.type', poiType); + + checkPoi(projId, scId, poiType) + .then(() => checkPopInd(projId, popInd)) + .then(() => Promise.all(_results)) + .then(res => prepGeoResponse(res)) .then(res => reply(res)) .catch(err => { console.log('err', err); @@ -331,67 +341,17 @@ function checkPopInd (projId, popInd) { }); } -/** - * Merge result objects from same origin together. This is primarily to support - * visualization of the results on the map. GeoJSON is built client-side, so - * the response is kept to a minimum. - * To support data driven styling with mapbox-gl, properties are stored as flat - * k:v pairs, instead of nesting them in another object. 
- */ -function mergeOriginETA (results) { - let metaData = { - 'poiType': [], - 'popType': [], - 'maxPop': [] - }; +function prepGeoResponse (results) { + let maxPop = Math.max(...results.map(o => o.pop_value)); - // build array of unique POI and population types in the results - results.map(r => { - if (metaData.poiType.indexOf(r.poi_type) === -1) metaData.poiType.push(r.poi_type); - if (metaData.popType.indexOf(r.pop_key) === -1) metaData.popType.push(r.pop_key); + return results.map(o => { + return { + 'i': o.origin_id, + 'n': o.origin_name, + 'e': o.time_to_poi, + 'p': o.pop_value, + 'pn': parseInt(o.pop_value / maxPop * 100) / 100, + 'c': [parseInt(o.origin_coords[0] * 100000) / 100000, parseInt(o.origin_coords[1] * 100000) / 100000] + }; }); - - // return max population count for each population type - // values are stored in the same order as the types in popTypes - metaData.maxPop = metaData.popType.map(p => - Math.max(...results.filter(r => r.pop_key === p) - .map(r => { - if (r.pop_key) return r.pop_value; - }) - )); - - // Build a GeoJSON feature array, with each origin in its own feature - let resultData = results.reduce((a, b) => { - // Check if the accumulator already has an object for the origin - let match = a.findIndex(o => o.i === b.origin_id); - let poiIndex = metaData.poiType.indexOf(b.poi_type); - let popIndex = metaData.popType.indexOf(b.pop_key); - - if (match === -1) { - // Create the feature - a.push({ - 'i': b.origin_id, - 'n': b.origin_name, - [`e-${poiIndex}`]: b.time_to_poi, - [`p-${popIndex}`]: b.pop_value, - [`pn-${popIndex}`]: parseInt(b.pop_value / metaData.maxPop[popIndex] * 100) / 100, - 'c': [parseInt(b.origin_coords[0] * 100000) / 100000, parseInt(b.origin_coords[1] * 100000) / 100000] - }); - } else if (!a[match][`e-${poiIndex}`]) { - // Update an existing feature with an ETA for a different POI - a[match][`e-${poiIndex}`] = b.time_to_poi; - } else if (!a[match][`p-${popIndex}`]) { - // Update an existing feature with a population count for a different - // sub-set - a[match][`p-${popIndex}`] = b.pop_value; - a[match][`pn-${popIndex}`] = parseInt(b.pop_value / metaData.maxPop[popIndex] * 100) / 100; - } - - return a; - }, []); - - return { - 'meta': metaData, - 'results': resultData - }; } diff --git a/test/test-scenarios-results.js b/test/test-scenarios-results.js index fa3ece1f..c6e1449a 100644 --- a/test/test-scenarios-results.js +++ b/test/test-scenarios-results.js @@ -31,28 +31,38 @@ describe('Scenario results', function () { it('should return the correct results for a scenario', function () { return instance.injectThen({ method: 'GET', - url: '/projects/2000/scenarios/2000/results/geo' + url: '/projects/2000/scenarios/2000/results/geo?poiType=school&popInd=population' }).then(res => { assert.equal(res.statusCode, 200, 'Status code is 200'); - assert.deepEqual(res.result.meta, { - 'poiType': [ 'school', 'church' ], - 'popType': [ 'population' ], - 'maxPop': [ 48733 ] - }); - - let ft = res.result.results; - assert.equal(ft.length, 3); - assert.deepEqual(ft[0], { + let origins = res.result; + assert.equal(origins.length, 3); + assert.deepEqual(origins[0], { 'n': 'Paripiranga', 'i': 200001, - 'e-0': 5000, - 'e-1': 3500, - 'p-0': 29459, - 'pn-0': 0.6, + 'e': 5000, + 'p': 29459, + 'pn': 0.6, 'c': [-37.86215, -10.68289] }); - assert.equal(ft[1]['e-0'], 54700); - assert.equal(ft[1]['e-1'], undefined); + }); + }); + it('should return the correct results for a scenario', function () { + return instance.injectThen({ + method: 'GET', + url: 
'/projects/2000/scenarios/2000/results/geo?poiType=church&popInd=population' + }).then(res => { + assert.equal(res.statusCode, 200, 'Status code is 200'); + let origins = res.result; + assert.equal(origins.length, 2); + assert.equal(origins[1].e, 350000); + }); + }); + it.skip('should return an error for unknown POI types', function () { + return instance.injectThen({ + method: 'GET', + url: '/projects/2000/scenarios/2000/results/geo?poiType=mockery&popInd=population' + }).then(res => { + assert.equal(res.statusCode, 500, 'Internal Server Error'); }); }); }); From 9d89d2ff4762b387083f43f5325dc8f4bad0d942 Mon Sep 17 00:00:00 2001 From: Daniel da Silva Date: Mon, 26 Jun 2017 12:00:26 +0100 Subject: [PATCH 63/63] Update package and version bump [ci skip] --- package.json | 21 +++++++++++++++------ 1 file changed, 15 insertions(+), 6 deletions(-) diff --git a/package.json b/package.json index 7e377a8f..7332dc06 100644 --- a/package.json +++ b/package.json @@ -1,8 +1,20 @@ { - "name": "Rural-Road-Accessibility", - "version": "0.2.0", + "name": "rural-road-accessibility--server", + "version": "0.3.0", "description": "World Bank project to assess rural road accessibility", - "repository": "https://github.com/WorldBank-Transport/Rural-Road-Accessibility", + "repository": { + "type": "git", + "url": "https://github.com/WorldBank-Transport/Rural-Road-Accessibility" + }, + "author": { + "name": "Development Seed", + "url": "https://developmentseed.org" + }, + "license": "MIT", + "bugs": { + "url": "https://github.com/WorldBank-Transport/Rural-Road-Accessibility/issues" + }, + "homepage": "https://github.com/WorldBank-Transport/Rural-Road-Accessibility", "main": "index.js", "scripts": { "setup": "DEBUG=true node setup/", @@ -15,9 +27,6 @@ "engines": { "node": "6.7.x" }, - "author": "Development Seed", - "license": "", - "homepage": "https://github.com/WorldBank-Transport/Rural-Road-Accessibility", "dependencies": { "@turf/bbox": "^4.0.2", "@turf/center-of-mass": "^4.3.0",