diff --git a/modules/demo/api-server/index.js b/modules/demo/api-server/index.js index 0d97cdbc5..a957bca54 100755 --- a/modules/demo/api-server/index.js +++ b/modules/demo/api-server/index.js @@ -29,5 +29,5 @@ const env = { }; spawnSync(process.execPath, - [fastify, 'start', '-l', 'info', '-P', '-p', '3010', '-w', 'app.js'], + [fastify, 'start', '-l', 'info', '--ignore-watch', '-P', '-p', '3010', 'app.js'], {env, cwd: __dirname, stdio: 'inherit'}); diff --git a/modules/demo/api-server/package.json b/modules/demo/api-server/package.json index fc6266bea..830cef317 100644 --- a/modules/demo/api-server/package.json +++ b/modules/demo/api-server/package.json @@ -12,8 +12,8 @@ "description": "A fastify-based web server that provides browser-access to GPU resources.", "scripts": { "test": "tap \"test/**/*.test.js\"", - "start": "fastify start -l info -P -p 3010 -w app.js", - "dev": "fastify start -w -l info -P -p 3010 app.js" + "start": "fastify start --ignore-watch -l info -P -p 3010 app.js", + "dev": "fastify start --ignore-watch -l info -P -p 3010 app.js" }, "keywords": [ "rapids.ai", diff --git a/modules/demo/api-server/routes/gpu/index.js b/modules/demo/api-server/routes/gpu/index.js new file mode 100644 index 000000000..514698f28 --- /dev/null +++ b/modules/demo/api-server/routes/gpu/index.js @@ -0,0 +1,155 @@ +// Copyright (c) 2022, NVIDIA CORPORATION. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const {Utf8String, Int32, Uint32, Float32, DataFrame, Series, Float64} = require('@rapidsai/cudf'); +const {RecordBatchStreamWriter, Field, Vector, List} = require('apache-arrow'); +const Path = require('path'); +const {promisify} = require('util'); +const Fs = require('fs'); +const Stat = promisify(Fs.stat); +const fastifyCors = require('@fastify/cors'); +const fastify = require('fastify')({logger: true}); + +const arrowPlugin = require('fastify-arrow'); +const gpu_cache = require('../../util/gpu_cache.js'); +const root_schema = require('../../util/schema.js'); + +module.exports = async function(fastify, opts) { + fastify.register(arrowPlugin); + fastify.register(fastifyCors, {origin: '*'}); + fastify.decorate('cacheObject', gpu_cache.cacheObject); + fastify.decorate('getData', gpu_cache.getData); + fastify.decorate('readCSV', gpu_cache.readCSV); + fastify.decorate('publicPath', gpu_cache.publicPath); + fastify.decorate('listDataframes', gpu_cache.listDataframes); + fastify.decorate('clearDataFrames', gpu_cache.clearDataframes); + + const get_schema = { + logLevel: 'debug', + schema: { + response: { + 200: { + type: 'object', + properties: + {success: {type: 'boolean'}, message: {type: 'string'}, params: {type: 'string'}} + } + } + } + }; + + fastify.get('/', {...get_schema, handler: () => root_schema['gpu']}); + + fastify.route({ + method: 'POST', + url: '/DataFrame/readCSV', + schema: {}, + handler: async (request, reply) => { + let message = 'Error'; + let result = {'params': request.body, success: false, message: message}; + try { + const path = Path.join(fastify.publicPath(), request.body.filename); + const stats = await Stat(path); + const message = 'File is available'; + const currentDataFrame = await fastify.getData(request.body.filename); + if (currentDataFrame !== undefined) { + console.log('Found existing dataframe.'); + console.log(request.body); + console.log(currentDataFrame); + currentDataFrame.dispose(); + } + const cacheObject = await 
fastify.readCSV({ + header: 0, + sourceType: 'files', + sources: [path], + }); + const name = request.body.filename; + await fastify.cacheObject(name, cacheObject); + result.success = true; + result.message = 'CSV file in GPU memory.'; + result.statusCode = 200; + await reply.code(200).send(result); + } catch (e) { + result.message = e.message; + if (e.message.search('no such file or directory') !== -1) { + await reply.code(404).send(result); + } else { + await reply.code(500).send(result); + } + } + } + }); + + fastify.route({ + method: 'GET', + url: '/get_column/:table/:column', + schema: {querystring: {table: {type: 'string'}, 'column': {type: 'string'}}}, + handler: async (request, reply) => { + let message = 'Error'; + let result = {'params': JSON.stringify(request.params), success: false, message: message}; + const table = await fastify.getData(request.params.table); + if (table == undefined) { + result.message = 'Table not found'; + await reply.code(404).send(result); + } else { + try { + const name = request.params.column; + const column = table.get(name); + const newDfObject = {}; + newDfObject[name] = column; + const result = new DataFrame(newDfObject); + const writer = RecordBatchStreamWriter.writeAll(result.toArrow()); + await reply.code(200).send(writer.toNodeStream()); + } catch (e) { + if (e.message.search('Unknown column name') != -1) { + result.message = e; + await reply.code(404).send(result); + } else { + await reply.code(500).send(result); + } + } + } + } + }); + + fastify.route({ + method: 'GET', + url: '/list_tables', + handler: async (request, reply) => { + /** + * /graphology/list_tables returns a list of DataFrames stored in + * the GPU cache. 
+ */ + let message = 'Error'; + let result = {success: false, message: message}; + const list = await fastify.listDataframes(); + return list; + } + }); + + fastify.route({ + method: 'POST', + url: '/release', + handler: async (request, reply) => { + /** + * /graphology/release clears the dataframes from memory. + * This is useful for testing, but should not be used in production. + * In production, the dataframes should be cached in memory and reused. + * This solution allows unit tests to pass without timing out, as the + * cached GPU objects are not cleared between tests. + */ + await fastify.clearDataFrames(); + await reply.code(200).send({message: 'OK'}) + } + }); +} diff --git a/modules/demo/api-server/routes/graphology/index.js b/modules/demo/api-server/routes/graphology/index.js index 42286fcff..a7f7a2ee9 100644 --- a/modules/demo/api-server/routes/graphology/index.js +++ b/modules/demo/api-server/routes/graphology/index.js @@ -28,13 +28,13 @@ const root_schema = require('../../util/schema.js'); module.exports = async function(fastify, opts) { fastify.register(arrowPlugin); fastify.register(fastifyCors, {origin: 'http://localhost:3001'}); - fastify.decorate('setDataframe', gpu_cache.setDataframe); - fastify.decorate('getDataframe', gpu_cache.getDataframe); + fastify.decorate('cacheObject', gpu_cache.cacheObject); + fastify.decorate('getData', gpu_cache.getData); fastify.decorate('listDataframes', gpu_cache.listDataframes); fastify.decorate('readGraphology', gpu_cache.readGraphology); fastify.decorate('readLargeGraphDemo', gpu_cache.readLargeGraphDemo); fastify.decorate('clearDataFrames', gpu_cache.clearDataframes); - fastify.get('/', async function(request, reply) { return root_schema; }); + fastify.get('/', async function(request, reply) { return root_schema['graphology']; }); fastify.route({ method: 'POST', @@ -76,9 +76,9 @@ module.exports = async function(fastify, opts) { result.success = true; try { const graphology = await 
fastify.readLargeGraphDemo(path); - await fastify.setDataframe('nodes', graphology['nodes']); - await fastify.setDataframe('edges', graphology['edges']); - await fastify.setDataframe('options', graphology['options']); + await fastify.cacheObject('nodes', graphology['nodes']); + await fastify.cacheObject('edges', graphology['edges']); + await fastify.cacheObject('options', graphology['options']); result.message = 'File read onto GPU.'; } catch (e) { result.success = false; @@ -109,6 +109,11 @@ module.exports = async function(fastify, opts) { } }, handler: async (request, reply) => { + /** + * /graphology/read_json reads a graphology formatted json file from + * public storage, storing it in the `nodes`, `edges`, `clusters`, and + * `tags` DataFrames. + */ const query = request.query; let result = { 'params': JSON.stringify(query), @@ -135,10 +140,10 @@ module.exports = async function(fastify, opts) { result.success = true; try { const graphology = await fastify.readGraphology(path); - await fastify.setDataframe('nodes', graphology['nodes']); - await fastify.setDataframe('edges', graphology['edges']); - await fastify.setDataframe('clusters', graphology['clusters']); - await fastify.setDataframe('tags', graphology['tags']); + await fastify.cacheObject('nodes', graphology['nodes']); + await fastify.cacheObject('edges', graphology['edges']); + await fastify.cacheObject('clusters', graphology['clusters']); + await fastify.cacheObject('tags', graphology['tags']); result.message = 'File read onto GPU.'; } catch (e) { result.success = false; @@ -155,25 +160,18 @@ module.exports = async function(fastify, opts) { } }); - fastify.route({ - method: 'GET', - url: '/list_tables', - handler: async (request, reply) => { - let message = 'Error'; - let result = {success: false, message: message}; - const list = await fastify.listDataframes(); - return list; - } - }); - fastify.route({ method: 'GET', url: '/get_column/:table/:column', schema: {querystring: {table: {type: 'string'}, 
'column': {type: 'string'}}}, handler: async (request, reply) => { + /** + * /graphology/get_column/:table/:column returns a column of a DataFrame + * in the GPU cache as an Arrow Table. + */ let message = 'Error'; let result = {'params': JSON.stringify(request.params), success: false, message: message}; - const table = await fastify.getDataframe(request.params.table); + const table = await fastify.getData(request.params.table); if (table == undefined) { result.message = 'Table not found'; await reply.code(404).send(result); @@ -206,9 +204,13 @@ module.exports = async function(fastify, opts) { url: '/get_table/:table', schema: {querystring: {table: {type: 'string'}}}, handler: async (request, reply) => { + /** + * /graphology/get_table/:table returns a DataFrame from the GPU cache + * as an Arrow Table. + */ let message = 'Error'; let result = {'params': JSON.stringify(request.params), success: false, message: message}; - const table = await fastify.getDataframe(request.params.table); + const table = await fastify.getData(request.params.table); if (table == undefined) { result.message = 'Table not found'; await reply.code(404).send(result); @@ -223,9 +225,13 @@ module.exports = async function(fastify, opts) { method: 'GET', url: '/nodes/bounds', handler: async (request, reply) => { + /** + * /graphology/nodes/bounds returns the min/max of the x and y columns + * of the `nodes` DataFrame. + */ let message = 'Error'; let result = {success: false, message: message}; - const df = await fastify.getDataframe('nodes'); + const df = await fastify.getData('nodes'); if (df == undefined) { result.message = 'Table not found'; await reply.code(404).send(result); @@ -248,9 +254,13 @@ module.exports = async function(fastify, opts) { method: 'GET', url: '/nodes', handler: async (request, reply) => { + /** + * /graphology/nodes returns the `nodes` DataFrame, tiled into a single column + * with offset x,y,scale,color values. 
+ */ let message = 'Error'; let result = {success: false, message: message}; - const df = await fastify.getDataframe('nodes'); + const df = await fastify.getData('nodes'); if (df == undefined) { result.message = 'Table not found'; await reply.code(404).send(result); @@ -287,12 +297,16 @@ module.exports = async function(fastify, opts) { method: 'GET', url: '/edges', handler: async (request, reply) => { + /** + * /graphology/edges returns the edges table, tiled into a single column + * of x,y,size,color offset values. + */ let message = 'Error'; let result = {success: false, message: message}; /** @type DataFrame<{x: Float32, y: Float32}> */ - const df = await fastify.getDataframe('nodes'); + const df = await fastify.getData('nodes'); /** @type DataFrame<{x: Int32, y: Int32}> */ - const edges = await fastify.getDataframe('edges'); + const edges = await fastify.getData('edges'); if (df == undefined) { result.message = 'Table not found'; await reply.code(404).send(result); @@ -369,13 +383,4 @@ module.exports = async function(fastify, opts) { } } }); - - fastify.route({ - method: 'POST', - url: '/release', - handler: async (request, reply) => { - await fastify.clearDataFrames(); - await reply.code(200).send({message: 'OK'}) - } - }); } diff --git a/modules/demo/api-server/routes/particles/index.js b/modules/demo/api-server/routes/particles/index.js new file mode 100644 index 000000000..fcf0af077 --- /dev/null +++ b/modules/demo/api-server/routes/particles/index.js @@ -0,0 +1,136 @@ +// Copyright (c) 2022, NVIDIA CORPORATION. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +const {Int32, Float32, DataFrame, Series} = require('@rapidsai/cudf'); +const {RecordBatchStreamWriter} = require('apache-arrow'); +const fastify = require('fastify')({logger: {level: 'debug'}}); +const fastifyCors = require('@fastify/cors'); + +const arrowPlugin = require('fastify-arrow'); +const gpu_cache = require('../../util/gpu_cache.js'); +const root_schema = require('../../util/schema.js'); + +module.exports = async function(fastify, opts) { + fastify.register(fastifyCors, {origin: '*'}); + fastify.register(arrowPlugin); + fastify.decorate('cacheObject', gpu_cache.cacheObject); + fastify.decorate('getData', gpu_cache.getData); + fastify.decorate('readCSV', gpu_cache.readCSV); + + const get_schema = { + logLevel: 'debug', + schema: { + response: { + 200: { + type: 'object', + properties: + {success: {type: 'boolean'}, message: {type: 'string'}, params: {type: 'string'}} + } + } + } + }; + + fastify.get('/', {...get_schema, handler: () => root_schema['particles']}); + fastify.post('/', {...get_schema, handler: () => root_schema['particles']}); + + const filterPoints = + (column, min, max) => { + const gt = column._col.gt(parseInt(min)); + const lt = column._col.lt(parseInt(max)); + const mask = gt.bitwiseAnd(lt); + return column.filter(Series.new(mask)); + } + + const handler = async (request, reply) => { + /** + * /particles/get_shader_column/:table/:xmin/:xmax/:ymin/:ymax returns a bounds-limited + * set of interleaved longitude/latitude pairs as an Arrow Table containing a single column. 
+ * The column is a Float32Array of length 2 * N, where N is the number of points. + * :table is the name of the table to query. + * (optional) :xmin, :xmax, :ymin, :ymax are the bounds to limit the query to. + * If no bounds are provided, the entire table is returned. + * + * The returned Arrow Table is a single column of Float32 values, where the first + * two values are the first point's longitude and latitude, the next two values are + * the second point's longitude and latitude, and so on. + */ + let message = 'Error'; + let result = {'params': request.params, success: false, message: message}; + const table = await fastify.getData(request.params.table); + if (table == undefined) { + result.message = 'Table not found'; + await reply.code(404).send(result); + } else { + try { + let x = undefined; + let y = undefined; + if (request.params.xmin != undefined && request.params.xmax != undefined && + request.params.ymin != undefined && request.params.ymax != undefined) { + x = filterPoints(table.get('Longitude'), request.params.xmin, request.params.xmax); + y = filterPoints(table.get('Latitude'), request.params.ymin, request.params.ymax); + } else { + x = table.get('Longitude'); + y = table.get('Latitude'); + } + + // Map x, y, r, g, b to offsets for client display + let tiled = Series.sequence({type: new Float32, init: 0.0, size: (2 * x.length)}); + let base_offset = Series.sequence({type: new Int32, init: 0.0, size: x.length}).mul(2); + tiled = tiled.scatter(x, base_offset.cast(new Int32)); + x.dispose(); + tiled = tiled.scatter(y, base_offset.add(1).cast(new Int32)); + y.dispose(); + const result = new DataFrame({'gpu_buffer': tiled}); + const writer = RecordBatchStreamWriter.writeAll(result.toArrow()); + await reply.code(200).send(writer.toNodeStream()); + tiled.dispose(); + result.dispose(); + writer.close(); + } catch (e) { + result.message = e.message; + if (e.message.search('Unknown column name') != -1) { + result.message = { + error: result.message, + 
message: + 'Imported CSV file must contain four columns: State, Zip_Code, Longitude, and Latitude' + }; + await reply.code(500).send(result); + } else { + await reply.code(500).send(result); + } + } + } + }; + + fastify.route({ + method: 'GET', + url: '/get_shader_column/:table/:xmin/:xmax/:ymin/:ymax', + schema: { + querystring: { + table: {type: 'string'}, + xmin: {type: 'number'}, + xmax: {type: 'number'}, + ymin: {type: 'number'}, + ymax: {type: 'number'} + } + }, + handler: handler + }); + fastify.route({ + method: 'GET', + url: '/get_shader_column/:table', + schema: {querystring: {table: {type: 'string'}}}, + handler: handler + }); +} diff --git a/modules/demo/api-server/routes/quadtree/index.js b/modules/demo/api-server/routes/quadtree/index.js new file mode 100644 index 000000000..b9d87f20e --- /dev/null +++ b/modules/demo/api-server/routes/quadtree/index.js @@ -0,0 +1,484 @@ +// Copyright (c) 2022, NVIDIA CORPORATION. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +const {Float64, Float32, DataFrame, Series} = require('@rapidsai/cudf'); +const {RecordBatchStreamWriter} = require('apache-arrow'); +const fastify = require('fastify')({logger: {level: 'debug'}}); +const fastifyCors = require('@fastify/cors'); +const cuspatial = require('@rapidsai/cuspatial'); + +const arrowPlugin = require('fastify-arrow'); +const gpu_cache = require('../../util/gpu_cache.js'); +const root_schema = require('../../util/schema.js'); + +module.exports = async function(fastify, opts) { + fastify.register(fastifyCors, {origin: '*'}); + fastify.register(arrowPlugin); + fastify.decorate('cacheObject', gpu_cache.cacheObject); + fastify.decorate('getData', gpu_cache.getData); + fastify.decorate('listDataframes', gpu_cache.listDataframes); + fastify.decorate('readCSV', gpu_cache.readCSV); + + const get_schema = { + logLevel: 'debug', + schema: { + response: { + 200: { + type: 'object', + properties: + {success: {type: 'boolean'}, message: {type: 'string'}, params: {type: 'string'}} + } + } + } + }; + + fastify.get('/', {...get_schema, handler: () => root_schema['quadtree']}); + fastify.post('/', {...get_schema, handler: () => root_schema['quadtree']}); + + fastify.route({ + method: 'POST', + url: '/create/:table', + schema: {querystring: {table: {type: 'string'}}}, + handler: async (request, reply) => { + /** + * @api {post} /quadtree/create/:table Create Quadtree + * @apiName CreateQuadtree + * @apiGroup Quadtree + * @apiDescription Create a quadtree from a table + * @apiParam {String} table Table name + * @apiParam {String} xAxisName Column name for x-axis + * @apiParam {String} yAxisName Column name for y-axis + * @apiParamExample {json} Request-Example: + * { + * "xAxisName": "x", + * "yAxisName": "y" + * } + * @apiSuccessExample {json} Success-Response: + * { + * "params": { + * "table": "test" + * }, + * "success": true, + * "message": "Quadtree created" + * } + * @apiErrorExample {json} Error-Response: + * { + * "params": { + * "table": "test" + 
* }, + * "success": false, + * "message": "Error" + * } + */ + let message = 'Error'; + let result = {'params': request.params, success: false, message: message}; + const table = await fastify.getData(request.params.table); + if (request.body.xAxisName === undefined || request.body.yAxisName === undefined) { + result.message = 'xAxisName or yAxisName undefined, specify them in POST body.'; + result.code = 400; + await reply.code(result.code).send(result); + return; + } + if (table == undefined) { + result.message = 'Table not found'; + await reply.code(404).send(result); + } else { + xCol = table.get(request.body.xAxisName).cast(new Float64); + yCol = table.get(request.body.yAxisName).cast(new Float64); + const [xMin, xMax, yMin, yMax] = [xCol.min(), xCol.max(), yCol.min(), yCol.max()]; + try { + const quadtree = cuspatial.Quadtree.new( + {x: xCol, y: yCol, xMin, xMax, yMin, yMax, scale: 0, maxDepth: 15, minSize: 1e5}); + const quadtree_name = request.params.table + '_quadtree'; + request.params.quadtree = quadtree_name + const saved = await fastify.cacheObject(quadtree_name, quadtree); + result.message = 'Quadtree created'; + result.success = true; + result.statusCode = 200; + await reply.code(result.statusCode).send(result); + } catch (e) { + result.message = e; + result.success = false; + result.statusCode = 500; + await reply.code(result.statusCode).send(result); + } + } + } + }); + + fastify.route({ + method: 'POST', + url: '/set_polygons', + schema: { + querystring: { + name: {type: 'string'}, + polygon_offset: {type: 'array'}, + ring_offset: {type: 'array'}, + points: {type: 'array'} + } + }, + handler: async (request, reply) => { + /** + * @api {post} /quadtree/set_polygons_quadtree Set Polygons Quadtree + * @apiName SetPolygonsQuadtree + * @apiGroup Quadtree + * @apiDescription Set polygons for quadtree + * @apiParam {String} name Name of quadtree + * @apiParam {Array} polygon_offset Array of polygon offsets + * @apiParam {Array} ring_offset Array of ring 
offsets + * @apiParam {Array} points Array of points + * @apiParamExample {json} Request-Example: + * { + * "name": "test_quadtree", + * "polygon_offset": [0, 4, 8], + * "ring_offset": [0, 4, 8, 12], + * "points": [0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1] + * } + * @apiSuccessExample {json} Success-Response: + * { + * "params": { + * "name": "test_quadtree", + * "polygon_offset": [0, 4, 8], + * "ring_offset": [0, 4, 8, 12], + * "points": [0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1] + * }, + * "success": true, + * "message": "Set polygon test_quadtree" + * } + * @apiErrorExample {json} Error-Response: + * { + * "params": { + * "name": "test_quadtree", + * "polygon_offset": [0, 4, 8], + * "ring_offset": [0, 4, 8, 12], + * "points": [0, 0, 1, 0, 1, 1, 0, 1, 0, 0, 1, 0, 1, 1, 0, 1] + * }, + * "success": false, + * "message": "Error" + * } + */ + let message = 'Error'; + let result = {'params': request.params, success: false, message: message}; + try { + const polygon_offset = Series.new(new Int32Array(request.body.polygon_offset)); + const ring_offset = Series.new(new Int32Array(request.body.ring_offset)); + const points = Series.new(new Float64Array(request.body.points)); + const cached = + await fastify.cacheObject(request.body.name, {polygon_offset, ring_offset, points}); + result.message = 'Set polygon ' + request.body.name; + result.success = true; + result.statusCode = 200; + result.params = request.body; + await reply.code(result.statusCode).send(result); + } catch (e) { + result.message = e; + result.success = false; + result.statusCode = 500; + await reply.code(result.statusCode).send(result); + } + } + }); + + fastify.route({ + method: 'GET', + url: '/get_points/:quadtree/:polygon/clear', + schema: + {querystring: {quadtree: {type: 'string'}, polygon: {type: 'string'}, n: {type: 'number'}}}, + handler: async (request, reply) => { + let message = 'Error'; + let result = {'params': request.params, success: false, message: message}; + try { + const 
quadtree = await fastify.getData(request.params.quadtree); + const {polygon_offset, ring_offset, points} = await fastify.getData(request.params.polygon); + const polygons_and_served = await fastify.getData(points.toArray()); + let polygons = polygons_and_served ? polygons_and_served.polygons : undefined; + let served = polygons_and_served ? polygons_and_served.served : 0; + if (polygons === undefined) { + result.message = 'No cached polygons found'; + result.success = false; + result.statusCode = 404; + } else { + await fastify.cacheObject(points.toArray(), {polygons: polygons, served: 0}); + result.message = 'Cleared points from cache'; + result.success = true; + result.statusCode = 200; + } + await reply.code(result.statusCode).send(result); + } catch (e) { + result.message = JSON.stringify(e); + result.success = false; + result.statusCode = 500; + await reply.code(result.statusCode).send(result); + } + } + }); + + fastify.route({ + method: 'GET', + url: '/get_points/:quadtree/:polygon/:n/next', + schema: + {querystring: {quadtree: {type: 'string'}, polygon: {type: 'string'}, n: {type: 'number'}}}, + handler: async (request, reply) => { + let message = 'Error'; + let result = {'params': request.params, success: false, message: message}; + try { + const quadtree = await fastify.getData(request.params.quadtree); + const {polygon_offset, ring_offset, points} = await fastify.getData(request.params.polygon); + const polygons_and_served = await fastify.getData(points); + let polygons = polygons_and_served ? polygons_and_served.polygons : undefined; + let served = polygons_and_served ? 
polygons_and_served.served : 0; + if (polygons === undefined) { + const pts = cuspatial.makePoints( + points.gather(Series.sequence({size: points.length, step: 2, init: 0})), + points.gather(Series.sequence({size: points.length, step: 2, init: 1}))); + const polylines = cuspatial.makePolylines(pts, ring_offset); + polygons = cuspatial.makePolygons(polylines, polygon_offset); + await fastify.cacheObject(points.toArray(), {polygons: polygons, served: 0}); + } + const polyPointPairs = quadtree.pointInPolygon(polygons); + const resultPoints = quadtree.points.gather(polyPointPairs.get('point_index')); + // Either return the number of requested points or the number of points left + const numPoints = + Math.min(parseInt(request.params.n), resultPoints.get('x').length - served); + let result_col = + Series.sequence({size: numPoints * 2, type: new Float32, step: 0, init: 0}); + result_col = result_col.scatter( + resultPoints.get('x').gather(Series.sequence({size: numPoints, step: 1, init: served})), + Series.sequence({size: numPoints, step: 2, init: served})); + result_col = result_col.scatter( + resultPoints.get('y').gather(Series.sequence({size: numPoints, step: 1, init: served})), + Series.sequence({size: numPoints, step: 2, init: served + 1})); + await fastify.cacheObject(points.toArray(), + {polygons: polygons, served: served + numPoints}); + result = new DataFrame({'points_in_polygon': result_col}) + const writer = RecordBatchStreamWriter.writeAll(result.toArrow()); + writer.close(); + await reply.code(200).send(writer.toNodeStream()); + } catch (e) { + result.message = e.toString(); + result.success = false; + result.statusCode = 500; + await reply.code(result.statusCode).send(result); + } + } + }); + + fastify.route({ + method: 'GET', + url: '/get_points/:quadtree/:polygon/:n', + schema: + {querystring: {quadtree: {type: 'string'}, polygon: {type: 'string'}, n: {type: 'number'}}}, + handler: async (request, reply) => { + /** + * @api {get} 
/quadtree/get_points/:quadtree/:polygon Get Points + * @apiName GetPoints + * @apiGroup Quadtree + * @apiDescription This API returns uses the quadtree to return only the points that are in + * the polygon. This API only returns the first n points. + * @apiParam {String} quadtree Name of quadtree created with /quadtree/create/:table + * @apiParam {String} polygon Name of polygon created with /quadtree/set_polygons_quadtree + * @apiParam {Number} n Number of points to return + * @apiParamExample {json} Request-Example: + * { + * "quadtree": "test_quadtree", + * "polygon": "test_polygon" + * "n": 100 + * } + * @apiSuccessExample {json} Success-Response: + * { + * "params": { + * "quadtree": "test_quadtree", + * "polygon": "test_polygon" + * }, + * "success": true, + * "message": "Get points from test_quadtree" + * } + * @apiErrorExample {json} Error-Response: + * { + * "params": { + * "quadtree": "test_quadtree", + * "polygon": "test_polygon" + * }, + * "success": false, + * "message": "Error" + * } + */ + let message = 'Error'; + let result = {'params': request.params, success: false, message: message}; + try { + const quadtree = await fastify.getData(request.params.quadtree); + const {polygon_offset, ring_offset, points} = await fastify.getData(request.params.polygon); + const polygons_and_served = await fastify.getData(points.toArray()); + let polygons = polygons_and_served ? polygons_and_served.polygons : undefined; + let served = polygons_and_served ? 
polygons_and_served.served : 0; + if (polygons === undefined) { + const pts = cuspatial.makePoints( + points.gather(Series.sequence({size: points.length, step: 2, init: 0})), + points.gather(Series.sequence({size: points.length, step: 2, init: 1}))); + const polylines = cuspatial.makePolylines(pts, ring_offset); + polygons = cuspatial.makePolygons(polylines, polygon_offset); + await fastify.cacheObject(points.toArray(), {polygons: polygons, served: 0}); + } + const polyPointPairs = quadtree.pointInPolygon(polygons); + const resultPoints = quadtree.points.gather(polyPointPairs.get('point_index')); + const numPoints = Math.min(parseInt(request.params.n), resultPoints.get('x').length); + let result_col = + Series.sequence({size: numPoints * 2, type: new Float32, step: 0, init: 0}); + result_col = result_col.scatter(resultPoints.get('x'), + Series.sequence({size: numPoints, step: 2, init: 0})); + result_col = result_col.scatter(resultPoints.get('y'), + Series.sequence({size: numPoints, step: 2, init: 1})); + result = new DataFrame({'points_in_polygon': result_col}) + const writer = RecordBatchStreamWriter.writeAll(result.toArrow()); + writer.close(); + await reply.code(200).send(writer.toNodeStream()); + } catch (e) { + result.message = JSON.stringify(e); + result.success = false; + result.statusCode = 500; + await reply.code(result.statusCode).send(result); + } + } + }); + + fastify.route({ + method: 'GET', + url: '/get_points/:quadtree/:polygon', + schema: {querystring: {quadtree: {type: 'string'}, polygon: {type: 'string'}}}, + handler: async (request, reply) => { + /** + * @api {get} /quadtree/get_points/:quadtree/:polygon Get Points + * @apiName GetPoints + * @apiGroup Quadtree + * @apiDescription This API returns uses the quadtree to return only the points that are in + * the polygon. 
+ * @apiParam {String} quadtree Name of quadtree created with /quadtree/create/:table + * @apiParam {String} polygon Name of polygon created with /quadtree/set_polygons_quadtree + * @apiParamExample {json} Request-Example: + * { + * "quadtree": "test_quadtree", + * "polygon": "test_polygon" + * } + * @apiSuccessExample {json} Success-Response: + * { + * "params": { + * "quadtree": "test_quadtree", + * "polygon": "test_polygon" + * }, + * "success": true, + * "message": "Get points from test_quadtree" + * } + * @apiErrorExample {json} Error-Response: + * { + * "params": { + * "quadtree": "test_quadtree", + * "polygon": "test_polygon" + * }, + * "success": false, + * "message": "Error" + * } + */ + let message = 'Error'; + let result = {'params': request.params, success: false, message: message}; + try { + const quadtree = await fastify.getData(request.params.quadtree); + const {polygon_offset, ring_offset, points} = await fastify.getData(request.params.polygon); + const data = await fastify.listDataframes(); + const pts = cuspatial.makePoints( + points.gather(Series.sequence({size: points.length, step: 2, init: 0})), + points.gather(Series.sequence({size: points.length, step: 2, init: 1}))); + const polylines = cuspatial.makePolylines(pts, ring_offset); + const polygons = cuspatial.makePolygons(polylines, polygon_offset); + const polyPointPairs = quadtree.pointInPolygon(polygons); + const resultPoints = quadtree.points.gather(polyPointPairs.get('point_index')); + const numPoints = resultPoints.get('x').length + let result_col = + Series.sequence({size: numPoints * 2, type: new Float32, step: 0, init: 0}); + result_col = result_col.scatter(resultPoints.get('x'), + Series.sequence({size: numPoints, step: 2, init: 0})); + result_col = result_col.scatter(resultPoints.get('y'), + Series.sequence({size: numPoints, step: 2, init: 1})); + result = new DataFrame({'points_in_polygon': result_col}) + const writer = RecordBatchStreamWriter.writeAll(result.toArrow()); + 
writer.close(); + await reply.code(200).send(writer.toNodeStream()); + } catch (e) { + result.message = JSON.stringify(e); + result.success = false; + result.statusCode = 500; + await reply.code(result.statusCode).send(result); + } + } + }); + + fastify.route({ + method: 'GET', + url: '/:quadtree/:polygon/count', + schema: {querystring: {quadtree: {type: 'string'}, polygon: {type: 'string'}}}, + handler: async (request, reply) => { + /** + * @api {get} /quadtree/:quadtree/:polygon/count Count Points + * @apiName CountPoints + * @apiGroup Quadtree + * @apiDescription This API uses the quadtree to count the points that are in + * the polygon. + * @apiParam {String} quadtree Name of quadtree created with /quadtree/create/:table + * @apiParam {String} polygon Name of polygon created with /quadtree/set_polygons_quadtree + * @apiParamExample {json} Request-Example: + * { + * "quadtree": "test_quadtree", + * "polygon": "test_polygon" + * } + * @apiSuccessExample {json} Success-Response: + * { + * "count": 100 + * } + * @apiErrorExample {json} Error-Response: + * { + * "params": { + * "quadtree": "test_quadtree", + * "polygon": "test_polygon" + * }, + * "success": false, + * "message": "Error" + * } + */ + let message = 'Error'; + let result = {'params': request.params, success: false, message: message}; + try { + const quadtree = await fastify.getData(request.params.quadtree); + const {polygon_offset, ring_offset, points} = await fastify.getData(request.params.polygon); + const data = await fastify.listDataframes(); + const pts = cuspatial.makePoints( + points.gather(Series.sequence({size: points.length, step: 2, init: 0})), + points.gather(Series.sequence({size: points.length, step: 2, init: 1}))); + const polylines = cuspatial.makePolylines(pts, ring_offset); + const polygons = cuspatial.makePolygons(polylines, polygon_offset); + // TODO: This is a good place to put the polygons object into the cache, + // and check for it before creating it. 
Is it worth benchmarking? + const polyPointPairs = quadtree.pointInPolygon(polygons); + result.count = polyPointPairs.get('point_index').length; + result.message = 'Counted points in polygon'; + result.success = true; + result.statusCode = 200; + await reply.code(200).send(result); + } catch (e) { + result.message = e; + result.success = false; + result.statusCode = 500; + await reply.code(result.statusCode).send(result); + } + } + }); +} diff --git a/modules/demo/api-server/test/fixtures.js b/modules/demo/api-server/test/fixtures.js index 35df27442..8ad838150 100644 --- a/modules/demo/api-server/test/fixtures.js +++ b/modules/demo/api-server/test/fixtures.js @@ -163,9 +163,45 @@ const json_bad_map = { }` }; +const csv_base = { + 'csv_base.csv': + `Index,Name,Int,Float + 0,"bob",1,1.0 + 1,"george",2,2.0 + 2,"sam",3,3.0` +}; + +const csv_particles = { + 'csv_particles.csv': + `Index,Longitude,Latitude + 0, -105, 40 + 1, -106, 41 + 2, -107, 42 + 3, -108, 43 + 4, -109, 44 + 5, -110, 45` +}; + +const csv_quadtree = { + 'csv_quadtree.csv': + `Index,x,y + 0,-4.0,4.0 + 1,-3.0,3.0 + 2,-2.0,2.0 + 3,-1.0,1.0 + 4,0.0,0.0 + 5,1.0,-1.0 + 6,2.0,-2.0 + 7,3.0,-3.0 + 8,4.0,-4.0` +}; + module.exports = { json_good: json_good, json_large: json_large, json_out_of_order: json_out_of_order, json_bad_map: json_bad_map, + csv_base: csv_base, + csv_particles: csv_particles, + csv_quadtree: csv_quadtree }; diff --git a/modules/demo/api-server/test/plugins/gpu_cache.test.js b/modules/demo/api-server/test/plugins/gpu_cache.test.js index b8453a337..b29acaec9 100644 --- a/modules/demo/api-server/test/plugins/gpu_cache.test.js +++ b/modules/demo/api-server/test/plugins/gpu_cache.test.js @@ -20,13 +20,14 @@ const Support = require('../../plugins/support') const fixtures = require('../fixtures.js'); const gpuCache = require('../../util/gpu_cache.js'); -test('clearCachedGPUData()', async t => { - await gpuCache.setDataframe('bob', 5); - const result = await gpuCache.getDataframe('bob'); 
+test('set/getData', async t => { + await gpuCache.cacheObject('bob', 5); + const result = await gpuCache.getData('bob'); + await gpuCache.clearDataframes(); t.equal(result, 5); }); -test('read_large_graph_demo', async t => { +test('readLargeGraphDemo', async t => { const dir = t.testdir(fixtures); const result = await gpuCache.readLargeGraphDemo(dir + '/json_large/json_large.txt'); await gpuCache.clearDataframes(); @@ -39,3 +40,36 @@ test('readGraphology', async t => { await gpuCache.clearDataframes(); t.same(Object.keys(result), ['nodes', 'edges', 'tags', 'clusters']); }); + +test('readCSV', {only: true}, async t => { + const dir = t.testdir(fixtures); + const path = dir + '/csv_base/csv_base.csv'; + const result = await gpuCache.readCSV({ + header: 0, + sourceType: 'files', + sources: [path], + }); + t.same(result.names, ['Index', 'Name', 'Int', 'Float']); +}); + +test('listDataframes', async t => { + await gpuCache.cacheObject('bob', 5); + await gpuCache.cacheObject('george', 6); + const result = await gpuCache.listDataframes(); + await gpuCache.clearDataframes(); + t.same(result, ['bob', 'george']); +}); + +test('clearDataframes', async t => { + await gpuCache.cacheObject('bob', 5); + await gpuCache.cacheObject('george', 6); + await gpuCache.clearDataframes(); + const result = await gpuCache.listDataframes(); + t.same(result, []); +}); + +test('_setPathForTesting', {only: true}, async t => { + await gpuCache._setPathForTesting('abcdef'); + const result = await gpuCache.publicPath(); + t.same(result, 'abcdef'); +}); diff --git a/modules/demo/api-server/test/routes/gpu.test.js b/modules/demo/api-server/test/routes/gpu.test.js new file mode 100644 index 000000000..591641214 --- /dev/null +++ b/modules/demo/api-server/test/routes/gpu.test.js @@ -0,0 +1,63 @@ +// Copyright (c) 2022, NVIDIA CORPORATION. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +'use strict' + +const {dir} = require('console'); +const {test} = require('tap'); +const {build} = require('../helper'); +const {tableFromIPC, RecordBatchStreamWriter} = require('apache-arrow'); +const {json_large, json_good, json_out_of_order, json_bad_map, csv_base} = + require('../fixtures.js'); +const gpu_cache = require('../../util/gpu_cache'); + +test('read_csv', async (t) => { + const dir = t.testdir(csv_base); + const rpath = 'test/routes/' + dir.substring(dir.lastIndexOf('/')); + const app = await build(t); + gpu_cache._setPathForTesting(rpath); + const res = await app.inject( + {method: 'POST', url: '/gpu/DataFrame/readCSV', body: {filename: 'csv_base.csv'}}); + const release = await app.inject({method: 'POST', url: '/graphology/release'}); + t.same(JSON.parse(res.payload), { + success: true, + message: 'CSV file in GPU memory.', + statusCode: 200, + params: {filename: 'csv_base.csv'} + }); +}); + +test('list_tables', async (t) => { + const dir = t.testdir(csv_base); + const rpath = 'test/routes/' + dir.substring(dir.lastIndexOf('/')); + const app = await build(t); + gpu_cache._setPathForTesting(rpath); + const res = await app.inject( + {method: 'POST', url: '/gpu/DataFrame/readCSV', body: {filename: 'csv_base.csv'}}); + const tables = await app.inject({method: 'GET', url: '/gpu/list_tables'}); + const release = await app.inject({method: 'POST', url: '/gpu/release'}); + t.same(JSON.parse(tables.payload), ['csv_base.csv']); +}); + +test('release', async (t) => { + const dir = t.testdir(csv_base); + const rpath = 'test/routes/' + 
dir.substring(dir.lastIndexOf('/')); + const app = await build(t); + gpu_cache._setPathForTesting(rpath); + const res = await app.inject( + {method: 'POST', url: '/gpu/DataFrame/readCSV', body: {filename: 'csv_base.csv'}}); + const release = await app.inject({method: 'POST', url: '/gpu/release'}); + const tables = await app.inject({method: 'GET', url: '/gpu/list_tables'}); + t.same(JSON.parse(tables.payload), []); +}); diff --git a/modules/demo/api-server/test/routes/graphology.test.js b/modules/demo/api-server/test/routes/graphology.test.js index f0242fdc8..43665fa12 100644 --- a/modules/demo/api-server/test/routes/graphology.test.js +++ b/modules/demo/api-server/test/routes/graphology.test.js @@ -14,51 +14,50 @@ 'use strict' -const {dir} = require('console'); -const {test} = require('tap'); -const {build} = require('../helper'); -const {tableFromIPC, RecordBatchStreamWriter} = require('apache-arrow'); -const {json_large, json_good, json_out_of_order, json_bad_map} = require('../fixtures.js'); +const {dir} = require('console'); +const {test} = require('tap'); +const {build} = require('../helper'); +const {tableFromIPC, RecordBatchStreamWriter} = require('apache-arrow'); +const {json_large, json_good, json_out_of_order, json_bad_map, csv_base} = + require('../fixtures.js'); +const gpu_cache = require('../../util/gpu_cache'); test('graphology root returns api description', async t => { const app = await build(t); const res = await app.inject({url: '/graphology'}) t.same(JSON.parse(res.payload), { - graphology: { - description: 'The graphology api provides GPU acceleration of graphology datasets.', - schema: { - read_json: { - filename: 'A URI to a graphology json dataset file.', - result: `Causes the node-rapids backend to attempt to load the json object specified + description: 'The graphology api provides GPU acceleration of graphology datasets.', + schema: { + read_json: { + filename: 'A URI to a graphology json dataset file.', + result: `Causes the node-rapids 
backend to attempt to load the json object specified by :filename. The GPU will attempt to parse the json file asynchronously and will return OK/ Not Found/ or Fail based on the file status. If the load is successful, four tables will be created in the node-rapids backend: nodes, edges, clusters, and tags. The root objects in the json target must match these names and order.`, - returns: 'Result OK/Not Found/Fail' - }, - read_large_demo: { - filename: - 'A URI to a graphology json dataset file matching the sigma.js/examples/large-demos spec.', - result: `Produces the same result as 'read_json'. + returns: 'Result OK/Not Found/Fail' + }, + read_large_demo: { + filename: + 'A URI to a graphology json dataset file matching the sigma.js/examples/large-demos spec.', + result: `Produces the same result as 'read_json'. If the load is successful, three tables will be created in the node-rapids backend: nodes, edges, and options.`, - returns: 'Result OK/Not Found/Fail' - }, - list_tables: {returns: 'Tables that are available presently in GPU memory.'}, - get_table: { - ':table': - {table: 'The name of the table that has been allocated previously into GPU memory.'} - }, - get_column: {':table': {':column': {table: 'The table name', column: 'The column name'}}}, - nodes: { - returns: - 'Returns the existing nodes table after applying normalization functions for sigma.js' - }, - nodes: {bounds: {returns: 'Returns the x and y bounds to be used in rendering.'}}, - edges: - {return: 'Returns the existing edges table after applying normalization for sigma.js'} - } + returns: 'Result OK/Not Found/Fail' + }, + list_tables: {returns: 'Tables that are available presently in GPU memory.'}, + get_table: { + ':table': + {table: 'The name of the table that has been allocated previously into GPU memory.'} + }, + get_column: {':table': {':column': {table: 'The table name', column: 'The column name'}}}, + nodes: { + returns: + 'Returns the existing nodes table after applying normalization 
functions for sigma.js' + }, + nodes: {bounds: {returns: 'Returns the x and y bounds to be used in rendering.'}}, + edges: {return: 'Returns the existing edges table after applying normalization for sigma.js'} } }) }); @@ -120,7 +119,7 @@ test('read_json incorrect format', async (t) => { const rpath = '../../test/routes/' + dir.substring(dir.lastIndexOf('/')) + '/json_bad.txt'; const app = await build(t); const res = await app.inject({method: 'POST', url: '/graphology/read_json?filename=' + rpath}); - const release = await app.inject({method: 'POST', url: '/graphology/release'}); + const release = await app.inject({method: 'POST', url: '/gpu/release'}); const payload = JSON.parse(res.payload); t.equal(payload.message, 'Bad graphology format: nodes not found.'); t.equal(payload.success, false); @@ -129,9 +128,14 @@ test('read_json incorrect format', async (t) => { test('read_json file good', async (t) => { const dir = t.testdir(json_good); const rpath = '../../test/routes/' + dir.substring(dir.lastIndexOf('/')) + '/json_good.txt'; - const app = await build(t); - const res = await app.inject({method: 'POST', url: '/graphology/read_json?filename=' + rpath}); - const release = await app.inject({method: 'POST', url: '/graphology/release'}); + /* This comment is left for working out mocking with tap in fastify + see: https://github.com/tapjs/node-tap/issues/846 + const build = t.mock('../../routes/graphology/index.js', + {'../../util/gpu_cache.js': {publicPath: () => rpath}}); + */ + const app = await build(t); + const res = await app.inject({method: 'POST', url: '/graphology/read_json?filename=' + rpath}); + const release = await app.inject({method: 'POST', url: '/gpu/release'}); const payload = JSON.parse(res.payload); t.equal(payload.message, 'File read onto GPU.'); t.equal(payload.success, true); @@ -142,8 +146,8 @@ test('list_tables', async (t) => { const rpath = '../../test/routes/' + dir.substring(dir.lastIndexOf('/')) + '/json_good.txt'; const app = await 
build(t); const load = await app.inject({method: 'POST', url: '/graphology/read_json?filename=' + rpath}); - const res = await app.inject({method: 'GET', url: '/graphology/list_tables'}); - const release = await app.inject({method: 'POST', url: '/graphology/release'}); + const res = await app.inject({method: 'GET', url: '/gpu/list_tables'}); + const release = await app.inject({method: 'POST', url: '/gpu/release'}); const payload = JSON.parse(res.payload); t.ok(payload.includes('nodes')); }); @@ -160,7 +164,7 @@ test('get_table', async (t) => { }); t.same(res.statusCode, 200); const table = tableFromIPC(res.rawPayload); - const release = await app.inject({method: 'POST', url: '/graphology/release'}); + const release = await app.inject({method: 'POST', url: '/gpu/release'}); t.same(table.schema.names, ['key', 'label', 'tag', 'URL', 'cluster', 'x', 'y', 'score']); t.equal(table.numRows, 2); t.equal(table.numCols, 8); @@ -178,7 +182,7 @@ test('get_column', async (t) => { }); t.same(res.statusCode, 200); const table = tableFromIPC(res.rawPayload); - const release = await app.inject({method: 'POST', url: '/graphology/release'}); + const release = await app.inject({method: 'POST', url: '/gpu/release'}); t.same(table.schema.names, ['score']); t.equal(table.numRows, 2); t.equal(table.numCols, 1); @@ -205,7 +209,7 @@ test('nodes', async (t) => { 2, -5.515159729197043e+28 ])) - const release = await app.inject({method: 'POST', url: '/graphology/release'}); + const release = await app.inject({method: 'POST', url: '/gpu/release'}); }); test('nodes/bounds', async (t) => { @@ -225,7 +229,7 @@ test('nodes/bounds', async (t) => { 'ymax': 4.134339332580566, } }); - const release = await app.inject({method: 'POST', url: '/graphology/release'}); + const release = await app.inject({method: 'POST', url: '/gpu/release'}); }); test('nodes then nodes/bounds', async (t) => { @@ -260,7 +264,7 @@ test('nodes then nodes/bounds', async (t) => { 'ymax': 4.134339332580566, } }); - const release = 
await app.inject({method: 'POST', url: '/graphology/release'}); + const release = await app.inject({method: 'POST', url: '/gpu/release'}); }); test('edges', async (t) => { @@ -273,7 +277,7 @@ test('edges', async (t) => { {method: 'GET', url: '/graphology/edges', header: {'accepts': 'application/octet-stream'}}); t.equal(res.statusCode, 200); const table = tableFromIPC(res.rawPayload); - const release = await app.inject({method: 'POST', url: '/graphology/release'}); + const release = await app.inject({method: 'POST', url: '/gpu/release'}); t.ok(table.getChild('edges')); t.same(table.getChild('edges').toArray(), new Float32Array([ 0.9705526828765869, @@ -300,8 +304,7 @@ test('edges and nodes do not begin with 0', async (t) => { await app.inject({method: 'POST', url: '/graphology/read_large_demo?filename=' + rpath}); const res = await app.inject( {method: 'GET', url: '/graphology/edges', header: {'accepts': 'application/octet-stream'}}); - const release = await app.inject({method: 'POST', url: '/graphology/release'}); - debugger; + const release = await app.inject({method: 'POST', url: '/gpu/release'}); t.equal(res.statusCode, 200); const table = tableFromIPC(res.rawPayload); t.ok(table.getChild('edges')); @@ -321,7 +324,7 @@ test('edges and nodes do not begin with 0', async (t) => { ])) }); -test('edge keys do not match node keys', {only: true}, async (t) => { +test('edge keys do not match node keys', async (t) => { const dir = t.testdir(json_bad_map); const rpath = '../../test/routes/' + dir.substring(dir.lastIndexOf('/')) + '/json_bad_map.txt'; const app = await build(t); @@ -329,8 +332,7 @@ test('edge keys do not match node keys', {only: true}, async (t) => { await app.inject({method: 'POST', url: '/graphology/read_large_demo?filename=' + rpath}); const res = await app.inject( {method: 'GET', url: '/graphology/edges', header: {'accepts': 'application/octet-stream'}}); - const release = await app.inject({method: 'POST', url: '/graphology/release'}); - debugger; + 
const release = await app.inject({method: 'POST', url: '/gpu/release'}); t.equal(res.statusCode, 422); t.same(JSON.parse(res.payload), {success: false, message: 'Edge sources do not match node keys', statusCode: 422}); diff --git a/modules/demo/api-server/test/routes/particles.test.js b/modules/demo/api-server/test/routes/particles.test.js new file mode 100644 index 000000000..efe1b4c80 --- /dev/null +++ b/modules/demo/api-server/test/routes/particles.test.js @@ -0,0 +1,63 @@ +// Copyright (c) 2022, NVIDIA CORPORATION. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict' + +const {dir} = require('console'); +const {test} = require('tap'); +const {build} = require('../helper'); +const {tableFromIPC, RecordBatchStreamWriter} = require('apache-arrow'); +const {json_large, json_good, json_out_of_order, json_bad_map, csv_base, csv_particles} = + require('../fixtures.js'); +const gpu_cache = require('../../util/gpu_cache'); + +test('get_shader_column/:table wrong table', async (t) => { + const app = await build(t); + const res = await app.inject({method: 'GET', url: '/particles/get_shader_column/no_table'}); + t.equal(res.statusCode, 404); + const expected = {params: {table: 'no_table'}, success: false, message: 'Table not found'}; + console.log(res.payload); + const got = JSON.parse(res.payload); + t.same(got, expected); +}); + +test('get_shader_column/:table', async (t) => { + const dir = t.testdir(csv_particles); + const rpath = 'test/routes/' + dir.substring(dir.lastIndexOf('/')); + const app = await build(t); + gpu_cache._setPathForTesting(rpath); + const load = await app.inject( + {method: 'POST', url: '/gpu/DataFrame/readCSV', body: {filename: 'csv_particles.csv'}}); + const res = + await app.inject({method: 'GET', url: '/particles/get_shader_column/csv_particles.csv'}); + const expected = [-105, 40, -106, 41, -107, 42, -108, 43, -109, 44, -110, 45]; + const got = tableFromIPC(res.rawPayload).getChild('gpu_buffer').toArray(); + const release = await app.inject({method: 'POST', url: '/gpu/release'}); + t.same(got, expected); +}); + +test('get_shader_column/:table/:xmin/:xmax/:ymin/:ymax', async (t) => { + const dir = t.testdir(csv_particles); + const rpath = 'test/routes/' + dir.substring(dir.lastIndexOf('/')); + const app = await build(t); + gpu_cache._setPathForTesting(rpath); + const load = await app.inject( + {method: 'POST', url: '/gpu/DataFrame/readCSV', body: {filename: 'csv_particles.csv'}}); + const res = await app.inject( + {method: 'GET', url: 
'/particles/get_shader_column/csv_particles.csv/-109/-106/41/44'}); + const expected = [-107, 42, -108, 43]; + const got = tableFromIPC(res.rawPayload).getChild('gpu_buffer').toArray(); + const release = await app.inject({method: 'POST', url: '/gpu/release'}); + t.same(got, expected); +}); diff --git a/modules/demo/api-server/test/routes/quadtree.test.js b/modules/demo/api-server/test/routes/quadtree.test.js new file mode 100644 index 000000000..35b9b69ef --- /dev/null +++ b/modules/demo/api-server/test/routes/quadtree.test.js @@ -0,0 +1,404 @@ +// Copyright (c) 2022, NVIDIA CORPORATION. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +'use strict' + +const {dir} = require('console'); +const {test} = require('tap'); +const {build} = require('../helper'); +const {tableFromIPC, RecordBatchStreamWriter} = require('apache-arrow'); +const {csv_quadtree} = require('../fixtures.js'); +const gpu_cache = require('../../util/gpu_cache'); + +test('quadtree/create/:table', async (t) => { + const dir = t.testdir(csv_quadtree); + const rpath = 'test/routes/' + dir.substring(dir.lastIndexOf('/')); + const app = await build(t); + gpu_cache._setPathForTesting(rpath); + const load = await app.inject( + {method: 'POST', url: '/gpu/DataFrame/readCSV', body: {filename: 'csv_quadtree.csv'}}); + const res = await app.inject({ + method: 'POST', + url: '/quadtree/create/csv_quadtree.csv', + body: {xAxisName: 'x', yAxisName: 'y'} + }); + const release = await app.inject({method: 'POST', url: '/gpu/release'}); + const result = JSON.parse(res.payload); + t.same(result, { + statusCode: 200, + message: 'Quadtree created', + params: {table: 'csv_quadtree.csv', quadtree: 'csv_quadtree.csv_quadtree'}, + success: true + }) +}); + +test('quadtree/set_polygons', async (t) => { + const dir = t.testdir(csv_quadtree); + const rpath = 'test/routes/' + dir.substring(dir.lastIndexOf('/')); + const app = await build(t); + gpu_cache._setPathForTesting(rpath); + const load = await app.inject( + {method: 'POST', url: '/gpu/DataFrame/readCSV', body: {filename: 'csv_quadtree.csv'}}); + const res = await app.inject({ + method: 'POST', + url: '/quadtree/set_polygons', + body: + {name: 'test', polygon_offset: [0, 1], ring_offset: [0, 4], points: [0, 0, 1, 1, 2, 2, 3, 3]} + }); + const release = await app.inject({method: 'POST', url: '/gpu/release'}); + const result = JSON.parse(res.payload); + t.same(result, { + statusCode: 200, + message: 'Set polygon test', + params: + {name: 'test', polygon_offset: [0, 1], ring_offset: [0, 4], points: [0, 0, 1, 1, 2, 2, 3, 3]}, + success: true + }) +}); + +test('quadtree/get_points_float', async (t) => { + 
const dir = t.testdir(csv_quadtree); + const rpath = 'test/routes/' + dir.substring(dir.lastIndexOf('/')); + const app = await build(t); + gpu_cache._setPathForTesting(rpath); + const load = await app.inject( + {method: 'POST', url: '/gpu/DataFrame/readCSV', body: {filename: 'csv_quadtree.csv'}}); + const create = await app.inject({ + method: 'POST', + url: '/quadtree/create/csv_quadtree.csv', + body: {xAxisName: 'x', yAxisName: 'y'} + }); + const quadtree_name = JSON.parse(create.payload).params.quadtree; + const set_poly = await app.inject({ + method: 'POST', + url: '/quadtree/set_polygons', + body: { + name: 'test', + polygon_offset: [0, 1], + ring_offset: [0, 4], + points: [-2, -2, -2, 2, 2, 2, 2, -2] + } + }); + const polygons_name = JSON.parse(set_poly.payload).params.name; + const res = await app.inject({ + method: 'GET', + url: 'quadtree/get_points/' + quadtree_name + '/' + polygons_name, + }) + const release = await app.inject({method: 'POST', url: '/gpu/release'}); + const table = tableFromIPC(res.rawPayload); + const got = table.getChild('points_in_polygon').toArray(); + const expected = [1.0, -1.0, -1.0, 1.0, 0.0, 0.0]; + t.same(expected, got); +}); + +test('quadtree/get_points_int', async (t) => { + const dir = t.testdir(csv_quadtree); + const rpath = 'test/routes/' + dir.substring(dir.lastIndexOf('/')); + const app = await build(t); + gpu_cache._setPathForTesting(rpath); + const load = await app.inject( + {method: 'POST', url: '/gpu/DataFrame/readCSV', body: {filename: 'csv_quadtree.csv'}}); + const create = await app.inject({ + method: 'POST', + url: '/quadtree/create/csv_quadtree.csv', + body: {xAxisName: 'x', yAxisName: 'y'} + }); + const quadtree_name = JSON.parse(create.payload).params.quadtree; + const set_poly = await app.inject({ + method: 'POST', + url: '/quadtree/set_polygons', + body: { + name: 'test', + polygon_offset: [0, 1], + ring_offset: [0, 4], + points: [-2, -2, -2, 2, 2, 2, 2, -2] + } + }); + const polygons_name = 
JSON.parse(set_poly.payload).params.name; + const res = await app.inject({ + method: 'GET', + url: 'quadtree/get_points/' + quadtree_name + '/' + polygons_name, + }) + const release = await app.inject({method: 'POST', url: '/gpu/release'}); + const table = tableFromIPC(res.rawPayload); + const got = table.getChild('points_in_polygon').toArray(); + const expected = [1, -1, -1, 1, 0, 0]; + t.same(expected, got); +}); + +test('quadtree/:quadtree/:polygon/count', async (t) => { + const dir = t.testdir(csv_quadtree); + const rpath = 'test/routes/' + dir.substring(dir.lastIndexOf('/')); + const app = await build(t); + gpu_cache._setPathForTesting(rpath); + const load = await app.inject( + {method: 'POST', url: '/gpu/DataFrame/readCSV', body: {filename: 'csv_quadtree.csv'}}); + const create = await app.inject({ + method: 'POST', + url: '/quadtree/create/csv_quadtree.csv', + body: {xAxisName: 'x', yAxisName: 'y'} + }); + const quadtree_name = JSON.parse(create.payload).params.quadtree; + const set_poly = await app.inject({ + method: 'POST', + url: '/quadtree/set_polygons', + body: { + name: 'test', + polygon_offset: [0, 1], + ring_offset: [0, 4], + points: [-2, -2, -2, 2, 2, 2, 2, -2] + } + }); + const polygons_name = JSON.parse(set_poly.payload).params.name; + const res = await app.inject({ + method: 'GET', + url: 'quadtree/' + quadtree_name + '/' + polygons_name + '/count', + }) + const release = await app.inject({method: 'POST', url: '/gpu/release'}); + const result = JSON.parse(res.payload); + t.same(result, { + statusCode: 200, + message: 'Counted points in polygon', + params: {quadtree: quadtree_name, polygon: polygons_name}, + count: 3, + success: true + }) +}); + +test('quadtree/:quadtree/:polygon/:n', async (t) => { + const dir = t.testdir(csv_quadtree); + const rpath = 'test/routes/' + dir.substring(dir.lastIndexOf('/')); + const app = await build(t); + gpu_cache._setPathForTesting(rpath); + const load = await app.inject( + {method: 'POST', url: 
'/gpu/DataFrame/readCSV', body: {filename: 'csv_quadtree.csv'}}); + const create = await app.inject({ + method: 'POST', + url: '/quadtree/create/csv_quadtree.csv', + body: {xAxisName: 'x', yAxisName: 'y'} + }); + const quadtree_name = JSON.parse(create.payload).params.quadtree; + const set_poly = await app.inject({ + method: 'POST', + url: '/quadtree/set_polygons', + body: { + name: 'test', + polygon_offset: [0, 1], + ring_offset: [0, 4], + points: [-2, -2, -2, 2, 2, 2, 2, -2] + } + }); + const polygons_name = JSON.parse(set_poly.payload).params.name; + const res = await app.inject({ + method: 'GET', + url: 'quadtree/get_points/' + quadtree_name + '/' + polygons_name + '/2', + }) + const release = await app.inject({method: 'POST', url: '/gpu/release'}); + const table = tableFromIPC(res.rawPayload); + const got = table.getChild('points_in_polygon').toArray(); + const expected = [1.0, -1.0, -1.0, 1.0]; + t.same(got, expected); +}); + +test('quadtree/:quadtree/:polygon/:n/next', {only: true}, async (t) => { + const dir = t.testdir(csv_quadtree); + const rpath = 'test/routes/' + dir.substring(dir.lastIndexOf('/')); + const app = await build(t); + gpu_cache._setPathForTesting(rpath); + const load = await app.inject( + {method: 'POST', url: '/gpu/DataFrame/readCSV', body: {filename: 'csv_quadtree.csv'}}); + const create = await app.inject({ + method: 'POST', + url: '/quadtree/create/csv_quadtree.csv', + body: {xAxisName: 'x', yAxisName: 'y'} + }); + const quadtree_name = JSON.parse(create.payload).params.quadtree; + const set_poly = await app.inject({ + method: 'POST', + url: '/quadtree/set_polygons', + body: { + name: 'test', + polygon_offset: [0, 1], + ring_offset: [0, 4], + points: [-4, -4, -4, 4, 4, 4, 4, -4] + } + }); + const polygons_name = JSON.parse(set_poly.payload).params.name; + let res = await app.inject({ + method: 'GET', + url: 'quadtree/get_points/' + quadtree_name + '/' + polygons_name + '/2/next', + }) + let table = tableFromIPC(res.rawPayload); + let 
got = table.getChild('points_in_polygon').toArray(); + let expected = [3.0, -3.0, 1.0, -1.0]; + t.same(got, expected); + res = await app.inject({ + method: 'GET', + url: 'quadtree/get_points/' + quadtree_name + '/' + polygons_name + '/2/next', + }) + table = tableFromIPC(res.rawPayload); + got = table.getChild('points_in_polygon').toArray(); + expected = [-1.0, 1.0, 2.0, -2.0]; + t.same(got, expected); + res = await app.inject({ + method: 'GET', + url: 'quadtree/get_points/' + quadtree_name + '/' + polygons_name + '/2/next', + }) + table = tableFromIPC(res.rawPayload); + got = table.getChild('points_in_polygon').toArray(); + expected = [-3.0, 3.0, -2.0, 2.0]; + t.same(got, expected); + res = await app.inject({ + method: 'GET', + url: 'quadtree/get_points/' + quadtree_name + '/' + polygons_name + '/2/next', + }) + table = tableFromIPC(res.rawPayload); + got = table.getChild('points_in_polygon').toArray(); + expected = [0.0, 0.0]; + t.same(got, expected); + res = await app.inject({ + method: 'GET', + url: 'quadtree/get_points/' + quadtree_name + '/' + polygons_name + '/2/next', + }) + table = tableFromIPC(res.rawPayload); + got = table.getChild('points_in_polygon').toArray(); + expected = []; + t.same(got, expected); + const release = await app.inject({method: 'POST', url: '/gpu/release'}); +}); + +test('quadtree/:quadtree/:polygon/:n max', {only: true}, async (t) => { + const dir = t.testdir(csv_quadtree); + const rpath = 'test/routes/' + dir.substring(dir.lastIndexOf('/')); + const app = await build(t); + gpu_cache._setPathForTesting(rpath); + const load = await app.inject( + {method: 'POST', url: '/gpu/DataFrame/readCSV', body: {filename: 'csv_quadtree.csv'}}); + const create = await app.inject({ + method: 'POST', + url: '/quadtree/create/csv_quadtree.csv', + body: {xAxisName: 'x', yAxisName: 'y'} + }); + const quadtree_name = JSON.parse(create.payload).params.quadtree; + const set_poly = await app.inject({ + method: 'POST', + url: '/quadtree/set_polygons', + 
body: { + name: 'test', + polygon_offset: [0, 1], + ring_offset: [0, 4], + points: [-4, -4, -4, 4, 4, 4, 4, -4] + } + }); + const polygons_name = JSON.parse(set_poly.payload).params.name; + let res = await app.inject({ + method: 'GET', + url: 'quadtree/get_points/' + quadtree_name + '/' + polygons_name + '/20', + }) + let table = tableFromIPC(res.rawPayload); + let got = table.getChild('points_in_polygon').length; + let expected = 14; + const release = await app.inject({method: 'POST', url: '/gpu/release'}); + t.same(got, expected); +}); + +test('quadtree/:quadtree/:polygon/:n/next max', {only: true}, async (t) => { + const dir = t.testdir(csv_quadtree); + const rpath = 'test/routes/' + dir.substring(dir.lastIndexOf('/')); + const app = await build(t); + gpu_cache._setPathForTesting(rpath); + const load = await app.inject( + {method: 'POST', url: '/gpu/DataFrame/readCSV', body: {filename: 'csv_quadtree.csv'}}); + const create = await app.inject({ + method: 'POST', + url: '/quadtree/create/csv_quadtree.csv', + body: {xAxisName: 'x', yAxisName: 'y'} + }); + const quadtree_name = JSON.parse(create.payload).params.quadtree; + const set_poly = await app.inject({ + method: 'POST', + url: '/quadtree/set_polygons', + body: { + name: 'test', + polygon_offset: [0, 1], + ring_offset: [0, 4], + points: [-4, -4, -4, 4, 4, 4, 4, -4] + } + }); + const polygons_name = JSON.parse(set_poly.payload).params.name; + let res = await app.inject({ + method: 'GET', + url: 'quadtree/get_points/' + quadtree_name + '/' + polygons_name + '/20/next', + }) + let table = tableFromIPC(res.rawPayload); + let got = table.getChild('points_in_polygon').length; + let expected = 14; + const release = await app.inject({method: 'POST', url: '/gpu/release'}); + t.same(got, expected); +}); + +test('quadtree/:quadtree/:polygon/clear', {only: true}, async (t) => { + const dir = t.testdir(csv_quadtree); + const rpath = 'test/routes/' + dir.substring(dir.lastIndexOf('/')); + const app = await build(t); + 
gpu_cache._setPathForTesting(rpath); + const load = await app.inject( + {method: 'POST', url: '/gpu/DataFrame/readCSV', body: {filename: 'csv_quadtree.csv'}}); + const create = await app.inject({ + method: 'POST', + url: '/quadtree/create/csv_quadtree.csv', + body: {xAxisName: 'x', yAxisName: 'y'} + }); + const quadtree_name = JSON.parse(create.payload).params.quadtree; + const set_poly = await app.inject({ + method: 'POST', + url: '/quadtree/set_polygons', + body: { + name: 'test', + polygon_offset: [0, 1], + ring_offset: [0, 4], + points: [-4, -4, -4, 4, 4, 4, 4, -4] + } + }); + const polygons_name = JSON.parse(set_poly.payload).params.name; + let res = await app.inject({ + method: 'GET', + url: 'quadtree/get_points/' + quadtree_name + '/' + polygons_name + '/20/next', + }) + let table = tableFromIPC(res.rawPayload); + let got = table.getChild('points_in_polygon').length; + let expected = 14; + t.same(got, expected); + res = await app.inject({ + method: 'GET', + url: 'quadtree/get_points/' + quadtree_name + '/' + polygons_name + '/clear', + }) + got = JSON.parse(res.payload); + expected = { + statusCode: 200, + success: true, + message: 'Cleared points from cache', + params: {quadtree: quadtree_name, polygon: polygons_name} + }; + t.same(got, expected); + res = await app.inject({ + method: 'GET', + url: 'quadtree/get_points/' + quadtree_name + '/' + polygons_name + '/20/next', + }) + table = tableFromIPC(res.rawPayload); + got = table.getChild('points_in_polygon').length; + expected = 14; + const release = await app.inject({method: 'POST', url: '/gpu/release'}); + t.same(got, expected); +}); diff --git a/modules/demo/api-server/test/routes/root.test.js b/modules/demo/api-server/test/routes/root.test.js index 564498d71..ce499977c 100644 --- a/modules/demo/api-server/test/routes/root.test.js +++ b/modules/demo/api-server/test/routes/root.test.js @@ -21,6 +21,37 @@ test('root returns API description', async (t) => { const app = await build(t); const res = await 
app.inject({url: '/'}); t.same(JSON.parse(res.payload), { + gpu: { + description: 'An abstract interface to the node-rapids api, supported by a server.', + schema: { + '/': { + method: 'The name of the method to apply to gpu_cache data.', + caller: 'Either an object that has been stored in the gpu_cache or a static module name.', + arguments: 'Correctly specified arguments to the gpu_cache method.', + result: 'Either a result code specifying success or failure or an arrow data buffer.', + }, + 'DataFrame/readCSV': { + method: 'POST', + params: { + filename: + 'The name of the file, stored in the server\'s public/ folder, of the csv file.' + }, + result: `Causes the node-rapids backend to attempt to load the csv file specified + by :filename. The GPU will attempt to parse the CSV file asynchronously and will + return OK/ Not Found/ or Fail based on the file status.`, + returns: '500/404/200' + }, + 'get_column/:table/:column': { + method: 'GET', + params: { + ':table': + 'The filename of a previously loaded dataset, for example with `DataFrame/readCSV`', + ':column': 'A valid column name in a DataFrame that has been previously loaded.' + }, + returns: 'An Arrow `RecordBatchStreamWriter` stream of the columnar data.' 
+ } + } + }, graphology: { description: 'The graphology api provides GPU acceleration of graphology datasets.', schema: { @@ -56,6 +87,77 @@ test('root returns API description', async (t) => { edges: {return: 'Returns the existing edges table after applying normalization for sigma.js'} } + }, + particles: { + description: + 'The API responsible for parsing particles CSV files for the point-budget API demo.', + schema: { + 'get_shader_column/:table/:xmin/:xmax/:ymin/:ymax': { + method: 'POST', + params: { + ':table': 'The name of the CSV file previously loaded with `DataFrame/readCSV`', + 'xmin (optional)': 'Don\'t return results outside of xmin', + 'xmax (optional)': 'Don\'t return results outside of xmax', + 'ymin (optional)': 'Don\'t return results outside of ymin', + 'ymax (optional)': 'Don\'t return results outside of ymax' + }, + result: + `Returns the Longitude and Latitude columns of a table that has been read previously + with DataFrame/readCSV. The Longitude and Latitude will be packed into a a single column and + interleaved.`, + return: 'Returns an Arrow stream of lon/lat values as a Table containing a single column.' + } + } + }, + quadtree: { + description: 'The API responsible for making quadtree API server requests.', + schema: { + 'create/:table': { + method: 'POST', + params: {':table': 'The name of the CSV file previously loaded with `DataFrame/readCSV`'}, + result: 'Create a quadtree from the table specified by :table.', + return: { + '200': 'Quadtree created successfully.', + '404': 'Table not found.', + '500': 'Quadtree creation failed.' 
+ } + }, + 'set_polygons': { + method: 'POST', + params: { + 'name': 'The name of the polygon set.', + 'points': 'A list of points that define the polygons.', + 'polygon_offset': 'The GeoArrow offset defining the polygons in the points list.', + 'ring_offset': + 'The GeoArrow offset defining the rings of the polygons in the points list.', + }, + result: 'Create a polygon set from the points specified.', + return: + {'200': 'Polygon set created successfully.', '500': 'Polygon set creation failed.'} + }, + 'get_points/:quadtree/:polygon': { + method: 'GET', + params: { + ':quadtree': 'The name of the quadtree previously created with `quadtree/create`', + ':polygon': + 'The name of the polygon set previously created with `quadtree/set_polygons`' + }, + result: + 'Returns the points that are contained within the polygons specified by :polygon.', + return: 'Returns an Arrow stream of points that are contained within the polygons.' + }, + ':quadtree/:polygon/count': { + method: 'GET', + params: { + ':quadtree': 'The name of the quadtree previously created with `quadtree/create`', + ':polygon': + 'The name of the polygon set previously created with `quadtree/set_polygons`' + }, + result: + 'Returns the number of points that are contained within the polygons specified by :polygon.', + return: {count: 'The number of points that are contained within the polygons.'} + } + } } }); }); diff --git a/modules/demo/api-server/util/gpu_cache.js b/modules/demo/api-server/util/gpu_cache.js index 1188346a6..4c5959dfb 100644 --- a/modules/demo/api-server/util/gpu_cache.js +++ b/modules/demo/api-server/util/gpu_cache.js @@ -14,12 +14,16 @@ const {Bool8, Utf8String, Int32, Int64, DataFrame, Series, Float32, Float64} = require('@rapidsai/cudf'); +const Path = require('path'); let timeout = -1; let datasets = {}; function clearCachedGPUData() { - for (const key in datasets) { datasets[key] = null; } + for (const key in datasets) { + const dataset = datasets[key]; + datasets[key] = null; + } 
}; function json_key_attributes_to_dataframe(str) { @@ -36,7 +40,7 @@ function json_key_attributes_to_dataframe(str) { }); const result = new DataFrame(arr); return result; -} +}; function json_aos_to_dataframe(str, columns, dtypes) { let arr = {}; @@ -48,7 +52,7 @@ function json_aos_to_dataframe(str, columns, dtypes) { }); const result = new DataFrame(arr); return result; -} +}; function json_aoa_to_dataframe(str, dtypes) { let arr = {}; @@ -61,33 +65,25 @@ function json_aoa_to_dataframe(str, dtypes) { }); const result = new DataFrame(arr); return result; -} - -module.exports = { - async setDataframe(name, dataframe) { - if (timeout) { clearTimeout(timeout); } - timeout = setTimeout(clearCachedGPUData, 10 * 60 * 1000); - if (datasets === null) { - datasets = {}; - } - datasets[name] = dataframe; - }, - - async getDataframe(name) { return datasets[name]; }, +}; - async listDataframes() { return datasets != null ? Object.keys(datasets) : []; }, +let _publicPath = Path.join(__dirname, '../public'); - async clearDataframes() { - clearCachedGPUData(); - clearTimeout(timeout); - datasets = null; - }, +const cacheObject = async (name, data) => { + if (timeout) { clearTimeout(timeout); } + timeout = setTimeout(clearCachedGPUData, 10 * 60 * 1000); + if (datasets === null) { + datasets = {}; + } + datasets[name] = data; +}; +module.exports = { async readLargeGraphDemo(path) { console.log('readLargeGraphDemo'); const dataset = Series.readText(path, ''); let split = dataset.split('"options":'); - if (split.length <= 1) { throw 'Bad readLargeGraphDemo format: options not found.'; }; + if (split.length <= 1) { throw 'Bad readLargeGraphDemo format: options not found.'; } const toptions = split.gather([1], false); let rest = split.gather([0], false); split = rest.split('"edges":'); @@ -143,5 +139,26 @@ module.exports = { ]); const edges = json_aoa_to_dataframe(tedges, [new Utf8String, new Utf8String]); return {nodes: nodes, edges: edges, tags: tags, clusters: clusters}; + }, + + 
async cacheObject(name, data) { cacheObject(name, data); }, + + async getData(name) { return datasets != null ? datasets[name] : undefined; }, + getDataSync(name) { return datasets != null ? datasets[name] : undefined; }, + + async listDataframes() { return datasets != null ? Object.keys(datasets) : []; }, + + async clearDataframes() { + clearCachedGPUData(); + clearTimeout(timeout); + datasets = null; + }, + + _setPathForTesting(path) { _publicPath = path; }, + publicPath() { return _publicPath; }, + + async readCSV(options) { + const result = await DataFrame.readCSV(options); + return result; } } diff --git a/modules/demo/api-server/util/schema.js b/modules/demo/api-server/util/schema.js index 1756ded74..703acfa0c 100644 --- a/modules/demo/api-server/util/schema.js +++ b/modules/demo/api-server/util/schema.js @@ -15,6 +15,36 @@ 'use strict'; const schema = { + gpu: { + description: 'An abstract interface to the node-rapids api, supported by a server.', + schema: { + '/': { + method: 'The name of the method to apply to gpu_cache data.', + caller: 'Either an object that has been stored in the gpu_cache or a static module name.', + arguments: 'Correctly specified arguments to the gpu_cache method.', + result: 'Either a result code specifying success or failure or an arrow data buffer.', + }, + 'DataFrame/readCSV': { + method: 'POST', + params: { + filename: 'The name of the file, stored in the server\'s public/ folder, of the csv file.' + }, + result: `Causes the node-rapids backend to attempt to load the csv file specified + by :filename. The GPU will attempt to parse the CSV file asynchronously and will + return OK/ Not Found/ or Fail based on the file status.`, + returns: '500/404/200' + }, + 'get_column/:table/:column': { + method: 'GET', + params: { + ':table': + 'The filename of a previously loaded dataset, for example with `DataFrame/readCSV`', + ':column': 'A valid column name in a DataFrame that has been previously loaded.' 
+ }, + returns: 'An Arrow `RecordBatchStreamWriter` stream of the columnar data.' + } + } + }, graphology: { description: 'The graphology api provides GPU acceleration of graphology datasets.', schema: { @@ -49,6 +79,72 @@ const schema = { nodes: {bounds: {returns: 'Returns the x and y bounds to be used in rendering.'}}, edges: {return: 'Returns the existing edges table after applying normalization for sigma.js'} } + }, + particles: { + description: + 'The API responsible for parsing particles CSV files for the point-budget API demo.', + schema: { + 'get_shader_column/:table/:xmin/:xmax/:ymin/:ymax': { + method: 'POST', + params: { + ':table': 'The name of the CSV file previously loaded with `DataFrame/readCSV`', + 'xmin (optional)': 'Don\'t return results outside of xmin', + 'xmax (optional)': 'Don\'t return results outside of xmax', + 'ymin (optional)': 'Don\'t return results outside of ymin', + 'ymax (optional)': 'Don\'t return results outside of ymax' + }, + result: `Returns the Longitude and Latitude columns of a table that has been read previously + with DataFrame/readCSV. The Longitude and Latitude will be packed into a a single column and + interleaved.`, + return: 'Returns an Arrow stream of lon/lat values as a Table containing a single column.' + } + } + }, + quadtree: { + description: 'The API responsible for making quadtree API server requests.', + schema: { + 'create/:table': { + method: 'POST', + params: {':table': 'The name of the CSV file previously loaded with `DataFrame/readCSV`'}, + result: 'Create a quadtree from the table specified by :table.', + return: { + '200': 'Quadtree created successfully.', + '404': 'Table not found.', + '500': 'Quadtree creation failed.' 
+ } + }, + 'set_polygons': { + method: 'POST', + params: { + 'name': 'The name of the polygon set.', + 'points': 'A list of points that define the polygons.', + 'polygon_offset': 'The GeoArrow offset defining the polygons in the points list.', + 'ring_offset': + 'The GeoArrow offset defining the rings of the polygons in the points list.', + }, + result: 'Create a polygon set from the points specified.', + return: {'200': 'Polygon set created successfully.', '500': 'Polygon set creation failed.'} + }, + 'get_points/:quadtree/:polygon': { + method: 'GET', + params: { + ':quadtree': 'The name of the quadtree previously created with `quadtree/create`', + ':polygon': 'The name of the polygon set previously created with `quadtree/set_polygons`' + }, + result: 'Returns the points that are contained within the polygons specified by :polygon.', + return: 'Returns an Arrow stream of points that are contained within the polygons.' + }, + ':quadtree/:polygon/count': { + method: 'GET', + params: { + ':quadtree': 'The name of the quadtree previously created with `quadtree/create`', + ':polygon': 'The name of the polygon set previously created with `quadtree/set_polygons`' + }, + result: + 'Returns the number of points that are contained within the polygons specified by :polygon.', + return: {count: 'The number of points that are contained within the polygons.'} + } + } } };