diff --git a/.gitignore b/.gitignore
index b6e4761..c07ba7d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,3 +1,10 @@
+# Test artifacts
+pdgpoints/testdata/lp_jumps.laz
+pdgpoints/testdata/tileset.json
+pdgpoints/testdata/*.txt
+pdgpoints/testdata/*.zip
+pdgpoints/testdata/*/
+
 # Byte-compiled / optimized / DLL files
 __pycache__/
 *.py[cod]
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..a34a071
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,3 @@
+include pdgpoints/bin/*
+include pdgpoints/testdata/*
+include pdgpoints/log/*
\ No newline at end of file
diff --git a/README.md b/README.md
index a0ba14c..1746763 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,139 @@
 # viz-points
-Pythonpackage for post-processing point-cloud data for 3D visualization
+Python package for post-processing point-cloud data for 3D visualization
+
+## Repository contents
+
+- [notebooks/](notebooks/) contains the simplified and annotated Jupyter Notebook version of the LiDAR processing workflow
+- [pdgpoints/](pdgpoints/) contains the application code comprising the library
+
+## Installation
+
+### Hardware requirements
+This software requires an x86_64 architecture and a Linux environment.
+
+### Software requirements
+- [pdal](https://github.com/PDAL/python) (the pip installation may not build; if so, use [anaconda](https://anaconda.org/) or [miniconda](https://repo.anaconda.com/miniconda/))
+- [py3dtiles](https://gitlab.com/oslandia/py3dtiles) (Oslandia versions after [522ce61a](https://gitlab.com/Oslandia/py3dtiles/-/blob/522ce61a0c2cbeb496ba4862e14477bb941b23a3/py3dtiles/merger.py))
+- rapidlasso [las2las](https://rapidlasso.com/lastools/las2las/) and [lasinfo](https://rapidlasso.com/lastools/lasinfo/) post-November 2022 (rapidlasso [precompiled Windows](https://github.com/LAStools/LAStools/blob/master/README.md#links) or included [Linux binary](https://rapidlasso.de/release-of-lastoolslinux/))
+
+Visualization requirements:
+- A tool that can display 3dtiles data, such as [Cesium](https://cesium.com)
+
+### Unix installation example
+
+Remember to set up and activate your virtual environment before proceeding.
+
+```bash
+# use conda to set up environment
+conda create -n viz-points python=3.9
+conda activate viz-points
+conda install -c conda-forge pdal
+# get and install this software
+git clone https://github.com/PermafrostDiscoveryGateway/viz-points.git
+cd viz-points
+# checkout the development branch if necessary
+git checkout develop
+pip install .
+# test your installation
+tilepoints-test
+```
+
+## Usage
+
+This software is designed to be used either as a command line tool or as a Python package.
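+
+Both interfaces are detailed below; as a quick, minimal sketch (assuming the default options suit your data), the simplest invocation is:
+
+```bash
+tilepoints -f /path/to/file.laz
+```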
+
+### Command line usage
+
+**Command syntax:**
+```
+tilepoints [ OPTIONS ] -f /path/to/file.las
+```
+
+**Required argument:**
+```
+    -f file.las | --file=/path/to/file.las
+            specify the path to a LAS or LAZ point cloud file
+```
+
+**Options:**
+```
+    -h | --help
+            display the help message
+    -v | --verbose
+            display more informational messages
+    -c | --copy_I_to_RGB
+            copy intensity values to RGB channels
+    -m | --merge
+            merge all tilesets in the output folder (./3dtiles)
+    -a | --archive
+            copy original LAS files to a ./archive folder
+    -s X | --rgb_scale=X
+            scale RGB values by a factor of X
+    -z X | --translate_z=X
+            translate Z (elevation) values by X
+```
+
+### Python usage
+
+**Python example:**
+```python
+from pdgpoints.pipeline import Pipeline
+
+p = Pipeline(f='/path/to/file.laz',
+             intensity_to_RGB=True,
+             merge=True,
+             archive=False,
+             rgb_scale=4.0,
+             translate_z=-8.3,
+             verbose=False)
+p.run()
+```
+
+### Visualizing the data in Cesium
+
+You can view the output tiles in a Cesium environment. For steps on how to visualize the tiles with a local Cesium instance, see the [documentation here in pdg-info](https://github.com/julietcohen/pdg-info/blob/main/05_displaying-the-tiles.md#option-1-run-cesium-locally).
+
+![Test dataset](pdgpoints/testdata/lp.png)
+
+More info on the above test dataset [here](pdgpoints/testdata/README.md).
+
+Below is an example of the `cesium.js` file that will display a 3dtiles tileset at `./3dtiles/tileset.json` (you will need your own access token):
+
+
+```javascript
+// Your access token can be found at: https://cesium.com/ion/tokens.
+function start(){
+    Cesium.Ion.defaultAccessToken = "YOUR-TOKEN-HERE"
+
+    const viewer = new Cesium.Viewer('cesiumContainer');
+
+    const imageryLayers = viewer.imageryLayers;
+
+    var tileset = new Cesium.Cesium3DTileset({
+        url: "3dtiles/tileset.json",
+        debugShowBoundingVolume: true,
+        debugShowContentBoundingVolume: false,
+        debugShowGeometricError: false,
+        debugWireframe: true
+    });
+
+    viewer.scene.primitives.add(tileset);
+
+    window.zoom_to_me = function(){
+        viewer.zoomTo(tileset);
+    }
+
+    tileset.readyPromise.then(zoom_to_me).otherwise(error => { console.log(error) });
+}
+
+start()
+```
+
+## Referencing this repository
+
+If you would like to cite this repository, we suggest using something like the following:
+
+```
+Ian M. Nesbitt, Robyn Thiessen-Bock, and Matthew B. Jones (NCEAS). viz-points: A Python package for post-processing point-cloud data for 3D visualization. https://github.com/PermafrostDiscoveryGateway/viz-points.
+```
diff --git a/docsrc/Makefile b/docsrc/Makefile
new file mode 100644
index 0000000..56909cb
--- /dev/null
+++ b/docsrc/Makefile
@@ -0,0 +1,23 @@
+# Minimal makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS    =
+SPHINXBUILD   = sphinx-build
+SOURCEDIR     = .
+BUILDDIR      = _build
+
+# Put it first so that "make" without argument is like "make help".
+help:
+	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
+
+.PHONY: help Makefile
+
+github:
+	@make html
+	@cp -a _build/html/. ../docs
+
+# Catch-all target: route all unknown targets to Sphinx using the new
+# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
+%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file diff --git a/docsrc/conf.py b/docsrc/conf.py new file mode 100644 index 0000000..51a633f --- /dev/null +++ b/docsrc/conf.py @@ -0,0 +1,208 @@ +# -*- coding: utf-8 -*- +# +# Configuration file for the Sphinx documentation builder. +# +# This file does only contain a selection of the most common options. For a +# full list see the documentation: +# http://www.sphinx-doc.org/en/master/config + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +# +import os +import sys +# sys.path.insert(0, os.path.abspath('.')) +sys.path.insert(0, os.path.abspath('../')) +from datetime import datetime + + +# -- Project information ----------------------------------------------------- + +project = 'viz-points' +copyright = '%s, National Center for Ecological Analysis and Synthesis, University of California Santa Barbara (NCEAS-UCSB)' % datetime.now().year +author = 'Ian Nesbitt' + +# The short X.Y version +sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) +from pdgpoints._version import __version__ as version +# The full version, including alpha/beta/rc tags +release = version + + +# -- General configuration --------------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.autosummary', + 'sphinx.ext.intersphinx', + 'sphinx.ext.todo', + 'sphinx.ext.coverage', + 'sphinx.ext.mathjax', + 'sphinx.ext.autosectionlabel', + 'sphinx.ext.githubpages', +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The master toctree document. +master_doc = 'index' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = 'en' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# The name of the Pygments (syntax highlighting) style to use. +#pygments_style = None +pygments_style = 'sphinx' + + +# autosummary +#autosummary_generate = True + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# +html_theme = "sphinx_rtd_theme" +html_logo = '_static/logo.png' + +#html_theme = 'alabaster' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. 
+# +# html_theme_options = {} + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# Custom sidebar templates, must be a dictionary that maps document names +# to template names. +# +# The default sidebars (for documents that don't match any pattern) are +# defined by theme itself. Builtin themes are using these templates by +# default: ``['localtoc.html', 'relations.html', 'sourcelink.html', +# 'searchbox.html']``. +# +# html_sidebars = {} + + +# -- Options for HTMLHelp output --------------------------------------------- + +# Output file base name for HTML help builder. +htmlhelp_basename = 'vizpointsdoc' + + +# -- Options for LaTeX output ------------------------------------------------ + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # + # 'preamble': '', + + # Latex figure (float) alignment + # + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, +# author, documentclass [howto, manual, or own class]). +latex_documents = [ + (master_doc, 'vizpoints.tex', project+' Documentation', + author, 'manual'), +] + + +# -- Options for manual page output ------------------------------------------ + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, project, project+' Documentation', + [author], 1) +] + + +# -- Options for Texinfo output ---------------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, project, project+' Documentation', + author, project, 'Convert lidar data (LAZ, LAS, etc.) to 3dtiles', + 'Miscellaneous'), +] + + +# -- Options for Epub output ------------------------------------------------- + +# Bibliographic Dublin Core info. +epub_title = project + +# The unique identifier of the text. This can be a ISBN number +# or the project homepage. +# +# epub_identifier = '' + +# A unique identification for the text. +# +# epub_uid = '' + +# A list of files that should not be packed into the epub file. +epub_exclude_files = ['search.html'] + + +# -- Extension configuration ------------------------------------------------- + +# -- Options for intersphinx extension --------------------------------------- + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = { + 'python 3': ('https://docs.python.org/3', None), + 'pyproj [stable]': ('https://pyproj4.github.io/pyproj/stable', None), + 'pyegt [stable]': ('https://iannesbitt.github.io/pyegt', None), + } + +# -- Options for todo extension ---------------------------------------------- + +# If true, `todo` and `todoList` produce output, else they produce nothing. 
+todo_include_todos = True
diff --git a/notebooks/README.md b/notebooks/README.md
new file mode 100644
index 0000000..e69de29
diff --git a/notebooks/las-workflow.ipynb b/notebooks/las-workflow.ipynb
new file mode 100644
index 0000000..fa7de05
--- /dev/null
+++ b/notebooks/las-workflow.ipynb
@@ -0,0 +1,391 @@
+{
+ "cells": [
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Practice running the workflow through 3dtiles tiling\n",
+    "\n",
+    "- use environment with `viz-staging` and `viz-raster` installed\n",
+    "- after running through these steps in chunks in this notebook, it's a great idea to transfer the code to a script and run it as a `tmux` session"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# directory in which to look for data (change this to your needs!)\n",
+    "DATA_DIR = '/home/shares/drp/pointcloud'"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "#### Imports"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 1,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "# input data import\n",
+    "from pathlib import Path\n",
+    "\n",
+    "# py3dtiles\n",
+    "from py3dtiles.tileset.utils import TileContentReader\n",
+    "\n",
+    "# interaction with the system\n",
+    "import subprocess\n",
+    "\n",
+    "# logging\n",
+    "from datetime import datetime, timedelta\n",
+    "import logging\n",
+    "import logging.handlers  # WatchedFileHandler lives in the handlers submodule\n",
+    "import os"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Define the logging configuration\n",
+    "\n",
+    "This prints logging statements to a file specified by the path in the config. Change the filepath as needed. There will be many logging statements written to that file. It's helpful to ctrl + f for certain logged statements when troubleshooting. For example, if fewer files were staged than expected, you can search for \"error\" or \"failed\". If you are debugging a silent error and suspect that the issue has something to do with the order in which input files are processed, you can search for the input filenames to determine which was staged first. In between runs, it's a good idea to delete the log from the past run, rename it, or archive it elsewhere so the next run's log does not append to the same log file. "
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 2,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "DATE_FMT = '%Y-%m-%dT%H:%M:%S'\n",
+    "LOG_FMT = \"%(asctime)s:%(levelname)s: %(message)s\" # overrides import\n",
+    "L = logging.getLogger('DRPWorkflow')\n",
+    "L.setLevel(\"INFO\")\n",
+    "handler = logging.handlers.WatchedFileHandler(\n",
+    "    os.environ.get(\"LOGFILE\", os.path.expanduser(\"~/bin/drpworkflow/log/log.log\"))) # <- note: should find this dir programmatically in the future\n",
+    "formatter = logging.Formatter(fmt=LOG_FMT, datefmt=DATE_FMT)\n",
+    "handler.setFormatter(formatter)\n",
+    "L.addHandler(handler)\n",
+    "L.info(\"Logging start.\")"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Import data\n",
+    "\n",
+    "In order to process 1 or 2 files instead of all 3 adjacent files on Wrangle Island, subset the `flist` list of filepaths created below. 
"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 3,
+   "metadata": {},
+   "outputs": [
+    {
+     "data": {
+      "text/plain": [
+       "['/home/shares/drp/pointcloud/Site7.las']"
+      ]
+     },
+     "execution_count": 3,
+     "metadata": {},
+     "output_type": "execute_result"
+    }
+   ],
+   "source": [
+    "base_dir = Path(DATA_DIR)\n",
+    "vlrcorrect_dir = os.path.join(base_dir, 'vlrcorrect')\n",
+    "archive_dir = os.path.join(base_dir, 'archive')\n",
+    "out_dir = os.path.join(base_dir, '3dtiles')\n",
+    "for d in [vlrcorrect_dir, archive_dir, out_dir]:\n",
+    "    L.info('Creating dir %s' % (d))\n",
+    "    os.makedirs(d, exist_ok=True)\n",
+    "filename = '*.las'\n",
+    "# as_posix() gives each matched .las path as a string with forward slashes\n",
+    "# (a '**' glob would also match subdirs; we only want files directly in base_dir)\n",
+    "flist = [p.as_posix() for p in base_dir.glob('./' + filename)]\n",
+    "L.info('File list: %s' % (flist))\n",
+    "flist"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Use las2las to write new VLR\n",
+    "\n",
+    "QT Modeler and other LiDAR processing software sometimes output VLR (variable length record) header information that does not adhere exactly to LAS standards, and the data abstraction libraries like `PDAL` that undergird the tools we use on the Python side don't always play nicely with those headers. Accordingly, it is sometimes necessary to use software like `lastools` that handles malformed headers gracefully to write headers that `PDAL` will accept. Here we use [`las2las`](https://downloads.rapidlasso.de/las2las_README.txt) to rewrite input LAS files with the correct VLR format."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 4,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "las2lasstart = datetime.now()\n",
+    "L.info('Using las2las to resolve any issues with malformed VLR (variable length record) from QT Modeler... 
(step 1 of 3)')\n", + "i = 0\n", + "for f in flist:\n", + " i += 1\n", + " L.info('Processing %s (%s of %s)' % (f, i, len(flist)))\n", + " bn = os.path.basename(f)\n", + " vlrcn = os.path.join(vlrcorrect_dir, bn)\n", + " an = os.path.join(archive_dir, bn)\n", + " subprocess.run([\n", + " '../bin/las2las',\n", + " '-i',\n", + " f,\n", + " '-epsg', '4326',\n", + " '-wgs84',\n", + " '-meter',\n", + " '-target_ecef',\n", + " '-target_epsg', '4326',\n", + " '-o',\n", + " vlrcn\n", + " ])\n", + " # move the file to the archive\n", + " L.info('Archiving to %s' % (an))\n", + " os.replace(src=f, dst=an)\n", + "las2lastime = (datetime.now() - las2lasstart).seconds/60\n", + "L.info('Finished las2las rewrite (%.1f min)' % (las2lastime))" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Generate new file list with VLR-corrected files" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['/home/shares/drp/pointcloud/vlrcorrect/Site7.las']" + ] + }, + "execution_count": 5, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "# generate new file list\n", + "flist = [p.as_posix() for p in Path(vlrcorrect_dir).glob('./' + filename)]\n", + "flist" + ] + }, + { + "attachments": {}, + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Use py3dtiles to tile point cloud data into web-ready chunks\n", + "\n", + "3dtiles is a format that breaks large datasets up into manageable chunks that can be easily downloaded and displayed at varying zoom levels. This conversion process computes and executes the tiling, then writes outputs." + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + " 100.0 % in 9 sec [est. 
time left: 0 sec]]]]]"
+     ]
+    }
+   ],
+   "source": [
+    "processstart = datetime.now()\n",
+    "L.info('Starting tiling process for %s file(s) (step 2 of 3)' % (len(flist)))\n",
+    "i = 0\n",
+    "for f in flist:\n",
+    "    i += 1\n",
+    "    tilestart = datetime.now()\n",
+    "    L.info('Processing %s (%s of %s)' % (f, i, len(flist)))\n",
+    "    L.info('Creating tile directory')\n",
+    "    fndir = os.path.join(out_dir, os.path.splitext(os.path.basename(f))[0])\n",
+    "    subprocess.run([\n",
+    "        'py3dtiles',\n",
+    "        'convert',\n",
+    "        f,\n",
+    "        '--out',\n",
+    "        fndir,\n",
+    "        '--overwrite'\n",
+    "    ])\n",
+    "    tiletime = (datetime.now() - tilestart).seconds/60\n",
+    "    L.info('Done (%.1f min)' % (tiletime))\n",
+    "processtime = (datetime.now() - processstart).seconds/60\n",
+    "L.info('Finished tiling (%.1f min)' % (processtime))"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "### Merge outputs of tiling runs on multiple input files\n",
+    "\n",
+    "This process takes note of the outputs of various tiling runs (put into subdirectories) and gathers the relative paths of all those outputs into one tileset JSON file.\n",
+    "For example, consider this directory tree:\n",
+    "\n",
+    "```\n",
+    "Sites_3dtiles\n",
+    "├── Site4\n",
+    "│   ├── r0.pnts\n",
+    "│   ├── r1.pnts\n",
+    "│   ├── r2.pnts\n",
+    "│   ├── ...\n",
+    "│   ├── r.pnts\n",
+    "│   └── tileset.json\n",
+    "├── Site5\n",
+    "│   ├── r0.pnts\n",
+    "│   ├── r1.pnts\n",
+    "│   ├── r2.pnts\n",
+    "│   ├── ...\n",
+    "│   ├── r.pnts\n",
+    "│   └── tileset.json\n",
+    "└── tileset.json\n",
+    "```\n",
+    "Each subdirectory's `tileset.json` file would contain pointers to and metadata for each of the tiles in the subdirectory, and the master `tileset.json` would point to each of those subdirectory tilesets in turn.\n",
+    "\n",
+    "The following code is designed to create the master tileset which points to all the various subdirectory tilesets."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": 7,
+   "metadata": {},
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "Found 1 tilesets to merge\n",
+      "------------------------\n"
+     ]
+    }
+   ],
+   "source": [
+    "L.info('Starting merge process in %s (step 3 of 3)' % (out_dir))\n",
+    "mergestart = datetime.now()\n",
+    "subprocess.run([\n",
+    "    'py3dtiles',\n",
+    "    'merge',\n",
+    "    '--overwrite',\n",
+    "    '--verbose',\n",
+    "    out_dir\n",
+    "])\n",
+    "mergetime = (datetime.now() - mergestart).seconds/60\n",
+    "L.info('Finished merge (%.1f min)' % (mergetime))"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## Visualizing the data in Cesium\n",
+    "\n",
+    "You can view the output tiles on a Cesium basemap! 
For steps on how to visualize the tiles with a local Cesium instance, see [documentation here in pdg-info](https://github.com/julietcohen/pdg-info/blob/main/05_displaying-the-tiles.md#option-1-run-cesium-locally).\n",
+    "\n",
+    "Here is an example of the `cesium.js` file that ended up working for me (you will need your own access token):\n",
+    "\n",
+    "\n",
+    "```javascript\n",
+    "// Your access token can be found at: https://cesium.com/ion/tokens.\n",
+    "function start(){\n",
+    "\n",
+    "    Cesium.Ion.defaultAccessToken = \"YOUR-TOKEN-HERE\"\n",
+    "\n",
+    "    const viewer = new Cesium.Viewer('cesiumContainer');\n",
+    "\n",
+    "    const imageryLayers = viewer.imageryLayers;\n",
+    "\n",
+    "    var tileset = new Cesium.Cesium3DTileset({\n",
+    "        url: \"3dtiles/tileset.json\",\n",
+    "        debugShowBoundingVolume: true,\n",
+    "        debugShowContentBoundingVolume: false,\n",
+    "        debugShowGeometricError: false,\n",
+    "        debugWireframe: true\n",
+    "    });\n",
+    "\n",
+    "    viewer.scene.primitives.add(tileset);\n",
+    "\n",
+    "    window.zoom_to_me = function(){\n",
+    "        viewer.zoomTo(tileset);\n",
+    "    }\n",
+    "\n",
+    "    tileset.readyPromise.then(zoom_to_me).otherwise(error => { console.log(error) });\n",
+    "}\n",
+    "\n",
+    "start()\n",
+    "```"
+   ]
+  },
+  {
+   "attachments": {},
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "## More visualization options\n",
+    "\n",
+    "The above `cesium.js` code will display the LiDAR data with its RGB values. However, if the dataset doesn't have RGB values assigned, the points in the dataset will display black until you colorize them. There's some Cesium [learning material](https://cesium.com/learn/cesiumjs-learn/cesiumjs-3d-tiles-styling/#style-point-clouds) about point cloud styling, but here are a couple of other options to consider:\n",
+    "\n",
+    "- You can (sort of) [color points in discrete elevation 
bands](https://sandcastle.cesium.com/#c=tVdrb9s2FP0rhFegMmbLevmh1AnWeq8A3VLMbvdhHgpKom0iEimQlF0v8H/fJSk5cpygXdcECWxe8t5z7pPMFgu0pWRHBLpEjOzQjEhaFe4HI3OWndSsZ5wpTBkRy04P3S0ZQooIAZJ3gm9pRsRFo5gKghX5k4s8W9gjTre3ZIfuqyVbsi3AKZoTSRTgWWBXpoQRtxS0oIpuiXRxljkao8XHfoQ/LqyyYzggVIn8iHzN2R9E8kqkxF0JXryWcPA6c4JJHA01B61QcsrULOdVNt/gjLL1BapNIYSVIqzCinJ2gZSoSK/ZKfAnWlTF6/aBoN49mM9Dd8msi7V7LsQh20N4CiqJqzaEOauKpVrX6VrMwQDN1T4nKMGSZIgztCF0vVFIs0ckz2kpOc3qo4sNlQh+MUO4LAUHRoYJCDK0wbCBVhCvUpCUSi1P8RZSIWt1hHwXbBC0qgSwEQjv8B50TEC0XQMKO8f8QFYgycihTCe7kIivUM7ZmqoqIz2UA7r+1jVKBRcEUcZwWgnIv5HV3iQk5QW55xG46N3N/HpxffP7x9dv5jdv3y9+Mo6hMOgnVKEtzittzBiRkCZgsdvQdKNPMa6QrFYrmlLg13J3xaG0dIxkinNi0Y6hA0I7jsBjiRRHtID4bYk9jgVph2jGi7ICB0oi+jY2x0Q0DnHLbPbuvYl9gW+BrUJ4i2mOE8gnQBjqJrkmM0lFc9UHl6DGBE0AoB2OBtPal43rOhHCQEhIpTUExIGZ2mv1VjPNbK4um7Wb8Irp8p6XkGvi2ly+arRkJVY4JTMsFF8LXEJwQbdptZbUNJIWwBZmzgla15h7xJRbx+kSea73GKYx9gSg4vdwj9juHu1lVCrMUrLgc3vs1KCxELoFXjNTps5DfNut6Bgxm63Lp4aO6dRm7GRkBcNQtmaHdfkCLTs5YWu1cV7cnRX5oYv6cAB9f869PUoQSnnORct4yiGXutcB8a9GiOBr58WdBT6gKYog2no8w8zW+s5LLjBbk5fdZefv3pNaw4dae6h3vvuM1uihVk6LzyGNH+qke8we0dGT9+Qc9L6yxptz9ZeDnbyvjtcLNNTP+V73H0aMQklAlwt9y+jZpVsqpWrvwjVkL54UF0Rgd5XvF/w+tVJRVs/4di0ca8qxB/1gNPYmvjvy42E4mkzC2ot+FMTeOIrc2B/74SQK43ojGoZeHPmuF/lDP479WIvtvcSFHmY1aEOEwmj7PA0A9NxxOJz4oReNh74Xe5NjPD13FMDlB/KRF4aTYBS0toIoHof+MI7C4RB2/cBuNTdlVX4BsBeP41EcRqNJFAVh4I97rc1oBO6PRwA+CoajY4A09mTiRcDYDzxgF7eBbQ9k+hYxvnt1cju9ztS06FVj5QcY41wo/QZwXHegSFHCnUTkIKnSW2joVEo7LfTPd4rzPIGxcXdfawlOb9dCD8oLJNYJdqKgh5o/4HivDe8GeJOY10JUfmqJEy7gduoLeEpU8nTzcAZNGUz5NgEoTkXhturjnK7B14JmWU7OUfuKQzKCE+RmK+FK8eJ09xwaRrK5RlvgK3jS9Xf12Ep4nj1Unw7a8Z5mdItodvnIixClOZYSdlZVns/pP9C9V9MBnD9Tzbl5c92A4zne62Mb/+qtFbquOx3A8nHN2hFQaVhOlb5rr+4dmqqEZ/uWQIvEyVpLsiNdGxMwOdMCCk8K+6Ba7EsipwOVndoaPDD2qPGrX0xBnWnbzaktAQUAOpAbkt4m/BNEMMMKw9uHZY2YQE3a2jTB/Doub/kOfSBrYifLE5xOJSD7Txzhqvi4PUK00nMk+lXMfyPQUMVzky8MynPw/xXa6rnZbwDjObi/gbeqbkj5TYo4aaz9jzq+0f+wDN6ztO5T8m36i2uzX0ALlieTBdat0dOeV/8C)\n", + "- Color points by [proximity to a point in xyz space](https://sandcastle.cesium.com/#c=dVTbjts2EP0VwghgCVBp6i4lziKOu0WMul1j7TQvAgKaorNsaNIgKW3dhf+9pC5OnSB6sDzDmXNm5gw1m20kEwYsuWxqsD+DjZIHCDbUUAUWnFMRgKXkzXHPMPgoWEuVZuYMHuVeGkY0WOM9BFuChWDiiwNYcPo3FrWSYKckIZIzYE3wBzaUgSWTBCvOKKxEixVoGX22RG+BoM9gSTVrjvCvzudNSWcupTCYCaqmAXipBLCPLU1Zl620ZbXNfj1mEkUtyyepeL3rYzy/Ehf/TSV6uidbCf+eb2vTqNieMKH3LRXmQx/k9bVBTaig0DbYYu2QBgyoqVmJU2MWxDApvEMj+j/UYfhjrY7VdmwsFRaW9wb0xMjXjbTzvObB02A6JpdPpNCSU8jlF++K4w4vwc/q351PFK7vf9t9Xq5Xy9+/da/NmVNbw1BaTQ92rtqOb3B0TqYNFoRa75RwfDx5o8d79bJ52K52q4c/Py/ebx/WH3f3toiWkthLEhRmZQmTMInLMgyTAERRisIIZqjMwyxMUBqAJC3TJA5hEucoTKOi9H0wA1EKUQCQ+wkh8qd9LZdg7J9Lp/D0yP7xOsOrJmfKuXyuJn4ARpeidWe/ehnrvTioy7V5wzi1kt1K37/iX3f9ofcCGsWv67SS4pFq2ShC4UHJ40I7zWsvzJIo9EG3VwMsHGf7E/CtO/a6IJd1uwWKHa3mLdUQ17U3IA6y3e7gkSrsFs9dEe8qozZMYLc0r2/ox23p5cmTBGaJFSiNR3lSK0WI8sIKhtJo0CdCMM6TvEiSOPYHEaRidq9+pPhAcW1v/YYZ8vQoOfdSS5HlcVygFMVlnuYB+AXBKM8ylIdpXMRlUWQByGBUhJa0yNMEIVSk327pJJjMuzHd9dTv2PEklXG6eBDODD2euL3jerZvyFc7d6K7SzmfjUnzmrWA1W+ryXffj2oC7EJrbU8ODedb9i+tJnfzmY2/SeOya+rBfuk4PruQp/Bu3TshhPOZNX/MMlLyPVb/Q/wP)\n", + "- And in the future, Cesium plans on [making elevation accessible to a custom shader](https://github.com/CesiumGS/cesium/issues/9735). 
There is also a more detailed discussion of this in [CesiumGS/3d-tiles#603](https://github.com/CesiumGS/3d-tiles/issues/603).\n"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "clipToFP_PR",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.10.9"
+  },
+  "orig_nbformat": 4
+ },
+ "nbformat": 4,
+ "nbformat_minor": 2
+}
diff --git a/pdgpoints/README.md b/pdgpoints/README.md
new file mode 100644
index 0000000..74ed258
--- /dev/null
+++ b/pdgpoints/README.md
@@ -0,0 +1,2 @@
+# pdgpoints
+Point cloud processing workflow
\ No newline at end of file
diff --git a/pdgpoints/__init__.py b/pdgpoints/__init__.py
new file mode 100644
index 0000000..0508cb8
--- /dev/null
+++ b/pdgpoints/__init__.py
@@ -0,0 +1,4 @@
+from logging.config import dictConfig
+from .defs import LOGGING_CONFIG
+
+dictConfig(LOGGING_CONFIG)
diff --git a/pdgpoints/_version.py b/pdgpoints/_version.py
new file mode 100644
index 0000000..a0235ce
--- /dev/null
+++ b/pdgpoints/_version.py
@@ -0,0 +1 @@
+__version__ = "0.0.2"
\ No newline at end of file
diff --git a/pdgpoints/bin/README.md b/pdgpoints/bin/README.md
new file mode 100644
index 0000000..71b47af
--- /dev/null
+++ b/pdgpoints/bin/README.md
@@ -0,0 +1,15 @@
+# lastools binaries
+
+The code within this directory was created by rapidlasso GmbH
+as part of LAStools, and is distributed under the LGPL license.
+It may not be used for commercial gain but may be redistributed
+freely as an open-source library. See LAStools
+[License](https://github.com/LAStools/LAStools/blob/master/LICENSE.txt) and
+[Copying](https://github.com/LAStools/LAStools/blob/master/COPYING.txt).
+
+## Note
+
+The binary in this directory was compiled on Ubuntu x86_64.
+It should work on most Unix variants but may not work on
+all architectures. Please recompile if you have issues.
+See https://github.com/LAStools/LAStools for details.
\ No newline at end of file
diff --git a/pdgpoints/bin/las2las b/pdgpoints/bin/las2las
new file mode 100755
index 0000000..9b43361
Binary files /dev/null and b/pdgpoints/bin/las2las differ
diff --git a/pdgpoints/bin/las2las_README.md b/pdgpoints/bin/las2las_README.md
new file mode 100644
index 0000000..b93b549
--- /dev/null
+++ b/pdgpoints/bin/las2las_README.md
@@ -0,0 +1,938 @@
+# las2las
+
+reads and writes LIDAR data in LAS/LAZ/ASCII format to filter,
+transform, project, thin, or otherwise modify its contents.
+
+Sometimes it is not necessary to use las2las prior to other
+lastools, because most arguments can be used by the other
+tools as well.
+
+Examples are keeping only those points that are within a
+rectangle '-keep_xy 10 10 20 20' or points that are between
+a certain height '-keep_z 10 100', or dropping points that
+are a certain return '-drop_return 2', that have a scan angle
+above some threshold '-drop_scan_angle_above 5', or below some
+intensity '-drop_intensity_below 15'. Sometimes points are far
+outside the bounding box (corrupted files) and it is handy to
+remove them with '-clip_to_bounding_box'.
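+
+For instance, several of these filters can be combined in a single
+call; a quick illustrative sketch using only the options named above:
+
+    las2las -i in.laz -o out.laz -keep_xy 10 10 20 20 -keep_z 10 100 -drop_intensity_below 15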
+
+It is also possible to add missing projection information to
+the LAS/LAZ file or to reproject (using the same ellipsoid),
+for example from latitude/longitude to UTM or the stateplane
+of Ohio_North or to Earth-centered Earth-fixed (ECEF). You can
+also use common EPSG codes with '-epsg 32754'. For LAS 1.4 it
+is important to '-set_ogc_wkt', which translates the GeoTIFF
+keys into a CRS string in the OGC WKT format and adds it as
+the payload of the corresponding VLR. For LAS 1.4 files you
+can use '-set_ogc_wkt_in_evlr' to put that string into the
+EVLR instead of the VLR (but we don't recommend that).
+
+Another typical use is to extract only first (-first_only) or only
+last (-last_only) returns. Extracting the first return is the
+same as dropping all others (e.g. -drop_return 2 3 4 5).
+
+Or one can extract a subsequence of 1000 points (-subseq 540 1000)
+which will start at point 540.
+
+Finally one can also only keep or drop certain classifications.
+The option -keep_class 2 3 will keep only those points that are
+of classification 2 or 3 and the option -drop_class 2 3 will drop
+only those points. For all options run 'las2las -h'.
+
+
+## Examples
+
+    las2las -i s1885565.laz -o out.las -sp83 OH_S -feet -elevation_feet
+
+Adding the projection information to the file 's1885565.laz'. This
+will not modify the points but merely change the projection VLR in the
+header to contain these four geokeys:
+
+    GeoKeyDirectoryTag version 1.1.0 number of keys 4
+      - key 1024 value_offset 1 - GTModelTypeGeoKey: ModelTypeProjected
+      - key 3072 value_offset 32123 - ProjectedCSTypeGeoKey: PCS_NAD83_Ohio_South
+      - key 3076 value_offset 9002 - ProjLinearUnitsGeoKey: Linear_Foot
+      - key 4099 value_offset 9002 - VerticalUnitsGeoKey: Linear_Foot
+
+
+    las2las -i s1885565.laz -o out.las -sp83 OH_S -feet -elevation_feet -target_utm auto
+
+Reprojects the points from the Ohio_South NAD83 state plane with all units
+in feet to NAD83 UTM coordinates with all units in meter and sets these four
+geokeys as the projection information:
+
+    GeoKeyDirectoryTag version 1.1.0 number of keys 4
+      - key 1024 value_offset 1 - GTModelTypeGeoKey: ModelTypeProjected
+      - key 3072 value_offset 26917 - ProjectedCSTypeGeoKey: PCS_NAD83_UTM_zone_17N
+      - key 3076 value_offset 9001 - ProjLinearUnitsGeoKey: Linear_Meter
+      - key 4099 value_offset 9001 - VerticalUnitsGeoKey: Linear_Meter
+
+
+    las2las -i s1885565.laz -o out.las -sp83 OH_S -feet -elevation_feet -target_longlat
+
+Reprojects the points from the Ohio_South NAD83 state plane with all units
+in feet to geographic coordinates with x being longitude and y latitude and
+sets these three geokeys as the projection information:
+
+    GeoKeyDirectoryTag version 1.1.0 number of keys 3
+      - key 1024 value_offset 2 - GTModelTypeGeoKey: ModelTypeGeographic
+      - key 2048 value_offset 4269 - GeographicTypeGeoKey: GCS_NAD83
+      - key 4099 value_offset 9001 - VerticalUnitsGeoKey: Linear_Meter
+
+
+    las2las -i s1885565.laz -o out.las -sp83 OH_S -feet -elevation_feet -target_sp83 OH_N -target_survey_feet -target_elevation_survey_feet
+    las2las -i TO_core_last_zoom.laz -o out.laz -utm 17T
+    las2las -i TO_core_last_zoom.laz -o out.laz -utm 17T -target_latlong
+
+other variations of adding / changing projection information.
+
+
+    las2las -i *.las -last_only
+
+processes all LAS files that match *.las and stores only the last returns
+to a corresponding LAS file called *_1.las (an added '_1' in the name).
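+
+    las2las -i *.las -olaz -cores 4
+
+a related batch pattern (sketched here for illustration): compresses all
+LAS files that match *.las to corresponding *_1.laz files, processing four
+inputs in parallel via the '-cores' option listed below.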
+
+
+    las2las -i *.las -olaz -keep_tile 630000 4830000 10000
+
+keeps a 10000 by 10000 tile with a lower left coordinate of x=630000
+and y=4830000 out of all LAS files that match *.las and stores each as a
+compressed LAZ file *_1.laz (an added '_1' in the name).
+
+
+    las2las -i *.txt -iparse xyztiarn -keep_scan_angle -15 15
+
+processes all ASCII files that match *.txt, parses them with "xyztiarn",
+keeps all points whose scan angle is between -15 and 15, and stores them
+to a corresponding LAS file called *_1.las (an added '_1' in the name).
+
+
+    las2las -i in.las -o out.las -keep_xy 630250 4834500 630500 4834750
+
+keeps only points of in.las whose double-precision coordinates fall inside
+the rectangle (630250,4834500) to (630500,4834750) and stores these points
+to out.las.
+
+
+    las2las -lof file_list.txt -merged -o out.laz -keep_circle 630000 4850000 100
+
+keeps only those points from all files listed in the list of files file_list.txt
+whose double-precision coordinates fall into the circle centered at 630000 4850000
+with radius 100 and stores these points compressed to out.laz.
+
+
+    las2las -i in.las -o out.las -keep_z 10 100
+
+keeps points of in.las whose double-precision elevations fall inside the
+range 10 to 100 and stores these points to out.las.
+
+
+    las2las -i in.las -o out.laz -drop_return 1
+
+drops all points of in.las that are designated first returns by
+the value in their return_number field and stores surviving points
+compressed to out.laz.
+
+
+    las2las -i in.laz -o out.las -drop_scan_angle_above 15
+
+drops all points of compressed in.laz whose scan angle is above 15 or
+below -15 and stores surviving points to out.las.
+
+
+    las2las -i in.las -o out.las -drop_intensity_below 1000 -remove_padding
+
+drops all points of in.las whose intensity is below 1000 and stores
+surviving points to out.las. In addition, any user data after
+the LAS header or after the VLR block is stripped from the file.
+
+
+    las2las -i in.laz -o out.laz -last_only
+
+extracts all last return points from compressed in.laz and stores them
+compressed to out.laz.
+
+
+    las2las -i in.las -o out.las -scale_rgb_up
+
+multiplies all rgb values in the file by 256. This is used to scale
+the rgb values from standard unsigned char range (0 ... 255) to the
+unsigned short range (0 ... 65535) used in the LAS format.
+
+
+    las2las -i in.laz -o out.laz -scale_rgb_down
+
+does the opposite with compressed input and output files.
+
+
+    las2las -i in.las -o out.las -subseq 1000 2000
+
+extracts a subsequence of points by skipping the first 1000 points and
+then collecting points until 2000 points have been read.
+
+
+    las2las -i in.las -o out.las -keep_class 2 -keep_class 3
+
+extracts all points classified as 2 or 3 from in.las and stores
+them to out.las.
+
+
+    las2las -i in.las -o out.las -keep_XY 63025000 483450000 63050000 483475000
+
+similar to '-keep_xy' but uses the integer values point.X and point.Y
+that the points are stored with for the checks (and not the double
+precision floating point coordinates they represent). drops all the
+points of in.las that have point.X<63025000 or point.Y<483450000 or
+point.X>63050000 or point.Y>483475000 and stores surviving points to
+out.las (use lasinfo.exe to see the range of point.X and point.Y).
+
+
+    las2las -i in.las -o out.las -keep_Z 1000 4000
+
+similar to '-keep_z' but uses the integer values point.Z that the
+points are stored with for the checks (and not the double-precision
+floating point coordinates they represent).
drops all the points
+of in.las that have point.Z<1000 or point.Z>4000 and stores all
+surviving points to out.las (use lasinfo.exe to see the range of
+point.Z).
+
+
+    las2las -h
+    las2las -i *.las -utm 13N
+    las2las -i *.laz -first_only -olaz
+    las2las -i *.las -drop_return 4 5 -olaz
+    las2las -latlong -target_utm 12T -i in.las -o out.las
+    las2las -i in.laz -target_epsg 2972 -o out.laz
+    las2las -set_point_type 0 -lof file_list.txt -merged -o out.las
+    las2las -remove_vlr 2 -scale_rgb_up -i in.las -o out.las
+    las2las -i in.las -keep_xy 630000 4834500 630500 4835000 -keep_z 10 100 -o out.las
+    las2las -i in.txt -iparse xyzit -keep_circle 630200 4834750 100 -oparse xyzit -o out.txt
+    las2las -i in.las -remove_padding -keep_scan_angle -15 15 -o out.las
+    las2las -i in.las -rescale 0.01 0.01 0.01 -reoffset 0 300000 0 -o out.las
+    las2las -i in.las -set_version 1.2 -keep_gpstime 46.5 47.5 -o out.las
+    las2las -i in.las -drop_intensity_below 10 -olaz -stdout > out.laz
+    las2las -i in.las -last_only -drop_gpstime_below 46.75 -otxt -oparse xyzt -stdout > out.txt
+    las2las -i in.las -remove_all_vlrs -keep_class 2 3 4 -olas -stdout > out.las
+
+
+## las2las specific arguments
+
+-add_attribute [m] [n] [o] [p] [q] [t]: adds a new "extra_byte" attribute of data_type [m] name [n] description [o]; optional: scale[p] offset [q] no_data_value [t]
+-add_empty_vlr [m] [n] [o] : add an empty VLR with user-id [m], record-id [n] and description [o]
+-adjusted_to_week : converts time stamps from Adjusted Standard GPS to GPS week
+-cores [n] : process multiple inputs on [n] cores in parallel
+-crop_to_bb : removes points that fall outside the bounding box specified in the LAS header
+-crop_to_bounding_box : removes points that fall outside the bounding box specified in the LAS header
+-dont_remove_empty_files : do not remove files that have zero points remaining from disk
+-elevation_feet : use feet for elevation
+-feet : use feet
+-force : force a GPS week conversion even if conversion is suspect.
+-load_vlrs : loads all VLRs from a file called vlrs.vlr and adds them to each processed file +-load_ogc_wkt [f] : loads the first single-string from file [f] and puts it into the place of the OGC WKT +-move_evlrs_to_vlrs : move all EVLRs with small enough payload to VLR section +-remove_all_evlrs : remove all EVLRs +-remove_all_vlrs : remove all VLRs +-remove_evlr [n] : remove EVLR with index [n]{0=first} +-remove_evlrs_from_to [m] [n] : remove EVLRs with index [m] to [n]{0=first} +-remove_original_vlr : removes VLR containing original header information created by on-the-fly buffering +-remove_padding : remove user-defined bytes before and after the header +-remove_tiling_vlr : removes VLR containing tiling information created by lastile +-remove_vlr [n] : remove VLR with index [n]{0=first} +-remove_vlrs_from_to [m] [n] : remove VLRs with index [m] to [n]{0=first} +-reoffset [x] [y] [z] : puts a new offset [x] [y] [z] into the header and translates the points accordingly +-rescale [x] [y] [z] : puts a new scale [x] [y] [z] into the header and rescales the points accordingly +-save_vlrs : saves all VLRs to a file called vlrs.vlr so they can be loaded into another file +-set_attribute_offset [m] [n] : set offset of the attribute [m]{0-based} in the extra bytes to [n] +-set_attribute_scale [m] [n] : set scale of the attribute [m]{0-based} in the extra bytes to [n] +-set_classification [n] : set classification to [n] +-set_global_encoding_gps_bit [n] : sets bit in global encoding field specifying Adjusted GPS Standard time stamps +-set_lastiling_buffer_flag [0/1] : sets buffer flag in LAStiling VLR (if it exists) to [0/1] +-set_ogc_wkt [n] : translate GeoTIFF keys [n] into CRS string in OGC WKT format and add it as VLR +-set_ogc_wkt_in_evlr [n] : same as "set_ogc_wkt" but adds [n] as LAS 1.4 EVLR instead. really not recommended!!! +-set_point_data_format [n] : force point type to be [n]{1-10} +-set_point_data_record_length [n] : CAREFUL! 
sets the point data record length field of the LAS header to size [n] without checking whether this will corrupt the file
+-set_point_size [n] : force point size to be [n]
+-set_point_type [n] : force point type to be [n]{1-10}
+-set_version 1.2 : set LAS version number to 1.2
+-set_version_major 1 : set LAS major version number to 1
+-set_version_minor 2 : set LAS minor version number to 2
+-start_at_point [n] : skips all points until point number [n]
+-stop_at_point [n] : omits all points after point number [n]
+-subseq [m] [n] : extract a subsequence, start from [m] using [n] points
+-switch_G_B : switch green and blue value
+-unset_attribute_offset [n] : unsets the offset of attribute [n]{0=first} in the extra bytes
+-unset_attribute_scale [n] : unsets the scale of attribute [n]{0=first} in the extra bytes
+-week_to_adjusted [n] : converts time stamps from GPS week [n] to Adjusted Standard GPS
+
+### Basics
+-cpu64 : start 64 bit executable (instead of default 32 bit executable)
+-fail : fail if license expired or invalid
+-gui : start with files loaded into GUI
+-h : print help output
+-help : print help output
+-v : verbose output (print extra information)
+-verbose : verbose output (print extra information)
+-version : reports this tool's version number
+-very_verbose : very verbose output (print even more information)
+-vv : very verbose output (print even more information)
+
+## Module arguments
+
+### General
+-buffered [n] : define read or write buffer of size [n]{default=262144}
+-chunk_size [n] : set chunk size [n] in number of bytes
+-comma_not_point : use comma instead of point as decimal separator
+-neighbors [n] : set neighbors filename or wildcard [n]
+-neighbors_lof [n] : set neighbors list of files [fnf]
+-stored : use in memory reader
+-unique : remove duplicate points
+
+### Color
+-clamp_RGB_to_8bit : limit RGB values to 8 bit (otherwise: 16 bit)
+-copy_B_into_NIR : copy blue color value into NearInfraRed value
+-copy_B_into_intensity : copy blue color value to intensity
+-copy_B_into_register [n] : copy blue color value into register [n]
+-copy_G_into_NIR : copy green color value into NearInfraRed value
+-copy_G_into_intensity : copy green color value to intensity
+-copy_G_into_register [n] : copy green color value into register [n]
+-copy_NIR_into_intensity : copy NIR into intensity
+-copy_NIR_into_register [n] : copy NearInfraRed value into register [n]
+-copy_RGB_into_intensity : copy weighted RGB value to intensity
+-copy_R_into_NIR : copy red color value into NearInfraRed value
+-copy_R_into_intensity : copy red color value to intensity
+-copy_R_into_register [n] : copy red color value into register [n]
+-copy_attribute_into_B [n] : copy attribute [n] value into blue
+-copy_attribute_into_G [n] : copy attribute [n] value into green
+-copy_attribute_into_NIR [n] : copy attribute [n] value into NIR (NearInfraRed)
+-copy_attribute_into_R [n] : copy attribute [n] value into red
+-copy_intensity_into_NIR : copy intensity into NIR (NearInfraRed) value
+-copy_register_into_B [n] : copy register [n] into blue color value
+-copy_register_into_G [n] : copy register [n] into green color value
+-copy_register_into_I [n] : copy register [n] into intensity value
+-copy_register_into_NIR [n] : copy register [n] into NearInfraRed value
+-copy_register_into_R [n] : copy register [n] into red color value
+-drop_RGB_green [min] [max] : drop points with green color value between [min] and [max]
+-drop_RGB_red [min] [max] : drop points with red color value between [min] and [max]
+-force_RGB : force the use of the RGB value even if the point format does not support RGB
+-keep_NDVI_from_CIR [min] [max] : keep NDVI (Normalized Difference Vegetation Index) from CIR between [min] [max]
+-keep_NDVI_green_is_NIR [min] [max] : keep NDVI (Normalized Difference Vegetation Index) where green is NIR between [min] [max]
+-keep_NDVI_intensity_is_NIR [min] [max]: keep NDVI (Normalized Difference Vegetation Index) where intensity is NIR between [min] [max]
+-keep_RGB_blue [m] [n] : keep points with RGB blue color values between [min] [max]
+-keep_RGB_green [min] [max] : keep points with green color value between [min] and [max]
+-keep_RGB_greenness [m] [n] : keep points with RGB greenness values between [min] [max]
+-keep_RGB_nir [m] [n] : keep points with RGB NIR values between [min] [max]
+-keep_RGB_red [min] [max] : keep points with red color value between [min] and [max]
+-map_attribute_into_RGB [a] [fnm] : map attribute [a] by table in file [fnm] to RGB values
+-oscale_rgb [n] : scale output RGB by [n]
+-scale_NIR [n] : scale NearInfraRed value by factor [n]
+-scale_NIR_down : scale NearInfraRed value down by 256
+-scale_NIR_to_16bit : scale 8 bit NearInfraRed value to 16 bit
+-scale_NIR_to_8bit : scale 16 bit NearInfraRed value down to 8 bit
+-scale_NIR_up : scale NearInfraRed value up by 256
+-scale_RGB [r] [g] [b] : scale RGB values by factors in [r][g][b]
+-scale_RGB_down : scale RGB color values down by 256
+-scale_RGB_to_16bit : scale 8 bit color values to 16 bit
+-scale_RGB_to_8bit : scale 16 bit color values down to 8 bit
+-scale_RGB_up : scale RGB values from 8 bit up to 16 bit (multiply with 256)
+-scale_rgb_down : divides all RGB values by 256 (to go from 16 bit to 8 bit numbers)
+-scale_rgb_up : multiplies all RGB values by 256 (to go from 8 bit to 16 bit numbers)
+-set_NIR [n] : set NearInfraRed value to [n]
+-set_RGB [r] [g] [b] : set color to [r] [g] [b]
+-set_RGB_of_class [c] [r] [g] [b] : set RGB values of class [c] to [r][g][b] (8 or 16 bit)
+-switch_RGBI_into_CIR : set R to NIR; G to R; B to G
+-switch_RGB_intensity_into_CIR : set R to intensity; G to R; B to G
+-switch_R_B : switch red and blue color value
+-switch_R_G : switch red and green color value
+
+### Coordinates
+-add_attribute_to_z [n] : add value of attribute [n] to z value
+-add_scaled_attribute_to_z [m] [n] : scale attribute [m] value by [n] and add to z value
+-auto_reoffset : puts a reasonable offset in the header and translates the points accordingly
+-bin_Z_into_point_source [n] : set point source to z/[n]
+-clamp_raw_z [min] [max] : limit raw z values to [min] and [max]
+-clamp_z [min] [max] : limit z values to [min] and [max]
+-clamp_z_above [n] : limit z values to maximal [n]
+-clamp_z_below [n] : limit z values to minimal [n]
+-classify_z_above_as [m] [n] : for z value above [m] set class to [n]
+-classify_z_below_as [m] [n] : for z value below [m] set class to [n]
+-classify_z_between_as [m] [n] [o] : for z value between [m] and [n] set class to [o]
+-copy_attribute_into_x [n] : copy attribute [n] value into x
+-copy_attribute_into_y [n] : copy attribute [n] value into y
+-copy_attribute_into_z [n] : copy attribute [n] value into z
+-copy_intensity_into_z : copy intensity to z value
+-copy_register_into_x [n] : copy register [n] to x value
+-copy_register_into_y [n] : copy register [n] to y value
+-copy_register_into_z [n] : copy register [n] to z value
+-copy_user_data_into_z : copy user data into z
+-copy_z_into_attribute [n] : copy z value into attribute [n] value
+-drop_x 
[m] [n] : drop points with x value between [m] and [n]
+-drop_x_above [n] : drop points with x value above [n]
+-drop_x_below [n] : drop points with x value below [n]
+-drop_xy [x1] [y1] [x2] [y2] : drop points within the [x1] [y1] [x2] [y2] rectangle
+-drop_xyz [x1] [y1] [z1] [x2] [y2] [z2]: drop points within the given cube dimensions
+-drop_y [m] [n] : drop points with y value between [m] and [n]
+-drop_y_above [n] : drop points with y value above [n]
+-drop_y_below [n] : drop points with y value below [n]
+-drop_z [m] [n] : drop points with z value between [m] and [n]
+-drop_z_above [n] : drop points with z value above [n]
+-drop_z_below [n] : drop points with z value below [n]
+-inside [x1] [y1] [x2] [y2] : use only points within the [x1] [y1] [x2] [y2] rectangle
+-inside_circle [x] [y] [r] : keep circle at pos [x] [y] with radius [r]
+-inside_rectangle [x1] [y1] [x2] [y2]: use only points within the [x1] [y1] [x2] [y2] rectangle
+-inside_tile [m] [n] [o] : use only points inside tile at lower-left [m] [n] with size [o]
+-keep_circle [x] [y] [r] : keep circle at pos [x] [y] with radius [r]
+-keep_profile [x1] [y1] [x2] [y2] [w]: keep profile with [x1] [y1] [x2] [y2] [w]
+-keep_tile [x] [y] [size] : keep tile at lower-left [x] [y] with size [size]
+-keep_x [m] [n] : keep points with x value between [m] and [n]
+-keep_xy [x1] [y1] [x2] [y2] : keep points within the [x1] [y1] [x2] [y2] rectangle
+-keep_xyz [x1] [y1] [z1] [x2] [y2] [z2]: keep points within the given cube dimensions
+-keep_y [m] [n] : keep points with y value between [m] and [n]
+-keep_z [m] [n] : keep points with z value between [m] and [n]
+-keep_z_above [n] : keep points with z value above [n]
+-keep_z_below [n] : keep points with z value below [n]
+-rescale_xy [x] [y] : rescale x y by [x] [y]
+-rescale_z [z] : rescale z by [z]
+-rotate_xy [a] [x] [y] : rotate points by [a] degrees, center at [x] [y]
+-rotate_xz [a] [x] [z] : rotate points by [a] degrees, center at [x] [z]
+-rotate_yz [a] [y] [z] : rotate points by [a] degrees, center at [y] [z]
+-scale_x [n] : scale x value by [n]
+-scale_xyz [m] [n] [o] : scale xyz values by [m] [n] [o]
+-scale_y [n] : scale y value by [n]
+-scale_z [n] : scale z value by [n]
+-switch_x_y : exchange x and y value
+-switch_x_z : exchange x and z value
+-switch_y_z : exchange y and z value
+-transform_affine [a],[b],[c],[d] : transform input using affine transformation with [a],[b],[c],[d]
+-transform_helmert [m] [n] [o] : do a helmert transformation with 3 or 7 comma separated parameters [n] ...
+-transform_matrix [r11,r12,r13] [r21,r22,r23] [r31,r32,r33] [tr1,tr2,tr3]: transform input using matrix [r11,r12,r13] [r21,r22,r23] [r31,r32,r33] [tr1,tr2,tr3]
+-translate_raw_x [n] : translate raw x value by [n]
+-translate_raw_xy_at_random [x] [y] : translate raw xy values by random and max offset of [x] [y]
+-translate_raw_xyz [x] [y] [z] : translate raw coordinates by [x] [y] [z]
+-translate_raw_y [n] : translate raw y value by [n]
+-translate_raw_z [n] : translate raw z value by [n]
+-translate_then_scale_x [m] [n] : translate x value by [m] and scale by [n]
+-translate_then_scale_y [m] [n] : translate y value by [m] and scale by [n]
+-translate_then_scale_z [m] [n] : translate z value by [m] and scale by [n]
+-translate_x [n] : translate x value by [n]
+-translate_xyz [x] [y] [z] : translate point coordinates by [x] [y] [z]
+-translate_y [n] : translate y value by [n]
+-translate_z [n] : translate z value by [n]
+
+### Simple thinning
+-drop_every_nth [n] : drop every [n]th point
+-keep_every_nth [n] : keep every [n]th point
+-keep_random_fraction [m] [n] : keep points by random fraction [m]{0-1}, optional seed [n]
+-thin_points_with_time [n] : thin points with time, [n] = timespacing
+-thin_pulses_with_time [n] : thin pulses with time, [n] = timespacing
+-thin_with_grid [n] : thin points by min grid size of [n]
+-thin_with_time [n] : thin pulses with time, [n] = timespacing
+
+### Return number
+-change_extended_number_of_returns_from_to [m] [n]: change extended number of returns from [m] to [n]
+-change_extended_return_number_from_to [m] [n]: change extended return number from [m] to [n]
+-change_number_of_returns_from_to [m] [n]: change number of returns from [m] to [n]
+-change_return_number_from_to [m] [n]: change return number from [m] to [n]
+-drop_double : drop double returns
+-drop_first : drop first return
+-drop_first_of_many : drop first of many returns
+-drop_last : drop last return
+-drop_last_of_many : drop last of many returns
+-drop_middle : drop middle returns
+-drop_number_of_returns [n] : drop points with [n] number of returns
+-drop_quadruple : drop quadruple returns
+-drop_quintuple : drop quintuple returns
+-drop_return [m] [n]... : drop points with return [m] [n]...
+-drop_return_mask [n] : drop points with return mask [n]
+-drop_second_last : drop points with second last return
+-drop_single : drop points with single return
+-drop_triple : drop points with triple return
+-first_only : use first return only
+-keep_double : keep double returns
+-keep_first : keep first return
+-keep_first_of_many : keep first of many returns
+-keep_last : keep last return
+-keep_last_of_many : keep last of many returns
+-keep_middle : keep middle returns
+-keep_number_of_returns [n] : keep points with [n] number of returns
+-keep_quadruple : keep quadruple returns
+-keep_quintuple : keep quintuple returns
+-keep_return [m] [n]... : keep points with return [m] [n]...
+-keep_return_mask [n] : keep points with return mask [n]
+-keep_second_last : keep points with second last return
+-keep_single : keep points with single return
+-keep_triple : keep points with triple return
+-last_only : use last return only
+-repair_zero_returns : sets return counts and number of returns that are zero to one
+-set_extended_number_of_returns [n] : set extended number of returns to [n]
+-set_extended_return_number [n] : set extended return number to [n]
+-set_number_of_returns [n] : set number of returns to [n]
+-set_return_number [n] : set return number to [n]
+
+### Scanline
+-drop_scan_direction [n] : drop points with scan direction [n]
+-faf : input files are flightlines. do ***NOT*** use this for tiled input
+-faf_index [n] : set files are flightlines index [n]
+-files_are_flightlines : input files are flightlines. do ***NOT*** use this for tiled input
+-keep_edge_of_flight_line : keep points with "Edge of Flight Line" flag set
+-keep_scan_direction_change : keep points with changed scan direction flag
+-set_edge_of_flight_line [0/1] : set "Edge of Flight Line" flag to [0/1]
+-set_scan_direction_flag [0/1] : set scan direction flag to [0/1]
+
+### Scanner channel
+-copy_scanner_channel_into_point_source: copy scanner channel into point_source
+-copy_scanner_channel_into_user_data: copy scanner channel into user data
+-copy_user_data_into_scanner_channel: copy user data into scanner channel
+-drop_scanner_channel [n] : drop points with scanner channel [n]
+-keep_scanner_channel [n] : keep points with scanner channel [n]
+-merge_scanner_channel_into_point_source: merge scanner channel to point source
+-set_extended_scanner_channel [n] : set extended scanner channel to [n]
+-set_scanner_channel [n] : set scanner channel to [n]
+-split_scanner_channel_from_point_source: split scanner channel from point source and save as extended scanner channel
+
+### Source ID
+-apply_file_source_ID : copy file source ID to target
+-bin_Z_into_point_source [n] : set point source to z/[n]
+-bin_abs_scan_angle_into_point_source [n]: set point source to scan_angle/[n]
+-bin_gps_time_into_point_source [n] : set point source to gps/[n]
+-change_point_source_from_to [m] [n]: change point source from [m] to [n]
+-copy_attribute_into_point_source [n]: copy attribute [n] value into point source
+-copy_classification_into_point_source: copy classification to point source
+-copy_point_source_into_register [n]: copy point source into register [n]
+-copy_register_into_point_source [n]: copy register [n] to point source
+-copy_scanner_channel_into_point_source: copy scanner channel into point_source
+-copy_user_data_into_point_source : copy user data into point source
+-drop_point_source [n] : drop points with point source [n]
+-drop_point_source_above [n] : drop points with point source above [n]
+-drop_point_source_below [n] : drop points with point source below [n]
+-drop_point_source_between [m] [n] : drop points with point source between [m] and [n]
+-keep_point_source [n] : keep points with point source [n]
+-keep_point_source_between [m] [n] : keep points with point source between [m] and [n]
+-map_point_source [fnm] : set the point source by map in file [fnm]
+-merge_scanner_channel_into_point_source: merge scanner channel to point source
+-set_point_source [n] : set point source to [n]
+-split_scanner_channel_from_point_source: split scanner channel from point source and save as extended scanner channel
+
+### User data
+-add_scaled_attribute_to_user_data [m] [n]: scale 
+-change_user_data_from_to [m] [n] : change user data from [m] to [n]
+-copy_attribute_into_user_data [n] : copy attribute [n] value into user data field
+-copy_classification_into_user_data : copy classification to user data
+-copy_register_into_user_data [n] : copy register [n] to user data
+-copy_scanner_channel_into_user_data: copy scanner channel into user data
+-copy_user_data_into_attribute [n] : copy user data into attribute [n] value
+-copy_user_data_into_classification : copy user data into classification
+-copy_user_data_into_point_source : copy user data into point source
+-copy_user_data_into_register [n] : copy user data to register [n]
+-copy_user_data_into_scanner_channel: copy user data into scanner channel
+-copy_user_data_into_z : copy user data into z
+-drop_user_data [n] : drop points with user data value of [n]
+-drop_user_data_above [n] : drop points with user data value above [n]
+-drop_user_data_below [n] : drop points with user data value below [n]
+-drop_user_data_between [m] [n] : drop points with user data between [m] and [n]
+-keep_user_data [n] : keep points with user data value of [n]
+-keep_user_data_above [n] : keep points with user data value above [n]
+-keep_user_data_below [n] : keep points with user data value below [n]
+-keep_user_data_between [m] [n] : keep points with user data between [m] and [n]
+-map_user_data [fnm] : set the user data by map in file [fnm]
+-scale_user_data [n] : scale user data by [n]
+-set_user_data [n] : sets all user_data fields to [n]
+
+### Classification
+-change_class_from_to [m] [n] : change classification from [m] to [n]
+-change_classification_from_to [m] [n]: change classification from [m] to [n]
+-change_extended_class_from_to [m] [n]: change extended class from [m] to [n]
+-change_extended_classification_from_to [m] [n]: change extended class from [m] to [n]
+-classify_attribute_above_as [m] [n] [o]: for attribute [m] with value above [n] set class to [o]
+-classify_attribute_below_as [m] [n] [o]: for attribute [m] with value below [n] set class to [o]
+-classify_attribute_between_as [m] [n] [o] [p]: for attribute [m] with value between [n] and [o] set class to [p]
+-classify_intensity_above_as [m] [n]: for intensity value above [m] set class to [n]
+-classify_intensity_below_as [m] [n]: for intensity value below [m] set class to [n]
+-classify_intensity_between_as [m] [n] [o]: for intensity value between [m] and [n] set class to [o]
+-classify_z_above_as [m] [n] : for z value above [m] set class to [n]
+-classify_z_below_as [m] [n] : for z value below [m] set class to [n]
+-classify_z_between_as [m] [n] [o] : for z value between [m] and [n] set class to [o]
+-copy_classification_into_point_source: copy classification to point source
+-copy_classification_into_user_data : copy classification to user data
+-copy_intensity_into_classification : copy intensity to classification
+-copy_user_data_into_classification : copy user data into classification
+-drop_class [m] [n] [o]... : drop points with class in [m][n][o]...
+-drop_classification [m] [n] [o]... : drop points with class in [m][n][o]...
+-drop_classification_mask [n] : drop points whose classification mask matches [n]
+-drop_extended_class [m] [n]... : drop extended class [m] [n]...
+-drop_extended_classification [n] : drop points with extended classification [n]
+-drop_extended_classification_mask [a] [b] [c] [d] [e] [f] [g] [h]: drop points whose extended classification mask matches [a] [b] [c] [d] [e] [f] [g] [h]
+-keep_class [m] [n] [o]... : keep points with class in [m][n][o]...
+-keep_classification [m] [n] [o]... : keep points with class in [m][n][o]...
+-keep_classification_mask [n] : keep points whose classification mask matches [n]
+-keep_extended_class [m] [n]... : keep extended class [m] [n]...
+-keep_extended_classification [n] : keep points with extended class [n]
+-move_ancient_to_extended_classification: move old data to extended classification
+-set_RGB_of_class [c] [r] [g] [b] : set RGB values of class [c] to [r][g][b] (8 or 16 bit)
+-set_extended_classification [n] : set extended classification to [n]
+
+### Extra byte
+-add_attribute_to_z [n] : add value of attribute [n] to z value
+-add_scaled_attribute_to_user_data [m] [n]: scale attribute [m] value by [n] and add to user data
+-add_scaled_attribute_to_z [m] [n] : scale attribute [m] value by [n] and add to z value
+-classify_attribute_above_as [m] [n] [o]: for attribute [m] with value above [n] set class to [o]
+-classify_attribute_below_as [m] [n] [o]: for attribute [m] with value below [n] set class to [o]
+-classify_attribute_between_as [m] [n] [o] [p]: for attribute [m] with value between [n] and [o] set class to [p]
+-copy_attribute_into_B [n] : copy attribute [n] value into blue
+-copy_attribute_into_G [n] : copy attribute [n] value into green
+-copy_attribute_into_I [n] : copy attribute [n] value into intensity
+-copy_attribute_into_NIR [n] : copy attribute [n] value into NIR (NearInfraRed)
+-copy_attribute_into_R [n] : copy attribute [n] value into red
+-copy_attribute_into_intensity [n] : copy attribute [n] value into intensity
+-copy_attribute_into_point_source [n]: copy attribute [n] value into point source
+-copy_attribute_into_register [m] [n]: copy attribute [m] value into register [n]
+-copy_attribute_into_user_data [n] : copy attribute [n] value into user data field
+-copy_attribute_into_x [n] : copy attribute [n] value into x
+-copy_attribute_into_y [n] : copy attribute [n] value into y
+-copy_attribute_into_z [n] : copy attribute [n] value into z
+-copy_intensity_into_attribute [n] : copy intensity to attribute [n] value
+-copy_register_into_attribute [m] [n]: copy register [m] to attribute [n] value
+-copy_user_data_into_attribute [n] : copy user data into attribute [n] value
+-copy_z_into_attribute [n] : copy z value into attribute [n] value
+-drop_attribute_above [m] [n] : drop points with attribute [m] value > [n]
+-drop_attribute_below [m] [n] : drop points with attribute [m] value < [n]
+-drop_attribute_between [m] [n] [o] : drop points with attribute [m] in range [n]...[o]
+-iadd_attribute [m] [n] [o] [p] [q] [r] [s] [t]: adds a new "extra_byte" attribute of data_type [m] name [n] description [o]; optional: scale [p] offset [q] pre_scale [r] pre_offset [s] no_data_value [t]
+-iadd_extra [m] [n] [o] [p] [q] [r] [s] [t]: adds a new "extra_byte" attribute of data_type [m] name [n] description [o]; optional: scale [p] offset [q] pre_scale [r] pre_offset [s] no_data_value [t]
+-keep_attribute_above [m] [n] : keep points with attribute [m] value > [n]
+-keep_attribute_below [m] [n] : keep points with attribute [m] value < [n]
+-keep_attribute_between [m] [n] [o] : keep points with attribute [m] in range [n]...[o]
+-load_attribute_from_text [m] [fnt] : load attribute [m] from file [fnt]
+-map_attribute_into_RGB [a] [fnm] : map attribute [a] by table in file [fnm] to RGB values
+-scale_attribute [m] [n] : scale attribute [m] by [n]
+-set_attribute [m] [n] : set attribute [m] with value [n]
+-translate_attribute [m] [n] : translate attribute [m] by [n]
+
+### Flags
+-drop_keypoint : drop points flagged as keypoint
+-drop_overlap : drop points flagged as overlap
+-drop_scan_direction [n] : drop points with scan direction [n]
+-drop_synthetic : drop points flagged as synthetic
+-drop_withheld : drop points flagged as withheld
+-keep_edge_of_flight_line : keep points with "Edge of Flight Line" flag set
+-keep_keypoint : keep points flagged as keypoint
+-keep_overlap : keep points flagged as overlap
+-keep_scan_direction_change : keep points with changed scan direction flag
+-keep_synthetic : keep points flagged as synthetic
+-keep_withheld : keep points flagged as withheld
+-set_edge_of_flight_line [0/1] : set "Edge of Flight Line" flag to [0/1]
+-set_extended_overlap_flag [0/1] : set extended overlap flag to [0/1]
+-set_keypoint_flag [0/1] : set keypoint flag to [0/1]
+-set_overlap_flag [0/1] : set overlap flag to [0/1]
+-set_scan_direction_flag [0/1] : set scan direction flag to [0/1]
+-set_synthetic_flag [0/1] : set synthetic flag to [0/1]
+-set_withheld_flag [0/1] : set withheld flag to [0/1]
+
+### GPS time
+-bin_gps_time_into_intensity [n] : set intensity to gps/[n]
+-bin_gps_time_into_point_source [n] : set point source to gps/[n]
+-drop_gps_time_above [n] : drop points with GPS time above [n]
+-drop_gps_time_below [n] : drop points with GPS time below [n]
+-drop_gps_time_between [m] [n] : drop points with GPS time between [m] and [n]
+-drop_gpstime_above [n] : drop points with GPS time above [n]
+-drop_gpstime_below [n] : drop points with GPS time below [n]
+-drop_gpstime_between [m] [n] : drop points with GPS time between [m] and [n]
+-keep_gps_time [m] [n] : keep points with GPS time between [m] and [n]
+-keep_gps_time_above [n] : keep points with GPS time above [n]
+-keep_gps_time_below [n] : keep points with GPS time below [n]
+-keep_gps_time_between [m] [n] : keep points with GPS time between [m] and [n]
+-keep_gpstime [m] [n] : keep points with GPS time between [m] and [n]
+-keep_gpstime_above [n] : keep points with GPS time above [n]
+-keep_gpstime_below [n] : keep points with GPS time below [n]
+-keep_gpstime_between [m] [n] : keep points with GPS time between [m] and [n]
+-set_gps_time [n] : set gps time to [n]
+-translate_gps_time [n] : translate GPS time by [n]
+
+### Intensity
+-bin_gps_time_into_intensity [n] : set intensity to gps/[n]
+-clamp_intensity [min] [max] : limit intensity values to [min] and [max]
+-clamp_intensity_above [max] : limit intensity values to maximal [max]
+-clamp_intensity_below [min] : limit intensity values to minimal [min]
+-classify_intensity_above_as [m] [n]: for intensity value above [m] set class to [n]
+-classify_intensity_below_as [m] [n]: for intensity value below [m] set class to [n]
+-classify_intensity_between_as [m] [n] [o]: for intensity value between [m] and [n] set class to [o]
+-copy_B_into_intensity : copy blue color value to intensity
+-copy_G_into_intensity : copy green color value to intensity
+-copy_NIR_into_intensity : copy NIR into intensity
+-copy_RGB_into_intensity : copy weighted RGB value to intensity
+-copy_R_into_intensity : copy red color value to intensity
+-copy_attribute_into_I [n] : copy attribute [n] value into intensity
+-copy_attribute_into_intensity [n] : copy attribute [n] value into intensity
+-copy_intensity_into_NIR : copy intensity into NIR (NearInfraRed) value
+-copy_intensity_into_attribute [n] : copy intensity to attribute [n] value
+-copy_intensity_into_classification : copy intensity to classification
+-copy_intensity_into_register [n] : copy intensity value into register [n]
+-copy_intensity_into_z : copy intensity to z value
+-copy_register_into_intensity [n] : copy register [n] into point intensity value
+-drop_intensity_above [n] : drop points with intensity value above [n]
+-drop_intensity_below [n] : drop points with intensity value below [n]
+-drop_intensity_between [m] [n] : drop points with intensity value between [m] and [n]
+-iscale_intensity [n] : scale intensity value by [n]
+-itranslate_intensity [n] : translate input intensity by [n]
+-keep_NDVI_intensity_is_NIR [min] [max]: keep NDVI (Normalized Difference Vegetation Index) where intensity is NIR between [min] [max]
+-keep_intensity [m] [n] : keep points with intensity between [m] and [n]
+-keep_intensity_above [n] : keep points with intensity value above [n]
+-keep_intensity_below [n] : keep points with intensity value below [n]
+-map_intensity [fnm] : set the intensity by map in file [fnm]
+-scale_intensity [n] : multiply intensity by [n]
+-set_intensity [n] : set intensity to [n]
+-switch_RGB_intensity_into_CIR : set R to intensity; G to R; B to G
+-translate_intensity [n] : translate intensity by [n]
+-translate_then_scale_intensity [m] [n]: translate intensity by [m] and scale by [n]
+
+### Raw point values
+-clamp_raw_z [min] [max] : limit raw z values to [min] and [max]
+-translate_raw_x [n] : translate raw x value by [n]
+-translate_raw_xy_at_random [x] [y] : translate raw xy values by random and max offset of [x] [y]
+-translate_raw_xyz [x] [y] [z] : translate raw coordinates by [x] [y] [z]
+-translate_raw_y [n] : translate raw y value by [n]
+-translate_raw_z [n] : translate raw z value by [n]
+
+### Registers
+-add_registers [m] [n] [o] : add register [m] and [n] and store result in register [o]
+-copy_B_into_register [n] : copy blue color value into register [n]
+-copy_G_into_register [n] : copy green color value into register [n]
+-copy_NIR_into_register [n] : copy NearInfraRed value into register [n]
+-copy_R_into_register [n] : copy red color value into register [n]
+-copy_attribute_into_register [m] [n]: copy attribute [m] value into register [n]
+-copy_intensity_into_register [n] : copy intensity value into register [n]
+-copy_point_source_into_register [n]: copy point source into register [n]
+-copy_register_into_B [n] : copy register [n] into blue color value
+-copy_register_into_G [n] : copy register [n] into green color value
+-copy_register_into_I [n] : copy register [n] into intensity value
+-copy_register_into_NIR [n] : copy register [n] into NearInfraRed value
+-copy_register_into_R [n] : copy register [n] into red color value
+-copy_register_into_attribute [m] [n]: copy register [m] to attribute [n] value
+-copy_register_into_intensity [n] : copy register [n] into point intensity value
+-copy_register_into_point_source [n]: copy register [n] to point source
+-copy_register_into_user_data [n] : copy register [n] to user data
+-copy_register_into_x [n] : copy register [n] to x value
+-copy_register_into_y [n] : copy register [n] to y value
+-copy_register_into_z [n] : copy register [n] to z value
+-copy_user_data_into_register [n] : copy user data to register [n]
+-divide_registers [m] [n] [o] : divide register [m] by register [n] and store result in register [o]
+-multiply_registers [m] [n] [o] : multiply register [m] with [n] and store result in register [o]
+-scale_register [m] [n] : scale register index [m] with factor [n]
+-set_register [m] [n] : set register [m] with value [n]
+-subtract_registers [m] [n] [o] : subtract register [n] from register [m] and store result in register [o]
+-translate_register [m] [n] : translate register index [m] value by [n]
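+
+For example, a register can stage a value between steps. A minimal sketch
+(assuming transforms are applied in command-line order; file names are
+placeholders) that halves the intensity via register 0:
+
+ las2las -i lidar.laz -copy_intensity_into_register 0 -scale_register 0 0.5 -copy_register_into_intensity 0 -olaz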
+
+### Scan angle
+-bin_abs_scan_angle_into_point_source [n]: set point source to scan_angle/[n]
+-drop_abs_scan_angle_above [max] : drop points with absolute scan angle above [max]
+-drop_abs_scan_angle_below [min] : drop points with absolute scan angle below [min]
+-drop_scan_angle_above [n] : drop points with scan angle above [n]
+-drop_scan_angle_below [n] : drop points with scan angle below [n]
+-drop_scan_angle_between [m] [n] : drop points with scan angle between [m] and [n]
+-iscale_scan_angle [n] : scale scan angle by [n]
+-itranslate_scan_angle [n] : translate input scan angle by [n]
+-keep_scan_angle [m] [n] : keep points with scan angle between [m] and [n]
+-keep_scan_angle_between [m] [n] : keep points with scan angle between [m] and [n]
+-scale_scan_angle [n] : scale scan angle by [n]
+-set_scan_angle [n] : set scan angle to [n]
+-translate_scan_angle [n] : translate scan angle by [n]
+-translate_then_scale_scan_angle [m] [n]: translate scan angle by [m] and scale by [n]
+
+### Tiles
+-keep_tile [x] [y] [size] : keep tile at lower-left [x] [y] with size [size]
+
+### Waveform packet
+-drop_wavepacket [n] : drop points with wavepacket value of [n]
+-flip_waveform_direction : flip the waveform direction in the waveform VLR
+-keep_wavepacket [n] : keep points with wavepacket value of [n]
+
+### CRS
+-aeac [m] [n] [meter/survey_feet/feet] [o] [p] [q] [r]: Albers Equal Area Conic Projection: False Easting [m] False Northing [n] [meter/survey_feet/feet] Central Meridian [o] Standard Parallel 1 [p] Standard Parallel 2 [q] Latitude of origin [r]
+-ecef : input is geocentric (Earth-centered Earth-fixed)
+-elevation_meter : use meter for elevation
+-elevation_survey_feet : set vertical units from meters to US survey feet
+-elevation_surveyfeet : use survey feet for elevation
+-ellipsoid [n] : use ellipsoid [n] {do -ellipsoid -1 for a list of ellipsoids}
+-epsg [n] : set datum to EPSG [n]
+-etrs89 : use datum ETRS89
+-gda2020 : use datum GDA2020
+-gda94 : use datum GDA94
+-grs80 : use datum GRS1980
+-latlong : geometric coordinates in latitude/longitude order
+-lcc 609601.22 0.0 meter 33.75 -79 34.33333 36.16666: specifies a Lambert conformal conic projection
+-longlat : geometric coordinates in longitude/latitude order
+-meter : use meter
+-nad27 : use datum NAD27
+-nad83 : use datum NAD83
+-nad83_2011 : use datum NAD83_2011
+-nad83_csrs : use datum NAD83_CSRS
+-nad83_harn : use datum NAD83_HARN
+-nad83_pa11 : set horizontal datum to NAD83 PA11
+-osgb1936 : use datum OSGB 1936
+-sp27 SC_N : use the NAD27 South Carolina North state plane
+-sp83 CO_S : use the NAD83 Colorado South state plane for georeferencing
+-survey_feet : use survey feet
+-surveyfeet : use survey feet as unit of measurement
+-target_aeac [m] [n] [meter/survey_feet/feet] [o] [p] [q] [r]: Albers Equal Area Conic Projection for target: False Easting [m] False Northing [n] [meter/survey_feet/feet] Central Meridian [o] Standard Parallel 1 [p] Standard Parallel 2 [q] Latitude of origin [r]
+-target_ecef : output is geocentric (Earth-centered Earth-fixed)
+-target_elevation_feet : output uses feet for elevation
+-target_elevation_meter : output uses meter for elevation
+-target_elevation_precision [n] : output uses [n] (meter/feet) resolution for z
+-target_elevation_survey_feet : use elevation survey feet as target unit
+-target_elevation_surveyfeet : output uses survey feet for elevation
+-target_epsg [n] : output is EPSG code [n] (e.g. 2193=NZGD2000)
+-target_feet : output uses feet
+-target_latlong : output is geometric coordinates in latitude/longitude
+-target_lcc 609601.22 0.0 meter 33.75 -79 34.33333 36.16666: specifies a Lambert conformal conic projection for the target
+-target_longlat : output is geometric coordinates in longitude/latitude
+-target_meter : output uses meter
+-target_precision [n] : output uses [n] (meter/feet) resolution for x and y
+-target_sp27 SC_N : output is state plane NAD27 South Carolina North
+-target_sp83 CO_S : output is state plane NAD83 Colorado South
+-target_survey_feet : output uses survey feet
+-target_surveyfeet : use survey feet as target unit
+-target_tm : use transverse mercator projection for target
+-target_utm 12T : output is UTM zone 12T
+-tm 609601.22 0.0 meter 33.75 -79 0.99996: specifies a transverse mercator projection
+-transverse_mercator : use transverse mercator projection
+-utm 12T : use UTM zone 12T
+-vertical_cgvd2013 : set vertical datum to CGVD2013
+-vertical_cgvd28 : set vertical datum to CGVD28
+-vertical_dhhn2016 : set vertical datum to DHHN2016
+-vertical_dhhn92 : set vertical datum to DHHN92
+-vertical_dvr90 : set vertical datum to DVR90
+-vertical_epsg [n] : set vertical datum to EPSG [n]
+-vertical_evrf2007 : set vertical datum to EVRF2007
+-vertical_navd29 : set vertical datum to NAVD29
+-vertical_navd88 : set vertical datum to NAVD88
+-vertical_ngvd29 : set vertical datum to NGVD29
+-vertical_nn2000 : set vertical datum to NN2000
+-vertical_nn54 : set vertical datum to NN54
+-vertical_nzvd2016 : set vertical datum to NZVD2016
+-vertical_wgs84 : set vertical datum to WGS84
+-wgs72 : use the WGS-72 ellipsoid
+-wgs84 : use the WGS-84 ellipsoid
+
+### Logical
+-filter_and : boolean AND combination of last 2 filters
+-filter_or : boolean OR combination of last 2 filters
+-filtered_transform : do the transformation only on points of the current filter
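+
+For example, a filter can gate a transform: with '-filtered_transform', only
+the points passing the preceding filter are transformed, while all points are
+kept. A minimal sketch (file names are placeholders):
+
+ las2las -i lidar.laz -keep_class 2 -filtered_transform -translate_z 0.5 -olaz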
+
+### Input
+-i [fnp] : input file or input file mask [fnp] (e.g. *.laz;fo?.la?;esri.shp,...)
+-io_ibuffer [n] : use read-input-buffer of size [n] bytes
+-iparse [xyz] : define fields [xyz] for text input parser
+-ipts : input as PTS (plain text lidar source), store header in VLR
+-iptx : input as PTX (plain text extended lidar data), store header in VLR
+-iptx_transform : use PTX file header to transform point data
+-iskip [n] : skip [n] lines at the beginning of the text input
+-itxt : expect input as text file
+-lof [fnf] : use input out of a list of files [fnf]
+-merged : merge input files
+-stdin : pipe from stdin
+
+### Output
+-compatible : write LAS/LAZ output in compatibility mode
+-do_not_populate : do not populate header on output
+-io_obuffer [n] : use write-out-buffer of size [n] bytes
+-native : write LAS/LAZ output in native/actual mode
+-nil : pipe output to NULL (suppress output)
+-o [n] : use [n] as output file
+-obin : output as BIN (terrasolid binary)
+-ocut [n] : cut the last [n] characters from name
+-odir [n] : set output directory to [n]
+-odix [n] : set output file name suffix to [n]
+-oforce : force output creation also on errors or warnings
+-olas : output as LAS file
+-olaz : output as LAZ (compressed LAS)
+-oparse [xyz] : parse on-the-fly to ASCII using fields [xyz]
+-opts : output as PTS (plain text lidar data)
+-optx : output as PTX (plain text with header)
+-oqi : output in QFIT format (.qi)(ATM project, NASA)
+-oscale_rgb [n] : scale output RGB by [n]
+-osep [n] : set text output separator as char [n]
+-otxt : output as textfile
+-owrl : output as VRML (Virtual Reality Modeling Language) text
+-pipe_on : write output to command pipe, see also -stdin
+-populate : populate header on output
+-stdout : pipe to stdout
+-target_ecef : output is geocentric (Earth-centered Earth-fixed)
+-temp_files [n] : set base file name [n] for temp files (example: E:\tmp)
+
+### add_attribute
+The '-iadd_attribute' argument takes as its first parameter the data type,
+out of these values:
+ 0 : undocumented - extra bytes specify value in options field
+ 1 : unsigned char (1 byte)
+ 2 : char (1 byte)
+ 3 : unsigned short (2 bytes)
+ 4 : short (2 bytes)
+ 5 : unsigned long (4 bytes)
+ 6 : long (4 bytes)
+ 7 : unsigned long long (8 bytes)
+ 8 : long long (8 bytes)
+ 9 : float (4 bytes)
+ 10 : double (8 bytes)
+ 11-30 : deprecated
+ 31-255 : reserved
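+
+For example, a new floating-point attribute (data type 9) could be added like
+this (a sketch; the attribute name and description are placeholders):
+
+ las2las -i lidar.laz -iadd_attribute 9 "height" "height above ground" -olaz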
+
+### parse
+The '-parse [xyz]' flag specifies how to set the
+columns in an ASCII output file.
+For example, 'tsxyzssa' means that the first number
+is the gps time, the next number should be skipped,
+the next three numbers are the x, y, and z coordinates,
+the next two should be skipped, and the next number
+is the scan angle.
+
+The other supported entries are:
+ x : [x] coordinate
+ y : [y] coordinate
+ z : [z] coordinate
+ X : unscaled raw [X] value
+ Y : unscaled raw [Y] value
+ Z : unscaled raw [Z] value
+ t : gps [t]ime
+ R : RGB [R]ed channel
+ G : RGB [G]reen channel
+ B : RGB [B]lue channel
+ s : [s]kip a string or a number that we don't care about
+ i : [i]ntensity
+ a : scan [a]ngle
+ n : [n]umber of returns of that given pulse
+ r : number of [r]eturn
+ h : with[h]eld flag
+ k : [k]eypoint flag
+ g : synthetic fla[g]
+ o : [o]verlap flag of LAS 1.4 point types 6, 7, 8
+ l : scanner channe[l] of LAS 1.4 point types 6, 7, 8
+ m : point index, starting at 0
+ M : point index, starting at 1
+ W : all wavepacket attributes
+ w : [w]avepacket descriptor index
+ c : [c]lassification
+ u : [u]ser data
+ p : [p]oint source ID
+ e : [e]dge of flight line flag
+ d : [d]irection of scan flag
+ 0-9 : additional attributes described as extra bytes (0 through 9)
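+
+For example, to write x, y, z, and intensity columns to a text file
+(a sketch; the file name is a placeholder):
+
+ las2las -i lidar.laz -otxt -oparse xyzi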
+
+
+## License
+
+This tool is free to use.
+
+## Support
+
+To get more information about a tool, just go to the
+[LAStools Google Group](http://groups.google.com/group/lastools/)
+and enter the tool name in the search function.
+You will find plenty of examples for this tool.
+
+To get further support, see our
+[rapidlasso service page](https://rapidlasso.de/service/).
+
+Check for the latest updates at
+https://rapidlasso.de/category/blog/releases/
+
+If you have any suggestions, please let us (support@rapidlasso.de) know.
+Jochen @rapidlasso
diff --git a/pdgpoints/bin/lasinfo b/pdgpoints/bin/lasinfo
new file mode 100755
index 0000000..7413990
Binary files /dev/null and b/pdgpoints/bin/lasinfo differ
diff --git a/pdgpoints/bin/lasinfo_README.md b/pdgpoints/bin/lasinfo_README.md
new file mode 100644
index 0000000..e96166f
--- /dev/null
+++ b/pdgpoints/bin/lasinfo_README.md
@@ -0,0 +1,903 @@
+# lasinfo
+
+Reports the contents of the header and a short summary of the
+points. Warns when there is a difference between the header
+information and the point content. When run with option '-cd'
+or '-compute_density', lasinfo will compute the point density.
+
+All differences can be repaired with the '-repair' option. The
+option '-repair_bb' will only repair (or tighten) the bounding
+box, while '-repair_counters' will only repair a wrongly reported
+number of points.
+
+By default the output of lasinfo goes to stderr. The output can
+be suppressed with '-quiet' or changed to '-stdout'. To write
+to a particular file use '-o output.txt' or '-otxt', which stores
+the lasinfo report to "lidar.txt" assuming the file has the name
+"lidar.las". With the option '-odix _info' you change the name
+to "lidar_info.txt", and with '-odir E:\reports' you choose a
+particular directory. These options are especially useful in
+batch mode such as:
+
+lasinfo -i *.laz -otxt -odir ..\reports -odix _info -cores 3
+
+The tool can also be used to modify various other entries in
+the header as described below. This needs to be done with care,
+as such changes can potentially corrupt the file.
+
+In case you just want to report or modify the header entries,
+you can skip the parsing of the points with '-no_check'. Some
+LAS files have excessive amounts of VLRs. You can suppress
+their output with '-no_vlrs'. By default lasinfo reports the
+min and the max of every point attribute after parsing all the
+points. You can suppress this with '-no_min_max'. By default the
+tool also counts the points falling outside the header bounding
+box. This can be disabled with '-no_check_outside'. If instead
+you want to report all the individual points that fall outside,
+use '-report_outside'. Another interesting option is to report
+the GPS time min and max as '-gps_week' in case it is stored as
+adjusted Standard GPS time.
+
+There are a lot of ways that lasinfo can be used to modify the
+contents of the LAS header *in place* without copying the file,
+and the GUI will teach you these command line options. Some not
+listed in the GUI are:
+
+-set_GUID F794F8A4-A23E-421E-A134-ACF7754E1C54
+-set_system_identifier "RIEGL Optech Leica"
+-set_generating_software "awesome LAStools"
+-set_file_source_ID 27
+-set_file_creation 8 2007
+-auto_date
+-set_global_encoding 1
+
+## Examples
+
+ lasinfo lidar.las
+
+reports all information to stderr.
+
+
+ lasinfo -i lidar1.las lidar2.las -merged
+
+reports all information for a merged LAS file that contains the
+merged contents of lidar1.las and lidar2.las to stderr.
+
+
+ lasinfo -i lidar1.las lidar2.las
+
+reports the information of the two files individually to stderr.
+
+
+ lasinfo *.las -merged -stdout
+
+reports information for all files merged to stdout.
+
+
+ lasinfo *.las -stdout
+
+reports information for all files individually to stdout.
+
+
+ lasinfo *.las -odix _info
+
+reports information for all files individually to *_info.txt
+
+
+ lasinfo -i lidar.las -o lidar_info.txt
+
+reports the information to a text file called lidar_info.txt.
+
+
+ lasinfo -i lidar.las -no_header
+
+suppresses the reporting of the header information (short: '-nh').
+
+
+ lasinfo -i lidar.las -no_vlrs
+
+suppresses the reporting of variable length records (short: '-nv').
+
+
+ lasinfo -i lidar.las -no_min_max
+
+suppresses the reporting of the minimum/maximum value for each attribute
+of the point records (short: '-nmm').
+
+
+ lasinfo -i lidar.las -no_check_outside
+
+suppresses checking whether points fall outside of the bounding box that
+is reported in the header (short: '-nco').
+
+
+ lasinfo -i lidar.las -report_outside
+
+reports the index and the coordinates of each point that falls outside
+of the bounding box that is reported in the header (short: '-ro').
+
+
+ lasinfo -i lidar.las -nh -nv -progress 1000000
+
+suppresses reporting of the header information and the vlrs and reports
+progress each time 1,000,000 points are parsed.
+
+
+ lasinfo -i lidar.las -no_check
+
+only reports header information (short: '-nc'). does not parse the points.
+
+
+ lasinfo -i lidar.las -compute_density
+
+computes and reports a good estimate of the point density (short: '-cd').
+
+
+ lasinfo -i lidar.las -repair_bb
+
+corrects missing or wrong bounding box info in the header.
+
+
+ lasinfo -i lidar.las -repair_counters
+
+corrects missing or wrong point number info in the header.
+
+
+ lasinfo -i lidar.las -set_file_source_ID 27
+
+sets the file source ID in the LAS header to 27.
+
+
+ lasinfo -i lidar.las -auto_date
+
+sets the file creation day/year in the header to the creation date of the file.
+
+
+ lasinfo -i lidar.las -set_file_creation 8 2007
+
+sets the file creation day/year in the header to 8/2007.
+
+
+ lasinfo -i lidar.las -set_system_identifier "hello world!"
+
+copies the first 31 characters of the string into the system_identifier field of the header.
+
+
+ lasinfo -i lidar.las -set_generating_software "this is a test (-:"
+
+copies the first 31 characters of the string into the generating_software field of the header.
+
+
+ lasinfo -i lidar.las -set_bounding_box 4100000 1800000 150 4110000 1810000 400
+
+sets the bounding box in the header to min_x=4100000, max_x=4110000, min_y=1800000, ...
+
+
+ lasinfo -i lidar.las -set_offset 3000000 1000000 0
+
+CAREFUL! sets the offset in the LAS header to 3000000 1000000 0 without changing
+the point coordinates. Hence this will, in general, translate the point cloud.
+
+
+ lasinfo -i lidar.las -set_scale 0.001 0.001 0.001
+
+CAREFUL! sets the scale in the LAS header to 0.001 0.001 0.001 without changing
+the point coordinates. Hence this will, in general, scale the point cloud.
+
+
+ lasinfo -i lidar.las -set_global_encoding 1
+
+CAREFUL! sets the global encoding field of the LAS header to 1 without checking
+whether this will corrupt the file.
+
+
+ lasinfo -i lidar.las -set_version 1.1
+
+CAREFUL! sets the version field of the LAS header to 1.1 without checking whether
+this will corrupt the file.
+
+
+ lasinfo -i lidar.las -set_header_size 235
+
+CAREFUL! sets the header size field of the LAS header to 235 without checking
+whether this will corrupt the file.
+
+
+ lasinfo -i lidar.las -set_offset_to_point_data 460
+
+CAREFUL! sets the offset to point data field of the LAS header to 460 without
+checking whether this will corrupt the file.
+
+
+ lasinfo -i lidar.las -set_point_data_format 1
+
+CAREFUL! sets the point data format field of the LAS header to point type 1
+without checking whether this will corrupt the file.
+
+
+ lasinfo -i lidar.las -set_point_data_record_length 32
+
+CAREFUL! sets the point data record length field of the LAS header to size 32
+without checking whether this will corrupt the file.
+
+
+ lasinfo -i lidar.las -set_start_of_waveform_data_packet_record 0
+
+CAREFUL! sets the start of waveform data packet record field of the LAS header
+to 0 without checking whether this will corrupt the file.
+
+
+lasinfo -i lidar.las
+lasinfo -i lidar.las -compute_density -o lidar_info.txt
+lasinfo -i *.las
+lasinfo -i *.las -single -otxt
+lasinfo -no_header -no_vlrs -i lidar.laz
+lasinfo -nv -nc -stdout -i lidar.las
+lasinfo -nv -nc -stdout -i *.laz -single | grep version
+lasinfo -i *.laz -subseq 100000 100100 -histo user_data 8
+lasinfo -i *.las -repair
+lasinfo -i *.laz -repair_bb -set_file_creation 8 2007
+lasinfo -i *.las -repair_counters -set_version 1.2
+lasinfo -i *.laz -set_system_identifier "hello world!" -set_generating_software "this is a test (-:"
+
+
+## lasinfo specific arguments
+
+-auto_creation : set (in place) creation year / day of year in header automatically based on file creation date
+-auto_creation_date : set (in place) creation year / day of year in header automatically based on file creation date
+-auto_date : set (in place) creation year / day of year in header automatically based on file creation date
+-cd : compute rough approximation for covered area, density, and spacing
+-compute_density : compute rough approximation for covered area, density, and spacing
+-cores [n] : process multiple inputs on [n] cores in parallel
+-delete_empty : delete LAS files with zero points
+-gps_week : compute the GPS week (if data is Adjusted Standard GPS time)
+-gw : compute the GPS week (if data is Adjusted Standard GPS time)
+-nc : don't parse points (only check header and VLRs)
+-nco : don't check whether points fall outside of LAS header bounding box
+-nh : don't output LAS header information
+-nmm : don't output point minimum / maximum entry information
+-no_check : don't parse points (only check header and VLRs)
+-no_check_outside : don't check whether points fall outside of LAS header bounding box
+-no_header : don't output LAS header information
+-no_min_max : don't output point minimum / maximum entry information
+-no_returns : don't output return information
+-no_vlrs : don't output VLR information
+-no_warnings : don't output WARNINGs
+-nr : don't output return information
+-nv : don't output VLR information
+-nw : don't output WARNINGs
+-otxt : output as textfile
+-progress [n] : report progress every [n] points
+-rename [n] : renames input file 'fusa.laz' to '[n]_277750_6122250.laz'
+-repair : repair both bounding box and counters
+-repair_bb : repair bounding box
+-repair_counters : set (in place) the counters for point number and (extended) return histograms in header
+-report_outside : report attributes of each point that falls outside of LAS header bounding box
+-ro : report attributes of each point that falls outside of LAS header bounding box
+-set_bb [x1] [y1] [z1] [x2] [y2] [z2]: set bounding box to [x1] [y1] [z1] [x2] [y2] [z2]
+-set_bounding_box [x1] [y1] [z1] [x2] [y2] [z2]: set bounding box to [x1] [y1] [z1] [x2] [y2] [z2]
+-set_creation_date [day] [year] : set creation date to [day] [year]
+-set_file_source_ID [n] : set (in place) file source ID in header to [n]
+-set_file_source_ID_from_point_source_ID: set (in place) file source ID in header to value that *all* points have in point source ID
+-set_generating_software [n] : set generating software header entry to [n] (max. 31 char string)
+-set_geotiff_epsg [n] : adds EPSG code [n] in-place when other GeoTIFF tags present
+-set_global_encoding [0/1] : CAREFUL! sets the global encoding field of the LAS header to [0/1] without checking whether this will corrupt the file
+-set_GUID [n] : set the GUID in the header to [n] (in the hexadecimal format shown in the example above)
+-set_header_size [n] : CAREFUL! sets the header size field of the LAS header to [n] without checking whether this will corrupt the file
+-set_number_of_point_records [n] : set number of point records to [n]
+-set_number_of_points_by_return [m] [n] [o] [p] [q]: set "number of points by return" header info [m] [n] [o] [p] [q]
+-set_number_of_variable_length_records [n]: set number of variable length records to [n]
+-set_offset [x] [y] [z] : CAREFUL! sets the offset in the LAS header to [x] [y] [z] without changing the point coordinates
+-set_offset_to_point_data [n] : CAREFUL! sets the offset to point data field of the LAS header to [n] without checking whether this will corrupt the file
+-set_point_data_format [n] : CAREFUL! sets the point data format field of the LAS header to point type [n]{1-10} without checking whether this will corrupt the file
+-set_point_data_record_length [n] : CAREFUL! sets the point data record length field of the LAS header to size [n] without checking whether this will corrupt the file
+-set_scale [x] [y] [z] : CAREFUL! sets the scale in the LAS header to [x] [y] [z] without changing the point coordinates
+-set_start_of_waveform_data_packet_record [n]: CAREFUL! sets the start of waveform data packet record field of the LAS header to [n] without checking whether this will corrupt the file
+-set_system_identifier [n] : set the system identifier header entry to [n] (max 31 characters)
+-set_version [n] : CAREFUL! sets the version field of the LAS header to [n]{e.g. 1.2} without checking whether this will corrupt the file
+-set_vlr_description [m] [n] : set description of vlr [m] to [n]
+-set_vlr_record_id [m] [n] : set record id of vlr [m] to [n]
+-set_vlr_user_id [m] [n] : set user id of vlr [m] to [n]
+-start_at_point [n] : start loading from point position [n]
+-stop_at_point [n] : stop loading after [n] points
+-subseq [m] [n] : only load subsequence from point [m] to [n]
+-suppress_classification : do not decompress classification for native-compressed LAS 1.4 point types 6 or higher
+-suppress_extra_bytes : do not decompress "extra bytes" for native-compressed LAS 1.4 point types 6 or higher
+-suppress_flags : do not decompress flags for native-compressed LAS 1.4 point types 6 or higher
+-suppress_intensity : do not decompress intensity for native-compressed LAS 1.4 point types 6 or higher
+-suppress_point_source : do not decompress point source ID for native-compressed LAS 1.4 point types 6 or higher
+-suppress_RGB : do not decompress RGB for native-compressed LAS 1.4 point types 6 or higher
+-suppress_scan_angle : do not decompress scan angle for native-compressed LAS 1.4 point types 6 or higher
+-suppress_user_data : do not decompress user data field for native-compressed LAS 1.4 point types 6 or higher
+-suppress_z : do not decompress z coordinates for native-compressed LAS 1.4 point types 6 or higher
+-week_to_adjusted [n] : converts time stamps from GPS week [n] to Adjusted Standard GPS
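+
+For example, a quick density check of a native-compressed LAS 1.4 file can skip
+decompressing fields it does not need (a sketch; the file name is a placeholder):
+
+ lasinfo -i lidar.laz -suppress_RGB -suppress_extra_bytes -cd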
+
+### Basics
+-cpu64 : start 64 bit executable (instead of default 32 bit executable)
+-fail : fail if license expired or invalid
+-gui : start with files loaded into GUI
+-h : print help output
+-quiet : nothing reported in console
+-v : verbose output (print extra information)
+-version : reports this tool's version number
+-wait : wait for user input in the console at end of process
+
+## Module arguments
+
+### General
+-buffered [n] : define read or write buffer of size [n]{default=262144}
+-chunk_size [n] : set chunk size [n] in number of bytes
+-comma_not_point : use comma instead of point as decimal separator
+-histo_avg [m] [n] [o] : histogram output of [m] with step width [n] and average [o]
+-neighbors [n] : set neighbors filename or wildcard [n]
+-neighbors_lof [fnf] : set neighbors list of files [fnf]
+-stored : use in-memory reader
+-unique : remove duplicate points
+
+### Color
+-clamp_RGB_to_8bit : limit RGB values to 8 bit (otherwise: 16 bit)
+-copy_B_into_NIR : copy blue color value into NearInfraRed value
+-copy_B_into_intensity : copy blue color value to intensity
+-copy_B_into_register [n] : copy blue color value into register [n]
+-copy_G_into_NIR : copy green color value into NearInfraRed value
+-copy_G_into_intensity : copy green color value to intensity
+-copy_G_into_register [n] : copy green color value into register [n]
+-copy_NIR_into_intensity : copy NIR into intensity
+-copy_NIR_into_register [n] : copy NearInfraRed value into register [n]
+-copy_RGB_into_intensity : copy weighted RGB value to intensity
+-copy_R_into_NIR : copy red color value into NearInfraRed value
+-copy_R_into_intensity : copy red color value to intensity
+-copy_R_into_register [n] : copy red color value into register [n]
+-copy_attribute_into_B [n] : copy attribute [n] value into blue
+-copy_attribute_into_G [n] : copy attribute [n] value into green
+-copy_attribute_into_NIR [n] : copy attribute [n] value into NIR (NearInfraRed)
+-copy_attribute_into_R [n] : copy attribute [n] value into red
+-copy_intensity_into_NIR : copy intensity into NIR (NearInfraRed) value
+-copy_register_into_B [n] : copy register [n] into blue color value
+-copy_register_into_G [n] : copy register [n] into green color value
+-copy_register_into_I [n] : copy register [n] into intensity value
+-copy_register_into_NIR [n] : copy register [n] into NearInfraRed value
+-copy_register_into_R [n] : copy register [n] into red color value
+-drop_RGB_green [min] [max] : drop points with green color value between [min] and [max]
+-drop_RGB_red [min] [max] : drop points with red color value between [min] and [max]
+-force_RGB : force the use of the RGB value even if the point format does not support RGB
+-keep_NDVI_from_CIR [min] [max] : keep NDVI (Normalized Difference Vegetation Index) from CIR between [min] [max]
+-keep_NDVI_green_is_NIR [min] [max] : keep NDVI (Normalized Difference Vegetation Index) where green is NIR between [min] [max]
+-keep_NDVI_intensity_is_NIR [min] [max]: keep NDVI (Normalized Difference Vegetation Index) where intensity is NIR between [min] [max]
+-keep_RGB_blue [m] [n] : keep points with RGB blue color values between [m] and [n]
+-keep_RGB_green [min] [max] : keep points with green color value between [min] and [max]
+-keep_RGB_greenness [m] [n] : keep points with RGB greenness values between [m] and [n]
+-keep_RGB_nir [m] [n] : keep points with RGB NIR values between [m] and [n]
+-keep_RGB_red [min] [max] : keep points with red color value between [min] and [max]
+-map_attribute_into_RGB [a] [fnm] : map attribute [a] by table in file [fnm] to RGB values
+-oscale_rgb [n] : scale output RGB by [n]
+-scale_NIR [n] : scale NearInfraRed value by factor [n]
+-scale_NIR_down : scale NearInfraRed value down by 256
+-scale_NIR_to_16bit : scale 8 bit NearInfraRed value to 16 bit
+-scale_NIR_to_8bit : scale 16 bit NearInfraRed value down to 8 bit
+-scale_NIR_up : scale NearInfraRed value up by 256
+-scale_RGB [r] [g] [b] : scale RGB values by factors in [r][g][b]
+-scale_RGB_down : scale RGB color values down by 256
+-scale_RGB_to_16bit : scale 8 bit color values to 16 bit
+-scale_RGB_to_8bit : scale 16 bit color values down to 8 bit
+-scale_RGB_up : scale RGB values from 8 bit up to 16 bit (multiply with 256)
+-scale_rgb_down : divides all RGB values by 256 (to go from 16 bit to 8 bit numbers)
+-scale_rgb_up : multiplies all RGB values by 256 (to go from 8 bit to 16 bit numbers)
+-set_NIR [n] : set NearInfraRed value to [n]
+-set_RGB [r] [g] [b] : set color to [r] [g] [b]
+-set_RGB_of_class [c] [r] [g] [b] : set RGB values of class [c] to [r][g][b] (8 or 16 bit)
+-switch_G_B : switch green and blue value
+-switch_RGBI_into_CIR : set R to NIR; G to R; B to G
+-switch_RGB_intensity_into_CIR : set R to intensity; G to R; B to G
+-switch_R_B : switch red and blue color value
+-switch_R_G : switch red and green color value
+
+### Coordinates
+-add_attribute_to_z [n] : add value of attribute [n] to z value
+-add_scaled_attribute_to_z [m] [n] : scale attribute [m] value by [n] and add to z value
+-auto_reoffset : puts a reasonable offset in the header and translates the points accordingly
+-bin_Z_into_point_source [n] : set point source to z/[n]
+-clamp_raw_z [min] [max] : limit raw z values to [min] and [max]
+-clamp_z [min] [max] : limit z values to [min] and [max]
+-clamp_z_above [n] : limit z values to maximal [n]
+-clamp_z_below [n] : limit z values to minimal [n]
+-classify_z_above_as [m] [n] : for z value above [m] set class to [n]
+-classify_z_below_as [m] [n] : for z value below [m] set class to [n]
+-classify_z_between_as [m] [n] [o] : for z value between [m] and [n] set class to [o]
+-copy_attribute_into_x [n] : copy attribute [n] value into x
+-copy_attribute_into_y [n] : copy attribute [n] value into y
+-copy_attribute_into_z [n] : copy attribute [n] value into z
+-copy_intensity_into_z : copy intensity to z value
+-copy_register_into_x [n] : copy register [n] to x value
+-copy_register_into_y [n] : copy register [n] to y value
+-copy_register_into_z [n] : copy register [n] to z value
+-copy_user_data_into_z : copy user data into z
+-copy_z_into_attribute [n] : copy z value into attribute [n] value
+-drop_x [m] [n] : drop points with x value between [m] and [n]
+-drop_x_above [n] : drop points with x value above [n]
+-drop_x_below [n] : drop points with x value below [n]
+-drop_xy [x1] [y1] [x2] [y2] : drop points within the [x1] [y1] [x2] [y2] rectangle
+-drop_xyz [x1] [y1] [z1] [x2] [y2] [z2]: drop points within the given cube dimensions
+-drop_y [m] [n] : drop points with y value between [m] and [n]
+-drop_y_above [n] : drop points with y value above [n]
+-drop_y_below [n] : drop points with y value below [n]
+-drop_z [m] [n] : drop points with z value between [m] and [n]
+-drop_z_above [n] : drop points with z value above [n]
+-drop_z_below [n] : drop points with z value below [n]
+-inside [x1] [y1] [x2] [y2] : use only points within the [x1] [y1] [x2] [y2] rectangle
+-inside_circle [x] [y] [r] : keep circle at pos [x] [y] with radius [r]
+-inside_rectangle [x1] [y1] [x2] [y2]: use only points within the [x1] [y1] [x2] [y2] rectangle
+-inside_tile [m] [n] [o] : use only points inside the tile at lower-left [m] [n] with size [o]
+-keep_circle [x] [y] [r] : keep circle at pos [x] [y] with radius [r]
+-keep_profile [x1] [y1] [x2] [y2] [w]: keep profile with [x1] [y1] [x2] [y2] [w]
+-keep_tile [x] [y] [size] : keep tile at lower-left [x] [y] with size [size]
+-keep_x [m] [n] : keep points with x value between [m] and [n]
+-keep_xy [x1] [y1] [x2] [y2] : keep points within the [x1] [y1] [x2] [y2] rectangle
+-keep_xyz [x1] [y1] [z1] [x2] [y2] [z2]: keep points within the given cube dimensions
+-keep_y [m] [n] : keep points with y value between [m] and [n]
+-keep_z [m] [n] : keep points with z value between [m] and [n]
+-keep_z_above [n] : keep points with z value above [n]
+-keep_z_below [n] : keep points with z value below [n]
+-reoffset [x] [y] [z] : puts a new offset [x] [y] [z] into the header and translates the points accordingly
+-rescale [x] [y] [z] : puts a new scale [x] [y] [z] into the header and rescales the points accordingly
+-rescale_xy [x] [y] : rescale x y by [x] [y]
+-rescale_z [z] : rescale z by [z]
+-rotate_xy [a] [x] [y] : rotate points by [a] degrees, center at [x] [y]
+-rotate_xz [a] [x] [z] : rotate points by [a] degrees, center at [x] [z]
+-rotate_yz [a] [y] [z] : rotate points by [a] degrees, center at [y] [z]
+-scale_x [n] : scale x value by [n]
+-scale_xyz [m] [n] [o] : scale xyz values by [m] [n] [o]
+-scale_y [n] : scale y value by [n]
+-scale_z [n] : scale z value by [n]
+-switch_x_y : exchange x and y value
+-switch_x_z : exchange x and z value
+-switch_y_z : exchange y and z value
+-transform_affine [a],[b],[c],[d] : transform input using affine transformation with [a],[b],[c],[d]
+-transform_helmert [m] [n] [o] : do a Helmert transformation with 3 or 7 comma-separated parameters
+-transform_matrix [r11,r12,r13] [r21,r22,r23] [r31,r32,r33] [tr1,tr2,tr3]: transform input using matrix [r11,r12,r13] [r21,r22,r23] [r31,r32,r33] [tr1,tr2,tr3]
+-translate_raw_x [n] : translate raw x value by [n]
+-translate_raw_xy_at_random [x] [y] : translate raw xy values by random and max offset of [x] [y]
+-translate_raw_xyz [x] [y] [z] : translate raw coordinates by [x] [y] [z]
+-translate_raw_y [n] : translate raw y value by [n]
+-translate_raw_z [n] : translate raw z value by [n]
+-translate_then_scale_x [m] [n] : translate x value by [m] and scale by [n]
+-translate_then_scale_y [m] [n] : translate y value by [m] and scale by [n]
+-translate_then_scale_z [m] [n] : translate z value by [m] and scale by [n]
+-translate_x [n] : translate x value by [n]
+-translate_xyz [x] [y] [z] : translate point coordinates by [x] [y] [z]
+-translate_y [n] : translate y value by [n]
+-translate_z [n] : translate z value by [n]
+
+### Simple thinning
+-drop_every_nth [n] : drop every [n]th point
+-keep_every_nth [n] : keep every [n]th point
+-keep_random_fraction [m] [n] : keep points by random fraction [m]{0-1}, optional seed [n]
+-thin_points_with_time [n] : thin points with time, [n] = timespacing
+-thin_pulses_with_time [n] : thin pulses with time, [n] = timespacing
+-thin_with_grid [n] : thin points by min grid size of [n]
+-thin_with_time [n] : thin pulses with time, [n] = timespacing
+
+### Return number
+-change_extended_number_of_returns_from_to [m] [n]: change extended number of returns from [m] to [n]
+-change_extended_return_number_from_to [m] [n]: change extended return number from [m] to [n]
+-change_number_of_returns_from_to [m] [n]: change number of returns from [m] to [n]
+-change_return_number_from_to [m] [n]: change return number from [m] to [n]
+-drop_double : drop double returns
+-drop_first : drop first return
+-drop_first_of_many : drop first of many returns
+-drop_last : drop last return
+-drop_last_of_many : drop last of many returns
+-drop_middle : drop middle returns
+-drop_number_of_returns [n] : drop points with [n] number of returns
+-drop_quadruple : drop quadruple returns
+-drop_quintuple : drop quintuple returns
+-drop_return [m] [n]... : drop points with return [m] [n]...
+-drop_return_mask [n] : drop points with return mask [n]
+-drop_second_last : drop points with second last return
+-drop_single : drop points with single return
+-drop_triple : drop points with triple return
+-first_only : use first return only
+-keep_double : keep double returns
+-keep_first : keep first return
+-keep_first_of_many : keep first of many returns
+-keep_last : keep last return
+-keep_last_of_many : keep last of many returns
+-keep_middle : keep middle returns
+-keep_number_of_returns [n] : keep points with [n] number of returns
+-keep_quadruple : keep quadruple returns
+-keep_quintuple : keep quintuple returns
+-keep_return [m] [n]... : keep points with return [m] [n]...
+-keep_return_mask [n] : keep points with return mask [n]
+-keep_second_last : keep points with second last return
+-keep_single : keep points with single return
+-keep_triple : keep points with triple return
+-last_only : use last return only
+-repair_zero_returns : sets return counts and number of returns that are zero to one
+-set_extended_number_of_returns [n] : set extended number of returns to [n]
+-set_extended_return_number [n] : set extended return number to [n]
+-set_number_of_returns [n] : set number of returns to [n]
+-set_return_number [n] : set return number to [n]
+
+### Scanline
+-drop_scan_direction [n] : drop points with scan direction [n]
+-faf : input files are flightlines. do ***NOT*** use this for tiled input
+-faf_index [n] : set files are flightlines index [n]
+-files_are_flightlines : input files are flightlines. do ***NOT*** use this for tiled input
+-keep_edge_of_flight_line : keep points with "Edge of Flight Line" flag set
+-keep_scan_direction_change : keep points with changed scan direction flag
+-set_edge_of_flight_line [0/1] : set "Edge of Flight Line" flag to [0/1]
+-set_scan_direction_flag [0/1] : set scan direction flag to [0/1]
+
+### Scanner channel
+-copy_scanner_channel_into_point_source: copy scanner channel into point_source
+-copy_scanner_channel_into_user_data: copy scanner channel into user data
+-copy_user_data_into_scanner_channel: copy user data into scanner channel
+-drop_scanner_channel [n] : drop points with scanner channel [n]
+-keep_scanner_channel [n] : keep points with scanner channel [n]
+-merge_scanner_channel_into_point_source: merge scanner channel to point source
+-set_extended_scanner_channel [n] : set extended scanner channel to [n]
+-set_scanner_channel [n] : set scanner channel to [n]
+-split_scanner_channel_from_point_source: split scanner channel from point source and save as extended scanner channel
+
+### Source ID
+-apply_file_source_ID : copy file source ID to target
+-bin_Z_into_point_source [n] : set point source to z/[n]
+-bin_abs_scan_angle_into_point_source [n]: set point source to scan_angle/[n]
+-bin_gps_time_into_point_source [n] : set point source to gps/[n]
+-change_point_source_from_to [m] [n]: change point source from [m] to [n]
+-copy_attribute_into_point_source [n]: copy attribute [n] value into point source
+-copy_classification_into_point_source: copy classification to point source
+-copy_point_source_into_register [n]: copy point source into register [n]
+-copy_register_into_point_source [n]: copy register [n] to point source
+-copy_scanner_channel_into_point_source: copy scanner channel into point_source
+-copy_user_data_into_point_source : copy user data into point source
+-drop_point_source [n] : drop points with point source [n]
+-drop_point_source_above [n] : drop points with point source above [n]
+-drop_point_source_below [n] : drop points with point source below [n]
+-drop_point_source_between [m] [n] : drop points with point source between [m] and [n]
+-keep_point_source [n] : keep points with point source [n]
+-keep_point_source_between [m] [n] : keep points with point source between [m] and [n]
+-map_point_source [fnm] : set the point source by map in file [fnm]
+-merge_scanner_channel_into_point_source: merge scanner channel to point source
+-set_point_source [n] : set point source to [n]
+-split_scanner_channel_from_point_source: split scanner channel from point source and save as extended scanner channel
+
+### User data
+-add_scaled_attribute_to_user_data [m] [n]: scale attribute [m] value by [n] and add to user data
+-change_user_data_from_to [m] [n] : change user data from [m] to [n]
+-copy_attribute_into_user_data [n] : copy attribute [n] value into user data field
+-copy_classification_into_user_data : copy classification to user data
+-copy_register_into_user_data [n] : copy register [n] to user data
+-copy_scanner_channel_into_user_data: copy scanner channel into user data
+-copy_user_data_into_attribute [n] : copy user data into attribute [n] value
+-copy_user_data_into_classification : copy user data into classification
+-copy_user_data_into_point_source : copy user data into point source
+-copy_user_data_into_register [n] : copy user data to register [n]
+-copy_user_data_into_scanner_channel: copy user data into scanner channel
+-copy_user_data_into_z : copy user data into z
+-drop_user_data [n] : drop points with user data value of [n]
+-drop_user_data_above [n] : drop points with user data value above [n]
+-drop_user_data_below [n] : drop points with user data value below [n]
+-drop_user_data_between [m] [n] : drop points with user data between [m] and [n]
+-keep_user_data [n] : keep points with user data value of [n]
+-keep_user_data_above [n] : keep points with user data value above [n]
+-keep_user_data_below [n] : keep points with user data value below [n]
+-keep_user_data_between [m] [n] : keep points with user data between [m] and [n]
+-map_user_data [fnm] : set the user data by map in file [fnm]
+-scale_user_data [n] : scale user data by [n]
+-set_user_data [n] : sets all user_data fields to [n]
+
+### Classification
+-change_class_from_to [m] [n] : change classification from [m] to [n]
+-change_classification_from_to [m] [n]: change classification from [m] to [n]
+-change_extended_class_from_to [m] [n]: change extended class from [m] to [n]
+-change_extended_classification_from_to [m] [n]: change extended class from [m] to [n]
+-classify_attribute_above_as [m] [n] [o]: for attribute [m] with value above [n] set class to [o]
+-classify_attribute_below_as [m] [n] [o]: for attribute [m] with value below [n] set class to [o]
+-classify_attribute_between_as [m] [n] [o] [p]: for attribute [m] with value between [n] and [o] set class to [p]
+-classify_intensity_above_as [m] [n]: for intensity value above [m] set class to [n]
+-classify_intensity_below_as [m] [n]: for intensity value below [m] set class to [n]
+-classify_intensity_between_as [m] [n] [o]: for intensity value between [m] and [n] set class to [o]
+-classify_z_above_as [m] [n] : for z value above [m] set class to [n]
+-classify_z_below_as [m] [n] : for z value below [m] set class to [n]
+-classify_z_between_as [m] [n] [o] : for z value between [m] and [n] set class to [o]
+-copy_classification_into_point_source: copy classification to point source
+-copy_classification_into_user_data : copy classification to user data
+-copy_intensity_into_classification : copy intensity to classification
+-copy_user_data_into_classification : copy user data into classification
+-drop_class [m] [n] [o]... : drop points with class in [m][n][o]...
+-drop_classification [m] [n] [o]... : drop points with class in [m][n][o]...
+-drop_classification_mask [n] : drop points whose classification mask matches [n]
+-drop_extended_class [m] [n]... : drop extended class [m] [n]...
+-drop_extended_classification [n] : drop points with extended classification [n]
+-drop_extended_classification_mask [a] [b] [c] [d] [e] [f] [g] [h]: drop points whose extended classification mask matches [a] [b] [c] [d] [e] [f] [g] [h]
+-keep_class [m] [n] [o]... : keep points with class in [m][n][o]...
+-keep_classification [m] [n] [o]... : keep points with class in [m][n][o]...
+-keep_classification_mask [n] : keep points whose classification mask matches [n]
+-keep_extended_class [m] [n]... : keep extended class [m] [n]...
+-keep_extended_classification [n] : keep points with extended class [n]
+-move_ancient_to_extended_classification: move old data to extended classification
+-set_RGB_of_class [c] [r] [g] [b] : set RGB values of class [c] to [r][g][b] (8 or 16 bit)
+-set_classification [n] : set classification to [n]
+-set_extended_classification [n] : set extended classification to [n]
+
+### Extra byte
+-add_attribute_to_z [n] : add value of attribute [n] to z value
+-add_scaled_attribute_to_user_data [m] [n]: scale attribute [m] value by [n] and add to user data
+-add_scaled_attribute_to_z [m] [n] : scale attribute [m] value by [n] and add to z value
+-classify_attribute_above_as [m] [n] [o]: for attribute [m] with value above [n] set class to [o]
+-classify_attribute_below_as [m] [n] [o]: for attribute [m] with value below [n] set class to [o]
+-classify_attribute_between_as [m] [n] [o] [p]: for attribute [m] with value between [n] and [o] set class to [p]
+-copy_attribute_into_B [n] : copy attribute [n] value into blue
+-copy_attribute_into_G [n] : copy attribute [n] value into green
+-copy_attribute_into_I [n] : copy attribute [n] value into intensity
+-copy_attribute_into_NIR [n] : copy attribute [n] value into NIR (NearInfraRed)
+-copy_attribute_into_R [n] : copy attribute [n] value into red
+-copy_attribute_into_intensity [n] : copy attribute [n] value into intensity
+-copy_attribute_into_point_source [n]: copy attribute [n] value into point source
+-copy_attribute_into_register [m] [n]: copy attribute [m] value into register [n]
+-copy_attribute_into_user_data [n] : copy attribute [n] value into user data field
+-copy_attribute_into_x [n] : copy attribute [n] value into x
+-copy_attribute_into_y [n] : copy attribute [n] value into y
+-copy_attribute_into_z [n] : copy attribute [n] value into z
+-copy_intensity_into_attribute [n] : copy intensity to attribute [n] value
+-copy_register_into_attribute [m] [n]: copy register [m] to attribute [n] value
+-copy_user_data_into_attribute [n] : copy user data into attribute [n] value
+-copy_z_into_attribute [n] : copy z value into attribute [n] value
+-drop_attribute_above [m] [n] : drop points with attribute [m] value > [n]
+-drop_attribute_below [m] [n] : drop points with attribute [m] value < [n]
+-drop_attribute_between [m] [n] [o] : drop points with attribute [m] in range [n]...[o]
+-iadd_attribute [m] [n] [o] [p] [q] [r] [s] [t]: adds a new "extra_byte" attribute of data_type [m] name [n] description [o]; optional: scale [p] offset [q] pre_scale [r] pre_offset [s] no_data_value [t]
no_data_value [t]
+-iadd_extra [m] [n] [o] [p] [q] [r] [s] [t]: adds a new "extra_byte" attribute of data_type [m] name [n] description [o]; optional: scale [p] offset [q] pre_scale [r] pre_offset [s] no_data_value [t]
+-keep_attribute_above [m] [n] : keep points with attribute [m] value > [n]
+-keep_attribute_below [m] [n] : keep points with attribute [m] value < [n]
+-keep_attribute_between [m] [n] [o] : keep points with attribute [m] in range [n]...[o]
+-load_attribute_from_text [m] [fnt] : load attribute [m] from file [fnt]
+-map_attribute_into_RGB [a] [fnm] : map attribute [a] by table in file [fnm] to RGB values
+-scale_attribute [m] [n] : scale attribute [m] by [n]
+-set_attribute [m] [n] : set attribute [m] to value [n]
+-translate_attribute [m] [n] : translate attribute [m] by [n]
+
+### Flags
+-drop_keypoint : drop points flagged as keypoint
+-drop_overlap : drop points flagged as overlap
+-drop_scan_direction [n] : drop points with scan direction [n]
+-drop_synthetic : drop points flagged as synthetic
+-drop_withheld : drop points flagged as withheld
+-keep_edge_of_flight_line : keep points with "Edge of Flight Line" flag set
+-keep_keypoint : keep points flagged as keypoint
+-keep_overlap : keep points flagged as overlap
+-keep_scan_direction_change : keep points with changed scan direction flag
+-keep_synthetic : keep points flagged as synthetic
+-keep_withheld : keep points flagged as withheld
+-set_edge_of_flight_line [0/1] : set "Edge of Flight Line" flag to [0/1]
+-set_extended_overlap_flag [0/1] : set extended overlap flag to [0/1]
+-set_keypoint_flag [0/1] : set keypoint flag to [0/1]
+-set_overlap_flag [0/1] : set overlap flag to [0/1]
+-set_scan_direction_flag [0/1] : set scan direction flag to [0/1]
+-set_synthetic_flag [0/1] : set synthetic flag to [0/1]
+-set_withheld_flag [0/1] : set withheld flag to [0/1]
+
+### GPS time
+-adjusted_to_week : converts time stamps from Adjusted Standard GPS to GPS week
+-bin_gps_time_into_intensity [n] : set intensity to gpstime/[n]
+-bin_gps_time_into_point_source [n] : set point source to gpstime/[n]
+-drop_gps_time_above [n] : drop points with GPS time above [n]
+-drop_gps_time_below [n] : drop points with GPS time below [n]
+-drop_gps_time_between [m] [n] : drop points with GPS time between [m] and [n]
+-drop_gpstime_above [n] : drop points with GPS time above [n]
+-drop_gpstime_below [n] : drop points with GPS time below [n]
+-drop_gpstime_between [m] [n] : drop points with GPS time between [m] and [n]
+-keep_gps_time [m] [n] : keep points with GPS time between [m] and [n]
+-keep_gps_time_above [n] : keep points with GPS time above [n]
+-keep_gps_time_below [n] : keep points with GPS time below [n]
+-keep_gps_time_between [m] [n] : keep points with GPS time between [m] and [n]
+-keep_gpstime [m] [n] : keep points with GPS time between [m] and [n]
+-keep_gpstime_above [n] : keep points with GPS time above [n]
+-keep_gpstime_below [n] : keep points with GPS time below [n]
+-keep_gpstime_between [m] [n] : keep points with GPS time between [m] and [n]
+-set_gps_time [n] : set GPS time to [n]
+-translate_gps_time [n] : translate GPS time by [n]
+
+### Intensity
+-bin_gps_time_into_intensity [n] : set intensity to gpstime/[n]
+-clamp_intensity [min] [max] : limit intensity values to [min] and [max]
+-clamp_intensity_above [max] : limit intensity values to maximal [max]
+-clamp_intensity_below [min] : limit intensity values to minimal [min]
+-classify_intensity_above_as [m] [n]: for intensity value above [m] set class to [n]
+-classify_intensity_below_as [m] [n]: for intensity value below [m] set class to [n]
+-classify_intensity_between_as [m] [n] [o]: for intensity value between [m] and [n] set class to [o]
+-copy_B_into_intensity : copy blue color value to intensity
+-copy_G_into_intensity : copy green color value to intensity
+-copy_NIR_into_intensity : copy NIR into intensity
+-copy_RGB_into_intensity : copy weighted RGB value to intensity
+-copy_R_into_intensity : copy red color value to intensity
+-copy_attribute_into_I [n] : copy attribute [n] value into intensity
+-copy_attribute_into_intensity [n] : copy attribute [n] value into intensity
+-copy_intensity_into_NIR : copy intensity into NIR (NearInfraRed) value
+-copy_intensity_into_attribute [n] : copy intensity to attribute [n] value
+-copy_intensity_into_classification : copy intensity to classification
+-copy_intensity_into_register [n] : copy intensity value into register [n]
+-copy_intensity_into_z : copy intensity to z value
+-copy_register_into_intensity [n] : copy register [n] into point intensity value
+-drop_intensity_above [n] : drop points with intensity value above [n]
+-drop_intensity_below [n] : drop points with intensity value below [n]
+-drop_intensity_between [m] [n] : drop points with intensity value between [m] and [n]
+-iscale_intensity [n] : scale input intensity by [n]
+-itranslate_intensity [n] : translate input intensity by [n]
+-keep_NDVI_intensity_is_NIR [min] [max]: keep NDVI (Normalized Difference Vegetation Index) where intensity is NIR between [min] and [max]
+-keep_intensity [m] [n] : keep points with intensity between [m] and [n]
+-keep_intensity_above [n] : keep points with intensity value above [n]
+-keep_intensity_below [n] : keep points with intensity value below [n]
+-map_intensity [fnm] : set the intensity by map in file [fnm]
+-scale_intensity [n] : multiply intensity by [n]
+-set_intensity [n] : set intensity to [n]
+-switch_RGB_intensity_into_CIR : set R to intensity; G to R; B to G
+-translate_intensity [n] : translate intensity by [n]
+-translate_then_scale_intensity [m] [n]: translate intensity by [m] and scale by [n]
+
+### Raw point values
+-clamp_raw_z [min] [max] : limit raw z values to [min] and [max]
+-translate_raw_x [n] : translate raw x value by [n]
+-translate_raw_xy_at_random [x] [y] : translate raw xy values by random offsets of at most [x] [y]
+-translate_raw_xyz [x] [y] [z] : translate raw coordinates by [x] [y] [z]
+-translate_raw_y [n] : translate raw y value by [n]
+-translate_raw_z [n] : translate raw z value by [n]
+
+### Registers
+-add_registers [m] [n] [o] : add register [m] and [n] and store result in register [o]
+-copy_B_into_register [n] : copy blue color value into register [n]
+-copy_G_into_register [n] : copy green color value into register [n]
+-copy_NIR_into_register [n] : copy NearInfraRed value into register [n]
+-copy_R_into_register [n] : copy red color value into register [n]
+-copy_attribute_into_register [m] [n]: copy attribute [m] value into register [n]
+-copy_intensity_into_register [n] : copy intensity value into register [n]
+-copy_point_source_into_register [n]: copy point source into register [n]
+-copy_register_into_B [n] : copy register [n] into blue color value
+-copy_register_into_G [n] : copy register [n] into green color value
+-copy_register_into_I [n] : copy register [n] into intensity value
+-copy_register_into_NIR [n] : copy register [n] into NearInfraRed value
+-copy_register_into_R [n] : copy register [n] into red color value
+-copy_register_into_attribute [m] [n]: copy register [m] to attribute [n] value
+-copy_register_into_intensity [n] : copy register [n] into point intensity value
+-copy_register_into_point_source [n]: copy register [n] to point source
+-copy_register_into_user_data [n] : copy register [n] to user data
+-copy_register_into_x [n] : copy register [n] to x value
+-copy_register_into_y [n] : copy register [n] to y value
+-copy_register_into_z [n] : copy register [n] to z value
+-copy_user_data_into_register [n] : copy user data to register [n]
+-divide_registers [m] [n] [o] : divide register [m] by register [n] and store result in register [o]
+-multiply_registers [m] [n] [o] : multiply register [m] by register [n] and store result in register [o]
+-scale_register [m] [n] : scale register [m] by factor [n]
+-set_register [m] [n] : set register [m] to value [n]
+-subtract_registers [m] [n] [o] : subtract register [n] from register [m] and store result in register [o]
+-translate_register [m] [n] : translate register [m] value by [n]
+
+### Scan angle
+-bin_abs_scan_angle_into_point_source [n]: set point source to scan_angle/[n]
+-drop_abs_scan_angle_above [max] : drop points with absolute scan angle above [max]
+-drop_abs_scan_angle_below [min] : drop points with absolute scan angle below [min]
+-drop_scan_angle_above [n] : drop points with scan angle above [n]
+-drop_scan_angle_below [n] : drop points with scan angle below [n]
+-drop_scan_angle_between [m] [n] : drop points with scan angle between [m] and [n]
+-iscale_scan_angle [n] : scale input scan angle by [n]
+-itranslate_scan_angle [n] : translate input scan angle by [n]
+-keep_scan_angle [m] [n] : keep points with scan angle between [m] and [n]
+-keep_scan_angle_between [m] [n] : keep points with scan angle between [m] and [n]
+-scale_scan_angle [n] : scale scan angle by [n]
+-set_scan_angle [n] : set scan angle to [n]
+-translate_scan_angle [n] : translate scan angle by [n]
+-translate_then_scale_scan_angle [m] [n]: translate scan angle by [m] and scale by [n]
+
+### Tiles
+-keep_tile [x] [y] [size] : keep tile at lower-left [x] [y] with size [size]
+
+### Waveform packet
+-drop_wavepacket [n] : drop points with wavepacket value of [n]
+-flip_waveform_direction : flip the waveform direction in the waveform VLR
+-keep_wavepacket [n] : keep points with wavepacket value of [n]
+
+### Logical
+-filter_and : boolean AND combination of last 2 filters
+-filter_or : boolean OR combination of last 2 filters
+-filtered_transform : do the transformation only on points of the current filter
+
+### Input
+-i [fnp] : input file or input file mask [fnp] (e.g. *.laz;fo?.la?;esri.shp,...)
+-io_ibuffer [n] : use read-input-buffer of size [n] bytes
+-iparse [xyz] : define fields [xyz] for text input parser
+-ipts : input as PTS (plain text lidar source), store header in VLR
+-iptx : input as PTX (plain text extended lidar data), store header in VLR
+-iptx_transform : use PTX file header to transform point data
+-iskip [n] : skip [n] lines at the beginning of the text input
+-itxt : expect input as text file
+-lof [fnf] : use input out of a list of files [fnf]
+-merged : merge input files
+-stdin : pipe from stdin
+
+### Output
+-compatible : write LAS/LAZ output in compatibility mode
+-do_not_populate : do not populate header on output
+-io_obuffer [n] : use write-out-buffer of size [n] bytes
+-native : write LAS/LAZ output in native/actual mode
+-nil : pipe output to NULL (suppress output)
+-o [n] : use [n] as output file
+-obin : output as BIN (terrasolid binary)
+-ocut [n] : cut the last [n] characters from name
+-odir [n] : set output directory to [n]
+-odix [n] : set output file name suffix to [n]
+-oforce : force output creation also on errors or warnings
+-olas : output as LAS file
+-olaz : output as LAZ (compressed LAS)
+-oparse [xyz] : parse on-the-fly to ASCII using fields [xyz]
+-opts : output as PTS (plain text lidar data)
+-optx : output as PTX (plain text with header)
+-oqi : output in QFIT format (.qi) (ATM project, NASA)
+-oscale_rgb [n] : scale output RGB by [n]
+-osep [n] : set text output separator as char [n]
+-owrl : output as VRML (Virtual Reality Modeling Language) text
+-pipe_on : write output to command pipe, see also -stdin
+-populate : populate header on output
+-stdout : pipe to stdout
+-temp_files [n] : set base file name [n] for temp files (example: E:\tmp)
+
+### Basics
+-help : print help output
+
+### parse
+The '-parse [xyz]' flag specifies how to interpret
+each line of the ASCII file. For example, 'tsxyzssa'
+means that the first number is the gpstime, the next
+number should be skipped, the next three numbers are
+the x, y, and z coordinates, the next two should be
+skipped, and the next number is the scan angle.
+
+The other supported entries are:
+  x : [x] coordinate
+  y : [y] coordinate
+  z : [z] coordinate
+  t : gps [t]ime
+  R : RGB [R]ed channel
+  G : RGB [G]reen channel
+  B : RGB [B]lue channel
+  I : N[I]R channel of LAS 1.4 point type 8
+  s : [s]kip a string or a number that we don't care about
+  i : [i]ntensity
+  a : scan [a]ngle
+  n : [n]umber of returns of that given pulse
+  r : number of [r]eturn
+  h : with[h]eld flag
+  k : [k]eypoint flag
+  g : synthetic fla[g]
+  o : [o]verlap flag of LAS 1.4 point types 6, 7, 8
+  l : scanner channe[l] of LAS 1.4 point types 6, 7, 8
+  E : terrasolid [E]cho Encoding
+  c : [c]lassification
+  u : [u]ser data
+  p : [p]oint source ID
+  e : [e]dge of flight line flag
+  d : [d]irection of scan flag
+  0-9 : additional attributes described as extra bytes (0 through 9)
+  (13) : additional attributes described as extra bytes (10 and up)
+  H : a hexadecimal string encoding the RGB color
+  J : a hexadecimal string encoding the intensity
+
+## License
+
+This tool is free to use.
+
+## Support
+
+To get more information about a tool, just go to the
+[LAStools Google Group](http://groups.google.com/group/lastools/)
+and enter the tool name in the search function.
+You will find plenty of examples for this tool.
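+
+As a quick illustration of the '-parse' mini-language described above, here is
+a hedged example (file names are hypothetical): to import a whitespace-delimited
+text file whose columns are gpstime, x, y, z, and intensity, and write a
+compressed LAZ file, the parse string is 'txyzi':
+
+    las2las -i points.txt -itxt -iparse txyzi -olaz -o points.laz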
+
+To get further support see our
+[rapidlasso service page](https://rapidlasso.de/service/)
+
+Check for latest updates at
+https://rapidlasso.de/category/blog/releases/
+
+If you have any suggestions, please let us (support@rapidlasso.de) know.
+Jochen @rapidlasso
diff --git a/pdgpoints/cli.py b/pdgpoints/cli.py
new file mode 100644
index 0000000..7ce5359
--- /dev/null
+++ b/pdgpoints/cli.py
@@ -0,0 +1,36 @@
+from pathlib import Path
+import argparse
+from pyegt.defs import MODEL_LIST, REGIONS
+
+import logging as L
+from .pipeline import Pipeline
+
+def cli():
+    """
+    Parse the command options and arguments.
+    """
+    parser = argparse.ArgumentParser(prog='pdgpoints', description='Convert LiDAR files (LAS, LAZ) to Cesium tilesets.')
+    parser.add_argument('-c', '--copy_i_to_rgb', action='store_true', help='Whether to copy intensity values to RGB')
+    parser.add_argument('-m', '--merge', action='store_true', help='Whether to use merge function')
+    parser.add_argument('-a', '--archive', action='store_true', help='Whether to archive the input dataset')
+    parser.add_argument('-s', '--rgb_scale', type=float, default=1.0, help='Scale multiplier for RGB values')
+    parser.add_argument('-z', '--translate_z', type=float, default=0.0, help='Float translation for z values')
+    parser.add_argument('-g', '--from_geoid', choices=MODEL_LIST, default=None, help='The geoid, tidal, or geopotential model to translate from')
+    parser.add_argument('-r', '--geoid_region', choices=REGIONS, default=REGIONS[0], help='The NGS region (https://vdatum.noaa.gov/docs/services.html#step140)')
+    parser.add_argument('-f', '--file', type=str, required=True, help='The file to process')
+
+    args = parser.parse_args()
+    p = Path(args.file)
+    if not p.is_file():
+        L.error('No file at %s' % (p))
+        exit(1)
+
+    pipeline = Pipeline(f=args.file,
+                        intensity_to_RGB=args.copy_i_to_rgb,
+                        merge=args.merge,
+                        archive=args.archive,
+                        rgb_scale=args.rgb_scale,
+                        translate_z=args.translate_z,
+                        from_geoid=args.from_geoid,
+                        geoid_region=args.geoid_region)
+    pipeline.run()
\ No newline at end of file
diff --git a/pdgpoints/defs.py b/pdgpoints/defs.py
new file mode 100644
index 0000000..13d748b
--- /dev/null
+++ b/pdgpoints/defs.py
@@ -0,0 +1,20 @@
+import json
+from pathlib import Path
+from datetime import datetime
+
+from ._version import __version__
+
+Y = datetime.now().year
+HELP_TXT = '''
+~~ pdgpoints version %s ~~
+  Ian Nesbitt / NCEAS %s
+''' % (__version__, Y)
+
+MOD_LOC = Path(__file__).parent.absolute()
+BIN_LOC = MOD_LOC.joinpath('bin')
+LAS2LAS_LOC = BIN_LOC.joinpath('las2las')
+LASINFO_LOC = BIN_LOC.joinpath('lasinfo')
+
+LOGCONFIG = MOD_LOC.joinpath('log/config.json')
+with open(LOGCONFIG, 'r') as lc:
+    LOGGING_CONFIG = json.load(lc)
diff --git a/pdgpoints/geoid.py b/pdgpoints/geoid.py
new file mode 100644
index 0000000..1269ad8
--- /dev/null
+++ b/pdgpoints/geoid.py
@@ -0,0 +1,145 @@
+from typing import Union, Literal
+from pyproj import CRS, Transformer
+from logging import getLogger
+
+from pyegt.height import HeightModel
+from pyegt.utils import model_search
+
+def use_model(user_vrs: Union[str, Literal[None]]=None,
+              las_vrs: Union[str, Literal[None]]=None, # overrides user_vrs.
+                                                       # consequently implies we trust file headers;
+                                                       # this is done to support projects with multiple CRS
+                                                       # and to enforce correct CRS info in database
+              ) -> Union[str, int]:
+    """
+    Get the geoid, tidal, or geopotential model
+    in order to calculate ellipsoid height.
+
+    The following figure demonstrates the difference between geoid, ellipsoid,
+    and topographic ground surface:
+
+    .. figure:: https://user-images.githubusercontent.com/18689918/239385604-5b5dd0df-e2fb-4ea9-90e7-575287a069e6.png
+        :align: center
+
+        Diagram showing the conceptual model of ellipsoid height ``h``, geoid
+        height ``H``, and height of geoid relative to ellipsoid ``N``,
+        along with the topographic surface (grey).
+
+    Ellipsoidal height (``h``) is generally used in global projections such as
+    Cesium due to its small digital footprint and ease of calculation relative
+    to systems based on gravity or geopotential height. However, the earth and
+    tides are influenced by local differences in gravitational pull and other
+    factors. Therefore, some engineering projects and local reference systems
+    use height referenced to a geoid or tidal model (``H``), which provides a
+    more intuitive framework for understanding height relative to, for example,
+    modeled mean sea level or sea level potential. Converting from ``H`` to ``h``
+    requires knowing the height difference between the geoid and the ellipsoid
+    (``N``). Conversion is then a simple addition of these values (``H + N = h``).
+
+    .. note::
+
+        ``las_vrs`` is set by file headers and overrides ``user_vrs``.
+        This implicitly means we trust file headers over user input.
+        We do this to support projects with multiple VRS (i.e. ``user_vrs``
+        values are used solely to fill in gaps where headers do not explicitly
+        specify a vertical reference system). It is also meant to enforce the
+        accuracy of VRS information in file headers.
+
+        If a project should need to set or change VRS information prior to
+        uploading to the database, they are encouraged to do so with
+        third-party software such as
+        `LAStools <https://lastools.github.io/>`_.
+
+    The 9 possible input scenarios and their outcomes::
+
+        # 1. matched las_vrs   / matched user_vrs   -> las_vrs
+        # 2. matched las_vrs   / unmatched user_vrs -> las_vrs
+        # 3. matched las_vrs   / empty user_vrs     -> las_vrs
+        # 4. empty las_vrs     / empty user_vrs     -> 0
+        # 5. empty las_vrs     / matched user_vrs   -> user_vrs
+        # 6. empty las_vrs     / unmatched user_vrs -> exit(1)
+        # 7. unmatched las_vrs / empty user_vrs     -> exit(1)
+        # 8. unmatched las_vrs / matched user_vrs   -> exit(1) # in the future, a geoid_override setting could allow this case
+        # 9. unmatched las_vrs / unmatched user_vrs -> exit(1)
+
+    :param user_vrs: The user-specified geoid, tidal, or geopotential model to convert from if none is found in the file header
+    :type user_vrs: str or None
+    :param las_vrs: The vertical reference system parsed from the file header, if any (overrides ``user_vrs``)
+    :type las_vrs: str or None
+    :return: The model name to use for lookup, or ``0`` if no model is specified
+    :rtype: str or int
+    """
+    L = getLogger(__name__)
+    vrs = None
+    L.debug(f'user_vrs={user_vrs}, las_vrs={las_vrs}')
+    if las_vrs:
+        # override user value with detected VRS
+        vrs = model_search(las_vrs)
+        L.debug(f'after model_search(las_vrs): vrs={vrs}')
+        if user_vrs and vrs:
+            # scenarios 1 and 2
+            L.info('User value of "%s" will be overridden by detected VRS "%s"' % (user_vrs, vrs))
+        if user_vrs and (not vrs):
+            # scenarios 8 and 9
+            L.error('No vertical reference system matching header VRS "%s" found' % (las_vrs))
+            exit(1)
+        if (not user_vrs) and (not vrs):
+            # scenario 7
+            L.error('No vertical reference system matching header VRS "%s" found' % (las_vrs))
+            exit(1)
+        # scenario 3 needs no action: a matched las_vrs is already stored in vrs
+    else:
+        if not user_vrs:
+            # scenario 4
+            return 0
+        else:
+            vrs = model_search(user_vrs)
+            L.debug(f'after model_search(user_vrs): vrs={vrs}')
+            if vrs:
+                # scenario 5
+                L.info('VRS found: %s (user-specified)' % (vrs))
+            else:
+                # scenario 6
+                L.error('Could not find VRS matching value "%s"' % (user_vrs))
+                exit(1)
+    return vrs
+
+def crs_to_wgs84(x: Union[str, int, float], y: Union[str, int, float], from_crs: Union[CRS, int, str]):
+    """
+    Convert grid coordinates to cartographic (lat/lon) in order to use the
+    :py:class:`pyegt.height.HeightModel` API lookup.
+
+    :param x: The X-coordinate to convert to longitude
+    :type x: str or int or float
+    :param y: The Y-coordinate to convert to latitude
+    :type y: str or int or float
+    :param from_crs: The projected coordinate reference system to convert from
+    :type from_crs: pyproj.crs.CRS or int or str
+    :return: The lat and lon position equivalent to the X and Y position in the input CRS
+    :rtype: tuple(float, float)
+    """
+    if isinstance(from_crs, int):
+        crs = CRS.from_epsg(from_crs)
+    elif isinstance(from_crs, CRS):
+        crs = from_crs
+    else:
+        if "AUTHORITY" in from_crs:
+            # a WKT string
+            crs = CRS.from_wkt(from_crs)
+        else:
+            crs = CRS.from_string(from_crs)
+    wgs84 = CRS.from_epsg(4326)
+    t = Transformer.from_crs(crs_from=crs, crs_to=wgs84)
+    return t.transform(xx=float(x), yy=float(y))
+
+def get_adjustment(lat: float, lon: float, model: str, region: str):
+    """
+    Get the modeled height of a specified location and a specified geoid or
+    tidal model from :py:class:`pyegt.height.HeightModel`.
+
+    :param float lat: Decimal latitude
+    :param float lon: Decimal longitude
+    :param str model: The geoid or tidal model to query the height of
+    :param str region: The geoid or tidal region (for options, see :py:data:`pyegt.defs.REGIONS`)
+    :return: The ellipsoid height of the given geoid model at the given location
+    :rtype: pyegt.height.HeightModel
+    """
+    return HeightModel(lat=lat, lon=lon, from_model=model, region=region)
diff --git a/pdgpoints/lastools_iface.py b/pdgpoints/lastools_iface.py
new file mode 100644
index 0000000..3d64768
--- /dev/null
+++ b/pdgpoints/lastools_iface.py
@@ -0,0 +1,227 @@
+from pathlib import Path
+from typing import IO, Tuple, Union
+from subprocess import Popen, PIPE, STDOUT, CalledProcessError, check_output
+from datetime import datetime
+import pandas as pd
+from logging import getLogger
+
+from .defs import LAS2LAS_LOC, LASINFO_LOC
+from . import utils
+
+def log_subprocess_output(pipe: IO[bytes]):
+    """
+    Log the output from a lastools subprocess.
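+
+    A minimal illustrative use, mirroring how :py:func:`run_proc` calls it
+    (the input file name is hypothetical)::
+
+        process = Popen([LASINFO_LOC, '-i', 'file.laz', '-stdout'], stdout=PIPE, stderr=STDOUT)
+        with process.stdout:
+            log_subprocess_output(process.stdout)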
+
+    :param pipe: The pipe to listen to
+    :type pipe: typing.IO[bytes]
+    """
+    L = getLogger(__name__)
+    try:
+        for line in iter(pipe.readline, b''): # b'\n'-separated lines
+            L.info('subprocess output: %r', line.decode('utf-8').strip())
+    except CalledProcessError as e:
+        L.error("Subprocess Error> %s: %s" % (repr(e), str(e)))
+
+def run_proc(command: list[str],
+             get_wkt: bool=False) -> Union[str, None]:
+    """
+    Start a subprocess with a given command.
+
+    :param list command: List of command arguments
+    :param bool get_wkt: Whether to grep the well-known text (WKT) string from lasinfo output
+    :return: Well-known text (WKT) of the file's coordinate reference system (CRS), or ``None`` if ``get_wkt`` is False
+    :rtype: str or None
+    """
+    L = getLogger(__name__)
+    L.debug('Command args: %s' % (command))
+    process = Popen(command,
+                    stdout=PIPE,
+                    stderr=STDOUT)
+    if get_wkt:
+        wktstr = check_output(('grep', 'EPSG'), stdin=process.stdout).decode().strip().strip('\n')
+    # pass pipe to be parsed and logged
+    with process.stdout:
+        log_subprocess_output(process.stdout)
+    # wait for the subprocess to finish
+    exitcode = process.wait()
+    if exitcode != 0:
+        L.error('Subprocess exited with nonzero exit code; check log output')
+        exit(1)
+    if get_wkt:
+        return wktstr
+
+def lasinfo(f: Path) -> Tuple[Union[int, None], Union[int, None], str, Path, str, str]:
+    """
+    Use lasinfo to extract CRS info (in EPSG format) from a LAS or LAZ point cloud file.
+
+    :param f: The input file
+    :type f: pathlib.Path
+    :return: The horizontal and vertical EPSG codes, the CRS as WKT, the path of the WKT file, and the horizontal and vertical CRS names
+    :rtype: tuple
+    """
+    L = getLogger(__name__)
+    lasinfostart = utils.timer()
+    command = [
+        LASINFO_LOC,
+        '-i', f,
+        '-nc', # shaves a lot of time off large jobs by telling lasinfo not to compute min/maxes
+        '-stdout',
+    ]
+    wkt = run_proc(command=command, get_wkt=True)
+    L.debug('WKT string: %s' % (wkt))
+    crs, epsg_h, epsg_v, h_name, v_name = utils.get_epsgs_from_wkt(wkt)
+    cpd = 'Compound ' if crs.is_compound else ''
+    L.info('%sCRS: %s' % (cpd, h_name))
+    L.info('%sVRS: %s' % (cpd, v_name))
+    L.debug('%sCRS object: \n%s' % (cpd, repr(crs)))
+    wktf = Path(str(f) + '-wkt.txt')
+    L.info('Writing WKT to %s' % (wktf))
+    utils.write_wkt_to_file(f=wktf, wkt=wkt)
+    L.info('Finished lasinfo (%s sec / %.1f min)' % utils.timer(lasinfostart))
+    return epsg_h, epsg_v, wkt, wktf, h_name, v_name
+
+def lasmean(f: Path,
+            name: str="none"):
+    """
+    Use las2las to write the X and Y values of a dataset to text,
+    then return the mean of those points. To save resources,
+    only every 10,000th point is sampled.
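+
+    A hypothetical call (the path and CRS name are illustrative)::
+
+        x, y, xyf = lasmean(Path('cloud.laz'), name='NAD83 / UTM zone 18N')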
+
+    :param f: The input file
+    :type f: str or pathlib.Path
+    :param str name: The name of the coordinate reference system in use
+    :return: Mean X and Y of the dataset, and the location of the ASCII file used to calculate these
+    :rtype: float, float, pathlib.Path
+    """
+    L = getLogger(__name__)
+    lasmeanstart = utils.timer()
+    xyf = Path(str(f) + '-xy.txt')
+    L.info("Writing abridged XY file to %s" % (xyf))
+    command = [
+        LAS2LAS_LOC,
+        '-i', f,
+        '-keep_every_nth', '10000',
+        '-o', str(xyf),
+        '-oparse', 'xy'
+    ]
+    run_proc(command=command)
+    df = pd.read_csv(xyf, sep=' ', header=None, names=['x', 'y'])
+    mean = df.mean()
+    L.info('X mean: %.3f Y mean: %.3f (%s)' % (mean.x, mean.y, name))
+    L.info('Finished las2las (%s sec / %.1f min)' % utils.timer(lasmeanstart))
+    return mean.x, mean.y, xyf
+
+def las2las_ogc_wkt(f: Path,
+                    output_file: Path):
+    """
+    Use las2las to write CRS info in OGC WKT format to the output file.
+
+    :param f: The input file
+    :type f: str or pathlib.Path
+    :param output_file: The output file
+    :type output_file: str or pathlib.Path
+    """
+    L = getLogger(__name__)
+    las2lasstart = datetime.now()
+    # construct command
+    command = [
+        LAS2LAS_LOC,
+        '-i', f,
+        '-set_ogc_wkt',
+        '-o', output_file
+    ]
+    run_proc(command=command)
+    las2lastime = (datetime.now() - las2lasstart).seconds
+    L.info('Finished las2las (%s sec / %.1f min)' % (las2lastime, las2lastime/60))
+
+def las2las(f: Path,
+            output_file: Path,
+            #out_crs: str='4326',
+            archive_dir: Path=Path(''),
+            archive: bool=False,
+            intensity_to_RGB: bool=False,
+            rgb_scale: float=1.0,
+            translate_z: float=0.0):
+    """
+    Simple wrapper around las2las to repair and rework LAS files.
+    LAS is rewritten with valid VLRs to correct errors propagated by processing suites,
+    e.g. QT Modeler, so the output can be read by software that is picky about LAS
+    format, e.g. PDAL. Reprojection for display in Cesium happens later, in the
+    tiling step; this function optionally copies (scaled) intensity values into
+    RGB for viewing and applies a Z translation.
+    Commands are written to the log, as is STDOUT from las2las.
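+
+    An illustrative call (paths and values are hypothetical)::
+
+        las2las(f=Path('input.laz'),
+                output_file=Path('rewrite/input.las'),
+                intensity_to_RGB=True,
+                rgb_scale=4.0,
+                translate_z=-8.3)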
+
+    :param f: The input file
+    :type f: str or pathlib.Path
+    :param output_file: The output file
+    :type output_file: str or pathlib.Path
+    :param archive_dir: Location to archive input file, if applicable
+    :type archive_dir: str or pathlib.Path
+    :param bool archive: Whether or not to archive input files
+    :param bool intensity_to_RGB: Whether or not to copy intensity values to RGB
+    :param float rgb_scale: RGB scale multiplier
+    :param float translate_z: Z translation value
+    """
+    L = getLogger(__name__)
+    las2lasstart = datetime.now()
+    # the WKT sidecar file written by lasinfo()
+    wktf = str(f) + '-wkt.txt'
+
+    if intensity_to_RGB:
+        L.info('Copying intensity to RGB by exploding and reforming LAS fields')
+        # read side of the pipe: explode LAS to text, writing intensity four times (i, then R, G, B)
+        read_command = [
+            LAS2LAS_LOC,
+            '-i', f,
+            '-scale_intensity', '%s' % (rgb_scale),
+            '-translate_z', '%s' % (translate_z),
+            '-otxt',
+            '-oparse', 'xyziiiitanr',
+            '-stdout'
+        ]
+        # write side of the pipe: reassemble LAS from text, reading the repeated intensity fields as RGB
+        write_command = [
+            LAS2LAS_LOC,
+            '-stdin',
+            '-itxt',
+            '-iparse', 'xyziRGBtanr',
+            '-scale_rgb_up',
+            '-load_ogc_wkt', wktf,
+            '-o', output_file
+        ]
+        L.debug('Cmd L of pipe: %s' % read_command)
+        L.debug('Cmd R of pipe: %s' % write_command)
+        r_process = Popen(read_command, stdout=PIPE)
+        w_process = Popen(write_command, stdin=r_process.stdout, stdout=PIPE)
+        r_process.stdout.close()
+        output = w_process.communicate()[0]
+        L.debug('Piped cmd output: %s' % output)
+    else:
+        L.info('Rewriting LAS to avoid VLR size errors (e.g. PDAL reading QTModeler files)')
+        command = [
+            LAS2LAS_LOC,
+            '-i', f,
+            '-load_ogc_wkt', wktf,
+            '-translate_z', '%s' % (translate_z),
+            '-o', output_file
+        ]
+        run_proc(command=command)
+
+    if archive:
+        # move the input file to the archive
+        try:
+            assert (str(archive_dir) != '')
+            an = archive_dir.joinpath(f.name)
+            L.info('Archiving to %s' % (an))
+            f.rename(an)
+        except AssertionError:
+            L.error('Archiving is on but no archive directory set! Cannot archive files!')
+        except Exception as e:
+            L.error('%s: %s' % (repr(e), e))
+
+    las2lastime = (datetime.now() - las2lasstart).seconds
+    L.info('Finished las2las (%s sec / %.1f min)' % (las2lastime, las2lastime/60))
diff --git a/pdgpoints/log/config.json b/pdgpoints/log/config.json
new file mode 100644
index 0000000..3f6537c
--- /dev/null
+++ b/pdgpoints/log/config.json
@@ -0,0 +1,43 @@
+{
+    "version": 1,
+    "disable_existing_loggers": true,
+    "formatters": {
+        "standard": {
+            "format": "%(asctime)s:%(levelname)s:%(name)s.%(funcName)s: %(message)s",
+            "datefmt": "%Y-%m-%dT%H:%M:%S"
+        }
+    },
+    "handlers": {
+        "stream": {
+            "level": "INFO",
+            "formatter": "standard",
+            "class": "logging.StreamHandler",
+            "stream": "ext://sys.stdout"
+        },
+        "errorfile": {
+            "level": "WARNING",
+            "formatter": "standard",
+            "class": "logging.FileHandler",
+            "filename": "/var/log/viz-points/pdgpoints.err",
+            "mode": "a"
+        },
+        "debugfile": {
+            "level": "DEBUG",
+            "formatter": "standard",
+            "class": "logging.handlers.RotatingFileHandler",
+            "filename": "/var/log/viz-points/pdgpoints.log",
+            "mode": "a",
+            "maxBytes": 1048576,
+            "backupCount": 10
+        }
+    },
+    "loggers": {
+        "": {
+            "handlers": [
+                "stream", "debugfile", "errorfile"
+            ],
+            "level": "INFO",
+            "propagate": false
+        }
+    }
+}
\ No newline at end of file
diff --git a/pdgpoints/pipeline.py b/pdgpoints/pipeline.py
new file mode 100644
index 0000000..d5b9a4a
--- /dev/null
+++ b/pdgpoints/pipeline.py
@@ -0,0 +1,172 @@
+from pathlib import Path
+from typing import Union, Literal
+from pyegt.defs import REGIONS
+from logging import getLogger
+
+from . import utils
+from . import geoid
+from . import lastools_iface
+from . import py3dtiles_iface
+
+class Pipeline():
+    """
+    The LiDAR processing pipeline.
+    Takes input point cloud files of any type supported by lastools
+    and outputs 3dtiles files.
+
+    :param f: The LAS file to process
+    :type f: str or pathlib.Path
+    :param bool merge: Whether to use py3dtiles.merger.merge() to incorporate the processed dataset into an existing set of 3dtiles datasets
+    :param bool intensity_to_RGB: Whether to copy intensity values to RGB (straight copy I->R I->G I->B, so will show up as greyscale)
+    :param rgb_scale: Scale multiplier for RGB values
+    :type rgb_scale: float or int or False
+    :param translate_z: Translation to apply to z values
+    :type translate_z: float or int or False
+    :param from_geoid: The geoid, tidal, or geopotential model to translate from, if any
+    :type from_geoid: str or None
+    :param str geoid_region: The NGS region to use for geoid model lookups
+    :param bool archive: Whether to archive the input dataset to an `./archive` directory
+    """
+    def __init__(self,
+                 f: Path,
+                 merge: bool=True,
+                 intensity_to_RGB: bool=False,
+                 rgb_scale: Union[float, int, Literal[False]]=False,
+                 translate_z: Union[float, int, Literal[False]]=False,
+                 from_geoid: Union[str, Literal[None]]=None,
+                 geoid_region: str=REGIONS[0],
+                 archive: bool=False):
+        """
+        Initialize the processing pipeline.
+
+        :param f: The LAS file to process
+        :type f: pathlib.Path
+        :param bool merge: Whether to use py3dtiles.merger.merge() to incorporate the processed dataset into an existing set of 3dtiles datasets
+        :param bool intensity_to_RGB: Whether to copy intensity values to RGB (straight copy I->R I->G I->B, so will show up as greyscale)
+        :param rgb_scale: Scale multiplier for RGB values
+        :type rgb_scale: float or int or False
+        :param translate_z: Translation to apply to z values
+        :type translate_z: float or int or False
+        :param from_geoid: The geoid, tidal, or geopotential model to translate from, if any
+        :type from_geoid: str or None
+        :param str geoid_region: The NGS region to use for geoid model lookups
+        :param bool archive: Archive the input dataset to `./archive` directory
+        """
+        super().__init__()
+        self.starttime = utils.timer()
+        self.auto = False # whether this is an auto-processing run
+        self.L = getLogger(__name__)
+        self.L.debug('Initializing pipeline.')
+        self.f = Path(f).absolute()
+        self.base_dir = self.f.parent.absolute()
+        self.bn = self.f.name
+        self.given_name = self.f.stem
+        self.ext = self.f.suffix
+        self.ogcwkt_name = self.base_dir / ('%s-wkt.laz' % (self.given_name))
+        self.rewrite_dir = self.base_dir / 'rewrite'
+        self.archive_dir = self.base_dir / 'archive'
+        self.out_dir = self.base_dir / '3dtiles'
+        self.las_name = self.rewrite_dir / ('%s.las' % (self.given_name))
+        self.intensity_to_RGB = intensity_to_RGB
+        try:
+            self.rgb_scale = float(rgb_scale) if rgb_scale else 1.
+        except ValueError:
+            self.L.warning('Could not convert RGB scale value to float. Not scaling RGB values.')
+            self.rgb_scale = 1.
+        try:
+            self.translate_z = float(translate_z) if translate_z else 0.
+        except ValueError:
+            self.L.warning('Could not convert Z-translation value to float. Not translating Z values.')
+            self.translate_z = 0.
+        self.las_crs = None
+        self.x = None
+        self.y = None
+        self.from_geoid = from_geoid
+        self.geoid_region = geoid_region
+        self.ellips_lkup = None
+        self.geoid_adj = 0
+        self.archive = archive
+        self.merge = merge
+        self.steps = 4
+        self.steps = self.steps + 1 if merge else self.steps
+        self.steps = self.steps + 1 if from_geoid else self.steps
+        self.step = 1
+        utils.log_init_stats(self)
+
+    def run(self) -> Path:
+        """
+        Process the input LAS file.
+
+        :return: The path of the output directory
+        :rtype: pathlib.Path
+        """
+        L = getLogger(__name__)
+        for d in [self.rewrite_dir, self.archive_dir, self.out_dir]:
+            self.L.info('Creating dir %s' % (d))
+            utils.make_dirs(d)
+
+        L.info('Rewriting file with new OGC WKT... (step %s of %s)' % (self.step, self.steps))
+        lastools_iface.las2las_ogc_wkt(f=self.f,
+                                       output_file=self.ogcwkt_name)
+
+        self.step += 1
+        L.info('Doing lasinfo dump... (step %s of %s)' % (self.step, self.steps))
+        self.las_crs, las_vrs, self.wkt, wktf, h_name, v_name = lastools_iface.lasinfo(f=self.ogcwkt_name)
+
+        xyf = None # only written if we sample coordinates for a geoid lookup below
+        if self.from_geoid or las_vrs:
+            L.info('self.from_geoid="%s", las_vrs="%s"' % (self.from_geoid, las_vrs))
+            self.step += 1
+            L.info('Getting mean lat/lon from las file... (step %s of %s)' % (self.step, self.steps))
+            self.x, self.y, xyf = lastools_iface.lasmean(f=self.ogcwkt_name, name=h_name)
+            self.lat, self.lon = geoid.crs_to_wgs84(x=self.x, y=self.y,
+                                                    from_crs=self.las_crs)
+            L.info('Resolving geoid/tidal model... (step %s of %s)' % (self.step, self.steps))
+            self.from_geoid = geoid.use_model(user_vrs=self.from_geoid,
+                                              las_vrs=las_vrs)
+            L.info('Looking up ellipsoid height of %s at (%.3f, %.3f)... (step %s of %s)' % (self.from_geoid,
+                                                                                             self.lat, self.lon,
+                                                                                             self.step,
+                                                                                             self.steps))
+            self.ellips_lkup = geoid.get_adjustment(lat=self.lat,
+                                                    lon=self.lon,
+                                                    model=self.from_geoid,
+                                                    region=self.geoid_region)
+            self.geoid_adj = float(self.ellips_lkup)
+            L.info('Manual Z transformation: %.3f' % (self.translate_z))
+            L.info('Geoid height adjustment: %.3f' % (self.geoid_adj))
+            if self.ellips_lkup:
+                self.translate_z = self.translate_z + self.geoid_adj
+                L.info('Translating Z values by %.3f' % (self.translate_z))
+            else:
+                raise LookupError('Could not get ellipsoid height of %s at (%.3f, %.3f)' % (self.from_geoid,
+                                                                                            self.lat,
+                                                                                            self.lon))
+
+        self.step += 1
+        L.info('Starting las2las rewrite... (step %s of %s)' % (self.step, self.steps))
+        lastools_iface.las2las(f=self.ogcwkt_name,
+                               output_file=self.las_name,
+                               archive_dir=self.archive_dir,
+                               intensity_to_RGB=self.intensity_to_RGB,
+                               archive=self.archive,
+                               rgb_scale=self.rgb_scale,
+                               translate_z=self.translate_z)
+
+        self.step += 1
+        L.info('Starting tiling process... (step %s of %s)' % (self.step, self.steps))
+        py3dtiles_iface.tile(f=self.las_name,
+                             out_dir=self.out_dir,
+                             las_crs=self.las_crs,
+                             out_crs='4978')
+
+        if self.merge:
+            self.step += 1
+            L.info('Starting merge process... (step %s of %s)' % (self.step, self.steps))
+            py3dtiles_iface.merge(dir=self.out_dir,
+                                  overwrite=True)
+
+        L.info('Cleaning up processing artifacts.')
+        files = [self.ogcwkt_name, wktf]
+        if xyf:
+            files.append(xyf)
+        if not self.archive:
+            files.append(self.las_name)
+        L.debug('Removing files: %s' % (files))
+        utils.rm_files(files=files)
+
+        s, m = utils.timer(self.starttime)
+        L.info('Finished processing %s (%s sec / %.1f min)' % (self.bn, s, m))
+
+        return self.out_dir
diff --git a/pdgpoints/py3dtiles_iface.py b/pdgpoints/py3dtiles_iface.py
new file mode 100644
index 0000000..9b759a7
--- /dev/null
+++ b/pdgpoints/py3dtiles_iface.py
@@ -0,0 +1,114 @@
+import glob
+from pathlib import Path
+from typing import Union
+from py3dtiles import convert, merger
+from py3dtiles.utils import str_to_CRS
+from logging import getLogger
+
+from . import utils
+
+def log_tileset_error(e: Union[ValueError, RuntimeError]):
+    """
+    Log the error py3dtiles throws when the user tries to merge a single dataset.
+
+    :param e: Error object
+    :type e: ValueError or RuntimeError
+    """
+    L = getLogger(__name__)
+    L.error('Got "%s" error from py3dtiles.merger.merge' % (repr(e)))
+    L.warning('The above error means that there was only one tileset directory '
+              'in the output folder. The merged tileset could not be created. '
+              'Add another tileset to allow the merge to work.')
+
+def rm_file(f: Path):
+    """
+    Remove a file.
+
+    :param f: File to remove
+    :type f: pathlib.Path
+    """
+    L = getLogger(__name__)
+    try:
+        L.info('Cleaning up previous merge artifact %s' % (f))
+        f.unlink()
+    except FileNotFoundError:
+        L.warning('FileNotFoundError caught when deleting %s. This is usually harmless.' % (f))
+
+def tile(f: Path,
+         out_dir: Path,
+         las_crs: str,
+         out_crs: str='4978'):
+    """
+    Use py3dtiles.convert to create 3dtiles from a LAS or LAZ file.
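+
+    An illustrative call (paths and EPSG codes are hypothetical)::
+
+        tile(f=Path('rewrite/cloud.las'),
+             out_dir=Path('3dtiles'),
+             las_crs='32618',
+             out_crs='4978')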
+
+    :param f: LAS or LAZ file to convert to 3dtiles
+    :type f: pathlib.Path
+    :param out_dir: The output directory to store 3dtiles subdirectory in
+    :type out_dir: pathlib.Path
+    :param str las_crs: Coordinate reference system (CRS) of the input LAS file
+    :param str out_crs: CRS of the output tileset
+    """
+    L = getLogger(__name__)
+    tilestart = utils.timer()
+    L.info('File: %s' % (f))
+    L.info('Creating tile directory')
+    fndir = out_dir / f.stem
+    CRSi = str_to_CRS(las_crs)
+    CRSo = str_to_CRS(out_crs)
+    L.info('CRS to convert from: %s' % (CRSi))
+    L.info('CRS to convert to: %s' % (CRSo))
+
+    # note: _Convert is not part of py3dtiles' public API, but it exposes
+    # options (e.g. force_crs_in) that the convert() helper does not
+    converter = convert._Convert(files=f,
+                                 outfolder=fndir,
+                                 overwrite=True,
+                                 crs_in=CRSi,
+                                 crs_out=CRSo,
+                                 force_crs_in=True,
+                                 rgb=True,
+                                 benchmark=True,
+                                 verbose=False)
+    converter.convert()
+
+    L.info('Finished tiling (%s sec / %.1f min)' % utils.timer(tilestart))
+
+
+def merge(dir: Path,
+          overwrite: bool=False):
+    """
+    Use py3dtiles.merger.merge() to merge more than one 3dtiles dataset.
+    This function will search for `tileset.json` files in subdirectories
+    of the input directory (e.g. `input_dir/ds1/tileset.json`,
+    `input_dir/ds2/tileset.json`).
+
+    :param dir: Directory to search for tileset subdirectories in
+    :type dir: pathlib.Path
+    :param bool overwrite: Whether to overwrite an existing merged tileset in the output directory (default: False)
+    """
+    L = getLogger(__name__)
+    L.info('Output dir: %s' % dir)
+    mergestart = utils.timer()
+
+    paths = [Path(path) for path in glob.glob(str(dir.joinpath('*', 'tileset.json')))]
+    ts_path = Path(dir.joinpath('tileset.json'))
+    r_path = Path(dir.joinpath('r.pnts'))
+
+    if overwrite:
+        for f in [ts_path, r_path]:
+            if f.is_file():
+                rm_file(f)
+
+    try:
+        merger.merge_from_files(tileset_paths=paths,
+                                output_tileset_path=ts_path,
+                                overwrite=overwrite,
+                                force_universal_merger=True)
+    except (RuntimeError, ValueError) as e:
+        log_tileset_error(e)
+
+    L.info('Finished merge (%s sec / %.1f min)' % utils.timer(mergestart))
diff --git a/pdgpoints/test.py b/pdgpoints/test.py
new file mode 100644
index 0000000..73e4b3a
--- /dev/null
+++ b/pdgpoints/test.py
@@ -0,0 +1,30 @@
+from pathlib import Path
+from logging import getLogger
+
+from .pipeline import Pipeline
+from .defs import MOD_LOC
+
+E = MOD_LOC.joinpath('testdata/lp_jumps_e.laz')
+W = MOD_LOC.joinpath('testdata/lp_jumps_w.laz')
+
+def test(f: list[Path]=[E, W],
+         verbose: bool=True):
+    """
+    Run both halves of the test dataset through the library to test functionality.
+
+    :param list f: Two halves of the test dataset to be processed
+    :param bool verbose: Whether to log more messages
+    """
+    L = getLogger(__name__)
+    merge = False
+    for laz in f:
+        p = Pipeline(f=laz,
+                     intensity_to_RGB=True,
+                     merge=merge,
+                     archive=True,
+                     rgb_scale=2.0,
+                     #translate_z=-28.143, # geoid height at https://geodesy.noaa.gov/api/geoid/ght?lat=44.25&lon=-73.96
+                     from_geoid='GEOID18')
+        p.run()
+        merge = True
diff --git a/pdgpoints/testdata/README.md b/pdgpoints/testdata/README.md
new file mode 100644
index 0000000..4e5789e
--- /dev/null
+++ b/pdgpoints/testdata/README.md
@@ -0,0 +1,6 @@
+# Test data
+
+Data is from the [USGS Lidar Point Cloud NY Cl-Ess-LChamp-P2 2014](https://portal.opentopography.org/usgsDataset?dsid=USGS_LPC_NY_Cl_Ess_LChamp_P2_2014_LAS_2016)
+and shows the Olympic Jumping Complex in Lake Placid, NY, USA (shown below).
+
+![Test dataset image](lp.png)
\ No newline at end of file
diff --git a/pdgpoints/testdata/lp.png b/pdgpoints/testdata/lp.png
new file mode 100644
index 0000000..ed8c692
Binary files /dev/null and b/pdgpoints/testdata/lp.png differ
diff --git a/pdgpoints/testdata/lp_jumps_e.laz b/pdgpoints/testdata/lp_jumps_e.laz
new file mode 100644
index 0000000..7f5c0a3
Binary files /dev/null and b/pdgpoints/testdata/lp_jumps_e.laz differ
diff --git a/pdgpoints/testdata/lp_jumps_w.laz b/pdgpoints/testdata/lp_jumps_w.laz
new file mode 100644
index 0000000..4d7fb8c
Binary files /dev/null and b/pdgpoints/testdata/lp_jumps_w.laz differ
diff --git a/pdgpoints/utils.py b/pdgpoints/utils.py
new file mode 100644
index 0000000..fddeee5
--- /dev/null
+++ b/pdgpoints/utils.py
@@ -0,0 +1,127 @@
+from pathlib import Path
+from datetime import datetime
+from typing import Tuple, Union
+from pyproj import CRS
+from logging import getLogger
+
+def timer(time: Union[datetime, bool]=False) -> Union[datetime, Tuple[int, float]]:
+    """
+    Start a timer if no argument is supplied; otherwise stop it and report the seconds and minutes elapsed.
+
+    :param time: The start time returned by a previous call, or False to start a new timer
+    :type time: bool or datetime.datetime
+    :return: If no time is supplied, return start time; else return elapsed time in seconds and decimal minutes
+    :rtype: datetime.datetime or (int, float)
+    """
+    if not time:
+        return datetime.now()
+    else:
+        time = (datetime.now() - time).seconds
+        return time, time/60
+
+def make_dirs(d: Path, exist_ok: bool=True):
+    """
+    Simple wrapper to create a directory (and any missing parents) with pathlib.Path.mkdir().
+
+    :param pathlib.Path d: The directory to create
+    :param bool exist_ok: Whether to gracefully accept an existing directory (default: True)
+    """
+    d.mkdir(parents=True, exist_ok=exist_ok)
+
+def rm_files(files: list[Path]=[]):
+    """
+    Remove a list of intermediate processing files.
+
+    :param list files: A list of `pathlib.Path` objects to remove
+    """
+    for f in files:
+        if f.is_file():
+            f.unlink()
+
+def write_wkt_to_file(f: Path, wkt: str):
+    """
+    Write well-known text (WKT) string to file. Will overwrite an existing file.
+
+    :param f: File path to write to
+    :type f: pathlib.Path
+    :param str wkt: String to write
+    """
+    if f.is_file():
+        f.unlink()
+    with open(f, 'w') as fw:
+        fw.write(str(wkt))
+
+def read_wkt_from_file(f: Path) -> str:
+    """
+    Read the WKT string from a file.
+
+    :param f: The file to read
+    :type f: pathlib.Path
+    :return: The well-known text of the CRS in use
+    :rtype: str
+    """
+    with open(f, 'r') as fr:
+        return fr.read()
+
+def get_epsgs_from_wkt(wkt: str) -> tuple:
+    """
+    Use pyproj to parse a well-known text string to CRS. Returns a tuple of
+    `[CRS, horizontal EPSG, vertical EPSG, horizontal CRS name, vertical CRS name]`
+    where the EPSG fields could be an integer representing an EPSG code or `None`.
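+
+    For example, a compound CRS might yield (values hypothetical)::
+
+        crs, epsg_h, epsg_v, h_name, v_name = get_epsgs_from_wkt(wkt)
+        # -> (<CRS>, 26918, 5703, 'NAD83 / UTM zone 18N', 'NAVD88 height')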
+
+    :param str wkt: The well-known text string to parse to pyproj.crs.CRS
+    :return: CRS object, horizontal EPSG, vertical EPSG, horizontal name, vertical name
+    :rtype: tuple
+    """
+    L = getLogger(__name__)
+    epsg_h, epsg_v = None, None
+    h_name, v_name = None, None
+    crs = CRS.from_wkt(wkt)
+    if crs.is_compound:
+        L.info('Found compound coordinate system (COMPD_CS): %s entries' % (len(crs.sub_crs_list)))
+        if len(crs.sub_crs_list) > 2: # not sure if this case exists, but it should be warned about anyway
+            L.warning('More than 2 entries in a compound coordinate system may cause an unwanted override!')
+        for c in crs.sub_crs_list:
+            if c.is_vertical:
+                epsg_v = c.to_epsg()
+                v_name = c.name
+            else:
+                epsg_h = c.to_epsg()
+                h_name = c.name
+    else:
+        if crs.is_vertical:
+            epsg_v = crs.to_epsg()
+            v_name = crs.name
+        else:
+            epsg_h = crs.to_epsg()
+            h_name = crs.name
+    if epsg_h:
+        L.info('Found horizontal EPSG: %s (%s)' % (epsg_h, h_name))
+    if epsg_v:
+        L.info('Found vertical EPSG: %s (%s)' % (epsg_v, v_name))
+    return crs, epsg_h, epsg_v, h_name, v_name
+
+
+def log_init_stats(self):
+    """
+    Log initialization values.
+
+    :param self: The object (e.g. a :py:class:`pdgpoints.pipeline.Pipeline`) from which to extract values
+    """
+    self.L.info('File: %s' % (self.f))
+    self.L.info('Merge: %s' % (self.merge))
+    self.L.info('Intensity > RGB: %s' % (self.intensity_to_RGB))
+    self.L.info('RGB scalar: %sx' % (self.rgb_scale))
+    self.L.info('Translate Z: %+.1f' % (self.translate_z))
+    self.L.info('From geoid: %s' % (self.from_geoid))
+    self.L.info('Archive input: %s' % (self.archive))
+    self.L.info('Given name: %s' % (self.given_name))
+    self.L.info('File extension: %s' % (self.ext))
+    self.L.debug('base_dir: %s' % (self.base_dir))
+    self.L.debug('bn: %s' % (self.bn))
+    self.L.debug('rewrite_dir: %s' % (self.rewrite_dir))
+    self.L.debug('archive_dir: %s' % (self.archive_dir))
+    self.L.debug('out_dir: %s' % (self.out_dir))
+    self.L.debug('las_name: %s' % (self.las_name))
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..f82e801
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,42 @@
+import setuptools
+from pdgpoints import _version
+
+with open('README.md', 'r') as fh:
+    long_description = fh.read()
+
+setuptools.setup(
+    author='Ian Nesbitt',
+    author_email='nesbitt@nceas.ucsb.edu',
+    name='pdgpoints',
+    version=_version.__version__,
+    description='PDG point cloud staging pipeline',
+    long_description=long_description,
+    long_description_content_type='text/markdown',
+    url='https://github.com/PermafrostDiscoveryGateway/viz-points',
+    packages=setuptools.find_packages(),
+    include_package_data=True,
+    install_requires=[
+        'py3dtiles @ git+https://gitlab.com/Oslandia/py3dtiles.git@68cdcd9080994d38614d3aa5db75cea2456298cf',
+        'pdal'
+    ],
+    extras_require={
+        'dev': [
+            'sphinx',
+        ]
+    },
+    entry_points = {
+        'console_scripts': [
+            'tilepoints=pdgpoints.cli:cli',
+            'tilepoints-test=pdgpoints.test:test'
+        ],
+    },
+    python_requires='>=3.9, <4.0',
+    classifiers=[
+        'Development Status :: 2 - Pre-Alpha',
+        'Intended Audience :: Developers',
+        'License :: OSI Approved :: Apache Software License',
+        'Natural Language :: English',
+        'Programming Language :: Python :: 3',
+    ],
+    license='Apache Software License 2.0',
+)
\ No newline at end of file
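
As a closing note, the geoid helpers in `pdgpoints/geoid.py` compose as in this
hedged sketch (the coordinates and CRS are hypothetical; `GEOID18` and
`REGIONS[0]` mirror the defaults used in `test.py` and `cli.py`):

```python
from pyegt.defs import REGIONS
from pdgpoints import geoid

# project UTM coordinates to lat/lon, then resolve the model and look up
# the geoid-to-ellipsoid offset added to translate_z before tiling
lat, lon = geoid.crs_to_wgs84(x=594121.0, y=4900720.0, from_crs=26918)
model = geoid.use_model(user_vrs='GEOID18', las_vrs=None)  # scenario 5
adj = float(geoid.get_adjustment(lat=lat, lon=lon, model=model, region=REGIONS[0]))
```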