From 83c7959972859f3680724d4b6966f5d604ca2a95 Mon Sep 17 00:00:00 2001 From: piter Date: Mon, 15 Jul 2024 14:48:24 +0200 Subject: [PATCH] Initial commit. --- .coveragerc | 27 + .editorconfig | 21 + .gitattributes | 2 + .github/workflows/ci.yml | 225 ++++ .gitignore | 116 ++ AUTHORS.rst | 14 + CONTRIBUTING.rst | 162 +++ HISTORY.rst | 11 + LICENSE | 47 + MANIFEST.in | 11 + Makefile | 121 ++ README.rst | 206 +++ docs/Makefile | 20 + docs/_static/custom.css | 3 + docs/authors.rst | 1 + docs/conf.py | 317 +++++ docs/contributing.rst | 1 + docs/demo/demo_masjed_dam.rst | 44 + docs/demo/demo_masjed_dam_detailed_guide.rst | 353 +++++ docs/demo/demo_masjed_dam_fast_track.rst | 92 ++ docs/demo_datasets.rst | 17 + docs/history.rst | 1 + docs/index.rst | 25 + docs/installation.rst | 186 +++ docs/make.bat | 36 + docs/preparation.rst | 188 +++ docs/processing.rst | 355 +++++ docs/readme.rst | 1 + docs/usage.rst | 75 ++ sarvey/__init__.py | 39 + sarvey/coherence.py | 189 +++ sarvey/config.py | 759 +++++++++++ sarvey/console.py | 128 ++ sarvey/densification.py | 324 +++++ sarvey/filtering.py | 291 +++++ sarvey/geolocation.py | 100 ++ sarvey/ifg_network.py | 348 +++++ sarvey/objects.py | 795 ++++++++++++ sarvey/osm_utils.py | 185 +++ sarvey/preparation.py | 277 ++++ sarvey/processing.py | 1145 +++++++++++++++++ sarvey/sarvey_export.py | 298 +++++ sarvey/sarvey_mask.py | 634 +++++++++ sarvey/sarvey_mti.py | 291 +++++ sarvey/sarvey_osm.py | 220 ++++ sarvey/sarvey_plot.py | 496 +++++++ sarvey/triangulation.py | 144 +++ sarvey/unwrapping.py | 1041 +++++++++++++++ sarvey/utils.py | 832 ++++++++++++ sarvey/version.py | 34 + sarvey/viewer.py | 562 ++++++++ setup.cfg | 23 + setup.py | 109 ++ .../CI_docker/build_sarvey_testsuite_image.sh | 26 + .../CI_docker/context/environment_sarvey.yml | 48 + tests/CI_docker/context/sarvey_ci.docker | 52 + tests/__init__.py | 30 + tests/linting/.gitkeep | 0 tests/test_config.py | 60 + tests/test_ifg_network.py | 86 ++ tests/test_processing.py | 391 ++++++ tests/testdata/config_test.json | 68 + tests/testdata/logfiles/.gitkeep | 0 tox.ini | 30 + 64 files changed, 12733 insertions(+) create mode 100644 .coveragerc create mode 100644 .editorconfig create mode 100644 .gitattributes create mode 100644 .github/workflows/ci.yml create mode 100644 .gitignore create mode 100644 AUTHORS.rst create mode 100644 CONTRIBUTING.rst create mode 100644 HISTORY.rst create mode 100644 LICENSE create mode 100644 MANIFEST.in create mode 100644 Makefile create mode 100644 README.rst create mode 100644 docs/Makefile create mode 100644 docs/_static/custom.css create mode 100644 docs/authors.rst create mode 100755 docs/conf.py create mode 100644 docs/contributing.rst create mode 100644 docs/demo/demo_masjed_dam.rst create mode 100644 docs/demo/demo_masjed_dam_detailed_guide.rst create mode 100644 docs/demo/demo_masjed_dam_fast_track.rst create mode 100644 docs/demo_datasets.rst create mode 100644 docs/history.rst create mode 100644 docs/index.rst create mode 100644 docs/installation.rst create mode 100644 docs/make.bat create mode 100644 docs/preparation.rst create mode 100644 docs/processing.rst create mode 100644 docs/readme.rst create mode 100644 docs/usage.rst create mode 100644 sarvey/__init__.py create mode 100644 sarvey/coherence.py create mode 100644 sarvey/config.py create mode 100644 sarvey/console.py create mode 100644 sarvey/densification.py create mode 100644 sarvey/filtering.py create mode 100644 sarvey/geolocation.py create mode 100644 sarvey/ifg_network.py create mode 100644 
sarvey/objects.py create mode 100644 sarvey/osm_utils.py create mode 100644 sarvey/preparation.py create mode 100644 sarvey/processing.py create mode 100755 sarvey/sarvey_export.py create mode 100755 sarvey/sarvey_mask.py create mode 100755 sarvey/sarvey_mti.py create mode 100755 sarvey/sarvey_osm.py create mode 100755 sarvey/sarvey_plot.py create mode 100644 sarvey/triangulation.py create mode 100644 sarvey/unwrapping.py create mode 100644 sarvey/utils.py create mode 100644 sarvey/version.py create mode 100644 sarvey/viewer.py create mode 100644 setup.cfg create mode 100644 setup.py create mode 100755 tests/CI_docker/build_sarvey_testsuite_image.sh create mode 100644 tests/CI_docker/context/environment_sarvey.yml create mode 100644 tests/CI_docker/context/sarvey_ci.docker create mode 100644 tests/__init__.py create mode 100644 tests/linting/.gitkeep create mode 100644 tests/test_config.py create mode 100644 tests/test_ifg_network.py create mode 100644 tests/test_processing.py create mode 100755 tests/testdata/config_test.json create mode 100644 tests/testdata/logfiles/.gitkeep create mode 100644 tox.ini diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..40105a6 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,27 @@ +# .coveragerc to control coverage.py +[run] +branch = False +concurrency = multiprocessing +parallel = True +omit = */site-packages/*,*/tests/*,*/.eggs/* + +[report] +show_missing = True + +# Regexes for lines to exclude from consideration +exclude_lines = + # Have to re-enable the standard pragma + pragma: no cover + + # Don't complain if tests don't hit defensive assertion code: + raise AssertionError + raise NotImplementedError + + # Don't complain if non-runnable code isn't run: + if 0: + if __name__ == .__main__.: + +ignore_errors = True + +[html] +directory = htmlcov diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000..d4a2c44 --- /dev/null +++ b/.editorconfig @@ -0,0 +1,21 @@ +# http://editorconfig.org + +root = true + +[*] +indent_style = space +indent_size = 4 +trim_trailing_whitespace = true +insert_final_newline = true +charset = utf-8 +end_of_line = lf + +[*.bat] +indent_style = tab +end_of_line = crlf + +[LICENSE] +insert_final_newline = false + +[Makefile] +indent_style = tab diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..fa7bd42 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,2 @@ +# *.h5 filter=lfs diff=lfs merge=lfs -text +# *.png filter=lfs diff=lfs merge=lfs -text diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..8528fa4 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,225 @@ +name: CI + +on: + push: + branches: + - main + - githubci + +env: + SKIP: true + +jobs: + before_script: + runs-on: self-hosted + + strategy: + matrix: + node-version: [ 20.x ] + + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.x' + + - name: Install dependencies + run: | + conda init bash + source ~/.bashrc + conda info + source activate ci_env + shell: bash + + test_sarvey: + runs-on: self-hosted + needs: before_script + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.x' + + - name: Run tests + if: env.SKIP == 'false' + run: | + conda init bash + source ~/.bashrc + source activate ci_env + rm -rf tests/testdata + wget -c 
https://seafile.projekt.uni-hannover.de/f/4b3be399dffa488e98db/?dl=1 -P tests/ + unzip tests/testdata.zip + ls tests/testdata + make pytest + shell: bash + + - name: create docs + run: | + conda init bash + source ~/.bashrc + source activate ci_env + make docs + shell: bash + + - name: Upload coverage report + uses: actions/upload-artifact@v4 + with: + name: coverage-report + path: htmlcov/ + + - name: Upload report.html + uses: actions/upload-artifact@v4 + with: + name: test-report + path: report.html + + - name: Upload docs + uses: actions/upload-artifact@v4 + with: + name: docs + path: docs/_build/html/ + + - name: Upload cobertura coverage report + uses: actions/upload-artifact@v4 + with: + name: cobertura-coverage + path: coverage.xml + + - name: Upload junit report + uses: actions/upload-artifact@v4 + with: + name: junit-report + path: report.xml + + test_styles: + runs-on: self-hosted + needs: before_script + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.x' + + - name: Install dependencies + run: | + conda init bash + source ~/.bashrc + conda info + source activate ci_env + make lint + shell: bash + + - name: Upload flake8 log + uses: actions/upload-artifact@v4 + with: + name: flake8-log + path: tests/linting/flake8.log + + - name: Upload pycodestyle log + uses: actions/upload-artifact@v4 + with: + name: pycodestyle-log + path: tests/linting/pycodestyle.log + + - name: Upload pydocstyle log + uses: actions/upload-artifact@v4 + with: + name: pydocstyle-log + path: tests/linting/pydocstyle.log + + test_urls: + runs-on: self-hosted + needs: before_script + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.x' + + - name: Install dependencies + run: | + conda init bash + source ~/.bashrc + conda info + source activate ci_env + make urlcheck + shell: bash + + test_sarvey_install: + runs-on: self-hosted + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.x' + + - name: Install dependencies + run: | + conda init bash + source ~/.bashrc + mamba update -n base mamba conda + mamba info + mamba env remove --name sarvey_testinstall --yes || echo "Environment sarvey_testinstall does not exist" + pip install conda-merge + wget https://raw.githubusercontent.com/insarlab/MiaplPy/main/conda-env.yml + conda-merge conda-env.yml tests/CI_docker/context/environment_sarvey.yml > env.yml + mamba env create --name sarvey_testinstall -f env.yml + source activate sarvey_testinstall + pip install git+https://github.com/insarlab/MiaplPy.git + pip install . + pip check + cd .. 
+ python -c "import sarvey; print(sarvey)" + shell: bash + + deploy_pages: + runs-on: self-hosted + + needs: test_sarvey + steps: + - name: Checkout code + uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: '3.x' + + - name: Deploy to GitHub Pages + if: env.SKIP == 'false' + run: | + rm -rf public + mkdir -p public/doc + mkdir -p public/images/ + mkdir -p public/coverage + mkdir -p public/test_reports + cp -r docs/_build/html/* public/doc/ + cp -r htmlcov/* public/coverage/ + cp report.html public/test_reports/ + ls -al public + ls -al public/doc + ls -al public/coverage + ls -al public/test_reports + shell: bash + + - name: Upload to GitHub Pages + if: env.SKIP == 'false' + uses: peaceiris/actions-gh-pages@v4 + with: + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./public diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..1191f46 --- /dev/null +++ b/.gitignore @@ -0,0 +1,116 @@ +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +env/ +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +.hypothesis/ +.pytest_cache/ +report.xml +report.html + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# pyenv +.python-version + +# celery beat schedule file +celerybeat-schedule + +# SageMath parsed files +*.sage.py + +# dotenv +.env + +# virtualenv +.venv +venv/ +ENV/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ + +# IDE settings +.vscode/ + +# IntelliJ Idea family of suites +.idea +*.iml +## File-based project format: +*.ipr +*.iws +## mpeltonen/sbt-idea plugin +.idea_modules/ diff --git a/AUTHORS.rst b/AUTHORS.rst new file mode 100644 index 0000000..2f9386b --- /dev/null +++ b/AUTHORS.rst @@ -0,0 +1,14 @@ +======= +Credits +======= + +Development Lead +---------------- + +* Andreas Piter + +Contributors +------------ + +* FernLab +* Mahmud H. Haghighi diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst new file mode 100644 index 0000000..41d6855 --- /dev/null +++ b/CONTRIBUTING.rst @@ -0,0 +1,162 @@ +.. highlight:: shell + +============ +Contributing +============ + +Contributions are welcome, and they are greatly appreciated! Every little bit +helps, and credit will always be given. + +You can contribute in many ways: + +Types of Contributions +---------------------- + +Report Bugs +~~~~~~~~~~~ + +Report bugs at https://git.gfz-potsdam.de/fernlab/timeseries/issues. + +If you are reporting a bug, please include: + +* Your operating system name and version. +* Any details about your local setup that might be helpful in troubleshooting. +* Detailed steps to reproduce the bug. + +Fix Bugs +~~~~~~~~ + +Look through the GitLab issues for bugs. 
Anything tagged with "bug" and "help +wanted" is open to whoever wants to implement it. + +Implement Features +~~~~~~~~~~~~~~~~~~ + +Look through the GitLab issues for features. Anything tagged with "enhancement" +and "help wanted" is open to whoever wants to implement it. + +Write Documentation +~~~~~~~~~~~~~~~~~~~ + +SARvey could always use more documentation, whether as part of the +official SARvey docs, in docstrings, or even on the web in blog posts, +articles, and such. + +Submit Feedback +~~~~~~~~~~~~~~~ + +The best way to send feedback is to file an issue at https://git.gfz-potsdam.de/fernlab/timeseries/issues. + +If you are proposing a feature: + +* Explain in detail how it would work. +* Keep the scope as narrow as possible, to make it easier to implement. +* Remember that this is a volunteer-driven project, and that contributions + are welcome :) + +Commit Changes +-------------- + +How to +~~~~~~ + +1. Fork the `sarvey` repo on GitLab. +2. Clone your fork locally:: + + $ git clone git@git.gfz-potsdam.de:fernlab/timeseries.git + +3. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed, this is how you set up your fork for local development:: + + $ mkvirtualenv sarvey + $ cd sarvey/ + $ python setup.py develop + +4. Create a branch for local development:: + + $ git checkout -b name-of-your-bugfix-or-feature + + Now you can make your changes locally. + +5. When you're done making changes, check that your changes pass flake8 and the + tests, including testing other Python versions with tox:: + + $ make pytest + $ make lint + $ make urlcheck + $ tox + + To get flake8 and tox, just pip install them into your virtualenv. + +6. Commit your changes and push your branch to GitLab:: + + $ git add . + $ git commit -m "Your detailed description of your changes." + $ git push origin name-of-your-bugfix-or-feature + +7. Submit a merge request through the GitLab website. + +Sign your commits +~~~~~~~~~~~~~~~~~ + +Please note that our license terms only allow signed commits. +A guideline how to sign your work can be found here: https://git-scm.com/book/en/v2/Git-Tools-Signing-Your-Work + +If you are using the PyCharm IDE, the `Commit changes` dialog has an option called `Sign-off commit` to +automatically sign your work. + + +License header +~~~~~~~~~~~~~~ + +If you commit new Python files, please note that they have to contain the following license header: + +.. code:: bash + + # SARvey - A multitemporal InSAR time series tool for the derivation of displacements. + # + # Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) + # + # This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context + # of the SAR4Infra project with funds of the German Federal Ministry for Digital and + # Transport and contributions from Landesamt fuer Vermessung und Geoinformation + # Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. + # + # This program is free software: you can redistribute it and/or modify it under + # the terms of the GNU General Public License as published by the Free Software + # Foundation, either version 3 of the License, or (at your option) any later + # version. + # + # Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ + # implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you + # cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. 
+ # This requirement extends to SARvey. + # + # This program is distributed in the hope that it will be useful, but WITHOUT + # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS + # FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more + # details. + # + # You should have received a copy of the GNU Lesser General Public License along + # with this program. If not, see . + + +Merge Request Guidelines +------------------------ + +Before you submit a pull request, check that it meets these guidelines: + +1. The merge request should include tests. +2. If the merge request adds functionality, the docs should be updated. Put + your new functionality into a function with a docstring, and add the + feature to the list in README.rst. +3. The pull request should work for Python 3.6, 3.7, 3.8 and 3.9. Check + https://gitlab.projekt.uni-hannover.de/ipi-sar4infra/sarvey/-/merge_requests + and make sure that the tests pass for all supported Python versions. + +Tips +---- + +To run a subset of tests:: + +$ pytest tests.test_processing + diff --git a/HISTORY.rst b/HISTORY.rst new file mode 100644 index 0000000..71035d2 --- /dev/null +++ b/HISTORY.rst @@ -0,0 +1,11 @@ +======= +History +======= + +1.1.0 (coming soon) +------------------- + +1.0.0 (2024-06-26) Strawberry Pie +--------------------------------- + +* First release version on github. diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..d116ca3 --- /dev/null +++ b/LICENSE @@ -0,0 +1,47 @@ +`SARvey` is distributed under the GNU General Public License, version 3 (GPLv3). + +Exceptions are listed in the following: + +* This package uses PyMaxFlow. The core of PyMaxflows library is the C++ + implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you + cite [BOYKOV04] in any resulting publication if you use this code for research purposes. + This requirement extends to SARvey. + + BOYKOV04 + An Experimental Comparison of Min-Cut/Max-Flow Algorithms for Energy Minimization in Vision. + Yuri Boykov and Vladimir Kolmogorov. In IEEE Transactions on Pattern Analysis and Machine + Intelligence (PAMI), September 2004 + +GNU GENERAL PUBLIC LICENSE + Version 3, 29 June 2007 + + SARvey + Copyright (C) 2021-2024 Andreas Piter + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU General Public License as published by + the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU General Public License for more details. + + You should have received a copy of the GNU General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + +You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU GPL, see +. + + The GNU General Public License does not permit incorporating your program +into proprietary programs. If your program is a subroutine library, you +may consider it more useful to permit linking proprietary applications with +the library. If this is what you want to do, use the GNU Lesser General +Public License instead of this License. 
But first, please read +. + diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..e8f37ad --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,11 @@ +include AUTHORS.rst +include CONTRIBUTING.rst +include HISTORY.rst +include LICENSE +include README.rst + +recursive-exclude tests * +recursive-exclude * __pycache__ +recursive-exclude * *.py[co] + +recursive-include docs *.rst conf.py Makefile make.bat *.jpg *.png *.gif diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..d18ba84 --- /dev/null +++ b/Makefile @@ -0,0 +1,121 @@ +.PHONY: clean clean-test clean-pyc clean-build docs help pytest +.DEFAULT_GOAL := help + +define BROWSER_PYSCRIPT +import os, webbrowser, sys + +from urllib.request import pathname2url + +webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1]))) +endef +export BROWSER_PYSCRIPT + +define PRINT_HELP_PYSCRIPT +import re, sys + +for line in sys.stdin: + match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line) + if match: + target, help = match.groups() + print("%-20s %s" % (target, help)) +endef +export PRINT_HELP_PYSCRIPT + +BROWSER := python -c "$$BROWSER_PYSCRIPT" + +help: + @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST) + +clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts + +clean-build: ## remove build artifacts + rm -fr build/ + rm -fr dist/ + rm -fr .eggs/ + find . -name '*.egg-info' -exec rm -fr {} + + find . -name '*.egg' -exec rm -f {} + + +clean-pyc: ## remove Python file artifacts + find . -name '*.pyc' -exec rm -f {} + + find . -name '*.pyo' -exec rm -f {} + + find . -name '*~' -exec rm -f {} + + find . -name '__pycache__' -exec rm -fr {} + + +clean-test: ## remove test and coverage artifacts + rm -fr .tox/ + rm -f .coverage + rm -fr .coverage.* + rm -fr htmlcov/ + rm -fr report.html + rm -fr report.xml + rm -fr coverage.xml + rm -fr .pytest_cache + +lint: ## check style with flake8 + flake8 --max-line-length=120 sarvey tests > ./tests/linting/flake8.log || \ + (cat ./tests/linting/flake8.log && exit 1) + pycodestyle sarvey --exclude="*.ipynb,*.ipynb*" --max-line-length=120 > ./tests/linting/pycodestyle.log || \ + (cat ./tests/linting/pycodestyle.log && exit 1) + pydocstyle sarvey > ./tests/linting/pydocstyle.log || \ + (cat ./tests/linting/pydocstyle.log && exit 1) + +urlcheck: ## check for dead URLs + urlchecker check . 
--file-types .py,.rst,.md,.json + +test: ## run tests quickly with the default Python + pytest + +test-all: ## run tests on every Python version with tox + tox + +coverage: ## check code coverage quickly with the default Python + coverage erase + coverage run --source sarvey -m pytest + coverage combine # must be called in order to make coverage work in multiprocessing + coverage report -m + coverage html + $(BROWSER) htmlcov/index.html + +pytest: clean-test ## Runs pytest with coverage and creates coverage and test report + ## - puts the coverage results in the folder 'htmlcov' + ## - generates cobertura 'coverage.xml' (needed to show coverage in GitLab MR changes) + ## - generates 'report.html' based on pytest-reporter-html1 + ## - generates JUnit 'report.xml' to show the test report as a new tab in a GitLab MR + ## NOTE: additional options pytest and coverage (plugin pytest-cov) are defined in .pytest.ini and .coveragerc + pytest \ + --verbosity=3 \ + --color=yes \ + --tb=short \ + --cov=sarvey \ + --cov-report html:htmlcov \ + --cov-report term-missing \ + --cov-report xml:coverage.xml \ + --template=html1/index.html \ + --report=report.html \ + --junitxml report.xml \ + tests + +docs: ## generate Sphinx HTML documentation, including API docs + rm -f docs/timeseries.rst + rm -f docs/modules.rst + sphinx-apidoc sarvey -o docs/ --private --doc-project 'Python API reference' + $(MAKE) -C docs clean + $(MAKE) -C docs html + $(BROWSER) docs/_build/html/index.html + +servedocs: docs ## compile the docs watching for changes + watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D . + +release: dist ## package and upload a release + twine upload dist/* + +dist: clean ## builds source and wheel package + python setup.py sdist + python setup.py bdist_wheel + ls -l dist + +install: clean ## install the package to the active Python's site-packages + python setup.py install + +gitlab_CI_docker: ## Build a docker image for CI use within gitlab + cd ./tests/CI_docker/; bash ./build_sarvey_testsuite_image.sh diff --git a/README.rst b/README.rst new file mode 100644 index 0000000..eb50c80 --- /dev/null +++ b/README.rst @@ -0,0 +1,206 @@ +======================== +SARvey - survey with SAR +======================== + +Open-source InSAR time series analysis software developed within the project SAR4Infra. +**SARvey** aims to analyze InSAR displacement time series for engineering applications. + + + +Documentation +------------- +The documentation with installation instructions, processing steps, and examples with a demo dataset can be found at: +https://ipi-sar4infra.projektpages.uni-h.de/timeseries/doc/ + + + +Status +------ + +.. image:: https://gitlab.projekt.uni-hannover.de/ipi-sar4infra/timeseries/badges/main/pipeline.svg + :target: https://gitlab.projekt.uni-hannover.de/ipi-sar4infra/timeseries/-/pipelines + :alt: Pipelines +.. image:: https://gitlab.projekt.uni-hannover.de/ipi-sar4infra/timeseries/badges/main/coverage.svg + :target: https://ipi-sar4infra.projektpages.uni-h.de/timeseries/coverage/ + :alt: Coverage +.. image:: https://img.shields.io/static/v1?label=Documentation&message=GitLab%20Pages&color=orange + :target: https://ipi-sar4infra.projektpages.uni-h.de/timeseries/doc/ + :alt: Documentation +.. image:: https://zenodo.org/badge/DOI/10.5281/zenodo.12544131.svg + :target: https://doi.org/10.5281/zenodo.12544131 + :alt: DOI + + +See also the latest coverage_ report and the pytest_ HTML report. 
+ + +License +------- + +**SARvey** is distributed under the GNU General Public License, version 3 (GPLv3). + +The following exceptions applies: + +This package uses PyMaxFlow. The core of PyMaxflows library is the C++ implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you cite [BOYKOV04] in any resulting publication if you use this code for research purposes. +This requirement extends to **SARvey**. + +Please check out the details of the license `here `_. + +How to cite +----------- + +If you use **SARvey** in your research, please cite the following. + +1. The paper describing the methodology: + + Piter, A., Haghshenas Haghighi, M., Motagh, M.(2024). An in-depth study on Sentinel-1 InSAR for transport infrastructure monitoring. PFG - Journal of Photogrammetry, Remote Sensing and Geoinformation Science. (paper currently under review). + +2. The software itself. Please specify the version you use: + + Piter, A., Haghshenas Haghighi, M., FERN.Lab, & Motagh, M. (2024). SARvey - survey with SAR [version]. Zenodo. https://doi.org/10.5281/zenodo.12544131 + +3. If you use the PUMA method for unwrapping in your research, please cite the following publication as indicated in the license: + + An Experimental Comparison of Min-Cut/Max-Flow Algorithms for Energy Minimization in Vision. Yuri Boykov and Vladimir Kolmogorov. In IEEE Transactions on Pattern Analysis and Machine Intelligence (PAMI), September 2004. `Link to paper `_. + + +Processing overview +------------------- + + +.. image:: https://seafile.projekt.uni-hannover.de/f/006f702937cd4e618bcb/?dl=1 + :width: 600 + :align: center + :alt: SARvey workflow + +Processing workflow for using the SARvey software to derive displacement time series. + + +SARvey is a command-line-based software. The major steps for running SARvey are the following: + +* **Installation** + + SARvey is a cross-platform python-based software and can be installed on Linux and MacOS. On Windows, SARvey is tested on Windows Subsystem for Linux (WSL_) version 2. + Details of installation can be found in `installation instruction`_. + + +* **Preprocessing** + + The software requires a coregistered stack of SLC and the related geometry information in the MiaplPy_ data format. + The coregistered stack of SLC can be created using an InSAR processor. Currently MiaplPy_ only supports ISCE_. Support for GAMMA and SNAP_ is planned for future. + After creating the coregistered stack of SLC, run the "load_data" step from Miaplpy_ to create the "inputs" directory which contains "slcStack.h5" and "geometryRadar.h5". + Details are explained in the `Preparation `_ section + + +* **Time series analysis** + + Time series analysis is performed using `sarvey`. It consists of 5 steps (steps 0 to 4). The details of each step are explained in `processing steps `_. The processing parameters are handled in a json config file. Visualization and export are handled by `sarvey_plot` and `sarvey_export` packages. Below are the major steps: + + * Go to your working directory: + + .. code-block:: bash + + cd path/to/working_dir/ + + * Create a default config file using **"-g"** flag: + + .. code-block:: bash + + sarvey -f config.json 0 4 -g + + * Modify **config.json** to change path to "inputs" directory. Modify other parameters as desired. + + * Run all processing steps (steps 0 to 4): + + .. code-block:: bash + + sarvey -f config.json 0 4 + + Different processing steps are explained `here `_. + + * Plot the resulting displacement time series: + + .. 
code-block:: bash + + sarvey_plot outputs/coh80_ts.h5 -t + + * Export the results as Shapefiles_: + + .. code-block:: bash + + sarvey_export outputs/coh80_ts.h5 -o outputs/shp/coh80.shp + + +Feature overview +---------------- + +**SARvey** has three main components for processing, visualization, and exporting data. + +* `sarvey` performs time series analysis. +* `sarvey_plot` plots the outputs. +* `sarvey_export` exports InSAR time series results from to GIS data formats. The GIS data format can be visualized for example in QGIS_. + +It also has two components that facilitate transport infrastructure monitoring. + +* `sarvey_mask` creates mask from Shapefiles, e.g. for transport infrastructures. +* `sarvey_osm` downloads transport infrastructure information from OSM_ and store as Shapefiles. + +You can run each component in the command line with "-h" argument for more information about the usage. For example: + + .. code-block:: bash + + sarvey -h + + + +**SARvey** supports two processing schemes: + +* `Two-step unwrapping `_ with atmospheric correction (default). + +* `One-step unwrapping `_ for a small area. + + +History / Changelog +------------------- + +You can find the protocol of recent changes in the **SARvey** package +`here `__. + +We follow the principle of semantic versioning. +The version number is structured as follows: MAJOR.MINOR.PATCH. +You can find a description of the versioning scheme `here `__. + +Credits +------- + +This software was developed within the project SAR4Infra (2020-2024) with funds of the German Federal Ministry for Digital and Transport. +The project consortium consists of +the `Institute of Photogrammetry and GeoInformation`_ at Leibniz University Hannover, +`FERN.Lab`_ (innovation and technology transfer lab of the GFZ German Research Centre for Geosciences, Potsdam), +`Landesamt fuer Vermessung und Geoinformation Schleswig-Holstein`_, +and `Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein`_. +The scientific and methodological development was carried out by Andreas Piter (piter@ipi.uni-hannover.de), supervised by Mahmud H. Haghighi (mahmud@ipi.uni-hannover.de) and Mahdi Motagh (motagh@gfz-potsdam.de). +The `FERN.Lab`_ (fernlab@gfz-potsdam.de) contributed to the development, documentation, continuous integration, and testing of the package. + + +This package was created with Cookiecutter_ and the `fernlab/cookiecutter-pypackage`_ project template. + + +.. _Cookiecutter: https://github.com/audreyr/cookiecutter +.. _`fernlab/cookiecutter-pypackage`: https://github.com/fernlab/cookiecutter-pypackage +.. _coverage: https://ipi-sar4infra.projektpages.uni-h.de/timeseries/coverage/ +.. _pytest: https://ipi-sar4infra.projektpages.uni-h.de/timeseries/test_reports/report.html +.. _processing: docs/processing.html +.. _`installation instruction`: docs/installation.html +.. _MiaplPy: https://github.com/insarlab/MiaplPy +.. _ISCE: https://github.com/isce-framework/isce2 +.. _SNAP: https://step.esa.int/main/toolboxes/snap +.. _Shapefiles: https://doc.arcgis.com/en/arcgis-online/reference/shapefiles.htm +.. _QGIS: https://qgis.org/en/site/ +.. _`PS Time Series Viewer`: https://plugins.qgis.org/plugins/pstimeseries/ +.. _OSM: https://www.openstreetmap.org/ +.. _WSL: https://learn.microsoft.com/en-us/windows/wsl/ +.. _FERN.Lab: https://fernlab.gfz-potsdam.de/ +.. _`Institute of Photogrammetry and GeoInformation`: https://www.ipi.uni-hannover.de/en/ +.. 
_`Landesamt fuer Vermessung und Geoinformation Schleswig-Holstein`: https://www.schleswig-holstein.de/DE/landesregierung/ministerien-behoerden/LVERMGEOSH/lvermgeosh_node.html +.. _`Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein`: https://www.schleswig-holstein.de/DE/Landesregierung/LBVSH/lbvsh_node.html diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..b9b615b --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,20 @@ +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = python -msphinx +SPHINXPROJ = sarvey +SOURCEDIR = . +BUILDDIR = _build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/_static/custom.css b/docs/_static/custom.css new file mode 100644 index 0000000..1e71926 --- /dev/null +++ b/docs/_static/custom.css @@ -0,0 +1,3 @@ +.wy-nav-content { +max-width: 1200px !important; +} diff --git a/docs/authors.rst b/docs/authors.rst new file mode 100644 index 0000000..e122f91 --- /dev/null +++ b/docs/authors.rst @@ -0,0 +1 @@ +.. include:: ../AUTHORS.rst diff --git a/docs/conf.py b/docs/conf.py new file mode 100755 index 0000000..865df13 --- /dev/null +++ b/docs/conf.py @@ -0,0 +1,317 @@ +#!/usr/bin/env python +# -*- coding: utf-8 -*- +# +# sarvey documentation build configuration file, created by +# sphinx-quickstart on Fri Jun 9 13:47:02 2017. +# +# This file is execfile()d with the current directory set to its +# containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +# If extensions (or modules to document with autodoc) are in another +# directory, add these directories to sys.path here. If the directory is +# relative to the documentation root, use os.path.abspath to make it +# absolute, like shown here. +# +import os +import sys +sys.path.insert(0, os.path.abspath('..')) + +import sarvey + +# -- General configuration --------------------------------------------- + +# If your documentation needs a minimal Sphinx version, state it here. +# needs_sphinx = '1.0' + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = [ + 'sphinx.ext.autodoc', + 'sphinx.ext.githubpages', + 'sphinx.ext.viewcode', + 'sphinx.ext.todo', + # 'sphinxarg.ext', + 'sphinx_autodoc_typehints', + 'sphinx.ext.intersphinx' +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix(es) of source filenames. +# You can specify multiple suffix as a list of string: +# source_suffix = ['.rst', '.md'] +source_suffix = '.rst' + +# The encoding of source files. +# source_encoding = 'utf-8-sig' + +# The master toctree document. +master_doc = 'index' + +# General information about the project. 
+project = 'SARvey' +copyright = "2024, IPI - Leibniz Universitaet Hannover" +author = "Andreas Piter" + +# The version info for the project you're documenting, acts as replacement +# for |version| and |release|, also used in various other places throughout +# the built documents. +# +# The short X.Y version. +version = sarvey.__version__ +# The full version, including alpha/beta/rc tags. +release = sarvey.__version__ + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +# +# This is also used if you do content translation via gettext catalogs. +# Usually you set "language" from the command line for these cases. +language = "en" + +# There are two options for replacing |today|: either, you set today to +# some non-false value, then it is used: +# today = '' +# Else, today_fmt is used as the format for a strftime call. +# today_fmt = '%B %d, %Y' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This patterns also effect to html_static_path and html_extra_path +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] + +# The reST default role (used for this markup: `text`) to use for all +# documents. +# default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +# add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +# add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +# show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +# modindex_common_prefix = [] + +# If true, keep warnings as "system message" paragraphs in the built +# documents. +# keep_warnings = False + +# Define how to document class docstrings +# '__init__' documents only the __init__ methods, 'class' documents only the class methods and 'both' documents both +autoclass_content = 'both' + +# If true, `todo` and `todoList` produce output, else they produce nothing. +todo_include_todos = True + + +# Apply custom sphinx styles (e.g., increase content width of generated docs) +def setup(app): + app.add_css_file('custom.css') + + +# Add mappings for intersphinx extension (allows to link to the API reference of other sphinx documentations) +intersphinx_mapping = { + 'python': ('https://docs.python.org/3', None), +} + + +# -- Options for HTML output ------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. +# html_theme = 'default' +html_theme = 'classic' # The one installed via pip install sphinx_rtd_theme in the .gitlab.yml + +# Theme options are theme-specific and customize the look and feel of a +# theme further. For a list of options available for each theme, see the +# documentation. 
+# html_theme_options = { +# 'canonical_url': '', +# 'analytics_id': '', +# 'logo_only': False, +# 'display_version': True, +# 'prev_next_buttons_location': 'bottom', +# 'style_external_links': False, +# # Toc options +# 'collapse_navigation': True, +# 'sticky_navigation': True, +# 'navigation_depth': 4, +# 'includehidden': True, +# 'titles_only': False, +# # 'set_type_checking_flag': True # option of sphinx_autodoc_typehints extension +# } + +# Add any paths that contain custom themes here, relative to this directory. +# html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +# html_title = None + +# A shorter title for the navigation bar. Default is the same as +# html_title. +# html_short_title = None + +# The name of an image file (relative to this directory) to place at the +# top of the sidebar. +# html_logo = None + +# The name of an image file (within the static path) to use as favicon +# of the docs. This file should be a Windows icon file (.ico) being +# 16x16 or 32x32 pixels large. +# html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) +# here, relative to this directory. They are copied after the builtin +# static files, so a file named "default.css" will overwrite the builtin +# "default.css". +html_static_path = ['_static'] + +# If not '', a 'Last updated on:' timestamp is inserted at every page +# bottom, using the given strftime format. +# html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +# html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +# html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names +# to template names. +# html_additional_pages = {} + +# If false, no module index is generated. +# html_domain_indices = True + +# If false, no index is generated. +# html_use_index = True + +# If true, the index is split into individual pages for each letter. +# html_split_index = False + +# If true, links to the reST sources are added to the pages. +# html_show_sourcelink = True + +# If true, "Created using Sphinx" is shown in the HTML footer. +# Default is True. +# html_show_sphinx = True + +# If true, "(C) Copyright ..." is shown in the HTML footer. +# Default is True. +# html_show_copyright = True + +# If true, an OpenSearch description file will be output, and all pages +# will contain a tag referring to it. The value of this option +# must be the base URL from which the finished HTML is served. +# html_use_opensearch = '' + +# This is the file name suffix for HTML files (e.g. ".xhtml"). +# html_file_suffix = None + +# Output file base name for HTML help builder. +htmlhelp_basename = 'sarveydoc' + + +# -- Options for LaTeX output ------------------------------------------ + +latex_elements = { + # The paper size ('letterpaper' or 'a4paper'). + # 'papersize': 'letterpaper', + + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + + # Latex figure (float) alignment + # 'figure_align': 'htbp', +} + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass +# [howto, manual, or own class]). 
+latex_documents = [ + (master_doc, 'sarvey.tex', + 'SARvey Documentation', + author, 'manual'), +] + +# The name of an image file (relative to this directory) to place at +# the top of the title page. +# latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings +# are parts, not chapters. +# latex_use_parts = False + +# If true, show page references after internal links. +# latex_show_pagerefs = False + +# If true, show URL addresses after external links. +# latex_show_urls = False + +# Documents to append as an appendix to all manuals. +# latex_appendices = [] + +# If false, no module index is generated. +# latex_domain_indices = True + + +# -- Options for manual page output ------------------------------------ + +# One entry per manual page. List of tuples +# (source start file, name, description, authors, manual section). +man_pages = [ + (master_doc, 'sarvey', + 'SARvey Documentation', + [author], 1) +] + +# If true, show URL addresses after external links. +# man_show_urls = False + + +# -- Options for Texinfo output ---------------------------------------- + +# Grouping the document tree into Texinfo files. List of tuples +# (source start file, target name, title, author, +# dir menu entry, description, category) +texinfo_documents = [ + (master_doc, 'sarvey', + 'SARvey Documentation', + author, + 'sarvey', + 'One line description of project.', + 'Miscellaneous'), +] + +# Documents to append as an appendix to all manuals. +# texinfo_appendices = [] + +# If false, no module index is generated. +# texinfo_domain_indices = True + +# How to display URL addresses: 'footnote', 'no', or 'inline'. +# texinfo_show_urls = 'footnote' + +# If true, do not generate a @detailmenu in the "Top" node's menu. +# texinfo_no_detailmenu = False diff --git a/docs/contributing.rst b/docs/contributing.rst new file mode 100644 index 0000000..e582053 --- /dev/null +++ b/docs/contributing.rst @@ -0,0 +1 @@ +.. include:: ../CONTRIBUTING.rst diff --git a/docs/demo/demo_masjed_dam.rst b/docs/demo/demo_masjed_dam.rst new file mode 100644 index 0000000..7648286 --- /dev/null +++ b/docs/demo/demo_masjed_dam.rst @@ -0,0 +1,44 @@ +.. _demo_masjed_dam: + + +Masjed Soleyman dam +------------------- + +This tutorial focuses on measuring the post-construction settlement of the `Masjed Soleyman Dam `_, a rock-fill dam on the Karun river, opened in 2002. Previous investigations using GNSS and high-resolution TerraSAR-X data, as detailed in `Emadali et al., 2017 `_, have shown post-construction settlement of the dam. TerraSAR-X data indicates that the dam undergoes a maximum deformation rate of 13 cm/year in the radar line-of-sight. + + +Dataset +^^^^^^^ + +The dataset used in this tutorial is a **Sentinel-1** stack of 100 images. The details are provided in the table below. 
+ + ++------------------------+-------------------------------------+ +| Number of SLC images | 100 | ++------------------------+-------------------------------------+ +| Start date | 2015-01-05 | ++------------------------+-------------------------------------+ +| End date | 2018-09-04 | ++------------------------+-------------------------------------+ +| Sensor | Sentinel-1 | ++------------------------+-------------------------------------+ +| Orbit direction | Descending | ++------------------------+-------------------------------------+ +| InSAR processor | GAMMA | ++------------------------+-------------------------------------+ + + +There are two tutorials for this demo dataset: one with a comprehensive description for beginners, and one with minimal description for advanced users. + +.. toctree:: + :maxdepth: 1 + :caption: Tutorials: + + demo_masjed_dam_detailed_guide.rst + demo_masjed_dam_fast_track.rst + + +Literature +^^^^^^^^^^ + +* Emadali L, Motagh M, Haghighi, MH (2017). Characterizing post-construction settlement of the Masjed-Soleyman embankment dam, Southwest Iran, using TerraSAR-X SpotLight radar imagery. Engineering Structures 143:261-273, DOI 10.1016/j.engstruct.2017.04.009. `Link to paper. `_ diff --git a/docs/demo/demo_masjed_dam_detailed_guide.rst b/docs/demo/demo_masjed_dam_detailed_guide.rst new file mode 100644 index 0000000..14b60ad --- /dev/null +++ b/docs/demo/demo_masjed_dam_detailed_guide.rst @@ -0,0 +1,353 @@ +.. _demo_masjed_dam_detailed_guide: + +Detailed Guide for Masjed Soleyman Dam +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +This tutorial provides a comprehensive guide to SARvey processing. If you are an advanced user, you can proceed directly to the :ref:`fast track for advanced users `. + +.. note:: + + This instruction is based on SARvey version 1.0.0 (Strawberry Pie). Newer versions may differ slightly. + +Step 1: Before Running SARvey +""""""""""""""""""""""""""""" + +Step 1.1: Download the Data +""""""""""""""""""""""""""" + +Download the data by running the following commnad in the console: + +.. code-block:: bash + + wget https://zenodo.org/records/12189041/files/SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018.zip + + +Unzip the downloaded file and change the directory. + +.. code-block:: bash + + unzip SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018.zip + cd SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018 + + +Check the downloaded data using `info.py` and `view.py`. For example: + +.. code-block:: bash + + info.py inputs/slcStack.h5 + +.. code-block:: bash + + view.py inputs/geometryRadar.h5 + + +Step 1.2: Activate SARvey and Change Directory +""""""""""""""""""""""""""""""""""""""""""""""" + +If you have not installed SARvey, refer to the `installation instructions `_. Activate the SARvey environment: + +.. code-block:: bash + + conda activate sarvey + +Ensure SARvey can be called from the console. + +.. code-block:: bash + + sarvey -h + +If you see the following command, it indicates that SARvey cannot be called. Ensure it is installed correctly and the conda environment is activated. + +.. code-block:: none + + command not found: sarvey + +Step 1.3: Create a Config File +"""""""""""""""""""""""""""""" + +Create a config file, which is a JSON file containing the parameters for `sarvey`. The config file can be created using the following command: + +.. code-block:: bash + + sarvey -f config.json 0 0 -g + +Note: The above command only generates a configuration file. Although step 0 is specified, it will not be executed. 
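Before editing the file in the next step, you can get a quick overview of its contents directly from Python. The sketch below uses only the standard library and assumes the generated file is plain JSON; the section and parameter names it prints depend on your SARvey version, so treat the printed output, not the names shown in this guide, as the authoritative reference.

.. code-block:: python

    import json

    # Load the config file generated by "sarvey -f config.json 0 0 -g".
    with open("config.json") as f:
        config = json.load(f)

    # Print the top-level sections and their parameters to get an overview
    # before editing the file (names depend on the installed SARvey version).
    for section, params in config.items():
        print(section)
        if isinstance(params, dict):
            for key, value in params.items():
                print(f"    {key}: {value}")

The next step describes which of these parameters to adjust for this dataset.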
+ +Step 1.4: Modify the config.json File +""""""""""""""""""""""""""""""""""""" + +1.4.1. Open the config.json file and check the parameters. The first parameters to specify in the config file are **path_inputs** and **path_outputs**. For this example dataset, the `slcStack.h5` and `geometryRadar.h5` files are in the `inputs/` directory, which is the default value in the config file. Therefore, you do not need to change it. The **path_outputs** should be `outputs/` for this example. + +.. code-block:: json + + { + "data_directories": { + "path_inputs": "inputs/", + "path_outputs": "outputs/" + } + // other parameters + } + +1.4.2. Specify the **num_cores**. You can check the number of cores on your computer using the following commands. + +In Linux, run: + +.. code-block:: bash + + nproc --all + +In MacOS, run: + +.. code-block:: bash + + sysctl -n hw.ncpu + +It is a good practice to specify a number lower than the number of available cores in the config file. + +.. code-block:: json + + { + // other parameters + "processing": { + "num_cores": 5, + // other parameters + }, + //other parameters + } + + + +Step 2: Running SARvey +"""""""""""""""""""""" + +SARvey consists of five steps as detailed in :ref:`processing`. You can run all steps by specifying starting step `0` and ending step `4`. In this tutorial, however, we will run the steps separately as follows. + +When running `sarvey`, if it finishes normally, you will see a message like the following in the command line: + +.. code-block:: none + + 2024-06-19 11:05:10,305 - INFO - MTI finished normally. + +.. note:: + If you encounter an error, first read all the prompts in the console and carefully track all error and warning messages. If the issue is not clear from the console messages, check the log files stored in the directory specified in the config file. If the error persists and you need assistance, sharing the corresponding log file will help. + + +Step 2.0: Run Step 0 of SARvey: Preparation +''''''''''''''''''''''''''''''''''''''''''' + +The first step creates an interferogram network and calculates the temporal coherence for all pixels. Run the following command: + +.. code-block:: bash + + sarvey -f config.json 0 0 + +In the command line, you will see a list of parameters used by SARvey to run step 0. All parameters that have been changed from the default are indicated: + +.. code-block:: none + + ... + 2024-06-19 11:04:28,137 - INFO - Parameter value default + 2024-06-19 11:04:28,137 - INFO - _________ _____ _______ + 2024-06-19 11:04:28,138 - INFO - num_cores 5 <--- 50 + 2024-06-19 11:04:28,138 - INFO - num_patches 1 1 + 2024-06-19 11:04:28,138 - INFO - temporal_unwrapping True True + 2024-06-19 11:04:28,138 - INFO - unwrapping_method puma puma + 2024-06-19 11:04:28,138 - INFO - + 2024-06-19 11:04:28,138 - INFO - --------------------------------------------------------------------------------- + 2024-06-19 11:04:28,138 - INFO - STEP 0: PREPARATION + 2024-06-19 11:04:28,138 - INFO - --------------------------------------------------------------------------------- + 2024-06-19 11:04:28,138 - INFO - Parameter value default + 2024-06-19 11:04:28,139 - INFO - _________ _____ _______ + 2024-06-19 11:04:28,139 - INFO - start_date None None + 2024-06-19 11:04:28,139 - INFO - stop_date None None + 2024-06-19 11:04:28,139 - INFO - network_type sb <--- delaunay + 2024-06-19 11:04:28,139 - INFO - num_ifgs 3 3 + 2024-06-19 11:04:28,139 - INFO - max_tbase 100 100 + 2024-06-19 11:04:28,139 - INFO - filter_wdw_size 9 9 + ... 
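The ``num_cores`` value echoed in this parameter overview is read directly from ``config.json`` (Step 1.4). If you would rather derive it programmatically than look up ``nproc``/``sysctl`` output by hand, a short standard-library sketch like the following can update the file. It is only a sketch: it assumes the ``processing``/``num_cores`` layout shown in Step 1.4 and a plain-JSON config without comments, which may differ between SARvey versions.

.. code-block:: python

    import json
    import os

    # Conservative choice: one core fewer than available, but at least one.
    num_cores = max(1, (os.cpu_count() or 2) - 1)

    with open("config.json") as f:
        config = json.load(f)

    # Assumed layout: {"processing": {"num_cores": ...}} as shown in Step 1.4.
    config.setdefault("processing", {})["num_cores"] = num_cores

    with open("config.json", "w") as f:
        json.dump(config, f, indent=4)

    print(f"num_cores set to {num_cores}")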
+ +After running this step, a `sbas` directory is created. Inside this directory, you can find the following files: + +.. code-block:: none + + outputs/ + ├── temporal_coherence.h5 + ├── ifg_stack.h5 + ├── ifg_network.h5 + ├── coordinates_utm.h5 + ├── config.json + ├── background_map.h5 + └── pic/ + ├── step_0_temporal_phase_coherence.png + ├── step_0_interferogram_network.png + └── step_0_amplitude_image.png + + +Check the PNG files inside the `outputs/pic` directory and ensure the amplitude image, interferogram network, and temporal coherence look fine. If you are not satisfied with the interferogram network, you can modify the corresponding parameters in the `config.json` file and run step 0 again. + +Use the following command to plot the interferograms: + +.. code-block:: bash + + sarvey_plot outputs/ifg_stack.h5 -i + +This command creates the interferograms as PNG files in the following directory: + +.. code-block:: none + + outputs/ + └── pic/ + └── ifgs/ + ├── 0_ifg.png + ├── 1_ifg.png + └── ... + +Check the interferograms one by one and ensure they look reasonable. In various interferograms, there are fringes associated with deformation approximately at ranges 100-200, azimuth 40-60. + + +Step 2.1: Run Step 1 of SARvey +'''''''''''''''''''''''''''''' + +.. code-block:: bash + + sarvey -f config.json 1 1 + +Outputs of this step are: + +.. code-block:: none + + outputs/ + ├── point_network.h5 + ├── p1_ifg_wr.h5 + ├── point_network_parameter.h5 + └── pic/ + ├── selected_pixels_temp_coh_0.8.png + ├── step_1_mask_p1.png + ├── step_1_arc_coherence.png + ├── step_1_arc_coherence_reduced.png + ├── step_1_rmse_vel_0th_iter.png + └── step_1_rmse_dem_error_0th_iter.png + + +Step 2.2: Run Step 2 of SARvey +'''''''''''''''''''''''''''''' + +.. code-block:: bash + + sarvey -f config.json 2 2 + + +Outputs of this step are: + +.. code-block:: none + + outputs/ + ├── p1_ifg_unw.h5 + ├── p1_ts.h5 + └── pic/ + ├── step_2_estimation_dem_error.png + └── step_2_estimation_velocity.png + +Step 2.3: Run Step 3 of SARvey +'''''''''''''''''''''''''''''' + +.. code-block:: bash + + sarvey -f config.json 3 3 + + +Outputs of this step are: + +.. code-block:: none + + outputs/ + ├── coh80_ifg_wr.h5 + ├── coh80_aps.h5 + ├── p1_aps.h5 + ├── p1_ts_filt.h5 + └── pic/ + ├── step_3_temporal_autocorrelation.png + ├── step_3_stable_points.png + ├── selected_pixels_temp_coh_0.8.png + └── step_3_mask_coh80.png + + +Step 2.4: Run Step 4 of SARvey +'''''''''''''''''''''''''''''' + +.. code-block:: bash + + sarvey -f config.json 4 4 + +.. outputs directory structure to be added + + +The results of step 4 of SARvey, including the time series, are stored in the `coh80_ts.h5` file. The file is named based on the `coherence_p2` parameter in the config.json file. + + +Step 3: Plot Time Series Results +"""""""""""""""""""""""""""""""" + +Check the instruction on how to use the `sarvey_plot`. + +.. code-block:: bash + + sarvey_plot -h + + +Plot the time series using the following command. Flag `-t` indicates that you want to plot the time series. + +.. code-block:: bash + + sarvey_plot outputs/coh80_ts.h5 -t + + +You can visualize velocity and DEM error estimation of second-order points. You can also visualize amplitude, DEM, or temporal coherence as the background. Right-click on any point to see its time series. As you will see in the plot, the density of measurement points on the dam is relatively low. In the next section, you will learn how to modify the config file to increase the density of points. 
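If you want to look at the result file itself rather than going through ``sarvey_plot``, the HDF5 container can be inspected with ``h5py``. The following is only an exploration sketch: the dataset and attribute names are not guaranteed, so use the structure it prints, not the names assumed here, when reading data for further analysis.

.. code-block:: python

    import h5py

    # Open the second-order point results written in step 4 and print the
    # file structure (group/dataset names, shapes, and dtypes).
    with h5py.File("outputs/coh80_ts.h5", "r") as f:

        def show(name, obj):
            if isinstance(obj, h5py.Dataset):
                print(f"{name}: shape={obj.shape}, dtype={obj.dtype}")
            else:
                print(name)

        f.visititems(show)

        # File attributes often carry metadata such as reference information.
        for key, value in f.attrs.items():
            print(f"attr {key}: {value}")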
+ + +Step 4: Modify Config File and Rerun SARvey +""""""""""""""""""""""""""""""""""""""""""" + +Modify the config.json file and change **coherence_p2** from 0.8 to 0.7. + +Run steps 3 and 4 using the following command: + +.. code-block:: bash + + sarvey -f config.json 3 4 + + +A new file `coh70_ts.h5` is created. You can now visualize this file that has a higher point density. + +.. code-block:: bash + + sarvey_plot outputs/coh70_ts.h5 -t + + +.. note:: + Be cautious that reducing the value of **coherence_p2** too much may include noisy points of low quality in the analysis, potentially leading to poor final results. + + You should carefully read the :ref:`processing` documentation to understand the meaning of each parameter and carefully choose reasonable values. You should also check the details of all parameters using the -p flag in `sarvey` and decide how to tune them. + +.. code-block:: bash + + sarvey -f config.json 0 0 -p + + +Step 5: Export to GIS Format +"""""""""""""""""""""""""""" + +Export the data to Shapefiles using the following command: + +.. code-block:: bash + + sarvey_export outputs/coh70_ts.h5 -o outputs/shp/coh70_ts.shp + +You can open the exported data in any GIS software. If you use QGIS, you can use the `PS Time Series Viewer `_ plugin to draw the time series. + + +Step 6: Validate Your Results +""""""""""""""""""""""""""""" + +You can download a copy of the final SARvey products from `this link `_. Use these files to compare your results and ensure everything worked correctly. + diff --git a/docs/demo/demo_masjed_dam_fast_track.rst b/docs/demo/demo_masjed_dam_fast_track.rst new file mode 100644 index 0000000..372a079 --- /dev/null +++ b/docs/demo/demo_masjed_dam_fast_track.rst @@ -0,0 +1,92 @@ +.. _demo_masjed_dam_fast_track: + +Fast Track Guide for Masjed Soleyman Dam +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +If you are an advanced user, you can proceed with this fast track tutorial. If you prefer a more detailed, step-by-step guide, please refer to the :ref:`detailed guide ` for this example. + +.. note:: + + These instructions are based on SARvey version 1.0.0 (Strawberry Pie). Newer versions may differ slightly. + + +Download the Data +""""""""""""""""" + +In this tutorial, a processed stack of data is provided. If you wish to generate data for other areas, please refer to the :ref:`preparation` section. + +.. code-block:: bash + + wget https://zenodo.org/records/12189041/files/SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018.zip + unzip SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018.zip + cd SARvey_input_data_Masjed_Soleyman_dam_S1_dsc_2015_2018 + + +Activate SARvey environment +""""""""""""""""""""""""""" + +.. code-block:: bash + + conda activate sarvey + + +Create a Config File +"""""""""""""""""""" + +.. code-block:: bash + + sarvey -f config.json 0 0 -g + +Specify parameters in the config file. Set a reasonable value for **num_cores**. + +Run **SARvey** +"""""""""""""" + +You can run each step individually or a range of steps by specifying the first and last step. + +.. code-block:: bash + + sarvey -f config.json 0 4 + +Check Outputs +""""""""""""" + +First, check the output snapshots in the `outputs/pics` directory. You can also use **`sarvey_plot`** to plot various products to assess the quality of the results and decide how to adjust parameters. Modify the parameters in the config file and rerun the corresponding steps of `sarvey` to improve the results. 
For instance, changing **`coherence_p2`** from 0.8 to 0.7 and rerunning steps 3 and 4 can increase the density of the final set of points. However, be cautious that reducing the value too much may include noisy points of low quality in the analysis, potentially leading to poor final results. You can check the details of all parameters using the -p flag in `sarvey` and decide how to tune them. For more explanations, please refer to :ref:`processing` + + + +Plot Time Series Results +"""""""""""""""""""""""" + +The final products, including the time series, are stored in the coh\*\*_ts.h5 file. The file is named based on the coherence_p2 parameter you used. Plot the time series using the following command: + +.. code-block:: bash + + sarvey_plot outputs/coh80_ts.h5 -t + +You can visualize velocity and DEM error estimation of second-order points. You can also visualize amplitude, DEM, or temporal coherence as the background. Right-click on any point to see its time series. + +.. description of time series options to be added. + + + + +Export to GIS Format +"""""""""""""""""""" + +Export the data to Shapefiles using the following command. + + +.. code-block:: bash + + sarvey_export outputs/coh80_ts.h5 -o outputs/shp/coh80_ts.shp + +You can visualize the data in any GIS software. If you use QGIS, you can use the `PS Time Series Viewer `_ plugin to draw the time series. + + + +Validate Your Results +""""""""""""""""""""" + +You can download a copy of the final SARvey products from `this link `_. Use these files to compare your results and ensure everything worked correctly. + diff --git a/docs/demo_datasets.rst b/docs/demo_datasets.rst new file mode 100644 index 0000000..4affd08 --- /dev/null +++ b/docs/demo_datasets.rst @@ -0,0 +1,17 @@ +.. _example_datasets: + +============= +Demo Datasets +============= + +Several demo datasets are available to help you learn how to perform SARvey processing effectively. + +.. note:: + The demo datasets and instructions provided serve as a practical guide for using SARvey. They do not cover all the software details or offer the best processing strategies for every specific dataset. + +.. toctree:: + :maxdepth: 1 + :caption: Demo Datasets: + + demo/demo_masjed_dam.rst + diff --git a/docs/history.rst b/docs/history.rst new file mode 100644 index 0000000..2506499 --- /dev/null +++ b/docs/history.rst @@ -0,0 +1 @@ +.. include:: ../HISTORY.rst diff --git a/docs/index.rst b/docs/index.rst new file mode 100644 index 0000000..afb0dbc --- /dev/null +++ b/docs/index.rst @@ -0,0 +1,25 @@ +==================== +SARvey documentation +==================== + +.. toctree:: + :maxdepth: 2 + :caption: Contents: + + readme + Source code repository + installation + usage + preparation + processing + demo_datasets + modules + contributing + authors + history + +Indices and tables +================== +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` diff --git a/docs/installation.rst b/docs/installation.rst new file mode 100644 index 0000000..3c575b5 --- /dev/null +++ b/docs/installation.rst @@ -0,0 +1,186 @@ +.. _installation: + +============ +Installation +============ + +SARvey is a cross-platform python-based software and can be installed on + * `Linux`_ + * `MacOS ARM (Apple Silicon M2)`_ + * `Windows using WSL`_ + + +Linux +----- + +On Linux, SARvey can be installed `Using Mamba (recommended)`_ or `Using Anaconda or Miniconda`_. 
+ +Using Mamba (recommended) +^^^^^^^^^^^^^^^^^^^^^^^^^ + +Using mamba_ (latest version recommended), **SARvey** is installed as follows: + + +1. Clone the SARvey source code and install SARvey and all dependencies from the environment_sarvey.yml file: + + .. code-block:: bash + + git clone git@gitlab.projekt.uni-hannover.de:ipi-sar4infra/timeseries.git + cd timeseries + + +2. Create virtual environment for **SARvey** (optional but recommended): + + .. code-block:: bash + + pip install conda-merge + wget https://raw.githubusercontent.com/insarlab/MiaplPy/main/conda-env.yml + conda-merge conda-env.yml tests/CI_docker/context/environment_sarvey.yml > env.yml + mamba env create -n sarvey -f env.yml + rm env.yml conda-env.yml + mamba activate sarvey + pip install git+https://github.com/insarlab/MiaplPy.git + pip install . + + +This is the preferred method to install **SARvey**, as it always installs the most recent stable release and +automatically resolves all the dependencies. + + +Using Anaconda or Miniconda +^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Using conda_ (latest version recommended), **SARvey** is installed as follows: + + +1. Then clone the **SARvey** source code and install **SARvey** and all dependencies from the environment_sarvey.yml file: + + .. code-block:: bash + + git clone git@gitlab.projekt.uni-hannover.de:ipi-sar4infra/timeseries.git + cd timeseries + + +1. Create virtual environment for **SARvey** (optional but recommended): + + .. code-block:: bash + + pip install conda-merge + wget https://raw.githubusercontent.com/insarlab/MiaplPy/main/conda-env.yml + conda-merge conda-env.yml tests/CI_docker/context/environment_sarvey.yml > env.yml + conda env create -n sarvey -f env.yml + rm env.yml conda-env.yml + conda activate sarvey + pip install git+https://github.com/insarlab/MiaplPy.git + pip install . + + +MacOS ARM (Apple Silicon M2) +---------------------------- + +This guide provides instructions for installing SARvey on MacOS ARM M2 using conda_. +If you do not have Conda, install `Conda for Mac`_. +Using conda_ (latest version recommended), SARvey is installed as follows: + +0. **Create a directory for the SARvey package and navigate to it in the terminal. You can choose any other directory if you prefer.** + + .. code-block:: bash + + mkdir -p ~/software/sarvey + +1. **Install MiaplPy before installing SARvey in the same environment where you want to install SARvey.** + + .. code-block:: bash + + cd ~/software/sarvey + git clone https://github.com/insarlab/MiaplPy.git + cd MiaplPy + + 1.1 Open `conda-env.yml` in an editor of your choice and comment out the line `isce2`. Alternatively, you can run the following command:. + + .. code-block:: bash + + sed -i '' '/isce2/s/^/# /' conda-env.yml + + 1.2 Install the package using Conda. + + .. code-block:: bash + + conda env update --name sarvey --file conda-env.yml + conda activate sarvey + python -m pip install . + +2. **Install SARvey** + + 2.1 Download the source code of the SARvey package. + + .. code-block:: bash + + cd ~/software/sarvey + git clone git@gitlab.projekt.uni-hannover.de:ipi-sar4infra/timeseries.git + cd timeseries + + 2.2 Open `tests/CI_docker/context/environment_sarvey.yml` in an editor of your choice and comment out the lines `isce2` and `gcc_linux-64`. Alternatively, you can run the following commands. + + .. 
code-block:: bash + + sed -i '' '/isce2/s/^/# /' tests/CI_docker/context/environment_sarvey.yml + sed -i '' '/gcc_linux-64/s/^/# /' tests/CI_docker/context/environment_sarvey.yml + + Note: As of the time of creation of this document, `isce2` for MacOS ARM64 is not available in Conda repositories. Therefore, it is skipped, but it should not cause any problems for running SARvey. Also, `gcc_linux-64` is not required on ARM64. + + 2.3 Install Timeseries using the same environment that you used to install MiaplPy. + + .. code-block:: bash + + conda env update --name sarvey -f tests/CI_docker/context/environment_sarvey.yml + conda activate sarvey + pip install . + +3. **Set up the PATH for MiaplPy and SARvey.** + + 3.1 Run the following commands to set up the path in `~/source_sarvey.sh`. + + .. code-block:: bash + + echo 'export miaplpy_path=~/software/sarvey/MiaplPy/src/' > ~/source_sarvey.sh + echo 'export PYTHONPATH=${PYTHONPATH:+$PYTHONPATH:}$miaplpy_path' >> ~/source_sarvey.sh + echo 'export timeseries_path=~/software/sarvey/timeseries' >> ~/source_sarvey.sh + echo 'export PATH=${PATH}:$timeseries_path:$timeseries_path/sarvey' >> ~/source_sarvey.sh + echo 'export PYTHONPATH=${PYTHONPATH:+$PYTHONPATH:}:$timeseries_path' >> ~/source_sarvey.sh + +4. **Test the installation** + + 4.1. Open a new terminal and activate the software. + + .. code-block:: bash + + conda activate sarvey + source ~/source_sarvey.sh + + 4.2. Run the following commands. If the help messages of SARvey and MiaplPy are shown, the installation is correctly done. + + .. code-block:: bash + + sarvey -h + + +Windows using WSL +----------------- + +On Windows, SARvey is tested on Windows Subsystem for Linux (WSL_) version 2. Please follow the `Linux`_ installation. + + + +.. note:: + + Timeseries has been tested with Python 3.6+., i.e., should be fully compatible to all Python versions from 3.6 onwards. + + +.. _pip: https://pip.pypa.io +.. _Python installation guide: http://docs.python-guide.org/en/latest/starting/installation/ +.. _conda: https://conda.io/docs +.. _mamba: https://github.com/mamba-org/mamba +.. _Conda for Mac: https://docs.conda.io/projects/conda/en/latest/user-guide/install/macos.html +.. _WSL: https://learn.microsoft.com/en-us/windows/wsl/ + diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 0000000..91c05d4 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,36 @@ +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=python -msphinx +) +set SOURCEDIR=. +set BUILDDIR=_build +set SPHINXPROJ=sarvey + +if "%1" == "" goto help + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The Sphinx module was not found. Make sure you have Sphinx installed, + echo.then set the SPHINXBUILD environment variable to point to the full + echo.path of the 'sphinx-build' executable. Alternatively you may add the + echo.Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.http://sphinx-doc.org/ + exit /b 1 +) + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% + +:end +popd diff --git a/docs/preparation.rst b/docs/preparation.rst new file mode 100644 index 0000000..141a38d --- /dev/null +++ b/docs/preparation.rst @@ -0,0 +1,188 @@ +.. _preparation: + +=========== +Preparation +=========== + +SARvey requires a coregistered stack of SLC and the related geometry information in the MiaplPy_ data format. 
+The coregistered stack of SLC can be created using an InSAR processor, such as ISCE, GAMMA, or SNAP. +Currently MiaplPy only supports ISCE_. Support for GAMMA and SNAP_ is planned for future. +After creating the coregistered stack of SLC, run the “load_data” step from MiaplPy to create the “inputs” directory which contains “slcStack.h5” and “geometryRadar.h5”. + + + +Preprocessing +------------- + +ISCE +^^^^ +... ISCE brief processing to be added + +The ISCE products should have the following directory structure that is later in `Loading Data into MiaplPy`_ step. + +:: + + ISCE_processed_data + ├─ reference + │ ├─ IW*.xml + │ └─ ... + ├─ merged + │ ├─ SLC + │ │ ├─ YYYYMMDD + │ │ │ ├─ YYYYMMDD.slc.full + │ │ │ └─ ... + │ │ ├─ YYYYMMDD + │ │ ├─ YYYYMMDD + │ ├─ geom_reference + │ │ ├─ hgt.rdr.full + │ │ ├─ lat.rdr.full + │ │ ├─ lon.rdr.full + │ │ ├─ los.rdr.full + │ │ └─ ... + └─ baselines + └─ YYYYMMDD_YYYYMMDD + └─ YYYYMMDD_YYYYMMDD.txt + + +GAMMA +^^^^^ +Support is in progress. + + +SNAP +^^^^ +Support is planned for future. + + +Loading Data to MiaplPy Format +------------------------------ + +**Loading Data into MiaplPy** +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +Run the `load_data` step of MiaplPy to convert the preprocessed stack of SLC to `slcStack.h5` and `geometryRadar.h5`. +Refer to MiaplPy_ instruction on how to prepare the stack of coregistered SLC and modify the template file. + +.. code-block:: bash + + miaplpyApp miaplpy_template_file.txt --dostep load_data + +The output includes the following directory structure that is later used as input in SARvey processing: + +:: + + inputs + ├── slcStack.h5 + └── geometryRadar.h5 + + + +**Check the data** +^^^^^^^^^^^^^^^^^^ + +Use `info.py` from MintPy_ to check the files' information. + +.. code-block:: bash + + info.py inputs/slcStack.h5 + info.py inputs/geometryRadar.h5 + + +Use `view.py` from MintPy_ to visualize the files and make sure they look fine. + +.. code-block:: bash + + view.py inputs/slcStack.h5 + view.py inputs/geometryRadar.h5 + + + +Optional Steps +-------------- + + +**Phase Linking** +^^^^^^^^^^^^^^^^^ + + +This step is optional. You can run it if you wish to perform distributed scatterers (DS) analysis. +**Caution:** This step is computationally heavy and might be time-consuming for large datasets. + +.. code-block:: bash + + miaplpyApp miaplpy_template_file.txt --dostep phase_linking + miaplpyApp miaplpy_template_file.txt --dostep concatenate_patches + +The output includes the following directory structure that is later used as additional input in SARvey processing if the config file is modified to inclued DS analysis. + +:: + + MiaplPy working directory + ├─ inverted + │ ├── phase_series.h5 + │ ├── ... + ├── maskPS.h5 + └── ... + + + +Subset Data +^^^^^^^^^^^ + +Data loaded into MiaplPy can be subset using Mintpy_'s subset function. +This is particularly useful if you have a dataset in MiaplPy format and want to crop a small area of it. +Both slcStack.h5 and geometryRadar.h5 should be subset with the same range and azimuth coordinate ranges. +Also the Phase Linking results (phase_series.h5 and maskPS.h5) should be subset if it has been created. +Please refer to Mintpy_ for more instruction to subset. +Run `subset.py -h` for information about parameters. +The following example crops the data between 500 and 800 in range and 100 and 1000 in azimuth coordinates. + + +.. 
code-block:: bash + + subset.py -h + + subset.py inputs/slcStack.h5 -x 500 800 -y 100 1000 -o inputs_crop/slcStack.h5 + subset.py inputs/geometryRadar.h5 -x 500 800 -y 100 1000 -o inputs_crop/geometryRadar.h5 + + subset.py inverted/phase_series.h5 -x 500 800 -y 100 1000 -o inverted_crop/phase_series.h5 + subset.py maskPS.h5 -x 500 800 -y 100 1000 -o inverted_crop/maskPS.h5 + + +`Check the data`_ after subsetting it and make sure all products look correct. + + + +Create Manual Mask +^^^^^^^^^^^^^^^^^^ +A mask can be created manually using MintPy's `generate_mask.py` tool. +This is particularly useful if you want to limit the MTInSAR processing to certain areas. +Run `generate_mask.py -h` for information about parameters. +The following example allows to draw a polygon on top of the DEM to create a mask. + +.. code-block:: bash + + generate_mask.py -h + + generate_mask.py inputs/geometryRadar.h5 height -o mask.h5 --roipoly # draw polygon on top of the DEM + +Alternatively, a mask can be drawn on top of the temporal coherence map, in case step 0 (preparation) of `sarvey` has been executed already. + +.. code-block:: bash + + generate_mask.py results_dir/temporal_coherence.h5 -o mask.h5 --roipoly # draw polygon on top of the temporal coherence image + +Follow the instructions in the terminal: + + Select points in the figure by enclosing them within a polygon. + Press the 'esc' key to start a new polygon. + Try hold to left key to move a single vertex. + After complete the selection, close the figure/window to continue. + + + +.. _MiaplPy: https://github.com/insarlab/MiaplPy +.. _ISCE: https://github.com/isce-framework/isce2 +.. _SNAP: https://step.esa.int/main/toolboxes/snap +.. _MintPy: https://github.com/insarlab/MintPy + diff --git a/docs/processing.rst b/docs/processing.rst new file mode 100644 index 0000000..a2d8a2c --- /dev/null +++ b/docs/processing.rst @@ -0,0 +1,355 @@ +.. _processing: + +======================================= +Multitemporal InSAR processing workflow +======================================= + +The `sarvey` command line interface executes the multitemporal InSAR processing workflow. +The workflow is described in the paper + + Piter, A., Haghshenas Haghighi, M., Motagh, M.(2024). An in-depth study on Sentinel-1 InSAR for transport infrastructure monitoring. PFG - Journal of Photogrammetry, Remote Sensing and Geoinformation Science. (paper currently under review). + +All processing steps are described in detail in the following sections. +Two processing strategies are provided with either one- or two-step unwrapping. +The workflow should be decided based on the characteristics of the displacement (spatial extend, magnitude, temporal behaviour). +The parameters of each step are handled via the `configuration file`_ for which the parameters are named within the description of each step. + + +Configuration file +------------------ +The configuration file is a JSON file containing all the parameters required to run `sarvey`. +This file can be generated using the `sarvey` command with the **"-g"** flag, where you can specify your desired filename. + + +.. code-block:: bash + + sarvey -f config.json 0 0 -g + +Note: The above command only generates a configuration file. Although step 0 is specified, it will not be executed. + +The configuration file has various sections, as detailed below: + + +* data_directories + + + This section specifies the paths to the input and output data. The paths can be either absolute or relative. 
+ + +* logging + + + This section defines the logging level displayed in the command line and the directory path where log files will be stored. + + +* processing + + + This section includes top-level parameters such as the number of cores and the unwrapping method. + + +* phase_linking + + + This section specifies the Phase Linking parameters. By default, `"phase_linking": false`. + If you wish to perform DS analysis, change it to `true`. Note: If `"phase_linking": true`, you must complete the corresponding step of MiaplPy as described in `preparation `_. In the configuration file, set `path_inverted` to the path of the inverted directory of MiaplPy data. + + + +* preparation + + + This section includes network parameters, such as the start and end dates, network type, and `filter_wdw_size`, which specifies the window size used to estimate the temporal coherence for each pixel. + + +* consistency_check + + + This section contains parameters related to the first order points. + +* unwrapping + + + This section will specify parameters related to the unwrapping process. + +* filtering + + + This section defines the parameters for atmospheric estimation and filtering. Atmospheric filtering is enabled by default. To skip it, set `"skip_filtering": true`. + + +* densification + + + This section includes the settings for second order points. + + + + + +Processing steps for two-step unwrapping workflow +------------------------------------------------- + +Step 0: Preparation +^^^^^^^^^^^^^^^^^^^ + +- Loading the resampled SLC data: + The resampled SLC (Single Look Complex) data is read from the inputs/slcStack.h5 + This data is complex-valued and contains both amplitude and phase information. + The data is subsetted to the specified time span (via **preparation:start_date** and **preparation:stop_date** in the config file). + A description of how to prepare the data and make a spatial subset of the data is described in `data preparation in MiaplPy `_. + +- Designing the interferogram network: + From the stack of SLC images, the interferogram network is designed. + The network of interferograms is designed based on the temporal and perpendicular baselines of the SLC images. + Different networks can be created (via **preparation:network_type** in the config file) and should be chosen based on the characteristics of the displacement (spatial extend, magnitude, temporal behaviour). + Currently five types of networks are supported: + + a) small baseline network ('sb') (Berardino et al. 2002), + b) small temporal baseline network ('stb') (only consecutive images are used to form interferograms) + c) small temporal baselines + yearly interferograms ('stb_yearly') + d) delaunay network ('delaunay') + e) star network ('star', single-reference network) (Ferretti et al. 2001) + + +- Generating a stack of interferograms: + The stack of interferograms is generated based on the specified interferogram network. + +- Estimating the temporal coherence: + The phase noise of each pixel is approximated by the estimation of the temporal phase coherence (Zhao and Mallorqui 2019). + Thereby, a low-pass filter with a certain window size is used (**preparation:filter_wdw_size**). + The temporal coherence is used to select the first- and second-order points in the later steps (**consistency_check:coherence_p1** and **filtering:coherence_p2**). 
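+
+    The following simplified Python sketch illustrates the principle of this coherence estimate for a stack of complex interferograms.
+    It mirrors the approach implemented in `sarvey/coherence.py`, but omits the patch-wise processing and the parallelization; the function and variable names are illustrative only.
+
+    .. code-block:: python
+
+        import numpy as np
+        from scipy.signal import convolve2d
+
+        def temporal_phase_coherence(ifgs: np.ndarray, wdw_size: int = 9) -> np.ndarray:
+            """Sketch of the temporal phase coherence for an ifg stack of shape (length, width, num_ifgs)."""
+            kernel = np.ones((wdw_size, wdw_size), dtype=np.float64)
+            kernel[wdw_size // 2, wdw_size // 2] = 0  # exclude the pixel itself
+
+            residual_phase = np.zeros(ifgs.shape, dtype=np.float32)
+            for i in range(ifgs.shape[2]):
+                # low-pass filtered phase estimated from the neighbourhood of each pixel
+                avg_neighbours = convolve2d(ifgs[:, :, i], kernel, mode="same", boundary="symm")
+                # residual phase between each pixel and its filtered neighbourhood
+                residual_phase[:, :, i] = np.angle(ifgs[:, :, i] * np.conjugate(avg_neighbours))
+
+            # temporal coherence: absolute value of the temporal mean of the residual phasors
+            return np.abs(np.mean(np.exp(1j * residual_phase), axis=2))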
+
+- Output of this step
+    - background_map.h5
+    - ifg_stack.h5
+    - coordinates_utm.h5
+    - ifg_network.h5
+    - temporal_coherence.h5
+
+
+Step 1: Consistency Check
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+
+- Selecting candidates for first-order points:
+    Candidates for the first-order points are selected based on the temporal coherence threshold (**consistency_check:coherence_p1**).
+    However, not all points with a coherence above the threshold are selected, but only those which have the highest coherence within a grid cell of size **consistency_check:grid_size** (in [m]).
+    A mask file can be specified (**consistency_check:spatial_mask_file_p1**) to limit the first-order points to the given area of interest.
+
+- Creating a spatial network:
+    After selecting the candidates for first-order points, the method creates a spatial network to connect the first-order points.
+    For each arc in the network, the double-difference phase time series is calculated.
+    A delaunay network ensures the connectivity in the spatial network, and k-nearest neighbors (**consistency_check:knn**) can be used to increase the redundancy in the network.
+    Arcs with a length above a threshold (**consistency_check:max_arc_length**) are removed from the network to reduce the impact of atmospheric effects.
+
+- Temporal unwrapping:
+    All arcs in the spatial network are temporally unwrapped based on a phase model consisting of the DEM error difference and the velocity difference between the two points of the arc.
+    The temporal coherence derived from the model fit is maximized by searching within a search space of given bounds (**consistency_check:velocity_bound** and **consistency_check:dem_error_bound**).
+    Within the bounds, the search space is discretized (**consistency_check:num_samples**).
+    The final parameters for each arc are derived from a gradient descent refinement of the discrete search space result.
+
+- Performing a consistency check on the data:
+    During the atmospheric filtering in step 3, only high-quality first-order points are supposed to be used.
+    Therefore, outliers among the candidates are removed with a consistency check.
+    The consistency check is based on the temporal coherence estimated during the temporal unwrapping of each arc.
+    A point is assumed to be an outlier if it is connected to many arcs with a low temporal coherence from temporal unwrapping.
+    Arcs with a temporal coherence below a threshold (**consistency_check:arc_coherence**) are removed.
+    Similarly, points whose mean coherence over all connected arcs falls below this threshold are removed (specified by the same parameter **consistency_check:arc_coherence**).
+    Moreover, points which are connected by fewer arcs than a threshold (**consistency_check:min_num_arc**) are removed.
+    Afterwards, the consistency within the spatial network is checked.
+    For this purpose, the parameters (DEM error difference and velocity difference) of all arcs are integrated in the spatial network relative to an arbitrary reference point using least squares.
+    The residuals of the integration are used to identify outliers.
+
+- Output of this step
+    - point_network.h5
+    - point_network_parameter.h5
+    - p1_ifg_wr.h5
+
+Step 2: Unwrapping
+^^^^^^^^^^^^^^^^^^
+
+Two unwrapping options (**processing:temporal_unwrapping**, also applies to step 4) are implemented and should be chosen based on the characteristics of the displacement (spatial extent, magnitude, temporal behaviour).
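+
+Both options build on the arc parameters (DEM error difference and velocity difference) estimated in step 1.
+The following numpy sketch illustrates, in a schematic way, how such arc-wise differences can be integrated to point-wise values relative to an arbitrary reference point using least squares; it illustrates the principle only, it is not the SARvey implementation, and all names are made up for the example.
+
+.. code-block:: python
+
+    import numpy as np
+
+    def integrate_arc_parameters(arcs, arc_vals, num_points, ref_idx=0):
+        """Integrate arc-wise parameter differences to point-wise values (schematic sketch).
+
+        arcs:     (num_arcs, 2) array with the point indices of each arc.
+        arc_vals: (num_arcs,) parameter difference estimated for each arc.
+        """
+        num_arcs = arcs.shape[0]
+        design_mat = np.zeros((num_arcs, num_points))
+        design_mat[np.arange(num_arcs), arcs[:, 1]] = 1.0
+        design_mat[np.arange(num_arcs), arcs[:, 0]] = -1.0
+
+        # fix the arbitrary reference point by removing its column from the design matrix
+        design_red = np.delete(design_mat, ref_idx, axis=1)
+        point_vals_red, *_ = np.linalg.lstsq(design_red, arc_vals, rcond=None)
+
+        point_vals = np.insert(point_vals_red, ref_idx, 0.0)  # reference point gets value 0
+        residuals = design_mat @ point_vals - arc_vals        # used for outlier detection
+        return point_vals, residuals
+
+The residuals of such an integration are what the consistency check in step 1 uses to identify outlier points.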
+
+- Output of this step
+    - p1_ifg_unw.h5
+    - p1_ts.h5
+
+Option 1) Unwrapping in time and space
+""""""""""""""""""""""""""""""""""""""
+
+- Integrating parameters from arcs to points:
+    The temporal unwrapping results of the spatial network from the consistency check in step 1 are used in this step.
+    The parameters of the arcs are integrated relative to an arbitrary reference point from the arcs to the points using least squares.
+
+- Removing phase contributions (mean velocity and DEM error):
+    After integrating the parameters, the phase contributions are removed from the wrapped interferometric phase of the first-order points.
+
+- Spatial unwrapping of the residuals:
+    The residuals in each interferogram are unwrapped in space using a sparse point network unwrapping method (**processing:unwrapping_method**) (Bioucas-Dias and Valadao 2007, Boykov and Kolmogorov 2004).
+    The spatial neighbourhood for unwrapping is defined by the arcs of the spatial network.
+    There are two options (**unwrapping:use_temporal_unwrapping_arcs**).
+    Either the spatial network from the consistency check (step 1) can be used for unwrapping, i.e. the spatial network after removing arcs with a low temporal coherence from temporal unwrapping.
+    Alternatively, the spatial network is re-created with a delaunay network and k-nearest neighbors (**unwrapping:knn**).
+
+- Restore phase contributions to the spatially unwrapped residual phase:
+    Finally, the phase contributions are added back to the spatially unwrapped residual phase of each point.
+
+- Adjust reference:
+    All restored unwrapped interferograms are referenced to the peak of the velocity histogram derived from all points.
+
+- Inverting the interferogram network:
+    The interferogram network is inverted for each point to retrieve the displacement time series relative to the first acquisition.
+
+Option 2) Unwrapping in space
+"""""""""""""""""""""""""""""
+
+- Spatial unwrapping:
+    The interferograms are unwrapped independently in space with a sparse point network unwrapping method (**processing:unwrapping_method**) (Bioucas-Dias and Valadao 2007, Boykov and Kolmogorov 2004).
+    The spatial neighbourhood for unwrapping is defined by the arcs of the spatial network.
+    There are two options (**unwrapping:use_temporal_unwrapping_arcs**).
+    Either the spatial network from the consistency check (step 1) can be used for unwrapping, i.e. the spatial network after removing arcs with a low temporal coherence from temporal unwrapping.
+    Alternatively, the spatial network is re-created with a delaunay network and k-nearest neighbors (**unwrapping:knn**).
+
+- Adjust reference:
+    All unwrapped interferograms are referenced to the peak of the velocity histogram derived from all points.
+
+- Inverting the interferogram network:
+    The interferogram network is inverted for each point to retrieve the displacement time series relative to the first acquisition.
+
+Step 3: Filtering
+^^^^^^^^^^^^^^^^^
+
+In this step, the atmospheric phase screen (APS) is estimated from the displacement time series of the first-order points.
+Afterwards, the APS is interpolated to the location of the second-order points.
+The filtering can be skipped by setting **filtering:skip_filtering** to True.
+However, step 3 still has to be executed, as the second-order points are selected during this step.
+
+- Selecting pixels with no or linear displacement:
+    Among the first-order points, the points with no or only linear displacement are selected (**filtering:use_moving_points**).
+ It is assumed that for these points, the phase consists only of atmospheric effect and noise after removing the mean velocity and DEM error. + Points with a non-linear displacement behaviour are removed by a threshold on the temporal autocorrelation of the displacement time series (**filtering:max_auto_corr**) (Crosetto et al. 2018). + A regular grid (**filtering:grid_size** in [m]) is applied to select the first-order points with the lowest temporal autocorrelation to reduce the computational complexity during filtering. + +- Selecting second-order points: + Second-order points are selected based on a temporal coherence threshold (**filtering:coherence_p2**) on the temporal phase coherence computed during step 0. + A mask file can be specified (**filtering:spatial_mask_file_p2**) to limit the second-order points to the given area of interest. + Second-order points can also be selected based on the results of phase-linking (set **phase_linking:phase_linking** to True) implemented in MiaplPy (Mirzaee et al. 2023). + More information on Miaplpy and phase-linking can be found `here `_. + The number of siblings (**phase_linking:num_siblings**) used during phase-linking within MiaplPy processing needs to be specified to identify the distributed scatterers (DS) among the pixels selected by MiaplPy. + A mask file can be specified (**phase_linking:spatial_mask_file_pl**) to limit the phase-linking to the given area of interest. + MiaplPy also provides a selection of persistent scatterers (PS) which can be included as second-order points (set **phase_linking:use_ps** to True). + In case the second-order points are selected among the results from MiaplPy, the filtered interferometric phase (MiaplPy result) is used for the respective points. + The DS pixels from MiaplPy and the pixels selected with the temporal phase coherence from step 0 are both selected with the same coherence threshold (**filtering:coherence_p2**). + +- Estimating the atmospheric phase screen (APS): + The estimation of the APS takes place in time-domain and not interferogram-domain to reduce the computational time. + The phase contributions are removed from the first-order points which were selected for atmospheric filtering. + Their residual time series contains atmospheric phase contributions and noise. + As the APS is assumed to be spatially correlated, the residuals of all points are spatially filtered (**filtering:interpolation_method**) independently for each time step. + After filtering, the estimated APS is interpolated to the location of the second-order points. + +- Output of this step + - p1_ts_filt.h5 + - p1_aps.h5 + - cohXX_aps.h5 + - cohXX_ifg_wr.h5 + +The placeholder XX depends on the threshold for the temporal coherence used for selecting the second-order points. +For example, a threshold of 0.8 would result in coh80_aps.h5 and coh80_ifg_wr.h5. + +Step 4: Densification +^^^^^^^^^^^^^^^^^^^^^ + +Two unwrapping options (**processing:temporal_unwrapping**, also applies to step 2) are implemented and should be chosen based on the characteristics of the displacement (spatial extend, magnitude, temporal behaviour). + +- Output of this step + - cohXX_ifg_unw.h5 + - cohXX_ts.h5 + +The placeholder XX depends on the threshold for the temporal coherence used for selecting the second-order points during filtering in step 3. +For example, a threshold of 0.8 would result in coh80_ifg_unw.h5 and coh80_ts.h5. 
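+
+Both options make use of the same kind of model-based temporal unwrapping as step 1: for every second-order point, a DEM error and a mean velocity are searched that maximize the joint temporal coherence over the arcs to its nearest first-order points.
+The following numpy sketch illustrates this principle with a plain grid search; it is schematic only (no gradient refinement, simplified phase model) and the function and parameter names are illustrative, not part of the SARvey API.
+
+.. code-block:: python
+
+    import numpy as np
+
+    def estimate_point_parameters(arc_phase, tbase, pbase, slant_range, inc_angle,
+                                  wavelength, vel_bound=0.1, dem_bound=100.0, num_samples=100):
+        """Grid search for (velocity, DEM error) maximizing the joint temporal coherence.
+
+        arc_phase:   (num_arcs, num_ifgs) wrapped residual phase of the arcs connecting one
+                     second-order point to its nearest first-order points.
+        tbase/pbase: temporal [years] and perpendicular [m] baselines per interferogram.
+        inc_angle:   incidence angle in radians.
+        """
+        factor = 4.0 * np.pi / wavelength
+        best = (-1.0, 0.0, 0.0)  # (coherence, velocity, dem_error)
+        for vel in np.linspace(-vel_bound, vel_bound, num_samples):
+            for dem_err in np.linspace(-dem_bound, dem_bound, num_samples):
+                # schematic phase model: velocity term + DEM error term
+                model = factor * (vel * tbase + dem_err * pbase / (slant_range * np.sin(inc_angle)))
+                residual = arc_phase - model[np.newaxis, :]
+                coherence = np.abs(np.mean(np.exp(1j * residual)))  # joint coherence over all arcs
+                if coherence > best[0]:
+                    best = (coherence, vel, dem_err)
+        return best
+
+In SARvey itself, the corresponding search space is controlled by the parameters of the **densification** section in the configuration file.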
+ +Option 1: Unwrapping in time and space +"""""""""""""""""""""""""""""""""""""" + +- Removing APS from interferograms + The wrapped interferograms are corrected for the interpolated APS for both the first and second order points. + +- Densify network: + The parameters (DEM error and velocity) of each second-order point are estimated independently from the other second-order points. + The parameters are estimated by temporal unwrapping with respect to the closest first-order points (**densification:num_connections_p1**, **densification:max_distance_p1**) with a phase model consisting of DEM error and velocity (**densification:velocity_bound** and **densification:dem_error_bound**, **densification:num_samples**). + The densification is similar to the approach described by Van Leijen (2014), but jointly maximizes the temporal coherence to find the parameters that fit best to all arcs connecting the second-order point to the first-order points. + The estimated parameters are validated by estimating the fit for the arcs connecting the second-order point to the closest second-order points (**densification:num_connections_p2**). + However, this validation is only visualized in the pic/ directory, but has no impact on the final results. + +- Remove outliers: + Second-order points which could not be temporally unwrapped with respect to the closest first-order points are removed. + For this purpose, a threshold on the joint temporal coherence considering the residuals of all arcs connecting the respective second-order point to the closest first-order points is applied (**densification:coherence_threshold**). + First-order points receive a joint temporal coherence value of 1.0 to avoid them being removed from the final set of points. + +- Removing phase contributions (mean velocity and DEM error): + After estimating the parameters of the second-order points, the phase contributions are removed from the wrapped interferometric phase of the first-order points. + +- Spatial unwrapping of the residuals: + The residuals in each interferogram are unwrapped in space using a sparse point network unwrapping method (**processing:unwrapping_method**) (Bioucas-Dias and Valadao 2007, Boykov and Kolmogorov 2004). + The spatial neighbourhood for unwrapping is defined by spatial network including both first- and second-order points. + It is created with a delaunay network and k-nearest neighbors (**unwrapping:knn**). + +- Restore phase contributions to the spatially unwrapped residual phase: + Finally, the phase contributions are added back to the spatially unwrapped residual phase of each point. + +- Adjust reference: + All restored unwrapped interferograms are referenced to the peak of velocity histogram derived from all points. + +- Inverting the interferogram network: + The interferogram network is inverted for each point to retrieve the displacement time series relative to the first acquisition. + +Option 2: Unwrapping in space +""""""""""""""""""""""""""""" + +- Removing APS from interferograms + The wrapped interferograms are corrected for the interpolated APS for both the first and second order points. + +Afterwards, the processing is the same as in the spatial unwrapping during step 2. + + +Handling big datasets +--------------------- +The processing of large datasets can be computationally expensive and time-consuming. +Especially the estimation of the temporal phase coherence in step 0 is a bottleneck, also in terms of memory consumption. 
+Therefore, it is recommended to set **processing:num_cores** for parallel processing. +By setting **processing:num_patches** the data is split into spatial patches and processed subsequently to fit into memory. + + +Processing steps for one-step unwrapping workflow +------------------------------------------------- +The one-step unwrapping workflow is an alternative to the two-step unwrapping workflow. +The steps are similar to the workflow described above, but is only executed until step 2. +This workflow is meant for processing small areas where the atmospheric filtering is not required as the reference point will be selected close to the area of interest. +The idea behind the one-step unwrapping workflow is to apply the consistency check based on the temporal unwrapping (step 1) to all pixels, without differentiating between first and second order points. +This can yield better unwrapping results compared to the two-step unwrapping in case DEM error and/or velocity highly vary in space. +For this purpose, the pixels are selected without gridding (set **preparation:grid_size** to Zero, i.e. all pixels above the specified coherence threshold are selected as final points. +Since the densification step is not performed, you should reduce the coherence threshold (**consistency_check:coherence_p1**) to select the desired number of points. + + +Literature +---------- + +* Piter, A., Haghshenas Haghighi, M., Motagh, M.(2024). An in-depth study on Sentinel-1 InSAR for transport infrastructure monitoring. PFG - Journal of Photogrammetry, Remote Sensing and Geoinformation Science. (paper currently under review). + +* Zhao F, Mallorqui JJ (2019). A Temporal Phase Coherence Estimation Algorithm and Its Application on DInSAR Pixel Selection. IEEE Transactions on Geoscience and Remote Sensing 57(11):8350–8361, DOI 10.1109/TGRS.2019.2920536 + +* Ferretti A, Prati C, Rocca F (2001). Permanent scatterers in SAR interferometry. IEEE Transactions on Geoscience and Remote Sensing 39(1):8–20 + +* Berardino P, Fornaro G, Lanari R, Sansosti E (2002). A new algorithm for surface deformation monitoring based on small baseline differential SAR interferograms. IEEE Transactions on Geoscience and Remote Sensing 40(11):2375–2383 + +* Bioucas-Dias JM, Valadao G (2007). Phase Unwrapping via Graph Cuts. IEEE Transactions on Image Processing 16(3):698–709, DOI 10.1109/TIP.2006.888351 + +* Mirzaee S, Amelung F, Fattahi H (2023). Non-linear phase linking using joined distributed and persistent scatterers. Computers & Geosciences 171:105291, DOI 10.1016/j.cageo.2022.105291 + +* Crosetto M, Devanthéry N, Monserrat O, Barra A, Cuevas-González M, Mróz M, Botey-Bassols J, Vázquez-Suné E, Crippa B (2018). A persistent scatterer interferometry procedure based on stable areas to filter the atmospheric component. Remote Sensing 10(11):1780 + +* Van Leijen FJ (2014). Persistent scatterer interferometry based on geodetic estimation theory. PhD thesis + +* Boykov Y, Kolmogorov V (2004) An experimental comparison of min-cut/max- flow algorithms for energy minimization in vision. IEEE Transactions on Pattern Analysis and Machine Intelligence 26(9):1124–1137, DOI 10.1109/TPAMI.2004.60 diff --git a/docs/readme.rst b/docs/readme.rst new file mode 100644 index 0000000..72a3355 --- /dev/null +++ b/docs/readme.rst @@ -0,0 +1 @@ +.. include:: ../README.rst diff --git a/docs/usage.rst b/docs/usage.rst new file mode 100644 index 0000000..af5ec35 --- /dev/null +++ b/docs/usage.rst @@ -0,0 +1,75 @@ +.. _usage: + +===== +Usage +===== + +.. 
image:: https://seafile.projekt.uni-hannover.de/f/39209355cabc4607bf0a/?dl=1 + :alt: SARvey workflow + :width: 600px + :align: center + +Processing workflow for using the SARvey software to derive displacement time series. The minimal required processing +steps and datasets are depicted in grey. All other steps are optional. + + + +Command-line tools +------------------ + +The following command-line tools are available and can be run directly in the terminal. + +`sarvey` + A tool to derive displacements from the SLC stack with Multi-Temporal InSAR (MTI). + A detailed description of the processing steps is given `here `_. + +`sarvey_plot` + A tool to plot the results from `sarvey` processing. + +`sarvey_export` + A tool to export the results from `sarvey` processing to shapefile or geopackage. + +`sarvey_mask` + A tool to create a mask from shapefile containing the area of interest, which can be used in `sarvey` processing. + The tool reads from an input file, which is a shapefile or geopackage containing the geographic data. + It supports both 'LineString' and 'Polygon' geometries. + The tool first gets the spatial extent of the geographic data and searches the location of the polygon/line nodes in the image coordinates of the radar image. + A buffer around the polygon/line is created specified by a width in pixel. + The buffer is then used to create the mask. + + Here is an example of how to use the `sarvey_mask` tool: + + .. code-block:: bash + + sarvey_mask --input_file my_shapefile.shp --geom_file ./inputs/geometryRadar.h5 --out_file_name my_mask.h5 --width 5 + + + +`sarvey_osm` + A tool to download OpenStreetMap data for the area of interest specified by the spatial extend of the SLC stack. + The tool first gets the spatial extent of the SAR image from the geometry file. + It then uses this spatial extent to download the OpenStreetMap data for the corresponding area. + The download of railway tracks, highways and bridges is supported. + After downloading the data, the tool saves it to a shapefile. + + After downloading the OpenStreetMap data with `sarvey_osm`, you can use the `sarvey_mask` tool to create a mask from the shapefile. + + Here is an example of how to use the `sarvey_osm` tool: + + .. code-block:: bash + + sarvey_osm --geom ./geometryRadar.h5 --railway # download railway + sarvey_osm --geom ./geometryRadar.h5 --highway # download highway + sarvey_osm --geom ./geometryRadar.h5 --railway --bridge # download railway bridge + sarvey_osm --geom ./geometryRadar.h5 --railway -o mask_railway.shp # specify output path + + +Usage of the Python API +----------------------- + +To use SARvey in a project: + + .. code-block:: python + + import sarvey + diff --git a/sarvey/__init__.py b/sarvey/__init__.py new file mode 100644 index 0000000..d1f9ff5 --- /dev/null +++ b/sarvey/__init__.py @@ -0,0 +1,39 @@ +# -*- coding: utf-8 -*- + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 
+# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + +"""Top-level package for SARvey.""" + +__author__ = """Andreas Piter""" +__email__ = 'piter@ipi.uni-hannover.de' + +from .version import __version__ + +__all__ = [ + '__version__' +] diff --git a/sarvey/coherence.py b/sarvey/coherence.py new file mode 100644 index 0000000..5b07adc --- /dev/null +++ b/sarvey/coherence.py @@ -0,0 +1,189 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + +"""Coherence module for SARvey.""" +import multiprocessing +import time +import numpy as np +from numba import jit +from scipy.signal import convolve2d +from logging import Logger +from miaplpy.objects.slcStack import slcStack +from sarvey.objects import BaseStack +from sarvey.utils import convertBboxToBlock + + +def computeIfgsAndTemporalCoherence(*, path_temp_coh: str, path_ifgs: str, path_slc: str, ifg_array: np.ndarray, + time_mask: np.ndarray, wdw_size: int, num_boxes: int, box_list: list, + num_cores: int, logger: Logger): + """ComputeIfgsAndTemporalCoherence. + + Compute the interferograms and temporal coherence from the SLC stack for a given set of (spatial) patches. 
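+
+    For each patch, the corresponding block of the SLC stack is read and subset in time,
+    the interferograms defined by `ifg_array` are formed, and the interferometric phase is
+    low-pass filtered with a square window of size `wdw_size` (excluding the centre pixel).
+    The temporal coherence is then computed as the absolute value of the temporal mean of
+    the residual phasors between each interferogram and its filtered counterpart. Both the
+    interferograms and the temporal coherence are written patch-wise to `path_ifgs` and
+    `path_temp_coh`; the logarithmic mean amplitude image is returned as a by-product.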
+ + Parameters + ---------- + path_temp_coh : str + Path to the temporary coherence stack. The data will be stored in this file during processing. + path_ifgs : str + Path to the interferograms stack. The data will be stored in this file during processing. + path_slc : str + Path to the SLC stack. The data will be read from this file. + ifg_array : np.ndarray + Array containing the indices of the reference and secondary images which are used to compute the interferograms. + time_mask : np.ndarray + Binary mask indicating the selected images from the SLC stack. + wdw_size : int + Size of the filter window. Has to be odd. + num_boxes : int + Number of patches to enable reading and processing of larger SLC stacks. + box_list : list + List containing the indices of each patch. + num_cores : int + Number of cores for parallel processing. + logger : Logger + Logger object. + + Returns + ------- + mean_amp_img : np.ndarray + Mean amplitude image. + """ + start_time = time.time() + filter_kernel = np.ones((wdw_size, wdw_size), dtype=np.float64) + filter_kernel[wdw_size // 2, wdw_size // 2] = 0 + + slc_stack_obj = slcStack(path_slc) + slc_stack_obj.open() + temp_coh_obj = BaseStack(file=path_temp_coh, logger=logger) + ifg_stack_obj = BaseStack(file=path_ifgs, logger=logger) + + mean_amp_img = np.zeros((slc_stack_obj.length, slc_stack_obj.width), dtype=np.float32) + num_ifgs = ifg_array.shape[0] + + for idx in range(num_boxes): + bbox = box_list[idx] + block2d = convertBboxToBlock(bbox=bbox) + + # read slc + slc = slc_stack_obj.read(datasetName='slc', box=bbox, print_msg=False) + slc = slc[time_mask, :, :] + # todo: check if mean in log() is 0, then mask it to avoid computational problems. + mean_amp_img[bbox[1]:bbox[3], bbox[0]:bbox[2]] = np.log10(np.mean(np.abs(slc), axis=0)) + + # compute ifgs + ifgs = computeIfgs(slc=slc, ifg_array=ifg_array) + ifg_stack_obj.writeToFileBlock(data=ifgs, dataset_name="ifgs", block=block2d, print_msg=False) + del slc + + # filter ifgs + avg_neighbours = np.zeros_like(ifgs) + if num_cores == 1: + for i in range(num_ifgs): + avg_neighbours[:, :, i] = convolve2d(in1=ifgs[:, :, i], in2=filter_kernel, mode='same', boundary="symm") + else: + pool = multiprocessing.Pool(processes=num_cores) + + args = [( + idx, + ifgs[:, :, idx], + filter_kernel) for idx in range(num_ifgs)] + + results = pool.map(func=launchConvolve2d, iterable=args) + + # retrieve results + for j, avg_neigh in results: + avg_neighbours[:, :, j] = avg_neigh + del results, args, avg_neigh + + # compute temporal coherence + residual_phase = np.angle(ifgs * np.conjugate(avg_neighbours)) + del ifgs, avg_neighbours + temp_coh = np.abs(np.mean(np.exp(1j * residual_phase), axis=2)) + temp_coh_obj.writeToFileBlock(data=temp_coh, dataset_name="temp_coh", block=block2d, print_msg=False) + del residual_phase, temp_coh + logger.info(msg="Patches processed:\t {}/{}".format(idx + 1, num_boxes)) + + m, s = divmod(time.time() - start_time, 60) + logger.debug(msg='\ntime used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s)) + return mean_amp_img + + +@jit(nopython=True) +def computeIfgs(*, slc: np.ndarray, ifg_array: np.ndarray): + """ComputeIfgs. + + Parameters + ---------- + slc : np.ndarray + SLC stack. + ifg_array : np.ndarray + Array containing the indices of the reference and secondary images which are used to compute the interferograms. + + Returns + ------- + ifgs : np.ndarray + Interferograms. 
+ """ + t, length, width = slc.shape + num_ifgs = ifg_array.shape[0] + ifgs = np.zeros((length, width, num_ifgs), dtype=np.complex64) + + c = 0 + for i, j in ifg_array: + ifgs[:, :, c] = slc[i, :, :] * np.conjugate(slc[j, :, :]) + c += 1 + return ifgs + + +def launchConvolve2d(args: tuple): + """LaunchConvolve2d. + + Parameters + ---------- + args : tuple + Tuple containing the arguments for the convolution. + Tuple contains: + + idx : int + Index of the processed interferogram. + ifg : np.ndarray + Interferogram. + filter_kernel : np.ndarray + Filter kernel. + + Returns + ------- + idx : int + Index of the processed interferogram. + avg_neighbours : np.ndarray + Low-pass filtered phase derived as average of neighbours. + """ + (idx, ifg, filter_kernel) = args + avg_neighbours = convolve2d(in1=ifg, in2=filter_kernel, mode='same', boundary="symm") + return idx, avg_neighbours diff --git a/sarvey/config.py b/sarvey/config.py new file mode 100644 index 0000000..c56d510 --- /dev/null +++ b/sarvey/config.py @@ -0,0 +1,759 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . 
+ +"""Configuration module for SARvey.""" +import os +import json +from datetime import date +from json import JSONDecodeError +from typing import Optional +from pydantic import BaseModel, Field, validator, Extra + + +class DataDirectories(BaseModel, extra=Extra.forbid): + """Template for settings in config file.""" + + path_inputs: str = Field( + title="The path to the input data directory.", + description="Set the path of the input data directory.", + default="inputs/" + ) + + path_outputs: str = Field( + title="The path to the processing output data directory.", + description="Set the path of the processing output data directory.", + default="outputs/" + ) + + @validator('path_inputs') + def checkPathInputs(cls, v): + """Check if the input path exists.""" + if v == "": + raise ValueError("Empty string is not allowed.") + if not os.path.exists(os.path.abspath(v)): + raise ValueError(f"path_inputs is invalid: {os.path.abspath(v)}") + if not os.path.exists(os.path.join(os.path.abspath(v), "slcStack.h5")): + raise ValueError(f"'slcStack.h5' does not exist: {v}") + if not os.path.exists(os.path.join(os.path.abspath(v), "geometryRadar.h5")): + raise ValueError(f"'geometryRadar.h5' does not exist: {v}") + return v + + +class Processing(BaseModel, extra=Extra.forbid): + """Template for settings in config file.""" + + num_cores: int = Field( + title="Number of cores", + description="Set the number of cores for parallel processing.", + default=50 + ) + + num_patches: int = Field( + title="Number of patches", + description="Set the number of patches for processing large areas patch-wise.", + default=1 + ) + + temporal_unwrapping: bool = Field( + title="Apply temporal unwrapping", + description="Apply temporal unwrapping additionally to spatial unwrapping.", + default=True + ) + + unwrapping_method: str = Field( + title="Spatial unwrapping method", + description="Select spatial unwrapping method from 'ilp' and 'puma'.", + default='puma' + ) + + @validator('num_cores') + def checkNumCores(cls, v): + """Check if the number of cores is valid.""" + if v <= 0: + raise ValueError("Number of cores must be greater than zero.") + return v + + @validator('num_patches') + def checkNumPatches(cls, v): + """Check if the number of patches is valid.""" + if v <= 0: + raise ValueError("Number of patches must be greater than zero.") + return v + + @validator('unwrapping_method') + def checkUnwMethod(cls, v): + """Check if unwrapping_method is valid.""" + if (v != "ilp") & (v != "puma"): + raise ValueError("Unwrapping method must be either 'ilp' or 'puma'.") + return v + + +class PhaseLinking(BaseModel, extra=Extra.forbid): + """Template for settings in config file.""" + + phase_linking: bool = Field( + title="Use phase linking results.", + description="Use pixels selected in phase linking.", + default=False + ) + + path_inverted: str = Field( + title="The path to the phase linking inverted data directory.", + description="Set the path of the inverted data directory.", + default="inverted/" + ) + + num_siblings: int = Field( + title="Sibling threshold.", + description="Threshold on the number of siblings applied during phase linking to distinguish PS from DS" + "candidates.", + default=20 + ) + + spatial_mask_file_pl: Optional[str] = Field( + title="Path to spatial mask file for phase linking results.", + description="Path to the mask file, e.g. 
created by sarvey_mask.", + default="" + ) + + use_ps: bool = Field( + title="Use point-like scatterers.", + description="Use point-like scatterers (pixels with a low number of siblings) selected in phase linking." + "Is applied, only if 'phase_linking' is true.", + default=False + ) + + path_mask_file_ps: str = Field( + title="The path to the mask file for ps pixels from phase linking.", + description="Set the path of the 'maskPS.h5' file (optional).", + default="maskPS.h5" + ) + + @validator('path_inverted') + def checkPathInverted(cls, v, values): + """Check if the inverted path exists.""" + if values["phase_linking"]: + if v == "": + raise ValueError("Empty string is not allowed.") + if not os.path.exists(os.path.abspath(v)): + raise ValueError(f"path_inverted is invalid: {os.path.abspath(v)}") + if not os.path.exists(os.path.join(os.path.abspath(v), "phase_series.h5")): + raise ValueError(f"'phase_series.h5' does not exist: {v}") + return v + + @validator('num_siblings') + def checkNumSiblings(cls, v, values): + """Check is no_siblings is valid.""" + if not values["phase_linking"]: + if v < 1: + raise ValueError("'num_siblings' has to be greater than 0.") + return v + + @validator('spatial_mask_file_pl') + def checkSpatialMaskPath(cls, v, values): + """Check if the path is correct.""" + if values["phase_linking"]: + if v == "" or v is None: + return None + else: + if not os.path.exists(os.path.abspath(v)): + raise ValueError(f"spatial_mask_file_pl path is invalid: {v}") + return v + + @validator('use_ps') + def checkUsePS(cls, v, values): + """Check if use_ps will be applied.""" + if (not values["phase_linking"]) and v: + raise ValueError("'use_ps' will not be applied, because 'phase_linking' is set to False.") + return v + + @validator('path_mask_file_ps') + def checkPathMaskFilePS(cls, v, values): + """Check if the mask file exists.""" + if values["phase_linking"] and values["use_ps"]: + if v == "": + raise ValueError("Empty string is not allowed.") + if not os.path.exists(os.path.abspath(v)): + raise ValueError(f"path_mask_file_ps is invalid: {os.path.abspath(v)}") + return v + + +class Preparation(BaseModel, extra=Extra.forbid): + """Template for settings in config file.""" + + start_date: Optional[str] = Field( + title="Start date", + description="Format: YYYY-MM-DD.", + default=None + ) + + stop_date: Optional[str] = Field( + title="Stop date", + description="Format: YYYY-MM-DD.", + default=None + ) + + network_type: str = Field( + title="Interferogram network type.", + description="Set the intererogram network type: 'sb' (small baseline), 'stb' (small temporal baseline), " + "'stb_year' (small temporal baseline and yearly ifgs), 'delaunay' (delaunay network), " + "or 'star' (single-reference).", + default="sb" + ) + + num_ifgs: Optional[int] = Field( + title="Number of interferograms", + description="Set the number of interferograms per image. Might be violated .", + default=3 + ) + + max_tbase: Optional[int] = Field( + title="Maximum temporal baseline [days]", + description="Set the maximum temporal baseline for the ifg network. 
(required for: 'sb')",
+        default=100
+    )
+
+    filter_wdw_size: int = Field(
+        title="Size of filtering window [pixel]",
+        description="Set the size of window for lowpass filtering.",
+        default=9
+    )
+
+    @validator('start_date', 'stop_date')
+    def checkDates(cls, v):
+        """Check if date format is valid."""
+        if v == "":
+            v = None
+
+        if v is not None:
+            try:
+                date.fromisoformat(v)
+            except Exception as e:
+                raise ValueError(f"Date needs to be in format: YYYY-MM-DD. {e}")
+        return v
+
+    @validator('network_type')
+    def checkNetworkType(cls, v):
+        """Check if the ifg network type is valid."""
+        if (v != "sb") and (v != "star") and (v != "delaunay") and (v != "stb") and (v != "stb_year"):
+            raise ValueError("Interferogram network type has to be 'sb', 'stb', 'stb_year', 'delaunay' or 'star'.")
+        return v
+
+    @validator('num_ifgs')
+    def checkNumIfgs(cls, v):
+        """Check if the number of ifgs is valid."""
+        if v is not None:
+            if v <= 0:
+                raise ValueError("Number of ifgs must be greater than zero.")
+        return v
+
+    @validator('max_tbase')
+    def checkMaxTBase(cls, v):
+        """Check if the value for maximum time baseline is valid."""
+        if v is not None:
+            if v <= 0:
+                raise ValueError("Maximum baseline must be greater than zero.")
+        return v
+
+    @validator('filter_wdw_size')
+    def checkFilterWdwSize(cls, v):
+        """Check if the filter window size is valid."""
+        if v <= 0:
+            raise ValueError("Filter window size must be greater than zero.")
+        return v
+
+
+class ConsistencyCheck(BaseModel, extra=Extra.forbid):
+    """Template for settings in config file."""
+
+    coherence_p1: float = Field(
+        title="Temporal coherence threshold for first-order points",
+        description="Set the temporal coherence threshold of first-order points for the consistency check.",
+        default=0.9
+    )
+
+    grid_size: int = Field(
+        title="Grid size [m]",
+        description="Set the grid size in [m] for the consistency check. 
No grid is applied if 'grid_size' is Zero.", + default=200 + ) + + spatial_mask_file_p1: Optional[str] = Field( + title="Path to mask file", + description="Set the path to the mask file in .h5 format.", + default="" + ) + + knn: int = Field( + title="Number of nearest neighbours", + description="Set number of nearest neighbours for creating arcs.", + default=30 + ) + + max_arc_length: Optional[int] = Field( + title="Maximum length of arcs [m]", + description="Set the maximum length of arcs.", + default=None + ) + + velocity_bound: float = Field( + title="Bounds on mean velocity for temporal unwrapping [m/year]", + description="Set the bound (symmetric) for the mean velocity estimation in temporal unwrapping.", + default=0.1 + ) + + dem_error_bound: float = Field( + title="Bounds on DEM error for temporal unwrapping [m]", + description="Set the bound (symmetric) for the DEM error estimation in temporal unwrapping.", + default=100.0 + ) + + num_samples: int = Field( + title="Number of samples in the search space for temporal unwrapping", + description="Set the number of samples evaluated along the search space for temporal unwrapping.", + default=100 + ) + + arc_coherence: float = Field( + title="Arc coherence threshold", + description="Set the arc coherence threshold for the consistency check.", + default=0.6 + ) + + min_num_arc: int = Field( + title="Minimum number of arcs per point", + description="Set the minimum number of arcs per point.", + default=3 + ) + + @validator('coherence_p1') + def checkCoherenceP1(cls, v): + """Check if the temporal coherence threshold is valid.""" + if v < 0: + raise ValueError("Temporal coherence threshold cannot be negative.") + if v > 1: + raise ValueError("Temporal coherence threshold cannot be greater than 1.") + return v + + @validator('grid_size') + def checkGridSize(cls, v): + """Check if the grid size is valid.""" + if v < 0: + raise ValueError('Grid size cannot be negative.') + if v == 0: + v = None + return v + + @validator('spatial_mask_file_p1') + def checkSpatialMaskPath(cls, v): + """Check if the path is correct.""" + if v == "" or v is None: + return None + else: + if not os.path.exists(os.path.abspath(v)): + raise ValueError(f"spatial_mask_file_p1 path is invalid: {v}") + return v + + @validator('knn') + def checkKNN(cls, v): + """Check if the k-nearest neighbours is valid.""" + if v <= 0: + raise ValueError('K-nearest neighbours cannot be negative or zero.') + return v + + @validator('max_arc_length') + def checkMaxArcLength(cls, v): + """Check if the maximum length of arcs is valid.""" + if v is None: + return 999999 + if v <= 0: + raise ValueError('Maximum arc length must be positive.') + return v + + @validator('velocity_bound') + def checkVelocityBound(cls, v): + """Check if the velocity bound is valid.""" + if v <= 0: + raise ValueError('Velocity bound cannot be negative or zero.') + return v + + @validator('dem_error_bound') + def checkDEMErrorBound(cls, v): + """Check if the DEM error bound is valid.""" + if v <= 0: + raise ValueError('DEM error bound cannot be negative or zero.') + return v + + @validator('num_samples') + def checkNumSamples(cls, v): + """Check if the number of samples for the search space is valid.""" + if v <= 0: + raise ValueError('Number of samples cannot be negative or zero.') + return v + + @validator('arc_coherence') + def checkArcCoherence(cls, v): + """Check if the arc coherence threshold is valid.""" + if v < 0: + raise ValueError('Arc coherence threshold cannot be negativ.') + if v > 1: + raise 
ValueError('Arc coherence threshold cannot be greater than 1.') + return v + + @validator('min_num_arc') + def checkMinNumArc(cls, v): + """Check if the minimum number of arcs is valid.""" + if v < 0: + raise ValueError('Velocity bound cannot be negative.') + return v + + +class Unwrapping(BaseModel, extra=Extra.forbid): + """Template for settings in config file.""" + + knn: int = Field( + title="Number of nearest neighbours", + description="Set number of nearest neighbours for spatial unwrapping with PUMA. Not used, if" + "'use_temporal_unwrapping_arcs' is 'true'.", + default=1 + ) + + use_temporal_unwrapping_arcs: bool = Field( + title="Use arcs from temporal unwrapping", + description="If true, use same arcs from temporal unwrapping. If false, apply new delaunay and knn" + "triangulation.", + default=True + ) + + @validator('knn') + def checkKNN(cls, v): + """Check if the k-nearest neighbours is valid.""" + if v <= 0: + raise ValueError('K-nearest neighbours cannot be negative or zero.') + return v + + +class Filtering(BaseModel, extra=Extra.forbid): + """Template for filtering settings in config file.""" + + skip_filtering: bool = Field( + title="Skip filtering step.", + description="Set whether to skip filtering step.", + default=False + ) + + interpolation_method: str = Field( + title="Spatial interpolation method.", + description="Method for interpolating atmosphere in space ('linear', 'cubic' or 'kriging').", + default="kriging" + ) + + coherence_p2: float = Field( + title="Temporal coherence threshold", + description="Set the temporal coherence threshold for the filtering step.", + default=0.8 + ) + + grid_size: int = Field( + title="Grid size [m].", + description="Set the grid size for spatial filtering.", + default=1000 + ) + + spatial_mask_file_p2: Optional[str] = Field( + title="Path to spatial mask file.", + description="Path to the mask file, e.g. 
created by sarvey_mask.", + default="" + ) + + use_moving_points: bool = Field( + title="Use moving points", + description="Set whether to use moving points in the filtering step.", + default=True + ) + + max_auto_corr: float = Field( + title="Max auto correlation.", + description="Set temporal autocorrelation threshold for the selection of stable/linearly moving points.", + default=0.3 + ) + + @validator('coherence_p2') + def checkTempCohThrsh2(cls, v): + """Check if the temporal coherence threshold is valid.""" + if v < 0: + raise ValueError("Temporal coherence threshold cannot be negative.") + if v > 1: + raise ValueError("Temporal coherence threshold cannot be greater than 1.") + return v + + @validator('interpolation_method') + def checkInterpolationMethod(cls, v): + """Check if the interpolation method is valid.""" + if (v.lower() != "linear") and (v.lower() != "cubic") and (v.lower() != "kriging"): + raise ValueError("Method for interpolating atmosphere in space needs to be either 'linear', 'cubic' " + "or 'kriging'.") + return v + + @validator('grid_size') + def checkGridSize(cls, v): + """Check if the grid size is valid.""" + if v < 0: + raise ValueError("Grid size cannot be negative.") + else: + return v + + @validator('spatial_mask_file_p2') + def checkSpatialMaskPath(cls, v): + """Check if the path is correct.""" + if v == "" or v is None: + return None + else: + if not os.path.exists(os.path.abspath(v)): + raise ValueError(f"spatial_mask_file_p2 path is invalid: {v}") + return v + + @validator('max_auto_corr') + def checkMaxAutoCorr(cls, v): + """Check if the value is correct.""" + if v < 0 or v > 1: + raise ValueError(f"max_auto_corr is not between 0 and 1: {v}") + return v + + +class Densification(BaseModel, extra=Extra.forbid): + """Template for densification settings in config file.""" + + coherence_threshold: float = Field( + title="Coherence threshold for densification", + description="Set coherence threshold for densification.", + default=0.5 + ) + + num_connections_p1: int = Field( + title="Number of connections in temporal unwrapping.", + description="Set number of connections between second-order point and closest first-order points for temporal " + "unwrapping.", + default=5 + ) + + num_connections_p2: int = Field( + title="Number of connections in consistency check with neighbouring points.", + description="Set number of connections between unwrapped second-order point and closest second-order points for" + " temporal checking consistency.", + default=10 + ) + + max_distance_p1: int = Field( + title="Maximum distance to nearest first-order point [m]", + description="Set threshold on the distance between first-order points and to be temporally unwrapped" + "second-order point.", + default=2000 + ) + + velocity_bound: float = Field( + title="Bounds on mean velocity for temporal unwrapping [m/year]", + description="Set the bound (symmetric) for the mean velocity in temporal unwrapping.", + default=0.15 + ) + + dem_error_bound: float = Field( + title="Bounds on DEM error for temporal unwrapping [m]", + description="Set the bound (symmetric) for the DEM error estimation in temporal unwrapping.", + default=100.0 + ) + + num_samples: int = Field( + title="Number of samples in the search space for temporal unwrapping", + description="Set the number of samples evaluated along the search space for temporal unwrapping.", + default=100 + ) + + knn: int = Field( + title="Number of nearest neighbours", + description="Set number of nearest neighbours for creating arcs.", + 
default=1
+    )
+
+    @validator('coherence_threshold')
+    def checkCoherenceThresh(cls, v):
+        """Check if coherence_threshold is valid."""
+        if v < 0 or v > 1:
+            raise ValueError(f"coherence_threshold is not between 0 and 1: {v}")
+        return v
+
+    @validator('num_connections_p1')
+    def checkNumConn1(cls, v):
+        """Check if num_connections_p1 is valid."""
+        if v <= 0:
+            raise ValueError(f"num_connections_p1 must be greater than 0: {v}")
+        return v
+
+    @validator('num_connections_p2')
+    def checkNumConn2(cls, v):
+        """Check if num_connections_p2 is valid."""
+        if v < 0:
+            raise ValueError(f"num_connections_p2 cannot be negative: {v}")
+        return v
+
+    @validator('max_distance_p1')
+    def checkMaxDistanceP1(cls, v):
+        """Check if the maximum distance to nearest first-order points is valid."""
+        if v < 0:
+            raise ValueError('Maximum distance to first-order points cannot be negative.')
+        return v
+
+    @validator('velocity_bound')
+    def checkVelocityBound(cls, v):
+        """Check if the velocity bound is valid."""
+        if v <= 0:
+            raise ValueError('Velocity bound cannot be negative or zero.')
+        return v
+
+    @validator('dem_error_bound')
+    def checkDEMErrorBound(cls, v):
+        """Check if the DEM error bound is valid."""
+        if v <= 0:
+            raise ValueError('DEM error bound cannot be negative or zero.')
+        return v
+
+    @validator('num_samples')
+    def checkNumSamples(cls, v):
+        """Check if the number of samples for the search space is valid."""
+        if v <= 0:
+            raise ValueError('Number of samples cannot be negative or zero.')
+        return v
+
+    @validator('knn')
+    def checkKNN(cls, v):
+        """Check if the k-nearest neighbours is valid."""
+        if v <= 0:
+            raise ValueError('K-nearest neighbours cannot be negative or zero.')
+        return v
+
+
+class Logging(BaseModel):
+    """Template for logger settings."""
+
+    logging_level: str = Field(
+        title="Logging level.",
+        description="Set logging level.",
+        default="INFO"
+    )
+
+    logfile_path: str = Field(
+        title="Logfile Path.",
+        description="Path to directory where the logfiles should be saved.",
+        default="logfiles/"
+    )
+
+    @validator('logging_level')
+    def checkLoggingLevel(cls, v):
+        """Check if the logging level is valid."""
+        if v == "":
+            raise ValueError("Empty string is not allowed.")
+        v = v.upper()
+        if v not in ["CRITICAL", "ERROR", "WARNING", "INFO", "DEBUG", "NOTSET"]:
+            raise ValueError("Logging level must be one of ('CRITICAL', 'ERROR', "
+                             "'WARNING', 'INFO', 'DEBUG', 'NOTSET').")
+        return v
+
+
+class Config(BaseModel):
+    """Configuration for SAR4Infra sarvey."""
+
+    # title has to be the name of the class. Needed for creating default file
+    data_directories: DataDirectories = Field(
+        title="DataDirectories", description=""
+    )
+
+    logging: Logging = Field(
+        title="Logging", description=""
+    )
+
+    processing: Processing = Field(
+        title="Processing", description=""
+    )
+
+    phase_linking: PhaseLinking = Field(
+        title="PhaseLinking", description=""
+    )
+
+    preparation: Preparation = Field(
+        title="Preparation", description=""
+    )
+
+    consistency_check: ConsistencyCheck = Field(
+        title="ConsistencyCheck", description=""
+    )
+
+    unwrapping: Unwrapping = Field(
+        title="Unwrapping", description=""
+    )
+
+    filtering: Filtering = Field(
+        title="Filtering", description=""
+    )
+
+    densification: Densification = Field(
+        title="Densification", description=""
+    )
+
+
+def loadConfiguration(*, path: str) -> dict:
+    """Load configuration json file.
+
+    Parameters
+    ----------
+    path : str
+        Path to the configuration json file.
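+        The top-level keys of the json file are expected to match the section names of the Config class
+        ('data_directories', 'logging', 'processing', 'phase_linking', 'preparation', 'consistency_check',
+        'unwrapping', 'filtering', 'densification'); each section is validated by the corresponding pydantic model.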
+ + Returns + ------- + : dict + A dictionary containing configurations. + + Raises + ------ + JSONDecodeError + If failed to parse the json file to the dictionary. + FileNotFoundError + Config file not found. + IOError + Invalid JSON file. + ValueError + Invalid value for configuration object. + """ + try: + with open(path) as config_fp: + config = json.load(config_fp) + config = Config(**config).dict(by_alias=True) + except JSONDecodeError as e: + raise IOError(f'Failed to load the configuration json file => {e}') + return config diff --git a/sarvey/console.py b/sarvey/console.py new file mode 100644 index 0000000..e6594d5 --- /dev/null +++ b/sarvey/console.py @@ -0,0 +1,128 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + +"""Console module for SARvey.""" +from sarvey import version +from logging import Logger + + +def printStep(*, step: int, step_dict: dict, logger: Logger): + """Print the current step to console. + + Parameters + ---------- + step: int + current step number + step_dict: dict + dictionary with step numbers and names + logger: Logger + Logging handler + """ + logger.info(msg=" ---------------------------------------------------------------------------------") + logger.info(msg=f" STEP {step}: {step_dict[step]}") + logger.info(msg=" ---------------------------------------------------------------------------------") + + +def printCurrentConfig(*, config_section: dict, config_section_default: dict, logger: Logger): + """Print the current parameters and their default values from the config file to console. + + Parameters + ---------- + config_section: dict + Section of the configuration class which contains the selected parameters. + config_section_default: dict + Config section with default values. + logger: Logger + Logging handler. 
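+
+    Notes
+    -----
+    Parameters that differ from their default value are marked with '<---' in the printed overview.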
+ """ + shift = " " + logger.info(msg=shift + "{:>30} {:>15} {:>10}".format("Parameter", "value", "default")) + logger.info(msg=shift + "{:>30} {:>15} {:>10}".format("_________", "_____", "_______")) + + for key in config_section.keys(): + default = config_section_default[key] + default = "None" if default is None else default + default = "True" if default is True else default + default = "False" if default is False else default + + value = config_section[key] + value = "None" if value is None else value + value = "True" if value is True else value + value = "False" if value is False else value + if default == value: + logger.info(msg=shift + "{:>30} {:>15} {:>10}".format(key, value, default)) + else: + logger.info(msg=shift + "{:>30} {:>15} <--- {:>10}".format(key, value, default)) + + logger.info(msg="") + + +def showLogoSARvey(*, logger: Logger, step: str): + """ShowLogoSARvey. + + Parameters + ---------- + logger: Logger + logging handler + step: str + Name of the step or script which is shown on the logo. + """ + # generate_from: http://patorjk.com/software/taag/ - font: Big, style: default + # and https://textik.com/ + logo = rf""" + -\ + -/ \ + -/ - + / / + -/ -/ + +----------------------- / -/ ------------------------------------------------------+ + | -/ -/ | + | -/ / | + | -/ -/ | + | \ -/ -/ | + | /- -\/ -/ - _____ _____ | + | /- -\ -/ -/ / ____| /\ | __ \ | + | -\ -/ -/ | (___ / \ | |__) |_ _____ _ _ | + | -\ - -/ \___ \ / /\ \ | _ /\ \ / / _ \ | | | | + | -/ -\ /- -/ ____) / ____ \| | \ \ \ V / __/ |_| | | + -/ --\ /- -/ |_____/_/ \_\_| \_\ \_/ \___|\__, | | + -/ -/ -- / ___ __/ | | + -/ -/ -/ | |___/ | + / -/ -/ |___ Version: {version.__version__:<25} | + / -/ -/ | {version.__versionalias__:<25} | + -/ -/ -/ |___ Date: {version.__versiondate__:<25} | + -/ -/ -/ | Run: {step: <25} | + -/ / / |___ | + / -/ | | +-- / |___ | + \-- +------------------------------------ | ------------------------------------------+ + |___ + | + """ + logger.info(msg=logo) diff --git a/sarvey/densification.py b/sarvey/densification.py new file mode 100644 index 0000000..0d9ed91 --- /dev/null +++ b/sarvey/densification.py @@ -0,0 +1,324 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. 
See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + +"""Densification module for SARvey.""" +import time +import multiprocessing +import numpy as np +from scipy.spatial import KDTree +from logging import Logger + +from mintpy.utils import ptime + +from sarvey.unwrapping import oneDimSearchTemporalCoherence +from sarvey.objects import Points +import sarvey.utils as ut + + +def densificationInitializer(tree_p1: KDTree, tree_p2: KDTree, point2_obj: Points, demod_phase1: np.ndarray): + """DensificationInitializer. + + Sets values to global variables for parallel processing. + + Parameters + ---------- + tree_p1 : KDTree + KDTree of the first-order network + tree_p2 : KDTree + KDTree of the second-order network + point2_obj : Points + Points object with second-order points + demod_phase1 : np.ndarray + demodulated phase of the first-order network + """ + global global_tree_p1 + global global_tree_p2 + global global_point2_obj + global global_demod_phase1 + + global_tree_p1 = tree_p1 + global_tree_p2 = tree_p2 + global_point2_obj = point2_obj + global_demod_phase1 = demod_phase1 + + +def launchDensifyNetworkConsistencyCheck(args: tuple): + """LaunchDensifyNetworkConsistencyCheck. + + Launches the densification of the network with second-order points inside parallel processing. + + Parameters + ---------- + args : tuple + Tuple with the following parameters: + + idx_range : np.ndarray + Array with the indices of the second-order points + num_points : int + Number of second-order points + num_conn_p1 : int + Number of nearest points in the first-order network + num_conn_p2 : int + Number of nearest points in the second-order network + max_dist_p1 : float + Maximum allowed distance to the nearest points in the first-order network + velocity_bound : float + Bound for the velocity estimate in temporal unwrapping + demerr_bound : float + Bound for the DEM error estimate in temporal unwrapping + num_samples : int + Number of samples for the search of the optimal parameters + + Returns + ------- + idx_range : np.ndarray + Array with the indices of the second-order points + demerr_p2 : np.ndarray + DEM error array of the second-order points + vel_p2 : np.ndarray + Velocity array of the second-order points + gamma_p2 : np.ndarray + Estimated temporal coherence array of the second-order points resulting from temporal unwrapping + mean_gamma : np.ndarray + Mean temporal coherence array of the second-order points resulting from consistency check with neighbourhood + """ + (idx_range, num_points, num_conn_p1, num_conn_p2, max_dist_p1, velocity_bound, demerr_bound, num_samples) = args + + counter = 0 + prog_bar = ptime.progressBar(maxValue=num_points) + + # initialize output + demerr_p2 = np.zeros((num_points,), dtype=np.float32) + vel_p2 = np.zeros((num_points,), dtype=np.float32) + gamma_p2 = np.zeros((num_points,), dtype=np.float32) + mean_gamma = np.zeros((num_points,), dtype=np.float32) + + design_mat = np.zeros((global_point2_obj.ifg_net_obj.num_ifgs, 2), dtype=np.float32) + + demerr_range = np.linspace(-demerr_bound, demerr_bound, num_samples) + vel_range = np.linspace(-velocity_bound, velocity_bound, num_samples) + + factor = 4 * np.pi / global_point2_obj.wavelength + + for idx in range(num_points): + p2 = idx_range[idx] + # nearest points in p1 + dist, nearest_p1 = global_tree_p1.query([global_point2_obj.coord_utm[p2, 0], + global_point2_obj.coord_utm[p2, 1]], 
k=num_conn_p1) + mask = (dist < max_dist_p1) & (dist != 0) + mask[:3] = True # ensure that always at least the three closest points are used + nearest_p1 = nearest_p1[mask] + + # compute arc observations to nearest points + arc_phase_p1 = np.angle(np.exp(1j * global_point2_obj.phase[p2, :]) * + np.conjugate(np.exp(1j * global_demod_phase1[nearest_p1, :]))) + + design_mat[:, 0] = (factor * global_point2_obj.ifg_net_obj.pbase_ifg + / (global_point2_obj.slant_range[p2] * np.sin(global_point2_obj.loc_inc[p2]))) + design_mat[:, 1] = factor * global_point2_obj.ifg_net_obj.tbase_ifg + + demerr_p2[idx], vel_p2[idx], gamma_p2[idx] = oneDimSearchTemporalCoherence( + demerr_range=demerr_range, + vel_range=vel_range, + obs_phase=arc_phase_p1, + design_mat=design_mat + ) + + if num_conn_p2 > 0: + # nearest points in p2 (for consistency check with neighbourhood) + dist, nearest_p2 = global_tree_p2.query([global_point2_obj.coord_utm[p2, 0], + global_point2_obj.coord_utm[p2, 1]], k=num_conn_p2) + mask = (dist != 0) # remove the point itself + nearest_p2 = nearest_p2[mask] + num_n2 = mask[mask].shape[0] + + pred_phase_p2 = ut.predictPhaseSingle( + vel=vel_p2[idx], demerr=demerr_p2[idx], + slant_range=global_point2_obj.slant_range[p2], loc_inc=global_point2_obj.loc_inc[p2], + ifg_net_obj=global_point2_obj.ifg_net_obj, wavelength=global_point2_obj.wavelength, + only_vel=False, ifg_space=True + ) + demod_phase2 = np.angle(np.exp(1j * global_point2_obj.phase[p2]) * np.conjugate(np.exp(1j * pred_phase_p2))) + + gamma_n2 = np.zeros((num_n2,), dtype=np.float32) + for n_idx, n2 in enumerate(nearest_p2): + arc_phase_p2 = np.angle(np.exp(1j * demod_phase2) * + np.conjugate(np.exp(1j * global_point2_obj.phase[n2, :]))) + + gamma_n2[n_idx] = oneDimSearchTemporalCoherence( + demerr_range=demerr_range, + vel_range=vel_range, + obs_phase=arc_phase_p2, + design_mat=design_mat + )[-1] + + mean_gamma[idx] = np.average(gamma_n2, weights=1/(dist[mask] ** 2)) + + prog_bar.update(counter + 1, every=np.int16(200), + suffix='{}/{} points'.format(counter + 1, num_points)) + counter += 1 + + return idx_range, demerr_p2, vel_p2, gamma_p2, mean_gamma + + +def densifyNetwork(*, point1_obj: Points, vel_p1: np.ndarray, demerr_p1: np.ndarray, point2_obj: Points, + num_conn_p1: int, num_conn_p2: int, max_dist_p1: float, velocity_bound: float, demerr_bound: float, + num_samples: int, num_cores: int = 1, logger: Logger): + """DensifyNetwork. + + Densifies the network with second-order points by connecting the second-order points to the closest points in the + first-order network. 
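+
+    For each second-order point, arc phases to its nearest first-order points are formed from the
+    demodulated (parameter-free) first-order phase, and velocity and DEM error are estimated by a search
+    over the given velocity and DEM error bounds (oneDimSearchTemporalCoherence). If num_conn_p2 > 0, each
+    point is additionally checked for consistency against its closest second-order neighbours.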
+ + Parameters + ---------- + point1_obj : Points + Points object with first-order points + vel_p1 : np.ndarray + Velocity array of the first-order points + demerr_p1 : np.ndarray + DEM error array of the first-order points + point2_obj : Points + Points object with second-order points + num_conn_p1 : int + Number of nearest points in the first-order network + num_conn_p2 : int + Number of nearest points in the second-order network + max_dist_p1 : float + Maximum allowed distance to the nearest points in the first-order network + velocity_bound : float + Bound for the velocity estimate in temporal unwrapping + demerr_bound : float + Bound for the DEM error estimate in temporal unwrapping + num_samples : int + Number of samples for the search of the optimal parameters + num_cores : int + Number of cores for parallel processing (default: 1) + logger : Logger + Logger object + + Returns + ------- + demerr_p2 : np.ndarray + DEM error array of the second-order points + vel_p2 : np.ndarray + Velocity array of the second-order points + gamma_p2 : np.ndarray + Estimated temporal coherence array of the second-order points resulting from temporal unwrapping + mean_gamma : np.ndarray + Mean temporal coherence array of the second-order points resulting from consistency check with neighbourhood + + """ + msg = "#" * 10 + msg += " DENSIFICATION WITH SECOND-ORDER POINTS " + msg += "#" * 10 + logger.info(msg=msg) + start_time = time.time() + + # find the closest points from first-order network + tree_p1 = KDTree(data=point1_obj.coord_utm) + tree_p2 = KDTree(data=point2_obj.coord_utm) + + # remove parameters from wrapped phase + pred_phase_demerr, pred_phase_vel = ut.predictPhase( + obj=point1_obj, + vel=vel_p1, demerr=demerr_p1, + ifg_space=True, logger=logger + ) + pred_phase = pred_phase_demerr + pred_phase_vel + + # Note: for small baselines it does not make a difference if re-wrapping the phase difference or not. + # However, for long baselines (like in the star network) it does make a difference. Leijen (2014) does not re-wrap + # the arc double differences to be able to test the ambiguities. Kampes (2006) does re-wrap, but is testing based + # on the estimated parameters. Hence, it doesn't make a difference for him. Not re-wrapping can be a starting point + # for triangle-based temporal unwrapping. 
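+    # Illustration (comment only): for phase = 3.0 rad and pred_phase = -3.0 rad the plain difference is 6.0 rad,
+    # whereas re-wrapping via np.angle(np.exp(1j * 6.0)) maps it back into (-pi, pi], i.e. to about -0.28 rad.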
+ # demod_phase1 = np.angle(np.exp(1j * point1_obj.phase) * np.conjugate(np.exp(1j * pred_phase))) # re-wrapping + demod_phase1 = point1_obj.phase - pred_phase # not re-wrapping + + # initialize output + init_args = (tree_p1, tree_p2, point2_obj, demod_phase1) + + if num_cores == 1: + densificationInitializer(tree_p1=tree_p1, tree_p2=tree_p2, point2_obj=point2_obj, demod_phase1=demod_phase1) + args = (np.arange(point2_obj.num_points), point2_obj.num_points, num_conn_p1, num_conn_p2, max_dist_p1, + velocity_bound, demerr_bound, num_samples) + idx_range, demerr_p2, vel_p2, gamma_p2, mean_gamma = launchDensifyNetworkConsistencyCheck(args) + else: + with multiprocessing.Pool(num_cores, initializer=densificationInitializer, initargs=init_args) as pool: + logger.info(msg="start parallel processing with {} cores.".format(num_cores)) + num_cores = point2_obj.num_points if num_cores > point2_obj.num_points else num_cores + # avoids having less samples than cores + idx = ut.splitDatasetForParallelProcessing(num_samples=point2_obj.num_points, num_cores=num_cores) + args = [( + idx_range, + idx_range.shape[0], + num_conn_p1, + num_conn_p2, + max_dist_p1, + velocity_bound, + demerr_bound, + num_samples + ) for idx_range in idx] + + results = pool.map_async(launchDensifyNetworkConsistencyCheck, args, chunksize=1) + while True: + time.sleep(5) + if results.ready(): + results = results.get() + break + # needed to make coverage work in multiprocessing (not sure what that means. copied from package Arosics). + pool.close() + pool.join() + + demerr_p2 = np.zeros((point2_obj.num_points,), dtype=np.float32) + vel_p2 = np.zeros((point2_obj.num_points,), dtype=np.float32) + gamma_p2 = np.zeros((point2_obj.num_points,), dtype=np.float32) + mean_gamma = np.zeros((point2_obj.num_points,), dtype=np.float32) + + # retrieve results + for i, demerr_i, vel_i, gamma_i, mean_gamma_i in results: + demerr_p2[i] = demerr_i + vel_p2[i] = vel_i + gamma_p2[i] = gamma_i + mean_gamma[i] = mean_gamma_i + + m, s = divmod(time.time() - start_time, 60) + logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s)) + + # combine p1 and p2 parameters and bring them in correct order using point_id + sort_idx = np.argsort(np.append(point1_obj.point_id, point2_obj.point_id)) + demerr_p2 = np.append(demerr_p1, demerr_p2) # add gamma=1 for p1 pixels + vel_p2 = np.append(vel_p1, vel_p2) + gamma_p2 = np.append(np.ones_like(point1_obj.point_id), gamma_p2) # add gamma=1 for p1 pixels + mean_gamma = np.append(np.ones_like(point1_obj.point_id), mean_gamma) # add mean_gamma=1 for p1 pixels + + demerr_p2 = demerr_p2[sort_idx] + vel_p2 = vel_p2[sort_idx] + gamma_p2 = gamma_p2[sort_idx] + mean_gamma = mean_gamma[sort_idx] + return demerr_p2, vel_p2, gamma_p2, mean_gamma diff --git a/sarvey/filtering.py b/sarvey/filtering.py new file mode 100644 index 0000000..2ca250e --- /dev/null +++ b/sarvey/filtering.py @@ -0,0 +1,291 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 
+# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + +"""Filtering module for SARvey.""" +import time +import multiprocessing +import matplotlib.pyplot as plt +import numpy as np +from scipy.interpolate import griddata +import gstools as gs +from logging import Logger + +from mintpy.utils import ptime + +import sarvey.utils as ut + + +def launchSpatialFiltering(parameters: tuple): + """Launch_spatial_filtering. + + Launches the spatial filtering to estimate the atmospheric phase screen with low-pass filtering. + + Parameters + ---------- + parameters: tuple + Tuple containing the following parameters: + + idx_range: np.ndarray + range of indices for the time series + num_time: int + number of time steps + residuals: np.ndarray + residual phase (size: num_points x num_ifgs) + coord_utm1: np.ndarray + coordinates in UTM of the first-order points for which the residuals are given (size: num_points_p1 x 2) + coord_utm2: np.ndarray + coordinates in UTM of the new points which shall be interpolated (size: num_points_p2 x 2) + bins: np.ndarray + bin edges for the variogram + bool_plot: bool + boolean flag to plot intermediate results + logger: Logger + Logging handler + + Returns + ------- + idx_range: np.ndarray + range of indices for the time series + aps1: np.ndarray + atmospheric phase screen for the known points (size: num_points_p1 x num_ifgs) + aps2: np.ndarray + atmospheric phase screen for the new points (size: num_points_p2 x num_ifgs) + """ + # Unpack the parameters + (idx_range, num_time, residuals, coord_utm1, coord_utm2, bins, bool_plot, logger) = parameters + + x = coord_utm1[:, 1] + y = coord_utm1[:, 0] + x_new = coord_utm2[:, 1] + y_new = coord_utm2[:, 0] + + aps1 = np.zeros((coord_utm1.shape[0], num_time), dtype=np.float32) + aps2 = np.zeros((coord_utm2.shape[0], num_time), dtype=np.float32) + + prog_bar = ptime.progressBar(maxValue=num_time) + + for i in range(num_time): + field = residuals[:, i].astype(np.float32) + + # 1) estimate the variogram of the field + bin_center, vario = gs.vario_estimate(pos=[x, y], field=field, bin_edges=bins) + + # 2) fit model to empirical variogram + model = gs.Stable(dim=2) + try: + model.fit_variogram(x_data=bin_center, y_data=vario, nugget=True, max_eval=1500) + except RuntimeError as err: + logger.error(msg="\nIMAGE {}: Not able to fit variogram! {}".format(idx_range[i], err)) + if bool_plot: + fig, ax = plt.subplots(2, figsize=[10, 5]) + sca1 = ax[0].scatter(x, y, c=field) + plt.colorbar(sca1, ax=ax[0], pad=0.03, shrink=0.5) + ax[0].set_title("Not able to fit variogram! 
- PS1 residuals") + ax[1].scatter(bin_center, vario) + ax[1].set_xlabel("distance in [m]") + ax[1].set_ylabel("semi-variogram") + plt.close(fig) + prog_bar.update(value=i + 1, every=1, suffix='{}/{} images'.format(i + 1, num_time)) + continue + + # 3) estimate parameters of kriging + sk = gs.krige.Simple( + model=model, + cond_pos=[x, y], + cond_val=field, + ) + + # 4) evaluate the kriging model at ORIGINAL locations + fld_sk, _ = sk((x, y), return_var=True) + aps1[:, i] = fld_sk + + # 5) evaluate the kriging model at NEW locations + fld_sk_new, var_sk_new = sk((x_new, y_new), return_var=True) + aps2[:, i] = fld_sk_new + + prog_bar.update(value=i + 1, every=1, suffix='{}/{} images'.format(i + 1, num_time)) + + # 5) show results + if bool_plot: + min_val = np.min(field) + max_val = np.max(field) + + fig, ax = plt.subplots(2, 2, figsize=[10, 5]) + + cur_ax = ax[0, 0] + sca1 = cur_ax.scatter(x, y, c=field, vmin=min_val, vmax=max_val) + plt.colorbar(sca1, ax=cur_ax, pad=0.03, shrink=0.5) + cur_ax.set_title("PS1 residuals") + + cur_ax = ax[0, 1] + cur_ax = model.plot(x_max=bin_center[-1], ax=cur_ax) + cur_ax.scatter(bin_center, vario) + cur_ax.set_xlabel("distance in [m]") + cur_ax.set_ylabel("semi-variogram") + + if coord_utm2 is not None: + cur_ax = ax[1, 0] + sca2 = cur_ax.scatter(x_new, y_new, c=fld_sk_new, vmin=min_val, vmax=max_val) + plt.colorbar(sca2, ax=cur_ax, pad=0.03, shrink=0.5) + cur_ax.set_title("PS2 prediction of atmospheric effect") + + cur_ax = ax[0, 1] + sca4 = cur_ax.scatter(x_new, y_new, c=var_sk_new) + plt.colorbar(sca4, ax=cur_ax, pad=0.03, shrink=0.5) + cur_ax.set_title("Variance of predicted atmospheric effect") + + plt.close(fig) + + return idx_range, aps1, aps2 + + +def estimateAtmosphericPhaseScreen(*, residuals: np.ndarray, coord_utm1: np.ndarray, coord_utm2: np.ndarray, + num_cores: int = 1, bool_plot: bool = False, + logger: Logger) -> tuple[np.ndarray, np.ndarray]: + """Estimate_atmospheric_phase_screen. + + Estimates the atmospheric phase screen from a stack of phase time series for a sparse set of points. + Kriging is used to estimate the spatial dependence and to interpolate the phase screen over a set of new points. 
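+
+    Per image, an empirical variogram is estimated from the residuals, a Stable covariance model (with
+    nugget) is fitted to it, and simple kriging conditioned on the residuals is evaluated at both the
+    original and the new coordinates. Images for which the variogram fit fails are skipped and keep a
+    zero-valued atmospheric phase screen.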
+ + Parameters + ---------- + residuals: np.ndarray + residual phase (size: num_points1 x num_images) + coord_utm1: np.ndarray + coordinates in UTM of the points for which the residuals are given (size: num_points1 x 2) + coord_utm2: np.ndarray + coordinates in UTM of the new points which shall be interpolated (size: num_points2 x 2) + num_cores: int + Number of cores + bool_plot: bool + boolean flag to plot intermediate results (default: False) + logger: Logger + Logging handler + + Returns + ------- + aps1: np.ndarray + atmospheric phase screen for the known points (size: num_points1 x num_images) + aps2: np.ndarray + atmospheric phase screen for the new points (size: num_points2 x num_images) + """ + msg = "#" * 10 + msg += " ESTIMATE ATMOSPHERIC PHASE SCREEN (KRIGING) " + msg += "#" * 10 + logger.info(msg=msg) + + start_time = time.time() + + num_points1 = residuals.shape[0] + num_points2 = coord_utm2.shape[0] + num_time = residuals.shape[1] # can be either num_ifgs or num_images + + bins = gs.variogram.standard_bins(pos=(coord_utm1[:, 1], coord_utm1[:, 0]), + dim=2, latlon=False, mesh_type='unstructured', bin_no=30, max_dist=None) + + if num_cores == 1: + args = (np.arange(0, num_time), num_time, residuals, coord_utm1, coord_utm2, bins, bool_plot, logger) + _, aps1, aps2 = launchSpatialFiltering(parameters=args) + else: + logger.info(msg="start parallel processing with {} cores.".format(num_cores)) + pool = multiprocessing.Pool(processes=num_cores) + + aps1 = np.zeros((num_points1, num_time), dtype=np.float32) + aps2 = np.zeros((num_points2, num_time), dtype=np.float32) + + num_cores = num_time if num_cores > num_time else num_cores # avoids having more samples than cores + idx = ut.splitDatasetForParallelProcessing(num_samples=num_time, num_cores=num_cores) + + args = [( + idx_range, + idx_range.shape[0], + residuals[:, idx_range], + coord_utm1, + coord_utm2, + bins, + False, + logger) for idx_range in idx] + + results = pool.map(func=launchSpatialFiltering, iterable=args) + + # retrieve results + for i, aps1_i, aps2_i in results: + aps1[:, i] = aps1_i + aps2[:, i] = aps2_i + + m, s = divmod(time.time() - start_time, 60) + logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s)) + + return aps1, aps2 + + +def simpleInterpolation(*, residuals: np.ndarray, coord_utm1: np.ndarray, coord_utm2: np.ndarray, + interp_method: str = "linear"): + """SimpleInterpolation. + + Simple interpolation of atmospheric phase screen using scipy's griddata function with options "linear" or "cubic". + For pixels outside the convex hull of the input points, the nearest neighbor is used. 
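+
+    A minimal usage sketch (the variable names here are placeholders):
+
+        aps1, aps2 = simpleInterpolation(residuals=residuals, coord_utm1=coord_p1, coord_utm2=coord_p2,
+                                         interp_method="linear")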
+ + Parameters + ---------- + residuals: np.ndarray + residual phase (size: num_points x num_ifgs) + coord_utm1: np.ndarray + coordinates in UTM of the points for which the residuals are given (size: num_points_p1 x 2) + coord_utm2: np.ndarray + coordinates in UTM of the new points which shall be interpolated (size: num_points_p2 x 2) + interp_method: str + interpolation method (default: "linear"; options: "linear", "cubic") + + Returns + ------- + aps1: np.ndarray + atmospheric phase screen for the known points (size: num_points_p1 x num_images) + aps2: np.ndarray + atmospheric phase screen for the new points (size: num_points_p2 x num_images) + """ + num_points2 = coord_utm2.shape[0] + num_images = residuals.shape[1] + + aps1 = np.zeros_like(residuals, dtype=np.float32) + aps2 = np.zeros((num_points2, num_images), dtype=np.float32) + for i in range(num_images): + aps1[:, i] = griddata(coord_utm1, residuals[:, i], coord_utm1, method=interp_method) + aps2[:, i] = griddata(coord_utm1, residuals[:, i], coord_utm2, method=interp_method) + # interpolation with 'linear' or 'cubic' yields nan values for pixel that need to be extrapolated. + # interpolation with 'knn' solves this problem. + mask_extrapolate = np.isnan(aps2[:, i]) + aps2[mask_extrapolate, i] = griddata( + coord_utm1, + residuals[:, i], + coord_utm2[mask_extrapolate, :], + method='nearest' + ) + + return aps1, aps2 diff --git a/sarvey/geolocation.py b/sarvey/geolocation.py new file mode 100644 index 0000000..c796fe7 --- /dev/null +++ b/sarvey/geolocation.py @@ -0,0 +1,100 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + +"""Module for correcting the geolocation of the scatterers.""" +import logging +from os.path import join +import numpy as np + +from miaplpy.objects.slcStack import slcStack + +from sarvey.objects import Points + + +def getHeading(path_inputs: str, logger: logging.Logger): + """ + Read heading angle from slcStack.h5. + + Parameters + ---------- + path_inputs: str + Path to directory containing 'slcStack.h5' and 'geometryRadar.h5'. 
+ logger: Logger + Logger handle + + Returns + ------- + heading_angle: float + heading angle of the satellite in radians + for ascending ~ -12*pi/180 + for descending ~ 190*pi/180 + """ + # get heading from slcStack.h5 + slc_stack_file = join(path_inputs, 'slcStack.h5') + slc_stack_obj = slcStack(slc_stack_file) + try: + meta_dict = slc_stack_obj.get_metadata() + lower_case_meta_dict = {k.lower(): v for k, v in meta_dict.items()} + + heading_angle = float(lower_case_meta_dict["heading"]) + logger.info(msg=f"Heading_angle of satellite: {heading_angle} deg") + heading_angle = np.deg2rad(heading_angle) + except Exception as exc: + logger.error(f'Failed to retrieve heading angle from {slc_stack_file}: {exc}') + raise Exception + return heading_angle + + +def calculateGeolocationCorrection(*, path_geom: str, point_obj: Points, demerr: np.array, logger: logging.Logger): + """ + Calculate geolocation correction. + + Parameters + ---------- + path_geom: str + Path to directory containing 'slcStack.h5' or 'geometryRadar.h5'. + point_obj: Points + Point object with incidence angle for points + demerr: np.array + Array of dem error per pixel + logger: Logger + Logger handle + + Returns + ------- + coord_correction: np.array + array of geolocation corrections, two columns [x_correction, y_correction] per point. + """ + heading_angle = getHeading(path_inputs=path_geom, logger=logger) + + coord_correction = np.zeros_like(point_obj.coord_xy, dtype=float) + coord_correction[:, 0] = demerr * np.cos(heading_angle) / np.tan(point_obj.loc_inc) + coord_correction[:, 1] = -demerr * np.sin(heading_angle) / np.tan(point_obj.loc_inc) + + return coord_correction diff --git a/sarvey/ifg_network.py b/sarvey/ifg_network.py new file mode 100644 index 0000000..a187c0e --- /dev/null +++ b/sarvey/ifg_network.py @@ -0,0 +1,348 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . 
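+#
+# Network types implemented in this module: StarNetwork (single reference), SmallTemporalBaselinesNetwork,
+# SmallBaselineNetwork, DelaunayNetwork and SmallBaselineYearlyNetwork.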
+ +"""IfgNetwork module for SARvey.""" +import datetime +import h5py +import os +import matplotlib.pyplot as plt +import numpy as np +from typing import Union +import warnings +from logging import Logger +from scipy.spatial import Delaunay + + +class IfgNetwork: + """Abstract class/interface for different types of interferogram networks.""" + + ifg_list: Union[list, np.ndarray] = None + + def __init__(self): + """Init.""" + self.pbase = None + self.tbase = None + self.num_images = None + self.ifg_list = list() # is later converted to np.array + self.pbase_ifg = None + self.tbase_ifg = None + self.num_ifgs = None + self.dates = list() + + def plot(self): + """Plot the network of interferograms.""" + fig = plt.figure(figsize=(15, 5)) + axs = fig.subplots(1, 3) + dt = [datetime.date.fromisoformat(d) for d in self.dates] + axs[0].plot(dt, self.pbase, 'ko') + for idx in self.ifg_list: + xx = np.array([dt[idx[0]], dt[idx[1]]]) + yy = np.array([self.pbase[idx[0]], self.pbase[idx[1]]]) + axs[0].plot(xx, yy, 'k-') + axs[0].set_ylabel('perpendicular baseline [m]') + axs[0].set_xlabel('temporal baseline [years]') + axs[0].set_title('Network of interferograms') + fig.autofmt_xdate() + + axs[1].hist(self.tbase_ifg * 365.25, bins=100) + axs[1].set_ylabel('Absolute frequency') + axs[1].set_xlabel('temporal baseline [days]') + + axs[2].hist(self.pbase_ifg, bins=100) + axs[2].set_ylabel('Absolute frequency') + axs[2].set_xlabel('perpendicular baseline [m]') + return fig + + def getDesignMatrix(self): + """Compute the design matrix for the smallbaseline network.""" + a = np.zeros((self.num_ifgs, self.num_images)) + for i in range(len(self.ifg_list)): + a[i, self.ifg_list[i][0]] = 1 + a[i, self.ifg_list[i][1]] = -1 + return a + + def open(self, *, path: str): + """Read stored information from already existing.h5 file. + + Parameter + ----------- + path: str + path to existing file to read from. + """ + with h5py.File(path, 'r') as f: + self.num_images = f.attrs["num_images"] + self.num_ifgs = f.attrs["num_ifgs"] + + self.tbase_ifg = f['tbase_ifg'][:] + self.pbase_ifg = f['pbase_ifg'][:] + self.tbase = f['tbase'][:] + self.pbase = f['pbase'][:] + self.ifg_list = f['ifg_list'][:] + try: + self.dates = f['dates'][:] + self.dates = [date.decode("utf-8") for date in self.dates] + except KeyError as ke: + self.dates = None + print(f"IfgNetwork is in old dataformat. Cannot read 'dates'! {ke}") + + f.close() + + def writeToFile(self, *, path: str, logger: Logger): + """Write all existing data to .h5 file. + + Parameters + ---------- + path: str + path to filename + logger: Logger + Logging handler. + """ + logger.info(msg="write IfgNetwork to {}".format(path)) + + if os.path.exists(path): + os.remove(path) + + dates = np.array(self.dates, dtype=np.string_) + + with h5py.File(path, 'w') as f: + f.attrs["num_images"] = self.num_images + f.attrs["num_ifgs"] = self.num_ifgs + + f.create_dataset('tbase_ifg', data=self.tbase_ifg) + f.create_dataset('pbase_ifg', data=self.pbase_ifg) + f.create_dataset('tbase', data=self.tbase) + f.create_dataset('pbase', data=self.pbase) + f.create_dataset('ifg_list', data=self.ifg_list) + f.create_dataset('dates', data=dates) + + +class StarNetwork(IfgNetwork): + """Star network of interferograms (single-reference).""" + + def configure(self, *, pbase: np.ndarray, tbase: np.ndarray, ref_idx: int, dates: list): + """Create list of interferograms containing the indices of the images and computes baselines. 
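+
+        For example, with num_images = 5 and ref_idx = 2 the resulting ifg_list is
+        [(2, 0), (2, 1), (2, 3), (2, 4)], i.e. every other image is paired with the reference image.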
+ + Parameter + --------- + pbase: np.ndarray + Perpendicular baselines of the SAR acquisitions. + tbase: np.ndarray + Temporal baselines of the SAR acquisitions. + ref_idx: int + Index of the reference image. + dates: list + Dates of the acquisitions. + """ + self.pbase = pbase + self.tbase = tbase / 365.25 + self.num_images = pbase.shape[0] + self.dates = dates + + for i in range(self.num_images): + if i == ref_idx: + continue + self.ifg_list.append((ref_idx, i)) + + self.pbase_ifg = np.delete(self.pbase - self.pbase[ref_idx], ref_idx) + self.tbase_ifg = np.delete(self.tbase - self.tbase[ref_idx], ref_idx) + self.num_ifgs = self.num_images - 1 + + +class SmallTemporalBaselinesNetwork(IfgNetwork): + """Small temporal baselines network of interferograms without restrictions on the perpendicular baselines.""" + + def configure(self, *, pbase: np.ndarray, tbase: np.ndarray, num_link: int = None, dates: list): + """Create list of interferograms containing the indices of the images and computes baselines. + + Parameter + ----------- + pbase: np.ndarray + Perpendicular baselines of the SAR acquisitions. + tbase: np.ndarray + Temporal baselines of the SAR acquisitions. + num_link: int + Number of consecutive links in time connecting acquisitions. + dates: list + Dates of the acquisitions. + """ + self.pbase = pbase + self.tbase = tbase / 365.25 + self.num_images = pbase.shape[0] + self.dates = dates + + for i in range(self.num_images): + for j in range(num_link): + if i + j + 1 >= self.num_images: + continue + self.ifg_list.append((i, i + j + 1)) + + self.ifg_list = [(i, j) for i, j in self.ifg_list if i != j] # remove connections to itself, e.g. (0, 0) + + self.pbase_ifg = np.array([self.pbase[idx[1]] - self.pbase[idx[0]] for idx in self.ifg_list]) + self.tbase_ifg = np.array([self.tbase[idx[1]] - self.tbase[idx[0]] for idx in self.ifg_list]) + self.num_ifgs = self.pbase_ifg.shape[0] + + +class SmallBaselineNetwork(IfgNetwork): + """Small baseline network of interferograms restricting both temporal and spatial baselines.""" + + def configure(self, *, pbase: np.ndarray, tbase: np.ndarray, num_link: int, max_tbase: int, dates: list): + """Create list of interferograms containing the indices of the images and computes baselines. + + Parameter + ----------- + pbase: np.ndarray + perpendicular baselines of the SAR acquisitions. + tbase: np.ndarray + temporal baselines of the SAR acquisitions. + max_tbase: int + maximum temporal baseline in [days] (default: None). + num_link: int + number of links within the range of maximum temporal baseline. + dates: list + Dates of the acquisitions. + """ + self.pbase = pbase + self.tbase = tbase / 365.25 + self.num_images = pbase.shape[0] + self.dates = dates + flag_restrict_to_max_tbase = False + + # in this section use tbase in [days] (function argument, not self.) 
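+        # For every image i: always link it to its direct successor in time, link it to the image closest to
+        # tbase[i] + max_tbase, and spread further links evenly over the acquisitions in between, giving
+        # roughly num_link connections per image. Duplicate pairs are removed afterwards via np.unique.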
+ for i in range(self.num_images - 1): + + if i + 1 < self.num_images - 1: + # always use one connection to nearest neighbour in time + self.ifg_list.append((i, i + 1)) + else: + self.ifg_list.append((i, i + 1)) + break + # compute index corresponding to max_tbase for current time + diff = np.abs(tbase - (tbase[i] + max_tbase)) + max_idx = np.where(diff == diff.min())[0][0] + self.ifg_list.append((i, max_idx)) + + if max_idx == i: # no further images between i and max_idx + flag_restrict_to_max_tbase = True + continue + + # spread the rest of the links over the remaining time steps in between + links = np.floor(np.arange(i, max_idx, (max_idx - i) / (num_link - 1)))[1:].astype(int) + for link in links: + self.ifg_list.append((i, link)) + self.ifg_list = np.unique(self.ifg_list, axis=0) + + if flag_restrict_to_max_tbase: + warnings.warn(f"Cannot restrict ifgs to maximum temporal baseline of {max_tbase} days.") + + self.ifg_list = [(i, j) for i, j in self.ifg_list if i != j] # remove connections to itself, e.g. (0, 0) + + self.pbase_ifg = np.array([self.pbase[idx[1]] - self.pbase[idx[0]] for idx in self.ifg_list]) + self.tbase_ifg = np.array([self.tbase[idx[1]] - self.tbase[idx[0]] for idx in self.ifg_list]) + self.num_ifgs = self.pbase_ifg.shape[0] + + +class DelaunayNetwork(IfgNetwork): + """Delaunay network of interferograms which restricts both the temporal and perpendicular baselines.""" + + def configure(self, *, pbase: np.ndarray, tbase: np.ndarray, dates: list): + """Create list of interferograms containing the indices of the images and computes baselines. + + Parameter + ----------- + pbase: np.ndarray + perpendicular baselines of the SAR acquisitions, array + tbase: np.ndarray + temporal baselines of the SAR acquisitions, array + dates: list + Dates of the acquisitions, list. + """ + self.pbase = pbase + self.tbase = tbase / 365.25 + self.num_images = pbase.shape[0] + self.dates = dates + scale = 0.25 + + network = Delaunay(points=np.stack([self.pbase, self.tbase * 365.25 * scale]).T) + for p1, p2, p3 in network.simplices: + self.ifg_list.append((p1, p2)) + self.ifg_list.append((p1, p3)) + self.ifg_list.append((p2, p3)) + self.ifg_list = np.unique(self.ifg_list, axis=0) + + self.pbase_ifg = np.array([self.pbase[idx[1]] - self.pbase[idx[0]] for idx in self.ifg_list]) + self.tbase_ifg = np.array([self.tbase[idx[1]] - self.tbase[idx[0]] for idx in self.ifg_list]) + self.num_ifgs = self.pbase_ifg.shape[0] + + +class SmallBaselineYearlyNetwork(IfgNetwork): + """Small baseline network of interferograms with yearly connections.""" + + def configure(self, *, pbase: np.ndarray, tbase: np.ndarray, num_link: int = None, dates: list): + """Create list of interferograms containing the indices of the images and computes baselines. + + Parameter + ----------- + pbase: np.ndarray + perpendicular baselines of the SAR acquisitions, array + tbase: np.ndarray + temporal baselines of the SAR acquisitions, array + num_link: int + Number of consecutive links in time connecting acquisitions. + dates: list + Dates of the acquisitions, list. 
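+
+        Notes
+        -----
+        In addition to the 'num_link' consecutive connections, each image is linked to the acquisition
+        closest to one year after it (yearly links ending at the last image are skipped).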
+ """ + self.pbase = pbase + self.tbase = tbase / 365.25 + self.num_images = pbase.shape[0] + self.dates = dates + + # add small temporal baselines + for i in range(self.num_images): + for j in range(num_link): + if i + j + 1 >= self.num_images: + continue + self.ifg_list.append((i, i + j + 1)) + + # add yearly ifgs + for i in range(self.num_images): + # find index of image at roughly one year distance + diff = np.abs(tbase - (tbase[i] + 365.25)) + year_idx = np.where(diff == diff.min())[0][0] + print(year_idx) + if year_idx != self.num_images - 1: # avoid connections to the last image + self.ifg_list.append((i, year_idx)) + print("found!") + + self.ifg_list = np.unique(self.ifg_list, axis=0) + self.ifg_list = [(i, j) for i, j in self.ifg_list if i != j] # remove connections to itself, e.g. (0, 0) + + self.pbase_ifg = np.array([self.pbase[idx[1]] - self.pbase[idx[0]] for idx in self.ifg_list]) + self.tbase_ifg = np.array([self.tbase[idx[1]] - self.tbase[idx[0]] for idx in self.ifg_list]) + self.num_ifgs = self.pbase_ifg.shape[0] diff --git a/sarvey/objects.py b/sarvey/objects.py new file mode 100644 index 0000000..accac3d --- /dev/null +++ b/sarvey/objects.py @@ -0,0 +1,795 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + +"""Objects module for SARvey.""" +import os +from os.path import join, dirname, exists, basename +from typing import Optional +import h5py +import matplotlib.pyplot as plt +import numpy as np +from pyproj import Proj, CRS +from pyproj.aoi import AreaOfInterest +from pyproj.database import query_utm_crs_info +from logging import Logger + +from miaplpy.objects.slcStack import slcStack +from mintpy.utils import readfile +from mintpy.utils.plot import auto_flip_direction + +from sarvey.ifg_network import IfgNetwork + + +class AmplitudeImage: + """AmplitudeImage.""" + + def __init__(self, *, file_path: str): + """Init. 
+ + Parameters + ---------- + file_path: str + path to filename + """ + self.width = None + self.length = None + self.file_path = file_path + self.background_map = None + self.orbit_direction = None + + def prepare(self, *, slc_stack_obj: slcStack, img: np.ndarray, logger: Logger): + """Read the SLC stack, compute the mean amplitude image and store it into a file. + + Parameters + ---------- + slc_stack_obj: slcStack + object of class slcStack from MiaplPy + img: np.ndarray + amplitude image, e.g. the mean over time + logger: Logger + Logging handler + """ + self.orbit_direction = slc_stack_obj.metadata["ORBIT_DIRECTION"] + self.length = slc_stack_obj.length + self.width = slc_stack_obj.width + + self.background_map = img + + logger.info(msg="write data to {}...".format(self.file_path)) + + if exists(self.file_path): + os.remove(self.file_path) + + with h5py.File(self.file_path, 'w') as f: + f.create_dataset('background_map', data=self.background_map) + f.attrs["ORBIT_DIRECTION"] = self.orbit_direction + f.attrs["LENGTH"] = self.length + f.attrs["WIDTH"] = self.width + + def open(self): + """Open.""" + # print("read from {}".format(self.file_path)) + + with h5py.File(self.file_path, 'r') as f: + self.background_map = f["background_map"][:] + self.orbit_direction = f.attrs["ORBIT_DIRECTION"] + self.length = f.attrs["LENGTH"] + self.width = f.attrs["WIDTH"] + + def plot(self, *, ax: plt.Axes = None, logger: Logger): + """Plot the mean amplitude image as a background map. + + Parameters + ---------- + ax: plt.Axes + axes for plotting (default: None, a new figure will be created). + logger: Logger + Logging handler. + + Return + ------ + ax: plt.Axes + axes object. + """ + if self.background_map is None: + try: + self.open() + except OSError as e: + logger.error(msg="Could not open file: {}".format(e)) + fig = plt.figure(figsize=(15, 5)) + ax = fig.add_subplot() + logger.error(msg="Orbit direction not available.") + return ax + + if ax is None: + fig = plt.figure(figsize=(15, 5)) + ax = fig.add_subplot() + ax.imshow(self.background_map, cmap=plt.cm.get_cmap("gray")) + meta = {"ORBIT_DIRECTION": self.orbit_direction} + auto_flip_direction(meta, ax=ax, print_msg=False) + + ax.set_xlabel("Range") + ax.set_ylabel("Azimuth") + + return ax + + +class CoordinatesUTM: + """Coordinates in UTM for all pixels in the radar image.""" + + def __init__(self, *, file_path: str, logger: Logger): + """Init. + + Parameters + ---------- + file_path: str + path to filename + logger: Logger + Logging handler. + """ + self.file_path = file_path + self.coord_utm = None + self.logger = logger + + def prepare(self, *, input_path: str): + """Read the slc stack, computes the mean amplitude image and stores it into a file. + + Parameters + ---------- + input_path: str + path to slcStack.h5 file. 
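        Example
        -------
        A minimal sketch (paths are hypothetical; in the processing workflow the
        latitude/longitude datasets are read from geometryRadar.h5)::

            coord_utm_obj = CoordinatesUTM(file_path="outputs/coordinates_utm.h5", logger=logger)
            coord_utm_obj.prepare(input_path="inputs/geometryRadar.h5")
            coord_utm_obj.open()  # reloads the stored array into coord_utm_obj.coord_utm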
+ """ + log = self.logger + lat = readfile.read(input_path, datasetName='latitude')[0] + lon = readfile.read(input_path, datasetName='longitude')[0] + + log.info(msg="Transform coordinates from latitude and longitude (WGS84) to North and East (UTM).") + # noinspection PyTypeChecker + utm_crs_list = query_utm_crs_info( + datum_name="WGS 84", + area_of_interest=AreaOfInterest( + west_lon_degree=np.nanmin(lon.ravel()), + south_lat_degree=np.nanmin(lat.ravel()), + east_lon_degree=np.nanmax(lon.ravel()), + north_lat_degree=np.nanmax(lat.ravel())), + contains=True) + utm_crs = CRS.from_epsg(utm_crs_list[0].code) + lola2utm = Proj(utm_crs) + self.coord_utm = np.array(lola2utm(lon, lat)) + + log.info(msg="write data to {}...".format(self.file_path)) + + if exists(self.file_path): + os.remove(self.file_path) + + with h5py.File(self.file_path, 'w') as f: + f.create_dataset('coord_utm', data=self.coord_utm) + + def open(self): + """Open.""" + with h5py.File(self.file_path, 'r') as f: + self.coord_utm = f["coord_utm"][:] + + +class BaseStack: + """Class for 3D image-like data stacks.""" + + def __init__(self, *, file: str = None, logger: Logger): + """Init. + + Parameters + ---------- + file: str + path to filename + logger: Logger + Logging handler. + """ + self.file = file + self.logger = logger + self.metadata = None + self.num_time = None + self.length = None + self.width = None + self.f = None + + def close(self, *, print_msg: bool = True): + """Close.""" + try: + self.f.close() + if print_msg: + self.logger.info(msg='close file: {}'.format(basename(self.file))) + except Exception as e: + self.logger.exception(msg=e) + pass + return None + + def getShape(self, *, dataset_name: str): + """Open file and read shape of dataset.""" + with h5py.File(self.file, 'r') as f: + dshape = f[dataset_name].shape + return dshape + + def read(self, *, dataset_name: str, box: Optional[tuple] = None, print_msg: bool = True): + """Read dataset from slc file. + + Parameters + ---------- + dataset_name: str + name of dataset + box: tuple + tuple of 4 int, indicating x0,y0,x1,y1 of range, or + tuple of 6 int, indicating x0,y0,z0,x1,y1,z1 of range + print_msg: bool + print message. + + Returns + ------- + data: np.ndarray + 2D or 3D dataset + """ + if print_msg: + self.logger.info(msg='reading box {} from file: {} ...'.format(box, self.file)) + + with h5py.File(self.file, 'r') as f: + self.metadata = dict(f.attrs) + + ds = f[dataset_name] + if len(ds.shape) == 3: + self.length, self.width, self.num_time = ds.shape + else: + self.length, self.width = ds.shape + + # Get Index in space/2_3 dimension + if box is None: + box = [0, 0, self.width, self.length] + + if len(ds.shape) == 3: + if len(box) == 4: + data = ds[box[1]:box[3], box[0]:box[2], :] + if len(box) == 6: + data = ds[box[1]:box[4], box[0]:box[3], box[2]:box[5]] + else: + if len(box) == 6: + raise IndexError("Cannot read 3D box from 2D data.") + data = ds[box[1]:box[3], box[0]:box[2]] + + for key, value in self.metadata.items(): + try: + self.metadata[key] = value.decode('utf8') + except Exception: + self.metadata[key] = value + return data + + def prepareDataset(self, dataset_name: str, dshape: tuple, dtype: object, + metadata: Optional[dict], mode: str = "w", chunks: [tuple, bool] = True): + """PrepareDataset. Creates a dataset in file with specified size without writing any data. + + Parameters + ---------- + dataset_name: str + name of dataset. + dshape: tuple + shape of dataset. + dtype: object + data type of dataset. 
+ metadata: dict + metadata of dataset (e.g. WAVELENGTH, ORBIT_DIRECTION, etc.). Usually the same as in slcStack.h5. + mode: str + open mode ('w' for writing new file or 'a' for appending to existing file). + chunks: tuple + chunk size ('True'/'False' or tuple specifying the dimension of the chunks) + """ + with h5py.File(self.file, mode) as f: + self.logger.info(msg="Prepare dataset: {d:<25} of {t:<25} in size of {s}".format( + d=dataset_name, + t=str(dtype), + s=dshape)) + + f.create_dataset(dataset_name, + shape=dshape, + dtype=dtype, + chunks=chunks) + + # write attributes + metadata = dict(metadata) + for key in metadata.keys(): + f.attrs[key] = metadata[key] + + return + + def writeToFileBlock(self, *, data: np.ndarray, dataset_name: str, block: Optional[tuple] = None, mode: str = 'a', + print_msg: bool = True): + """Write data to existing HDF5 dataset in disk block by block. + + Parameters + ---------- + data: np.ndarray + 1/2/3D matrix. + dataset_name: str + dataset name. + block: list + the list can contain 2, 4 or 6 integers indicating: [zStart, zEnd, yStart, yEnd, xStart, xEnd]. + mode: str + open mode ('w' for writing new file or 'a' for appending to existing file). + print_msg: bool + print message. + + Returns + -------- + file: str + path to file + """ + if block is None: + # data shape + if isinstance(data, list): + shape = (len(data),) + else: + shape = data.shape + + if len(shape) == 1: + block = [0, shape[0]] + elif len(shape) == 2: + block = [0, shape[0], + 0, shape[1]] + elif len(shape) == 3: + block = [0, shape[0], + 0, shape[1], + 0, shape[2]] + + with h5py.File(self.file, mode) as f: + + if print_msg: + self.logger.info(msg="writing dataset /{:<25} block: {}".format(dataset_name, block)) + if len(block) == 6: + f[dataset_name][block[0]:block[1], + block[2]:block[3], + block[4]:block[5]] = data + + elif len(block) == 4: + f[dataset_name][block[0]:block[1], + block[2]:block[3]] = data + + elif len(block) == 2: + f[dataset_name][block[0]:block[1]] = data + + return self.file + + def writeToFile(self, *, data: np.ndarray, dataset_name: str, metadata: Optional[dict] = None, mode: str = 'a', + chunks: [tuple, bool] = True): + """Write the whole dataset to the file (not block-by-block). + + Parameters + ---------- + data: np.ndarray + 3D data array. + dataset_name: str + name of dataset. + metadata: dict + metadata of dataset (e.g. WAVELENGTH, ORBIT_DIRECTION, etc.). Usually the same as in slcStack.h5. + mode: str + mode for opening the h5 file (e.g. 
write: 'w' or append: 'a') + chunks: tuple + chunk size ('True'/'False' or tuple specifying the dimension of the chunks) + """ + # 3D dataset + self.logger.info(msg='create HDF5 file: {} with w mode'.format(self.file)) + self.f = h5py.File(self.file, mode) + if dataset_name not in self.f: + self.logger.info(msg='create dataset /{n} of {t:<10} in size of {s}.'.format(n=dataset_name, + t=str(data.dtype), + s=data.shape)) + self.f.create_dataset(dataset_name, data=data, chunks=chunks) + else: + self.logger.info(msg='overwrite dataset /{n} of {t:<10} in size of {s}.'.format(n=dataset_name, + t=str(data.dtype), + s=data.shape)) + self.f[dataset_name] = data + + # Attributes + if metadata is not None: + metadata = dict(metadata) + for key, value in metadata.items(): + self.f.attrs[key] = str(value) + + self.f.close() + self.logger.info(msg='finished writing to {}'.format(self.file)) + return + + +class Points: + """Points class for storing information about the selected scatterers.""" + + file_path: str + point_id: np.array + coord_xy: np.array + num_points: int + phase: np.array + wavelength: float + length: int + width: int + times: None + + # etc. + + def __init__(self, *, file_path: str, logger: Logger): + """Init. + + Parameters + ---------- + file_path: str + ath to filename + logger: Logger + Logging handler. + """ + self.ifg_net_obj = IfgNetwork() # use parent class here which doesn't know and care about 'star' or 'sb' + self.coord_utm = None + self.coord_lalo = None + self.height = None + self.slant_range = None + self.loc_inc = None + self.file_path = file_path + self.logger = logger + + def prepare(self, *, point_id: np.ndarray, coord_xy: np.ndarray, path_inputs: str): + """Assign point_id and radar coordinates to the object. + + Store the point_id and radar coordinates of the scatterers in the object (not file) and read further + attributes from external files (ifg_network.h5, slcStack.h5, geometryRadar.h5, coordinates_utm.h5). + + Parameters + ---------- + point_id: np.ndarray + point_id of the scatterers. + coord_xy: np.ndarray + radar coordinates of the scatterers. + path_inputs: str + path to input files (slcStack.h5, geometryRadar.h5). + """ + self.point_id = point_id + self.coord_xy = coord_xy + self.num_points = self.coord_xy.shape[0] + self.phase = None + self.openExternalData(path_inputs=path_inputs) + + def writeToFile(self): + """Write data to .h5 file (num_points, coord_xy, point_id, phase).""" + self.logger.info(msg="write data to {}...".format(self.file_path)) + + if exists(self.file_path): + os.remove(self.file_path) + + with h5py.File(self.file_path, 'w') as f: + f.attrs["num_points"] = self.num_points + f.create_dataset('coord_xy', data=self.coord_xy) + f.create_dataset('point_id', data=self.point_id) + f.create_dataset('phase', data=self.phase) + + def open(self, path_inputs: str, other_file_path: str = None): + """Read data from file. + + Read stored information from already existing .h5 file. This can be the file of the object itself. If the + data should be read from another file, the path to this file can be given as 'other_file_path'. Thereby, a new + Points object can be created with the data of another Points object. + + Parameters + ---------- + path_inputs: str + path to input files (slcStack.h5, geometryRadar.h5). + other_file_path: str + path to other .h5 file (default: None). 
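        Example
        -------
        A minimal sketch (paths are hypothetical): create a new Points object from the
        data stored by a previous processing step::

            point_obj = Points(file_path="outputs/p1_ts.h5", logger=logger)
            point_obj.open(path_inputs="inputs/", other_file_path="outputs/p1_ifg_wr.h5")
            print(point_obj.num_points, point_obj.phase.shape)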
+ """ + # 1) read own data: coord_xy, phase, point_id, num_points, reference_point_idx + if other_file_path is not None: + path = other_file_path + else: + path = self.file_path + self.logger.info(msg="read from {}".format(path)) + + with h5py.File(path, 'r') as f: + self.num_points = f.attrs["num_points"] + self.coord_xy = f["coord_xy"][:] + self.point_id = f["point_id"][:] + self.phase = f["phase"][:] + + self.openExternalData(path_inputs=path_inputs) + + def openExternalData(self, *, path_inputs: str): + """Load data which is stored in slcStack.h5, geometryRadar.h5, ifg_network.h5 and coordinates_utm.h5.""" + # 1) read IfgNetwork + self.ifg_net_obj.open(path=join(dirname(self.file_path), "ifg_network.h5")) + + # 2) read metadata from slcStack + slc_stack_obj = slcStack(join(path_inputs, "slcStack.h5")) + slc_stack_obj.open(print_msg=False) + self.wavelength = np.float64(slc_stack_obj.metadata["WAVELENGTH"]) + self.length = slc_stack_obj.length # y-coordinate axis (azimut) + self.width = slc_stack_obj.width # x-coordinate axis (range) + + # 3) read from geometry file + mask = self.createMask() + + geom_path = join(path_inputs, "geometryRadar.h5") + + # load geometry data + loc_inc, meta = readfile.read(geom_path, datasetName='incidenceAngle') + loc_inc *= np.pi / 180 # in [rad] + slant_range = readfile.read(geom_path, datasetName='slantRangeDistance')[0] + height = readfile.read(geom_path, datasetName='height')[0] + lat = readfile.read(geom_path, datasetName='latitude')[0] + lon = readfile.read(geom_path, datasetName='longitude')[0] + + self.loc_inc = loc_inc[mask].ravel() + self.slant_range = slant_range[mask].ravel() + self.height = height[mask].ravel() + self.coord_lalo = np.array([lat[mask].ravel(), lon[mask].ravel()]).transpose() + + # 4) read UTM coordinates + coord_utm_obj = CoordinatesUTM(file_path=join(dirname(self.file_path), "coordinates_utm.h5"), + logger=self.logger) + coord_utm_obj.open() + self.coord_utm = coord_utm_obj.coord_utm[:, mask].transpose() + + def createMask(self): + """Create a mask. + + Create a mask in the size of the radar image which is used to read the geometry and SLC data for the selected + scatterers. + """ + mask = np.zeros((self.length, self.width), dtype=np.bool_) + tmp = [tuple([c[0], c[1]]) for c in self.coord_xy] + for i in tmp: + mask[i] = True + return mask + + def addPointsFromObj(self, *, new_point_id: np.ndarray, new_coord_xy: np.ndarray, new_phase: np.ndarray, + new_num_points: int, path_inputs: str): + """Add new points and their attributes to the existing data. + + Parameters + ---------- + new_point_id: np.ndarray + point_id of the new scatterers. + new_coord_xy: np.ndarray + radar coordinates of the new scatterers. + new_phase: np.ndarray + phase of the new scatterers. + new_num_points: int + number of new points. + path_inputs: str + path to input files (slcStack.h5, geometryRadar.h5). 
+ """ + self.point_id = np.append(self.point_id, new_point_id) + self.coord_xy = np.append(self.coord_xy, new_coord_xy, axis=0) + self.phase = np.append(self.phase, new_phase, axis=0) + self.num_points += new_num_points + + # all data must be ordered, so that all external data can be loaded correctly + sort_idx = np.argsort(self.point_id) + self.point_id = self.point_id[sort_idx] + self.coord_xy = self.coord_xy[sort_idx, :] + self.phase = self.phase[sort_idx, :] + # refresh by reopening all external data + self.openExternalData(path_inputs=path_inputs) + + def removePoints(self, mask: np.ndarray = None, *, keep_id: [np.ndarray, list], path_inputs: str): + """Remove all entries from specified points. + + The possible options exist for removing the points: + a) Keep all points which are set to True in a 'mask' with size (num_points x 1). Or + b) Keep all points whose ID is listed in keep_id. The rest of the points will be removed. + + Parameters + ---------- + mask: np.ndarray + mask to select points to be kept, rest will be removed (default: None). + keep_id: np.ndarray + list of point_id to keep. + path_inputs: str + path to input files (slcStack.h5, geometryRadar.h5). + """ + if mask is None: + mask = np.ones((self.num_points,), dtype=np.bool_) + for p in self.point_id: + if p not in keep_id: + mask[self.point_id == p] = False + self.point_id = self.point_id[mask] + self.coord_xy = self.coord_xy[mask, :] + self.phase = self.phase[mask, :] + self.num_points = mask[mask].shape[0] + # refresh by reopening all external data + self.openExternalData(path_inputs=path_inputs) + + +class Network: + """Spatial network of PS candidates.""" + + def __init__(self, *, file_path: str, logger: Logger): + """Init. + + Parameters + ---------- + file_path: str + absolute path to working directory for creating/loading 'psNetwork.h5' + logger: Logger + Logging handler. 
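        Example
        -------
        A minimal sketch of the intended workflow (point_obj and arcs come from earlier
        processing steps, the path is hypothetical)::

            net_obj = Network(file_path="outputs/point_network.h5", logger=logger)
            net_obj.computeArcObservations(point_obj=point_obj, arcs=arcs)
            net_obj.writeToFile()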
+ """ + self.num_arcs = None + self.gamma = None + self.arcs = None + self.slant_range = None + self.loc_inc = None + self.phase = None + self.vel = None + self.demerr = None + self.ifg_net_obj = None + self.width = None + self.length = None + self.wavelength = None + self.file_path = file_path + self.logger = logger + + def writeToFile(self): + """Write all existing data to psNetwork.h5 file.""" + self.logger.info(msg="write data to {}...".format(self.file_path)) + + if exists(self.file_path): + os.remove(self.file_path) + + with h5py.File(self.file_path, 'w') as f: + f.attrs["num_arcs"] = self.num_arcs + f.create_dataset('arcs', data=self.arcs) + f.create_dataset('phase', data=self.phase) + f.create_dataset('loc_inc', data=self.loc_inc) + f.create_dataset('slant_range', data=self.slant_range) + + def open(self, *, path_inputs: str): + """Read stored information from existing .h5 file.""" + with h5py.File(self.file_path, 'r') as f: + self.num_arcs = f.attrs["num_arcs"] + self.arcs = f["arcs"][:] + self.phase = f["phase"][:] + self.loc_inc = f["loc_inc"][:] + self.slant_range = f["slant_range"][:] + self.openExternalData(path_inputs=path_inputs) + + def openExternalData(self, *, path_inputs: str): + """Read data from slcStack.h5 and IfgNetwork.h5 files.""" + slc_stack_obj = slcStack(join(path_inputs, "slcStack.h5")) + slc_stack_obj.open(print_msg=False) + self.wavelength = np.float64(slc_stack_obj.metadata["WAVELENGTH"]) + self.length = slc_stack_obj.length # y-coordinate axis (azimut) + self.width = slc_stack_obj.width # x-coordinate axis (range) + + # 3) read IfgNetwork + self.ifg_net_obj = IfgNetwork() + self.ifg_net_obj.open(path=join(dirname(self.file_path), "ifg_network.h5")) + + def computeArcObservations(self, *, point_obj: Points, arcs: np.ndarray): + """Compute the phase observations for each arc. + + Compute double difference phase observations, i.e. the phase differences for each arc in the network from the + phase of the two scatterers connected by the arc. + + Parameters + ---------- + point_obj: Points + object of class Points. + arcs: np.ndarray + Array with the indices of the points connected by an arc. + """ + self.arcs = arcs + self.num_arcs = self.arcs.shape[0] + self.logger.info(msg="no. arcs:\t{}".format(self.num_arcs)) + + self.phase = np.zeros((self.num_arcs, point_obj.ifg_net_obj.num_ifgs)) + self.loc_inc = np.zeros((self.num_arcs,)) + self.slant_range = np.zeros((self.num_arcs,)) + for idx, arc in enumerate(self.arcs): + self.phase[idx, :] = np.angle( + np.exp(1j * point_obj.phase[arc[0], :]) * np.conjugate(np.exp(1j * point_obj.phase[arc[1], :]))) + self.loc_inc[idx] = np.mean([point_obj.loc_inc[arc[0]], point_obj.loc_inc[arc[1]]]) + self.slant_range[idx] = np.mean([point_obj.slant_range[arc[0]], point_obj.slant_range[arc[1]]]) + + self.logger.info(msg="ifg arc observations created.") + + def removeArcs(self, *, mask: np.ndarray): + """Remove arcs from the list of arcs in the network. + + Parameter + --------- + mask: np.ndarray + mask to select arcs to be kept, rest will be removed. 
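        Example
        -------
        A minimal sketch, assuming a NetworkParameter object whose arc coherence 'gamma'
        has been estimated, and a hypothetical threshold of 0.5::

            keep = net_par_obj.gamma.reshape(-1) >= 0.5
            net_par_obj.removeArcs(mask=keep)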
+ """ + self.demerr = self.demerr[mask] + self.vel = self.vel[mask] + self.phase = self.phase[mask, :] + self.loc_inc = self.loc_inc[mask] + self.slant_range = self.slant_range[mask] + self.arcs = np.array(self.arcs) + self.arcs = self.arcs[mask, :] + self.gamma = self.gamma[mask] + self.num_arcs = mask[mask].shape[0] + + +class NetworkParameter(Network): + """Spatial Network with the estimated parameters of each arc in the network.""" + + def __init__(self, *, file_path: str, logger: Logger): + """Init.""" + super().__init__(file_path=file_path, logger=logger) + self.gamma = None + self.vel = None + self.demerr = None + self.slant_range = None + self.loc_inc = None + self.phase = None + self.arcs = None + self.num_arcs = None + self.logger = logger + + def prepare(self, *, net_obj: Network, demerr: np.ndarray, vel: np.ndarray, gamma: np.ndarray): + """Prepare. + + Parameter + ----------- + net_obj: Network + object of class Network. + demerr: np.ndarray + estimated DEM error for each arc in the network. + vel: np.ndarray + estimated velocity for each arc in the network. + gamma: np.ndarray + estimated temporal coherence for each arc in the network. + """ + self.num_arcs = net_obj.num_arcs + self.arcs = net_obj.arcs + self.phase = net_obj.phase + self.loc_inc = net_obj.loc_inc + self.slant_range = net_obj.slant_range + self.demerr = demerr + self.vel = vel + self.gamma = gamma + + def writeToFile(self): + """Write DEM error, velocity and temporal coherence to file.""" + super().writeToFile() + + with h5py.File(self.file_path, 'r+') as f: # append existing file + f.create_dataset('demerr', data=self.demerr) + f.create_dataset('vel', data=self.vel) + f.create_dataset('gamma', data=self.gamma) + + def open(self, *, path_inputs: str): + """Read data from file.""" + super().open(path_inputs=path_inputs) + + with h5py.File(self.file_path, 'r') as f: + self.demerr = f["demerr"][:] + self.vel = f["vel"][:] + self.gamma = f["gamma"][:] diff --git a/sarvey/osm_utils.py b/sarvey/osm_utils.py new file mode 100644 index 0000000..fd7bf23 --- /dev/null +++ b/sarvey/osm_utils.py @@ -0,0 +1,185 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. 
+# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + +"""Osm utils module for SARvey.""" +import numpy as np +import overpy +from logging import Logger +from shapely import Point + +from mintpy.utils import readfile, utils as ut + + +def getSpatialExtend(*, geom_file: str, logger: Logger): + """Get spatial extend of the radar image. + + Parameters + ---------- + geom_file: str + path of geometryRadar.h5 file + logger: Logger + Logging handler. + + Returns + ------- + ll_corner_wgs: list + list of coordinates of the lower-left corner of the radar image in WGS84 coordinates. + ur_corner_wgs: list + list of coordinates of the upper-right corner of the radar image in WGS84 coordinates. + coord: np.ndarray + coordinates of all pixels in the radar image in WGS84. + atr: dict + metadata dictionary from geometryRadar.h5. + """ + logger.info(msg='read spatial extend from geometryRadar.h5') + _, atr = readfile.read(geom_file) + coord = ut.coordinate(atr, lookup_file=geom_file) + lat, atr = readfile.read(geom_file, datasetName='latitude') + lon, _ = readfile.read(geom_file, datasetName='longitude') + + # radar image is flipped up-down + # unclear: check if bounding box fits. Otherwise, change to max and min values of lat and lon + ll_bbox = [np.nanmin(lat), np.nanmin(lon)] + ur_bbox = [np.nanmax(lat), np.nanmax(lon)] + + img_ext = [ + Point(lon[0, 0], lat[0, 0]), + Point(lon[-1, 0], lat[-1, 0]), + Point(lon[-1, -1], lat[-1, -1]), + Point(lon[0, -1], lat[0, -1]) + ] + return ll_bbox, ur_bbox, img_ext, coord, atr + + +def runOsmQuery(*, ll_corner_wgs: np.ndarray, ur_corner_wgs: np.ndarray, type_list: list, + logger: Logger) -> overpy.Result: + """Query OSM database for transport infrastructure within the spatial extent of the radar image. + + Parameters + ---------- + ll_corner_wgs: np.ndarray + coordinates of the lower-left corner of the radar image in WGS84 coordinates. + ur_corner_wgs: np.ndarray + coordinates of the upper-right corner of the radar image in WGS84 coordinates. + type_list: list + List of street types that shall be queried at the OSM database. + logger: Logger + Logging handler. + + Returns + ------- + result: overpy.Result + results of the overpy query to OSM database. + """ + # Initialize overpass connection + api = overpy.Overpass() + + # Request data from API + logger.info(msg='querying OSM database for infra types...') + # query_cmd = "way({},{},{},{}) [""highway=motorway_link""]; (._;>;); out body;" + + query_cmd = "[bbox: {},{},{},{}];(" + for infra_type in type_list: + logger.info(msg='\t - {}'.format(infra_type)) + if infra_type == 'rail': + query_cmd += "way[railway={}];".format(infra_type) + else: + query_cmd += "way[highway={}];".format(infra_type) + + query_cmd += ");(._; >;); out body;" # skel + + cmd = query_cmd.format(ll_corner_wgs[0], ll_corner_wgs[1], + ur_corner_wgs[0], ur_corner_wgs[1]) + logger.info(msg="\n" + cmd + "\n") + result = api.query(cmd) + + if len(result.ways) == 0: + logger.error(msg='Empty OSM query results. No roads or railway tracks found.') + raise ValueError + + logger.info(msg='...done.') + return result + + +def runOsmQueryBridge(*, ll_corner_wgs: np.ndarray, ur_corner_wgs: np.ndarray, bridge_highway: bool, + bridge_railway: bool, logger: Logger) -> overpy.Result: + """Query OSM database for bridges of transport infrastructure within the spatial extent of the radar image. 
+ + Parameters + ---------- + ll_corner_wgs: np.ndarray + coordinates of the lower-left corner of the radar image in WGS84 coordinates. + ur_corner_wgs: np.ndarray + coordinates of the upper-right corner of the radar image in WGS84 coordinates. + bridge_highway: bool + Set true to query highway bridges. + bridge_railway: bool + Set true to query railway bridges. + logger: Logger + Logging handler. + + Returns + ------- + result: overpy.Result + results of the overpy query to OSM database. + """ + # Initialize overpass connection + api = overpy.Overpass() + + # Request data from API + logger.info(msg='querying OSM database for infra types...') + # query_cmd = "way({},{},{},{}) [""highway=motorway_link""]; (._;>;); out body;" + + query_cmd = "[bbox: {},{},{},{}];(" + + if bridge_highway: + logger.info(msg='\t - bridge_highway') + query_cmd += 'way[highway~"^(motorway|motorway_link|trunk|trunk_link)$"][bridge];' + + if bridge_railway: + logger.info(msg='\t - bridge_railway') + query_cmd += 'way[railway=rail][bridge];' + + if (bridge_highway is False) & (bridge_railway is False): + logger.info(msg='\t - all bridges') + query_cmd += 'way[bridge];' + + query_cmd += ");(._; >;); out body;" # skel + + cmd = query_cmd.format(ll_corner_wgs[0], ll_corner_wgs[1], + ur_corner_wgs[0], ur_corner_wgs[1]) + logger.info(msg="\n" + cmd + "\n") + result = api.query(cmd) + + if len(result.ways) == 0: + logger.error(msg='Empty OSM query results. No bridges found.') + raise ValueError + + logger.info(msg='...done.') + return result diff --git a/sarvey/preparation.py b/sarvey/preparation.py new file mode 100644 index 0000000..9a199bf --- /dev/null +++ b/sarvey/preparation.py @@ -0,0 +1,277 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . 
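(A hypothetical call to the query helper defined above in sarvey/osm_utils.py; the bounding
box coordinates are invented for illustration and the call contacts the public Overpass API.)

    result = runOsmQuery(ll_corner_wgs=np.array([52.3, 9.6]),
                         ur_corner_wgs=np.array([52.5, 9.9]),
                         type_list=["motorway", "rail"], logger=logger)
    # Overpass QL string assembled and sent by the function:
    # [bbox: 52.3,9.6,52.5,9.9];(way[highway=motorway];way[railway=rail];);(._; >;); out body;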
+ +"""Preparation module for SARvey.""" +import datetime +import matplotlib.pyplot as plt +import numpy as np +from logging import Logger +from os.path import join + +import mintpy.utils.readfile as readfile + +from sarvey import viewer +import sarvey.utils as ut +from sarvey.objects import CoordinatesUTM, AmplitudeImage, BaseStack, Points +from sarvey.triangulation import PointNetworkTriangulation + + +def createTimeMaskFromDates(*, start_date: str, stop_date: str, date_list: list, logger: Logger): + """Create a mask with selected dates within given time frame. + + Parameters + ---------- + start_date: str + Start date. + stop_date: str + Stop date. + date_list: list + all avaiable dates in the slcStack.h5. + logger: Logger + Logging handler. + + Returns + ------- + time_mask: np.ndarray + mask with True for selected dates. + num_slc: int + number of selected images. + result_date_list: list + list of selected dates. + """ + time_mask = np.ones((len(date_list)), dtype=np.bool_) + date_list = [datetime.date(year=int(d[:4]), month=int(d[4:6]), day=int(d[6:])) for d in date_list] + + if (start_date is None) and (stop_date is None): + # use all images. + result_date_list = [date.isoformat() for date in date_list] + return time_mask, time_mask.shape[0], result_date_list + + if start_date is None: + start_date = min(date_list) + else: + start_date = datetime.date.fromisoformat(start_date) + + if stop_date is None: + stop_date = max(date_list) + else: + stop_date = datetime.date.fromisoformat(stop_date) + + if start_date >= stop_date: + logger.error(msg="Choose start date < stop date!") + raise ValueError + + if stop_date < min(date_list): + logger.error(msg="Stop date is before the first acquired image. Choose a later stop date!") + raise ValueError + + if start_date > max(date_list): + logger.error(msg="Start date is after the last acquired image. Choose an earlier start date!") + raise ValueError + + shift = " " + logger.debug(msg=shift + "{:>10} {:>10}".format(" Date ", "Selected")) + logger.debug(msg=shift + "{:>10} {:>10}".format("__________", "________")) + + result_date_list = list() + for i, date in enumerate(date_list): + if (date < start_date) or (date > stop_date): + time_mask[i] = False + else: + result_date_list.append(date.isoformat()) + val = " x" if time_mask[i] else "" + logger.debug(msg=shift + "{:>10} {:>3}".format(date.isoformat(), val)) + + num_slc = time_mask[time_mask].shape[0] + return time_mask, num_slc, result_date_list + + +def readSlcFromMiaplpy(*, path: str, box: tuple = None, logger: Logger) -> np.ndarray: + """Read SLC data from phase-linking results of Miaplpy. + + Parameters + ---------- + path: str + Path to the phase_series.h5 file. + box: tuple + Bounding Box to read from. + logger: Logger + Logging handler. + + Returns + ------- + slc: np.ndarray + slc stack created from phase-linking results. + """ + logger.info(msg="read phase from MiaplPy results...") + phase = readfile.read(path, datasetName='phase', box=box)[0] + + logger.info(msg="read amplitude from MiaplPy results...") + amp = readfile.read(path, datasetName='amplitude', box=box)[0] + + logger.info(msg="combine phase and amplitude to slc...") + slc = amp * np.exp(phase * 1j) + return slc + + +def readCoherenceFromMiaplpy(*, path: str, box: tuple = None, logger: Logger) -> tuple[np.ndarray, dict]: + """Read the coherence image from phase-linking of MiaplPy. + + Parameters + ---------- + path: str + Path to phase_series.h5 file. + box: tuple + Bounding Box to read from. 
+ logger: Logger + Logging handler. + + Returns + ------- + temp_coh: np.ndarray + temporal coherence image from phase-linking results of MiaplPy. + """ + logger.info(msg="read quality from MiaplPy results...") + temp_coh = readfile.read(path, datasetName='temporalCoherence', box=box)[0][1, :, :] + return temp_coh + + +def selectPixels(*, path: str, selection_method: str, thrsh: float, + grid_size: int = None, bool_plot: bool = False, logger: Logger): + """Select pixels based on temporal coherence. + + Parameters + ---------- + path: str + Path to the directory with the temporal_coherence.h5 file. + selection_method: str + Pixel selection method. Currently, only "temp_coh" is implemented. + thrsh: float + Threshold for pixel selection. + grid_size: int + Grid size for sparse pixel selection. + bool_plot: bool + Plot the selected pixels. + logger: Logger + Logging handler. + + Returns + ------- + cand_mask: np.ndarray + Mask with selected pixels. + """ + quality = None + grid_min_val = None + cand_mask = None + unit = None + cmap = None + # compute candidates + if selection_method == "temp_coh": + temp_coh_obj = BaseStack(file=join(path, "temporal_coherence.h5"), logger=logger) + quality = temp_coh_obj.read(dataset_name="temp_coh") + cand_mask = quality >= thrsh + grid_min_val = False + unit = "Temporal\nCoherence [ ]" + cmap = "autumn" + + if selection_method == "miaplpy": + raise NotImplementedError("This part is not developed yet. MiaplPy data is read in another way.") + # pl_coherence = readCoherenceFromMiaplpy(path=join(path, 'inverted', 'phase_series.h5'), box=None, + # logger=logger) + # cand_mask = pl_coherence >= thrsh + # quality = pl_coherence + # grid_min_val = False + # unit = "Phase-Linking\nCoherence [ ]" + # cmap = "autumn" + + if grid_size is not None: # -> sparse pixel selection + coord_utm_obj = CoordinatesUTM(file_path=join(path, "coordinates_utm.h5"), logger=logger) + coord_utm_obj.open() + box_list = ut.createSpatialGrid(coord_utm_img=coord_utm_obj.coord_utm, + length=coord_utm_obj.coord_utm.shape[1], + width=coord_utm_obj.coord_utm.shape[2], + grid_size=grid_size)[0] + cand_mask_sparse = ut.selectBestPointsInGrid(box_list=box_list, quality=quality, sel_min=grid_min_val) + cand_mask &= cand_mask_sparse + + if bool_plot: + coord_xy = np.array(np.where(cand_mask)).transpose() + bmap_obj = AmplitudeImage(file_path=join(path, "background_map.h5")) + viewer.plotScatter(value=quality[cand_mask], coord=coord_xy, bmap_obj=bmap_obj, ttl="Selected pixels", + unit=unit, s=2, cmap=cmap, vmin=0, vmax=1, logger=logger) + # if grid_size is not None: + # psViewer.plotGridFromBoxList(box_list, ax=ax, edgecolor="k", linewidth=0.2) + plt.tight_layout() + plt.gcf().savefig(join(path, "pic", "selected_pixels_{}_{}.png".format(selection_method, thrsh)), + dpi=300) + plt.close(plt.gcf()) + + return cand_mask + + +def createArcsBetweenPoints(*, point_obj: Points, knn: int = None, max_arc_length: float = np.inf, + logger: Logger) -> np.ndarray: + """Create a spatial network of arcs to triangulate the points. + + All points are triangulated with a Delaunay triangulation. If knn is given, the triangulation is done with the k + nearest neighbors. Too long arcs are removed from the network. If, afterward, the network is not connected, a + delaunay triangulation is performed again to ensure connectivity in the network. + + Parameters + ---------- + point_obj: Points + Point object. + knn: int + Number of nearest neighbors to consider (default: None). 
+ max_arc_length: float + Maximum length of an arc. Longer arcs will be removed. Default: np.inf. + logger: Logger + Logging handler. + + Returns + ------- + arcs: np.ndarray + Arcs of the triangulation containing the indices of the points for each arc. + """ + triang_obj = PointNetworkTriangulation(coord_xy=point_obj.coord_xy, coord_utmxy=point_obj.coord_utm, logger=logger) + + if knn is not None: + triang_obj.triangulateKnn(k=knn) + + triang_obj.triangulateGlobal() + + logger.info(msg="remove arcs with length > {}.".format(max_arc_length)) + triang_obj.removeLongArcs(max_dist=max_arc_length) + + if not triang_obj.isConnected(): + triang_obj.triangulateGlobal() + + logger.info(msg="retrieve arcs from adjacency matrix.") + arcs = triang_obj.getArcsFromAdjMat() + return arcs diff --git a/sarvey/processing.py b/sarvey/processing.py new file mode 100644 index 0000000..dc323f9 --- /dev/null +++ b/sarvey/processing.py @@ -0,0 +1,1145 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . 
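(A hypothetical call to the triangulation helper defined above in sarvey/preparation.py;
the parameter values are invented for illustration.)

    arcs = createArcsBetweenPoints(point_obj=point_obj, knn=30,
                                   max_arc_length=1000.0, logger=logger)
    # arcs is a (num_arcs, 2) array holding the two point indices of each arc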
+ +"""Processing module for SARvey.""" +from os.path import join, exists +import matplotlib.pyplot as plt +from matplotlib import colormaps +import numpy as np +from logging import Logger + +from miaplpy.objects.slcStack import slcStack +from mintpy.utils import readfile +from mintpy.utils.plot import auto_flip_direction + +from sarvey import viewer +from sarvey.densification import densifyNetwork +from sarvey.filtering import estimateAtmosphericPhaseScreen, simpleInterpolation +from sarvey.ifg_network import (DelaunayNetwork, SmallBaselineYearlyNetwork, SmallTemporalBaselinesNetwork, + SmallBaselineNetwork, StarNetwork) +from sarvey.objects import Network, Points, AmplitudeImage, CoordinatesUTM, NetworkParameter, BaseStack +from sarvey.unwrapping import spatialParameterIntegration, \ + parameterBasedNoisyPointRemoval, temporalUnwrapping, spatialUnwrapping, removeGrossOutliers +from sarvey.preparation import createArcsBetweenPoints, selectPixels, createTimeMaskFromDates +import sarvey.utils as ut +from sarvey.coherence import computeIfgsAndTemporalCoherence +from sarvey.triangulation import PointNetworkTriangulation +from sarvey.config import Config + + +class Processing: + """Processing.""" + + def __init__(self, path: str, config: Config, logger: Logger): + """Init.""" + self.path = path + self.config = config + self.logger = logger + + def runPreparation(self): + """RunPreparation.""" + log = self.logger + + msg = "#" * 10 + msg += " PREPARE PROCESSING: LOAD INPUT " + msg += "#" * 10 + log.info(msg=msg) + + # load slc data + slc_stack_obj = slcStack(join(self.config.data_directories.path_inputs, "slcStack.h5")) + slc_stack_obj.open() + + if "ORBIT_DIRECTION" in slc_stack_obj.metadata: + log.info(msg="Orbit direction: {}".format(slc_stack_obj.metadata["ORBIT_DIRECTION"])) + else: + log.warning(msg="No orbit direction found in metadata. 
Add 'ORBIT_DIRECTION' to metadata of 'slcStack.h5'" + "and run again!") + raise AttributeError("No orbit direction found in metadata.") + + time_mask, num_slc, date_list = createTimeMaskFromDates( + start_date=self.config.preparation.start_date, + stop_date=self.config.preparation.stop_date, + date_list=slc_stack_obj.dateList, + logger=log + ) + log.info(msg=f"Start date: {date_list[0]}") + log.info(msg=f"Stop date: {date_list[-1]}") + log.info(msg=f"Number of SLC: {num_slc}") + + msg = "#" * 10 + msg += " DESIGN IFG NETWORK " + msg += "#" * 10 + log.info(msg=msg) + + ifg_net_obj = None + if self.config.preparation.network_type == "star": + ifg_net_obj = StarNetwork() + ifg_net_obj.configure( + pbase=slc_stack_obj.pbase[time_mask], + tbase=slc_stack_obj.tbase[time_mask], + ref_idx=int(np.floor(num_slc/2)), + dates=date_list + ) + log.info(msg="Star ifg network") + elif self.config.preparation.network_type == "sb": + ifg_net_obj = SmallBaselineNetwork() + ifg_net_obj.configure(pbase=slc_stack_obj.pbase[time_mask], + tbase=slc_stack_obj.tbase[time_mask], + num_link=self.config.preparation.num_ifgs, + max_tbase=self.config.preparation.max_tbase, + dates=date_list) + log.info(msg="Small baseline network") + elif self.config.preparation.network_type == "stb": + ifg_net_obj = SmallTemporalBaselinesNetwork() + ifg_net_obj.configure( + pbase=slc_stack_obj.pbase[time_mask], + tbase=slc_stack_obj.tbase[time_mask], + num_link=self.config.preparation.num_ifgs, + dates=date_list + ) + log.info(msg="Small temporal baseline network") + elif self.config.preparation.network_type == "stb_year": + ifg_net_obj = SmallBaselineYearlyNetwork() + ifg_net_obj.configure( + pbase=slc_stack_obj.pbase[time_mask], + tbase=slc_stack_obj.tbase[time_mask], + num_link=self.config.preparation.num_ifgs, + dates=date_list + ) + log.info(msg="Small temporal baseline and yearly ifg network") + elif self.config.preparation.network_type == "delaunay": + ifg_net_obj = DelaunayNetwork() + ifg_net_obj.configure( + pbase=slc_stack_obj.pbase[time_mask], + tbase=slc_stack_obj.tbase[time_mask], + dates=date_list + ) + log.info(msg="Delaunay ifg network") + + ifg_net_obj.writeToFile(path=join(self.path, "ifg_network.h5"), logger=log) + log.info(msg=f"temporal baselines: {np.unique(np.round(np.abs(ifg_net_obj.tbase_ifg) * 365.25).astype(int))}") + + fig = ifg_net_obj.plot() + fig.savefig(join(self.path, "pic", "step_0_interferogram_network.png"), dpi=300) + plt.close(fig) + + msg = "#" * 10 + msg += f" GENERATE STACK OF {ifg_net_obj.num_ifgs} INTERFEROGRAMS & ESTIMATE TEMPORAL COHERENCE " + msg += "#" * 10 + log.info(msg=msg) + + box_list, num_patches = ut.preparePatches(num_patches=self.config.processing.num_patches, + width=slc_stack_obj.width, + length=slc_stack_obj.length, + logger=log) + + # create placeholder in result file for datasets which are stored patch-wise + dshape = (slc_stack_obj.length, slc_stack_obj.width, ifg_net_obj.num_ifgs) + ifg_stack_obj = BaseStack(file=join(self.path, "ifg_stack.h5"), logger=log) + ifg_stack_obj.prepareDataset(dataset_name="ifgs", dshape=dshape, dtype=np.csingle, + metadata=slc_stack_obj.metadata, mode='w', chunks=(30, 30, ifg_net_obj.num_ifgs)) + + # create placeholder in result file for datasets which are stored patch-wise + temp_coh_obj = BaseStack(file=join(self.path, "temporal_coherence.h5"), logger=log) + dshape = (slc_stack_obj.length, slc_stack_obj.width) + temp_coh_obj.prepareDataset(dataset_name="temp_coh", metadata=slc_stack_obj.metadata, + dshape=dshape, dtype=np.float32, mode="w", 
chunks=True) + + mean_amp_img = computeIfgsAndTemporalCoherence( + path_temp_coh=join(self.path, "temporal_coherence.h5"), + path_ifgs=join(self.path, "ifg_stack.h5"), + path_slc=join(self.config.data_directories.path_inputs, "slcStack.h5"), + ifg_array=np.array(ifg_net_obj.ifg_list), + time_mask=time_mask, + wdw_size=self.config.preparation.filter_wdw_size, + num_boxes=num_patches, + box_list=box_list, + num_cores=self.config.processing.num_cores, + logger=log + ) + + # store auxilliary datasets for faster access during processing + if not exists(join(self.path, "coordinates_utm.h5")): + coord_utm_obj = CoordinatesUTM(file_path=join(self.path, "coordinates_utm.h5"), logger=self.logger) + coord_utm_obj.prepare(input_path=join(self.config.data_directories.path_inputs, "geometryRadar.h5")) + del coord_utm_obj + + if not exists(join(self.path, "background_map.h5")): + bmap_obj = AmplitudeImage(file_path=join(self.path, "background_map.h5")) + bmap_obj.prepare(slc_stack_obj=slc_stack_obj, img=mean_amp_img, logger=self.logger) + ax = bmap_obj.plot(logger=self.logger) + img = ax.get_images()[0] + cbar = plt.colorbar(img, pad=0.03, shrink=0.5) + cbar.ax.set_visible(False) + plt.tight_layout() + plt.gcf().savefig(join(self.path, "pic", "step_0_amplitude_image.png"), dpi=300) + plt.close(plt.gcf()) + del bmap_obj + del mean_amp_img + + temp_coh = temp_coh_obj.read(dataset_name="temp_coh") + + fig = plt.figure(figsize=[15, 5]) + ax = fig.add_subplot() + im = ax.imshow(temp_coh, cmap=colormaps["gray"], vmin=0, vmax=1) + auto_flip_direction(slc_stack_obj.metadata, ax=ax, print_msg=True) + ax.set_xlabel("Range") + ax.set_ylabel("Azimuth") + plt.colorbar(im, pad=0.03, shrink=0.5) + plt.title("Temporal coherence") + plt.tight_layout() + fig.savefig(join(self.path, "pic", "step_0_temporal_phase_coherence.png"), dpi=300) + plt.close(fig) + + def runConsistencyCheck(self): + """RunConsistencyCheck.""" + # 0) select candidates for first order points + ifg_stack_obj = BaseStack(file=join(self.path, "ifg_stack.h5"), logger=self.logger) + length, width, num_ifgs = ifg_stack_obj.getShape(dataset_name="ifgs") + + cand_mask1 = selectPixels( + path=self.path, selection_method="temp_coh", thrsh=self.config.consistency_check.coherence_p1, + grid_size=self.config.consistency_check.grid_size, bool_plot=True, logger=self.logger + ) + + bmap_obj = AmplitudeImage(file_path=join(self.path, "background_map.h5")) + mask_valid_area = ut.detectValidAreas(bmap_obj=bmap_obj, logger=self.logger) + + if self.config.consistency_check.spatial_mask_file_p1 is not None: + path_mask_aoi = join(self.config.consistency_check.spatial_mask_file_p1) + self.logger.info(msg="load mask for area of interest from: {}.".format(path_mask_aoi)) + mask_aoi = readfile.read(path_mask_aoi, datasetName='mask')[0].astype(np.bool_) + mask_valid_area &= mask_aoi + else: + self.logger.info(msg="No mask for area of interest given.") + + cand_mask1 &= mask_valid_area + + fig = plt.figure(figsize=[15, 5]) + ax = fig.add_subplot() + ax.imshow(mask_valid_area, cmap=plt.cm.get_cmap("gray"), alpha=0.5, zorder=10, vmin=0, vmax=1) + bmap_obj.plot(ax=ax, logger=self.logger) + coord_xy = np.array(np.where(cand_mask1)).transpose() + val = np.ones_like(cand_mask1) + sc = ax.scatter(coord_xy[:, 1], coord_xy[:, 0], c=val[cand_mask1], s=0.5, cmap=plt.get_cmap("autumn_r"), + vmin=1, vmax=2) # set min, max to ensure that points are yellow + cbar = plt.colorbar(sc, pad=0.03, shrink=0.5) + cbar.ax.set_visible(False) # make size of axis consistent with all others + 
plt.tight_layout() + plt.title("Mask for first order point set") + fig.savefig(join(self.path, "pic", "step_1_mask_p1.png"), dpi=300) + plt.close(fig) + + if cand_mask1[cand_mask1].shape[0] == 0: + self.logger.error("No points selected for first-order points. Modify the coherence threshold.") + raise ValueError + + # create unique point_id throughout the image to make it possible to mix first-order and second-order points + # in the densification step. point_id is ordered so that it fits to anydata[mask].ravel() when loading the data. + point_id_img = np.arange(0, length * width).reshape((length, width)) + + point_obj = Points(file_path=join(self.path, "p1_ifg_wr.h5"), logger=self.logger) + point_id1 = point_id_img[cand_mask1] + + point_obj.prepare( + point_id=point_id1, + coord_xy=coord_xy, + path_inputs=self.config.data_directories.path_inputs + ) + + point_obj.phase = ut.readPhasePatchwise(stack_obj=ifg_stack_obj, dataset_name="ifgs", + num_patches=self.config.processing.num_patches, cand_mask=cand_mask1, + point_id_img=point_id_img, logger=self.logger) + + point_obj.writeToFile() + del ifg_stack_obj, cand_mask1 + + # 1) create spatial network + arcs = createArcsBetweenPoints(point_obj=point_obj, + knn=self.config.consistency_check.knn, + max_arc_length=self.config.consistency_check.max_arc_length, + logger=self.logger) + net_obj = Network(file_path=join(self.path, "point_network.h5"), logger=self.logger) + net_obj.computeArcObservations( + point_obj=point_obj, + arcs=arcs + ) + net_obj.writeToFile() + net_obj.open(path_inputs=self.config.data_directories.path_inputs) # to retrieve external data + + demerr, vel, gamma = temporalUnwrapping(ifg_net_obj=point_obj.ifg_net_obj, + net_obj=net_obj, + wavelength=point_obj.wavelength, + velocity_bound=self.config.consistency_check.velocity_bound, + demerr_bound=self.config.consistency_check.dem_error_bound, + num_samples=self.config.consistency_check.num_samples, + num_cores=self.config.processing.num_cores, + logger=self.logger) + + net_par_obj = NetworkParameter(file_path=join(self.path, "point_network_parameter.h5"), + logger=self.logger) + net_par_obj.prepare( + net_obj=net_obj, + demerr=demerr, + vel=vel, + gamma=gamma + ) + net_par_obj.writeToFile() + + # 3) spatial unwrapping of the arc network and removal of outliers (arcs and points) + bmap_obj = AmplitudeImage(file_path=join(self.path, "background_map.h5")) + thrsh_visualisation = 0.7 + + try: + ax = bmap_obj.plot(logger=self.logger) + arc_mask = net_par_obj.gamma.reshape(-1) <= thrsh_visualisation + ax, cbar = viewer.plotColoredPointNetwork(x=point_obj.coord_xy[:, 1], y=point_obj.coord_xy[:, 0], + arcs=net_par_obj.arcs[arc_mask, :], + val=net_par_obj.gamma[arc_mask], + ax=ax, linewidth=1, cmap_name="autumn", clim=(0, 1)) + ax.set_title(f"Coherence from temporal unwrapping\n(only arcs with gamma <= {thrsh_visualisation} shown)") + fig = ax.get_figure() + plt.tight_layout() + fig.savefig(join(self.path, "pic", "step_1_arc_coherence.png"), dpi=300) + except BaseException as e: + self.logger.exception(msg="NOT POSSIBLE TO PLOT SPATIAL NETWORK OF POINTS. 
{}".format(e)) + + net_par_obj, point_id, coord_xy, design_mat = removeGrossOutliers( + net_obj=net_par_obj, + point_id=point_obj.point_id, + coord_xy=point_obj.coord_xy, + min_num_arc=self.config.consistency_check.min_num_arc, + quality_thrsh=self.config.consistency_check.arc_coherence, + logger=self.logger + ) + + try: + ax = bmap_obj.plot(logger=self.logger) + arc_mask = net_par_obj.gamma.reshape(-1) <= thrsh_visualisation + ax, cbar = viewer.plotColoredPointNetwork(x=coord_xy[:, 1], y=coord_xy[:, 0], + arcs=net_par_obj.arcs[arc_mask, :], + val=net_par_obj.gamma[arc_mask], + ax=ax, linewidth=1, cmap_name="autumn", clim=(0, 1)) + ax.set_title(f"Coherence from temporal unwrapping\n(only arcs with gamma <= {thrsh_visualisation} shown)") + fig = ax.get_figure() + plt.tight_layout() + fig.savefig(join(self.path, "pic", "step_1_arc_coherence_reduced.png"), dpi=300) + except BaseException as e: + self.logger.exception(msg="NOT POSSIBLE TO PLOT SPATIAL NETWORK OF POINTS. {}".format(e)) + + spatial_ref_id, point_id, net_par_obj = parameterBasedNoisyPointRemoval( + net_par_obj=net_par_obj, + point_id=point_id, + coord_xy=coord_xy, + design_mat=design_mat, + bmap_obj=bmap_obj, + bool_plot=True, + logger=self.logger + ) + + net_par_obj.writeToFile() # arcs were removed. obj still needed in next step. + point_obj.removePoints(keep_id=point_id, path_inputs=self.config.data_directories.path_inputs) + point_obj.writeToFile() + + def runUnwrappingTimeAndSpace(self): + """RunTemporalAndSpatialUnwrapping.""" + net_par_obj = NetworkParameter(file_path=join(self.path, "point_network_parameter.h5"), + logger=self.logger) + net_par_obj.open(path_inputs=self.config.data_directories.path_inputs) + + point_obj = Points(file_path=join(self.path, "p1_ifg_unw.h5"), logger=self.logger) + point_obj.open( + other_file_path=join(self.path, "p1_ifg_wr.h5"), + path_inputs=self.config.data_directories.path_inputs + ) + + # reference point can be set arbitrarily, because outliers are removed. 
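        # (The arc observations are double differences, so the spatial integration below
        # recovers DEM error and velocity per point only up to a common constant, which
        # is fixed by the point chosen as spatial_ref_idx.)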
+ spatial_ref_idx = 0 + + bmap_obj = AmplitudeImage(file_path=join(self.path, "background_map.h5")) + + self.logger.info(msg="Integrate parameters from arcs to points.") + self.logger.info(msg="Integrate DEM error.") + demerr = spatialParameterIntegration(val_arcs=net_par_obj.demerr, + arcs=net_par_obj.arcs, + coord_xy=point_obj.coord_xy, + weights=net_par_obj.gamma, + spatial_ref_idx=spatial_ref_idx, logger=self.logger) + + # demerr = spatialParameterIntegrationIterative(val_arcs=net_par_obj.demerr, all_arcs=net_par_obj.arcs, + # coord_xy=point_obj.coord_xy, all_weights=net_par_obj.gamma, + # spatial_ref_idx=spatial_ref_idx, + # res_tol=5.0, + # max_rm_fraction=0.001) + fig = viewer.plotScatter(value=-demerr, coord=point_obj.coord_xy, ttl="Parameter integration: DEM error in [m]", + bmap_obj=bmap_obj, s=3.5, cmap="jet_r", symmetric=True, logger=self.logger)[0] + fig.savefig(join(self.path, "pic", "step_2_estimation_dem_error.png"), dpi=300) + plt.close(fig) + + self.logger.info(msg="Integrate mean velocity.") + vel = spatialParameterIntegration(val_arcs=net_par_obj.vel, + arcs=net_par_obj.arcs, + coord_xy=point_obj.coord_xy, + weights=net_par_obj.gamma, + spatial_ref_idx=spatial_ref_idx, logger=self.logger) + + # vel = spatialParameterIntegrationIterative(val_arcs=net_par_obj.vel, all_arcs=net_par_obj.arcs, + # coord_xy=point_obj.coord_xy, + # all_weights=net_par_obj.gamma, + # spatial_ref_idx=spatial_ref_idx, + # res_tol=1.0, + # max_rm_fraction=0.001) + fig = viewer.plotScatter(value=-vel, coord=point_obj.coord_xy, + ttl="Parameter integration: mean velocity in [m / year]", + bmap_obj=bmap_obj, s=3.5, cmap="jet_r", symmetric=True, logger=self.logger)[0] + fig.savefig(join(self.path, "pic", "step_2_estimation_velocity.png"), dpi=300) + plt.close(fig) + + self.logger.info(msg="Remove phase contributions from mean velocity" + " and DEM error from wrapped phase of points.") + pred_phase_demerr, pred_phase_vel = ut.predictPhase( + obj=point_obj, vel=vel, demerr=demerr, + ifg_space=True, logger=self.logger + ) + pred_phase = pred_phase_demerr + pred_phase_vel + + wr_phase = point_obj.phase + wr_res_phase = np.angle(np.exp(1j * wr_phase) * np.conjugate(np.exp(1j * pred_phase))) + + if self.config.unwrapping.use_temporal_unwrapping_arcs: + arcs = net_par_obj.arcs # use this to avoid unreliable connections. Takes a bit longer. + else: + triang_obj = PointNetworkTriangulation(coord_xy=point_obj.coord_xy, coord_utmxy=point_obj.coord_utm, + logger=self.logger) + triang_obj.triangulateGlobal() + triang_obj.triangulateKnn(k=self.config.unwrapping.knn) + arcs = triang_obj.getArcsFromAdjMat() + + unw_res_phase = spatialUnwrapping(num_ifgs=point_obj.ifg_net_obj.num_ifgs, + num_points=point_obj.num_points, + phase=wr_res_phase, + method=self.config.processing.unwrapping_method, + edges=arcs, + num_cores=self.config.processing.num_cores, logger=self.logger) + + # use same reference point for spatial integration and Puma unwrapping before recombining phases + unw_res_phase = unw_res_phase - unw_res_phase[spatial_ref_idx, :] + + self.logger.info(msg="Add phase contributions from mean velocity and DEM error back to " + "spatially unwrapped residual phase.") + unw_phase = unw_res_phase + pred_phase + # unw_phase = unw_res_phase # debug: don't add phase back. 
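# --- Illustrative aside (sketch only, not part of the SARvey implementation) ---
# The wrapped residual above is formed by multiplying the observed phasor with the complex
# conjugate of the predicted phasor, which keeps the difference wrapped to (-pi, pi] without
# an explicit modulo. A tiny self-contained check with made-up phase values:
import numpy as np

wr_phase_demo = np.array([3.0, -2.5, 1.2])       # observed wrapped phase [rad], toy values
pred_phase_demo = np.array([2.8, -3.0, 0.4])     # predicted model phase [rad], toy values

res_complex = np.angle(np.exp(1j * wr_phase_demo) * np.conjugate(np.exp(1j * pred_phase_demo)))
res_modulo = np.mod(wr_phase_demo - pred_phase_demo + np.pi, 2 * np.pi) - np.pi

assert np.allclose(res_complex, res_modulo)      # both formulations yield the same wrapped residual
print(res_complex)
# --- End of aside ---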
+ + # adjust reference to peak of histogram + point_obj.phase = unw_phase + vel = ut.estimateParameters(obj=point_obj, ifg_space=True)[0] + point_obj.phase = ut.setReferenceToPeakOfHistogram(phase=unw_phase, vel=vel, num_bins=300) + + point_obj.writeToFile() + + phase_ts = ut.invertIfgNetwork( + phase=unw_phase, + num_points=point_obj.num_points, + ifg_net_obj=point_obj.ifg_net_obj, + num_cores=1, # self.config.processing.num_cores, + ref_idx=0, + logger=self.logger + ) + point_obj = Points(file_path=join(self.path, "p1_ts.h5"), logger=self.logger) + point_obj.open( + other_file_path=join(self.path, "p1_ifg_unw.h5"), + path_inputs=self.config.data_directories.path_inputs + ) + point_obj.phase = phase_ts + point_obj.writeToFile() + + def runUnwrappingSpace(self): + """RunSpatialUnwrapping.""" + point_obj = Points(file_path=join(self.path, "p1_ifg_unw.h5"), logger=self.logger) + point_obj.open( + other_file_path=join(self.path, "p1_ifg_wr.h5"), + path_inputs=self.config.data_directories.path_inputs + ) + + if self.config.unwrapping.use_temporal_unwrapping_arcs: + net_par_obj = NetworkParameter(file_path=join(self.path, "point_network_parameter.h5"), + logger=self.logger) + net_par_obj.open(path_inputs=self.config.data_directories.path_inputs) + arcs = net_par_obj.arcs # use this to avoid unreliable connections. Takes a bit longer. + else: + # re-triangulate with delaunay to make PUMA faster + triang_obj = PointNetworkTriangulation(coord_xy=point_obj.coord_xy, coord_utmxy=point_obj.coord_utm, + logger=self.logger) + triang_obj.triangulateGlobal() + triang_obj.triangulateKnn(k=self.config.unwrapping.knn) + arcs = triang_obj.getArcsFromAdjMat() + + bmap_obj = AmplitudeImage(file_path=join(self.path, "background_map.h5")) + ax = bmap_obj.plot(logger=self.logger) + ax, cbar = viewer.plotColoredPointNetwork(x=point_obj.coord_xy[:, 1], + y=point_obj.coord_xy[:, 0], + arcs=arcs, + val=np.zeros(arcs.shape[0], dtype=np.float32), + ax=ax, linewidth=0.5, cmap_name="hot", clim=(0, 1)) + cbar.ax.set_visible(False) + ax.set_xlabel("Range") + ax.set_ylabel("Azimuth") + ax.set_title("Unwrapping Network") + plt.tight_layout() + plt.gcf().savefig(join(self.path, "pic", "step_2_unwrapping_network_p1.png"), dpi=300) + plt.close(plt.gcf()) + + unw_phase = spatialUnwrapping(num_ifgs=point_obj.ifg_net_obj.num_ifgs, + num_points=point_obj.num_points, + phase=point_obj.phase, + method=self.config.processing.unwrapping_method, + edges=arcs, + num_cores=self.config.processing.num_cores, logger=self.logger) + + # adjust reference to peak of histogram + point_obj.phase = unw_phase + vel = ut.estimateParameters(obj=point_obj, ifg_space=True)[0] + point_obj.phase = ut.setReferenceToPeakOfHistogram(phase=unw_phase, vel=vel, num_bins=300) + + point_obj.writeToFile() + del point_obj + + point_obj = Points(file_path=join(self.path, "p1_ts.h5"), logger=self.logger) + point_obj.open( + other_file_path=join(self.path, "p1_ifg_wr.h5"), + path_inputs=self.config.data_directories.path_inputs + ) + + # for sbas the ifg network needs to be inverted to get the phase time series + phase_ts = ut.invertIfgNetwork(phase=unw_phase, num_points=point_obj.num_points, + ifg_net_obj=point_obj.ifg_net_obj, + num_cores=1, # self.config.processing.num_cores, + ref_idx=0, + logger=self.logger) + + point_obj.phase = phase_ts + point_obj.writeToFile() + + def runFiltering(self): + """RunFiltering.""" + coh_value = int(self.config.filtering.coherence_p2 * 100) + + # create output file which contains filtered phase time series + point1_obj = 
Points(file_path=join(self.path, "p1_ts_filt.h5"), logger=self.logger) + point1_obj.open( + other_file_path=join(self.path, "p1_ts.h5"), + path_inputs=self.config.data_directories.path_inputs + ) + p1_mask = point1_obj.createMask() # used later for selecting psCand2 when a spatial mask AOI is given. + + # select only pixels which have low phase noise and are well distributed + mask = point1_obj.createMask() + + bmap_obj = AmplitudeImage(file_path=join(self.path, "background_map.h5")) + + # temporal auto-correlation + auto_corr_img = np.zeros_like(mask, np.float64) + + vel, demerr, _, _, _, residuals = ut.estimateParameters(obj=point1_obj, ifg_space=False) + + if self.config.filtering.use_moving_points: + auto_corr = ut.temporalAutoCorrelation(residuals=residuals, lag=1).reshape(-1) + else: + # remove DEM error, but not velocity before estimating the temporal autocorrelation + pred_phase_demerr = ut.predictPhase( + obj=point1_obj, vel=vel, demerr=demerr, ifg_space=False, logger=self.logger)[0] + phase_wo_demerr = point1_obj.phase - pred_phase_demerr + auto_corr = ut.temporalAutoCorrelation(residuals=phase_wo_demerr, lag=1).reshape(-1) + + auto_corr_img[mask] = auto_corr + auto_corr_img[~mask] = np.inf + + fig = viewer.plotScatter(value=auto_corr, coord=point1_obj.coord_xy, bmap_obj=bmap_obj, + ttl="Temporal autocorrelation", unit="[ ]", s=3.5, cmap="autumn_r", + vmin=0, vmax=1, logger=self.logger)[0] + fig.savefig(join(self.path, "pic", "step_3_temporal_autocorrelation.png"), dpi=300) + plt.close(fig) + + # create grid + coord_utm_obj = CoordinatesUTM(file_path=join(self.path, "coordinates_utm.h5"), logger=self.logger) + coord_utm_obj.open() + + # remove points based on threshold + mask_thrsh = auto_corr_img <= self.config.filtering.max_auto_corr + auto_corr_img[~mask_thrsh] = np.inf + + box_list, num_box = ut.createSpatialGrid(coord_utm_img=coord_utm_obj.coord_utm, length=point1_obj.length, + width=point1_obj.width, + grid_size=self.config.filtering.grid_size) + + cand_mask_sparse = ut.selectBestPointsInGrid(box_list=box_list, quality=auto_corr_img, sel_min=True) + + num_p1_points_for_filtering = cand_mask_sparse[cand_mask_sparse].shape[0] + if num_p1_points_for_filtering < 10: + self.logger.warning(msg=f"Only {num_p1_points_for_filtering} points for APS filtering selected. Filtering " + f"results are probably not reliable. You can e.g. 
increase 'max_auto_corr' or try " + f"to increase the number of first-order points during step 1 and 2.") + + point_id_img = np.arange(0, point1_obj.length * point1_obj.width).reshape( + (point1_obj.length, point1_obj.width)) + keep_id = point_id_img[np.where(cand_mask_sparse)] + point1_obj.removePoints(keep_id=keep_id, path_inputs=self.config.data_directories.path_inputs) + point1_obj.writeToFile() # to be able to load aps1 from this file having the same set of points + + # store plot for quality control during processing + fig, ax = viewer.plotScatter(value=auto_corr_img[cand_mask_sparse], coord=point1_obj.coord_xy, + bmap_obj=bmap_obj, ttl="Selected pixels for APS estimation", + unit="Auto-correlation\n[ ]", s=5, cmap="autumn_r", vmin=0, vmax=1, + logger=self.logger)[:2] + viewer.plotGridFromBoxList(box_list=box_list, ax=ax, edgecolor="k", linewidth=0.2) + fig.savefig(join(self.path, "pic", "step_3_stable_points.png"), dpi=300) + plt.close(fig) + + if self.config.filtering.use_moving_points: + # recompute the residuals, because now there are fewer points in the obj + phase_for_aps_filtering = ut.estimateParameters(obj=point1_obj, ifg_space=False)[-1] + else: + phase_for_aps_filtering = point1_obj.phase + + # create output which contains only the atmospheric phase screen (no parameters) + aps1_obj = Points(file_path=join(self.path, "p1_aps.h5"), logger=self.logger) + aps1_obj.open( + other_file_path=join(self.path, "p1_ts_filt.h5"), + path_inputs=self.config.data_directories.path_inputs + ) + + # select second-order points + cand_mask2 = selectPixels( + path=self.path, selection_method="temp_coh", + thrsh=self.config.filtering.coherence_p2, + grid_size=None, bool_plot=True, + logger=self.logger + ) # first-order points are included in second-order points + + if self.config.phase_linking.phase_linking: + # read PL results + pl_coh = readfile.read(join(self.config.phase_linking.path_inverted, "phase_series.h5"), + datasetName='temporalCoherence')[0] + pl_coh = pl_coh[1, :, :] + siblings = readfile.read(join(self.config.phase_linking.path_inverted, "phase_series.h5"), + datasetName='shp')[0] + + if self.config.phase_linking.use_ps: + mask_ps = readfile.read(self.config.phase_linking.path_mask_file_ps, + datasetName='mask')[0].astype(np.bool_) + cand_mask_pl = (pl_coh > self.config.filtering.coherence_p2) | mask_ps + else: + cand_mask_pl = (pl_coh > self.config.filtering.coherence_p2) + # remove ps candidates, because the ps detection strategy in miaplpy seems to be biased. 
+ cand_mask_pl[siblings <= self.config.phase_linking.num_siblings] = False + + if self.config.phase_linking.spatial_mask_file_pl is not None: + path_mask_pl_aoi = join(self.config.phase_linking.spatial_mask_file_pl) + self.logger.info(msg="load mask for area of interest from: {}.".format(path_mask_pl_aoi)) + mask_pl_aoi = readfile.read(path_mask_pl_aoi, datasetName='mask')[0].astype(np.bool_) + + fig = plt.figure(figsize=[15, 5]) + ax = fig.add_subplot() + ax.imshow(mask_pl_aoi, cmap=plt.cm.get_cmap("gray"), alpha=0.5, zorder=10, vmin=0, vmax=1) + bmap_obj.plot(ax=ax, logger=self.logger) + coord_xy = np.array(np.where(cand_mask_pl)).transpose() + val = np.ones_like(cand_mask_pl) + sc = ax.scatter(coord_xy[:, 1], coord_xy[:, 0], c=val[cand_mask_pl], s=0.5, + cmap=plt.get_cmap("autumn_r"), + vmin=1, vmax=2) # set min, max to ensure that points are yellow + cbar = plt.colorbar(sc, pad=0.03, shrink=0.5) + cbar.ax.set_visible(False) # make size of axis consistent with all others + plt.tight_layout() + plt.title("Mask for phase linking points") + fig.savefig(join(self.path, "pic", "step_3_mask_coh{}_phase_linking.png".format(coh_value)), dpi=300) + plt.close(fig) + + # mask points after plotting, so that available coherent points are visible in figure + cand_mask_pl[~mask_pl_aoi] = False + + # combine phase linking coherence with TPC cand_mask2 + cand_mask2 = cand_mask2 | cand_mask_pl + + mask_valid_area = ut.detectValidAreas(bmap_obj=bmap_obj, logger=self.logger) + + if self.config.filtering.spatial_mask_file_p2 is not None: + path_mask_aoi = join(self.config.filtering.spatial_mask_file_p2) + self.logger.info(msg="load mask for area of interest from: {}.".format(path_mask_aoi)) + mask_aoi = readfile.read(path_mask_aoi, datasetName='mask')[0].astype(np.bool_) + mask_valid_area &= mask_aoi + # todo: add unstable points from p1 for densification + else: + self.logger.info(msg="No mask for area of interest given.") + + cand_mask2[p1_mask] = True # add all selected 1.order points to avoid spatial gaps in 2D unwrapping + # cand_mask2[cand_mask_sparse] = True # add only stable points from 1.order points + + cand_mask2 &= mask_valid_area + + fig = plt.figure(figsize=[15, 5]) + ax = fig.add_subplot() + ax.imshow(mask_valid_area, cmap=plt.cm.get_cmap("gray"), alpha=0.5, zorder=10, vmin=0, vmax=1) + bmap_obj.plot(ax=ax, logger=self.logger) + coord_xy = np.array(np.where(cand_mask2)).transpose() + val = np.ones_like(cand_mask2) + sc = ax.scatter(coord_xy[:, 1], coord_xy[:, 0], c=val[cand_mask2], s=0.5, cmap=plt.get_cmap("autumn_r"), + vmin=1, vmax=2) # set min, max to ensure that points are yellow + cbar = plt.colorbar(sc, pad=0.03, shrink=0.5) + cbar.ax.set_visible(False) # make size of axis consistent with all others + plt.tight_layout() + plt.title("Mask for dense point set") + fig.savefig(join(self.path, "pic", "step_3_mask_coh{}.png".format(coh_value)), dpi=300) + plt.close(fig) + + point2_obj = Points(file_path=join(self.path, "coh{}_ifg_wr.h5".format(coh_value)), logger=self.logger) + coord_xy = np.array(np.where(cand_mask2)).transpose() + point_id2 = point_id_img[cand_mask2] + point2_obj.prepare( + point_id=point_id2, + coord_xy=coord_xy, + path_inputs=self.config.data_directories.path_inputs + ) + + ifg_stack_obj = BaseStack(file=join(self.path, "ifg_stack.h5"), logger=self.logger) + + point2_obj.phase = ut.readPhasePatchwise(stack_obj=ifg_stack_obj, dataset_name="ifgs", + num_patches=self.config.processing.num_patches, cand_mask=cand_mask2, + point_id_img=point_id_img, logger=self.logger) + + if 
self.config.phase_linking.phase_linking: + self.logger.info(msg="read phase from MiaplPy results...") + phase_linking_obj = BaseStack(file=join(self.config.phase_linking.path_inverted, "phase_series.h5"), + logger=self.logger) + + pl_phase = ut.readPhasePatchwise( + stack_obj=phase_linking_obj, dataset_name="phase", + num_patches=self.config.processing.num_patches, + cand_mask=cand_mask2, + point_id_img=point_id_img, logger=self.logger + ) + + # subset to time span + slc_stack_obj = slcStack(join(self.config.data_directories.path_inputs, "slcStack.h5")) + slc_stack_obj.open() + time_mask = createTimeMaskFromDates( + start_date=self.config.preparation.start_date, + stop_date=self.config.preparation.stop_date, + date_list=slc_stack_obj.dateList, + logger=self.logger + )[0] + pl_phase = pl_phase[:, time_mask] + + pl_ifgs = np.zeros((point2_obj.num_points, point2_obj.ifg_net_obj.num_ifgs), dtype=np.float32) + + c = 0 + for i, j in np.asarray(point1_obj.ifg_net_obj.ifg_list): + pl_ifgs[:, c] = np.angle(np.exp(1j * pl_phase[:, i]) * np.conjugate(np.exp(1j * pl_phase[:, j]))) + c += 1 + + # change only phase for good phase linking pixels and keep original phase for good tpc pixels + mask_pl = cand_mask_pl[cand_mask2] + point2_obj.phase[mask_pl] = pl_ifgs[mask_pl] + + point2_obj.writeToFile() + del point2_obj, ifg_stack_obj + + aps2_obj = Points(file_path=join(self.path, "coh{}_aps.h5".format(coh_value)), logger=self.logger) + aps2_obj.open( + other_file_path=join(self.path, "coh{}_ifg_wr.h5".format(coh_value)), + path_inputs=self.config.data_directories.path_inputs + ) + + if self.config.filtering.skip_filtering: + msg = "#" * 10 + msg += " SKIP ATMOSPHERIC FILTERING! " + msg += "#" * 10 + self.logger.info(msg=msg) + num_points1 = phase_for_aps_filtering.shape[0] + num_points2 = aps2_obj.coord_utm.shape[0] + num_time = phase_for_aps_filtering.shape[1] + aps1_phase = np.zeros((num_points1, num_time), dtype=np.float32) + aps2_phase = np.zeros((num_points2, num_time), dtype=np.float32) + else: + # spatial filtering of points with linear motion only (no non-linear motion) + if self.config.filtering.interpolation_method == "kriging": + aps1_phase, aps2_phase = estimateAtmosphericPhaseScreen( + residuals=phase_for_aps_filtering, + coord_utm1=point1_obj.coord_utm, + coord_utm2=aps2_obj.coord_utm, + num_cores=self.config.processing.num_cores, + bool_plot=False, + logger=self.logger + ) + else: + aps1_phase, aps2_phase = simpleInterpolation( + residuals=phase_for_aps_filtering, + coord_utm1=point1_obj.coord_utm, + coord_utm2=aps2_obj.coord_utm, + interp_method=self.config.filtering.interpolation_method + ) + + point1_obj.phase -= aps1_phase + point1_obj.writeToFile() + + aps1_obj.phase = aps1_phase + aps2_obj.phase = aps2_phase + aps1_obj.writeToFile() + aps2_obj.writeToFile() + + def runDensificationTimeAndSpace(self): + """RunDensificationTimeAndSpace.""" + coh_value = int(self.config.filtering.coherence_p2 * 100) + + point2_obj = Points(file_path=join(self.path, "coh{}_ifg_unw.h5".format(coh_value)), logger=self.logger) + point2_obj.open( + other_file_path=join(self.path, "coh{}_ifg_wr.h5".format(coh_value)), + path_inputs=self.config.data_directories.path_inputs + ) # wrapped phase + + # estimate parameters from unwrapped phase + point1_obj = Points(file_path=join(self.path, "p1_ifg_unw.h5"), logger=self.logger) + point1_obj.open(path_inputs=self.config.data_directories.path_inputs) + vel_p1, demerr_p1 = ut.estimateParameters(obj=point1_obj, ifg_space=True)[:2] + + # load wrapped phase to remove known 
components for unwrapping p2 points + point1_obj = Points(file_path=join(self.path, "p1_ifg_wr.h5"), logger=self.logger) # wrapped phase! + point1_obj.open(path_inputs=self.config.data_directories.path_inputs) + + aps1_obj = Points(file_path=join(self.path, "p1_aps.h5"), logger=self.logger) + aps1_obj.open(path_inputs=self.config.data_directories.path_inputs) + + aps2_obj = Points(file_path=join(self.path, "coh{}_aps.h5".format(coh_value)), logger=self.logger) + aps2_obj.open(path_inputs=self.config.data_directories.path_inputs) + + if self.config.filtering.spatial_mask_file_p2 is None: + """ + overview of points contained in the *_obj + (unstable p1 means: p1 which were not used in atmospheric filtering) + p2: p2 - inconsistent p2 + unstable p1 + stable p1 --> p2: p2 + aps2: p2 + unstable p1 + stable p1 --> aps2: p2 + p1: stable p1 + unstable p1 --> p1: stable p1 + unstable p1 + aps1: stable p1 --> aps1: stable p1 + unstable p1 + """ + # find unstable p1 in p2 (and in aps2) + point_id_img = np.arange(0, point1_obj.length * point1_obj.width).reshape( + (point1_obj.length, point1_obj.width)) + p1_mask = point1_obj.createMask() + aps1_mask = aps1_obj.createMask() + mask_unstable_p1 = p1_mask & (~aps1_mask) + unstable_p1_id = point_id_img[np.where(mask_unstable_p1)] + + mask_unstable_p1_in_p2 = np.ones((aps2_obj.num_points,), dtype=np.bool_) + for p in aps2_obj.point_id: + if p not in unstable_p1_id: + mask_unstable_p1_in_p2[aps2_obj.point_id == p] = False + + # add unstable p1 from aps2 to aps1 + aps1_obj.addPointsFromObj( + new_point_id=aps2_obj.point_id[mask_unstable_p1_in_p2], + new_coord_xy=aps2_obj.coord_xy[mask_unstable_p1_in_p2, :], + new_phase=aps2_obj.phase[mask_unstable_p1_in_p2, :], + new_num_points=mask_unstable_p1_in_p2[mask_unstable_p1_in_p2].shape[0], + path_inputs=self.config.data_directories.path_inputs + ) + + # remove unstable p1 from p2 and aps2. thereby remove inconsistent p2 from aps2. + p2_mask = point2_obj.createMask() + mask_only_p2 = p2_mask & (~p1_mask) + keep_id = point_id_img[np.where(mask_only_p2)] + point2_obj.removePoints(keep_id=keep_id, path_inputs=self.config.data_directories.path_inputs) + aps2_obj.removePoints(keep_id=keep_id, path_inputs=self.config.data_directories.path_inputs) + + else: + """ + if spatial mask is applied: + p2: p2 - inconsistent p2 (?) + p1 (coincidently?) --> p2: p2 + aps2: p2 + p1 (coincidently?) 
--> aps2: p2 + p1: stable p1 + unstable p1 --> p1: stable p1 (+ unstable p1) + aps1: stable p1 --> aps1: stable p1 (+ unstable p1) + """ + use_all_p1 = False # todo: add to config + if use_all_p1: + raise NotImplementedError("Use all p1 is not implemented.") + else: + # remove also values from estimated parameters + mask = np.ones((point1_obj.num_points,), dtype=np.bool_) + for p in point1_obj.point_id: + if p not in aps1_obj.point_id: + mask[point1_obj.point_id == p] = False + + vel_p1 = vel_p1[mask] + demerr_p1 = demerr_p1[mask] + + # remove unstable p1 from p1 + point1_obj.removePoints( + keep_id=aps1_obj.point_id, + path_inputs=self.config.data_directories.path_inputs + ) + + # remove p2 which are coincidentally equal to p1 + point_id_img = np.arange(0, point1_obj.length * point1_obj.width).reshape( + (point1_obj.length, point1_obj.width)) + p1_mask = point1_obj.createMask() + p2_mask = point2_obj.createMask() + mask_p2 = ~(p1_mask & p2_mask) & p2_mask + p2_id = point_id_img[np.where(mask_p2)] + point2_obj.removePoints(keep_id=p2_id, path_inputs=self.config.data_directories.path_inputs) + aps2_obj.removePoints(keep_id=p2_id, path_inputs=self.config.data_directories.path_inputs) + + # return to ifg-space + a_ifg = point2_obj.ifg_net_obj.getDesignMatrix() + aps1_ifg_phase = np.matmul(a_ifg, aps1_obj.phase.T).T + aps2_ifg_phase = np.matmul(a_ifg, aps2_obj.phase.T).T + + # correct for APS + point2_obj.phase = np.angle(np.exp(1j * point2_obj.phase) * np.conjugate(np.exp(1j * aps2_ifg_phase))) + point1_obj.phase = np.angle(np.exp(1j * point1_obj.phase) * np.conjugate(np.exp(1j * aps1_ifg_phase))) + + demerr, vel, gamma, mean_gamma = densifyNetwork( + point1_obj=point1_obj, + vel_p1=vel_p1, + demerr_p1=demerr_p1, + point2_obj=point2_obj, + num_conn_p1=self.config.densification.num_connections_p1, + num_conn_p2=self.config.densification.num_connections_p2, + max_dist_p1=self.config.densification.max_distance_p1, + velocity_bound=self.config.densification.velocity_bound, + demerr_bound=self.config.densification.dem_error_bound, + num_samples=self.config.densification.num_samples, + num_cores=self.config.processing.num_cores, + logger=self.logger + ) # returns parameters of both first- and second-order points + + # store combined set of first and second-order points + point2_obj.addPointsFromObj( + new_point_id=point1_obj.point_id, + new_coord_xy=point1_obj.coord_xy, + new_phase=point1_obj.phase, + new_num_points=point1_obj.num_points, + path_inputs=self.config.data_directories.path_inputs + ) + + fig = plt.figure(figsize=(15, 5)) + axs = fig.subplots(1, 3) + axs[0].hist(gamma, bins=100) + axs[0].set_xlim([0, 1]) + axs[0].set_ylabel('Absolute frequency') + axs[0].set_xlabel('Temporal coherence\n(temporal unwrapping) [ ]') + + axs[1].hist(mean_gamma, bins=100) + axs[1].set_xlim([0, 1]) + axs[1].set_ylabel('Absolute frequency') + axs[1].set_xlabel('Mean temporal coherence\n(temporal unwrapping of neighbours) [ ]') + + axs[2].plot(mean_gamma, gamma, 'k.') + axs[2].plot([0, 1], [0, 1], 'k-') + axs[2].set_xlim([0, 1]) + axs[2].set_ylim([0, 1]) + axs[2].set_ylabel('Temporal Coherence\n(w.r.t. first-order points)') + axs[2].set_xlabel('Temporal Coherence\n(w.r.t. 
neighbouring second-order points)') + fig.savefig(join(self.path, "pic", "step_4_consistency_coherence_coh{}.png".format(coh_value)), dpi=300) + plt.close(fig) + + # comparison with a priori estimated coherence + mask_p2 = point2_obj.createMask() + temp_coh_obj = BaseStack(file=join(self.path, "temporal_coherence.h5"), logger=self.logger) + temp_coh_img = temp_coh_obj.read(dataset_name="temp_coh") + temp_coh = temp_coh_img[mask_p2] + + fig = plt.figure(figsize=(15, 5)) + axs = fig.subplots(1, 2) + axs[0].plot(temp_coh, gamma, 'k.') + axs[0].plot([0, 1], [0, 1], 'k-') + axs[0].set_xlim([0, 1]) + axs[0].set_ylim([0, 1]) + axs[0].set_xlabel('Temporal Coherence\n(a priori)') + axs[0].set_ylabel('Temporal Coherence\n(w.r.t. first-order points)') + + axs[1].plot(temp_coh, mean_gamma, 'k.') + axs[1].plot([0, 1], [0, 1], 'k-') + axs[1].set_xlim([0, 1]) + axs[1].set_ylim([0, 1]) + axs[1].set_xlabel('Temporal Coherence\n(a priori)') + axs[1].set_ylabel('Temporal Coherence\n(w.r.t. neighbouring second-order points)') + fig.savefig(join(self.path, "pic", "step_4_coherence_priori_vs_posteriori{}.png".format(coh_value)), + dpi=300) + plt.close(fig) + + bmap_obj = AmplitudeImage(file_path=join(self.path, "background_map.h5")) + fig = viewer.plotScatter(value=gamma, coord=point2_obj.coord_xy, bmap_obj=bmap_obj, + ttl="Coherence from temporal unwrapping", s=3.5, cmap="autumn", + vmin=0, vmax=1, logger=self.logger)[0] + fig.savefig(join(self.path, "pic", "step_4_temporal_unwrapping_coh{}.png".format(coh_value)), dpi=300) + plt.close(fig) + + mask_gamma = gamma >= self.config.densification.coherence_threshold + self.logger.info(msg=f"Reduce the dense point set by {mask_gamma[~mask_gamma].shape[0]} points,") + self.logger.info(msg=f"due to coherence from temporal unwrapping < " + f"{self.config.densification.coherence_threshold}") + point2_obj.removePoints(mask=mask_gamma, keep_id=[], path_inputs=self.config.data_directories.path_inputs) + + fig = plt.figure(figsize=(15, 5)) + axs = fig.subplots(1, 2) + axs[0].hist(-vel[mask_gamma] * 100, bins=200) + axs[0].set_ylabel('Absolute frequency') + axs[0].set_xlabel('Mean velocity [cm / year]') + + axs[1].hist(-demerr[mask_gamma], bins=200) + axs[1].set_ylabel('Absolute frequency') + axs[1].set_xlabel('DEM error [m]') + fig.savefig(join(self.path, "pic", "step_4_consistency_parameters_coh{}.png".format(coh_value)), + dpi=300) + plt.close(fig) + + fig = viewer.plotScatter(value=gamma[mask_gamma], coord=point2_obj.coord_xy, bmap_obj=bmap_obj, + ttl="Coherence from temporal unwrapping", s=3.5, cmap="autumn", + vmin=0, vmax=1, logger=self.logger)[0] + fig.savefig(join(self.path, "pic", "step_4_temporal_unwrapping_coh{}_reduced.png".format(coh_value)), + dpi=300) + plt.close(fig) + + fig = viewer.plotScatter(value=-vel[mask_gamma], coord=point2_obj.coord_xy, + ttl="Mean velocity in [m / year]", + bmap_obj=bmap_obj, s=3.5, cmap="jet_r", symmetric=True, logger=self.logger)[0] + fig.savefig(join(self.path, "pic", "step_4_estimation_velocity_coh{}.png".format(coh_value)), dpi=300) + plt.close(fig) + + fig = viewer.plotScatter(value=-demerr[mask_gamma], coord=point2_obj.coord_xy, ttl="DEM error in [m]", + bmap_obj=bmap_obj, s=3.5, cmap="jet_r", symmetric=True, logger=self.logger)[0] + fig.savefig(join(self.path, "pic", "step_4_estimation_dem_error_coh{}.png".format(coh_value)), dpi=300) + plt.close(fig) + + self.logger.info(msg="Remove phase contributions from mean velocity " + "and DEM error from wrapped phase of points.") + pred_phase_demerr, pred_phase_vel = 
ut.predictPhase( + obj=point2_obj, + vel=vel[mask_gamma], + demerr=demerr[mask_gamma], + ifg_space=True, + logger=self.logger + ) + pred_phase = pred_phase_demerr + pred_phase_vel + + wr_phase = point2_obj.phase + wr_res_phase = np.angle(np.exp(1j * wr_phase) * np.conjugate(np.exp(1j * pred_phase))) + + triang_obj = PointNetworkTriangulation(coord_xy=point2_obj.coord_xy, coord_utmxy=None, logger=self.logger) + triang_obj.triangulateGlobal() + triang_obj.triangulateKnn(k=self.config.densification.knn) + arcs = triang_obj.getArcsFromAdjMat() + + unw_res_phase = spatialUnwrapping(num_ifgs=point2_obj.ifg_net_obj.num_ifgs, + num_points=point2_obj.num_points, + phase=wr_res_phase, + method=self.config.processing.unwrapping_method, + edges=arcs, + num_cores=self.config.processing.num_cores, logger=self.logger) + + self.logger.info(msg="Add phase contributions from mean velocity " + "and DEM error back to spatially unwrapped residual phase.") + unw_phase = unw_res_phase + pred_phase + + point2_obj.phase = unw_phase + vel = ut.estimateParameters(obj=point2_obj, ifg_space=True)[0] + point2_obj.phase = ut.setReferenceToPeakOfHistogram(phase=unw_phase, vel=vel, num_bins=300) + + point2_obj.writeToFile() + + phase_ts = ut.invertIfgNetwork( + phase=unw_phase, + num_points=point2_obj.num_points, + ifg_net_obj=point2_obj.ifg_net_obj, + num_cores=1, # self.config.processing.num_cores, + ref_idx=0, + logger=self.logger) + + point_obj = Points(file_path=join(self.path, "coh{}_ts.h5".format(coh_value)), logger=self.logger) + point_obj.open( + other_file_path=join(self.path, "coh{}_ifg_unw.h5".format(coh_value)), + path_inputs=self.config.data_directories.path_inputs + ) + point_obj.phase = phase_ts + + point_obj.writeToFile() + + def runDensificationSpace(self): + """RunDensification.""" + coh_value = int(self.config.filtering.coherence_p2 * 100) + + point_obj = Points(file_path=join(self.path, "coh{}_ifg_unw.h5".format(coh_value)), logger=self.logger) + point_obj.open( + other_file_path=join(self.path, "coh{}_ifg_wr.h5".format(coh_value)), + path_inputs=self.config.data_directories.path_inputs + ) # open wr phase + + aps2_obj = Points(file_path=join(self.path, "coh{}_aps.h5".format(coh_value)), logger=self.logger) + aps2_obj.open(path_inputs=self.config.data_directories.path_inputs) + + # return to ifg-space + a_ifg = point_obj.ifg_net_obj.getDesignMatrix() + aps2_ifg_phase = np.matmul(a_ifg, aps2_obj.phase.T).T + + # correct for APS + point_obj.phase = np.angle(np.exp(1j * point_obj.phase) * np.conjugate(np.exp(1j * aps2_ifg_phase))) + + triang_obj = PointNetworkTriangulation(coord_xy=point_obj.coord_xy, coord_utmxy=None, logger=self.logger) + triang_obj.triangulateGlobal() # if coord_utm is not given, only global delaunay and knn can be calculated + triang_obj.triangulateKnn(k=self.config.densification.knn) + arcs = triang_obj.getArcsFromAdjMat() + + unw_phase = spatialUnwrapping(num_ifgs=point_obj.ifg_net_obj.num_ifgs, + num_points=point_obj.num_points, + phase=point_obj.phase, + method=self.config.processing.unwrapping_method, + edges=arcs, + num_cores=self.config.processing.num_cores, logger=self.logger) + + # adjust reference to peak of histogram + point_obj.phase = unw_phase + vel = ut.estimateParameters(obj=point_obj, ifg_space=True)[0] + point_obj.phase = ut.setReferenceToPeakOfHistogram(phase=unw_phase, vel=vel, num_bins=300) + + point_obj.writeToFile() + del point_obj + + point_obj = Points(file_path=join(self.path, "coh{}_ts.h5".format(coh_value)), logger=self.logger) + point_obj.open( + 
other_file_path=join(self.path, "coh{}_ifg_wr.h5".format(coh_value)), + path_inputs=self.config.data_directories.path_inputs + ) + + phase_ts = ut.invertIfgNetwork(phase=unw_phase, num_points=point_obj.num_points, + ifg_net_obj=point_obj.ifg_net_obj, + num_cores=1, # self.config.processing.num_cores, + ref_idx=0, + logger=self.logger) + + point_obj.phase = phase_ts + + point_obj.writeToFile() diff --git a/sarvey/sarvey_export.py b/sarvey/sarvey_export.py new file mode 100755 index 0000000..9f25dd5 --- /dev/null +++ b/sarvey/sarvey_export.py @@ -0,0 +1,298 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + +"""Console script for exporting data from SARvey format to GIS formats.""" +import argparse +import json +import logging +from json import JSONDecodeError +from logging import Logger +import sys +import time +import warnings +import os +from os.path import join, dirname, basename +import numpy as np +import pandas as pd +import geopandas as gpd +from pyproj import CRS +from pyproj.aoi import AreaOfInterest +from pyproj.database import query_utm_crs_info +from shapely import Point +from shapely.errors import ShapelyDeprecationWarning + +from sarvey.config import Config +from sarvey.console import showLogoSARvey +from sarvey.objects import Points +import sarvey.utils as ut +from sarvey.geolocation import calculateGeolocationCorrection + + +warnings.filterwarnings("ignore", category=ShapelyDeprecationWarning) + + +def exportDataToGisFormat(*, file_path: str, output_path: str, path_inputs: str, + correct_geolocation: bool = False, no_timeseries: bool = False, logger: Logger): + """Export data to GIS format (shp or gpkg). + + Parameters + ---------- + file_path: str + Path to the input file. + output_path: str + Path for writing output file. + path_inputs: str + Path to slcStack.h5 and geometryRadar.h5. + correct_geolocation: bool + Correct geolocation or not + no_timeseries: bool + Export time series data or not + logger: Logger + Logger handle. 
+ """ + point_obj = Points(file_path=file_path, logger=logger) + + point_obj.open(path_inputs=path_inputs) + + # todo: add corrected height to output + # todo: add option to mask the output to e.g. linear infrastructures or other AOI + + vel, demerr, _, coherence, omega, _ = ut.estimateParameters(obj=point_obj, ifg_space=False) + + stc = ut.spatiotemporalConsistency(coord_utm=point_obj.coord_utm, phase=point_obj.phase, + wavelength=point_obj.wavelength) + + point_obj.phase *= point_obj.wavelength / (4 * np.pi) # in [m] + + # extract displacement + defo_ts = np.zeros_like(point_obj.phase, dtype=np.float32) + for i in range(point_obj.num_points): + phase_topo = (point_obj.ifg_net_obj.pbase / (point_obj.slant_range[i] * np.sin(point_obj.loc_inc[i])) * + demerr[i]) + defo_ts[i, :] = point_obj.phase[i, :] - phase_topo + + # transform into meters + defo_ts *= 1000 # in [mm] + + utm_crs_list = query_utm_crs_info( + datum_name="WGS 84", + area_of_interest=AreaOfInterest( + west_lon_degree=point_obj.coord_lalo[:, 1].min(), + south_lat_degree=point_obj.coord_lalo[:, 0].min(), + east_lon_degree=point_obj.coord_lalo[:, 1].max(), + north_lat_degree=point_obj.coord_lalo[:, 0].max(), + ), + contains=True + ) + + utm_epsg = utm_crs_list[0].code + + dates = ["D{}".format(date).replace("-", "") for date in point_obj.ifg_net_obj.dates] + + dates = dates[:point_obj.phase.shape[1]] # remove dates which were not processed + + if no_timeseries: + df_points = pd.DataFrame({}) + else: + df_points = pd.DataFrame({date: () for date in dates}) + + if correct_geolocation: + logger.info("Calculate geolocation correction.") + coord_correction = calculateGeolocationCorrection(path_geom=path_inputs, + point_obj=point_obj, + demerr=demerr, + logger=logger) + coord_correction_norm = np.linalg.norm(coord_correction, axis=1) + max_error_index = np.argmax(coord_correction_norm) + logger.info(f"Maximum geolocation correction: {coord_correction_norm[max_error_index]:.1f} m " + f"corresponding to {demerr[max_error_index]:.1f} m DEM error") + else: + coord_correction = 0 + logger.info("geolocation correction skipped.") + + coord_utm = point_obj.coord_utm + coord_utm += coord_correction + df_points['coord'] = (coord_utm).tolist() + df_points['coord'] = df_points['coord'].apply(Point) + df_points.insert(0, 'point_id', point_obj.point_id.tolist()) + df_points.insert(1, 'velocity', vel * 1000) # in [mm] + df_points.insert(2, 'coherence', coherence) + df_points.insert(3, 'omega', omega) + df_points.insert(4, 'st_consistency', stc * 1000) # in [mm] + df_points.insert(5, 'dem_error', demerr) # in [m] + df_points.insert(6, 'dem', point_obj.height) # in [m] + + df_points.columns = [col[:10] for col in df_points.columns] + + if not no_timeseries: + for i, date in enumerate(dates): + df_points[date] = defo_ts[:, i] + + gdf_points = gpd.GeoDataFrame(df_points, geometry='coord') + gdf_points = gdf_points.set_crs(CRS.from_epsg(utm_epsg)) + logger.info(msg="write to file.") + gdf_points.to_file(output_path) + + +def createParser(): + """Create_parser.""" + parser = argparse.ArgumentParser( + description="Export InSAR time series results from '.h5' to GIS data formats.", + formatter_class=argparse.RawTextHelpFormatter, + epilog="""EXAMPLE: + sarvey_export outputs/coh50_ts.h5 -o outputs/shp/coh50.shp # export time series to shapefile + sarvey_export outputs/coh50_ts.h5 -o outputs/shp/coh50.gpkg # export time series to geopackage + sarvey_export outputs/coh90_ts.h5 -o outputs/shp/coh90.shp -g # apply geolocation correction + sarvey_export 
outputs/coh90_ts.h5 -o outputs/shp/coh90.shp -g -t # skip time series data + """) + + parser.add_argument('file_path', type=str, help='Path to input file.') + + parser.add_argument("-o", "--output_path", type=str, dest="output_path", default="", + help="Path to output file. If empty, the name of the input file will be used.") + + # parser.add_argument("-f", "--format", type=str, required=False, metavar="FILE", dest="format", + # help="Output file format (if not already specified within '-o'). Can be 'shp', 'gpkg', + # 'csv'.") + + parser.add_argument("-l", "--log_dir", type=str, required=False, metavar="FILE", dest="log_dir", + default="logfiles/", help="Logfile directory (default: 'logfiles/')") + + parser.add_argument('-w', '--workdir', default=None, dest="workdir", + help='Working directory (default: current directory).') + + parser.add_argument('-g', '--correct_geo', default=False, action="store_true", dest="correct_geolocation", + help='Correct Geolocation (default: False).') + + parser.add_argument('-t', '--no-time-series', default=False, action="store_true", dest="no_timeseries", + help='Do not export time series (default: False).') + + return parser + + +def main(iargs=None): + """Run Main. + + :param iargs: + """ + parser = createParser() + args = parser.parse_args(iargs) + + log_format = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') + logger = logging.getLogger(__name__) + + console_handler = logging.StreamHandler(sys.stdout) + console_handler.setFormatter(log_format) + logger.addHandler(console_handler) + logging_level = logging.getLevelName("INFO") + logger.setLevel(logging_level) + + if args.workdir is None: + args.workdir = os.path.abspath(os.path.curdir) + else: + logger.info(msg="Working directory: {}".format(args.workdir)) + + args.log_dir = join(args.workdir, args.log_dir) + current_datetime = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime()) + log_filename = f"export_log_{current_datetime}.log" + + if not os.path.exists(args.log_dir): + os.mkdir(args.log_dir) + file_handler = logging.FileHandler(filename=join(args.log_dir, log_filename)) + file_handler.setFormatter(log_format) + logger.addHandler(file_handler) + + showLogoSARvey(logger=logger, step="Export results") + + # read config file to retrieve location of inputs + config_file_path = os.path.abspath(join(args.workdir, dirname(args.file_path), "config.json")) + + if not os.path.exists(config_file_path): + # check if any config file is available in upper directory (backward compatibility) + files = np.array([os.path.abspath(f) for f in os.listdir(join(dirname(config_file_path), "..")) + if os.path.isfile(f)]) + potential_configs = np.array([(basename(f).split(".")[-1] == "json") and ("config" in basename(f)) + for f in files]) + if potential_configs[potential_configs].shape[0] == 0: + raise FileNotFoundError(f"Backup configuration file not found: {config_file_path}!") + else: + logger.warning(msg=f"Backup configuration file not found: {config_file_path}!") + logger.warning(msg=f"Other configuration files automatically detected: {files[potential_configs]}!") + logger.warning(msg=f"Automatically selected configuration file: {files[potential_configs][0]}!") + config_file_path = files[potential_configs][0] + + try: + with open(config_file_path) as config_fp: + config_dict = json.load(config_fp) + config = Config(**config_dict) + except JSONDecodeError as err: + raise IOError(f'Failed to load the configuration json file => {err}') + + # create output directory + if args.output_path == "": + output_dir 
= args.workdir + output_fname = basename(args.file_path).split(".")[-2] + output_format = "shp" + args.output_path = join(output_dir, output_fname + "." + output_format) + else: + output_dir = join(args.workdir, dirname(args.output_path)) + output_fname = basename(args.output_path) + name_splitted = output_fname.split(".") + if len(name_splitted) == 1: + args.output_path = join(output_dir, output_fname + ".shp") # use shapefile as default format + elif len(name_splitted) == 2: + output_format = name_splitted[-1] # use specified format + if (output_format != "shp") and (output_format != "gpkg"): + logger.error(msg=f"Output format not supported: {output_format}!") + raise ValueError + logger.info(msg=f"Detected output format: {output_format}.") + args.output_path = join(output_dir, output_fname) + else: + logger.error(msg=f"Output format was not recognized! {output_fname}") + raise ValueError + + logger.info(msg=f"Output file: {args.output_path}") + + # specify geolocation status + logger.info(msg=f"Correct geolocation error: {args.correct_geolocation}") + + # specify time series flag + logger.info(msg=f"Export time series data: {not args.no_timeseries}") + + if not os.path.exists(output_dir): + os.mkdir(output_dir) + + exportDataToGisFormat(file_path=args.file_path, output_path=args.output_path, + path_inputs=config.data_directories.path_inputs, + correct_geolocation=args.correct_geolocation, no_timeseries=args.no_timeseries, + logger=logger) + + +if __name__ == '__main__': + main() diff --git a/sarvey/sarvey_mask.py b/sarvey/sarvey_mask.py new file mode 100755 index 0000000..75645d3 --- /dev/null +++ b/sarvey/sarvey_mask.py @@ -0,0 +1,634 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . 
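# --- Illustrative aside (sketch only, not part of the SARvey implementation) ---
# The module below maps geographic node coordinates of roads and polygons to radar pixels by
# querying a KD-tree built from the latitude/longitude lookup tables of the radar geometry.
# A stripped-down sketch of that lookup idea on a synthetic grid (grid values and variable
# names are made up; this is not the CoordinateSearch class defined further down):
import numpy as np
from scipy import spatial

# synthetic lookup tables (latitude/longitude per radar pixel) for a 100 x 200 image
lat, lon = np.meshgrid(np.linspace(53.0, 53.1, 100), np.linspace(9.0, 9.2, 200), indexing="ij")
yy, xx = np.meshgrid(np.arange(lat.shape[0]), np.arange(lat.shape[1]), indexing="ij")

tree = spatial.KDTree(np.column_stack([lon.ravel(), lat.ravel()]))

dist, idx = tree.query([9.05, 53.02])                    # query a (lon, lat) point
print(dist, (yy.ravel()[idx], xx.ravel()[idx]))          # distance [deg] and (azimuth, range) index
# --- End of aside ---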
+ +"""Generate mask from shape file.""" +import argparse +import os +import sys +import time +from os.path import join +import PIL.Image as Image +import PIL.ImageDraw as ImageDraw +import matplotlib +import matplotlib.pyplot as plt +import numpy as np +from scipy import spatial +import logging +from logging import Logger +import geopandas as gpd + +from mintpy.utils import writefile, ptime, utils + +from sarvey.osm_utils import getSpatialExtend + +try: + matplotlib.use('TkAgg') +except ImportError as e: + print(e) + +EXAMPLE = """Example: + sarvey_mask path/to/file.shp --geom ./geometryRadar.h5 --width 6 -o mask_infra.h5 +""" + + +def create_parser(): + """Create_parser.""" + parser = argparse.ArgumentParser( + description='Create transport infrastructure mask from shp-file.', + formatter_class=argparse.RawTextHelpFormatter, + epilog=EXAMPLE) + + parser.add_argument(dest='input_file', help='path to input shp-file.') + + parser.add_argument('-w', '--work_dir', dest='work_dir', default=None, + help='absolute path to working directory\n' + + '(default: current directory).') + + parser.add_argument('--geom', dest='geom_file', default=None, + help='path to existing geometryRadar.h5 file.') + + parser.add_argument('--width', dest='width', default=6, type=int, + help='Width of the mask in pixel (default: 6).') + + parser.add_argument('-o', dest='out_file_name', default='mask_infra.h5', + help="name of output file. (default: 'mask_infra.h5').") + + return parser + + +class Node: + """Define simple class for a node at a road (similar to overpy.Node).""" + + def __init__(self, *, lat: float = None, lon: float = None): + """Init.""" + self.lat = lat + self.lon = lon + + +class CoordinateSearch: + """CoordinateSearch.""" + + def __init__(self): + """Init.""" + self.search_tree = None + self.yidx = None + self.xidx = None + self.lon = None + self.lat = None + self.coord = None + + def createSearchTree(self, *, coord: utils.coordinate, logger: Logger): + """Create search tree. + + Parameters + ---------- + coord: utils.coordinate + Coordinates + logger: Logger + Logging handler. + """ + self.coord = coord + logger.info(msg='create kd-tree for efficient search...') + + if self.coord.lut_y is None or self.coord.lut_x is None: + self.coord.open() + lat, lon = self.coord.read_lookup_table(print_msg=False) + self.lat = lat.ravel() + self.lon = lon.ravel() + + # create the 2D coordinate arrays for fast indexing + x = np.arange(self.coord.lut_x.shape[1]) + y = np.arange(self.coord.lut_x.shape[0]) + xx, yy = np.meshgrid(x, y) + self.xidx = xx.ravel() + self.yidx = yy.ravel() + + start_time = time.time() + self.search_tree = spatial.KDTree(data=np.array([self.lon, self.lat]).transpose()) + + logger.info(msg='... done.') + m, s = divmod(time.time() - start_time, 60) + logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.'.format(m, s)) + + def getMeanDistanceBetweenPixels(self): + """Compute mean distance between adjacent pixels.""" + distances = self.search_tree.query([self.lon[0], self.lat[0]], k=10)[0] + mean_dist = np.mean(distances[1:]) + return mean_dist + + def getNearestNeighbour(self, *, node: Node): + """Query the kd-tree for the nearest neighbour. 
+ + :param node: Node object + """ + # find nearest neighbour + dist, idx = self.search_tree.query([node.lon, node.lat]) + found_node = Node(lat=self.lat[idx], lon=self.lon[idx]) + # return index of NN in radar coordinates + return dist, (self.yidx[idx], self.xidx[idx]), found_node + + +def findLastRoadPixel(*, csearch: CoordinateSearch, cur_node: Node, prev_node: Node, dist_thrsh: float): + """Find the index of the last road pixel that is within the image extend. + + Idea: the pixel with the shortest distance to the current node of a road is not necessarily on the road, if the + current node is outside the image extend. Split the road in further linear parts and find the last road pixel + recursively that is still inside the image. + Hint: all nodes are instances from class Node + + Parameters + ---------- + csearch: CoordinateSearch + Search tree for efficient spatial search of the coordinate of a pixel in the radar image. + cur_node: Node + Current node of the road that is outside the image extend. + prev_node: Node + Previous node of the road that is inside the image extend. + dist_thrsh: float + Distance threshold for stop criterion (derived from average distance between two pixels in the image). + + Returns + ------- + node_idx: int + Node of the pixel which is the last pixel on the road inside the image. + """ + # create a new node at half of the road distance between previous and current node + mid_lat = cur_node.lat + (cur_node.lat - prev_node.lat) / 2 + mid_lon = cur_node.lon + (cur_node.lon - prev_node.lon) / 2 + mid_node = Node(lat=mid_lat, lon=mid_lon) + + dist, node_idx = csearch.getNearestNeighbour(node=mid_node)[0:2] + if dist < dist_thrsh: + return node_idx + else: + node_idx = findLastRoadPixel(csearch=csearch, cur_node=cur_node, prev_node=prev_node, dist_thrsh=dist_thrsh) + return node_idx + + +def euclDist(*, node1: Node, node2: Node): + """Compute the euclidean distance between two nodes.""" + return np.sqrt((node1.lat - node2.lat) ** 2 + (node1.lon - node2.lon) ** 2) + + +def computeLastRoadPixel(*, cur_node: Node, prev_node: Node, found_node: Node): + """Compute the location of the pixel at the border of the radar image that is part of the road. + + Parameters + ---------- + cur_node: Node + Current node of the road. + prev_node: Node + Previous node of the road. + found_node: Node + Found node of the road. + + Returns + ------- + new_lon: float + Longitude of the pixel at the border of the radar image that is part of the road. + new_lat: float + Latitude of the pixel at the border of the radar image that is part of the road. + """ + a = euclDist(node1=prev_node, node2=found_node) + b = euclDist(node1=cur_node, node2=found_node) + c = euclDist(node1=prev_node, node2=cur_node) + alpha = np.arccos((- a ** 2 + b ** 2 + c ** 2) / (2 * b * c)) + d = b / np.sin(np.pi / 2 - alpha) + new_lat = cur_node.lat + (prev_node.lat - cur_node.lat) / c * d + new_lon = cur_node.lon + (prev_node.lon - cur_node.lon) / c * d + return new_lon, new_lat + + +def convertToRadarCoordPolygon(*, gdf_infra: gpd.geodataframe, csearch: CoordinateSearch, logger: Logger): + """Convert Polygon to a mask in shape of radar image. + + Parameters + ---------- + gdf_infra: gpd.geodataframe + The queried infrastructures containing polygons. + csearch: CoordinateSearch + The coordinate search object. + logger: Logger + Logging handler. + + Returns + ------- + img_np: np.ndarray + Mask image. 
+ """ + # create a new image + logger.info(msg='create mask image...') + img_pil = Image.new(mode="1", + size=(int(csearch.coord.src_metadata['LENGTH']), int(csearch.coord.src_metadata['WIDTH']))) + img_pil_draw = ImageDraw.Draw(im=img_pil) + + num_ways = gdf_infra.shape[0] + way_iter = 0 + prog_bar = ptime.progressBar(maxValue=num_ways) + + dist_thrsh = 1.3 * csearch.getMeanDistanceBetweenPixels() + lines = [geom.boundary.coords for geom in gdf_infra.geometry if geom is not None] + way_list = list() + for coo in lines: + way_list.append([Node(lat=point[1], lon=point[0]) for point in coo]) + + # plt.ion() + fig = plt.figure() + ax = fig.add_subplot() + ax.set_xlabel("lon") + ax.set_ylabel("lat") + lat, lon = csearch.coord.read_lookup_table(print_msg=False) + ax.plot([lon[0, 0], lon[-1, 0]], [lat[0, 0], lat[-1, 0]], '-k') + ax.plot([lon[0, 0], lon[0, -1]], [lat[0, 0], lat[0, -1]], '-k') + ax.plot([lon[0, -1], lon[-1, -1]], [lat[0, -1], lat[-1, -1]], '-k') + ax.plot([lon[-1, 0], lon[-1, -1]], [lat[-1, 0], lat[-1, -1]], '-k') + # ax.plot(lon.ravel(), lat.ravel(), '.k', markersize=0.5) + + while way_iter < num_ways: + way = way_list[way_iter] + poly_line_way = [] + + # perform a preliminary search to check if polygon is partly outside image extend + outside = np.zeros(len(way)) + for i in range(len(way)): + cur_node = way[i] + + # convert node coordinates (lat, lon) to image coordinates + dist, _, _ = csearch.getNearestNeighbour(node=cur_node) + + # check if node is outside the image + if dist > dist_thrsh: + outside[i] = 1 + + if np.sum(outside) == 0: # all road nodes inside image extend + for i in range(len(way)): + cur_node = way[i] + + # convert node coordinates (lat, lon) to image coordinates + dist, node_idx, found_node = csearch.getNearestNeighbour(node=cur_node) + + # Fill list of current way with node coordinates + poly_line_way.append(node_idx) + ax.plot(cur_node.lon, cur_node.lat, '*k') + ax.plot(found_node.lon, found_node.lat, 'ok') + + else: # some polygon nodes outside image extend + if np.sum(outside) == outside.size: # all nodes outside, skip + way_iter += 1 + continue + + # polygon nodes partly inside and partly outside + prev_p = outside[-2] == 1 # last point == first point (due to closed polygon). Select second last. + for i in range(outside.shape[0]): + cur_p = outside[i] == 1 + cur_node = way[i] + + # convert node coordinates (lat, lon) to image coordinates + dist, node_idx, found_node = csearch.getNearestNeighbour(node=cur_node) + + # check if transition happens + # yes: check if current point is inside or outside + # if outside: find transition point, but do not add current point + # if inside: find transition point, then add current point + # no: if point inside: add point + # if point outside: skip point + + if not (prev_p == cur_p): # transition + stored_idx = None + if i - 1 < 0: + prev_node = way[-2] + else: + prev_node = way[i - 1] + + if cur_p: # transition: in -> out + # find transition point, but do not add current point + ax.plot(cur_node.lon, cur_node.lat, '*y') + + if prev_p: # transition: out -> in + # find and add transition point, then add current point. + stored_idx = node_idx # store current point for adding it later. + ax.plot(cur_node.lon, cur_node.lat, '*r') # plot now, because variables will be overwritten + # the 'found_node' has to be computed from the last point outside, i.e. 
from 'prev_node' + ax.plot(found_node.lon, found_node.lat, 'or') + _, _, found_node = csearch.getNearestNeighbour(node=prev_node) + + new_lon, new_lat = computeLastRoadPixel( + cur_node=cur_node, + prev_node=prev_node, + found_node=found_node + ) + + dist, node_idx, found_node = csearch.getNearestNeighbour(node=Node(lon=new_lon, lat=new_lat)) + ax.plot(cur_node.lon, cur_node.lat, '*b') + ax.plot(found_node.lon, found_node.lat, 'ob') + ax.plot(new_lon, new_lat, '+b') + + # add the transition point + poly_line_way.append(node_idx) + if prev_p: # transition: out -> in + # transition point found and added, now add stored current point. + poly_line_way.append(stored_idx) + prev_p = cur_p # prepare for next iteration + + elif cur_p: # no transition, current point is outside -> do not add point + ax.plot(cur_node.lon, cur_node.lat, '*y') + prev_p = cur_p # prepare for next iteration + + else: # no transition, current points is inside -> add point + ax.plot(cur_node.lon, cur_node.lat, '*r') + ax.plot(found_node.lon, found_node.lat, 'or') + poly_line_way.append(node_idx) + prev_p = cur_p # prepare for next iteration + + prog_bar.update(value=way_iter + 1, every=10, suffix='{}/{} polygons'.format(way_iter + 1, num_ways)) + + # if first point is outside image, the polygon will not be closed. However, it still works to create a polygon. + img_pil_draw.polygon(poly_line_way, fill=255) + # plt.figure() + # plt.imshow(np.array(img_pil.getdata()).reshape(img_pil.size[1], img_pil.size[0]).astype(int)) + + way_iter += 1 + + img_np = np.array(img_pil.getdata()).reshape(img_pil.size[1], img_pil.size[0]).astype(int) + return img_np + + +def convertToRadarCoord(*, gdf_infra: gpd.geodataframe, csearch: CoordinateSearch, width: int, logger: Logger): + """Convert Polyline to a mask in shape of radar image. Apply a buffer of size 'width' in pixels. + + Parameters + ---------- + gdf_infra: gpd.geodataframe + The queried infrastructures containing polygons. + csearch: CoordinateSearch + The coordinate search object. + width: int + Width of the mask in pixel. + logger: Logger + Logging handler. + + Returns + ------- + img_np: np.ndarray + Mask image. 
+ """ + # create a new image + logger.info(msg='create mask image...') + img_pil = Image.new(mode="1", + size=(int(csearch.coord.src_metadata['LENGTH']), int(csearch.coord.src_metadata['WIDTH']))) + img_pil_draw = ImageDraw.Draw(im=img_pil) + + num_roads = gdf_infra.shape[0] + prog_bar = ptime.progressBar(maxValue=num_roads) + + dist_thrsh = 1.3 * csearch.getMeanDistanceBetweenPixels() + lines = [ls.coords for ls in gdf_infra.geometry if ls is not None] # enables to append to list + way_list = list() + for coo in lines: + way_list.append([Node(lat=point[1], lon=point[0]) for point in coo]) + + num_ways = len(way_list) # changes during iteration + way_iter = 0 + + # plt.ion() + fig = plt.figure() + ax = fig.add_subplot() + ax.set_xlabel("lon") + ax.set_ylabel("lat") + lat, lon = csearch.coord.read_lookup_table(print_msg=False) + ax.plot([lon[0, 0], lon[-1, 0]], [lat[0, 0], lat[-1, 0]], '-k') + ax.plot([lon[0, 0], lon[0, -1]], [lat[0, 0], lat[0, -1]], '-k') + ax.plot([lon[0, -1], lon[-1, -1]], [lat[0, -1], lat[-1, -1]], '-k') + ax.plot([lon[-1, 0], lon[-1, -1]], [lat[-1, 0], lat[-1, -1]], '-k') + # ax.plot(lon.ravel(), lat.ravel(), '.k', markersize=0.5) + + while way_iter < num_ways: + way = way_list[way_iter] + poly_line_way = [] + + # perform a preliminary search to check if road is partly outside image extend + outside = np.zeros(len(way)) + for i in range(len(way)): + cur_node = way[i] + + # convert node coordinates (lat, lon) to image coordinates + dist, _, _ = csearch.getNearestNeighbour(node=cur_node) + + # check if node is outside the image + if dist > dist_thrsh: + outside[i] = 1 + + if np.sum(outside) == 0: # all road nodes inside image extend + for i in range(len(way)): + cur_node = way[i] + + # convert node coordinates (lat, lon) to image coordinates + dist, node_idx, found_node = csearch.getNearestNeighbour(node=cur_node) + + # Fill list of current way with node coordinates + poly_line_way.append(node_idx) + ax.plot(cur_node.lon, cur_node.lat, '*k') + ax.plot(found_node.lon, found_node.lat, 'ok') + + else: # some road nodes outside image extend + if np.sum(outside) == outside.size: # all nodes outside, skip + way_iter += 1 + continue + # split the way into sub parts based on in-out / out-in transition + # find first node inside the image + first_inside_idx = np.where(outside == 0)[0][0] + if first_inside_idx > 0: # this is a transition into the image + start_idx = first_inside_idx - 1 + else: + start_idx = first_inside_idx + + # find first node which is again outside the image + outside_idx = np.where(outside[first_inside_idx:] == 1)[0] + if outside_idx.size == 0: # no more transition to outside the image + stop_idx = len(way) + else: + stop_idx = outside_idx[0] + first_inside_idx + 1 + if stop_idx != len(way): # split the current way and add a new way at the end of the way_list + # to handle it later + way_list.append(way[stop_idx:]) + num_ways += 1 + + for i in range(start_idx, stop_idx): + cur_node = way[i] + + # convert node coordinates (lat, lon) to image coordinates + dist, node_idx, found_node = csearch.getNearestNeighbour(node=cur_node) + + if dist > dist_thrsh: + if i == start_idx: # there is no previous node, but a next node. 
+ prev_node = way[i + 1] + else: + prev_node = way[i - 1] + new_lon, new_lat = computeLastRoadPixel(cur_node=cur_node, prev_node=prev_node, + found_node=found_node) + dist, node_idx, found_node = csearch.getNearestNeighbour(node=Node(lon=new_lon, lat=new_lat)) + ax.plot(cur_node.lon, cur_node.lat, '*b') + ax.plot(found_node.lon, found_node.lat, 'ob') + ax.plot(new_lon, new_lat, '+b') + else: + ax.plot(cur_node.lon, cur_node.lat, '*r') + ax.plot(found_node.lon, found_node.lat, 'or') + # Fill list of current way with node coordinates + poly_line_way.append(node_idx) + + prog_bar.update(value=way_iter + 1, every=10, suffix='{}/{} road segments'.format(way_iter + 1, num_roads)) + + img_pil_draw.line(poly_line_way, fill=255, width=width) + # img_pil_draw.polygon(poly_line_way, fill=255) + + way_iter += 1 + + img_np = np.array(img_pil.getdata()).reshape(img_pil.size[1], img_pil.size[0]).astype(int) + return img_np + + +def saveMask(*, work_dir: str, mask: np.ndarray, atr: dict, out_file_name: str): + """Save the mask to 'maskRoads.h5'. + + Parameters + ---------- + work_dir: str + Working directory. + mask: np.ndarray + Mask image. + atr: dict + Metadata data, e.g. from the geometryRadar.h5 file. + out_file_name: str + Output file name. + """ + # create the right attributes + ds_dict = dict() + ds_dict['mask'] = mask.transpose().astype('float32') + atr["FILE_TYPE"] = "mask" + + writefile.write(datasetDict=ds_dict, out_file=os.path.join(work_dir, out_file_name), metadata=atr) + + +def createMask(*, input_file: str, width: int, work_dir: str, out_file_name: str, geom_file: str, + logger: logging.Logger): + """Create a mask for the radar image from a shapefile containing lines or polygons. + + Parameters + ---------- + input_file: str + Path to input file. + width: int + Width of the mask in pixel. Applied to the lines only. + work_dir: str + Working directory. + out_file_name: str + Output file name. + geom_file: str + Path to geometryRadar.h5 file. + logger: logging.Logger + Logging handler. + """ + logger.info(msg="Start creating mask file based on openstreetmap data.") + + # get bounding box + _, _, _, coord, atr = getSpatialExtend(geom_file=geom_file, logger=logger) + + # create search tree + csearch = CoordinateSearch() + csearch.createSearchTree(coord=coord, logger=logger) + + logger.info(f"Read from input file: {input_file}") + gdf_infra = gpd.read_file(input_file) + + if gdf_infra.geometry[0].geom_type == "LineString": + mask_img = convertToRadarCoord(gdf_infra=gdf_infra, csearch=csearch, width=width, logger=logger) + + elif gdf_infra.geometry[0].geom_type == "Polygon": + mask_img = convertToRadarCoordPolygon(gdf_infra=gdf_infra, csearch=csearch, width=width, logger=logger) + else: + logger.error(msg=f"Geometry type is {gdf_infra.geometry[0].geom_type}." + f"Only 'LineString' and 'Polygon' supported!") + raise TypeError + + if '.h5' not in out_file_name: + out_file_name += ".h5" + saveMask(work_dir=work_dir, mask=mask_img, atr=atr, out_file_name=out_file_name) + + logger.info(msg="Masking finished.") + + +def main(iargs=None): + """Create mask from lines or polygons given in geographic coordinates (EPSG:4326). 
Input as shp or gpkg.""" + # check input + parser = create_parser() + inps = parser.parse_args(args=iargs) + + # initiate logger + logging_level = logging.getLevelName('DEBUG') + + log_format = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') + logger = logging.getLogger(__name__) + + current_datetime = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime()) + log_filename = f"sarvey_mask_log_{current_datetime}.txt" + if not os.path.exists(os.path.join(os.getcwd(), "logfiles")): + os.mkdir(os.path.join(os.getcwd(), "logfiles")) + file_handler = logging.FileHandler(filename=os.path.join(os.getcwd(), "logfiles", log_filename)) + file_handler.setFormatter(log_format) + logger.addHandler(file_handler) + + console_handler = logging.StreamHandler(sys.stdout) + console_handler.setFormatter(log_format) + logger.addHandler(console_handler) + logger.setLevel(logging_level) + + if inps.work_dir is None: + work_dir = os.getcwd() + else: + work_dir = inps.work_dir + if not os.path.exists(path=work_dir): + logger.info(msg='create output folder: ' + work_dir) + os.mkdir(path=work_dir) + logger.info(msg='working directory: {}'.format(work_dir)) + + input_file = join(work_dir, inps.input_file) + out_file_name = join(work_dir, inps.out_file_name) + + createMask( + input_file=input_file, + width=inps.width, + work_dir=work_dir, + out_file_name=out_file_name, + logger=logger, + geom_file=inps.geom_file + ) + + +if __name__ == '__main__': + main() diff --git a/sarvey/sarvey_mti.py b/sarvey/sarvey_mti.py new file mode 100755 index 0000000..166cc74 --- /dev/null +++ b/sarvey/sarvey_mti.py @@ -0,0 +1,291 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . 
+ +"""MTI module for SARvey.""" + +import argparse +import json +import os +import shutil +from json import JSONDecodeError +from os.path import join +import matplotlib +import sys +import logging +import time +from logging import Logger +from pydantic.schema import schema + +from sarvey.console import printStep, printCurrentConfig, showLogoSARvey +from sarvey.processing import Processing +from sarvey.config import Config + +try: + matplotlib.use('QtAgg') +except ImportError as e: + print(e) + +EXAMPLE = """Example: + sarvey -f config.json 0 0 -g # create default config file with the name config.json and exit + sarvey -f config.json 0 0 # run only preparation step + sarvey -f config.json 0 4 # run all processing steps + + sarvey -f config.json 0 0 -p # print explanation of the configuration parameters to console +""" + +STEP_DICT = { + 0: "PREPARATION", + 1: "CONSISTENCY CHECK", + 2: "UNWRAPPING", + 3: "FILTERING", + 4: "DENSIFICATION", +} + + +def run(*, config: Config, args: argparse.Namespace, logger: Logger): + """Run the specified processing steps. + + Parameters + ---------- + config: Config + object of configuration class. + args: argparse.Namespace + command line input arguments + logger: Logger + Logging handler. + """ + showLogoSARvey(logger=logger, step="MTInSAR") + + steps = range(args.start, args.stop + 1) + + config_default_dict = generateTemplateFromConfigModel() + + proc_obj = Processing(path=config.data_directories.path_outputs, config=config, logger=logger) + + printCurrentConfig(config_section=config.processing.dict(), + config_section_default=config_default_dict["processing"], + logger=logger) + + if config.phase_linking.phase_linking: + printCurrentConfig(config_section=config.phase_linking.dict(), + config_section_default=config_default_dict["phase_linking"], + logger=logger) + + if 0 in steps: + printStep(step=0, step_dict=STEP_DICT, logger=logger) + printCurrentConfig(config_section=config.preparation.dict(), + config_section_default=config_default_dict["preparation"], + logger=logger) + proc_obj.runPreparation() + + if 1 in steps: + printStep(step=1, step_dict=STEP_DICT, logger=logger) + printCurrentConfig(config_section=config.consistency_check.dict(), + config_section_default=config_default_dict["consistency_check"], + logger=logger) + proc_obj.runConsistencyCheck() + + if 2 in steps: + printStep(step=2, step_dict=STEP_DICT, logger=logger) + printCurrentConfig(config_section=config.unwrapping.dict(), + config_section_default=config_default_dict["unwrapping"], + logger=logger) + if proc_obj.config.processing.temporal_unwrapping: + proc_obj.runUnwrappingTimeAndSpace() + else: + proc_obj.runUnwrappingSpace() + + if 3 in steps: + printStep(step=3, step_dict=STEP_DICT, logger=logger) + printCurrentConfig(config_section=config.filtering.dict(), + config_section_default=config_default_dict["filtering"], + logger=logger) + proc_obj.runFiltering() + + if 4 in steps: + printStep(step=4, step_dict=STEP_DICT, logger=logger) + printCurrentConfig(config_section=config.densification.dict(), + config_section_default=config_default_dict["densification"], + logger=logger) + if proc_obj.config.processing.temporal_unwrapping: + proc_obj.runDensificationTimeAndSpace() + else: + proc_obj.runDensificationSpace() + + logger.info(msg="SARvey MTI finished normally.") + # close log-file to avoid problems with deleting the files + if logger.hasHandlers(): + for handler in logger.handlers[:]: + logger.removeHandler(handler) + handler.flush() + handler.close() + + +def 
generateTemplateFromConfigModel(): + """GenerateTemplateFromConfigModel.""" + top_level_schema = schema([Config]) + top_level_dict = dict() + for sec_name, sec_def in top_level_schema['definitions'].items(): + if sec_name == "Config": + # substitute the class names of subsections in top_level_dict by the name of the sections in class Config + for subsec_name, subsec_def in sec_def["properties"].items(): + top_level_dict[subsec_name] = top_level_dict.pop(subsec_def["title"]) + continue # don't add "Config" to top_level_dict + sec_dict = dict() + for subsec_name, subsec_def in sec_def["properties"].items(): + if "default" not in subsec_def: + sec_dict.update({subsec_name: None}) + else: + sec_dict.update({subsec_name: subsec_def["default"]}) + top_level_dict.update({sec_name: sec_dict}) + + return top_level_dict + + +def createParser(): + """Create_parser. + + :return: + """ + parser = argparse.ArgumentParser( + description='Multitemporal InSAR processing workflow\n\n' + + 'Run the following steps:\n' + + '0 - preparation\n' + + '1 - consistency check\n' + + '2 - spatial unwrapping\n' + + '3 - filtering\n' + + '4 - densification', + formatter_class=argparse.RawTextHelpFormatter, + epilog=EXAMPLE) + + parser.add_argument('start', choices={0, 1, 2, 3, 4}, type=int, + help='Start of processing') + + parser.add_argument('stop', choices={0, 1, 2, 3, 4}, type=int, + help='Stop of processing') + + parser.add_argument("-f", "--filepath", type=str, required=True, metavar="FILE", + help="Path to the config.json file.") + + parser.add_argument("-g", "--generate_config", action="store_true", default=False, dest="generate_config", + help="Write default configuration to file specified by '-f'.") + + parser.add_argument("-p", "--print_config_explanation", action="store_true", default=False, + dest="print_config_explanation", + help="Prints exhaustive explanations about configuration to console.") + + parser.add_argument('-w', '--workdir', default=None, dest="workdir", + help='Working directory (default: current directory).') + + return parser + + +def main(iargs=None): + """Run Main. 
+ + :param iargs: + """ + parser = createParser() + args = parser.parse_args(iargs) + + # initiate logger + logging_level = logging.getLevelName('DEBUG') # set a default value before until level is read from config + + log_format = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') + logger = logging.getLogger(__name__) + + console_handler = logging.StreamHandler(sys.stdout) + console_handler.setFormatter(log_format) + logger.addHandler(console_handler) + logger.setLevel(logging_level) + + if args.generate_config: + logger.info(msg=f"Write default config to file: {args.filepath}.") + default_config_dict = generateTemplateFromConfigModel() + with open(args.filepath, "w") as f: + f.write(json.dumps(default_config_dict, indent=4)) + return 0 + + if args.print_config_explanation: + top_level_schema = schema([Config]) + print(json.dumps(top_level_schema, indent=2)) + return 0 + + if args.stop < args.start: + logger.error(msg="Choose Start <= Stop!") + raise ValueError + + if args.workdir is None: + args.workdir = os.path.abspath(os.path.curdir) + else: + logger.info(msg="Working directory: {}".format(args.workdir)) + + config_file_path = os.path.abspath(join(args.workdir, args.filepath)) + + try: + with open(config_file_path) as config_fp: + config_dict = json.load(config_fp) + config = Config(**config_dict) + except JSONDecodeError as err: + raise IOError(f'Failed to load the configuration json file => {err}') + + current_datetime = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime()) + log_filename = f"sarvey_log_{current_datetime}.log" + logpath = config.logging.logfile_path + if not os.path.exists(logpath): + os.mkdir(logpath) + file_handler = logging.FileHandler(filename=join(logpath, log_filename)) + file_handler.setFormatter(log_format) + logger.addHandler(file_handler) + + logging_level = logging.getLevelName(config.logging.logging_level) + logger.setLevel(logging_level) + + config.data_directories.path_outputs = os.path.abspath(join(args.workdir, config.data_directories.path_outputs)) + if config.consistency_check.spatial_mask_file_p1 is not None: + config.consistency_check.spatial_mask_file_p1 = os.path.abspath( + join(args.workdir, config.consistency_check.spatial_mask_file_p1)) + if config.filtering.spatial_mask_file_p2 is not None: + config.filtering.spatial_mask_file_p2 = os.path.abspath( + join(args.workdir, config.filtering.spatial_mask_file_p2)) + + # create all necessary directories + if not os.path.exists(config.data_directories.path_outputs): + os.mkdir(config.data_directories.path_outputs) + if not os.path.exists(join(config.data_directories.path_outputs, "pic")): + os.mkdir(join(config.data_directories.path_outputs, "pic")) + + # copy config file to output directory to ensure that there is always a backup config file with latest parameters + shutil.copy2(src=config_file_path, dst=join(config.data_directories.path_outputs, "config.json")) + + run(config=config, args=args, logger=logger) + + +if __name__ == '__main__': + main() diff --git a/sarvey/sarvey_osm.py b/sarvey/sarvey_osm.py new file mode 100755 index 0000000..93fbed1 --- /dev/null +++ b/sarvey/sarvey_osm.py @@ -0,0 +1,220 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. 
+# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + +"""Download openstreetmap data for area of interest.""" +import argparse +import logging +import os +import sys +import time +from os.path import join +import geopandas as gpd +from shapely import ops, Point +import matplotlib + +from sarvey.osm_utils import runOsmQueryBridge, runOsmQuery, getSpatialExtend + +try: + matplotlib.use('TkAgg') +except ImportError as e: + print(e) + + +EXAMPLE = """Example: + sarvey_osm --geom ./geometryRadar.h5 --railway # download railway + sarvey_osm --geom ./geometryRadar.h5 --highway # download highway + sarvey_osm --geom ./geometryRadar.h5 --railway --bridge # download railway bridge + sarvey_osm --geom ./geometryRadar.h5 --railway -o mask_railway.shp # specify output path +""" + + +def create_parser(): + """Create_parser.""" + parser = argparse.ArgumentParser( + description='Download transport infrastructure information from openstreetmap and store as shp-file.', + formatter_class=argparse.RawTextHelpFormatter, + epilog=EXAMPLE) + + parser.add_argument('-w', '--work_dir', dest='work_dir', default=None, + help='absolute path to working directory\n' + + '(default: current directory).') + + parser.add_argument('--geom', dest='geom_file', default=None, + help='path to existing geometryRadar.h5 file') + + parser.add_argument('--railway', dest='railway', action="store_true", default=False, + help='Set true to query railways.') + + parser.add_argument('--highway', dest='highway', action="store_true", default=False, + help='Set true to query highways.') + + parser.add_argument('--bridge', dest='bridge', action="store_true", default=False, + help='Set true to mask bridges.\n' + + 'If --railway or --highway set true, only railway/highway bridges are queried.') + + parser.add_argument('-o', dest='out_file_name', default='osm_infra.shp', + help="name of output file. (default: 'osm_infra.shp')") + + return parser + + +def downloadOSM(*, railway: bool, highway: bool, bridge: bool, + work_dir: str, out_file_name: str, logger: logging.Logger, geom_file: str): + """Download openstreetmap data and store to file. + + Parameters + ---------- + railway: bool + download railway data. 
+ highway: bool + download highway data. + bridge: bool + download bridge data. + work_dir: str + working directory. + out_file_name: str + output file name. + logger: logging.Logger + logger. + geom_file: str + path to geometryRadar.h5 file. + """ + logger.info(msg="Start creating mask file based on openstreetmap data.") + + # get bounding box + ll_bbox, ur_bbox, img_ext, coord, atr = getSpatialExtend(geom_file=geom_file, logger=logger) + + # store image extend + gdf = gpd.GeoDataFrame({"geometry": gpd.geoseries.GeoSeries(img_ext)}) + gdf = gdf.dissolve().convex_hull + gdf.to_file(join(work_dir, "img_extend.gpkg")) + + # store bounding box + bbox_points = [ + Point(ll_bbox[1], ll_bbox[0]), + Point(ur_bbox[1], ll_bbox[0]), + Point(ur_bbox[1], ur_bbox[0]), + Point(ll_bbox[1], ur_bbox[0]) + ] + + gdf = gpd.GeoDataFrame({"geometry": gpd.geoseries.GeoSeries(bbox_points)}) + gdf = gdf.dissolve().convex_hull + gdf.to_file(join(work_dir, "bounding_box.gpkg")) + + if (not railway) & (not highway) & (not bridge): + logger.error(msg="No infrastructure type was specified.") + return + + if bridge: + # get requested OSM layer + query_result = runOsmQueryBridge( + ll_corner_wgs=ll_bbox, ur_corner_wgs=ur_bbox, + bridge_highway=highway, bridge_railway=railway, + logger=logger + ) + else: + type_list = list() + if railway: + type_list += ["rail"] + if highway: + type_list += ["motorway", "motorway_link", "trunk", "trunk_link"] + + # get requested OSM layer + query_result = runOsmQuery(ll_corner_wgs=ll_bbox, ur_corner_wgs=ur_bbox, + type_list=type_list, logger=logger) + + multi_line_list = list() + for way in query_result.ways: + if "area" in way.tags: + if way.tags["area"] == "yes": + logger.info('Area flag is true') + continue + else: + # keep coordinates in lat/lon. It will be needed in masking step. 
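+            # note: vertices are stored as (lon, lat), i.e. shapely's (x, y) order; the mask
+            # creation in sarvey_mask later reads them back in exactly this order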
+ coord = [[float(way.nodes[i].lon), float(way.nodes[i].lat)] for i in range(len(way.nodes))] + multi_line_list.append(coord) + + # Merge all road segments + merged_road = list(ops.linemerge(multi_line_list).geoms) + gdf = gpd.GeoDataFrame({"geometry": gpd.GeoSeries(merged_road)}) + # gdf = gdf.set_crs(crs=utm_crs) # set appropriate CRS + # todo: add attributes if required + + # todo: check ending of output file name + gdf.to_file(join(work_dir, out_file_name)) + logger.info(msg="OSM download finished.") + + +def main(iargs=None): + """Download openstreetmap data and store to file.""" + # check input + parser = create_parser() + inps = parser.parse_args(args=iargs) + + # initiate logger + logging_level = logging.getLevelName('DEBUG') + + log_format = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') + logger = logging.getLogger(__name__) + + current_datetime = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime()) + log_filename = f"sarvey_osm_log_{current_datetime}.txt" + if not os.path.exists(os.path.join(os.getcwd(), "logfiles")): + os.mkdir(os.path.join(os.getcwd(), "logfiles")) + file_handler = logging.FileHandler(filename=os.path.join(os.getcwd(), "logfiles", log_filename)) + file_handler.setFormatter(log_format) + logger.addHandler(file_handler) + + console_handler = logging.StreamHandler(sys.stdout) + console_handler.setFormatter(log_format) + logger.addHandler(console_handler) + logger.setLevel(logging_level) + + if inps.work_dir is None: + work_dir = os.getcwd() + else: + work_dir = inps.work_dir + if not os.path.exists(path=work_dir): + logger.info(msg='create output folder: ' + work_dir) + os.mkdir(path=work_dir) + logger.info(msg='working directory: {}'.format(work_dir)) + + downloadOSM( + railway=inps.railway, + highway=inps.highway, + bridge=inps.bridge, + work_dir=work_dir, + out_file_name=inps.out_file_name, + logger=logger, + geom_file=inps.geom_file + ) + + +if __name__ == '__main__': + main() diff --git a/sarvey/sarvey_plot.py b/sarvey/sarvey_plot.py new file mode 100755 index 0000000..96e20b1 --- /dev/null +++ b/sarvey/sarvey_plot.py @@ -0,0 +1,496 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. 
+# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + +"""Plot module for SARvey.""" +import argparse +import json +import time +import os +from os.path import join, basename, dirname +from json import JSONDecodeError +import matplotlib +import matplotlib.pyplot as plt +from matplotlib import colormaps +import numpy as np +import logging +from logging import Logger +import sys + +from mintpy.utils import ptime +from mintpy.objects.colors import ColormapExt +from mintpy.utils.plot import auto_flip_direction + +from sarvey.ifg_network import IfgNetwork +from sarvey.objects import Points, AmplitudeImage, BaseStack +from sarvey import console +from sarvey import viewer +from sarvey.config import Config +import sarvey.utils as ut + +try: + matplotlib.use('QtAgg') +except ImportError as e: + print(e) + +EXAMPLE = """Example: + sarvey_plot outputs/coh60_ts.h5 -t # plot average velocity and time series + sarvey_plot outputs/coh80_ts.h5 -m -a # plot velocity map and DEM error interactively + sarvey_plot outputs/coh80_ts.h5 -r -n 0 5 # plot residuals for image 0 to 5 + sarvey_plot outputs/coh80_ifg_wr.h5 -p -n 0 1 -a # plot wrapped phase of final point selection for interferogram 0 + sarvey_plot outputs/p1_ifg_wr.h5 -p -n 0 1 -a # plot wrapped phase of the first order network + sarvey_plot -i -a outputs/ifg_stack.h5 # interactively plot interferograms + sarvey_plot -i outputs/ifg_stack.h5 # store interferograms as png files + [...] +""" + + +def plotMap(*, obj_name: str, save_path: str, interactive: bool = False, path_inputs: str, logger: Logger): + """Plot results from sarvey as map in radar coordinates. + + Plot the velocity map, DEM error, squared sum of residuals, temporal coherence and spatiotemporal consistency. + + Parameters + ---------- + obj_name : str + Path to the Points object file. + save_path : str + Path to the directory where the figures are saved. + interactive : bool + If True, the plots will be shown interactively. + path_inputs : str + Path to the inputs directory containing slcStack.h5 and geometryRadar.h5. + logger : Logger + Logger object. 
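+
+    Examples
+    --------
+    A minimal sketch; the file paths are placeholders and not taken from the surrounding code:
+
+    >>> plotMap(obj_name="outputs/coh80_ts.h5", save_path="outputs/pic/coh80", interactive=False,
+    ...         path_inputs="inputs/", logger=logger)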
+ """ + console.showLogoSARvey(logger=logger, step="Plot map") + + scatter_size = 1 + + point_obj = Points(file_path=obj_name, logger=logger) + point_obj.open(path_inputs=path_inputs) + + bmap_obj = AmplitudeImage(file_path=join(dirname(obj_name), "background_map.h5")) + vel, demerr, _, coherence, omega, v_hat = ut.estimateParameters(obj=point_obj, ifg_space=False) + + ax = bmap_obj.plot(logger=logger) + sc = ax.scatter(point_obj.coord_xy[:, 1], point_obj.coord_xy[:, 0], c=demerr, s=scatter_size, + cmap=colormaps["jet_r"]) + plt.colorbar(sc, label="[m]", pad=0.03, shrink=0.5) + plt.title("DEM error") + plt.ylabel('Azimuth') + plt.xlabel('Range') + plt.tight_layout() + plt.gcf().savefig(join(save_path, "map_dem_error.png"), dpi=300) + if interactive: + plt.show() + else: + plt.close(plt.gcf()) + + ax = bmap_obj.plot(logger=logger) + sc = ax.scatter(point_obj.coord_xy[:, 1], point_obj.coord_xy[:, 0], c=omega, s=scatter_size, + cmap=colormaps["autumn_r"]) + plt.colorbar(sc, label="", pad=0.03, shrink=0.5) + plt.title("Squared sum of residuals") + plt.ylabel('Azimuth') + plt.xlabel('Range') + plt.tight_layout() + plt.gcf().savefig(join(save_path, "map_squared_sum_of_residuals.png"), dpi=300) + if interactive: + plt.show() + else: + plt.close(plt.gcf()) + + v_range = np.max(np.abs(vel * 100)) + + ax = bmap_obj.plot(logger=logger) + sc = ax.scatter(point_obj.coord_xy[:, 1], point_obj.coord_xy[:, 0], c=vel * 100, s=scatter_size, + cmap=colormaps["jet_r"], + vmin=-v_range, vmax=v_range) + plt.colorbar(sc, label="[cm / year]", pad=0.03, shrink=0.5) + plt.title("Mean Velocity") + plt.ylabel('Azimuth') + plt.xlabel('Range') + plt.tight_layout() + plt.gcf().savefig(join(save_path, "map_velocity.png"), dpi=300) + if interactive: + plt.show() + else: + plt.close(plt.gcf()) + + ax = bmap_obj.plot(logger=logger) + sc = ax.scatter(point_obj.coord_xy[:, 1], point_obj.coord_xy[:, 0], c=coherence, vmin=0, vmax=1, s=scatter_size, + cmap=colormaps["autumn"]) + plt.colorbar(sc, label="[-]", pad=0.03, shrink=0.5) + plt.title("Temporal coherence") + plt.ylabel('Azimuth') + plt.xlabel('Range') + plt.tight_layout() + plt.gcf().savefig(join(save_path, "map_coherence.png"), dpi=300) + if interactive: + plt.show() + else: + plt.close(plt.gcf()) + + stc = ut.spatiotemporalConsistency(coord_utm=point_obj.coord_utm, phase=point_obj.phase, + wavelength=point_obj.wavelength, + min_dist=50, max_dist=np.inf, knn=40) + + ax = bmap_obj.plot(logger=logger) + sc = ax.scatter(point_obj.coord_xy[:, 1], point_obj.coord_xy[:, 0], c=stc * 100, s=scatter_size, + cmap=colormaps["autumn_r"]) + plt.colorbar(sc, label="[cm]", pad=0.03, shrink=0.5) + plt.title("Spatiotemporal consistency") + plt.ylabel('Azimuth') + plt.xlabel('Range') + plt.tight_layout() + plt.gcf().savefig(join(save_path, "map_spatiotemporal_consistency.png"), dpi=300) + if interactive: + plt.show() + else: + plt.close(plt.gcf()) + + +def plotTS(*, obj_name: str, path_inputs: str, logger: Logger): + """Plot the derived displacement time series. + + Parameters + ---------- + obj_name : str + Path to the Points object file. + path_inputs : str + Path to the inputs directory containing slcStack.h5 and geometryRadar.h5. + logger : Logger + Logger object. + """ + console.showLogoSARvey(logger=logger, step="Plot time series") + + point_obj = Points(file_path=obj_name, logger=logger) + point_obj.open(path_inputs=path_inputs) + if point_obj.phase.shape[1] == point_obj.ifg_net_obj.num_ifgs: + logger.warning(msg="File contains ifg phase and not phase time series. 
Cannot display.") + else: + viewer.TimeSeriesViewer(point_obj=point_obj, logger=logger, path_inputs=path_inputs) + plt.show() + + +def plotPhase(*, obj_name: str, save_path: str, image_range: tuple, interactive: bool = False, path_inputs: str, + logger: Logger): + """Plot the phase of a Points object file in geographic coordinates (WGS84). + + Plot the phase of each interferogram or each image depending on the domain of the input file. + + Parameters + ---------- + obj_name : str + Path to the Points object file. + save_path : str + Path to the directory where the figures are saved. + image_range : tuple + Range of images to be plotted. + interactive : bool + If True, the plots will be shown interactively. + path_inputs : str + Path to the inputs directory containing slcStack.h5 and geometryRadar.h5. + logger : Logger + Logger object. + """ + console.showLogoSARvey(logger=logger, step="Plot phase") + + point_obj = Points(file_path=obj_name, logger=logger) + point_obj.open(path_inputs=path_inputs) + + if image_range is None: + viewer.plotIfgs(phase=point_obj.phase, coord=point_obj.coord_lalo, ttl="Phase") + else: + viewer.plotIfgs(phase=point_obj.phase[:, image_range[0]:image_range[1]], coord=point_obj.coord_lalo, + ttl="Phase") + + plt.gcf().savefig(join(save_path, "{}_phase.png".format(basename(obj_name)[:-3])), dpi=300) + + if interactive: + plt.show() + + +def plotResidualPhase(*, obj_name: str, save_path: str, image_range: tuple, interactive: bool = False, + path_inputs: str, logger: Logger): + """Plot the residual phase of a Points object file in geographic coordinates (WGS84). + + The residuals are derived by substracting the phase contributions based on the estimated parameters. + + Parameters + ---------- + obj_name : str + Path to the Points object file. + save_path : str + Path to the directory where the figures are saved. + image_range : tuple + Range of images to be plotted. + interactive : bool + If True, the plots will be shown interactively. + path_inputs : str + Path to the inputs directory containing slcStack.h5 and geometryRadar.h5. + logger : Logger + Logger object. + """ + console.showLogoSARvey(logger=logger, step="Plot residual phase") + + point_obj = Points(file_path=obj_name, logger=logger) + point_obj.open(path_inputs=path_inputs) + + if point_obj.phase.shape[1] == point_obj.ifg_net_obj.num_ifgs: + v_hat = ut.estimateParameters(obj=point_obj, ifg_space=True)[-1] + else: + v_hat = ut.estimateParameters(obj=point_obj, ifg_space=False)[-1] + + if image_range is None: + viewer.plotIfgs(phase=v_hat, coord=point_obj.coord_lalo, ttl="Residual phase") + else: + viewer.plotIfgs(phase=v_hat[:, image_range[0]:image_range[1]], coord=point_obj.coord_lalo, ttl="Residual phase") + + plt.gcf().savefig(join(save_path, "{}_residual_phase.png".format(basename(obj_name)[:-3])), dpi=300) + + if interactive: + plt.show() + + +def plotAllIfgs(*, obj_name: str, save_path: str, interactive: bool = False, logger: Logger): + """Plot all interferograms inside the ifg_stack.h5 file. + + If interactivity is enabled, the plots are shown and figures are not saved. Otherwise, the figures are + not shown but saved as png files. + If the ifg_network.h5 file is available, the baselines are displayed in the title of each interferogram. + + Parameters + ---------- + obj_name : str + Path to the ifg_stack.h5 file. + save_path : str + Path to the directory where the figures are saved. + interactive : bool + If True, the plots will be shown interactively. + logger : Logger + Logger object. 
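+
+    Examples
+    --------
+    A minimal sketch; the paths are placeholders and not taken from the surrounding code:
+
+    >>> plotAllIfgs(obj_name="outputs/ifg_stack.h5", save_path="outputs/pic/ifgs", interactive=False,
+    ...             logger=logger)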
+ """ + console.showLogoSARvey(logger=logger, step="Plot interferograms") + + if obj_name.split("/")[-1] != "ifg_stack.h5": + logger.warning(msg="Cannot plot ifgs from {}".format(obj_name)) + return + + ifg_stack_obj = BaseStack(file=obj_name, logger=logger) + ifgs = ifg_stack_obj.read(dataset_name="ifgs") + + path_ifg_net = join(dirname(obj_name), "ifg_network.h5") + if os.path.exists(path_ifg_net): + ifg_net_obj = IfgNetwork() + ifg_net_obj.open(path=path_ifg_net) + else: + logger.warning(msg="'ifg_network.h5' is not available in the same directory as 'ifg_stack.h5'. " + "No baseline information available.") + ifg_net_obj = None + + num_ifgs = ifgs.shape[2] + + prog_bar = ptime.progressBar(maxValue=num_ifgs) + start_time = time.time() + logger.info(msg="plot and save figures of ifgs.") + for i in range(num_ifgs): + fig = plt.figure(figsize=[15, 5]) + ax = fig.add_subplot() + ifg = np.angle(ifgs[:, :, i]) + ifg[ifg == 0] = np.nan + im = plt.imshow(ifg, cmap=ColormapExt('cmy').colormap, interpolation='nearest', vmin=-np.pi, vmax=np.pi) + auto_flip_direction(ifg_stack_obj.metadata, ax=ax, print_msg=False) + ax.set_xlabel("Range") + ax.set_ylabel("Azimuth") + plt.colorbar(im, ax=ax, label="[rad]", pad=0.03, shrink=0.5) + if ifg_net_obj is not None: + if ifg_net_obj.dates is not None: + date1 = ifg_net_obj.dates[ifg_net_obj.ifg_list[i][0]] + date2 = ifg_net_obj.dates[ifg_net_obj.ifg_list[i][1]] + ttl = "{date1} - {date2}\nbaselines: {tbase} days, {pbase} m".format( + date1=date1, + date2=date2, + tbase=int(np.round(ifg_net_obj.tbase_ifg[i] * 365.25)), + pbase=int(np.round(ifg_net_obj.pbase_ifg[i])) + ) + else: + ttl = "baselines: {tbase} days, {pbase} m".format( + tbase=int(np.round(ifg_net_obj.tbase_ifg[i] * 365.25)), + pbase=int(np.round(ifg_net_obj.pbase_ifg[i])) + ) + plt.title(ttl) + plt.tight_layout() + if interactive: + plt.show() + else: + fig.savefig(join(save_path, "{}_ifg".format(i)), dpi=300) + plt.close(fig) + prog_bar.update(value=i + 1, every=1, suffix='{}/{} ifgs'.format(i + 1, num_ifgs)) + prog_bar.close() + m, s = divmod(time.time() - start_time, 60) + logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.'.format(m, s)) + + +def createParser(): + """Create_parser.""" + parser = argparse.ArgumentParser( + description='Plot results from MTI\n\n', + formatter_class=argparse.RawTextHelpFormatter, + epilog=EXAMPLE) + + parser.add_argument('input_file', help='Path to the input file') + + parser.add_argument('-t', '--plot-ts', default=False, dest="plotTS", action="store_true", + help='Creates an interactive time series viewer.') + + parser.add_argument('-p', '--plot-phase', default=False, dest="plotPhase", action="store_true", + help='Plots the phase.') + + parser.add_argument('-r', '--plot-residual-phase', default=False, dest="plot_res_phase", action="store_true", + help='Plots the residual phase after substracting known components.') + + parser.add_argument('-m', '--plot-map', default=False, dest="plotMap", action="store_true", + help='Plots the velocity map and DEM error.') + + parser.add_argument('-i', '--plot-all-ifgs', default=False, dest="plotAllIfgs", action="store_true", + help='Plots all ifgs.') + + parser.add_argument('-n', '--image_range', default=None, dest="image_range", nargs=2, type=int, + help='Reduces the number of phase images to the given range. Has no effect on -m and -t. Tuple.' 
+ '(default: all images).') + + parser.add_argument('-a', '--interactive', default=False, dest="interactive", action="store_true", + help='Enables interactive visualisation of figures. Is always ON for plot-ts.') + + parser.add_argument('-w', '--workdir', default=None, dest="workdir", + help='Working directory (default: current directory).') + + return parser + + +def main(iargs=None): + """Run Main.""" + parser = createParser() + args = parser.parse_args(iargs) + + if args.workdir is None: + args.workdir = os.getcwd() + + # initiate logger + logging_level = logging.getLevelName('DEBUG') + + log_format = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s') + logger = logging.getLogger(__name__) + + current_datetime = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime()) + log_filename = f"sarvey_plot_log_{current_datetime}.txt" + if not os.path.exists(os.path.join(os.getcwd(), "logfiles")): + os.mkdir(os.path.join(os.getcwd(), "logfiles")) + file_handler = logging.FileHandler(filename=os.path.join(os.getcwd(), "logfiles", log_filename)) + file_handler.setFormatter(log_format) + logger.addHandler(file_handler) + + console_handler = logging.StreamHandler(sys.stdout) + console_handler.setFormatter(log_format) + logger.addHandler(console_handler) + logger.setLevel(logging_level) + + logger.info("Working directory: {}".format(args.workdir)) + args.input_file = join(args.workdir, args.input_file) + + config_file_path = os.path.abspath(join(args.workdir, dirname(args.input_file), "config.json")) + + if not os.path.exists(config_file_path): + # check if any config file is available in upper directory (backward compatibility) + files = np.array([os.path.abspath(f) for f in os.listdir(join(dirname(config_file_path), "..")) + if os.path.isfile(f)]) + potential_configs = np.array([(basename(f).split(".")[-1] == "json") and ("config" in basename(f)) + for f in files]) + if potential_configs[potential_configs].shape[0] == 0: + raise FileNotFoundError(f"Backup configuration file not found: {config_file_path}!") + else: + logger.warning(msg=f"Backup configuration file not found: {config_file_path}!") + logger.warning(msg=f"Other configuration files automatically detected: {files[potential_configs]}!") + logger.warning(msg=f"Automatically selected configuration file: {files[potential_configs][0]}!") + config_file_path = files[potential_configs][0] + + try: + with open(config_file_path) as config_fp: + config_dict = json.load(config_fp) + config = Config(**config_dict) + except JSONDecodeError as err: + raise IOError(f'Failed to load the configuration json file => {err}') + + folder_name = "p1" if "p1" in basename(args.input_file) else basename(args.input_file)[:5] + folder_name = "ifgs" if "ifg_stack" in basename(args.input_file) else folder_name + + save_path = join(dirname(args.input_file), "pic", folder_name) + if not os.path.exists(save_path): + if not args.plotTS: # not needed for interactive time series + os.mkdir(save_path) + + selected = False + if args.plotTS: + # todo: read path_inputs from config file in same directory as file to be able to load height from geometryRadar + plotTS(obj_name=args.input_file, path_inputs=config.data_directories.path_inputs, logger=logger) + selected = True + + if args.plotPhase: + plotPhase(obj_name=args.input_file, save_path=save_path, image_range=args.image_range, + interactive=args.interactive, path_inputs=config.data_directories.path_inputs, logger=logger) + selected = True + + if args.plot_res_phase: + plotResidualPhase(obj_name=args.input_file, 
save_path=save_path, image_range=args.image_range, + interactive=args.interactive, path_inputs=config.data_directories.path_inputs, logger=logger) + selected = True + + if args.plotMap: + plotMap(obj_name=args.input_file, save_path=save_path, interactive=args.interactive, + path_inputs=config.data_directories.path_inputs, logger=logger) + selected = True + + if args.plotAllIfgs: + plotAllIfgs(obj_name=args.input_file, save_path=save_path, interactive=args.interactive, logger=logger) + selected = True + + if not selected: + logger.info(msg="No action chosen.") + + # close log-file to avoid problems with deleting the files + if logger.hasHandlers(): + for handler in logger.handlers[:]: + logger.removeHandler(handler) + handler.flush() + handler.close() + + +if __name__ == '__main__': + main() diff --git a/sarvey/triangulation.py b/sarvey/triangulation.py new file mode 100644 index 0000000..fc57ea2 --- /dev/null +++ b/sarvey/triangulation.py @@ -0,0 +1,144 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + +"""Triangulation module for SARvey.""" +import time +from typing import Optional +import numpy as np +from scipy.spatial import Delaunay, distance_matrix, KDTree +from scipy.sparse import lil_matrix, csr_matrix +from scipy.sparse.csgraph import connected_components +from logging import Logger + +from mintpy.utils import ptime + + +class PointNetworkTriangulation: + """PointNetworkTriangulation.""" + + def __init__(self, *, coord_xy: np.ndarray, coord_utmxy: Optional[np.ndarray], logger: Logger): + """Triangulate points in space based on distance. + + Parameters + ---------- + coord_xy: np.ndarray + Radar coordinates of the points. + coord_utmxy: np.ndarray + UTM coordinates of the points. + logger: Logger + Logging handler. + """ + self.coord_xy = coord_xy + num_points = self.coord_xy.shape[0] + self.logger = logger + + # create sparse matrix with dim (num_points x num_points), add 1 if connected. + # create network afterwards once. reduces time. 
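+        # a boolean LIL matrix is cheap to fill edge by edge; getArcsFromAdjMat() later symmetrizes
+        # it and extracts each arc only once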
+ self.adj_mat = lil_matrix((num_points, num_points), dtype=np.bool_) + + if coord_utmxy is not None: + logger.info(msg="create distance matrix between all points...") + self.dist_mat = distance_matrix(coord_utmxy, coord_utmxy) + # todo: check out alternatives: + # scipy.spatial.KDTree.sparse_distance_matrix + else: # if only global delaunay shall be computed without memory issues + self.dist_mat = None + + def getArcsFromAdjMat(self): + """Convert the adjacency matrix into a list of arcs. + + Returns + ------- + arcs: np.ndarray + List of arcs with indices of the start and end point. + """ + a = self.adj_mat.copy() + # copy entries from lower to upper triangular matrix + b = (a + a.T) + # remove entries from diagonal and lower part of matrix + arc_tmp = [[i, b.indices[b.indptr[i]:b.indptr[i + 1]]] for i in range(b.shape[0])] + arc_tmp = [[s, e_list[np.where(e_list < s)[0]]] for s, e_list in arc_tmp] + + arcs = list() + for s, e_list in arc_tmp: + for e in e_list: + arcs.append([s, e]) + arcs = np.array(arcs) + return arcs + + def removeLongArcs(self, *, max_dist: float): + """Remove arcs from network which are longer than given threshold. + + Parameter + --------- + max_dist: float + distance threshold on arc length in [m] + """ + mask = self.dist_mat > max_dist + self.adj_mat[mask] = False + + def isConnected(self): + """Check if the network is connected.""" + n_components = connected_components(csgraph=csr_matrix(self.adj_mat), directed=False, return_labels=False) + if n_components == 1: + return True + else: + return False + + def triangulateGlobal(self): + """Connect the points with a GLOBAL delaunay triangulation.""" + self.logger.info(msg="Triangulate points with global delaunay.") + + network = Delaunay(points=self.coord_xy) + for p1, p2, p3 in network.simplices: + self.adj_mat[p1, p2] = True + self.adj_mat[p1, p3] = True + self.adj_mat[p2, p3] = True + + def triangulateKnn(self, *, k: int): + """Connect points to the k-nearest neighbours.""" + self.logger.info(msg="Triangulate points with {}-nearest neighbours.".format(k)) + num_points = self.coord_xy.shape[0] + prog_bar = ptime.progressBar(maxValue=num_points) + start_time = time.time() + count = 0 + tree = KDTree(data=self.coord_xy) + + if k > num_points: + k = num_points + self.logger.info(msg="k > number of points. Connect all points with each other.") + for p1 in range(num_points): + idx = tree.query(self.coord_xy[p1, :], k)[1] + self.adj_mat[p1, idx] = True + count += 1 + prog_bar.update(value=count + 1, every=np.int16(num_points / 250), + suffix='{}/{} points triangulated'.format(count + 1, num_points + 1)) + prog_bar.close() + m, s = divmod(time.time() - start_time, 60) + self.logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.'.format(m, s)) diff --git a/sarvey/unwrapping.py b/sarvey/unwrapping.py new file mode 100644 index 0000000..ca659d2 --- /dev/null +++ b/sarvey/unwrapping.py @@ -0,0 +1,1041 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 
+# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + +"""Unwrapping module for SARvey.""" +import multiprocessing +from os.path import join, dirname +import time +import matplotlib.pyplot as plt +import numpy as np +from kamui import unwrap_arbitrary +from scipy.sparse import csr_matrix +from scipy.sparse.csgraph import structural_rank +from scipy.sparse.linalg import lsqr +from scipy.optimize import minimize +from logging import Logger + +from mintpy.utils import ptime + +import sarvey.utils as ut +from sarvey.ifg_network import IfgNetwork +from sarvey.objects import Network, NetworkParameter, AmplitudeImage + + +def objFuncTemporalCoherence(x, *args): + """Compute temporal coherence from parameters and phase. To be used as objective function for optimization. + + Parameters + ---------- + x: np.ndarray + Search space for the DEM error in a 1D grid. + args: tuple + Additional arguments: (design_mat, obs_phase, scale_vel, scale_demerr). + + Returns + ------- + 1 - gamma: float + """ + (design_mat, obs_phase, scale_vel, scale_demerr) = args + + # equalize the gradients in both directions + x[0] *= scale_demerr + x[1] *= scale_vel + + pred_phase = np.matmul(design_mat, x) + res = (obs_phase - pred_phase.T).ravel() + gamma = np.abs(np.mean(np.exp(1j * res))) + return 1 - gamma + + +def gridSearchTemporalCoherence(*, demerr_grid: np.ndarray, vel_grid: np.ndarray, design_mat: np.ndarray, + obs_phase: np.ndarray): + """Grid search which maximizes the temporal coherence as the objective function. + + Parameters + ---------- + demerr_grid: np.ndarray + Search space for the DEM error in a 2D grid. + vel_grid: np.ndarray + Search space for the velocity in a 2D grid. + design_mat: np.ndarray + Design matrix for estimating parameters from arc phase. + obs_phase: np.ndarray + Observed phase of the arc. + + Returns + ------- + demerr: float + estimated DEM error. + vel: float + estimated velocity. + gamma: float + estimated temporal coherence. 
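+
+    Examples
+    --------
+    A minimal sketch; the grid bounds and the 'design_mat'/'obs_phase' arrays are illustrative
+    placeholders, not values prescribed by SARvey:
+
+    >>> demerr_grid, vel_grid = np.meshgrid(np.linspace(-20, 20, 40), np.linspace(-0.05, 0.05, 40))
+    >>> demerr, vel, gamma = gridSearchTemporalCoherence(demerr_grid=demerr_grid, vel_grid=vel_grid,
+    ...                                                  design_mat=design_mat, obs_phase=obs_phase)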
+ """ + demerr_grid_flat = demerr_grid.flatten() + vel_grid_flat = vel_grid.flatten() + gamma_flat = np.array( + [1 - objFuncTemporalCoherence(np.array([demerr_grid_flat[i], vel_grid_flat[i]]), + design_mat, obs_phase, 1, 1) + for i in range(demerr_grid_flat.shape[0])]) + gamma = gamma_flat.reshape(demerr_grid.shape) + idx_max_gamma = np.argmax(gamma_flat) + + # return demerr_grid_flat[idx_max_gamma], vel_grid_flat[idx_max_gamma], gamma_flat[idx_max_gamma] + return demerr_grid_flat[idx_max_gamma], vel_grid_flat[idx_max_gamma], gamma + + +def findOptimum(*, obs_phase: np.ndarray, design_mat: np.ndarray, val_range: np.ndarray): + """Find optimal value within a one dimensional search space that fits to the observed phase. + + Parameters + ---------- + obs_phase: np.ndarray + Observed phase of the arc. + design_mat: np.ndarray + Design matrix for estimating parameters from arc phase. + val_range: np.ndarray + Range of possible values for the solution. Can be either for DEM error or velocity. + + Returns + ------- + opt_val: scipy.optimize.minimize return value + gamma: float + pred_phase: np.ndarray + """ + pred_phase = design_mat[:, np.newaxis] * val_range[np.newaxis, :] # broadcasting + if len(obs_phase.shape) == 2: + # step densification + res = obs_phase[:, np.newaxis, :] - pred_phase.T + res = np.moveaxis(res, 0, 1) + res = res.reshape((pred_phase.shape[1], -1)) # combine residuals from all arcs + else: + # step consistency check + res = obs_phase - pred_phase.T + + gamma = np.abs(np.mean(np.exp(1j * res), axis=1)) + max_idx = np.argmax(gamma) + opt_val = val_range[max_idx] + return opt_val, gamma[max_idx], pred_phase[:, max_idx] + + +def oneDimSearchTemporalCoherence(*, demerr_range: np.ndarray, vel_range: np.ndarray, obs_phase: np.ndarray, + design_mat: np.ndarray): + """One dimensional search for maximum temporal coherence that fits the observed arc phase. + + Parameters + ---------- + demerr_range: np.ndarray + Search space for the DEM error in a 1D grid. + vel_range: np.ndarray + Search space for the velocity in a 1D grid. + design_mat: np.ndarray + Design matrix for estimating parameters from arc phase. + obs_phase: np.ndarray + Observed phase of the arc. 
+ + Returns + ------- + demerr: float + vel: float + gamma: float + """ + demerr, gamma_demerr, pred_phase_demerr = findOptimum( + obs_phase=obs_phase, + design_mat=design_mat[:, 0], + val_range=demerr_range + ) + + vel, gamma_vel, pred_phase_vel = findOptimum( + obs_phase=obs_phase, + design_mat=design_mat[:, 1], + val_range=vel_range + ) + + if gamma_vel > gamma_demerr: + demerr, gamma_demerr, pred_phase_demerr = findOptimum( + obs_phase=obs_phase - pred_phase_vel, + design_mat=design_mat[:, 0], + val_range=demerr_range + ) + vel, gamma_vel, pred_phase_vel = findOptimum( + obs_phase=obs_phase - pred_phase_demerr, + design_mat=design_mat[:, 1], + val_range=vel_range + ) + else: + vel, gamma_vel, pred_phase_vel = findOptimum( + obs_phase=obs_phase - pred_phase_demerr, + design_mat=design_mat[:, 1], + val_range=vel_range + ) + demerr, gamma_demerr, pred_phase_demerr = findOptimum( + obs_phase=obs_phase - pred_phase_vel, + design_mat=design_mat[:, 0], + val_range=demerr_range + ) + + # improve initial estimate with gradient descent approach + scale_demerr = demerr_range.max() + scale_vel = vel_range.max() + + demerr, vel, gamma = gradientSearchTemporalCoherence( + scale_vel=scale_vel, + scale_demerr=scale_demerr, + obs_phase=obs_phase, + design_mat=design_mat, + x0=np.array([demerr / scale_demerr, + vel / scale_vel]).T + ) + + pred_phase = np.matmul(design_mat, np.array([demerr, vel])) + res = (obs_phase - pred_phase.T).ravel() + gamma = np.abs(np.mean(np.exp(1j * res))) + return demerr, vel, gamma + + +def gradientSearchTemporalCoherence(*, scale_vel: float, scale_demerr: float, obs_phase: np.ndarray, + design_mat: np.ndarray, x0: np.ndarray): + """GradientSearchTemporalCoherence. + + Parameters + ---------- + scale_demerr: float + Scaling factor for DEM error to equalize the axis of the search space. + scale_vel: float + Scaling factor for velocity to equalize the axis of the search space. + design_mat: np.ndarray + Design matrix for estimating parameters from arc phase. + obs_phase: np.ndarray + Observed phase of the arc. + x0: np.ndarray + Initial values for optimization. + + Returns + ------- + demerr: float + vel: float + gamma: float + """ + opt_res = minimize( + objFuncTemporalCoherence, + x0, + args=(design_mat, obs_phase, scale_vel, scale_demerr), + bounds=((-1, 1), (-1, 1)), + method='L-BFGS-B' + ) + gamma = 1 - opt_res.fun + demerr = opt_res.x[0] * scale_demerr + vel = opt_res.x[1] * scale_vel + return demerr, vel, gamma + + +def launchAmbiguityFunctionSearch(parameters: tuple): + """Wrap for launching ambiguity function for temporal unwrapping in parallel. + + Parameters + ---------- + parameters: tuple + Arguments for temporal unwrapping in parallel. 
+ + Returns + ------- + arc_idx_range: np.ndarray + demerr: np.ndarray + vel: np.ndarray + gamma: np.ndarray + """ + (arc_idx_range, num_arcs, phase, slant_range, loc_inc, ifg_net_obj, wavelength, velocity_bound, demerr_bound, + num_samples) = parameters + + demerr = np.zeros((num_arcs, 1), dtype=np.float32) + vel = np.zeros((num_arcs, 1), dtype=np.float32) + gamma = np.zeros((num_arcs, 1), dtype=np.float32) + + design_mat = np.zeros((ifg_net_obj.num_ifgs, 2), dtype=np.float32) + + demerr_range = np.linspace(-demerr_bound, demerr_bound, num_samples) + vel_range = np.linspace(-velocity_bound, velocity_bound, num_samples) + + # prog_bar = ptime.progressBar(maxValue=num_arcs) + + factor = 4 * np.pi / wavelength + + for k in range(num_arcs): + design_mat[:, 0] = factor * ifg_net_obj.pbase_ifg / (slant_range[k] * np.sin(loc_inc[k])) + design_mat[:, 1] = factor * ifg_net_obj.tbase_ifg + + demerr[k], vel[k], gamma[k] = oneDimSearchTemporalCoherence( + demerr_range=demerr_range, + vel_range=vel_range, + obs_phase=phase[k, :], + design_mat=design_mat + ) + + return arc_idx_range, demerr, vel, gamma + + +def temporalUnwrapping(*, ifg_net_obj: IfgNetwork, net_obj: Network, wavelength: float, velocity_bound: float, + demerr_bound: float, num_samples: int, num_cores: int = 1, logger: Logger) -> \ + tuple[np.ndarray, np.ndarray, np.ndarray]: + """Solve ambiguities for every arc in spatial Network object. + + Parameters + ---------- + ifg_net_obj: IfgNetwork + The IfgNetwork object. + net_obj: Network + The Network object. + wavelength: float + The wavelength. + velocity_bound: float + The velocity bound. + demerr_bound: float + The DEM error bound. + num_samples: int + The number of samples for the search space. + num_cores: int + Number of cores to be used. Default is 1. + logger: Logger + Logging handler. 
+ + Returns + ------- + demerr: np.ndarray + vel: np.ndarray + gamma: np.ndarray + """ + msg = "#" * 10 + msg += " TEMPORAL UNWRAPPING: AMBIGUITY FUNCTION " + msg += "#" * 10 + logger.info(msg=msg) + + start_time = time.time() + + if num_cores == 1: + args = ( + np.arange(net_obj.num_arcs), net_obj.num_arcs, net_obj.phase, + net_obj.slant_range, net_obj.loc_inc, ifg_net_obj, wavelength, velocity_bound, demerr_bound, num_samples) + arc_idx_range, demerr, vel, gamma = launchAmbiguityFunctionSearch(parameters=args) + else: + logger.info(msg="start parallel processing with {} cores.".format(num_cores)) + pool = multiprocessing.Pool(processes=num_cores) + + demerr = np.zeros((net_obj.num_arcs, 1), dtype=np.float32) + vel = np.zeros((net_obj.num_arcs, 1), dtype=np.float32) + gamma = np.zeros((net_obj.num_arcs, 1), dtype=np.float32) + + num_cores = net_obj.num_arcs if num_cores > net_obj.num_arcs else num_cores # avoids having more samples then + # cores + idx = ut.splitDatasetForParallelProcessing(num_samples=net_obj.num_arcs, num_cores=num_cores) + + args = [( + idx_range, + idx_range.shape[0], + net_obj.phase[idx_range, :], + net_obj.slant_range[idx_range], + net_obj.loc_inc[idx_range], + ifg_net_obj, + wavelength, + velocity_bound, + demerr_bound, + num_samples) for idx_range in idx] + + results = pool.map(func=launchAmbiguityFunctionSearch, iterable=args) + + # retrieve results + for i, demerr_i, vel_i, gamma_i in results: + demerr[i] = demerr_i + vel[i] = vel_i + gamma[i] = gamma_i + + m, s = divmod(time.time() - start_time, 60) + logger.info(msg="Finished temporal unwrapping.") + logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.'.format(m, s)) + return demerr, vel, gamma + + +def launchSpatialUnwrapping(parameters: tuple) -> tuple[np.ndarray, np.ndarray]: + """LaunchSpatialUnwrapping. + + Parameters + ---------- + parameters: tuple + idx_range, num_ifgs, num_points, edges, phase + + Returns + ------- + idx_range: np.ndarray + unw_phase: np.ndarray + """ + # Unpack the parameters + (idx_range, num_ifgs, num_points, method, edges, phase) = parameters + + prog_bar = ptime.progressBar(maxValue=num_ifgs) + + unw_phase = np.zeros((num_points, num_ifgs), dtype=np.float32) + + # Perform the PUMA phase unwrapping + for i in range(num_ifgs): + if method == "puma": + unw_phase[:, i] = unwrap_arbitrary( + psi=phase[:, i], + edges=edges, + simplices=None, + method="gc", + period=2*np.pi, + start_i=0, + p=0.2 + ) + else: + unw_phase[:, i] = unwrap_arbitrary( + psi=phase[:, i], + edges=edges, + simplices=None, # todo: compute simplices for ILP + method="ilp", + period=2*np.pi, + start_i=0, + ) + prog_bar.update(value=i + 1, every=1, + suffix='{}/{} ifgs unwrapped. '.format(i + 1, num_ifgs)) + + unw_phase = unw_phase - np.mean(unw_phase, axis=0) + return idx_range, unw_phase + + +def spatialUnwrapping(*, num_ifgs: int, num_points: int, phase: np.ndarray, edges: np.ndarray, method: str, + num_cores: int, logger: Logger): + """Spatial unwrapping of interferograms for a set of points. + + Parameters + ---------- + num_ifgs: int + Number of interferograms. + num_points: int + Number of points. + phase: np.ndarray + Phase of the interferograms at the points. + edges: np.ndarray + Edges/arcs of the graph. + method: str + Method for spatial unwrapping (puma or ilp). + num_cores: int + Number of cores to be used in multiprocessing. + logger: Logger + Logging handler. + + Returns + ------- + unw_phase: np.ndarray + Unwrapped phase of the interferograms at the points. 
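+
+    Notes
+    -----
+    Minimal usage sketch (hypothetical; it assumes that 'phase', 'edges' and a configured 'logger'
+    already exist, the variable names are only illustrative)::
+
+        unw_phase = spatialUnwrapping(num_ifgs=phase.shape[1], num_points=phase.shape[0],
+                                      phase=phase, edges=edges, method="puma",
+                                      num_cores=4, logger=logger)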
+ """ + msg = "#" * 10 + msg += f" SPATIAL UNWRAPPING: {method}" + msg += "#" * 10 + logger.info(msg=msg) + + start_time = time.time() + + if num_cores == 1: + parameters = ( + np.arange(num_ifgs), + num_ifgs, + num_points, + method, + edges, + phase + ) + idx_range, unw_phase = launchSpatialUnwrapping(parameters=parameters) + else: + logger.info(msg="start parallel processing with {} cores.".format(num_cores)) + pool = multiprocessing.Pool(processes=num_cores) + + unw_phase = np.zeros((num_points, num_ifgs), dtype=np.float32) + num_cores = num_ifgs if num_cores > num_ifgs else num_cores + # avoids having more samples than cores + idx = ut.splitDatasetForParallelProcessing(num_samples=num_ifgs, num_cores=num_cores) + + args = [( + idx_range, + idx_range.shape[0], + num_points, + method, + edges, + phase[:, idx_range]) for idx_range in idx] + results = pool.map(func=launchSpatialUnwrapping, iterable=args) + + # retrieve results + for i, phase in results: + unw_phase[:, i] = phase + + m, s = divmod(time.time() - start_time, 60) + logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.'.format(m, s)) + + return unw_phase + + +def spatialParameterIntegrationIterative(*, + val_arcs: np.ndarray, + all_arcs: np.ndarray, + coord_xy: np.ndarray, + all_weights: np.ndarray, + spatial_ref_idx: int = 0, + res_tol: float = 1e-3, + max_rm_fraction: float = 0.001, + logger: Logger): + """Unwrapping double-difference arc parameters spatially. + + The parameters at the arcs are integrated spatially to the points. The integration is done iteratively using + least-squares by removing the arcs with the highest residuals in each iteration. + The integration stops when the sum of the residuals is below a threshold. + Function is adopted from StaMPS software (Hooper et al., 2007). + + Parameters + ---------- + val_arcs: np.ndarray + Value at the arcs (e.g. DEM error, velocity). + all_arcs: np.ndarray + Arcs of the spatial network. + coord_xy: np.ndarray + Radar coordinates of the points in the spatial network. + all_weights: np.ndarray + Weights of the arcs (e.g. temporal coherence from temporal unwrapping) + spatial_ref_idx: int + Index of the spatial reference point (default = 0). Can be arbitrary. + res_tol: float + Threshold on the sum of the residual phase (default = 1e-3). Convergence criterion. + max_rm_fraction: float + Fraction of the arcs that are removed in each iteration (default = 0.001). + logger: Logger + Logging handler + + Returns + ------- + val_points: np.ndarray + Estimated parameters at the points resulting from the integration of the parameters at the arcs. + """ + all_arcs = np.array(all_arcs) + num_points = coord_xy.shape[0] + num_arcs = all_arcs.shape[0] + + # create design matrix + a = np.zeros((num_arcs, num_points)) + for i in range(num_arcs): + a[i, all_arcs[i][0]] = 1 + a[i, all_arcs[i][1]] = -1 + + # find the number of arcs per point + arcs_per_point = np.zeros(num_points, ) + + for i in range(num_points): + arcs_per_point[i] = np.where(a[:, i] != 0)[0].shape[0] + + # remove reference point from design matrix + all_a = csr_matrix(all_weights * np.delete(a, spatial_ref_idx, 1)) + + # don't even start if the network is not connected + if structural_rank(all_a) < all_a.shape[1]: + logger.exception(msg="Spatial point network is not connected. 
Phase cannot be unwrapped!") + raise Exception + + # set n_bad to maximum fraction of bad edges that can be removed + n_bad = np.ceil(num_arcs * max_rm_fraction).astype(np.int16) + + # initialize output + val_points = np.zeros((num_points,)) + points_idx = np.ones((num_points,), dtype=bool) + points_idx[spatial_ref_idx] = False + x_hat = np.zeros((num_points - 1,)) + + start_time = time.time() + + arcs = all_arcs + obv_vec = val_arcs.reshape(-1, ) * all_weights.reshape(-1, ) + a = all_a + weights = all_weights + num_arcs = obv_vec.size + + r = None + num_arcs_save = None + arcs_save = None + a_save = None + weights_save = None + obv_vec_save = None + i = 0 + while True: + if structural_rank(a) >= a.shape[1]: + x_hat[:] = lsqr(a, obv_vec)[0] + + # store the current version of variables, being able to go back to previous iteration if too many arcs + # removed + a_save = a + obv_vec_save = obv_vec + weights_save = weights + arcs_save = arcs + num_arcs_save = num_arcs + + # compute residuals + r = obv_vec - np.matmul(a.toarray(), x_hat) + + else: # network is not connected anymore, remove less psPoints and try again + # x_hat = np.linalg.lstsq(a_save, obv_vec_save, rcond=None)[0] # unclear: I think it is not necessary to + # recompute the inversion. + n_bad = np.ceil(n_bad / 10).astype(np.int16) # remove less point + + if np.all(np.abs(r) < res_tol): + break + else: + # drop arcs with the highest residuals, but only drop max one arc per point + ps_w_dropped_arc = np.zeros((num_points,)) + good_arc_idx = np.ones((num_arcs_save,), dtype=bool) + r_sort_idx = np.abs(r).argsort()[::-1] # descending order, makes for loop easier + + for j in range(n_bad): # remove arcs one by one + bad_arc_idx = r_sort_idx[j] + ps_idx0 = arcs_save[bad_arc_idx][0] + ps_idx1 = arcs_save[bad_arc_idx][1] + if (ps_w_dropped_arc[ps_idx0] == 0) and (ps_w_dropped_arc[ + ps_idx1] == 0): # if arc not already dropped for either + # point of current arc drop current arc + good_arc_idx[bad_arc_idx] = False + # mark both psPoints from the arc as having an arc dropped + ps_w_dropped_arc[ps_idx0] = 1 + ps_w_dropped_arc[ps_idx1] = 1 + + # update all variables for next iteration + arcs = arcs_save[good_arc_idx, :] + obv_vec = obv_vec_save[good_arc_idx] + a = a_save[good_arc_idx, :] + weights = weights_save[good_arc_idx] + num_arcs = obv_vec.size + + i += 1 + + val_points[points_idx] = x_hat + + m, s = divmod(time.time() - start_time, 60) + logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.\n'.format(m, s)) + + return val_points + + +def spatialParameterIntegration(*, + val_arcs: np.ndarray, + arcs: np.ndarray, + coord_xy: np.ndarray, + weights: np.ndarray, + spatial_ref_idx: int = 0, + logger: Logger): + """Unwrapping double-difference arc parameters spatially. + + The parameters at the arcs are integrated spatially to the points. The integration is done using least-squares. + + Parameters + ---------- + val_arcs: np.ndarray + Value at the arcs (e.g. DEM error, velocity). + arcs: np.ndarray + Arcs of the spatial network. + coord_xy: np.ndarray + Radar coordinates of the points in the spatial network. + weights: np.ndarray + Weights of the arcs (e.g. temporal coherence from temporal unwrapping) + spatial_ref_idx: int + Index of the spatial reference point (default = 0). Can be arbitrary. + logger: Logger + Logging handler + + Returns + ------- + val_points: np.ndarray + Estimated parameters at the points resulting from the integration of the parameters at the arcs. 
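+
+    Notes
+    -----
+    Each arc value is modelled as the difference of the parameter values at its two end points.
+    After removing the column of the spatial reference point from the design matrix, the weighted
+    system (weights * design_mat) x = weights * val_arcs is solved with scipy.sparse.linalg.lsqr.
+    The parameter of the reference point is implicitly fixed to zero. An exception is raised if the
+    spatial network is not connected.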
+ """ + arcs = np.array(arcs) + num_points = coord_xy.shape[0] + num_arcs = arcs.shape[0] + + # create design matrix + design_mat = np.zeros((num_arcs, num_points)) + for i in range(num_arcs): + design_mat[i, arcs[i][0]] = 1 + design_mat[i, arcs[i][1]] = -1 + + # remove reference point from design matrix + design_mat = csr_matrix(weights * np.delete(design_mat, spatial_ref_idx, 1)) + + # don't even start if the network is not connected + if structural_rank(design_mat) < design_mat.shape[1]: + raise Exception("Spatial point network is not connected. Cannot integrate parameters spatially!") + + start_time = time.time() + + obv_vec = val_arcs.reshape(-1, ) * weights.reshape(-1, ) + + x_hat = lsqr(design_mat, obv_vec)[0] + + m, s = divmod(time.time() - start_time, 60) + logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.'.format(m, s)) + + val_points = np.zeros((num_points,)) + points_idx = np.ones((num_points,), dtype=bool) + points_idx[spatial_ref_idx] = False + val_points[points_idx] = x_hat + + return val_points + + +def computeNumArcsPerPoints(*, net_obj: Network, point_id: np.ndarray, + logger: Logger) -> tuple[np.ndarray, np.ndarray]: + """Remove Points with less than specified number of arcs. + + Parameters + ---------- + net_obj: Network + The spatial Network object. + point_id: np.ndarray + ID of the points in the network. + logger: Logger + Logging handler. + + Returns + ------- + design_mat: np.ndarray + Design matrix of the spatial network + arcs_per_point: np.ndarray + Number of arcs that each point is connected with. + """ + logger.info(msg="Removal of arcs and PSC that cannot be tested.") + + num_points = point_id.shape[0] + + # create design matrix + design_mat = np.zeros((net_obj.num_arcs, num_points)) + for i in range(net_obj.num_arcs): + design_mat[i, net_obj.arcs[i][0]] = 1 + design_mat[i, net_obj.arcs[i][1]] = -1 + + # find the number of arcs per point + arcs_per_point = np.zeros(num_points, ) + + for i in range(num_points): + arcs_per_point[i] = np.where(design_mat[:, i] != 0)[0].shape[0] + + return design_mat, arcs_per_point + + +def computeAvgCoherencePerPoint(*, net_obj: Network, point_id: np.ndarray, logger: Logger) -> np.ndarray: + """Compute the average coherence from all arcs that a point is connected with. Used to remove incoherent points. + + Parameters + ---------- + net_obj: Network + The Network object. + point_id: np.ndarray + ID of the points in the network. + logger: Logger + Logging handler. + + Returns + ------- + mean_gamma_point: np.ndarray + Average coherence per point + """ + logger.info(msg="Removal of points whose arcs are incoherent in average.") + + num_points = point_id.shape[0] + + # create design matrix + a = np.zeros((net_obj.num_arcs, num_points)) + for i in range(net_obj.num_arcs): + a[i, net_obj.arcs[i][0]] = net_obj.gamma[i] + a[i, net_obj.arcs[i][1]] = net_obj.gamma[i] + + a[a == 0] = np.nan + mean_gamma_point = np.nanmean(a, axis=0) + + return mean_gamma_point + + +def removeArcsByPointMask(*, net_obj: Network, point_id: np.ndarray, coord_xy: np.ndarray, p_mask: np.ndarray, + design_mat: np.ndarray, logger: Logger) -> tuple[Network, np.ndarray, np.ndarray, np.ndarray]: + """Remove all entries related to the arc observations connected to the points which have a False value in p_mask. + + Parameters + ---------- + net_obj: Network + The Network object. + point_id: np.ndarray + ID of the points in the network. + coord_xy: np.ndarray + Radar coordinates of the points in the spatial network. 
+ p_mask: np.ndarray + Boolean mask with True for points to keep, and False for points to remove. + design_mat: np.ndarray + Design matrix describing the relation between arcs and points. + logger: Logger + Logging handler. + + Returns + ------- + net_obj: Network + Network object without the removed arcs and points. + point_id: np.ndarray + ID of the points in the network without the removed points. + coord_xy: np.ndarray + Radar coordinates of the points in the spatial network without the removed points. + design_mat: np.ndarray + Design matrix describing the relation between arcs and points without the removed points and arcs. + """ + # find respective arcs + a_idx = list() + for p_idx in np.where(~p_mask)[0]: + a_idx.append(np.where(design_mat[:, p_idx] != 0)[0]) + + if len(a_idx) != 0: + a_idx = np.hstack(a_idx) + a_mask = np.ones((net_obj.num_arcs,), dtype=np.bool_) + a_mask[a_idx] = False + net_obj.removeArcs(mask=a_mask) + design_mat = design_mat[a_mask, :] + else: + a_idx = np.array(a_idx) # so I can check the size + + # remove psPoints + point_id = point_id[p_mask] + design_mat = design_mat[:, p_mask] + coord_xy = coord_xy[p_mask, :] + + # beside removing the arcs in "arcs", the tuple indices have to be changed to make them fit to new point indices + for p_idx in np.sort(np.where(~p_mask)[0])[::-1]: + net_obj.arcs[np.where((net_obj.arcs[:, 0] > p_idx)), 0] -= 1 + net_obj.arcs[np.where((net_obj.arcs[:, 1] > p_idx)), 1] -= 1 + + logger.info(msg="Removed {} arc(s) connected to the removed point(s)".format(a_idx.size)) + return net_obj, point_id, coord_xy, design_mat + + +def removeGrossOutliers(*, net_obj: Network, point_id: np.ndarray, coord_xy: np.ndarray, min_num_arc: int = 3, + quality_thrsh: float = 0.0, + logger: Logger) -> tuple[Network, np.ndarray, np.ndarray, np.ndarray]: + """Remove both gross outliers which have many low quality arcs and points which are not well connected. + + Parameters + ---------- + net_obj: Network + The spatial Network object. + point_id: np.ndarray + ID of the points in the network. + coord_xy: np.ndarray + Radar coordinates of the points in the spatial network. + min_num_arc: int + Threshold on the minimal number of arcs per point. Default = 3. + quality_thrsh: float + Threshold on the temporal coherence of the arcs. Default = 0.0. + logger: Logger + Logging handler. + + Returns + ------- + net_obj: Network + Network object without the removed arcs and points. + point_id: np.ndarray + ID of the points in the network without the removed points. + coord_xy: np.ndarray + Radar coordinates of the points in the spatial network without the removed points. + a: np.ndarray + Design matrix describing the relation between arcs and points without the removed points and arcs. 
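+
+    Notes
+    -----
+    The removal is done in two stages: first, points whose mean arc coherence is below
+    'quality_thrsh' are flagged and all arcs with a coherence below 'quality_thrsh' are removed.
+    Afterwards, the flagged points and all points with fewer than 'min_num_arc' remaining arcs are
+    removed together with the arcs connected to them.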
+ """ + logger.info(msg="Detect points with low quality arcs (mean): < {}".format(quality_thrsh)) + mean_gamma_point = computeAvgCoherencePerPoint(net_obj=net_obj, + point_id=point_id, logger=logger) + # not yet removed, because arcs are removed separately + p_mask_mean_coh = (mean_gamma_point >= quality_thrsh).ravel() + logger.info(msg="Detected {} point(s) with mean coherence of all connected arcs < {} ".format( + p_mask_mean_coh[~p_mask_mean_coh].shape[0], quality_thrsh)) + + logger.info(msg="Removal of low quality arcs: < {}".format(quality_thrsh)) + a_mask = (net_obj.gamma >= quality_thrsh).ravel() + logger.info(msg="Removed {} arc(s)".format(a_mask[~a_mask].shape[0])) + net_obj.removeArcs(mask=a_mask) + + design_mat, arcs_per_point = computeNumArcsPerPoints(net_obj=net_obj, point_id=point_id, logger=logger) + + p_mask_num_arcs = (arcs_per_point >= min_num_arc).ravel() + logger.info(msg="Detected {} point(s) with less than {} arcs".format(p_mask_num_arcs[~p_mask_num_arcs].shape[0], + min_num_arc)) + + # remove them jointly + p_mask = p_mask_num_arcs & p_mask_mean_coh + logger.info(msg="Remove {} point(s)".format(p_mask[~p_mask].shape[0])) + net_obj, point_id, coord_xy, design_mat = removeArcsByPointMask(net_obj=net_obj, point_id=point_id, + coord_xy=coord_xy, p_mask=p_mask, + design_mat=design_mat, logger=logger) + return net_obj, point_id, coord_xy, design_mat + + +def parameterBasedNoisyPointRemoval(*, net_par_obj: NetworkParameter, point_id: np.ndarray, coord_xy: np.ndarray, + design_mat: np.ndarray, rmse_thrsh: float = 0.02, num_points_remove: int = 1, + bmap_obj: AmplitudeImage = None, bool_plot: bool = False, + logger: Logger): + """Remove Points during spatial integration step if residuals at many connected arcs are high. + + The idea is similar to outlier removal in DePSI, but without hypothesis testing. + It can be used as a preprocessing step to spatial integration. + The points are removed based on the RMSE computed from the residuals of the parameters (DEM error, velocity) per + arc. The point with the highest RMSE is removed in each iteration. The process stops when the maximum RMSE is below + a threshold. + + + Parameters + ---------- + net_par_obj: NetworkParameter + The spatial NetworkParameter object containing the parameters estimates at each arc. + point_id: np.ndarray + ID of the points in the network. + coord_xy: np.ndarray + Radar coordinates of the points in the spatial network. + design_mat: np.ndarray + Design matrix describing the relation between arcs and points. + rmse_thrsh: float + Threshold for the RMSE of the residuals per point. Default = 0.02. + num_points_remove: int + Number of points to remove in each iteration. Default = 1. + bmap_obj: AmplitudeImage + Basemap object for plotting. Default = None. + bool_plot: bool + Plot the RMSE per point. Default = False. + logger: Logger + Logging handler. + + Returns + ------- + spatial_ref_id: int + ID of the spatial reference point. + point_id: np.ndarray + ID of the points in the network without the removed points. + net_par_obj: NetworkParameter + The NetworkParameter object without the removed points. 
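+
+    Notes
+    -----
+    The spatial reference is selected as one end point of the arc with the highest temporal
+    coherence. In each iteration the parameters are integrated with least squares, the RMSE of the
+    residuals is computed per point for both DEM error and velocity, and the removal decision is
+    based on the velocity RMSE: the 'num_points_remove' points with the highest RMSE are removed.
+    A ValueError is raised if the network becomes disconnected. Minimal usage sketch (hypothetical,
+    variable names are only illustrative)::
+
+        ref_id, point_id, net_par_obj = parameterBasedNoisyPointRemoval(
+            net_par_obj=net_par_obj, point_id=point_id, coord_xy=coord_xy,
+            design_mat=design_mat, rmse_thrsh=0.02, logger=logger)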
+ """ + msg = "#" * 10 + msg += " NOISY POINT REMOVAL BASED ON ARC PARAMETERS " + msg += "#" * 10 + logger.info(msg=msg) + + num_points = point_id.shape[0] + + logger.info(msg="Selection of the reference PSC") + # select one of the two pixels which are connected via the arc with the highest quality + spatial_ref_idx = np.where(design_mat[np.argmax(net_par_obj.gamma), :] != 0)[0][0] + coord_xy = np.delete(arr=coord_xy, obj=spatial_ref_idx, axis=0) + spatial_ref_id = point_id[spatial_ref_idx] + point_id = np.delete(arr=point_id, obj=spatial_ref_idx, axis=0) + num_points -= 1 + + # remove reference point from design matrix + design_mat = net_par_obj.gamma * np.delete(arr=design_mat, obj=spatial_ref_idx, axis=1) + + logger.info(msg="Spatial integration to detect noisy point") + start_time = time.time() + + it_count = 0 + while True: + logger.info(msg="ITERATION: {}".format(it_count)) + design_mat = csr_matrix(design_mat) + + if structural_rank(design_mat) < design_mat.shape[1]: + logger.error(msg="Singular normal matrix. Network is no longer connected!") + # point_id = np.sort(np.hstack([spatial_ref_id, point_id])) + # return spatial_ref_id, point_id, net_par_obj + raise ValueError + # demerr + obv_vec = net_par_obj.demerr.reshape(-1, ) + demerr_points = lsqr(design_mat.toarray(), obv_vec * net_par_obj.gamma.reshape(-1, ))[0] + r_demerr = obv_vec - np.matmul(design_mat.toarray(), demerr_points) + + # vel + obv_vec = net_par_obj.vel.reshape(-1, ) + vel_points = lsqr(design_mat.toarray(), obv_vec * net_par_obj.gamma.reshape(-1, ))[0] + r_vel = obv_vec - np.matmul(design_mat.toarray(), vel_points) + + rmse_demerr = np.zeros((num_points,)) + rmse_vel = np.zeros((num_points,)) + for p in range(num_points): + r_mask = design_mat[:, p].toarray() != 0 + rmse_demerr[p] = np.sqrt(np.mean(r_demerr[r_mask.ravel()].ravel() ** 2)) + rmse_vel[p] = np.sqrt(np.mean(r_vel[r_mask.ravel()].ravel() ** 2)) + + rmse = rmse_vel.copy() + max_rmse = np.max(rmse.ravel()) + logger.info(msg="Maximum RMSE DEM error: {:.2f} m".format(np.max(rmse_demerr.ravel()))) + logger.info(msg="Maximum RMSE velocity: {:.4f} m / year".format(np.max(rmse_vel.ravel()))) + + if bool_plot: + # vel + ax = bmap_obj.plot(logger=logger) + sc = ax.scatter(coord_xy[:, 1], coord_xy[:, 0], c=rmse_vel * 1000, s=3.5, + cmap=plt.cm.get_cmap("autumn_r"), vmin=0, vmax=rmse_thrsh * 1000) + plt.colorbar(sc, pad=0.03, shrink=0.5) + ax.set_title("{}. iteration\nmean velocity - RMSE per point in [mm / year]".format(it_count)) + fig = ax.get_figure() + plt.tight_layout() + fig.savefig(join(dirname(net_par_obj.file_path), "pic", f"step_1_rmse_vel_{it_count}th_iter.png"), + dpi=300) + plt.close(fig) + + # demerr + ax = bmap_obj.plot(logger=logger) + sc = ax.scatter(coord_xy[:, 1], coord_xy[:, 0], c=rmse_demerr, s=3.5, + cmap=plt.cm.get_cmap("autumn_r")) + plt.colorbar(sc, pad=0.03, shrink=0.5) + ax.set_title("{}. 
iteration\nDEM error - RMSE per point in [m]".format(it_count)) + fig = ax.get_figure() + plt.tight_layout() + fig.savefig(join(dirname(net_par_obj.file_path), "pic", f"step_1_rmse_dem_error_{it_count}th_iter.png"), + dpi=300) + plt.close(fig) + + if max_rmse <= rmse_thrsh: + logger.info(msg="No noisy pixels detected.") + break + + # remove point with highest rmse + p_mask = np.ones((num_points,), dtype=np.bool_) + p_mask[np.argsort(rmse)[::-1][:num_points_remove]] = False # see description of function removeArcsByPointMask + net_par_obj, point_id, coord_xy, design_mat = removeArcsByPointMask(net_obj=net_par_obj, point_id=point_id, + coord_xy=coord_xy, p_mask=p_mask, + design_mat=design_mat.toarray(), + logger=logger) + num_points -= num_points_remove + it_count += 1 + + m, s = divmod(time.time() - start_time, 60) + logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.'.format(m, s)) + + # add spatialRefIdx back to point_id + point_id = np.sort(np.hstack([spatial_ref_id, point_id])) + return spatial_ref_id, point_id, net_par_obj diff --git a/sarvey/utils.py b/sarvey/utils.py new file mode 100644 index 0000000..04e7e46 --- /dev/null +++ b/sarvey/utils.py @@ -0,0 +1,832 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + +"""Utils module for SARvey.""" +import multiprocessing +import time +import numpy as np +from scipy.sparse.linalg import lsqr +from typing import Union +from logging import Logger + +from mintpy.utils import ptime + +from sarvey.objects import Points, NetworkParameter, Network, BaseStack, AmplitudeImage +from sarvey.ifg_network import IfgNetwork + + +def convertBboxToBlock(*, bbox: tuple): + """ConvertBboxToBlock. 
read box and write2hdf5_block block have different definitions.""" + block = None + if len(bbox) == 4: + block = (bbox[1], bbox[3], bbox[0], bbox[2]) + if len(bbox) == 6: + block = (bbox[2], bbox[5], bbox[1], bbox[4], bbox[0], bbox[3]) + return block + + +def invertIfgNetwork(*, phase: np.ndarray, num_points: int, ifg_net_obj: IfgNetwork, num_cores: int, ref_idx: int, + logger: Logger): + """Wrap the ifg network inversion running in parallel. + + Parameters + ---------- + phase: np.ndarray + interferometric phases of the points. + num_points: int + number of points. + ifg_net_obj: IfgNetwork + instance of class IfgNetwork. + num_cores: int + number of cores to use for multiprocessing. + ref_idx: int + index of temporal reference date for interferogram network inversion. + logger: Logger + logging handler + + Returns + ------- + phase_ts: np.ndarray + inverted phase time series of the points. + """ + msg = "#" * 10 + msg += " INVERT IFG NETWORK " + msg += "#" * 10 + logger.info(msg=msg) + + start_time = time.time() + design_mat = ifg_net_obj.getDesignMatrix() + + if num_cores == 1: + args = (np.arange(num_points), num_points, phase, design_mat, ifg_net_obj.num_images, ref_idx) + idx_range, phase_ts = launchInvertIfgNetwork(parameters=args) + else: + # use only 10 percent of the cores, because scipy.sparse.linalg.lsqr is already running in parallel + num_cores = int(np.floor(num_cores / 10)) + logger.info(msg="start parallel processing with {} cores.".format(num_cores)) + pool = multiprocessing.Pool(processes=num_cores) + + phase_ts = np.zeros((num_points, ifg_net_obj.num_images), dtype=np.float32) + + num_cores = num_points if num_cores > num_points else num_cores # avoids having more samples than cores + idx = splitDatasetForParallelProcessing(num_samples=num_points, num_cores=num_cores) + args = [( + idx_range, + idx_range.shape[0], + phase[idx_range, :], + design_mat, + ifg_net_obj.num_images, + ref_idx) for idx_range in idx] + + results = pool.map(func=launchInvertIfgNetwork, iterable=args) + + # retrieve results + for i, phase_i in results: + phase_ts[i, :] = phase_i + + m, s = divmod(time.time() - start_time, 60) + logger.debug(msg='time used: {:02.0f} mins {:02.1f} secs.'.format(m, s)) + return phase_ts + + +def launchInvertIfgNetwork(parameters: tuple): + """Launch the inversion of the interferogram network in parallel. 
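+
+    The phase time series is estimated point by point by solving the interferogram network design
+    matrix with scipy.sparse.linalg.lsqr. The column of the temporal reference date is removed from
+    the design matrix beforehand, so the phase at the reference date is implicitly fixed to zero.
+    The function is meant to be called either directly (single core) or as worker function via
+    multiprocessing in 'invertIfgNetwork'.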
+ + Parameters + ---------- + parameters: tuple + parameters for inversion + + Tuple contains: + idx_range: np.ndarray + range of point indices to be processed + num_points: int + number of points + phase: np.ndarray + interferometric phases of the points + design_mat: np.ndarray + design matrix + num_images: int + number of images + ref_idx: int + index of temporal reference date for interferogram network inversion + + Returns + ------- + idx_range: np.ndarray + range of indices of the points processed + phase_ts: np.ndarray + inverted phase time series + """ + # Unpack the parameters + (idx_range, num_points, phase, design_mat, num_images, ref_idx) = parameters + + design_mat = np.delete(arr=design_mat, obj=ref_idx, axis=1) # remove reference date + idx = np.ones((num_images,), dtype=np.bool_) + idx[ref_idx] = False + phase_ts = np.zeros((num_points, num_images), dtype=np.float32) + + prog_bar = ptime.progressBar(maxValue=num_points) + for i in range(num_points): + phase_ts[i, idx] = lsqr(design_mat, phase[i, :])[0] + prog_bar.update(value=i + 1, every=np.ceil(num_points / 100), + suffix='{}/{} points'.format(i + 1, num_points)) + + return idx_range, phase_ts + + +def predictPhase(*, obj: [NetworkParameter, Points], vel: np.ndarray = None, demerr: np.ndarray = None, + ifg_space: bool = True, logger: Logger): + """Predicts the phase time series based on the estimated parameters DEM error and mean velocity. + + Can be used for both arc phase or point phase. Wrapper function for 'predictPhaseCore(...)' + + Parameters + ---------- + obj: Union[NetworkParameter, Points] + object of either 'networkParameter' or 'points'. If instance of 'points' is given, 'vel' and 'demerr' + also need to be specified. + vel: np.ndarray + velocity for each sample (default: None) + demerr: np.ndarray + dem error for each sample (default: None). + ifg_space: bool + set to True if the phase shall be predicted in interferogram space. If False, phase will be predicted + in acquisition space. (default: True) + logger: Logger + Logging handler. + + Returns + ------- + pred_phase_demerr: np.ndarray + predicted phase from DEM error + pred_phase_vel: np.ndarray + predicted phase from velocity + + Raises + ------ + ValueError + vel or demerr is none + TypeError + obj is of the wrong type + """ + if isinstance(obj, Points): + if (vel is None) or (demerr is None): + logger.error(msg="Both 'vel' and 'demerr' are needed if 'obj' is instance of class 'points'!") + raise ValueError + pred_phase_demerr, pred_phase_vel = predictPhaseCore( + ifg_net_obj=obj.ifg_net_obj, + wavelength=obj.wavelength, + vel=vel, + demerr=demerr, + slant_range=obj.slant_range, + loc_inc=obj.loc_inc, + ifg_space=ifg_space + ) + elif isinstance(obj, NetworkParameter): + pred_phase_demerr, pred_phase_vel = predictPhaseCore( + ifg_net_obj=obj.ifg_net_obj, + wavelength=obj.wavelength, + vel=obj.vel, + demerr=obj.demerr, + slant_range=obj.slant_range, + loc_inc=obj.loc_inc, + ifg_space=ifg_space + ) + else: + logger.error(msg="'obj' must be instance of 'points' or 'networkParameter'!") + raise TypeError + return pred_phase_demerr, pred_phase_vel + + +def predictPhaseCore(*, ifg_net_obj: IfgNetwork, wavelength: float, vel: np.ndarray, + demerr: np.ndarray, slant_range: np.ndarray, loc_inc: np.ndarray, ifg_space: bool = True): + """Predicts the phase time series based on the estimated parameters DEM error and mean velocity. + + Can be used for both arc phase or point phase. 
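+    The contributions are modelled as 4 * pi / wavelength * pbase / (slant_range * sin(loc_inc)) * demerr
+    for the DEM error and 4 * pi / wavelength * tbase * vel for the velocity, where tbase and pbase
+    are either the interferogram or the acquisition baselines depending on 'ifg_space'.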
+ + Parameters + ---------- + ifg_net_obj: IfgNetwork + instance of class ifgNetwork + wavelength: float + wavelength in [m] + vel: np.ndarray + velocity for each sample + demerr: np.ndarray + dem error for each sample + slant_range: np.ndarray + slant range distance for each sample + loc_inc: np.ndarray + local incidence angle for each sample + ifg_space: bool + set to True if the phase shall be predicted in interferogram space. If False, phase will be + predicted in acquisition space. (default: True) + + Returns + ------- + pred_phase_demerr: np.ndarray + predicted phase from DEM error + pred_phase_vel: np.ndarray + predicted phase from velocity + """ + factor = 4 * np.pi / wavelength + + if ifg_space: + tbase = ifg_net_obj.tbase_ifg + pbase = ifg_net_obj.pbase_ifg + else: + tbase = ifg_net_obj.tbase + pbase = ifg_net_obj.pbase + + # compute phase due to DEM error + pred_phase_demerr = factor * pbase[:, np.newaxis] / (slant_range * np.sin(loc_inc))[np.newaxis, :] * demerr + + # compute phase due to velocity + pred_phase_vel = factor * tbase[:, np.newaxis] * vel + + return pred_phase_demerr.T, pred_phase_vel.T + + +def predictPhaseSingle(*, demerr: float, vel: float, slant_range: float, loc_inc: float, + ifg_net_obj: IfgNetwork, wavelength: float, only_vel: bool = False, ifg_space: bool = True): + """Predict the phase time series for only one point based on the estimated parameters DEM error and mean velocity. + + Can be used for both arc phase or point phase. + + Parameters + ---------- + demerr: float + DEM error (scalar) + vel: float + mean velocity (scalar) + slant_range: float + slant range distance in [m] (scalar) + loc_inc: float + local incidence angle in [rad] (scalar) + ifg_net_obj: IfgNetwork + object of class IfgNetwork + wavelength: float + radar wavelength in [m] + only_vel: bool + set to True if only the mean velocity shall be predicted (default: False) + ifg_space: bool + set to True if the phase shall be predicted in interferogram space. If False, phase will be predicted in + acquisition space. (default: True) + + Returns + ------- + pred_phase: np.ndarray + predicted phase + """ + factor = 4 * np.pi / wavelength + + if ifg_space: + tbase = ifg_net_obj.tbase_ifg + pbase = ifg_net_obj.pbase_ifg + num_time = ifg_net_obj.num_ifgs + else: + tbase = ifg_net_obj.tbase + pbase = ifg_net_obj.pbase + num_time = ifg_net_obj.num_images + + if only_vel: + a = np.zeros((num_time, 1)) + else: + a = np.zeros((num_time, 2)) + a[:, 0] = factor * tbase + + if only_vel: + pred_phase = np.matmul(a, np.array([vel])).reshape((-1,)) + else: + a[:, 1] = factor * pbase / (slant_range * np.sin(loc_inc)) + pred_phase = np.matmul(a, np.array([vel, demerr])).reshape((-1,)) + + return pred_phase + + +def estimateParameters(*, obj: Union[Points, Network], estimate_ref_atmo: bool = True, ifg_space: bool = True): + """Estimate the parameters either per point or per arc. + + Parameters are velocity and DEM error (or additionally reference APS). + + Parameters + ---------- + obj: Union[Points, Network] + object of either network, networkParameter, points or pointsParameters + estimate_ref_atmo: bool + set to True if APS of reference date shall be estimated. corresponds to offset of linear + motion model (default: False). + ifg_space: bool + set to True if the phase shall be predicted in interferogram space. If False, phase will be + predicted in acquisition space. 
(default: True) + + Returns + ------- + vel: np.ndarray + velocity for each point + demerr: np.ndarray + dem error for each point + ref_atmo: np.ndarray + reference APS for each point + omega: + sum of squared residuals + v_hat: + residuals + """ + num = obj.phase.shape[0] # either number of points or number of arcs + + if ifg_space: + tbase = obj.ifg_net_obj.tbase_ifg + pbase = obj.ifg_net_obj.pbase_ifg + num_time = obj.ifg_net_obj.num_ifgs + else: + tbase = obj.ifg_net_obj.tbase + pbase = obj.ifg_net_obj.pbase + num_time = obj.ifg_net_obj.num_images + + vel = np.zeros((num,), dtype=np.float32) + demerr = np.zeros((num,), dtype=np.float32) + omega = np.zeros((num,), dtype=np.float32) + coherence = np.zeros((num,), dtype=np.float32) + v_hat = np.zeros((num, num_time), dtype=np.float32) + + ref_atmo = None + if estimate_ref_atmo: + ref_atmo = np.zeros((num,), dtype=np.float32) + a = np.zeros((num_time, 3), dtype=np.float32) + a[:, 2] = 4 * np.pi / obj.wavelength # atmospheric delay at reference acquisition + else: + a = np.zeros((num_time, 2)) + + a[:, 1] = 4 * np.pi / obj.wavelength * tbase # velocity + + for p in range(obj.num_points): + obv_vec = obj.phase[p, :] + a[:, 0] = 4 * np.pi / obj.wavelength * pbase / (obj.slant_range[p] * np.sin(obj.loc_inc[p])) # demerr + + x_hat, omega[p] = np.linalg.lstsq(a, obv_vec, rcond=None)[0:2] + demerr[p] = x_hat[0] + vel[p] = x_hat[1] + if estimate_ref_atmo: + ref_atmo[p] = x_hat[2] + v_hat[p, :] = obv_vec - np.matmul(a, x_hat) + coherence[p] = np.abs(np.mean(np.exp(1j * v_hat[p, :]))) + + if not estimate_ref_atmo: + ref_atmo = None + + return vel, demerr, ref_atmo, coherence, omega, v_hat + + +def splitImageIntoBoxesRngAz(*, length: int, width: int, num_box_az: int, num_box_rng: int): + """Split the image into several boxes. + + (adapted from mintpy.ifgram_inversion.split2boxes) + + Parameters + ---------- + num_box_rng: int + Number of boxes in range direction + num_box_az: + Number of boxes in azimuth direction + length: int + length of the image + width: int + width of the image + + Returns + ------- + box_list: list + of tuple of 4 int (xmin, ymin, xmax, ymax) + num_box: int + number of boxes + """ + y_step = int(np.rint((length / num_box_rng) / 10) * 10) + x_step = int(np.rint((width / num_box_az) / 10) * 10) + + box_list = [] + y0 = 0 + y1 = 0 + while y1 != length: + x0 = 0 + x1 = 0 + # y1 = min([length, y0 + y_step]) + if y0 + y_step + int(np.rint(y_step / 2)) > length: + y1 = length + else: + y1 = y0 + y_step + while x1 != width: + if x0 + x_step + int(np.rint(x_step / 2)) > width: + x1 = width + else: + x1 = x0 + x_step + # x1 = min([width, x0 + x_step]) + box = (x0, y0, x1, y1) + box_list.append(box) + x0 = x1 + y0 = y1 + + num_box = len(box_list) + return box_list, num_box + + +def preparePatches(*, num_patches: int, width: int, length: int, logger: Logger): + """Create patches to subset the image stack for parallel processing to reduce memory usage. 
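+
+    The requested number of patches is mapped to a predefined (rows x columns) layout. If the
+    requested number is not supported, it is increased to the next supported value; numbers above
+    the largest supported layout are reduced to it.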
+ + Parameters + ---------- + num_patches: int + number of patches to split the image into + width: int + width of the image + length: int + length of the image + logger: Logger + logging handler + + Returns + ------- + box_list: list + tuples with the radar coordinates of the boxes + num_patches: int + number of actual patches created by the function + """ + patch_size_lut = { + 1: (1, 1), + 2: (1, 2), + 3: (1, 3), + 4: (2, 2), + 6: (2, 3), + 8: (2, 4), + 10: (2, 5), + 12: (3, 4), + 15: (3, 5), + 28: (4, 7), + } + if num_patches == 1: + box_list = [tuple(i for i in (0, 0, width, length))] + num_patches = 1 + else: + num_patches = num_patches + if num_patches > max(patch_size_lut.keys()): + num_patches = max(patch_size_lut.keys()) + logger.info(msg=f"Number of patches is higher than expected. Reduce to {num_patches} boxes.") + else: + while not (num_patches in patch_size_lut.keys()): + num_patches += 1 + box_list, num_patches = splitImageIntoBoxesRngAz(length=length, + width=width, + num_box_az=patch_size_lut[num_patches][1], + num_box_rng=patch_size_lut[num_patches][0]) + logger.info(msg=f"Process {num_patches} patches " + + f"({patch_size_lut[num_patches][1]} x {patch_size_lut[num_patches][0]}).") + return box_list, num_patches + + +def splitDatasetForParallelProcessing(*, num_samples: int, num_cores: int): + """Split the dataset into chunks of similar size for processing them in parallel. + + Parameters + ---------- + num_samples: int + number of samples to be split + num_cores: int + number of cores to split among + + Returns + ------- + idx: list + list of sample ranges for each core + """ + rest = np.mod(num_samples, num_cores) + avg_num_samples_per_core = int((num_samples - rest) / num_cores) + num_samples_per_core = np.zeros((num_cores,), dtype=np.int16) + num_samples_per_core[:] = avg_num_samples_per_core + c = rest + i = 0 + while c > 0: + num_samples_per_core[i] += 1 + i += 1 + c -= 1 + + idx = list() + cur_idx = 0 + for i in range(num_cores): + idx.append([cur_idx, cur_idx + num_samples_per_core[i]]) + cur_idx += num_samples_per_core[i] + + idx = [np.arange(s, e) for s, e in idx] + return idx + + +def createSpatialGrid(*, coord_utm_img: np.ndarray, length: int, width: int, grid_size: int): + """Create a spatial grid over the image. + + Parameters + ---------- + coord_utm_img: np.ndarray + coordinates of all image pixels in UTM + length: int + number of pixels in length of the image + width: int + number of pixels in width of the image + grid_size: int + size of the grid in [m] + + Returns + ------- + box_list: list + of tuples with the radar coordinates of the boxes + num_box: int + actual number of boxes created by the function + """ + p0 = coord_utm_img[:, 0, 0] + p1 = coord_utm_img[:, 0, -1] + p2 = coord_utm_img[:, -1, 0] + dist_width = np.linalg.norm(p0 - p1) + dist_length = np.linalg.norm(p0 - p2) + num_box_az = int(np.round(dist_width / grid_size)) + num_box_rng = int(np.round(dist_length / grid_size)) + + # split image into different parts + box_list, num_box = splitImageIntoBoxesRngAz(length=length, width=width, + num_box_az=num_box_az, num_box_rng=num_box_rng) + + return box_list, num_box + + +def selectBestPointsInGrid(*, box_list: list, quality: np.ndarray, sel_min: bool = True): + """Select the best point inside a grid. + + If several pixel fullfil the criteria, the first one is selected. 
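+    When selecting the minimum, boxes whose quality values are all np.inf are skipped, i.e. no
+    point is selected from them.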
+ + Parameters + ---------- + box_list: list + of tuples with the radar coordinates of the boxes + quality: np.ndarray + quality of the pixels + sel_min: bool + set to True if the minimum value shall be selected (default: True) + + Returns + ------- + cand_mask_sparse: np.ndarray + boolean mask of the selected pixels + """ + cand_mask_sparse = np.zeros_like(quality).astype(np.bool_) + + for box in box_list: + qual_box = quality[box[1]:box[3], box[0]:box[2]] + if sel_min: + idx_box = np.where(np.min(qual_box) == qual_box) + if np.min(qual_box) == np.inf: # no mininum value exists in this box + continue + else: # max + idx_box = np.where(np.max(qual_box) == qual_box) + + if idx_box[0].shape[0] > 1: # more than one index might be found, due to quality(PS) = 1 in MiaplPy + idx_box_tmp = [idx_box[0][0], idx_box[1][0]] + idx_box = idx_box_tmp + idx_img = (idx_box[0] + box[1], idx_box[1] + box[0]) + cand_mask_sparse[idx_img] = True + return cand_mask_sparse + + +def spatiotemporalConsistency(*, coord_utm: np.ndarray, phase: np.ndarray, wavelength: float, min_dist: int = 15, + max_dist: float = np.inf, knn: int = 50): + """Spatiotemporal consistency proposed by Hanssen et al. (2008) and implemented in DePSI (van Leijen, 2014). + + Parameters + ---------- + coord_utm: np.ndarray + UTM coordinates of the points + phase: np.ndarray + phase time series of the points + wavelength: float + radar wavelength in [m] + min_dist: int + minimum distance to other points in [m] (default: 15) + max_dist: float + maximum distance to other points in [m] (default: np.inf) + knn: int + number of nearest neighbors to consider (default: 50) + + Returns + ------- + stc: np.ndarray + spatiotemporal consistency of the points + """ + from scipy.spatial import KDTree + + num_samples, num_time = phase.shape + tree = KDTree(data=coord_utm) + + stc = np.zeros((num_samples,), np.float64) + + for p in range(num_samples): + dist, idx = tree.query([coord_utm[p, 0], coord_utm[p, 1]], k=knn) + mask = (dist < max_dist) & (dist > min_dist) & (dist != 0) + rho = list() + for i in idx[mask]: + diff = (phase[i, :-1] - phase[p, :-1]) - (phase[i, 1:] - phase[p, 1:]) + rho.append(wavelength / (4 * np.pi) * np.sqrt((1 / (num_time - 1) * np.sum(diff ** 2)))) + if not rho: + stc[p] = np.nan + else: + stc[p] = np.min(rho) + return stc + + +def temporalAutoCorrelation(*, residuals: np.ndarray, lag: int): + """Compute the temporal autocorrelation for given time lag from the residuals. + + Parameters + ---------- + residuals: np.ndarray + residual phase time series (dim: num_points x num_time_steps) + lag: int + time lag used for computing the correlation + + Returns + ------- + auto_corr: np.ndarray + auto-correlation of each point (dim: num_points x lag) + """ + num_points = residuals.shape[0] + auto_corr = np.zeros((num_points, lag)) + for lag_num in range(1, lag + 1): + for p in range(num_points): + auto_corr[p, lag_num - 1] = abs(np.corrcoef( + np.array([residuals[p, :-lag_num], residuals[p, lag_num:]]))[0][1]) + return auto_corr + + +def readPhasePatchwise(*, stack_obj: BaseStack, dataset_name: str, num_patches: int, cand_mask: np.ndarray, + point_id_img: np.ndarray, + logger: Logger): + """Read the phase from a file in a patchwise manner to reduce memory usage. + + Parameters + ---------- + stack_obj: BaseStack + instance of class BaseStack + dataset_name: str + name of the dataset to read (e.g. 
'ifgs' or 'phase') + num_patches: int + number of patches to split the image into + cand_mask: np.ndarray + boolean mask of the selected pixels + point_id_img: np.ndarray + image with point IDs for each pixel + logger: Logger + logging handler + + Returns + ------- + phase_points: np.ndarray + phase time series of the selected pixels + """ + if dataset_name == "ifgs": + length, width, num_images = stack_obj.getShape(dataset_name=dataset_name) + elif dataset_name == "phase": # result from miaplpy + num_images, length, width = stack_obj.getShape(dataset_name=dataset_name) + else: + logger.error(f"Reading '{dataset_name}' is not supported.") + raise NotImplementedError + + if num_patches == 1: + phase_img = stack_obj.read(dataset_name=dataset_name) + if dataset_name == "phase": # result from miaplpy + phase_img = np.moveaxis(phase_img, 0, -1) + phase_points = phase_img[cand_mask, :] + else: + phase_points = np.angle(phase_img[cand_mask, :]) + else: + box_list, num_patches = preparePatches(num_patches=num_patches, + width=width, + length=length, + logger=logger) + num_points = cand_mask[cand_mask].shape[0] + phase_points = np.zeros((num_points, num_images), dtype=np.float32) + start_idx = 0 + point_id_order = list() + for idx in range(num_patches): + bbox = box_list[idx] + if dataset_name == "phase": # result from miaplpy + # slcStack has different order: starts with num_images. Adjust bbox (x0, y0, z0, x1, y1, z1) + # read whole slcStack and subset to time span outside this function. + box = (bbox[1], 0, bbox[0], bbox[3], num_images, bbox[2]) + phase_img = stack_obj.read(dataset_name=dataset_name, box=box, print_msg=False) + phase_img = np.moveaxis(phase_img, 0, -1) + else: + phase_img = stack_obj.read(dataset_name=dataset_name, box=bbox, print_msg=False) + cur_cand_mask = cand_mask[bbox[1]:bbox[3], bbox[0]:bbox[2]] + + # extract the wrapped phase for the selected pixels in the patch + cur_num_points = cur_cand_mask[cur_cand_mask].shape[0] + stop_idx = start_idx + cur_num_points + if dataset_name == "phase": + phase_points[start_idx:stop_idx, :] = phase_img[cur_cand_mask, :] # miaplpy results are phases + else: + phase_points[start_idx:stop_idx, :] = np.angle(phase_img[cur_cand_mask, :]) + start_idx = stop_idx + + # store order of IDs to sort the points after loading all ifgs + cur_point_id = point_id_img[bbox[1]:bbox[3], bbox[0]:bbox[2]] + cur_point_id = cur_point_id[cur_cand_mask] + point_id_order.append(cur_point_id) + logger.info(msg="\r\033[KPatches read:\t {}/{}".format(idx + 1, num_patches)) + # reorder points to fit to the same structure for all datasets + idx = np.argsort(np.hstack(point_id_order)) + phase_points = phase_points[idx, :] + + return phase_points + + +def detectValidAreas(*, bmap_obj: AmplitudeImage, logger: Logger): + """Detect valid areas based on amplitude image. + + Parameters + ---------- + bmap_obj: AmplitudeImage + instance of class AmplitudeImage + logger: Logger + logging handler + + Returns + ------- + mask_valid_area: np.ndarray + boolean mask of the valid areas + """ + bmap_obj.open() + mask_valid_area = (10 ** (bmap_obj.background_map / 10)) > 0 + num_invalid = mask_valid_area[~mask_valid_area].shape[0] + if num_invalid > 0: + logger.info(msg=f"Number of invalid pixels found in image: {num_invalid}") + return mask_valid_area + + +def setReferenceToPeakOfHistogram(*, phase: np.ndarray, vel: np.ndarray, num_bins: int = 100): + """Set reference phase value to peak of the velocity histogram. + + It assumes that no velocity (i.e. 
stable area) is occuring most frequently. + + Parameters + ---------- + phase: np.ndarray + phase time series of the points + vel: np.ndarray + velocity of the points + num_bins: int + number of bins for the histogram (default: 100) + + Returns + ------- + phase: np.ndarray + phase time series adjusted by the new reference phase + """ + if phase.shape[0] < 40: # the method will not give meaningfull results if too few points are available + num_bins = 10 + + # find most frequent velocity + hist, bin_edges = np.histogram(vel, bins=num_bins, density=True) + max_idx = np.argmax(hist) + + # find a set of points which have the most frequent velocity + mask = (vel >= bin_edges[max_idx]) & (vel < bin_edges[max_idx + 1]) + + # determine reference phase from mean of the phase time series of the selected points + ref_phase = np.mean(phase[mask, :], axis=0) + + # adjust the phases by the reference sarvey + phase -= ref_phase + + return phase diff --git a/sarvey/version.py b/sarvey/version.py new file mode 100644 index 0000000..c8ab2b6 --- /dev/null +++ b/sarvey/version.py @@ -0,0 +1,34 @@ +# -*- coding: utf-8 -*- + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + +"""Version module for SARvey.""" + +__version__ = '1.0.0' +__versiondate__ = '2024-06-26_01' +__versionalias__ = 'Strawberry Pie' diff --git a/sarvey/viewer.py b/sarvey/viewer.py new file mode 100644 index 0000000..3cf99d8 --- /dev/null +++ b/sarvey/viewer.py @@ -0,0 +1,562 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 
+# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + +"""Viewer Module for SARvey.""" +import os +from typing import Any +from logging import Logger +import matplotlib.cm as cm +import matplotlib.patches as patches +import matplotlib.pyplot as plt +from matplotlib import colormaps, widgets +from matplotlib.backend_bases import MouseButton +from matplotlib.colors import Normalize +import numpy as np +from scipy.spatial import KDTree +import datetime + +from mintpy.objects.colors import ColormapExt +from mintpy.utils import readfile +from mintpy.utils.plot import auto_flip_direction + +from sarvey.objects import AmplitudeImage, Points, BaseStack +import sarvey.utils as ut + + +def plotIfgs(*, phase: np.ndarray, coord: np.ndarray, spatial_ref_idx: int = None, ttl: str = None, cmap: str = "cmy"): + """Plot one interferogram per subplot. + + Parameters + ---------- + phase: np.ndarray + phase per point and ifg, e.g. wrapped or unwrapped phase (dim: no. psPoints x no. ifgs) + coord: np.ndarray + coordinates of the psPoints, e.g. pixel or lat lon (dim: no. psPoints x 2) + spatial_ref_idx: int + index of the spatial reference point (default: None) + ttl: str + title for the figure (default: None) + cmap: str + colormap, use "cmy" for wrapped phase data (default) or "?" for unwrapped or residual phase + """ + if cmap == "cmy": + cmap = ColormapExt('cmy').colormap + else: + cmap = plt.get_cmap(cmap) + + num_ifgs = phase.shape[1] + min_val = np.min(phase) + max_val = np.max(phase) + fig, axs = plt.subplots(np.ceil(np.sqrt(num_ifgs + 1)).astype(np.int32), + np.ceil(np.sqrt(num_ifgs + 1)).astype(np.int32)) + sc = None + for i, ax in enumerate(axs.flat): + if i < num_ifgs: + sc = ax.scatter(coord[:, 1], coord[:, 0], c=phase[:, i], + vmin=min_val, vmax=max_val, s=1, cmap=cmap) + ax.axes.set_xticks([]) + ax.axes.set_yticks([]) + if spatial_ref_idx is not None: + ax.plot(coord[spatial_ref_idx, 1], + coord[spatial_ref_idx, 0], 'k*') + elif i == num_ifgs: + plt.colorbar(sc, cax=ax) + else: + ax.set_visible(False) + if ttl is not None: + fig.suptitle(ttl) + + +def plotScatter(*, value: np.ndarray, coord: np.ndarray, bmap_obj: AmplitudeImage = None, ttl: str = None, + unit: str = None, s: float = 5.0, cmap: colormaps = colormaps["jet_r"], symmetric: bool = False, + logger: Logger, **kwargs: Any): + """Plot a scatter map for given value. + + Parameters + ---------- + value: np.ndarray + value to be plotted per point giving the colour of the point (dim: no. points x 1) + coord: np.ndarray + coordinates of the points, e.g. radar or lat lon (dim: no. points x 2). 
If bmapObj is given, + the coordinates must be radar coordinates! + bmap_obj: AmplitudeImage + instance of amplitudeImage for plotting background image (default: None) + ttl: str + title for the figure (default: None) + unit: str + unit as title for the colorbar axis (default: None) + s: float + size of the scatter points (default: 5.0) + cmap: str + colormap (default: "jet_r") + symmetric: bool + plot symmetric colormap extend, i.e. abs(vmin) == abs(vmax) (default: False) + logger: Logger + logging Handler + kwargs: Any + additional keyword arguments for scatter plot + + Returns + ------- + fig: plt.Figure + current figure, + ax: plt.Axes + current axis + cb: plt.colorbar + current colorbar + """ + if bmap_obj is not None: + ax = bmap_obj.plot(logger=logger) + fig = plt.gcf() + else: + fig = plt.figure() + ax = fig.add_subplot() + + if symmetric: + v_range = np.max(np.abs(value.ravel())) + sc = ax.scatter(coord[:, 1], coord[:, 0], c=value, s=s, cmap=plt.get_cmap(cmap), + vmin=-v_range, vmax=v_range) + else: + sc = ax.scatter(coord[:, 1], coord[:, 0], c=value, s=s, cmap=plt.get_cmap(cmap), **kwargs) + cb = plt.colorbar(sc, ax=ax, pad=0.03, shrink=0.5) + cb.ax.set_title(unit) + ax.set_title(ttl) + plt.tight_layout() + return fig, ax, cb + + +def plotColoredPointNetwork(*, x: np.ndarray, y: np.ndarray, arcs: np.ndarray, val: np.ndarray, ax: plt.Axes = None, + linewidth: float = 2, cmap_name: str = "seismic", clim: tuple = None): + """Plot a network of points with colored arcs. + + Parameters + ---------- + x: np.ndarray + x-coordinates of the points (dim: no. points x 1) + y: np.ndarray + y-coordinates of the points (dim: no. points x 1) + arcs: np.ndarray + indices of the points to be connected (dim: no. arcs x 2) + val: np.ndarray + values for the color of the arcs (dim: no. arcs x 1) + ax: plt.Axes + axis for plotting (default: None) + linewidth: float + line width of the arcs (default: 2) + cmap_name: str + name of the colormap (default: "seismic") + clim: tuple + color limits for the colormap (default: None) + + Returns + ------- + ax: plt.Axes + current axis + cbar: plt.colorbar + current colorbar + """ + if ax is None: + fig = plt.figure(figsize=[15, 5]) + ax = fig.add_subplot() + else: + fig = ax.get_figure() + ax.scatter(x, y, s=3.5, c=np.ones_like(x)) + + if clim is None: + norm = Normalize(vmin=min(val), vmax=max(val)) + else: + norm = Normalize(vmin=clim[0], vmax=clim[1]) + + mapper = cm.ScalarMappable(norm=norm, cmap=cm.get_cmap(cmap_name)) + mapper_list = [mapper.to_rgba(v) for v in val] + for m in range(arcs.shape[0]): + x_val = [x[arcs[m, 0]], x[arcs[m, 1]]] + y_val = [y[arcs[m, 0]], y[arcs[m, 1]]] + + ax.plot(x_val, y_val, linewidth=linewidth, c=mapper_list[m]) + cbar = fig.colorbar(mapper, ax=ax, pad=0.03, shrink=0.5) + + return ax, cbar + + +def plotGridFromBoxList(*, box_list: list, ax: plt.Axes = None, edgecolor: str = "k", linewidth: float = 1): + """Plot a grid into an axis. + + Parameters + ---------- + box_list: list + boxes to be plotted. 
box_list can be created with 'splitImageIntoBoxesRngAz' or 'splitImageIntoBoxes' + ax: plt.Axes + axis for plotting (default: None) + edgecolor: str + edge color for the boxes (default: "k") + linewidth: float + line width for the boxes (default: 1) + + Returns + ------- + ax: plt.Axes + current axis + """ + if ax is None: + fig = plt.figure() + ax = fig.add_subplot() + + for box in box_list: + rect = patches.Rectangle((box[0], box[1]), box[2] - box[0], box[3] - box[1], linewidth=linewidth, + edgecolor=edgecolor, facecolor="none") + ax.add_patch(rect) + return ax + + +class TimeSeriesViewer: + """TimeSeriesViewer.""" + + def __init__(self, *, point_obj: Points, vel_scale: str = "mm", path_inputs: str, logger: Logger): + """Init.""" + self.sc = None + self.point_obj = point_obj + self.ts_point_marker = None # for ts point marker + self.ts_point_idx = 0 # index of ts_point + self.ts_refpoint_marker = None # for reference point marker + self.logger = logger + self.ts_refpoint_idx = None # index of reference point + self.vel_scale = vel_scale + scale_dict = {"mm": 1000, "cm": 100, "dm": 10, "m": 1} + if self.vel_scale not in scale_dict.keys(): + raise ValueError(f"Invalid argument: '{self.vel_scale}'") + self.scale = scale_dict[self.vel_scale] + self.tree = KDTree(self.point_obj.coord_xy) + if point_obj.ifg_net_obj.dates is not None: + self.times = [datetime.date.fromisoformat(date) for date in point_obj.ifg_net_obj.dates] + else: # backwards compatible, if ifg_net_obj does not contain dates + self.times = point_obj.ifg_net_obj.tbase + + vel, demerr, ref_atmo, coherence, omega, v_hat = ut.estimateParameters(obj=self.point_obj, ifg_space=False) + self.vel = vel + self.demerr = demerr + self.ref_atmo = ref_atmo + + self.bmap_obj = AmplitudeImage(file_path=os.path.join(os.path.dirname(self.point_obj.file_path), + "background_map.h5")) + self.bmap_obj.open() + self.height = readfile.read(os.path.join(path_inputs, "geometryRadar.h5"), datasetName='height')[0] + + temp_coh_obj = BaseStack( + file=os.path.join(os.path.dirname(self.point_obj.file_path), "temporal_coherence.h5"), + logger=logger) + self.temp_coh_img = temp_coh_obj.read(dataset_name="temp_coh") + + self.font_size = 10 + plt.rc('font', size=self.font_size) # controls default text size + plt.rc('axes', titlesize=self.font_size) # fontsize of the title + plt.rc('axes', labelsize=self.font_size) # fontsize of the x and y labels + plt.rc('xtick', labelsize=self.font_size) # fontsize of the x tick labels + plt.rc('ytick', labelsize=self.font_size) # fontsize of the y tick labels + plt.rc('legend', fontsize=self.font_size) # fontsize of the legend + + self.initFigureMap() + self.initFigureTimeseries() + self.plotMap(val=None) + self.plotPointTimeseries(val=None) # just any point + self.fig1.canvas.mpl_connect('button_press_event', self.onClick) + plt.show() + + def initFigureMap(self): + """InitFigureMap.""" + self.fig1 = plt.figure() + self.ax_img = self.fig1.subplots(1, 1) + + self.ax_cb = self.fig1.add_axes([0.93, 0.6, 0.015, 0.15]) # (left, bottom, width, height) + self.cb = self.fig1.colorbar(self.sc, + cax=self.ax_cb, + ax=self.ax_img, + pad=0.03, + shrink=0.8, + aspect=10, + orientation='vertical') + + # add button to select reference point + self.set_reference_point = False + self.ax_button = self.fig1.add_axes([0.125, 0.9, 0.1, 0.08]) # (left, bottom, width, height) + self.button_mask = widgets.Button(ax=self.ax_button, label='Select\nReference', image=None, color='1') + self.button_mask.on_clicked(self.updateButtonStatus) + + # add 
radiobutton to select parameter + self.ax_radio_par = self.fig1.add_axes([0.225, 0.9, 0.2, 0.08]) # (left, bottom, width, height) + self.rb_par = widgets.RadioButtons(self.ax_radio_par, labels=['Velocity', 'DEM error', 'None'], active=0) + self.rb_par.on_clicked(self.plotMap) + + # add radiobutton to select background image + self.ax_radio_backgr = self.fig1.add_axes([0.425, 0.9, 0.2, 0.08]) # (left, bottom, width, height) + self.rb_backgr = widgets.RadioButtons(self.ax_radio_backgr, labels=['Amplitude', 'DEM', 'Coherence', 'None'], + active=0) + self.rb_backgr.on_clicked(self.plotMap) + + # add info box with info about velocity and DEM error of selected pixel + self.ax_info_box = self.fig1.add_axes([0.625, 0.9, 0.2, 0.08]) # (left, bottom, width, height) + self.text_obj_time = self.ax_info_box.text(0.1, 0.1, "") + self.ax_info_box.set_xticks([], []) + self.ax_info_box.set_yticks([], []) + + # add variable for axis of slider controlling the visualized coherence background image + self.ax_slide_coh = None + self.sl_last_val = 0.0 + self.sl_coh = None + + def initFigureTimeseries(self): + """InitFigureTimeseries.""" + self.fig2 = plt.figure(figsize=(15, 5)) + self.ax_ts = self.fig2.subplots(1, 1) + + # add radiobutton for fitting linear model + self.ax_radio_fit = self.fig2.add_axes([0.125, 0.9, 0.2, 0.08]) # (left, bottom, width, height) + self.rb_fit = widgets.RadioButtons(self.ax_radio_fit, labels=['None', 'Linear fit'], active=0) + + # add radiobutton for selecting baseline type + self.ax_radio_baselines = self.fig2.add_axes([0.325, 0.9, 0.2, 0.08]) # (left, bottom, width, height) + self.rb_baselines = widgets.RadioButtons( + self.ax_radio_baselines, + labels=['Temporal baseline', 'Perpendicular baseline'], + active=0 + ) + + # add check box for removing phase due to parameters + self.ax_cbox_par = self.fig2.add_axes([0.525, 0.9, 0.2, 0.08]) # (left, bottom, width, height) + self.cbox_par = widgets.CheckButtons( + self.ax_cbox_par, + ["Velocity", "DEM error"], + actives=[True, False] + ) + self.rb_fit.on_clicked(self.plotPointTimeseries) + self.rb_baselines.on_clicked(self.plotPointTimeseries) + self.cbox_par.on_clicked(self.plotPointTimeseries) + + def plotMap(self, val: object): # val seems to be unused, but its necessary for the function to work. 
+ """Plot velocity map and time series.""" + flag_initial_plot = (0.0, 1.0) == self.ax_img.get_xlim() + ax_img_xlim = None + ax_img_ylim = None + if not flag_initial_plot: + ax_img_xlim = self.ax_img.get_xlim() + ax_img_ylim = self.ax_img.get_ylim() + + self.ax_img.cla() + + # get selected background from radiobutton + if self.rb_backgr.value_selected == "Amplitude": + self.ax_img = self.bmap_obj.plot(ax=self.ax_img, logger=self.logger) + if self.ax_slide_coh is not None: + self.sl_last_val = self.sl_coh.val + self.ax_slide_coh.remove() + self.ax_slide_coh = None + if self.rb_backgr.value_selected == "DEM": + self.ax_img.imshow(self.height, cmap=ColormapExt('DEM_print').colormap) + meta = {"ORBIT_DIRECTION": self.bmap_obj.orbit_direction} + auto_flip_direction(meta, ax=self.ax_img, print_msg=False) + self.ax_img.set_xlabel("Range") + self.ax_img.set_ylabel("Azimuth") + if self.ax_slide_coh is not None: + self.sl_last_val = self.sl_coh.val + self.ax_slide_coh.remove() + self.ax_slide_coh = None + if self.rb_backgr.value_selected == "Coherence": + if self.ax_slide_coh is None: + # add slider to change value of coherence for background map + self.ax_slide_coh = self.fig1.add_axes([0.425, 0.85, 0.2, 0.03]) # (left, bottom, width, height) + self.sl_coh = widgets.Slider(self.ax_slide_coh, + label='Coherence', + valmin=0.0, + valmax=1.0, + valinit=self.sl_last_val, + valfmt="%.1f") + + self.ax_img.imshow(self.temp_coh_img, + cmap=plt.get_cmap("gray"), + vmin=np.round(self.sl_coh.val, decimals=1), + vmax=1) + meta = {"ORBIT_DIRECTION": self.bmap_obj.orbit_direction} + auto_flip_direction(meta, ax=self.ax_img, print_msg=False) + self.ax_img.set_xlabel("Range") + self.ax_img.set_ylabel("Azimuth") + if self.rb_backgr.value_selected == "None": + self.ax_img.imshow(np.ones_like(self.height, dtype=np.int8), cmap=plt.cm.get_cmap("gray"), vmin=0, vmax=1) + meta = {"ORBIT_DIRECTION": self.bmap_obj.orbit_direction} + auto_flip_direction(meta, ax=self.ax_img, print_msg=False) + self.ax_img.set_xlabel("Range") + self.ax_img.set_ylabel("Azimuth") + if self.ax_slide_coh is not None: + self.sl_last_val = self.sl_coh.val + self.ax_slide_coh.remove() + self.ax_slide_coh = None + + par = None + v_range = None + cb_ttl = "" + if self.rb_par.value_selected == "Velocity": # show velocity + v_range = np.max(np.abs(self.vel * self.scale)) + par = self.vel * self.scale + cb_ttl = f"[{self.vel_scale}/\nyear]" + elif self.rb_par.value_selected == "DEM error": # show demerr + v_range = np.max(np.abs(self.demerr)) + par = self.demerr + cb_ttl = "[m]" + + if self.rb_par.value_selected != "None": + self.sc = self.ax_img.scatter(self.point_obj.coord_xy[:, 1], + self.point_obj.coord_xy[:, 0], + c=par, + s=5, + cmap=colormaps["jet_r"], + vmin=-v_range, + vmax=v_range) + + self.cb.ax.set_title(cb_ttl, fontsize=self.font_size) + self.cb = self.fig1.colorbar(self.sc, cax=self.ax_cb, ax=self.ax_img, pad=0.03, shrink=0.8, aspect=10, + orientation='vertical') + + # add back location of selected sarvey point and current reference + if self.ts_refpoint_idx is not None: # initial value is None + y, x = self.point_obj.coord_xy[self.ts_refpoint_idx, :] + self.ts_refpoint_marker = self.ax_img.scatter(x, y, marker='^', facecolors='none', edgecolors='k') + + y, x = self.point_obj.coord_xy[self.ts_point_idx, :] + self.ts_point_marker = self.ax_img.scatter(x, y, facecolors='none', edgecolors='k') + + if not flag_initial_plot: + self.ax_img.set_xlim(ax_img_xlim) + self.ax_img.set_ylim(ax_img_ylim) + + plt.draw() + + def updateButtonStatus(self, 
val: object): # val seems to be unused, but its necessary for the function to work. + """Set to true.""" + if self.set_reference_point: + self.set_reference_point = False + self.button_mask.color = '1' + else: + self.set_reference_point = True + self.button_mask.color = '0.5' + + def onClick(self, event): + """Event function to get y/x from button press.""" + if event.inaxes is None: + return + + if not plt.fignum_exists(self.fig2.number): + self.initFigureTimeseries() + plt.show() + + if event.button is MouseButton.RIGHT: + if event.inaxes == self.ax_img: + y, x = int(event.ydata + 0.5), int(event.xdata + 0.5) + idx = self.tree.query([y, x])[-1] + y, x = self.point_obj.coord_xy[idx, :] + + if self.set_reference_point: # update reference point + self.ts_refpoint_idx = idx + self.updateReference() + self.updateButtonStatus(val=None) + # if self.ts_refpoint_marker is not None: # initial value is None + # self.ts_refpoint_marker.remove() + # self.ts_refpoint_marker = self.ax_img.scatter(x, y, marker='^', facecolors='none', edgecolors='k') + else: + self.ts_point_idx = idx + + if self.ts_point_marker is not None: # initial value is None + self.ts_point_marker.remove() + y, x = self.point_obj.coord_xy[self.ts_point_idx, :] + self.ts_point_marker = self.ax_img.scatter(x, y, facecolors='none', edgecolors='k') + self.plotPointTimeseries(val=None) + return + + def updateReference(self): + """Change the phase of all points according to the new reference point. + + Update the plot of the velocity and time series. + """ + self.logger.info(msg="changed reference to ID: {}".format(self.point_obj.point_id[self.ts_refpoint_idx])) + self.point_obj.phase -= self.point_obj.phase[self.ts_refpoint_idx, :] + vel, demerr, ref_atmo, coherence, omega, v_hat = ut.estimateParameters(obj=self.point_obj, ifg_space=False) + self.vel = vel + self.demerr = demerr + self.ref_atmo = ref_atmo + self.plotMap(val=None) + + def plotPointTimeseries(self, val: object): # val seems to be unused, but its necessary for the function to work. 
+ """Plot_point_timeseries.""" + self.ax_ts.cla() + + # transform phase time series into meters + resulting_ts = self.point_obj.wavelength / (4 * np.pi) * self.point_obj.phase[self.ts_point_idx, :] + cbox_status = self.cbox_par.get_status() + if not cbox_status[0]: # Displacement + resulting_ts = resulting_ts - self.point_obj.ifg_net_obj.tbase * self.vel[self.ts_point_idx] + if not cbox_status[1]: # DEM error + phase_topo = (self.point_obj.ifg_net_obj.pbase / (self.point_obj.slant_range[self.ts_point_idx] * + np.sin(self.point_obj.loc_inc[self.ts_point_idx])) * + self.demerr[self.ts_point_idx]) + resulting_ts = resulting_ts - phase_topo + + self.ax_ts.set_ylabel(f"Displacement [{self.vel_scale}]") + + # add trend + if self.rb_fit.value_selected == "Linear fit": + if self.rb_baselines.value_selected == "Temporal baseline": + line = self.point_obj.ifg_net_obj.tbase * self.vel[self.ts_point_idx] + self.ref_atmo[self.ts_point_idx] + self.ax_ts.plot(self.times, line * self.scale, '-k') + elif self.rb_baselines.value_selected == "Perpendicular baseline": + line = (self.point_obj.ifg_net_obj.pbase / (self.point_obj.slant_range[self.ts_point_idx] * + np.sin(self.point_obj.loc_inc[self.ts_point_idx])) * + self.demerr[self.ts_point_idx] + self.ref_atmo[self.ts_point_idx]) + self.ax_ts.plot(self.point_obj.ifg_net_obj.pbase, line * self.scale, '-k') + + # set y-lim to [-20, 20] mm except if it exceeds this scale + y_max = max([0.02, resulting_ts.max() + 0.005]) + y_min = min([-0.02, resulting_ts.min() - 0.005]) + + self.ax_ts.set_ylim(y_min * self.scale, y_max * self.scale) + if self.rb_baselines.value_selected == "Temporal baseline": + self.ax_ts.plot(self.times, resulting_ts * self.scale, '.') + self.ax_ts.set_xlabel("Time [years]") + if self.rb_baselines.value_selected == "Perpendicular baseline": + self.ax_ts.plot(self.point_obj.ifg_net_obj.pbase, resulting_ts * self.scale, '.') + self.ax_ts.set_xlabel("Perpendicular Baseline [m]") + + self.text_obj_time.remove() + point_info = "DEM error: {:.0f} m\nVelocity: {:.0f} {:s}/year".format( + self.demerr[self.ts_point_idx], + self.vel[self.ts_point_idx] * self.scale, + self.vel_scale, + ) + self.text_obj_time = self.ax_info_box.text(0.5, 0.5, point_info, ha='center', va='center') + + # update figure + self.fig1.canvas.draw() + self.fig2.canvas.draw() diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..a040cd5 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,23 @@ +[bumpversion] +current_version = 1.0.0 +commit = True +tag = True + +[bumpversion:file:setup.py] +search = version='{current_version}' +replace = version='{new_version}' + +[bumpversion:file:sarvey/__init__.py] +search = __version__ = '{current_version}' +replace = __version__ = '{new_version}' + +[bdist_wheel] +universal = 1 + +[flake8] +exclude = docs + +[aliases] +# Define setup.py command aliases here +test = pytest + diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..8093624 --- /dev/null +++ b/setup.py @@ -0,0 +1,109 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. 
+# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + +"""The setup script.""" + + +from setuptools import setup, find_packages + +with open('README.rst') as readme_file: + readme = readme_file.read() + +with open('HISTORY.rst') as history_file: + history = history_file.read() + +version = {} +with open("sarvey/version.py") as version_file: + exec(version_file.read(), version) + +req = [ + "cython", "numpy", "pyproj", "matplotlib", "numba", "scipy", + "mintpy", "h5py", "overpy", "miaplpy", "gstools", "shapely", "pandas", "geopandas", "pymaxflow", + "pillow", "pydantic<=1.10.10", "importlib_resources", "kamui" +] + +req_setup = [] + +req_test = ['pytest>=3', 'pytest-cov', 'pytest-reporter-html1', 'urlchecker'] + +req_doc = [ + 'sphinx>=4.1.1', + 'sphinx-argparse', + 'sphinx-autodoc-typehints' +] + +req_lint = ['flake8', 'pycodestyle', 'pydocstyle'] + +req_dev = ['twine'] + req_setup + req_test + req_doc + req_lint + +setup( + author="Andreas Piter", + author_email='piter@ipi.uni-hannover.de', + python_requires='>=3.7', + classifiers=[ + 'Development Status :: 2 - Pre-Alpha', + 'Intended Audience :: Developers', + 'None', + 'Natural Language :: English', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10' + ], + description="InSAR time series analysis software for SAR4Infra project", + entry_points={ + 'console_scripts': [ + 'sarvey=sarvey.sarvey_mti:main', + 'sarvey_plot=sarvey.sarvey_plot:main', + 'sarvey_export=sarvey.sarvey_export:main', + 'sarvey_mask=sarvey.sarvey_mask:main', + 'sarvey_osm=sarvey.sarvey_osm:main', + ], + }, + extras_require={ + "doc": req_doc, + "test": req_test, + "lint": req_lint, + "dev": req_dev + }, + install_requires=req, + license="GPLv3", + include_package_data=True, + keywords='sarvey', + long_description=readme, + name='sarvey', + packages=find_packages(include=['sarvey', 'sarvey.*']), + setup_requires=req_setup, + test_suite='tests', + tests_require=req_test, + url='https://gitlab.projekt.uni-hannover.de/ipi-sar4infra/sarvey/', + version=version['__version__'], + zip_safe=False, +) diff --git a/tests/CI_docker/build_sarvey_testsuite_image.sh b/tests/CI_docker/build_sarvey_testsuite_image.sh new file mode 100755 index 0000000..c08ad9e --- /dev/null +++ b/tests/CI_docker/build_sarvey_testsuite_image.sh @@ -0,0 +1,26 @@ +#!/usr/bin/env bash +set -e +# Any subsequent(*) commands which fail will cause the shell script to exit immediately + +context_dir="./context" +dockerfile="sarvey_ci.docker" 
+python_script=' +version = {} +with open("../../sarvey/version.py") as version_file: + exec(version_file.read(), version) +print(version["__version__"]) +' +version=`python -c "$python_script"` +tag="sarvey_ci:$version" +gitlab_runner="sarvey_gitlab_CI_runner" + +echo "#### Build runner docker image" +if [[ "$(docker images ${tag} | grep ${tag} 2> /dev/null)" != "" ]]; then + docker rmi ${tag} +fi +DOCKER_BUILDKIT=1 docker build ${context_dir} \ + --no-cache \ + -f ${context_dir}/${dockerfile} \ + -m 20G \ + -t ${tag} +ls diff --git a/tests/CI_docker/context/environment_sarvey.yml b/tests/CI_docker/context/environment_sarvey.yml new file mode 100644 index 0000000..a88445b --- /dev/null +++ b/tests/CI_docker/context/environment_sarvey.yml @@ -0,0 +1,48 @@ +name: sarvey + +channels: &id1 + - conda-forge + +dependencies: + - python=3.10 + - setuptools + - cython + - pyproj + - h5py + - numpy + - isce2 + - scipy + - pip + - gcc_linux-64 + - matplotlib + - overpy + - numba + - mintpy + - git + - shapely + - geopandas + - gstools + - pydantic=1.10.* + # setup requirements + - tox + # test requirements + - pytest + - pytest-cov + - urlchecker + # lint requirements + - flake8 + - pillow + - pycodestyle + - pydocstyle + - pylint + # doc requirements + - sphinx>=4.1.1 + - sphinx-argparse + - sphinx-autodoc-typehints + - sphinxcontrib-jquery + # deployment requirements + - twine + + - pip: + - kamui[extra] + - pytest-reporter-html1 diff --git a/tests/CI_docker/context/sarvey_ci.docker b/tests/CI_docker/context/sarvey_ci.docker new file mode 100644 index 0000000..1ab1d83 --- /dev/null +++ b/tests/CI_docker/context/sarvey_ci.docker @@ -0,0 +1,52 @@ +FROM condaforge/mambaforge:latest as conda + +# update base environment +RUN --mount=type=cache,target=/opt/conda/pkgs \ + mamba update --all -y && \ + conda clean -afy + +ARG DEBIAN_FRONTEND=noninteractive + +RUN mkdir actions-runner; cd actions-runner && \ + apt-get update && apt-get install -y curl + +WORKDIR /actions-runner + +RUN curl -o actions-runner-linux-x64-2.317.0.tar.gz -L https://github.com/actions/runner/releases/download/v2.317.0/actions-runner-linux-x64-2.317.0.tar.gz && \ + tar xzf ./actions-runner-linux-x64-2.317.0.tar.gz && \ + ./bin/installdependencies.sh && \ + useradd -m runneruser && \ + chown -R runneruser:runneruser /actions-runner + +USER runneruser + +RUN ./config.sh --url https://github.com/luhipi/sarvey --token --unattended --replace --name mefe2_sarvey_ci_1.0.0 --labels self-hosted + +USER root + +# install some needed packages +RUN --mount=type=cache,target=/opt/conda/pkgs \ + mamba install -y bzip2 fish gcc gdb git ipython make nano pip tree wget unzip + +# use bash shell instead of sh shell for all docker commands +SHELL ["/bin/bash", "-c"] + +# copy some needed stuff to /root +COPY ../environment_sarvey.yml . 
+ +# create ci_env environment +RUN --mount=type=cache,target=/opt/conda/pkgs \ + pip install conda-merge && \ + wget https://raw.githubusercontent.com/insarlab/MiaplPy/main/conda-env.yml && \ + conda-merge conda-env.yml environment_sarvey.yml > env.yml && \ + mamba env create -n ci_env -f env.yml && \ + source /opt/conda/bin/activate ci_env && \ + pip install git+https://github.com/insarlab/MiaplPy.git && \ + conda list && \ + conda clean -afy + +USER runneruser + +RUN chmod +x /actions-runner/run.sh + +ENTRYPOINT ["/actions-runner/run.sh"] diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..9c3c06a --- /dev/null +++ b/tests/__init__.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + +"""Unit test package for SARvey.""" diff --git a/tests/linting/.gitkeep b/tests/linting/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/tests/test_config.py b/tests/test_config.py new file mode 100644 index 0000000..4d2bea9 --- /dev/null +++ b/tests/test_config.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. 
It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + + +"""Tests for `SARvey config`.""" + +import os +import unittest + +from sarvey.config import loadConfiguration, Config +from copy import deepcopy + + +class TestConfig(unittest.TestCase): + root_path = None + config_file = None + configuration = None + output_data_path = None + + @classmethod + def setUp(cls) -> None: + """Define the Class method SetUp.""" + cls.root_path = "./" + if os.path.basename(os.getcwd()) == "tests": + cls.root_path = "../" + + cls.config_file = os.path.abspath(f"{cls.root_path}tests/testdata/config_test.json") + cls.configuration = loadConfiguration(path=cls.config_file) + + def testDataDirectories(self): + """Test configuration for the data directories settings.""" + + config = deepcopy(self.configuration) + Config(**config) diff --git a/tests/test_ifg_network.py b/tests/test_ifg_network.py new file mode 100644 index 0000000..16305d2 --- /dev/null +++ b/tests/test_ifg_network.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . + +"""Tests for SARvey `ifg_network` functions.""" + +import os +import unittest +from datetime import datetime + +import numpy as np + +from sarvey.ifg_network import SmallBaselineNetwork + + +class TestUtils(unittest.TestCase): + root_path = None + config_file = None + configuration = None + output_data_path = None + + @classmethod + def setUp(cls) -> None: + """Define the Class method SetUp.""" + # define class variables, create temporary output dir etc. 
here + cls.root_path = "./" + if os.path.basename(os.getcwd()) == "tests": + cls.root_path = "../" + + @classmethod + def tearDown(cls) -> None: + """Define the Class method tearDown.""" + # delete testfolder or do some other cleanup here + + def testConfigure_ok(self): + """Test the expected output.""" + # Input: + pbase = np.array([0, 0, 0, 0]) # not important for this test + dates = [datetime(2023, 8, 17), datetime(2023, 8, 17), datetime(2023, 8, 17), datetime(2023, 8, 17)] + + tbase = np.array([0, 6, 12, 18]) + ifg_net_obj = SmallBaselineNetwork() + ifg_net_obj.configure(pbase=pbase, tbase=tbase, num_link=2, max_tbase=12, dates=dates) + ifg_list = np.array([(0, 1), (0, 2), (1, 2), (1, 3), (2, 3)]) + assert (ifg_net_obj.ifg_list == ifg_list).all() + + tbase = np.array([0, 12, 18, 312]) + ifg_net_obj = SmallBaselineNetwork() + ifg_net_obj.configure(pbase=pbase, tbase=tbase, num_link=3, max_tbase=5, dates=dates) + ifg_list = np.array([(0, 1), (1, 2), (2, 3)]) + assert (ifg_net_obj.ifg_list == ifg_list).all() + + tbase = np.array([0, 12, 18, 312]) + ifg_net_obj = SmallBaselineNetwork() + ifg_net_obj.configure(pbase=pbase, tbase=tbase, num_link=3, max_tbase=20, dates=dates) + ifg_list = np.array([(0, 1), (0, 2), (1, 2), (2, 3)]) + assert (ifg_net_obj.ifg_list == ifg_list).all() + + # def testConfigure_err(self): + # """Test for expected Errors.""" + # diff --git a/tests/test_processing.py b/tests/test_processing.py new file mode 100644 index 0000000..704c4c7 --- /dev/null +++ b/tests/test_processing.py @@ -0,0 +1,391 @@ +#!/usr/bin/env python + +# SARvey - A multitemporal InSAR time series tool for the derivation of displacements. +# +# Copyright (C) 2021-2024 Andreas Piter (IPI Hannover, piter@ipi.uni-hannover.de) +# +# This software was developed together with FERN.Lab (fernlab@gfz-potsdam.de) in the context +# of the SAR4Infra project with funds of the German Federal Ministry for Digital and +# Transport and contributions from Landesamt fuer Vermessung und Geoinformation +# Schleswig-Holstein and Landesbetrieb Strassenbau und Verkehr Schleswig-Holstein. +# +# This program is free software: you can redistribute it and/or modify it under +# the terms of the GNU General Public License as published by the Free Software +# Foundation, either version 3 of the License, or (at your option) any later +# version. +# +# Important: This package uses PyMaxFlow. The core of PyMaxflows library is the C++ +# implementation by Vladimir Kolmogorov. It is also licensed under the GPL, but it REQUIRES that you +# cite [BOYKOV04] (see LICENSE) in any resulting publication if you use this code for research purposes. +# This requirement extends to SARvey. +# +# This program is distributed in the hope that it will be useful, but WITHOUT +# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS +# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more +# details. +# +# You should have received a copy of the GNU Lesser General Public License along +# with this program. If not, see . 
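
Editor's note: the `ifg_network` unit test above shows how `SmallBaselineNetwork.configure` selects interferogram pairs from the temporal baselines. The following is a minimal standalone sketch of that call, not part of the patch; the baseline values are the toy numbers from the test, not a recommendation:

    from datetime import datetime

    import numpy as np

    from sarvey.ifg_network import SmallBaselineNetwork

    # four acquisitions with 6-day spacing; perpendicular baselines are irrelevant here
    pbase = np.zeros(4)
    tbase = np.array([0, 6, 12, 18])
    dates = [datetime(2023, 8, 17)] * 4  # placeholder dates, as in the unit test

    ifg_net_obj = SmallBaselineNetwork()
    ifg_net_obj.configure(pbase=pbase, tbase=tbase, num_link=2, max_tbase=12, dates=dates)
    print(ifg_net_obj.ifg_list)  # per the test: [(0, 1), (0, 2), (1, 2), (1, 3), (2, 3)]
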
+ +"""Tests for `SARvey` package.""" +import json +import logging +import os +import shutil +import sys +import unittest +from copy import deepcopy +from glob import glob +from json import JSONDecodeError +from os.path import join + +from sarvey.sarvey_mti import createParser, run +from sarvey.config import Config + + +class TestProcessing(unittest.TestCase): + root_path = None + config_file = None + configuration = None + output_data_path = None + args = None + logger = None + + @classmethod + def setUp(self) -> None: + """Define the Class method SetUp.""" + # define class variables, create temporary output dir etc. here + self.root_path = "./" + if os.path.basename(os.getcwd()) == "tests": + self.root_path = "../" + + self.config_file = os.path.abspath(f"{self.root_path}/tests/testdata/config_test.json") + try: + with open(self.config_file) as config_fp: + config_dict = json.load(config_fp) + self.configuration = Config(**config_dict) + except JSONDecodeError as err: + raise IOError(f'Failed to load the configuration json file => {err}') + + self.logger = logging.getLogger(__name__) + console_handler = logging.StreamHandler(sys.stdout) + self.logger.addHandler(console_handler) + self.logger.setLevel(logging.getLevelName('DEBUG')) + + self.output_data_path = os.path.abspath(f"{self.root_path}/tests/testdata/output") + parser = createParser() + self.args = parser.parse_args(["-f", self.config_file, "0", "4", "-w", self.output_data_path]) + + if not os.path.exists(self.output_data_path): + os.makedirs(join(self.output_data_path, "pic")) + else: + if not os.path.exists(join(self.output_data_path, "pic")): + os.mkdir(join(self.output_data_path, "pic")) + + @classmethod + def tearDown(self) -> None: + """Define the Class method tearDown.""" + # delete testfolder or do some other cleanup here + try: + if os.path.exists(self.output_data_path): + shutil.rmtree(self.output_data_path) + except OSError: + print("Deletion of the directory %s failed" % self.output_data_path) + else: + print("Successfully deleted the directory %s" % self.output_data_path) + + def testUnwrappingTimeAndSpace(self): + """TestUnwrappingTimeAndSpace.""" + config = deepcopy(self.configuration) + args = deepcopy(self.args) + + # set config + config.processing.temporal_unwrapping = True + config.filtering.skip_filtering = True + + # preparation + args.start = 0 + args.stop = 0 + + run(config=config, args=args, logger=self.logger) + assert glob(os.path.join(config.data_directories.path_outputs, "background_map.h5")), \ + 'Processing failed (background_map.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "coordinates_utm.h5")), \ + 'Processing failed (coordinates_utm.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "ifg_network.h5")), \ + 'Processing failed (ifg_network.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "ifg_stack.h5")), \ + 'Processing failed (ifg_stack.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "temporal_coherence.h5")), \ + 'Processing failed (temporal_coherence.h5 not created).' + + # consistencyCheck + args.start = 1 + args.stop = 1 + + run(config=config, args=args, logger=self.logger) + assert glob(os.path.join(config.data_directories.path_outputs, "point_network.h5")), \ + 'Processing failed (point_network.h5 not created).' 
+ assert glob(os.path.join(config.data_directories.path_outputs, "point_network_parameter.h5")), \ + 'Processing failed (point_network_parameter.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "p1_ifg_wr.h5")), \ + 'Processing failed (p1_ifg_wr.h5 not created).' + + # unwrapping + args.start = 2 + args.stop = 2 + + run(config=config, args=args, logger=self.logger) + assert glob(os.path.join(config.data_directories.path_outputs, "p1_ifg_unw.h5")), \ + 'Processing failed (p1_ifg_unw.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "p1_ts.h5")), \ + 'Processing failed (p1_ts.h5 not created).' + + # filtering + args.start = 3 + args.stop = 3 + + run(config=config, args=args, logger=self.logger) + assert glob(os.path.join(config.data_directories.path_outputs, "p1_ts_filt.h5")), \ + 'Processing failed (p1_ts_filt.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "p1_aps.h5")), \ + 'Processing failed (p1_aps.h5 not created).' + coh_value = int(config.filtering.coherence_p2 * 100) + assert glob(os.path.join(config.data_directories.path_outputs, f"coh{coh_value}_ifg_wr.h5")), \ + f'Processing failed (coh{coh_value}_ifg_wr.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, f"coh{coh_value}_aps.h5")), \ + f'Processing failed (coh{coh_value}_aps.h5 not created).' + + # densification + args.start = 4 + args.stop = 4 + + run(config=config, args=args, logger=self.logger) + coh_value = int(config.filtering.coherence_p2 * 100) + assert glob(os.path.join(config.data_directories.path_outputs, f"coh{coh_value}_ifg_unw.h5")), \ + f'Processing failed (coh{coh_value}_ifg_unw.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, f"coh{coh_value}_ts.h5")), \ + f'Processing failed (coh{coh_value}_ts.h5 not created).' + + def testUnwrappingSpace(self): + """TestUnwrappingTimeAndSpace.""" + config = deepcopy(self.configuration) + args = deepcopy(self.args) + + # set config + config.processing.temporal_unwrapping = False + config.preparation.network_type = "sb" + + # preparation + args.start = 0 + args.stop = 0 + + run(config=config, args=args, logger=self.logger) + assert glob(os.path.join(config.data_directories.path_outputs, "background_map.h5")), \ + 'Processing failed (background_map.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "coordinates_utm.h5")), \ + 'Processing failed (coordinates_utm.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "ifg_network.h5")), \ + 'Processing failed (ifg_network.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "ifg_stack.h5")), \ + 'Processing failed (ifg_stack.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "temporal_coherence.h5")), \ + 'Processing failed (temporal_coherence.h5 not created).' + + # consistencyCheck + args.start = 1 + args.stop = 1 + + run(config=config, args=args, logger=self.logger) + assert glob(os.path.join(config.data_directories.path_outputs, "point_network.h5")), \ + 'Processing failed (point_network.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "point_network_parameter.h5")), \ + 'Processing failed (point_network_parameter.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "p1_ifg_wr.h5")), \ + 'Processing failed (p1_ifg_wr.h5 not created).' 
+ + # unwrapping + args.start = 2 + args.stop = 2 + + run(config=config, args=args, logger=self.logger) + assert glob(os.path.join(config.data_directories.path_outputs, "p1_ifg_unw.h5")), \ + 'Processing failed (p1_ifg_unw.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "p1_ts.h5")), \ + 'Processing failed (p1_ts.h5 not created).' + + # filtering + args.start = 3 + args.stop = 3 + + run(config=config, args=args, logger=self.logger) + assert glob(os.path.join(config.data_directories.path_outputs, "p1_ts_filt.h5")), \ + 'Processing failed (p1_ts_filt.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "p1_aps.h5")), \ + 'Processing failed (p1_aps.h5 not created).' + coh_value = int(config.filtering.coherence_p2 * 100) + assert glob(os.path.join(config.data_directories.path_outputs, f"coh{coh_value}_ifg_wr.h5")), \ + f'Processing failed (coh{coh_value}_ifg_wr.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, f"coh{coh_value}_aps.h5")), \ + f'Processing failed (coh{coh_value}_aps.h5 not created).' + + # densification + args.start = 4 + args.stop = 4 + + run(config=config, args=args, logger=self.logger) + coh_value = int(config.filtering.coherence_p2 * 100) + assert glob(os.path.join(config.data_directories.path_outputs, f"coh{coh_value}_ifg_unw.h5")), \ + f'Processing failed (coh{coh_value}_ifg_unw.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, f"coh{coh_value}_ts.h5")), \ + f'Processing failed (coh{coh_value}_ts.h5 not created).' + + def testPhaseLinking(self): + """TestUnwrappingTimeAndSpace.""" + config = deepcopy(self.configuration) + args = deepcopy(self.args) + + # set config + config.phase_linking.phase_linking = True + config.phase_linking.use_ps = True + config.processing.temporal_unwrapping = True + config.filtering.coherence_p2 = 0.75 + + # preparation + args.start = 0 + args.stop = 0 + + run(config=config, args=args, logger=self.logger) + assert glob(os.path.join(config.data_directories.path_outputs, "background_map.h5")), \ + 'Processing failed (background_map.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "coordinates_utm.h5")), \ + 'Processing failed (coordinates_utm.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "ifg_network.h5")), \ + 'Processing failed (ifg_network.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "ifg_stack.h5")), \ + 'Processing failed (ifg_stack.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "temporal_coherence.h5")), \ + 'Processing failed (temporal_coherence.h5 not created).' + + # consistencyCheck + args.start = 1 + args.stop = 1 + + run(config=config, args=args, logger=self.logger) + assert glob(os.path.join(config.data_directories.path_outputs, "point_network.h5")), \ + 'Processing failed (point_network.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "point_network_parameter.h5")), \ + 'Processing failed (point_network_parameter.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "p1_ifg_wr.h5")), \ + 'Processing failed (p1_ifg_wr.h5 not created).' + + # unwrapping + args.start = 2 + args.stop = 2 + + run(config=config, args=args, logger=self.logger) + assert glob(os.path.join(config.data_directories.path_outputs, "p1_ifg_unw.h5")), \ + 'Processing failed (p1_ifg_unw.h5 not created).' 
+ assert glob(os.path.join(config.data_directories.path_outputs, "p1_ts.h5")), \ + 'Processing failed (p1_ts.h5 not created).' + + # filtering + args.start = 3 + args.stop = 3 + + run(config=config, args=args, logger=self.logger) + assert glob(os.path.join(config.data_directories.path_outputs, "p1_ts_filt.h5")), \ + 'Processing failed (p1_ts_filt.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "p1_aps.h5")), \ + 'Processing failed (p1_aps.h5 not created).' + coh_value = int(config.filtering.coherence_p2 * 100) + assert glob(os.path.join(config.data_directories.path_outputs, f"coh{coh_value}_ifg_wr.h5")), \ + f'Processing failed (coh{coh_value}_ifg_wr.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, f"coh{coh_value}_aps.h5")), \ + f'Processing failed (coh{coh_value}_aps.h5 not created).' + + # densification + args.start = 4 + args.stop = 4 + + run(config=config, args=args, logger=self.logger) + coh_value = int(config.filtering.coherence_p2 * 100) + assert glob(os.path.join(config.data_directories.path_outputs, f"coh{coh_value}_ifg_unw.h5")), \ + f'Processing failed (coh{coh_value}_ifg_unw.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, f"coh{coh_value}_ts.h5")), \ + f'Processing failed (coh{coh_value}_ts.h5 not created).' + + def testMasking(self): + """TestUnwrappingTimeAndSpace.""" + config = deepcopy(self.configuration) + args = deepcopy(self.args) + + # set config + config.processing.temporal_unwrapping = False + config.preparation.network_type = "sb" + config.consistency_check.spatial_mask_file_p1 = "tests/testdata/aoi_mask.h5" + config.filtering.spatial_mask_file_p2 = "tests/testdata/aoi_mask.h5" + + # preparation + args.start = 0 + args.stop = 0 + + run(config=config, args=args, logger=self.logger) + assert glob(os.path.join(config.data_directories.path_outputs, "background_map.h5")), \ + 'Processing failed (background_map.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "coordinates_utm.h5")), \ + 'Processing failed (coordinates_utm.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "ifg_network.h5")), \ + 'Processing failed (ifg_network.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "ifg_stack.h5")), \ + 'Processing failed (ifg_stack.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "temporal_coherence.h5")), \ + 'Processing failed (temporal_coherence.h5 not created).' + + # consistencyCheck + args.start = 1 + args.stop = 1 + + run(config=config, args=args, logger=self.logger) + assert glob(os.path.join(config.data_directories.path_outputs, "point_network.h5")), \ + 'Processing failed (point_network.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "point_network_parameter.h5")), \ + 'Processing failed (point_network_parameter.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "p1_ifg_wr.h5")), \ + 'Processing failed (p1_ifg_wr.h5 not created).' + + # unwrapping + args.start = 2 + args.stop = 2 + + run(config=config, args=args, logger=self.logger) + assert glob(os.path.join(config.data_directories.path_outputs, "p1_ifg_unw.h5")), \ + 'Processing failed (p1_ifg_unw.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "p1_ts.h5")), \ + 'Processing failed (p1_ts.h5 not created).' 
+ + # filtering + args.start = 3 + args.stop = 3 + + run(config=config, args=args, logger=self.logger) + assert glob(os.path.join(config.data_directories.path_outputs, "p1_ts_filt.h5")), \ + 'Processing failed (p1_ts_filt.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, "p1_aps.h5")), \ + 'Processing failed (p1_aps.h5 not created).' + coh_value = int(config.filtering.coherence_p2 * 100) + assert glob(os.path.join(config.data_directories.path_outputs, f"coh{coh_value}_ifg_wr.h5")), \ + f'Processing failed (coh{coh_value}_ifg_wr.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, f"coh{coh_value}_aps.h5")), \ + f'Processing failed (coh{coh_value}_aps.h5 not created).' + + # densification + args.start = 4 + args.stop = 4 + + run(config=config, args=args, logger=self.logger) + coh_value = int(config.filtering.coherence_p2 * 100) + assert glob(os.path.join(config.data_directories.path_outputs, f"coh{coh_value}_ifg_unw.h5")), \ + f'Processing failed (coh{coh_value}_ifg_unw.h5 not created).' + assert glob(os.path.join(config.data_directories.path_outputs, f"coh{coh_value}_ts.h5")), \ + f'Processing failed (coh{coh_value}_ts.h5 not created).' diff --git a/tests/testdata/config_test.json b/tests/testdata/config_test.json new file mode 100755 index 0000000..5ec4e59 --- /dev/null +++ b/tests/testdata/config_test.json @@ -0,0 +1,68 @@ +{ + "data_directories": { + "path_inputs": "tests/testdata/inputs/", + "path_outputs": "tests/testdata/output/" + }, + "processing": { + "num_cores": 50, + "num_patches": 1, + "temporal_unwrapping": true, + "unwrapping_method": "puma" + }, + "phase_linking": { + "phase_linking": false, + "path_inverted": "tests/testdata/inverted", + "num_siblings": 20, + "spatial_mask_file_pl": null, + "use_ps": false, + "path_mask_file_ps": "tests/testdata/maskPS.h5" + }, + "preparation": { + "start_date": "", + "stop_date": "", + "network_type": "star", + "num_ifgs": 3, + "max_tbase": 100, + "filter_wdw_size": 9 + }, + "consistency_check": { + "coherence_p1": 0.9, + "grid_size": 200, + "spatial_mask_file_p1": null, + "knn": 30, + "max_arc_length": null, + "velocity_bound": 0.1, + "dem_error_bound": 100.0, + "num_samples": 100, + "arc_coherence": 0.6, + "min_num_arc": 3 + }, + "unwrapping": { + "use_temporal_unwrapping_arcs": true, + "knn": 20 + }, + "filtering": { + "skip_filtering": false, + "interpolation_method": "kriging", + "coherence_p2": 0.9, + "grid_size": 1000, + "spatial_mask_file_p2": null, + "use_moving_points": true, + "max_auto_corr": 0.3 + }, + "densification": { + "coherence_threshold": 0.5, + "num_connections_p1": 5, + "num_connections_p2": 10, + "max_distance_p1": 2000, + "velocity_bound": 0.15, + "dem_error_bound": 100.0, + "num_samples": 100, + "knn": 20 + }, + "logging": { + "logging_level": "info", + "logfile_path": "tests/testdata/logfiles/" + + } +} diff --git a/tests/testdata/logfiles/.gitkeep b/tests/testdata/logfiles/.gitkeep new file mode 100644 index 0000000..e69de29 diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..d9417fa --- /dev/null +++ b/tox.ini @@ -0,0 +1,30 @@ +[pydocstyle] +ignore = D213,D203,D213,D212,D413,D409 + +[tox] +envlist = py35, py36, py37, py38, flake8 + +[travis] +python = + 3.8: py38 + 3.7: py37 + 3.6: py36 + 3.5: py35 + +[testenv:flake8] +basepython = python +deps = flake8 +commands = flake8 sarvey tests + +[testenv] +setenv = + PYTHONPATH = {toxinidir} +deps = + -r{toxinidir}/requirements_dev.txt +; If you want to make tox run the tests with the same 
versions, create a +; requirements.txt with the pinned versions and uncomment the following line: +; -r{toxinidir}/requirements.txt +commands = + pip install -U pip + pytest --basetemp={envtmpdir} +
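
Editor's note: taken together, the test suite exercises the full SARvey workflow step by step. As a closing sketch (not part of the patch), the same five processing steps can be driven programmatically in the way `tests/test_processing.py` does; the config path and working directory below are placeholders:

    import json
    import logging
    import sys

    from sarvey.config import Config
    from sarvey.sarvey_mti import createParser, run

    logger = logging.getLogger(__name__)
    logger.addHandler(logging.StreamHandler(sys.stdout))
    logger.setLevel(logging.DEBUG)

    config_file = "tests/testdata/config_test.json"  # placeholder path
    with open(config_file) as config_fp:
        config = Config(**json.load(config_fp))  # pydantic validation of the JSON settings

    parser = createParser()
    # step indices 0..4: preparation, consistency check, unwrapping, filtering, densification
    args = parser.parse_args(["-f", config_file, "0", "4", "-w", "tests/testdata/output"])
    run(config=config, args=args, logger=logger)

The same run should also be available through the `sarvey` console script registered in setup.py, e.g. `sarvey -f config.json 0 4`.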