diff --git a/.travis.yml b/.travis.yml index 2f98c1502..968a741c7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,27 +1,46 @@ language: python python: - 2.7 +sudo: false -# Setup anaconda +# Setup miniconda before_install: - - wget http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh - - chmod +x miniconda.sh - - ./miniconda.sh -b - - export PATH=/home/travis/miniconda/bin:$PATH + - if [[ ! -e $HOME/miniconda/bin ]]; then + wget http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh; + chmod +x miniconda.sh; + ./miniconda.sh -b -f -p $HOME/miniconda; + fi + - export OLD_PATH=$PATH + - export PATH=$HOME/miniconda/bin:$PATH install: - conda update -q --yes conda - - conda install -q --yes numpy scipy matplotlib nose - - conda install -q --yes -c https://conda.binstar.org/osgeo gdal + - conda install -q --yes numpy scipy matplotlib nose pillow basemap netcdf4 proj.4 + - conda install -q --yes -c https://conda.anaconda.org/nersc nansat-gdal - export GDAL_DATA=/home/vagrant/miniconda/share/gdal/ - export GEOS_DIR=/home/vagrant/miniconda/ - - pip install -q basemap --allow-external basemap --allow-unverified basemap - python -c 'import gdal; print gdal.__file__' - python -c 'from mpl_toolkits.basemap import Basemap' - pip install coveralls -#- python setup.py install + - pip install cfunits + - python setup.py install script: #"cd ..; nosetests --with-coverage --cover-package=nansat nansat.tests" - coverage run --source=nansat setup.py test + - coverage run --source=nansat setup.py test + after_success: - coveralls + - coveralls + # Resetting path so uploading of cache with curl succeeds. + - export PATH=$OLD_PATH + +after_failure: + # Resetting path so uploading of cache with curl succeeds. 
+ - export PATH=$OLD_PATH + +# before_cache: +# - rm -f $HOME/.cache/pip/log/debug.log + +cache: + directories: +# - $HOME/.cache/pip + - $HOME/miniconda diff --git a/README.md b/README.md index 54348de36..b1caddfb7 100644 --- a/README.md +++ b/README.md @@ -1,11 +1,9 @@ -[![Build Status](https://travis-ci.org/nansencenter/nansat.svg?branch=master)](https://travis-ci.org/nansencenter/nansat) -[![Coverage Status](https://coveralls.io/repos/nansencenter/nansat/badge.svg?branch=master)](https://coveralls.io/r/nansencenter/nansat?branch=master) +[![Build Status](https://travis-ci.org/nansencenter/nansat.svg?branch=develop)](https://travis-ci.org/nansencenter/nansat) +[![Coverage Status](https://coveralls.io/repos/nansencenter/nansat/badge.svg?branch=develop)](https://coveralls.io/r/nansencenter/nansat) ![NANSAT](http://nansencenter.github.io/nansat/images/nansat_logo.png) -nansat -====== -Nansat is a scientist friendly Python toolbox for processing 2D satellite earth observation data. +**Nansat** is a scientist friendly Python toolbox for processing 2D satellite earth observation data. The main **goal** of Nansat is to facilitate: @@ -17,21 +15,64 @@ We appreciate acknowledments of Nansat. Please add "The image analysis was perfo the open-source NanSat (https://github.com/nansencenter/nansat) python package" (or equivalent) if you use Nansat in scientific publications. 
-Download -========= -https://github.com/nansencenter/nansat/wiki/Download +## Easy to install +The easiest way to install Nansat on a Linux machine is to use [anaconda](http://docs.continuum.io/anaconda/index) +``` +# download latest version of miniconda +wget http://repo.continuum.io/miniconda/Miniconda-latest-Linux-x86_64.sh -O miniconda.sh +# make it executable +chmod +x miniconda.sh -Install Nansat -============== -https://github.com/nansencenter/nansat/wiki/Install-Nansat +# install miniconda virtual environment +./miniconda.sh -b -f -p $HOME/miniconda +# activate the environment +export PATH=$HOME/miniconda/bin/:$PATH -Install required libraries -========================== -See https://github.com/nansencenter/nansat/wiki/Required-libs +# install some requirements from common repositories +conda install -q --yes numpy scipy matplotlib nose pillow basemap netcdf4 proj.4 +# install some requirements from NERSC repository +conda install -q --yes -c https://conda.anaconda.org/nersc nansat-gdal -Tutorial -======== -https://github.com/nansencenter/nansat/wiki/Tutorial +# install some requirements from pypi +pip install cfunits + +# configure environment +export GDAL_DATA=$HOME/miniconda/share/gdal/ +export GEOS_DIR=$HOME/miniconda/ + +# install nersc-metadata which is not in pip yet +pip install https://github.com/nansencenter/nersc-metadata/archive/master.tar.gz + +# finally install Nansat +pip install https://github.com/nansencenter/nansat/archive/develop.tar.gz + +# run tests +nosetests nansat +``` +For more information see [Install-Nansat](https://github.com/nansencenter/nansat/wiki/Install-Nansat) section or +use pre-configured virtual machines as explained on [Nansat-lectures](https://github.com/nansencenter/nansat-lectures) + +## Easy to use +```Python +# download a test file +!wget https://github.com/nansencenter/nansat/raw/develop/nansat/tests/data/stere.tif + +# import main file opener +from nansat import Nansat + +# open a test file +n = 
Nansat('stere.tif') + +# see file content +print n + +# view file footprint +n.write_map('stere.footprint.png') + +# create RGB with auto-stretched histogram +n.write_figure('stere_rgb.png', [1,2,3], clim='hist') +``` +For more information see [Tutorial](https://github.com/nansencenter/nansat/wiki/Tutorial) or notebooks for [Nansat lectures](https://github.com/nansencenter/nansat-lectures/tree/master/notebooks) diff --git a/cookbook/wiki_tutorial.py b/cookbook/wiki_tutorial.py old mode 100755 new mode 100644 diff --git a/cookbook/yangtse20110222.py b/cookbook/yangtse20110222.py old mode 100755 new mode 100644 diff --git a/mapper_tests/__init__.py b/end2endtests/__init__.py similarity index 100% rename from mapper_tests/__init__.py rename to end2endtests/__init__.py diff --git a/end2endtests/mapper_test_archive.py b/end2endtests/mapper_test_archive.py new file mode 100644 index 000000000..a234eb165 --- /dev/null +++ b/end2endtests/mapper_test_archive.py @@ -0,0 +1,62 @@ +#------------------------------------------------------------------------------- +# Name: mapper_test_archive.py +# Purpose: To test nansat +# +# Author: Anton Korosov, Morten Wergeland Hansen, Asuka Yamakawa +# Modified: Morten Wergeland Hansen +# +# Created: 18.06.2014 +# Last modified:03.06.2015 13:36 +# Copyright: (c) NERSC +# Licence: This file is part of NANSAT. 
You can redistribute it or modify +# under the terms of GNU General Public License, v.3 +# http://www.gnu.org/licenses/gpl-3.0.html +#------------------------------------------------------------------------------- +import os +import warnings +import time +import glob + + +class DataForTestingMappers(object): + def __init__(self): + ''' Find test files and corresponsing mapper names ''' + existingTestFiles = self.find_existing_files() + filesAndMappers = self.identify_mappers(existingTestFiles) + + self.mapperData = filesAndMappers + + def find_existing_files(self): + ''' Find all files for testsing inside MAPPER_TEST_DATA_DIR''' + testFiles = [] + + testDataEnv = os.getenv('MAPPER_TEST_DATA_DIR') + if testDataEnv is not None: + testDataDirs = testDataEnv.split(':') + for testDataDir in testDataDirs: + if os.path.isdir(testDataDir): + testFiles += glob.glob(os.path.join(testDataDir, '*', '*')) + + testFiles = [testFile for testFile in testFiles if self.readable(testFile)] + + return testFiles + + def identify_mappers(self, testFiles): + ''' From the sub-directory name get the name of the mapper ''' + + mapperNames = [os.path.split(os.path.split(testFile)[0])[1] for testFile in testFiles] + return zip(testFiles, mapperNames) + + + def readable(self, testFile): + ''' Test if file is readable at OS level ''' + if not os.path.exists(testFile): + return False + if not os.access(testFile, os.R_OK): + return False + if os.stat(testFile).st_size == 0: + return False + if os.path.isdir(testFile): + return False + + return True diff --git a/end2endtests/test_mappers.py b/end2endtests/test_mappers.py new file mode 100644 index 000000000..ad1dc926d --- /dev/null +++ b/end2endtests/test_mappers.py @@ -0,0 +1,139 @@ +#------------------------------------------------------------------------------- +# Name: test_nansat.py +# Purpose: Test the nansat module +# +# Author: Morten Wergeland Hansen, Asuka Yamakawa, Anton Korosov +# Modified: Morten Wergeland Hansen +# +# Created: 
18.06.2014 +# Last modified:02.07.2015 16:05 +# Copyright: (c) NERSC +# Licence: This file is part of NANSAT. You can redistribute it or modify +# under the terms of GNU General Public License, v.3 +# http://www.gnu.org/licenses/gpl-3.0.html +#------------------------------------------------------------------------------- +import unittest, warnings +import os, sys, glob, datetime +import json + +from types import ModuleType, FloatType +import numpy as np + +from nerscmetadata import gcmd_keywords + +from nansat import Nansat, Domain +from nansat.nansat import _import_mappers +from mapper_test_archive import DataForTestingMappers + +nansatMappers = _import_mappers() + +class TestDataForTestingMappers(unittest.TestCase): + def test_create_test_data(self): + ''' should create TestData instance ''' + t = DataForTestingMappers() + self.assertTrue(hasattr(t, 'mapperData')) + +# https://nose.readthedocs.org/en/latest/writing_tests.html#test-generators +# The x-flag results in the test stopping at first failure or error - use it +# for easier debugging: +# nosetests -v -x end2endtests.test_mappers:TestAllMappers.test_mappers_basic +class TestAllMappers(object): + + @classmethod + def setup_class(cls): + ''' Download testing data ''' + cls.testData = DataForTestingMappers() + + def test_mappers_basic(self): + ''' Run similar basic tests for all mappers ''' + for fileName, mapperName in self.testData.mapperData: + sys.stderr.write('\nMapper '+mapperName+' -> '+fileName+'\n') + # Test call to Nansat, mapper not specified + yield self.open_with_nansat, fileName + # Test call to Nansat, mapper specified + yield self.open_with_nansat, fileName, mapperName + + def test_mappers_start_time(self): + ''' Run similar NansenCloud reated tests for all mappers ''' + for fileName, mapperName in self.testData.mapperData: + sys.stderr.write('\nMapper '+mapperName+' -> '+fileName+'\n') + n = Nansat(fileName, mapperName=mapperName) + # Test nansat.start_time() and nansat.end_time() + yield 
self.has_start_time, n + + def test_mappers_advanced(self): + ''' Run similar NansenCloud reated tests for all mappers ''' + for fileName, mapperName in self.testData.mapperData: + sys.stderr.write('\nMapper '+mapperName+' -> '+fileName+'\n') + n = Nansat(fileName, mapperName=mapperName) + yield self.is_correct_mapper, n, mapperName + yield self.has_start_time, n + yield self.has_end_time, n + yield self.has_correct_platform, n + yield self.has_correct_instrument, n + + # Test that SAR objects have sigma0 intensity bands in addition + # to complex bands + if n.has_band( + 'surface_backwards_scattering_coefficient_of_radar_wave' + ): + yield self.exist_intensity_band, n + + def has_start_time(self, n): + ''' Has start time ''' + assert type(n.time_coverage_start)==datetime.datetime + + def has_end_time(self, n): + assert type(n.time_coverage_end)==datetime.datetime + + def has_correct_platform(self, n): + meta1 = json.loads(n.get_metadata('platform')) + meta1ShortName = meta1['Short_Name'] + meta2 = gcmd_keywords.get_platform(meta1ShortName) + + assert type(meta1) == dict + assert meta1 == meta2 + + def has_correct_instrument(self, n): + meta1 = json.loads(n.get_metadata('instrument')) + meta1ShortName = meta1['Short_Name'] + meta2 = gcmd_keywords.get_instrument(meta1ShortName) + + assert type(meta1) == dict + assert meta1 == meta2 + + def is_correct_mapper(self, n, mapper): + assert n.mapper==mapper + + def open_with_nansat(self, file, mapper=None, kwargs=None): + ''' Perform call to Nansat and check that it returns a Nansat object ''' + if kwargs is None: + kwargs = {} + + if mapper: + n = Nansat(file, mapperName=mapper, **kwargs) + else: + n = Nansat(file, **kwargs) + assert type(n) == Nansat + + def exist_intensity_band(self, n): + ''' test if intensity bands exist for complex data ''' + allBandNames = [] + complexBandNames = [] + for iBand in range(n.vrt.dataset.RasterCount): + iBandName = n.get_metadata(bandID=iBand + 1)['name'] + 
allBandNames.append(iBandName) + if '_complex' in iBandName: + complexBandNames.append(iBandName) + + for iComplexName in complexBandNames: + assert iComplexName.replace('_complex', '') in allBandNames + +if __name__=='__main__': + #for mapper in nansatMappers: + # test_name = 'test_%s'%mapper + unittest.main() + + + + diff --git a/mapper_tests/test_radarsat2.py b/end2endtests/test_radarsat2.py similarity index 100% rename from mapper_tests/test_radarsat2.py rename to end2endtests/test_radarsat2.py diff --git a/mapper_tests/mapper_test_archive.py b/mapper_tests/mapper_test_archive.py deleted file mode 100755 index 02b8af6cf..000000000 --- a/mapper_tests/mapper_test_archive.py +++ /dev/null @@ -1,162 +0,0 @@ -#------------------------------------------------------------------------------- -# Name: test_nansat_archive.py -# Purpose: To test nansat -# -# Author: Morten Wergeland Hansen, Asuka Yamakawa -# Modified: Morten Wergeland Hansen -# -# Created: 18.06.2014 -# Last modified:17.04.2015 13:28 -# Copyright: (c) NERSC -# Licence: This file is part of NANSAT. 
You can redistribute it or modify -# under the terms of GNU General Public License, v.3 -# http://www.gnu.org/licenses/gpl-3.0.html -#------------------------------------------------------------------------------- -import os, warnings, time - - -class DataForTestingMappers(object): - ''' Download test data and keep info about each file ''' - mapperData = None - - def __init__(self): - ''' Set directory to store test data - - If MAPPER_TEST_DATA_DIR is in the environment its value will be used - This is convenient for testing localy and sharing downloaded - data among several users on the server - ''' - self.testDataDir = os.getenv('MAPPER_TEST_DATA_DIR') - if self.testDataDir is None: - self.testDataDir = os.path.join( - os.path.dirname(os.path.abspath(__file__)), - 'test_data') - - def download_all_test_data(self): - ''' Download test data for each mapper ''' - self.download_test_file( - 'ftp://ftp.nersc.no/pub/python_test_data/asar/ASA_IMS_1PNIPA20100411_101715_000000162088_00280_42418_0338.N1', - 'asar') - - self.download_test_file( - 'ftp://ftp.nersc.no/pub/python_test_data/asar/ASA_WSM_1PNPDK20110108_205958_000000923098_00187_46322_6032.N1', - 'asar') - - self.download_test_file( - 'ftp://ftp.nersc.no/pub/python_test_data/aster_l1a/AST_L1A_00306192003124632_20120731044546_8073.hdf', - 'aster_l1a') - - self.download_test_file( - 'ftp://ftp.nersc.no/pub/python_test_data/csks/CSKS4_SCS_B_PP_11_CO_LA_FF_20111215040251_20111215040257.h5', - 'csks') - - self.download_test_file( - 'ftp://ftp.nersc.no/pub/python_test_data/hirlam/DNMI-NEurope.grb', - 'hirlam') - - self.download_test_file( - 'ftp://ftp.nersc.no/pub/python_test_data/landsat/LC81750072013176LGN00.tar.gz', - 'landsat') - - self.download_test_file( - 'ftp://ftp.nersc.no/pub/python_test_data/landsat/LC81750072013176LGN00.tar.gz', - 'landsat_highresolution') - - self.download_test_file( - 'ftp://ftp.nersc.no/pub/python_test_data/meris_l1/MER_FRS_1PNPDK20110503_105638_000001833102_00109_47968_7898.N1', - 
'meris_l1') - - self.download_test_file( - 'ftp://ftp.nersc.no/pub/python_test_data/modis_l1/MOD021KM.A2010105.2120.005.2010106075131.hdf', - 'modis_l1') - - self.download_test_file( - 'ftp://ftp.nersc.no/pub/python_test_data/ncep/gfs20120328.t00z.master.grbf00', - 'ncep') - - self.download_test_file( - '/Data/sat/test_data_nansat_mappers/radarsat2/RS2_20090227_063055_0080_SCWA_HHHV_SCW_30853_0000_1897838', - 'radarsat2') - - self.download_test_file( - '/Data/sat/test_data_nansat_mappers/radarsat2/RS2_20111109_060616_0045_SCNA_HHHV_SGF_164373_9871_6913894', - 'radarsat2') - - self.download_test_file( - '/Data/sat/test_data_nansat_mappers/radarsat2/RS2_20140723_161314_0003_U20_VV_SLC_337855_2455_9614320', - 'radarsat2') - - self.download_test_file( - '/Data/sat/test_data_nansat_mappers/radarsat2/RS2_OK57403_PK539140_DK477416_SCWA_20141022_152035_HH_SGF.ZIP', - 'radarsat2') - - self.download_test_file( - '/Data/sat/test_data_nansat_mappers/radarsat2/RS2_20110608_172753_0005_FQ15_HHVVHVVH_SLC_137348_1953_5671561.zip', - 'radarsat2') - - self.download_test_file( - '/Data/sat/test_data_nansat_mappers/radarsat2/RS2_OK29747_PK294181_DK265214_FQ23_20100508_120125_HH_VV_HV_VH_SLC.zip', - 'radarsat2') - - self.download_test_file( - 'ftp://ftp.nersc.no/pub/python_test_data/generic/mapperTest_generic.tif', - 'generic') - - self.download_test_file( - 'ftp://ftp.nersc.no/pub/python_test_data/obpg_l2/A2014275111000.L2_LAC.NorthNorwegianSeas.hdf', - 'obpg_l2') - - def download_test_file(self, inputURL, mapperName): - ''' Download one file for one mapper - - For the given URL and mapper name - Create local dir with name ./test_data/mapper_name/mapper_file.ext' - If the downloaded file does not already exist: - download the file into the dir - Keep the filepath in self.mapper_data[mapper_name] - - Parameters: - ----------- - inputUrl : str - valid URL with the test file to download - mapperName : str - name of the mapper for which the data is downloaded - - ModifIes: - --------- - 
self.mapper_data : dict - adds new : [] - or appends to the existing key - - ''' - fName = os.path.basename(inputURL) - mapperDir = os.path.split(os.path.split(inputURL)[0])[1] - mapperDataDir = os.path.join(self.testDataDir, mapperDir) - mapperFName = os.path.join(mapperDataDir, fName) - - # "inputURL" can also be a filename on the system - if os.path.exists(inputURL): - mapperFName = inputURL - else: - if not os.path.exists(mapperDataDir): - os.makedirs(mapperDataDir) - - if not os.path.exists(mapperFName): - print "Downloading %s " % mapperFName - t0 = time.time() - os.system('curl -so ' + mapperFName + ' ' + inputURL ) - print time.time() - t0 - - if not os.path.exists(mapperFName): - warnings.warn( """ - Could not access %s on ftp-site with test data - contact - morten.stette@nersc.no to get the ftp-server at NERSC restarted""" - % mapperFName) - else: - if self.mapperData is None: - self.mapperData = {} - if mapperName in self.mapperData: - self.mapperData[mapperName].append(mapperFName) - else: - self.mapperData[mapperName] = [mapperFName] - diff --git a/mapper_tests/test_mappers.py b/mapper_tests/test_mappers.py deleted file mode 100644 index 3bead7ddf..000000000 --- a/mapper_tests/test_mappers.py +++ /dev/null @@ -1,111 +0,0 @@ -#------------------------------------------------------------------------------- -# Name: test_nansat.py -# Purpose: Test the nansat module -# -# Author: Morten Wergeland Hansen, Asuka Yamakawa, Anton Korosov -# Modified: Morten Wergeland Hansen -# -# Created: 18.06.2014 -# Last modified:17.04.2015 10:23 -# Copyright: (c) NERSC -# Licence: This file is part of NANSAT. 
You can redistribute it or modify -# under the terms of GNU General Public License, v.3 -# http://www.gnu.org/licenses/gpl-3.0.html -#------------------------------------------------------------------------------- -import unittest, warnings -import os, sys, glob -from types import ModuleType, FloatType -import numpy as np - -from nansat import Nansat, Domain -from nansat.nansat import _import_mappers -from mapper_test_archive import DataForTestingMappers - -nansatMappers = _import_mappers() - -class TestDataForTestingMappers(unittest.TestCase): - def test_create_test_data(self): - ''' should create TestData instance ''' - t = DataForTestingMappers() - self.assertTrue(hasattr(t, 'mapperData')) - self.assertTrue(hasattr(t, 'testDataDir')) - - def test_testDataDir_from_env(self): - ''' should create TestData instance ''' - fakeDir = '/fake/dir/to/test/data' - os.environ['MAPPER_TEST_DATA_DIR'] = fakeDir - t = DataForTestingMappers() - self.assertEqual(t.testDataDir, fakeDir) - - def test_testDataDir_exists(self): - ''' should create TestData instance ''' - t = DataForTestingMappers() - self.assertTrue(os.path.exists(t.testDataDir)) - - def test_download_file(self): - ''' Should download the selected file and put into mapperData''' - t = DataForTestingMappers() - t.download_test_file( - 'ftp://ftp.nersc.no/pub/python_test_data/ncep/gfs20120328.t00z.master.grbf00', - 'ncep') - self.assertTrue('ncep' in t.mapperData) - self.assertEqual(type(t.mapperData['ncep']), list) - for ifile in t.mapperData['ncep']: - self.assertTrue(os.path.exists(ifile)) - - -class TestAllMappers(object): - def test_automatic_mapper(self): - ''' Should open all downloaded files with automatically selected mapper ''' - testData = DataForTestingMappers() - testData.download_all_test_data() - for mapper in testData.mapperData: - mapperFiles = testData.mapperData[mapper] - for mapperFile in mapperFiles: - print mapperFile - # OBS: do not yield functions that have the word 'test' in - # their names - 
these are run automatically by nose... - yield self.open_with_automatic_mapper, mapperFile - yield self.geolocation_of_exportedNC_vs_original, \ - mapperFile - - def test_specific_mapper(self): - ''' Should open all downloaded files with automatically selected mapper ''' - testData = DataForTestingMappers() - testData.download_all_test_data() - for mapperName in testData.mapperData: - mapperFiles = testData.mapperData[mapperName] - for mapperFile in mapperFiles: - print mapperName, '->', mapperFile - # OBS: do not yield functions that have the word 'test' in - # their names - these are run automatically by nose... - yield self.open_with_specific_mapper, mapperFile, mapperName - - def geolocation_of_exportedNC_vs_original(self, file): - orig = Nansat(file) - testFile = 'test.nc' - orig.export(testFile) - copy = Nansat(testFile) - lon0, lat0 = orig.get_geolocation_grids() - lon1, lat1 = copy.get_geolocation_grids() - np.testing.assert_allclose(lon0, lon1) - np.testing.assert_allclose(lat0, lat1) - os.unlink(ncfile) - - def open_with_automatic_mapper(self, mapperFile): - ''' Perform call to Nansat with each file as a separate test ''' - n = Nansat(mapperFile) - assert type(n) == Nansat - - def open_with_specific_mapper(self, mapperFile, mapperName): - ''' Perform call to Nansat with each file as a separate test ''' - n = Nansat(mapperFile, mapperName=mapperName) - assert type(n) == Nansat - -if __name__=='__main__': - unittest.main() - - - - - diff --git a/nansat/__init__.py b/nansat/__init__.py index 8562287a4..86dd4444f 100644 --- a/nansat/__init__.py +++ b/nansat/__init__.py @@ -2,9 +2,9 @@ # Purpose: Use the current folder as a package # Authors: Asuka Yamakawa, Anton Korosov, Knut-Frode Dagestad, # Morten W. Hansen, Alexander Myasoyedov, -# Dmitry Petrenko, Evgeny Morozov +# Dmitry Petrenko, Evgeny Morozov, Aleksander Vines # Created: 29.06.2011 -# Copyright: (c) NERSC 2011 - 2014 +# Copyright: (c) NERSC 2011 - 2015 # Licence: # This file is part of NANSAT. 
# NANSAT is free software: you can redistribute it and/or modify @@ -15,8 +15,13 @@ # but WITHOUT ANY WARRANTY without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. from __future__ import absolute_import -import os, sys +import os +import sys import warnings +# import some libraries for convenience +from nansat.tools import gdal, ogr +import numpy as np +import matplotlib.pyplot as plt # check if pixel functions were compiled using setup_tools try: @@ -60,8 +65,4 @@ os.environ['LOG_LEVEL'] = '30' -# import some libraries for convenience -from nansat.tools import gdal, ogr -import numpy as np -import matplotlib.pyplot as plt __all__ += ['gdal', 'ogr', 'np', 'plt'] diff --git a/nansat/domain.py b/nansat/domain.py old mode 100755 new mode 100644 index 51f69245a..4540e2ae5 --- a/nansat/domain.py +++ b/nansat/domain.py @@ -2,9 +2,9 @@ # Purpose: Container of Domain class # Authors: Asuka Yamakawa, Anton Korosov, Knut-Frode Dagestad, # Morten W. Hansen, Alexander Myasoyedov, -# Dmitry Petrenko, Evgeny Morozov +# Dmitry Petrenko, Evgeny Morozov, Aleksander Vines # Created: 29.06.2011 -# Copyright: (c) NERSC 2011 - 2013 +# Copyright: (c) NERSC 2011 - 2015 # Licence: # This file is part of NANSAT. 
# NANSAT is free software: you can redistribute it and/or modify @@ -26,7 +26,7 @@ from matplotlib.patches import Polygon from nansat.tools import add_logger, initial_bearing, haversine, gdal, osr, ogr -from nansat.tools import OptionError +from nansat.tools import OptionError, ProjectionError from nansat.nsr import NSR from nansat.vrt import VRT @@ -213,8 +213,8 @@ def __repr__(self): self.logger.error('Cannot read projection from source!') else: outStr += 'Projection:\n' - outStr += (NSR(self.vrt.get_projection()).ExportToPrettyWkt(1) - + '\n') + outStr += (NSR(self.vrt.get_projection()).ExportToPrettyWkt(1) + + '\n') outStr += '-' * 40 + '\n' outStr += 'Corners (lon, lat):\n' outStr += '\t (%6.2f, %6.2f) (%6.2f, %6.2f)\n' % (corners[0][0], @@ -328,12 +328,7 @@ def write_kml_image(self, kmlFileName=None, kmlFigureName=None): ''' # test input options - if kmlFileName is not None: - # if only output KML-file is given - # then convert the current domain to KML - domains = [self] - else: - # otherwise it is potentially error + if kmlFileName is None: raise OptionError('kmlFileName(%s) is wrong' % (kmlFileName)) if kmlFigureName is None: @@ -433,14 +428,14 @@ def _convert_extentDic(self, dstSRS, extentDic): # convert lat/lon given by 'lle' to the target coordinate system and # add key 'te' and the converted values to extentDic - x1, y1, z1 = coorTrans.TransformPoint(extentDic['lle'][0], - extentDic['lle'][3]) - x2, y2, z2 = coorTrans.TransformPoint(extentDic['lle'][2], - extentDic['lle'][3]) - x3, y3, z3 = coorTrans.TransformPoint(extentDic['lle'][2], - extentDic['lle'][1]) - x4, y4, z4 = coorTrans.TransformPoint(extentDic['lle'][0], - extentDic['lle'][1]) + x1, y1, _ = coorTrans.TransformPoint(extentDic['lle'][0], + extentDic['lle'][3]) + x2, y2, _ = coorTrans.TransformPoint(extentDic['lle'][2], + extentDic['lle'][3]) + x3, y3, _ = coorTrans.TransformPoint(extentDic['lle'][2], + extentDic['lle'][1]) + x4, y4, _ = coorTrans.TransformPoint(extentDic['lle'][0], + 
extentDic['lle'][1]) minX = min([x1, x2, x3, x4]) maxX = max([x1, x2, x3, x4]) @@ -633,7 +628,7 @@ def get_border(self, nPoints=10): return self.transform_points(colVector, rowVector) - def _get_border_kml(self): + def _get_border_kml(self, *args, **kwargs): '''Generate Placemark entry for KML Returns @@ -642,7 +637,7 @@ def _get_border_kml(self): String with the Placemark entry ''' - domainLon, domainLat = self.get_border() + domainLon, domainLat = self.get_border(*args, **kwargs) # convert Border coordinates into KML-like string coordinates = '' @@ -667,7 +662,7 @@ def _get_border_kml(self): return kmlEntry - def get_border_wkt(self): + def get_border_wkt(self, *args, **kwargs): '''Creates string with WKT representation of the border polygon Returns @@ -676,7 +671,7 @@ def get_border_wkt(self): string with WKT representation of the border polygon ''' - lonList, latList = self.get_border() + lonList, latList = self.get_border(*args, **kwargs) # apply > 180 deg correction to longitudes for ilon, lon in enumerate(lonList): @@ -686,11 +681,11 @@ def get_border_wkt(self): polyCont = ','.join(str(lon) + ' ' + str(lat) for lon, lat in zip(lonList, latList)) # outer quotes have to be double and inner - single! 
- #wktPolygon = "PolygonFromText('POLYGON((%s))')" % polyCont + # wktPolygon = "PolygonFromText('POLYGON((%s))')" % polyCont wkt = 'POLYGON((%s))' % polyCont return wkt - def get_border_geometry(self): + def get_border_geometry(self, *args, **kwargs): ''' Get OGR Geometry of the border Polygon Returns @@ -699,7 +694,7 @@ def get_border_geometry(self): ''' - return ogr.CreateGeometryFromWkt(self.get_border_wkt()) + return ogr.CreateGeometryFromWkt(self.get_border_wkt(*args, **kwargs)) def overlaps(self, anotherDomain): ''' Checks if this Domain overlaps another Domain @@ -754,6 +749,36 @@ def get_corners(self): self.vrt.dataset.RasterYSize] return self.transform_points(colVector, rowVector) + def get_min_max_lat_lon(self): + '''Get minimum and maximum lat and long values in the geolocation grid + + Returns + -------- + minLat, maxLat, minLon, maxLon : float + min/max lon/lat values for the Domain + + ''' + allLongitudes, allLatitudes = self.get_geolocation_grids() + maxLat = -90 + minLat = 90 + for latitudes in allLatitudes: + for lat in latitudes: + if lat > maxLat: + maxLat = lat + if lat < minLat: + minLat = lat + + maxLon = -180 + minLon = 180 + for longitudes in allLongitudes: + for lon in longitudes: + if lon > maxLon: + maxLon = lon + if lon < minLon: + minLon = lon + + return minLat, maxLat, minLon, maxLon + def get_pixelsize_meters(self): '''Returns the pixelsize (deltaX, deltaY) of the domain @@ -965,13 +990,7 @@ def write_map(self, outputFileName, ''' # if lat/lon vectors are not given as input if lonVec is None or latVec is None or len(lonVec) != len(latVec): - try: - # get lon/lat from Domain/Nansat object - lonVec, latVec = self.get_border() - except: - print('Domain/Nansat object is not given' - 'and lat/lon vectors=None') - return + lonVec, latVec = self.get_border() # convert vectors to numpy arrays lonVec = np.array(lonVec) @@ -997,8 +1016,10 @@ def write_map(self, outputFileName, # add content: coastline, continents, meridians, parallels 
bmap.drawcoastlines() bmap.fillcontinents(color=continetsColor) - bmap.drawmeridians(np.linspace(minLon, maxLon, meridians)) - bmap.drawparallels(np.linspace(minLat, maxLat, parallels)) + bmap.drawmeridians(np.linspace(minLon, maxLon, meridians), + labels=merLabels) + bmap.drawparallels(np.linspace(minLat, maxLat, parallels), + labels=parLabels) # convert input lat/lon vectors to arrays of vectors with one row # if only one vector was given diff --git a/nansat/figure.py b/nansat/figure.py index e9b9d8b12..91c661b98 100644 --- a/nansat/figure.py +++ b/nansat/figure.py @@ -16,7 +16,7 @@ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. from __future__ import absolute_import import os -from math import floor, log10, pow +from math import floor, log10 import numpy as np from matplotlib import cm @@ -29,10 +29,10 @@ except: from PIL import Image, ImageDraw, ImageFont -from nansat.tools import add_logger +from nansat.tools import add_logger, OptionError -class Figure(): +class Figure(object): '''Perform opeartions with graphical files: create, append legend, save. 
Figure instance is created in the Nansat.write_figure method @@ -65,8 +65,8 @@ class Figure(): latGrid = None lonGrid = None - nGridLines = 10 - latlonLabels = 0 + lonTicks = 5 + latTicks = 5 transparency = None @@ -369,7 +369,7 @@ def add_latlon_grids(self, **kwargs): Compute step of the grid Make matrices with binarized lat/lon Find edge (make line) - Convert to maks + Convert to mask Add mask to PIL Parameters @@ -379,8 +379,12 @@ def add_latlon_grids(self, **kwargs): array with values of latitudes lonGrid : numpy array array with values of longitudes - nGridLines : int + lonTicks : int or list + number of lines to draw + or locations of gridlines + latTicks : int or list number of lines to draw + or locations of gridlines Modifies --------- @@ -389,45 +393,85 @@ def add_latlon_grids(self, **kwargs): ''' # modify default values self._set_defaults(kwargs) + # test availability of grids - if (self.latGrid is None or self.lonGrid is None or - self.nGridLines is None or self.nGridLines == 0): + if (self.latGrid is None or self.lonGrid is None): return - # get number of grid lines - llSpacing = self.nGridLines - # get vectors for grid lines - latVec = np.linspace(self.latGrid.min(), - self.latGrid.max(), llSpacing) - lonVec = np.linspace(self.lonGrid.min(), - self.lonGrid.max(), llSpacing) + + # get vectors with ticks based on input + latTicks = self._get_auto_ticks(self.latTicks, self.latGrid) + lonTicks = self._get_auto_ticks(self.lonTicks, self.lonGrid) + + # convert lat/lon grids to indeces latI = np.zeros(self.latGrid.shape, 'int8') lonI = np.zeros(self.latGrid.shape, 'int8') - # convert lat/lon to indeces - for i in range(len(latVec)): - latI[self.latGrid > latVec[i]] = i - lonI[self.lonGrid > lonVec[i]] = i - # find pixels on the rgid lines (binarize) - latI = np.diff(latI) - lonI = np.diff(lonI) + for latTick in latTicks: + latI[self.latGrid > latTick] += 1 + for lonTick in lonTicks: + lonI[self.lonGrid > lonTick] += 1 + + # find pixels on the grid lines 
(binarize) + latI = np.diff(latI, axis=0)[:, :-1] + np.diff(latI, axis=1)[:-1, :] + lonI = np.diff(lonI, axis=0)[:, :-1] + np.diff(lonI, axis=1)[:-1, :] + # make grid from both lat and lon latI += lonI latI[latI != 0] = 1 + # add mask to the image self.apply_mask(mask_array=latI, mask_lut={1: [255, 255, 255]}) + def _get_auto_ticks(self, ticks, grid): + ''' Automatically create a list of lon or lat ticks from number of list + + Parameters + ---------- + ticks : int or list + number or location of ticks + grid : ndarray + grid with lon or lat + Returns + ------- + ticks : list + location of ticks + + ''' + gridMin = grid.min() + gridMax = grid.max() + + if type(ticks) is int: + ticks = np.linspace(gridMin, gridMax, ticks) + elif type(ticks) in [list, tuple]: + newTicks = [] + for tick in ticks: + if tick >= gridMin and tick <= gridMax: + newTicks.append(tick) + ticks = newTicks + else: + raise OptionError('Incorrect type of ticks') + + return ticks + def add_latlon_labels(self, **kwargs): '''Add lat/lon labels along upper and left side Compute step of lables Get lat/lon for these labels from latGrid, lonGrid - Print lables to PIL + Print lables to PIL in white Parameters ---------- - Figure__init__() parameters: + Any of Figure__init__() parameters: latGrid : numpy array + array with values of latitudes lonGrid : numpy array - latlonLabels : int + array with values of longitudes + lonTicks : int or list + number of lines to draw + or locations of gridlines + latTicks : int or list + number of lines to draw + or locations of gridlines Modifies --------- @@ -436,27 +480,65 @@ def add_latlon_labels(self, **kwargs): ''' # modify default values self._set_defaults(kwargs) + # test availability of grids - if (self.latGrid is None or self.lonGrid is None or - self.latlonLabels == 0): + if (self.latGrid is None or self.lonGrid is None): return draw = ImageDraw.Draw(self.pilImg) font = ImageFont.truetype(self.fontFileName, self.fontSize) - # get number of labels; step of 
lables - llLabels = self.latlonLabels - llShape = self.latGrid.shape - latI = range(0, llShape[0], (llShape[0] / llLabels) - 1) - lonI = range(0, llShape[1], (llShape[1] / llLabels) - 1) - # get lons/lats from first row/column - #lats = self.latGrid[latI, 0] - #lons = self.lonGrid[0, lonI] - for i in range(len(latI)): - lat = self.latGrid[latI[i], 0] - lon = self.lonGrid[0, lonI[i]] - draw.text((0, 10 + latI[i]), '%4.2f' % lat, fill=255, font=font) - draw.text((50 + lonI[i], 0), '%4.2f' % lon, fill=255, font=font) + # get vectors with ticks based on input + latTicks = self._get_auto_ticks(self.latTicks, self.latGrid) + lonTicks = self._get_auto_ticks(self.lonTicks, self.lonGrid) + + # get corresponding lons from upper edge and lats from left edge + lonTicksIdx = self._get_tick_index_from_grid(lonTicks, self.lonGrid, + 1, self.lonGrid.shape[1]) + latTicksIdx = self._get_tick_index_from_grid(latTicks, self.latGrid, + self.lonGrid.shape[0], 1) + + # draw lons + lonsOffset = self.lonGrid.shape[1] / len(lonTicksIdx) / 8. + for lonTickIdx in lonTicksIdx: + lon = self.lonGrid[0, lonTickIdx] + draw.text((lonTickIdx+lonsOffset, 0), '%4.2f' % lon, + fill=255, font=font) + + # draw lats + latsOffset = self.latGrid.shape[0] / len(latTicksIdx) / 8. 
+ for latTickIdx in latTicksIdx: + lat = self.latGrid[latTickIdx, 0] + draw.text((0, latTickIdx+latsOffset), '%4.2f' % lat, + fill=255, font=font) + + def _get_tick_index_from_grid(self, ticks, grid, rows, cols): + ''' Get index of pixels from lon/lat grids closest given ticks + + Parameters + ---------- + ticks : int or list + number or location of ticks + grid : ndarray + grid with lon or lat + rows : int + from which rows to return pixels + cols : int + from which cols to return pixels + + Returns + ------- + ticks : list + index of ticks + ''' + + newTicksIdx = [] + for tick in ticks: + diff = np.abs(grid[:rows, :cols] - tick).flatten() + minDiffIdx = np.nonzero(diff == diff.min())[0][0] + if minDiffIdx > 0: + newTicksIdx.append(minDiffIdx) + return newTicksIdx def clim_from_histogram(self, **kwargs): '''Estimate min and max pixel values from histogram @@ -488,36 +570,31 @@ def clim_from_histogram(self, **kwargs): masked = masked + (self.mask_array == lutVal) # create a ratio list for each band - if isinstance(ratio, float) or isinstance(ratio, int): - ratioList = np.ones(self.array.shape[0]) * float(ratio) - else: - ratioList = [] - for iRatio in range(self.array.shape[0]): - try: - ratioList.append(ratio[iRatio]) - except: - ratioList.append(ratio[0]) + if not (isinstance(ratio, float) or isinstance(ratio, int)): + raise OptionError('Incorrect input ratio %s' % str(ratio)) + + # create a ratio list for each band + if ratio <= 0 or ratio > 1: + raise OptionError('Incorrect input ratio %s' % str(ratio)) # create a 2D array and set min and max values clim = [[0] * self.array.shape[0], [0] * self.array.shape[0]] for iBand in range(self.array.shape[0]): - clim[0][iBand] = np.nanmin(self.array[iBand, :, :]) - clim[1][iBand] = np.nanmax(self.array[iBand, :, :]) + bandArray = self.array[iBand, :, :] + # remove masked data if masked is not None: - self.array[iBand, :, :][masked] = clim[0][iBand] - # if 0 0 and ratioList[iBand] < 1): - try: - hist, bins = 
self._get_histogram(iBand) - except: - self.logger.warning('Unable to compute histogram') - else: - cumhist = hist.cumsum() - cumhist /= cumhist[-1] - clim[0][iBand] = bins[len(cumhist[cumhist < - (1 - ratioList[iBand]) / 2])] - clim[1][iBand] = bins[len(cumhist[cumhist < - 1 - ((1 - ratioList[iBand]) / 2)])] + bandArray = bandArray[masked == 0] + # remove nan, inf + bandArray = bandArray[np.isfinite(bandArray)] + # get percentile + percentileMin = 100 * (1 - ratio) / 2. + percentileMax = 100 * (1 - (1 - ratio) / 2.) + if bandArray.size > 0: + clim[0][iBand] = np.percentile(bandArray, percentileMin) + clim[1][iBand] = np.percentile(bandArray, percentileMax) + else: + clim[0][iBand], clim[1][iBand] = 0, 1 + self.color_limits = clim return clim @@ -716,9 +793,6 @@ def create_pilImage(self, **kwargs): if self.pilImgLegend is not None: self.pilImg.paste(self.pilImgLegend, (0, self.height)) - # remove array from memory - #self.array = None - def process(self, **kwargs): '''Do all common operations for preparation of a figure for saving @@ -774,8 +848,7 @@ def process(self, **kwargs): self.apply_mask() # add lat/lon grids lines if latGrid and lonGrid are given - if self.latGrid is not None and self.lonGrid is not None: - self.add_latlon_grids() + self.add_latlon_grids() # append legend if self.legend: @@ -785,9 +858,7 @@ def process(self, **kwargs): self.create_pilImage(**kwargs) # add labels with lats/lons - if (self.latGrid is not None and self.lonGrid is not None and - self.latlonLabels > 0): - self.add_latlon_labels() + self.add_latlon_labels() # add logo if self.logoFileName is not None: @@ -929,7 +1000,6 @@ def _round_number(self, val): if digit in frmts: frmt = frmts[digit] else: - #frmt = '%4.2e' frmt = '%.' 
+ '%d' % abs(digit) + 'f' return str(frmt % val) diff --git a/nansat/mappers/envisat.py b/nansat/mappers/envisat.py index cf4e44681..482a4c102 100644 --- a/nansat/mappers/envisat.py +++ b/nansat/mappers/envisat.py @@ -214,9 +214,10 @@ def setup_ads_parameters(self, fileName, gdalMetadata): def _set_envisat_time(self, gdalMetadata): ''' Get time from metadata, set time to VRT''' - # set time - productTime = gdalMetadata["SPH_FIRST_LINE_TIME"] - self._set_time(parse(productTime)) + # set valid time + self.dataset.SetMetadataItem('time_coverage_start', parse(gdalMetadata["SPH_FIRST_LINE_TIME"]).isoformat()) + self.dataset.SetMetadataItem('time_coverage_end', parse(gdalMetadata["SPH_LAST_LINE_TIME"]).isoformat()) + def read_offset_from_header(self, gadsDSName): ''' Read offset of ADS from text header. diff --git a/nansat/mappers/globcolour.py b/nansat/mappers/globcolour.py index 93e5a5978..172f820e2 100644 --- a/nansat/mappers/globcolour.py +++ b/nansat/mappers/globcolour.py @@ -25,6 +25,7 @@ class Globcolour(): 'L681_mean': 'surface_upwelling_spectral_radiance_in_air_emerging_from_sea_water', 'L709_mean': 'surface_upwelling_spectral_radiance_in_air_emerging_from_sea_water', 'CDM_mean': 'volume_absorption_coefficient_of_radiative_flux_in_sea_water_due_to_dissolved_organic_matter', + 'BBP_mean': 'volume_backscattering_coefficient_of_radiative_flux_in_sea_water_due_to_suspended_particles', } def make_rrsw_meta_entry(self, nlwMetaEntry): diff --git a/nansat/mappers/hdf4_mapper.py b/nansat/mappers/hdf4_mapper.py new file mode 100644 index 000000000..9d076fba3 --- /dev/null +++ b/nansat/mappers/hdf4_mapper.py @@ -0,0 +1,33 @@ +# Name: mapper_modisL1 +# Purpose: Mapping for MODIS-L1 data +# Authors: Anton Korosov +# Licence: This file is part of NANSAT. 
You can redistribute it or modify +# under the terms of GNU General Public License, v.3 +# http://www.gnu.org/licenses/gpl-3.0.html +from dateutil.parser import parse +import warnings + +from nansat.tools import gdal, ogr, WrongMapperError +from nansat.vrt import VRT + + +class HDF4Mapper(VRT): + + def find_metadata(self, iMetadata, iKey, default=''): + ''' Find metadata which has similar key + Parameters: + iMetadata : dict + input metadata, usually gdalMetadata + iKey : str + key to search for + default : str + default value + + ''' + value = default + for key in iMetadata: + if iKey in key: + value = iMetadata[key] + break + + return value diff --git a/nansat/mappers/mapper_aapp_l1b.py b/nansat/mappers/mapper_aapp_l1b.py old mode 100755 new mode 100644 index eede7d8b4..a367c05e0 --- a/nansat/mappers/mapper_aapp_l1b.py +++ b/nansat/mappers/mapper_aapp_l1b.py @@ -74,8 +74,8 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): year = int(struct.unpack('= -180 and + lon <= 180 and + lat >= MIN_LAT and + lat <= MAX_LAT): + gcp = gdal.GCP(lon, lat, 0, i1 + dx, i0 + dy) + gcps.append(gcp) + k += 1 + maxY = max(maxY, i0) + minY = min(minY, i0) + yOff = minY + ySize = maxY - minY + + # remove Y-offset from gcps + for gcp in gcps: + gcp.GCPLine -= yOff + + metaDict = [] + + subDatasets = gdalDataset.GetSubDatasets() + metadata = gdalDataset.GetMetadata() + for subDataset in subDatasets: + # select subdatasets fro that resolution (width) + if (subDatasetWidth == int(subDataset[1].split(']')[0].split('x')[-1]) and + 'Latitude' not in subDataset[0] and 'Longitude' not in subDataset[0]): + name = subDataset[0].split('/')[-1] + # find scale + scale = 1 + for meta in metadata: + if name + '_SCALE' in meta: + scale = float(metadata[meta]) + # create meta entry + metaEntry = {'src': {'SourceFilename': subDataset[0], + 'sourceBand': 1, + 'ScaleRatio': scale, + 'ScaleOffset': 0, + 'yOff': yOff, + 'ySize': ySize,}, + 'dst': {'name': name} + } + 
metaDict.append(metaEntry) + + # create VRT from one of the subdatasets + gdalSubDataset = gdal.Open(metaEntry['src']['SourceFilename']) + VRT.__init__(self, srcRasterXSize=subDatasetWidth, srcRasterYSize=ySize) + # add bands with metadata and corresponding values to the empty VRT + self._create_bands(metaDict) + + self.dataset.SetMetadataItem('time_coverage_start', + parse_time(gdalMetadata['ObservationStartDateTime']).isoformat()) + self.dataset.SetMetadataItem('time_coverage_end', + parse_time(gdalMetadata['ObservationEndDateTime']).isoformat()) + # append GCPs and lat/lon projection to the vsiDataset + self.dataset.SetGCPs(gcps, NSR().wkt) + self.reproject_GCPs('+proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=90 +lon_0=0 +no_defs') + self.tps = True + + mm = gcmd_keywords.get_instrument('AMSR2') + ee = gcmd_keywords.get_platform('GCOM-W1') + self.dataset.SetMetadataItem('instrument', json.dumps(mm)) + self.dataset.SetMetadataItem('platform', json.dumps(ee)) diff --git a/nansat/mappers/mapper_amsr2_l3.py b/nansat/mappers/mapper_amsr2_l3.py index 23fd6dce4..f48101b09 100644 --- a/nansat/mappers/mapper_amsr2_l3.py +++ b/nansat/mappers/mapper_amsr2_l3.py @@ -8,6 +8,9 @@ import datetime import os.path import glob +import json + +from nerscmetadata import gcmd_keywords import numpy as np @@ -83,5 +86,12 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): # add bands with metadata and corresponding values to the empty VRT self._create_bands(metaDict) - # Add valid time - self._set_time(parse(gdalMetadata['ObservationStartDateTime'])) + # Adding valid time to dataset + self.dataset.SetMetadataItem('time_coverage_start', parse(gdalMetadata['ObservationStartDateTime']).isoformat()) + self.dataset.SetMetadataItem('time_coverage_end', parse(gdalMetadata['ObservationStartDateTime']).isoformat()) + + mm = gcmd_keywords.get_instrument('AMSR2') + ee = gcmd_keywords.get_platform('GCOM-W1') + self.dataset.SetMetadataItem('instrument', json.dumps(mm)) + 
self.dataset.SetMetadataItem('platform', json.dumps(ee)) + diff --git a/nansat/mappers/mapper_amsre_uham_leadfraction.py b/nansat/mappers/mapper_amsre_uham_leadfraction.py new file mode 100644 index 000000000..269b86c71 --- /dev/null +++ b/nansat/mappers/mapper_amsre_uham_leadfraction.py @@ -0,0 +1,58 @@ +#------------------------------------------------------------------------------- +# Name: mapper_amsre_UHAM_lead_fraction.py +# Purpose: +# +# Author: Morten Wergeland Hansen +# Modified: Morten Wergeland Hansen +# +# Created: 18.02.2015 +# Last modified:24.02.2015 09:26 +# Copyright: (c) NERSC +# License: +#------------------------------------------------------------------------------- +import datetime +from osgeo import gdal, osr +from nansat.nsr import NSR +from nansat.vrt import VRT + +from nansat.tools import WrongMapperError + +class Mapper(VRT): + + def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): + + title_correct = False + if not gdalMetadata: + raise WrongMapperError + for key, val in gdalMetadata.iteritems(): + if 'title' in key: + if not val == \ + 'Daily AMSR-E Arctic lead area fraction [in percent]': + raise WrongMapperError + else: + title_correct = True + + if not title_correct: + raise WrongMapperError + + # initiate VRT for the NSIDC 10 km grid + VRT.__init__(self, + srcGeoTransform=(-3850000, 6250, 0.0, + 5850000, 0.0, -6250), + srcProjection=NSR(3411).wkt, + srcRasterXSize=1216, + srcRasterYSize=1792) + + src = { + 'SourceFilename': 'NETCDF:"%s":lf'%fileName, + 'SourceBand': 1, + } + dst = { + 'name': 'leadFraction', + 'long_name': 'AMSRE sea ice lead fraction', + } + + self._create_band(src, dst) + self.dataset.FlushCache() + + diff --git a/nansat/mappers/mapper_asar.py b/nansat/mappers/mapper_asar.py old mode 100755 new mode 100644 index b4fecb76a..804a6f357 --- a/nansat/mappers/mapper_asar.py +++ b/nansat/mappers/mapper_asar.py @@ -8,8 +8,12 @@ import numpy as np import scipy.ndimage +from osgeo import gdal from 
dateutil.parser import parse +import json +from nerscmetadata import gcmd_keywords + from nansat.vrt import VRT from envisat import Envisat from nansat.domain import Domain @@ -81,10 +85,46 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): # add dictionary for raw counts metaDict = [] for iPolarization in polarization: + iBand = gdalDataset.GetRasterBand(iPolarization['bandNum']) + dtype = iBand.DataType + shortName = 'RawCounts_%s' %iPolarization['channel'] + bandName = shortName + dstName = 'raw_counts_%s' % iPolarization['channel'] + if (8 <= dtype and dtype < 12): + bandName = shortName+'_complex' + dstName = dstName + '_complex' + + metaDict.append({'src': {'SourceFilename': fileName, + 'SourceBand': iPolarization['bandNum']}, + 'dst': {'name': dstName}}) + + + ''' metaDict.append({'src': {'SourceFilename': fileName, 'SourceBand': iPolarization['bandNum']}, 'dst': {'name': 'raw_counts_%s' % iPolarization['channel']}}) + ''' + # if raw data is complex, add the intensity band + if (8 <= dtype and dtype < 12): + # choose pixelfunction type + if (dtype == 8 or dtype == 9): + pixelFunctionType = 'IntensityInt' + else: + pixelFunctionType = 'intensity' + # get data type of the intensity band + intensityDataType = {'8': 3, '9': 4, + '10': 5, '11': 6}.get(str(dtype), 4) + # add intensity band + metaDict.append( + {'src': {'SourceFilename': fileName, + 'SourceBand': iPolarization['bandNum'], + 'DataType': dtype}, + 'dst': {'name': 'raw_counts_%s' + % iPolarization['channel'], + 'PixelFunctionType': pixelFunctionType, + 'SourceTransferType': gdal.GetDataTypeName(dtype), + 'dataType': intensityDataType}}) ##################################################################### # Add incidence angle and look direction through small VRT objects @@ -94,16 +134,16 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): inc = self.get_array_from_ADS('first_line_incidence_angle') # Calculate SAR look direction (ASAR is always right-looking) - 
SAR_look_direction = initial_bearing(lon[:, :-1], lat[:, :-1], + look_direction = initial_bearing(lon[:, :-1], lat[:, :-1], lon[:, 1:], lat[:, 1:]) # Interpolate to regain lost row - SAR_look_direction = scipy.ndimage.interpolation.zoom( - SAR_look_direction, (1, 11./10.)) + look_direction = scipy.ndimage.interpolation.zoom( + look_direction, (1, 11./10.)) # Decompose, to avoid interpolation errors around 0 <-> 360 - SAR_look_direction_u = np.sin(np.deg2rad(SAR_look_direction)) - SAR_look_direction_v = np.cos(np.deg2rad(SAR_look_direction)) - look_u_VRT = VRT(array=SAR_look_direction_u, lat=lat, lon=lon) - look_v_VRT = VRT(array=SAR_look_direction_v, lat=lat, lon=lon) + look_direction_u = np.sin(np.deg2rad(look_direction)) + look_direction_v = np.cos(np.deg2rad(look_direction)) + look_u_VRT = VRT(array=look_direction_u, lat=lat, lon=lon) + look_v_VRT = VRT(array=look_direction_v, lat=lat, lon=lon) # Note: If incidence angle and look direction are stored in # same VRT, access time is about twice as large @@ -136,7 +176,7 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): metaDict.append({'src': {'SourceFilename': lookFileName, 'SourceBand': 1}, 'dst': {'wkv': 'sensor_azimuth_angle', - 'name': 'SAR_look_direction'}}) + 'name': 'look_direction'}}) #################### # Add Sigma0-bands @@ -193,10 +233,10 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): # ASAR is always right-looking self.dataset.SetMetadataItem('ANTENNA_POINTING', 'RIGHT') self.dataset.SetMetadataItem('ORBIT_DIRECTION', - gdalMetadata['SPH_PASS'].upper()) + gdalMetadata['SPH_PASS'].upper().strip()) ################################################################### - # Add sigma0_VV + # Estimate sigma0_VV from sigma0_HH ################################################################### polarizations = [] for pp in polarization: @@ -229,13 +269,20 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): # to improve performance (tradeoff vs 
accuracy) self.dataset.SetMetadataItem('skip_gcps', '3') - # set SADCAT specific metadata - self.dataset.SetMetadataItem('start_date', + self.dataset.SetMetadataItem('time_coverage_start', (parse(gdalMetadata['MPH_SENSING_START']). isoformat())) - self.dataset.SetMetadataItem('stop_date', + self.dataset.SetMetadataItem('time_coverage_end', (parse(gdalMetadata['MPH_SENSING_STOP']). isoformat())) - self.dataset.SetMetadataItem('sensor', 'ASAR') - self.dataset.SetMetadataItem('satellite', 'Envisat') - self.dataset.SetMetadataItem('mapper', 'asar') + + # Get dictionary describing the instrument and platform according to + # the GCMD keywords + mm = gcmd_keywords.get_instrument('asar') + ee = gcmd_keywords.get_platform('envisat') + + # TODO: Validate that the found instrument and platform are indeed what + # we want.... + + self.dataset.SetMetadataItem('instrument', json.dumps(mm)) + self.dataset.SetMetadataItem('platform', json.dumps(ee)) diff --git a/nansat/mappers/mapper_asar_netcdf_old_doppler.py b/nansat/mappers/mapper_asar_netcdf_old_doppler.py deleted file mode 100644 index 963f59e88..000000000 --- a/nansat/mappers/mapper_asar_netcdf_old_doppler.py +++ /dev/null @@ -1,157 +0,0 @@ -#------------------------------------------------------------------------------ -# Name: mapper_asar_netcdf_old_doppler.py -# Purpose: To read previously processed ASAR WS Doppler data saved as -# netcdf files -# -# Author: Morten Wergeland Hansen -# Modified: Morten Wergeland Hansen -# -# Created: 09.10.2014 -# Last modified:25.11.2014 20:49 -# Copyright: (c) NERSC -# License: -#------------------------------------------------------------------------------ -import warnings - -import os -import glob -import numpy as np -import scipy -from dateutil.parser import parse - -from nansat.vrt import VRT -from nansat.tools import gdal, WrongMapperError, initial_bearing -from nansat.nsr import NSR -from nansat.node import Node - - -class Mapper(VRT): - ''' - Create VRT with mapping of ASAR wide 
swath Doppled data - ''' - - def __init__(self, filename, gdalDataset, gdalMetadata, **kwargs): - - # Check this is ASAR old doppler netcdf - if not len(filename.split('.')) == 3: - raise WrongMapperError - if not (filename.split('.')[2] == 'nc' and - filename.split('.')[1] == 'doppler' and - filename.split('.')[0][0:3] == 'ASA'): - raise WrongMapperError - - # Remove 'NC_GLOBAL#' and 'GDAL_' and - # 'NANSAT_' from keys in gdalDataset - tmpGdalMetadata = {} - for key in gdalMetadata.keys(): - newKey = key.replace('NC_GLOBAL#', '') - tmpGdalMetadata[newKey] = gdalMetadata[key] - gdalMetadata = tmpGdalMetadata - - # Get file names from subdatasets - subDatasets = gdalDataset.GetSubDatasets() - filenames = [f[0] for f in subDatasets] - - for ii, fn in enumerate(filenames): - if 'lon' in fn: - break - lon = gdal.Open(filenames.pop(ii)).ReadAsArray() - for ii, fn in enumerate(filenames): - if 'lat' in fn: - break - lat = gdal.Open(filenames.pop(ii)).ReadAsArray() - - # create empty VRT dataset with geolocation only - VRT.__init__(self, lon=lon, lat=lat) - - # Add list of calibration files to global metadata - #self.dataset.SetMetadataItem( - # 'Orbit based range bias calibration files', - # [filenames.pop(ii) for ii,fn in enumerate(filenames) if - # 'calibration_file_orbit' in fn][0]) - remove_calfile_info = [(filenames.pop(ii) - for ii, fn in enumerate(filenames) - if 'calibration_file_orbit' in fn)][0] - - name2wkv_dict = {'azimuth': 'platform_azimuth_angle', - 'incidence_angles': 'angle_of_incidence', - 'sat2target_elevation': 'sensor_view_angle', - 'slant_range_time': '', - 'dop_coef_observed': '', - 'dop_coef_predicted': '', - 'valid': '', - 'raw_counts': '', - 'azivar_raw_counts': '', - 'azibias': '', - 'range_bias_orbit': '', - 'range_bias_std_orbit': '', - 'valid_orbit': '', - 'range_bias_scene': '', - 'range_bias_std_scene': '', - 'valid_scene': '', - } - metaDict = [] - bandNo = {} - for i, filename in enumerate(filenames): - subDataset = gdal.Open(filename) - 
subBand = subDataset.GetRasterBand(1) - # generate src metadata - src = {'SourceFilename': filename, 'SourceBand': 1} - src['DataType'] = subBand.DataType - - bandMetadata = subBand.GetMetadata_Dict() - bandNo[bandMetadata.get('NETCDF_VARNAME')] = i + 1 - # generate dst metadata - if not bandMetadata.get('NETCDF_VARNAME') in name2wkv_dict.keys(): - continue - dst = {'wkv': name2wkv_dict[bandMetadata.get('NETCDF_VARNAME')], - 'name': bandMetadata.get('NETCDF_VARNAME'), - } - metaDict.append({'src': src, 'dst': dst}) - - # add bands with metadata and corresponding values to the empty VRT - self._create_bands(metaDict) - - metaDict = [] - dco = (self.dataset.GetRasterBand(bandNo['dop_coef_observed']). - ReadAsArray()) - dcp = (self.dataset.GetRasterBand(bandNo['dop_coef_predicted']). - ReadAsArray()) - azibias = self.dataset.GetRasterBand(bandNo['azibias']).ReadAsArray() - range_bias = (self.dataset.GetRasterBand(bandNo['range_bias_scene']). - ReadAsArray()) - fdg = dco - dcp - azibias - range_bias - fdg[range_bias > 10000] = np.nan - fdg[azibias > 10000] = np.nan - fdgVRT = VRT(array=fdg, lat=lat, lon=lon) - - mask = np.ones(np.shape(dco)) - mask[range_bias > 10000] = 0. - mask[azibias > 10000] = 0. 
- maskVRT = VRT(array=mask, lat=lat, lon=lon) - - self.bandVRTs['fdgVRT'] = fdgVRT - metaDict.append({'src': {'SourceFilename': ( - self.bandVRTs['fdgVRT'].fileName), - 'SourceBand': 1 - }, - 'dst': {'name': 'fdg', - 'long_name': ( - 'Line of sight geophysical Doppler shift'), - 'units': 'Hz', - } - }) - self.bandVRTs['maskVRT'] = maskVRT - metaDict.append({'src': {'SourceFilename': ( - self.bandVRTs['maskVRT'].fileName), - 'SourceBand': 1 - }, - 'dst': {'name': 'mask', - 'long_name': 'Mask for use in plotting', - } - }) - - # add bands with metadata and corresponding values to the empty VRT - self._create_bands(metaDict) - - self.dataset.SetMetadataItem('mapper', 'asar_netcdf_old_doppler') diff --git a/nansat/mappers/mapper_ascat_nasa.py b/nansat/mappers/mapper_ascat_nasa.py index 8aa325287..b9c265a95 100644 --- a/nansat/mappers/mapper_ascat_nasa.py +++ b/nansat/mappers/mapper_ascat_nasa.py @@ -1,6 +1,6 @@ # Name: mapper_ascat_nasa # Purpose: Mapping for ASCAT scatterometer winds -# Authors: Knut-Frode Dagestad +# Authors: Knut-Frode Dagestad, Morten W. Hansen # Licence: This file is part of NANSAT. 
You can redistribute it or modify # under the terms of GNU General Public License, v.3 # http://www.gnu.org/licenses/gpl-3.0.html @@ -9,6 +9,7 @@ # ftp://podaac-ftp.jpl.nasa.gov/allData/ascat/preview/L2/metop_a/12km/ import os.path import datetime +import warnings from nansat.tools import gdal, ogr from nansat.vrt import VRT, GeolocationArray @@ -101,4 +102,13 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, int(iFileName[15:17]), int(iFileName[17:19]), int(iFileName[19:21])) - self._set_time(startTime) + # Adding valid time to dataset + self.dataset.SetMetadataItem('time_coverage_start', startTime.isoformat()) + self.dataset.SetMetadataItem('time_coverage_end', startTime.isoformat()) + + # set SADCAT specific metadata + self.dataset.SetMetadataItem('sensor', 'ASCAT') + self.dataset.SetMetadataItem('satellite', 'Metop-A') + warnings.warn("Setting satellite to Metop-A - update mapper if it is" \ + " e.g. Metop-B") + self.dataset.SetMetadataItem('mapper', 'ascat_nasa') diff --git a/nansat/mappers/mapper_aster_l1a.py b/nansat/mappers/mapper_aster_l1a.py index 5a1e72121..76c91480b 100644 --- a/nansat/mappers/mapper_aster_l1a.py +++ b/nansat/mappers/mapper_aster_l1a.py @@ -121,4 +121,13 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, # append GCPs and lat/lon projection to the vsiDataset self.dataset.SetGCPs(gcps, NSR().wkt) - self._set_time(parse(gdalMetadata['FIRSTPACKETTIME'])) + # Adding valid time to dataset + self.dataset.SetMetadataItem('time_coverage_start', + parse(gdalMetadata['FIRSTPACKETTIME']).isoformat()) + self.dataset.SetMetadataItem('time_coverage_end', + parse(gdalMetadata['LASTPACKETTIME']).isoformat()) + + mm = gcmd_keywords.get_instrument('ASTER') + ee = gcmd_keywords.get_platform('TERRA') + self.dataset.SetMetadataItem('instrument', json.dumps(mm)) + self.dataset.SetMetadataItem('platform', json.dumps(ee)) diff --git a/nansat/mappers/mapper_aster_l1b.py b/nansat/mappers/mapper_aster_l1b.py new file mode 100644 index 
000000000..04646e9f8 --- /dev/null +++ b/nansat/mappers/mapper_aster_l1b.py @@ -0,0 +1,113 @@ +# Name: mapper_modisL1 +# Purpose: Mapping for MODIS-L1 data +# Authors: Anton Korosov +# Licence: This file is part of NANSAT. You can redistribute it or modify +# under the terms of GNU General Public License, v.3 +# http://www.gnu.org/licenses/gpl-3.0.html +from dateutil.parser import parse +import warnings +import json + +from nerscmetadata import gcmd_keywords + +from nansat.tools import gdal, ogr, WrongMapperError +from nansat.vrt import VRT +from hdf4_mapper import HDF4Mapper + + +class Mapper(HDF4Mapper): + ''' VRT with mapping of WKV for MODIS Level 1 (QKM, HKM, 1KM) ''' + + def __init__(self, fileName, gdalDataset, gdalMetadata, emrange='VNIR', **kwargs): + ''' Create MODIS_L1 VRT ''' + # check mapper + try: + INSTRUMENTSHORTNAME = gdalMetadata['INSTRUMENTSHORTNAME'] + except: + raise WrongMapperError + if INSTRUMENTSHORTNAME != 'ASTER': + raise WrongMapperError + try: + SHORTNAME = gdalMetadata['SHORTNAME'] + except: + raise WrongMapperError + if SHORTNAME != 'ASTL1B': + raise WrongMapperError + + # set up metadict for data with various resolution + subDSString = 'HDF4_EOS:EOS_SWATH:"%s":%s:%s' + metaDictVNIR = [ + {'src': {'SourceFilename': subDSString % (fileName, 'VNIR_Swath', 'ImageData1' )}, 'dst': {'wavelength': '560'}}, + {'src': {'SourceFilename': subDSString % (fileName, 'VNIR_Swath', 'ImageData2' )}, 'dst': {'wavelength': '660'}}, + {'src': {'SourceFilename': subDSString % (fileName, 'VNIR_Swath', 'ImageData3N')}, 'dst': {'wavelength': '820'}}, + {'src': {'SourceFilename': subDSString % (fileName, 'VNIR_Swath', 'ImageData3B')}, 'dst': {'wavelength': '820'}}, + ] + + metaDictSWIR = [ + {'src': {'SourceFilename': subDSString % (fileName, 'SWIR_Swath', 'ImageData4')}, 'dst': {'wavelength': '1650'}}, + {'src': {'SourceFilename': subDSString % (fileName, 'SWIR_Swath', 'ImageData5')}, 'dst': {'wavelength': '2165'}}, + {'src': {'SourceFilename': subDSString 
% (fileName, 'SWIR_Swath', 'ImageData6')}, 'dst': {'wavelength': '2205'}}, + {'src': {'SourceFilename': subDSString % (fileName, 'SWIR_Swath', 'ImageData7')}, 'dst': {'wavelength': '2260'}}, + {'src': {'SourceFilename': subDSString % (fileName, 'SWIR_Swath', 'ImageData8')}, 'dst': {'wavelength': '2330'}}, + {'src': {'SourceFilename': subDSString % (fileName, 'SWIR_Swath', 'ImageData9')}, 'dst': {'wavelength': '2395'}}, + ] + + metaDictTIR = [ + {'src': {'SourceFilename': subDSString % (fileName, 'TIR_Swath', 'ImageData10')}, 'dst': {'wavelength': '8300'}}, + {'src': {'SourceFilename': subDSString % (fileName, 'TIR_Swath', 'ImageData11')}, 'dst': {'wavelength': '8650'}}, + {'src': {'SourceFilename': subDSString % (fileName, 'TIR_Swath', 'ImageData12')}, 'dst': {'wavelength': '9100'}}, + {'src': {'SourceFilename': subDSString % (fileName, 'TIR_Swath', 'ImageData13')}, 'dst': {'wavelength': '10600'}}, + {'src': {'SourceFilename': subDSString % (fileName, 'TIR_Swath', 'ImageData14')}, 'dst': {'wavelength': '11300'}}, + ] + + # select appropriate metaDict based on parameter + metaDict = {'VNIR': metaDictVNIR, + 'SWIR': metaDictSWIR, + 'TIR': metaDictTIR, + }[emrange] + + # get 1st EOS subdataset and parse to VRT.__init__() + # for retrieving geo-metadata + try: + gdalSubDataset0 = gdal.Open(metaDict[0]['src']['SourceFilename']) + except (AttributeError, IndexError): + raise WrongMapperError + + # create empty VRT dataset with geolocation only + VRT.__init__(self, gdalSubDataset0) + + # add source band, wkv and suffix + for metaEntry in metaDict: + metaEntry['src']['SourceBand'] = 1 + metaEntry['dst']['wkv'] = 'toa_outgoing_spectral_radiance' + metaEntry['dst']['suffix'] = metaEntry['dst']['wavelength'] + + if 'ImageData3N' in metaEntry['src']['SourceFilename']: + metaEntry['dst']['suffix'] += 'N' + + if 'ImageData3B' in metaEntry['src']['SourceFilename']: + metaEntry['dst']['suffix'] += 'B' + + # add scale and offset + for metaEntry in metaDict: + bandNo = 
metaEntry['src']['SourceFilename'].strip().split(':')[-1].replace('ImageData', '') + metaEntry['src']['ScaleRatio'] = float(gdalMetadata['INCL' + bandNo]) + metaEntry['src']['ScaleOffset'] = float(gdalMetadata['OFFSET' + bandNo]) + + # add bands with metadata and corresponding values to the empty VRT + self._create_bands(metaDict) + + # set time + datetimeString = self.find_metadata(gdalMetadata, "SETTINGTIMEOFPOINTING") + # Adding valid time to dataset + self.dataset.SetMetadataItem('time_coverage_start', + parse(datetimeString+'+00').isoformat()) + self.dataset.SetMetadataItem('time_coverage_end', + parse(datetimeString+'+00').isoformat()) + + mm = gcmd_keywords.get_instrument('ASTER') + ee = gcmd_keywords.get_platform('TERRA') + self.dataset.SetMetadataItem('instrument', json.dumps(mm)) + self.dataset.SetMetadataItem('platform', json.dumps(ee)) + + self.remove_geolocationArray() diff --git a/nansat/mappers/mapper_csks.py b/nansat/mappers/mapper_csks.py index a31f8b8ec..58490b4f8 100644 --- a/nansat/mappers/mapper_csks.py +++ b/nansat/mappers/mapper_csks.py @@ -10,7 +10,7 @@ import numpy as np import os -from nansat.tools import gdal, ogr, osr, WrongMapperError +from nansat.tools import gdal, ogr, osr, WrongMapperError, parse_time from nansat.vrt import VRT, GeolocationArray @@ -173,4 +173,10 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): self._create_band(src, dst) + self.dataset.FlushCache() + + self.dataset.SetMetadataItem('time_coverage_start', + parse_time(gdalMetadata['Scene_Sensing_Start_UTC']).isoformat()) + self.dataset.SetMetadataItem('time_coverage_end', + parse_time(gdalMetadata['Scene_Sensing_Stop_UTC']).isoformat()) diff --git a/nansat/mappers/mapper_emodnet.py b/nansat/mappers/mapper_emodnet.py new file mode 100644 index 000000000..f437f2013 --- /dev/null +++ b/nansat/mappers/mapper_emodnet.py @@ -0,0 +1,75 @@ +# Name: mapper_emodnet.py +# Purpose: Mapper for bathymetry data from EMODNet +# 
http://portal.emodnet-bathymetry.eu/ +# Authors: Anton Korosov +# Licence: This file is part of NANSAT. You can redistribute it or modify +# under the terms of GNU General Public License, v.3 +# http://www.gnu.org/licenses/gpl-3.0.html +import os +from dateutil.parser import parse + +import numpy as np +from scipy.io.netcdf import netcdf_file + +from nansat.nsr import NSR +from nansat.vrt import VRT, GeolocationArray +from nansat.node import Node +from nansat.tools import gdal, ogr, WrongMapperError + + +class Mapper(VRT): + def __init__(self, inputFileName, gdalDataset, gdalMetadata, logLevel=30, + **kwargs): + # check if mapper fits + if not gdalMetadata: + raise WrongMapperError + if not os.path.splitext(inputFileName)[1] == '.mnt': + raise WrongMapperError + try: + mbNorthLatitude = float(gdalMetadata['NC_GLOBAL#mbNorthLatitude']) + mbSouthLatitude = float(gdalMetadata['NC_GLOBAL#mbSouthLatitude']) + mbEastLongitude = float(gdalMetadata['NC_GLOBAL#mbEastLongitude']) + mbWestLongitude = float(gdalMetadata['NC_GLOBAL#mbWestLongitude']) + mbProj4String = gdalMetadata['NC_GLOBAL#mbProj4String'] + Number_lines = int(gdalMetadata['NC_GLOBAL#Number_lines']) + Number_columns = int(gdalMetadata['NC_GLOBAL#Number_columns']) + Element_x_size = float(gdalMetadata['NC_GLOBAL#Element_x_size']) + Element_y_size = float(gdalMetadata['NC_GLOBAL#Element_y_size']) + except: + raise WrongMapperError + + # find subdataset with DEPTH + subDatasets = gdalDataset.GetSubDatasets() + dSourceFile = None + for subDataset in subDatasets: + if subDataset[0].endswith('.mnt":DEPTH'): + dSourceFile = subDataset[0] + if dSourceFile is None: + raise WrongMapperError + dSubDataset = gdal.Open(dSourceFile) + dMetadata = dSubDataset.GetMetadata() + + try: + scale_factor = dMetadata['DEPTH#scale_factor'] + add_offset = dMetadata['DEPTH#add_offset'] + except: + raise WrongMapperError + + geoTransform = [mbWestLongitude, Element_x_size, 0, + mbNorthLatitude, 0, -Element_y_size] + + # create empty VRT 
dataset with geolocation only + VRT.__init__(self, srcGeoTransform=geoTransform, + srcMetadata=gdalMetadata, + srcProjection=NSR(mbProj4String).wkt, + srcRasterXSize=Number_columns, + srcRasterYSize=Number_lines) + + metaDict = [{'src': {'SourceFilename': dSourceFile, + 'SourceBand': 1, + 'ScaleRatio' : scale_factor, + 'ScaleOffset' : add_offset}, + 'dst': {'wkv': 'depth'}}] + + # add bands with metadata and corresponding values to the empty VRT + self._create_bands(metaDict) diff --git a/nansat/mappers/mapper_generic.py b/nansat/mappers/mapper_generic.py index 80dff61f5..a76716bc2 100644 --- a/nansat/mappers/mapper_generic.py +++ b/nansat/mappers/mapper_generic.py @@ -1,19 +1,28 @@ # Name: mapper_generic.py # Purpose: Generic Mapper for L3/L4 satellite or modeling data -# Authors: Asuka Yamakava, Anton Korosov, Morten Wergeland Hansen +# Authors: Asuka Yamakava, Anton Korosov, Morten Wergeland Hansen, +# Aleksander Vines +# Copyright: (c) NERSC # Licence: This file is part of NANSAT. You can redistribute it or modify # under the terms of GNU General Public License, v.3 # http://www.gnu.org/licenses/gpl-3.0.html import os from dateutil.parser import parse +import datetime import numpy as np from scipy.io.netcdf import netcdf_file +try: + from cfunits import Units +except: + cfunitsInstalled = False +else: + cfunitsInstalled = True + from nansat.nsr import NSR from nansat.vrt import VRT, GeolocationArray -from nansat.node import Node -from nansat.tools import gdal, ogr, WrongMapperError +from nansat.tools import gdal, WrongMapperError, parse_time class Mapper(VRT): @@ -47,12 +56,11 @@ def __init__(self, inputFileName, gdalDataset, gdalMetadata, logLevel=30, # add bands with metadata and corresponding values to the empty VRT metaDict = [] - geoFileDict = {} xDatasetSource = '' yDatasetSource = '' firstXSize = 0 firstYSize = 0 - for i, fileName in enumerate(fileNames): + for _, fileName in enumerate(fileNames): subDataset = gdal.Open(fileName) # choose the first 
dataset whith grid if (firstXSize == 0 and firstYSize == 0 and @@ -81,7 +89,7 @@ def __init__(self, inputFileName, gdalDataset, gdalMetadata, logLevel=30, if 'PixelFunctionType' in bandMetadata: bandMetadata.pop('PixelFunctionType') sourceBands = iBand + 1 - #sourceBands = i*subDataset.RasterCount + iBand + 1 + # sourceBands = i*subDataset.RasterCount + iBand + 1 # generate src metadata src = {'SourceFilename': fileName, @@ -111,21 +119,29 @@ def __init__(self, inputFileName, gdalDataset, gdalMetadata, logLevel=30, # set wkv and bandname dst['wkv'] = bandMetadata.get('standard_name', '') # first, try the name metadata - bandName = bandMetadata.get('name', '') - # if it doesn't exist get name from NETCDF_VARNAME - if len(bandName) == 0: + if 'name' in bandMetadata: + bandName = bandMetadata['name'] + else: + # if it doesn't exist get name from NETCDF_VARNAME bandName = bandMetadata.get('NETCDF_VARNAME', '') if len(bandName) == 0: - bandName = bandMetadata.get('dods_variable', - '') - if len(bandName) > 0: - if origin_is_nansat and fileExt == '.nc': - # remove digits added by gdal in - # exporting to netcdf... - if bandName[-1:].isdigit(): - bandName = bandName[:-1] - if bandName[-1:].isdigit(): - bandName = bandName[:-1] + bandName = bandMetadata.get( + 'dods_variable', '' + ) + + # remove digits added by gdal in + # exporting to netcdf... 
+            if (len(bandName) > 0 and origin_is_nansat and
+                    fileExt == '.nc'):
+                if bandName[-1:].isdigit():
+                    bandName = bandName[:-1]
+                if bandName[-1:].isdigit():
+                    bandName = bandName[:-1]
+
+            # if still no bandname, create one
+            if len(bandName) == 0:
+                bandName = 'band_%03d' % iBand
+
             dst['name'] = bandName
 
             # remove non-necessary metadata from dst
@@ -191,6 +207,13 @@ def __init__(self, inputFileName, gdalDataset, gdalMetadata, logLevel=30,
                 # no projection was found in dataset or metadata:
                 # generate WGS84 by default
                 projection = NSR().wkt
+        # fix problem with MET.NO files where a, b given in m and XC/YC in km
+        if ('UNIT["kilometre"' in projection and
+            ',SPHEROID["Spheroid",6378273,7.331926543631893e-12]' in
+            projection):
+            projection = projection.replace(
+                ',SPHEROID["Spheroid",6378273,7.331926543631893e-12]',
+                '')
 
         # set projection
         self.dataset.SetProjection(self.repare_projection(projection))
@@ -226,15 +249,90 @@ def __init__(self, inputFileName, gdalDataset, gdalMetadata, logLevel=30,
                 geoTransform = eval(geoTransformStr.replace('|', ','))
                 self.dataset.SetGeoTransform(geoTransform)
 
-        if 'start_date' in gdalMetadata:
+        subMetadata = firstSubDataset.GetMetadata()
+
+
+        ### GET START TIME from METADATA
+        time_coverage_start = None
+        if 'start_time' in gdalMetadata:
+            time_coverage_start = parse_time(gdalMetadata['start_time'])
+        elif 'start_date' in gdalMetadata:
+            time_coverage_start = parse_time(gdalMetadata['start_date'])
+        elif 'time_coverage_start' in gdalMetadata:
+            time_coverage_start = parse_time(
+                                    gdalMetadata['time_coverage_start'])
+
+        ### GET END TIME from METADATA
+        time_coverage_end = None
+        if 'stop_time' in gdalMetadata:
+            time_coverage_end = parse_time(gdalMetadata['stop_time'])
+        elif 'stop_date' in gdalMetadata:
+            time_coverage_end = parse_time(gdalMetadata['stop_date'])
+        elif 'time_coverage_stop' in gdalMetadata:
+            time_coverage_end = parse_time(
+                                    gdalMetadata['time_coverage_stop'])
+        elif 'end_time' in gdalMetadata:
+            
time_coverage_end = parse_time(gdalMetadata['end_time'])
+        elif 'end_date' in gdalMetadata:
+            time_coverage_end = parse_time(gdalMetadata['end_date'])
+        elif 'time_coverage_end' in gdalMetadata:
+            time_coverage_end = parse_time(
+                                    gdalMetadata['time_coverage_end'])
+
+        ### GET start time from time variable
+        if (time_coverage_start is None and cfunitsInstalled and
+                 'time#standard_name' in subMetadata and
+                 subMetadata['time#standard_name'] == 'time' and
+                 'time#units' in subMetadata and
+                 'time#calendar' in subMetadata):
+            # get data from netcdf data
+            ncFile = netcdf_file(inputFileName, 'r')
+            timeLength = ncFile.variables['time'].shape[0]
+            timeValueStart = ncFile.variables['time'][0]
+            timeValueEnd = ncFile.variables['time'][-1]
+            ncFile.close()
             try:
-                startDate = parse(gdalMetadata['start_date'])
+                timeDeltaStart = Units.conform(timeValueStart,
+                                  Units(subMetadata['time#units'],
+                                        calendar=subMetadata['time#calendar']),
+                                  Units('days since 1950-01-01'))
             except ValueError:
-                self.logger.error('Time format is wrong in input file!')
+                self.logger.error('calendar units are wrong: %s' %
+                                  subMetadata['time#calendar'])
             else:
-                self._set_time(startDate)
-
-        self.logger.warning('Use generic mapper - OK!')
+                time_coverage_start = (datetime.datetime(1950,1,1) +
+                                   datetime.timedelta(float(timeDeltaStart)))
+
+                if timeLength > 1:
+                    timeDeltaEnd = Units.conform(timeValueEnd,
+                                      Units(subMetadata['time#units'],
+                                        calendar=subMetadata['time#calendar']),
+                                      Units('days since 1950-01-01'))
+                else:
+                    timeDeltaEnd = timeDeltaStart + 1
+                time_coverage_end = (datetime.datetime(1950,1,1) +
+                                 datetime.timedelta(float(timeDeltaEnd)))
+
+        ## finally set values of time_coverage start and end if available
+        if time_coverage_start is not None:
+            self.dataset.SetMetadataItem('time_coverage_start',
+                                    time_coverage_start.isoformat())
+        if time_coverage_end is not None:
+            self.dataset.SetMetadataItem('time_coverage_end',
+                                    time_coverage_end.isoformat())
+
+        if 'sensor' not in 
gdalMetadata: + self.dataset.SetMetadataItem('sensor', 'unknown') + if 'satellite' not in gdalMetadata: + self.dataset.SetMetadataItem('satellite', 'unknown') + if 'source_type' not in gdalMetadata: + self.dataset.SetMetadataItem('source_type', 'unknown') + if 'platform' not in gdalMetadata: + self.dataset.SetMetadataItem('platform', 'unknown') + if 'instrument' not in gdalMetadata: + self.dataset.SetMetadataItem('instrument', 'unknown') + + self.logger.info('Use generic mapper - OK!') def repare_projection(self, projection): '''Replace odd symbols in projection string '|' => ','; '&' => '"' ''' @@ -264,7 +362,7 @@ def add_gcps_from_metadata(self, geoMetadata): for x in gcpString.split('|'): if len(x) > 0: gcpValues.append(float(x)) - #gcpValues = [float(x) for x in gcpString.strip().split('|')] + # gcpValues = [float(x) for x in gcpString.strip().split('|')] gcpAllValues.append(gcpValues) # create list of GDAL GCPs @@ -281,8 +379,8 @@ def add_gcps_from_variables(self, fileName): # open input netCDF file for reading GCPs try: ncFile = netcdf_file(fileName, 'r') - except TypeError as e: - self.logger.warning('%s' % e) + except (TypeError, IOError) as e: + self.logger.info('%s' % e) return None # check if all GCP variables exist in the file diff --git a/nansat/mappers/mapper_geostationary.py b/nansat/mappers/mapper_geostationary.py old mode 100755 new mode 100644 index 5b04ec832..f27204577 --- a/nansat/mappers/mapper_geostationary.py +++ b/nansat/mappers/mapper_geostationary.py @@ -267,6 +267,8 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): self.dataset.SetMetadata({'satID': satellite}) # Set time - self._set_time(datetime.datetime.strptime(datestamp, '%Y%m%d%H%M')) + # Adding valid time to dataset + self.dataset.SetMetadataItem('time_coverage_start', + datetime.datetime.strptime(datestamp, '%Y%m%d%H%M').isoformat()) return diff --git a/nansat/mappers/mapper_globcolour_l3b.py b/nansat/mappers/mapper_globcolour_l3b.py index 59f2240d6..f193c22a8 
100644 --- a/nansat/mappers/mapper_globcolour_l3b.py +++ b/nansat/mappers/mapper_globcolour_l3b.py @@ -7,11 +7,16 @@ import glob import os.path import datetime +import json from scipy.io.netcdf import netcdf_file import numpy as np import matplotlib.pyplot as plt +from netCDF4 import Dataset + +from nerscmetadata import gcmd_keywords + from nansat.tools import WrongMapperError from nansat.vrt import VRT, GeolocationArray from globcolour import Globcolour @@ -36,7 +41,8 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, latlonGrid=None, iDir, iFile = os.path.split(fileName) iFileName, iFileExt = os.path.splitext(iFile) #print 'idir:', iDir, iFile, iFileName[0:5], iFileExt[0:8] - if iFileName[0:4] != 'L3b_' or iFileExt != '.nc': + if (iFileName[0:4] != 'L3b_' or iFileExt != '.nc' or + not os.path.exists(fileName) or gdalDataset is not None): raise WrongMapperError # define shape of GLOBCOLOUR grid @@ -63,7 +69,7 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, latlonGrid=None, mask = None for simFile in simFiles: print 'sim: ', simFile - f = netcdf_file(simFile) + f = Dataset(simFile) # get iBinned, index for converting from binned into GLOBCOLOR-grid colBinned = f.variables['col'][:] @@ -76,12 +82,10 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, latlonGrid=None, # get iRawPro, index for converting # from GLOBCOLOR-grid to latlonGrid yRawPro = np.rint(1 + (GLOBCOLOR_ROWS - 1) * - (latlonGrid[0] + 90) / 180) - lon_step_Mat = 1 / np.cos(np.pi * latlonGrid[0] / 180.) / 24. - xRawPro = np.rint(1 + (latlonGrid[1] + 180) / lon_step_Mat) - iRawPro = xRawPro + (yRawPro - 1) * GLOBCOLOR_COLS - iRawPro[iRawPro < 0] = 0 - iRawPro = np.rint(iRawPro).astype('uint32') + (latlonGrid[0] + 90) / 180.) + lon_step_Mat = 24. * np.cos(np.pi * latlonGrid[0] / 180.) 
+ xRawPro = np.rint(1 + (latlonGrid[1] + 180) * lon_step_Mat) + iRawPro = xRawPro.astype('uint32') + (yRawPro.astype('uint32') - 1) * GLOBCOLOR_COLS yRawPro = None xRawPro = None @@ -142,8 +146,8 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, latlonGrid=None, metaEntry['dst']['wavelength'] = simWavelength # add all metadata from NC-file - for attr in var._attributes: - metaEntry['dst'][attr] = var._attributes[attr] + for attr in var.ncattrs(): + metaEntry['dst'][attr] = var.getncattr(attr) metaDict.append(metaEntry) @@ -152,6 +156,19 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, latlonGrid=None, if metaEntry2 is not None: metaDict.append(metaEntry2) + + instrument = f.title.strip().split(' ')[-2].split('/')[0] + mm = gcmd_keywords.get_instrument(instrument) + self.dataset.SetMetadataItem('instrument', json.dumps(mm)) + + platform = { + 'MODIS' : 'AQUA', + 'MERIS' : 'ENVISAT', + 'SEAWIFS': 'QUICKBIRD', + 'VIIRS' : 'SUOMI-NPP'}[instrument.upper()] + pp = gcmd_keywords.get_platform(platform) + self.dataset.SetMetadataItem('platform', json.dumps(pp)) + # add bands with metadata and corresponding values to the empty VRT self._create_bands(metaDict) @@ -159,4 +176,7 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, latlonGrid=None, startDate = datetime.datetime(int(iFileName[4:8]), int(iFileName[8:10]), int(iFileName[10:12])) - self._set_time(startDate) + + # Adding valid time to dataset + self.dataset.SetMetadataItem('time_coverage_start', startDate.isoformat()) + self.dataset.SetMetadataItem('time_coverage_end', startDate.isoformat()) diff --git a/nansat/mappers/mapper_globcolour_l3m.py b/nansat/mappers/mapper_globcolour_l3m.py index 74ad4f84c..561ee72f2 100644 --- a/nansat/mappers/mapper_globcolour_l3m.py +++ b/nansat/mappers/mapper_globcolour_l3m.py @@ -119,7 +119,10 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): self._create_bands(metaDict) # Add valid time - #startYear = int(gdalMetadata['Start Year']) - 
#startDay = int(gdalMetadata['Start Day']) - #self._set_time(datetime.datetime(startYear, 1, 1) + datetime.timedelta(startDay)) - #""" + startYear = int(gdalMetadata['Start Year']) + startDay = int(gdalMetadata['Start Day']) + # Adding valid time to dataset + self.dataset.SetMetadataItem('time_coverage_start', + (datetime.datetime(startYear, 1, 1) + datetime.timedelta(startDay)).isoformat()) + self.dataset.SetMetadataItem('time_coverage_end', + (datetime.datetime(startYear, 1, 1) + datetime.timedelta(startDay)).isoformat()) diff --git a/nansat/mappers/mapper_globcurrent_online.py b/nansat/mappers/mapper_globcurrent_online.py new file mode 100644 index 000000000..f949d5f95 --- /dev/null +++ b/nansat/mappers/mapper_globcurrent_online.py @@ -0,0 +1,132 @@ +# Name: mapper_ncep_wind_online.py +# Purpose: Nansat mapping for GLOBCURRENT data, stored online in THREDDS +# Author: Anton Korosov +# Licence: This file is part of NANSAT. You can redistribute it or modify +# under the terms of GNU General Public License, v.3 +# http://www.gnu.org/licenses/gpl-3.0.html +# +# Usage: +# w = Nansat('globcurrent:2010-01-01T12') + +import os +import datetime +from dateutil.parser import parse +from time import sleep as time_sleep + +import numpy as np + +try: + from netCDF4 import Dataset +except ImportError: + raise ImportError(''' + Cannot import Dataset from netCDF4. 
+ You cannot access OC CCI data but + Nansat will work.''') + +from nansat.nsr import NSR +from nansat.vrt import VRT +from nansat.tools import gdal, WrongMapperError, OptionError + +class Mapper(VRT, object): + ''' VRT with mapping of WKV for NCEP GFS ''' + + GLOBCURR_URLS = { + 'EKMAN_15M' : 'http://tds0.ifremer.fr/thredds/dodsC/CLS-L4-CUREKM_15M-ERAWS_EEM-V01.0_FULL_TIME_SERIE', + 'EKMAN_HS' : 'http://tds0.ifremer.fr/thredds/dodsC/CLS-L4-CUREKM_HS-ERAWS_EEM-V01.0_FULL_TIME_SERIE', + 'GEOSTROPHIC' : 'http://tds0.ifremer.fr/thredds/dodsC/CLS-L4-CURGEO_0M-ALT_OI-V01.0_FULL_TIME_SERIE', + 'STOKES_DRIFT' : 'http://tds0.ifremer.fr/thredds/dodsC/GC_MOD_STK_GLO_010_WW3_FULL_TIME_SERIE', + 'TIDAL' : 'http://tds0.ifremer.fr/thredds/dodsC/GC_MOD_TIDE_GLO_010_FES2012_FULL_TIME_SERIE', + 'TOTAL_15M' : 'http://tds0.ifremer.fr/thredds/dodsC/CLS-L4-CUREUL_15M-ALT_SUM-V01.0_FULL_TIME_SERIE', + 'TOTAL_HS' : 'http://tds0.ifremer.fr/thredds/dodsC/CLS-L4-CUREUL_HS-ALT_SUM-V01.0_FULL_TIME_SERIE', + } + + def __init__(self, fileName, gdalDataset, gdalMetadata, + product='TOTAL_HS', url='', ds=None, + keywords=['northward', 'eastward'], **kwargs): + ''' Create NCEP VRT + Parameters: + fileName : str + globcurrent:2010-01-01T12 + product : str + one of the GLOBCURRENT products: + EKMAN_15M + EKMAN_HS + GEOSTROPHIC + STOKES_DRIFT + TIDAL + TOTAL_15M + TOTAL_HS + url: str + absolute url of GLOBCURRENT DATA + ''' + + keywordBase = 'globcurrent' + if not fileName.startswith(keywordBase): + raise WrongMapperError + + # get dataset URL + if url == '' and product.upper() in self.GLOBCURR_URLS: + url = self.GLOBCURR_URLS[product.upper()] + + if ds is None: + try: + self.ds = Dataset(url) + except: + raise OptionError('Cannot open %s' % url) + else: + self.ds = ds + + ### Get Date from input fileName + iDate = parse(fileName.split(':')[1]) + + # get time variable from GC + gcTime = self.ds.variables['time'][:] + + # compute date in GC calendar (days since 1950-1-1) + iDateGC = (iDate - 
datetime.datetime(1950, 1, 1)).total_seconds() / 60. / 60. / 24. + + # return index of closes time + gcLayer = np.argmin(np.abs(gcTime - iDateGC)) + + # convert closest time to datetime + gcDate = (datetime.datetime(1950, 1, 1) + + datetime.timedelta(float(gcTime[gcLayer]))) + + # create VRT with correct lon/lat (geotransform) + VRT.__init__(self, srcProjection=NSR().wkt, + srcRasterXSize=3600, + srcRasterYSize=1600, + srcGeoTransform=(-179.95, 0.1, 0, -79.95, 0, 0.1)) + + varNames = [self.get_var_name(keyword) for keyword in keywords] + metaDict = [self.get_metaitem(varName, gcLayer, url) + for varName in varNames + if varName is not None] + + self._create_bands(metaDict) + + # set time + self.dataset.SetMetadataItem('time_coverage_start', gcDate.isoformat()) + + def get_var_name(self, keyword): + ''' Get names of variable based on keyword ''' + for varName in self.ds.variables.keys(): + if keyword in varName: + return str(varName) + + def get_metaitem(self, varName, gcLayer, url): + ''' Set metadata for creating band VRT ''' + + metaItem = {'src': { + 'SourceFilename': '%s?%s.%s[%d][y][x]' % (url, varName, varName, gcLayer), + 'SourceBand': 1, + }, + 'dst': { + 'name': varName, + } + } + + for attr in self.ds.variables[varName].ncattrs(): + metaItem['dst'][str(attr)] = str(self.ds.variables[varName].getncattr(attr)) + + return metaItem diff --git a/nansat/mappers/mapper_goci_l1.py b/nansat/mappers/mapper_goci_l1.py index 1d64f8b63..68feb42a0 100644 --- a/nansat/mappers/mapper_goci_l1.py +++ b/nansat/mappers/mapper_goci_l1.py @@ -55,9 +55,3 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): # add bands with metadata and corresponding values to the empty VRT self._create_bands(metaDict) - - # add time - #productDate = gdalMetadata["RANGEBEGINNINGDATE"] - #productTime = gdalMetadata["RANGEENDINGTIME"] - #self._set_time(parse(productDate+' '+productTime)) - #self.remove_geolocationArray() diff --git a/nansat/mappers/mapper_gtopo30.py 
b/nansat/mappers/mapper_gtopo30.py new file mode 100644 index 000000000..cb6ad34d3 --- /dev/null +++ b/nansat/mappers/mapper_gtopo30.py @@ -0,0 +1,47 @@ +#------------------------------------------------------------------------------- +# Name: mapper_gtopo30.py +# Purpose: Mapping for the global 30 arc-second elevation +# +# Author: Morten Wergeland Hansen +# Modified: Morten Wergeland Hansen +# +# Created: 04.06.2015 +# Last modified:08.06.2015 10:27 +# Copyright: (c) NERSC +# Licence: This file is part of NANSAT. You can redistribute it or modify +# under the terms of GNU General Public License, v.3 +# http://www.gnu.org/licenses/gpl-3.0.html +#------------------------------------------------------------------------------- +import os.path + +from nansat.vrt import VRT +from nansat.tools import WrongMapperError + +class Mapper(VRT): + def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): + ''' + Mapping for the global 30 arc-second elevation (see + https://lta.cr.usgs.gov/GTOPO30). + + Parameters: + ----------- + fileName : string + Either the name of a gtopo30 DEM file, or /gtopo30.vrt. The + latter is an aggregation of the DEM-files available with gtopo30 + except the Antarctic one, which is in polarstereographic + projection. 
You can create your own gtopo30.vrt file with gdal: + > gdalbuildvrt gtopo30.vrt [E,W]*.DEM + ''' + + bn = os.path.basename(fileName) + if not bn=='gtopo30.vrt' and not os.path.splitext(bn)[1]=='.DEM': + raise WrongMapperError + + metaDict = [{'src': {'SourceFilename': fileName, 'SourceBand': 1}, + 'dst': {'wkv': 'height_above_reference_ellipsoid'}}] + + # create empty VRT dataset with geolocation only + VRT.__init__(self, gdalDataset) + + # add bands with metadata and corresponding values to the empty VRT + self._create_bands(metaDict) diff --git a/nansat/mappers/mapper_hirlam.py b/nansat/mappers/mapper_hirlam.py old mode 100755 new mode 100644 index 074e64cb5..576b07427 --- a/nansat/mappers/mapper_hirlam.py +++ b/nansat/mappers/mapper_hirlam.py @@ -6,12 +6,15 @@ # under the terms of GNU General Public License, v.3 # http://www.gnu.org/licenses/gpl-3.0.html import datetime +import json import numpy from nansat.vrt import VRT from nansat.tools import WrongMapperError +from nerscmetadata import gcmd_keywords + class Mapper(VRT): ''' VRT with mapping of WKV for HIRLAM ''' @@ -73,10 +76,20 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): # Create bands self._create_bands(metaDict) + # set source, start_date, stop_date + self.dataset.SetMetadataItem('source', 'HIRLAM') + # Adding valid time from the GRIB file to dataset - band = gdalDataset.GetRasterBand(2) - validTime = band.GetMetadata()['GRIB_VALID_TIME'] - self._set_time(datetime.datetime. 
- utcfromtimestamp(int(validTime.strip().split(' ')[0]))) + start_date = gdalDataset.GetRasterBand(1).GetMetadata()['GRIB_VALID_TIME'] + self.dataset.SetMetadataItem('time_coverage_start', + datetime.datetime.utcfromtimestamp( + int(start_date.strip().split(' ')[0])).isoformat() + '+00:00') + + stop_date = gdalDataset.GetRasterBand(gdalDataset.RasterCount).GetMetadata()['GRIB_VALID_TIME'] + self.dataset.SetMetadataItem('time_coverage_end', + datetime.datetime.utcfromtimestamp( + int(stop_date.strip().split(' ')[0])).isoformat() + '+00:00') - return + mm = gcmd_keywords.get_instrument('computer') + self.dataset.SetMetadataItem('instrument', json.dumps(mm)) + self.dataset.SetMetadataItem('platform', 'HIgh Resolution Limited Area Model') diff --git a/nansat/mappers/mapper_hirlam_wind_netcdf.py b/nansat/mappers/mapper_hirlam_wind_netcdf.py index 64908f32b..5b714fdfb 100644 --- a/nansat/mappers/mapper_hirlam_wind_netcdf.py +++ b/nansat/mappers/mapper_hirlam_wind_netcdf.py @@ -91,8 +91,8 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, logLevel=30, # to the empty VRT self._create_bands(metaDict) - # Add time + # Add valid time validTime = datetime.datetime.utcfromtimestamp( int(subDataset.GetRasterBand(1). GetMetadata()['NETCDF_DIM_time'])) - self._set_time(validTime) + self.dataset.SetMetadataItem('time_coverage_start', validTime.isoformat()) diff --git a/nansat/mappers/mapper_kmss.py b/nansat/mappers/mapper_kmss.py old mode 100755 new mode 100644 diff --git a/nansat/mappers/mapper_landsat.py b/nansat/mappers/mapper_landsat.py index cb72584df..b8527fa32 100644 --- a/nansat/mappers/mapper_landsat.py +++ b/nansat/mappers/mapper_landsat.py @@ -1,75 +1,160 @@ -# Name: mapper_landsat -# Purpose: Mapping for LANDSAT.tar.gz +# Name: mapper_landsat +# Purpose: Mapping for LANDSAT*.tar.gz # Authors: Anton Korosov # Licence: This file is part of NANSAT. 
You can redistribute it or modify # under the terms of GNU General Public License, v.3 # http://www.gnu.org/licenses/gpl-3.0.html +import os +import glob import tarfile import warnings +import datetime +import json -from nansat.tools import WrongMapperError -from nansat.tools import gdal, ogr -from nansat.vrt import VRT -from nansat.node import Node +from nerscmetadata import gcmd_keywords +from nansat.tools import WrongMapperError, parse_time +from nansat.tools import gdal, np +from nansat.vrt import VRT class Mapper(VRT): - ''' Mapper for LANDSAT3,4,5,6,7,8.tar.gz files''' - - def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): - ''' Create LANDSAT VRT ''' - # try to open .tar or .tar.gz or .tgz file with tar - try: - tarFile = tarfile.open(fileName) - except: + ''' Mapper for LANDSAT5,6,7,8 .tar.gz or tif files''' + + def __init__(self, fileName, gdalDataset, gdalMetadata, + resolution='low', **kwargs): + ''' Create LANDSAT VRT from multiple tif files or single tar.gz file''' + mtlFileName = '' + bandFileNames = [] + bandSizes = [] + bandDatasets = [] + fname = os.path.split(fileName)[1] + + if (fileName.endswith('.tar') or + fileName.endswith('.tar.gz') or + fileName.endswith('.tgz')): + # try to open .tar or .tar.gz or .tgz file with tar + try: + tarFile = tarfile.open(fileName) + except: + raise WrongMapperError + + # collect names of bands and corresponding sizes + # into bandsInfo dict and bandSizes list + tarNames = sorted(tarFile.getnames()) + for tarName in tarNames: + # check if TIF files inside TAR qualify + if (tarName[0] in ['L', 'M'] and + os.path.splitext(tarName)[1] in ['.TIF', '.tif']): + # open TIF file from TAR using VSI + sourceFilename = '/vsitar/%s/%s' % (fileName, tarName) + gdalDatasetTmp = gdal.Open(sourceFilename) + # keep name, GDALDataset and size + bandFileNames.append(sourceFilename) + bandSizes.append(gdalDatasetTmp.RasterXSize) + bandDatasets.append(gdalDatasetTmp) + elif (tarName.endswith('MTL.txt') or + 
tarName.endswith('MTL.TXT')): + # get mtl file + mtlFileName = tarName + + elif ((fname.startswith('L') or fname.startswith('M')) and + (fname.endswith('.tif') or + fname.endswith('.TIF') or + fname.endswith('._MTL.txt'))): + + # try to find TIF/tif files with the same name as input file + path, coreName = os.path.split(fileName) + coreName = os.path.splitext(coreName)[0].split('_')[0] + coreNameMask = coreName+'*[tT][iI][fF]' + tifNames = sorted(glob.glob(os.path.join(path, coreNameMask))) + for tifName in tifNames: + sourceFilename = tifName + gdalDatasetTmp = gdal.Open(sourceFilename) + # keep name, GDALDataset and size + bandFileNames.append(sourceFilename) + bandSizes.append(gdalDatasetTmp.RasterXSize) + bandDatasets.append(gdalDatasetTmp) + + # get mtl file + mtlFiles = glob.glob(coreName+'*[mM][tT][lL].[tT][xX][tT]') + if len(mtlFiles) > 0: + mtlFileName = mtlFiles[0] + else: + raise WrongMapperError + + # if not TIF files found - not appropriate mapper + if not bandFileNames: raise WrongMapperError - tarNames = tarFile.getnames() - #print tarNames + # get appropriate band size based on number of unique size and + # required resoltuion + if resolution == 'low': + bandXSise = min(bandSizes) + elif resolution in ['high', 'hi']: + bandXSise = max(bandSizes) + else: + raise OptionError('Wrong resolution %s for file %s' % (resolution, fileName)) + + # find bands with appropriate size and put to metaDict metaDict = [] - for tarName in tarNames: - if ((tarName[0] == 'L' or tarName[0] == 'M') and - (tarName[-4:] == '.TIF' or tarName[-4:] == '.tif')): - #print tarName - bandNo = tarName[-6:-4] + for bandFileName, bandSize, bandDataset in zip(bandFileNames, + bandSizes, + bandDatasets): + if bandSize == bandXSise: + # let last part of file name be suffix + bandSuffix = os.path.splitext(bandFileName)[0].split('_')[-1] + metaDict.append({ - 'src': {'SourceFilename': '/vsitar/%s/%s' % (fileName, - tarName), - 'SourceBand': 1}, + 'src': {'SourceFilename': bandFileName, + 
'SourceBand': 1, + 'ScaleRatio': 0.1}, 'dst': {'wkv': 'toa_outgoing_spectral_radiance', - 'suffix': bandNo}}) - - if not metaDict: - raise WrongMapperError - - #print metaDict - sizeDiffBands = [] - for iFile in range(len(metaDict)): - tmpName = metaDict[iFile]['src']['SourceFilename'] - gdalDatasetTmp = gdal.Open(tmpName) - if iFile == 0: - gdalDatasetTmp0 = gdalDatasetTmp - xSize = gdalDatasetTmp.RasterXSize - ySize = gdalDatasetTmp.RasterYSize - elif (xSize != gdalDatasetTmp.RasterXSize or - ySize != gdalDatasetTmp.RasterYSize): - sizeDiffBands.append(iFile) + 'suffix': bandSuffix}}) + gdalDataset4Use = bandDataset # create empty VRT dataset with geolocation only - VRT.__init__(self, gdalDatasetTmp0) + VRT.__init__(self, gdalDataset4Use) # add bands with metadata and corresponding values to the empty VRT self._create_bands(metaDict) - # 8th band of LANDSAT8 is a double size band. - # Reduce the size to same as the 1st band. - if len(sizeDiffBands) != 0: - vrtXML = self.read_xml() - node0 = Node.create(vrtXML) - for iBand in sizeDiffBands: - iBandNode = node0.nodeList('VRTRasterBand')[iBand] - iNodeDstRect = iBandNode.node('DstRect') - iNodeDstRect.replaceAttribute('xSize', str(xSize)) - iNodeDstRect.replaceAttribute('ySize', str(ySize)) - - self.write_xml(node0.rawxml()) + if len(mtlFileName) > 0: + mtlFileName = os.path.join(os.path.split(bandFileNames[0])[0], + mtlFileName) + mtlFileLines = [line.strip() for line in + self.read_xml(mtlFileName).split('\n')] + dateString = [line.split('=')[1].strip() + for line in mtlFileLines + if ('DATE_ACQUIRED' in line or + 'ACQUISITION_DATE' in line)][0] + timeStr = [line.split('=')[1].strip() + for line in mtlFileLines + if ('SCENE_CENTER_TIME' in line or + 'SCENE_CENTER_SCAN_TIME' in line)][0] + time_start = parse_time(dateString + 'T' + timeStr).isoformat() + time_end = (parse_time(dateString + 'T' + timeStr) + + datetime.timedelta(microseconds=60000000)).isoformat() + + 
self.dataset.SetMetadataItem('time_coverage_start', time_start) + self.dataset.SetMetadataItem('time_coverage_end', time_end) + + # set platform + platform = 'LANDSAT' + if fname[2].isdigit(): + platform += '-'+fname[2] + ee = gcmd_keywords.get_platform(platform) + self.dataset.SetMetadataItem('platform', json.dumps(ee)) + + # set instrument + instrument = { + 'LANDSAT' : 'MSS', + 'LANDSAT-1' : 'MSS', + 'LANDSAT-2' : 'MSS', + 'LANDSAT-3' : 'MSS', + 'LANDSAT-4' : 'TM', + 'LANDSAT-5' : 'TM', + 'LANDSAT-7' : 'ETM+', + 'LANDSAT-8' : 'OLI'}[platform] + ee = gcmd_keywords.get_instrument(instrument) + self.dataset.SetMetadataItem('instrument', json.dumps(ee)) + diff --git a/nansat/mappers/mapper_landsat_highresolution.py b/nansat/mappers/mapper_landsat_highresolution.py deleted file mode 100644 index 637e2b2bb..000000000 --- a/nansat/mappers/mapper_landsat_highresolution.py +++ /dev/null @@ -1,94 +0,0 @@ -# Name: mapper_landsat -# Purpose: Mapping for the highest resolution band of LANDSAT8.tar.gz -# Authors: Anton Korosov, Asuka Yamakawa -# Licence: This file is part of NANSAT. 
You can redistribute it or modify -# under the terms of GNU General Public License, v.3 -# http://www.gnu.org/licenses/gpl-3.0.html - - -import tarfile -import warnings - -from nansat.tools import WrongMapperError -from nansat.tools import gdal, ogr -from nansat.vrt import VRT -from nansat.node import Node - -class Mapper(VRT): - ''' Mapper for high resolution band of LANDSAT8.tar.gz files''' - - def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): - ''' Create LANDSAT VRT ''' - # try to open .tar or .tar.gz or .tgz file with tar - try: - tarFile = tarfile.open(fileName) - except: - raise WrongMapperError - - tarNames = tarFile.getnames() - metaDictAll = [] - for tarName in tarNames: - if ((tarName[0] == 'L' or tarName[0] == 'M') and - (tarName[-4:] == '.TIF' or tarName[-4:] == '.tif')): - # crate metadataDict for all mappers - bandNo = tarName[-6:-4] - metaDictAll.append({ - 'src': {'SourceFilename': '/vsitar/%s/%s' % (fileName, - tarName), - 'SourceBand': 1}, - 'dst': {'wkv': 'toa_outgoing_spectral_radiance', - 'suffix': bandNo}}) - - if not metaDictAll: - raise WrongMapperError - - # copy metadataDict which has the highest resolution. 
- for iFile in range(len(metaDictAll)): - tmpName = metaDictAll[iFile]['src']['SourceFilename'] - gdalDatasetTmp = gdal.Open(tmpName) - # set an initial size - if iFile == 0: - gdalDatasetTmp0 = gdalDatasetTmp - xSize0 = gdalDatasetTmp.RasterXSize - ySize0 = gdalDatasetTmp.RasterYSize - xSize, ySize = xSize0, ySize0 - metaDict = [metaDictAll[0]] - ratio = 1.0 - # if size of gdalDatasetTmp is larger than current size, replace - if (xSize < gdalDatasetTmp.RasterXSize and - ySize < gdalDatasetTmp.RasterYSize): - ratio = float(xSize0) / float(gdalDatasetTmp.RasterXSize) - xSize = gdalDatasetTmp.RasterXSize - ySize = gdalDatasetTmp.RasterYSize - metaDict = [metaDictAll[iFile]] - # if size of gdalDatasetTmp is same as the current size, append metaDict - elif (xSize == gdalDatasetTmp.RasterXSize and - ySize == gdalDatasetTmp.RasterYSize): - metaDict.append(metaDictAll[iFile]) - - # modify geoTarnsform for the highest resplution - geoTransform = list(gdalDatasetTmp.GetGeoTransform()) - geoTransform[1] = float(geoTransform[1]) * ratio - geoTransform[5] = float(geoTransform[5]) * ratio - - # create empty VRT dataset with geolocation only - VRT.__init__(self, gdalDatasetTmp0) - - # add bands with metadata and corresponding values to the empty VRT - self._create_bands(metaDict) - - # 8th band of LANDSAT8 is a double size band. - # Reduce the size to same as the 1st band. 
- vrtXML = self.read_xml() - node0 = Node.create(vrtXML) - node0.replaceAttribute('rasterXSize', str(xSize)) - node0.replaceAttribute('rasterYSize', str(ySize)) - self.write_xml(str(node0.rawxml())) - - # set new goeTransform - if ratio != 1.0: - self.dataset.SetGeoTransform(tuple(geoTransform)) - - - - diff --git a/nansat/mappers/mapper_meris_l1.py b/nansat/mappers/mapper_meris_l1.py old mode 100755 new mode 100644 index 1c57ef3b3..b63a2fa67 --- a/nansat/mappers/mapper_meris_l1.py +++ b/nansat/mappers/mapper_meris_l1.py @@ -6,11 +6,13 @@ # http://www.gnu.org/licenses/gpl-3.0.html from pytz import UTC from dateutil.parser import parse +import json from nansat.vrt import VRT from nansat.tools import WrongMapperError from envisat import Envisat +from nerscmetadata import gcmd_keywords class Mapper(VRT, Envisat): ''' VRT with mapping of WKV for MERIS Level 1 (FR or RR) ''' @@ -138,20 +140,16 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, # set time self._set_envisat_time(gdalMetadata) - # set SADCAT specific metadata - self.dataset.SetMetadataItem('start_date', - (parse( - gdalMetadata['SPH_FIRST_LINE_TIME']). - isoformat() - + '+00:00')) - self.dataset.SetMetadataItem('stop_date', - (parse( - gdalMetadata['SPH_LAST_LINE_TIME']). - isoformat() - + '+00:00')) - self.dataset.SetMetadataItem('sensor', 'MERIS') - self.dataset.SetMetadataItem('satellite', 'ENVISAT') - self.dataset.SetMetadataItem('mapper', 'meris_l1') + # Get dictionary describing the instrument and platform according to + # the GCMD keywords + mm = gcmd_keywords.get_instrument('meris') + ee = gcmd_keywords.get_platform('envisat') + + # TODO: Validate that the found instrument and platform are indeed what we + # want.... 
+ + self.dataset.SetMetadataItem('instrument', json.dumps(mm)) + self.dataset.SetMetadataItem('platform', json.dumps(ee)) # add geolocation arrays if geolocation: diff --git a/nansat/mappers/mapper_meris_l2.py b/nansat/mappers/mapper_meris_l2.py old mode 100755 new mode 100644 index 648936f78..761af488e --- a/nansat/mappers/mapper_meris_l2.py +++ b/nansat/mappers/mapper_meris_l2.py @@ -158,9 +158,8 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, if geolocation: self.add_geolocation_from_ads(gdalDataset, zoomSize=zoomSize, step=step) + # set time + self._set_envisat_time(gdalMetadata) - # set SADCAT specific metadata - self.dataset.SetMetadataItem('start_date', parse(gdalMetadata['SPH_FIRST_LINE_TIME']).isoformat()) - self.dataset.SetMetadataItem('stop_date', parse(gdalMetadata['SPH_LAST_LINE_TIME']).isoformat()) self.dataset.SetMetadataItem('sensor', 'MERIS') self.dataset.SetMetadataItem('satellite', 'ENVISAT') diff --git a/nansat/mappers/mapper_metno_hires_seaice.py b/nansat/mappers/mapper_metno_hires_seaice.py old mode 100755 new mode 100644 index ff9015499..b40962b23 --- a/nansat/mappers/mapper_metno_hires_seaice.py +++ b/nansat/mappers/mapper_metno_hires_seaice.py @@ -90,4 +90,5 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): # Set time self.logger.info('Valid time: %s', str(validTime)) - self._set_time(validTime) + self.dataset.SetMetadataItem('time_coverage_start', + validTime.isoformat()) diff --git a/nansat/mappers/mapper_metno_local_hires_seaice.py b/nansat/mappers/mapper_metno_local_hires_seaice.py old mode 100755 new mode 100644 diff --git a/nansat/mappers/mapper_mod44w.py b/nansat/mappers/mapper_mod44w.py index c8541a6ec..b8bd5d215 100644 --- a/nansat/mappers/mapper_mod44w.py +++ b/nansat/mappers/mapper_mod44w.py @@ -5,6 +5,9 @@ # under the terms of GNU General Public License, v.3 # http://www.gnu.org/licenses/gpl-3.0.html import os.path +import json + +from nerscmetadata import gcmd_keywords from nansat.vrt import VRT 
from nansat.tools import WrongMapperError @@ -28,3 +31,8 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): # add bands with metadata and corresponding values to the empty VRT self._create_bands(metaDict) + + mm = gcmd_keywords.get_instrument('MODIS') + ee = gcmd_keywords.get_platform('TERRA') + self.dataset.SetMetadataItem('instrument', json.dumps(mm)) + self.dataset.SetMetadataItem('platform', json.dumps(ee)) diff --git a/nansat/mappers/mapper_modis_l1.py b/nansat/mappers/mapper_modis_l1.py old mode 100755 new mode 100644 index 0ffa899b5..c95bdc8b8 --- a/nansat/mappers/mapper_modis_l1.py +++ b/nansat/mappers/mapper_modis_l1.py @@ -6,12 +6,16 @@ # http://www.gnu.org/licenses/gpl-3.0.html from dateutil.parser import parse import warnings +import json + +from nerscmetadata import gcmd_keywords from nansat.tools import gdal, ogr, WrongMapperError from nansat.vrt import VRT +from hdf4_mapper import HDF4Mapper -class Mapper(VRT): +class Mapper(HDF4Mapper): ''' VRT with mapping of WKV for MODIS Level 1 (QKM, HKM, 1KM) ''' def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): @@ -331,6 +335,28 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): self._create_bands(metaDict) productDate = gdalMetadata["RANGEBEGINNINGDATE"] - productTime = gdalMetadata["RANGEENDINGTIME"] - self._set_time(parse(productDate+' '+productTime)) + productTime = gdalMetadata["RANGEBEGINNINGTIME"] self.remove_geolocationArray() + + # set required metadata + self.dataset.SetMetadataItem('time_coverage_start', + (parse(gdalMetadata["RANGEBEGINNINGDATE"]+ + ' '+gdalMetadata["RANGEBEGINNINGTIME"] + ). + isoformat())) + self.dataset.SetMetadataItem('time_coverage_end', + (parse(gdalMetadata["RANGEENDINGDATE"]+ + ' '+gdalMetadata["RANGEENDINGTIME"] + ). 
+ isoformat())) + + instrumentName = self.find_metadata(gdalMetadata, + 'ASSOCIATEDINSTRUMENTSHORTNAME', + 'MODIS') + platformName = self.find_metadata(gdalMetadata, + 'ASSOCIATEDPLATFORMSHORTNAME', + 'AQUA') + mm = gcmd_keywords.get_instrument(instrumentName) + ee = gcmd_keywords.get_platform(platformName) + self.dataset.SetMetadataItem('instrument', json.dumps(mm)) + self.dataset.SetMetadataItem('platform', json.dumps(ee)) diff --git a/nansat/mappers/mapper_ncep.py b/nansat/mappers/mapper_ncep.py old mode 100755 new mode 100644 index 684384df3..f739c15f1 --- a/nansat/mappers/mapper_ncep.py +++ b/nansat/mappers/mapper_ncep.py @@ -9,6 +9,8 @@ # NB: Band numbers is hardcoded for band subsets extracted at NERSC, # mapper will not work for other NCEP GFS files before made more generic import datetime +import json +from nerscmetadata import gcmd_keywords from nansat.vrt import VRT from nansat.tools import WrongMapperError @@ -89,7 +91,19 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): # Adding valid time from the GRIB file to dataset band = gdalDataset.GetRasterBand(srcBandId['u-component']) validTime = band.GetMetadata()['GRIB_VALID_TIME'] - self._set_time(datetime.datetime. 
- utcfromtimestamp(int(validTime.strip().split(' ')[0]))) - return + self.dataset.SetMetadataItem('time_coverage_start', + (datetime.datetime.utcfromtimestamp( + int(validTime.strip().split(' ')[0])).isoformat())) + + self.dataset.SetMetadataItem('time_coverage_end', + ((datetime.datetime.utcfromtimestamp( + int(validTime.strip().split(' ')[0])) + + datetime.timedelta(hours=3)).isoformat())) + + # Get dictionary describing the instrument and platform according to + # the GCMD keywords + mm = gcmd_keywords.get_instrument('computer') + ee = gcmd_keywords.get_platform('ncep-gfs') + self.dataset.SetMetadataItem('instrument', json.dumps(mm)) + self.dataset.SetMetadataItem('platform', json.dumps(ee)) diff --git a/nansat/mappers/mapper_ncep_wind.py b/nansat/mappers/mapper_ncep_wind.py old mode 100755 new mode 100644 index 4568f40fb..7afe0aa2a --- a/nansat/mappers/mapper_ncep_wind.py +++ b/nansat/mappers/mapper_ncep_wind.py @@ -8,6 +8,8 @@ # # Made for GRIB files downloaded from http://nomads.ncep.noaa.gov/data/gfs4/ import datetime +import json +from nerscmetadata import gcmd_keywords from nansat.vrt import VRT from nansat.tools import WrongMapperError @@ -70,9 +72,21 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): self._create_bands(metaDict) # Adding valid time from the GRIB file to dataset - validTime = gdalDataset.GetRasterBand(1).\ - GetMetadata()['GRIB_VALID_TIME'] - self._set_time(datetime.datetime. 
- utcfromtimestamp(int(validTime.strip().split(' ')[0]))) + validTime = gdalDataset.GetRasterBand(1).GetMetadata()['GRIB_VALID_TIME'] + self.dataset.SetMetadataItem('time_coverage_start', + (datetime.datetime.utcfromtimestamp( + int(validTime.strip().split(' ')[0])).isoformat())) + self.dataset.SetMetadataItem('time_coverage_end', + (datetime.datetime.utcfromtimestamp( + int(validTime.strip().split(' ')[0])).isoformat())) - return + # Get dictionary describing the instrument and platform according to + # the GCMD keywords + mm = gcmd_keywords.get_instrument('computer') + ee = gcmd_keywords.get_platform('ncep-gfs') + + # TODO: Validate that the found instrument and platform are indeed what we + # want.... + + self.dataset.SetMetadataItem('instrument', json.dumps(mm)) + self.dataset.SetMetadataItem('platform', json.dumps(ee)) diff --git a/nansat/mappers/mapper_ncep_wind_online.py b/nansat/mappers/mapper_ncep_wind_online.py old mode 100755 new mode 100644 diff --git a/nansat/mappers/mapper_nora10_local_vpv.py b/nansat/mappers/mapper_nora10_local_vpv.py index 7137188cc..89fe4746a 100644 --- a/nansat/mappers/mapper_nora10_local_vpv.py +++ b/nansat/mappers/mapper_nora10_local_vpv.py @@ -51,15 +51,16 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, logLevel=30, # Read relevant arrays into memory g = gdal.Open('NETCDF:"' + nc_file + '":' + 'windspeed_10m') ws_10m = np.flipud(g.GetRasterBand(1).ReadAsArray()) - g = gdal.Open(nc_file_winddir) + g = gdal.Open('NETCDF:"' + nc_file_winddir + '":' + + 'wind_direction_10m') wd_10m = np.flipud(g.GetRasterBand(1).ReadAsArray()) g = gdal.Open('NETCDF:"' + nc_file + '":' + 'latitude') lat = np.flipud(g.GetRasterBand(1).ReadAsArray()) g = gdal.Open('NETCDF:"' + nc_file + '":' + 'longitude') lon = np.flipud(g.GetRasterBand(1).ReadAsArray()) - u10 = ws_10m*np.cos(np.deg2rad(wd_10m)) - v10 = ws_10m*np.sin(np.deg2rad(wd_10m)) + u10 = -ws_10m*np.sin(np.deg2rad(wd_10m)) + v10 = -ws_10m*np.cos(np.deg2rad(wd_10m)) VRT_u10 = 
VRT(array=u10, lat=lat, lon=lon) VRT_v10 = VRT(array=v10, lat=lat, lon=lon) @@ -98,10 +99,10 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, logLevel=30, {'SourceFilename': self.bandVRTs['v_VRT'].fileName, 'SourceBand': 1, 'DataType': 6}], - 'dst': {'wkv': 'wind_to_direction', + 'dst': {'wkv': 'wind_from_direction', 'name': 'winddir', 'height': '10 m', - 'PixelFunctionType': 'UVToDirectionTo'}}) + 'PixelFunctionType': 'UVToDirectionFrom'}}) # create empty VRT dataset with geolocation only VRT.__init__(self, lat=lat, lon=lon) @@ -111,4 +112,4 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, logLevel=30, self._create_bands(metaDict) # Add time - self._set_time(fileTime) + self.dataset.SetMetadataItem('time_coverage_start', fileTime.isoformat()) diff --git a/nansat/mappers/mapper_obpg_l2.py b/nansat/mappers/mapper_obpg_l2.py index 6606d21ce..7ebb7c882 100644 --- a/nansat/mappers/mapper_obpg_l2.py +++ b/nansat/mappers/mapper_obpg_l2.py @@ -6,13 +6,17 @@ # http://www.gnu.org/licenses/gpl-3.0.html from datetime import datetime, timedelta from math import ceil +from dateutil.parser import parse + +import json +from nerscmetadata import gcmd_keywords from nansat.tools import gdal, ogr, WrongMapperError from nansat.vrt import GeolocationArray, VRT from nansat.nsr import NSR +from nansat.mappers.obpg import OBPGL2BaseClass - -class Mapper(VRT): +class Mapper(OBPGL2BaseClass): ''' Mapper for SeaWIFS/MODIS/MERIS/VIIRS L2 data from OBPG TODO: @@ -29,19 +33,13 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, number of GCPs along each dimention ''' - titles = ['HMODISA Level-2 Data', - 'MODISA Level-2 Data', - 'MERIS Level-2 Data', - 'GOCI Level-2 Data', - 'VIIRSN Level-2 Data'] - # should raise error in case of not obpg_l2 file try: title = gdalMetadata["Title"] except: raise WrongMapperError - if title not in titles: + if title not in self.titles: raise WrongMapperError # get subdataset and parse to VRT.__init__() @@ -107,7 +105,11 @@ def 
__init__(self, fileName, gdalDataset, gdalMetadata, 'latitude': {'src': {}, 'dst': {'wkv': 'latitude'}}, 'longitude': {'src': {}, - 'dst': {'wkv': 'longitude'}} + 'dst': {'wkv': 'longitude'}}, + 'par': {'src': {}, + 'dst': {'wkv': 'downwelling_photosynthetic_photon_radiance_in_sea_water'}}, + 'ipar': {'src': {}, + 'dst': {'wkv': 'instantaneous_downwelling_photosynthetic_photon_radiance_in_sea_water'}}, } # loop through available bands and generate metaDict (non fixed) @@ -167,6 +169,7 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, bandNo += 1 if subBandName == 'Rrs': + rrsSubDataset = subDataset[0] metaEntryRrsw = { 'src': [{ 'SourceFilename': subDataset[0], @@ -196,8 +199,6 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, startMillisec = int(gdalMetadata['Start Millisec']) startDate = datetime(startYear, 1, 1) + timedelta(startDay-1, 0, 0, startMillisec) - self._set_time(startDate) - # skip adding georeference for GOCI if title is 'GOCI Level-2 Data': return @@ -244,6 +245,8 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, dy = .5 gcps = [] k = 0 + center_lon = 0 + center_lat = 0 for i0 in range(0, latitude.shape[0], step0): for i1 in range(0, latitude.shape[1], step1): # create GCP with X,Y,pixel,line from lat/lon matrices @@ -256,7 +259,36 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, k, gcp.GCPPixel, gcp.GCPLine, gcp.GCPX, gcp.GCPY) gcps.append(gcp) + center_lon += gcp.GCPX + center_lat += gcp.GCPY k += 1 + # append GCPs and lat/lon projection to the vsiDataset self.dataset.SetGCPs(gcps, NSR().wkt) + self.remove_geolocationArray() + + # reproject GCPs + center_lon /= k + center_lat /= k + srs = '+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=%f +lat_0=%f +no_defs' % (center_lon, center_lat) + self.reproject_GCPs(srs) + + # use TPS for reprojection + self.tps = True + + # add NansenCloud metadata + self.dataset.SetMetadataItem('time_coverage_start', + (parse( + gdalMetadata['time_coverage_start']). 
+ isoformat())) + self.dataset.SetMetadataItem('time_coverage_end', + (parse( + gdalMetadata['time_coverage_stop']). + isoformat())) + instrument = gdalMetadata['Sensor Name'][1:-1] + platform = {'A': 'AQUA', 'T': 'TERRA'}[gdalMetadata['Sensor Name'][-1]] + mm = gcmd_keywords.get_instrument(instrument) + ee = gcmd_keywords.get_platform(platform) + self.dataset.SetMetadataItem('instrument', json.dumps(mm)) + self.dataset.SetMetadataItem('platform', json.dumps(ee)) diff --git a/nansat/mappers/mapper_obpg_l2_nc.py b/nansat/mappers/mapper_obpg_l2_nc.py new file mode 100644 index 000000000..7d298baf1 --- /dev/null +++ b/nansat/mappers/mapper_obpg_l2_nc.py @@ -0,0 +1,155 @@ +# Name: mapper_obpg_l2 +# Purpose: Mapping for L2 data from the OBPG web-site +# Authors: Anton Korosov +# Licence: This file is part of NANSAT. You can redistribute it or modify +# under the terms of GNU General Public License, v.3 +# http://www.gnu.org/licenses/gpl-3.0.html +import os +from dateutil.parser import parse +import json + +import numpy as np + +from nerscmetadata import gcmd_keywords + +from nansat.tools import gdal, ogr, WrongMapperError +from nansat.vrt import GeolocationArray, VRT +from nansat.nsr import NSR +from nansat.mappers.obpg import OBPGL2BaseClass + +class Mapper(OBPGL2BaseClass): + ''' Mapper for SeaWIFS/MODIS/MERIS/VIIRS L2 data from OBPG in NC4 format + ''' + + def __init__(self, fileName, gdalDataset, gdalMetadata, + GCP_COUNT=10, **kwargs): + ''' Create VRT + Parameters + ---------- + GCP_COUNT : int + number of GCPs along each dimention + ''' + + # extension must be .nc + if os.path.splitext(fileName)[1] != '.nc': + raise WrongMapperError + + # file must contain navigation_data/longitude + try: + ds = gdal.Open('HDF5:"%s"://navigation_data/longitude' % fileName) + except RuntimeError: + raise WrongMapperError + else: + dsMetadata = ds.GetMetadata() + + # title value must be known + if dsMetadata.get('title', '') not in self.titles: + raise WrongMapperError + + # get 
geophysical data variables + subDatasets = gdal.Open(fileName).GetSubDatasets() + metaDict = [] + for subDataset in subDatasets: + groupName = subDataset[0].split('/')[-2] + if groupName not in ['geophysical_data', 'navigation_data']: + continue + varName = subDataset[0].split('/')[-1] + subds = gdal.Open(subDataset[0]) + b = subds.GetRasterBand(1) + bMetadata = b.GetMetadata() + + # set SRC/DST parameters + metaEntry = {'src': {'SourceFilename': subDataset[0], + 'sourceBand': 1, + 'DataType': b.DataType}, + 'dst': {'name': varName}} + + # replace datatype for l2_flags + if varName == 'l2_flags': + metaEntry['src']['DataType'] = 4 + metaEntry['src']['SourceType'] = 'SimpleSource' + + # set scale if exist + metaKey = '%s_%s_scale_factor' % (groupName, varName) + if metaKey in bMetadata: + metaEntry['src']['ScaleRatio'] = bMetadata[metaKey] + + # set offset if exist + metaKey = '%s_%s_add_offset' % (groupName, varName) + if metaKey in bMetadata: + metaEntry['src']['ScaleOffset'] = bMetadata[metaKey] + + # set standard_name if exists + metaKey = '%s_%s_standard_name' % (groupName, varName) + if metaKey in bMetadata: + metaEntry['dst']['wkv'] = bMetadata[metaKey] + + # set other metadata + for metaKey in bMetadata: + newMetaKey = metaKey.replace('%s_%s_' % (groupName, varName), '') + if newMetaKey not in ['scale_factor', 'add_offset', 'DIMENSION_LIST', '_FillValue']: + metaEntry['dst'][newMetaKey] = bMetadata[metaKey] + metaDict.append(metaEntry) + + # make GCPs + # get lat/lon grids + longitude = gdal.Open('HDF5:"%s"://navigation_data/longitude' % fileName).ReadAsArray() + latitude = gdal.Open('HDF5:"%s"://navigation_data/latitude' % fileName).ReadAsArray() + rasterYSize, rasterXSize = longitude.shape + + step0 = max(1, int(float(latitude.shape[0]) / GCP_COUNT)) + step1 = max(1, int(float(latitude.shape[1]) / GCP_COUNT)) + + gcps = [] + k = 0 + center_lon = 0 + center_lat = 0 + for i0 in range(0, latitude.shape[0], step0): + for i1 in range(0, latitude.shape[1], 
step1): + # create GCP with X,Y,pixel,line from lat/lon matrices + lon = float(longitude[i0, i1]) + lat = float(latitude[i0, i1]) + + if (lon >= -180 and lon <= 180 and lat >= -90 and lat <= 90): + gcp = gdal.GCP(lon, lat, 0, i1+0.5, i0+0.5) + gcps.append(gcp) + center_lon += lon + center_lat += lat + k += 1 + + time_coverage_start = dsMetadata['time_coverage_start'] + time_coverage_end = dsMetadata['time_coverage_end'] + + # create VRT + VRT.__init__(self, srcProjection=NSR().wkt, + srcGCPs=gcps, + srcGCPProjection=NSR().wkt, + srcRasterXSize=rasterXSize, + srcRasterYSize=rasterYSize) + # add bands + self._create_bands(metaDict) + + # reproject GCPs + center_lon /= k + center_lat /= k + srs = '+proj=stere +datum=WGS84 +ellps=WGS84 +lon_0=%f +lat_0=%f +no_defs' % (center_lon, center_lat) + self.reproject_GCPs(srs) + + ### BAD, BAd, bad ... + self.dataset.SetProjection(self.dataset.GetGCPProjection()) + + # use TPS for reprojection + self.tps = True + + # add NansenCloud metadata + self.dataset.SetMetadataItem('time_coverage_start', + str(time_coverage_start)) + self.dataset.SetMetadataItem('time_coverage_end', + str(time_coverage_end)) + self.dataset.SetMetadataItem('source_type', 'Satellite') + self.dataset.SetMetadataItem('mapper', 'obpg_l2_nc') + + mm = gcmd_keywords.get_instrument(dsMetadata.get('instrument')) + ee = gcmd_keywords.get_platform(dsMetadata.get('platform')) + self.dataset.SetMetadataItem('instrument', json.dumps(mm)) + self.dataset.SetMetadataItem('platform', json.dumps(ee)) diff --git a/nansat/mappers/mapper_obpg_l3.py b/nansat/mappers/mapper_obpg_l3.py index b03630403..8fa4f46b3 100644 --- a/nansat/mappers/mapper_obpg_l3.py +++ b/nansat/mappers/mapper_obpg_l3.py @@ -28,7 +28,8 @@ class Mapper(VRT): 'Instantaneous Photosynthetically Available Radiation': 'instantaneous_photosynthetically_available_radiation', 'Particle backscatter at 443 nm': 'volume_backscattering_coefficient_of_radiative_flux_in_sea_water_due_to_suspended_particles', 
'Chlorophyll a concentration, Garver-Siegel-Maritorena Model': 'mass_concentration_of_chlorophyll_a_in_sea_water', - 'Photosynthetically Available Radiation': 'photosynthetically_available_radiation', + 'Photosynthetically Available Radiation': 'downwelling_photosynthetic_photon_radiance_in_sea_water', + 'Instantaneous Photosynthetically Available Radiation': 'instantaneous_downwelling_photosynthetic_photon_radiance_in_sea_water', } def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): @@ -164,5 +165,6 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): startDay = int(simGdalMetadata.get('Start Day', simGdalMetadata. get('Start)Day', 1))) - self._set_time(datetime.datetime(startYear, 1, 1) + - datetime.timedelta(startDay)) + self.dataset.SetMetadataItem('time_coverage_start', + (datetime.datetime(startYear, 1, 1) + + datetime.timedelta(startDay)).isoformat()) diff --git a/nansat/mappers/mapper_occci_online.py b/nansat/mappers/mapper_occci_online.py new file mode 100644 index 000000000..9e593b315 --- /dev/null +++ b/nansat/mappers/mapper_occci_online.py @@ -0,0 +1,232 @@ +# Name: mapper_ncep_wind_online.py +# Purpose: Nansat mapping for OC CCI data, stored online in THREDDS +# Author: Anton Korosov +# Licence: This file is part of NANSAT. You can redistribute it or modify +# under the terms of GNU General Public License, v.3 +# http://www.gnu.org/licenses/gpl-3.0.html +# +# Usage: +# w = Nansat('occci_online:1D:chlor_a:2010-01-01') + +import os +import datetime +from dateutil.parser import parse +from time import sleep as time_sleep + +import numpy as np + +try: + from netCDF4 import Dataset +except ImportError: + raise ImportError(''' + Cannot import Dataset from netCDF4. 
+ You cannot access OC CCI data but + Nansat will work.''') + +from nansat.nsr import NSR +from nansat.vrt import VRT +from nansat.tools import gdal, WrongMapperError, OptionError + +class Mapper(VRT, object): + ''' VRT with mapping of WKV for NCEP GFS ''' + + OC_CCI_URLS = { + '1d': 'https://rsg.pml.ac.uk/thredds/dodsC/CCI_ALL-v2.0-DAILY', + '1D': 'https://rsg.pml.ac.uk/thredds/dodsC/CCI_ALL-v2.0-DAILY', + '5d': 'https://rsg.pml.ac.uk/thredds/dodsC/CCI_ALL-v2.0-5DAY', + '5D': 'https://rsg.pml.ac.uk/thredds/dodsC/CCI_ALL-v2.0-5DAY', + '8d': 'https://rsg.pml.ac.uk/thredds/dodsC/CCI_ALL-v2.0-8DAY', + '8D': 'https://rsg.pml.ac.uk/thredds/dodsC/CCI_ALL-v2.0-8DAY', + '1m': 'https://rsg.pml.ac.uk/thredds/dodsC/CCI_ALL-v2.0-MONTHLY', + '1M': 'https://rsg.pml.ac.uk/thredds/dodsC/CCI_ALL-v2.0-MONTHLY', + } + + def __init__(self, fileName, gdalDataset, gdalMetadata, + cache='', lons=None, lats=None, **kwargs): + ''' Create NCEP VRT + Parameters: + fileName : str + occci_online:1D:chlor_a:2010-01-01 + occci_online:8D:kd_490:12/22/2014 + cache : str or bool + if str - name of the cahcing directory + If False or None - no caching + lon : list + minimum and maimum values of longitude + lat : list + minimum and maimum values of latitude + ''' + + + keywordBase = 'occci_online' + if not fileName.startswith(keywordBase): + raise WrongMapperError + + # create caching directory + if cache == '': + cache = os.path.curdir + if cache and not os.path.exists(cache): + os.mkdir(cache) + + ### Get temporal resolution + timeStep = fileName.split(':')[1] + # get dataset URL. 
If resolution doesn't match, get monthly dataset URL + dsURL = self.OC_CCI_URLS.get(timeStep, self.OC_CCI_URLS['1M']) + + ### Get OC CCI product name + prodName = fileName.split(':')[2] + + ### Get Date from input fileName + iDate = parse(fileName.split(':')[3]) + + # get lon, lat, time dimensions from the OC CCI Dataset + self.lon, self.lat, self.time = self.get_lon_lat_time(cache, dsURL) + + # Get actual cci date and number of layer in the dataset + self.date, self.layer = self.get_date_layer(iDate) + + # get rows and cols that contain predefined spatial domain + self.rows, self.cols, lons, lats, geoTransform = self.get_rows_cols(lons, lats) + + # create VRT with correct lon/lat (geotransform) + VRT.__init__(self, srcProjection=NSR().wkt, + srcRasterXSize=len(self.cols), + srcRasterYSize=len(self.rows), + srcGeoTransform=geoTransform) + + # Get SourceFilename either from memory array or from cached file + sourceFilename = self.get_sourcefilename(cache, dsURL, timeStep, prodName, lons, lats) + + metaDict = [{'src': { + 'SourceFilename': sourceFilename, + 'SourceBand': 1, + }, + 'dst': { + 'name': prodName, + } + }] + + self._create_bands(metaDict) + + # set time + self.dataset.SetMetadataItem('time_coverage_start', self.date.isoformat()) + + def get_sourcefilename(self, cache, dsURL, timeStep, prodName, lons, lats): + ''' Get SourceFilename either from memory array or from cached file ''' + print 'Get ', timeStep, prodName + # try to find cached layer + if cache: + layerFilename = os.path.join(cache, + '%s_%s_%s_%s_%+04d%+04d%+04d%+04d.tif' % ( + os.path.split(dsURL)[1], + timeStep, prodName, self.date.strftime('%Y%m%d'), + min(lons), max(lons), + min(lats), max(lats))) + print 'from ', layerFilename, '...' 
+ if os.path.exists(layerFilename): + print 'from ', layerFilename + return layerFilename + + print 'from THREDDS' + ### Continue without pre-cached file + # get product array from remote dataset + ds = Dataset(dsURL) + prodArray = ds.variables[prodName][self.layer, + min(self.rows):max(self.rows)+1, + min(self.cols):max(self.cols)+1] + # if it is a masked array + if hasattr(prodArray, 'mask'): + prodArray.data[prodArray.mask] = np.nan + prodArray = prodArray.data + # create VRT and add to self.bandVRTs + vrt = VRT(array=prodArray, srcProjection=NSR().wkt, + srcRasterXSize=self.dataset.RasterXSize, + srcRasterYSize=self.dataset.RasterYSize, + srcGeoTransform=self.dataset.GetGeoTransform()) + sourceFilename = vrt.fileName + self.bandVRTs[os.path.split(sourceFilename)[1]] = vrt + if cache: + gdal.GetDriverByName('GTiff').CreateCopy(layerFilename, vrt.dataset) + + return sourceFilename + + def timecci2time(self, timeCCI, dsURL): + '''' Convert time from CCI units to internal calendar ''' + if 'CCI_ALL-v1.0-8DAY' in dsURL: + time = np.zeros(timeCCI.shape[0]) + for i, t1 in enumerate(timeCCI): + dt = parse(''.join(t1).replace('Z','')) + time[i] = (dt - datetime.datetime(1970, 1, 1)).days + else: + time = timeCCI + + return time + + def get_lon_lat_time(self, cache, dsURL): + ### Get TIME, LAT, LON + # first try from cache + print 'Get lon, lat, time' + lon, lat, time = None, None, None + if cache: + gridFile = os.path.join(cache, os.path.split(dsURL)[1]+'_grid.npz') + if os.path.exists(gridFile): + try: + lon = np.load(gridFile)['lon'] + lat = np.load(gridFile)['lat'] + time = np.load(gridFile)['time'] + except: + time_sleep(0.5) + lon = np.load(gridFile)['lon'] + lat = np.load(gridFile)['lat'] + time = np.load(gridFile)['time'] + + # if cache does not exist try to fetch from remote dataset + if lon is None: + ds = Dataset(dsURL) + lat = ds.variables['lat'][:] + lon = ds.variables['lon'][:] + timeCCI = ds.variables['time'][:] + time = self.timecci2time(timeCCI, dsURL) + 
+ # cache grid specs + if cache: + np.savez_compressed(gridFile, lon=lon, lat=lat, time=time) + + return lon, lat, time + + def get_rows_cols(self, lons, lats): + ''' Get rows and cols, estimate actual min/max of lat/lon''' + + ### Get min/max lon/lat + if lons is None: + lons = [-180, 180] + if type(lons) in [int, float]: + lons = [lons] + if lats is None: + lats = [-90, 90] + if type(lats) in [int, float]: + lats = [lats] + + rows = np.nonzero((self.lat >= min(lats)) * (self.lat <= max(lats)))[0] + cols = np.nonzero((self.lon >= min(lons)) * (self.lon <= max(lons)))[0] + + lons = [min(self.lon[cols]), max(self.lon[cols])] + lats = [min(self.lat[rows]), max(self.lat[rows])] + + geoTransform = (self.lon[cols][0], (self.lon[cols][-1] - self.lon[cols][0]) / len(cols), 0, + self.lat[rows][0], 0, (self.lat[rows][-1] - self.lat[rows][0]) / len(rows)) + + return rows, cols, lons, lats, geoTransform + + def get_date_layer(self, iDate): + ''' Get actual cci date and number of layer in the dataset ''' + iDateCCI = (iDate - datetime.datetime(1970,1,1)).days + + # find number of the layer to fecth data from + timeDiff = np.abs(self.time - iDateCCI) + if timeDiff.min() > 31: + raise OptionError('Date is outside OC CCI range') + cciLayer = np.argmin(timeDiff) + cciDate = datetime.datetime(1970,1,1) + datetime.timedelta(int(self.time[cciLayer])) + + return cciDate, cciLayer diff --git a/nansat/mappers/mapper_ocean_productivity.py b/nansat/mappers/mapper_ocean_productivity.py old mode 100755 new mode 100644 index ba6a4bab0..99dea49fb --- a/nansat/mappers/mapper_ocean_productivity.py +++ b/nansat/mappers/mapper_ocean_productivity.py @@ -22,13 +22,14 @@ class Mapper(VRT): # detect wkv from metadata 'Parameter' param2wkv = {'chl': 'mass_concentration_of_chlorophyll_a_in_sea_water', 'sst': 'sea_surface_temperature', - 'par': 'instantaneous_photosynthetically_available_radiation', + 'par': 'downwelling_photosynthetic_photon_radiance_in_sea_water', + 'ipar': 
'instantaneous_downwelling_photosynthetic_photon_radiance_in_sea_water', 'bbp': 'particle_backscatter_at_443_nm' } bandNames = {'mass_concentration_of_chlorophyll_a_in_sea_water': 'algal_1', 'sea_surface_temperature': 'SST', - 'instantaneous_photosynthetically_available_radiation': 'par', + 'instantaneous_downwelling_photosynthetic_photon_radiance_in_sea_water': 'ipar', 'particle_backscatter_at_443_nm': 'bbp_443' } @@ -119,5 +120,6 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): # Add valid time startYear = int(iFile[4:8]) startDay = int(iFile[8:11]) - self._set_time(datetime.datetime(startYear, 1, 1) + - datetime.timedelta(startDay)) + self.dataset.SetMetadataItem('time_coverage_start', + (datetime.datetime(startYear, 1, 1) + + datetime.timedelta(startDay)).isoformat()) diff --git a/nansat/mappers/mapper_opendap.py b/nansat/mappers/mapper_opendap.py index 214d4bf90..fa88b1875 100644 --- a/nansat/mappers/mapper_opendap.py +++ b/nansat/mappers/mapper_opendap.py @@ -81,7 +81,7 @@ def get_proj4_from_ncvar(self, var): def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): ''' Create VRT from OpenDAP dataset''' # quit if file is not online - if fileName[:7] != 'http://': + if fileName[:7] not in ['http://', 'https:/']: raise WrongMapperError # open file through OpenDAP using netCDF4 library @@ -244,7 +244,13 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): # put band metadata for attr in attrs: - metaEntry['dst'][str(attr)] = str(var.getncattr(attr)) + attrKey = attr.encode('ascii', 'ignore') + attrVal = var.getncattr(attr) + if type(attrVal) in [str, unicode]: + attrVal = attrVal.encode('ascii', 'ignore') + else: + attrVal = str(attrVal) + metaEntry['dst'][attrKey] = attrVal # add wkv if 'standard_name' in attrs: diff --git a/nansat/mappers/mapper_pathfinder52.py b/nansat/mappers/mapper_pathfinder52.py index 80b80ca4a..fde6868e4 100644 --- a/nansat/mappers/mapper_pathfinder52.py +++ 
b/nansat/mappers/mapper_pathfinder52.py @@ -96,4 +96,4 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, minQual=4, startTimeKey = 'start_time' else: startTimeKey = 'NC_GLOBAL#start_time' - self._set_time(vrt.parse(subGDALDataset.GetMetadataItem(startTimeKey))) + self.dataset.SetMetadataItem('time_coverage_start', subGDALDataset.GetMetadataItem(startTimeKey)) diff --git a/nansat/mappers/mapper_radarsat2.py b/nansat/mappers/mapper_radarsat2.py old mode 100755 new mode 100644 index d71ae09bd..45f0fe848 --- a/nansat/mappers/mapper_radarsat2.py +++ b/nansat/mappers/mapper_radarsat2.py @@ -13,6 +13,9 @@ import scipy.ndimage from math import asin +import json +from nerscmetadata import gcmd_keywords + from nansat.vrt import VRT from nansat.domain import Domain from nansat.node import Node @@ -142,16 +145,16 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): print 'Can not decode pass direction: ' + str(passDirection) # Calculate SAR look direction - SAR_look_direction = sat_heading + antennaPointing + look_direction = sat_heading + antennaPointing # Interpolate to regain lost row - SAR_look_direction = np.mod(SAR_look_direction, 360) - SAR_look_direction = scipy.ndimage.interpolation.zoom( - SAR_look_direction, (1, 11./10.)) + look_direction = np.mod(look_direction, 360) + look_direction = scipy.ndimage.interpolation.zoom( + look_direction, (1, 11./10.)) # Decompose, to avoid interpolation errors around 0 <-> 360 - SAR_look_direction_u = np.sin(np.deg2rad(SAR_look_direction)) - SAR_look_direction_v = np.cos(np.deg2rad(SAR_look_direction)) - look_u_VRT = VRT(array=SAR_look_direction_u, lat=lat, lon=lon) - look_v_VRT = VRT(array=SAR_look_direction_v, lat=lat, lon=lon) + look_direction_u = np.sin(np.deg2rad(look_direction)) + look_direction_v = np.cos(np.deg2rad(look_direction)) + look_u_VRT = VRT(array=look_direction_u, lat=lat, lon=lon) + look_v_VRT = VRT(array=look_direction_v, lat=lat, lon=lon) # Note: If incidence angle and look direction 
are stored in # same VRT, access time is about twice as large @@ -174,7 +177,7 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): metaDict.append({'src': {'SourceFilename': lookFileName, 'SourceBand': 1}, 'dst': {'wkv': 'sensor_azimuth_angle', - 'name': 'SAR_look_direction'}}) + 'name': 'look_direction'}}) ############################### # Create bands @@ -233,18 +236,23 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): self.dataset.SetMetadataItem('ORBIT_DIRECTION', str(passDirection).upper()) - # Set time - validTime = gdalDataset.GetMetadata()['ACQUISITION_START_TIME'] - self.logger.info('Valid time: %s', str(validTime)) - self._set_time(parse(validTime)) - - # set SADCAT specific metadata - self.dataset.SetMetadataItem('start_date', + # set valid time + self.dataset.SetMetadataItem('time_coverage_start', (parse(gdalMetadata['FIRST_LINE_TIME']). isoformat())) - self.dataset.SetMetadataItem('stop_date', + self.dataset.SetMetadataItem('time_coverage_end', (parse(gdalMetadata['LAST_LINE_TIME']). isoformat())) - self.dataset.SetMetadataItem('sensor', 'SAR') - self.dataset.SetMetadataItem('satellite', 'Radarsat2') - self.dataset.SetMetadataItem('mapper', 'radarsat2') + + # Get dictionary describing the instrument and platform according to + # the GCMD keywords + mm = gcmd_keywords.get_instrument('sar') + ee = gcmd_keywords.get_platform('radarsat-2') + + # TODO: Validate that the found instrument and platform are indeed what we + # want.... 
+ + self.dataset.SetMetadataItem('instrument', json.dumps(mm)) + self.dataset.SetMetadataItem('platform', json.dumps(ee)) + + self._add_swath_mask_band() diff --git a/nansat/mappers/mapper_s1a_l1.py b/nansat/mappers/mapper_sentinel1a_l1.py similarity index 91% rename from nansat/mappers/mapper_s1a_l1.py rename to nansat/mappers/mapper_sentinel1a_l1.py index 51349a304..12515ff9a 100644 --- a/nansat/mappers/mapper_s1a_l1.py +++ b/nansat/mappers/mapper_sentinel1a_l1.py @@ -6,7 +6,7 @@ # Modified: Morten Wergeland Hansen # # Created: 12.09.2014 -# Last modified:14.10.2014 16:13 +# Last modified:02.07.2015 15:43 # Copyright: (c) NERSC # License: #------------------------------------------------------------------------------ @@ -19,6 +19,9 @@ import scipy from dateutil.parser import parse +import json +from nerscmetadata import gcmd_keywords + from nansat.vrt import VRT from nansat.tools import gdal, WrongMapperError, initial_bearing from nansat.nsr import NSR @@ -138,12 +141,14 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): lon = [] lat = [] inc = [] + ele = [] for gridPoint in geolocationGridPointList.children: X.append(int(gridPoint['pixel'])) Y.append(int(gridPoint['line'])) lon.append(float(gridPoint['longitude'])) lat.append(float(gridPoint['latitude'])) inc.append(float(gridPoint['incidenceAngle'])) + ele.append(float(gridPoint['elevationAngle'])) X = np.unique(X) Y = np.unique(Y) @@ -151,12 +156,18 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): lon = np.array(lon).reshape(len(Y), len(X)) lat = np.array(lat).reshape(len(Y), len(X)) inc = np.array(inc).reshape(len(Y), len(X)) + ele = np.array(ele).reshape(len(Y), len(X)) incVRT = VRT(array=inc, lat=lat, lon=lon) + eleVRT = VRT(array=ele, lat=lat, lon=lon) incVRT = incVRT.get_resized_vrt(self.dataset.RasterXSize, self.dataset.RasterYSize, - eResampleAlg=1) + eResampleAlg=2) + eleVRT = eleVRT.get_resized_vrt(self.dataset.RasterXSize, + self.dataset.RasterYSize, + 
eResampleAlg=2) self.bandVRTs['incVRT'] = incVRT + self.bandVRTs['eleVRT'] = eleVRT for key in calDict.keys(): xml = self.read_xml(calDict[key]) calibration_LUT_VRTs, longitude, latitude = ( @@ -295,7 +306,7 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): } }) - name = 'SAR_look_direction' + name = 'look_direction' bandNumberDict[name] = bnmax+1 bnmax = bandNumberDict[name] metaDict.append({ @@ -351,7 +362,7 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): 'SourceBand': 1 } ], - 'dst': {'wkv': 'radar_brightness_coefficient', + 'dst': {'wkv': 'surface_backwards_brightness_coefficient_of_radar_wave', 'PixelFunctionType': 'Sentinel1Calibration', 'polarization': pol[key], 'suffix': pol[key], @@ -371,6 +382,17 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): self._create_band(src, dst) self.dataset.FlushCache() + # Add elevation angle as band + name = 'elevation_angle' + bandNumberDict[name] = bnmax+1 + bnmax = bandNumberDict[name] + src = {'SourceFilename': self.bandVRTs['eleVRT'].fileName, + 'SourceBand': 1} + dst = {'wkv': 'angle_of_elevation', + 'name': name} + self._create_band(src, dst) + self.dataset.FlushCache() + # Add sigma0_VV pp = [pol[key] for key in pol] if 'VV' not in pp and 'HH' in pp: @@ -403,28 +425,30 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, **kwargs): meta = n.node('metadataSection') for nn in meta.children: if nn.getAttribute('ID') == u'acquisitionPeriod': - self._set_time(parse((nn.node('metadataWrap'). - node('xmlData'). - node('safe:acquisitionPeriod') - ['safe:startTime'])) - ) - # set SADCAT specific metadata + # set valid time self.dataset.SetMetadataItem( - 'start_date', + 'time_coverage_start', parse((nn.node('metadataWrap'). node('xmlData'). node('safe:acquisitionPeriod')['safe:startTime']) ).isoformat()) self.dataset.SetMetadataItem( - 'stop_date', + 'time_coverage_end', parse((nn.node('metadataWrap'). node('xmlData'). 
node('safe:acquisitionPeriod')['safe:stopTime']) ).isoformat()) - self.dataset.SetMetadataItem('sensor', 'SAR') - self.dataset.SetMetadataItem('satellite', 'Sentinel-1') - self.dataset.SetMetadataItem('mapper', 's1a_l1') + # Get dictionary describing the instrument and platform according to + # the GCMD keywords + mm = gcmd_keywords.get_instrument('sar') + ee = gcmd_keywords.get_platform('sentinel-1a') + + # TODO: Validate that the found instrument and platform are indeed what we + # want.... + + self.dataset.SetMetadataItem('instrument', json.dumps(mm)) + self.dataset.SetMetadataItem('platform', json.dumps(ee)) def get_LUT_VRTs(self, XML, vectorListName, LUT_list): n = Node.create(XML) diff --git a/nansat/mappers/mapper_s1a_l2.py b/nansat/mappers/mapper_sentinel1a_l2.py similarity index 98% rename from nansat/mappers/mapper_s1a_l2.py rename to nansat/mappers/mapper_sentinel1a_l2.py index 6c0917f40..aa049275c 100644 --- a/nansat/mappers/mapper_s1a_l2.py +++ b/nansat/mappers/mapper_sentinel1a_l2.py @@ -197,5 +197,5 @@ def __init__(self, fileName, gdalDataset, gdalMetadata, self._create_bands(metaDict) # set time - self._set_time(parse(self.dataset. - GetMetadata()['SOURCE_ACQUISITION_UTC_TIME'])) + self.dataset.SetMetadataItem('time_coverage_start', + parse(self.dataset.GetMetadata()['SOURCE_ACQUISITION_UTC_TIME']).isoformat()) diff --git a/nansat/mappers/mapper_sstcci_online.py b/nansat/mappers/mapper_sstcci_online.py new file mode 100644 index 000000000..338e10f29 --- /dev/null +++ b/nansat/mappers/mapper_sstcci_online.py @@ -0,0 +1,188 @@ +# Name: mapper_ncep_wind_online.py +# Purpose: Nansat mapping for OC CCI data, stored online in THREDDS +# Author: Anton Korosov +# Licence: This file is part of NANSAT. 
You can redistribute it or modify +# under the terms of GNU General Public License, v.3 +# http://www.gnu.org/licenses/gpl-3.0.html +# +# Usage: +# w = Nansat('occci_online:1D:chlor_a:2010-01-01') + +import os +import datetime +from dateutil.parser import parse +from time import sleep as time_sleep + +import numpy as np + +try: + from netCDF4 import Dataset +except ImportError: + raise ImportError(''' + Cannot import Dataset from netCDF4. + You cannot access OC CCI data but + Nansat will work.''') + +from nansat.nsr import NSR +from nansat.vrt import VRT +from nansat.tools import gdal, WrongMapperError, OptionError + +class Mapper(VRT, object): + ''' VRT with mapping of WKV for NCEP GFS ''' + SST_CCI_URL_FORMAT = 'http://dap.ceda.ac.uk/data/neodc/esacci_sst/data/lt/Analysis/L4/v01.0/%Y/%m/%d/%Y%m%d120000-ESACCI-L4_GHRSST-SSTdepth-OSTIA-GLOB_LT-v02.0-fv01.0.nc' + cachePrefix = '_'.join(os.path.split(SST_CCI_URL_FORMAT)[1].split('-')[1:]) + + def __init__(self, fileName, gdalDataset, gdalMetadata, + cache='', lons=None, lats=None, **kwargs): + ''' Create NCEP VRT + Parameters: + fileName : str + sstcci_online:analysed_sst:2010-01-01 + sstcci_online:analysis_error:2010-01-01 + sstcci_online:sea_ice_fraction:2010-01-01 + sstcci_online:sea_ice_fraction_error:2010-01-01 + sstcci_online:mask:2010-01-01 + cache : str or bool + if str - name of the cahcing directory + If False or None - no caching + lon : list + minimum and maimum values of longitude + lat : list + minimum and maimum values of latitude + ''' + + + keywordBase = 'sstcci_online' + if not fileName.startswith(keywordBase): + raise WrongMapperError + + # create caching directory + if cache == '': + cache = os.path.curdir + if cache and not os.path.exists(cache): + os.mkdir(cache) + + # Get prod name + prodName = fileName.split(':')[1] + + # Get date + iDate = parse(fileName.split(':')[2]) + + # create dataset URL + dsURL = iDate.strftime(self.SST_CCI_URL_FORMAT) + + # get lon, lat, time dimensions from the OC 
CCI Dataset + self.lon, self.lat = self.get_lon_lat(cache, dsURL) + + # get rows and cols that contain predefined spatial domain + self.rows, self.cols, lons, lats, geoTransform = self.get_rows_cols(lons, lats) + + # create VRT with correct lon/lat (geotransform) + VRT.__init__(self, srcProjection=NSR().wkt, + srcRasterXSize=len(self.cols), + srcRasterYSize=len(self.rows), + srcGeoTransform=geoTransform) + + # Get SourceFilename either from memory array or from cached file + sourceFilename = self.get_sourcefilename(cache, dsURL, iDate, prodName, lons, lats) + + metaDict = [{'src': { + 'SourceFilename': sourceFilename, + 'SourceBand': 1, + }, + 'dst': { + 'name': prodName, + } + }] + + self._create_bands(metaDict) + + # set time + self.dataset.SetMetadataItem('time_coverage_start', iDate.isoformat()) + + def get_sourcefilename(self, cache, dsURL, iDate, prodName, lons, lats): + ''' Get SourceFilename either from memory array or from cached file ''' + print 'Get ', iDate, prodName + # try to find cached layer + if cache: + layerFilename = os.path.join(cache, + '%s_%s_%s_%+04d%+04d%+04d%+04d.tif' % ( + self.cachePrefix, + prodName, iDate.strftime('%Y%m%d'), + min(lons), max(lons), + min(lats), max(lats))) + print 'from ', layerFilename, '...' 
+ if os.path.exists(layerFilename): + print 'from ', layerFilename + return layerFilename + + print 'from THREDDS' + ### Continue without pre-cached file + # get product array from remote dataset + ds = Dataset(dsURL) + prodArray = ds.variables[prodName][0, min(self.rows):max(self.rows), + min(self.cols):max(self.cols)] + prodArray.data[prodArray.mask] = np.nan + # create VRT and add to self.bandVRTs + vrt = VRT(array=prodArray.data, srcProjection=NSR().wkt, + srcRasterXSize=self.dataset.RasterXSize, + srcRasterYSize=self.dataset.RasterYSize, + srcGeoTransform=self.dataset.GetGeoTransform()) + sourceFilename = vrt.fileName + self.bandVRTs[os.path.split(sourceFilename)[1]] = vrt + if cache: + gdal.GetDriverByName('GTiff').CreateCopy(layerFilename, vrt.dataset) + + return sourceFilename + + def get_lon_lat(self, cache, dsURL): + ### Get TIME, LAT, LON + # first try from cache + print 'Get lon, lat' + lon, lat = None, None + if cache: + gridFile = os.path.join(cache, self.cachePrefix+'_grid.npz') + if os.path.exists(gridFile): + try: + lon = np.load(gridFile)['lon'] + lat = np.load(gridFile)['lat'] + except: + time_sleep(0.5) + lon = np.load(gridFile)['lon'] + lat = np.load(gridFile)['lat'] + + # if cache does not exist try to fetch from remote dataset + if lon is None: + ds = Dataset(dsURL) + lat = ds.variables['lat'][:] + lon = ds.variables['lon'][:] + + # cache grid specs + if cache: + np.savez_compressed(gridFile, lon=lon, lat=lat) + + return lon, lat + + def get_rows_cols(self, lons, lats): + ''' Get rows and cols, estimate actual min/max of lat/lon''' + + ### Get min/max lon/lat + if lons is None: + lons = [-180, 180] + if type(lons) in [int, float]: + lons = [lons] + if lats is None: + lats = [-90, 90] + if type(lats) in [int, float]: + lats = [lats] + + rows = np.nonzero((self.lat >= min(lats)) * (self.lat <= max(lats)))[0] + cols = np.nonzero((self.lon >= min(lons)) * (self.lon <= max(lons)))[0] + + lons = [min(self.lon[cols]), max(self.lon[cols])] + lats = 
[min(self.lat[rows]), max(self.lat[rows])] + + geoTransform = (self.lon[cols][0], (self.lon[cols][-1] - self.lon[cols][0]) / len(cols), 0, + self.lat[rows][0], 0, (self.lat[rows][-1] - self.lat[rows][0]) / len(rows)) + + return rows, cols, lons, lats, geoTransform diff --git a/nansat/mappers/obpg.py b/nansat/mappers/obpg.py new file mode 100644 index 000000000..aa902d0e4 --- /dev/null +++ b/nansat/mappers/obpg.py @@ -0,0 +1,19 @@ +# Name: obpg +# Purpose: Base class for mapping for L2 data from the OBPG web-site +# Authors: Anton Korosov +# Licence: This file is part of NANSAT. You can redistribute it or modify +# under the terms of GNU General Public License, v.3 +# http://www.gnu.org/licenses/gpl-3.0.html +from nansat.vrt import VRT + + +class OBPGL2BaseClass(VRT): + ''' Base Class for Mappers for SeaWIFS/MODIS/MERIS/VIIRS L2 data from OBPG + ''' + + titles = ['HMODISA Level-2 Data', + 'MODISA Level-2 Data', + 'HMODIST Level-2 Data', + 'MERIS Level-2 Data', + 'GOCI Level-2 Data', + 'VIIRSN Level-2 Data'] diff --git a/nansat/mosaic.py b/nansat/mosaic.py old mode 100755 new mode 100644 index 0bae7dfa1..d9babbafb --- a/nansat/mosaic.py +++ b/nansat/mosaic.py @@ -17,215 +17,133 @@ # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
from __future__ import absolute_import import multiprocessing as mp -import datetime +import ctypes import numpy as np import scipy.stats as st from nansat.nansat import Nansat - -class Mosaic(Nansat): - '''Container for mosaicing methods - - Mosaic inherits everything from Nansat - ''' - - # default parameters - nClass = Nansat - eResampleAlg = 0 - period = None, None - threads = 1 - maskName = 'mask' - doReproject = True - bandIDs = [1] - mapper = 'mosaic' - - def _set_defaults(self, idict): - '''Check input params and set defaut values - - Look throught default parameters (in self) and given parameters (dict) - and paste value from input if the key matches - - Parameters - ---------- - idict : dictionary - parameter names and values - - Modifies - --------- - self - - ''' - for key in idict: - if hasattr(self, key): - setattr(self, key, idict[key]) - - def _get_layer_image(self, f): - '''Get nansat object from the specifed file - - Open file with Nansat - Return, if it is within a given period, - - Parameters: - ----------- - f : string - name of the file to be opened - Returns: - -------- - Nansat object or None - ''' - # open file using Nansat or its child class - # the line below is for debugging - #n = self.nClass(f, logLevel=self.logger.level) - self.logger.info('Try to open %s' % f) - #n = self.nClass(f, logLevel=self.logger.level) - try: - n = self.nClass(f, logLevel=self.logger.level) - except: - self.logger.error('Unable to open %s' % f) - return None - - # check if image is out of period - self.logger.info('Try to get time from %s' % f) - if n is not None: - ntime = n.get_time() - - if (ntime[0] is None and any(self.period)): - self.logger.error('%s has no time' % f) - return None - - if (self.period[0] is not None and - ntime[0] < self.period[0]): - self.logger.info('%s is taken before the period' % f) - return None - - if (self.period[1] is not None and - ntime[0] > self.period[1]): - self.logger.info('%s is taken after the period' % f) - return None 
- - return n - - def _get_layer_mask(self, n): - '''Get mask from input Nansat object - - Open files, reproject, get mask and metadata - - Parameters: - ----------- - n : Nansat - input object - doReproject : boolean - Should we reproject input files? - maskName : string - Name of the mask in the input file - - Returns: - -------- - mask : Numpy array with L2-mask - ''' - mask = 64 * np.ones(self.shape()).astype('int8') - # add mask band [0: nodata, 1: cloud, 2: land, 64: data] - self.logger.info('Try to get raw mask') - try: - mask = n[self.maskName] - except: - self.logger.error('Cannot get mask from %s' % n.fileName) - n.add_band(array=mask, parameters={'name': self.maskName}) - self.logger.debug('Got raw mask - OK') - +# shared arrays for mean, squared mean and count +sharedArray = None +domain = None + + +def mparray2ndarray(sharedArray, shape, dtype='float32'): + ''' convert shared multiprocessing Array to numpy ndarray ''' + # get access to shared array and convert to numpy ndarray + sharedNDArray = np.frombuffer(sharedArray.get_obj(), dtype=dtype) + # change shape to match bands + sharedNDArray.shape = shape + + return sharedNDArray + + +def sumup(layer): + ''' Sum up bands from input images in multiple threads''' + global sharedArray + global domain + + # get nansat from the input Layer + layer.make_nansat_object(domain) + # if not in the period, quit + if not layer.within_period(): + return 1 + # get mask + mask = layer.get_mask_array() + # get arrays with data + bandArrays = [layer.n[band] for band in layer.bands] + bandArrays = np.array(bandArrays) + finiteMask = np.isfinite(bandArrays.sum(axis=0)) + # get metadata + bandMetadata = [layer.n.get_metadata(bandID=band) for band in layer.bands] + + with sharedArray.get_lock(): # synchronize access + sharedNDArray = mparray2ndarray(sharedArray, + (2+len(layer.bands)*2, + mask.shape[0], + mask.shape[1]), + 'float32') + gpi = finiteMask * (mask == 64) + + # update counter + sharedNDArray[0][gpi] += 1 + + # 
update mask with max + sharedNDArray[1] = np.max([sharedNDArray[1], mask], axis=0) + + # update sum for each band + for i, bandArray in enumerate(bandArrays): + sharedNDArray[2+i][gpi] += bandArray[gpi] + + # update squared sum for each band + for i, bandArray in enumerate(bandArrays): + sharedNDArray[2+len(layer.bands)+i][gpi] += bandArray[gpi] + + # release layer + layer = None + return bandMetadata + + +class Layer: + ''' Small class to get mask and arrays from many bands ''' + def __init__(self, fileName, bands=[1], + opener=Nansat, maskName='mask', + doReproject=True, eResampleAlg=0, + period=(None, None), + logLevel=30): + # Set parameters of processing + self.fileName = fileName + self.bands = bands + self.opener = opener + self.maskName = maskName + self.doReproject = doReproject + self.period = period + self.eResampleAlg = eResampleAlg + self.logLevel = logLevel + + def make_nansat_object(self, domain): + # Open self.fileName with self.opener + print 'Layer', self.fileName, self.logLevel + self.n = self.opener(self.fileName, logLevel=self.logLevel) if self.doReproject: - # reproject image and get reprojected mask - self.logger.debug('Try to get reprojected mask') - n.reproject(self, eResampleAlg=self.eResampleAlg) - try: - mask = n[self.maskName] - except: - self.logger.error('Unable to get reprojected mask!') - self.logger.debug('Get reprojected mask - OK') - - return mask - - def _get_layer(self, f): - '''Get nansat object and mask from input file - - Parameters: - ----------- - f : string - input filename - doReproject : boolean - Should we reproject input files? 
- maskName : string - Name of the mask in the input file - - Returns: - -------- - n : Nansat object of input file - mask : Numpy array with array - ''' - mask = None - n = self._get_layer_image(f) - if n is not None: - mask = self._get_layer_mask(n) + self.n.reproject(domain, eResampleAlg=self.eResampleAlg) - return n, mask + def within_period(self): + ''' Test if given file is within period of time ''' + withinPeriod = True + ntime = self.n.get_metadata().get('time_coverage_start', None) + if (ntime is None and any(self.period)): + withinPeriod = False - def _get_cube(self, files, band): - '''Make cube with data from one band of input files - - Open files, reproject, get band, insert into cube + if (self.period[0] is not None and ntime < self.period[0]): + withinPeriod = False - Parameter: - ---------- - files : list of strings - input filenames - band : int or string - ID of the band - doReproject : boolean - Should we reproject input files? - maskName : string - Name of the mask in the input file + if (self.period[1] is not None and ntime > self.period[1]): + withinPeriod = False - Returns: - -------- - dataCube : Numpy 3D array with bands - mask : Numpy array with L2-mask - ''' - # preallocate 3D cube and mask - self.logger.debug('Allocating 3D cube') - dataCube = np.zeros((len(files), self.shape()[0], self.shape()[1])) - maskMat = np.zeros((2, self.shape()[0], self.shape()[1]), 'int8') + return withinPeriod - # for all input files - for i, f in enumerate(files): - self.logger.info('Processing %s' % f) + def get_mask_array(self): + ''' Get array with mask values ''' + if self.n.has_band(self.maskName): + mask = self.n[self.maskName] + elif self.doReproject: + mask = self.n['swathmask'] * 64 + else: + mask = np.ones(self.n.shape()) * 64 - # get image and mask - n, mask = self._get_layer(f) - if n is None: - continue - # get band from input image - a = None - try: - a = n[band].astype('float32') - except: - self.logger.error('%s is not in %s' % (band, 
n.fileName)) - if a is not None: - # mask invalid data - a[mask <= 2] = np.nan + return mask - # add band to the cube - dataCube[i, :, :] = a - # add data to mask matrix (maximum of 0, 1, 2, 64) - maskMat[0, :, :] = mask - maskMat[1, :, :] = maskMat.max(0) +class Mosaic(Nansat): + '''Container for mosaicing methods - return dataCube, maskMat.max(0) + Mosaic inherits everything from Nansat + ''' def average(self, files=[], bands=[1], doReproject=True, maskName='mask', - threads=1, **kwargs): + opener=Nansat, threads=1, eResampleAlg=0, period=(None, None)): '''Memory-friendly, multithreaded mosaicing(averaging) of input files Convert all input files into Nansat objects, reproject onto the @@ -258,7 +176,7 @@ def average(self, files=[], bands=[1], doReproject=True, maskName='mask', reproject input files? maskName : str, ['mask'] name of the mask in input files - nClass : child of Nansat, [Nansat] + opener : child of Nansat, [Nansat] This class is used to read input files threads : int number of parallel processes to use @@ -268,61 +186,62 @@ def average(self, files=[], bands=[1], doReproject=True, maskName='mask', Start and stop datetime objects from pyhon datetime. 
''' + # shared array for multiple threads + global sharedArray + global domain + # check inputs if len(files) == 0: self.logger.error('No input files given!') return - # modify default values - self.bandIDs = bands - self.doReproject = doReproject - self.maskName = maskName - self.threads = threads - self._set_defaults(kwargs) - # get desired shape dstShape = self.shape() - self.logger.debug('dstShape: %s' % str(dstShape)) - - # preallocate 2D matrices: - # sum, sum of squares, count of products and mask - self.logger.debug('Allocating 2D matrices') - avgMat = np.zeros((len(bands), dstShape[0], dstShape[1])) - stdMat = np.zeros((len(bands), dstShape[0], dstShape[1])) - cntMat = np.zeros((dstShape[0], dstShape[1]), 'float16') - maskMat = np.zeros((2, dstShape[0], dstShape[1]), 'int8') - - # put 2D matrices into result queue (variable shared by sub-processes) - matQueue = mp.Queue() - matQueue.put((cntMat, maskMat, avgMat, stdMat, files[0])) - - # create task queue with file names - fQueue = mp.JoinableQueue() - - # generate sub-processes - procs = [] - for i in range(threads): - procs.append(mp.Process(target=self._average_one_file, - args=(fQueue, matQueue))) - - # start sub-processes - for i in range(threads): - procs[i].start() - - # put file names into task queue - for f in files: - fQueue.put(f) - # add poison pill to task queue - for i in range(threads): - fQueue.put(None) - - # wait until sub-processes get all tasks from the task queue - fQueue.join() - - # get data from result queue - cntMat, maskMat, avgMat, stdMat, fName = matQueue.get() + # preallocate shared mem array + sharedArray = mp.Array(ctypes.c_float, + [0]*(2 + + len(bands) + + len(bands)) * dstShape[0] * dstShape[1]) + + # create list of layers + domain = Nansat(domain=self) + layers = [Layer(ifile, bands, opener, maskName, doReproject, + eResampleAlg, period, self.logger.level) + for ifile in files] + + # test in debug + # sumup(layers[0]) + + # prepare pool of processors + pool = 
mp.Pool(threads) + + # run reprojection and summing up + metadata = pool.map(sumup, layers) + + # get band metadata from the first valid file + for bandsMeta in metadata: + if type(bandsMeta) is list: + break # average products + sharedNDArray = mparray2ndarray(sharedArray, + (2+len(bands)*2, + dstShape[0], + dstShape[1]), + 'float32') + + # cleanup + pool.terminate() + pool = None + layers = None + metadata = None + sharedArray = None + + cntMat = sharedNDArray[0] + maskMat = sharedNDArray[1] + avgMat = sharedNDArray[2:2+len(bands)] + stdMat = sharedNDArray[2+len(bands):] + cntMat[cntMat == 0] = np.nan for bi, b in enumerate(bands): self.logger.debug(' Averaging %s' % b) @@ -337,134 +256,93 @@ def average(self, files=[], bands=[1], doReproject=True, maskName='mask', # set mean avgMat[bi] = avg - # calculate mask (max of 0, 1, 2, 4, 64) - maskMat = maskMat.max(0) - # if old 'valid' mask was applied in files, replace with new mask - maskMat[maskMat == 128] = 64 - self.logger.debug('Adding bands') # add mask band self.logger.debug(' mask') self.add_band(array=maskMat, parameters={'name': maskName, 'long_name': 'L2-mask', 'standard_name': 'mask'}) - firstN = self._get_layer_image(fName) # add averaged bands with metadata for bi, b in enumerate(bands): self.logger.debug(' %s' % b) - # get metadata of this band from the first image - parameters = firstN.get_metadata(bandID=b) - parameters.pop('dataType') - parameters.pop('SourceBand') - parameters.pop('SourceFilename') # add band and std with metadata - self.add_band(array=avgMat[bi], parameters=parameters) - parameters['name'] = parameters['name'] + '_std' - self.add_band(array=stdMat[bi], parameters=parameters) - - def _average_one_file(self, fQueue, matQueue): - ''' Parallel processing of one file - - In infinite loop wait for tasks in the task queue - If the task is available, get it and proceed - If task is None (poison pill) quit the infinite loop - If task is filename: - open the file - reproject - get data from 
file, - get intermediate result from the result queue - add data from file into the result - put the intermedieate result back to the queue + self.add_band(array=avgMat[bi], parameters=bandsMeta[bi]) + bandsMeta[bi]['name'] = bandsMeta[bi]['name'] + '_std' + self.add_band(array=stdMat[bi], parameters=bandsMeta[bi]) + + def _get_cube(self, files, band, doReproject, maskName, opener, + eResampleAlg, + period, + vmin=-np.inf, + vmax=np.inf): + '''Make cube with data from one band of input files - Parameters + Open files, reproject, get band, insert into cube + + Parameter: ---------- - fQueue : multiprocessing.JoinableQueue - task queue with file names - matQueue : multiprocessing.Queue - result queue with cntMat, avgMat, stdMat and maskMat + files : list of strings + input filenames + band : int or string + ID of the band + doReproject : boolean + Should we reproject input files? + maskName : string + Name of the mask in the input file + opener : class + Nansat or any Nansat child to open input image + eResampleAlg : int + parameter for Nansat.reproject() + period : tuple + valid (start_date, end_date) or (None, None) - Modifies + Returns: -------- - fQueue : get results from the task queue - matQueue : get and put results from into the result queue + dataCube : Numpy 3D array with bands + mask : Numpy array with L2-mask + metadata : dict with band metadata ''' - # start infinite loop - while True: - # get task from the queue - f = fQueue.get() - - if f is None: - # if poison pill received, quit infinite loop - fQueue.task_done() - break + # preallocate 3D cube and mask + self.logger.debug('Allocating 3D cube') + dataCube = np.zeros((len(files), self.shape()[0], self.shape()[1])) + maskMat = np.zeros((2, self.shape()[0], self.shape()[1]), 'int8') - # otherwise start processing of task + # for all input files + for i, f in enumerate(files): self.logger.info('Processing %s' % f) + layer = Layer(f, [band], opener, maskName, doReproject, + eResampleAlg, period, 
logLevel=self.logger.level) + # get nansat from the input Layer + layer.make_nansat_object(domain) - dstShape = self.shape() - - # get image and mask - self.logger.info('Open %s and get mask' % f) - n, mask = self._get_layer(f) - - # skip processing of invalid image - if n is None: - self.logger.error('%s invalid file!' % f) - fQueue.task_done() + # if not in the period, quit + if not layer.within_period(): continue + # get mask + mask = layer.get_mask_array() + # get arrays with data + bandArray = layer.n[band].astype('float32') + # remove invalid data + bandArray[mask < 64] = np.nan + bandArray[bandArray < vmin] = np.nan + bandArray[bandArray > vmax] = np.nan + + # get metadata + bandMetadata = layer.n.get_metadata(bandID=band) - # create temporary matrices to store results - cntMatTmp = np.zeros((dstShape[0], dstShape[1]), 'float16') - cntMatTmp[mask == 64] = 1 - avgMatTmp = np.zeros((len(self.bandIDs), - dstShape[0], dstShape[1]), 'float16') - stdMatTmp = np.zeros((len(self.bandIDs), - dstShape[0], dstShape[1]), 'float16') - - # add data to summation matrices - for bi, b in enumerate(self.bandIDs): - self.logger.info(' Adding %s to sum' % b) - # get projected data from Nansat object - a = None - try: - a = n[b] - except: - self.logger.error('%s is not in %s' % (b, n.fileName)) - if a is not None: - # mask invalid data - a[mask < 64] = 0 - # sum of valid values and squares - avgMatTmp[bi] += a - stdMatTmp[bi] += np.square(a) - # destroy Nansat image - n = None - - # get intermediate results from queue - cntMat, maskMat, avgMat, stdMat, fName = matQueue.get() - - # add data to the counting matrix - cntMat += cntMatTmp - - # add data to the mask matrix (maximum of 0, 1, 2, 64) + # add band to the cube + dataCube[i, :, :] = bandArray + + # add data to mask matrix (maximum of 0, 1, 2, 64) maskMat[0, :, :] = mask maskMat[1, :, :] = maskMat.max(0) - # add data to sum and square_sum matrix - avgMat += avgMatTmp - stdMat += stdMatTmp - - # remember file name - fName = f 
- - # update intermediate results into queue - matQueue.put((cntMat, maskMat, avgMat, stdMat, fName)) - - # tell the queue that task is done - fQueue.task_done() + return dataCube, maskMat.max(0), bandMetadata def median(self, files=[], bands=[1], doReproject=True, maskName='mask', - **kwargs): + opener=Nansat, eResampleAlg=0, period=(None, None), + vmin=-np.inf, vmax=np.inf): '''Calculate median of input bands Memory and CPU greedy method. Generates 3D cube from bands of @@ -493,152 +371,17 @@ def median(self, files=[], bands=[1], doReproject=True, maskName='mask', self.logger.error('No input files given!') return - # modify default values - self.bandIDs = bands - self.doReproject = doReproject - self.maskName = maskName - self._set_defaults(kwargs) - - lastN = self._get_layer_image(files[-1]) # add medians of all bands for band in bands: - bandCube, mask = self._get_cube(files, band) - bandMedian = st.nanmedian(bandCube, axis=0) + cube, mask, metadata = self._get_cube(files, band, + doReproject, + maskName, + opener, + eResampleAlg, + period, vmin, vmax) + median = st.nanmedian(cube, axis=0) - # get metadata of this band from the last image - parameters = lastN.get_metadata(bandID=band) # add band and std with metadata - self.add_band(array=bandMedian, parameters=parameters) + self.add_band(array=median, parameters=metadata) self.add_band(array=mask, parameters={'name': 'mask'}) - - def latest(self, files=[], bands=[1], doReproject=True, maskName='mask', - **kwargs): - '''Mosaic by adding the latest image on top without averaging - - Uses Nansat.get_time() to estimate time of each input file; - Sorts images by aquisition time; - Creates date_index band - with mask of coverage of each frame; - Uses date_index to fill bands of self only with the latest data - - Parameters - ----------- - files : list - list of input files - bands : list - list of names/band_numbers to be processed - doReproject : boolean, [True] - reproject input files? 
- maskName : str, ['mask'] - name of the mask in input files - nClass : child of Nansat, [Nansat] - This class is used to read input files - eResampleAlg : int, [0] - agorithm for reprojection, see Nansat.reproject() - period : [datetime0, datetime1] - Start and stop datetime objects from pyhon datetime. - - ''' - # check inputs - if len(files) == 0: - self.logger.error('No input files given!') - return - - # modify default values - self.bandIDs = bands - self.doReproject = doReproject - self.maskName = maskName - self._set_defaults(kwargs) - - # collect ordinals of times of each input file - itimes = np.zeros(len(files)) - for i in range(len(files)): - n = self._get_layer_image(files[i]) - nstime = n.get_time()[0] - if nstime is None: - nstime = 693596 # 1900-01-01 - else: - nstime = nstime.toordinal() - itimes[i] = nstime - - # sort times - ars = np.argsort(itimes) - - # maxIndex keeps mask of coverae of each frame - maxIndex = np.zeros((2, self.shape()[0], self.shape()[1])) - for i in range(len(files)): - # open file and get mask - n, mask = self._get_layer(files[ars[i]]) - # fill matrix with serial number of the file - maskIndex = (np.zeros(mask.shape) + i + 1).astype('uint16') - # erase non-valid values - maskIndex[mask != 64] = 0 - # first layer of maxIndex keeps serial number of this file - maxIndex[0, :, :] = maskIndex - # second layer of maxIndex keeps maximum serial number - # or serial number of the latest image - maxIndex[1, :, :] = maxIndex.max(0) - maxIndex = maxIndex.max(0) - - # preallocate 2D matrices for mosaiced data and mask - self.logger.debug('Allocating 2D matrices') - avgMat = {} - for b in bands: - avgMat[b] = np.zeros((maxIndex.shape[0], maxIndex.shape[1])) - maskMat = np.zeros((maxIndex.shape[0], maxIndex.shape[1])) - - for i in range(len(files)): - f = files[ars[i]] - self.logger.info('Processing %s' % f) - - # get image and mask - n, mask = self._get_layer(f) - if n is None: - continue - # insert mask into result only for pixels masked 
- # by the serial number of the input file - maskMat[maxIndex == (i + 1)] = mask[maxIndex == (i + 1)] - - # insert data into mosaic matrix - for b in bands: - self.logger.debug(' Inserting %s to latest' % b) - # get projected data from Nansat object - a = None - try: - a = n[b].astype('float32') - except: - self.logger.error('%s is not in %s' % (b, n.fileName)) - if a is not None: - # insert data into result only for pixels masked - # by the serial number of the input file - avgMat[b][maxIndex == (i + 1)] = a[maxIndex == (i + 1)] - - # destroy input nansat - n = None - # keep last image opened - lastN = self._get_layer_image(f) - - self.logger.debug('Adding bands') - # add mask band - self.logger.debug(' mask') - self.add_band(array=maskMat, parameters={'name': maskName, - 'long_name': 'L2-mask', - 'standard_name': 'mask'}) - # add mosaiced bands with metadata - for b in bands: - self.logger.debug(' %s' % b) - - # get metadata of this band from the last image - parameters = lastN.get_metadata(bandID=b) - # add band with metadata - self.add_band(array=avgMat[b], parameters=parameters) - - # compose list of dates of input images - timeString = '' - dt = datetime.datetime(1, 1, 1) - for i in range(len(itimes)): - timeString += (dt.fromordinal(int(itimes[ars[i]])). 
- strftime('%Y-%m-%dZ%H:%M ')) - # add band with mask of coverage of each frame - self.add_band(array=maxIndex, parameters={'name': 'date_index', - 'values': timeString}) diff --git a/nansat/nansat.py b/nansat/nansat.py old mode 100755 new mode 100644 index cc66186bc..52ea858ce --- a/nansat/nansat.py +++ b/nansat/nansat.py @@ -21,7 +21,6 @@ import sys import tempfile import datetime -import dateutil.parser import pkgutil import warnings import collections @@ -30,9 +29,10 @@ else: from ordereddict import OrderedDict -import scipy from scipy.io.netcdf import netcdf_file +from scipy.stats import nanmedian import numpy as np +from numpy.lib.recfunctions import append_fields import matplotlib from matplotlib import cm import matplotlib.pyplot as plt @@ -41,21 +41,15 @@ from nansat.domain import Domain from nansat.figure import Figure from nansat.vrt import VRT -from nansat.nansatshape import Nansatshape from nansat.tools import add_logger, gdal -from nansat.tools import OptionError, WrongMapperError, Error, GDALError +from nansat.tools import OptionError, WrongMapperError, NansatReadError, GDALError +from nansat.tools import parse_time, test_openable from nansat.node import Node from nansat.pointbrowser import PointBrowser # container for all mappers nansatMappers = None -def test_openable(fname): - try: - f = open(fname,'r') - except IOError: - raise - f.close() class Nansat(Domain): '''Container for geospatial data, performs all high-level operations @@ -199,6 +193,9 @@ def __getitem__(self, bandID): expression = band.GetMetadata().get('expression', '') # get data bandData = band.ReadAsArray() + if bandData is None: + raise GDALError('Cannot read array from band %s' % str(bandID)) + # execute expression if any if expression != '': bandData = eval(expression) @@ -214,9 +211,16 @@ def __getitem__(self, bandID): altFillValue = -10000. 
bandData[bandData == altFillValue] = np.nan - if np.size(np.where(np.isinf(bandData)))>0: + # replace infs with np.NAN + if np.size(np.where(np.isinf(bandData))) > 0: bandData[np.isinf(bandData)] = np.nan + # erase out-of-swath pixels with np.Nan (if not integer) + if (self.has_band('swathmask') and bandData.dtype.char in + np.typecodes['AllFloat']): + swathmask = self.get_GDALRasterBand('swathmask').ReadAsArray() + bandData[swathmask == 0] = np.nan + return bandData def __repr__(self): @@ -289,6 +293,10 @@ def add_bands(self, arrays, parameters=None, nomem=False): # and ''' + # replace empty parameters with list of None + if parameters is None: + parameters = [None] * len(arrays) + # create VRTs from arrays bandVRTs = [VRT(array=array, nomem=nomem) for array in arrays] @@ -327,7 +335,7 @@ def has_band(self, band): Parameters ---------- band : str - name of the band to check + name or standard_name of the band to check Returns ------- @@ -336,7 +344,9 @@ def has_band(self, band): ''' bandExists = False for b in self.bands(): - if self.bands()[b]['name'] == band: + if (self.bands()[b]['name'] == band or + ('standard_name' in self.bands()[b] and + self.bands()[b]['standard_name'] == band)): bandExists = True return bandExists @@ -388,6 +398,9 @@ def export(self, fileName, bands=None, rmMetadata=[], addGeolocArray=True, CreateCopy fails in case the band name has special characters, e.g. the slash in 'HH/VV'. + Metadata strings with special characters are escaped with XML/HTML + encoding. 
+ Examples -------- n.export(netcdfile) @@ -403,12 +416,16 @@ def export(self, fileName, bands=None, rmMetadata=[], addGeolocArray=True, exportVRT.imag = [] # delete unnecessary bands + rmBands = [] + selfBands = self.bands() if bands is not None: - srcBands = np.arange(self.vrt.dataset.RasterCount) + 1 - dstBands = np.array(bands) - mask = np.in1d(srcBands, dstBands) - rmBands = srcBands[mask==False] - exportVRT.delete_bands(rmBands.tolist()) + for selfBand in selfBands: + # if band number or band name is not listed: mark for removal + if (selfBand not in bands and + selfBands[selfBand]['name'] not in bands): + rmBands.append(selfBand) + # delete bands from VRT + exportVRT.delete_bands(rmBands) # Find complex data band complexBands = [] @@ -485,7 +502,14 @@ def export(self, fileName, bands=None, rmMetadata=[], addGeolocArray=True, globMetadata.pop(rmMeta) except: self.logger.info('Global metadata %s not found' % rmMeta) - exportVRT.dataset.SetMetadata(globMetadata) + + # Apply escaping to metadata strings to preserve special characters (in + # XML/HTML format) + globMetadata_escaped = {} + for key, val in globMetadata.iteritems(): + # Keys not escaped - this may be changed if needed... + globMetadata_escaped[key] = gdal.EscapeString(val, gdal.CPLES_XML) + exportVRT.dataset.SetMetadata(globMetadata_escaped) # if output filename is same as input one... 
if self.fileName == fileName: @@ -516,7 +540,7 @@ def export(self, fileName, bands=None, rmMetadata=[], addGeolocArray=True, # if GCPs should be added gcps = exportVRT.dataset.GetGCPs() srs = exportVRT.get_projection() - addGCPs = addGCPs and driver=='netCDF' and len(gcps) > 0 + addGCPs = addGCPs and driver == 'netCDF' and len(gcps) > 0 if addGCPs: # remove GeoTransform exportVRT._remove_geotransform() @@ -525,7 +549,7 @@ def export(self, fileName, bands=None, rmMetadata=[], addGeolocArray=True, else: # add projection metadata exportVRT.dataset.SetMetadataItem( - 'NANSAT_Projection', srs.replace(',','|').replace('"', '&')) + 'NANSAT_Projection', srs.replace(',', '|').replace('"', '&')) # add GeoTransform metadata geoTransformStr = str( @@ -672,8 +696,9 @@ def export2thredds(self, fileName, bands, metadata=None, dstBands[iband]['scale'] = float(bands[iband].get('scale', 1.0)) dstBands[iband]['offset'] = float(bands[iband].get('offset', 0.0)) if '_FillValue' in bands[iband]: - dstBands[iband]['_FillValue'] = float( - bands[iband]['_FillValue']) + dstBands[iband]['_FillValue'] = np.array( + [bands[iband]['_FillValue']], + dtype=dstBands[iband]['type'])[0] # mask values with np.nan if maskName is not None and iband != maskName: @@ -681,17 +706,11 @@ def export2thredds(self, fileName, bands, metadata=None, # add array to a temporary Nansat object bandMetadata = self.get_metadata(bandID=iband) - - # remove unwanted metadata from bands - for rmMeta in rmMetadata: - if rmMeta in bandMetadata.keys(): - bandMetadata.pop(rmMeta) - data.add_band(array=array, parameters=bandMetadata) self.logger.debug('Bands for export: %s' % str(dstBands)) # get corners of reprojected data - lonCrn, latCrn = data.get_corners() + minLat, maxLat, minLon, maxLon = data.get_min_max_lat_lon() # common global attributes: if createdTime is None: @@ -701,23 +720,17 @@ def export2thredds(self, fileName, bands, metadata=None, globMetadata = {'institution': 'NERSC', 'source': 'satellite remote sensing', 
'creation_date': createdTime, - 'northernmost_latitude': np.float(max(latCrn)), - 'southernmost_latitude': np.float(min(latCrn)), - 'westernmost_longitude': np.float(min(lonCrn)), - 'easternmost_longitude': np.float(max(lonCrn)), + 'northernmost_latitude': np.float(maxLat), + 'southernmost_latitude': np.float(minLat), + 'westernmost_longitude': np.float(minLon), + 'easternmost_longitude': np.float(maxLon), 'history': ' '} - #join or replace default by custom global metadata - + # join or replace default by custom global metadata if metadata is not None: for metaKey in metadata: globMetadata[metaKey] = metadata[metaKey] - # remove unwanted metadata from global metadata - for rmMeta in rmMetadata: - if rmMeta in globMetadata.keys(): - globMetadata.pop(rmMeta) - # export temporary Nansat object to a temporary netCDF fid, tmpName = tempfile.mkstemp(suffix='.nc') data.export(tmpName) @@ -757,15 +770,15 @@ def export2thredds(self, fileName, bands, metadata=None, ncOVar.units = 'days since 1900-1-1 0:0:0 +0' ncOVar.axis = 'T' + # get time from Nansat object or from input datetime if time is None: - time = filter(None, self.get_time()) + time = self.time_coverage_start # create value of time variable - if len(time) > 0: - td = time[0] - datetime.datetime(1900, 1, 1) - days = td.days + (float(td.seconds) / 60.0 / 60.0 / 24.0) - # add date - ncOVar[:] = days + td = time - datetime.datetime(1900, 1, 1) + days = td.days + (float(td.seconds) / 60.0 / 60.0 / 24.0) + # add date + ncOVar[:] = days # recreate file for ncIVarName in ncI.variables: @@ -777,7 +790,7 @@ def export2thredds(self, fileName, bands, metadata=None, ncIVar.dimensions) elif ncIVarName == gridMappingVarName: # create projection var - ncOVar = ncO.createVariable(ncIVarName, ncIVar.typecode(), + ncOVar = ncO.createVariable(gridMappingName, ncIVar.typecode(), ncIVar.dimensions) elif 'name' in ncIVar._attributes and ncIVar.name in dstBands: # dont add time-axis to lon/lat grids @@ -796,7 +809,7 @@ def 
export2thredds(self, fileName, bands, metadata=None, # copy rounded data from x/y if ncIVarName in ['x', 'y']: ncOVar[:] = np.floor(data).astype('>f4') - #add axis=X or axis=Y + # add axis=X or axis=Y ncOVar.axis = {'x': 'X', 'y': 'Y'}[ncIVarName] for attrib in ncIVar._attributes: if len(ncIVar._attributes[attrib]) > 0: @@ -824,24 +837,26 @@ def export2thredds(self, fileName, bands, metadata=None, # replace non-value by '_FillValue' if (ncIVar.name in dstBands): if '_FillValue' in dstBands[ncIVar.name].keys(): - data[np.isnan(data)] = bands[ncIVar.name]['_FillValue'] + data[np.isnan(data)] = dstBands[ + ncIVar.name]['_FillValue'] + ncOVar._attributes['_FillValue'] = dstBands[ + ncIVar.name]['_FillValue'] ncOVar[:] = data.astype(dstBands[ncIVar.name]['type']) - # copy (some) attributes for inAttrName in ncIVar._attributes: - if inAttrName not in ['dataType', 'SourceFilename', - 'SourceBand', '_Unsigned', - 'FillValue', 'time']: - ncOVar._attributes[inAttrName] = ( - ncIVar._attributes[inAttrName]) + if str(inAttrName) not in rmMetadata + ['dataType', + 'SourceFilename', 'SourceBand', '_Unsigned', + 'FillValue', 'time', '_FillValue']: + ncOVar._attributes[inAttrName] = ncIVar._attributes[inAttrName] - # add custom attributes + # add custom attributes from input parameter bands if ncIVar.name in bands: for newAttr in bands[ncIVar.name]: - if newAttr not in ['type', 'scale', 'offset']: - ncOVar._attributes[newAttr] = ( - bands[ncIVar.name][newAttr]) + if newAttr not in rmMetadata + ['type', 'scale', + 'offset', + '_FillValue']: + ncOVar._attributes[newAttr] = bands[ncIVar.name][newAttr] # add grid_mapping info if gridMappingName is not None: ncOVar._attributes['grid_mapping'] = gridMappingName @@ -927,12 +942,11 @@ def resize(self, factor=1, width=None, height=None, newRasterXSize = int(rasterXSize * factor) self.logger.info('New size/factor: (%f, %f)/%f' % - (newRasterXSize, newRasterYSize, factor)) + (newRasterXSize, newRasterYSize, factor)) if eResampleAlg <= 0: 
self.vrt = self.vrt.get_subsampled_vrt(newRasterXSize, newRasterYSize, - factor, eResampleAlg) else: # update size and GeoTranform in XML of the warped VRT object @@ -1026,7 +1040,8 @@ def list_bands(self, doPrint=True): return outString def reproject(self, dstDomain=None, eResampleAlg=0, blockSize=None, - WorkingDataType=None, tps=None, **kwargs): + WorkingDataType=None, tps=None, skip_gcps=1, addmask=True, + **kwargs): ''' Change projection of the object based on the given Domain Create superVRT from self.vrt with AutoCreateWarpedVRT() using @@ -1063,6 +1078,9 @@ def reproject(self, dstDomain=None, eResampleAlg=0, blockSize=None, If not given explicitly, 'skip_gcps' is fetched from the metadata of self, or from dstDomain (as set by mapper or user). [defaults to 1 if not specified, i.e. using all GCPs] + addmask : bool + If True, add band 'swathmask'. 1 - valid data, 0 no-data. + This band is used to replace no-data values with np.nan Modifies --------- @@ -1128,12 +1146,27 @@ def reproject(self, dstDomain=None, eResampleAlg=0, blockSize=None, # when using TPS (if requested) src_skip_gcps = self.vrt.dataset.GetMetadataItem('skip_gcps') dst_skip_gcps = dstDomain.vrt.dataset.GetMetadataItem('skip_gcps') - if not 'skip_gcps' in kwargs.keys(): # If not given explicitly... 
- kwargs['skip_gcps'] = 1 # default (use all GCPs) - if dst_skip_gcps is not None: # ...or use setting from dst - kwargs['skip_gcps'] = int(dst_skip_gcps) - if src_skip_gcps is not None: # ...or use setting from src - kwargs['skip_gcps'] = int(src_skip_gcps) + kwargs['skip_gcps'] = skip_gcps # default (use all GCPs) + if dst_skip_gcps is not None: # ...or use setting from dst + kwargs['skip_gcps'] = int(dst_skip_gcps) + if src_skip_gcps is not None: # ...or use setting from src + kwargs['skip_gcps'] = int(src_skip_gcps) + + # add band that masks valid values with 1 and nodata with 0 + # after reproject + if addmask: + self.vrt = self.vrt.get_super_vrt() + self.vrt._create_band( + src=[{ + 'SourceFilename': self.vrt.vrt.fileName, + 'SourceBand': 1, + 'DataType': gdal.GDT_Byte}], + dst={ + 'dataType': gdal.GDT_Byte, + 'wkv': 'swath_binary_mask', + 'PixelFunctionType': 'OnesPixelFunc', + }) + self.vrt.dataset.FlushCache() # create Warped VRT self.vrt = self.vrt.get_warped_vrt(dstSRS=dstSRS, @@ -1223,24 +1256,25 @@ def watermask(self, mod44path=None, dstDomain=None, **kwargs): self.logger.debug('MODPATH: %s' % mod44path) if not mod44DataExist: - # MOD44W data does not exist generate empty matrix - watermaskArray = np.zeros([self.vrt.dataset.RasterXSize, - self.vrt.dataset.RasterYSize]) - watermask = Nansat(domain=self, array=watermaskArray) - else: - # MOD44W data does exist: open the VRT file in Nansat - watermask = Nansat(mod44path + '/MOD44W.vrt', mapperName='MOD44W', - logLevel=self.logger.level) - # reproject on self or given Domain - if dstDomain is None: - watermask.reproject(self, **kwargs) - else: - watermask.reproject(dstDomain, **kwargs) + raise IOError('250 meters resolution watermask from MODIS ' + '44W Product does not exist - see Nansat ' + 'documentation to get it (the path is % s)' % mod44path) + + # MOD44W data does exist: open the VRT file in Nansat + watermask = Nansat(mod44path + '/MOD44W.vrt', mapperName='MOD44W', + logLevel=self.logger.level) + # 
reproject on self or given Domain + if dstDomain is None: + dstDomain = self + lon, lat = dstDomain.get_border() + watermask.crop(lonlim=[lon.min(), lon.max()], + latlim=[lat.min(), lat.max()]) + watermask.reproject(dstDomain, addmask=False, **kwargs) return watermask def write_figure(self, fileName=None, bands=1, clim=None, addDate=False, - **kwargs): + array_modfunc=None, **kwargs): ''' Save a raster band to a figure in graphical format. Get numpy array from the band(s) and band information specified @@ -1279,6 +1313,9 @@ def write_figure(self, fileName=None, bands=1, clim=None, addDate=False, addDate : boolean False (default) : no date will be aded to the caption True : the first time of the object will be added to the caption + array_modfunc : None + None (default) : figure created using array in provided band + function : figure created using array modified by provided function **kwargs : parameters for Figure(). Modifies @@ -1327,6 +1364,8 @@ def write_figure(self, fileName=None, bands=1, clim=None, addDate=False, for band in bands: # get array from band and reshape to (1,height,width) iArray = self[band] + if array_modfunc: + iArray = array_modfunc(iArray) iArray = iArray.reshape(1, iArray.shape[0], iArray.shape[1]) # create new 3D array or append band if array is None: @@ -1389,7 +1428,7 @@ def write_figure(self, fileName=None, bands=1, clim=None, addDate=False, # add DATE to caption if addDate: - caption += self.get_time()[0].strftime(' %Y-%m-%d') + caption += self.time_coverage_start.strftime(' %Y-%m-%d') self.logger.info('caption: %s ' % caption) @@ -1458,7 +1497,6 @@ def write_geotiffimage(self, fileName, bandID=1): colormap = band.GetMetadataItem('colormap') except: colormap = 'jet' - #try: cmap = cm.get_cmap(colormap, 256) cmap = cmap(np.arange(256)) * 255 colorTable = gdal.ColorTable() @@ -1466,8 +1504,6 @@ def write_geotiffimage(self, fileName, bandID=1): colorEntry = (int(cmap[i, 0]), int(cmap[i, 1]), int(cmap[i, 2]), int(cmap[i, 3])) 
colorTable.SetColorEntry(i, colorEntry) - #except: - # print 'Could not add colormap; Matplotlib may not be available.' # Write Tiff image, with data scaled to values between 0 and 255 outDataset = gdal.GetDriverByName('Gtiff').Create(fileName, band.XSize, @@ -1487,35 +1523,13 @@ def write_geotiffimage(self, fileName, bandID=1): outDataset = None self.vrt.copyproj(fileName) - def get_time(self, bandID=None): - ''' Get time for dataset and/or its bands + @property + def time_coverage_start(self): + return parse_time(self.get_metadata('time_coverage_start')) - Parameters - ---------- - bandID : int or str (default = None) - band number or name - - Returns - -------- - time : list with datetime objects for each band. - If time is the same for all bands, the list contains 1 item - - ''' - time = [] - for i in range(self.vrt.dataset.RasterCount): - band = self.get_GDALRasterBand(i + 1) - try: - time.append(dateutil.parser.parse( - band.GetMetadataItem('time'))) - except: - self.logger.debug('Band ' + str(i + 1) + ' has no time') - time.append(None) - - if bandID is not None: - bandNumber = self._get_band_number(bandID) - return time[bandNumber - 1] - else: - return time + @property + def time_coverage_end(self): + return parse_time(self.get_metadata('time_coverage_end')) def get_metadata(self, key=None, bandID=None): ''' Get metadata from self.vrt.dataset @@ -1543,7 +1557,11 @@ def get_metadata(self, key=None, bandID=None): # get all metadata or from a key if key is not None: - metadata = metadata.get(key, None) + try: + metadata = metadata[key] + except KeyError: + raise OptionError('%s does not have metadata %s' % ( + self.fileName, key)) return metadata @@ -1603,14 +1621,17 @@ def _get_mapper(self, mapperName, **kwargs): Raises -------- - Error : occurs if given mapper cannot open the input file + IOError : occurs if the input file does not exist + OptionError : occurs if given mapper cannot open the input file + NansatReadError : occurs if no mapper fits the input 
file ''' if os.path.isfile(self.fileName): - # Make sure file exists and can be opened for reading before proceeding + # Make sure file exists and can be opened for reading + # before proceeding test_openable(self.fileName) else: - ff = glob.glob(os.path.join(self.fileName,'*.*')) + ff = glob.glob(os.path.join(self.fileName, '*.*')) for f in ff: test_openable(f) # lazy import of nansat mappers @@ -1644,12 +1665,12 @@ def _get_mapper(self, mapperName, **kwargs): '').lower() # check if the mapper is available if mapperName not in nansatMappers: - raise Error('Mapper ' + mapperName + ' not found') + raise OptionError('Mapper ' + mapperName + ' not found') # check if mapper is importbale or raise an ImportError error if isinstance(nansatMappers[mapperName], tuple): errType, err, traceback = nansatMappers[mapperName] - #self.logger.error(err, exc_info=(errType, err, traceback)) + # self.logger.error(err, exc_info=(errType, err, traceback)) raise errType, err, traceback # create VRT using the selected mapper @@ -1696,13 +1717,15 @@ def _get_mapper(self, mapperName, **kwargs): tmpVRT._create_band({'SourceFilename': self.fileName, 'SourceBand': iBand + 1}) tmpVRT.dataset.FlushCache() + self.mapper = 'gdal_bands' # if GDAL cannot open the file, and no mappers exist which can make VRT if tmpVRT is None and gdalDataset is None: # check if given data file exists if not os.path.isfile(self.fileName): raise IOError('%s: File does not exist' % (self.fileName)) - raise GDALError('NANSAT can not open the file ' + self.fileName) + raise NansatReadError('%s: File cannot be read with NANSAT - ' + 'consider writing a mapper' % self.fileName) return tmpVRT @@ -1761,283 +1784,143 @@ def _get_band_number(self, bandID): return bandNumber - def get_transect(self, points=None, bandList=[1], latlon=True, - returnOGR=False, layerNum=0, - smoothRadius=0, smoothAlg=0, transect=True, - onlypixline=False, **kwargs): - - '''Get transect from two poins and retun the values by numpy array + def 
get_transect(self, points, bands, + lonlat=True, + smoothRadius=0, + smooth_function=nanmedian, + data=None): + '''Get values from transect from given vector of poins Parameters ---------- - points : list with one or more points or shape file name - i.e. [ - # get all transect values - [(lon_T1, lat_T1), (lon_T2, lat_T2), (lon_T3, lat_T3), ...] - # get point values - (lon_P1, lat_P1), (lon_P2, lat_P2), ... - ] - or - [ - # get all transect values - [(col_T1, row_T1), (col_T2, row_T2), (col_T3, row_T3), ...], - # get point values - (col_P1, row_P1), (col_P2, row_P2), ... - ] - bandList : list of int or string + points : 2xN list or array, N (number of points) >= 1 + coordinates [[x1, x2, y2], [y1, y2, y3]] + bands : list of int or string elements of the list are band number or band Name - latlon : bool + lonlat : bool If the points in lat/lon, then True. If the points in pixel/line, then False. - returnOGR: bool - If True, then return numpy array - If False, return OGR object - layerNum: int - If shapefile is given as points, it is the number of the layer smoothRadius: int If smootRadius is greater than 0, smooth every transect pixel as the median or mean value in a circule with radius equal to the given number. - smoothAlg: 0 or 1 for median or mean - transect : bool - used if a shape file name is given as the input. - If True, return the transect. If False, return the points. - vmin, vmax : int (optional) - minimum and maximum pixel values of an image shown - in case points is None. 
+ smooth_function: func + function for averaging values collected within smooth radius + data : ndarray + alternative array with data to take values from Returns -------- - if returnOGR: - transect : OGR object with points coordinates and values + transect : numpy record array + + ''' + # check if points is 2D array with shape 2xN (N>=1) + if (len(np.shape(points)) != 2 or + np.shape(points)[0] != 2 or + np.shape(points)[1] < 1): + # points are not 2xN array + raise OptionError('Input points must be 2xN array with N>0') + + # get names of bands + bandNames = [] + for band in bands: + try: + bandN = self._get_band_number(band) + except OptionError: + self.logger.error('Wrong band name %s' % band) + else: + bandNames.append(self.bands()[bandN]['name']) + + if data is not None: + bandNames.append('input') + + # if points in degree, convert them into pix/lin + if lonlat: + pix, lin = self.transform_points(points[0], points[1], DstToSrc=1) else: - transectDict: dictionary - key is band name. - Value is a dictionary of the transect values of each shape. - vectorsDict: dictionary - keys are shape ID. values are dictionaries - with longitude and latitude lists of each shape. - pixlinCoordDic: dictionary - keys are shape ID. 
values are numpy array - with pixels and lines coordinates - - NB - ---- - If points are given from GUI, - it is possible to select multiple shapes by pressing any key + pix, lin = points[0], points[1] + + # full vectors of pixel coordinates based on coordinates of vertices + pixVector, linVector = [pix[0]], [lin[0]] + for pn in range(len(pix[1:])): + px0, px1 = pix[pn], pix[pn+1] + py0, py1 = lin[pn], lin[pn+1] + length = np.round(np.hypot(px1-px0, py0-py1)) + pixVector += list(np.linspace(px0, px1, length+1)[1:]) + linVector += list(np.linspace(py0, py1, length+1)[1:]) + + # remove out of region points + pixVector = np.floor(pixVector) + linVector = np.floor(linVector) + gpi = ((pixVector >= (0 + smoothRadius)) * + (linVector >= (0 + smoothRadius)) * + (pixVector < (self.shape()[1] - smoothRadius)) * + (linVector < (self.shape()[0] - smoothRadius))) + pixVector = pixVector[gpi] + linVector = linVector[gpi] + + # create output transect + t = np.recarray((len(pixVector)), dtype=[('pixel', int), + ('line', int), + ('lon', float), + ('lat', float), ]) + + # add pixel, line, lon, lat values to output + t['pixel'] = pixVector + t['line'] = linVector + t['lon'], t['lat'] = self.transform_points(t['pixel'], t['line'], + DstToSrc=0) + + # mask for extraction within circular area + xgrid, ygrid = np.mgrid[0:smoothRadius * 2 + 1, 0:smoothRadius * 2 + 1] + distance = ((xgrid - smoothRadius) ** 2 + + (ygrid - smoothRadius) ** 2) ** 0.5 + mask = distance <= smoothRadius + + # get values from bands or input data + if len(bandNames) > 0: + for bandName in bandNames: + if bandName == 'input': + bandArray = data + else: + bandArray = self[bandName] + # average values from pixel inside a circle + bandValues = [] + for r, c in zip(t['line'], t['pixel']): + subarray = bandArray[r-smoothRadius:r+smoothRadius+1, + c-smoothRadius:c+smoothRadius+1] + bandValues.append(smooth_function(subarray[mask])) + t = append_fields(t, bandName, bandValues).data + + return t + + def 
digitize_points(self, band=1, **kwargs): + + '''Get coordinates of interactively digitized points + + Parameters + ---------- + band : int or str + ID of Nansat band + **kwargs : keyword arguments for imshow + + Returns + -------- + points : list + list of 2xN arrays of points to be used in Nansat.get_transect() ''' - if matplotlib.is_interactive() and points is None: + if matplotlib.is_interactive(): warnings.warn(''' Python is started with -pylab option, transect will not work. Please restart python without -pylab.''') - return - - smooth_function = scipy.stats.nanmedian - if smoothAlg == 1: - smooth_function = scipy.stats.nanmean - - data = None - # if shapefile is given, get corner points from it - if type(points) == str: - nansatOGR = Nansatshape(fileName=points) - points, latlon = nansatOGR.get_points(latlon) - if transect: - points = [points] - - bandNameDict ={} - bandsMeta = self.bands() - for iKey in bandsMeta.keys(): - bandNameDict[iKey] = bandsMeta[iKey]['name'] - - # if points is not given, get points from GUI ... 
- if points is None: - latlon = False - data = self[bandList[0]] - browser = PointBrowser(data, transect, **kwargs) - browser.get_points() - points = [] - oneLine = [] + return [] - for i in range(len(browser.coordinates)): - transect = browser.connect[i] - if transect: - oneLine.append(browser.coordinates[i]) - else: - if i == 0: - oneLine = [browser.coordinates[i]] - if len(oneLine) == 1: - oneLine.append(oneLine[-1]) - points.append(oneLine) - oneLine = [browser.coordinates[i]] - if i == len(browser.coordinates) - 1: - if len(oneLine) == 1: - oneLine.append(oneLine[-1]) - points.append(oneLine) - pixlinCoordDic = {} - gpiDic = {} - for i, iShape in enumerate (points): - pixlinCoord = np.array([[], []]) - for j in range(len(iShape) - 1): - if type(iShape[0]) != tuple: - point0 = iShape - point1 = iShape - else: - point0 = iShape[j] - point1 = iShape[j + 1] - - # if points in degree, convert them into pix/lin - if latlon: - pix, lin = self.transform_points([point0[0], point1[0]], - [point0[1], point1[1]], - DstToSrc=1) - point0 = (pix[0], lin[0]) - point1 = (pix[1], lin[1]) - - # compute Euclidean distance between point0 and point1 - length = int(np.hypot(point0[0] - point1[0], - point0[1] - point1[1])) - # if a point is given - if length == 0: - length = 1 - # get sequential coordinates on pix/lin between two points - pixVector = list(np.linspace(point0[0], - point1[0], - length).astype(int)) - linVector = list(np.linspace(point0[1], - point1[1], - length).astype(int)) - pixlinCoord = np.append(pixlinCoord, - [pixVector, linVector], - axis=1) - pixlinCoordDic['shape%d' %i] = pixlinCoord - - # truncate out-of-image points - gpiDic['shape%d' %i] = ((pixlinCoordDic['shape%d' %i][0] >= 0) * - (pixlinCoordDic['shape%d' %i][1] >= 0) * - (pixlinCoordDic['shape%d' %i][0] < - self.vrt.dataset.RasterXSize) * - (pixlinCoordDic['shape%d' %i][1] < - self.vrt.dataset.RasterYSize)) - - if onlypixline: - for iKey, iPixlinCoord in pixlinCoordDic.iteritems(): - 
pixlinCoordDic[iKey] = iPixlinCoord[:, gpiDic[iKey]] - return pixlinCoordDic - - if smoothRadius: - pixlinCoordSmoothDic = {} - for iShapeKey, iShapePoints in pixlinCoordDic.items(): - # get start/end coordinates of subwindows - pixlinCoordSmoothDic[iShapeKey+'_0'] = iShapePoints - smoothRadius - pixlinCoordSmoothDic[iShapeKey+'_1'] = iShapePoints + smoothRadius - # truncate out-of-image points - gpi = ((pixlinCoordSmoothDic[iShapeKey+'_0'][0] >= 0) * - (pixlinCoordSmoothDic[iShapeKey+'_0'][1] >= 0) * - (pixlinCoordSmoothDic[iShapeKey+'_1'][0] >= 0) * - (pixlinCoordSmoothDic[iShapeKey+'_1'][1] >= 0) * - (pixlinCoordSmoothDic[iShapeKey+'_0'][0] < self.vrt.dataset.RasterXSize) * - (pixlinCoordSmoothDic[iShapeKey+'_0'][1] < self.vrt.dataset.RasterYSize) * - (pixlinCoordSmoothDic[iShapeKey+'_1'][0] < self.vrt.dataset.RasterXSize) * - (pixlinCoordSmoothDic[iShapeKey+'_1'][1] < self.vrt.dataset.RasterYSize)) - pixlinCoordSmoothDic[iShapeKey+'_0'] = pixlinCoordSmoothDic[iShapeKey+'_0'][:, gpi] - pixlinCoordSmoothDic[iShapeKey+'_1'] = pixlinCoordSmoothDic[iShapeKey+'_1'][:, gpi] - - for iKey, iPixlinCoord in pixlinCoordDic.iteritems(): - pixlinCoordDic[iKey] = iPixlinCoord[:, gpiDic[iKey]] - - # convert pix/lin into lon/lat - vectorsDict = {} - for iShapeKey, iShapePoints in pixlinCoordDic.items(): - lonVector, latVector = self.transform_points(iShapePoints[0], - iShapePoints[1], - DstToSrc=0) - vectorsDict[iShapeKey] = {'longitude': lonVector, - 'latitude': latVector} - - # if smoothRadius, create a mask to extract circular area - # from a box area - if smoothRadius: - xgrid, ygrid = np.mgrid[0:smoothRadius * 2 + 1, - 0:smoothRadius * 2 + 1] - distance = ((xgrid - smoothRadius) ** 2 + - (ygrid - smoothRadius) ** 2) ** 0.5 - mask = distance <= smoothRadius - - transectDict = {} - # get data - for iBand in bandList: - tmpDic = {} - if type(iBand) == str: - iBand = self._get_band_number(iBand) - if data is None: - data = self[iBand] - # extract values - for iShapeKey, 
iShapePoints in pixlinCoordDic.items(): - if smoothRadius: - transect0 = [] - for xmin, xmax, ymin, ymax in zip( - pixlinCoordSmoothDic[iShapeKey+'_0'][1], - pixlinCoordSmoothDic[iShapeKey+'_1'][1], - pixlinCoordSmoothDic[iShapeKey+'_0'][0], - pixlinCoordSmoothDic[iShapeKey+'_1'][0]): - subdata = data[int(xmin):int(xmax + 1), - int(ymin):int(ymax + 1)] - transect0.append(smooth_function(subdata[mask])) - tmpDic[iShapeKey] = transect0 - else: - tmpDic[iShapeKey] = data[list(iShapePoints[1]), - list(iShapePoints[0])].tolist() - #transectDict['band%d' %iBand]= tmpDic - transectDict[str(iBand)+':'+bandNameDict[iBand]] = tmpDic - data = None - - if returnOGR: - # Lists for field names and datatype - names = ['X (pixel)', 'Y (line)'] - formats = ['i4', 'i4'] - for iBand in transectDict.keys(): - names.append('transect_' + str(iBand)) - formats.append('f8') - # Create zeros structured numpy array - pixel = np.array([]) - line = np.array([]) - longitude = np.array([]) - latitude = np.array([]) - transect = {} - - for iShape, iCoords in pixlinCoordDic.items(): - pixel = np.append(pixel, iCoords[0]) - line = np.append(line, iCoords[1]) - longitude = np.append(longitude, - vectorsDict[iShape]['longitude']) - latitude = np.append(latitude, - vectorsDict[iShape]['latitude']) - for iBand in transectDict.keys(): - if not (iBand in transect.keys()): - transect[iBand] = np.array([]) - transect[iBand] = np.append(transect[iBand], - transectDict[iBand][iShape]) - - fieldValues = np.zeros(len(line), dtype={'names': names, - 'formats': formats}) - # Set values into the structured numpy array - fieldValues['X (pixel)'] = pixel - fieldValues['Y (line)'] = line - for iBand in transect.keys(): - fieldValues['transect_'+iBand] = transect[iBand] - - # Create Nansatshape object - NansatOGR = Nansatshape(srs=NSR(self.vrt.get_projection())) - # Set features and geometries into the Nansatshape - NansatOGR.add_features(coordinates=np.array([lonVector, - latVector]), - values=fieldValues) - # 
Return Nansatshape object - return NansatOGR - else: - return transectDict, vectorsDict, pixlinCoordDic + data = self[band] + browser = PointBrowser(data, **kwargs) + points = browser.get_points() + return points def crop(self, xOff=0, yOff=0, xSize=None, ySize=None, lonlim=None, latlim=None): @@ -2306,4 +2189,3 @@ def _import_mappers(logLevel=None): nansatMappers['mapper_generic'] = gm return nansatMappers - diff --git a/nansat/nansatmap.py b/nansat/nansatmap.py index 1aef79c31..798e550ec 100644 --- a/nansat/nansatmap.py +++ b/nansat/nansatmap.py @@ -18,12 +18,14 @@ import re from mpl_toolkits.basemap import Basemap +import matplotlib as mpl from matplotlib import cm import matplotlib.pyplot as plt from scipy import ndimage import numpy as np from nansat.nsr import NSR +from nansat.tools import get_random_color class Nansatmap(Basemap): @@ -337,6 +339,38 @@ def contourf(self, data, v=None, self._do_contour(Basemap.contourf, data, v, smooth, mode, **kwargs) self.colorbar = len(self.mpl) - 1 + def imshow(self, data, low=0, high=255, **kwargs): + ''' Make RGB plot over the map + + data : numpy array + RGB or RGBA input data + **kwargs: + Parameters for Basemap.imshow + + Modifies + --------- + self.mpl : list + append AxesImage object with imshow + + ''' + # Create X/Y axes + self._create_xy_grids() + + # add random colormap + if 'cmap' in kwargs and kwargs['cmap'] == 'random': + values = np.unique(data[np.isfinite(data)]) + cmap, norm = self._create_random_colormap(values, + low=low, high=high) + kwargs['cmap'] = cmap + kwargs['norm'] = norm + + # Plot data using imshow + self.mpl.append(Basemap.imshow(self, data, + extent=[self.x.min(), self.x.max(), + self.y.min(), self.y.max()], + origin='upper', **kwargs)) + self.colorbar = len(self.mpl) - 1 + def pcolormesh(self, data, **kwargs): '''Make a pseudo-color plot over the map @@ -451,15 +485,33 @@ def add_colorbar(self, fontsize=6, **kwargs): # add colorbar and set font size if self.colorbar is not None: - cbar = 
self.fig.colorbar(self.mpl[self.colorbar], **kwargs) + origin = self.mpl[self.colorbar] + + # if colormap is ListedColormap + # add integer ticks + ticks = None + listedColormap = False + if (hasattr(origin, 'cmap') and (type(origin.cmap) == + mpl.colors.ListedColormap)): + ticks = (origin.norm.boundaries[:-1] + + np.diff(origin.norm.boundaries) / 2.) + listedColormap = True + + cbar = self.fig.colorbar(origin, ticks=ticks, **kwargs) + if listedColormap: + labels = origin.norm.boundaries[:-1] + if np.all(labels == np.floor(labels)): + labels = labels.astype('int32') + cbar.ax.set_xticklabels(labels) imaxes = plt.gca() plt.axes(cbar.ax) plt.xticks(fontsize=fontsize) plt.axes(imaxes) - def drawgrid(self, fontsize=10, lat_num=5, lon_num=5, + def drawgrid(self, lat_num=5, lon_num=5, lat_labels=[True, False, False, False], - lon_labels=[False, False, True, False]): + lon_labels=[False, False, True, False], + **kwargs): '''Draw and label parallels (lat and lon lines) for values (in degrees) Parameters @@ -479,12 +531,10 @@ def drawgrid(self, fontsize=10, lat_num=5, lon_num=5, ''' self.drawparallels(np.arange(self.latMin, self.latMax, (self.latMax - self.latMin) / lat_num), - labels=lat_labels, - fontsize=fontsize) + labels=lat_labels, **kwargs) self.drawmeridians(np.arange(self.lonMin, self.lonMax, (self.lonMax - self.lonMin) / lon_num), - labels=lon_labels, - fontsize=fontsize) + labels=lon_labels, **kwargs) def draw_continents(self, **kwargs): ''' Draw continents @@ -505,7 +555,8 @@ def draw_continents(self, **kwargs): # draw continets self.fillcontinents(**kwargs) - def save(self, fileName, landmask=True, **kwargs): + def save(self, fileName, landmask=True, dpi=75, + pad_inches=0, bbox_inches='tight', **kwargs): '''Draw continents and save Parameters @@ -523,7 +574,9 @@ def save(self, fileName, landmask=True, **kwargs): # set default extension if not((fileName.split('.')[-1] in self.extensionList)): fileName = fileName + self.DEFAULT_EXTENSION - 
self.fig.savefig(fileName) + self.fig.savefig(fileName, dpi=dpi, + pad_inches=pad_inches, + bbox_inches=bbox_inches) def _set_defaults(self, idict): '''Check input params and set defaut values @@ -567,3 +620,47 @@ def _create_xy_grids(self): self._create_lonlat_grids() if self.x is None or self.y is None: self.x, self.y = self(self.lon, self.lat) + + def _create_random_colormap(self, values, low=0, high=255): + ''' Generate colormap and colorbar with random discrete colors + + Parameters + ---------- + values : list or 1D array + values for which the random colors are to be generated + Returns + ------- + cmap : matplotlib.color.Colormap + norm : matplotlib.color.BoundaryNorm + ''' + # create first random color + randomColors = [get_random_color(low=low, high=high)] + # add more random colors + for v in values[1:]: + randomColors.append(get_random_color(randomColors[-1], + low=low, high=high)) + + # create colormap and norm + cmap = mpl.colors.ListedColormap(randomColors) + bounds = sorted(list(values)) + bounds += [max(bounds) + 1] # bounds should be longer than values by 1 + norm = mpl.colors.BoundaryNorm(bounds, cmap.N) + + return cmap, norm + + def add_zone_labels(self, zones, fontsize=5): + ''' Finds best place of labels for a zone map, adds labels to the map + + Parameters + ---------- + zones : numpy array with integer zones + the same array as usied in Nansatmap.imshow + ''' + zoneIndices = np.unique(zones[np.isfinite(zones)]) + for zi in zoneIndices: + zrows, zcols = np.nonzero(zones == zi) + zrc = np.median(zrows) + zcc = np.median(zcols) + lon, lat = self.domain.transform_points([zcc], [zrc], 0) + x, y = self(lon[0], lat[0]) + plt.text(x, y, '%d' % zi, fontsize=fontsize) diff --git a/nansat/nansatshape.py b/nansat/nansatshape.py index 888e7afa3..fa5875f13 100644 --- a/nansat/nansatshape.py +++ b/nansat/nansatshape.py @@ -15,7 +15,7 @@ # but WITHOUT ANY WARRANTY without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
from __future__ import absolute_import -from string import Template, ascii_uppercase, digits +from string import ascii_uppercase, digits from random import choice from nansat.tools import ogr diff --git a/nansat/node.py b/nansat/node.py index ca1650b42..b7c696bb7 100644 --- a/nansat/node.py +++ b/nansat/node.py @@ -139,26 +139,33 @@ def replaceNode(self, tag, elemNum=0, newNode=None): def delNode(self, tag, options=None): ''' - Recursively find the all subnodes with this tag and remove - from self.children. + Recursively find nodes containing subnodes with this tag and remove + subnodes options : dictionary - if there are several same tags, specify a node by their attributes. + if there are several tags, specify a node by their attributes. ''' + # indeces of children to be removed + ideleted = [] + for i, child in enumerate(self.children): - if child.node(tag) and options is None: - self.children.pop(i) - elif child.node(tag): - for j, jKey in enumerate(options.keys()): - try: - if (child.getAttribute(jKey) == str(options[jKey]) and - len(options.keys()) == j+1): - self.children.pop(i) - except: - break + if str(child.tag) != str(tag): + # if child has another tag : delete children + child.delNode(tag, options) + elif options is None: + # if child has this tag and no options : mark for deletion + ideleted.append(i) else: - child.delNode(tag) + # if child has this tag + # and options match attributes : mark for deletion + for key in options.keys(): + if str(child.attributes.get(key, '')) == str(options[key]): + ideleted.append(i) + + # delete marked children + for i in sorted(ideleted, reverse=True): + self.children.pop(i) def find_dom_child(self, dom, tagName, n=0): '''Recoursively find child of the dom''' diff --git a/nansat/nsr.py b/nansat/nsr.py index 243722189..05225ba0c 100644 --- a/nansat/nsr.py +++ b/nansat/nsr.py @@ -31,7 +31,7 @@ def __init__(self, srs=0): PROJ4: string with proj4 options [http://trac.osgeo.org/proj/] e.g.: '+proj=latlong +datum=WGS84 
+ellps=WGS84 +no_defs' - '+proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=75 +lon_0=10 +no_defs' + '+proj=stere +datum=WGS84 +ellps=WGS84 +lat_0=75 +lon_0=0 +no_defs' EPSG: integer with EPSG number, [http://spatialreference.org/], e.g. 4326 diff --git a/nansat/pixelfunctions/Makefile b/nansat/pixelfunctions/Makefile old mode 100755 new mode 100644 diff --git a/nansat/pixelfunctions/README.txt b/nansat/pixelfunctions/README.txt old mode 100755 new mode 100644 diff --git a/nansat/pixelfunctions/README_Nansat b/nansat/pixelfunctions/README_Nansat old mode 100755 new mode 100644 diff --git a/nansat/pixelfunctions/_pixfun.c b/nansat/pixelfunctions/_pixfun.c old mode 100755 new mode 100644 diff --git a/nansat/pixelfunctions/pixelfunctions.c b/nansat/pixelfunctions/pixelfunctions.c old mode 100755 new mode 100644 index 151b7d82d..7dcdf774a --- a/nansat/pixelfunctions/pixelfunctions.c +++ b/nansat/pixelfunctions/pixelfunctions.c @@ -1,1466 +1,1550 @@ -/****************************************************************************** - * - * Project: GDAL - * Purpose: Implementation of a set of GDALDerivedPixelFunc(s) to be used - * with source raster band of virtual GDAL datasets. - * Author: Antonio Valentino - * - ****************************************************************************** - * Copyright (c) 2008-2011 Antonio Valentino - * - * Permission is hereby granted, free of charge, to any person obtaining a - * copy of this software and associated documentation files (the "Software"), - * to deal in the Software without restriction, including without limitation - * the rights to use, copy, modify, merge, publish, distribute, sublicense, - * and/or sell copies of the Software, and to permit persons to whom the - * Software is furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included - * in all copies or substantial portions of the Software. 
- * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS - * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL - * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER - * DEALINGS IN THE SOFTWARE. - *****************************************************************************/ - -#include -#include -#include -#include - -void GenericPixelFunction(double f(double*), void **papoSources, - int nSources, void *pData, int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace); - -CPLErr RealPixelFunc(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - int iLine, nPixelSpaceSrc, nLineSpaceSrc; - - /* ---- Init ---- */ - if (nSources != 1) return CE_Failure; - - nPixelSpaceSrc = GDALGetDataTypeSize( eSrcType ) / 8; - nLineSpaceSrc = nPixelSpaceSrc * nXSize; - - /* ---- Set pixels ---- */ - for( iLine = 0; iLine < nYSize; ++iLine ) { - GDALCopyWords(((GByte *)papoSources[0]) + nLineSpaceSrc * iLine, - eSrcType, nPixelSpaceSrc, - ((GByte *)pData) + nLineSpace * iLine, - eBufType, nPixelSpace, nXSize); - } - - /* ---- Return success ---- */ - return CE_None; -} /* RealPixelFunc */ - - -CPLErr ImagPixelFunc(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - int iLine; - - /* ---- Init ---- */ - if (nSources != 1) return CE_Failure; - - if (GDALDataTypeIsComplex( eSrcType )) - { - int nPixelSpaceSrc = GDALGetDataTypeSize( eSrcType ) / 8; - int nLineSpaceSrc = nPixelSpaceSrc * nXSize; - - void* pImag = ((GByte 
*)papoSources[0]) - + GDALGetDataTypeSize( eSrcType ) / 8 / 2; - - /* ---- Set pixels ---- */ - for( iLine = 0; iLine < nYSize; ++iLine ) { - GDALCopyWords(((GByte *)pImag) + nLineSpaceSrc * iLine, - eSrcType, nPixelSpaceSrc, - ((GByte *)pData) + nLineSpace * iLine, - eBufType, nPixelSpace, nXSize); - } - } else { - double dfImag = 0; - - /* ---- Set pixels ---- */ - for( iLine = 0; iLine < nYSize; ++iLine ) { - /* always copy from the same location */ - GDALCopyWords(&dfImag, eSrcType, 0, - ((GByte *)pData) + nLineSpace * iLine, - eBufType, nPixelSpace, nXSize); - } - } - - /* ---- Return success ---- */ - return CE_None; -} /* ImagPixelFunc */ - - -CPLErr ModulePixelFunc(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - int ii, iLine, iCol; - double dfPixVal; - - /* ---- Init ---- */ - if (nSources != 1) return CE_Failure; - - if (GDALDataTypeIsComplex( eSrcType )) - { - double dfReal, dfImag; - void *pReal = papoSources[0]; - void *pImag = ((GByte *)papoSources[0]) - + GDALGetDataTypeSize( eSrcType ) / 8 / 2; - - /* ---- Set pixels ---- */ - for( iLine = 0, ii = 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - - /* Source raster pixels may be obtained with SRCVAL macro */ - dfReal = SRCVAL(pReal, eSrcType, ii); - dfImag = SRCVAL(pImag, eSrcType, ii); - - dfPixVal = sqrt( dfReal * dfReal + dfImag * dfImag ); - - GDALCopyWords(&dfPixVal, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + - iCol * nPixelSpace, eBufType, nPixelSpace, 1); - } - } - } else { - /* ---- Set pixels ---- */ - for( iLine = 0, ii = 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - - /* Source raster pixels may be obtained with SRCVAL macro */ - dfPixVal = abs(SRCVAL(papoSources[0], eSrcType, ii)); - - GDALCopyWords(&dfPixVal, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + - iCol * nPixelSpace, 
eBufType, nPixelSpace, 1); - } - } - } - - /* ---- Return success ---- */ - return CE_None; -} /* ModulePixelFunc */ - - -CPLErr PhasePixelFunc(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - int ii, iLine, iCol; - double dfPixVal, dfReal; - - /* ---- Init ---- */ - if (nSources != 1) return CE_Failure; - - if (GDALDataTypeIsComplex( eSrcType )) - { - double dfImag; - void *pReal = papoSources[0]; - void *pImag = ((GByte *)papoSources[0]) - + GDALGetDataTypeSize( eSrcType ) / 8 / 2; - - /* ---- Set pixels ---- */ - for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - - /* Source raster pixels may be obtained with SRCVAL macro */ - dfReal = SRCVAL(pReal, eSrcType, ii); - dfImag = SRCVAL(pImag, eSrcType, ii); - - dfPixVal = atan2(dfImag, dfReal); - - GDALCopyWords(&dfPixVal, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + - iCol * nPixelSpace, eBufType, nPixelSpace, 1); - } - } - } else { - /* ---- Set pixels ---- */ - /* - for( iLine = 0; iLine < nYSize; ++iLine ) { - / * always copy from the same location * / - GDALCopyWords(&dfImag, eSrcType, 0, - ((GByte *)pData) + nLineSpace * iLine, - eBufType, nPixelSpace, nXSize); - } - */ - /* ---- Set pixels ---- */ - double pi = atan2(0, -1); - for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - - void *pReal = papoSources[0]; - - /* Source raster pixels may be obtained with SRCVAL macro */ - dfReal = SRCVAL(pReal, eSrcType, ii); - dfPixVal = (dfReal < 0) ? 
pi : 0; - - GDALCopyWords(&dfPixVal, GDT_Float64, dfPixVal, - ((GByte *)pData) + nLineSpace * iLine + - iCol * nPixelSpace, eBufType, nPixelSpace, 1); - } - } - } - - /* ---- Return success ---- */ - return CE_None; -} /* PhasePixelFunc */ - - -CPLErr ConjPixelFunc(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - /* ---- Init ---- */ - if (nSources != 1) return CE_Failure; - - if (GDALDataTypeIsComplex( eSrcType ) && GDALDataTypeIsComplex( eBufType )) - { - int iLine, iCol, ii; - double adfPixVal[2]; - int nOffset = GDALGetDataTypeSize( eSrcType ) / 8 / 2; - void *pReal = papoSources[0]; - void *pImag = ((GByte *)papoSources[0]) + nOffset; - - /* ---- Set pixels ---- */ - for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - - /* Source raster pixels may be obtained with SRCVAL macro */ - adfPixVal[0] = +SRCVAL(pReal, eSrcType, ii); /* re */ - adfPixVal[1] = -SRCVAL(pImag, eSrcType, ii); /* im */ - - GDALCopyWords(adfPixVal, GDT_CFloat64, 0, - ((GByte *)pData) + nLineSpace * iLine + - iCol * nPixelSpace, eBufType, nPixelSpace, 1); - } - } - } else { - /* no complex data type */ - return RealPixelFunc(papoSources, nSources, pData, nXSize, nYSize, - eSrcType, eBufType, nPixelSpace, nLineSpace); - } - - /* ---- Return success ---- */ - return CE_None; -} /* ConjPixelFunc */ - - -CPLErr SumPixelFunc(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - int iLine, iCol, ii, iSrc; - - /* ---- Init ---- */ - if (nSources < 2) return CE_Failure; - - /* ---- Set pixels ---- */ - if (GDALDataTypeIsComplex( eSrcType )) - { - double adfSum[2]; - void *pReal, *pImag; - int nOffset = GDALGetDataTypeSize( eSrcType ) / 8 / 2; - - /* ---- Set pixels ---- */ - for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { - for( 
iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - adfSum[0] = 0; - adfSum[1] = 0; - - for( iSrc = 0; iSrc < nSources; ++iSrc ) { - pReal = papoSources[iSrc]; - pImag = ((GByte *)pReal) + nOffset; - - /* Source raster pixels may be obtained with SRCVAL macro */ - adfSum[0] += SRCVAL(pReal, eSrcType, ii); - adfSum[1] += SRCVAL(pImag, eSrcType, ii); - } - - GDALCopyWords(adfSum, GDT_CFloat64, 0, - ((GByte *)pData) + nLineSpace * iLine + - iCol * nPixelSpace, eBufType, nPixelSpace, 1); - } - } - } else { - /* non complex */ - double dfSum; - - /* ---- Set pixels ---- */ - for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - dfSum = 0; - - for( iSrc = 0; iSrc < nSources; ++iSrc ) { - /* Source raster pixels may be obtained with SRCVAL macro */ - dfSum += SRCVAL(papoSources[iSrc], eSrcType, ii); - } - - GDALCopyWords(&dfSum, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + - iCol * nPixelSpace, eBufType, nPixelSpace, 1); - } - } - } - - /* ---- Return success ---- */ - return CE_None; -} /* SumPixelFunc */ - - -CPLErr DiffPixelFunc(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - int ii, iLine, iCol; - - /* ---- Init ---- */ - if (nSources != 2) return CE_Failure; - - if (GDALDataTypeIsComplex( eSrcType )) - { - - double adfPixVal[2]; - int nOffset = GDALGetDataTypeSize( eSrcType ) / 8 / 2; - void *pReal0 = papoSources[0]; - void *pImag0 = ((GByte *)papoSources[0]) + nOffset; - void *pReal1 = papoSources[1]; - void *pImag1 = ((GByte *)papoSources[1]) + nOffset; - - /* ---- Set pixels ---- */ - for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - - /* Source raster pixels may be obtained with SRCVAL macro */ - adfPixVal[0] = SRCVAL(pReal0, eSrcType, ii) - - SRCVAL(pReal1, eSrcType, ii); - adfPixVal[1] = SRCVAL(pImag0, eSrcType, ii) - - SRCVAL(pImag1, eSrcType, 
ii); - - GDALCopyWords(adfPixVal, GDT_CFloat64, 0, - ((GByte *)pData) + nLineSpace * iLine + - iCol * nPixelSpace, eBufType, nPixelSpace, 1); - } - } - } else { - /* non complex */ - double dfPixVal; - - /* ---- Set pixels ---- */ - for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - - /* Source raster pixels may be obtained with SRCVAL macro */ - dfPixVal = SRCVAL(papoSources[0], eSrcType, ii) - - SRCVAL(papoSources[1], eSrcType, ii); - - GDALCopyWords(&dfPixVal, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + - iCol * nPixelSpace, eBufType, nPixelSpace, 1); - } - } - } - - /* ---- Return success ---- */ - return CE_None; -} /* DiffPixelFunc */ - - -CPLErr MulPixelFunc(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - int iLine, iCol, ii, iSrc; - - /* ---- Init ---- */ - if (nSources < 2) return CE_Failure; - - /* ---- Set pixels ---- */ - if (GDALDataTypeIsComplex( eSrcType )) - { - double adfPixVal[2], dfOldR, dfOldI, dfNewR, dfNewI; - void *pReal, *pImag; - int nOffset = GDALGetDataTypeSize( eSrcType ) / 8 / 2; - - /* ---- Set pixels ---- */ - for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - adfPixVal[0] = 1.; - adfPixVal[1] = 0.; - - for( iSrc = 0; iSrc < nSources; ++iSrc ) { - pReal = papoSources[iSrc]; - pImag = ((GByte *)pReal) + nOffset; - - dfOldR = adfPixVal[0]; - dfOldI = adfPixVal[1]; - - /* Source raster pixels may be obtained with SRCVAL macro */ - dfNewR = SRCVAL(pReal, eSrcType, ii); - dfNewI = SRCVAL(pImag, eSrcType, ii); - - adfPixVal[0] = dfOldR * dfNewR - dfOldI * dfNewI; - adfPixVal[1] = dfOldR * dfNewI + dfOldI * dfNewR; - } - - GDALCopyWords(adfPixVal, GDT_CFloat64, 0, - ((GByte *)pData) + nLineSpace * iLine + - iCol * nPixelSpace, eBufType, nPixelSpace, 1); - } - } - } else { - /* non complex */ - double dfPixVal; - - /* 
---- Set pixels ---- */ - for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - dfPixVal = 1; - - for( iSrc = 0; iSrc < nSources; ++iSrc ) { - /* Source raster pixels may be obtained with SRCVAL macro */ - dfPixVal *= SRCVAL(papoSources[iSrc], eSrcType, ii); - } - - GDALCopyWords(&dfPixVal, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + - iCol * nPixelSpace, eBufType, nPixelSpace, 1); - } - } - } - - /* ---- Return success ---- */ - return CE_None; -} /* MulPixelFunc */ - - -CPLErr CMulPixelFunc(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - int ii, iLine, iCol; - - /* ---- Init ---- */ - if (nSources != 2) return CE_Failure; - - /* ---- Set pixels ---- */ - if (GDALDataTypeIsComplex( eSrcType )) - { - double adfPixVal[2], dfReal0, dfImag0, dfReal1, dfImag1; - - int nOffset = GDALGetDataTypeSize( eSrcType ) / 8 / 2; - void *pReal0 = papoSources[0]; - void *pImag0 = ((GByte *)papoSources[0]) + nOffset; - void *pReal1 = papoSources[1]; - void *pImag1 = ((GByte *)papoSources[1]) + nOffset; - - for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - - /* Source raster pixels may be obtained with SRCVAL macro */ - dfReal0 = SRCVAL(pReal0, eSrcType, ii); - dfReal1 = SRCVAL(pReal1, eSrcType, ii); - dfImag0 = SRCVAL(pImag0, eSrcType, ii); - dfImag1 = SRCVAL(pImag1, eSrcType, ii); - adfPixVal[0] = dfReal0 * dfReal1 + dfImag0 * dfImag1; - adfPixVal[1] = dfReal1 * dfImag0 - dfReal0 * dfImag1; - - GDALCopyWords(adfPixVal, GDT_CFloat64, 0, - ((GByte *)pData) + nLineSpace * iLine + - iCol * nPixelSpace, eBufType, nPixelSpace, 1); - } - } - } else { - /* non complex */ - double adfPixVal[2] = {0, 0}; - - for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - - /* Source raster pixels may be obtained with SRCVAL macro 
*/ - adfPixVal[0] = SRCVAL(papoSources[0], eSrcType, ii) - * SRCVAL(papoSources[1], eSrcType, ii); - - GDALCopyWords(adfPixVal, GDT_CFloat64, 0, - ((GByte *)pData) + nLineSpace * iLine + - iCol * nPixelSpace, eBufType, nPixelSpace, 1); - } - } - } - - /* ---- Return success ---- */ - return CE_None; -} /* CMulPixelFunc */ - - -CPLErr InvPixelFunc(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - int iLine, iCol, ii; - - /* ---- Init ---- */ - if (nSources != 1) return CE_Failure; - - /* ---- Set pixels ---- */ - if (GDALDataTypeIsComplex( eSrcType )) - { - double adfPixVal[2], dfReal, dfImag, dfAux; - - int nOffset = GDALGetDataTypeSize( eSrcType ) / 8 / 2; - void *pReal = papoSources[0]; - void *pImag = ((GByte *)papoSources[0]) + nOffset; - - for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - - /* Source raster pixels may be obtained with SRCVAL macro */ - dfReal = SRCVAL(pReal, eSrcType, ii); - dfImag = SRCVAL(pImag, eSrcType, ii); - dfAux = dfReal * dfReal + dfImag * dfImag; - adfPixVal[0] = +dfReal / dfAux; - adfPixVal[1] = -dfImag / dfAux; - - GDALCopyWords(adfPixVal, GDT_CFloat64, 0, - ((GByte *)pData) + nLineSpace * iLine + - iCol * nPixelSpace, eBufType, nPixelSpace, 1); - } - } - } else { - /* non complex */ - double dfPixVal; - - /* ---- Set pixels ---- */ - for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - - /* Source raster pixels may be obtained with SRCVAL macro */ - dfPixVal = 1. 
/ SRCVAL(papoSources[0], eSrcType, ii); - - GDALCopyWords(&dfPixVal, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + - iCol * nPixelSpace, eBufType, nPixelSpace, 1); - } - } - } - - /* ---- Return success ---- */ - return CE_None; -} /* InvPixelFunc */ - - -CPLErr IntensityPixelFunc(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - int ii, iLine, iCol; - double dfPixVal; - - /* ---- Init ---- */ - if (nSources != 1) return CE_Failure; - - if (GDALDataTypeIsComplex( eSrcType )) - { - double dfReal, dfImag; - int nOffset = GDALGetDataTypeSize( eSrcType ) / 8 / 2; - void *pReal = papoSources[0]; - void *pImag = ((GByte *)papoSources[0]) + nOffset; - - /* ---- Set pixels ---- */ - for( iLine = 0, ii = 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - - /* Source raster pixels may be obtained with SRCVAL macro */ - dfReal = SRCVAL(pReal, eSrcType, ii); - dfImag = SRCVAL(pImag, eSrcType, ii); - - dfPixVal = dfReal * dfReal + dfImag * dfImag; - - GDALCopyWords(&dfPixVal, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + - iCol * nPixelSpace, eBufType, nPixelSpace, 1); - } - } - } else { - /* ---- Set pixels ---- */ - for( iLine = 0, ii = 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - - /* Source raster pixels may be obtained with SRCVAL macro */ - dfPixVal = SRCVAL(papoSources[0], eSrcType, ii); - dfPixVal *= dfPixVal; - - GDALCopyWords(&dfPixVal, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + - iCol * nPixelSpace, eBufType, nPixelSpace, 1); - } - } - } - - /* ---- Return success ---- */ - return CE_None; -} /* IntensityPixelFunc */ - - -CPLErr SqrtPixelFunc(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - int iLine, iCol, ii; - double dfPixVal; - - /* ---- 
Init ---- */ - if (nSources != 1) return CE_Failure; - if (GDALDataTypeIsComplex( eSrcType )) return CE_Failure; - - /* ---- Set pixels ---- */ - for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - - /* Source raster pixels may be obtained with SRCVAL macro */; - dfPixVal = sqrt( SRCVAL(papoSources[0], eSrcType, ii) ); - - GDALCopyWords(&dfPixVal, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + - iCol * nPixelSpace, eBufType, nPixelSpace, 1); - } - } - - /* ---- Return success ---- */ - return CE_None; -} /* SqrtPixelFunc */ - - -CPLErr Log10PixelFunc(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - int ii, iLine, iCol; - - /* ---- Init ---- */ - if (nSources != 1) return CE_Failure; - - if (GDALDataTypeIsComplex( eSrcType )) - { - /* complex input datatype */ - double dfReal, dfImag, dfPixVal; - int nOffset = GDALGetDataTypeSize( eSrcType ) / 8 / 2; - void *pReal = papoSources[0]; - void *pImag = ((GByte *)papoSources[0]) + nOffset; - - /* ---- Set pixels ---- */ - for( iLine = 0, ii = 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - - /* Source raster pixels may be obtained with SRCVAL macro */ - dfReal = SRCVAL(pReal, eSrcType, ii); - dfImag = SRCVAL(pImag, eSrcType, ii); - - dfPixVal = log10( dfReal * dfReal + dfImag * dfImag ); - - GDALCopyWords(&dfPixVal, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + - iCol * nPixelSpace, eBufType, nPixelSpace, 1); - } - } - } else { - double dfPixVal; - - /* ---- Set pixels ---- */ - for( iLine = 0, ii = 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - - /* Source raster pixels may be obtained with SRCVAL macro */ - dfPixVal = SRCVAL(papoSources[0], eSrcType, ii); - dfPixVal = log10( abs( dfPixVal ) ); - - GDALCopyWords(&dfPixVal, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * 
iLine + - iCol * nPixelSpace, eBufType, nPixelSpace, 1); - } - } - } - - /* ---- Return success ---- */ - return CE_None; -} /* Log10PixelFunc */ - - -CPLErr PowPixelFuncHelper(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace, - double base, double fact) -{ - int iLine, iCol, ii; - double dfPixVal; - - /* ---- Init ---- */ - if (nSources != 1) return CE_Failure; - if (GDALDataTypeIsComplex( eSrcType )) return CE_Failure; - - /* ---- Set pixels ---- */ - for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - - /* Source raster pixels may be obtained with SRCVAL macro */ - dfPixVal = SRCVAL(papoSources[0], eSrcType, ii); - dfPixVal = pow(base, dfPixVal / fact); - - GDALCopyWords(&dfPixVal, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + - iCol * nPixelSpace, eBufType, nPixelSpace, 1); - } - } - - /* ---- Return success ---- */ - return CE_None; -} /* PowPixelFuncHelper */ - -CPLErr dB2AmpPixelFunc(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - return PowPixelFuncHelper(papoSources, nSources, pData, - nXSize, nYSize, eSrcType, eBufType, - nPixelSpace, nLineSpace, 10., 20.); -} /* dB2AmpPixelFunc */ - - -CPLErr dB2PowPixelFunc(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - return PowPixelFuncHelper(papoSources, nSources, pData, - nXSize, nYSize, eSrcType, eBufType, - nPixelSpace, nLineSpace, 10., 10.); -} /* dB2PowPixelFunc */ - -/************************************************************************/ -/* Nansat pixelfunctions */ -/************************************************************************/ - -CPLErr BetaSigmaToIncidence(void **papoSources, int nSources, void *pData, - 
int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - int ii, iLine, iCol; - double incidence; - double beta0, sigma0; - - /* ---- Init ---- */ - if (nSources != 2) return CE_Failure; - #define PI 3.14159265; - - //printf("%d",eSrcType); - - if (GDALDataTypeIsComplex( eSrcType )) - { - double b0Real, b0Imag; - double s0Real, s0Imag; - void *b0pReal = papoSources[0]; - void *s0pReal = papoSources[1]; - void *b0pImag = ((GByte *)papoSources[0]) - + GDALGetDataTypeSize( eSrcType ) / 8 / 2; - void *s0pImag = ((GByte *)papoSources[1]) - + GDALGetDataTypeSize( eSrcType ) / 8 / 2; - - /* ---- Set pixels ---- */ - for( iLine = 0, ii = 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - - /* Source raster pixels may be obtained with SRCVAL macro */ - b0Real = SRCVAL(b0pReal, eSrcType, ii); - b0Imag = SRCVAL(b0pImag, eSrcType, ii); - s0Real = SRCVAL(s0pReal, eSrcType, ii); - s0Imag = SRCVAL(s0pImag, eSrcType, ii); - - beta0 = b0Real*b0Real + b0Imag*b0Imag; - sigma0 = s0Real*s0Real + s0Imag*s0Imag; - - if (beta0 != 0) incidence = asin(sigma0/beta0)*180/PI - else incidence = -10000; // NB: this is also hard-coded in - // mapper_radarsat2.py, and - // should be the same in other - // mappers where this function - // is needed... 
- GDALCopyWords(&incidence, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, - eBufType, nPixelSpace, 1); - } - } - } else { - - /* ---- Set pixels ---- */ - for( iLine = 0; iLine < nYSize; iLine++ ) - { - for( iCol = 0; iCol < nXSize; iCol++ ) - { - ii = iLine * nXSize + iCol; - /* Source raster pixels may be obtained with SRCVAL macro */ - beta0 = SRCVAL(papoSources[0], eSrcType, ii); - sigma0 = SRCVAL(papoSources[1], eSrcType, ii); - - if (beta0 != 0) incidence = asin(sigma0/beta0)*180/PI - else incidence = -10000; // NB: this is also hard-coded in - // mapper_radarsat2.py, and - // should be the same in other - // mappers where this function - // is needed... - GDALCopyWords(&incidence, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, - eBufType, nPixelSpace, 1); - } - } - } - - /* ---- Return success ---- */ - return CE_None; -} - - -CPLErr UVToMagnitude(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - int ii, iLine, iCol; - double magnitude; - double u, v; - - /* ---- Init ---- */ - if (nSources != 2) return CE_Failure; - - /* ---- Set pixels ---- */ - for( iLine = 0; iLine < nYSize; iLine++ ) - { - for( iCol = 0; iCol < nXSize; iCol++ ) - { - ii = iLine * nXSize + iCol; - /* Source raster pixels may be obtained with SRCVAL macro */ - u = SRCVAL(papoSources[0], eSrcType, ii); - v = SRCVAL(papoSources[1], eSrcType, ii); - - magnitude = sqrt(u*u + v*v); - - GDALCopyWords(&magnitude, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, - eBufType, nPixelSpace, 1); - } - } - - /* ---- Return success ---- */ -return CE_None; -} - - - -CPLErr Sigma0HHBetaToSigma0VV(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - int ii, iLine, iCol; - double sigma0HH, beta0, 
incidence, factor, sigma0VV; - - /* ---- Init ---- */ - if (nSources != 2) return CE_Failure; - /*fprintf("nSources: %d\n", nSources);*/ - - /* ---- Set pixels ---- */ - for( iLine = 0; iLine < nYSize; iLine++ ) - { - for( iCol = 0; iCol < nXSize; iCol++ ) - { - ii = iLine * nXSize + iCol; - /* Source raster pixels may be obtained with SRCVAL macro */ - sigma0HH = SRCVAL(papoSources[0], eSrcType, ii); - beta0 = SRCVAL(papoSources[1], eSrcType, ii); - - /* get incidence angle first */ - if (beta0 != 0){ - incidence = asin(sigma0HH/beta0); - } else { - incidence = 0; - } - - /* Polarisation ratio from Thompson et al. with alpha=1 */ - factor = pow( (1 + 2 * pow(tan(incidence), 2)) / (1 + 1 * pow(tan(incidence), 2)), 2); - sigma0VV = sigma0HH * factor; - - GDALCopyWords(&sigma0VV, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, - eBufType, nPixelSpace, 1); - } - } - - /* ---- Return success ---- */ - return CE_None; -} - - -CPLErr RawcountsToSigma0_CosmoSkymed_SBI(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - - int ii, iLine, iCol; - /* int iReal, iImag; */ - double imPower, real, imag; - - /* ---- Init ---- */ - if (nSources != 2) return CE_Failure; - - /* ---- Set pixels ---- */ - for( iLine = 0; iLine < nYSize; iLine++ ){ - for( iCol = 0; iCol < nXSize; iCol++ ){ - ii = iLine * nXSize + iCol; - /* Source raster pixels may be obtained with SRCVAL macro */ - real = SRCVAL(papoSources[0], eSrcType, ii); - imag = SRCVAL(papoSources[1], eSrcType, ii); - - /*printf("%d",iReal); OK!*/ - - /*real = (double) iReal;*/ - /*imag = (double) iImag;*/ - - /*printf("%.1f",imag); OK!*/ - - imPower = pow(real,2.0) + pow(imag,2.0); - /*printf("%.1f",imPower); */ - - GDALCopyWords(&imPower, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, - eBufType, nPixelSpace, 1); - - } - } - - /* ---- Return success ---- */ - 
return CE_None; - -} - -CPLErr RawcountsToSigma0_CosmoSkymed_QLK(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - - int ii, iLine, iCol; - double raw_counts, imPower; - - /* ---- Init ---- */ - if (nSources != 1) return CE_Failure; - - /* ---- Set pixels ---- */ - for( iLine = 0; iLine < nYSize; iLine++ ) - { - for( iCol = 0; iCol < nXSize; iCol++ ) - { - ii = iLine * nXSize + iCol; - /* Source raster pixels may be obtained with SRCVAL macro */ - raw_counts = SRCVAL(papoSources[0], eSrcType, ii); - imPower = pow(raw_counts,2.); - - GDALCopyWords(&imPower, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, - eBufType, nPixelSpace, 1); - } - } - - /* ---- Return success ---- */ - return CE_None; - -} - - -CPLErr ComplexData(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - int ii, iLine, iCol; - double adfPixVal[2]; - void *pReal = papoSources[0]; - void *pImag = papoSources[1]; - - for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { - for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { - - /* Source raster pixels may be obtained with SRCVAL macro */ - adfPixVal[0] = SRCVAL(pReal, eSrcType, ii); - adfPixVal[1] = SRCVAL(pImag, eSrcType, ii); - - GDALCopyWords(&adfPixVal, GDT_CFloat64, 0, - ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, - eBufType, nPixelSpace, 1); - } - } - - /* ---- Return success ---- */ -return CE_None; -} - - -/************************************************************************/ -/* Convert Rrs to Rrsw */ -/************************************************************************/ -/* scientifc function */ -double NormReflectanceToRemSensReflectanceFunction(double *b){ - return b[0] / (0.52 + 1.7 * b[0]); -} - -double RawcountsIncidenceToSigma0Function(double *b){ - double pi = 3.14159265; - 
return (pow(b[0], 2.0) * sin(b[1] * pi / 180.0)); -} - -double Sentinel1CalibrationFunction(double *b){ - - // With noise removal -- I am not sure if the noise (b[2]) should be - // squared or not but have sent an email to esa.. - //return ( pow(b[1],2.0) - pow(b[2],2.0) ) / pow(b[0], 2.0); - // Without noise removal - return pow(b[1],2.0) / pow(b[0], 2.0); - -} - -double Sigma0HHToSigma0VVFunction(double *b){ - double pi = 3.14159265; - double s0hh, factor; - s0hh = (pow(b[0], 2.0) * sin(b[1] * pi / 180.0)); - /* Polarisation ratio from Thompson et al. with alpha=1 */ - factor = pow( (1 + 2 * pow(tan(b[1]*pi/180.0), 2)) / (1 + 1 * pow(tan(b[1]*pi/180.0), 2)), 2); - return s0hh * factor; -} - -double Sentinel1Sigma0HHToSigma0VVFunction( double *b ){ - - double s0hh, s0vv; - double bcal[3]; - double s0hh2s0vv[2]; - - bcal[0] = b[0]; // sigmaNought LUT - bcal[1] = b[2]; // DN - bcal[2] = b[3]; // noise - s0hh = Sentinel1CalibrationFunction(bcal); - - s0hh2s0vv[0] = s0hh; - s0hh2s0vv[1] = b[1]; - - s0vv = Sigma0HHToSigma0VVFunction(s0hh2s0vv); - - return s0vv; - -} - -double Sigma0NormalizedIceFunction(double *b){ - double pi = 3.14159265; - double sigma0 = (pow(b[0], 2.0) * sin(b[1] * pi / 180.0)); - return sigma0 * pow((tan(b[1] * pi / 180.0) / tan(31.0 * pi / 180.0)), 1.5); -} - -double Sigma0VVNormalizedWaterFunction(double *b){ - double pi = 3.14159265; - double sigma0 = (pow(b[0], 2.0) * sin(b[1] * pi / 180.0)); - return sigma0 * pow((sin(b[1] * pi / 180.0) / sin(31.0 * pi / 180.0)), 4.0); -} - -double Sigma0HHNormalizedWaterFunction(double *b){ - double pi = 3.14159265; - double sigma0 = (pow(b[0], 2.0) * sin(b[1] * pi / 180.0)); - return sigma0 * pow((tan(b[1] * pi / 180.0) / tan(31.0 * pi / 180.0)), 4.0); -} - -double UVToDirectionFromFunction(double *b){ - /* Convention 0-360 degrees positive clockwise from north*/ - double pi = 3.14159265; - //return (b[0]==9999 || b[1]==9999) ? 
9999 : 180.0 - atan2(-b[0],b[1])*180./pi; - return 180.0 - atan2(-b[0],b[1])*180./pi; -} - -double UVToDirectionToFunction(double *b){ - /* Convention 0-360 degrees positive clockwise from north*/ - double pi = 3.14159265; - return 360.0 - atan2(-b[0],b[1])*180./pi; - /* - Below code is hirlam specific - we don't know if the invalid data is - actually 9999. One option is to make mapper specific pixelfunctions - but for now only return the direction as if all data was good. - */ - //return (b[0]==9999 || b[1]==9999) ? 9999 : 360.0 - atan2(-b[0],b[1])*180./pi; -} - -/* pixel function */ -CPLErr UVToDirectionTo(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace){ - - GenericPixelFunction(UVToDirectionToFunction, - papoSources, nSources, pData, - nXSize, nYSize, eSrcType, eBufType, - nPixelSpace, nLineSpace); - - return CE_None; -} - -CPLErr UVToDirectionFrom(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace){ - - GenericPixelFunction(UVToDirectionFromFunction, - papoSources, nSources, pData, - nXSize, nYSize, eSrcType, eBufType, - nPixelSpace, nLineSpace); - - return CE_None; -} - - -CPLErr NormReflectanceToRemSensReflectance(void **papoSources, int nSources, void *pData, - int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace){ - - GenericPixelFunction(NormReflectanceToRemSensReflectanceFunction, - papoSources, nSources, pData, - nXSize, nYSize, eSrcType, eBufType, - nPixelSpace, nLineSpace); - - return CE_None; -} - -CPLErr Sentinel1Calibration(void **papoSources, - int nSources, void *pData, int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace){ - - GenericPixelFunction(Sentinel1CalibrationFunction, - papoSources, nSources, pData, - nXSize, nYSize, 
eSrcType, eBufType, - nPixelSpace, nLineSpace); - - return CE_None; -} - -CPLErr Sentinel1Sigma0HHToSigma0VV(void **papoSources, - int nSources, void *pData, int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace){ - - GenericPixelFunction(Sentinel1Sigma0HHToSigma0VVFunction, - papoSources, nSources, pData, - nXSize, nYSize, eSrcType, eBufType, - nPixelSpace, nLineSpace); - - return CE_None; -} - -CPLErr RawcountsIncidenceToSigma0(void **papoSources, - int nSources, void *pData, int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace){ - - GenericPixelFunction(RawcountsIncidenceToSigma0Function, - papoSources, nSources, pData, - nXSize, nYSize, eSrcType, eBufType, - nPixelSpace, nLineSpace); - - return CE_None; -} - -CPLErr Sigma0HHToSigma0VV(void **papoSources, - int nSources, void *pData, int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace){ - // Works for ASAR! 
- GenericPixelFunction(Sigma0HHToSigma0VVFunction, - papoSources, nSources, pData, - nXSize, nYSize, eSrcType, eBufType, - nPixelSpace, nLineSpace); - - return CE_None; -} - -CPLErr Sigma0NormalizedIce(void **papoSources, - int nSources, void *pData, int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace){ - - GenericPixelFunction(Sigma0NormalizedIceFunction, - papoSources, nSources, pData, - nXSize, nYSize, eSrcType, eBufType, - nPixelSpace, nLineSpace); - - return CE_None; -} - -CPLErr Sigma0VVNormalizedWater(void **papoSources, - int nSources, void *pData, int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace){ - - GenericPixelFunction(Sigma0VVNormalizedWaterFunction, - papoSources, nSources, pData, - nXSize, nYSize, eSrcType, eBufType, - nPixelSpace, nLineSpace); - - return CE_None; -} - -CPLErr Sigma0HHNormalizedWater(void **papoSources, - int nSources, void *pData, int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace){ - - GenericPixelFunction(Sigma0HHNormalizedWaterFunction, - papoSources, nSources, pData, - nXSize, nYSize, eSrcType, eBufType, - nPixelSpace, nLineSpace); - - return CE_None; -} - - - -/************************************************************************/ -/* Generic Pixel Function is called from a pixel function and calls - * corresponding scientific function */ -/************************************************************************/ - -// all data (band) size must be same and full size of bands (XSize x YSize). 
-void GenericPixelFunction(double f(double*), void **papoSources, - int nSources, void *pData, int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - int ii, iLine, iCol, iSrc; - double *bVal, result; - bVal = malloc(nSources * sizeof (double)); - - /* ---- Set pixels ---- */ - for( iLine = 0; iLine < nYSize; iLine++ ){ - for( iCol = 0; iCol < nXSize; iCol++ ){ - ii = iLine * nXSize + iCol; - /* Source raster pixels may be obtained with SRCVAL macro */ - for (iSrc = 0; iSrc < nSources; iSrc ++){ - bVal[iSrc] = SRCVAL(papoSources[iSrc], eSrcType, ii); - //if (iLine==0 && iCol==0){ - // printf("%d ",iSrc); - // printf("%.4f\n",bVal[iSrc]); - //} - } - - result = f(bVal); - - GDALCopyWords(&result, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, - eBufType, nPixelSpace, 1); - } - } -} - -// From the 1st to (N-1)th bands are full size (XSize x YSize), -// and the last band is a one-pixel band (1 x 1). -void GenericPixelFunctionPixel(double f(double*), void **papoSources, - int nSources, void *pData, int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - int iLine, iCol, iSrc; - double *bVal, result; - bVal = malloc(nSources * sizeof (double)); - - /* ---- Set pixels ---- */ - /* Set the first value form one-pixel band */ - bVal[0] = SRCVAL(papoSources[nSources-1], eSrcType, 0); - for( iLine = 0; iLine < nYSize; iLine++ ) - { - for( iCol = 0; iCol < nXSize; iCol++ ) - { - for (iSrc = 1; iSrc < nSources; iSrc ++) - /* Source raster pixels may be obtained with SRCVAL macro */ - bVal[iSrc] = SRCVAL(papoSources[iSrc-1], eSrcType, iLine * nXSize + iCol); - - result = f(bVal); - - GDALCopyWords(&result, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, - eBufType, nPixelSpace, 1); - } - } -} - -// From the 1st to (N-1)th bands are full size (XSize x YSize), -// and the last band is a line band (XSize x 1). 
-void GenericPixelFunctionLine(double f(double*), void **papoSources, - int nSources, void *pData, int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - int iLine, iCol, iSrc; - double *bVal, result; - bVal = malloc(nSources * sizeof (double)); - - /* ---- Set pixels ---- */ - for( iLine = 0; iLine < nYSize; iLine++ ) - { - for( iCol = 0; iCol < nXSize; iCol++ ) - { - /* Source raster pixels may be obtained with SRCVAL macro */ - bVal[0] = SRCVAL(papoSources[nSources-1], eSrcType, iCol); - - for (iSrc = 1; iSrc < nSources; iSrc ++) - /* Source raster pixels may be obtained with SRCVAL macro */ - bVal[iSrc] = SRCVAL(papoSources[iSrc-1], eSrcType, iLine * nXSize + iCol); - - result = f(bVal); - - GDALCopyWords(&result, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, - eBufType, nPixelSpace, 1); - } - } -} - -// From the 1st to (N-2)th bands are full size (XSize x YSize), -// the last 2nd band is a line band (XSize x 1) and the last is one pixel band. 
-void GenericPixelFunctionPixelLine(double f(double*), void **papoSources, - int nSources, void *pData, int nXSize, int nYSize, - GDALDataType eSrcType, GDALDataType eBufType, - int nPixelSpace, int nLineSpace) -{ - int iLine, iCol, iSrc; - double *bVal, result; - bVal = malloc(nSources * sizeof (double)); - - /* ---- Set pixels ---- */ - bVal[0] = SRCVAL(papoSources[nSources-1], eSrcType, 0); - for( iLine = 0; iLine < nYSize; iLine++ ) - { - for( iCol = 0; iCol < nXSize; iCol++ ) - { - bVal[1] = SRCVAL(papoSources[nSources-2], eSrcType, iCol); - - for(iSrc = 2; iSrc < nSources; iSrc++ ) - bVal[iSrc] = SRCVAL(papoSources[iSrc-2], eSrcType, iLine * nXSize + iCol); - - result = f(bVal); - - GDALCopyWords(&result, GDT_Float64, 0, - ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, - eBufType, nPixelSpace, 1); - } - } -} - - - -/************************************************************************/ -/* GDALRegisterDefaultPixelFunc() */ -/************************************************************************/ - -/** - * This adds a default set of pixel functions to the global list of - * available pixel functions for derived bands: - * - * - "real": extract real part from a single raster band (just a copy if the - * input is non-complex) - * - "imag": extract imaginary part from a single raster band (0 for - * non-complex) - * - "mod": extract module from a single raster band (real or complex) - * - "phase": extract phase from a single raster band (0 for non-complex) - * - "conj": computes the complex conjugate of a single raster band (just a - * copy if the input is non-complex) - * - "sum": sum 2 or more raster bands - * - "diff": computes the difference between 2 raster bands (b1 - b2) - * - "mul": multilpy 2 or more raster bands - * - "cmul": multiply the first band for the complex comjugate of the second - * - "inv": inverse (1./x). 
Note: no check is performed on zero division - * - "intensity": computes the intensity Re(x*conj(x)) of a single raster band - * (real or complex) - * - "sqrt": perform the square root of a single raster band (real only) - * - "log10": compute the logarithm (base 10) of the abs of a single raster - * band (real or complex): log10( abs( x ) ) - * - "dB2amp": perform scale conversion from logarithmic to linear - * (amplitude) (i.e. 10 ^ ( x / 20 ) ) of a single raster - * band (real only) - * - "dB2pow": perform scale conversion from logarithmic to linear - * (power) (i.e. 10 ^ ( x / 10 ) ) of a single raster - * band (real only) - * - * @see GDALAddDerivedBandPixelFunc - * - * @return CE_None, invalid (NULL) parameters are currently ignored. - */ -CPLErr CPL_STDCALL GDALRegisterDefaultPixelFunc() -{ - GDALAddDerivedBandPixelFunc("real", RealPixelFunc); - GDALAddDerivedBandPixelFunc("imag", ImagPixelFunc); - GDALAddDerivedBandPixelFunc("mod", ModulePixelFunc); - GDALAddDerivedBandPixelFunc("phase", PhasePixelFunc); - GDALAddDerivedBandPixelFunc("conj", ConjPixelFunc); - GDALAddDerivedBandPixelFunc("sum", SumPixelFunc); - GDALAddDerivedBandPixelFunc("diff", DiffPixelFunc); - GDALAddDerivedBandPixelFunc("mul", MulPixelFunc); - GDALAddDerivedBandPixelFunc("cmul", CMulPixelFunc); - GDALAddDerivedBandPixelFunc("inv", InvPixelFunc); - GDALAddDerivedBandPixelFunc("intensity", IntensityPixelFunc); - GDALAddDerivedBandPixelFunc("sqrt", SqrtPixelFunc); - GDALAddDerivedBandPixelFunc("log10", Log10PixelFunc); - GDALAddDerivedBandPixelFunc("dB2amp", dB2AmpPixelFunc); - GDALAddDerivedBandPixelFunc("dB2pow", dB2PowPixelFunc); - - GDALAddDerivedBandPixelFunc("BetaSigmaToIncidence", BetaSigmaToIncidence); - GDALAddDerivedBandPixelFunc("UVToMagnitude", UVToMagnitude); - GDALAddDerivedBandPixelFunc("UVToDirectionTo", UVToDirectionTo); - GDALAddDerivedBandPixelFunc("UVToDirectionFrom", UVToDirectionFrom); - GDALAddDerivedBandPixelFunc("Sigma0HHBetaToSigma0VV", Sigma0HHBetaToSigma0VV); 
//Radarsat-2 - GDALAddDerivedBandPixelFunc("Sigma0HHToSigma0VV", Sigma0HHToSigma0VV); // ASAR - GDALAddDerivedBandPixelFunc("RawcountsIncidenceToSigma0", RawcountsIncidenceToSigma0); - GDALAddDerivedBandPixelFunc("RawcountsToSigma0_CosmoSkymed_QLK", RawcountsToSigma0_CosmoSkymed_QLK); - GDALAddDerivedBandPixelFunc("RawcountsToSigma0_CosmoSkymed_SBI", RawcountsToSigma0_CosmoSkymed_SBI); - GDALAddDerivedBandPixelFunc("ComplexData", ComplexData); - GDALAddDerivedBandPixelFunc("NormReflectanceToRemSensReflectance", NormReflectanceToRemSensReflectance); - GDALAddDerivedBandPixelFunc("Sigma0NormalizedIce", Sigma0NormalizedIce); - GDALAddDerivedBandPixelFunc("Sigma0HHNormalizedWater", Sigma0HHNormalizedWater); - GDALAddDerivedBandPixelFunc("Sigma0VVNormalizedWater", Sigma0VVNormalizedWater); - GDALAddDerivedBandPixelFunc("Sentinel1Calibration", Sentinel1Calibration); - GDALAddDerivedBandPixelFunc("Sentinel1Sigma0HHToSigma0VV", Sentinel1Sigma0HHToSigma0VV); - - return CE_None; -} - +/****************************************************************************** + * + * Project: GDAL + * Purpose: Implementation of a set of GDALDerivedPixelFunc(s) to be used + * with source raster band of virtual GDAL datasets. + * Author: Antonio Valentino + * + ****************************************************************************** + * Copyright (c) 2008-2011 Antonio Valentino + * + * Permission is hereby granted, free of charge, to any person obtaining a + * copy of this software and associated documentation files (the "Software"), + * to deal in the Software without restriction, including without limitation + * the rights to use, copy, modify, merge, publish, distribute, sublicense, + * and/or sell copies of the Software, and to permit persons to whom the + * Software is furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included + * in all copies or substantial portions of the Software. 
+ * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS + * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL + * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER + * DEALINGS IN THE SOFTWARE. + *****************************************************************************/ + +#include +#include +#include +#include + +void GenericPixelFunction(double f(double*), void **papoSources, + int nSources, void *pData, int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace); + +CPLErr RealPixelFunc(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int iLine, nPixelSpaceSrc, nLineSpaceSrc; + + /* ---- Init ---- */ + if (nSources != 1) return CE_Failure; + + nPixelSpaceSrc = GDALGetDataTypeSize( eSrcType ) / 8; + nLineSpaceSrc = nPixelSpaceSrc * nXSize; + + /* ---- Set pixels ---- */ + for( iLine = 0; iLine < nYSize; ++iLine ) { + GDALCopyWords(((GByte *)papoSources[0]) + nLineSpaceSrc * iLine, + eSrcType, nPixelSpaceSrc, + ((GByte *)pData) + nLineSpace * iLine, + eBufType, nPixelSpace, nXSize); + } + + /* ---- Return success ---- */ + return CE_None; +} /* RealPixelFunc */ + + +CPLErr ImagPixelFunc(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int iLine; + + /* ---- Init ---- */ + if (nSources != 1) return CE_Failure; + + if (GDALDataTypeIsComplex( eSrcType )) + { + int nPixelSpaceSrc = GDALGetDataTypeSize( eSrcType ) / 8; + int nLineSpaceSrc = nPixelSpaceSrc * nXSize; + + void* pImag = ((GByte 
*)papoSources[0]) + + GDALGetDataTypeSize( eSrcType ) / 8 / 2; + + /* ---- Set pixels ---- */ + for( iLine = 0; iLine < nYSize; ++iLine ) { + GDALCopyWords(((GByte *)pImag) + nLineSpaceSrc * iLine, + eSrcType, nPixelSpaceSrc, + ((GByte *)pData) + nLineSpace * iLine, + eBufType, nPixelSpace, nXSize); + } + } else { + double dfImag = 0; + + /* ---- Set pixels ---- */ + for( iLine = 0; iLine < nYSize; ++iLine ) { + /* always copy from the same location */ + GDALCopyWords(&dfImag, eSrcType, 0, + ((GByte *)pData) + nLineSpace * iLine, + eBufType, nPixelSpace, nXSize); + } + } + + /* ---- Return success ---- */ + return CE_None; +} /* ImagPixelFunc */ + + +CPLErr ModulePixelFunc(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int ii, iLine, iCol; + double dfPixVal; + + /* ---- Init ---- */ + if (nSources != 1) return CE_Failure; + + if (GDALDataTypeIsComplex( eSrcType )) + { + double dfReal, dfImag; + void *pReal = papoSources[0]; + void *pImag = ((GByte *)papoSources[0]) + + GDALGetDataTypeSize( eSrcType ) / 8 / 2; + + /* ---- Set pixels ---- */ + for( iLine = 0, ii = 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + /* Source raster pixels may be obtained with SRCVAL macro */ + dfReal = SRCVAL(pReal, eSrcType, ii); + dfImag = SRCVAL(pImag, eSrcType, ii); + + dfPixVal = sqrt( dfReal * dfReal + dfImag * dfImag ); + + GDALCopyWords(&dfPixVal, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + } else { + /* ---- Set pixels ---- */ + for( iLine = 0, ii = 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + /* Source raster pixels may be obtained with SRCVAL macro */ + dfPixVal = abs(SRCVAL(papoSources[0], eSrcType, ii)); + + GDALCopyWords(&dfPixVal, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, 
eBufType, nPixelSpace, 1); + } + } + } + + /* ---- Return success ---- */ + return CE_None; +} /* ModulePixelFunc */ + + +CPLErr PhasePixelFunc(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int ii, iLine, iCol; + double dfPixVal, dfReal; + + /* ---- Init ---- */ + if (nSources != 1) return CE_Failure; + + if (GDALDataTypeIsComplex( eSrcType )) + { + double dfImag; + void *pReal = papoSources[0]; + void *pImag = ((GByte *)papoSources[0]) + + GDALGetDataTypeSize( eSrcType ) / 8 / 2; + + /* ---- Set pixels ---- */ + for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + /* Source raster pixels may be obtained with SRCVAL macro */ + dfReal = SRCVAL(pReal, eSrcType, ii); + dfImag = SRCVAL(pImag, eSrcType, ii); + + dfPixVal = atan2(dfImag, dfReal); + + GDALCopyWords(&dfPixVal, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + } else { + /* ---- Set pixels ---- */ + /* + for( iLine = 0; iLine < nYSize; ++iLine ) { + / * always copy from the same location * / + GDALCopyWords(&dfImag, eSrcType, 0, + ((GByte *)pData) + nLineSpace * iLine, + eBufType, nPixelSpace, nXSize); + } + */ + /* ---- Set pixels ---- */ + double pi = atan2(0, -1); + for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + void *pReal = papoSources[0]; + + /* Source raster pixels may be obtained with SRCVAL macro */ + dfReal = SRCVAL(pReal, eSrcType, ii); + dfPixVal = (dfReal < 0) ? 
pi : 0; + + GDALCopyWords(&dfPixVal, GDT_Float64, dfPixVal, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + } + + /* ---- Return success ---- */ + return CE_None; +} /* PhasePixelFunc */ + + +CPLErr ConjPixelFunc(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + /* ---- Init ---- */ + if (nSources != 1) return CE_Failure; + + if (GDALDataTypeIsComplex( eSrcType ) && GDALDataTypeIsComplex( eBufType )) + { + int iLine, iCol, ii; + double adfPixVal[2]; + int nOffset = GDALGetDataTypeSize( eSrcType ) / 8 / 2; + void *pReal = papoSources[0]; + void *pImag = ((GByte *)papoSources[0]) + nOffset; + + /* ---- Set pixels ---- */ + for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + /* Source raster pixels may be obtained with SRCVAL macro */ + adfPixVal[0] = +SRCVAL(pReal, eSrcType, ii); /* re */ + adfPixVal[1] = -SRCVAL(pImag, eSrcType, ii); /* im */ + + GDALCopyWords(adfPixVal, GDT_CFloat64, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + } else { + /* no complex data type */ + return RealPixelFunc(papoSources, nSources, pData, nXSize, nYSize, + eSrcType, eBufType, nPixelSpace, nLineSpace); + } + + /* ---- Return success ---- */ + return CE_None; +} /* ConjPixelFunc */ + + +CPLErr SumPixelFunc(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int iLine, iCol, ii, iSrc; + + /* ---- Init ---- */ + if (nSources < 2) return CE_Failure; + + /* ---- Set pixels ---- */ + if (GDALDataTypeIsComplex( eSrcType )) + { + double adfSum[2]; + void *pReal, *pImag; + int nOffset = GDALGetDataTypeSize( eSrcType ) / 8 / 2; + + /* ---- Set pixels ---- */ + for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { + for( 
iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + adfSum[0] = 0; + adfSum[1] = 0; + + for( iSrc = 0; iSrc < nSources; ++iSrc ) { + pReal = papoSources[iSrc]; + pImag = ((GByte *)pReal) + nOffset; + + /* Source raster pixels may be obtained with SRCVAL macro */ + adfSum[0] += SRCVAL(pReal, eSrcType, ii); + adfSum[1] += SRCVAL(pImag, eSrcType, ii); + } + + GDALCopyWords(adfSum, GDT_CFloat64, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + } else { + /* non complex */ + double dfSum; + + /* ---- Set pixels ---- */ + for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + dfSum = 0; + + for( iSrc = 0; iSrc < nSources; ++iSrc ) { + /* Source raster pixels may be obtained with SRCVAL macro */ + dfSum += SRCVAL(papoSources[iSrc], eSrcType, ii); + } + + GDALCopyWords(&dfSum, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + } + + /* ---- Return success ---- */ + return CE_None; +} /* SumPixelFunc */ + + +CPLErr DiffPixelFunc(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int ii, iLine, iCol; + + /* ---- Init ---- */ + if (nSources != 2) return CE_Failure; + + if (GDALDataTypeIsComplex( eSrcType )) + { + + double adfPixVal[2]; + int nOffset = GDALGetDataTypeSize( eSrcType ) / 8 / 2; + void *pReal0 = papoSources[0]; + void *pImag0 = ((GByte *)papoSources[0]) + nOffset; + void *pReal1 = papoSources[1]; + void *pImag1 = ((GByte *)papoSources[1]) + nOffset; + + /* ---- Set pixels ---- */ + for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + /* Source raster pixels may be obtained with SRCVAL macro */ + adfPixVal[0] = SRCVAL(pReal0, eSrcType, ii) + - SRCVAL(pReal1, eSrcType, ii); + adfPixVal[1] = SRCVAL(pImag0, eSrcType, ii) + - SRCVAL(pImag1, eSrcType, 
ii); + + GDALCopyWords(adfPixVal, GDT_CFloat64, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + } else { + /* non complex */ + double dfPixVal; + + /* ---- Set pixels ---- */ + for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + /* Source raster pixels may be obtained with SRCVAL macro */ + dfPixVal = SRCVAL(papoSources[0], eSrcType, ii) + - SRCVAL(papoSources[1], eSrcType, ii); + + GDALCopyWords(&dfPixVal, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + } + + /* ---- Return success ---- */ + return CE_None; +} /* DiffPixelFunc */ + + +CPLErr MulPixelFunc(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int iLine, iCol, ii, iSrc; + + /* ---- Init ---- */ + if (nSources < 2) return CE_Failure; + + /* ---- Set pixels ---- */ + if (GDALDataTypeIsComplex( eSrcType )) + { + double adfPixVal[2], dfOldR, dfOldI, dfNewR, dfNewI; + void *pReal, *pImag; + int nOffset = GDALGetDataTypeSize( eSrcType ) / 8 / 2; + + /* ---- Set pixels ---- */ + for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + adfPixVal[0] = 1.; + adfPixVal[1] = 0.; + + for( iSrc = 0; iSrc < nSources; ++iSrc ) { + pReal = papoSources[iSrc]; + pImag = ((GByte *)pReal) + nOffset; + + dfOldR = adfPixVal[0]; + dfOldI = adfPixVal[1]; + + /* Source raster pixels may be obtained with SRCVAL macro */ + dfNewR = SRCVAL(pReal, eSrcType, ii); + dfNewI = SRCVAL(pImag, eSrcType, ii); + + adfPixVal[0] = dfOldR * dfNewR - dfOldI * dfNewI; + adfPixVal[1] = dfOldR * dfNewI + dfOldI * dfNewR; + } + + GDALCopyWords(adfPixVal, GDT_CFloat64, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + } else { + /* non complex */ + double dfPixVal; + + /* 
---- Set pixels ---- */ + for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + dfPixVal = 1; + + for( iSrc = 0; iSrc < nSources; ++iSrc ) { + /* Source raster pixels may be obtained with SRCVAL macro */ + dfPixVal *= SRCVAL(papoSources[iSrc], eSrcType, ii); + } + + GDALCopyWords(&dfPixVal, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + } + + /* ---- Return success ---- */ + return CE_None; +} /* MulPixelFunc */ + + +CPLErr CMulPixelFunc(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int ii, iLine, iCol; + + /* ---- Init ---- */ + if (nSources != 2) return CE_Failure; + + /* ---- Set pixels ---- */ + if (GDALDataTypeIsComplex( eSrcType )) + { + double adfPixVal[2], dfReal0, dfImag0, dfReal1, dfImag1; + + int nOffset = GDALGetDataTypeSize( eSrcType ) / 8 / 2; + void *pReal0 = papoSources[0]; + void *pImag0 = ((GByte *)papoSources[0]) + nOffset; + void *pReal1 = papoSources[1]; + void *pImag1 = ((GByte *)papoSources[1]) + nOffset; + + for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + /* Source raster pixels may be obtained with SRCVAL macro */ + dfReal0 = SRCVAL(pReal0, eSrcType, ii); + dfReal1 = SRCVAL(pReal1, eSrcType, ii); + dfImag0 = SRCVAL(pImag0, eSrcType, ii); + dfImag1 = SRCVAL(pImag1, eSrcType, ii); + adfPixVal[0] = dfReal0 * dfReal1 + dfImag0 * dfImag1; + adfPixVal[1] = dfReal1 * dfImag0 - dfReal0 * dfImag1; + + GDALCopyWords(adfPixVal, GDT_CFloat64, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + } else { + /* non complex */ + double adfPixVal[2] = {0, 0}; + + for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + /* Source raster pixels may be obtained with SRCVAL macro 
*/ + adfPixVal[0] = SRCVAL(papoSources[0], eSrcType, ii) + * SRCVAL(papoSources[1], eSrcType, ii); + + GDALCopyWords(adfPixVal, GDT_CFloat64, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + } + + /* ---- Return success ---- */ + return CE_None; +} /* CMulPixelFunc */ + + +CPLErr InvPixelFunc(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int iLine, iCol, ii; + + /* ---- Init ---- */ + if (nSources != 1) return CE_Failure; + + /* ---- Set pixels ---- */ + if (GDALDataTypeIsComplex( eSrcType )) + { + double adfPixVal[2], dfReal, dfImag, dfAux; + + int nOffset = GDALGetDataTypeSize( eSrcType ) / 8 / 2; + void *pReal = papoSources[0]; + void *pImag = ((GByte *)papoSources[0]) + nOffset; + + for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + /* Source raster pixels may be obtained with SRCVAL macro */ + dfReal = SRCVAL(pReal, eSrcType, ii); + dfImag = SRCVAL(pImag, eSrcType, ii); + dfAux = dfReal * dfReal + dfImag * dfImag; + adfPixVal[0] = +dfReal / dfAux; + adfPixVal[1] = -dfImag / dfAux; + + GDALCopyWords(adfPixVal, GDT_CFloat64, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + } else { + /* non complex */ + double dfPixVal; + + /* ---- Set pixels ---- */ + for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + /* Source raster pixels may be obtained with SRCVAL macro */ + dfPixVal = 1. 
/ SRCVAL(papoSources[0], eSrcType, ii); + + GDALCopyWords(&dfPixVal, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + } + + /* ---- Return success ---- */ + return CE_None; +} /* InvPixelFunc */ + + +CPLErr IntensityPixelFunc(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int ii, iLine, iCol; + double dfPixVal; + + /* ---- Init ---- */ + if (nSources != 1) return CE_Failure; + + if (GDALDataTypeIsComplex( eSrcType )) + { + double dfReal, dfImag; + int nOffset = GDALGetDataTypeSize( eSrcType ) / 8 / 2; + void *pReal = papoSources[0]; + void *pImag = ((GByte *)papoSources[0]) + nOffset; + + /* ---- Set pixels ---- */ + for( iLine = 0, ii = 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + /* Source raster pixels may be obtained with SRCVAL macro */ + dfReal = SRCVAL(pReal, eSrcType, ii); + dfImag = SRCVAL(pImag, eSrcType, ii); + + dfPixVal = dfReal * dfReal + dfImag * dfImag; + + GDALCopyWords(&dfPixVal, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + } else { + /* ---- Set pixels ---- */ + for( iLine = 0, ii = 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + /* Source raster pixels may be obtained with SRCVAL macro */ + dfPixVal = SRCVAL(papoSources[0], eSrcType, ii); + dfPixVal *= dfPixVal; + + GDALCopyWords(&dfPixVal, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + } + + /* ---- Return success ---- */ + return CE_None; +} /* IntensityPixelFunc */ + + +CPLErr SqrtPixelFunc(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int iLine, iCol, ii; + double dfPixVal; + + /* ---- 
Init ---- */ + if (nSources != 1) return CE_Failure; + if (GDALDataTypeIsComplex( eSrcType )) return CE_Failure; + + /* ---- Set pixels ---- */ + for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + /* Source raster pixels may be obtained with SRCVAL macro */; + dfPixVal = sqrt( SRCVAL(papoSources[0], eSrcType, ii) ); + + GDALCopyWords(&dfPixVal, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + + /* ---- Return success ---- */ + return CE_None; +} /* SqrtPixelFunc */ + + +CPLErr Log10PixelFunc(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int ii, iLine, iCol; + + /* ---- Init ---- */ + if (nSources != 1) return CE_Failure; + + if (GDALDataTypeIsComplex( eSrcType )) + { + /* complex input datatype */ + double dfReal, dfImag, dfPixVal; + int nOffset = GDALGetDataTypeSize( eSrcType ) / 8 / 2; + void *pReal = papoSources[0]; + void *pImag = ((GByte *)papoSources[0]) + nOffset; + + /* ---- Set pixels ---- */ + for( iLine = 0, ii = 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + /* Source raster pixels may be obtained with SRCVAL macro */ + dfReal = SRCVAL(pReal, eSrcType, ii); + dfImag = SRCVAL(pImag, eSrcType, ii); + + dfPixVal = log10( dfReal * dfReal + dfImag * dfImag ); + + GDALCopyWords(&dfPixVal, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + } else { + double dfPixVal; + + /* ---- Set pixels ---- */ + for( iLine = 0, ii = 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + /* Source raster pixels may be obtained with SRCVAL macro */ + dfPixVal = SRCVAL(papoSources[0], eSrcType, ii); + dfPixVal = log10( fabs( dfPixVal ) ); + + GDALCopyWords(&dfPixVal, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * 
iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + } + + /* ---- Return success ---- */ + return CE_None; +} /* Log10PixelFunc */ + + +CPLErr PowPixelFuncHelper(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace, + double base, double fact) +{ + int iLine, iCol, ii; + double dfPixVal; + + /* ---- Init ---- */ + if (nSources != 1) return CE_Failure; + if (GDALDataTypeIsComplex( eSrcType )) return CE_Failure; + + /* ---- Set pixels ---- */ + for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + /* Source raster pixels may be obtained with SRCVAL macro */ + dfPixVal = SRCVAL(papoSources[0], eSrcType, ii); + dfPixVal = pow(base, dfPixVal / fact); + + GDALCopyWords(&dfPixVal, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + + /* ---- Return success ---- */ + return CE_None; +} /* PowPixelFuncHelper */ + +CPLErr dB2AmpPixelFunc(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + return PowPixelFuncHelper(papoSources, nSources, pData, + nXSize, nYSize, eSrcType, eBufType, + nPixelSpace, nLineSpace, 10., 20.); +} /* dB2AmpPixelFunc */ + + +CPLErr dB2PowPixelFunc(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + return PowPixelFuncHelper(papoSources, nSources, pData, + nXSize, nYSize, eSrcType, eBufType, + nPixelSpace, nLineSpace, 10., 10.); +} /* dB2PowPixelFunc */ + +/************************************************************************/ +/* Nansat pixelfunctions */ +/************************************************************************/ + +CPLErr BetaSigmaToIncidence(void **papoSources, int nSources, void *pData, + 
int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int ii, iLine, iCol; + double incidence; + double beta0, sigma0; + + /* ---- Init ---- */ + if (nSources != 2) return CE_Failure; + #define PI 3.14159265; + + /*printf("%d",eSrcType);*/ + + if (GDALDataTypeIsComplex( eSrcType )) + { + double b0Real, b0Imag; + double s0Real, s0Imag; + void *b0pReal = papoSources[0]; + void *s0pReal = papoSources[1]; + void *b0pImag = ((GByte *)papoSources[0]) + + GDALGetDataTypeSize( eSrcType ) / 8 / 2; + void *s0pImag = ((GByte *)papoSources[1]) + + GDALGetDataTypeSize( eSrcType ) / 8 / 2; + + /* ---- Set pixels ---- */ + for( iLine = 0, ii = 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + /* Source raster pixels may be obtained with SRCVAL macro */ + b0Real = SRCVAL(b0pReal, eSrcType, ii); + b0Imag = SRCVAL(b0pImag, eSrcType, ii); + s0Real = SRCVAL(s0pReal, eSrcType, ii); + s0Imag = SRCVAL(s0pImag, eSrcType, ii); + + beta0 = b0Real*b0Real + b0Imag*b0Imag; + sigma0 = s0Real*s0Real + s0Imag*s0Imag; + + if (beta0 != 0) incidence = asin(sigma0/beta0)*180/PI + else incidence = -10000; // NB: this is also hard-coded in + // mapper_radarsat2.py, and + // should be the same in other + // mappers where this function + // is needed... 
+ GDALCopyWords(&incidence, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, + eBufType, nPixelSpace, 1); + } + } + } else { + + /* ---- Set pixels ---- */ + for( iLine = 0; iLine < nYSize; iLine++ ) + { + for( iCol = 0; iCol < nXSize; iCol++ ) + { + ii = iLine * nXSize + iCol; + /* Source raster pixels may be obtained with SRCVAL macro */ + beta0 = SRCVAL(papoSources[0], eSrcType, ii); + sigma0 = SRCVAL(papoSources[1], eSrcType, ii); + + if (beta0 != 0) incidence = asin(sigma0/beta0)*180/PI + else incidence = -10000; // NB: this is also hard-coded in + // mapper_radarsat2.py, and + // should be the same in other + // mappers where this function + // is needed... + GDALCopyWords(&incidence, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, + eBufType, nPixelSpace, 1); + } + } + } + + /* ---- Return success ---- */ + return CE_None; +} + + +CPLErr UVToMagnitude(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int ii, iLine, iCol; + double magnitude; + double u, v; + + /* ---- Init ---- */ + if (nSources != 2) return CE_Failure; + + /* ---- Set pixels ---- */ + for( iLine = 0; iLine < nYSize; iLine++ ) + { + for( iCol = 0; iCol < nXSize; iCol++ ) + { + ii = iLine * nXSize + iCol; + /* Source raster pixels may be obtained with SRCVAL macro */ + u = SRCVAL(papoSources[0], eSrcType, ii); + v = SRCVAL(papoSources[1], eSrcType, ii); + + magnitude = sqrt(u*u + v*v); + + GDALCopyWords(&magnitude, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, + eBufType, nPixelSpace, 1); + } + } + + /* ---- Return success ---- */ +return CE_None; +} + + + +CPLErr Sigma0HHBetaToSigma0VV(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int ii, iLine, iCol; + double sigma0HH, beta0, 
incidence, factor, sigma0VV; + + /* ---- Init ---- */ + if (nSources != 2) return CE_Failure; + /*fprintf("nSources: %d\n", nSources);*/ + + /* ---- Set pixels ---- */ + for( iLine = 0; iLine < nYSize; iLine++ ) + { + for( iCol = 0; iCol < nXSize; iCol++ ) + { + ii = iLine * nXSize + iCol; + /* Source raster pixels may be obtained with SRCVAL macro */ + sigma0HH = SRCVAL(papoSources[0], eSrcType, ii); + beta0 = SRCVAL(papoSources[1], eSrcType, ii); + + /* get incidence angle first */ + if (beta0 != 0){ + incidence = asin(sigma0HH/beta0); + } else { + incidence = 0; + } + + /* Polarisation ratio from Thompson et al. with alpha=1 */ + factor = pow( (1 + 2 * pow(tan(incidence), 2)) / (1 + 1 * pow(tan(incidence), 2)), 2); + sigma0VV = sigma0HH * factor; + + GDALCopyWords(&sigma0VV, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, + eBufType, nPixelSpace, 1); + } + } + + /* ---- Return success ---- */ + return CE_None; +} + + +CPLErr RawcountsToSigma0_CosmoSkymed_SBI(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + + int ii, iLine, iCol; + /* int iReal, iImag; */ + double imPower, real, imag; + + /* ---- Init ---- */ + if (nSources != 2) return CE_Failure; + + /* ---- Set pixels ---- */ + for( iLine = 0; iLine < nYSize; iLine++ ){ + for( iCol = 0; iCol < nXSize; iCol++ ){ + ii = iLine * nXSize + iCol; + /* Source raster pixels may be obtained with SRCVAL macro */ + real = SRCVAL(papoSources[0], eSrcType, ii); + imag = SRCVAL(papoSources[1], eSrcType, ii); + + /*printf("%d",iReal); OK!*/ + + /*real = (double) iReal;*/ + /*imag = (double) iImag;*/ + + /*printf("%.1f",imag); OK!*/ + + imPower = pow(real,2.0) + pow(imag,2.0); + /*printf("%.1f",imPower); */ + + GDALCopyWords(&imPower, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, + eBufType, nPixelSpace, 1); + + } + } + + /* ---- Return success ---- */ + 
return CE_None; + +} + +CPLErr RawcountsToSigma0_CosmoSkymed_QLK(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + + int ii, iLine, iCol; + double raw_counts, imPower; + + /* ---- Init ---- */ + if (nSources != 1) return CE_Failure; + + /* ---- Set pixels ---- */ + for( iLine = 0; iLine < nYSize; iLine++ ) + { + for( iCol = 0; iCol < nXSize; iCol++ ) + { + ii = iLine * nXSize + iCol; + /* Source raster pixels may be obtained with SRCVAL macro */ + raw_counts = SRCVAL(papoSources[0], eSrcType, ii); + imPower = pow(raw_counts,2.); + + GDALCopyWords(&imPower, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, + eBufType, nPixelSpace, 1); + } + } + + /* ---- Return success ---- */ + return CE_None; + +} + + +CPLErr ComplexData(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int ii, iLine, iCol; + double adfPixVal[2]; + void *pReal = papoSources[0]; + void *pImag = papoSources[1]; + + for( iLine = 0, ii= 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + /* Source raster pixels may be obtained with SRCVAL macro */ + adfPixVal[0] = SRCVAL(pReal, eSrcType, ii); + adfPixVal[1] = SRCVAL(pImag, eSrcType, ii); + + GDALCopyWords(&adfPixVal, GDT_CFloat64, 0, + ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, + eBufType, nPixelSpace, 1); + } + } + + /* ---- Return success ---- */ +return CE_None; +} + +CPLErr IntensityInt(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int ii, iLine, iCol; + int dfPixVal; + + /* ---- Init ---- */ + if (nSources != 1) return CE_Failure; + + int dfReal, dfImag; + int nOffset = GDALGetDataTypeSize( eSrcType ) / 8 / 2; + void *pReal = 
papoSources[0]; + void *pImag = ((GByte *)papoSources[0]) + nOffset; + + // ---- Set pixels ---- + + if (GDALDataTypeIsComplex( eSrcType )) + { + int dfReal, dfImag; + int nOffset = GDALGetDataTypeSize( eSrcType ) / 8 / 2; + void *pReal = papoSources[0]; + void *pImag = ((GByte *)papoSources[0]) + nOffset; + + // ---- Set pixels ---- + + for( iLine = 0, ii = 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + // Source raster pixels may be obtained with SRCVAL macro + dfReal = SRCVAL(pReal, eSrcType, ii); + dfImag = SRCVAL(pImag, eSrcType, ii); + + dfPixVal = dfReal * dfReal + dfImag * dfImag; + + GDALCopyWords(&dfPixVal, GDT_Int16, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + } else { + // ---- Set pixels ---- + for( iLine = 0, ii = 0; iLine < nYSize; ++iLine ) { + for( iCol = 0; iCol < nXSize; ++iCol, ++ii ) { + + // Source raster pixels may be obtained with SRCVAL macro + dfPixVal = SRCVAL(papoSources[0], eSrcType, ii); + dfPixVal *= dfPixVal; + + GDALCopyWords(&dfPixVal, GDT_Int32, 0, + ((GByte *)pData) + nLineSpace * iLine + + iCol * nPixelSpace, eBufType, nPixelSpace, 1); + } + } + } + /* ---- Return success ---- */ + return CE_None; +} /* IntensityInt */ + + +CPLErr OnesPixelFunc(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + char one=1; + int iLine, iCol; + + /* ---- Set all pixels to 1 ---- */ + for( iLine = 0; iLine < nYSize; iLine++ ){ + for( iCol = 0; iCol < nXSize; iCol++ ){ + + GDALCopyWords(&one, GDT_Byte, 0, + ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, + eBufType, nPixelSpace, 1); + } + } + /* ---- Return success ---- */ + return CE_None; +} + + + +/************************************************************************/ +/* Convert Rrs to Rrsw */ +/************************************************************************/ +/* 
scientifc function */ +double NormReflectanceToRemSensReflectanceFunction(double *b){ + return b[0] / (0.52 + 1.7 * b[0]); +} + +double RawcountsIncidenceToSigma0Function(double *b){ + double pi = 3.14159265; + return (pow(b[0], 2.0) * sin(b[1] * pi / 180.0)); +} + +double Sentinel1CalibrationFunction(double *b){ + + // With noise removal -- I am not sure if the noise (b[2]) should be + // squared or not but have sent an email to esa.. + //return ( pow(b[1],2.0) - pow(b[2],2.0) ) / pow(b[0], 2.0); + // Without noise removal + return pow(b[1],2.0) / pow(b[0], 2.0); + +} + +double Sigma0HHToSigma0VVFunction(double *b){ + double pi = 3.14159265; + double s0hh, factor; + s0hh = (pow(b[0], 2.0) * sin(b[1] * pi / 180.0)); + /* Polarisation ratio from Thompson et al. with alpha=1 */ + factor = pow( (1 + 2 * pow(tan(b[1]*pi/180.0), 2)) / (1 + 1 * pow(tan(b[1]*pi/180.0), 2)), 2); + return s0hh * factor; +} + +double Sentinel1Sigma0HHToSigma0VVFunction( double *b ){ + + double s0hh, s0vv; + double bcal[3]; + double s0hh2s0vv[2]; + + bcal[0] = b[0]; // sigmaNought LUT + bcal[1] = b[2]; // DN + bcal[2] = b[3]; // noise + s0hh = Sentinel1CalibrationFunction(bcal); + + s0hh2s0vv[0] = s0hh; + s0hh2s0vv[1] = b[1]; + + s0vv = Sigma0HHToSigma0VVFunction(s0hh2s0vv); + + return s0vv; + +} + +double Sigma0NormalizedIceFunction(double *b){ + double pi = 3.14159265; + double sigma0 = (pow(b[0], 2.0) * sin(b[1] * pi / 180.0)); + return sigma0 * pow((tan(b[1] * pi / 180.0) / tan(31.0 * pi / 180.0)), 1.5); +} + +double Sigma0VVNormalizedWaterFunction(double *b){ + double pi = 3.14159265; + double sigma0 = (pow(b[0], 2.0) * sin(b[1] * pi / 180.0)); + return sigma0 * pow((sin(b[1] * pi / 180.0) / sin(31.0 * pi / 180.0)), 4.0); +} + +double Sigma0HHNormalizedWaterFunction(double *b){ + double pi = 3.14159265; + double sigma0 = (pow(b[0], 2.0) * sin(b[1] * pi / 180.0)); + return sigma0 * pow((tan(b[1] * pi / 180.0) / tan(31.0 * pi / 180.0)), 4.0); +} + +double UVToDirectionFromFunction(double 
*b){ + /* Convention 0-360 degrees positive clockwise from north*/ + double pi = 3.14159265; + //return (b[0]==9999 || b[1]==9999) ? 9999 : 180.0 - atan2(-b[0],b[1])*180./pi; + return 180.0 - atan2(-b[0],b[1])*180./pi; +} + +double UVToDirectionToFunction(double *b){ + /* Convention 0-360 degrees positive clockwise from north*/ + double pi = 3.14159265; + return 360.0 - atan2(-b[0],b[1])*180./pi; + /* + Below code is hirlam specific - we don't know if the invalid data is + actually 9999. One option is to make mapper specific pixelfunctions + but for now only return the direction as if all data was good. + */ + //return (b[0]==9999 || b[1]==9999) ? 9999 : 360.0 - atan2(-b[0],b[1])*180./pi; +} + +/* pixel function */ +CPLErr UVToDirectionTo(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace){ + + GenericPixelFunction(UVToDirectionToFunction, + papoSources, nSources, pData, + nXSize, nYSize, eSrcType, eBufType, + nPixelSpace, nLineSpace); + + return CE_None; +} + +CPLErr UVToDirectionFrom(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace){ + + GenericPixelFunction(UVToDirectionFromFunction, + papoSources, nSources, pData, + nXSize, nYSize, eSrcType, eBufType, + nPixelSpace, nLineSpace); + + return CE_None; +} + + +CPLErr NormReflectanceToRemSensReflectance(void **papoSources, int nSources, void *pData, + int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace){ + + GenericPixelFunction(NormReflectanceToRemSensReflectanceFunction, + papoSources, nSources, pData, + nXSize, nYSize, eSrcType, eBufType, + nPixelSpace, nLineSpace); + + return CE_None; +} + +CPLErr Sentinel1Calibration(void **papoSources, + int nSources, void *pData, int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int 
nPixelSpace, int nLineSpace){ + + GenericPixelFunction(Sentinel1CalibrationFunction, + papoSources, nSources, pData, + nXSize, nYSize, eSrcType, eBufType, + nPixelSpace, nLineSpace); + + return CE_None; +} + +CPLErr Sentinel1Sigma0HHToSigma0VV(void **papoSources, + int nSources, void *pData, int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace){ + + GenericPixelFunction(Sentinel1Sigma0HHToSigma0VVFunction, + papoSources, nSources, pData, + nXSize, nYSize, eSrcType, eBufType, + nPixelSpace, nLineSpace); + + return CE_None; +} + +CPLErr RawcountsIncidenceToSigma0(void **papoSources, + int nSources, void *pData, int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace){ + + GenericPixelFunction(RawcountsIncidenceToSigma0Function, + papoSources, nSources, pData, + nXSize, nYSize, eSrcType, eBufType, + nPixelSpace, nLineSpace); + + return CE_None; +} + +CPLErr Sigma0HHToSigma0VV(void **papoSources, + int nSources, void *pData, int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace){ + // Works for ASAR! 
+ GenericPixelFunction(Sigma0HHToSigma0VVFunction, + papoSources, nSources, pData, + nXSize, nYSize, eSrcType, eBufType, + nPixelSpace, nLineSpace); + + return CE_None; +} + +CPLErr Sigma0NormalizedIce(void **papoSources, + int nSources, void *pData, int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace){ + + GenericPixelFunction(Sigma0NormalizedIceFunction, + papoSources, nSources, pData, + nXSize, nYSize, eSrcType, eBufType, + nPixelSpace, nLineSpace); + + return CE_None; +} + +CPLErr Sigma0VVNormalizedWater(void **papoSources, + int nSources, void *pData, int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace){ + + GenericPixelFunction(Sigma0VVNormalizedWaterFunction, + papoSources, nSources, pData, + nXSize, nYSize, eSrcType, eBufType, + nPixelSpace, nLineSpace); + + return CE_None; +} + +CPLErr Sigma0HHNormalizedWater(void **papoSources, + int nSources, void *pData, int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace){ + + GenericPixelFunction(Sigma0HHNormalizedWaterFunction, + papoSources, nSources, pData, + nXSize, nYSize, eSrcType, eBufType, + nPixelSpace, nLineSpace); + + return CE_None; +} + + + +/************************************************************************/ +/* Generic Pixel Function is called from a pixel function and calls + * corresponding scientific function */ +/************************************************************************/ + +// all data (band) size must be same and full size of bands (XSize x YSize). 
+void GenericPixelFunction(double f(double*), void **papoSources, + int nSources, void *pData, int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int ii, iLine, iCol, iSrc; + double *bVal, result; + bVal = malloc(nSources * sizeof (double)); + + /* ---- Set pixels ---- */ + for( iLine = 0; iLine < nYSize; iLine++ ){ + for( iCol = 0; iCol < nXSize; iCol++ ){ + ii = iLine * nXSize + iCol; + /* Source raster pixels may be obtained with SRCVAL macro */ + for (iSrc = 0; iSrc < nSources; iSrc ++){ + bVal[iSrc] = SRCVAL(papoSources[iSrc], eSrcType, ii); + //if (iLine==0 && iCol==0){ + // printf("%d ",iSrc); + // printf("%.4f\n",bVal[iSrc]); + //} + } + + result = f(bVal); + + GDALCopyWords(&result, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, + eBufType, nPixelSpace, 1); + } + } +} + +// From the 1st to (N-1)th bands are full size (XSize x YSize), +// and the last band is a one-pixel band (1 x 1). +void GenericPixelFunctionPixel(double f(double*), void **papoSources, + int nSources, void *pData, int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int iLine, iCol, iSrc; + double *bVal, result; + bVal = malloc(nSources * sizeof (double)); + + /* ---- Set pixels ---- */ + /* Set the first value form one-pixel band */ + bVal[0] = SRCVAL(papoSources[nSources-1], eSrcType, 0); + for( iLine = 0; iLine < nYSize; iLine++ ) + { + for( iCol = 0; iCol < nXSize; iCol++ ) + { + for (iSrc = 1; iSrc < nSources; iSrc ++) + /* Source raster pixels may be obtained with SRCVAL macro */ + bVal[iSrc] = SRCVAL(papoSources[iSrc-1], eSrcType, iLine * nXSize + iCol); + + result = f(bVal); + + GDALCopyWords(&result, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, + eBufType, nPixelSpace, 1); + } + } +} + +// From the 1st to (N-1)th bands are full size (XSize x YSize), +// and the last band is a line band (XSize x 1). 
+void GenericPixelFunctionLine(double f(double*), void **papoSources, + int nSources, void *pData, int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int iLine, iCol, iSrc; + double *bVal, result; + bVal = malloc(nSources * sizeof (double)); + + /* ---- Set pixels ---- */ + for( iLine = 0; iLine < nYSize; iLine++ ) + { + for( iCol = 0; iCol < nXSize; iCol++ ) + { + /* Source raster pixels may be obtained with SRCVAL macro */ + bVal[0] = SRCVAL(papoSources[nSources-1], eSrcType, iCol); + + for (iSrc = 1; iSrc < nSources; iSrc ++) + /* Source raster pixels may be obtained with SRCVAL macro */ + bVal[iSrc] = SRCVAL(papoSources[iSrc-1], eSrcType, iLine * nXSize + iCol); + + result = f(bVal); + + GDALCopyWords(&result, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, + eBufType, nPixelSpace, 1); + } + } +} + +// From the 1st to (N-2)th bands are full size (XSize x YSize), +// the last 2nd band is a line band (XSize x 1) and the last is one pixel band. 
+void GenericPixelFunctionPixelLine(double f(double*), void **papoSources, + int nSources, void *pData, int nXSize, int nYSize, + GDALDataType eSrcType, GDALDataType eBufType, + int nPixelSpace, int nLineSpace) +{ + int iLine, iCol, iSrc; + double *bVal, result; + bVal = malloc(nSources * sizeof (double)); + + /* ---- Set pixels ---- */ + bVal[0] = SRCVAL(papoSources[nSources-1], eSrcType, 0); + for( iLine = 0; iLine < nYSize; iLine++ ) + { + for( iCol = 0; iCol < nXSize; iCol++ ) + { + bVal[1] = SRCVAL(papoSources[nSources-2], eSrcType, iCol); + + for(iSrc = 2; iSrc < nSources; iSrc++ ) + bVal[iSrc] = SRCVAL(papoSources[iSrc-2], eSrcType, iLine * nXSize + iCol); + + result = f(bVal); + + GDALCopyWords(&result, GDT_Float64, 0, + ((GByte *)pData) + nLineSpace * iLine + iCol * nPixelSpace, + eBufType, nPixelSpace, 1); + } + } +} + + + +/************************************************************************/ +/* GDALRegisterDefaultPixelFunc() */ +/************************************************************************/ + +/** + * This adds a default set of pixel functions to the global list of + * available pixel functions for derived bands: + * + * - "real": extract real part from a single raster band (just a copy if the + * input is non-complex) + * - "imag": extract imaginary part from a single raster band (0 for + * non-complex) + * - "mod": extract module from a single raster band (real or complex) + * - "phase": extract phase from a single raster band (0 for non-complex) + * - "conj": computes the complex conjugate of a single raster band (just a + * copy if the input is non-complex) + * - "sum": sum 2 or more raster bands + * - "diff": computes the difference between 2 raster bands (b1 - b2) + * - "mul": multiply 2 or more raster bands + * - "cmul": multiply the first band for the complex conjugate of the second + * - "inv": inverse (1./x). 
Note: no check is performed on zero division + * - "intensity": computes the intensity Re(x*conj(x)) of a single raster band + * (real or complex) + * - "sqrt": perform the square root of a single raster band (real only) + * - "log10": compute the logarithm (base 10) of the abs of a single raster + * band (real or complex): log10( abs( x ) ) + * - "dB2amp": perform scale conversion from logarithmic to linear + * (amplitude) (i.e. 10 ^ ( x / 20 ) ) of a single raster + * band (real only) + * - "dB2pow": perform scale conversion from logarithmic to linear + * (power) (i.e. 10 ^ ( x / 10 ) ) of a single raster + * band (real only) + * + * @see GDALAddDerivedBandPixelFunc + * + * @return CE_None, invalid (NULL) parameters are currently ignored. + */ +CPLErr CPL_STDCALL GDALRegisterDefaultPixelFunc() +{ + GDALAddDerivedBandPixelFunc("real", RealPixelFunc); + GDALAddDerivedBandPixelFunc("imag", ImagPixelFunc); + GDALAddDerivedBandPixelFunc("mod", ModulePixelFunc); + GDALAddDerivedBandPixelFunc("phase", PhasePixelFunc); + GDALAddDerivedBandPixelFunc("conj", ConjPixelFunc); + GDALAddDerivedBandPixelFunc("sum", SumPixelFunc); + GDALAddDerivedBandPixelFunc("diff", DiffPixelFunc); + GDALAddDerivedBandPixelFunc("mul", MulPixelFunc); + GDALAddDerivedBandPixelFunc("cmul", CMulPixelFunc); + GDALAddDerivedBandPixelFunc("inv", InvPixelFunc); + GDALAddDerivedBandPixelFunc("intensity", IntensityPixelFunc); + GDALAddDerivedBandPixelFunc("sqrt", SqrtPixelFunc); + GDALAddDerivedBandPixelFunc("log10", Log10PixelFunc); + GDALAddDerivedBandPixelFunc("dB2amp", dB2AmpPixelFunc); + GDALAddDerivedBandPixelFunc("dB2pow", dB2PowPixelFunc); + + GDALAddDerivedBandPixelFunc("BetaSigmaToIncidence", BetaSigmaToIncidence); + GDALAddDerivedBandPixelFunc("UVToMagnitude", UVToMagnitude); + GDALAddDerivedBandPixelFunc("UVToDirectionTo", UVToDirectionTo); + GDALAddDerivedBandPixelFunc("UVToDirectionFrom", UVToDirectionFrom); + GDALAddDerivedBandPixelFunc("Sigma0HHBetaToSigma0VV", Sigma0HHBetaToSigma0VV); 
//Radarsat-2 + GDALAddDerivedBandPixelFunc("Sigma0HHToSigma0VV", Sigma0HHToSigma0VV); // ASAR + GDALAddDerivedBandPixelFunc("RawcountsIncidenceToSigma0", RawcountsIncidenceToSigma0); + GDALAddDerivedBandPixelFunc("RawcountsToSigma0_CosmoSkymed_QLK", RawcountsToSigma0_CosmoSkymed_QLK); + GDALAddDerivedBandPixelFunc("RawcountsToSigma0_CosmoSkymed_SBI", RawcountsToSigma0_CosmoSkymed_SBI); + GDALAddDerivedBandPixelFunc("ComplexData", ComplexData); + GDALAddDerivedBandPixelFunc("NormReflectanceToRemSensReflectance", NormReflectanceToRemSensReflectance); + GDALAddDerivedBandPixelFunc("Sigma0NormalizedIce", Sigma0NormalizedIce); + GDALAddDerivedBandPixelFunc("Sigma0HHNormalizedWater", Sigma0HHNormalizedWater); + GDALAddDerivedBandPixelFunc("Sigma0VVNormalizedWater", Sigma0VVNormalizedWater); + GDALAddDerivedBandPixelFunc("Sentinel1Calibration", Sentinel1Calibration); + GDALAddDerivedBandPixelFunc("Sentinel1Sigma0HHToSigma0VV", Sentinel1Sigma0HHToSigma0VV); + GDALAddDerivedBandPixelFunc("IntensityInt", IntensityInt); + GDALAddDerivedBandPixelFunc("OnesPixelFunc", OnesPixelFunc); + return CE_None; +} + diff --git a/nansat/pixelfunctions/pixfunplugin.c b/nansat/pixelfunctions/pixfunplugin.c old mode 100755 new mode 100644 diff --git a/nansat/pointbrowser.py b/nansat/pointbrowser.py index de0dd08a1..55dd608c4 100644 --- a/nansat/pointbrowser.py +++ b/nansat/pointbrowser.py @@ -15,7 +15,7 @@ # but WITHOUT ANY WARRANTY without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. -import matplotlib +import numpy as np import matplotlib.pyplot as plt @@ -24,7 +24,7 @@ class PointBrowser(): Click on raster images shown by plt.imshow and get the X-Y coordinates. 
''' - def __init__(self, data, transect=True, **kwargs): + def __init__(self, data, fmt='x-k', **kwargs): ''' Open figure with imshow and colorbar Parameters @@ -48,6 +48,7 @@ def __init__(self, data, transect=True, **kwargs): ''' self.fig = plt.figure() self.data = data + self.fmt = fmt self.text_ax = plt.axes([0.0, 0.85, 1.0, 0.15]) self.ax = plt.axes([0.0, 0.0, 1.0, 0.85]) img = self.ax.imshow(self.data, extent=(0, self.data.shape[1], @@ -55,54 +56,31 @@ def __init__(self, data, transect=True, **kwargs): origin='lower', **kwargs) self.fig.colorbar(img) - self.points, = self.ax.plot([], [], '+', ms=12, color='b') - self.lines = [] - self.coordinates = [] + self.points = [] + self.lines = [self.ax.plot([], [], self.fmt)[0]] + self.coordinates = [[]] self.connect = [] - self.drawLine = transect def onclick(self, event): ''' Append onclick event ''' - if event.xdata is not None and event.ydata is not None: - if str(event.key)=='alt+z' or str(event.key)=='z': - pass - else: - # ignore clicked point if "z" key is held down - # - holding down any other key (NOT cmd (mac),shift,alt,ctrl) - # means a new line is started at the clicked point - # - holding down no key means current line is extended to include the clicked point - self.coordinates.append((event.xdata, event.ydata)) - # press (any) key (NOT 'cmd','ctrl','alt','shift', or 'z' - see above) means to start new line. - # if pressed, then set 0 to self.connect. otherwise set 1. 
- if event.key is None and self.drawLine: - self.connect.append(1) - else: - self.connect.append(0) - - # get coordinate of clicked point - tCoordinates = map(tuple, zip(*self.coordinates)) - self.points.set_data(tCoordinates) - self.points.figure.canvas.draw() - - # separate points by each line - linesCoords = [] - for i, iLine in enumerate(self.coordinates): - if i == 0: - oneLine = [self.coordinates[0]] - elif self.connect[i] == 0: - linesCoords.append(oneLine) - oneLine = [self.coordinates[i]] - else: - oneLine.append(self.coordinates[i]) - linesCoords.append(oneLine) - - # draw lines - if self.drawLine: - line, = self.ax.plot([], []) - for iLinePoints in linesCoords: - tCoordinates = map(tuple, zip(*iLinePoints)) - self.lines.append(line.set_data(tCoordinates)) - line.figure.canvas.draw() + # ignore click outside image + if event.xdata is None or event.ydata is None: + return + + # ignore clicked point if "z" key is held down + if str(event.key) == 'alt+z' or str(event.key) == 'z': + return + + if event.key is not None: + # - holding down any other key (NOT cmd (mac),shift,alt,ctrl) + # means a new line is started at the clicked point + self.coordinates.append([]) + self.lines.append(self.ax.plot([], [], self.fmt)[0]) + + self.coordinates[-1].append((event.xdata, event.ydata)) + self.points.append(self.ax.plot(event.xdata, event.ydata, self.fmt)) + self.lines[-1].set_data(np.array(self.coordinates[-1]).T) + self.ax.figure.canvas.draw() def get_points(self): ''' Process click event ''' @@ -122,6 +100,12 @@ def get_points(self): ' location, release "space" and continue clicking\n' 'To zoom: press "z" and use pan/zoom tools, then release "z"') self.text_ax.text(0.01, 0.9, text, fontsize=13, - verticalalignment='top', horizontalalignment='left') + verticalalignment='top', horizontalalignment='left') + # collect data plt.show() + + # convert list of lists of coordinates to list of arrays + points = [np.array(p).T for p in self.coordinates if len(p) > 0] + + return 
points diff --git a/nansat/tests/data/arctic.nc b/nansat/tests/data/arctic.nc new file mode 100644 index 000000000..a0b89ed9f Binary files /dev/null and b/nansat/tests/data/arctic.nc differ diff --git a/nansat/tests/data/complex.nc b/nansat/tests/data/complex.nc old mode 100755 new mode 100644 diff --git a/nansat/tests/test_figure.py b/nansat/tests/test_figure.py new file mode 100644 index 000000000..67e176491 --- /dev/null +++ b/nansat/tests/test_figure.py @@ -0,0 +1,132 @@ +#------------------------------------------------------------------------------ +# Name: test_nansat.py +# Purpose: Test the Nansat class +# +# Author: Morten Wergeland Hansen, Asuka Yamakawa +# Modified: Morten Wergeland Hansen +# +# Created: 18.06.2014 +# Last modified:16.03.2015 13:19 +# Copyright: (c) NERSC +# Licence: This file is part of NANSAT. You can redistribute it or modify +# under the terms of GNU General Public License, v.3 +# http://www.gnu.org/licenses/gpl-3.0.html +#------------------------------------------------------------------------------ +import unittest +import warnings +import os +import sys +import glob +from types import ModuleType, FloatType +import datetime + +import matplotlib.pyplot as plt +import numpy as np +from scipy.io.netcdf import netcdf_file + +from nansat import Figure, Nansat, Domain +from nansat.tools import gdal, OptionError + +import nansat_test_data as ntd + +IS_CONDA = 'conda' in os.environ['PATH'] + + +class FigureTest(unittest.TestCase): + def setUp(self): + self.test_file_gcps = os.path.join(ntd.test_data_path, 'gcps.tif') + plt.switch_backend('Agg') + + if not os.path.exists(self.test_file_gcps): + raise ValueError('No test data available') + + def test_init_array(self): + f = Figure(np.zeros((10,10))) + + self.assertEqual(type(f), Figure) + + + def test_get_auto_ticks_number(self): + n = Nansat(self.test_file_gcps) + lon, lat = n.get_geolocation_grids() + f = Figure(lon) + lonTicks = f._get_auto_ticks(5, lon) + latTicks = f._get_auto_ticks(5, 
lat) + + self.assertEqual(len(lonTicks), 5) + n.logger.error(str(lonTicks)) + n.logger.error(str(latTicks)) + + def test_get_auto_ticks_vector(self): + n = Nansat(self.test_file_gcps) + lon, lat = n.get_geolocation_grids() + f = Figure(lon) + lonTicks = f._get_auto_ticks([28, 29, 30, 100], lon) + + self.assertEqual(len(lonTicks), 3) + + def test_add_latlon_grids_auto(self): + ''' Should create figure with lon/lat gridlines spaced automatically ''' + tmpfilename = os.path.join(ntd.tmp_data_path, 'figure_latlon_grids_auto.png') + n = Nansat(self.test_file_gcps) + b = n[1] + lon, lat = n.get_geolocation_grids() + + f = Figure(b) + f.process(clim='hist', lonGrid=lon, latGrid=lat) + f.save(tmpfilename) + + self.assertEqual(type(f), Figure) + self.assertTrue(os.path.exists(tmpfilename)) + + def test_add_latlon_grids_number(self): + ''' Should create figure with lon/lat gridlines given manually ''' + tmpfilename = os.path.join(ntd.tmp_data_path, + 'figure_latlon_grids_number.png') + n = Nansat(self.test_file_gcps) + n.resize(3) + b = n[1] + lon, lat = n.get_geolocation_grids() + + f = Figure(b) + f.process(cmax=100, lonGrid=lon, + latGrid=lat, + lonTicks=7, + latTicks=7) + f.save(tmpfilename) + + self.assertEqual(type(f), Figure) + self.assertTrue(os.path.exists(tmpfilename)) + + def test_add_latlon_grids_list(self): + ''' Should create figure with lon/lat gridlines given manually ''' + tmpfilename = os.path.join(ntd.tmp_data_path, + 'figure_latlon_grids_list.png') + n = Nansat(self.test_file_gcps) + b = n[1] + lon, lat = n.get_geolocation_grids() + + f = Figure(b) + f.process(clim='hist', lonGrid=lon, + latGrid=lat, + lonTicks=[28, 29, 30], + latTicks=[70.5, 71, 71.5, 73]) + f.save(tmpfilename) + + self.assertEqual(type(f), Figure) + self.assertTrue(os.path.exists(tmpfilename)) + + + def test_get_tick_index_from_grid(self): + ''' Should return indeces of pixel closest to ticks ''' + n = Nansat(self.test_file_gcps) + lon, lat = n.get_geolocation_grids() + + f = 
Figure(lon) + lonTicksIdx = f._get_tick_index_from_grid([28.5, 29], lon, 1, lon.shape[1]) + latTicksIdx = f._get_tick_index_from_grid([71, 71.5], lat, lat.shape[0], 1) + n.logger.error(str(lonTicksIdx)) + n.logger.error(str(latTicksIdx)) + +if __name__ == "__main__": + unittest.main() diff --git a/nansat/tests/test_mosaic.py b/nansat/tests/test_mosaic.py index a4343f95c..9ae9eb8e6 100644 --- a/nansat/tests/test_mosaic.py +++ b/nansat/tests/test_mosaic.py @@ -23,6 +23,7 @@ import numpy as np from nansat import Nansat, Domain, Mosaic +from nansat.mosaic import Layer from nansat.tools import gdal import nansat_test_data as ntd @@ -38,14 +39,29 @@ def setUp(self): if not os.path.exists(self.test_file_gcps): raise ValueError('No test data available') - def test_init(self): + def test_average(self): mo = Mosaic(domain=self.domain) + mo.average([self.test_file_gcps, self.test_file_stere], + bands=['L_645', 'L_555', 'L_469']) - self.assertEqual(type(mo), Mosaic) + mask = mo['mask'] + L_645 = mo['L_645'] + L_555 = mo['L_555'] + L_469 = mo['L_469'] - def test_average(self): + tmpfilename = os.path.join(ntd.tmp_data_path, + 'mosaic_average_export.nc') + bands = { + 'L_645' : {'type': '>i1'}, + 'L_555' : {'type': '>i1'}, + 'L_469' : {'type': '>i1'}, + } + mo.set_metadata('time_coverage_start', '2016-01-19') + mo.export2thredds(tmpfilename, bands) + + def test_median(self): mo = Mosaic(domain=self.domain) - mo.average([self.test_file_gcps, self.test_file_stere], + mo.median([self.test_file_gcps, self.test_file_stere], bands=['L_645', 'L_555', 'L_469']) mask = mo['mask'] @@ -54,14 +70,60 @@ def test_average(self): L_469 = mo['L_469'] tmpfilename = os.path.join(ntd.tmp_data_path, - 'mosaic_export.nc') + 'mosaic_median_export.nc') bands = { + 'mask' : {'type': '>i1'}, 'L_645' : {'type': '>i1'}, 'L_555' : {'type': '>i1'}, 'L_469' : {'type': '>i1'}, } + mo.set_metadata('time_coverage_start', '2016-01-19') mo.export2thredds(tmpfilename, bands) +class LayerTest(unittest.TestCase): + 
def setUp(self): + self.domain = Domain(4326, '-lle 27 70 31 72 -ts 700 650') + self.test_file_gcps = os.path.join(ntd.test_data_path, 'gcps.tif') + + def test_get_nansat_object(self): + ''' Mosaic.Layer should open file with Nansat and reproject ''' + l = Layer(self.test_file_gcps) + l.make_nansat_object(self.domain) + + self.assertEqual(type(l.n), Nansat) + self.assertEqual(l.n.shape(), (650, 700)) + + def test_get_nansat_object_no_reproject(self): + ''' Mosaic.Layer should open file with Nansat ''' + l = Layer(self.test_file_gcps, doReproject=False) + l.make_nansat_object(self.domain) + + self.assertEqual(type(l.n), Nansat) + self.assertEqual(l.n.shape(), (200, 200)) + + def test_get_mask(self): + '''Mosaic.Layer should get mask from reprojected file ''' + n = Nansat(self.test_file_gcps) + n.reproject(self.domain) + swathmask = n['swathmask'] + + l = Layer(self.test_file_gcps) + l.make_nansat_object(self.domain) + mask = l.get_mask_array() + + self.assertEqual(type(mask), np.ndarray) + self.assertEqual(mask.shape, (650, 700)) + np.testing.assert_allclose(mask, swathmask*64) + + def test_get_mask_no_reproject(self): + '''Mosaic.Layer should get mask from reprojected file ''' + l = Layer(self.test_file_gcps, doReproject=False) + l.make_nansat_object(self.domain) + mask = l.get_mask_array() + + self.assertEqual(type(mask), np.ndarray) + np.testing.assert_allclose(mask, np.ones((200,200))*64) + if __name__ == "__main__": unittest.main() diff --git a/nansat/tests/test_nansat.py b/nansat/tests/test_nansat.py index c5f45c441..f831922fc 100644 --- a/nansat/tests/test_nansat.py +++ b/nansat/tests/test_nansat.py @@ -1,24 +1,24 @@ -#------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ # Name: test_nansat.py # Purpose: Test the Nansat class # # Author: Morten Wergeland Hansen, Asuka Yamakawa -# Modified: Morten Wergeland Hansen +# Modified: Morten Wergeland Hansen, 
Aleksander Vines # # Created: 18.06.2014 -# Last modified:16.04.2015 10:48 +# Last modified:30.09.2015 14:00 # Copyright: (c) NERSC # Licence: This file is part of NANSAT. You can redistribute it or modify # under the terms of GNU General Public License, v.3 # http://www.gnu.org/licenses/gpl-3.0.html -#------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------ import unittest import warnings import os -import sys -import glob -from types import ModuleType, FloatType import datetime +import json +from xml.sax.saxutils import unescape + import matplotlib.pyplot as plt import numpy as np from scipy.io.netcdf import netcdf_file @@ -28,24 +28,34 @@ import nansat_test_data as ntd -IS_CONDA = 'conda' in os.environ['PATH'] - class NansatTest(unittest.TestCase): def setUp(self): self.test_file_gcps = os.path.join(ntd.test_data_path, 'gcps.tif') self.test_file_stere = os.path.join(ntd.test_data_path, 'stere.tif') self.test_file_complex = os.path.join(ntd.test_data_path, 'complex.nc') + self.test_file_arctic = os.path.join(ntd.test_data_path, 'arctic.nc') + self.tmpfilename = os.path.join(ntd.tmp_data_path, 'test.nc') plt.switch_backend('Agg') if not os.path.exists(self.test_file_gcps): raise ValueError('No test data available') - def test_init_filename(self): + def test_open_gcps(self): n = Nansat(self.test_file_gcps, logLevel=40) self.assertEqual(type(n), Nansat) + def test_get_time_coverage_start_end(self): + n = Nansat(self.test_file_gcps, logLevel=40) + n.set_metadata('time_coverage_start', '2016-01-20') + n.set_metadata('time_coverage_end', '2016-01-21') + + self.assertEqual(type(n.time_coverage_start), + datetime.datetime) + self.assertEqual(type(n.time_coverage_end), + datetime.datetime) + def test_init_domain(self): d = Domain(4326, "-te 25 70 35 72 -ts 500 500") n = Nansat(domain=d, logLevel=40) @@ -68,14 +78,25 @@ def test_init_domain_array(self): def 
test_geolocation_of_exportedNC_vs_original(self): ''' Lon/lat in original and exported file should coincide ''' orig = Nansat(self.test_file_gcps) - tmpfilename = os.path.join(ntd.tmp_data_path, 'nansat_export_gcps.nc') - orig.export(tmpfilename) + orig.export(self.tmpfilename) - copy = Nansat(tmpfilename) + copy = Nansat(self.tmpfilename) lon0, lat0 = orig.get_geolocation_grids() lon1, lat1 = copy.get_geolocation_grids() np.testing.assert_allclose(lon0, lon1) np.testing.assert_allclose(lat0, lat1) + os.unlink(self.tmpfilename) + + def test_special_characters_in_exported_metadata(self): + orig = Nansat(self.test_file_gcps) + orig.vrt.dataset.SetMetadataItem('jsonstring', json.dumps({'meta1': + 'hei', 'meta2': 'derr'})) + orig.export(self.tmpfilename) + copy = Nansat(self.tmpfilename) + dd = json.loads( unescape( copy.get_metadata('jsonstring'), {'"': + '"'})) + self.assertIsInstance(dd, dict) + os.unlink(self.tmpfilename) def test_add_band(self): d = Domain(4326, "-te 25 70 35 72 -ts 500 500") @@ -88,6 +109,26 @@ def test_add_band(self): self.assertEqual(n.get_metadata('name', 1), 'band1') self.assertEqual(n[1].shape, (500, 500)) + def test_export_netcdf(self): + ''' Test export and following import of data with bands containing + np.nan values + ''' + n = Nansat(self.test_file_gcps) + arrNoNaN = np.random.randn(n.shape()[0], n.shape()[1]) + n.add_band(arrNoNaN, {'name': 'testBandNoNaN'}) + arrWithNaN = arrNoNaN.copy() + arrWithNaN[n.shape()[0]/2-10:n.shape()[0]/2+10, + n.shape()[1]/2-10:n.shape()[1]/2+10] = np.nan + n.add_band(arrWithNaN, {'name': 'testBandWithNaN'}) + n.export(self.tmpfilename) + exported = Nansat(self.tmpfilename) + earrNoNaN = exported['testBandNoNaN'] + # Use allclose to allow some roundoff errors + self.assertTrue(np.allclose(arrNoNaN, earrNoNaN)) + earrWithNaN = exported['testBandWithNaN'] + np.testing.assert_allclose(arrWithNaN, earrWithNaN) + os.unlink(self.tmpfilename) + def test_add_band_twice(self): d = Domain(4326, "-te 25 70 35 72 -ts 
500 500") arr = np.random.randn(500, 500) @@ -176,7 +217,8 @@ def test_export_gcps_complex_to_netcdf(self): n1.add_band(b0.astype('complex64'), parameters={'name': 'L_469'}) - tmpfilename = os.path.join(ntd.tmp_data_path, 'nansat_export_gcps_complex.nc') + tmpfilename = os.path.join(ntd.tmp_data_path, + 'nansat_export_gcps_complex.nc') n1.export(tmpfilename) ncf = netcdf_file(tmpfilename) @@ -206,12 +248,34 @@ def test_export_band(self): n = Nansat(self.test_file_gcps, logLevel=40) tmpfilename = os.path.join(ntd.tmp_data_path, 'nansat_export_band.tif') - n.export(tmpfilename, bands= [1], driver='GTiff') + n.export(tmpfilename, bands=[1], driver='GTiff') n = Nansat(tmpfilename, mapperName='generic') self.assertTrue(os.path.exists(tmpfilename)) self.assertEqual(n.vrt.dataset.RasterCount, 1) + def test_export_band_by_name(self): + n = Nansat(self.test_file_gcps, logLevel=40) + tmpfilename = os.path.join(ntd.tmp_data_path, + 'nansat_export_band.tif') + n.export(tmpfilename, bands=['L_645'], driver='GTiff') + n = Nansat(tmpfilename, mapperName='generic') + + self.assertTrue(os.path.exists(tmpfilename)) + self.assertEqual(n.vrt.dataset.RasterCount, 1) + + def test_reproject_and_export_band(self): + n1 = Nansat(self.test_file_gcps, logLevel=40) + n2 = Nansat(self.test_file_stere, logLevel=40) + n1.reproject(n2) + tmpfilename = os.path.join(ntd.tmp_data_path, + 'nansat_reproject_export_band.nc') + n1.export(tmpfilename, bands=[1]) + + n = Nansat(tmpfilename, mapperName='generic') + self.assertTrue(os.path.exists(tmpfilename)) + self.assertEqual(n.vrt.dataset.RasterCount, 1) + def test_export_selected_bands(self): n = Nansat(self.test_file_gcps) resfile = 'tmp.nc' @@ -225,45 +289,89 @@ def test_export_selected_bands(self): self.assertTrue(nn.has_band('L_555')) os.unlink(resfile) # Test with band names - not yet implemented - #n.export(resfile, bands=['newBand', 'L_555']) - #nn = Nansat(resfile) - #self.assertTrue(nn.has_band('newBand')) - 
#self.assertTrue(nn.has_band('L_555')) - #os.unlink(resfile) - - def test_export2thredds_stere_one_band(self): - # skip the test if anaconda is used - if IS_CONDA: - return - n = Nansat(self.test_file_stere, logLevel=40) +# n.export(resfile, bands=['newBand', 'L_555']) +# nn = Nansat(resfile) +# self.assertTrue(nn.has_band('newBand')) +# self.assertTrue(nn.has_band('L_555')) +# os.unlink(resfile) + + def test_export2thredds_arctic_long_lat(self): + n = Nansat(self.test_file_arctic, logLevel=40) tmpfilename = os.path.join(ntd.tmp_data_path, - 'nansat_export2thredds_1b.nc') - n.export2thredds(tmpfilename, ['L_469']) - - self.assertTrue(os.path.exists(tmpfilename)) - - - def test_export2thredds_stere_many_bands(self): - # skip the test if anaconda is used - if IS_CONDA: - return - n = Nansat(self.test_file_stere, logLevel=40) - tmpfilename = os.path.join(ntd.tmp_data_path, - 'nansat_export2thredds_3b.nc') + 'nansat_export2thredds_arctic.nc') bands = { - 'L_645' : {'type': '>i1'}, - 'L_555' : {'type': '>i1'}, - 'L_469' : {'type': '>i1'}, + 'Bristol': {'type': '>i2'}, + 'Bootstrap': {'type': '>i2'}, + 'UMass_AES': {'type': '>i2'}, } - n.export2thredds(tmpfilename, bands) + n.export2thredds(tmpfilename, bands, + time=datetime.datetime(2016,1,20)) self.assertTrue(os.path.exists(tmpfilename)) + g = gdal.Open(tmpfilename) + metadata = g.GetMetadata_Dict() + + # Test that the long/lat values are set aproximately correct + ncg = 'NC_GLOBAL#' + easternmost_longitude = metadata.get(ncg + 'easternmost_longitude') + self.assertTrue(float(easternmost_longitude) > 179, + 'easternmost_longitude is wrong:' + + easternmost_longitude) + westernmost_longitude = metadata.get(ncg + 'westernmost_longitude') + self.assertTrue(float(westernmost_longitude) < -179, + 'westernmost_longitude is wrong:' + + westernmost_longitude) + northernmost_latitude = metadata.get(ncg + 'northernmost_latitude') + self.assertTrue(float(northernmost_latitude) > 89.999, + 'northernmost_latitude is wrong:' + + 
northernmost_latitude) + southernmost_latitude = metadata.get(ncg + 'southernmost_latitude') + self.assertTrue(float(southernmost_latitude) < 54, + 'southernmost_latitude is wrong:' + + southernmost_latitude) + self.assertTrue(float(southernmost_latitude) > 53, + 'southernmost_latitude is wrong:' + + southernmost_latitude) def test_dont_export2thredds_gcps(self): n = Nansat(self.test_file_gcps, logLevel=40) + n2 = Nansat(domain=n) + n.add_band(np.ones(n2.shape(), np.float32)) tmpfilename = os.path.join(ntd.tmp_data_path, 'nansat_export2thredds.nc') - self.assertRaises(OptionError, n.export2thredds, tmpfilename, ['L_645']) + self.assertRaises(OptionError, n2.export2thredds, tmpfilename, + ['L_645']) + + def test_export2thredds_longlat_list(self): + d = Domain("+proj=latlong +datum=WGS84 +ellps=WGS84 +no_defs", + "-te 27 70 31 72 -ts 200 200") + n = Nansat(domain=d) + n.add_band(np.ones(d.shape(), np.float32), + parameters={'name': 'L_469'}) + n.set_metadata('time_coverage_start', '2016-01-19') + + tmpfilename = os.path.join(ntd.tmp_data_path, + 'nansat_export2thredds_longlat.nc') + n.export2thredds(tmpfilename, ['L_469']) + ncI = netcdf_file(tmpfilename, 'r') + ncIVar = ncI.variables['L_469'] + self.assertTrue(ncIVar.grid_mapping in ncI.variables.keys()) + + def test_export2thredds_longlat_dict(self): + d = Domain("+proj=latlong +datum=WGS84 +ellps=WGS84 +no_defs", + "-te 27 70 31 72 -ts 200 200") + n = Nansat(domain=d) + n.add_band(np.ones(d.shape(), np.float32), + parameters={'name': 'L_469'}) + n.set_metadata('time_coverage_start', '2016-01-19') + + tmpfilename = os.path.join(ntd.tmp_data_path, + 'nansat_export2thredds_longlat.nc') + n.export2thredds(tmpfilename, {'L_469': {'type': '>i1'}}) + ncI = netcdf_file(tmpfilename, 'r') + ncIVar = ncI.variables['L_469'] + self.assertTrue(ncIVar.grid_mapping in ncI.variables.keys()) + self.assertEqual(ncIVar[:].dtype, np.int8) def test_resize_by_pixelsize(self): n = Nansat(self.test_file_gcps, logLevel=40) @@ -306,42 
+414,43 @@ def test_resize_complex_algAverage(self): warnings.simplefilter("always") n.resize(0.5, eResampleAlg=-1) - self.assertTrue(len(w)==1) + self.assertTrue(len(w) == 1) self.assertTrue(issubclass(w[-1].category, UserWarning)) self.assertTrue( - 'The imaginary parts of complex numbers ' \ + 'The imaginary parts of complex numbers ' 'are lost when resampling by averaging ' - in str(w[-1].message)) + in str(w[-1].message) + ) def test_resize_complex_alg0(self): n = Nansat(self.test_file_complex, logLevel=40) n.resize(0.5, eResampleAlg=0) - self.assertTrue(np.any(n[1].imag!=0)) + self.assertTrue(np.any(n[1].imag != 0)) def test_resize_complex_alg1(self): n = Nansat(self.test_file_complex, logLevel=40) n.resize(0.5, eResampleAlg=1) - self.assertTrue(np.any(n[1].imag!=0)) + self.assertTrue(np.any(n[1].imag != 0)) def test_resize_complex_alg2(self): n = Nansat(self.test_file_complex, logLevel=40) n.resize(0.5, eResampleAlg=2) - self.assertTrue(np.any(n[1].imag!=0)) + self.assertTrue(np.any(n[1].imag != 0)) def test_resize_complex_alg3(self): n = Nansat(self.test_file_complex, logLevel=40) n.resize(0.5, eResampleAlg=3) - self.assertTrue(np.any(n[1].imag!=0)) + self.assertTrue(np.any(n[1].imag != 0)) def test_resize_complex_alg4(self): n = Nansat(self.test_file_complex, logLevel=40) n.resize(0.5, eResampleAlg=4) - self.assertTrue(np.any(n[1].imag!=0)) + self.assertTrue(np.any(n[1].imag != 0)) def test_get_GDALRasterBand(self): n = Nansat(self.test_file_gcps, logLevel=40) @@ -367,6 +476,45 @@ def test_reproject_domain(self): self.assertEqual(n.shape(), (500, 500)) self.assertEqual(type(n[1]), np.ndarray) + self.assertTrue(n.has_band('swathmask')) + + def test_reproject_of_complex(self): + ''' Should return np.nan in areas out of swath ''' + n = Nansat(self.test_file_complex, logLevel=40) + d = Domain(4326, '-te -92.08 26.85 -92.00 26.91 -ts 200 200') + n.reproject(d) + b = n[1] + + self.assertTrue(n.has_band('swathmask')) + self.assertTrue(np.isnan(b[0, 0])) + 
self.assertTrue(np.isfinite(b[100, 100])) + + def test_add_band_and_reproject(self): + ''' Should add band and swath mask + and return 0 in areas out of swath ''' + n = Nansat(self.test_file_gcps, logLevel=40) + d = Domain(4326, "-te 27 70 30 72 -ts 500 500") + n.add_band(np.ones(n.shape())) + n.reproject(d) + b1 = n[1] + b4 = n[4] + + self.assertTrue(n.has_band('swathmask')) + self.assertTrue(b1[0, 0] == 0) + self.assertTrue(b1[300, 300] > 0) + self.assertTrue(np.isnan(b4[0, 0])) + self.assertTrue(b4[300, 300] == 1.) + + def test_reproject_no_addmask(self): + ''' Should not add swath mask and return 0 in areas out of swath ''' + n = Nansat(self.test_file_complex, logLevel=40) + d = Domain(4326, '-te -92.08 26.85 -92.00 26.91 -ts 200 200') + n.reproject(d, addmask=False) + b = n[1] + + self.assertTrue(not n.has_band('swathmask')) + self.assertTrue(np.isfinite(b[0, 0])) + self.assertTrue(np.isfinite(b[100, 100])) def test_reproject_stere(self): n1 = Nansat(self.test_file_gcps, logLevel=40) @@ -436,7 +584,7 @@ def test_write_figure_clim(self): self.assertTrue(os.path.exists(tmpfilename)) - def test_write_figure_clim(self): + def test_write_figure_legend(self): n1 = Nansat(self.test_file_stere, logLevel=40) tmpfilename = os.path.join(ntd.tmp_data_path, 'nansat_write_figure_legend.png') @@ -452,13 +600,6 @@ def test_write_geotiffimage(self): self.assertTrue(os.path.exists(tmpfilename)) - def test_get_time(self): - n1 = Nansat(self.test_file_gcps, logLevel=40) - t = n1.get_time() - - self.assertEqual(len(t), len(n1.bands())) - self.assertEqual(type(t[0]), datetime.datetime) - def test_get_metadata(self): n1 = Nansat(self.test_file_stere, logLevel=40) m = n1.get_metadata() @@ -474,9 +615,9 @@ def test_get_metadata_key(self): def test_get_metadata_wrong_key(self): n1 = Nansat(self.test_file_stere, logLevel=40) - m = n1.get_metadata('some_crap') - self.assertTrue(m is None) + with self.assertRaises(OptionError): + n1.get_metadata('some_crap') def 
test_get_metadata_bandid(self): n1 = Nansat(self.test_file_stere, logLevel=40) @@ -501,53 +642,87 @@ def test_set_metadata_bandid(self): def test_get_transect(self): n1 = Nansat(self.test_file_gcps, logLevel=40) - v, xy, pl = n1.get_transect([[(28.31299128, 70.93709219), - (28.93691525, 70.69646524)]]) + t = n1.get_transect([[28.31299128, 28.93691525], + [70.93709219, 70.69646524]], + ['L_645']) tmpfilename = os.path.join(ntd.tmp_data_path, 'nansat_get_transect.png') - plt.plot(v['1:L_645']['shape0'], xy['shape0']['latitude']) + plt.plot(t['lat'], t['L_645'], '.-') plt.savefig(tmpfilename) plt.close('all') - self.assertTrue(len(v['1:L_645']['shape0']) > 50) - self.assertEqual(len(v['1:L_645']['shape0']), - len(xy['shape0']['latitude'])) - self.assertEqual(len(v['1:L_645']['shape0']), - len(pl['shape0'][0])) - self.assertEqual(type(xy['shape0']['latitude']), np.ndarray) - self.assertEqual(type(pl['shape0'][0]), np.ndarray) + self.assertTrue('L_645' in t.dtype.fields) + self.assertTrue('line' in t.dtype.fields) + self.assertTrue('pixel' in t.dtype.fields) + self.assertTrue('lat' in t.dtype.fields) + self.assertTrue('lon' in t.dtype.fields) + self.assertEqual(type(t['lat']), np.ndarray) + self.assertEqual(type(t['lon']), np.ndarray) def test_get_transect_outside(self): n1 = Nansat(self.test_file_gcps, logLevel=40) - v, xy, pl = n1.get_transect([[(28.31299128, 70.93709219), - (0.0, 0.0)]]) - - self.assertTrue(len(v['1:L_645']['shape0']) > 50) - self.assertEqual(len(v['1:L_645']['shape0']), - len(xy['shape0']['latitude'])) - self.assertEqual(len(v['1:L_645']['shape0']), - len(pl['shape0'][0])) - self.assertEqual(type(xy['shape0']['latitude']), np.ndarray) - self.assertEqual(type(pl['shape0'][0]), np.ndarray) - - def test_get_transect_false(self): + t = n1.get_transect([[0, 28.31299128], [0, 70.93709219]], [1]) + + self.assertTrue('L_645' in t.dtype.fields) + self.assertTrue('line' in t.dtype.fields) + self.assertTrue('pixel' in t.dtype.fields) + self.assertTrue('lat' in 
t.dtype.fields) + self.assertTrue('lon' in t.dtype.fields) + self.assertEqual(type(t['lat']), np.ndarray) + self.assertEqual(type(t['lon']), np.ndarray) + + def test_get_transect_wrong_points(self): + n1 = Nansat(self.test_file_gcps, logLevel=40) + self.assertRaises(OptionError, n1.get_transect, [1, 1], [1]) + + def test_get_transect_wrong_band(self): n1 = Nansat(self.test_file_gcps, logLevel=40) - v, xy, pl = n1.get_transect([(28.31299128, 70.93709219), - (28.93691525, 70.69646524)]) + t = n1.get_transect([[0, 28.31299128], [0, 70.93709219]], [10]) - self.assertEqual(len(v['1:L_645']), 2) - self.assertEqual(len(v['1:L_645']), len(xy)) - self.assertEqual(len(v['1:L_645']), len(pl)) - self.assertEqual(type(xy['shape0']['latitude']), np.ndarray) - self.assertEqual(type(pl['shape0'][0]), np.ndarray) + self.assertTrue('line' in t.dtype.fields) + self.assertTrue('pixel' in t.dtype.fields) + self.assertTrue('lat' in t.dtype.fields) + self.assertTrue('lon' in t.dtype.fields) + self.assertEqual(type(t['lat']), np.ndarray) + self.assertEqual(type(t['lon']), np.ndarray) - def test_get_no_transect_interactive(self): - import matplotlib.pyplot as plt + def test_get_transect_pixlin(self): + n1 = Nansat(self.test_file_gcps, logLevel=40) + t = n1.get_transect([[10, 20], + [10, 10]], + ['L_645'], + lonlat=False) + + self.assertTrue('L_645' in t.dtype.fields) + self.assertTrue('line' in t.dtype.fields) + self.assertTrue('pixel' in t.dtype.fields) + self.assertTrue('lat' in t.dtype.fields) + self.assertTrue('lon' in t.dtype.fields) + self.assertEqual(type(t['lat']), np.ndarray) + self.assertEqual(type(t['lon']), np.ndarray) + self.assertEqual(len(t['lon']), 11) + + def test_get_transect_data(self): + n1 = Nansat(self.test_file_gcps, logLevel=40) + b1 = n1[1] + t = n1.get_transect([[28.3],[70.9]], [], data=b1) + + self.assertTrue('input' in t.dtype.fields) + self.assertTrue('L_645' not in t.dtype.fields) + self.assertTrue('line' in t.dtype.fields) + self.assertTrue('pixel' in 
t.dtype.fields) + self.assertTrue('lat' in t.dtype.fields) + self.assertTrue('lon' in t.dtype.fields) + self.assertEqual(type(t['lat']), np.ndarray) + self.assertEqual(type(t['lon']), np.ndarray) + + def test_digitize_points(self): + ''' shall return empty array in non interactive mode ''' plt.ion() n1 = Nansat(self.test_file_gcps, logLevel=40) - noneResult = n1.get_transect() + points = n1.digitize_points(1) - self.assertEqual(noneResult, None) + self.assertEqual(len(points), 0) plt.ioff() def test_crop(self): @@ -568,6 +743,22 @@ def test_crop_lonlat_lims(self): self.assertEqual(ext, (31, 89, 110, 111)) self.assertEqual(type(n1[1]), np.ndarray) + def test_watermask(self): + ''' if watermask data exists: should fetch array with watermask + else: should raise an error''' + n1 = Nansat(self.test_file_gcps, logLevel=40) + mod44path = os.getenv('MOD44WPATH') + if mod44path is not None and os.path.exists(mod44path + '/MOD44W.vrt'): + wm = n1.watermask()[1] + self.assertEqual(type(wm), np.ndarray) + self.assertEqual(wm.shape[0], n1.shape()[0]) + self.assertEqual(wm.shape[1], n1.shape()[1]) + + def test_watermask_fail(self): + ''' Nansat.watermask should raise an IOError''' + n1 = Nansat(self.test_file_gcps, logLevel=40) + os.environ['MOD44WPATH'] = '/fakepath' + self.assertRaises(IOError, n1.watermask) if __name__ == "__main__": unittest.main() diff --git a/nansat/tests/test_nansatmap.py b/nansat/tests/test_nansatmap.py new file mode 100644 index 000000000..a647b7e18 --- /dev/null +++ b/nansat/tests/test_nansatmap.py @@ -0,0 +1,102 @@ +#------------------------------------------------------------------------------ +# Name: test_nansat.py +# Purpose: Test the Nansat class +# +# Author: Morten Wergeland Hansen, Anton Korosov, Asuka Yamakawa +# Modified: Morten Wergeland Hansen +# +# Created: 18.06.2014 +# Last modified:18.11.2014 11:48 +# Copyright: (c) NERSC +# Licence: This file is part of NANSAT. 
You can redistribute it or modify +# under the terms of GNU General Public License, v.3 +# http://www.gnu.org/licenses/gpl-3.0.html +#------------------------------------------------------------------------------ +import unittest +import warnings +import os +import sys +import glob +from types import ModuleType, FloatType +import datetime + +import matplotlib.pyplot as plt +import numpy as np +from scipy.io.netcdf import netcdf_file +from scipy.interpolate import griddata + +from nansat import Nansat, Domain, Nansatmap, NSR +from nansat.tools import gdal, OptionError + +import nansat_test_data as ntd + +IS_CONDA = 'conda' in os.environ['PATH'] + + +class NansatmapTest(unittest.TestCase): + def setUp(self): + self.test_file_stere = os.path.join(ntd.test_data_path, 'stere.tif') + plt.switch_backend('Agg') + + if not os.path.exists(self.test_file_stere): + raise ValueError('No test data available') + + def test_create_map(self): + ''' should simply create a Nansatmap instance ''' + n = Nansat(self.test_file_stere, logLevel=40) + nmap = Nansatmap(n) + + self.assertEqual(type(nmap), Nansatmap) + + def test_imshow(self): + ''' Should use Nansatmap.imshow ''' + n = Nansat(self.test_file_stere, logLevel=40) + b1 = n[1] + nmap = Nansatmap(n) + nmap.imshow(b1, cmap='ak01') + tmpfilename = os.path.join(ntd.tmp_data_path, 'nansatmap_imshow.png') + nmap.save(tmpfilename) + + self.assertTrue(os.path.exists(tmpfilename)) + + def test_imshow_random(self): + ''' Should use Nansatmap.imshow ''' + n = Nansat(self.test_file_stere, logLevel=40) + b1 = n[1] + nmap = Nansatmap(n) + nmap.imshow(b1/5, cmap='random') + nmap.add_colorbar() + tmpfilename = os.path.join(ntd.tmp_data_path, 'nansatmap_imshow_random.png') + nmap.save(tmpfilename) + + self.assertTrue(os.path.exists(tmpfilename)) + + def test_pcolormesh(self): + ''' Should use Nansatmap.pcolormesh ''' + n = Nansat(self.test_file_stere, logLevel=40) + b1 = n[1] + nmap = Nansatmap(n) + nmap.pcolormesh(b1) + tmpfilename = 
os.path.join(ntd.tmp_data_path, 'nansatmap_pcolormesh.png') + nmap.save(tmpfilename) + + self.assertTrue(os.path.exists(tmpfilename)) + + def test_add_labels(self): + size, npo = 100, 10 + xy = np.random.randint(0, size, npo*2).reshape(npo, 2) + z = np.random.randint(0, size, npo) + xg, yg = np.meshgrid(range(size), range(size)) + zg = griddata(xy, z, np.dstack([xg, yg]), method='nearest') + dstDomain = Domain(NSR().wkt, '-te -10 -10 10 10 -ts 100 100') + + nmap = Nansatmap(dstDomain) + nmap.imshow(zg, cmap='random') + nmap.add_zone_labels(zg, fontsize=10) + tmpfilename = os.path.join(ntd.tmp_data_path, 'nansatmap_zonelables.png') + nmap.save(tmpfilename) + + self.assertTrue(os.path.exists(tmpfilename)) + +if __name__ == "__main__": + unittest.main() diff --git a/nansat/tests/test_tools.py b/nansat/tests/test_tools.py new file mode 100644 index 000000000..94aedb1b1 --- /dev/null +++ b/nansat/tests/test_tools.py @@ -0,0 +1,40 @@ +#------------------------------------------------------------------------------ +# Name: test_nansat.py +# Purpose: Test the Nansat class +# +# Author: Morten Wergeland Hansen, Asuka Yamakawa +# Modified: Morten Wergeland Hansen +# +# Created: 18.06.2014 +# Last modified:16.04.2015 10:48 +# Copyright: (c) NERSC +# Licence: This file is part of NANSAT. 
You can redistribute it or modify +# under the terms of GNU General Public License, v.3 +# http://www.gnu.org/licenses/gpl-3.0.html +#------------------------------------------------------------------------------ +import unittest +import datetime + +from matplotlib.colors import hex2color +from nansat.tools import get_random_color, parse_time + +class ToolsTest(unittest.TestCase): + def test_get_random_color(self): + ''' Should return HEX code of random color ''' + c0 = get_random_color() + c1 = get_random_color(c0) + c2 = get_random_color(c1, 300) + + self.assertEqual(type(hex2color(c0)), tuple) + self.assertEqual(type(hex2color(c1)), tuple) + self.assertEqual(type(hex2color(c2)), tuple) + + def test_parse_time(self): + dt = parse_time('2016-01-19') + + self.assertEqual(type(dt), datetime.datetime) + + def test_parse_time_incorrect(self): + dt = parse_time('2016-01-19Z') + + self.assertEqual(type(dt), datetime.datetime) diff --git a/nansat/tools.py b/nansat/tools.py index 9cd1330a4..ee5b075fa 100644 --- a/nansat/tools.py +++ b/nansat/tools.py @@ -17,18 +17,18 @@ from __future__ import absolute_import import os -import sys import warnings import logging +from dateutil.parser import parse from matplotlib import cm +from matplotlib.colors import hex2color + import numpy as np from scipy import mod try: - import gdal - import ogr - import osr + import gdal, ogr, osr except: from osgeo import gdal, ogr, osr @@ -109,22 +109,17 @@ warnings.warn('Cannot generate and register the OBPG colormap!') -class Error(Exception): - '''Base class for exceptions in this module.''' - pass - - -class OptionError(Error): +class OptionError(Exception): '''Error for improper options (arguments) ''' pass -class ProjectionError(Error): +class ProjectionError(Exception): '''Cannot get the projection''' pass -class GDALError(Error): +class GDALError(Exception): '''Error from GDAL ''' pass @@ -163,7 +158,6 @@ def initial_bearing(lon1, lat1, lon2, lat2): rlat1 = np.radians(lat1) rlon2 = 
np.radians(lon2) rlat2 = np.radians(lat2) - deltalon = rlon2 - rlon1 bearing = np.arctan2(np.sin(rlon2 - rlon1) * np.cos(rlat2), np.cos(rlat1) * np.sin(rlat2) - np.sin(rlat1) * np.cos(rlat2) * @@ -228,3 +222,74 @@ def add_logger(logName='', logLevel=None): logger.handlers[0].setLevel(int(os.environ['LOG_LEVEL'])) return logger + + +def get_random_color(c0=None, minDist=100, low=0, high=255): + ''' Create random color which is far enough from the input color + + Parameters + ---------- + c0 : str + hexademical representation of the color (e.g. '#ff0000' for red) + minDist : int + minimal distance to input color + + Returns + ------- + c0 : str + hexademical representation of the new random color + ''' + # check inputs + if c0 is None: + c0 = '#000000' + # convert input color to tuple of R,G,B + c0rgb = np.array(hex2color(c0)) + + # create new random color + c1rgb = np.array([np.random.randint(low, high), + np.random.randint(low, high), + np.random.randint(low, high)]) + + # calculate distance + d = np.sum((c0rgb - c1rgb)**2)**0.5 + + # if distance is small, create new random color + if d < minDist: + c1 = get_random_color(c0, minDist) + else: + # convert to HEX code + c1 = '#%02x%02x%02x' % tuple(c1rgb) + + return c1 + + +def parse_time(time_string): + ''' Parse time string accounting for possible wrong formatting + Parameters + ---------- + time_string : str + string with date and time + Returns + ------- + time_value : datetime object + + ''' + time_string = time_string.strip() + # To account for datasets on the format YYYY-MM-DDZ which is + # invalid since it has no time, but a timezone + try: + time_value = parse(time_string) + except ValueError: + if (len(time_string) == 11 and + time_string.endswith('Z')): + time_value = parse(time_string[:10]) + + return time_value + + +def test_openable(fname): + try: + f = open(fname, 'r') + except IOError: + raise + f.close() diff --git a/nansat/vrt.py b/nansat/vrt.py old mode 100755 new mode 100644 index 
21af296c1..e57fa5e14 --- a/nansat/vrt.py +++ b/nansat/vrt.py @@ -19,7 +19,6 @@ import tempfile from string import Template, ascii_uppercase, digits from random import choice -import datetime import warnings import numpy as np @@ -160,8 +159,8 @@ class VRT(object): $ScaleOffset $ScaleRatio $LUT - - + + ''') RawRasterBandSource = Template(''' @@ -308,8 +307,12 @@ def __init__(self, gdalDataset=None, vrtDataset=None, self.dataset.SetProjection(srcProjection) self.dataset.SetGeoTransform(srcGeoTransform) - # set metadata - self.dataset.SetMetadata(srcMetadata) + # set source metadata corrected for potential Unicode + if type(srcMetadata) is dict: + for key in srcMetadata.keys(): + srcMetadata[key] = srcMetadata[key].encode('ascii', + 'ignore') + self.dataset.SetMetadata(srcMetadata) # add geolocation array from input or from source data if geolocationArray is None: @@ -326,7 +329,6 @@ def __init__(self, gdalDataset=None, vrtDataset=None, self.logger.debug('VRT self.dataset: %s' % self.dataset) self.logger.debug('VRT description: %s' % self.dataset.GetDescription()) - #self.logger.debug('VRT metadata: %s ' % self.dataset.GetMetadata()) self.logger.debug('VRT RasterXSize %d' % self.dataset.RasterXSize) self.logger.debug('VRT RasterYSize %d' % self.dataset.RasterYSize) @@ -484,6 +486,7 @@ def _create_band(self, src, dst=None): self.logger.debug('SRC[DataType]: %d' % src['DataType']) srcDs = gdal.Open(src['SourceFilename']) + # create XML for each source src['XML'] = self.ComplexSource.substitute( Dataset=src['SourceFilename'], @@ -493,10 +496,10 @@ def _create_band(self, src, dst=None): ScaleOffset=src['ScaleOffset'], ScaleRatio=src['ScaleRatio'], LUT=src['LUT'], - srcXSize=srcDs.RasterXSize, - srcYSize=srcDs.RasterYSize, - dstXSize=srcDs.RasterXSize, - dstYSize=srcDs.RasterYSize) + xSize=src.get('xSize', srcDs.RasterXSize), + ySize=src.get('ySize', srcDs.RasterYSize), + xOff=src.get('xOff', 0), + yOff=src.get('yOff', 0),) # create destination options if 
'PixelFunctionType' in dst and len(dst['PixelFunctionType']) > 0: @@ -530,7 +533,7 @@ def _create_band(self, src, dst=None): dst['dataType'] = gdal.GDT_Float32 else: self.logger.debug('Set dst[dataType]: %d' % src['DataType']) - #otherwise take the DataType from source + # otherwise take the DataType from source dst['dataType'] = src['DataType'] # Set destination name @@ -600,37 +603,24 @@ def _create_band(self, src, dst=None): # return name of the created band return dst['name'] - def _set_time(self, time): - ''' Set time of dataset and/or its bands - - Parameters - ---------- - time : datetime + def _add_swath_mask_band(self): + ''' Create a new band where all values = 1 - If a single datetime is given, this is stored in - all bands of the dataset as a metadata item 'time'. - If a list of datetime objects is given, different - time can be given to each band. + Modifies + --------- + Single band 'swathmask' with ones is added to the self.dataset ''' - # Make sure time is a list with one datetime element per band - numBands = self.dataset.RasterCount - if (isinstance(time, datetime.datetime) or - isinstance(time, datetime.date)): - time = [time] - if len(time) == 1: - time = time * numBands - if len(time) != numBands: - self.logger.error('Dataset has %s elements, ' - 'but given time has %s elements.' 
- % (str(numBands), str(len(time)))) - - # Store time as metadata key 'time' in each band - for i in range(numBands): - iBand = self.dataset.GetRasterBand(i + 1) - iBand.SetMetadataItem('time', str(time[i].isoformat())) - - return + self._create_band( + src=[{ + 'SourceFilename': self.fileName, + 'SourceBand': 1, + 'DataType': gdal.GDT_Byte}], + dst={ + 'dataType': gdal.GDT_Byte, + 'wkv': 'swath_binary_mask', + 'PixelFunctionType': 'OnesPixelFunc', + }) def _get_wkv(self, wkvName): ''' Get wkv from wkv.xml @@ -678,10 +668,11 @@ def _put_metadata(self, rasterBand, metadataDict): for key in metadataDict: try: metaValue = str(metadataDict[key]) + metaKey = str(key) except UnicodeEncodeError: self.logger.error('Cannot add %s to metadata' % key) else: - rasterBand.SetMetadataItem(key, metaValue) + rasterBand.SetMetadataItem(metaKey, metaValue) return rasterBand @@ -714,7 +705,7 @@ def create_dataset_from_array(self, array): self.logger.debug('arrayDType: %s', arrayDType) - #create conents of VRT-file pointing to the binary file + # create conents of VRT-file pointing to the binary file dataType = {'uint8': 'Byte', 'int8': 'Byte', 'uint16': 'UInt16', @@ -747,7 +738,7 @@ def create_dataset_from_array(self, array): SrcFileName=binaryFile, PixelOffset=pixelOffset, LineOffset=lineOffset) - #write XML contents to + # write XML contents to self.write_xml(contents) def read_xml(self, inFileName=None): @@ -768,7 +759,7 @@ def read_xml(self, inFileName=None): inFileName = str(self.fileName) self.dataset.FlushCache() - #read from the vsi-file + # read from the vsi-file # open vsiFile = gdal.VSIFOpenL(inFileName, 'r') # get file size @@ -795,8 +786,6 @@ def write_xml(self, vsiFileContent=None): If XML content was written, self.dataset is re-opened ''' - #write to the vsi-file - vsiFile = gdal.VSIFOpenL(self.fileName, 'w') gdal.VSIFWriteL(vsiFileContent, len(vsiFileContent), 1, vsiFile) @@ -883,7 +872,7 @@ def _remove_geotransform(self): tmpVRTXML = self.read_xml() # find and remove 
GeoTransform node0 = Node.create(tmpVRTXML) - node1 = node0.delNode('GeoTransform') + node0.delNode('GeoTransform') # Write the modified elemements back into temporary VRT self.write_xml(node0.rawxml()) @@ -1297,6 +1286,7 @@ def delete_band(self, bandNum): ''' node0 = Node.create(self.read_xml()) node0.delNode('VRTRasterBand', options={'band': bandNum}) + node0.delNode('BandMapping', options={'src': bandNum}) self.write_xml(node0.rawxml()) def delete_bands(self, bandNums): @@ -1308,8 +1298,7 @@ def delete_bands(self, bandNums): elements are int ''' - bandNums.sort() - bandNums.reverse() + bandNums.sort(reverse=True) for iBand in bandNums: self.delete_band(iBand) @@ -1424,7 +1413,6 @@ def get_shifted_vrt(self, shiftDegree): for i in range(len(node0.nodeList('VRTRasterBand'))): # create i-th 'VRTRasterBand' node node1 = node0.node('VRTRasterBand', i) - node1Band = node1.getAttribute('band') # modify the 1st band shiftStr = str(shiftPixel) sizeStr = str(shiftVRT.vrt.dataset.RasterXSize - shiftPixel) @@ -1438,7 +1426,6 @@ def get_shifted_vrt(self, shiftDegree): # add the 2nd band xmlSource = node1.rawxml() cloneNode = Node.create(xmlSource).node('ComplexSource') - #cloneNode = node1.node('ComplexSource') cloneNode.node('SrcRect').replaceAttribute('xOff', sizeStr) cloneNode.node('DstRect').replaceAttribute('xOff', str(0)) cloneNode.node('SrcRect').replaceAttribute('xSize', shiftStr) @@ -1521,8 +1508,7 @@ def get_super_vrt(self): return superVRT - def get_subsampled_vrt(self, newRasterXSize, newRasterYSize, - factor, eResampleAlg): + def get_subsampled_vrt(self, newRasterXSize, newRasterYSize, eResampleAlg): '''Create VRT and replace step in the source''' subsamVRT = self.get_super_vrt() @@ -1535,9 +1521,6 @@ def get_subsampled_vrt(self, newRasterXSize, newRasterYSize, node0.replaceAttribute('rasterXSize', str(newRasterXSize)) node0.replaceAttribute('rasterYSize', str(newRasterYSize)) - rasterYSize = subsamVRT.vrt.dataset.RasterYSize - rasterXSize = 
subsamVRT.vrt.dataset.RasterXSize - # replace xSize in of each source for iNode1 in node0.nodeList('VRTRasterBand'): for sourceName in ['ComplexSource', 'SimpleSource']: @@ -1554,10 +1537,9 @@ def get_subsampled_vrt(self, newRasterXSize, newRasterYSize, # if the values are complex number, give a warning if iNode1.getAttribute('dataType').startswith('C'): warnings.warn( - 'Band %s : The imaginary parts of complex numbers ' \ - 'are lost when resampling by averaging ' \ - '(eResampleAlg=-1)' %iNode1.getAttribute('band') - ) + 'Band %s : The imaginary parts of complex numbers ' + 'are lost when resampling by averaging ' + '(eResampleAlg=-1)' % iNode1.getAttribute('band')) # Write the modified elemements into VRT subsamVRT.write_xml(node0.rawxml()) @@ -1604,8 +1586,6 @@ def transform_points(self, colVector, rowVector, DstToSrc=0, xy = np.array([colVector, rowVector]).transpose() # transfrom coordinates - #lonlat = transformer.TransformPoints(DstToSrc, xy)#[0] - #import pdb; pdb.set_trace() lonlat = transformer.TransformPoints(DstToSrc, xy)[0] # convert return to lon,lat vectors @@ -1633,7 +1613,7 @@ def get_projection(self): ProjectionError : occurrs when the projection is empty. 
''' - #get projection or GCPProjection + # get projection or GCPProjection projection = self.dataset.GetProjection() if projection == '': projection = self.dataset.GetGCPProjection() diff --git a/nansat/wkv.xml b/nansat/wkv.xml old mode 100755 new mode 100644 index cb6a9ece1..536a6bd2e --- a/nansat/wkv.xml +++ b/nansat/wkv.xml @@ -1,6 +1,38 @@ - radar_brightness_coefficient + surface_backwards_doppler_frequency_shift_of_radar_wave_due_to_surface_velocity + Radar Doppler frequency shift due to surface velocity + dca + Hz + -60 60 + jet + + + predicted_surface_backwards_doppler_frequency_shift_of_radar_wave + Geometric predicted Radar Doppler frequency shift + dcp + Hz + 0 2000 + jet + + + standard_deviation_of_surface_backwards_doppler_centroid_frequency_shift_of_radar_wave + Standard deviation of Radar Doppler centroid frequency shift estimate + dc_std + Hz + 0 5 + jet + + + surface_backwards_doppler_centroid_frequency_shift_of_radar_wave + Radar Doppler centroid frequency shift + dc + Hz + 0 2000 + jet + + + surface_backwards_brightness_coefficient_of_radar_wave Reflectivity per unit area in slant range beta0 m/m @@ -168,7 +200,7 @@ sensor_zenith_angle Sensor Zenith Angle - sat_zenith + sensor_zenith degrees 0 90 jet @@ -184,7 +216,7 @@ sensor_azimuth_angle Sensor Azimuth Angle - sat_azimuth + sensor_azimuth degrees 0 360 jet @@ -237,6 +269,14 @@ 230 330 jet + + height_above_reference_ellipsoid + Digital elevation model height above mean sea level + topo + m + 0 4000 + jet + land_binary_mask Land mask @@ -245,6 +285,14 @@ 0 1 jet + + swath_binary_mask + Swath mask + swathmask + + 0 1 + jet + latitude Latitude @@ -321,7 +369,7 @@ instantaneous_photosynthetically_available_radiation Instantaneous Photosynthetically Available Radiation PAR - enstain m-2 s-1 + mol m-2 s-1 sr-1 0 0.0024 jet @@ -342,11 +390,35 @@ jet - photosynthetically_available_radiation + downwelling_photosynthetic_photon_radiance_in_sea_water Photosynthetically Available Radiation PAR - enstain m-2 
day-1 + mol m-2 s-1 sr-1 + 0 80 + jet + + + instantaneous_downwelling_photosynthetic_photon_radiance_in_sea_water + Instantaneous Photosynthetically Available Radiation + IPAR + mol m-2 s-1 sr-1 0 80 jet + + depth + Depth + depth + m + 0 13000 + jet + + + concentration_of_coccoliths_in_sea_water + Coccoliths + ccl + 10e12 plates m-3 + 0.001 15.000 + jet + diff --git a/setup.py b/setup.py index 6a0b519d9..f9f94f174 100644 --- a/setup.py +++ b/setup.py @@ -21,22 +21,22 @@ try: import numpy except ImportError: - raise ImportError(import_error_msg %'numpy') + raise ImportError(import_error_msg % 'numpy') try: import scipy except ImportError: - raise ImportError(import_error_msg %'scipy') + raise ImportError(import_error_msg % 'scipy') try: import matplotlib except ImportError: - raise ImportError(import_error_msg %'matplotlib') + raise ImportError(import_error_msg % 'matplotlib') try: from mpl_toolkits.basemap import Basemap except ImportError as e: - raise ImportError(import_error_msg %'basemap') + raise ImportError(import_error_msg % 'basemap') try: from osgeo import gdal, osr, ogr @@ -61,12 +61,14 @@ AUTHOR_EMAIL = "nansat-dev@googlegroups.com" PLATFORMS = ["UNKNOWN"] MAJOR = 0 -MINOR = 6 -MICRO = 6 +MINOR = 7 +MICRO = 0 ISRELEASED = True -VERSION = '%d.%d.%d' % (MAJOR, MINOR, MICRO) # Remember to remove "dev" when releasing +VERSION = '%d.%d-dev.%d' % (MAJOR, MINOR, MICRO) # Remember to remove "dev" when releasing REQS = [ "Pillow", + "requests", + "nerscmetadata" ] #----------------------------------------------------------------------------# @@ -92,6 +94,7 @@ extra_compile_args = ['-fPIC', '-Wall', '-Wno-long-long', '-pedantic', '-O3'] extra_link_args = [] # not used currently + def _ask_gdal_config(resultlist, option, result_prefix): p = Popen(['gdal-config', option], stdout=subprocess.PIPE) t = p.stdout.read().decode().strip() @@ -103,11 +106,13 @@ def _ask_gdal_config(resultlist, option, result_prefix): res = [x[len(result_prefix):] for x in res] resultlist[:] 
= res + def use_gdal_config(): _ask_gdal_config(include_dirs, '--cflags', '-I') _ask_gdal_config(library_dirs, '--libs', '-L') _ask_gdal_config(libraries, '--libs', '-l') + try: use_gdal_config() except Exception as e: @@ -141,6 +146,8 @@ def use_gdal_config(): set py_exe=%line1:~2% call %py_exe% %pyscript% %* """ + + class my_install_scripts(install_scripts): def run(self): install_scripts.run(self) @@ -176,6 +183,7 @@ def run(self): else: ext_errors = (CCompilerError, DistutilsExecError, DistutilsPlatformError) + def run_setup(skip_compile): if skip_compile: kw = dict() @@ -224,9 +232,15 @@ def run_setup(skip_compile): ]], cmdclass = {'install_scripts': my_install_scripts}, install_requires=REQS, + dependency_links = [ + "https://github.com/nansencenter/nersc-metadata/tarball/master#egg=nerscmetadata" + ], test_suite="nansat.tests", **kw - ) + ) + ## write json file with gcmd keywords + #from nerscmetadata.gcmd_keywords import write_json + #write_json() try: run_setup(skip_compile) @@ -245,4 +259,3 @@ def run_setup(skip_compile): print(BUILD_EXT_WARNING) print("Plain-Python installation succeeded.") print('*' * 75) -