From 2500fe0af0ddd1b0d090e0469a3eacc1e0123037 Mon Sep 17 00:00:00 2001 From: tsutterley Date: Mon, 16 May 2022 13:29:13 -0700 Subject: [PATCH 01/14] feat: added ESR netCDF4 formats to list of model types --- .github/workflows/Dockerfile | 2 +- .../getting_started/Getting-Started.rst | 1 + doc/source/user_guide/read_tide_model.rst | 4 + notebooks/Check Tide Map.ipynb | 4 +- notebooks/Plot Antarctic Tidal Currents.ipynb | 2 +- notebooks/Plot Antarctic Tide Range.ipynb | 2 +- notebooks/Plot Arctic Ocean Map.ipynb | 2 +- notebooks/Plot Ross Ice Shelf Map.ipynb | 2 +- notebooks/Plot Tide Forecasts.ipynb | 2 +- pyTMD/check_tide_points.py | 5 +- pyTMD/compute_tide_corrections.py | 5 +- pyTMD/load_nodal_corrections.py | 7 +- pyTMD/model.py | 67 ++++-- pyTMD/predict_tidal_ts.py | 5 +- pyTMD/predict_tide.py | 5 +- pyTMD/predict_tide_drift.py | 5 +- pyTMD/read_FES_model.py | 22 +- pyTMD/read_GOT_model.py | 14 +- pyTMD/read_netcdf_model.py | 18 +- pyTMD/read_tide_model.py | 193 +++++++++++++++--- pyTMD/tools.py | 10 +- scripts/compute_tidal_currents.py | 5 +- scripts/compute_tidal_elevations.py | 5 +- scripts/compute_tides_ICESat2_ATL03.py | 5 +- scripts/compute_tides_ICESat2_ATL06.py | 5 +- scripts/compute_tides_ICESat2_ATL07.py | 5 +- scripts/compute_tides_ICESat2_ATL10.py | 5 +- scripts/compute_tides_ICESat2_ATL11.py | 5 +- scripts/compute_tides_ICESat2_ATL12.py | 5 +- scripts/compute_tides_ICESat_GLA12.py | 5 +- scripts/compute_tides_icebridge_data.py | 5 +- 31 files changed, 323 insertions(+), 104 deletions(-) diff --git a/.github/workflows/Dockerfile b/.github/workflows/Dockerfile index 13076f3e..bbe8b4f2 100644 --- a/.github/workflows/Dockerfile +++ b/.github/workflows/Dockerfile @@ -30,7 +30,7 @@ WORKDIR /tmp ENV JOBS 2 ENV CFLAGS="-fPIC" -ENV ZLIB_VERSION=1.2.11 +ENV ZLIB_VERSION=1.2.12 RUN wget -q http://zlib.net/zlib-${ZLIB_VERSION}.tar.gz && \ tar -xzf zlib-${ZLIB_VERSION}.tar.gz && \ cd zlib-${ZLIB_VERSION} && \ diff --git 
a/doc/source/getting_started/Getting-Started.rst b/doc/source/getting_started/Getting-Started.rst index 6fbeccb8..90c522f7 100644 --- a/doc/source/getting_started/Getting-Started.rst +++ b/doc/source/getting_started/Getting-Started.rst @@ -44,6 +44,7 @@ Presently, the following models and their directories parameterized within ``pyT * CATS0201: ``/cats0201_tmd/`` * `CATS2008 `_: ``/CATS2008/`` * CATS2008_load: ``/CATS2008a_SPOTL_Load/`` + * CATS2022: ``/CATS2022/`` - Arctic Ocean and Greenland Coast Tidal Simulations [Padman2004]_ diff --git a/doc/source/user_guide/read_tide_model.rst b/doc/source/user_guide/read_tide_model.rst index 8c9afb48..fb8221aa 100644 --- a/doc/source/user_guide/read_tide_model.rst +++ b/doc/source/user_guide/read_tide_model.rst @@ -26,6 +26,8 @@ Calling Sequence .. autofunction:: pyTMD.read_tide_model.read_atlas_grid +.. autofunction:: pyTMD.read_tide_model.read_netcdf_grid + .. autofunction:: pyTMD.read_tide_model.read_constituents .. autofunction:: pyTMD.read_tide_model.read_elevation_file @@ -42,6 +44,8 @@ Calling Sequence .. autofunction:: pyTMD.read_tide_model.combine_atlas_model +.. autofunction:: pyTMD.read_tide_model.read_netcdf_file + .. autofunction:: pyTMD.read_tide_model.extend_array .. 
autofunction:: pyTMD.read_tide_model.extend_matrix diff --git a/notebooks/Check Tide Map.ipynb b/notebooks/Check Tide Map.ipynb index 6385f9f6..be2ba1e9 100644 --- a/notebooks/Check Tide Map.ipynb +++ b/notebooks/Check Tide Map.ipynb @@ -119,7 +119,7 @@ " ).elevation(TMDwidgets.model.value)\n", " \n", "# read tidal constants and interpolate to grid points\n", - "if model.format in ('OTIS','ATLAS'):\n", + "if model.format in ('OTIS','ATLAS','ESR'):\n", " # if reading a single OTIS solution\n", " xi,yi,hz,mz,iob,dt = pyTMD.read_tide_model.read_tide_grid(model.grid_file)\n", "elif (model.format == 'netcdf'):\n", @@ -157,7 +157,7 @@ " LON = np.atleast_1d(LON)\n", " LAT = np.atleast_1d(LAT)\n", " # read tidal constants and interpolate to grid points\n", - " if model.format in ('OTIS','ATLAS'):\n", + " if model.format in ('OTIS','ATLAS','ESR'):\n", " # if reading a single OTIS solution\n", " xi,yi,hz,mz,iob,dt = pyTMD.read_tide_model.read_tide_grid(model.grid_file)\n", " # adjust longitudinal convention of input latitude and longitude\n", diff --git a/notebooks/Plot Antarctic Tidal Currents.ipynb b/notebooks/Plot Antarctic Tidal Currents.ipynb index 409d7fd6..8ad9a7c0 100644 --- a/notebooks/Plot Antarctic Tidal Currents.ipynb +++ b/notebooks/Plot Antarctic Tidal Currents.ipynb @@ -191,7 +191,7 @@ "# iterate over u and v currents\n", "for TYPE in model.type:\n", " # read tidal constants and interpolate to grid points\n", - " if model.format in ('OTIS','ATLAS'):\n", + " if model.format in ('OTIS','ATLAS','ESR'):\n", " amp,ph,D,c = extract_tidal_constants(lon, lat, model.grid_file,\n", " model.model_file, model.projection, TYPE=TYPE,\n", " METHOD='spline', GRID=model.format)\n", diff --git a/notebooks/Plot Antarctic Tide Range.ipynb b/notebooks/Plot Antarctic Tide Range.ipynb index 9757b427..255b2205 100644 --- a/notebooks/Plot Antarctic Tide Range.ipynb +++ b/notebooks/Plot Antarctic Tide Range.ipynb @@ -223,7 +223,7 @@ "outputs": [], "source": [ "# read tidal constants 
and interpolate to grid points\n", - "if model.format in ('OTIS','ATLAS'):\n", + "if model.format in ('OTIS','ATLAS','ESR'):\n", " amp,ph,D,c = extract_tidal_constants(lon, lat, model.grid_file,\n", " model.model_file, model.projection, TYPE=model.type,\n", " METHOD='spline', GRID=model.format)\n", diff --git a/notebooks/Plot Arctic Ocean Map.ipynb b/notebooks/Plot Arctic Ocean Map.ipynb index bd146377..47f016bf 100644 --- a/notebooks/Plot Arctic Ocean Map.ipynb +++ b/notebooks/Plot Arctic Ocean Map.ipynb @@ -190,7 +190,7 @@ "delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data'])\n", "\n", "# read tidal constants and interpolate to grid points\n", - "if model.format in ('OTIS','ATLAS'):\n", + "if model.format in ('OTIS','ATLAS','ESR'):\n", " amp,ph,D,c = extract_tidal_constants(lon, lat, model.grid_file,\n", " model.model_file, model.projection, TYPE=model.type,\n", " METHOD='spline', GRID=model.format)\n", diff --git a/notebooks/Plot Ross Ice Shelf Map.ipynb b/notebooks/Plot Ross Ice Shelf Map.ipynb index d402c86e..67d44a1f 100644 --- a/notebooks/Plot Ross Ice Shelf Map.ipynb +++ b/notebooks/Plot Ross Ice Shelf Map.ipynb @@ -190,7 +190,7 @@ "delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data'])\n", "\n", "# read tidal constants and interpolate to grid points\n", - "if model.format in ('OTIS','ATLAS'):\n", + "if model.format in ('OTIS','ATLAS','ESR'):\n", " amp,ph,D,c = extract_tidal_constants(lon, lat, model.grid_file,\n", " model.model_file, model.projection, TYPE=model.type,\n", " METHOD='spline', GRID=model.format)\n", diff --git a/notebooks/Plot Tide Forecasts.ipynb b/notebooks/Plot Tide Forecasts.ipynb index eeec97a2..12fe8dd2 100644 --- a/notebooks/Plot Tide Forecasts.ipynb +++ b/notebooks/Plot Tide Forecasts.ipynb @@ -144,7 +144,7 @@ "# verify longitudes\n", "LON = m.wrap_longitudes(LON)\n", "# read tidal constants and interpolate to leaflet points\n", - "if model.format in ('OTIS','ATLAS'):\n", + "if model.format in 
('OTIS','ATLAS','ESR'):\n", " amp,ph,D,c = extract_tidal_constants(np.atleast_1d(LON),\n", " np.atleast_1d(LAT), model.grid_file, model.model_file,\n", " model.projection, TYPE=model.type, METHOD='spline',\n", diff --git a/pyTMD/check_tide_points.py b/pyTMD/check_tide_points.py index 543daadd..4961099b 100644 --- a/pyTMD/check_tide_points.py +++ b/pyTMD/check_tide_points.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" check_tide_points.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (05/2022) Check if points are within a tide model domain OTIS format tidal solutions provided by Ohio State University and ESR @@ -52,6 +52,7 @@ bilinear_interp.py: bilinear interpolation of data to coordinates UPDATE HISTORY: + Updated 05/2022: added ESR netCDF4 formats to list of model types Updated 04/2022: updated docstrings to numpy documentation format Updated 09/2021: refactor to use model class for files and attributes Updated 07/2021: added check that tide model directory is accessible @@ -141,7 +142,7 @@ def check_tide_points(x, y, DIRECTORY=None, MODEL=None, np.atleast_1d(y).flatten()) # read tidal constants and interpolate to grid points - if model.format in ('OTIS','ATLAS'): + if model.format in ('OTIS','ATLAS','ESR'): # if reading a single OTIS solution xi,yi,hz,mz,iob,dt = pyTMD.read_tide_model.read_tide_grid(model.grid_file) # invert model mask diff --git a/pyTMD/compute_tide_corrections.py b/pyTMD/compute_tide_corrections.py index 786cf5b4..fb83af95 100644 --- a/pyTMD/compute_tide_corrections.py +++ b/pyTMD/compute_tide_corrections.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" compute_tide_corrections.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (05/2022) Calculates tidal elevations for correcting elevation or imagery data Uses OTIS format tidal solutions provided by Ohio State University and ESR @@ -79,6 +79,7 @@ nearest_extrap.py: nearest-neighbor extrapolation of data to coordinates UPDATE HISTORY: + Updated 05/2022: added 
ESR netCDF4 formats to list of model types Updated 04/2022: updated docstrings to numpy documentation format Updated 12/2021: added function to calculate a tidal time series verify coordinate dimensions for each input data type @@ -277,7 +278,7 @@ def compute_tide_corrections(x, y, delta_time, DIRECTORY=None, MODEL=None, delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) #-- read tidal constants and interpolate to grid points - if model.format in ('OTIS','ATLAS'): + if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(lon, lat, model.grid_file, model.model_file, model.projection, TYPE=model.type, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, diff --git a/pyTMD/load_nodal_corrections.py b/pyTMD/load_nodal_corrections.py index 95c68e5c..c733b640 100755 --- a/pyTMD/load_nodal_corrections.py +++ b/pyTMD/load_nodal_corrections.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" -load_nodal_corrections.py (04/2022) +load_nodal_corrections.py (05/2022) Calculates the nodal corrections for tidal constituents Modification of ARGUMENTS fortran subroutine by Richard Ray 03/1999 @@ -37,6 +37,7 @@ Ocean Tides", Journal of Atmospheric and Oceanic Technology, (2002). 
UPDATE HISTORY: + Updated 05/2022: added ESR netCDF4 formats to list of model types Updated 04/2022: updated docstrings to numpy documentation format Updated 12/2020: fix k1 for FES models Updated 08/2020: change time variable names to not overwrite functions @@ -126,7 +127,7 @@ def load_nodal_corrections(MJD,constituents,DELTAT=0.0,CORRECTIONS='OTIS'): arg[:,14] = t1 - s + 3.0*h - p + 90.0 #-- chi1 arg[:,15] = t1 - 2.0*h + pp - 90.0 #-- pi1 arg[:,16] = t1 - h - 90.0 #-- p1 - if CORRECTIONS in ('OTIS','ATLAS','netcdf'): + if CORRECTIONS in ('OTIS','ATLAS','ESR','netcdf'): arg[:,17] = t1 + 90.0 #-- s1 elif CORRECTIONS in ('GOT','FES'): arg[:,17] = t1 + 180.0 #-- s1 (Doodson's phase) @@ -186,7 +187,7 @@ def load_nodal_corrections(MJD,constituents,DELTAT=0.0,CORRECTIONS='OTIS'): f = np.zeros((nt,60)) u = np.zeros((nt,60)) #-- determine nodal corrections f and u for each model type - if CORRECTIONS in ('OTIS','ATLAS','netcdf'): + if CORRECTIONS in ('OTIS','ATLAS','ESR','netcdf'): f[:,0] = 1.0 #-- Sa f[:,1] = 1.0 #-- Ssa f[:,2] = 1.0 - 0.130*cosn #-- Mm diff --git a/pyTMD/model.py b/pyTMD/model.py index 969f504d..0fc369c1 100644 --- a/pyTMD/model.py +++ b/pyTMD/model.py @@ -1,11 +1,12 @@ #!/usr/bin/env python u""" model.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (05/2022) Retrieves tide model parameters for named tide models and from model definition files UPDATE HISTORY: + Updated 05/2022: added ESR CATS2022 to list of models Updated 04/2022: updated docstrings to numpy documentation format include utf-8 encoding in reads to be windows compliant set default directory to None for documentation @@ -147,6 +148,10 @@ def grid(self, m): self.model_directory = os.path.join(self.directory, 'CATS2008a_SPOTL_Load') self.grid_file = self.pathfinder('grid_CATS2008a_opt') + elif (m == 'CATS2022'): + self.format = 'ESR' + self.model_directory = os.path.join(self.directory,'CATS2022') + self.grid_file = self.pathfinder('CATS2022_test.nc') elif (m == 
'TPXO9-atlas'): self.model_directory = os.path.join(self.directory,'TPXO9_atlas') self.grid_file = self.pathfinder('grid_tpxo9_atlas') @@ -291,6 +296,27 @@ def elevation(self, m): self.long_name = "Load Tide" self.description = ("Local displacement due to Ocean " "Loading (-6 to 0 cm)") + elif (m == 'CATS2022'): + self.format = 'ESR' + self.model_directory = os.path.join(self.directory,'CATS2022') + self.grid_file = self.pathfinder('CATS2022_test.nc') + self.model_file = self.pathfinder('CATS2022_test.nc') + self.projection = 'CATS2008' + # model description and references + self.reference = ('https://www.esr.org/research/' + 'polar-tide-models/list-of-polar-tide-models/cats2008/') + self.atl03 = 'tide_ocean' + self.atl06 = 'tide_ocean' + self.atl07 = 'height_segment_ocean' + self.atl10 = 'height_segment_ocean' + self.atl11 = 'tide_ocean' + self.atl12 = 'tide_ocean_seg' + self.gla12 = 'd_ocElv' + self.variable = 'tide_ocean' + self.long_name = "Ocean Tide" + self.description = ("Ocean Tides including diurnal and " + "semi-diurnal (harmonic analysis), and longer period " + "tides (dynamic and self-consistent equilibrium).") elif (m == 'TPXO9-atlas'): self.model_directory = os.path.join(self.directory,'TPXO9_atlas') self.grid_file = self.pathfinder('grid_tpxo9_atlas') @@ -946,6 +972,12 @@ def current(self, m): self.grid_file = self.pathfinder('grid_CATS2008') self.model_file = dict(u=self.pathfinder('uv.CATS2008.out')) self.projection = 'CATS2008' + elif (m == 'CATS2022'): + self.format = 'ESR' + self.model_directory = os.path.join(self.directory,'CATS2022') + self.grid_file = self.pathfinder('CATS2022_test.nc') + self.model_file = dict(u=self.pathfinder('CATS2022_test.nc')) + self.projection = 'CATS2008' elif (m == 'TPXO9-atlas'): self.model_directory = os.path.join(self.directory,'TPXO9_atlas') self.grid_file = self.pathfinder('grid_tpxo9_atlas') @@ -1225,7 +1257,7 @@ def antarctic_ocean(): """ Returns list of Antarctic ocean tide elevation models """ - return 
['CATS0201','CATS2008'] + return ['CATS0201','CATS2008','CATS2022'] @staticmethod def antarctic_load(): """ @@ -1239,7 +1271,7 @@ def antarctic_current(): """ Returns list of Antarctic tidal current models """ - return ['CATS0201','CATS2008'] + return ['CATS0201','CATS2008','CATS2022'] @staticmethod def arctic_ocean(): @@ -1267,11 +1299,11 @@ def ocean_elevation(): """ Returns list of ocean tide elevation models """ - return ['CATS0201','CATS2008','TPXO9-atlas','TPXO9-atlas-v2', - 'TPXO9-atlas-v3','TPXO9-atlas-v4','TPXO9-atlas-v5','TPXO9.1', - 'TPXO8-atlas','TPXO7.2','AODTM-5','AOTIM-5','AOTIM-5-2018', - 'Arc2kmTM','Gr1km-v2','GOT4.7','GOT4.8','GOT4.10', - 'FES2014','EOT20'] + return ['CATS0201','CATS2008','CATS2022','TPXO9-atlas', + 'TPXO9-atlas-v2','TPXO9-atlas-v3','TPXO9-atlas-v4', + 'TPXO9-atlas-v5','TPXO9.1','TPXO8-atlas','TPXO7.2', + 'AODTM-5','AOTIM-5','AOTIM-5-2018','Arc2kmTM','Gr1km-v2', + 'GOT4.7','GOT4.8','GOT4.10','FES2014','EOT20'] @staticmethod def load_elevation(): @@ -1286,9 +1318,9 @@ def ocean_current(): """ Returns list of tidal current models """ - return ['CATS0201','CATS2008','TPXO9-atlas','TPXO9-atlas-v2', - 'TPXO9-atlas-v3','TPXO9-atlas-v4','TPXO9-atlas-v5', - 'TPXO9.1','TPXO8-atlas','TPXO7.2', + return ['CATS0201','CATS2008','CATS2022','TPXO9-atlas', + 'TPXO9-atlas-v2','TPXO9-atlas-v3','TPXO9-atlas-v4', + 'TPXO9-atlas-v5','TPXO9.1','TPXO8-atlas','TPXO7.2', 'AODTM-5','AOTIM-5','AOTIM-5-2018', 'Arc2kmTM','Gr1km-v2','FES2014'] @@ -1308,6 +1340,13 @@ def ATLAS_compact(): """ return ['TPXO8-atlas'] + @staticmethod + def ESR(): + """ + Returns list of ESR format models + """ + return ['CATS2022'] + @staticmethod def ATLAS(): """ @@ -1383,7 +1422,7 @@ def from_file(self, definition_file): temp = self.from_dict(parameters) # verify model name, format and type assert temp.name - assert temp.format in ('OTIS','ATLAS','netcdf','GOT','FES') + assert temp.format in ('OTIS','ATLAS','ESR','netcdf','GOT','FES') assert temp.type # verify necessary attributes are 
with model format assert temp.model_file @@ -1398,10 +1437,10 @@ def from_file(self, definition_file): temp.model_file = os.path.expanduser(temp.model_file) temp.model_directory = os.path.dirname(temp.model_file) # extract full path to tide grid file - if temp.format in ('OTIS','ATLAS','netcdf'): + if temp.format in ('OTIS','ATLAS','ESR','netcdf'): assert temp.grid_file temp.grid_file = os.path.expanduser(temp.grid_file) - if temp.format in ('OTIS','ATLAS'): + if temp.format in ('OTIS','ATLAS','ESR'): assert temp.projection # convert scale from string to float if temp.format in ('netcdf','GOT','FES'): diff --git a/pyTMD/predict_tidal_ts.py b/pyTMD/predict_tidal_ts.py index ac844fb6..47bbe8d7 100644 --- a/pyTMD/predict_tidal_ts.py +++ b/pyTMD/predict_tidal_ts.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" -predict_tidal_ts.py (04/2022) +predict_tidal_ts.py (05/2022) Predict tidal time series at a location using harmonic constants CALLING SEQUENCE: @@ -32,6 +32,7 @@ load_nodal_corrections.py: loads nodal corrections for tidal constituents UPDATE HISTORY: + Updated 05/2022: added ESR netCDF4 formats to list of model types Updated 04/2022: updated docstrings to numpy documentation format Updated 02/2021: replaced numpy bool to prevent deprecation warning Updated 09/2020: append output mask over each constituent @@ -85,7 +86,7 @@ def predict_tidal_ts(t, hc, constituents, DELTAT=0.0, CORRECTIONS='OTIS'): ht.mask = np.zeros((nt),dtype=bool) #-- for each constituent for k,c in enumerate(constituents): - if CORRECTIONS in ('OTIS','ATLAS','netcdf'): + if CORRECTIONS in ('OTIS','ATLAS','ESR','netcdf'): #-- load parameters for each constituent amp,ph,omega,alpha,species = load_constituent(c) #-- add component for constituent to output tidal time series diff --git a/pyTMD/predict_tide.py b/pyTMD/predict_tide.py index 991a3b0d..7918897c 100644 --- a/pyTMD/predict_tide.py +++ b/pyTMD/predict_tide.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" -predict_tide.py (04/2022) 
+predict_tide.py (05/2022) Predict tides at a single time using harmonic constants CALLING SEQUENCE: @@ -32,6 +32,7 @@ load_nodal_corrections.py: loads nodal corrections for tidal constituents UPDATE HISTORY: + Updated 05/2022: added ESR netCDF4 formats to list of model types Updated 04/2022: updated docstrings to numpy documentation format Updated 02/2021: replaced numpy bool to prevent deprecation warning Updated 09/2020: append output mask over each constituent @@ -87,7 +88,7 @@ def predict_tide(t, hc, constituents, DELTAT=0.0, CORRECTIONS='OTIS'): ht.mask = np.zeros((npts),dtype=bool) #-- for each constituent for k,c in enumerate(constituents): - if CORRECTIONS in ('OTIS','ATLAS','netcdf'): + if CORRECTIONS in ('OTIS','ATLAS','ESR','netcdf'): #-- load parameters for each constituent amp,ph,omega,alpha,species = load_constituent(c) #-- add component for constituent to output tidal elevation diff --git a/pyTMD/predict_tide_drift.py b/pyTMD/predict_tide_drift.py index 34a1a0f2..6e38a7a0 100755 --- a/pyTMD/predict_tide_drift.py +++ b/pyTMD/predict_tide_drift.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" -predict_tide_drift.py (04/2022) +predict_tide_drift.py (05/2022) Predict tides at multiple times and locations using harmonic constants CALLING SEQUENCE: @@ -32,6 +32,7 @@ load_nodal_corrections.py: loads nodal corrections for tidal constituents UPDATE HISTORY: + Updated 05/2022: added ESR netCDF4 formats to list of model types Updated 04/2022: updated docstrings to numpy documentation format Updated 02/2021: replaced numpy bool to prevent deprecation warning Updated 09/2020: append output mask over each constituent @@ -85,7 +86,7 @@ def predict_tide_drift(t, hc, constituents, DELTAT=0.0, CORRECTIONS='OTIS'): ht.mask = np.zeros((nt),dtype=bool) #-- for each constituent for k,c in enumerate(constituents): - if CORRECTIONS in ('OTIS','ATLAS','netcdf'): + if CORRECTIONS in ('OTIS','ATLAS','ESR','netcdf'): #-- load parameters for each constituent 
amp,ph,omega,alpha,species = load_constituent(c) #-- add component for constituent to output tidal elevation diff --git a/pyTMD/read_FES_model.py b/pyTMD/read_FES_model.py index 8dbe9990..a66e9132 100644 --- a/pyTMD/read_FES_model.py +++ b/pyTMD/read_FES_model.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" -read_FES_model.py (04/2022) +read_FES_model.py (05/2022) Reads files for a tidal model and makes initial calculations to run tide program Includes functions to extract tidal harmonic constants from the FES (Finite Element Solution) tide models for given locations @@ -54,6 +54,7 @@ nearest_extrap.py: nearest-neighbor extrapolation of data to coordinates UPDATE HISTORY: + Updated 05/2022: reformat arguments to extract_FES_constants definition Updated 04/2022: updated docstrings to numpy documentation format include utf-8 encoding in reads to be windows compliant fix netCDF4 masks for nan values @@ -86,8 +87,15 @@ from pyTMD.nearest_extrap import nearest_extrap #-- PURPOSE: extract tidal harmonic constants from tide models at coordinates -def extract_FES_constants(ilon, ilat, model_files, TYPE='z', VERSION=None, - METHOD='spline', EXTRAPOLATE=False, CUTOFF=10.0, GZIP=True, SCALE=1.0): +def extract_FES_constants(ilon, ilat, + model_files=None, + TYPE='z', + VERSION=None, + METHOD='spline', + EXTRAPOLATE=False, + CUTOFF=10.0, + GZIP=True, + SCALE=1.0): """ Reads files for an ascii or netCDF4 tidal model @@ -101,9 +109,7 @@ def extract_FES_constants(ilon, ilat, model_files, TYPE='z', VERSION=None, longitude to interpolate ilat: float latitude to interpolate - grid_file: str - grid file for model - model_files: list + model_files: list or NoneType, default None list of model files for each constituent TYPE: str, default 'z' Tidal variable to read @@ -328,9 +334,9 @@ def read_netcdf_file(input_file, GZIP=False, TYPE=None, VERSION=None): model file GZIP: bool, default False Input file is compressed - VERSION: str or NoneType + VERSION: str or NoneType, default None model 
version - TYPE: str or NoneType + TYPE: str or NoneType, default None Tidal variable to read - ``'z'``: heights diff --git a/pyTMD/read_GOT_model.py b/pyTMD/read_GOT_model.py index 8f7bb829..ef54014d 100644 --- a/pyTMD/read_GOT_model.py +++ b/pyTMD/read_GOT_model.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" -read_GOT_model.py (04/2022) +read_GOT_model.py (05/2022) Reads files for Richard Ray's Global Ocean Tide (GOT) models and makes initial calculations to run the tide program Includes functions to extract tidal harmonic constants out of a tidal model for @@ -39,6 +39,7 @@ nearest_extrap.py: nearest-neighbor extrapolation of data to coordinates UPDATE HISTORY: + Updated 05/2022: reformat arguments to extract_GOT_constants definition Updated 04/2022: updated docstrings to numpy documentation format include utf-8 encoding in reads to be windows compliant Updated 12/2021: adjust longitude convention based on model longitude @@ -78,8 +79,13 @@ from pyTMD.nearest_extrap import nearest_extrap #-- PURPOSE: extract tidal harmonic constants out of GOT model at coordinates -def extract_GOT_constants(ilon, ilat, model_files, METHOD=None, - EXTRAPOLATE=False, CUTOFF=10.0, GZIP=True, SCALE=1.0): +def extract_GOT_constants(ilon, ilat, + model_files=None, + METHOD=None, + EXTRAPOLATE=False, + CUTOFF=10.0, + GZIP=True, + SCALE=1.0): """ Reads files for Richard Ray's Global Ocean Tide (GOT) models @@ -93,7 +99,7 @@ def extract_GOT_constants(ilon, ilat, model_files, METHOD=None, longitude to interpolate ilat: float latitude to interpolate - model_files: list + model_files: list or NoneType, default None list of model files for each constituent METHOD: str, default 'spline' Interpolation method diff --git a/pyTMD/read_netcdf_model.py b/pyTMD/read_netcdf_model.py index 7099c974..4a15c8d0 100644 --- a/pyTMD/read_netcdf_model.py +++ b/pyTMD/read_netcdf_model.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" -read_netcdf_model.py (04/2022) +read_netcdf_model.py (05/2022) Reads files for a 
tidal model and makes initial calculations to run tide program Includes functions to extract tidal harmonic constants from OTIS tide models for given locations @@ -54,6 +54,7 @@ nearest_extrap.py: nearest-neighbor extrapolation of data to coordinates UPDATE HISTORY: + Updated 05/2022: reformat arguments to extract_netcdf_constants definition Updated 04/2022: updated docstrings to numpy documentation format Updated 12/2021: adjust longitude convention based on model longitude Updated 09/2021: fix cases where there is no mask on constituent files @@ -88,8 +89,15 @@ from pyTMD.nearest_extrap import nearest_extrap #-- PURPOSE: extract tidal harmonic constants from tide models at coordinates -def extract_netcdf_constants(ilon, ilat, grid_file, model_files, TYPE='z', - METHOD='spline', EXTRAPOLATE=False, CUTOFF=10.0, GZIP=True, SCALE=1.0): +def extract_netcdf_constants(ilon, ilat, + grid_file=None, + model_files=None, + TYPE='z', + METHOD='spline', + EXTRAPOLATE=False, + CUTOFF=10.0, + GZIP=True, + SCALE=1.0): """ Reads files for ATLAS netCDF4 tidal models @@ -103,9 +111,9 @@ def extract_netcdf_constants(ilon, ilat, grid_file, model_files, TYPE='z', longitude to interpolate ilat: float latitude to interpolate - grid_file: str + grid_file: str or NoneType, default None grid file for model - model_files: list + model_files: list or NoneType, default None list of model files for each constituent TYPE: str, default 'z' Tidal variable to read diff --git a/pyTMD/read_tide_model.py b/pyTMD/read_tide_model.py index 34e7576e..4460366c 100644 --- a/pyTMD/read_tide_model.py +++ b/pyTMD/read_tide_model.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" -read_tide_model.py (04/2022) +read_tide_model.py (05/2022) Reads files for a tidal model and makes initial calculations to run tide program Includes functions to extract tidal harmonic constants from OTIS tide models for given locations @@ -33,7 +33,8 @@ set to np.inf to extrapolate for all points GRID: binary file type to read ATLAS: 
reading a global solution with localized solutions - OTIS: combined global solution + ESR: combined global or local netCDF4 solution + OTIS: combined global or local solution OUTPUTS: amplitude: amplitudes of tidal constituents @@ -47,6 +48,8 @@ https://numpy.org/doc/stable/user/numpy-for-matlab-users.html scipy: Scientific Tools for Python https://docs.scipy.org/doc/ + netCDF4: Python interface to the netCDF C library + https://unidata.github.io/netcdf4-python/netCDF4/index.html PROGRAM DEPENDENCIES: convert_ll_xy.py: converts lat/lon points to and from projected coordinates @@ -54,6 +57,7 @@ nearest_extrap.py: nearest-neighbor extrapolation of data to coordinates UPDATE HISTORY: + Updated 05/2022: add functions for using ESR netCDF4 format models Updated 04/2022: updated docstrings to numpy documentation format use longcomplex data format to be windows compliant Updated 03/2022: invert tide mask to be True for invalid points @@ -91,6 +95,7 @@ Updated 09/2017: Adapted for Python """ import os +import netCDF4 import numpy as np import scipy.interpolate from pyTMD.convert_ll_xy import convert_ll_xy @@ -98,8 +103,15 @@ from pyTMD.nearest_extrap import nearest_extrap #-- PURPOSE: extract tidal harmonic constants from tide models at coordinates -def extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, TYPE='z', - METHOD='spline', EXTRAPOLATE=False, CUTOFF=10.0, GRID='OTIS'): +def extract_tidal_constants(ilon, ilat, + grid_file=None, + model_file=None, + EPSG=None, + TYPE='z', + METHOD='spline', + EXTRAPOLATE=False, + CUTOFF=10.0, + GRID='OTIS'): """ Reads files for an OTIS-formatted tidal model @@ -113,11 +125,12 @@ def extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, TYPE='z', longitude to interpolate ilat: float latitude to interpolate - grid_file: str + grid_file: str or NoneType, default None grid file for model - model_file: str or list + model_file: str, list or NoneType, default None model file containing each constituent - EPSG: 
projection of tide model data + EPSG: str or NoneType, default None, + projection of tide model data TYPE: str, default 'z' Tidal variable to read @@ -139,10 +152,11 @@ def extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, TYPE='z', Set to np.inf to extrapolate for all points GRID: str, default 'OTIS' - Binary file type to read + Tide model file type to read - ``'ATLAS'``: reading a global solution with localized solutions - - ``'OTIS'``: combined global solution + - ``'ESR'``: combined global or local netCDF4 solution + - ``'OTIS'``: combined global or local solution Returns ------- @@ -164,6 +178,9 @@ def extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, TYPE='z', x0,y0,hz0,mz0,iob,dt,pmask,local = read_atlas_grid(grid_file) xi,yi,hz = combine_atlas_model(x0,y0,hz0,pmask,local,VARIABLE='depth') mz = create_atlas_mask(x0,y0,mz0,local,VARIABLE='depth') + elif (GRID == 'ESR'): + #-- if reading a single ESR netCDF4 solution + xi,yi,hz,mz,sf = read_netcdf_grid(grid_file) else: #-- if reading a single OTIS solution xi,yi,hz,mz,iob,dt = read_tide_grid(grid_file) @@ -262,7 +279,7 @@ def extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, TYPE='z', constituents = [read_constituents(m)[0].pop() for m in model_file] nc = len(constituents) else: - constituents,nc = read_constituents(model_file) + constituents,nc = read_constituents(model_file, GRID=GRID) #-- number of output data points npts = len(D) amplitude = np.ma.zeros((npts,nc)) @@ -273,12 +290,15 @@ def extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, TYPE='z', if (TYPE == 'z'): #-- read constituent from elevation file if (GRID == 'ATLAS'): - z0,zlocal = read_atlas_elevation(model_file,i,c) - xi,yi,z=combine_atlas_model(x0,y0,z0,pmask,zlocal,VARIABLE='z') + z0,zlocal = read_atlas_elevation(model_file, i, c) + xi,yi,z = combine_atlas_model(x0, y0, z0, pmask, zlocal, + VARIABLE='z') + elif (GRID == 'ESR'): + z = read_netcdf_file(model_file, i, TYPE=TYPE) elif 
isinstance(model_file,list): - z = read_elevation_file(model_file[i],0) + z = read_elevation_file(model_file[i], 0) else: - z = read_elevation_file(model_file,i) + z = read_elevation_file(model_file, i) #-- replace original values with extend matrices if GLOBAL: z = extend_matrix(z) @@ -331,12 +351,15 @@ def extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, TYPE='z', elif TYPE in ('U','u'): #-- read constituent from transport file if (GRID == 'ATLAS'): - u0,v0,uvlocal = read_atlas_transport(model_file,i,c) - xi,yi,u=combine_atlas_model(x0,y0,u0,pmask,uvlocal,VARIABLE='u') + u0,v0,uvlocal = read_atlas_transport(model_file, i, c) + xi,yi,u = combine_atlas_model(x0, y0, u0, pmask, uvlocal, + VARIABLE='u') + elif (GRID == 'ESR'): + u = read_netcdf_file(model_file, i, TYPE=TYPE) elif isinstance(model_file,list): - u,v = read_transport_file(model_file[i],0) + u,v = read_transport_file(model_file[i], 0) else: - u,v = read_transport_file(model_file,i) + u,v = read_transport_file(model_file, i) #-- replace original values with extend matrices if GLOBAL: u = extend_matrix(u) @@ -390,12 +413,15 @@ def extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, TYPE='z', elif TYPE in ('V','v'): #-- read constituent from transport file if (GRID == 'ATLAS'): - u0,v0,uvlocal = read_atlas_transport(model_file,i,c) - xi,yi,v = combine_atlas_model(x0,y0,v0,pmask,local,VARIABLE='v') + u0,v0,uvlocal = read_atlas_transport(model_file, i, c) + xi,yi,v = combine_atlas_model(x0, y0, v0, pmask, uvlocal, + VARIABLE='v') + elif (GRID == 'ESR'): + v = read_netcdf_file(model_file, i, TYPE=TYPE) elif isinstance(model_file,list): - u,v = read_transport_file(model_file[i],0) + u,v = read_transport_file(model_file[i], 0) else: - u,v = read_transport_file(model_file,i) + u,v = read_transport_file(model_file, i) #-- replace original values with extend matrices if GLOBAL: v = extend_matrix(v) @@ -670,8 +696,50 @@ def read_atlas_grid(input_file): #-- return values return 
(x,y,hz,mz,iob,dt,pmask,local) +#-- PURPOSE: read grid file +def read_netcdf_grid(input_file): + """ + Read netCDF4 grid file to extract model coordinates, bathymetry, + masks and flexure scaling factors + + Parameters + ---------- + input_file: str + input grid file + + Returns + ------- + x: float + x-coordinates of input grid + y: float + y-coordinates of input grid + hz: float + model bathymetry + mz: int + land/water mask + sf: float + scaling factor for applying ice flexure + """ + #-- read the netcdf format tide grid file + fileID=netCDF4.Dataset(os.path.expanduser(input_file),'r') + #-- read coordinates + x = fileID.variables['x'][:].copy() + y = fileID.variables['y'][::-1].copy() + #-- read water column thickness + hz = fileID.variables['wct'][::-1,:].copy() + #-- read mask + mz = fileID.variables['mask'][::-1,:].copy() + #-- read flexure and convert from percent to scale factor + sf = fileID.variables['flexure'][::-1,:]/100.0 + #-- update bathymetry mask + hz.mask = (hz.data == 0.0) + #-- close the grid file + fileID.close() + #-- return values + return (x,y,hz,mz,sf) + #-- PURPOSE: read list of constituents from an elevation or transport file -def read_constituents(input_file): +def read_constituents(input_file, GRID='OTIS'): """ Read the list of constituents from an elevation or transport file @@ -679,6 +747,12 @@ def read_constituents(input_file): ---------- input_file: str input tidal file + GRID: str, default 'OTIS' + Tide model file type to read + + - ``'ATLAS'``: reading a global solution with localized solutions + - ``'ESR'``: combined global or local netCDF4 solution + - ``'OTIS'``: combined global or local solution Returns ------- @@ -690,13 +764,20 @@ def read_constituents(input_file): #-- check that model file is accessible if not os.access(os.path.expanduser(input_file), os.F_OK): raise FileNotFoundError(os.path.expanduser(input_file)) - #-- open the file - fid = open(os.path.expanduser(input_file),'rb') - ll, = np.fromfile(fid, 
dtype=np.dtype('>i4'), count=1) - nx,ny,nc = np.fromfile(fid, dtype=np.dtype('>i4'), count=3) - fid.seek(16,1) - constituents = [c.decode("utf8").rstrip() for c in fid.read(nc*4).split()] - fid.close() + if (GRID == 'ESR'): + #-- open the netCDF4 file + fid = netCDF4.Dataset(os.path.expanduser(input_file),'r') + constituents = fid.variables['cons'].long_name.split() + nc = len(constituents) + fid.close() + else: + #-- open the file + fid = open(os.path.expanduser(input_file),'rb') + ll, = np.fromfile(fid, dtype=np.dtype('>i4'), count=1) + nx,ny,nc = np.fromfile(fid, dtype=np.dtype('>i4'), count=3) + fid.seek(16,1) + constituents = [c.decode("utf8").rstrip() for c in fid.read(nc*4).split()] + fid.close() return (constituents,nc) #-- PURPOSE: read elevation file to extract real and imaginary components for @@ -1183,6 +1264,58 @@ def combine_atlas_model(xi, yi, zi, pmask, local, VARIABLE=None): #-- return 2 arc-minute solution and coordinates return (x30,y30,z30) +#-- PURPOSE: read netCDF4 file to extract real and imaginary components for +#-- constituent +def read_netcdf_file(input_file, ic, TYPE=None): + """ + Read netCDF4 file to extract real and imaginary components for constituent + + Parameters + ---------- + input_file: str + input transport file + ic: int + index of constituent + TYPE: str or NoneType, default None + Tidal variable to read + + - ``'z'``: heights + - ``'u'``: horizontal transport velocities + - ``'U'``: horizontal depth-averaged transport + - ``'v'``: vertical transport velocities + - ``'V'``: vertical depth-averaged transport + + Returns + ------- + hc: complex + complex form of tidal constituent oscillation + """ + #-- read the netcdf format tide grid file + fileID = netCDF4.Dataset(os.path.expanduser(input_file),'r') + #-- variable dimensions + nx = fileID.dimensions['x'].size + ny = fileID.dimensions['y'].size + #-- real and imaginary components of tidal constituent + hc = np.ma.zeros((ny,nx),dtype=np.complex64) + hc.mask = 
np.zeros((ny,nx),dtype=bool) + #-- extract constituent + if (TYPE == 'z'): + #-- convert elevations from mm to m + hc.data.real[:,:] = fileID.variables['hRe'][ic,::-1,:]/1e3 + hc.data.imag[:,:] = -fileID.variables['hIm'][ic,::-1,:]/1e3 + elif TYPE in ('U','u'): + #-- convert transports from cm^2/s to m^2/s + hc.data.real[:,:] = fileID.variables['uRe'][ic,::-1,:]/1e4 + hc.data.imag[:,:] = -fileID.variables['uIm'][ic,::-1,:]/1e4 + elif TYPE in ('V','v'): + #-- convert transports from cm^2/s to m^2/s + hc.data.real[:,:] = fileID.variables['vRe'][ic,::-1,:]/1e4 + hc.data.imag[:,:] = -fileID.variables['vIm'][ic,::-1,:]/1e4 + #-- close the file + fileID.close() + #-- return output variables + return hc + #-- For a rectangular bathymetry grid: #-- construct masks for zeta, u and v nodes on a C-grid def Muv(hz): diff --git a/pyTMD/tools.py b/pyTMD/tools.py index b327f017..dcab1e2a 100644 --- a/pyTMD/tools.py +++ b/pyTMD/tools.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" tools.py -Written by Tyler Sutterley (03/2022) +Written by Tyler Sutterley (05/2022) Jupyter notebook, user interface and plotting tools PYTHON DEPENDENCIES: @@ -17,6 +17,7 @@ https://github.com/matplotlib/matplotlib UPDATE HISTORY: + Updated 05/2022: include world copy jump in webmercator maps Updated 03/2022: add marker relocation routines from notebooks Updated 02/2022: add leaflet map projections Written 09/2021 @@ -177,6 +178,7 @@ def set_atlas(self, sender): class leaflet: def __init__(self, projection='Global', **kwargs): # set default keyword arguments + kwargs.setdefault('map',None) kwargs.setdefault('attribution',True) kwargs.setdefault('zoom',1) kwargs.setdefault('zoom_control',False) @@ -187,7 +189,7 @@ def __init__(self, projection='Global', **kwargs): # create basemap in projection if (projection == 'Global'): self.map = ipyleaflet.Map(center=kwargs['center'], - zoom=kwargs['zoom'], max_zoom=15, + zoom=kwargs['zoom'], max_zoom=15, world_copy_jump=True, 
attribution_control=kwargs['attribution'], basemap=ipyleaflet.basemaps.Esri.WorldTopoMap) self.crs = 'EPSG:3857' @@ -206,6 +208,10 @@ def __init__(self, projection='Global', **kwargs): basemap=ipyleaflet.basemaps.Esri.AntarcticBasemap, crs=projections.EPSG3031.Basemap) self.crs = 'EPSG:3031' + else: + # use a predefined ipyleaflet map + self.map = kwargs['map'] + self.crs = self.map.crs['name'] # add control for layers if kwargs['layer_control']: self.layer_control = ipyleaflet.LayersControl(position='topleft') diff --git a/scripts/compute_tidal_currents.py b/scripts/compute_tidal_currents.py index a46a75ff..4e5f1e79 100755 --- a/scripts/compute_tidal_currents.py +++ b/scripts/compute_tidal_currents.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" compute_tidal_currents.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (05/2022) Calculates zonal and meridional tidal currents for an input file Uses OTIS format tidal solutions provided by Ohio State University and ESR @@ -109,6 +109,7 @@ predict_tide_drift.py: predict tidal elevations using harmonic constants UPDATE HISTORY: + Updated 05/2022: added ESR netCDF4 formats to list of model types Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models @@ -323,7 +324,7 @@ def compute_tidal_currents(tide_dir, input_file, output_file, #-- iterate over u and v currents for t in model.type: #-- read tidal constants and interpolate to grid points - if model.format in ('OTIS','ATLAS'): + if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(lon.flatten(), lat.flatten(), model.grid_file, model.model_file['u'], model.projection, TYPE=t, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, diff --git a/scripts/compute_tidal_elevations.py b/scripts/compute_tidal_elevations.py index ee0f692d..45606080 100755 --- 
a/scripts/compute_tidal_elevations.py +++ b/scripts/compute_tidal_elevations.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" compute_tidal_elevations.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (05/2022) Calculates tidal elevations for an input file Uses OTIS format tidal solutions provided by Ohio State University and ESR @@ -94,6 +94,7 @@ predict_tide_drift.py: predict tidal elevations using harmonic constants UPDATE HISTORY: + Updated 05/2022: added ESR netCDF4 formats to list of model types Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models @@ -297,7 +298,7 @@ def compute_tidal_elevations(tide_dir, input_file, output_file, delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) #-- read tidal constants and interpolate to grid points - if model.format in ('OTIS','ATLAS'): + if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(lon.flatten(), lat.flatten(), model.grid_file, model.model_file, model.projection, TYPE=model.type, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, diff --git a/scripts/compute_tides_ICESat2_ATL03.py b/scripts/compute_tides_ICESat2_ATL03.py index 7c1a662c..32c23c2a 100644 --- a/scripts/compute_tides_ICESat2_ATL03.py +++ b/scripts/compute_tides_ICESat2_ATL03.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" compute_tides_ICESat2_ATL03.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (05/2022) Calculates tidal elevations for correcting ICESat-2 photon height data Calculated at ATL03 segment level using reference photon geolocation and time Segment level corrections can be applied to the individual photon events (PEs) @@ -61,6 +61,7 @@ predict_tide_drift.py: predict tidal elevations using harmonic constants UPDATE HISTORY: + Updated 05/2022: added ESR netCDF4 formats to list of model types Updated 04/2022: use argparse 
descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models @@ -204,7 +205,7 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, tide_time = pyTMD.time.convert_delta_time(gps_seconds-leap_seconds, epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0) #-- read tidal constants and interpolate to grid points - if model.format in ('OTIS','ATLAS'): + if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(lon, lat, model.grid_file, model.model_file, model.projection, TYPE=model.type, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, diff --git a/scripts/compute_tides_ICESat2_ATL06.py b/scripts/compute_tides_ICESat2_ATL06.py index 5a8f1b4a..2c75c9f8 100644 --- a/scripts/compute_tides_ICESat2_ATL06.py +++ b/scripts/compute_tides_ICESat2_ATL06.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" compute_tides_ICESat2_ATL06.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (05/2022) Calculates tidal elevations for correcting ICESat-2 land ice elevation data Uses OTIS format tidal solutions provided by Ohio State University and ESR @@ -56,6 +56,7 @@ predict_tide_drift.py: predict tidal elevations using harmonic constants UPDATE HISTORY: + Updated 05/2022: added ESR netCDF4 formats to list of model types Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models @@ -195,7 +196,7 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, tide_time = pyTMD.time.convert_delta_time(gps_seconds-leap_seconds, epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0) #-- read tidal constants and interpolate to grid points - if model.format in ('OTIS','ATLAS'): + if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = 
extract_tidal_constants(val['longitude'], val['latitude'], model.grid_file, model.model_file, model.projection, TYPE=model.type, METHOD=METHOD, diff --git a/scripts/compute_tides_ICESat2_ATL07.py b/scripts/compute_tides_ICESat2_ATL07.py index 84941af5..2dc5b336 100644 --- a/scripts/compute_tides_ICESat2_ATL07.py +++ b/scripts/compute_tides_ICESat2_ATL07.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" compute_tides_ICESat2_ATL07.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (05/2022) Calculates tidal elevations for correcting ICESat-2 sea ice height data Uses OTIS format tidal solutions provided by Ohio State University and ESR @@ -56,6 +56,7 @@ predict_tide_drift.py: predict tidal elevations using harmonic constants UPDATE HISTORY: + Updated 05/2022: added ESR netCDF4 formats to list of model types Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models @@ -192,7 +193,7 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, tide_time = pyTMD.time.convert_delta_time(gps_seconds-leap_seconds, epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0) #-- read tidal constants and interpolate to grid points - if model.format in ('OTIS','ATLAS'): + if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(val['longitude'], val['latitude'], model.grid_file, model.model_file, model.projection, TYPE=model.type, METHOD=METHOD, diff --git a/scripts/compute_tides_ICESat2_ATL10.py b/scripts/compute_tides_ICESat2_ATL10.py index 7face2a2..a74e9468 100644 --- a/scripts/compute_tides_ICESat2_ATL10.py +++ b/scripts/compute_tides_ICESat2_ATL10.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" compute_tides_ICESat2_ATL10.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (05/2022) Calculates tidal elevations for correcting ICESat-2 sea ice height data Uses OTIS 
format tidal solutions provided by Ohio State University and ESR @@ -56,6 +56,7 @@ predict_tide_drift.py: predict tidal elevations using harmonic constants UPDATE HISTORY: + Updated 05/2022: added ESR netCDF4 formats to list of model types Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models @@ -215,7 +216,7 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, tide_time = pyTMD.time.convert_delta_time(gps_seconds-leap_seconds, epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0) #-- read tidal constants and interpolate to grid points - if model.format in ('OTIS','ATLAS'): + if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(val['longitude'], val['latitude'], model.grid_file, model.model_file, model.projection, TYPE=model.type, METHOD=METHOD, diff --git a/scripts/compute_tides_ICESat2_ATL11.py b/scripts/compute_tides_ICESat2_ATL11.py index 298f6eb2..c28378c7 100644 --- a/scripts/compute_tides_ICESat2_ATL11.py +++ b/scripts/compute_tides_ICESat2_ATL11.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" compute_tides_ICESat2_ATL11.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (05/2022) Calculates tidal elevations for correcting ICESat-2 annual land ice height data Uses OTIS format tidal solutions provided by Ohio State University and ESR @@ -56,6 +56,7 @@ predict_tide_drift.py: predict tidal elevations using harmonic constants UPDATE HISTORY: + Updated 05/2022: added ESR netCDF4 formats to list of model types Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models @@ -216,7 +217,7 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), 
scale=1.0/86400.0) #-- read tidal constants and interpolate to grid points - if model.format in ('OTIS','ATLAS'): + if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(longitude[track], latitude[track], model.grid_file, model.model_file, model.projection, TYPE=model.type, METHOD=METHOD, diff --git a/scripts/compute_tides_ICESat2_ATL12.py b/scripts/compute_tides_ICESat2_ATL12.py index 5a8fed34..79dda427 100644 --- a/scripts/compute_tides_ICESat2_ATL12.py +++ b/scripts/compute_tides_ICESat2_ATL12.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" compute_tides_ICESat2_ATL12.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (05/2022) Calculates tidal elevations for correcting ICESat-2 ocean surface height data Uses OTIS format tidal solutions provided by Ohio State University and ESR @@ -56,6 +56,7 @@ predict_tide_drift.py: predict tidal elevations using harmonic constants UPDATE HISTORY: + Updated 05/2022: added ESR netCDF4 formats to list of model types Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models @@ -193,7 +194,7 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, tide_time = pyTMD.time.convert_delta_time(gps_seconds-leap_seconds, epoch1=(1980,1,6,0,0,0), epoch2=(1992,1,1,0,0,0), scale=1.0/86400.0) #-- read tidal constants and interpolate to grid points - if model.format in ('OTIS','ATLAS'): + if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(val['longitude'], val['latitude'], model.grid_file, model.model_file, model.projection, TYPE=model.type, METHOD=METHOD, diff --git a/scripts/compute_tides_ICESat_GLA12.py b/scripts/compute_tides_ICESat_GLA12.py index c1f90b7b..f7405978 100644 --- a/scripts/compute_tides_ICESat_GLA12.py +++ b/scripts/compute_tides_ICESat_GLA12.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" 
compute_tides_ICESat_GLA12.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (05/2022) Calculates tidal elevations for correcting ICESat/GLAS L2 GLA12 Antarctic and Greenland Ice Sheet elevation data @@ -60,6 +60,7 @@ predict_tide_drift.py: predict tidal elevations using harmonic constants UPDATE HISTORY: + Updated 05/2022: added ESR netCDF4 formats to list of model types Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: save ICESat campaign attribute to output file @@ -192,7 +193,7 @@ def compute_tides_ICESat(tide_dir, INPUT_FILE, TIDE_MODEL=None, #-- delta time (TT - UT1) file delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) #-- read tidal constants and interpolate to grid points - if model.format in ('OTIS','ATLAS'): + if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(lon_40HZ, lat_40HZ, model.grid_file, model.model_file, model.projection, TYPE=TYPE, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, diff --git a/scripts/compute_tides_icebridge_data.py b/scripts/compute_tides_icebridge_data.py index 44112468..979a3e37 100644 --- a/scripts/compute_tides_icebridge_data.py +++ b/scripts/compute_tides_icebridge_data.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" compute_tides_icebridge_data.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (05/2022) Calculates tidal elevations for correcting Operation IceBridge elevation data Uses OTIS format tidal solutions provided by Ohio State University and ESR @@ -64,6 +64,7 @@ read_ATM1b_QFIT_binary.py: read ATM1b QFIT binary files (NSIDC version 1) UPDATE HISTORY: + Updated 05/2022: added ESR netCDF4 formats to list of model types Updated 04/2022: include utf-8 encoding in reads to be windows compliant use argparse descriptions within sphinx documentation Updated 03/2022: using static decorators to define available models @@ -526,7 +527,7 @@ def 
compute_tides_icebridge_data(tide_dir, arg, TIDE_MODEL, delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) #-- read tidal constants and interpolate to grid points - if model.format in ('OTIS','ATLAS'): + if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(dinput['lon'], dinput['lat'], model.grid_file, model.model_file, model.projection, TYPE=model.type, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, From 9029e692328df8430e793747f19bc19c1629e00b Mon Sep 17 00:00:00 2001 From: tsutterley Date: Mon, 16 May 2022 15:43:06 -0700 Subject: [PATCH 02/14] refactor: changed keyword arguments to camel case --- doc/source/user_guide/iers_mean_pole.rst | 2 +- .../user_guide/infer_minor_corrections.rst | 2 +- .../user_guide/load_nodal_corrections.rst | 3 +- doc/source/user_guide/read_FES_model.rst | 8 +- doc/source/user_guide/read_GOT_model.rst | 3 +- doc/source/user_guide/read_netcdf_model.rst | 3 +- doc/source/user_guide/read_tide_model.rst | 2 +- notebooks/Check Tide Map.ipynb | 6 +- notebooks/Plot Antarctic Tidal Currents.ipynb | 21 +- notebooks/Plot Antarctic Tide Range.ipynb | 22 +- notebooks/Plot Arctic Ocean Map.ipynb | 21 +- notebooks/Plot Ross Ice Shelf Map.ipynb | 26 +- notebooks/Plot Tide Forecasts.ipynb | 24 +- pyTMD/check_tide_points.py | 8 +- pyTMD/compute_tide_corrections.py | 35 +-- pyTMD/iers_mean_pole.py | 27 +- pyTMD/infer_minor_corrections.py | 39 ++- pyTMD/load_nodal_corrections.py | 42 ++- pyTMD/predict_tidal_ts.py | 16 +- pyTMD/predict_tide.py | 16 +- pyTMD/predict_tide_drift.py | 18 +- pyTMD/read_FES_model.py | 147 +++++----- pyTMD/read_GOT_model.py | 100 ++++--- pyTMD/read_netcdf_model.py | 195 +++++++------ pyTMD/read_tide_model.py | 262 ++++++++++-------- pyTMD/time.py | 43 ++- scripts/compute_LPET_ICESat2_ATL03.py | 2 +- scripts/compute_LPET_ICESat2_ATL06.py | 2 +- scripts/compute_LPET_ICESat2_ATL07.py | 2 +- scripts/compute_LPET_ICESat2_ATL10.py | 2 +- scripts/compute_LPET_ICESat2_ATL11.py | 2 +- 
scripts/compute_LPET_ICESat2_ATL12.py | 2 +- scripts/compute_LPET_icebridge_data.py | 2 +- scripts/compute_LPT_ICESat_GLA12.py | 2 +- scripts/compute_LPT_displacements.py | 2 +- scripts/compute_LPT_icebridge_data.py | 4 +- scripts/compute_OPT_ICESat_GLA12.py | 2 +- scripts/compute_OPT_displacements.py | 2 +- scripts/compute_OPT_icebridge_data.py | 4 +- scripts/compute_tidal_currents.py | 25 +- scripts/compute_tidal_elevations.py | 29 +- scripts/compute_tides_ICESat2_ATL03.py | 29 +- scripts/compute_tides_ICESat2_ATL06.py | 25 +- scripts/compute_tides_ICESat2_ATL07.py | 25 +- scripts/compute_tides_ICESat2_ATL10.py | 25 +- scripts/compute_tides_ICESat2_ATL11.py | 33 +-- scripts/compute_tides_ICESat2_ATL12.py | 25 +- scripts/compute_tides_ICESat_GLA12.py | 27 +- scripts/compute_tides_icebridge_data.py | 27 +- scripts/reduce_OTIS_files.py | 13 +- test/test_atlas_read.py | 17 +- test/test_download_and_read.py | 24 +- test/test_eop.py | 2 +- test/test_fes_predict.py | 8 +- test/test_perth3_read.py | 6 +- test/test_time.py | 2 +- 56 files changed, 826 insertions(+), 637 deletions(-) diff --git a/doc/source/user_guide/iers_mean_pole.rst b/doc/source/user_guide/iers_mean_pole.rst index ad398924..1e4d7e6f 100644 --- a/doc/source/user_guide/iers_mean_pole.rst +++ b/doc/source/user_guide/iers_mean_pole.rst @@ -12,7 +12,7 @@ Calling Sequence .. 
code-block:: python from pyTMD.iers_mean_pole import iers_mean_pole - x,y,flag = iers_mean_pole(input_file,input_epoch,version,FILL_VALUE=FILL_VALUE) + x,y,flag = iers_mean_pole(input_file,input_epoch,version,fill_value=FILL_VALUE) `Source code`__ diff --git a/doc/source/user_guide/infer_minor_corrections.rst b/doc/source/user_guide/infer_minor_corrections.rst index 3035fb19..35d6d536 100644 --- a/doc/source/user_guide/infer_minor_corrections.rst +++ b/doc/source/user_guide/infer_minor_corrections.rst @@ -12,7 +12,7 @@ Calling Sequence from pyTMD.infer_minor_corrections import infer_minor_corrections dh = infer_minor_corrections(t, zmajor, constituents, - DELTAT=DELTAT, CORRECTIONS=CORRECTIONS) + deltat=DELTAT, corrections=CORRECTIONS) `Source code`__ diff --git a/doc/source/user_guide/load_nodal_corrections.rst b/doc/source/user_guide/load_nodal_corrections.rst index aa7c8189..c6c5367a 100644 --- a/doc/source/user_guide/load_nodal_corrections.rst +++ b/doc/source/user_guide/load_nodal_corrections.rst @@ -11,7 +11,8 @@ Calling Sequence .. code-block:: python from pyTMD.load_nodal_corrections import load_nodal_corrections - pu,pf,G = load_nodal_corrections(MJD,constituents) + pu,pf,G = load_nodal_corrections(MJD, constituents, + deltat=DELTAT, corrections=CORRECTIONS) `Source code`__ diff --git a/doc/source/user_guide/read_FES_model.rst b/doc/source/user_guide/read_FES_model.rst index ac428660..4fa4c8ff 100644 --- a/doc/source/user_guide/read_FES_model.rst +++ b/doc/source/user_guide/read_FES_model.rst @@ -12,8 +12,12 @@ Calling Sequence .. 
code-block:: python from pyTMD.read_FES_model import extract_FES_constants - amp,ph = extract_FES_constants(ilon, ilat, model_files, TYPE='z', - VERSION=version,METHOD='spline',GZIP=True,SCALE=1.0/100.0) + amp,ph = extract_FES_constants(ilon, ilat, model_files, + type='z', + version=version, + method='spline', + compressed=True, + scale=1.0/100.0) `Source code`__ diff --git a/doc/source/user_guide/read_GOT_model.rst b/doc/source/user_guide/read_GOT_model.rst index f1f2d388..324a1ab4 100644 --- a/doc/source/user_guide/read_GOT_model.rst +++ b/doc/source/user_guide/read_GOT_model.rst @@ -10,7 +10,8 @@ Calling Sequence .. code-block:: python from pyTMD.read_GOT_model import extract_GOT_constants - amp,ph,c = extract_GOT_constants(ilon,ilat,model_files,METHOD='spline') + amp,ph,c = extract_GOT_constants(ilon, ilat, model_files, + method='spline') `Source code`__ diff --git a/doc/source/user_guide/read_netcdf_model.rst b/doc/source/user_guide/read_netcdf_model.rst index 24ee77d9..9dad68cc 100644 --- a/doc/source/user_guide/read_netcdf_model.rst +++ b/doc/source/user_guide/read_netcdf_model.rst @@ -10,7 +10,8 @@ Calling Sequence .. 
code-block:: python from pyTMD.read_netcdf_model import read_netcdf_model - amp,ph,D,c = read_netcdf_model(ilon,ilat,grid_file,model_files,TYPE='z',METHOD='spline') + amp,ph,D,c = read_netcdf_model(ilon, ilat, grid_file, model_files, + type='z', method='spline') `Source code`__ diff --git a/doc/source/user_guide/read_tide_model.rst b/doc/source/user_guide/read_tide_model.rst index fb8221aa..38222099 100644 --- a/doc/source/user_guide/read_tide_model.rst +++ b/doc/source/user_guide/read_tide_model.rst @@ -14,7 +14,7 @@ Calling Sequence from pyTMD.read_tide_model import extract_tidal_constants amp,ph,D,c = extract_tidal_constants(ilon, ilat, grid_file, model_file, EPSG, - TYPE='z', METHOD='spline', GRID='OTIS') + type='z', method='spline', grid='OTIS') `Source code`__ diff --git a/notebooks/Check Tide Map.ipynb b/notebooks/Check Tide Map.ipynb index be2ba1e9..7d798eb8 100644 --- a/notebooks/Check Tide Map.ipynb +++ b/notebooks/Check Tide Map.ipynb @@ -125,19 +125,19 @@ "elif (model.format == 'netcdf'):\n", " # if reading a netCDF OTIS atlas solution\n", " xi,yi,hz = pyTMD.read_netcdf_model.read_netcdf_grid(model.grid_file,\n", - " GZIP=model.compressed, TYPE='z')\n", + " compressed=model.compressed, type='z')\n", " # invert bathymetry mask\n", " mz = np.invert(hz.mask)\n", "elif (model.format == 'GOT'):\n", " # if reading a NASA GOT solution\n", " hc,xi,yi,c = pyTMD.read_GOT_model.read_GOT_grid(model.model_file[0],\n", - " GZIP=model.compressed)\n", + " compressed=model.compressed)\n", " # invert tidal constituent mask\n", " mz = np.invert(hc.mask)\n", "elif (model.format == 'FES'):\n", " # if reading a FES netCDF solution\n", " hc,xi,yi = pyTMD.read_FES_model.read_netcdf_file(model.model_file[0],\n", - " GZIP=model.compressed, TYPE='z', VERSION=model.version)\n", + " compressed=model.compressed, type='z', version=model.version)\n", " # invert tidal constituent mask\n", " mz = np.invert(hc.mask)" ] diff --git a/notebooks/Plot Antarctic Tidal Currents.ipynb 
b/notebooks/Plot Antarctic Tidal Currents.ipynb index 8ad9a7c0..4e4a15e7 100644 --- a/notebooks/Plot Antarctic Tidal Currents.ipynb +++ b/notebooks/Plot Antarctic Tidal Currents.ipynb @@ -193,23 +193,24 @@ " # read tidal constants and interpolate to grid points\n", " if model.format in ('OTIS','ATLAS','ESR'):\n", " amp,ph,D,c = extract_tidal_constants(lon, lat, model.grid_file,\n", - " model.model_file, model.projection, TYPE=TYPE,\n", - " METHOD='spline', GRID=model.format)\n", + " model.model_file, model.projection, type=TYPE,\n", + " method='spline', grid=model.format)\n", " DELTAT = np.zeros_like(tide_time)\n", " elif (model.format == 'netcdf'):\n", " amp,ph,D,c = extract_netcdf_constants(lon, lat, model.grid_file,\n", - " model.model_file[TYPE], TYPE=TYPE, METHOD='spline',\n", - " SCALE=model.scale, GZIP=model.compressed)\n", + " model.model_file[TYPE], type=TYPE, method='spline',\n", + " scale=model.scale, compressed=model.compressed)\n", " DELTAT = np.zeros_like(tide_time)\n", " elif (model.format == 'GOT'):\n", " amp,ph,c = extract_GOT_constants(lon, lat, model.model_file,\n", - " METHOD='spline', SCALE=model.scale, GZIP=model.compressed)\n", + " method='spline', scale=model.scale,\n", + " compressed=model.compressed)\n", " # interpolate delta times from calendar dates to tide time\n", " DELTAT = calc_delta_time(delta_file, tide_time)\n", " elif (model.format == 'FES'):\n", " amp,ph = extract_FES_constants(lon, lat, model.model_file[TYPE],\n", - " TYPE=model.type, VERSION=model.versin, METHOD='spline',\n", - " SCALE=model.scale, GZIP=model.compressed)\n", + " type=TYPE, version=model.version, method='spline',\n", + " scale=model.scale, compressed=model.compressed)\n", " c = model.constituents\n", " # interpolate delta times from calendar dates to tide time\n", " DELTAT = calc_delta_time(delta_file, tide_time)\n", @@ -223,10 +224,10 @@ " tide[TYPE] = np.ma.zeros((ny,nx,24))\n", " for hour in range(24):\n", " # predict tidal elevations at time and infer minor 
corrections\n", - " TIDE = predict_tide(tide_time[hour], hc, c, DELTAT=DELTAT[hour],\n", - " CORRECTIONS=model.format)\n", + " TIDE = predict_tide(tide_time[hour], hc, c, deltat=DELTAT[hour],\n", + " corrections=model.format)\n", " MINOR = infer_minor_corrections(tide_time[hour], hc, c,\n", - " DELTAT=DELTAT[hour], CORRECTIONS=model.format)\n", + " deltat=DELTAT[hour], corrections=model.format)\n", " # add major and minor components and reform grid\n", " tide[TYPE][:,:,hour] = np.reshape((TIDE+MINOR),(ny,nx))" ] diff --git a/notebooks/Plot Antarctic Tide Range.ipynb b/notebooks/Plot Antarctic Tide Range.ipynb index 255b2205..3c072fef 100644 --- a/notebooks/Plot Antarctic Tide Range.ipynb +++ b/notebooks/Plot Antarctic Tide Range.ipynb @@ -225,19 +225,20 @@ "# read tidal constants and interpolate to grid points\n", "if model.format in ('OTIS','ATLAS','ESR'):\n", " amp,ph,D,c = extract_tidal_constants(lon, lat, model.grid_file,\n", - " model.model_file, model.projection, TYPE=model.type,\n", - " METHOD='spline', GRID=model.format)\n", + " model.model_file, model.projection, type=model.type,\n", + " method='spline', grid=model.format)\n", "elif (model.format == 'netcdf'):\n", " amp,ph,D,c = extract_netcdf_constants(lon, lat, model.grid_file,\n", - " model.model_file, TYPE=model.type, METHOD='spline',\n", - " SCALE=model.scale, GZIP=model.compressed)\n", + " model.model_file, type=model.type, method='spline',\n", + " scale=model.scale, compressed=model.compressed)\n", "elif (model.format == 'GOT'):\n", " amp,ph,c = extract_GOT_constants(lon, lat, model.model_file,\n", - " METHOD='spline', SCALE=model.scale, GZIP=model.compressed)\n", + " method='spline', scale=model.scale,\n", + " compressed=model.compressed)\n", "elif (model.format == 'FES'):\n", " amp,ph = extract_FES_constants(lon, lat, model.model_file,\n", - " TYPE=model.type, VERSION=model.version, METHOD='spline',\n", - " SCALE=model.scale, GZIP=model.compressed)\n", + " type=model.type, version=model.version, 
method='spline',\n", + " scale=model.scale, compressed=model.compressed)\n", " c = model.constituents\n", "\n", "# calculate minor constituent amplitudes\n", @@ -305,13 +306,6 @@ "# show the plot\n", "plt.show()" ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] } ], "metadata": { diff --git a/notebooks/Plot Arctic Ocean Map.ipynb b/notebooks/Plot Arctic Ocean Map.ipynb index 47f016bf..dfa4b243 100644 --- a/notebooks/Plot Arctic Ocean Map.ipynb +++ b/notebooks/Plot Arctic Ocean Map.ipynb @@ -192,23 +192,24 @@ "# read tidal constants and interpolate to grid points\n", "if model.format in ('OTIS','ATLAS','ESR'):\n", " amp,ph,D,c = extract_tidal_constants(lon, lat, model.grid_file,\n", - " model.model_file, model.projection, TYPE=model.type,\n", - " METHOD='spline', GRID=model.format)\n", + " model.model_file, model.projection, type=model.type,\n", + " method='spline', grid=model.format)\n", " DELTAT = np.zeros_like(tide_time)\n", "elif (model.format == 'netcdf'):\n", " amp,ph,D,c = extract_netcdf_constants(lon, lat, model.grid_file,\n", - " model.model_file, TYPE=model.type, METHOD='spline',\n", - " SCALE=model.scale, GZIP=model.compressed)\n", + " model.model_file, type=model.type, method='spline',\n", + " scale=model.scale, compressed=model.compressed)\n", " DELTAT = np.zeros_like(tide_time)\n", "elif (model.format == 'GOT'):\n", " amp,ph,c = extract_GOT_constants(lon, lat, model.model_file,\n", - " METHOD='spline', SCALE=model.scale, GZIP=model.compressed)\n", + " method='spline', scale=model.scale,\n", + " compressed=model.compressed)\n", " # interpolate delta times from calendar dates to tide time\n", " DELTAT = calc_delta_time(delta_file, tide_time)\n", "elif (model.format == 'FES'):\n", " amp,ph = extract_FES_constants(lon, lat, model.model_file,\n", - " TYPE=model.type, VERSION=model.version, METHOD='spline',\n", - " SCALE=model.scale, GZIP=model.compressed)\n", + " type=model.type, 
version=model.version, method='spline',\n", + " scale=model.scale, compressed=model.compressed)\n", " c = model.constituents\n", " # interpolate delta times from calendar dates to tide time\n", " DELTAT = calc_delta_time(delta_file, tide_time)\n", @@ -222,10 +223,10 @@ "tide_cm = np.ma.zeros((ny,nx,24))\n", "for hour in range(24):\n", " # predict tidal elevations at time and infer minor corrections\n", - " TIDE = predict_tide(tide_time[hour], hc, c, DELTAT=DELTAT[hour],\n", - " CORRECTIONS=model.format)\n", + " TIDE = predict_tide(tide_time[hour], hc, c, deltat=DELTAT[hour],\n", + " corrections=model.format)\n", " MINOR = infer_minor_corrections(tide_time[hour], hc, c,\n", - " DELTAT=DELTAT[hour], CORRECTIONS=model.format)\n", + " deltat=DELTAT[hour], corrections=model.format)\n", " # add major and minor components and reform grid\n", " # convert from meters to centimeters\n", " tide_cm[:,:,hour] = 100.0*np.reshape((TIDE+MINOR),(ny,nx))" diff --git a/notebooks/Plot Ross Ice Shelf Map.ipynb b/notebooks/Plot Ross Ice Shelf Map.ipynb index 67d44a1f..e4964150 100644 --- a/notebooks/Plot Ross Ice Shelf Map.ipynb +++ b/notebooks/Plot Ross Ice Shelf Map.ipynb @@ -192,23 +192,24 @@ "# read tidal constants and interpolate to grid points\n", "if model.format in ('OTIS','ATLAS','ESR'):\n", " amp,ph,D,c = extract_tidal_constants(lon, lat, model.grid_file,\n", - " model.model_file, model.projection, TYPE=model.type,\n", - " METHOD='spline', GRID=model.format)\n", + " model.model_file, model.projection, type=model.type,\n", + " method='spline', grid=model.format)\n", " DELTAT = np.zeros_like(tide_time)\n", "elif (model.format == 'netcdf'):\n", " amp,ph,D,c = extract_netcdf_constants(lon, lat, model.grid_file,\n", - " model.model_file, TYPE=model.type, METHOD='spline',\n", - " SCALE=model.scale, GZIP=model.compressed)\n", + " model.model_file, type=model.type, method='spline',\n", + " scale=model.scale, compressed=model.compressed)\n", " DELTAT = np.zeros_like(tide_time)\n", 
"elif (model.format == 'GOT'):\n", " amp,ph,c = extract_GOT_constants(lon, lat, model.model_file,\n", - " METHOD='spline', SCALE=model.scale, GZIP=model.compressed)\n", + " method='spline', scale=model.scale,\n", + " compressed=model.compressed)\n", " # interpolate delta times from calendar dates to tide time\n", " DELTAT = calc_delta_time(delta_file, tide_time)\n", "elif (model.format == 'FES'):\n", " amp,ph = extract_FES_constants(lon, lat, model.model_file,\n", - " TYPE=model.type, VERSION=model.version, METHOD='spline',\n", - " SCALE=model.scale, GZIP=model.compressed)\n", + " type=model.type, version=model.version, method='spline',\n", + " scale=model.scale, compressed=model.compressed)\n", " c = model.constituents\n", " # interpolate delta times from calendar dates to tide time\n", " DELTAT = calc_delta_time(delta_file, tide_time)\n", @@ -222,10 +223,10 @@ "tide_cm = np.ma.zeros((ny,nx,24))\n", "for hour in range(24):\n", " # predict tidal elevations at time and infer minor corrections\n", - " TIDE = predict_tide(tide_time[hour], hc, c, DELTAT=DELTAT[hour],\n", - " CORRECTIONS=model.format)\n", + " TIDE = predict_tide(tide_time[hour], hc, c, deltat=DELTAT[hour],\n", + " corrections=model.format)\n", " MINOR = infer_minor_corrections(tide_time[hour], hc, c,\n", - " DELTAT=DELTAT[hour], CORRECTIONS=model.format)\n", + " deltat=DELTAT[hour], corrections=model.format)\n", " # add major and minor components and reform grid\n", " # convert from meters to centimeters\n", " tide_cm[:,:,hour] = 100.0*np.reshape((TIDE+MINOR),(ny,nx))" @@ -310,8 +311,11 @@ } ], "metadata": { + "interpreter": { + "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6" + }, "kernelspec": { - "display_name": "Python 3", + "display_name": "Python 3.8.10 64-bit", "language": "python", "name": "python3" }, diff --git a/notebooks/Plot Tide Forecasts.ipynb b/notebooks/Plot Tide Forecasts.ipynb index 12fe8dd2..d21f79c7 100644 --- a/notebooks/Plot Tide Forecasts.ipynb +++ 
b/notebooks/Plot Tide Forecasts.ipynb @@ -147,27 +147,27 @@ "if model.format in ('OTIS','ATLAS','ESR'):\n", " amp,ph,D,c = extract_tidal_constants(np.atleast_1d(LON),\n", " np.atleast_1d(LAT), model.grid_file, model.model_file,\n", - " model.projection, TYPE=model.type, METHOD='spline',\n", - " EXTRAPOLATE=True, GRID=model.format)\n", + " model.projection, type=model.type, method='spline',\n", + " extrapolate=True, grid=model.format)\n", " DELTAT = np.zeros_like(tide_time)\n", "elif (model.format == 'netcdf'):\n", " amp,ph,D,c = extract_netcdf_constants(np.atleast_1d(LON),\n", " np.atleast_1d(LAT), model.grid_file, model.model_file,\n", - " TYPE=model.type, METHOD='spline', EXTRAPOLATE=True,\n", - " SCALE=model.scale, GZIP=model.compressed)\n", + " type=model.type, method='spline', extrapolate=True,\n", + " scale=model.scale, compressed=model.compressed)\n", " DELTAT = np.zeros_like(tide_time)\n", "elif (model.format == 'GOT'):\n", " amp,ph,c = extract_GOT_constants(np.atleast_1d(LON),\n", - " np.atleast_1d(LAT), model.model_file, METHOD='spline',\n", - " EXTRAPOLATE=True, SCALE=model.scale,\n", - " GZIP=model.compressed)\n", + " np.atleast_1d(LAT), model.model_file, method='spline',\n", + " extrapolate=True, scale=model.scale,\n", + " compressed=model.compressed)\n", " # interpolate delta times from calendar dates to tide time\n", " DELTAT = calc_delta_time(delta_file, tide_time)\n", "elif (model.format == 'FES'):\n", " amp,ph = extract_FES_constants(np.atleast_1d(LON),\n", - " np.atleast_1d(LAT), model.model_file, TYPE=model.type,\n", - " VERSION=model.version, METHOD='spline', EXTRAPOLATE=True,\n", - " SCALE=model.scale, GZIP=model.compressed)\n", + " np.atleast_1d(LAT), model.model_file, type=model.type,\n", + " version=model.version, method='spline', extrapolate=True,\n", + " scale=model.scale, compressed=model.compressed)\n", " c = model.constituents\n", " # interpolate delta times from calendar dates to tide time\n", " DELTAT = calc_delta_time(delta_file, 
tide_time)\n", @@ -180,9 +180,9 @@ "# convert time from MJD to days relative to Jan 1, 1992 (48622 MJD)\n", "# predict tidal elevations at time 1 and infer minor corrections\n", "TIDE = predict_tidal_ts(tide_time, hc, c,\n", - " DELTAT=DELTAT, CORRECTIONS=model.format)\n", + " deltat=DELTAT, corrections=model.format)\n", "MINOR = infer_minor_corrections(tide_time, hc, c,\n", - " DELTAT=DELTAT, CORRECTIONS=model.format)\n", + " deltat=DELTAT, corrections=model.format)\n", "TIDE.data[:] += MINOR.data[:]\n", "# convert to centimeters\n", "TIDE.data[:] *= 100.0\n", diff --git a/pyTMD/check_tide_points.py b/pyTMD/check_tide_points.py index 4961099b..8c35b461 100644 --- a/pyTMD/check_tide_points.py +++ b/pyTMD/check_tide_points.py @@ -53,6 +53,7 @@ UPDATE HISTORY: Updated 05/2022: added ESR netCDF4 formats to list of model types + updated keyword arguments to read tide model programs Updated 04/2022: updated docstrings to numpy documentation format Updated 09/2021: refactor to use model class for files and attributes Updated 07/2021: added check that tide model directory is accessible @@ -153,7 +154,7 @@ def check_tide_points(x, y, DIRECTORY=None, MODEL=None, elif (model.format == 'netcdf'): # if reading a netCDF OTIS atlas solution xi,yi,hz = pyTMD.read_netcdf_model.read_netcdf_grid(model.grid_file, - GZIP=model.compressed, TYPE=model.type) + compressed=model.compressed, type=model.type) # copy bathymetry mask mz = np.copy(hz.mask) # copy latitude and longitude and adjust longitudes @@ -163,7 +164,7 @@ def check_tide_points(x, y, DIRECTORY=None, MODEL=None, elif (model.format == 'GOT'): # if reading a NASA GOT solution hc,xi,yi,c = pyTMD.read_GOT_model.read_GOT_grid(model.model_file[0], - GZIP=model.compressed) + compressed=model.compressed) # copy tidal constituent mask mz = np.copy(hc.mask) # copy latitude and longitude and adjust longitudes @@ -173,7 +174,8 @@ def check_tide_points(x, y, DIRECTORY=None, MODEL=None, elif (model.format == 'FES'): # if reading a FES 
netCDF solution hc,xi,yi = pyTMD.read_FES_model.read_netcdf_file(model.model_file[0], - GZIP=model.compressed, TYPE=model.type, VERSION=model.version) + compressed=model.compressed, type=model.type, + version=model.version) # copy tidal constituent mask mz = np.copy(hc.mask) # copy latitude and longitude and adjust longitudes diff --git a/pyTMD/compute_tide_corrections.py b/pyTMD/compute_tide_corrections.py index fb83af95..7511a062 100644 --- a/pyTMD/compute_tide_corrections.py +++ b/pyTMD/compute_tide_corrections.py @@ -80,6 +80,7 @@ UPDATE HISTORY: Updated 05/2022: added ESR netCDF4 formats to list of model types + updated keyword arguments to read tide model programs Updated 04/2022: updated docstrings to numpy documentation format Updated 12/2021: added function to calculate a tidal time series verify coordinate dimensions for each input data type @@ -280,27 +281,27 @@ def compute_tide_corrections(x, y, delta_time, DIRECTORY=None, MODEL=None, #-- read tidal constants and interpolate to grid points if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(lon, lat, model.grid_file, - model.model_file, model.projection, TYPE=model.type, - METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, - GRID=model.format) + model.model_file, model.projection, type=model.type, + method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, + grid=model.format) deltat = np.zeros_like(t) elif (model.format == 'netcdf'): amp,ph,D,c = extract_netcdf_constants(lon, lat, model.grid_file, - model.model_file, TYPE=model.type, METHOD=METHOD, - EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, SCALE=model.scale, - GZIP=model.compressed) + model.model_file, type=model.type, method=METHOD, + extrapolate=EXTRAPOLATE, cutoff=CUTOFF, scale=model.scale, + compressed=model.compressed) deltat = np.zeros_like(t) elif (model.format == 'GOT'): amp,ph,c = extract_GOT_constants(lon, lat, model.model_file, - METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, - SCALE=model.scale, 
GZIP=model.compressed) + method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, + scale=model.scale, compressed=model.compressed) #-- interpolate delta times from calendar dates to tide time deltat = calc_delta_time(delta_file, t) elif (model.format == 'FES'): amp,ph = extract_FES_constants(lon, lat, model.model_file, - TYPE=model.type, VERSION=model.version, METHOD=METHOD, - EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, SCALE=model.scale, - GZIP=model.compressed) + type=model.type, version=model.version, method=METHOD, + extrapolate=EXTRAPOLATE, cutoff=CUTOFF, scale=model.scale, + compressed=model.compressed) #-- available model constituents c = model.constituents #-- interpolate delta times from calendar dates to tide time @@ -318,9 +319,9 @@ def compute_tide_corrections(x, y, delta_time, DIRECTORY=None, MODEL=None, tide.mask = np.zeros((ny,nx,nt),dtype=bool) for i in range(nt): TIDE = predict_tide(t[i], hc, c, - DELTAT=deltat[i], CORRECTIONS=model.format) + deltat=deltat[i], corrections=model.format) MINOR = infer_minor_corrections(t[i], hc, c, - DELTAT=deltat[i], CORRECTIONS=model.format) + deltat=deltat[i], corrections=model.format) #-- add major and minor components and reform grid tide[:,:,i] = np.reshape((TIDE+MINOR), (ny,nx)) tide.mask[:,:,i] = np.reshape((TIDE.mask | MINOR.mask), (ny,nx)) @@ -329,18 +330,18 @@ def compute_tide_corrections(x, y, delta_time, DIRECTORY=None, MODEL=None, tide = np.ma.zeros((npts), fill_value=FILL_VALUE) tide.mask = np.any(hc.mask,axis=1) tide.data[:] = predict_tide_drift(t, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) minor = infer_minor_corrections(t, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) tide.data[:] += minor.data[:] elif (TYPE.lower() == 'time series'): npts = len(t) tide = np.ma.zeros((npts), fill_value=FILL_VALUE) tide.mask = np.any(hc.mask,axis=1) tide.data[:] = predict_tidal_ts(t, hc, c, - DELTAT=deltat, 
CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) minor = infer_minor_corrections(t, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) tide.data[:] += minor.data[:] #-- replace invalid values with fill value tide.data[tide.mask] = tide.fill_value diff --git a/pyTMD/iers_mean_pole.py b/pyTMD/iers_mean_pole.py index bf8c0805..b0503a61 100644 --- a/pyTMD/iers_mean_pole.py +++ b/pyTMD/iers_mean_pole.py @@ -19,10 +19,10 @@ x: Angular coordinate x of conventional mean pole [arcsec] y: Angular coordinate y of conventional mean pole [arcsec] flag: epoch is valid for version and version number is valid - data will be set to FILL_VALUE if flag == False + data will be set to fill_value if flag == False OPTIONS: - FILL_VALUE: value for invalid flags + fill_value: value for invalid flags PYTHON DEPENDENCIES: numpy: Scientific Computing Tools For Python @@ -34,6 +34,7 @@ IERS Technical Note No. 36, BKG (2010) UPDATE HISTORY: + Updated 05/2022: changed keyword arguments to camel case Updated 04/2022: updated docstrings to numpy documentation format Updated 02/2021: replaced numpy bool to prevent deprecation warning Updated 07/2020: added function docstrings @@ -41,11 +42,14 @@ Written 09/2017 """ from __future__ import division + import os +import copy +import warnings import numpy as np #-- read table of mean pole values, calculate angular coordinates at epoch -def iers_mean_pole(input_file, input_epoch, version, FILL_VALUE=np.nan): +def iers_mean_pole(input_file, input_epoch, version, **kwargs): """ Calculates the angular coordinates of the IERS Conventional Mean Pole (CMP) @@ -58,7 +62,7 @@ def iers_mean_pole(input_file, input_epoch, version, FILL_VALUE=np.nan): in decimal years version: str Year of the conventional model - FILL_VALUE: float, default np.nan + fill_value: float, default np.nan Value for invalid flags Returns @@ -75,6 +79,15 @@ def iers_mean_pole(input_file, input_epoch, version, FILL_VALUE=np.nan): 
.. [1] Petit, G. and Luzum, B. (eds.), IERS Conventions (2010), IERS Technical Note No. 36, BKG (2010) """ + #-- set default keyword arguments + kwargs.setdefault('fill_value', np.nan) + #-- raise warnings for deprecated keyword argument + if 'FILL_VALUE' in kwargs.keys(): + warnings.warn("""Deprecated keyword argument {0}. + Changed to '{1}'""".format('FILL_VALUE','fill_value'), + DeprecationWarning) + #-- set renamed argument to not break workflows + kwargs['fill_value'] = copy.copy(kwargs['FILL_VALUE']) #-- verify IERS model version assert version in ('2003','2010','2015'), "Incorrect IERS model version" #-- read mean pole file @@ -89,9 +102,9 @@ def iers_mean_pole(input_file, input_epoch, version, FILL_VALUE=np.nan): #-- final shape of the table nrows, ncols = np.shape(table) #-- allocate for output arrays - x = np.full_like(input_epoch,FILL_VALUE) - y = np.full_like(input_epoch,FILL_VALUE) - flag = np.zeros_like(input_epoch,dtype=bool) + x = np.full_like(input_epoch, kwargs['fill_value']) + y = np.full_like(input_epoch, kwargs['fill_value']) + flag = np.zeros_like(input_epoch, dtype=bool) for t,epoch in enumerate(input_epoch): #-- Conventional mean pole model in IERS Conventions 2003 if (version == '2003') and (epoch >= 1975) and (epoch < 2004): diff --git a/pyTMD/infer_minor_corrections.py b/pyTMD/infer_minor_corrections.py index adc577ae..22342841 100755 --- a/pyTMD/infer_minor_corrections.py +++ b/pyTMD/infer_minor_corrections.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" -infer_minor_corrections.py (04/2022) +infer_minor_corrections.py (05/2022) Return correction for minor constituents based on Richard Ray's PERTH3 code PERTH: PREdict Tidal Heights @@ -16,8 +16,8 @@ dh: height from minor constituents OPTIONS: - DELTAT: time correction for converting to Ephemeris Time (days) - CORRECTIONS: use nodal corrections from OTIS/ATLAS or GOT models + deltat: time correction for converting to Ephemeris Time (days) + corrections: use nodal corrections from OTIS/ATLAS 
or GOT models PYTHON DEPENDENCIES: numpy: Scientific Computing Tools For Python @@ -35,6 +35,7 @@ time series", Advances in Water Resources, 12, (1989). UPDATE HISTORY: + Updated 05/2022: changed keyword arguments to camel case Updated 04/2022: updated docstrings to numpy documentation format Updated 08/2020: change time variable names to not overwrite functions update nodal corrections for FES models @@ -46,10 +47,12 @@ use the number of dates if calculating a tidal time series at a point Updated 09/2017: Rewritten in Python """ +import copy +import warnings import numpy as np from pyTMD.calc_astrol_longitudes import calc_astrol_longitudes -def infer_minor_corrections(t,zmajor,constituents,DELTAT=0.0,CORRECTIONS=''): +def infer_minor_corrections(t, zmajor, constituents, **kwargs): """ Calculate the tidal corrections for minor constituents inferred using major constituents @@ -62,9 +65,9 @@ def infer_minor_corrections(t,zmajor,constituents,DELTAT=0.0,CORRECTIONS=''): Complex HC for given constituents/points constituents: list tidal constituent IDs - DELTAT: float, default 0.0 + deltat: float, default 0.0 time correction for converting to Ephemeris Time (days) - CORRECTIONS: str, default '' + corrections: str, default 'OTIS' use nodal corrections from OTIS/ATLAS or GOT models Returns @@ -80,6 +83,19 @@ def infer_minor_corrections(t,zmajor,constituents,DELTAT=0.0,CORRECTIONS=''): .. [3] M. G. G. Foreman and R. F. Henry, "The harmonic analysis of tidal model time series", Advances in Water Resources, 12, (1989). """ + #-- set default keyword arguments + kwargs.setdefault('deltat', 0.0) + kwargs.setdefault('corrections', 'OTIS') + #-- raise warnings for deprecated keyword arguments + deprecated_keywords = dict(DELTAT='deltat',CORRECTIONS='corrections') + for old,new in deprecated_keywords.items(): + if old in kwargs.keys(): + warnings.warn("""Deprecated keyword argument {0}. 
+ Changed to '{1}'""".format(old,new), + DeprecationWarning) + #-- set renamed argument to not break workflows + kwargs[new] = copy.copy(kwargs[old]) + #-- degrees to radians dtr = np.pi/180.0 #-- number of constituents @@ -92,7 +108,7 @@ def infer_minor_corrections(t,zmajor,constituents,DELTAT=0.0,CORRECTIONS=''): #-- convert time from days relative to Jan 1, 1992 to Modified Julian Days MJD = 48622.0 + t #-- major constituents used for inferring minor tides - cindex = ['q1','o1','p1','k1','n2','m2','s2','k2','2n2'] + cindex = ['q1', 'o1', 'p1', 'k1', 'n2', 'm2', 's2', 'k2', '2n2'] #-- re-order major tides to correspond to order of cindex z = np.ma.zeros((n,9),dtype=np.complex64) nz = 0 @@ -133,7 +149,7 @@ def infer_minor_corrections(t,zmajor,constituents,DELTAT=0.0,CORRECTIONS=''): zmin[:,16] = 0.0033*z[:,5] + 0.0082*z[:,6]#-- L2 zmin[:,17] = 0.0585*z[:,6]#-- t2 #-- additional coefficients for FES models - if CORRECTIONS in ('FES',): + if kwargs['corrections'] in ('FES',): #-- spline coefficients for admittances mu2 = [0.069439968323, 0.351535557706, -0.046278307672] nu2 = [-0.006104695053, 0.156878802427, 0.006755704028] @@ -152,9 +168,10 @@ def infer_minor_corrections(t,zmajor,constituents,DELTAT=0.0,CORRECTIONS=''): t1 = 15.0*hour t2 = 30.0*hour #-- set function for astronomical longitudes - ASTRO5 = True if CORRECTIONS in ('GOT','FES') else False + ASTRO5 = True if kwargs['corrections'] in ('GOT','FES') else False #-- convert from Modified Julian Dates into Ephemeris Time - S,H,P,omega,pp = calc_astrol_longitudes(MJD+DELTAT, ASTRO5=ASTRO5) + S,H,P,omega,pp = calc_astrol_longitudes(MJD + kwargs['deltat'], + ASTRO5=ASTRO5) #-- determine equilibrium tidal arguments arg = np.zeros((n,20)) @@ -219,7 +236,7 @@ def infer_minor_corrections(t,zmajor,constituents,DELTAT=0.0,CORRECTIONS=''): u[:,15] = u[:,11]#-- L2 u[:,16] = np.arctan2(-0.441*sinn, 1.0 + 0.441*cosn)/dtr#-- L2 - if CORRECTIONS in ('FES',): + if kwargs['corrections'] in ('FES',): #-- additional 
astronomical terms for FES models II = np.arccos(0.913694997 - 0.035692561*np.cos(omega*dtr)) at1 = np.arctan(1.01883*np.tan(omega*dtr/2.0)) diff --git a/pyTMD/load_nodal_corrections.py b/pyTMD/load_nodal_corrections.py index c733b640..59a537c4 100755 --- a/pyTMD/load_nodal_corrections.py +++ b/pyTMD/load_nodal_corrections.py @@ -16,8 +16,8 @@ G: phase correction in degrees OPTIONS: - DELTAT: time correction for converting to Ephemeris Time (days) - CORRECTIONS: use nodal corrections from OTIS/ATLAS or GOT models + deltat: time correction for converting to Ephemeris Time (days) + corrections: use nodal corrections from OTIS/ATLAS or GOT models PYTHON DEPENDENCIES: numpy: Scientific Computing Tools For Python @@ -38,22 +38,25 @@ UPDATE HISTORY: Updated 05/2022: added ESR netCDF4 formats to list of model types + changed keyword arguments to camel case Updated 04/2022: updated docstrings to numpy documentation format Updated 12/2020: fix k1 for FES models Updated 08/2020: change time variable names to not overwrite functions update nodal corrections for FES models Updated 07/2020: added function docstrings. 
add shallow water constituents - Updated 09/2019: added netcdf option to CORRECTIONS option + Updated 09/2019: added netcdf option to corrections option Updated 08/2018: added correction option ATLAS for localized OTIS solutions Updated 07/2018: added option to use GSFC GOT nodal corrections Updated 09/2017: Rewritten in Python Rewritten in Matlab by Lana Erofeeva 01/2003 Written by Richard Ray 03/1999 """ +import copy +import warnings import numpy as np from pyTMD.calc_astrol_longitudes import calc_astrol_longitudes -def load_nodal_corrections(MJD,constituents,DELTAT=0.0,CORRECTIONS='OTIS'): +def load_nodal_corrections(MJD, constituents, **kwargs): """ Calculates the nodal corrections for tidal constituents @@ -63,9 +66,9 @@ def load_nodal_corrections(MJD,constituents,DELTAT=0.0,CORRECTIONS='OTIS'): modified julian day of input date constituents: list tidal constituent IDs - DELTAT: float, default 0.0 + deltat: float, default 0.0 time correction for converting to Ephemeris Time (days) - CORRECTIONS: str, default 'OTIS' + corrections: str, default 'OTIS' use nodal corrections from OTIS/ATLAS or GOT models Returns @@ -87,6 +90,18 @@ def load_nodal_corrections(MJD,constituents,DELTAT=0.0,CORRECTIONS='OTIS'): .. [4] Egbert and Erofeeva, "Efficient Inverse Modeling of Barotropic Ocean Tides", Journal of Atmospheric and Oceanic Technology, (2002). """ + #-- set default keyword arguments + kwargs.setdefault('deltat', 0.0) + kwargs.setdefault('corrections', 'OTIS') + #-- raise warnings for deprecated keyword arguments + deprecated_keywords = dict(DELTAT='deltat',CORRECTIONS='corrections') + for old,new in deprecated_keywords.items(): + if old in kwargs.keys(): + warnings.warn("""Deprecated keyword argument {0}. 
+ Changed to '{1}'""".format(old,new), + DeprecationWarning) + #-- set renamed argument to not break workflows + kwargs[new] = copy.copy(kwargs[old]) #-- constituents array (not all are included in tidal program) cindex = ['sa','ssa','mm','msf','mf','mt','alpha1','2q1','sigma1','q1', @@ -100,9 +115,10 @@ def load_nodal_corrections(MJD,constituents,DELTAT=0.0,CORRECTIONS='OTIS'): dtr = np.pi/180.0 #-- set function for astronomical longitudes - ASTRO5 = True if CORRECTIONS in ('GOT','FES') else False + ASTRO5 = True if kwargs['corrections'] in ('GOT','FES') else False #-- convert from Modified Julian Dates into Ephemeris Time - s,h,p,omega,pp = calc_astrol_longitudes(MJD+DELTAT, ASTRO5=ASTRO5) + s,h,p,omega,pp = calc_astrol_longitudes(MJD + kwargs['deltat'], + ASTRO5=ASTRO5) hour = (MJD % 1)*24.0 t1 = 15.0*hour t2 = 30.0*hour @@ -127,9 +143,9 @@ def load_nodal_corrections(MJD,constituents,DELTAT=0.0,CORRECTIONS='OTIS'): arg[:,14] = t1 - s + 3.0*h - p + 90.0 #-- chi1 arg[:,15] = t1 - 2.0*h + pp - 90.0 #-- pi1 arg[:,16] = t1 - h - 90.0 #-- p1 - if CORRECTIONS in ('OTIS','ATLAS','ESR','netcdf'): + if kwargs['corrections'] in ('OTIS','ATLAS','ESR','netcdf'): arg[:,17] = t1 + 90.0 #-- s1 - elif CORRECTIONS in ('GOT','FES'): + elif kwargs['corrections'] in ('GOT','FES'): arg[:,17] = t1 + 180.0 #-- s1 (Doodson's phase) arg[:,18] = t1 + h + 90.0 #-- k1 arg[:,19] = t1 + 2.0*h - pp + 90.0 #-- psi1 @@ -187,7 +203,7 @@ def load_nodal_corrections(MJD,constituents,DELTAT=0.0,CORRECTIONS='OTIS'): f = np.zeros((nt,60)) u = np.zeros((nt,60)) #-- determine nodal corrections f and u for each model type - if CORRECTIONS in ('OTIS','ATLAS','ESR','netcdf'): + if kwargs['corrections'] in ('OTIS','ATLAS','ESR','netcdf'): f[:,0] = 1.0 #-- Sa f[:,1] = 1.0 #-- Ssa f[:,2] = 1.0 - 0.130*cosn #-- Mm @@ -337,7 +353,7 @@ def load_nodal_corrections(MJD,constituents,DELTAT=0.0,CORRECTIONS='OTIS'): #-- mean sea level u[:,59] = 0.0 #-- Z0 - elif CORRECTIONS in ('FES',): + elif kwargs['corrections'] in 
('FES',): #-- additional astronomical terms for FES models II = np.arccos(0.913694997 - 0.035692561*np.cos(omega*dtr)) at1 = np.arctan(1.01883*np.tan(omega*dtr/2.0)) @@ -482,7 +498,7 @@ def load_nodal_corrections(MJD,constituents,DELTAT=0.0,CORRECTIONS='OTIS'): #-- mean sea level u[:,59] = 0.0 #-- Z0 - elif CORRECTIONS in ('GOT',): + elif kwargs['corrections'] in ('GOT',): f[:,9] = 1.009 + 0.187*cosn - 0.015*cos2n#-- Q1 f[:,11] = f[:,9]#-- O1 f[:,16] = 1.0 #-- P1 diff --git a/pyTMD/predict_tidal_ts.py b/pyTMD/predict_tidal_ts.py index 47bbe8d7..3874c2f1 100644 --- a/pyTMD/predict_tidal_ts.py +++ b/pyTMD/predict_tidal_ts.py @@ -15,8 +15,8 @@ ht: tidal time series reconstructed using the nodal corrections OPTIONS: - DELTAT: time correction for converting to Ephemeris Time (days) - CORRECTIONS: use nodal corrections from OTIS/ATLAS or GOT models + deltat: time correction for converting to Ephemeris Time (days) + corrections: use nodal corrections from OTIS/ATLAS or GOT models REFERENCES: G. D. Egbert and S. 
Erofeeva, "Efficient Inverse Modeling of Barotropic @@ -48,7 +48,7 @@ from pyTMD.load_constituent import load_constituent from pyTMD.load_nodal_corrections import load_nodal_corrections -def predict_tidal_ts(t, hc, constituents, DELTAT=0.0, CORRECTIONS='OTIS'): +def predict_tidal_ts(t, hc, constituents, deltat=0.0, corrections='OTIS'): """ Predict tidal time series at a single location using harmonic constants @@ -60,9 +60,9 @@ def predict_tidal_ts(t, hc, constituents, DELTAT=0.0, CORRECTIONS='OTIS'): harmonic constant vector constituents: list tidal constituent IDs - DELTAT: float, default 0.0 + deltat: float, default 0.0 time correction for converting to Ephemeris Time (days) - CORRECTIONS: str, default '' + corrections: str, default '' use nodal corrections from OTIS/ATLAS or GOT models Returns @@ -80,18 +80,18 @@ def predict_tidal_ts(t, hc, constituents, DELTAT=0.0, CORRECTIONS='OTIS'): #-- load the nodal corrections #-- convert time to Modified Julian Days (MJD) pu,pf,G = load_nodal_corrections(t + 48622.0, constituents, - DELTAT=DELTAT, CORRECTIONS=CORRECTIONS) + deltat=deltat, corrections=corrections) #-- allocate for output time series ht = np.ma.zeros((nt)) ht.mask = np.zeros((nt),dtype=bool) #-- for each constituent for k,c in enumerate(constituents): - if CORRECTIONS in ('OTIS','ATLAS','ESR','netcdf'): + if corrections in ('OTIS','ATLAS','ESR','netcdf'): #-- load parameters for each constituent amp,ph,omega,alpha,species = load_constituent(c) #-- add component for constituent to output tidal time series th = omega*t*86400.0 + ph + pu[:,k] - elif CORRECTIONS in ('GOT','FES'): + elif corrections in ('GOT','FES'): th = G[:,k]*np.pi/180.0 + pu[:,k] #-- sum over all tides at location ht.data[:] += pf[:,k]*hc.real[0,k]*np.cos(th) - \ diff --git a/pyTMD/predict_tide.py b/pyTMD/predict_tide.py index 7918897c..6d1fdc4e 100644 --- a/pyTMD/predict_tide.py +++ b/pyTMD/predict_tide.py @@ -15,8 +15,8 @@ ht: tide values reconstructed using the nodal corrections 
OPTIONS: - DELTAT: time correction for converting to Ephemeris Time (days) - CORRECTIONS: use nodal corrections from OTIS/ATLAS or GOT models + deltat: time correction for converting to Ephemeris Time (days) + corrections: use nodal corrections from OTIS/ATLAS or GOT models REFERENCES: G. D. Egbert and S. Erofeeva, "Efficient Inverse Modeling of Barotropic @@ -49,7 +49,7 @@ from pyTMD.load_constituent import load_constituent from pyTMD.load_nodal_corrections import load_nodal_corrections -def predict_tide(t, hc, constituents, DELTAT=0.0, CORRECTIONS='OTIS'): +def predict_tide(t, hc, constituents, deltat=0.0, corrections='OTIS'): """ Predict tides at a single time using harmonic constants @@ -61,9 +61,9 @@ def predict_tide(t, hc, constituents, DELTAT=0.0, CORRECTIONS='OTIS'): harmonic constant vector constituents: list tidal constituent IDs - DELTAT: float, default 0.0 + deltat: float, default 0.0 time correction for converting to Ephemeris Time (days) - CORRECTIONS: str, default 'OTIS' + corrections: str, default 'OTIS' use nodal corrections from OTIS/ATLAS or GOT models Returns @@ -82,18 +82,18 @@ def predict_tide(t, hc, constituents, DELTAT=0.0, CORRECTIONS='OTIS'): #-- load the nodal corrections #-- convert time to Modified Julian Days (MJD) pu,pf,G = load_nodal_corrections(t + 48622.0, constituents, - DELTAT=DELTAT, CORRECTIONS=CORRECTIONS) + deltat=deltat, corrections=corrections) #-- allocate for output tidal elevation ht = np.ma.zeros((npts)) ht.mask = np.zeros((npts),dtype=bool) #-- for each constituent for k,c in enumerate(constituents): - if CORRECTIONS in ('OTIS','ATLAS','ESR','netcdf'): + if corrections in ('OTIS','ATLAS','ESR','netcdf'): #-- load parameters for each constituent amp,ph,omega,alpha,species = load_constituent(c) #-- add component for constituent to output tidal elevation th = omega*t*86400.0 + ph + pu[0,k] - elif CORRECTIONS in ('GOT','FES'): + elif corrections in ('GOT','FES'): th = G[0,k]*np.pi/180.0 + pu[0,k] #-- sum over all tides 
ht.data[:] += pf[0,k]*hc.real[:,k]*np.cos(th) - \ diff --git a/pyTMD/predict_tide_drift.py b/pyTMD/predict_tide_drift.py index 6e38a7a0..607328c1 100755 --- a/pyTMD/predict_tide_drift.py +++ b/pyTMD/predict_tide_drift.py @@ -15,8 +15,8 @@ ht: tidal time series reconstructed using the nodal corrections OPTIONS: - DELTAT: time correction for converting to Ephemeris Time (days) - CORRECTIONS: use nodal corrections from OTIS/ATLAS or GOT models + deltat: time correction for converting to Ephemeris Time (days) + corrections: use nodal corrections from OTIS/ATLAS or GOT models REFERENCES: G. D. Egbert and S. Erofeeva, "Efficient Inverse Modeling of Barotropic @@ -48,7 +48,7 @@ from pyTMD.load_constituent import load_constituent from pyTMD.load_nodal_corrections import load_nodal_corrections -def predict_tide_drift(t, hc, constituents, DELTAT=0.0, CORRECTIONS='OTIS'): +def predict_tide_drift(t, hc, constituents, deltat=0.0, corrections='OTIS'): """ Predict tides at multiple times and locations using harmonic constants @@ -60,9 +60,9 @@ def predict_tide_drift(t, hc, constituents, DELTAT=0.0, CORRECTIONS='OTIS'): harmonic constant vector constituents: list tidal constituent IDs - DELTAT: float, default 0.0 + deltat: float, default 0.0 time correction for converting to Ephemeris Time (days) - CORRECTIONS: str, default 'OTIS' + corrections: str, default 'OTIS' use nodal corrections from OTIS/ATLAS or GOT models Returns @@ -80,18 +80,18 @@ def predict_tide_drift(t, hc, constituents, DELTAT=0.0, CORRECTIONS='OTIS'): #-- load the nodal corrections #-- convert time to Modified Julian Days (MJD) pu,pf,G = load_nodal_corrections(t + 48622.0, constituents, - DELTAT=DELTAT, CORRECTIONS=CORRECTIONS) + deltat=deltat, corrections=corrections) #-- allocate for output time series ht = np.ma.zeros((nt)) ht.mask = np.zeros((nt),dtype=bool) #-- for each constituent for k,c in enumerate(constituents): - if CORRECTIONS in ('OTIS','ATLAS','ESR','netcdf'): + if corrections in 
('OTIS','ATLAS','ESR','netcdf'): #-- load parameters for each constituent - amp,ph,omega,alpha,species = load_constituent(c) + amp, ph, omega, alpha, species = load_constituent(c) #-- add component for constituent to output tidal elevation th = omega*t*86400.0 + ph + pu[:,k] - elif CORRECTIONS in ('GOT','FES'): + elif corrections in ('GOT','FES'): th = G[:,k]*np.pi/180.0 + pu[:,k] #-- sum over all tides ht.data[:] += pf[:,k]*hc.real[:,k]*np.cos(th) - \ diff --git a/pyTMD/read_FES_model.py b/pyTMD/read_FES_model.py index a66e9132..768d3442 100644 --- a/pyTMD/read_FES_model.py +++ b/pyTMD/read_FES_model.py @@ -16,25 +16,25 @@ model_files: list of model files for each constituent OPTIONS: - TYPE: tidal variable to run + type: tidal variable to run z: heights u: horizontal transport velocities v: vertical transport velocities - VERSION: model version to run + version: model version to run FES1999 FES2004 FES2012 FES2014 EOT20 - METHOD: interpolation method + method: interpolation method bilinear: quick bilinear interpolation spline: scipy bivariate spline interpolation linear, nearest: scipy regular grid interpolations - EXTRAPOLATE: extrapolate model using nearest-neighbors - CUTOFF: extrapolation cutoff in kilometers + extrapolate: extrapolate model using nearest-neighbors + cutoff: extrapolation cutoff in kilometers set to np.inf to extrapolate for all points - GZIP: input ascii or netCDF4 files are compressed - SCALE: scaling factor for converting to output units + compressed: input files are gzip compressed + scale: scaling factor for converting to output units OUTPUTS: amplitude: amplitudes of tidal constituents @@ -55,6 +55,7 @@ UPDATE HISTORY: Updated 05/2022: reformat arguments to extract_FES_constants definition + changed keyword arguments to camel case Updated 04/2022: updated docstrings to numpy documentation format include utf-8 encoding in reads to be windows compliant fix netCDF4 masks for nan values @@ -77,6 +78,7 @@ Written 07/2020 """ import os 
+import copy import gzip import uuid import netCDF4 @@ -87,15 +89,7 @@ from pyTMD.nearest_extrap import nearest_extrap #-- PURPOSE: extract tidal harmonic constants from tide models at coordinates -def extract_FES_constants(ilon, ilat, - model_files=None, - TYPE='z', - VERSION=None, - METHOD='spline', - EXTRAPOLATE=False, - CUTOFF=10.0, - GZIP=True, - SCALE=1.0): +def extract_FES_constants(ilon, ilat, model_files=None, **kwargs): """ Reads files for an ascii or netCDF4 tidal model @@ -111,13 +105,13 @@ def extract_FES_constants(ilon, ilat, latitude to interpolate model_files: list or NoneType, default None list of model files for each constituent - TYPE: str, default 'z' + type: str, default 'z' Tidal variable to read - ``'z'``: heights - ``'u'``: horizontal transport velocities - ``'v'``: vertical transport velocities - VERSION: str or NoneType, default None + version: str or NoneType, default None Model version to read - ``'FES1999'`` @@ -125,21 +119,21 @@ def extract_FES_constants(ilon, ilat, - ``'FES2012'`` - ``'FES2014'`` - ``'EOT20'`` - METHOD: str, default 'spline' + method: str, default 'spline' Interpolation method - ``'bilinear'``: quick bilinear interpolation - ``'spline'``: scipy bivariate spline interpolation - ``'linear'``, ``'nearest'``: scipy regular grid interpolations - EXTRAPOLATE: bool, default False + extrapolate: bool, default False Extrapolate model using nearest-neighbors - CUTOFF: float, default 10.0 + cutoff: float, default 10.0 Extrapolation cutoff in kilometers Set to np.inf to extrapolate for all points - GZIP: bool, default False - Input files are compressed - SCALE: float, default 1.0 + compressed: bool, default False + Input files are gzip compressed + scale: float, default 1.0 Scaling factor for converting to output units Returns @@ -149,6 +143,25 @@ def extract_FES_constants(ilon, ilat, phase: float phases of tidal constituents """ + #-- set default keyword arguments + kwargs.setdefault('type', 'z') + kwargs.setdefault('version', 
None) + kwargs.setdefault('method', 'spline') + kwargs.setdefault('extrapolate', False) + kwargs.setdefault('cutoff', 10.0) + kwargs.setdefault('compressed', False) + kwargs.setdefault('scale', 1.0) + #-- raise warnings for deprecated keyword arguments + deprecated_keywords = dict(TYPE='type',VERSION='version', + METHOD='method',EXTRAPOLATE='extrapolate',CUTOFF='cutoff', + GZIP='compressed',SCALE='scale') + for old,new in deprecated_keywords.items(): + if old in kwargs.keys(): + warnings.warn("""Deprecated keyword argument {0}. + Changed to '{1}'""".format(old,new), + DeprecationWarning) + #-- set renamed argument to not break workflows + kwargs[new] = copy.copy(kwargs[old]) #-- raise warning if model files are entered as a string if isinstance(model_files,str): @@ -174,14 +187,12 @@ def extract_FES_constants(ilon, ilat, if not os.access(os.path.expanduser(fi), os.F_OK): raise FileNotFoundError(os.path.expanduser(fi)) #-- read constituent from elevation file - if VERSION in ('FES1999','FES2004'): + if kwargs['version'] in ('FES1999','FES2004'): #-- FES ascii constituent files - hc,lon,lat = read_ascii_file(os.path.expanduser(fi), - GZIP=GZIP, TYPE=TYPE, VERSION=VERSION) - elif VERSION in ('FES2012','FES2014','EOT20'): + hc,lon,lat = read_ascii_file(os.path.expanduser(fi), **kwargs) + elif kwargs['version'] in ('FES2012','FES2014','EOT20'): #-- FES netCDF4 constituent files - hc,lon,lat = read_netcdf_file(os.path.expanduser(fi), - GZIP=GZIP, TYPE=TYPE, VERSION=VERSION) + hc,lon,lat = read_netcdf_file(os.path.expanduser(fi), **kwargs) #-- adjust longitudinal convention of input latitude and longitude #-- to fit tide model convention if (np.min(ilon) < 0.0) & (np.max(lon) > 180.0): @@ -193,25 +204,26 @@ def extract_FES_constants(ilon, ilat, #-- tide model convention (-180:180) ilon[ilon>180.0] -= 360.0 #-- interpolated complex form of constituent oscillation - hci = np.ma.zeros((npts),dtype=hc.dtype,fill_value=hc.fill_value) + hci = np.ma.zeros((npts), dtype=hc.dtype, 
fill_value=hc.fill_value) hci.mask = np.zeros((npts),dtype=bool) #-- interpolate amplitude and phase of the constituent - if (METHOD == 'bilinear'): + if (kwargs['method'] == 'bilinear'): #-- replace invalid values with nan hc[hc.mask] = np.nan #-- use quick bilinear to interpolate values - hci.data[:] = bilinear_interp(lon,lat,hc,ilon,ilat,dtype=hc.dtype) + hci.data[:] = bilinear_interp(lon, lat, hc, ilon, ilat, + dtype=hc.dtype) #-- replace nan values with fill_value hci.mask[:] |= np.isnan(hci.data) hci.data[hci.mask] = hci.fill_value - elif (METHOD == 'spline'): + elif (kwargs['method'] == 'spline'): #-- interpolate complex form of the constituent with scipy - f1=scipy.interpolate.RectBivariateSpline(lon,lat, - hc.data.real.T,kx=1,ky=1) - f2=scipy.interpolate.RectBivariateSpline(lon,lat, - hc.data.imag.T,kx=1,ky=1) - f3=scipy.interpolate.RectBivariateSpline(lon,lat, - hc.mask.T,kx=1,ky=1) + f1=scipy.interpolate.RectBivariateSpline(lon, lat, + hc.data.real.T, kx=1, ky=1) + f2=scipy.interpolate.RectBivariateSpline(lon, lat, + hc.data.imag.T, kx=1, ky=1) + f3=scipy.interpolate.RectBivariateSpline(lon, lat, + hc.mask.T, kx=1, ky=1) hci.data.real[:] = f1.ev(ilon,ilat) hci.data.imag[:] = f2.ev(ilon,ilat) hci.mask[:] = f3.ev(ilon,ilat).astype(bool) @@ -220,26 +232,27 @@ def extract_FES_constants(ilon, ilat, else: #-- use scipy regular grid to interpolate values for a given method r1 = scipy.interpolate.RegularGridInterpolator((lat,lon), - hc.data, method=METHOD, bounds_error=False, + hc.data, method=kwargs['method'], bounds_error=False, fill_value=hci.fill_value) r2 = scipy.interpolate.RegularGridInterpolator((lat,lon), - hc.mask, method=METHOD, bounds_error=False, fill_value=1) + hc.mask, method=kwargs['method'], bounds_error=False, + fill_value=1) hci.data[:] = r1.__call__(np.c_[ilat,ilon]) hci.mask[:] = np.ceil(r2.__call__(np.c_[ilat,ilon])).astype(bool) #-- replace invalid values with fill_value hci.mask[:] |= (hci.data == hci.fill_value) hci.data[hci.mask] = 
hci.fill_value #-- extrapolate data using nearest-neighbors - if EXTRAPOLATE and np.any(hci.mask): + if kwargs['extrapolate'] and np.any(hci.mask): #-- find invalid data points inv, = np.nonzero(hci.mask) #-- replace invalid values with nan hc[hc.mask] = np.nan #-- extrapolate points within cutoff of valid model points - hci[inv] = nearest_extrap(lon,lat,hc,ilon[inv],ilat[inv], - dtype=hc.dtype,cutoff=CUTOFF) + hci[inv] = nearest_extrap(lon, lat, hc, ilon[inv], ilat[inv], + dtype=hc.dtype, cutoff=kwargs['cutoff']) #-- convert amplitude from input units to meters - amplitude.data[:,i] = np.abs(hci.data)*SCALE + amplitude.data[:,i] = np.abs(hci.data)*kwargs['scale'] amplitude.mask[:,i] = np.copy(hci.mask) #-- phase of the constituent in radians ph.data[:,i] = np.arctan2(-np.imag(hci.data),np.real(hci.data)) @@ -255,7 +268,7 @@ def extract_FES_constants(ilon, ilat, return (amplitude,phase) #-- PURPOSE: read FES ascii tide model grid files -def read_ascii_file(input_file, GZIP=False, **kwargs): +def read_ascii_file(input_file, **kwargs): """ Read FES (Finite Element Solution) tide model file @@ -263,8 +276,8 @@ def read_ascii_file(input_file, GZIP=False, **kwargs): ---------- input_file: str model file - GZIP: bool, default False - input file is compressed + compressed: bool, default False + Input file is gzip compressed Returns ------- @@ -272,10 +285,13 @@ def read_ascii_file(input_file, GZIP=False, **kwargs): lon: longitude of tidal model lat: latitude of tidal model """ + #-- set default keyword arguments + kwargs.setdefault('compressed', False) #-- tilde-expand input file input_file = os.path.expanduser(input_file) #-- read input tide model file - if GZIP: + if kwargs['compressed']: + #-- read gzipped ascii file with gzip.open(input_file, 'rb') as f: file_contents = f.read(input_file).splitlines() else: @@ -324,7 +340,7 @@ def read_ascii_file(input_file, GZIP=False, **kwargs): return (hc,lon,lat) #-- PURPOSE: read FES netCDF4 tide model files -def 
read_netcdf_file(input_file, GZIP=False, TYPE=None, VERSION=None): +def read_netcdf_file(input_file, **kwargs): """ Read FES (Finite Element Solution) tide model netCDF4 file @@ -332,16 +348,16 @@ def read_netcdf_file(input_file, GZIP=False, TYPE=None, VERSION=None): ---------- input_file: str model file - GZIP: bool, default False - Input file is compressed - VERSION: str or NoneType, default None - model version - TYPE: str or NoneType, default None + type: str or NoneType, default None Tidal variable to read - ``'z'``: heights - ``'u'``: horizontal transport velocities - ``'v'``: vertical transport velocities + version: str or NoneType, default None + FES model version + compressed: bool, default False + Input file is gzip compressed Returns ------- @@ -352,32 +368,37 @@ def read_netcdf_file(input_file, GZIP=False, TYPE=None, VERSION=None): lat: float latitude of tidal model """ + #-- set default keyword arguments + kwargs.setdefault('type', None) + kwargs.setdefault('version', None) + kwargs.setdefault('compressed', False) #-- read the netcdf format tide elevation file - if GZIP: + if kwargs['compressed']: + #-- read gzipped netCDF4 file f = gzip.open(os.path.expanduser(input_file),'rb') fileID = netCDF4.Dataset(uuid.uuid4().hex,'r',memory=f.read()) else: fileID = netCDF4.Dataset(os.path.expanduser(input_file), 'r') #-- variable dimensions for each model - if VERSION in ('FES2012',): + if kwargs['version'] in ('FES2012',): lon = fileID.variables['longitude'][:] lat = fileID.variables['latitude'][:] - elif VERSION in ('FES2014','EOT20'): + elif kwargs['version'] in ('FES2014','EOT20'): lon = fileID.variables['lon'][:] lat = fileID.variables['lat'][:] #-- amplitude and phase components for each type - if (TYPE == 'z'): + if (kwargs['type'] == 'z'): amp = fileID.variables['amplitude'][:] ph = fileID.variables['phase'][:] - elif (TYPE == 'u'): + elif (kwargs['type'] == 'u'): amp = fileID.variables['Ua'][:] ph = fileID.variables['Ug'][:] - elif (TYPE == 'v'): + elif 
(kwargs['type'] == 'v'): amp = fileID.variables['Va'][:] ph = fileID.variables['Vg'][:] #-- close the file fileID.close() - f.close() if GZIP else None + f.close() if kwargs['compressed'] else None #-- calculate complex form of constituent oscillation hc = amp*np.exp(-1j*ph*np.pi/180.0) #-- set masks diff --git a/pyTMD/read_GOT_model.py b/pyTMD/read_GOT_model.py index ef54014d..2ad31169 100644 --- a/pyTMD/read_GOT_model.py +++ b/pyTMD/read_GOT_model.py @@ -12,15 +12,15 @@ model_files: list of model files for each constituent OPTIONS: - METHOD: interpolation method + method: interpolation method bilinear: quick bilinear interpolation spline: scipy bivariate spline interpolation linear, nearest: scipy regular grid interpolations - EXTRAPOLATE: extrapolate model using nearest-neighbors - CUTOFF: extrapolation cutoff in kilometers + extrapolate: extrapolate model using nearest-neighbors + cutoff: extrapolation cutoff in kilometers set to np.inf to extrapolate for all points - GZIP: input files are compressed - SCALE: scaling factor for converting to output units + compressed: input files are gzip compressed + scale: scaling factor for converting to output units OUTPUTS: amplitude: amplitudes of tidal constituents @@ -40,6 +40,7 @@ UPDATE HISTORY: Updated 05/2022: reformat arguments to extract_GOT_constants definition + changed keyword arguments to camel case Updated 04/2022: updated docstrings to numpy documentation format include utf-8 encoding in reads to be windows compliant Updated 12/2021: adjust longitude convention based on model longitude @@ -57,13 +58,13 @@ adjust dimensions of input coordinates to be iterable Updated 08/2020: replaced griddata with scipy regular grid interpolators Updated 07/2020: added function docstrings. separate bilinear interpolation - update griddata interpolation. add option GZIP for compression + update griddata interpolation. 
add option for compression Updated 06/2020: use argmin and argmax in bilinear interpolation Updated 11/2019: find invalid mask points for each constituent Updated 09/2019: output as numpy masked arrays instead of nan-filled arrays Updated 07/2019: interpolate fill value mask with bivariate splines Updated 12/2018: python3 compatibility updates for division and zip - Updated 10/2018: added SCALE as load tides are in mm and ocean are in cm + Updated 10/2018: added scale as load tides are in mm and ocean are in cm Updated 08/2018: added multivariate spline interpolation option Written 07/2018 """ @@ -71,6 +72,7 @@ import os import re +import copy import gzip import warnings import numpy as np @@ -79,13 +81,7 @@ from pyTMD.nearest_extrap import nearest_extrap #-- PURPOSE: extract tidal harmonic constants out of GOT model at coordinates -def extract_GOT_constants(ilon, ilat, - model_files=None, - METHOD=None, - EXTRAPOLATE=False, - CUTOFF=10.0, - GZIP=True, - SCALE=1.0): +def extract_GOT_constants(ilon, ilat, model_files=None, **kwargs): """ Reads files for Richard Ray's Global Ocean Tide (GOT) models @@ -101,21 +97,21 @@ def extract_GOT_constants(ilon, ilat, latitude to interpolate model_files: list or NoneType, default None list of model files for each constituent - METHOD: str, default 'spline' + method: str, default 'spline' Interpolation method - ``'bilinear'``: quick bilinear interpolation - ``'spline'``: scipy bivariate spline interpolation - ``'linear'``, ``'nearest'``: scipy regular grid interpolations - EXTRAPOLATE: bool, default False + extrapolate: bool, default False Extrapolate model using nearest-neighbors - CUTOFF: float, default 10.0 + cutoff: float, default 10.0 Extrapolation cutoff in kilometers Set to np.inf to extrapolate for all points - GZIP: bool, default False - Input files are compressed - SCALE: float, default 1.0 + compressed: bool, default False + Input files are gzip compressed + scale: float, default 1.0 Scaling factor for converting to 
output units Returns @@ -127,6 +123,23 @@ def extract_GOT_constants(ilon, ilat, constituents: list list of model constituents """ + #-- set default keyword arguments + kwargs.setdefault('method', 'spline') + kwargs.setdefault('extrapolate', False) + kwargs.setdefault('cutoff', 10.0) + kwargs.setdefault('compressed', False) + kwargs.setdefault('scale', 1.0) + #-- raise warnings for deprecated keyword arguments + deprecated_keywords = dict(METHOD='method', + EXTRAPOLATE='extrapolate',CUTOFF='cutoff', + GZIP='compressed',SCALE='scale') + for old,new in deprecated_keywords.items(): + if old in kwargs.keys(): + warnings.warn("""Deprecated keyword argument {0}. + Changed to '{1}'""".format(old,new), + DeprecationWarning) + #-- set renamed argument to not break workflows + kwargs[new] = copy.copy(kwargs[old]) #-- raise warning if model files are entered as a string if isinstance(model_files,str): @@ -154,7 +167,7 @@ def extract_GOT_constants(ilon, ilat, raise FileNotFoundError(os.path.expanduser(model_file)) #-- read constituent from elevation file hc,lon,lat,cons = read_GOT_grid(os.path.expanduser(model_file), - GZIP=GZIP) + compressed=kwargs['compressed']) #-- append to the list of constituents constituents.append(cons) #-- adjust longitudinal convention of input latitude and longitude @@ -174,25 +187,26 @@ def extract_GOT_constants(ilon, ilat, lon = extend_array(lon,dlon) hc = extend_matrix(hc) #-- interpolated complex form of constituent oscillation - hci = np.ma.zeros((npts),dtype=hc.dtype,fill_value=hc.fill_value) + hci = np.ma.zeros((npts), dtype=hc.dtype, fill_value=hc.fill_value) hci.mask = np.zeros((npts),dtype=bool) #-- interpolate amplitude and phase of the constituent - if (METHOD == 'bilinear'): + if (kwargs['method'] == 'bilinear'): #-- replace invalid values with nan hc[hc.mask] = np.nan #-- use quick bilinear to interpolate values - hci.data[:] = bilinear_interp(lon,lat,hc,ilon,ilat,dtype=hc.dtype) + hci.data[:] = bilinear_interp(lon, lat, hc, ilon, ilat, 
+ dtype=hc.dtype) #-- replace nan values with fill_value hci.mask[:] |= np.isnan(hci.data) hci.data[hci.mask] = hci.fill_value - elif (METHOD == 'spline'): + elif (kwargs['method'] == 'spline'): #-- interpolate complex form of the constituent with scipy - f1=scipy.interpolate.RectBivariateSpline(lon,lat, - hc.data.real.T,kx=1,ky=1) - f2=scipy.interpolate.RectBivariateSpline(lon,lat, - hc.data.imag.T,kx=1,ky=1) - f3=scipy.interpolate.RectBivariateSpline(lon,lat, - hc.mask.T,kx=1,ky=1) + f1=scipy.interpolate.RectBivariateSpline(lon, lat, + hc.data.real.T, kx=1, ky=1) + f2=scipy.interpolate.RectBivariateSpline(lon, lat, + hc.data.imag.T, kx=1, ky=1) + f3=scipy.interpolate.RectBivariateSpline(lon, lat, + hc.mask.T, kx=1, ky=1) hci.data.real[:] = f1.ev(ilon,ilat) hci.data.imag[:] = f2.ev(ilon,ilat) hci.mask[:] = f3.ev(ilon,ilat).astype(bool) @@ -201,26 +215,27 @@ def extract_GOT_constants(ilon, ilat, else: #-- use scipy regular grid to interpolate values for a given method r1 = scipy.interpolate.RegularGridInterpolator((lat,lon), - hc.data, method=METHOD, bounds_error=False, + hc.data, method=kwargs['method'], bounds_error=False, fill_value=hci.fill_value) r2 = scipy.interpolate.RegularGridInterpolator((lat,lon), - hc.mask, method=METHOD, bounds_error=False, fill_value=1) + hc.mask, method=kwargs['method'], bounds_error=False, + fill_value=1) hci.data[:] = r1.__call__(np.c_[ilat,ilon]) hci.mask[:] = np.ceil(r2.__call__(np.c_[ilat,ilon])).astype(bool) #-- replace invalid values with fill_value hci.mask[:] |= (hci.data == hci.fill_value) hci.data[hci.mask] = hci.fill_value #-- extrapolate data using nearest-neighbors - if EXTRAPOLATE and np.any(hci.mask): + if kwargs['extrapolate'] and np.any(hci.mask): #-- find invalid data points inv, = np.nonzero(hci.mask) #-- replace invalid values with nan hc[hc.mask] = np.nan #-- extrapolate points within cutoff of valid model points - hci[inv] = nearest_extrap(lon,lat,hc,ilon[inv],ilat[inv], - dtype=hc.dtype,cutoff=CUTOFF) + 
hci[inv] = nearest_extrap(lon, lat, hc, ilon[inv], ilat[inv], + dtype=hc.dtype, cutoff=kwargs['cutoff']) #-- convert amplitude from input units to meters - amplitude.data[:,i] = np.abs(hci.data)*SCALE + amplitude.data[:,i] = np.abs(hci.data)*kwargs['scale'] amplitude.mask[:,i] = np.copy(hci.mask) #-- phase of the constituent in radians ph.data[:,i] = np.arctan2(-np.imag(hci.data),np.real(hci.data)) @@ -285,7 +300,7 @@ def extend_matrix(input_matrix): return temp #-- PURPOSE: read GOT model grid files -def read_GOT_grid(input_file, GZIP=False): +def read_GOT_grid(input_file, **kwargs): """ Read Richard Ray's Global Ocean Tide (GOT) model file @@ -293,8 +308,8 @@ def read_GOT_grid(input_file, GZIP=False): ---------- input_file: str Model file - GZIP: bool - Input file is compressed + compressed: bool, default False + Input file is gzip compressed Returns ------- @@ -307,10 +322,13 @@ def read_GOT_grid(input_file, GZIP=False): cons: str tidal constituent ID """ + #-- set default keyword arguments + kwargs.setdefault('compressed', False) #-- tilde-expand input file input_file = os.path.expanduser(input_file) #-- read input tide model file - if GZIP: + if kwargs['compressed']: + #-- read gzipped ascii file with gzip.open(input_file, 'rb') as f: file_contents = f.read().decode('utf8').splitlines() else: diff --git a/pyTMD/read_netcdf_model.py b/pyTMD/read_netcdf_model.py index 4a15c8d0..7494fd98 100644 --- a/pyTMD/read_netcdf_model.py +++ b/pyTMD/read_netcdf_model.py @@ -18,21 +18,21 @@ model_files: list of model files for each constituent (can be gzipped) OPTIONS: - TYPE: tidal variable to run + type: tidal variable to run z: heights u: horizontal transport velocities U: horizontal depth-averaged transport v: vertical transport velocities V: vertical depth-averaged transport - METHOD: interpolation method + method: interpolation method bilinear: quick bilinear interpolation spline: scipy bivariate spline interpolation linear, nearest: scipy regular grid interpolations - 
EXTRAPOLATE: extrapolate model using nearest-neighbors - CUTOFF: extrapolation cutoff in kilometers + extrapolate: extrapolate model using nearest-neighbors + cutoff: extrapolation cutoff in kilometers set to np.inf to extrapolate for all points - GZIP: input netCDF4 files are compressed - SCALE: scaling factor for converting to output units + compressed: input netCDF4 files are gzip compressed + scale: scaling factor for converting to output units OUTPUTS: amplitude: amplitudes of tidal constituents @@ -55,6 +55,7 @@ UPDATE HISTORY: Updated 05/2022: reformat arguments to extract_netcdf_constants definition + changed keyword arguments to camel case Updated 04/2022: updated docstrings to numpy documentation format Updated 12/2021: adjust longitude convention based on model longitude Updated 09/2021: fix cases where there is no mask on constituent files @@ -79,6 +80,7 @@ Written 09/2019 """ import os +import copy import gzip import uuid import netCDF4 @@ -92,12 +94,7 @@ def extract_netcdf_constants(ilon, ilat, grid_file=None, model_files=None, - TYPE='z', - METHOD='spline', - EXTRAPOLATE=False, - CUTOFF=10.0, - GZIP=True, - SCALE=1.0): + **kwargs): """ Reads files for ATLAS netCDF4 tidal models @@ -115,7 +112,7 @@ def extract_netcdf_constants(ilon, ilat, grid file for model model_files: list or NoneType, default None list of model files for each constituent - TYPE: str, default 'z' + type: str, default 'z' Tidal variable to read - ``'z'``: heights @@ -123,21 +120,21 @@ def extract_netcdf_constants(ilon, ilat, - ``'U'``: horizontal depth-averaged transport - ``'v'``: vertical transport velocities - ``'V'``: vertical depth-averaged transport - METHOD: str, default 'spline' + method: str, default 'spline' Interpolation method - ``'bilinear'``: quick bilinear interpolation - ``'spline'``: scipy bivariate spline interpolation - ``'linear'``, ``'nearest'``: scipy regular grid interpolations - EXTRAPOLATE: bool, default False + extrapolate: bool, default False Extrapolate 
model using nearest-neighbors - CUTOFF: float, default 10.0 + cutoff: float, default 10.0 Extrapolation cutoff in kilometers Set to np.inf to extrapolate for all points - GZIP: bool, default False - Input files are compressed - SCALE: float, default 1.0 + compressed: bool, default False + Input files are gzip compressed + scale: float, default 1.0 Scaling factor for converting to output units Returns @@ -151,6 +148,24 @@ def extract_netcdf_constants(ilon, ilat, constituents: list list of model constituents """ + #-- set default keyword arguments + kwargs.setdefault('type', 'z') + kwargs.setdefault('method', 'spline') + kwargs.setdefault('extrapolate', False) + kwargs.setdefault('cutoff', 10.0) + kwargs.setdefault('compressed', True) + kwargs.setdefault('scale', 1.0) + #-- raise warnings for deprecated keyword arguments + deprecated_keywords = dict(TYPE='type',METHOD='method', + EXTRAPOLATE='extrapolate',CUTOFF='cutoff', + GZIP='compressed',SCALE='scale') + for old,new in deprecated_keywords.items(): + if old in kwargs.keys(): + warnings.warn("""Deprecated keyword argument {0}. 
+ Changed to '{1}'""".format(old,new), + DeprecationWarning) + #-- set renamed argument to not break workflows + kwargs[new] = copy.copy(kwargs[old]) #-- raise warning if model files are entered as a string if isinstance(model_files,str): @@ -162,7 +177,8 @@ def extract_netcdf_constants(ilon, ilat, raise FileNotFoundError(os.path.expanduser(grid_file)) #-- read the tide grid file for bathymetry and spatial coordinates - lon,lat,bathymetry = read_netcdf_grid(grid_file, TYPE, GZIP=GZIP) + lon,lat,bathymetry = read_netcdf_grid(grid_file, kwargs['type'], + compressed=kwargs['compressed']) #-- adjust dimensions of input coordinates to be iterable ilon = np.atleast_1d(ilon) @@ -192,36 +208,37 @@ def extract_netcdf_constants(ilon, ilat, #-- interpolate bathymetry and mask to output points D = np.ma.zeros((npts)) D.mask = np.zeros((npts),dtype=bool) - if (METHOD == 'bilinear'): + if (kwargs['method'] == 'bilinear'): #-- replace invalid values with nan bathymetry[bathymetry.mask] = np.nan #-- use quick bilinear to interpolate values - D.data[:] = bilinear_interp(lon,lat,bathymetry,ilon,ilat) + D.data[:] = bilinear_interp(lon, lat, bathymetry, ilon, ilat) #-- replace nan values with fill_value D.mask[:] = np.isnan(D.data) D.data[D.mask] = D.fill_value - elif (METHOD == 'spline'): + elif (kwargs['method'] == 'spline'): #-- use scipy bivariate splines to interpolate values - f1 = scipy.interpolate.RectBivariateSpline(lon,lat, - bathymetry.data.T,kx=1,ky=1) - f2 = scipy.interpolate.RectBivariateSpline(lon,lat, - bathymetry.mask.T,kx=1,ky=1) + f1 = scipy.interpolate.RectBivariateSpline(lon, lat, + bathymetry.data.T, kx=1, ky=1) + f2 = scipy.interpolate.RectBivariateSpline(lon, lat, + bathymetry.mask.T, kx=1, ky=1) D.data[:] = f1.ev(ilon,ilat) D.mask[:] = np.ceil(f2.ev(ilon,ilat).astype(bool)) else: #-- use scipy regular grid to interpolate values for a given method r1 = scipy.interpolate.RegularGridInterpolator((lat,lon), - bathymetry.data, method=METHOD, bounds_error=False) + 
bathymetry.data, method=kwargs['method'], bounds_error=False) r2 = scipy.interpolate.RegularGridInterpolator((lat,lon), - bathymetry.mask, method=METHOD, bounds_error=False, fill_value=1) + bathymetry.mask, method=kwargs['method'], bounds_error=False, + fill_value=1) D.data[:] = r1.__call__(np.c_[ilat,ilon]) D.mask[:] = np.ceil(r2.__call__(np.c_[ilat,ilon])).astype(bool) #-- u and v are velocities in cm/s - if TYPE in ('v','u'): + if kwargs['type'] in ('v','u'): unit_conv = (D.data/100.0) #-- U and V are transports in m^2/s - elif TYPE in ('V','U'): + elif kwargs['type'] in ('V','U'): unit_conv = 1.0 #-- number of constituents @@ -238,9 +255,10 @@ def extract_netcdf_constants(ilon, ilat, #-- check that model file is accessible if not os.access(os.path.expanduser(model_file), os.F_OK): raise FileNotFoundError(os.path.expanduser(model_file)) - if (TYPE == 'z'): + if (kwargs['type'] == 'z'): #-- read constituent from elevation file - z,con = read_elevation_file(model_file, GZIP=GZIP) + z,con = read_elevation_file(model_file, + compressed=kwargs['compressed']) #-- append constituent to list constituents.append(con) #-- replace original values with extend matrices @@ -250,18 +268,19 @@ def extract_netcdf_constants(ilon, ilat, #-- interpolate amplitude and phase of the constituent z1 = np.ma.zeros((npts),dtype=z.dtype) z1.mask = np.zeros((npts),dtype=bool) - if (METHOD == 'bilinear'): + if (kwargs['method'] == 'bilinear'): #-- replace invalid values with nan z[z.mask] = np.nan - z1.data[:] = bilinear_interp(lon,lat,z,ilon,ilat,dtype=z.dtype) + z1.data[:] = bilinear_interp(lon, lat, z, ilon, ilat, + dtype=z.dtype) #-- mask invalid values z1.mask[:] |= np.copy(D.mask) z1.data[z1.mask] = z1.fill_value - elif (METHOD == 'spline'): - f1 = scipy.interpolate.RectBivariateSpline(lon,lat, - z.data.real.T,kx=1,ky=1) - f2 = scipy.interpolate.RectBivariateSpline(lon,lat, - z.data.imag.T,kx=1,ky=1) + elif (kwargs['method'] == 'spline'): + f1 = 
scipy.interpolate.RectBivariateSpline(lon, lat, + z.data.real.T, kx=1, ky=1) + f2 = scipy.interpolate.RectBivariateSpline(lon, lat, + z.data.imag.T, kx=1, ky=1) z1.data.real = f1.ev(ilon,ilat) z1.data.imag = f2.ev(ilon,ilat) #-- mask invalid values @@ -270,29 +289,30 @@ def extract_netcdf_constants(ilon, ilat, else: #-- use scipy regular grid to interpolate values r1 = scipy.interpolate.RegularGridInterpolator((lat,lon), - z.data, method=METHOD, bounds_error=False, + z.data, method=kwargs['method'], bounds_error=False, fill_value=z1.fill_value) z1.data[:] = r1.__call__(np.c_[ilat,ilon]) #-- mask invalid values z1.mask[:] |= np.copy(D.mask) z1.data[z1.mask] = z1.fill_value #-- extrapolate data using nearest-neighbors - if EXTRAPOLATE and np.any(z1.mask): + if kwargs['extrapolate'] and np.any(z1.mask): #-- find invalid data points inv, = np.nonzero(z1.mask) #-- replace invalid values with nan z[z.mask] = np.nan #-- extrapolate points within cutoff of valid model points - z1[inv] = nearest_extrap(lon,lat,z,ilon[inv],ilat[inv], - dtype=z.dtype,cutoff=CUTOFF) + z1[inv] = nearest_extrap(lon, lat, z, ilon[inv], ilat[inv], + dtype=z.dtype, cutoff=kwargs['cutoff']) #-- amplitude and phase of the constituent ampl.data[:,i] = np.abs(z1.data) ampl.mask[:,i] = np.copy(z1.mask) ph.data[:,i] = np.arctan2(-np.imag(z1.data),np.real(z1.data)) ph.mask[:,i] = np.copy(z1.mask) - elif TYPE in ('U','u','V','v'): + elif kwargs['type'] in ('U','u','V','v'): #-- read constituent from transport file - tr,con = read_transport_file(model_file, TYPE, GZIP=GZIP) + tr,con = read_transport_file(model_file, kwargs['type'], + compressed=kwargs['compressed']) #-- append constituent to list constituents.append(con) #-- replace original values with extend matrices @@ -302,16 +322,17 @@ def extract_netcdf_constants(ilon, ilat, #-- interpolate amplitude and phase of the constituent tr1 = np.ma.zeros((npts),dtype=tr.dtype) tr1.mask = np.zeros((npts),dtype=bool) - if (METHOD == 'bilinear'): - 
tr1.data[:]=bilinear_interp(lon,lat,tr,ilon,ilat,dtype=tr.dtype) + if (kwargs['method'] == 'bilinear'): + tr1.data[:]=bilinear_interp(lon, lat, tr, ilon, ilat, + dtype=tr.dtype) #-- mask invalid values tr1.mask[:] |= np.copy(D.mask) tr1.data[tr1.mask] = tr1.fill_value - elif (METHOD == 'spline'): - f1 = scipy.interpolate.RectBivariateSpline(lon,lat, - tr.data.real.T,kx=1,ky=1) - f2 = scipy.interpolate.RectBivariateSpline(lon,lat, - tr.data.imag.T,kx=1,ky=1) + elif (kwargs['method'] == 'spline'): + f1 = scipy.interpolate.RectBivariateSpline(lon, lat, + tr.data.real.T, kx=1, ky=1) + f2 = scipy.interpolate.RectBivariateSpline(lon, lat, + tr.data.imag.T, kx=1, ky=1) tr1.data.real = f1.ev(ilon,ilat) tr1.data.imag = f2.ev(ilon,ilat) #-- mask invalid values @@ -320,21 +341,21 @@ def extract_netcdf_constants(ilon, ilat, else: #-- use scipy regular grid to interpolate values r1 = scipy.interpolate.RegularGridInterpolator((lat,lon), - tr.data, method=METHOD, bounds_error=False, + tr.data, method=kwargs['method'], bounds_error=False, fill_value=tr1.fill_value) tr1.data[:] = r1.__call__(np.c_[ilat,ilon]) #-- mask invalid values tr1.mask[:] |= np.copy(D.mask) tr1.data[tr1.mask] = tr1.fill_value #-- extrapolate data using nearest-neighbors - if EXTRAPOLATE and np.any(tr1.mask): + if kwargs['extrapolate'] and np.any(tr1.mask): #-- find invalid data points inv, = np.nonzero(tr1.mask) #-- replace invalid values with nan tr[tr.mask] = np.nan #-- extrapolate points within cutoff of valid model points - tr1[inv] = nearest_extrap(lon,lat,tr,ilon[inv],ilat[inv], - dtype=tr.dtype,cutoff=CUTOFF) + tr1[inv] = nearest_extrap(lon, lat, tr, ilon[inv], ilat[inv], + dtype=tr.dtype, cutoff=kwargs['cutoff']) #-- convert units #-- amplitude and phase of the constituent ampl.data[:,i] = np.abs(tr1.data)/unit_conv @@ -343,12 +364,12 @@ def extract_netcdf_constants(ilon, ilat, ph.mask[:,i] = np.copy(tr1.mask) #-- convert amplitude from input units to meters - amplitude = ampl*SCALE + amplitude = 
ampl*kwargs['scale'] #-- convert phase to degrees phase = ph*180.0/np.pi phase[phase < 0] += 360.0 #-- return the interpolated values - return (amplitude,phase,D,constituents) + return (amplitude, phase, D, constituents) #-- PURPOSE: wrapper function to extend an array def extend_array(input_array,step_size): @@ -398,7 +419,7 @@ def extend_matrix(input_matrix): return temp #-- PURPOSE: read grid file -def read_netcdf_grid(input_file, TYPE, GZIP=False): +def read_netcdf_grid(input_file, variable, **kwargs): """ Read grid file to extract model coordinates and bathymetry @@ -406,7 +427,7 @@ def read_netcdf_grid(input_file, TYPE, GZIP=False): ---------- input_file: str input grid file - TYPE: str + variable: str Tidal variable to read - ``'z'``: heights @@ -415,8 +436,8 @@ def read_netcdf_grid(input_file, TYPE, GZIP=False): - ``'v'``: vertical transport velocities - ``'V'``: vertical depth-averaged transport - GZIP: bool, default False - input netCDF4 file is compressed + compressed: bool, default False + Input file is gzip compressed Returns ------- @@ -427,10 +448,12 @@ def read_netcdf_grid(input_file, TYPE, GZIP=False): bathymetry: float model bathymetry """ + #-- set default keyword arguments + kwargs.setdefault('compressed', False) #-- read the netcdf format tide grid file #-- reading a combined global solution with localized solutions - if GZIP: - #-- read GZIP file + if kwargs['compressed']: + #-- read gzipped netCDF4 file f = gzip.open(os.path.expanduser(input_file),'rb') fileID=netCDF4.Dataset(uuid.uuid4().hex,'r',memory=f.read()) else: @@ -441,19 +464,19 @@ def read_netcdf_grid(input_file, TYPE, GZIP=False): #-- allocate numpy masked array for bathymetry bathymetry = np.ma.zeros((ny,nx)) #-- read bathymetry and coordinates for variable type - if (TYPE == 'z'): + if (variable == 'z'): #-- get bathymetry at nodes bathymetry.data[:,:] = fileID.variables['hz'][:,:].T #-- read latitude and longitude at z-nodes lon = fileID.variables['lon_z'][:].copy() lat = 
fileID.variables['lat_z'][:].copy() - elif TYPE in ('U','u'): + elif variable in ('U','u'): #-- get bathymetry at u-nodes bathymetry.data[:,:] = fileID.variables['hu'][:,:].T #-- read latitude and longitude at u-nodes lon = fileID.variables['lon_u'][:].copy() lat = fileID.variables['lat_u'][:].copy() - elif TYPE in ('V','v'): + elif variable in ('V','v'): #-- get bathymetry at v-nodes bathymetry.data[:,:] = fileID.variables['hv'][:,:].T #-- read latitude and longitude at v-nodes @@ -463,12 +486,12 @@ def read_netcdf_grid(input_file, TYPE, GZIP=False): bathymetry.mask = (bathymetry.data == 0.0) #-- close the grid file fileID.close() - f.close() if GZIP else None + f.close() if kwargs['compressed'] else None return (lon,lat,bathymetry) #-- PURPOSE: read elevation file to extract real and imaginary components for #-- constituent -def read_elevation_file(input_file, GZIP=False): +def read_elevation_file(input_file, **kwargs): """ Read elevation file to extract real and imaginary components for constituent @@ -476,8 +499,8 @@ def read_elevation_file(input_file, GZIP=False): ---------- input_file: str input elevation file - GZIP: bool, default False - input netCDF4 files are compressed + compressed: bool, default False + Input file is gzip compressed Returns ------- @@ -486,9 +509,12 @@ def read_elevation_file(input_file, GZIP=False): con: str tidal constituent ID """ + #-- set default keyword arguments + kwargs.setdefault('compressed', False) #-- read the netcdf format tide elevation file #-- reading a combined global solution with localized solutions - if GZIP: + if kwargs['compressed']: + #-- read gzipped netCDF4 file f = gzip.open(os.path.expanduser(input_file),'rb') fileID = netCDF4.Dataset(uuid.uuid4().hex,'r',memory=f.read()) else: @@ -505,13 +531,13 @@ def read_elevation_file(input_file, GZIP=False): h.data.imag[:,:] = fileID.variables['hIm'][:,:].T #-- close the file fileID.close() - f.close() if GZIP else None + f.close() if kwargs['compressed'] else None #-- 
return the elevation and constituent return (h,con.strip()) #-- PURPOSE: read transport file to extract real and imaginary components for #-- constituent -def read_transport_file(input_file, TYPE, GZIP=False): +def read_transport_file(input_file, variable, **kwargs): """ Read transport file to extract real and imaginary components for constituent @@ -519,7 +545,7 @@ def read_transport_file(input_file, TYPE, GZIP=False): ---------- input_file: str input transport file - TYPE: str + variable: str Tidal variable to read - ``'u'``: horizontal transport velocities @@ -527,8 +553,8 @@ def read_transport_file(input_file, TYPE, GZIP=False): - ``'v'``: vertical transport velocities - ``'V'``: vertical depth-averaged transport - GZIP: bool, default False - input netCDF4 files are compressed + compressed: bool, default False + Input file is gzip compressed Returns ------- @@ -537,9 +563,12 @@ def read_transport_file(input_file, TYPE, GZIP=False): con: str tidal constituent ID """ - #-- read the netcdf format tide grid file + #-- set default keyword arguments + kwargs.setdefault('compressed', False) + #-- read the netcdf format tide transport file #-- reading a combined global solution with localized solutions - if GZIP: + if kwargs['compressed']: + #-- read gzipped netCDF4 file f = gzip.open(os.path.expanduser(input_file),'rb') fileID = netCDF4.Dataset(uuid.uuid4().hex,'r',memory=f.read()) else: @@ -552,14 +581,14 @@ def read_transport_file(input_file, TYPE, GZIP=False): #-- real and imaginary components of transport tr = np.ma.zeros((ny,nx),dtype=np.complex64) tr.mask = np.zeros((ny,nx),dtype=bool) - if TYPE in ('U','u'): + if variable in ('U','u'): tr.data.real[:,:] = fileID.variables['uRe'][:,:].T tr.data.imag[:,:] = fileID.variables['uIm'][:,:].T - elif TYPE in ('V','v'): + elif variable in ('V','v'): tr.data.real[:,:] = fileID.variables['vRe'][:,:].T tr.data.imag[:,:] = fileID.variables['vIm'][:,:].T #-- close the file fileID.close() - f.close() if GZIP else None + 
f.close() if kwargs['compressed'] else None #-- return the transport components and constituent return (tr,con.strip()) diff --git a/pyTMD/read_tide_model.py b/pyTMD/read_tide_model.py index 4460366c..36c7c5eb 100644 --- a/pyTMD/read_tide_model.py +++ b/pyTMD/read_tide_model.py @@ -18,23 +18,24 @@ EPSG: projection of tide model data OPTIONS: - TYPE: tidal variable to run + type: tidal variable to run z: heights u: horizontal transport velocities U: horizontal depth-averaged transport v: vertical transport velocities V: vertical depth-averaged transport - METHOD: interpolation method + method: interpolation method bilinear: quick bilinear interpolation spline: scipy bivariate spline interpolation linear, nearest: scipy regular grid interpolations - EXTRAPOLATE: extrapolate model using nearest-neighbors - CUTOFF: extrapolation cutoff in kilometers + extrapolate: extrapolate model using nearest-neighbors + cutoff: extrapolation cutoff in kilometers set to np.inf to extrapolate for all points - GRID: binary file type to read + grid: binary file type to read ATLAS: reading a global solution with localized solutions ESR: combined global or local netCDF4 solution OTIS: combined global or local solution + apply_flexure: apply flexure scaling factor OUTPUTS: amplitude: amplitudes of tidal constituents @@ -58,6 +59,7 @@ UPDATE HISTORY: Updated 05/2022: add functions for using ESR netCDF4 format models + changed keyword arguments to camel case Updated 04/2022: updated docstrings to numpy documentation format use longcomplex data format to be windows compliant Updated 03/2022: invert tide mask to be True for invalid points @@ -82,20 +84,22 @@ Updated 08/2020: check that interpolated points are within range of model replaced griddata interpolation with scipy regular grid interpolators Updated 07/2020: added function docstrings. separate bilinear interpolation - update griddata interpolation. changed TYPE variable to keyword argument + update griddata interpolation. 
changed type variable to keyword argument Updated 06/2020: output currents as numpy masked arrays use argmin and argmax in bilinear interpolation Updated 11/2019: interpolate heights and fluxes to numpy masked arrays Updated 09/2019: output as numpy masked arrays instead of nan-filled arrays Updated 01/2019: decode constituents for Python3 compatibility - Updated 08/2018: added option GRID for using ATLAS outputs that + Updated 08/2018: added option grid for using ATLAS outputs that combine both global and localized tidal solutions added multivariate spline interpolation option Updated 07/2018: added different interpolation methods Updated 09/2017: Adapted for Python """ import os +import copy import netCDF4 +import warnings import numpy as np import scipy.interpolate from pyTMD.convert_ll_xy import convert_ll_xy @@ -107,11 +111,7 @@ def extract_tidal_constants(ilon, ilat, grid_file=None, model_file=None, EPSG=None, - TYPE='z', - METHOD='spline', - EXTRAPOLATE=False, - CUTOFF=10.0, - GRID='OTIS'): + **kwargs): """ Reads files for an OTIS-formatted tidal model @@ -131,7 +131,7 @@ def extract_tidal_constants(ilon, ilat, model file containing each constituent EPSG: str or NoneType, default None, projection of tide model data - TYPE: str, default 'z' + type: str, default 'z' Tidal variable to read - ``'z'``: heights @@ -139,24 +139,26 @@ def extract_tidal_constants(ilon, ilat, - ``'U'``: horizontal depth-averaged transport - ``'v'``: vertical transport velocities - ``'V'``: vertical depth-averaged transport - METHOD: str, default 'spline' + method: str, default 'spline' Interpolation method - ``'bilinear'``: quick bilinear interpolation - ``'spline'``: scipy bivariate spline interpolation - ``'linear'``, ``'nearest'``: scipy regular grid interpolations - EXTRAPOLATE: bool, default False + extrapolate: bool, default False Extrapolate model using nearest-neighbors - CUTOFF: float, default 10.0 + cutoff: float, default 10.0 Extrapolation cutoff in kilometers Set to np.inf 
to extrapolate for all points - GRID: str, default 'OTIS' + grid: str, default 'OTIS' Tide model file type to read - ``'ATLAS'``: reading a global solution with localized solutions - ``'ESR'``: combined global or local netCDF4 solution - ``'OTIS'``: combined global or local solution + apply_flexure: bool, default False + apply flexure scaling factor Returns ------- @@ -169,16 +171,34 @@ def extract_tidal_constants(ilon, ilat, constituents: list list of model constituents """ + #-- set default keyword arguments + kwargs.setdefault('type', 'z') + kwargs.setdefault('method', 'spline') + kwargs.setdefault('extrapolate', False) + kwargs.setdefault('cutoff', 10.0) + kwargs.setdefault('grid', 'OTIS') + kwargs.setdefault('apply_flexure', False) + #-- raise warnings for deprecated keyword arguments + deprecated_keywords = dict(TYPE='type',METHOD='method', + EXTRAPOLATE='extrapolate',CUTOFF='cutoff',GRID='grid') + for old,new in deprecated_keywords.items(): + if old in kwargs.keys(): + warnings.warn("""Deprecated keyword argument {0}. 
+ Changed to '{1}'""".format(old,new), + DeprecationWarning) + #-- set renamed argument to not break workflows + kwargs[new] = copy.copy(kwargs[old]) + #-- check that grid file is accessible if not os.access(os.path.expanduser(grid_file), os.F_OK): raise FileNotFoundError(os.path.expanduser(grid_file)) #-- read the OTIS-format tide grid file - if (GRID == 'ATLAS'): + if (kwargs['grid'] == 'ATLAS'): #-- if reading a global solution with localized solutions x0,y0,hz0,mz0,iob,dt,pmask,local = read_atlas_grid(grid_file) xi,yi,hz = combine_atlas_model(x0,y0,hz0,pmask,local,VARIABLE='depth') mz = create_atlas_mask(x0,y0,mz0,local,VARIABLE='depth') - elif (GRID == 'ESR'): + elif (kwargs['grid'] == 'ESR'): #-- if reading a single ESR netCDF4 solution xi,yi,hz,mz,sf = read_netcdf_grid(grid_file) else: @@ -193,19 +213,19 @@ def extract_tidal_constants(ilon, ilat, dx = xi[1] - xi[0] dy = yi[1] - yi[0] - if (TYPE != 'z'): + if (kwargs['type'] != 'z'): mz,mu,mv = Muv(hz) hu,hv = Huv(hz) #-- if global: extend limits - GLOBAL = False + global_grid = False #-- replace original values with extend arrays/matrices if ((xi[-1] - xi[0]) == (360.0 - dx)) & (EPSG == '4326'): xi = extend_array(xi, dx) hz = extend_matrix(hz) mz = extend_matrix(mz) - #-- set global flag - GLOBAL = True + #-- set global grid flag + global_grid = True #-- adjust longitudinal convention of input latitude and longitude #-- to fit tide model convention @@ -218,9 +238,9 @@ def extract_tidal_constants(ilon, ilat, #-- masks zero values hz = np.ma.array(hz,mask=(hz==0)) - if (TYPE != 'z'): + if (kwargs['type'] != 'z'): #-- replace original values with extend matrices - if GLOBAL: + if global_grid: hu = extend_matrix(hu) hv = extend_matrix(hv) mu = extend_matrix(mu) @@ -230,48 +250,48 @@ def extract_tidal_constants(ilon, ilat, hv = np.ma.array(hv,mask=(hv==0)) #-- interpolate depth and mask to output points - if (METHOD == 'bilinear'): + if (kwargs['method'] == 'bilinear'): #-- use quick bilinear to interpolate 
values - D = bilinear_interp(xi,yi,hz,x,y) - mz1 = bilinear_interp(xi,yi,mz,x,y) + D = bilinear_interp(xi, yi, hz, x, y) + mz1 = bilinear_interp(xi, yi, mz, x, y) mz1 = np.ceil(mz1).astype(mz.dtype) - if (TYPE != 'z'): - mu1 = bilinear_interp(xi,yi,mu,x,y) + if (kwargs['type'] != 'z'): + mu1 = bilinear_interp(xi, yi, mu, x, y) mu1 = np.ceil(mu1).astype(mu.dtype) - mv1 = bilinear_interp(xi,yi,mv,x,y) + mv1 = bilinear_interp(xi, yi, mv, x, y) mv1 = np.ceil(mv1).astype(mz.dtype) - elif (METHOD == 'spline'): + elif (kwargs['method'] == 'spline'): #-- use scipy bivariate splines to interpolate values - f1=scipy.interpolate.RectBivariateSpline(xi,yi,hz.T,kx=1,ky=1) - f2=scipy.interpolate.RectBivariateSpline(xi,yi,mz.T,kx=1,ky=1) + f1=scipy.interpolate.RectBivariateSpline(xi, yi, hz.T, kx=1, ky=1) + f2=scipy.interpolate.RectBivariateSpline(xi, yi, mz.T, kx=1, ky=1) D = f1.ev(x,y) mz1 = np.ceil(f2.ev(x,y)).astype(mz.dtype) - if (TYPE != 'z'): - f3=scipy.interpolate.RectBivariateSpline(xi,yi,mu.T,kx=1,ky=1) - f4=scipy.interpolate.RectBivariateSpline(xi,yi,mv.T,kx=1,ky=1) + if (kwargs['type'] != 'z'): + f3=scipy.interpolate.RectBivariateSpline(xi, yi, mu.T, kx=1, ky=1) + f4=scipy.interpolate.RectBivariateSpline(xi, yi, mv.T, kx=1, ky=1) mu1 = np.ceil(f3.ev(x,y)).astype(mu.dtype) mv1 = np.ceil(f4.ev(x,y)).astype(mv.dtype) else: #-- use scipy regular grid to interpolate values for a given method - r1 = scipy.interpolate.RegularGridInterpolator((yi,xi),hz, - method=METHOD,bounds_error=False) - r2 = scipy.interpolate.RegularGridInterpolator((yi,xi),mz, - method=METHOD,bounds_error=False,fill_value=0) + r1 = scipy.interpolate.RegularGridInterpolator((yi,xi), hz, + method=kwargs['method'], bounds_error=False) + r2 = scipy.interpolate.RegularGridInterpolator((yi,xi), mz, + method=kwargs['method'], bounds_error=False, fill_value=0) D = r1.__call__(np.c_[y,x]) mz1 = np.ceil(r2.__call__(np.c_[y,x])).astype(mz.dtype) - if (TYPE != 'z'): - r3 = 
scipy.interpolate.RegularGridInterpolator((yi,xi),mu, - method=METHOD,bounds_error=False,fill_value=0) - r4 = scipy.interpolate.RegularGridInterpolator((yi,xi),mv, - method=METHOD,bounds_error=False,fill_value=0) + if (kwargs['type'] != 'z'): + r3 = scipy.interpolate.RegularGridInterpolator((yi,xi), mu, + method=kwargs['method'], bounds_error=False, fill_value=0) + r4 = scipy.interpolate.RegularGridInterpolator((yi,xi), mv, + method=kwargs['method'], bounds_error=False, fill_value=0) mu1 = np.ceil(r3.__call__(np.c_[y,x])).astype(mu.dtype) mv1 = np.ceil(r4.__call__(np.c_[y,x])).astype(mv.dtype) - #-- u and v are velocities in cm/s - if TYPE in ('v','u'): + #-- u and v: velocities in cm/s + if kwargs['type'] in ('v','u'): unit_conv = (D/100.0) - #-- U and V are transports in m^2/s - elif TYPE in ('V','U'): + #-- U and V: transports in m^2/s + elif kwargs['type'] in ('V','U'): unit_conv = 1.0 #-- read and interpolate each constituent @@ -279,7 +299,7 @@ def extract_tidal_constants(ilon, ilat, constituents = [read_constituents(m)[0].pop() for m in model_file] nc = len(constituents) else: - constituents,nc = read_constituents(model_file, GRID=GRID) + constituents,nc = read_constituents(model_file, grid=kwargs['grid']) #-- number of output data points npts = len(D) amplitude = np.ma.zeros((npts,nc)) @@ -287,39 +307,43 @@ def extract_tidal_constants(ilon, ilat, ph = np.ma.zeros((npts,nc)) ph.mask = np.zeros((npts,nc),dtype=bool) for i,c in enumerate(constituents): - if (TYPE == 'z'): + if (kwargs['type'] == 'z'): #-- read constituent from elevation file - if (GRID == 'ATLAS'): + if (kwargs['grid'] == 'ATLAS'): z0,zlocal = read_atlas_elevation(model_file, i, c) xi,yi,z = combine_atlas_model(x0, y0, z0, pmask, zlocal, - VARIABLE='z') - elif (GRID == 'ESR'): - z = read_netcdf_file(model_file, i, TYPE=TYPE) + variable='z') + elif (kwargs['grid'] == 'ESR'): + z = read_netcdf_file(model_file, i, variable='z') + #-- apply flexure scaling + if kwargs['apply_flexure']: + z *= sf 
elif isinstance(model_file,list): z = read_elevation_file(model_file[i], 0) else: z = read_elevation_file(model_file, i) #-- replace original values with extend matrices - if GLOBAL: + if global_grid: z = extend_matrix(z) #-- copy mask to elevation z.mask |= mz.astype(bool) #-- interpolate amplitude and phase of the constituent z1 = np.ma.zeros((npts),dtype=z.dtype) - if (METHOD == 'bilinear'): + if (kwargs['method'] == 'bilinear'): #-- replace zero values with nan z[(z==0) | z.mask] = np.nan #-- use quick bilinear to interpolate values - z1.data[:] = bilinear_interp(xi,yi,z,x,y,dtype=np.longcomplex) + z1.data[:] = bilinear_interp(xi, yi, z, x, y, + dtype=np.longcomplex) #-- replace nan values with fill_value z1.mask = (np.isnan(z1.data) | (mz1.astype(bool))) z1.data[z1.mask] = z1.fill_value - elif (METHOD == 'spline'): + elif (kwargs['method'] == 'spline'): #-- use scipy bivariate splines to interpolate values f1 = scipy.interpolate.RectBivariateSpline(xi,yi, - z.real.T,kx=1,ky=1) + z.real.T, kx=1, ky=1) f2 = scipy.interpolate.RectBivariateSpline(xi,yi, - z.imag.T,kx=1,ky=1) + z.imag.T, kx=1, ky=1) z1.data.real = f1.ev(x,y) z1.data.imag = f2.ev(x,y) #-- replace zero values with fill_value @@ -327,41 +351,45 @@ def extract_tidal_constants(ilon, ilat, z1.data[z1.mask] = z1.fill_value else: #-- use scipy regular grid to interpolate values - r1 = scipy.interpolate.RegularGridInterpolator((yi,xi),z, - method=METHOD,bounds_error=False,fill_value=z1.fill_value) + r1 = scipy.interpolate.RegularGridInterpolator((yi,xi), z, + method=kwargs['method'], + bounds_error=False, + fill_value=z1.fill_value) z1 = np.ma.zeros((npts),dtype=z.dtype) z1.data[:] = r1.__call__(np.c_[y,x]) #-- replace invalid values with fill_value z1.mask = (z1.data == z1.fill_value) | (mz1.astype(bool)) z1.data[z1.mask] = z1.fill_value #-- extrapolate data using nearest-neighbors - if EXTRAPOLATE and np.any(z1.mask): + if kwargs['extrapolate'] and np.any(z1.mask): #-- find invalid data points inv, = 
np.nonzero(z1.mask) #-- replace zero values with nan z[(z==0) | z.mask] = np.nan #-- extrapolate points within cutoff of valid model points - z1[inv] = nearest_extrap(xi,yi,z,x[inv],y[inv], - dtype=np.longcomplex,cutoff=CUTOFF,EPSG=EPSG) + z1[inv] = nearest_extrap(xi, yi, z, x[inv], y[inv], + dtype=np.longcomplex, + cutoff=kwargs['cutoff'], + EPSG=EPSG) #-- amplitude and phase of the constituent amplitude.data[:,i] = np.abs(z1.data) amplitude.mask[:,i] = np.copy(z1.mask) ph.data[:,i] = np.arctan2(-np.imag(z1.data),np.real(z1.data)) ph.mask[:,i] = np.copy(z1.mask) - elif TYPE in ('U','u'): + elif kwargs['type'] in ('U','u'): #-- read constituent from transport file - if (GRID == 'ATLAS'): + if (kwargs['grid'] == 'ATLAS'): u0,v0,uvlocal = read_atlas_transport(model_file, i, c) xi,yi,u = combine_atlas_model(x0, y0, u0, pmask, uvlocal, - VARIABLE='u') - elif (GRID == 'ESR'): - u = read_netcdf_file(model_file, i, TYPE=TYPE) + variable='u') + elif (kwargs['grid'] == 'ESR'): + u = read_netcdf_file(model_file, i, variable='u') elif isinstance(model_file,list): u,v = read_transport_file(model_file[i], 0) else: u,v = read_transport_file(model_file, i) #-- replace original values with extend matrices - if GLOBAL: + if global_grid: u = extend_matrix(u) #-- copy mask to u transports u.mask |= mu.astype(bool) @@ -369,19 +397,20 @@ def extract_tidal_constants(ilon, ilat, xu = xi - dx/2.0 #-- interpolate amplitude and phase of the constituent u1 = np.ma.zeros((npts),dtype=u.dtype) - if (METHOD == 'bilinear'): + if (kwargs['method'] == 'bilinear'): #-- replace zero values with nan u[(u==0) | u.mask] = np.nan #-- use quick bilinear to interpolate values - u1.data[:] = bilinear_interp(xu,yi,u,x,y,dtype=np.longcomplex) + u1.data[:] = bilinear_interp(xu, yi, u, x, y, + dtype=np.longcomplex) #-- replace nan values with fill_value u1.mask = (np.isnan(u1.data) | (mu1.astype(bool))) u1.data[u1.mask] = u1.fill_value - elif (METHOD == 'spline'): - f1 = 
scipy.interpolate.RectBivariateSpline(xu,yi, - u.real.T,kx=1,ky=1) - f2 = scipy.interpolate.RectBivariateSpline(xu,yi, - u.imag.T,kx=1,ky=1) + elif (kwargs['method'] == 'spline'): + f1 = scipy.interpolate.RectBivariateSpline(xu, yi, + u.real.T, kx=1, ky=1) + f2 = scipy.interpolate.RectBivariateSpline(xu, yi, + u.imag.T, kx=1, ky=1) u1.data.real = f1.ev(x,y) u1.data.imag = f2.ev(x,y) #-- replace zero values with fill_value @@ -389,41 +418,44 @@ def extract_tidal_constants(ilon, ilat, u1.data[u1.mask] = u1.fill_value else: #-- use scipy regular grid to interpolate values - r1 = scipy.interpolate.RegularGridInterpolator((yi,xu),u, - method=METHOD,bounds_error=False,fill_value=u1.fill_value) + r1 = scipy.interpolate.RegularGridInterpolator((yi,xu), u, + method=kwargs['method'], bounds_error=False, + fill_value=u1.fill_value) u1.data[:] = r1.__call__(np.c_[y,x]) #-- replace invalid values with fill_value u1.mask = (u1.data == u1.fill_value) | (mu1.astype(bool)) u1.data[u1.mask] = u1.fill_value #-- extrapolate data using nearest-neighbors - if EXTRAPOLATE and np.any(u1.mask): + if kwargs['extrapolate'] and np.any(u1.mask): #-- find invalid data points inv, = np.nonzero(u1.mask) #-- replace zero values with nan u[(u==0) | u.mask] = np.nan #-- extrapolate points within cutoff of valid model points - u1[inv] = nearest_extrap(xu,yi,u,x[inv],y[inv], - dtype=np.longcomplex,cutoff=CUTOFF,EPSG=EPSG) + u1[inv] = nearest_extrap(xu, yi, u, x[inv], y[inv], + dtype=np.longcomplex, + cutoff=kwargs['cutoff'], + EPSG=EPSG) #-- convert units #-- amplitude and phase of the constituent amplitude.data[:,i] = np.abs(u1.data)/unit_conv amplitude.mask[:,i] = np.copy(u1.mask) ph.data[:,i] = np.arctan2(-np.imag(u1),np.real(u1)) ph.mask[:,i] = np.copy(u1.mask) - elif TYPE in ('V','v'): + elif kwargs['type'] in ('V','v'): #-- read constituent from transport file - if (GRID == 'ATLAS'): + if (kwargs['grid'] == 'ATLAS'): u0,v0,uvlocal = read_atlas_transport(model_file, i, c) xi,yi,v = 
combine_atlas_model(x0, y0, v0, pmask, uvlocal, - VARIABLE='v') - elif (GRID == 'ESR'): - v = read_netcdf_file(model_file, i, TYPE=TYPE) + variable='v') + elif (kwargs['grid'] == 'ESR'): + v = read_netcdf_file(model_file, i, variable='v') elif isinstance(model_file,list): u,v = read_transport_file(model_file[i], 0) else: u,v = read_transport_file(model_file, i) #-- replace original values with extend matrices - if GLOBAL: + if global_grid: v = extend_matrix(v) #-- copy mask to v transports v.mask |= mv.astype(bool) @@ -431,19 +463,20 @@ yv = yi - dy/2.0 #-- interpolate amplitude and phase of the constituent v1 = np.ma.zeros((npts),dtype=v.dtype) - if (METHOD == 'bilinear'): + if (kwargs['method'] == 'bilinear'): #-- replace zero values with nan v[(v==0) | v.mask] = np.nan #-- use quick bilinear to interpolate values - v1.data[:] = bilinear_interp(xi,yv,v,x,y,dtype=np.longcomplex) + v1.data[:] = bilinear_interp(xi, yv, v, x, y, + dtype=np.longcomplex) #-- replace nan values with fill_value v1.mask = (np.isnan(v1.data) | (mv1.astype(bool))) v1.data[v1.mask] = v1.fill_value - elif (METHOD == 'spline'): - f1 = scipy.interpolate.RectBivariateSpline(xi,yv, - v.real.T,kx=1,ky=1) - f2 = scipy.interpolate.RectBivariateSpline(xi,yv, - v.imag.T,kx=1,ky=1) + elif (kwargs['method'] == 'spline'): + f1 = scipy.interpolate.RectBivariateSpline(xi, yv, + v.real.T, kx=1, ky=1) + f2 = scipy.interpolate.RectBivariateSpline(xi, yv, + v.imag.T, kx=1, ky=1) v1.data.real = f1.ev(x,y) v1.data.imag = f2.ev(x,y) #-- replace zero values with fill_value @@ -451,21 +484,25 @@ v1.data[v1.mask] = v1.fill_value else: #-- use scipy regular grid to interpolate values - r1 = scipy.interpolate.RegularGridInterpolator((yv,xi),v, - method=METHOD,bounds_error=False,fill_value=v1.fill_value) + r1 = scipy.interpolate.RegularGridInterpolator((yv,xi), v, + method=kwargs['method'], + bounds_error=False, + fill_value=v1.fill_value)
v1.data[:] = r1.__call__(np.c_[y,x]) #-- replace invalid values with fill_value v1.mask = (v1.data == v1.fill_value) | (mv1.astype(bool)) v1.data[v1.mask] = v1.fill_value #-- extrapolate data using nearest-neighbors - if EXTRAPOLATE and np.any(v1.mask): + if kwargs['extrapolate'] and np.any(v1.mask): #-- find invalid data points inv, = np.nonzero(v1.mask) #-- replace zero values with nan v[(v==0) | v.mask] = np.nan #-- extrapolate points within cutoff of valid model points - v1[inv] = nearest_extrap(x,yv,v,x[inv],y[inv], - dtype=np.longcomplex,cutoff=CUTOFF,EPSG=EPSG) + v1[inv] = nearest_extrap(xi, yv, v, x[inv], y[inv], + dtype=np.longcomplex, + cutoff=kwargs['cutoff'], + EPSG=EPSG) #-- convert units #-- amplitude and phase of the constituent amplitude.data[:,i] = np.abs(v1.data)/unit_conv @@ -731,15 +768,16 @@ def read_netcdf_grid(input_file): mz = fileID.variables['mask'][::-1,:].copy() #-- read flexure and convert from percent to scale factor sf = fileID.variables['flexure'][::-1,:]/100.0 - #-- update bathymetry mask + #-- update bathymetry and scale factor masks hz.mask = (hz.data == 0.0) + sf.mask = (sf.data == 0.0) #-- close the grid file fileID.close() #-- return values return (x,y,hz,mz,sf) #-- PURPOSE: read list of constituents from an elevation or transport file -def read_constituents(input_file, GRID='OTIS'): +def read_constituents(input_file, grid='OTIS'): """ Read the list of constituents from an elevation or transport file @@ -747,7 +785,7 @@ def read_constituents(input_file, GRID='OTIS'): ---------- input_file: str input tidal file - GRID: str, default 'OTIS' + grid: str, default 'OTIS' Tide model file type to read - ``'ATLAS'``: reading a global solution with localized solutions @@ -764,7 +802,7 @@ def read_constituents(input_file, GRID='OTIS'): #-- check that model file is accessible if not os.access(os.path.expanduser(input_file), os.F_OK): raise FileNotFoundError(os.path.expanduser(input_file)) - if (GRID == 'ESR'): + if (grid == 'ESR'): #-- 
open the netCDF4 file fid = netCDF4.Dataset(os.path.expanduser(input_file),'r') constituents = fid.variables['cons'].long_name.split() @@ -1266,7 +1304,7 @@ def combine_atlas_model(xi, yi, zi, pmask, local, VARIABLE=None): #-- PURPOSE: read netCDF4 file to extract real and imaginary components for #-- constituent -def read_netcdf_file(input_file, ic, TYPE=None): +def read_netcdf_file(input_file, ic, variable=None): """ Read netCDF4 file to extract real and imaginary components for constituent @@ -1276,7 +1314,7 @@ def read_netcdf_file(input_file, ic, TYPE=None): input transport file ic: int index of consituent - TYPE: str or NoneType, default None + variable: str or NoneType, default None Tidal variable to read - ``'z'``: heights @@ -1299,15 +1337,15 @@ def read_netcdf_file(input_file, ic, TYPE=None): hc = np.ma.zeros((ny,nx),dtype=np.complex64) hc.mask = np.zeros((ny,nx),dtype=bool) #-- extract constituent - if (TYPE == 'z'): + if (variable == 'z'): #-- convert elevations from mm to m hc.data.real[:,:] = fileID.variables['hRe'][ic,::-1,:]/1e3 hc.data.imag[:,:] = -fileID.variables['hIm'][ic,::-1,:]/1e3 - elif TYPE in ('U','u'): + elif variable in ('U','u'): #-- convert transports from cm^2/s to m^2/s hc.data.real[:,:] = fileID.variables['uRe'][ic,::-1,:]/1e4 hc.data.imag[:,:] = -fileID.variables['uIm'][ic,::-1,:]/1e4 - elif TYPE in ('V','v'): + elif variable in ('V','v'): #-- convert transports from cm^2/s to m^2/s hc.data.real[:,:] = fileID.variables['vRe'][ic,::-1,:]/1e4 hc.data.imag[:,:] = -fileID.variables['vIm'][ic,::-1,:]/1e4 diff --git a/pyTMD/time.py b/pyTMD/time.py index e5f5e87f..c0910769 100644 --- a/pyTMD/time.py +++ b/pyTMD/time.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" time.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (05/2022) Utilities for calculating time operations PYTHON DEPENDENCIES: @@ -16,6 +16,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 05/2022: changed keyword 
arguments to camel case Updated 04/2022: updated docstrings to numpy documentation format Updated 04/2021: updated NIST ftp server url for leap-seconds.list Updated 03/2021: replaced numpy bool/int to prevent deprecation warnings @@ -31,7 +32,9 @@ """ import os import re +import copy import logging +import warnings import datetime import numpy as np import dateutil.parser @@ -385,7 +388,7 @@ def convert_calendar_decimal(year, month, day=None, hour=None, minute=None, return t_date #-- PURPOSE: Converts from Julian day to calendar date and time -def convert_julian(JD, ASTYPE=None, FORMAT='dict'): +def convert_julian(JD, **kwargs): """ Converts from Julian day to calendar date and time @@ -393,9 +396,9 @@ def convert_julian(JD, ASTYPE=None, FORMAT='dict'): ---------- JD: float Julian Day (days since 01-01-4713 BCE at 12:00:00) - ASTYPE: str or NoneType, default None + astype: str or NoneType, default None convert output to variable type - FORMAT: str, default 'dict' + format: str, default 'dict' format of output variables - ``'dict'``: dictionary with variable keys @@ -426,6 +429,18 @@ def convert_julian(JD, ASTYPE=None, FORMAT='dict'): Calendar Dates", Quarterly Journal of the Royal Astronomical Society, 25(1), 1984. """ + #-- set default keyword arguments + kwargs.setdefault('astype', None) + kwargs.setdefault('format', 'dict') + #-- raise warnings for deprecated keyword arguments + deprecated_keywords = dict(ASTYPE='astype',FORMAT='format') + for old,new in deprecated_keywords.items(): + if old in kwargs.keys(): + warnings.warn("""Deprecated keyword argument {0}. 
+ Changed to '{1}'""".format(old,new), + DeprecationWarning) + #-- set renamed argument to not break workflows + kwargs[new] = copy.copy(kwargs[old]) #-- convert to array if only a single value was imported if (np.ndim(JD) == 0): @@ -458,13 +473,13 @@ def convert_julian(JD, ASTYPE=None, FORMAT='dict'): SECOND = (G - MINUTE/1440.0) * 86400.0 #-- convert all variables to output type (from float) - if ASTYPE is not None: - YEAR = YEAR.astype(ASTYPE) - MONTH = MONTH.astype(ASTYPE) - DAY = DAY.astype(ASTYPE) - HOUR = HOUR.astype(ASTYPE) - MINUTE = MINUTE.astype(ASTYPE) - SECOND = SECOND.astype(ASTYPE) + if kwargs['astype'] is not None: + YEAR = YEAR.astype(kwargs['astype']) + MONTH = MONTH.astype(kwargs['astype']) + DAY = DAY.astype(kwargs['astype']) + HOUR = HOUR.astype(kwargs['astype']) + MINUTE = MINUTE.astype(kwargs['astype']) + SECOND = SECOND.astype(kwargs['astype']) #-- if only a single value was imported initially: remove singleton dims if SINGLE_VALUE: @@ -476,12 +491,12 @@ def convert_julian(JD, ASTYPE=None, FORMAT='dict'): SECOND = SECOND.item(0) #-- return date variables in output format (default python dictionary) - if (FORMAT == 'dict'): + if (kwargs['format'] == 'dict'): return dict(year=YEAR, month=MONTH, day=DAY, hour=HOUR, minute=MINUTE, second=SECOND) - elif (FORMAT == 'tuple'): + elif (kwargs['format'] == 'tuple'): return (YEAR, MONTH, DAY, HOUR, MINUTE, SECOND) - elif (FORMAT == 'zip'): + elif (kwargs['format'] == 'zip'): return zip(YEAR, MONTH, DAY, HOUR, MINUTE, SECOND) #-- PURPOSE: Count number of leap seconds that have passed for each GPS time diff --git a/scripts/compute_LPET_ICESat2_ATL03.py b/scripts/compute_LPET_ICESat2_ATL03.py index 67e8ead6..93f74a07 100644 --- a/scripts/compute_LPET_ICESat2_ATL03.py +++ b/scripts/compute_LPET_ICESat2_ATL03.py @@ -400,7 +400,7 @@ def HDF5_ATL03_tide_write(IS2_atl03_tide, IS2_atl03_attrs, INPUT=None, time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, epoch1=(1980,1,6,0,0,0), 
epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) #-- convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,FORMAT='tuple') + YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') #-- add attributes with measurement date start, end and duration tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) diff --git a/scripts/compute_LPET_ICESat2_ATL06.py b/scripts/compute_LPET_ICESat2_ATL06.py index fae01cc3..db8f2b22 100644 --- a/scripts/compute_LPET_ICESat2_ATL06.py +++ b/scripts/compute_LPET_ICESat2_ATL06.py @@ -403,7 +403,7 @@ def HDF5_ATL06_tide_write(IS2_atl06_tide, IS2_atl06_attrs, INPUT=None, time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) #-- convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,FORMAT='tuple') + YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') #-- add attributes with measurement date start, end and duration tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) diff --git a/scripts/compute_LPET_ICESat2_ATL07.py b/scripts/compute_LPET_ICESat2_ATL07.py index 38f35efb..bf64158b 100644 --- a/scripts/compute_LPET_ICESat2_ATL07.py +++ b/scripts/compute_LPET_ICESat2_ATL07.py @@ -433,7 +433,7 @@ def HDF5_ATL07_tide_write(IS2_atl07_tide, IS2_atl07_attrs, INPUT=None, time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) #-- convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,FORMAT='tuple') + YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') #-- add attributes with measurement date start, end and duration tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), int(HH[0]), 
int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) diff --git a/scripts/compute_LPET_ICESat2_ATL10.py b/scripts/compute_LPET_ICESat2_ATL10.py index 721de797..4fb5d8b9 100644 --- a/scripts/compute_LPET_ICESat2_ATL10.py +++ b/scripts/compute_LPET_ICESat2_ATL10.py @@ -395,7 +395,7 @@ def HDF5_ATL10_tide_write(IS2_atl10_tide, IS2_atl10_attrs, INPUT=None, time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) #-- convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,FORMAT='tuple') + YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') #-- add attributes with measurement date start, end and duration tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) diff --git a/scripts/compute_LPET_ICESat2_ATL11.py b/scripts/compute_LPET_ICESat2_ATL11.py index 0b3b96c1..22c4efd3 100644 --- a/scripts/compute_LPET_ICESat2_ATL11.py +++ b/scripts/compute_LPET_ICESat2_ATL11.py @@ -584,7 +584,7 @@ def HDF5_ATL11_tide_write(IS2_atl11_tide, IS2_atl11_attrs, INPUT=None, time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) #-- convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,FORMAT='tuple') + YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') #-- add attributes with measurement date start, end and duration tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) diff --git a/scripts/compute_LPET_ICESat2_ATL12.py b/scripts/compute_LPET_ICESat2_ATL12.py index 5b63a759..1b6051c7 100644 --- a/scripts/compute_LPET_ICESat2_ATL12.py +++ b/scripts/compute_LPET_ICESat2_ATL12.py @@ -395,7 +395,7 @@ def HDF5_ATL12_tide_write(IS2_atl12_tide, IS2_atl12_attrs, INPUT=None, time_julian = 2400000.5 
+ pyTMD.time.convert_delta_time(gps_seconds - leaps, epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) #-- convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,FORMAT='tuple') + YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') #-- add attributes with measurement date start, end and duration tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) diff --git a/scripts/compute_LPET_icebridge_data.py b/scripts/compute_LPET_icebridge_data.py index b5942544..d2c6553a 100644 --- a/scripts/compute_LPET_icebridge_data.py +++ b/scripts/compute_LPET_icebridge_data.py @@ -522,7 +522,7 @@ def compute_LPET_icebridge_data(arg, VERBOSE=False, MODE=0o775): time_julian = 2400000.5 + pyTMD.time.convert_delta_time(time_range, epoch1=(1992,1,1,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0) #-- convert to calendar date - cal = pyTMD.time.convert_julian(time_julian,ASTYPE=int) + cal = pyTMD.time.convert_julian(time_julian,astype=int) #-- add attributes with measurement date start, end and duration args = (cal['hour'][0],cal['minute'][0],cal['second'][0]) fid.attrs['RangeBeginningTime'] = '{0:02d}:{1:02d}:{2:02d}'.format(*args) diff --git a/scripts/compute_LPT_ICESat_GLA12.py b/scripts/compute_LPT_ICESat_GLA12.py index 110fd6f9..d66d71fc 100644 --- a/scripts/compute_LPT_ICESat_GLA12.py +++ b/scripts/compute_LPT_ICESat_GLA12.py @@ -121,7 +121,7 @@ def compute_LPT_ICESat(FILE, VERBOSE=False, MODE=0o775): #-- J2000: seconds since 2000-01-01 12:00:00 UTC t = DS_UTCTime_40HZ[:]/86400.0 + 51544.5 #-- convert from MJD to calendar dates - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(t + 2400000.5,FORMAT='tuple') + YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(t + 2400000.5,format='tuple') #-- convert calendar dates into year decimal tdec = pyTMD.time.convert_calendar_decimal(YY,MM,day=DD, hour=HH,minute=MN,second=SS) diff --git 
a/scripts/compute_LPT_displacements.py b/scripts/compute_LPT_displacements.py index 122a6d68..4baa3cb6 100644 --- a/scripts/compute_LPT_displacements.py +++ b/scripts/compute_LPT_displacements.py @@ -235,7 +235,7 @@ def compute_LPT_displacements(input_file, output_file, FORMAT='csv', MJD = pyTMD.time.convert_delta_time(delta_time-leap_seconds, epoch1=epoch1, epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) #-- add offset to convert to Julian days and then convert to calendar dates - Y,M,D,h,m,s = pyTMD.time.convert_julian(2400000.5 + MJD, FORMAT='tuple') + Y,M,D,h,m,s = pyTMD.time.convert_julian(2400000.5 + MJD, format='tuple') #-- calculate time in year-decimal format time_decimal = pyTMD.time.convert_calendar_decimal(Y,M,day=D, hour=h,minute=m,second=s) diff --git a/scripts/compute_LPT_icebridge_data.py b/scripts/compute_LPT_icebridge_data.py index 38f0e304..f75b47ef 100644 --- a/scripts/compute_LPT_icebridge_data.py +++ b/scripts/compute_LPT_icebridge_data.py @@ -464,7 +464,7 @@ def compute_LPT_icebridge_data(arg, VERBOSE=False, MODE=0o775): #-- J2000: seconds since 2000-01-01 12:00:00 UTC t = dinput['time'][:]/86400.0 + 51544.5 #-- convert from MJD to calendar dates - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(t + 2400000.5,FORMAT='tuple') + YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(t + 2400000.5,format='tuple') #-- convert calendar dates into year decimal tdec = pyTMD.time.convert_calendar_decimal(YY,MM,day=DD, hour=HH,minute=MN,second=SS) @@ -604,7 +604,7 @@ def compute_LPT_icebridge_data(arg, VERBOSE=False, MODE=0o775): JD_start = np.min(t) + 2400000.5 JD_end = np.max(t) + 2400000.5 #-- convert to calendar date - cal = pyTMD.time.convert_julian(np.array([JD_start,JD_end]),ASTYPE=int) + cal = pyTMD.time.convert_julian(np.array([JD_start,JD_end]),astype=int) #-- add attributes with measurement date start, end and duration args = (cal['hour'][0],cal['minute'][0],cal['second'][0]) fid.attrs['RangeBeginningTime'] = '{0:02d}:{1:02d}:{2:02d}'.format(*args) diff 
--git a/scripts/compute_OPT_ICESat_GLA12.py b/scripts/compute_OPT_ICESat_GLA12.py index 07b49cbe..77af5d32 100644 --- a/scripts/compute_OPT_ICESat_GLA12.py +++ b/scripts/compute_OPT_ICESat_GLA12.py @@ -129,7 +129,7 @@ def compute_OPT_ICESat(FILE, METHOD=None, VERBOSE=False, MODE=0o775): #-- J2000: seconds since 2000-01-01 12:00:00 UTC t = DS_UTCTime_40HZ[:]/86400.0 + 51544.5 #-- convert from MJD to calendar dates - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(t + 2400000.5,FORMAT='tuple') + YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(t + 2400000.5,format='tuple') #-- convert calendar dates into year decimal tdec = pyTMD.time.convert_calendar_decimal(YY,MM,day=DD, hour=HH,minute=MN,second=SS) diff --git a/scripts/compute_OPT_displacements.py b/scripts/compute_OPT_displacements.py index 6108dcf3..019ec36d 100644 --- a/scripts/compute_OPT_displacements.py +++ b/scripts/compute_OPT_displacements.py @@ -254,7 +254,7 @@ def compute_OPT_displacements(input_file, output_file, FORMAT='csv', MJD = pyTMD.time.convert_delta_time(delta_time-leap_seconds, epoch1=epoch1, epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) #-- add offset to convert to Julian days and then convert to calendar dates - Y,M,D,h,m,s = pyTMD.time.convert_julian(2400000.5 + MJD, FORMAT='tuple') + Y,M,D,h,m,s = pyTMD.time.convert_julian(2400000.5 + MJD, format='tuple') #-- calculate time in year-decimal format time_decimal = pyTMD.time.convert_calendar_decimal(Y,M,day=D, hour=h,minute=m,second=s) diff --git a/scripts/compute_OPT_icebridge_data.py b/scripts/compute_OPT_icebridge_data.py index 2ec5af99..dec5761a 100644 --- a/scripts/compute_OPT_icebridge_data.py +++ b/scripts/compute_OPT_icebridge_data.py @@ -476,7 +476,7 @@ def compute_OPT_icebridge_data(arg,METHOD=None,VERBOSE=False,MODE=0o775): #-- J2000: seconds since 2000-01-01 12:00:00 UTC t = dinput['time'][:]/86400.0 + 51544.5 #-- convert from MJD to calendar dates - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(t + 2400000.5,FORMAT='tuple') + 
YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(t + 2400000.5,format='tuple') #-- convert calendar dates into year decimal tdec = pyTMD.time.convert_calendar_decimal(YY,MM,day=DD, hour=HH,minute=MN,second=SS) @@ -622,7 +622,7 @@ def compute_OPT_icebridge_data(arg,METHOD=None,VERBOSE=False,MODE=0o775): JD_start = np.min(t) + 2400000.5 JD_end = np.max(t) + 2400000.5 #-- convert to calendar date - cal = pyTMD.time.convert_julian(np.array([JD_start,JD_end]),ASTYPE=int) + cal = pyTMD.time.convert_julian(np.array([JD_start,JD_end]),astype=int) #-- add attributes with measurement date start, end and duration args = (cal['hour'][0],cal['minute'][0],cal['second'][0]) fid.attrs['RangeBeginningTime'] = '{0:02d}:{1:02d}:{2:02d}'.format(*args) diff --git a/scripts/compute_tidal_currents.py b/scripts/compute_tidal_currents.py index 4e5f1e79..c9318962 100755 --- a/scripts/compute_tidal_currents.py +++ b/scripts/compute_tidal_currents.py @@ -110,6 +110,7 @@ UPDATE HISTORY: Updated 05/2022: added ESR netCDF4 formats to list of model types + updated keyword arguments to read tide model programs Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models @@ -327,20 +328,20 @@ def compute_tidal_currents(tide_dir, input_file, output_file, if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(lon.flatten(), lat.flatten(), model.grid_file, model.model_file['u'], model.projection, - TYPE=t, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, - GRID=model.format) + type=t, method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, + grid=model.format) deltat = np.zeros((nt)) elif (model.format == 'netcdf'): amp,ph,D,c = extract_netcdf_constants(lon.flatten(), lat.flatten(), - model.grid_file, model.model_file[t], TYPE=t, METHOD=METHOD, - EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, SCALE=model.scale, - GZIP=model.compressed) + 
model.grid_file, model.model_file[t], type=t, method=METHOD, + extrapolate=EXTRAPOLATE, cutoff=CUTOFF, scale=model.scale, + compressed=model.compressed) deltat = np.zeros((nt)) elif (model.format == 'FES'): amp,ph = extract_FES_constants(lon.flatten(), lat.flatten(), - model.model_file[t], TYPE=t, VERSION=model.version, - METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, - SCALE=model.scale, GZIP=model.compressed) + model.model_file[t], type=t, version=model.version, + method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, + scale=model.scale, compressed=model.compressed) #-- available model constituents c = model.constituents #-- interpolate delta times from calendar dates to tide time @@ -357,9 +358,9 @@ def compute_tidal_currents(tide_dir, input_file, output_file, output[t].mask = np.zeros((ny,nx,nt),dtype=bool) for i in range(nt): TIDE = predict_tide(tide_time[i], hc, c, - DELTAT=deltat[i], CORRECTIONS=model.format) + deltat=deltat[i], corrections=model.format) MINOR = infer_minor_corrections(tide_time[i], hc, c, - DELTAT=deltat[i], CORRECTIONS=model.format) + deltat=deltat[i], corrections=model.format) #-- add major and minor components and reform grid output[t][:,:,i] = np.reshape((TIDE+MINOR), (ny,nx)) output[t].mask[:,:,i] = np.reshape((TIDE.mask | MINOR.mask), @@ -368,9 +369,9 @@ def compute_tidal_currents(tide_dir, input_file, output_file, output[t] = np.ma.zeros((nt), fill_value=fill_value) output[t].mask = np.any(hc.mask,axis=1) output[t].data[:] = predict_tide_drift(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) minor = infer_minor_corrections(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) output[t].data[:] += minor.data[:] #-- replace invalid values with fill value output[t].data[output[t].mask] = output[t].fill_value diff --git a/scripts/compute_tidal_elevations.py b/scripts/compute_tidal_elevations.py index 45606080..718b12d4 100755 
--- a/scripts/compute_tidal_elevations.py +++ b/scripts/compute_tidal_elevations.py @@ -95,6 +95,7 @@ UPDATE HISTORY: Updated 05/2022: added ESR netCDF4 formats to list of model types + updated keyword arguments to read tide model programs Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models @@ -301,26 +302,26 @@ def compute_tidal_elevations(tide_dir, input_file, output_file, if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(lon.flatten(), lat.flatten(), model.grid_file, model.model_file, model.projection, - TYPE=model.type, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, - CUTOFF=CUTOFF, GRID=model.format) + type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, + cutoff=CUTOFF, grid=model.format) deltat = np.zeros((nt)) elif (model.format == 'netcdf'): amp,ph,D,c = extract_netcdf_constants(lon.flatten(), lat.flatten(), - model.grid_file, model.model_file, TYPE=model.type, - METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, - SCALE=model.scale, GZIP=model.compressed) + model.grid_file, model.model_file, type=model.type, + method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, + scale=model.scale, compressed=model.compressed) deltat = np.zeros((nt)) elif (model.format == 'GOT'): amp,ph,c = extract_GOT_constants(lon.flatten(), lat.flatten(), - model.model_file, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, - CUTOFF=CUTOFF, SCALE=model.scale, GZIP=model.compressed) + model.model_file, method=METHOD, extrapolate=EXTRAPOLATE, + cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) #-- interpolate delta times from calendar dates to tide time deltat = calc_delta_time(delta_file,tide_time) elif (model.format == 'FES'): amp,ph = extract_FES_constants(lon.flatten(), lat.flatten(), - model.model_file, TYPE=model.type, VERSION=model.version, - METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, - 
SCALE=model.scale, GZIP=model.compressed) + model.model_file, type=model.type, version=model.version, + method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, + scale=model.scale, compressed=model.compressed) #-- available model constituents c = model.constituents #-- interpolate delta times from calendar dates to tide time @@ -337,9 +338,9 @@ def compute_tidal_elevations(tide_dir, input_file, output_file, tide.mask = np.zeros((ny,nx,nt),dtype=bool) for i in range(nt): TIDE = predict_tide(tide_time[i], hc, c, - DELTAT=deltat[i], CORRECTIONS=model.format) + deltat=deltat[i], corrections=model.format) MINOR = infer_minor_corrections(tide_time[i], hc, c, - DELTAT=deltat[i], CORRECTIONS=model.format) + deltat=deltat[i], corrections=model.format) #-- add major and minor components and reform grid tide[:,:,i] = np.reshape((TIDE+MINOR), (ny,nx)) tide.mask[:,:,i] = np.reshape((TIDE.mask | MINOR.mask), (ny,nx)) @@ -347,9 +348,9 @@ def compute_tidal_elevations(tide_dir, input_file, output_file, tide = np.ma.zeros((nt), fill_value=fill_value) tide.mask = np.any(hc.mask,axis=1) tide.data[:] = predict_tide_drift(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) minor = infer_minor_corrections(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) tide.data[:] += minor.data[:] #-- replace invalid values with fill value tide.data[tide.mask] = tide.fill_value diff --git a/scripts/compute_tides_ICESat2_ATL03.py b/scripts/compute_tides_ICESat2_ATL03.py index 32c23c2a..a8bb0767 100644 --- a/scripts/compute_tides_ICESat2_ATL03.py +++ b/scripts/compute_tides_ICESat2_ATL03.py @@ -62,6 +62,7 @@ UPDATE HISTORY: Updated 05/2022: added ESR netCDF4 formats to list of model types + updated keyword arguments to read tide model programs Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: added 
Arctic 2km model (Arc2kmTM) to list of models @@ -207,27 +208,27 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, #-- read tidal constants and interpolate to grid points if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(lon, lat, model.grid_file, - model.model_file, model.projection, TYPE=model.type, - METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, - GRID=model.format) + model.model_file, model.projection, type=model.type, + method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, + grid=model.format) deltat = np.zeros_like(tide_time) elif (model.format == 'netcdf'): amp,ph,D,c = extract_netcdf_constants(lon, lat, model.grid_file, - model.model_file, TYPE=model.type, METHOD=METHOD, - EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, SCALE=model.scale, - GZIP=model.compressed) + model.model_file, type=model.type, method=METHOD, + extrapolate=EXTRAPOLATE, cutoff=CUTOFF, scale=model.scale, + compressed=model.compressed) deltat = np.zeros_like(tide_time) elif (model.format == 'GOT'): amp,ph,c = extract_GOT_constants(lon, lat, model.model_file, - METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, - SCALE=model.scale, GZIP=model.compressed) + method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, + scale=model.scale, compressed=model.compressed) #-- interpolate delta times from calendar dates to tide time deltat = calc_delta_time(delta_file, tide_time) elif (model.format == 'FES'): amp,ph = extract_FES_constants(lon, lat, model.model_file, - TYPE=model.type, VERSION=model.version, METHOD=METHOD, - EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, SCALE=model.scale, - GZIP=model.compressed) + type=model.type, version=model.version, method=METHOD, + extrapolate=EXTRAPOLATE, cutoff=CUTOFF, scale=model.scale, + compressed=model.compressed) #-- available model constituents c = model.constituents #-- interpolate delta times from calendar dates to tide time @@ -242,9 +243,9 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, 
TIDE_MODEL=None, tide = np.ma.empty((n_seg),fill_value=fv) tide.mask = np.any(hc.mask,axis=1) tide.data[:] = predict_tide_drift(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) minor = infer_minor_corrections(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) tide.data[:] += minor.data[:] #-- replace masked and nan values with fill value invalid, = np.nonzero(np.isnan(tide.data) | tide.mask) @@ -509,7 +510,7 @@ def HDF5_ATL03_tide_write(IS2_atl03_tide, IS2_atl03_attrs, INPUT=None, time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) #-- convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,FORMAT='tuple') + YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') #-- add attributes with measurement date start, end and duration tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) diff --git a/scripts/compute_tides_ICESat2_ATL06.py b/scripts/compute_tides_ICESat2_ATL06.py index 2c75c9f8..81e5d063 100644 --- a/scripts/compute_tides_ICESat2_ATL06.py +++ b/scripts/compute_tides_ICESat2_ATL06.py @@ -57,6 +57,7 @@ UPDATE HISTORY: Updated 05/2022: added ESR netCDF4 formats to list of model types + updated keyword arguments to read tide model programs Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models @@ -199,26 +200,26 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(val['longitude'], val['latitude'], model.grid_file, model.model_file, - model.projection, TYPE=model.type, METHOD=METHOD, - EXTRAPOLATE=EXTRAPOLATE, 
CUTOFF=CUTOFF, GRID=model.format) + model.projection, type=model.type, method=METHOD, + extrapolate=EXTRAPOLATE, cutoff=CUTOFF, grid=model.format) deltat = np.zeros_like(tide_time) elif (model.format == 'netcdf'): amp,ph,D,c = extract_netcdf_constants(val['longitude'], val['latitude'], model.grid_file, model.model_file, - TYPE=model.type, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, - CUTOFF=CUTOFF, SCALE=model.scale, GZIP=model.compressed) + type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, + cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) deltat = np.zeros_like(tide_time) elif (model.format == 'GOT'): amp,ph,c = extract_GOT_constants(val['longitude'], val['latitude'], - model.model_file, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, - CUTOFF=CUTOFF, SCALE=model.scale, GZIP=model.compressed) + model.model_file, method=METHOD, extrapolate=EXTRAPOLATE, + cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) #-- interpolate delta times from calendar dates to tide time deltat = calc_delta_time(delta_file, tide_time) elif (model.format == 'FES'): amp,ph = extract_FES_constants(val['longitude'], val['latitude'], - model.model_file, TYPE=model.type, VERSION=model.version, - METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, - SCALE=model.scale, GZIP=model.compressed) + model.model_file, type=model.type, version=model.version, + method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, + scale=model.scale, compressed=model.compressed) #-- available model constituents c = model.constituents #-- interpolate delta times from calendar dates to tide time @@ -233,9 +234,9 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, tide = np.ma.empty((n_seg),fill_value=fv) tide.mask = np.any(hc.mask,axis=1) tide.data[:] = predict_tide_drift(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) minor = infer_minor_corrections(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, 
corrections=model.format) tide.data[:] += minor.data[:] #-- replace masked and nan values with fill value invalid, = np.nonzero(np.isnan(tide.data) | tide.mask) @@ -510,7 +511,7 @@ def HDF5_ATL06_tide_write(IS2_atl06_tide, IS2_atl06_attrs, INPUT=None, time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) #-- convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,FORMAT='tuple') + YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') #-- add attributes with measurement date start, end and duration tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) diff --git a/scripts/compute_tides_ICESat2_ATL07.py b/scripts/compute_tides_ICESat2_ATL07.py index 2dc5b336..24dea549 100644 --- a/scripts/compute_tides_ICESat2_ATL07.py +++ b/scripts/compute_tides_ICESat2_ATL07.py @@ -57,6 +57,7 @@ UPDATE HISTORY: Updated 05/2022: added ESR netCDF4 formats to list of model types + updated keyword arguments to read tide model programs Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models @@ -196,26 +197,26 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(val['longitude'], val['latitude'], model.grid_file, model.model_file, - model.projection, TYPE=model.type, METHOD=METHOD, - EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, GRID=model.format) + model.projection, type=model.type, method=METHOD, + extrapolate=EXTRAPOLATE, cutoff=CUTOFF, grid=model.format) deltat = np.zeros_like(tide_time) elif (model.format == 'netcdf'): amp,ph,D,c = extract_netcdf_constants(val['longitude'], val['latitude'], model.grid_file, model.model_file, - TYPE=model.type, 
METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, - SCALE=model.scale, CUTOFF=CUTOFF, GZIP=model.compressed) + type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, + scale=model.scale, cutoff=CUTOFF, compressed=model.compressed) deltat = np.zeros_like(tide_time) elif (model.format == 'GOT'): amp,ph,c = extract_GOT_constants(val['longitude'], val['latitude'], - model.model_file, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, - CUTOFF=CUTOFF, SCALE=model.scale, GZIP=model.compressed) + model.model_file, method=METHOD, extrapolate=EXTRAPOLATE, + cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) #-- interpolate delta times from calendar dates to tide time deltat = calc_delta_time(delta_file, tide_time) elif (model.format == 'FES'): amp,ph = extract_FES_constants(val['longitude'], val['latitude'], - model.model_file, TYPE=model.type, VERSION=model.version, - METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, - SCALE=model.scale, GZIP=model.compressed) + model.model_file, type=model.type, version=model.version, + method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, + scale=model.scale, compressed=model.compressed) #-- available model constituents c = model.constituents #-- interpolate delta times from calendar dates to tide time @@ -230,9 +231,9 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, tide = np.ma.empty((n_seg)) tide.mask = np.any(hc.mask,axis=1) tide.data[:] = predict_tide_drift(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) minor = infer_minor_corrections(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) tide.data[:] += minor.data[:] #-- replace masked and nan values with fill value invalid, = np.nonzero(np.isnan(tide.data) | tide.mask) @@ -542,7 +543,7 @@ def HDF5_ATL07_tide_write(IS2_atl07_tide, IS2_atl07_attrs, INPUT=None, time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, epoch1=(1980,1,6,0,0,0), 
epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) #-- convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,FORMAT='tuple') + YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') #-- add attributes with measurement date start, end and duration tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) diff --git a/scripts/compute_tides_ICESat2_ATL10.py b/scripts/compute_tides_ICESat2_ATL10.py index a74e9468..de53a73e 100644 --- a/scripts/compute_tides_ICESat2_ATL10.py +++ b/scripts/compute_tides_ICESat2_ATL10.py @@ -57,6 +57,7 @@ UPDATE HISTORY: Updated 05/2022: added ESR netCDF4 formats to list of model types + updated keyword arguments to read tide model programs Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models @@ -219,26 +220,26 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(val['longitude'], val['latitude'], model.grid_file, model.model_file, - model.projection, TYPE=model.type, METHOD=METHOD, - EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, GRID=model.format) + model.projection, type=model.type, method=METHOD, + extrapolate=EXTRAPOLATE, cutoff=CUTOFF, grid=model.format) deltat = np.zeros_like(tide_time) elif (model.format == 'netcdf'): amp,ph,D,c = extract_netcdf_constants(val['longitude'], val['latitude'], model.grid_file, model.model_file, - TYPE=model.type, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, - SCALE=model.scale, CUTOFF=CUTOFF, GZIP=model.compressed) + type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, + scale=model.scale, cutoff=CUTOFF, compressed=model.compressed) deltat = np.zeros_like(tide_time) elif (model.format == 'GOT'): amp,ph,c = extract_GOT_constants(val['longitude'], val['latitude'], - 
model.model_file, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, - CUTOFF=CUTOFF, SCALE=model.scale, GZIP=model.compressed) + model.model_file, method=METHOD, extrapolate=EXTRAPOLATE, + cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) #-- interpolate delta times from calendar dates to tide time deltat = calc_delta_time(delta_file, tide_time) elif (model.format == 'FES'): amp,ph = extract_FES_constants(val['longitude'], val['latitude'], - model.model_file, TYPE=model.type, VERSION=model.version, - METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, - SCALE=model.scale, GZIP=model.compressed) + model.model_file, type=model.type, version=model.version, + method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, + scale=model.scale, compressed=model.compressed) #-- available model constituents c = model.constituents #-- interpolate delta times from calendar dates to tide time @@ -253,9 +254,9 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, tide = np.ma.empty((n_seg)) tide.mask = np.any(hc.mask,axis=1) tide.data[:] = predict_tide_drift(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) minor = infer_minor_corrections(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) tide.data[:] += minor.data[:] #-- replace masked and nan values with fill value invalid, = np.nonzero(np.isnan(tide.data) | tide.mask) @@ -505,7 +506,7 @@ def HDF5_ATL10_tide_write(IS2_atl10_tide, IS2_atl10_attrs, INPUT=None, time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) #-- convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,FORMAT='tuple') + YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') #-- add attributes with measurement date start, end and duration tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), 
int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) diff --git a/scripts/compute_tides_ICESat2_ATL11.py b/scripts/compute_tides_ICESat2_ATL11.py index c28378c7..a95317e3 100644 --- a/scripts/compute_tides_ICESat2_ATL11.py +++ b/scripts/compute_tides_ICESat2_ATL11.py @@ -57,6 +57,7 @@ UPDATE HISTORY: Updated 05/2022: added ESR netCDF4 formats to list of model types + updated keyword arguments to read tide model programs Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models @@ -220,28 +221,28 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(longitude[track], latitude[track], model.grid_file, model.model_file, - model.projection, TYPE=model.type, METHOD=METHOD, - EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, GRID=model.format) + model.projection, type=model.type, method=METHOD, + extrapolate=EXTRAPOLATE, cutoff=CUTOFF, grid=model.format) deltat = np.zeros_like(tide_time) elif (model.format == 'netcdf'): amp,ph,D,c = extract_netcdf_constants(longitude[track], latitude[track], model.grid_file, model.model_file, - TYPE=model.type, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, - CUTOFF=CUTOFF, SCALE=model.scale, GZIP=model.compressed) + type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, + cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) deltat = np.zeros_like(tide_time) elif (model.format == 'GOT'): amp,ph,c = extract_GOT_constants(longitude[track], - latitude[track], model.model_file, METHOD=METHOD, - EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, SCALE=model.scale, - GZIP=model.compressed) + latitude[track], model.model_file, method=METHOD, + extrapolate=EXTRAPOLATE, cutoff=CUTOFF, scale=model.scale, + compressed=model.compressed) #-- interpolate delta times from calendar dates to tide time deltat = 
calc_delta_time(delta_file, tide_time) elif (model.format == 'FES'): amp,ph = extract_FES_constants(longitude[track], latitude[track], model.model_file, - TYPE=model.type, VERSION=model.version, METHOD=METHOD, - EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, - SCALE=model.scale, GZIP=model.compressed) + type=model.type, version=model.version, method=METHOD, + extrapolate=EXTRAPOLATE, cutoff=CUTOFF, + scale=model.scale, compressed=model.compressed) #-- available model constituents c = model.constituents #-- interpolate delta times from calendar dates to tide time @@ -262,9 +263,9 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, #-- predict tidal elevations and infer minor corrections tide[track].data[valid,cycle] = predict_tide_drift( tide_time[valid,cycle], hc[valid,:], c, - DELTAT=deltat[valid,cycle], CORRECTIONS=model.format) + deltat=deltat[valid,cycle], corrections=model.format) minor = infer_minor_corrections(tide_time[valid,cycle], hc[valid,:], - c, DELTAT=deltat[valid,cycle], CORRECTIONS=model.format) + c, deltat=deltat[valid,cycle], corrections=model.format) tide[track].data[valid,cycle] += minor.data[:] elif (track == 'XT'): #-- find valid time and spatial points @@ -272,10 +273,10 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, valid, = np.nonzero(~tide[track].mask[:]) #-- predict tidal elevations and infer minor corrections tide[track].data[valid] = predict_tide_drift(tide_time[valid], - hc[valid,:], c, DELTAT=deltat[valid], - CORRECTIONS=model.format) + hc[valid,:], c, deltat=deltat[valid], + corrections=model.format) minor = infer_minor_corrections(tide_time[valid], hc[valid,:], - c, DELTAT=deltat[valid], CORRECTIONS=model.format) + c, deltat=deltat[valid], corrections=model.format) tide[track].data[valid] += minor.data[:] #-- replace masked and nan values with fill value @@ -671,7 +672,7 @@ def HDF5_ATL11_tide_write(IS2_atl11_tide, IS2_atl11_attrs, INPUT=None, time_julian = 2400000.5 + 
pyTMD.time.convert_delta_time(gps_seconds - leaps, epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) #-- convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,FORMAT='tuple') + YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') #-- add attributes with measurement date start, end and duration tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) diff --git a/scripts/compute_tides_ICESat2_ATL12.py b/scripts/compute_tides_ICESat2_ATL12.py index 79dda427..4425afb2 100644 --- a/scripts/compute_tides_ICESat2_ATL12.py +++ b/scripts/compute_tides_ICESat2_ATL12.py @@ -57,6 +57,7 @@ UPDATE HISTORY: Updated 05/2022: added ESR netCDF4 formats to list of model types + updated keyword arguments to read tide model programs Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models @@ -197,26 +198,26 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(val['longitude'], val['latitude'], model.grid_file, model.model_file, - model.projection, TYPE=model.type, METHOD=METHOD, - EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, GRID=model.format) + model.projection, type=model.type, method=METHOD, + extrapolate=EXTRAPOLATE, cutoff=CUTOFF, grid=model.format) deltat = np.zeros_like(tide_time) elif (model.format == 'netcdf'): amp,ph,D,c = extract_netcdf_constants(val['longitude'], val['latitude'], model.grid_file, model.model_file, - TYPE=model.type, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, - CUTOFF=CUTOFF, SCALE=model.scale, GZIP=model.compressed) + type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, + cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) deltat = np.zeros_like(tide_time) elif (model.format == 
'GOT'): amp,ph,c = extract_GOT_constants(val['longitude'], val['latitude'], - model.model_file, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, - CUTOFF=CUTOFF, SCALE=model.scale, GZIP=model.compressed) + model.model_file, method=METHOD, extrapolate=EXTRAPOLATE, + cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) #-- interpolate delta times from calendar dates to tide time deltat = calc_delta_time(delta_file, tide_time) elif (model.format == 'FES'): amp,ph = extract_FES_constants(val['longitude'], val['latitude'], - model.model_file, TYPE=model.type, VERSION=model.version, - METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, - SCALE=model.scale, GZIP=model.compressed) + model.model_file, type=model.type, version=model.version, + method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, + scale=model.scale, compressed=model.compressed) #-- available model constituents c = model.constituents #-- interpolate delta times from calendar dates to tide time @@ -231,9 +232,9 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, tide = np.ma.empty((n_seg)) tide.mask = np.any(hc.mask,axis=1) tide.data[:] = predict_tide_drift(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) minor = infer_minor_corrections(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) tide.data[:] += minor.data[:] #-- replace masked and nan values with fill value invalid, = np.nonzero(np.isnan(tide.data) | tide.mask) @@ -505,7 +506,7 @@ def HDF5_ATL12_tide_write(IS2_atl12_tide, IS2_atl12_attrs, INPUT=None, time_julian = 2400000.5 + pyTMD.time.convert_delta_time(gps_seconds - leaps, epoch1=(1980,1,6,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) #-- convert to calendar date - YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,FORMAT='tuple') + YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(time_julian,format='tuple') #-- add attributes with measurement date start, end 
and duration tcs = datetime.datetime(int(YY[0]), int(MM[0]), int(DD[0]), int(HH[0]), int(MN[0]), int(SS[0]), int(1e6*(SS[0] % 1))) diff --git a/scripts/compute_tides_ICESat_GLA12.py b/scripts/compute_tides_ICESat_GLA12.py index f7405978..a3423e4d 100644 --- a/scripts/compute_tides_ICESat_GLA12.py +++ b/scripts/compute_tides_ICESat_GLA12.py @@ -61,6 +61,7 @@ UPDATE HISTORY: Updated 05/2022: added ESR netCDF4 formats to list of model types + updated keyword arguments to read tide model programs Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: save ICESat campaign attribute to output file @@ -195,27 +196,27 @@ def compute_tides_ICESat(tide_dir, INPUT_FILE, TIDE_MODEL=None, #-- read tidal constants and interpolate to grid points if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(lon_40HZ, lat_40HZ, - model.grid_file, model.model_file, model.projection, TYPE=TYPE, - METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, - CUTOFF=CUTOFF, GRID=model.format) + model.grid_file, model.model_file, model.projection, + type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, + cutoff=CUTOFF, grid=model.format) deltat = np.zeros_like(tide_time) elif (model.format == 'netcdf'): amp,ph,D,c = extract_netcdf_constants(lon_40HZ, lat_40HZ, - model.grid_file, model.model_file, TYPE=TYPE, METHOD=METHOD, - EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, SCALE=model.scale, - GZIP=model.compressed) + model.grid_file, model.model_file, type=model.type, + method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, + scale=model.scale, compressed=model.compressed) deltat = np.zeros_like(tide_time) elif (model.format == 'GOT'): amp,ph,c = extract_GOT_constants(lon_40HZ, lat_40HZ, - model.model_file, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, - CUTOFF=CUTOFF, SCALE=model.scale, GZIP=model.compressed) + model.model_file, method=METHOD, extrapolate=EXTRAPOLATE, + cutoff=CUTOFF, scale=model.scale, 
compressed=model.compressed) #-- interpolate delta times from calendar dates to tide time deltat = calc_delta_time(delta_file, tide_time) elif (model.format == 'FES'): amp,ph = extract_FES_constants(lon_40HZ, lat_40HZ, - model.model_file, TYPE=TYPE, VERSION=model.version, - METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, - SCALE=model.scale, GZIP=model.compressed) + model.model_file, type=model.type, version=model.version, + method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, + scale=model.scale, compressed=model.compressed) #-- available model constituents c = model.constituents #-- interpolate delta times from calendar dates to tide time @@ -230,9 +231,9 @@ def compute_tides_ICESat(tide_dir, INPUT_FILE, TIDE_MODEL=None, tide = np.ma.empty((n_40HZ),fill_value=fv) tide.mask = np.any(hc.mask,axis=1) tide.data[:] = predict_tide_drift(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) minor = infer_minor_corrections(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) tide.data[:] += minor.data[:] #-- replace masked and nan values with fill value invalid, = np.nonzero(np.isnan(tide.data) | tide.mask) diff --git a/scripts/compute_tides_icebridge_data.py b/scripts/compute_tides_icebridge_data.py index 979a3e37..70bda774 100644 --- a/scripts/compute_tides_icebridge_data.py +++ b/scripts/compute_tides_icebridge_data.py @@ -65,6 +65,7 @@ UPDATE HISTORY: Updated 05/2022: added ESR netCDF4 formats to list of model types + updated keyword arguments to read tide model programs Updated 04/2022: include utf-8 encoding in reads to be windows compliant use argparse descriptions within sphinx documentation Updated 03/2022: using static decorators to define available models @@ -530,26 +531,26 @@ def compute_tides_icebridge_data(tide_dir, arg, TIDE_MODEL, if model.format in ('OTIS','ATLAS','ESR'): amp,ph,D,c = extract_tidal_constants(dinput['lon'], dinput['lat'], 
model.grid_file, model.model_file, model.projection, - TYPE=model.type, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, - CUTOFF=CUTOFF, GRID=model.format) + type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, + cutoff=CUTOFF, grid=model.format) deltat = np.zeros_like(t) elif model.format in ('netcdf'): amp,ph,D,c = extract_netcdf_constants(dinput['lon'], dinput['lat'], - model.grid_file, model.model_file, TYPE=model.type, METHOD=METHOD, - EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, SCALE=model.scale, - GZIP=model.compressed) + model.grid_file, model.model_file, type=model.type, method=METHOD, + extrapolate=EXTRAPOLATE, cutoff=CUTOFF, scale=model.scale, + compressed=model.compressed) deltat = np.zeros_like(t) elif (model.format == 'GOT'): amp,ph,c = extract_GOT_constants(dinput['lon'], dinput['lat'], - model.model_file, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, - CUTOFF=CUTOFF, SCALE=model.scale, GZIP=model.compressed) + model.model_file, method=METHOD, extrapolate=EXTRAPOLATE, + cutoff=CUTOFF, scale=model.scale, compressed=model.compressed) #-- interpolate delta times from calendar dates to tide time deltat = calc_delta_time(delta_file, t) elif (model.format == 'FES'): amp,ph = extract_FES_constants(dinput['lon'], dinput['lat'], - model.model_file, TYPE=model.type, VERSION=model.version, - METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, CUTOFF=CUTOFF, - SCALE=model.scale, GZIP=model.compressed) + model.model_file, type=model.type, version=model.version, + method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, + scale=model.scale, compressed=model.compressed) #-- available model constituents c = model.constituents #-- interpolate delta times from calendar dates to tide time @@ -584,9 +585,9 @@ def compute_tides_icebridge_data(tide_dir, arg, TIDE_MODEL, tide = np.ma.empty((file_lines),fill_value=fill_value) tide.mask = np.any(hc.mask,axis=1) tide.data[:] = predict_tide_drift(t, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) minor = 
infer_minor_corrections(t, hc, c, - DELTAT=deltat, CORRECTIONS=model.format) + deltat=deltat, corrections=model.format) tide.data[:] += minor.data[:] #-- replace invalid values with fill value tide.data[tide.mask] = tide.fill_value @@ -635,7 +636,7 @@ def compute_tides_icebridge_data(tide_dir, arg, TIDE_MODEL, time_julian = 2400000.5 + pyTMD.time.convert_delta_time(time_range, epoch1=(1992,1,1,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0) #-- convert to calendar date - cal = pyTMD.time.convert_julian(time_julian,ASTYPE=int) + cal = pyTMD.time.convert_julian(time_julian,astype=int) #-- add attributes with measurement date start, end and duration args = (cal['hour'][0],cal['minute'][0],cal['second'][0]) fid.attrs['RangeBeginningTime'] = '{0:02d}:{1:02d}:{2:02d}'.format(*args) diff --git a/scripts/reduce_OTIS_files.py b/scripts/reduce_OTIS_files.py index 83a43434..4661cdff 100644 --- a/scripts/reduce_OTIS_files.py +++ b/scripts/reduce_OTIS_files.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" reduce_OTIS_files.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (05/2022) Read OTIS-format tidal files and reduce to a regional subset COMMAND LINE OPTIONS: @@ -29,6 +29,7 @@ output_otis_tides.py: writes OTIS-format tide files UPDATE HISTORY: + Updated 05/2022: updated keyword arguments to read tide model programs Updated 04/2022: use argparse descriptions within documentation Updated 11/2021: add function for attempting to extract projection Updated 09/2021: refactor to use model class for files and attributes @@ -87,8 +88,8 @@ def make_regional_OTIS_files(tide_dir, TIDE_MODEL, BOUNDS=4*[None], if (model.format == 'ATLAS'): #-- if reading a global solution with localized solutions x0,y0,hz0,mz0,iob,dt,pmask,local = read_atlas_grid(model.grid_file) - xi,yi,hz = combine_atlas_model(x0,y0,hz0,pmask,local,VARIABLE='depth') - mz = create_atlas_mask(x0,y0,mz0,local,VARIABLE='depth') + xi,yi,hz = combine_atlas_model(x0,y0,hz0,pmask,local,variable='depth') + mz = 
create_atlas_mask(x0,y0,mz0,local,variable='depth') else: #-- if reading a pure global solution xi,yi,hz,mz,iob,dt = read_tide_grid(model.grid_file) @@ -141,7 +142,7 @@ def make_regional_OTIS_files(tide_dir, TIDE_MODEL, BOUNDS=4*[None], #-- read constituent from elevation file if (model.format == 'ATLAS'): z0,zlocal=read_atlas_elevation(model_file['z'],i,c) - xi,yi,z=combine_atlas_model(x0,y0,z0,pmask,zlocal,VARIABLE='z') + xi,yi,z=combine_atlas_model(x0,y0,z0,pmask,zlocal,variable='z') else: z=read_elevation_file(model_file['z'],i) #-- reduce elevation to new bounds @@ -168,8 +169,8 @@ def make_regional_OTIS_files(tide_dir, TIDE_MODEL, BOUNDS=4*[None], #-- read constituent from transport file if (model.format == 'ATLAS'): u0,v0,uvlocal=read_atlas_transport(model_file['u'],i,c) - xi,yi,u=combine_atlas_model(x0,y0,u0,pmask,uvlocal,VARIABLE='u') - xi,yi,v=combine_atlas_model(x0,y0,v0,pmask,uvlocal,VARIABLE='v') + xi,yi,u=combine_atlas_model(x0,y0,u0,pmask,uvlocal,variable='u') + xi,yi,v=combine_atlas_model(x0,y0,v0,pmask,uvlocal,variable='v') else: u,v=read_transport_file(model_file['u'],i) #-- reduce transport components to new bounds diff --git a/test/test_atlas_read.py b/test/test_atlas_read.py index 27998b44..8ded0f8f 100644 --- a/test/test_atlas_read.py +++ b/test/test_atlas_read.py @@ -140,8 +140,9 @@ def test_read_TPXO9_v2(METHOD, EXTRAPOLATE): #-- extract amplitude and phase from tide model amp,ph,D,c = pyTMD.read_netcdf_model.extract_netcdf_constants( - val['Lon'], val['Lat'], grid_file, model_file, TYPE=TYPE, - METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, SCALE=SCALE, GZIP=GZIP) + val['Lon'], val['Lat'], grid_file, model_file, type=TYPE, + method=METHOD, extrapolate=EXTRAPOLATE, scale=SCALE, + compressed=GZIP) #-- convert phase from 0:360 to -180:180 ph[ph > 180] -= 360.0 @@ -206,8 +207,8 @@ def test_verify_TPXO8(METHOD, EXTRAPOLATE): #-- extract amplitude and phase from tide model amp,ph,D,c = pyTMD.read_tide_model.extract_tidal_constants( val['longitude'], 
val['latitude'], model.grid_file, - model.model_file, model.projection, TYPE=model.type, - METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, GRID=model.format) + model.model_file, model.projection, type=model.type, + method=METHOD, extrapolate=EXTRAPOLATE, grid=model.format) #-- delta time deltat = np.zeros_like(val['time']) #-- calculate complex phase in radians for Euler's @@ -222,7 +223,7 @@ def test_verify_TPXO8(METHOD, EXTRAPOLATE): #-- predict tidal elevations at time tide.mask[:] = np.any(hc.mask, axis=1) tide.data[:] = pyTMD.predict_tide_drift(val['time'], hc[:,i], - constituents, DELTAT=deltat, CORRECTIONS=model.format) + constituents, deltat=deltat, corrections=model.format) #-- will verify differences between model outputs are within tolerance eps = 0.03 @@ -280,8 +281,8 @@ def test_verify_TPXO9_v2(METHOD, EXTRAPOLATE): #-- extract amplitude and phase from tide model amp,ph,D,c = pyTMD.read_netcdf_model.extract_netcdf_constants( val['longitude'], val['latitude'], grid_file, model_file, - TYPE=TYPE, METHOD=METHOD, EXTRAPOLATE=EXTRAPOLATE, - SCALE=SCALE, GZIP=GZIP) + type=TYPE, method=METHOD, extrapolate=EXTRAPOLATE, + scale=SCALE, compressed=GZIP) #-- delta time deltat = np.zeros_like(val['time']) #-- verify constituents @@ -296,7 +297,7 @@ def test_verify_TPXO9_v2(METHOD, EXTRAPOLATE): #-- predict tidal elevations at time tide.mask[:] = np.any(hc.mask, axis=1) tide.data[:] = pyTMD.predict_tide_drift(val['time'], hc, c, - DELTAT=deltat, CORRECTIONS=model_format) + deltat=deltat, corrections=model_format) #-- will verify differences between model outputs are within tolerance eps = 0.05 diff --git a/test/test_download_and_read.py b/test/test_download_and_read.py index d259bedc..3410b47e 100644 --- a/test/test_download_and_read.py +++ b/test/test_download_and_read.py @@ -280,8 +280,8 @@ def test_compare_CATS2008(self): #-- extract amplitude and phase from tide model amp,ph,D,cons = pyTMD.read_tide_model.extract_tidal_constants(station_lon, - station_lat, grid_file, 
model_file, EPSG, TYPE=TYPE, METHOD='spline', - GRID=GRID) + station_lat, grid_file, model_file, EPSG, type=TYPE, method='spline', + grid=GRID) #-- reorder constituents of model and convert amplitudes to cm model_amp = np.ma.zeros((antarctic_stations,len(constituents))) model_ph = np.ma.zeros((antarctic_stations,len(constituents))) @@ -376,8 +376,8 @@ def test_verify_CATS2008(self, parameters): #-- extract amplitude and phase from tide model amp,ph,D,c = pyTMD.read_tide_model.extract_tidal_constants(station_lon, - station_lat, grid_file, model_file, EPSG, TYPE=TYPE, - METHOD='spline', GRID=GRID) + station_lat, grid_file, model_file, EPSG, type=TYPE, + method='spline', grid=GRID) #-- calculate complex phase in radians for Euler's cph = -1j*ph*np.pi/180.0 #-- will verify differences between model outputs are within tolerance @@ -420,9 +420,9 @@ def test_verify_CATS2008(self, parameters): #-- predict tidal elevations at time and infer minor corrections tide.mask[:] = np.any(hc.mask) tide.data[:] = pyTMD.predict_tidal_ts(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=GRID) + deltat=deltat, corrections=GRID) minor = pyTMD.infer_minor_corrections(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=GRID) + deltat=deltat, corrections=GRID) tide.data[:] += minor.data[:] #-- calculate differences between matlab and python version @@ -492,8 +492,8 @@ def test_tidal_ellipse(self): for TYPE in TYPES: #-- extract amplitude and phase from tide model amp,ph,D,c=pyTMD.read_tide_model.extract_tidal_constants(station_lon[i], - station_lat[i], grid_file, model_file, EPSG, TYPE=TYPE, - METHOD='spline', GRID=GRID) + station_lat[i], grid_file, model_file, EPSG, type=TYPE, + method='spline', grid=GRID) #-- calculate complex phase in radians for Euler's cph = -1j*ph*np.pi/180.0 #-- calculate constituent oscillation for station @@ -685,8 +685,8 @@ def test_verify_AOTIM5_2018(self, parameters): #-- extract amplitude and phase from tide model amp,ph,D,c = 
pyTMD.read_tide_model.extract_tidal_constants(station_lon, - station_lat, grid_file, model_file, EPSG, TYPE=TYPE, - METHOD='spline', GRID=GRID) + station_lat, grid_file, model_file, EPSG, type=TYPE, + method='spline', grid=GRID) #-- calculate complex phase in radians for Euler's cph = -1j*ph*np.pi/180.0 #-- will verify differences between model outputs are within tolerance @@ -724,9 +724,9 @@ def test_verify_AOTIM5_2018(self, parameters): #-- predict tidal elevations at time and infer minor corrections tide.mask[:] = np.any(hc.mask) tide.data[:] = pyTMD.predict_tidal_ts(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=GRID) + deltat=deltat, corrections=GRID) minor = pyTMD.infer_minor_corrections(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=GRID) + deltat=deltat, corrections=GRID) tide.data[:] += minor.data[:] #-- calculate differences between matlab and python version diff --git a/test/test_eop.py b/test/test_eop.py index 5bd0626a..8b9aca15 100644 --- a/test/test_eop.py +++ b/test/test_eop.py @@ -40,7 +40,7 @@ def test_read_EOP(EPOCH): MJD = pyTMD.time.convert_delta_time(delta_time, epoch1=(2000,1,1,0,0,0), epoch2=(1858,11,17,0,0,0), scale=1.0/86400.0) #-- add offset to convert to Julian days and then convert to calendar dates - Y,M,D,h,m,s = pyTMD.time.convert_julian(2400000.5 + MJD, FORMAT='tuple') + Y,M,D,h,m,s = pyTMD.time.convert_julian(2400000.5 + MJD, format='tuple') #-- calculate time in year-decimal format time_decimal = pyTMD.time.convert_calendar_decimal(Y,M,day=D, hour=h,minute=m,second=s) diff --git a/test/test_fes_predict.py b/test/test_fes_predict.py index 65a85788..08cac872 100644 --- a/test/test_fes_predict.py +++ b/test/test_fes_predict.py @@ -122,8 +122,8 @@ def test_verify_FES2014(): #-- extract amplitude and phase from tide model amp,ph = pyTMD.read_FES_model.extract_FES_constants(longitude, - latitude, model_file, TYPE=TYPE, VERSION=VERSION, - METHOD='spline', GZIP=True, SCALE=SCALE,) + latitude, model_file, type=TYPE, version=VERSION, + 
method='spline', compressed=True, scale=SCALE,) #-- interpolate delta times from calendar dates to tide time delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) deltat = pyTMD.calc_delta_time(delta_file, tide_time) @@ -137,9 +137,9 @@ def test_verify_FES2014(): #-- predict tidal elevations at time and infer minor corrections tide.mask[:] = np.any(hc.mask, axis=1) tide.data[:] = pyTMD.predict_tide_drift(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=model_format) + deltat=deltat, corrections=model_format) minor = pyTMD.infer_minor_corrections(tide_time, hc, c, - DELTAT=deltat, CORRECTIONS=model_format) + deltat=deltat, corrections=model_format) tide.data[:] += minor.data[:] #-- will verify differences between model outputs are within tolerance diff --git a/test/test_perth3_read.py b/test/test_perth3_read.py index d7ad75a0..0ad8cc1e 100644 --- a/test/test_perth3_read.py +++ b/test/test_perth3_read.py @@ -118,7 +118,7 @@ def test_verify_GOT47(METHOD): #-- extract amplitude and phase from tide model amp,ph,cons = pyTMD.read_GOT_model.extract_GOT_constants(longitude, - latitude, model_file, METHOD=METHOD, GZIP=GZIP, SCALE=SCALE) + latitude, model_file, method=METHOD, compressed=GZIP, scale=SCALE) assert all(c in constituents for c in cons) #-- interpolate delta times from calendar dates to tide time delta_file = pyTMD.utilities.get_data_path(['data','merged_deltat.data']) @@ -134,9 +134,9 @@ def test_verify_GOT47(METHOD): #-- predict tidal elevations at time and infer minor corrections tide.mask[:] = np.any(hc.mask, axis=1) tide.data[:] = pyTMD.predict_tide_drift(tide_time, hc, cons, - DELTAT=deltat, CORRECTIONS=model_format) + deltat=deltat, corrections=model_format) minor = pyTMD.infer_minor_corrections(tide_time, hc, cons, - DELTAT=deltat, CORRECTIONS=model_format) + deltat=deltat, corrections=model_format) tide.data[:] += minor.data[:] #-- will verify differences between model outputs are within tolerance diff --git a/test/test_time.py 
b/test/test_time.py index 6a7ac379..cabcd64c 100644 --- a/test/test_time.py +++ b/test/test_time.py @@ -31,7 +31,7 @@ def test_julian(YEAR,MONTH): #-- convert MJD to calendar date JD = np.squeeze(MJD) + 2400000.5 YY,MM,DD,HH,MN,SS = pyTMD.time.convert_julian(JD, - FORMAT='tuple', ASTYPE=np.float64) + format='tuple', astype=np.float64) #-- assert dates assert (YY == YEAR) assert (MM == MONTH) From 21e49c48625f599b9366f942f14405d4fe85c4c6 Mon Sep 17 00:00:00 2001 From: tsutterley Date: Tue, 17 May 2022 17:09:26 -0700 Subject: [PATCH 03/14] docs: add paragraph about delta times --- doc/source/getting_started/Getting-Started.rst | 9 +++++++++ pyTMD/calc_astrol_longitudes.py | 5 +++++ pyTMD/calc_delta_time.py | 14 +++++++++++--- 3 files changed, 25 insertions(+), 3 deletions(-) diff --git a/doc/source/getting_started/Getting-Started.rst b/doc/source/getting_started/Getting-Started.rst index 90c522f7..213d9f69 100644 --- a/doc/source/getting_started/Getting-Started.rst +++ b/doc/source/getting_started/Getting-Started.rst @@ -195,6 +195,13 @@ The `time module `_ ``pyTMD`` keeps updated `tables of leap seconds `_ for converting from GPS, LORAN and TAI times. ``pyTMD`` keeps updated `tables of delta times `_ for converting between dynamic (TT) and universal (UT1) times. +Delta times (TT - UT1) are the differences between Dynamic Time (TT) and Universal Time (UT1) [Meeus1998]_. +Universal Time (UT1) is based on the rotation of the Earth, +which varies irregularly, and so UT1 is adjusted periodically. +Dynamic Time (TT) is a uniform, monotonically increasing time standard based on atomic clocks that is +used for the accurate calculation of celestial mechanics, orbits and ephemerides. +Delta times can be added to Universal Time (UT1) values to convert to Dynamic Time (TT) values. + Spatial Coordinates ################### @@ -236,6 +243,8 @@ References .. [Lyard2020] F. H. Lyard, D. J. Allain, M. Cancet, L. Carr\ |egrave|\ re, and N. 
Picot, "FES2014 global ocean tides atlas: design and performances", *Ocean Science Discussions*, in review, (2020). `doi: 10.5194/os-2020-96 `_ +.. [Meeus1998] J. Meeus, *Astronomical Algorithms*, 2nd edition, 477 pp., (1998). + .. [Padman2004] L. Padman and S. Y. Erofeeva, "A barotropic inverse tidal model for the Arctic Ocean", *Geophysical Research Letters*, 31(2), L02303. (2004). `doi: 10.1029/2003GL019003 `_ .. [Padman2008] L. Padman, S. Y. Erofeeva, and H. A. Fricker, "Improving Antarctic tide models by assimilation of ICESat laser altimetry over ice shelves", *Geophysical Research Letters*, 35, L22504, (2008). `doi: 10.1029/2008GL035592 `_ diff --git a/pyTMD/calc_astrol_longitudes.py b/pyTMD/calc_astrol_longitudes.py index 588a8092..0cb8cd4b 100644 --- a/pyTMD/calc_astrol_longitudes.py +++ b/pyTMD/calc_astrol_longitudes.py @@ -69,6 +69,7 @@ def polynomial_sum(coefficients, t): def calc_astrol_longitudes(MJD, MEEUS=False, ASTRO5=False): """ Computes the basic astronomical mean longitudes: s, h, p, N and PP + [Meeus1998]_ Parameters ---------- @@ -91,6 +92,10 @@ def calc_astrol_longitudes(MJD, MEEUS=False, ASTRO5=False): mean longitude of ascending lunar node (degrees) PP: float longitude of solar perigee (degrees) + + References + ---------- + .. [Meeus1998] J. Meeus, *Astronomical Algorithms*, 2nd edition, 477 pp., (1998). """ circle = 360.0 if MEEUS: diff --git a/pyTMD/calc_delta_time.py b/pyTMD/calc_delta_time.py index 03d451ba..93f285de 100644 --- a/pyTMD/calc_delta_time.py +++ b/pyTMD/calc_delta_time.py @@ -21,6 +21,9 @@ scipy: Scientific Tools for Python https://docs.scipy.org/doc/ +REFERENCES: + Jean Meeus, Astronomical Algorithms, 2nd edition, 1998. 
+ UPDATE HISTORY: Updated 04/2022: updated docstrings to numpy documentation format Updated 08/2020: using builtin time operations, interpolate with tide time @@ -39,7 +42,8 @@ #-- by interpolating a delta time file to a given date def calc_delta_time(delta_file, idays): """ - Calculates the difference between universal time and dynamical time + Calculates the difference between universal time (UT) and + dynamical time (TT) [Meeus1998]_ Parameters ---------- @@ -52,13 +56,17 @@ def calc_delta_time(delta_file, idays): ------- deltat: float delta time at the input time + + References + ---------- + .. [Meeus1998] J. Meeus, *Astronomical Algorithms*, 2nd edition, 477 pp., (1998). """ #-- read delta time file dinput = np.loadtxt(os.path.expanduser(delta_file)) #-- calculate Julian days and then convert to days since 1992-01-01T00:00:00 - days=pyTMD.time.convert_calendar_dates(dinput[:,0],dinput[:,1],dinput[:,2], + days = pyTMD.time.convert_calendar_dates(dinput[:,0],dinput[:,1],dinput[:,2], epoch=(1992,1,1,0,0,0)) #-- use scipy interpolating splines to interpolate delta times - spl=scipy.interpolate.UnivariateSpline(days,dinput[:,3],k=1,s=0,ext=0) + spl = scipy.interpolate.UnivariateSpline(days,dinput[:,3],k=1,s=0,ext=0) #-- return the delta time for the input date converted to days return spl(idays)/86400.0 From 99f1b335d94798cbf38660e9f82038679ccf2f03 Mon Sep 17 00:00:00 2001 From: tsutterley Date: Tue, 17 May 2022 17:11:45 -0700 Subject: [PATCH 04/14] update version --- version.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/version.txt b/version.txt index ece61c60..238d6e88 100644 --- a/version.txt +++ b/version.txt @@ -1 +1 @@ -1.0.6 \ No newline at end of file +1.0.7 From 1ce34e29123fb699b748519a2160d45750865371 Mon Sep 17 00:00:00 2001 From: tsutterley Date: Thu, 19 May 2022 09:46:50 -0700 Subject: [PATCH 05/14] Update read_netcdf_model.py --- pyTMD/read_netcdf_model.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/pyTMD/read_netcdf_model.py b/pyTMD/read_netcdf_model.py index 7494fd98..b83214a8 100644 --- a/pyTMD/read_netcdf_model.py +++ b/pyTMD/read_netcdf_model.py @@ -223,7 +223,7 @@ def extract_netcdf_constants(ilon, ilat, f2 = scipy.interpolate.RectBivariateSpline(lon, lat, bathymetry.mask.T, kx=1, ky=1) D.data[:] = f1.ev(ilon,ilat) - D.mask[:] = np.ceil(f2.ev(ilon,ilat).astype(bool)) + D.mask[:] = np.ceil(f2.ev(ilon,ilat)).astype(bool) else: #-- use scipy regular grid to interpolate values for a given method r1 = scipy.interpolate.RegularGridInterpolator((lat,lon), From ace69a2f521a4ca21c430abd2a88934cc3eb7256 Mon Sep 17 00:00:00 2001 From: tsutterley Date: Fri, 20 May 2022 09:17:06 -0700 Subject: [PATCH 06/14] fix: format for CATS2002 grid format --- doc/source/getting_started/Getting-Started.rst | 4 ++-- pyTMD/model.py | 7 ++++--- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/doc/source/getting_started/Getting-Started.rst b/doc/source/getting_started/Getting-Started.rst index 213d9f69..a1e5f094 100644 --- a/doc/source/getting_started/Getting-Started.rst +++ b/doc/source/getting_started/Getting-Started.rst @@ -146,9 +146,9 @@ For parameters consisting of lists, the parameter values can be separated by spa Each definition file should have a ``name``, ``format`` and ``type`` parameters. Each model format also requires specific sets of parameters. 
-- ``OTIS`` and ``ATLAS`` +- ``OTIS``, ``ATLAS`` and ``ESR`` - * ``format``: ``'OTIS'`` or ``'ATLAS'`` + * ``format``: ``'OTIS'``, ``'ATLAS'`` or ``'ESR'`` * ``grid_file``: full path to model grid file * ``model_file``: full path to model constituent file(s) * ``name``: tide model name diff --git a/pyTMD/model.py b/pyTMD/model.py index 0fc369c1..80f84967 100644 --- a/pyTMD/model.py +++ b/pyTMD/model.py @@ -54,13 +54,14 @@ class model: - ``OTIS`` - ``ATLAS`` + - ``ESR`` - ``netcdf`` - ``GOT`` - ``FES`` gla12: str HDF5 dataset string for output GLA12 tide heights grid_file: str - Model grid file for ``OTIS`` and ``ATLAS`` models + Model grid file for ``OTIS``, ``ATLAS`` and ``ESR`` models gzip: bool Suffix if model is compressed long_name: str @@ -72,7 +73,7 @@ class model: name: str Model name projection: str - Model projection for ``OTIS`` and ``ATLAS`` models + Model projection for ``OTIS``, ``ATLAS`` and ``ESR`` models scale: float Model scaling factor for converting to output units suffix: str @@ -149,7 +150,7 @@ def grid(self, m): 'CATS2008a_SPOTL_Load') self.grid_file = self.pathfinder('grid_CATS2008a_opt') elif (m == 'CATS2022'): - self.format = 'OTIS' + self.format = 'ESR' self.model_directory = os.path.join(self.directory,'CATS2022') self.grid_file = self.pathfinder('CATS2022_test.nc') elif (m == 'TPXO9-atlas'): From 0426b60b61cc3fd8e5990f8c9502c862f6a59f0b Mon Sep 17 00:00:00 2001 From: tsutterley Date: Mon, 23 May 2022 16:49:47 -0700 Subject: [PATCH 07/14] feat: added options to apply flexure to computational programs --- .../user_guide/compute_tidal_elevations.rst | 3 + .../compute_tides_ICESat2_ATL03.rst | 3 + .../compute_tides_ICESat2_ATL06.rst | 3 + .../compute_tides_ICESat2_ATL11.rst | 3 + .../user_guide/compute_tides_ICESat_GLA12.rst | 3 + .../compute_tides_icebridge_data.rst | 3 + pyTMD/compute_tide_corrections.py | 9 ++- pyTMD/model.py | 6 ++ pyTMD/read_tide_model.py | 4 +- scripts/compute_tidal_elevations.py | 63 ++++++++++++++----
scripts/compute_tides_ICESat2_ATL03.py | 49 +++++++++++---- scripts/compute_tides_ICESat2_ATL06.py | 50 +++++++++++---- scripts/compute_tides_ICESat2_ATL11.py | 50 +++++++++++---- scripts/compute_tides_ICESat_GLA12.py | 50 ++++++++++----- scripts/compute_tides_icebridge_data.py | 40 +++++++++--- 15 files changed, 257 insertions(+), 82 deletions(-) diff --git a/doc/source/user_guide/compute_tidal_elevations.rst b/doc/source/user_guide/compute_tidal_elevations.rst index 4eeb63d5..9ef71c23 100644 --- a/doc/source/user_guide/compute_tidal_elevations.rst +++ b/doc/source/user_guide/compute_tidal_elevations.rst @@ -44,3 +44,6 @@ compute_tidal_elevations.py --cutoff -c : @after * set to ``'inf'`` to extrapolate for all points + + --apply-flexure : @after + Only valid for models containing flexure fields diff --git a/doc/source/user_guide/compute_tides_ICESat2_ATL03.rst b/doc/source/user_guide/compute_tides_ICESat2_ATL03.rst index e24c82b7..81406c9f 100644 --- a/doc/source/user_guide/compute_tides_ICESat2_ATL03.rst +++ b/doc/source/user_guide/compute_tides_ICESat2_ATL03.rst @@ -22,3 +22,6 @@ compute_tides_ICESat2_ATL03.py --cutoff -c : @after * set to ``'inf'`` to extrapolate for all points + + --apply-flexure : @after + Only valid for models containing flexure fields diff --git a/doc/source/user_guide/compute_tides_ICESat2_ATL06.rst b/doc/source/user_guide/compute_tides_ICESat2_ATL06.rst index d96c6d4b..76f01b30 100644 --- a/doc/source/user_guide/compute_tides_ICESat2_ATL06.rst +++ b/doc/source/user_guide/compute_tides_ICESat2_ATL06.rst @@ -20,3 +20,6 @@ compute_tides_ICESat2_ATL06.py --cutoff -c : @after * set to ``'inf'`` to extrapolate for all points + + --apply-flexure : @after + Only valid for models containing flexure fields diff --git a/doc/source/user_guide/compute_tides_ICESat2_ATL11.rst b/doc/source/user_guide/compute_tides_ICESat2_ATL11.rst index 01a4a48d..f1c3eb2f 100644 --- a/doc/source/user_guide/compute_tides_ICESat2_ATL11.rst +++ 
b/doc/source/user_guide/compute_tides_ICESat2_ATL11.rst @@ -21,3 +21,6 @@ compute_tides_ICESat2_ATL11.py --cutoff -c : @after * set to ``'inf'`` to extrapolate for all points + + --apply-flexure : @after + Only valid for models containing flexure fields diff --git a/doc/source/user_guide/compute_tides_ICESat_GLA12.rst b/doc/source/user_guide/compute_tides_ICESat_GLA12.rst index fa18555c..4dda250c 100644 --- a/doc/source/user_guide/compute_tides_ICESat_GLA12.rst +++ b/doc/source/user_guide/compute_tides_ICESat_GLA12.rst @@ -20,3 +20,6 @@ compute_tides_ICESat_GLA12.py --cutoff -c : @after * set to ``'inf'`` to extrapolate for all points + + --apply-flexure : @after + Only valid for models containing flexure fields diff --git a/doc/source/user_guide/compute_tides_icebridge_data.rst b/doc/source/user_guide/compute_tides_icebridge_data.rst index 682a64c4..76f68fcb 100644 --- a/doc/source/user_guide/compute_tides_icebridge_data.rst +++ b/doc/source/user_guide/compute_tides_icebridge_data.rst @@ -20,3 +20,6 @@ compute_tides_icebridge_data.py --cutoff -c : @after * set to ``'inf'`` to extrapolate for all points + + --apply-flexure : @after + Only valid for models containing flexure fields diff --git a/pyTMD/compute_tide_corrections.py b/pyTMD/compute_tide_corrections.py index 7511a062..532e069b 100644 --- a/pyTMD/compute_tide_corrections.py +++ b/pyTMD/compute_tide_corrections.py @@ -81,6 +81,7 @@ UPDATE HISTORY: Updated 05/2022: added ESR netCDF4 formats to list of model types updated keyword arguments to read tide model programs + added option to apply flexure to heights for applicable models Updated 04/2022: updated docstrings to numpy documentation format Updated 12/2021: added function to calculate a tidal time series verify coordinate dimensions for each input data type @@ -128,7 +129,7 @@ def compute_tide_corrections(x, y, delta_time, DIRECTORY=None, MODEL=None, ATLAS_FORMAT='netcdf', GZIP=False, DEFINITION_FILE=None, EPSG=3031, EPOCH=(2000,1,1,0,0,0), TYPE='drift', 
TIME='UTC', METHOD='spline', - EXTRAPOLATE=False, CUTOFF=10.0, FILL_VALUE=np.nan): + EXTRAPOLATE=False, CUTOFF=10.0, APPLY_FLEXURE=False, FILL_VALUE=np.nan): """ Compute tides at points and times using tidal harmonics @@ -186,6 +187,10 @@ def compute_tide_corrections(x, y, delta_time, DIRECTORY=None, MODEL=None, Extrapolation cutoff in kilometers Set to np.inf to extrapolate for all points + APPLY_FLEXURE: bool, default False + Apply ice flexure scaling factor to height constituents + + Only valid for models containing flexure fields FILL_VALUE: float, default np.nan Output invalid value @@ -283,7 +288,7 @@ def compute_tide_corrections(x, y, delta_time, DIRECTORY=None, MODEL=None, amp,ph,D,c = extract_tidal_constants(lon, lat, model.grid_file, model.model_file, model.projection, type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, - grid=model.format) + grid=model.format, apply_flexure=APPLY_FLEXURE) deltat = np.zeros_like(t) elif (model.format == 'netcdf'): amp,ph,D,c = extract_netcdf_constants(lon, lat, model.grid_file, diff --git a/pyTMD/model.py b/pyTMD/model.py index 80f84967..501f4fd0 100644 --- a/pyTMD/model.py +++ b/pyTMD/model.py @@ -7,6 +7,7 @@ UPDATE HISTORY: Updated 05/2022: added ESR CATS2022 to list of models + added attribute for flexure fields being available for model Updated 04/2022: updated docstrings to numpy documentation format include utf-8 encoding in reads to be windows compliant set default directory to None for documentation @@ -49,6 +50,8 @@ class model: HDF5 ``description`` attribute string for output tide heights directory: str or None, default None Working data directory for tide models + flexure: bool + Flexure adjustment field for tide heights is available format: str Model format @@ -109,6 +112,7 @@ def __init__(self, directory=None, **kwargs): self.directory = os.path.expanduser(directory) else: self.directory = os.getcwd() + self.flexure = False # set tide model format self.format = copy.copy(kwargs['format']) 
self.gla12 = None @@ -303,6 +307,8 @@ def elevation(self, m): self.grid_file = self.pathfinder('CATS2022_test.nc') self.model_file = self.pathfinder('CATS2022_test.nc') self.projection = 'CATS2008' + # internal flexure field is available + self.flexure = True # model description and references self.reference = ('https://www.esr.org/research/' 'polar-tide-models/list-of-polar-tide-models/cats2008/') diff --git a/pyTMD/read_tide_model.py b/pyTMD/read_tide_model.py index 36c7c5eb..cfc33da2 100644 --- a/pyTMD/read_tide_model.py +++ b/pyTMD/read_tide_model.py @@ -35,7 +35,7 @@ ATLAS: reading a global solution with localized solutions ESR: combined global or local netCDF4 solution OTIS: combined global or local solution - apply_flexure: apply flexure scaling factor + apply_flexure: apply ice flexure scaling factor to constituents OUTPUTS: amplitude: amplitudes of tidal constituents @@ -158,7 +158,7 @@ def extract_tidal_constants(ilon, ilat, - ``'ESR'``: combined global or local netCDF4 solution - ``'OTIS'``: combined global or local solution apply_flexure: bool, default False - apply flexure scaling factor + Apply ice flexure scaling factor to height constituents Returns ------- diff --git a/scripts/compute_tidal_elevations.py b/scripts/compute_tidal_elevations.py index 718b12d4..9b0e0f2f 100755 --- a/scripts/compute_tidal_elevations.py +++ b/scripts/compute_tidal_elevations.py @@ -54,6 +54,8 @@ -E X, --extrapolate X: Extrapolate with nearest-neighbors -c X, --cutoff X: Extrapolation cutoff in kilometers set to inf to extrapolate for all points + --apply-flexure: Apply ice flexure scaling factor to height constituents + Only valid for models containing flexure fields -V, --verbose: Verbose output of processing run -M X, --mode X: Permission mode of output file @@ -96,6 +98,7 @@ UPDATE HISTORY: Updated 05/2022: added ESR netCDF4 formats to list of model types updated keyword arguments to read tide model programs + added command line option to apply flexure for applicable 
models Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models @@ -176,11 +179,24 @@ def get_projection(attributes, PROJECTION): #-- PURPOSE: read csv, netCDF or HDF5 data #-- compute tides at points and times using tidal model driver algorithms def compute_tidal_elevations(tide_dir, input_file, output_file, - TIDE_MODEL=None, ATLAS_FORMAT='netcdf', GZIP=True, - DEFINITION_FILE=None, FORMAT='csv', VARIABLES=[], HEADER=0, - TYPE='drift', TIME_UNITS='days since 1858-11-17T00:00:00', - TIME_STANDARD='UTC', TIME=None, PROJECTION='4326', METHOD='spline', - EXTRAPOLATE=False, CUTOFF=None, VERBOSE=False, MODE=0o775): + TIDE_MODEL=None, + ATLAS_FORMAT='netcdf', + GZIP=True, + DEFINITION_FILE=None, + FORMAT='csv', + VARIABLES=[], + HEADER=0, + TYPE='drift', + TIME_UNITS='days since 1858-11-17T00:00:00', + TIME_STANDARD='UTC', + TIME=None, + PROJECTION='4326', + METHOD='spline', + EXTRAPOLATE=False, + CUTOFF=None, + APPLY_FLEXURE=False, + VERBOSE=False, + MODE=0o775): #-- create logger for verbosity level loglevel = logging.INFO if VERBOSE else logging.CRITICAL @@ -303,7 +319,7 @@ def compute_tidal_elevations(tide_dir, input_file, output_file, amp,ph,D,c = extract_tidal_constants(lon.flatten(), lat.flatten(), model.grid_file, model.model_file, model.projection, type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, - cutoff=CUTOFF, grid=model.format) + cutoff=CUTOFF, grid=model.format, apply_flexure=APPLY_FLEXURE) deltat = np.zeros((nt)) elif (model.format == 'netcdf'): amp,ph,D,c = extract_netcdf_constants(lon.flatten(), lat.flatten(), @@ -458,6 +474,10 @@ def arguments(): parser.add_argument('--cutoff','-c', type=np.float64, default=10.0, help='Extrapolation cutoff in kilometers') + #-- apply flexure scaling factors to height constituents + parser.add_argument('--apply-flexure', + default=False, action='store_true', + 
help='Apply ice flexure scaling factor to height constituents') #-- verbose output of processing run #-- print information about each input and output file parser.add_argument('--verbose','-V', @@ -479,19 +499,30 @@ def main(): #-- set output file from input filename if not entered if not args.outfile: fileBasename,fileExtension = os.path.splitext(args.infile) - vars = (fileBasename,args.tide,fileExtension) - args.outfile = '{0}_{1}{2}'.format(*vars) + flexure_flag = '_FLEXURE' if args.apply_flexure else '' + vars = (fileBasename,args.tide,flexure_flag,fileExtension) + args.outfile = '{0}_{1}{2}{3}'.format(*vars) #-- run tidal elevation program for input file compute_tidal_elevations(args.directory, args.infile, args.outfile, - TIDE_MODEL=args.tide, ATLAS_FORMAT=args.atlas_format, - GZIP=args.gzip, DEFINITION_FILE=args.definition_file, - FORMAT=args.format, VARIABLES=args.variables, - HEADER=args.header, TYPE=args.type, TIME_UNITS=args.epoch, - TIME=args.deltatime, TIME_STANDARD=args.standard, - PROJECTION=args.projection, METHOD=args.interpolate, - EXTRAPOLATE=args.extrapolate, CUTOFF=args.cutoff, - VERBOSE=args.verbose, MODE=args.mode) + TIDE_MODEL=args.tide, + ATLAS_FORMAT=args.atlas_format, + GZIP=args.gzip, + DEFINITION_FILE=args.definition_file, + FORMAT=args.format, + VARIABLES=args.variables, + HEADER=args.header, + TYPE=args.type, + TIME_UNITS=args.epoch, + TIME=args.deltatime, + TIME_STANDARD=args.standard, + PROJECTION=args.projection, + METHOD=args.interpolate, + EXTRAPOLATE=args.extrapolate, + CUTOFF=args.cutoff, + APPLY_FLEXURE=args.apply_flexure, + VERBOSE=args.verbose, + MODE=args.mode) #-- run main program if __name__ == '__main__': diff --git a/scripts/compute_tides_ICESat2_ATL03.py b/scripts/compute_tides_ICESat2_ATL03.py index a8bb0767..7d050b3a 100644 --- a/scripts/compute_tides_ICESat2_ATL03.py +++ b/scripts/compute_tides_ICESat2_ATL03.py @@ -27,6 +27,8 @@ -E X, --extrapolate X: Extrapolate with nearest-neighbors -c X, --cutoff X: 
Extrapolation cutoff in kilometers set to inf to extrapolate for all points + --apply-flexure: Apply ice flexure scaling factor to height constituents + Only valid for models containing flexure fields -M X, --mode X: Permission mode of directories and files created -V, --verbose: Output information about each created file @@ -63,6 +65,7 @@ UPDATE HISTORY: Updated 05/2022: added ESR netCDF4 formats to list of model types updated keyword arguments to read tide model programs + added command line option to apply flexure for applicable models Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models @@ -120,9 +123,17 @@ #-- PURPOSE: read ICESat-2 geolocated photon data (ATL03) from NSIDC #-- compute tides at points and times using tidal model driver algorithms -def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, - ATLAS_FORMAT=None, GZIP=True, DEFINITION_FILE=None, METHOD='spline', - EXTRAPOLATE=False, CUTOFF=None, VERBOSE=False, MODE=0o775): +def compute_tides_ICESat2(tide_dir, INPUT_FILE, + TIDE_MODEL=None, + ATLAS_FORMAT=None, + GZIP=True, + DEFINITION_FILE=None, + METHOD='spline', + EXTRAPOLATE=False, + CUTOFF=None, + APPLY_FLEXURE=False, + VERBOSE=False, + MODE=0o775): #-- create logger for verbosity level loglevel = logging.INFO if VERBOSE else logging.CRITICAL @@ -140,6 +151,8 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, IS2_atl03_mds,IS2_atl03_attrs,IS2_atl03_beams = read_HDF5_ATL03_main(INPUT_FILE, ATTRIBUTES=True) DIRECTORY = os.path.dirname(INPUT_FILE) + #-- flexure flag if being applied + flexure_flag = '_FLEXURE' if APPLY_FLEXURE and model.flexure else '' #-- extract parameters from ICESat-2 ATLAS HDF5 file name rx = re.compile(r'(processed_)?(ATL\d{2})_(\d{4})(\d{2})(\d{2})(\d{2})' r'(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$') @@ -148,12 +161,12 @@ def 
compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, except: #-- output tide HDF5 file (generic) fileBasename,fileExtension = os.path.splitext(INPUT_FILE) - args = (fileBasename,model.name,fileExtension) - OUTPUT_FILE = '{0}_{1}_TIDES{2}'.format(*args) + args = (fileBasename,model.name,flexure_flag,fileExtension) + OUTPUT_FILE = '{0}_{1}{2}_TIDES{3}'.format(*args) else: #-- output tide HDF5 file for ASAS/NSIDC granules - args = (PRD,model.name,YY,MM,DD,HH,MN,SS,TRK,CYCL,GRAN,RL,VERS,AUX) - file_format = '{0}_{1}_TIDES_{2}{3}{4}{5}{6}{7}_{8}{9}{10}_{11}_{12}{13}.h5' + args = (PRD,model.name,flexure_flag,YY,MM,DD,HH,MN,SS,TRK,CYCL,GRAN,RL,VERS,AUX) + file_format = '{0}_{1}{2}_TIDES_{3}{4}{5}{6}{7}{8}_{9}{10}{11}_{12}_{13}{14}.h5' OUTPUT_FILE = file_format.format(*args) #-- number of GPS seconds between the GPS epoch @@ -210,7 +223,7 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, amp,ph,D,c = extract_tidal_constants(lon, lat, model.grid_file, model.model_file, model.projection, type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, cutoff=CUTOFF, - grid=model.format) + grid=model.format, apply_flexure=APPLY_FLEXURE) deltat = np.zeros_like(tide_time) elif (model.format == 'netcdf'): amp,ph,D,c = extract_netcdf_constants(lon, lat, model.grid_file, @@ -572,6 +585,10 @@ def arguments(): parser.add_argument('--cutoff','-c', type=np.float64, default=10.0, help='Extrapolation cutoff in kilometers') + #-- apply flexure scaling factors to height constituents + parser.add_argument('--apply-flexure', + default=False, action='store_true', + help='Apply ice flexure scaling factor to height constituents') #-- verbosity settings #-- verbose will output information about each output file parser.add_argument('--verbose','-V', @@ -592,11 +609,17 @@ def main(): #-- run for each input ATL03 file for FILE in args.infile: - compute_tides_ICESat2(args.directory, FILE, TIDE_MODEL=args.tide, - ATLAS_FORMAT=args.atlas_format, GZIP=args.gzip, - 
DEFINITION_FILE=args.definition_file, METHOD=args.interpolate, - EXTRAPOLATE=args.extrapolate, CUTOFF=args.cutoff, - VERBOSE=args.verbose, MODE=args.mode) + compute_tides_ICESat2(args.directory, FILE, + TIDE_MODEL=args.tide, + ATLAS_FORMAT=args.atlas_format, + GZIP=args.gzip, + DEFINITION_FILE=args.definition_file, + METHOD=args.interpolate, + EXTRAPOLATE=args.extrapolate, + CUTOFF=args.cutoff, + APPLY_FLEXURE=args.apply_flexure, + VERBOSE=args.verbose, + MODE=args.mode) #-- run main program if __name__ == '__main__': diff --git a/scripts/compute_tides_ICESat2_ATL06.py b/scripts/compute_tides_ICESat2_ATL06.py index 81e5d063..098ef83b 100644 --- a/scripts/compute_tides_ICESat2_ATL06.py +++ b/scripts/compute_tides_ICESat2_ATL06.py @@ -22,6 +22,8 @@ -E X, --extrapolate X: Extrapolate with nearest-neighbors -c X, --cutoff X: Extrapolation cutoff in kilometers set to inf to extrapolate for all points + --apply-flexure: Apply ice flexure scaling factor to height constituents + Only valid for models containing flexure fields -M X, --mode X: Permission mode of directories and files created -V, --verbose: Output information about each created file @@ -58,6 +60,7 @@ UPDATE HISTORY: Updated 05/2022: added ESR netCDF4 formats to list of model types updated keyword arguments to read tide model programs + added command line option to apply flexure for applicable models Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models @@ -117,9 +120,17 @@ #-- PURPOSE: read ICESat-2 land ice data (ATL06) from NSIDC #-- compute tides at points and times using tidal model driver algorithms -def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, - ATLAS_FORMAT=None, GZIP=True, DEFINITION_FILE=None, METHOD='spline', - EXTRAPOLATE=False, CUTOFF=None, VERBOSE=False, MODE=0o775): +def compute_tides_ICESat2(tide_dir, INPUT_FILE, + 
TIDE_MODEL=None, + ATLAS_FORMAT=None, + GZIP=True, + DEFINITION_FILE=None, + METHOD='spline', + EXTRAPOLATE=False, + CUTOFF=None, + APPLY_FLEXURE=False, + VERBOSE=False, + MODE=0o775): #-- create logger for verbosity level loglevel = logging.INFO if VERBOSE else logging.CRITICAL @@ -137,6 +148,8 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, IS2_atl06_mds,IS2_atl06_attrs,IS2_atl06_beams = read_HDF5_ATL06(INPUT_FILE, ATTRIBUTES=True) DIRECTORY = os.path.dirname(INPUT_FILE) + #-- flexure flag if being applied + flexure_flag = '_FLEXURE' if APPLY_FLEXURE and model.flexure else '' #-- extract parameters from ICESat-2 ATLAS HDF5 file name rx = re.compile(r'(processed_)?(ATL\d{2})_(\d{4})(\d{2})(\d{2})(\d{2})' r'(\d{2})(\d{2})_(\d{4})(\d{2})(\d{2})_(\d{3})_(\d{2})(.*?).h5$') @@ -145,12 +158,12 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, except: #-- output tide HDF5 file (generic) fileBasename,fileExtension = os.path.splitext(INPUT_FILE) - args = (fileBasename,model.name,fileExtension) - OUTPUT_FILE = '{0}_{1}_TIDES{2}'.format(*args) + args = (fileBasename,model.name,flexure_flag,fileExtension) + OUTPUT_FILE = '{0}_{1}{2}_TIDES{3}'.format(*args) else: #-- output tide HDF5 file for ASAS/NSIDC granules - args = (PRD,model.name,YY,MM,DD,HH,MN,SS,TRK,CYCL,GRAN,RL,VERS,AUX) - file_format = '{0}_{1}_TIDES_{2}{3}{4}{5}{6}{7}_{8}{9}{10}_{11}_{12}{13}.h5' + args = (PRD,model.name,flexure_flag,YY,MM,DD,HH,MN,SS,TRK,CYCL,GRAN,RL,VERS,AUX) + file_format = '{0}_{1}{2}_TIDES_{3}{4}{5}{6}{7}{8}_{9}{10}{11}_{12}_{13}{14}.h5' OUTPUT_FILE = file_format.format(*args) #-- number of GPS seconds between the GPS epoch @@ -201,7 +214,8 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, amp,ph,D,c = extract_tidal_constants(val['longitude'], val['latitude'], model.grid_file, model.model_file, model.projection, type=model.type, method=METHOD, - extrapolate=EXTRAPOLATE, cutoff=CUTOFF, grid=model.format) + extrapolate=EXTRAPOLATE, 
cutoff=CUTOFF, + grid=model.format, apply_flexure=APPLY_FLEXURE) deltat = np.zeros_like(tide_time) elif (model.format == 'netcdf'): amp,ph,D,c = extract_netcdf_constants(val['longitude'], @@ -573,6 +587,10 @@ def arguments(): parser.add_argument('--cutoff','-c', type=np.float64, default=10.0, help='Extrapolation cutoff in kilometers') + #-- apply flexure scaling factors to height constituents + parser.add_argument('--apply-flexure', + default=False, action='store_true', + help='Apply ice flexure scaling factor to height constituents') #-- verbosity settings #-- verbose will output information about each output file parser.add_argument('--verbose','-V', @@ -593,11 +611,17 @@ def main(): #-- run for each input ATL06 file for FILE in args.infile: - compute_tides_ICESat2(args.directory, FILE, TIDE_MODEL=args.tide, - ATLAS_FORMAT=args.atlas_format, GZIP=args.gzip, - DEFINITION_FILE=args.definition_file, METHOD=args.interpolate, - EXTRAPOLATE=args.extrapolate, CUTOFF=args.cutoff, - VERBOSE=args.verbose, MODE=args.mode) + compute_tides_ICESat2(args.directory, FILE, + TIDE_MODEL=args.tide, + ATLAS_FORMAT=args.atlas_format, + GZIP=args.gzip, + DEFINITION_FILE=args.definition_file, + METHOD=args.interpolate, + EXTRAPOLATE=args.extrapolate, + CUTOFF=args.cutoff, + APPLY_FLEXURE=args.apply_flexure, + VERBOSE=args.verbose, + MODE=args.mode) #-- run main program if __name__ == '__main__': diff --git a/scripts/compute_tides_ICESat2_ATL11.py b/scripts/compute_tides_ICESat2_ATL11.py index a95317e3..55136a5d 100644 --- a/scripts/compute_tides_ICESat2_ATL11.py +++ b/scripts/compute_tides_ICESat2_ATL11.py @@ -22,6 +22,8 @@ -E X, --extrapolate X: Extrapolate with nearest-neighbors -c X, --cutoff X: Extrapolation cutoff in kilometers set to inf to extrapolate for all points + --apply-flexure: Apply ice flexure scaling factor to height constituents + Only valid for models containing flexure fields -M X, --mode X: Permission mode of directories and files created -V, --verbose: Output 
information about each created file @@ -58,6 +60,7 @@ UPDATE HISTORY: Updated 05/2022: added ESR netCDF4 formats to list of model types updated keyword arguments to read tide model programs + added command line option to apply flexure for applicable models Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: added Arctic 2km model (Arc2kmTM) to list of models @@ -101,9 +104,17 @@ #-- PURPOSE: read ICESat-2 annual land ice height data (ATL11) from NSIDC #-- compute tides at points and times using tidal model driver algorithms -def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, - ATLAS_FORMAT=None, GZIP=True, DEFINITION_FILE=None, METHOD='spline', - EXTRAPOLATE=False, CUTOFF=None, VERBOSE=False, MODE=0o775): +def compute_tides_ICESat2(tide_dir, INPUT_FILE, + TIDE_MODEL=None, + ATLAS_FORMAT=None, + GZIP=True, + DEFINITION_FILE=None, + METHOD='spline', + EXTRAPOLATE=False, + CUTOFF=None, + APPLY_FLEXURE=False, + VERBOSE=False, + MODE=0o775): #-- create logger for verbosity level loglevel = logging.INFO if VERBOSE else logging.CRITICAL @@ -121,6 +132,8 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, IS2_atl11_mds,IS2_atl11_attrs,IS2_atl11_pairs = read_HDF5_ATL11(INPUT_FILE, ATTRIBUTES=True, CROSSOVERS=True) DIRECTORY = os.path.dirname(INPUT_FILE) + #-- flexure flag if being applied + flexure_flag = '_FLEXURE' if APPLY_FLEXURE and model.flexure else '' #-- extract parameters from ICESat-2 ATLAS HDF5 file name rx = re.compile(r'(processed_)?(ATL\d{2})_(\d{4})(\d{2})_(\d{2})(\d{2})_' r'(\d{3})_(\d{2})(.*?).h5$') @@ -129,12 +142,12 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, except: #-- output tide HDF5 file (generic) fileBasename,fileExtension = os.path.splitext(INPUT_FILE) - args = (fileBasename,model.name,fileExtension) - OUTPUT_FILE = '{0}_{1}_TIDES{2}'.format(*args) + args = 
(fileBasename,model.name,flexure_flag,fileExtension) + OUTPUT_FILE = '{0}_{1}{2}_TIDES{3}'.format(*args) else: #-- output tide HDF5 file for ASAS/NSIDC granules - args = (PRD,model.name,TRK,GRAN,SCYC,ECYC,RL,VERS,AUX) - file_format = '{0}_{1}_TIDES_{2}{3}_{4}{5}_{6}_{7}{8}.h5' + args = (PRD,model.name,flexure_flag,TRK,GRAN,SCYC,ECYC,RL,VERS,AUX) + file_format = '{0}_{1}{2}_TIDES_{3}{4}_{5}{6}_{7}_{8}{9}.h5' OUTPUT_FILE = file_format.format(*args) #-- number of GPS seconds between the GPS epoch @@ -222,7 +235,8 @@ def compute_tides_ICESat2(tide_dir, INPUT_FILE, TIDE_MODEL=None, amp,ph,D,c = extract_tidal_constants(longitude[track], latitude[track], model.grid_file, model.model_file, model.projection, type=model.type, method=METHOD, - extrapolate=EXTRAPOLATE, cutoff=CUTOFF, grid=model.format) + extrapolate=EXTRAPOLATE, cutoff=CUTOFF, + grid=model.format, apply_flexure=APPLY_FLEXURE) deltat = np.zeros_like(tide_time) elif (model.format == 'netcdf'): amp,ph,D,c = extract_netcdf_constants(longitude[track], @@ -734,6 +748,10 @@ def arguments(): parser.add_argument('--cutoff','-c', type=np.float64, default=10.0, help='Extrapolation cutoff in kilometers') + #-- apply flexure scaling factors to height constituents + parser.add_argument('--apply-flexure', + default=False, action='store_true', + help='Apply ice flexure scaling factor to height constituents') #-- verbosity settings #-- verbose will output information about each output file parser.add_argument('--verbose','-V', @@ -754,11 +772,17 @@ def main(): #-- run for each input ATL11 file for FILE in args.infile: - compute_tides_ICESat2(args.directory, FILE, TIDE_MODEL=args.tide, - ATLAS_FORMAT=args.atlas_format, GZIP=args.gzip, - DEFINITION_FILE=args.definition_file, METHOD=args.interpolate, - EXTRAPOLATE=args.extrapolate, CUTOFF=args.cutoff, - VERBOSE=args.verbose, MODE=args.mode) + compute_tides_ICESat2(args.directory, FILE, + TIDE_MODEL=args.tide, + ATLAS_FORMAT=args.atlas_format, + GZIP=args.gzip, + 
DEFINITION_FILE=args.definition_file, + METHOD=args.interpolate, + EXTRAPOLATE=args.extrapolate, + CUTOFF=args.cutoff, + APPLY_FLEXURE=args.apply_flexure, + VERBOSE=args.verbose, + MODE=args.mode) #-- run main program if __name__ == '__main__': diff --git a/scripts/compute_tides_ICESat_GLA12.py b/scripts/compute_tides_ICESat_GLA12.py index a3423e4d..27956239 100644 --- a/scripts/compute_tides_ICESat_GLA12.py +++ b/scripts/compute_tides_ICESat_GLA12.py @@ -26,6 +26,8 @@ -E X, --extrapolate X: Extrapolate with nearest-neighbors -c X, --cutoff X: Extrapolation cutoff in kilometers set to inf to extrapolate for all points + --apply-flexure: Apply ice flexure scaling factor to height constituents + Only valid for models containing flexure fields -M X, --mode X: Permission mode of directories and files created -V, --verbose: Output information about each created file @@ -62,6 +64,7 @@ UPDATE HISTORY: Updated 05/2022: added ESR netCDF4 formats to list of model types updated keyword arguments to read tide model programs + added command line option to apply flexure for applicable models Updated 04/2022: use argparse descriptions within documentation Updated 03/2022: using static decorators to define available models Updated 02/2022: save ICESat campaign attribute to output file @@ -117,9 +120,17 @@ #-- PURPOSE: read ICESat ice sheet HDF5 elevation data (GLAH12) from NSIDC #-- compute tides at points and times using tidal model driver algorithms -def compute_tides_ICESat(tide_dir, INPUT_FILE, TIDE_MODEL=None, - ATLAS_FORMAT=None, GZIP=True, DEFINITION_FILE=None, METHOD='spline', - EXTRAPOLATE=False, CUTOFF=None, VERBOSE=False, MODE=0o775): +def compute_tides_ICESat(tide_dir, INPUT_FILE, + TIDE_MODEL=None, + ATLAS_FORMAT=None, + GZIP=True, + DEFINITION_FILE=None, + METHOD='spline', + EXTRAPOLATE=False, + CUTOFF=None, + APPLY_FLEXURE=False, + VERBOSE=False, + MODE=0o775): #-- create logger for verbosity level loglevel = logging.INFO if VERBOSE else logging.CRITICAL @@ -135,7 
+146,8 @@ def compute_tides_ICESat(tide_dir, INPUT_FILE, TIDE_MODEL=None, #-- get directory from INPUT_FILE logger.info('{0} -->'.format(INPUT_FILE)) DIRECTORY = os.path.dirname(INPUT_FILE) - + #-- flexure flag if being applied + flexure_flag = '_FLEXURE' if APPLY_FLEXURE and model.flexure else '' #-- compile regular expression operator for extracting information from file rx = re.compile((r'GLAH(\d{2})_(\d{3})_(\d{1})(\d{1})(\d{2})_(\d{3})_' r'(\d{4})_(\d{1})_(\d{2})_(\d{4})\.H5'), re.VERBOSE) @@ -157,12 +169,12 @@ def compute_tides_ICESat(tide_dir, INPUT_FILE, TIDE_MODEL=None, except: #-- output tide HDF5 file (generic) fileBasename,fileExtension = os.path.splitext(INPUT_FILE) - args = (fileBasename,model.name,fileExtension) - OUTPUT_FILE = '{0}_{1}_TIDES{2}'.format(*args) + args = (fileBasename,model.name,flexure_flag,fileExtension) + OUTPUT_FILE = '{0}_{1}{2}_TIDES{3}'.format(*args) else: #-- output tide HDF5 file for NSIDC granules - args = (PRD,RL,model.name,RGTP,ORB,INST,CYCL,TRK,SEG,GRAN,TYPE) - file_format = 'GLAH{0}_{1}_{2}_TIDES_{3}{4}{5}_{6}_{7}_{8}_{9}_{10}.h5' + args = (PRD,RL,model.name,flexure_flag,RGTP,ORB,INST,CYCL,TRK,SEG,GRAN,TYPE) + file_format = 'GLAH{0}_{1}_{2}{3}_TIDES_{4}{5}{6}_{7}_{8}_{9}_{10}_{11}.h5' OUTPUT_FILE = file_format.format(*args) #-- read GLAH12 HDF5 file @@ -198,7 +210,7 @@ def compute_tides_ICESat(tide_dir, INPUT_FILE, TIDE_MODEL=None, amp,ph,D,c = extract_tidal_constants(lon_40HZ, lat_40HZ, model.grid_file, model.model_file, model.projection, type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, - cutoff=CUTOFF, grid=model.format) + cutoff=CUTOFF, grid=model.format, apply_flexure=APPLY_FLEXURE) deltat = np.zeros_like(tide_time) elif (model.format == 'netcdf'): amp,ph,D,c = extract_netcdf_constants(lon_40HZ, lat_40HZ, @@ -478,6 +490,10 @@ def arguments(): parser.add_argument('--cutoff','-c', type=np.float64, default=10.0, help='Extrapolation cutoff in kilometers') + #-- apply flexure scaling factors to height constituents 
+ parser.add_argument('--apply-flexure', + default=False, action='store_true', + help='Apply ice flexure scaling factor to height constituents') #-- verbosity settings #-- verbose will output information about each output file parser.add_argument('--verbose','-V', @@ -498,11 +514,17 @@ def main(): #-- run for each input GLA12 file for FILE in args.infile: - compute_tides_ICESat(args.directory, FILE, TIDE_MODEL=args.tide, - ATLAS_FORMAT=args.atlas_format, GZIP=args.gzip, - DEFINITION_FILE=args.definition_file, METHOD=args.interpolate, - EXTRAPOLATE=args.extrapolate, CUTOFF=args.cutoff, - VERBOSE=args.verbose, MODE=args.mode) + compute_tides_ICESat(args.directory, FILE, + TIDE_MODEL=args.tide, + ATLAS_FORMAT=args.atlas_format, + GZIP=args.gzip, + DEFINITION_FILE=args.definition_file, + METHOD=args.interpolate, + EXTRAPOLATE=args.extrapolate, + CUTOFF=args.cutoff, + APPLY_FLEXURE=args.apply_flexure, + VERBOSE=args.verbose, + MODE=args.mode) #-- run main program if __name__ == '__main__': diff --git a/scripts/compute_tides_icebridge_data.py b/scripts/compute_tides_icebridge_data.py index 70bda774..c30f762e 100644 --- a/scripts/compute_tides_icebridge_data.py +++ b/scripts/compute_tides_icebridge_data.py @@ -28,6 +28,8 @@ -E X, --extrapolate X: Extrapolate with nearest-neighbors -c X, --cutoff X: Extrapolation cutoff in kilometers set to inf to extrapolate for all points + --apply-flexure: Apply ice flexure scaling factor to height constituents + Only valid for models containing flexure fields -M X, --mode X: Permission mode of directories and files created -V, --verbose: Output information about each created file @@ -66,6 +68,7 @@ UPDATE HISTORY: Updated 05/2022: added ESR netCDF4 formats to list of model types updated keyword arguments to read tide model programs + added command line option to apply flexure for applicable models Updated 04/2022: include utf-8 encoding in reads to be windows compliant use argparse descriptions within sphinx documentation Updated 
03/2022: using static decorators to define available models @@ -419,8 +422,15 @@ def read_LVIS_HDF5_file(input_file, input_subsetter): #-- PURPOSE: read Operation IceBridge data from NSIDC #-- compute tides at points and times using tidal model driver algorithms def compute_tides_icebridge_data(tide_dir, arg, TIDE_MODEL, - ATLAS_FORMAT=None, GZIP=True, DEFINITION_FILE=None, METHOD='spline', - EXTRAPOLATE=False, CUTOFF=None, VERBOSE=False, MODE=0o775): + ATLAS_FORMAT=None, + GZIP=True, + DEFINITION_FILE=None, + METHOD='spline', + EXTRAPOLATE=False, + CUTOFF=None, + APPLY_FLEXURE=False, + VERBOSE=False, + MODE=0o775): #-- create logger for verbosity level loglevel = logging.INFO if VERBOSE else logging.CRITICAL @@ -532,7 +542,7 @@ def compute_tides_icebridge_data(tide_dir, arg, TIDE_MODEL, amp,ph,D,c = extract_tidal_constants(dinput['lon'], dinput['lat'], model.grid_file, model.model_file, model.projection, type=model.type, method=METHOD, extrapolate=EXTRAPOLATE, - cutoff=CUTOFF, grid=model.format) + cutoff=CUTOFF, grid=model.format, apply_flexure=APPLY_FLEXURE) deltat = np.zeros_like(t) elif model.format in ('netcdf'): amp,ph,D,c = extract_netcdf_constants(dinput['lon'], dinput['lat'], @@ -571,9 +581,11 @@ def compute_tides_icebridge_data(tide_dir, arg, TIDE_MODEL, hem_flag = {'N':'GR','S':'AN'} #-- use starting second to distinguish between files for the day JJ1 = np.min(dinput['time']) % 86400 + #-- flexure flag if being applied + flexure_flag = '_FLEXURE' if APPLY_FLEXURE and model.flexure else '' #-- output file format - args = (hem_flag[HEM],model.name,OIB,YY1,MM1,DD1,JJ1) - FILENAME = '{0}_NASA_{1}_TIDES_WGS84_{2}{3}{4}{5}{6:05.0f}.H5'.format(*args) + args = (hem_flag[HEM],model.name,flexure_flag,OIB,YY1,MM1,DD1,JJ1) + FILENAME = '{0}_NASA_{1}{2}_TIDES_WGS84_{3}{4}{5}{6}{7:05.0f}.H5'.format(*args) #-- print file information logger.info('\t{0}'.format(FILENAME)) @@ -703,6 +715,10 @@ def arguments(): parser.add_argument('--cutoff','-c', type=np.float64, 
default=10.0, help='Extrapolation cutoff in kilometers') + #-- apply flexure scaling factors to height constituents + parser.add_argument('--apply-flexure', + default=False, action='store_true', + help='Apply ice flexure scaling factor to height constituents') #-- verbosity settings #-- verbose will output information about each output file parser.add_argument('--verbose','-V', @@ -724,10 +740,16 @@ def main(): #-- run for each input Operation IceBridge file for arg in args.infile: compute_tides_icebridge_data(args.directory, arg, - TIDE_MODEL=args.tide, ATLAS_FORMAT=args.atlas_format, - GZIP=args.gzip, DEFINITION_FILE=args.definition_file, - METHOD=args.interpolate, EXTRAPOLATE=args.extrapolate, - CUTOFF=args.cutoff, VERBOSE=args.verbose, MODE=args.mode) + TIDE_MODEL=args.tide, + ATLAS_FORMAT=args.atlas_format, + GZIP=args.gzip, + DEFINITION_FILE=args.definition_file, + METHOD=args.interpolate, + EXTRAPOLATE=args.extrapolate, + CUTOFF=args.cutoff, + APPLY_FLEXURE=args.apply_flexure, + VERBOSE=args.verbose, + MODE=args.mode) #-- run main program if __name__ == '__main__': From 6e5e77c71b1b3b7a5b4da5bd48f31251cf08c6cf Mon Sep 17 00:00:00 2001 From: tsutterley Date: Thu, 2 Jun 2022 14:11:18 -0700 Subject: [PATCH 08/14] feat: added Greenland 1km model (Gr1kmTM) to list of models --- doc/source/user_guide/arcticdata_tides.rst | 2 + pyTMD/model.py | 55 ++++++++++++++++++---- scripts/arcticdata_tides.py | 35 +++++++++----- scripts/compute_tidal_currents.py | 17 ------- 4 files changed, 71 insertions(+), 38 deletions(-) diff --git a/doc/source/user_guide/arcticdata_tides.rst b/doc/source/user_guide/arcticdata_tides.rst index f7f3a009..e99226f2 100644 --- a/doc/source/user_guide/arcticdata_tides.rst +++ b/doc/source/user_guide/arcticdata_tides.rst @@ -7,6 +7,8 @@ arcticdata_tides.py * `AODTM-5 `_ * `AOTIM-5 `_ * `AOTIM-5-2018 `_ + * `Arc2kmTM `_ + * `Gr1kmTM `_ `Source code`__ diff --git a/pyTMD/model.py b/pyTMD/model.py index 501f4fd0..1094ada6 100644 --- a/pyTMD/model.py 
+++ b/pyTMD/model.py @@ -1,11 +1,12 @@ #!/usr/bin/env python u""" model.py -Written by Tyler Sutterley (05/2022) +Written by Tyler Sutterley (06/2022) Retrieves tide model parameters for named tide models and from model definition files UPDATE HISTORY: + Updated 06/2022: added Greenland 1km model (Gr1kmTM) to list of models Updated 05/2022: added ESR CATS2022 to list of models added attribute for flexure fields being available for model Updated 04/2022: updated docstrings to numpy documentation format @@ -214,6 +215,10 @@ def grid(self, m): self.format = 'OTIS' self.model_directory = os.path.join(self.directory,'Arc2kmTM') self.grid_file = self.pathfinder('grid_Arc2kmTM_v1') + elif (m == 'Gr1kmTM'): + self.format = 'OTIS' + self.model_directory = os.path.join(self.directory,'Gr1kmTM') + self.grid_file = self.pathfinder('grid_Gr1kmTM_v1') elif (m == 'Gr1km-v2'): self.format = 'OTIS' self.model_directory = os.path.join(self.directory,'greenlandTMD_v2') @@ -626,7 +631,28 @@ def elevation(self, m): self.projection = '3413' self.version = 'v1' # model description and references - self.reference = 'https://doi.org/10.18739/A2PV6B79W' + self.reference = 'https://doi.org/10.18739/A2D21RK6K' + self.atl03 = 'tide_ocean' + self.atl06 = 'tide_ocean' + self.atl07 = 'height_segment_ocean' + self.atl10 = 'height_segment_ocean' + self.atl11 = 'tide_ocean' + self.atl12 = 'tide_ocean_seg' + self.gla12 = 'd_ocElv' + self.variable = 'tide_ocean' + self.long_name = "Ocean Tide" + self.description = ("Ocean Tides including diurnal and " + "semi-diurnal (harmonic analysis), and longer period " + "tides (dynamic and self-consistent equilibrium).") + elif (m == 'Gr1kmTM'): + self.format = 'OTIS' + self.model_directory = os.path.join(self.directory,'Gr1kmTM') + self.grid_file = self.pathfinder('grid_Gr1kmTM_v1') + self.model_file = self.pathfinder('h_Gr1kmTM_v1') + self.projection = '3413' + self.version = 'v1' + # model description and references + self.reference = 
'https://doi.org/10.18739/A2B853K18' self.atl03 = 'tide_ocean' self.atl06 = 'tide_ocean' self.atl07 = 'height_segment_ocean' @@ -1177,7 +1203,16 @@ def current(self, m): self.projection = '3413' self.version = 'v1' # model description and references - self.reference = 'https://doi.org/10.18739/A2PV6B79W' + self.reference = 'https://doi.org/10.18739/A2D21RK6K' + elif (m == 'Gr1kmTM'): + self.format = 'OTIS' + self.model_directory = os.path.join(self.directory,'Gr1kmTM') + self.grid_file = self.pathfinder('grid_Gr1kmTM_v1') + self.model_file = dict(u=self.pathfinder(self.pathfinder('UV_Gr1kmTM_v1'))) + self.projection = '3413' + self.version = 'v1' + # model description and references + self.reference = 'https://doi.org/10.18739/A2B853K18' elif (m == 'Gr1km-v2'): self.format = 'OTIS' self.model_directory = os.path.join(self.directory,'greenlandTMD_v2') @@ -1285,7 +1320,8 @@ def arctic_ocean(): """ Returns list of Arctic ocean tide elevation models """ - return ['AODTM-5','AOTIM-5','AOTIM-5-2018','Arc2kmTM','Gr1km-v2'] + return ['AODTM-5','AOTIM-5','AOTIM-5-2018','Arc2kmTM', + 'Gr1kmTM','Gr1km-v2'] @staticmethod def arctic_load(): @@ -1299,7 +1335,8 @@ def arctic_current(): """ Returns list of Arctic tidal current models """ - return ['AODTM-5','AOTIM-5','AOTIM-5-2018','Arc2kmTM','Gr1km-v2'] + return ['AODTM-5','AOTIM-5','AOTIM-5-2018','Arc2kmTM', + 'Gr1kmTM','Gr1km-v2'] @staticmethod def ocean_elevation(): @@ -1309,8 +1346,8 @@ def ocean_elevation(): return ['CATS0201','CATS2008','CATS2022','TPXO9-atlas', 'TPXO9-atlas-v2','TPXO9-atlas-v3','TPXO9-atlas-v4', 'TPXO9-atlas-v5','TPXO9.1','TPXO8-atlas','TPXO7.2', - 'AODTM-5','AOTIM-5','AOTIM-5-2018','Arc2kmTM','Gr1km-v2', - 'GOT4.7','GOT4.8','GOT4.10','FES2014','EOT20'] + 'AODTM-5','AOTIM-5','AOTIM-5-2018','Arc2kmTM','Gr1kmTM', + 'Gr1km-v2','GOT4.7','GOT4.8','GOT4.10','FES2014','EOT20'] @staticmethod def load_elevation(): @@ -1329,7 +1366,7 @@ def ocean_current(): 'TPXO9-atlas-v2','TPXO9-atlas-v3','TPXO9-atlas-v4', 
'TPXO9-atlas-v5','TPXO9.1','TPXO8-atlas','TPXO7.2', 'AODTM-5','AOTIM-5','AOTIM-5-2018', - 'Arc2kmTM','Gr1km-v2','FES2014'] + 'Arc2kmTM','Gr1kmTM','Gr1km-v2','FES2014'] @staticmethod def OTIS(): @@ -1338,7 +1375,7 @@ def OTIS(): """ return ['CATS0201','CATS2008','CATS2008_load','TPXO9.1', 'TPXO7.2','TPXO7.2_load','AODTM-5','AOTIM-5', - 'AOTIM-5-2018','Arc2kmTM','Gr1km-v2',] + 'AOTIM-5-2018','Arc2kmTM','Gr1kmTM','Gr1km-v2'] @staticmethod def ATLAS_compact(): diff --git a/scripts/arcticdata_tides.py b/scripts/arcticdata_tides.py index e6ca963b..8c0e94e9 100644 --- a/scripts/arcticdata_tides.py +++ b/scripts/arcticdata_tides.py @@ -1,14 +1,17 @@ #!/usr/bin/env python u""" arcticdata_tides.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (06/2022) Download Arctic Ocean Tide Models from the NSF ArcticData archive + AODTM-5: https://arcticdata.io/catalog/view/doi:10.18739/A2901ZG3N AOTIM-5: https://arcticdata.io/catalog/view/doi:10.18739/A2S17SS80 AOTIM-5-2018: https://arcticdata.io/catalog/view/doi:10.18739/A21R6N14K +Arc2kmTM: https://arcticdata.io/catalog/view/doi:10.18739/A2D21RK6K +Gr1kmTM: https://arcticdata.io/catalog/view/doi:10.18739/A2B853K18 CALLING SEQUENCE: - python arcticdata_tides.py --tide=AOTIM-5-2018 + python arcticdata_tides.py --tide=Gr1kmTM COMMAND LINE OPTIONS: --help: list the command line options @@ -17,6 +20,8 @@ AODTM-5 AOTIM-5 AOTIM-5-2018 + Arc2kmTM + Gr1kmTM -M X, --mode X: Local permissions mode of the files downloaded PYTHON DEPENDENCIES: @@ -27,6 +32,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 06/2022: added Greenland 1km model (Gr1kmTM) to list of models Updated 04/2022: use argparse descriptions within documentation Updated 10/2021: using python logging for handling verbose output Updated 07/2021: can use prefix files to define command line arguments @@ -35,10 +41,8 @@ """ from __future__ import print_function -import sys import os import re -import time import 
logging import zipfile import argparse @@ -46,21 +50,26 @@ import pyTMD.utilities #-- PURPOSE: Download Arctic Ocean Tide Models from the NSF ArcticData archive -def arcticdata_tides(MODEL,DIRECTORY=None,MODE=0o775): +def arcticdata_tides(MODEL, DIRECTORY=None, MODE=0o775): #-- create logger for verbosity level logger = pyTMD.utilities.build_logger(__name__,level=logging.INFO) - #-- doi for each model + #-- digital object identifier (doi) for each Arctic tide model DOI = {} DOI['AODTM-5'] = '10.18739/A2901ZG3N' DOI['AOTIM-5'] = '10.18739/A2S17SS80' DOI['AOTIM-5-2018'] = '10.18739/A21R6N14K' - #-- local subdirectory for each model + DOI['Arc2kmTM'] = '10.18739/A2D21RK6K' + DOI['Gr1kmTM'] = '10.18739/A2B853K18' + #-- local subdirectory for each Arctic tide model LOCAL = {} LOCAL['AODTM-5'] = 'aodtm5_tmd' LOCAL['AOTIM-5'] = 'aotim5_tmd' LOCAL['AOTIM-5-2018'] = 'Arc5km2018' + LOCAL['Arc2kmTM'] = 'Arc2kmTM' + LOCAL['Gr1kmTM'] = 'Gr1kmTM' + #-- recursively create directories if non-existent if not os.access(os.path.join(DIRECTORY,LOCAL[MODEL]), os.F_OK): os.makedirs(os.path.join(DIRECTORY,LOCAL[MODEL]), MODE) @@ -71,8 +80,8 @@ def arcticdata_tides(MODEL,DIRECTORY=None,MODE=0o775): pyTMD.utilities.quote_plus(posixpath.join('application','bagit-097')), pyTMD.utilities.quote_plus(resource_map_doi)] #-- download zipfile from host - zfile = zipfile.ZipFile(pyTMD.utilities.from_http(HOST)) logger.info('{0} -->\n'.format(posixpath.join(*HOST))) + zfile = zipfile.ZipFile(pyTMD.utilities.from_http(HOST)) #-- find model files within zip file rx = re.compile('(grid|h[0]?|UV[0]?|Model|xy)_(.*?)',re.VERBOSE) members = [m for m in zfile.filelist if rx.search(m.filename)] @@ -81,7 +90,7 @@ def arcticdata_tides(MODEL,DIRECTORY=None,MODE=0o775): #-- strip directories from member filename m.filename = posixpath.basename(m.filename) local_file = os.path.join(DIRECTORY,LOCAL[MODEL],m.filename) - logger.info('\t{0}\n'.format(local_file)) + logger.info(local_file) #-- extract file 
zfile.extract(m, path=os.path.join(DIRECTORY,LOCAL[MODEL])) #-- change permissions mode @@ -106,8 +115,8 @@ def arguments(): help='Working data directory') #-- Arctic Ocean tide model to download parser.add_argument('--tide','-T', - metavar='TIDE', type=str, nargs='+', default=['AOTIM-5-2018'], - choices=('AODTM-5','AOTIM-5','AOTIM-5-2018'), + metavar='TIDE', type=str, nargs='+', default=['Gr1kmTM'], + choices=('AODTM-5','AOTIM-5','AOTIM-5-2018','Arc2kmTM','Gr1kmTM'), help='Arctic Ocean tide model to download') #-- permissions mode of the local directories and files (number in octal) parser.add_argument('--mode','-M', @@ -125,7 +134,9 @@ def main(): #-- check internet connection before attempting to run program if pyTMD.utilities.check_connection('https://arcticdata.io'): for m in args.tide: - arcticdata_tides(m,DIRECTORY=args.directory,MODE=args.mode) + arcticdata_tides(m, + DIRECTORY=args.directory, + MODE=args.mode) #-- run main program if __name__ == '__main__': diff --git a/scripts/compute_tidal_currents.py b/scripts/compute_tidal_currents.py index c9318962..c53dc3c5 100755 --- a/scripts/compute_tidal_currents.py +++ b/scripts/compute_tidal_currents.py @@ -19,23 +19,6 @@ COMMAND LINE OPTIONS: -D X, --directory X: Working data directory -T X, --tide X: Tide model to use in calculating currents - CATS0201 - CATS2008 - TPXO9-atlas - TPXO9-atlas-v2 - TPXO9-atlas-v3 - TPXO9-atlas-v4 - TPXO9-atlas-v5 - TPXO9.1 - TPXO8-atlas - TPXO7.2 - TPXO7.2_load - AODTM-5 - AOTIM-5 - AOTIM-5-2018 - Arc2kmTM - Gr1km-v2 - FES2014 --atlas-format X: ATLAS tide model format (OTIS, netcdf) --gzip, -G: Tide model files are gzip compressed --definition-file X: Model definition file for use in calculating currents From d5fa85c9ece9d4f95965cc4fce345c0c44dc0037 Mon Sep 17 00:00:00 2001 From: tsutterley Date: Tue, 7 Jun 2022 13:42:39 -0700 Subject: [PATCH 09/14] unit updates in the ESR netCDF4 format --- README.rst | 2 +- .../getting_started/Getting-Started.rst | 2 +- 
doc/source/getting_started/Resources.rst | 2 +- pyTMD/load_constituent.py | 2 +- pyTMD/model.py | 25 +++++------------- pyTMD/read_tide_model.py | 26 +++++++++---------- 6 files changed, 23 insertions(+), 36 deletions(-) diff --git a/README.rst b/README.rst index e703aabe..d63702ef 100644 --- a/README.rst +++ b/README.rst @@ -45,7 +45,7 @@ Python-based tidal prediction software that reads OTIS, GOT and FES formatted ti - `ESR Tide Model Driver (TMD) Matlab Toolbox `_ - `OSU Global and Regional Tide Models `_ - `ESR Polar Tide Models `_ -- `A Global Ocean Tide Model From TOPEX/POSEIDON Altimetry: GOT99.2 `_ +- `A Global Ocean Tide Model From TOPEX/POSEIDON Altimetry: GOT99.2 `_ - `Finite Element Solution (FES) tide models `_ - `Delta times from US Naval Observatory (USNO) Earth Orientation Products `_ - `Delta times from NASA Crustal Dynamics Data Information System (CDDIS) `_ diff --git a/doc/source/getting_started/Getting-Started.rst b/doc/source/getting_started/Getting-Started.rst index a1e5f094..d2930cfe 100644 --- a/doc/source/getting_started/Getting-Started.rst +++ b/doc/source/getting_started/Getting-Started.rst @@ -251,7 +251,7 @@ References .. [Padman2018] L. Padman, M. R. Siegfried, and H. A. Fricker, "Ocean Tide Influences on the Antarctic and Greenland Ice Sheets", *Reviews of Geophysics*, 56, (2018). `doi: 10.1002/2016RG000546 `_ -.. [Ray1999] R. D. Ray, "A Global Ocean Tide Model From TOPEX/POSEIDON Altimetry: GOT99.2", *NASA Technical Memorandum*, `NASA/TM--1999-209478 `_. +.. [Ray1999] R. D. Ray, "A Global Ocean Tide Model From TOPEX/POSEIDON Altimetry: GOT99.2", *NASA Technical Memorandum*, `NASA/TM--1999-209478 `_. .. [Stammer2014] D. Stammer et al., "Accuracy assessment of global barotropic ocean tide models", *Reviews of Geophysics*, 52, 243--282, (2014). 
`doi: 10.1002/2014RG000450 `_ diff --git a/doc/source/getting_started/Resources.rst b/doc/source/getting_started/Resources.rst index 48f78596..75c0d0e0 100644 --- a/doc/source/getting_started/Resources.rst +++ b/doc/source/getting_started/Resources.rst @@ -8,7 +8,7 @@ Ocean and Load Tide Resources - `OSU Global and Regional Tide Models `_ - `ESR Polar Tide Models `_ - `Ocean Tides around Antarctica and in the Southern Ocean `_ -- `A Global Ocean Tide Model From TOPEX/POSEIDON Altimetry: GOT99.2 `_ +- `A Global Ocean Tide Model From TOPEX/POSEIDON Altimetry: GOT99.2 `_ - `Finite Element Solution (FES) tide models `_ - `Antarctic Tide Gauge Database `_ - `Delta times from US Naval Observatory (USNO) Earth Orientation Products `_ diff --git a/pyTMD/load_constituent.py b/pyTMD/load_constituent.py index dcfe0928..367e8a85 100644 --- a/pyTMD/load_constituent.py +++ b/pyTMD/load_constituent.py @@ -80,7 +80,7 @@ def load_constituent(c): 5.620755e-04,2.134402e-04,4.363323e-04,1.503693e-04,2.081166e-04]) #-- Astronomical arguments (relative to t0 = 1 Jan 0:00 1992) #-- phases for each constituent are referred to the time when the phase of - #-- the forcing for that constituent is zero on the Greenich meridian + #-- the forcing for that constituent is zero on the Greenwich meridian phase_all = np.array([1.731557546,0.000000000,0.173003674,1.558553872, 6.050721243,6.110181633,3.487600001,5.877717569,4.086699633, 3.463115091,5.427136701,0.553986502,0.052841931,2.137025284, diff --git a/pyTMD/model.py b/pyTMD/model.py index 1094ada6..8ca0d0e2 100644 --- a/pyTMD/model.py +++ b/pyTMD/model.py @@ -7,6 +7,7 @@ UPDATE HISTORY: Updated 06/2022: added Greenland 1km model (Gr1kmTM) to list of models + updated citation url for Global Ocean Tide (GOT) models Updated 05/2022: added ESR CATS2022 to list of models added attribute for flexure fields being available for model Updated 04/2022: updated docstrings to numpy documentation format @@ -696,9 +697,7 @@ def elevation(self, m): 
self.scale = 1.0/100.0 self.version = '4.7' # model description and references - self.reference = ('https://denali.gsfc.nasa.gov/' - 'personal_pages/ray/MiscPubs/' - '19990089548_1999150788.pdf') + self.reference = 'https://ntrs.nasa.gov/citations/19990089548' self.atl03 = 'tide_ocean' self.atl06 = 'tide_ocean' self.atl07 = 'height_segment_ocean' @@ -723,9 +722,7 @@ def elevation(self, m): self.scale = 1.0/1000.0 self.version = '4.7' # model description and references - self.reference = ('https://denali.gsfc.nasa.gov/' - 'personal_pages/ray/MiscPubs/' - '19990089548_1999150788.pdf') + self.reference = 'https://ntrs.nasa.gov/citations/19990089548' self.atl03 = 'tide_load' self.atl06 = 'tide_load' self.atl07 = 'height_segment_load' @@ -747,9 +744,7 @@ def elevation(self, m): self.scale = 1.0/100.0 self.version = '4.8' # model description and references - self.reference = ('https://denali.gsfc.nasa.gov/' - 'personal_pages/ray/MiscPubs/' - '19990089548_1999150788.pdf') + self.reference = 'https://ntrs.nasa.gov/citations/19990089548' self.atl03 = 'tide_ocean' self.atl06 = 'tide_ocean' self.atl07 = 'height_segment_ocean' @@ -774,9 +769,7 @@ def elevation(self, m): self.scale = 1.0/1000.0 self.version = '4.8' # model description and references - self.reference = ('https://denali.gsfc.nasa.gov/' - 'personal_pages/ray/MiscPubs/' - '19990089548_1999150788.pdf') + self.reference = 'https://ntrs.nasa.gov/citations/19990089548' self.atl03 = 'tide_load' self.atl06 = 'tide_load' self.atl07 = 'height_segment_load' @@ -798,9 +791,7 @@ def elevation(self, m): self.scale = 1.0/100.0 self.version = '4.10' # model description and references - self.reference = ('https://denali.gsfc.nasa.gov/' - 'personal_pages/ray/MiscPubs/' - '19990089548_1999150788.pdf') + self.reference = 'https://ntrs.nasa.gov/citations/19990089548' self.atl03 = 'tide_ocean' self.atl06 = 'tide_ocean' self.atl07 = 'height_segment_ocean' @@ -825,9 +816,7 @@ def elevation(self, m): self.scale = 1.0/1000.0 self.version 
= '4.10' # model description and references - self.reference = ('https://denali.gsfc.nasa.gov/' - 'personal_pages/ray/MiscPubs/' - '19990089548_1999150788.pdf') + self.reference = 'https://ntrs.nasa.gov/citations/19990089548' self.atl03 = 'tide_load' self.atl06 = 'tide_load' self.atl07 = 'height_segment_load' diff --git a/pyTMD/read_tide_model.py b/pyTMD/read_tide_model.py index cfc33da2..381e192e 100644 --- a/pyTMD/read_tide_model.py +++ b/pyTMD/read_tide_model.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" -read_tide_model.py (05/2022) +read_tide_model.py (06/2022) Reads files for a tidal model and makes initial calculations to run tide program Includes functions to extract tidal harmonic constants from OTIS tide models for given locations @@ -58,6 +58,7 @@ nearest_extrap.py: nearest-neighbor extrapolation of data to coordinates UPDATE HISTORY: + Updated 06/2022: unit updates in the ESR netCDF4 format Updated 05/2022: add functions for using ESR netCDF4 format models changed keyword arguments to camel case Updated 04/2022: updated docstrings to numpy documentation format @@ -759,12 +760,12 @@ def read_netcdf_grid(input_file): """ #-- read the netcdf format tide grid file fileID=netCDF4.Dataset(os.path.expanduser(input_file),'r') - #-- read coordinates + #-- read coordinates and flip y orientation x = fileID.variables['x'][:].copy() y = fileID.variables['y'][::-1].copy() - #-- read water column thickness + #-- read water column thickness and flip y orientation hz = fileID.variables['wct'][::-1,:].copy() - #-- read mask + #-- read mask and flip y orientation mz = fileID.variables['mask'][::-1,:].copy() #-- read flexure and convert from percent to scale factor sf = fileID.variables['flexure'][::-1,:]/100.0 @@ -1336,19 +1337,16 @@ def read_netcdf_file(input_file, ic, variable=None): #-- real and imaginary components of tidal constituent hc = np.ma.zeros((ny,nx),dtype=np.complex64) hc.mask = np.zeros((ny,nx),dtype=bool) - #-- extract constituent + #-- extract 
constituent and flip y orientation if (variable == 'z'): - #-- convert elevations from mm to m - hc.data.real[:,:] = fileID.variables['hRe'][ic,::-1,:]/1e3 - hc.data.imag[:,:] = -fileID.variables['hIm'][ic,::-1,:]/1e3 + hc.data.real[:,:] = fileID.variables['hRe'][ic,::-1,:] + hc.data.imag[:,:] = -fileID.variables['hIm'][ic,::-1,:] elif variable in ('U','u'): - #-- convert transports from cm^2/s to m^2/s - hc.data.real[:,:] = fileID.variables['uRe'][ic,::-1,:]/1e4 - hc.data.imag[:,:] = -fileID.variables['uIm'][ic,::-1,:]/1e4 + hc.data.real[:,:] = fileID.variables['uRe'][ic,::-1,:] + hc.data.imag[:,:] = -fileID.variables['uIm'][ic,::-1,:] elif variable in ('V','v'): - #-- convert transports from cm^2/s to m^2/s - hc.data.real[:,:] = fileID.variables['vRe'][ic,::-1,:]/1e4 - hc.data.imag[:,:] = -fileID.variables['vIm'][ic,::-1,:]/1e4 + hc.data.real[:,:] = fileID.variables['vRe'][ic,::-1,:] + hc.data.imag[:,:] = -fileID.variables['vIm'][ic,::-1,:] #-- close the file fileID.close() #-- return output variables From fc69db4d8644d91832ffd1839bb18e2c3b76dc23 Mon Sep 17 00:00:00 2001 From: tsutterley Date: Mon, 13 Jun 2022 14:47:49 -0700 Subject: [PATCH 10/14] Update spatial.py --- pyTMD/spatial.py | 1 + 1 file changed, 1 insertion(+) diff --git a/pyTMD/spatial.py b/pyTMD/spatial.py index e958cfef..fc04a7b0 100644 --- a/pyTMD/spatial.py +++ b/pyTMD/spatial.py @@ -197,6 +197,7 @@ def from_ascii(filename, **kwargs): #-- output spatial data and attributes dinput = {c:np.zeros((file_lines-kwargs['header'])) for c in columns} dinput['attributes'] = {c:dict() for c in columns} + header = int(kwargs['header']) #-- extract spatial data array #-- for each line in the file for i,line in enumerate(file_contents[header:]): From e07ff34b231dc24569fe213b4dc4dfc733dc7cb7 Mon Sep 17 00:00:00 2001 From: tsutterley Date: Mon, 13 Jun 2022 17:37:32 -0700 Subject: [PATCH 11/14] Update python-request.yml --- .github/workflows/python-request.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) 
diff --git a/.github/workflows/python-request.yml b/.github/workflows/python-request.yml index f3d936a6..03b1e2cf 100644 --- a/.github/workflows/python-request.yml +++ b/.github/workflows/python-request.yml @@ -35,7 +35,7 @@ jobs: environment-file: environment.yml - name: Create conda Test Environment run: | - conda install flake8 pytest pytest-cov cython octave oct2py boto3 + conda install openblas-devel flake8 pytest pytest-cov cython octave oct2py - name: Lint with flake8 run: | # stop the build if there are Python syntax errors or undefined names From 6e9abb094e51d78ecf5188eaae52b6f4677ff441 Mon Sep 17 00:00:00 2001 From: tsutterley Date: Thu, 16 Jun 2022 13:02:27 -0700 Subject: [PATCH 12/14] refactor: use pypi versions of ICESat-2 and ATM1b tools --- doc/environment.yml | 4 +-- .../getting_started/Getting-Started.rst | 26 ++++++++++++------- environment.yml | 4 +-- requirements.txt | 2 ++ scripts/compute_LPET_icebridge_data.py | 6 ++--- scripts/compute_LPT_icebridge_data.py | 6 ++--- scripts/compute_OPT_icebridge_data.py | 7 +++-- scripts/compute_tides_icebridge_data.py | 6 ++--- setup.py | 10 +++---- 9 files changed, 37 insertions(+), 34 deletions(-) diff --git a/doc/environment.yml b/doc/environment.yml index 6fcd3496..e745b4f4 100644 --- a/doc/environment.yml +++ b/doc/environment.yml @@ -27,6 +27,6 @@ dependencies: - sphinx_rtd_theme - texlive-core - pip: - - git+https://github.com/tsutterley/read-ICESat-2.git - - git+https://github.com/tsutterley/read-ATM1b-QFIT-binary.git + - icesat2-toolkit + - ATM1b-QFIT - .. diff --git a/doc/source/getting_started/Getting-Started.rst b/doc/source/getting_started/Getting-Started.rst index d2930cfe..d6188097 100644 --- a/doc/source/getting_started/Getting-Started.rst +++ b/doc/source/getting_started/Getting-Started.rst @@ -193,8 +193,13 @@ For pole tide programs, the epoch is 1858-11-17T00:00:00 (Modified Julian Days). 
The `time module `_ within ``pyTMD`` can convert different time formats to the necessary time format of a given program. The `time module `_ can also parse date strings describing the units and epoch of relative times, or the calendar date of measurement for geotiff formats. ``pyTMD`` keeps updated `tables of leap seconds `_ for converting from GPS, LORAN and TAI times. -``pyTMD`` keeps updated `tables of delta times `_ for converting between dynamic (TT) and universal (UT1) times. +- TAI time: International Atomic Time which is computed as the weighted average of several hundred atomic clocks. +- UTC time: Coordinated Universal Time which is `periodically adjusted `_ to account for the difference between the definition of the second and the rotation of Earth. +- GPS time: Atomic timing system for the Global Positioning System constellation of satellites monitored by the United States Naval Observatory (USNO). GPS time and UTC time were equal on January 6, 1980. TAI time is ahead of GPS time by 19 seconds. +- LORAN time: Atomic timing system for the Loran-C chain transmitter sites used in terrestrial radionavigation. LORAN time and UTC time were equal on January 1, 1958. TAI time is ahead of LORAN time by 10 seconds. + +``pyTMD`` also keeps updated `tables of delta times `_ for converting between dynamic (TT) and universal (UT1) times. Delta times (TT - UT1) are the differences between Dynamic Time (TT) and Universal Time (UT1) [Meeus1998]_. Universal Time (UT1) is based on the rotation of the Earth, which varies irregularly, and so UT1 is adjusted periodically. @@ -209,15 +214,16 @@ The default coordinate system in ``pyTMD`` is WGS84 geodetic coordinates in lati ``pyTMD`` uses `pyproj `_ to convert from different coordinate systems and datums. Some regional tide models are projected in a different coordinate system. For these cases, ``pyTMD`` will `convert from latitude and longitude to the model coordinate system `_. 
-OTIS models may be projected into a separate coordinate system. -The available OTIS projections within ``pyTMD`` are -``'4326'`` (global latitude and longitude), -``'3031'`` (Antarctic Polar Stereographic in kilometers), -``'3413'`` (NSIDC Sea Ice Polar Stereographic North in kilometers), -``'CATS2008'`` (CATS2008 Polar Stereographic in kilometers), -``'3976'`` (NSIDC Sea Ice Polar Stereographic South in kilometers), and -``'PSNorth'`` (idealized polar stereographic in kilometers). -For other OTIS model projections, a formatted projection string (e.g. PROJ, WKT, or EPSG) can be used. + +OTIS models may be projected into a separate coordinate system. The available OTIS projections within ``pyTMD`` are +- ``'4326'`` (global latitude and longitude) +- ``'3031'`` (Antarctic Polar Stereographic in kilometers) +- ``'3413'`` (NSIDC Sea Ice Polar Stereographic North in kilometers) +- ``'CATS2008'`` (CATS2008 Polar Stereographic in kilometers) +- ``'3976'`` (NSIDC Sea Ice Polar Stereographic South in kilometers) +- ``'PSNorth'`` (idealized polar stereographic in kilometers) + +For other model projections, a formatted coordinate reference system (CRS) descriptor (e.g. PROJ, WKT, or EPSG code) can be used. 
Interpolation ############# diff --git a/environment.yml b/environment.yml index 2a25daa7..e9d42834 100644 --- a/environment.yml +++ b/environment.yml @@ -21,5 +21,5 @@ dependencies: - scipy - setuptools_scm - pip: - - git+https://github.com/tsutterley/read-ICESat-2.git - - git+https://github.com/tsutterley/read-ATM1b-QFIT-binary.git + - icesat2-toolkit + - ATM1b-QFIT diff --git a/requirements.txt b/requirements.txt index ce79d41a..942fd56d 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,7 @@ +ATM1b-QFIT gdal h5py +icesat2-toolkit lxml netCDF4 numpy diff --git a/scripts/compute_LPET_icebridge_data.py b/scripts/compute_LPET_icebridge_data.py index d2c6553a..418295b0 100644 --- a/scripts/compute_LPET_icebridge_data.py +++ b/scripts/compute_LPET_icebridge_data.py @@ -60,7 +60,7 @@ import pyTMD.utilities from pyTMD.calc_delta_time import calc_delta_time from pyTMD.compute_equilibrium_tide import compute_equilibrium_tide -import read_ATM1b_QFIT_binary.read_ATM1b_QFIT_binary as ATM1b +import ATM1b_QFIT.read_ATM1b_QFIT_binary #-- PURPOSE: reading the number of file lines removing commented lines def file_length(input_file, input_subsetter, HDF5=False, QFIT=False): @@ -73,7 +73,7 @@ def file_length(input_file, input_subsetter, HDF5=False, QFIT=False): file_lines, = fileID[HDF5].shape elif QFIT: #-- read the size of a QFIT binary file - file_lines = ATM1b.ATM1b_QFIT_shape(input_file) + file_lines = ATM1b_QFIT.ATM1b_QFIT_shape(input_file) else: #-- read the input file, split at lines and remove all commented lines with open(input_file, mode='r', encoding='utf8') as f: @@ -130,7 +130,7 @@ def read_ATM_qfit_file(input_file, input_subsetter): #-- Version 1 of ATM QFIT files (binary) elif (SFX == 'qi'): #-- read input QFIT data file and subset if specified - fid,h = ATM1b.read_ATM1b_QFIT_binary(input_file) + fid,h = ATM1b_QFIT.read_ATM1b_QFIT_binary(input_file) #-- number of lines of data within file file_lines = file_length(input_file,input_subsetter,QFIT=True) 
ATM_L1b_input['lat'] = fid['latitude'][:] diff --git a/scripts/compute_LPT_icebridge_data.py b/scripts/compute_LPT_icebridge_data.py index f75b47ef..ed544ac5 100644 --- a/scripts/compute_LPT_icebridge_data.py +++ b/scripts/compute_LPT_icebridge_data.py @@ -67,7 +67,7 @@ import scipy.interpolate from pyTMD.iers_mean_pole import iers_mean_pole from pyTMD.read_iers_EOP import read_iers_EOP -import read_ATM1b_QFIT_binary.read_ATM1b_QFIT_binary as ATM1b +import ATM1b_QFIT.read_ATM1b_QFIT_binary #-- PURPOSE: reading the number of file lines removing commented lines def file_length(input_file, input_subsetter, HDF5=False, QFIT=False): @@ -80,7 +80,7 @@ def file_length(input_file, input_subsetter, HDF5=False, QFIT=False): file_lines, = fileID[HDF5].shape elif QFIT: #-- read the size of a QFIT binary file - file_lines = ATM1b.ATM1b_QFIT_shape(input_file) + file_lines = ATM1b_QFIT.ATM1b_QFIT_shape(input_file) else: #-- read the input file, split at lines and remove all commented lines with open(input_file, mode='r', encoding='utf8') as f: @@ -137,7 +137,7 @@ def read_ATM_qfit_file(input_file, input_subsetter): #-- Version 1 of ATM QFIT files (binary) elif (SFX == 'qi'): #-- read input QFIT data file and subset if specified - fid,h = ATM1b.read_ATM1b_QFIT_binary(input_file) + fid,h = ATM1b_QFIT.read_ATM1b_QFIT_binary(input_file) #-- number of lines of data within file file_lines = file_length(input_file,input_subsetter,QFIT=True) ATM_L1b_input['lat'] = fid['latitude'][:] diff --git a/scripts/compute_OPT_icebridge_data.py b/scripts/compute_OPT_icebridge_data.py index dec5761a..46284839 100644 --- a/scripts/compute_OPT_icebridge_data.py +++ b/scripts/compute_OPT_icebridge_data.py @@ -77,8 +77,7 @@ from pyTMD.iers_mean_pole import iers_mean_pole from pyTMD.read_iers_EOP import read_iers_EOP from pyTMD.read_ocean_pole_tide import read_ocean_pole_tide -import read_ATM1b_QFIT_binary.read_ATM1b_QFIT_binary as ATM1b - +import ATM1b_QFIT.read_ATM1b_QFIT_binary #-- PURPOSE: reading the 
number of file lines removing commented lines def file_length(input_file, input_subsetter, HDF5=False, QFIT=False): #-- subset the data to indices if specified @@ -90,7 +89,7 @@ def file_length(input_file, input_subsetter, HDF5=False, QFIT=False): file_lines, = fileID[HDF5].shape elif QFIT: #-- read the size of a QFIT binary file - file_lines = ATM1b.ATM1b_QFIT_shape(input_file) + file_lines = ATM1b_QFIT.ATM1b_QFIT_shape(input_file) else: #-- read the input file, split at lines and remove all commented lines with open(input_file, mode='r', encoding='utf8') as f: @@ -147,7 +146,7 @@ def read_ATM_qfit_file(input_file, input_subsetter): #-- Version 1 of ATM QFIT files (binary) elif (SFX == 'qi'): #-- read input QFIT data file and subset if specified - fid,h = ATM1b.read_ATM1b_QFIT_binary(input_file) + fid,h = ATM1b_QFIT.read_ATM1b_QFIT_binary(input_file) #-- number of lines of data within file file_lines = file_length(input_file,input_subsetter,QFIT=True) ATM_L1b_input['lat'] = fid['latitude'][:] diff --git a/scripts/compute_tides_icebridge_data.py b/scripts/compute_tides_icebridge_data.py index c30f762e..bec6f720 100644 --- a/scripts/compute_tides_icebridge_data.py +++ b/scripts/compute_tides_icebridge_data.py @@ -117,7 +117,7 @@ import pyTMD.time import pyTMD.model import pyTMD.utilities -import read_ATM1b_QFIT_binary.read_ATM1b_QFIT_binary as ATM1b +import ATM1b_QFIT.read_ATM1b_QFIT_binary from pyTMD.calc_delta_time import calc_delta_time from pyTMD.infer_minor_corrections import infer_minor_corrections from pyTMD.predict_tide_drift import predict_tide_drift @@ -137,7 +137,7 @@ def file_length(input_file, input_subsetter, HDF5=False, QFIT=False): file_lines, = fileID[HDF5].shape elif QFIT: #-- read the size of a QFIT binary file - file_lines = ATM1b.ATM1b_QFIT_shape(input_file) + file_lines = ATM1b_QFIT.ATM1b_QFIT_shape(input_file) else: #-- read the input file, split at lines and remove all commented lines with open(input_file, mode='r', encoding='utf8') as f: @@ 
-194,7 +194,7 @@ def read_ATM_qfit_file(input_file, input_subsetter): #-- Version 1 of ATM QFIT files (binary) elif (SFX == 'qi'): #-- read input QFIT data file and subset if specified - fid,h = ATM1b.read_ATM1b_QFIT_binary(input_file) + fid,h = ATM1b_QFIT.read_ATM1b_QFIT_binary(input_file) #-- number of lines of data within file file_lines = file_length(input_file,input_subsetter,QFIT=True) ATM_L1b_input['lat'] = fid['latitude'][:] diff --git a/setup.py b/setup.py index 4a606dcb..5066f6a4 100644 --- a/setup.py +++ b/setup.py @@ -12,19 +12,16 @@ 'solutions and make tidal predictions') keywords = 'Ocean Tides, Load Tides, Pole Tides, Tidal Prediction, OTIS, GOT, FES' # get long_description from README.rst -with open("README.rst", mode="r", encoding='utf8') as fh: +with open('README.rst', mode='r', encoding='utf8') as fh: long_description = fh.read() long_description_content_type = "text/x-rst" # get install requirements -with open('requirements.txt', mode="r", encoding='utf8') as fh: +with open('requirements.txt', mode='r', encoding='utf8') as fh: install_requires = [line.split().pop(0) for line in fh.read().splitlines()] -# dependency links (data readers) -dependency_links = ['https://github.com/tsutterley/read-ICESat-2/tarball/main', - 'https://github.com/tsutterley/read-ATM1b-QFIT-binary/tarball/main'] # get version -with open('version.txt', mode="r", encoding='utf8') as fh: +with open('version.txt', mode='r', encoding='utf8') as fh: fallback_version = fh.read() # list of all scripts to be included with package @@ -101,7 +98,6 @@ def check_output(cmd): packages=find_packages(), install_requires=install_requires, setup_requires=setup_requires, - dependency_links=dependency_links, use_scm_version=use_scm_version, scripts=scripts, include_package_data=True, From 1079f11691dbdb21371d4ba98e2db61837381232 Mon Sep 17 00:00:00 2001 From: tsutterley Date: Thu, 16 Jun 2022 16:46:56 -0700 Subject: [PATCH 13/14] feat: added field_mapping options to netCDF4 and HDF5 reads --- 
doc/source/user_guide/spatial.rst | 2 + pyTMD/spatial.py | 62 ++++++++++++++++++++++++--- scripts/compute_OPT_icebridge_data.py | 1 + 3 files changed, 58 insertions(+), 7 deletions(-) diff --git a/doc/source/user_guide/spatial.rst b/doc/source/user_guide/spatial.rst index 56c53092..04f51ef6 100644 --- a/doc/source/user_guide/spatial.rst +++ b/doc/source/user_guide/spatial.rst @@ -36,6 +36,8 @@ General Methods .. autofunction:: pyTMD.spatial.data_type +.. autofunction:: pyTMD.spatial.from_file + .. autofunction:: pyTMD.spatial.from_ascii .. autofunction:: pyTMD.spatial.from_netCDF4 diff --git a/pyTMD/spatial.py b/pyTMD/spatial.py index fc04a7b0..1c1151d1 100644 --- a/pyTMD/spatial.py +++ b/pyTMD/spatial.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" spatial.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (06/2022) Utilities for reading, writing and operating on spatial data @@ -19,6 +19,8 @@ https://github.com/yaml/pyyaml UPDATE HISTORY: + Updated 06/2022: added field_mapping options to netCDF4 and HDF5 reads + added from_file wrapper function to read from particular formats Updated 04/2022: add option to reduce input GDAL raster datasets updated docstrings to numpy documentation format use gzip virtual filesystem for reading compressed geotiffs @@ -124,6 +126,32 @@ def data_type(x, y, t): else: raise ValueError('Unknown data type') +def from_file(filename, format, **kwargs): + """ + Wrapper function for reading data from an input format + + Parameters + ---------- + filename: str + full path of input file + format: str + format of input file + **kwargs: dict + Keyword arguments for file reader + """ + #-- read input file to extract spatial coordinates and data + if (format == 'ascii'): + dinput = from_ascii(filename, **kwargs) + elif (format == 'netCDF4'): + dinput = from_netCDF4(filename, **kwargs) + elif (format == 'HDF5'): + dinput = from_HDF5(filename, **kwargs) + elif (format == 'geotiff'): + dinput = from_geotiff(filename, **kwargs) + 
else: + raise ValueError('Invalid format {0}'.format(format)) + return dinput + def from_ascii(filename, **kwargs): """ Read data from an ascii file @@ -234,6 +262,8 @@ def from_netCDF4(filename, **kwargs): name for y-dimension variable varname: str, default 'data' name for data variable + field_mapping: dict, default {} + mapping between output variables and input netCDF4 """ #-- set default keyword arguments kwargs.setdefault('compression',None) @@ -241,6 +271,7 @@ def from_netCDF4(filename, **kwargs): kwargs.setdefault('xname','lon') kwargs.setdefault('yname','lat') kwargs.setdefault('varname','data') + kwargs.setdefault('field_mapping',{}) #-- read data from netCDF4 file #-- Open the NetCDF4 file for reading if (kwargs['compression'] == 'gzip'): @@ -271,10 +302,15 @@ def from_netCDF4(filename, **kwargs): attributes_list = ['description','units','long_name','calendar', 'standard_name','grid_mapping','_FillValue'] #-- mapping between netCDF4 variable names and output names - variable_mapping = dict(x=kwargs['xname'],y=kwargs['yname'], - data=kwargs['varname'],time=kwargs['timename']) + if not kwargs['field_mapping']: + kwargs['field_mapping']['x'] = copy.copy(kwargs['xname']) + kwargs['field_mapping']['y'] = copy.copy(kwargs['yname']) + if kwargs['varname'] is not None: + kwargs['field_mapping']['data'] = copy.copy(kwargs['varname']) + if kwargs['timename'] is not None: + kwargs['field_mapping']['time'] = copy.copy(kwargs['timename']) #-- for each variable - for key,nc in variable_mapping.items(): + for key,nc in kwargs['field_mapping'].items(): #-- Getting the data from each NetCDF variable dinput[key] = fileID.variables[nc][:] #-- get attributes for the included variables @@ -292,6 +328,8 @@ def from_netCDF4(filename, **kwargs): if 'grid_mapping' in dinput['attributes']['data'].keys(): #-- try getting the attribute grid_mapping = dinput['attributes']['data']['grid_mapping'] + #-- get coordinate reference system attributes + dinput['attributes']['crs'] = {} for 
att_name in fileID[grid_mapping].ncattrs(): dinput['attributes']['crs'][att_name] = \ fileID.variables[grid_mapping].getncattr(att_name) @@ -328,6 +366,8 @@ def from_HDF5(filename, **kwargs): name for y-dimension variable varname: str, default 'data' name for data variable + field_mapping: dict, default {} + mapping between output variables and input HDF5 """ #-- set default keyword arguments kwargs.setdefault('compression',None) @@ -335,6 +375,7 @@ def from_HDF5(filename, **kwargs): kwargs.setdefault('xname','lon') kwargs.setdefault('yname','lat') kwargs.setdefault('varname','data') + kwargs.setdefault('field_mapping',{}) #-- read data from HDF5 file #-- Open the HDF5 file for reading if (kwargs['compression'] == 'gzip'): @@ -370,10 +411,15 @@ def from_HDF5(filename, **kwargs): attributes_list = ['description','units','long_name','calendar', 'standard_name','grid_mapping','_FillValue'] #-- mapping between HDF5 variable names and output names - variable_mapping = dict(x=kwargs['xname'],y=kwargs['yname'], - data=kwargs['varname'],time=kwargs['timename']) + if not kwargs['field_mapping']: + kwargs['field_mapping']['x'] = copy.copy(kwargs['xname']) + kwargs['field_mapping']['y'] = copy.copy(kwargs['yname']) + if kwargs['varname'] is not None: + kwargs['field_mapping']['data'] = copy.copy(kwargs['varname']) + if kwargs['timename'] is not None: + kwargs['field_mapping']['time'] = copy.copy(kwargs['timename']) #-- for each variable - for key,h5 in variable_mapping.items(): + for key,h5 in kwargs['field_mapping'].items(): #-- Getting the data from each HDF5 variable dinput[key] = np.copy(fileID[h5][:]) #-- get attributes for the included variables @@ -388,6 +434,8 @@ def from_HDF5(filename, **kwargs): if 'grid_mapping' in dinput['attributes']['data'].keys(): #-- try getting the attribute grid_mapping = dinput['attributes']['data']['grid_mapping'] + #-- get coordinate reference system attributes + dinput['attributes']['crs'] = {} for att_name,att_val in 
fileID[grid_mapping].attrs.items(): dinput['attributes']['crs'][att_name] = att_val #-- get the spatial projection reference information from wkt diff --git a/scripts/compute_OPT_icebridge_data.py b/scripts/compute_OPT_icebridge_data.py index 46284839..51314e72 100644 --- a/scripts/compute_OPT_icebridge_data.py +++ b/scripts/compute_OPT_icebridge_data.py @@ -78,6 +78,7 @@ from pyTMD.read_iers_EOP import read_iers_EOP from pyTMD.read_ocean_pole_tide import read_ocean_pole_tide import ATM1b_QFIT.read_ATM1b_QFIT_binary + #-- PURPOSE: reading the number of file lines removing commented lines def file_length(input_file, input_subsetter, HDF5=False, QFIT=False): #-- subset the data to indices if specified From 947626a3cdfefb8d8450de19288570d4d2acdbd7 Mon Sep 17 00:00:00 2001 From: tsutterley Date: Tue, 21 Jun 2022 13:59:08 -0700 Subject: [PATCH 14/14] update nc constituents attribute --- pyTMD/read_tide_model.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyTMD/read_tide_model.py b/pyTMD/read_tide_model.py index 381e192e..89a7cbe9 100644 --- a/pyTMD/read_tide_model.py +++ b/pyTMD/read_tide_model.py @@ -806,7 +806,7 @@ def read_constituents(input_file, grid='OTIS'): if (grid == 'ESR'): #-- open the netCDF4 file fid = netCDF4.Dataset(os.path.expanduser(input_file),'r') - constituents = fid.variables['cons'].long_name.split() + constituents = fid.variables['constituents'].constituent_order.split() nc = len(constituents) fid.close() else: