diff --git a/doc/environment.yml b/doc/environment.yml index 6de41a48..4d84a5f8 100644 --- a/doc/environment.yml +++ b/doc/environment.yml @@ -28,10 +28,10 @@ dependencies: - pyyaml - scipy - sphinx - - sphinx-argparse - sphinx_rtd_theme - texlive-core - tk - pip: + - sphinx-argparse>=0.4 - git+https://github.com/tsutterley/geoid-toolkit.git - .. diff --git a/doc/source/api_reference/aod1b_geocenter.rst b/doc/source/api_reference/aod1b_geocenter.rst index 1829de7d..9ed4ef18 100644 --- a/doc/source/api_reference/aod1b_geocenter.rst +++ b/doc/source/api_reference/aod1b_geocenter.rst @@ -18,7 +18,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/aod1b_geocenter.py + :filename: aod1b_geocenter.py :func: arguments :prog: aod1b_geocenter.py :nodescription: diff --git a/doc/source/api_reference/aod1b_oblateness.rst b/doc/source/api_reference/aod1b_oblateness.rst index 0426fd4f..cf36c82e 100644 --- a/doc/source/api_reference/aod1b_oblateness.rst +++ b/doc/source/api_reference/aod1b_oblateness.rst @@ -18,7 +18,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/aod1b_oblateness.py + :filename: aod1b_oblateness.py :func: arguments :prog: aod1b_oblateness.py :nodescription: diff --git a/doc/source/api_reference/calc_degree_one.rst b/doc/source/api_reference/calc_degree_one.rst index 4d863b05..ea5e7b96 100644 --- a/doc/source/api_reference/calc_degree_one.rst +++ b/doc/source/api_reference/calc_degree_one.rst @@ -12,7 +12,7 @@ Calling Sequence ################ .. 
argparse:: - :filename: ../../scripts/calc_degree_one.py + :filename: calc_degree_one.py :func: arguments :prog: calc_degree_one.py :nodescription: diff --git a/doc/source/api_reference/calc_harmonic_resolution.rst b/doc/source/api_reference/calc_harmonic_resolution.rst index 1b6541f3..ceef9e2c 100644 --- a/doc/source/api_reference/calc_harmonic_resolution.rst +++ b/doc/source/api_reference/calc_harmonic_resolution.rst @@ -14,7 +14,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/calc_harmonic_resolution.py + :filename: calc_harmonic_resolution.py :func: arguments :prog: calc_harmonic_resolution.py :nodescription: diff --git a/doc/source/api_reference/calc_mascon.rst b/doc/source/api_reference/calc_mascon.rst index bb2e56ce..b599e99f 100644 --- a/doc/source/api_reference/calc_mascon.rst +++ b/doc/source/api_reference/calc_mascon.rst @@ -16,7 +16,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/calc_mascon.py + :filename: calc_mascon.py :func: arguments :prog: calc_mascon.py :nodescription: diff --git a/doc/source/api_reference/calc_sensitivity_kernel.rst b/doc/source/api_reference/calc_sensitivity_kernel.rst index 9a6e6a20..60d75240 100644 --- a/doc/source/api_reference/calc_sensitivity_kernel.rst +++ b/doc/source/api_reference/calc_sensitivity_kernel.rst @@ -12,7 +12,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/calc_sensitivity_kernel.py + :filename: calc_sensitivity_kernel.py :func: arguments :prog: calc_sensitivity_kernel.py :nodescription: diff --git a/doc/source/api_reference/cnes_grace_sync.rst b/doc/source/api_reference/cnes_grace_sync.rst index 51817f0c..0ad5325a 100644 --- a/doc/source/api_reference/cnes_grace_sync.rst +++ b/doc/source/api_reference/cnes_grace_sync.rst @@ -13,7 +13,7 @@ Calling Sequence ################ .. 
argparse:: - :filename: ../../scripts/cnes_grace_sync.py + :filename: cnes_grace_sync.py :func: arguments :prog: cnes_grace_sync.py :nodescription: diff --git a/doc/source/api_reference/combine_harmonics.rst b/doc/source/api_reference/combine_harmonics.rst index 07003509..648dbb1a 100644 --- a/doc/source/api_reference/combine_harmonics.rst +++ b/doc/source/api_reference/combine_harmonics.rst @@ -12,7 +12,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/combine_harmonics.py + :filename: combine_harmonics.py :func: arguments :prog: combine_harmonics.py :nodescription: diff --git a/doc/source/api_reference/convert_harmonics.rst b/doc/source/api_reference/convert_harmonics.rst index 475999ad..b9545005 100644 --- a/doc/source/api_reference/convert_harmonics.rst +++ b/doc/source/api_reference/convert_harmonics.rst @@ -12,7 +12,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/convert_harmonics.py + :filename: convert_harmonics.py :func: arguments :prog: convert_harmonics.py :nodescription: diff --git a/doc/source/api_reference/dealiasing_monthly_mean.rst b/doc/source/api_reference/dealiasing_monthly_mean.rst index 7d65ed86..4ff781d4 100644 --- a/doc/source/api_reference/dealiasing_monthly_mean.rst +++ b/doc/source/api_reference/dealiasing_monthly_mean.rst @@ -18,7 +18,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/dealiasing_monthly_mean.py + :filename: dealiasing_monthly_mean.py :func: arguments :prog: dealiasing_monthly_mean.py :nodescription: diff --git a/doc/source/api_reference/esa_costg_swarm_sync.rst b/doc/source/api_reference/esa_costg_swarm_sync.rst index c50a1c91..9b183eab 100644 --- a/doc/source/api_reference/esa_costg_swarm_sync.rst +++ b/doc/source/api_reference/esa_costg_swarm_sync.rst @@ -13,7 +13,7 @@ Calling Sequence ################ .. 
argparse:: - :filename: ../../scripts/esa_costg_swarm_sync.py + :filename: esa_costg_swarm_sync.py :func: arguments :prog: esa_costg_swarm_sync.py :nodescription: diff --git a/doc/source/api_reference/gfz_icgem_costg_ftp.rst b/doc/source/api_reference/gfz_icgem_costg_ftp.rst index 8081316d..77903988 100644 --- a/doc/source/api_reference/gfz_icgem_costg_ftp.rst +++ b/doc/source/api_reference/gfz_icgem_costg_ftp.rst @@ -13,7 +13,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/gfz_icgem_costg_ftp.py + :filename: gfz_icgem_costg_ftp.py :func: arguments :prog: gfz_icgem_costg_ftp.py :nodescription: diff --git a/doc/source/api_reference/gfz_isdc_dealiasing_ftp.rst b/doc/source/api_reference/gfz_isdc_dealiasing_ftp.rst index 45de6e7c..dc8f85ca 100644 --- a/doc/source/api_reference/gfz_isdc_dealiasing_ftp.rst +++ b/doc/source/api_reference/gfz_isdc_dealiasing_ftp.rst @@ -13,7 +13,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/gfz_isdc_dealiasing_ftp.py + :filename: gfz_isdc_dealiasing_ftp.py :func: arguments :prog: gfz_isdc_dealiasing_ftp.py :nodescription: diff --git a/doc/source/api_reference/gfz_isdc_grace_ftp.rst b/doc/source/api_reference/gfz_isdc_grace_ftp.rst index 92dd259e..6bf4691d 100644 --- a/doc/source/api_reference/gfz_isdc_grace_ftp.rst +++ b/doc/source/api_reference/gfz_isdc_grace_ftp.rst @@ -16,7 +16,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/gfz_isdc_grace_ftp.py + :filename: gfz_isdc_grace_ftp.py :func: arguments :prog: gfz_isdc_grace_ftp.py :nodescription: diff --git a/doc/source/api_reference/grace_mean_harmonics.rst b/doc/source/api_reference/grace_mean_harmonics.rst index cd841684..99ece1d2 100644 --- a/doc/source/api_reference/grace_mean_harmonics.rst +++ b/doc/source/api_reference/grace_mean_harmonics.rst @@ -13,7 +13,7 @@ Calling Sequence ################ .. 
argparse:: - :filename: ../../scripts/grace_mean_harmonics.py + :filename: grace_mean_harmonics.py :func: arguments :prog: grace_mean_harmonics.py :nodescription: diff --git a/doc/source/api_reference/grace_spatial_error.rst b/doc/source/api_reference/grace_spatial_error.rst index ec0fca8f..1a98aa5e 100644 --- a/doc/source/api_reference/grace_spatial_error.rst +++ b/doc/source/api_reference/grace_spatial_error.rst @@ -14,7 +14,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/grace_spatial_error.py + :filename: grace_spatial_error.py :func: arguments :prog: grace_spatial_error.py :nodescription: diff --git a/doc/source/api_reference/grace_spatial_maps.rst b/doc/source/api_reference/grace_spatial_maps.rst index efaf212c..d977656a 100644 --- a/doc/source/api_reference/grace_spatial_maps.rst +++ b/doc/source/api_reference/grace_spatial_maps.rst @@ -15,7 +15,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/grace_spatial_maps.py + :filename: grace_spatial_maps.py :func: arguments :prog: grace_spatial_maps.py :nodescription: diff --git a/doc/source/api_reference/itsg_graz_grace_sync.rst b/doc/source/api_reference/itsg_graz_grace_sync.rst index 246c30ea..47a679e3 100644 --- a/doc/source/api_reference/itsg_graz_grace_sync.rst +++ b/doc/source/api_reference/itsg_graz_grace_sync.rst @@ -13,7 +13,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/itsg_graz_grace_sync.py + :filename: itsg_graz_grace_sync.py :func: arguments :prog: itsg_graz_grace_sync.py :nodescription: diff --git a/doc/source/api_reference/make_grace_index.rst b/doc/source/api_reference/make_grace_index.rst index 38e648f2..d9d73413 100644 --- a/doc/source/api_reference/make_grace_index.rst +++ b/doc/source/api_reference/make_grace_index.rst @@ -12,7 +12,7 @@ Calling Sequence ################ .. 
argparse:: - :filename: ../../scripts/make_grace_index.py + :filename: make_grace_index.py :func: arguments :prog: make_grace_index.py :nodescription: diff --git a/doc/source/api_reference/mascon_reconstruct.rst b/doc/source/api_reference/mascon_reconstruct.rst index 53f508f3..508585bf 100644 --- a/doc/source/api_reference/mascon_reconstruct.rst +++ b/doc/source/api_reference/mascon_reconstruct.rst @@ -12,7 +12,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/mascon_reconstruct.py + :filename: mascon_reconstruct.py :func: arguments :prog: mascon_reconstruct.py :nodescription: diff --git a/doc/source/api_reference/monte_carlo_degree_one.rst b/doc/source/api_reference/monte_carlo_degree_one.rst index 5fe17532..c7a7e223 100644 --- a/doc/source/api_reference/monte_carlo_degree_one.rst +++ b/doc/source/api_reference/monte_carlo_degree_one.rst @@ -13,7 +13,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/monte_carlo_degree_one.py + :filename: monte_carlo_degree_one.py :func: arguments :prog: monte_carlo_degree_one.py :nodescription: diff --git a/doc/source/api_reference/podaac_cumulus.rst b/doc/source/api_reference/podaac_cumulus.rst index 4c14d7c5..54bd7676 100644 --- a/doc/source/api_reference/podaac_cumulus.rst +++ b/doc/source/api_reference/podaac_cumulus.rst @@ -14,7 +14,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/podaac_cumulus.py + :filename: podaac_cumulus.py :func: arguments :prog: podaac_cumulus.py :nodescription: diff --git a/doc/source/api_reference/podaac_grace_sync.rst b/doc/source/api_reference/podaac_grace_sync.rst index fe86f010..b76f1d43 100644 --- a/doc/source/api_reference/podaac_grace_sync.rst +++ b/doc/source/api_reference/podaac_grace_sync.rst @@ -16,7 +16,7 @@ Calling Sequence ################ .. 
argparse:: - :filename: ../../scripts/podaac_grace_sync.py + :filename: podaac_grace_sync.py :func: arguments :prog: podaac_grace_sync.py :nodescription: diff --git a/doc/source/api_reference/podaac_webdav.rst b/doc/source/api_reference/podaac_webdav.rst index 5cf71a2e..9cd5e3f7 100644 --- a/doc/source/api_reference/podaac_webdav.rst +++ b/doc/source/api_reference/podaac_webdav.rst @@ -12,7 +12,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/podaac_webdav.py + :filename: podaac_webdav.py :func: arguments :prog: podaac_webdav.py :nodescription: diff --git a/doc/source/api_reference/regress_grace_maps.rst b/doc/source/api_reference/regress_grace_maps.rst index 887c6472..783e72d1 100644 --- a/doc/source/api_reference/regress_grace_maps.rst +++ b/doc/source/api_reference/regress_grace_maps.rst @@ -12,7 +12,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/regress_grace_maps.py + :filename: regress_grace_maps.py :func: arguments :prog: regress_grace_maps.py :nodescription: diff --git a/doc/source/api_reference/run_grace_date.rst b/doc/source/api_reference/run_grace_date.rst index 045d174c..2a8421fa 100644 --- a/doc/source/api_reference/run_grace_date.rst +++ b/doc/source/api_reference/run_grace_date.rst @@ -16,7 +16,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/run_grace_date.py + :filename: run_grace_date.py :func: arguments :prog: run_grace_date.py :nodescription: diff --git a/doc/source/api_reference/run_sea_level_equation.rst b/doc/source/api_reference/run_sea_level_equation.rst index 19e6fcd6..c751bd08 100644 --- a/doc/source/api_reference/run_sea_level_equation.rst +++ b/doc/source/api_reference/run_sea_level_equation.rst @@ -14,7 +14,7 @@ Calling Sequence ################ .. 
argparse:: - :filename: ../../scripts/run_sea_level_equation.py + :filename: run_sea_level_equation.py :func: arguments :prog: run_sea_level_equation.py :nodescription: diff --git a/doc/source/api_reference/scale_grace_maps.rst b/doc/source/api_reference/scale_grace_maps.rst index de4f6ee1..2bdd8023 100644 --- a/doc/source/api_reference/scale_grace_maps.rst +++ b/doc/source/api_reference/scale_grace_maps.rst @@ -17,7 +17,7 @@ Calling Sequence ################ .. argparse:: - :filename: ../../scripts/scale_grace_maps.py + :filename: scale_grace_maps.py :func: arguments :prog: scale_grace_maps.py :nodescription: diff --git a/doc/source/api_reference/utilities.rst b/doc/source/api_reference/utilities.rst index b4601602..07886b94 100644 --- a/doc/source/api_reference/utilities.rst +++ b/doc/source/api_reference/utilities.rst @@ -71,8 +71,12 @@ General Methods .. autofunction:: gravity_toolkit.utilities.cmr_filter_json +.. autofunction:: gravity_toolkit.utilities.cmr_metadata_json + .. autofunction:: gravity_toolkit.utilities.cmr +.. autofunction:: gravity_toolkit.utilities.cmr_metadata + .. autofunction:: gravity_toolkit.utilities.compile_regex_pattern .. 
autofunction:: gravity_toolkit.utilities.from_figshare diff --git a/gravity_toolkit/clenshaw_summation.py b/gravity_toolkit/clenshaw_summation.py index ae8a7ad6..37d6b87d 100644 --- a/gravity_toolkit/clenshaw_summation.py +++ b/gravity_toolkit/clenshaw_summation.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" clenshaw_summation.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (11/2022) Calculates the spatial field for a series of spherical harmonics for a sequence of ungridded points @@ -49,6 +49,7 @@ Bollettino di Geodesia e Scienze (1982) UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 04/2022: updated docstrings to numpy documentation format Updated 11/2021: added UNITS list option for converting to custom units Updated 09/2021: fix passing SCALE keyword argument to clenshaw_s_m @@ -181,7 +182,7 @@ def clenshaw_summation(clm, slm, lon, lat, RAD=0, UNITS=0, LMAX=0, LOVE=None, #-- custom units dfactor = np.copy(UNITS) else: - raise ValueError('Unknown units {0}'.format(UNITS)) + raise ValueError(f'Unknown units {UNITS}') #-- calculate arrays for clenshaw summations over colatitudes s_m_c = np.zeros((npts,LMAX*2+2)) diff --git a/gravity_toolkit/gen_disc_load.py b/gravity_toolkit/gen_disc_load.py index 2c5f4b33..6df7f4d2 100644 --- a/gravity_toolkit/gen_disc_load.py +++ b/gravity_toolkit/gen_disc_load.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" gen_disc_load.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (11/2022) Calculates gravitational spherical harmonic coefficients for a uniform disc load CALLING SEQUENCE: @@ -54,6 +54,7 @@ https://doi.org/10.1007/s00190-011-0522-7 UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 04/2022: updated docstrings to numpy documentation format Updated 11/2021: added UNITS option for converting from different inputs Updated 01/2021: use harmonics class for spherical harmonic operations @@ -172,7 
+173,7 @@ def gen_disc_load(data, lon, lat, area, LMAX=60, MMAX=None, UNITS=2, #-- custom units unit_conv = np.copy(UNITS) else: - raise ValueError('Unknown units {0}'.format(UNITS)) + raise ValueError(f'Unknown units {UNITS}') #-- Coefficient for calculating Stokes coefficients for a disc load #-- From Jacob et al (2012), Farrell (1972) and Longman (1962) diff --git a/gravity_toolkit/gen_point_load.py b/gravity_toolkit/gen_point_load.py index fb2bda67..de6c5f26 100644 --- a/gravity_toolkit/gen_point_load.py +++ b/gravity_toolkit/gen_point_load.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" gen_point_load.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (11/2022) Calculates gravitational spherical harmonic coefficients for point masses CALLING SEQUENCE: @@ -47,6 +47,7 @@ https://doi.org/10.1029/JB078i011p01760 UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 04/2022: updated docstrings to numpy documentation format Updated 11/2021: added UNITS list option for converting from custom units Updated 01/2021: use harmonics class for spherical harmonic operations @@ -137,7 +138,7 @@ def gen_point_load(data, lon, lat, LMAX=60, MMAX=None, UNITS=1, LOVE=None): dfactor = np.copy(UNITS) int_fact[:] = 1.0 else: - raise ValueError('Unknown units {0}'.format(UNITS)) + raise ValueError(f'Unknown units {UNITS}') #-- flattened form of data converted to units D = int_fact*data.flatten() diff --git a/gravity_toolkit/gen_spherical_cap.py b/gravity_toolkit/gen_spherical_cap.py index 458f04ce..b597e8d4 100755 --- a/gravity_toolkit/gen_spherical_cap.py +++ b/gravity_toolkit/gen_spherical_cap.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" gen_spherical_cap.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (11/2022) Calculates gravitational spherical harmonic coefficients for a spherical cap Spherical cap derivation from Longman (1962), Farrell (1972), Pollack (1973) @@ -63,6 +63,7 @@ 
https://doi.org/10.1007/s00190-011-0522-7 UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 04/2022: updated docstrings to numpy documentation format Updated 11/2021: added UNITS list option for converting from custom units Updated 07/2020: added function docstrings @@ -215,7 +216,7 @@ def gen_spherical_cap(data, lon, lat, LMAX=60, MMAX=None, #-- custom units unit_conv = np.copy(UNITS) else: - raise ValueError('Unknown units {0}'.format(UNITS)) + raise ValueError(f'Unknown units {UNITS}') #-- Coefficient for calculating Stokes coefficients for a spherical cap #-- From Jacob et al (2012), Farrell (1972) and Longman (1962) diff --git a/gravity_toolkit/gen_stokes.py b/gravity_toolkit/gen_stokes.py index cc3257f1..afc71824 100755 --- a/gravity_toolkit/gen_stokes.py +++ b/gravity_toolkit/gen_stokes.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" gen_stokes.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (11/2022) Converts data from the spatial domain to spherical harmonic coefficients @@ -43,6 +43,7 @@ and filters the GRACE/GRACE-FO coefficients for striping errors UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 04/2022: updated docstrings to numpy documentation format Updated 11/2021: added UNITS list option for converting from custom units Updated 05/2021: define int/float precision to prevent deprecation warning @@ -179,7 +180,7 @@ def gen_stokes(data, lon, lat, LMIN=0, LMAX=60, MMAX=None, UNITS=1, dfactor = np.copy(UNITS) int_fact[:] = np.sin(th)*dphi*dth else: - raise ValueError('Unknown units {0}'.format(UNITS)) + raise ValueError(f'Unknown units {UNITS}') #-- Calculating cos/sin of phi arrays #-- output [m,phi] diff --git a/gravity_toolkit/geocenter.py b/gravity_toolkit/geocenter.py index c3f858c3..aff5eeca 100644 --- a/gravity_toolkit/geocenter.py +++ b/gravity_toolkit/geocenter.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" geocenter.py -Written by 
Tyler Sutterley (06/2022) +Written by Tyler Sutterley (11/2022) Data class for reading and processing geocenter data PYTHON DEPENDENCIES: @@ -15,6 +15,7 @@ https://github.com/yaml/pyyaml UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 06/2022: drop external reader dependency for UCI format Updated 04/2022: updated docstrings to numpy documentation format include utf-8 encoding in reads to be windows compliant @@ -115,7 +116,7 @@ def case_insensitive_filename(self,filename): f = [f for f in os.listdir(directory) if re.match(basename,f,re.I)] #-- check that geocenter file exists if not f: - errmsg = '{0} not found in file system'.format(filename) + errmsg = f'{filename} not found in file system' raise FileNotFoundError(errmsg) self.filename = os.path.join(directory,f.pop()) #-- print filename @@ -143,7 +144,7 @@ def from_AOD1B(self, release, calendar_year, calendar_month): AOD1B_file = 'AOD1B_{0}_{1}_{2:4.0f}_{3:02.0f}.txt'.format(*args) #-- check that file exists if not os.access(os.path.join(self.directory,AOD1B_file), os.F_OK): - errmsg = 'AOD1B File {0} not in File System'.format(AOD1B_file) + errmsg = f'AOD1B File {AOD1B_file} not in File System' raise FileNotFoundError(errmsg) #-- read AOD1b geocenter skipping over commented header text with open(os.path.join(self.directory,AOD1B_file), mode='r', encoding='utf8') as f: @@ -341,7 +342,7 @@ def from_SLR(self, geocenter_file, **kwargs): kwargs['release'],'geocenter')) #-- check that AOD1B directory exists if not os.access(self.directory, os.F_OK): - errmsg = '{0} not found in file system'.format(self.directory) + errmsg = f'{self.directory} not found in file system' raise FileNotFoundError(errmsg) #-- Input geocenter file and split lines @@ -470,7 +471,7 @@ def from_UCI(self, geocenter_file, **kwargs): #-- verify HEADER flag was set if not HEADER: - raise IOError('Data not found in file:\n\t{0}'.format(geocenter_file)) + raise IOError(f'Data not found in 
file:\n\t{geocenter_file}') #-- number of months within the file n_mon = np.int64(file_lines - count) @@ -570,7 +571,7 @@ def from_swenson(self, geocenter_file, **kwargs): #-- catch to see if HEADER flag was not set to false if kwargs['header']: - raise IOError('Data lines not found in file {0}'.format(geocenter_file)) + raise IOError(f'Data lines not found in file {geocenter_file}') #-- number of months within the file n_mon = np.int64(file_lines - count) @@ -730,7 +731,7 @@ def from_tellus(self, geocenter_file, **kwargs): self.eC11[t] = np.float64(line_contents[5]) self.eS11[t] = np.float64(line_contents[6]) else: - raise Exception('Unknown harmonic order {0:d}'.format(m)) + raise ValueError(f'Unknown harmonic order {m:d}') #-- calendar year and month if kwargs['JPL']: diff --git a/gravity_toolkit/grace_date.py b/gravity_toolkit/grace_date.py index 6b5d9f38..b5ff41b6 100644 --- a/gravity_toolkit/grace_date.py +++ b/gravity_toolkit/grace_date.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" grace_date.py -Written by Tyler Sutterley (09/2022) +Written by Tyler Sutterley (11/2022) Contributions by Hugo Lecomte and Yara Mohajerani Reads index file from podaac_grace_sync.py or gfz_isdc_grace_ftp.py @@ -46,6 +46,7 @@ time.py: utilities for calculating time operations UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 09/2022: raise exception if index file cannot be found use logging for debugging level verbose output Updated 08/2022: moved file parsing functions to time module @@ -156,9 +157,9 @@ def grace_date(base_dir, PROC='', DREL='', DSET='', OUTPUT=True, MODE=0o775): index_file = os.path.join(grace_dir, 'index.txt') #-- check that index file exists if not os.access(index_file, os.F_OK): - raise FileNotFoundError('{0} not found'.format(index_file)) + raise FileNotFoundError(f'{index_file} not found') #-- log index file if debugging - logging.debug('Reading index file: {0}'.format(index_file)) + logging.debug(f'Reading 
index file: {index_file}') #-- read index file for GRACE/GRACE-FO filenames with open(index_file, mode='r', encoding='utf8') as f: input_files = f.read().splitlines() @@ -252,7 +253,7 @@ def grace_date(base_dir, PROC='', DREL='', DSET='', OUTPUT=True, MODE=0o775): #-- Output GRACE/GRACE-FO date ascii file if OUTPUT: - date_file = '{0}_{1}_DATES.txt'.format(PROC, DREL) + date_file = f'{PROC}_{DREL}_DATES.txt' fid = open(os.path.join(grace_dir,date_file), 'w') #-- date file header information args = ('Mid-date','Month','Start_Day','End_Day','Total_Days') diff --git a/gravity_toolkit/grace_find_months.py b/gravity_toolkit/grace_find_months.py index 6927c811..518ac85f 100644 --- a/gravity_toolkit/grace_find_months.py +++ b/gravity_toolkit/grace_find_months.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" grace_find_months.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (11/2022) Parses date index file from grace_date program Finds the months available for a GRACE/GRACE-FO/Swarm product @@ -36,6 +36,7 @@ grace_date.py: reads GRACE index file and calculates dates for each month UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 04/2022: updated docstrings to numpy documentation format Updated 05/2021: define int/float precision to prevent deprecation warning Updated 07/2020: added function docstrings @@ -106,9 +107,9 @@ def grace_find_months(base_dir, PROC, DREL, DSET='GSM'): grace_dir = os.path.join(base_dir, PROC, DREL, DSET) #-- check that GRACE/GRACE-FO date file exists - date_file = os.path.join(grace_dir,'{0}_{1}_DATES.txt'.format(PROC, DREL)) + date_file = os.path.join(grace_dir, f'{PROC}_{DREL}_DATES.txt') if not os.access(date_file, os.F_OK): - grace_date(base_dir,PROC=PROC,DREL=DREL,DSET=DSET,OUTPUT=True) + grace_date(base_dir, PROC=PROC, DREL=DREL, DSET=DSET, OUTPUT=True) #-- read GRACE/GRACE-FO date ascii file from grace_date.py #-- skip the header row and extract dates (decimal format) and months 
diff --git a/gravity_toolkit/grace_input_months.py b/gravity_toolkit/grace_input_months.py index fafa99f0..1d61b5cc 100644 --- a/gravity_toolkit/grace_input_months.py +++ b/gravity_toolkit/grace_input_months.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" grace_input_months.py -Written by Tyler Sutterley (10/2022) +Written by Tyler Sutterley (11/2022) Contributions by Hugo Lecomte and Yara Mohajerani Reads GRACE/GRACE-FO files for a specified spherical harmonic degree and order @@ -105,6 +105,7 @@ read_gfc_harmonics.py: reads spherical harmonic data from gfc files UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 10/2022: tilde-expansion of input working data directory Updated 09/2022: use logging for debugging level verbose output add option to replace degree 4 zonal harmonics with SLR @@ -389,7 +390,7 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, #-- read spherical harmonic data products infile = grace_files[grace_month] #-- log input file if debugging - logging.debug('Reading file {0:d}: {1}'.format(i, infile)) + logging.debug(f'Reading file {i:d}: {infile}') #-- read GRACE/GRACE-FO/Swarm file if PROC in ('GRAZ','Swarm'): #-- Degree 2 zonals will be converted to a tide free state @@ -425,21 +426,21 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, # SLR_file = os.path.join(base_dir,'TN-11_C20_SLR.txt') SLR_file = os.path.join(base_dir,'C20_RL06.txt') #-- log SLR file if debugging - logging.debug('Reading SLR C20 file: {0}'.format(SLR_file)) + logging.debug(f'Reading SLR C20 file: {SLR_file}') #-- read SLR file C20_input = read_SLR_C20(SLR_file) FLAGS.append('_wCSR_C20') elif (SLR_C20 == 'GFZ'): - SLR_file = os.path.join(base_dir,'GFZ_{0}_C20_SLR.dat'.format(DREL)) + SLR_file = os.path.join(base_dir,f'GFZ_{DREL}_C20_SLR.dat') #-- log SLR file if debugging - logging.debug('Reading SLR C20 file: {0}'.format(SLR_file)) + logging.debug(f'Reading SLR C20 file: 
{SLR_file}') #-- read SLR file C20_input = read_SLR_C20(SLR_file) FLAGS.append('_wGFZ_C20') elif (SLR_C20 == 'GSFC'): SLR_file = os.path.join(base_dir,'TN-14_C30_C20_GSFC_SLR.txt') #-- log SLR file if debugging - logging.debug('Reading SLR C20 file: {0}'.format(SLR_file)) + logging.debug(f'Reading SLR C20 file: {SLR_file}') #-- read SLR file C20_input = read_SLR_C20(SLR_file) FLAGS.append('_wGSFC_C20') @@ -447,9 +448,9 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, #-- Replacing C2,1/S2,1 with SLR #-- Running function read_SLR_CS2.py if (kwargs['SLR_21'] == 'CSR'): - SLR_file = os.path.join(base_dir,'C21_S21_{0}.txt'.format(DREL)) + SLR_file = os.path.join(base_dir,f'C21_S21_{DREL}.txt') #-- log SLR file if debugging - logging.debug('Reading SLR C21/S21 file: {0}'.format(SLR_file)) + logging.debug(f'Reading SLR C21/S21 file: {SLR_file}') #-- read SLR file C21_input = read_SLR_CS2(SLR_file) FLAGS.append('_wCSR_21') @@ -457,7 +458,7 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, GravIS_file = 'GRAVIS-2B_GFZOP_GRACE+SLR_LOW_DEGREES_0002.dat' SLR_file = os.path.join(base_dir,GravIS_file) #-- log SLR file if debugging - logging.debug('Reading SLR C21/S21 file: {0}'.format(SLR_file)) + logging.debug(f'Reading SLR C21/S21 file: {SLR_file}') #-- read SLR file C21_input = read_SLR_CS2(SLR_file) FLAGS.append('_wGFZ_21') @@ -466,7 +467,7 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, # SLR_file = os.path.join(base_dir,'GSFC_C21_S21.txt') SLR_file = os.path.join(base_dir,'gsfc_slr_5x5c61s61.txt') #-- log SLR file if debugging - logging.debug('Reading SLR C21/S21 file: {0}'.format(SLR_file)) + logging.debug(f'Reading SLR C21/S21 file: {SLR_file}') #-- read SLR file C21_input = read_SLR_CS2(SLR_file, DATE=grace_Ylms['time'], ORDER=1) FLAGS.append('_wGSFC_21') @@ -474,16 +475,16 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, #-- Replacing C2,2/S2,2 
with SLR #-- Running function read_SLR_CS2.py if (kwargs['SLR_22'] == 'CSR'): - SLR_file = os.path.join(base_dir,'C22_S22_{0}.txt'.format(DREL)) + SLR_file = os.path.join(base_dir,f'C22_S22_{DREL}.txt') #-- log SLR file if debugging - logging.debug('Reading SLR C22/S22 file: {0}'.format(SLR_file)) + logging.debug(f'Reading SLR C22/S22 file: {SLR_file}') #-- read SLR file C22_input = read_SLR_CS2(SLR_file) FLAGS.append('_wCSR_22') elif (kwargs['SLR_22'] == 'GSFC'): SLR_file = os.path.join(base_dir,'gsfc_slr_5x5c61s61.txt') #-- log SLR file if debugging - logging.debug('Reading SLR C22/S22 file: {0}'.format(SLR_file)) + logging.debug(f'Reading SLR C22/S22 file: {SLR_file}') #-- read SLR file C22_input = read_SLR_CS2(SLR_file, DATE=grace_Ylms['time'], ORDER=2) FLAGS.append('_wGSFC_22') @@ -493,21 +494,21 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, if (kwargs['SLR_C30'] == 'CSR'): SLR_file = os.path.join(base_dir,'CSR_Monthly_5x5_Gravity_Harmonics.txt') #-- log SLR file if debugging - logging.debug('Reading SLR C30 file: {0}'.format(SLR_file)) + logging.debug(f'Reading SLR C30 file: {SLR_file}') #-- read SLR file C30_input = read_SLR_C30(SLR_file) FLAGS.append('_wCSR_C30') elif (kwargs['SLR_C30'] == 'LARES'): SLR_file = os.path.join(base_dir,'C30_LARES_filtered.txt') #-- log SLR file if debugging - logging.debug('Reading SLR C30 file: {0}'.format(SLR_file)) + logging.debug(f'Reading SLR C30 file: {SLR_file}') #-- read SLR file C30_input = read_SLR_C30(SLR_file) FLAGS.append('_wLARES_C30') elif (kwargs['SLR_C30'] == 'GSFC'): SLR_file = os.path.join(base_dir,'TN-14_C30_C20_GSFC_SLR.txt') #-- log SLR file if debugging - logging.debug('Reading SLR C30 file: {0}'.format(SLR_file)) + logging.debug(f'Reading SLR C30 file: {SLR_file}') #-- read SLR file C30_input = read_SLR_C30(SLR_file) FLAGS.append('_wGSFC_C30') @@ -515,7 +516,7 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, GravIS_file = 
'GRAVIS-2B_GFZOP_GRACE+SLR_LOW_DEGREES_0002.dat' SLR_file = os.path.join(base_dir,GravIS_file) #-- log SLR file if debugging - logging.debug('Reading SLR C30 file: {0}'.format(SLR_file)) + logging.debug(f'Reading SLR C30 file: {SLR_file}') #-- read SLR file C30_input = read_SLR_C30(SLR_file) FLAGS.append('_wGFZ_C30') @@ -525,21 +526,21 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, if (kwargs['SLR_C40'] == 'CSR'): SLR_file = os.path.join(base_dir,'CSR_Monthly_5x5_Gravity_Harmonics.txt') #-- log SLR file if debugging - logging.debug('Reading SLR C40 file: {0}'.format(SLR_file)) + logging.debug(f'Reading SLR C40 file: {SLR_file}') #-- read SLR file C40_input = read_SLR_C40(SLR_file) FLAGS.append('_wCSR_C40') elif (kwargs['SLR_C40'] == 'LARES'): SLR_file = os.path.join(base_dir,'C40_LARES_filtered.txt') #-- log SLR file if debugging - logging.debug('Reading SLR C40 file: {0}'.format(SLR_file)) + logging.debug(f'Reading SLR C40 file: {SLR_file}') #-- read SLR file C40_input = read_SLR_C40(SLR_file) FLAGS.append('_wLARES_C40') elif (kwargs['SLR_C40'] == 'GSFC'): SLR_file = os.path.join(base_dir,'gsfc_slr_5x5c61s61.txt') #-- log SLR file if debugging - logging.debug('Reading SLR C40 file: {0}'.format(SLR_file)) + logging.debug(f'Reading SLR C40 file: {SLR_file}') #-- read SLR file C40_input = read_SLR_C40(SLR_file, DATE=grace_Ylms['time']) FLAGS.append('_wGSFC_C40') @@ -549,14 +550,14 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, if (kwargs['SLR_C50'] == 'CSR'): SLR_file = os.path.join(base_dir,'CSR_Monthly_5x5_Gravity_Harmonics.txt') #-- log SLR file if debugging - logging.debug('Reading SLR C50 file: {0}'.format(SLR_file)) + logging.debug(f'Reading SLR C50 file: {SLR_file}') #-- read SLR file C50_input = read_SLR_C50(SLR_file) FLAGS.append('_wCSR_C50') elif (kwargs['SLR_C50'] == 'LARES'): SLR_file = os.path.join(base_dir,'C50_LARES_filtered.txt') #-- log SLR file if debugging - logging.debug('Reading 
SLR C50 file: {0}'.format(SLR_file)) + logging.debug(f'Reading SLR C50 file: {SLR_file}') #-- read SLR file C50_input = read_SLR_C50(SLR_file) FLAGS.append('_wLARES_C50') @@ -564,7 +565,7 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, # SLR_file = os.path.join(base_dir,'GSFC_SLR_C20_C30_C50_GSM_replacement.txt') SLR_file = os.path.join(base_dir,'gsfc_slr_5x5c61s61.txt') #-- log SLR file if debugging - logging.debug('Reading SLR C50 file: {0}'.format(SLR_file)) + logging.debug(f'Reading SLR C50 file: {SLR_file}') #-- read SLR file C50_input = read_SLR_C50(SLR_file, DATE=grace_Ylms['time']) FLAGS.append('_wGSFC_C50') @@ -576,24 +577,24 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, if DREL in ('RL04','RL05'): #-- old degree one files default_geocenter = os.path.join(base_dir,'geocenter', - 'deg1_coef_{0}.txt'.format(DREL)) + f'deg1_coef_{DREL}.txt') JPL = False else: #-- new TN-13 degree one files default_geocenter = os.path.join(base_dir,'geocenter', - 'TN-13_GEOC_{0}_{1}.txt'.format(PROC,DREL)) + f'TN-13_GEOC_{PROC}_{DREL}.txt') JPL = True #-- read degree one files from JPL GRACE Tellus DEG1_file = kwargs.get('DEG1_FILE') or default_geocenter #-- log geocenter file if debugging - logging.debug('Reading Geocenter file: {0}'.format(DEG1_file)) + logging.debug(f'Reading Geocenter file: {DEG1_file}') DEG1_input = gravity_toolkit.geocenter().from_tellus(DEG1_file,JPL=JPL) - FLAGS.append('_w{0}_DEG1'.format(DEG1)) + FLAGS.append(f'_w{DEG1}_DEG1') elif (DEG1 == 'SLR'): #-- CSR Satellite Laser Ranging (SLR) degree 1 # #-- SLR-derived degree-1 mass variations # #-- ftp://ftp.csr.utexas.edu/pub/slr/geocenter/ - # DEG1_file=os.path.join(base_dir,'geocenter','GCN_{0}.txt'.format(DREL)) + # DEG1_file = os.path.join(base_dir,'geocenter',f'GCN_{DREL}.txt') # COLUMNS = ['time','X','Y','Z','X_sigma','Y_sigma','Z_sigma'] # DEG1_input = gravity_toolkit.geocenter().from_SLR(DEG1_file, # AOD=True, release=DREL, 
header=16, COLUMNS=COLUMNS) @@ -612,11 +613,11 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, COLUMNS = ['MJD','time','X','Y','Z','XM','YM','ZM', 'X_sigma','Y_sigma','Z_sigma','XM_sigma','YM_sigma','ZM_sigma'] #-- log geocenter file if debugging - logging.debug('Reading Geocenter file: {0}'.format(DEG1_file)) + logging.debug(f'Reading Geocenter file: {DEG1_file}') #-- read degree one files from CSR satellite laser ranging - DEG1_input = gravity_toolkit.geocenter(radius=6.378136e9).from_SLR(DEG1_file, - AOD=True,release=DREL,header=15,columns=COLUMNS) - FLAGS.append('_w{0}_DEG1'.format(DEG1)) + DEG1_input = gravity_toolkit.geocenter(radius=6.378136e9).from_SLR( + DEG1_file, AOD=True, release=DREL, header=15, columns=COLUMNS) + FLAGS.append(f'_w{DEG1}_DEG1') elif DEG1 in ('SLF','UCI'): #-- degree one files from Sutterley and Velicogna (2019) #-- default: iterated and with self-attraction and loading effects @@ -627,19 +628,19 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, #-- read degree one files from Sutterley and Velicogna (2019) DEG1_file = kwargs.get('DEG1_FILE') or default_geocenter #-- log geocenter file if debugging - logging.debug('Reading Geocenter file: {0}'.format(DEG1_file)) + logging.debug(f'Reading Geocenter file: {DEG1_file}') DEG1_input = gravity_toolkit.geocenter().from_UCI(DEG1_file) - FLAGS.append('_w{0}_DEG1'.format(DEG1)) + FLAGS.append(f'_w{DEG1}_DEG1') elif (DEG1 == 'Swenson'): #-- degree 1 coefficients provided by Sean Swenson in mm w.e. default_geocenter = os.path.join(base_dir,'geocenter', - 'gad_gsm.{0}.txt'.format(DREL)) + f'gad_gsm.{DREL}.txt') #-- read degree one files from Swenson et al. 
(2008) DEG1_file = kwargs.get('DEG1_FILE') or default_geocenter #-- log geocenter file if debugging - logging.debug('Reading Geocenter file: {0}'.format(DEG1_file)) + logging.debug(f'Reading Geocenter file: {DEG1_file}') DEG1_input = gravity_toolkit.geocenter().from_swenson(DEG1_file) - FLAGS.append('_w{0}_DEG1'.format(DEG1)) + FLAGS.append(f'_w{DEG1}_DEG1') elif (DEG1 == 'GFZ'): #-- degree 1 coefficients provided by GFZ GravIS #-- http://gravis.gfz-potsdam.de/corrections @@ -648,9 +649,9 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, #-- read degree one files from GFZ GravIS DEG1_file = kwargs.get('DEG1_FILE') or default_geocenter #-- log geocenter file if debugging - logging.debug('Reading Geocenter file: {0}'.format(DEG1_file)) + logging.debug(f'Reading Geocenter file: {DEG1_file}') DEG1_input = gravity_toolkit.geocenter().from_gravis(DEG1_file) - FLAGS.append('_w{0}_DEG1'.format(DEG1)) + FLAGS.append(f'_w{DEG1}_DEG1') #-- atmospheric flag if correcting ECMWF "jumps" (using GAE/GAF/GAG files) if kwargs['ATM']: @@ -666,8 +667,8 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, #-- verify that there are replacement C20 months for specified range months_test = sorted(set(months) - set(C20_input['month'])) if months_test: - gm = ','.join('{0:03d}'.format(gm) for gm in months_test) - raise IOError('No Matching C20 Months ({0})'.format(gm)) + gm = ','.join(f'{gm:03d}' for gm in months_test) + raise IOError(f'No Matching C20 Months ({gm})') #-- replace C20 with SLR coefficients for i,grace_month in enumerate(months): count = np.count_nonzero(C20_input['month'] == grace_month) @@ -681,8 +682,8 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, #-- verify that there are replacement C21/S21 months for specified range months_test = sorted(set(single_acc_months) - set(C21_input['month'])) if months_test: - gm = ','.join('{0:03d}'.format(gm) for gm in months_test) - raise IOError('No 
Matching C21/S21 Months ({0})'.format(gm)) + gm = ','.join(f'{gm:03d}' for gm in months_test) + raise IOError(f'No Matching C21/S21 Months ({gm})') #-- replace C21/S21 with SLR coefficients for i,grace_month in enumerate(months): count = np.count_nonzero(C21_input['month'] == grace_month) @@ -698,8 +699,8 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, #-- verify that there are replacement C22/S22 months for specified range months_test = sorted(set(single_acc_months) - set(C22_input['month'])) if months_test: - gm = ','.join('{0:03d}'.format(gm) for gm in months_test) - raise IOError('No Matching C22/S22 Months ({0})'.format(gm)) + gm = ','.join(f'{gm:03d}' for gm in months_test) + raise IOError(f'No Matching C22/S22 Months ({gm})') #-- replace C22/S22 with SLR coefficients for i,grace_month in enumerate(months): count = np.count_nonzero(C22_input['month'] == grace_month) @@ -715,8 +716,8 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, #-- verify that there are replacement C30 months for specified range months_test = sorted(set(single_acc_months) - set(C30_input['month'])) if months_test: - gm = ','.join('{0:03d}'.format(gm) for gm in months_test) - raise IOError('No Matching C30 Months ({0})'.format(gm)) + gm = ','.join(f'{gm:03d}' for gm in months_test) + raise IOError(f'No Matching C30 Months ({gm})') #-- replace C30 with SLR coefficients for i,grace_month in enumerate(months): count = np.count_nonzero(C30_input['month'] == grace_month) @@ -730,8 +731,8 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, #-- verify that there are replacement C40 months for specified range months_test = sorted(set(single_acc_months) - set(C40_input['month'])) if months_test: - gm = ','.join('{0:03d}'.format(gm) for gm in months_test) - raise IOError('No Matching C40 Months ({0})'.format(gm)) + gm = ','.join(f'{gm:03d}' for gm in months_test) + raise IOError(f'No Matching C40 Months 
({gm})') #-- replace C40 with SLR coefficients for i,grace_month in enumerate(months): count = np.count_nonzero(C40_input['month'] == grace_month) @@ -745,8 +746,8 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, #-- verify that there are replacement C50 months for specified range months_test = sorted(set(single_acc_months) - set(C50_input['month'])) if months_test: - gm = ','.join('{0:03d}'.format(gm) for gm in months_test) - raise IOError('No Matching C50 Months ({0})'.format(gm)) + gm = ','.join(f'{gm:03d}' for gm in months_test) + raise IOError(f'No Matching C50 Months ({gm})') #-- replace C50 with SLR coefficients for i,grace_month in enumerate(months): count = np.count_nonzero(C50_input['month'] == grace_month) @@ -776,8 +777,8 @@ def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, start_mon, end_mon, #-- check that all months are available for a given geocenter months_test = sorted(set(months) - set(DEG1_input.month)) if months_test: - gm = ','.join('{0:03d}'.format(gm) for gm in months_test) - raise IOError('No Matching Geocenter Months ({0})'.format(gm)) + gm = ','.join(f'{gm:03d}' for gm in months_test) + raise IOError(f'No Matching Geocenter Months ({gm})') #-- for each considered date for i,grace_month in enumerate(months): k, = np.nonzero(DEG1_input.month == grace_month) @@ -864,7 +865,7 @@ def read_ecmwf_corrections(base_dir, LMAX, months, MMAX=None): for key, val in corr_file.items(): #-- log ECMWF correction file if debugging infile = os.path.join(base_dir, val) - logging.debug('Reading ECMWF file: {0}'.format(infile)) + logging.debug(f'Reading ECMWF file: {infile}') #-- allocate for clm and slm of atmospheric corrections atm_corr_clm[key] = np.zeros((LMAX+1,MMAX+1)) atm_corr_slm[key] = np.zeros((LMAX+1,MMAX+1)) diff --git a/gravity_toolkit/grace_months_index.py b/gravity_toolkit/grace_months_index.py index 05a0dd4e..b9946ad2 100644 --- a/gravity_toolkit/grace_months_index.py +++ 
b/gravity_toolkit/grace_months_index.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" grace_months_index.py -Written by Tyler Sutterley (05/2022) +Written by Tyler Sutterley (11/2022) Creates a file with the start and end days for each dataset Shows the range of each month for (CSR/GFZ/JPL) (RL04/RL05/RL06) @@ -40,6 +40,7 @@ time.py: utilities for calculating time operations UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 05/2022: use argparse descriptions within documentation use new GSFC release 6 version 2 mascons as the default Updated 04/2022: updated docstrings to numpy documentation format @@ -109,7 +110,7 @@ def grace_months_index(base_dir, DREL=['RL06','rl06v2.0'], MODE=None): grace_dir = os.path.join(base_dir, pr, rl, DSET) #-- read GRACE date ascii file #-- file created in read_grace.py or grace_dates.py - grace_date_file = '{0}_{1}_DATES.txt'.format(pr,rl) + grace_date_file = f'{pr}_{rl}_DATES.txt' if os.access(os.path.join(grace_dir,grace_date_file), os.F_OK): #-- skip the header line date_input = np.loadtxt(os.path.join(grace_dir,grace_date_file), @@ -118,7 +119,7 @@ def grace_months_index(base_dir, DREL=['RL06','rl06v2.0'], MODE=None): nmon = np.shape(date_input)[0] #-- Setting the dictionary key e.g. 
'CSR_RL04' - var_name = '{0}_{1}'.format(pr,rl) + var_name = f'{pr}_{rl}' #-- Creating a python dictionary for each dataset with parameters: #-- month #, start year, start day, end year, end day @@ -143,9 +144,9 @@ def grace_months_index(base_dir, DREL=['RL06','rl06v2.0'], MODE=None): #-- sort datasets alphanumerically var_name = sorted(var_info.keys()) - txt = ''.join(['{0:^21}'.format(d) for d in var_name]) + txt = ''.join([f'{d:^21}' for d in var_name]) #-- printing header to file - print('{0:^11} {1}'.format('MONTH',txt),file=fid) + print(f'{"MONTH":^11} {txt}', file=fid) #-- for each possible month #-- GRACE starts at month 004 (April 2002) @@ -173,15 +174,15 @@ def grace_months_index(base_dir, DREL=['RL06','rl06v2.0'], MODE=None): end_day, = var_info[var]['endday'][ind] #-- output string is the date range #-- string format: 2002_102--2002_120 - output_string.append('{0:4d}_{1:03d}--{2:4d}_{3:03d}'.format( - st_yr, st_day, end_yr, end_day)) + output_string.append(f'{st_yr:4d}_{st_day:03d}--' + f'{end_yr:4d}_{end_day:03d}') else: #-- if there is no matching month = missing output_string.append(' ** missing ** ') #-- create single string with output string components #-- formatting the strings to be 20 characters in length - data_string = ' '.join(['{0:>20}'.format(s) for s in output_string]) + data_string = ' '.join([f'{s:>20}' for s in output_string]) #-- printing data line to file args = (m, month_string, calendar_year, data_string) print('{0:03d} {1:>3}{2:4d} {3}'.format(*args), file=fid) diff --git a/gravity_toolkit/harmonics.py b/gravity_toolkit/harmonics.py index 2d9c8663..958d6648 100644 --- a/gravity_toolkit/harmonics.py +++ b/gravity_toolkit/harmonics.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" harmonics.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (11/2022) Contributions by Hugo Lecomte Spherical harmonic data class for processing GRACE/GRACE-FO Level-2 data @@ -25,6 +25,7 @@ destripe_harmonics.py: filters spherical harmonics 
for correlated errors UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 04/2022: updated docstrings to numpy documentation format using internal netCDF4 and HDF5 readers and writers added function for converting to a python dictionary @@ -153,7 +154,7 @@ def case_insensitive_filename(self,filename): directory = os.path.dirname(os.path.expanduser(filename)) f = [f for f in os.listdir(directory) if re.match(basename,f,re.I)] if not f: - errmsg = '{0} not found in file system'.format(filename) + errmsg = f'{filename} not found in file system' raise FileNotFoundError(errmsg) self.filename = os.path.join(directory,f.pop()) #-- print filename @@ -1449,8 +1450,8 @@ def subset(self, months): #-- check that all months are available months_check = list(set(months) - set(self.month)) if months_check: - m = ','.join(['{0:03d}'.format(m) for m in months_check]) - raise IOError('GRACE/GRACE-FO months {0} not Found'.format(m)) + m = ','.join([f'{m:03d}' for m in months_check]) + raise IOError(f'GRACE/GRACE-FO months {m} not Found') #-- indices to sort data objects months_list = [i for i,m in enumerate(self.month) if m in months] #-- output harmonics object diff --git a/gravity_toolkit/read_GIA_model.py b/gravity_toolkit/read_GIA_model.py index f258b5a7..da4831f6 100755 --- a/gravity_toolkit/read_GIA_model.py +++ b/gravity_toolkit/read_GIA_model.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" read_GIA_model.py -Written by Tyler Sutterley (09/2022) +Written by Tyler Sutterley (11/2022) Reads GIA data files that can come in various formats depending on the group Outputs spherical harmonics for the GIA rates and the GIA model parameters @@ -96,6 +96,7 @@ https://doi.org/10.1002/2016JB013844 UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 09/2022: use logging for debugging level verbose output Updated 05/2022: output full citation for each GIA model group Updated 04/2022: updated docstrings to
numpy documentation format @@ -414,9 +415,9 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): #-- check that GIA data file is present in file system input_file = os.path.expanduser(input_file) if not os.access(input_file, os.F_OK): - raise FileNotFoundError('{0} not found'.format(input_file)) + raise FileNotFoundError(f'{input_file} not found') #-- log GIA file if debugging - logging.debug('Reading GIA file: {0}'.format(input_file)) + logging.debug(f'Reading GIA file: {input_file}') #-- opening gia data file and read contents with open(input_file, mode='r', encoding='utf8') as f: gia_data = f.read().splitlines() @@ -449,9 +450,9 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): #-- check that GIA data file is present in file system input_file = os.path.expanduser(input_file) if not os.access(input_file, os.F_OK): - raise FileNotFoundError('{0} not found'.format(input_file)) + raise FileNotFoundError(f'{input_file} not found') #-- log GIA file if debugging - logging.debug('Reading GIA file: {0}'.format(input_file)) + logging.debug(f'Reading GIA file: {input_file}') #-- opening gia data file and read contents with open(input_file, mode='r', encoding='utf8') as f: gia_data = f.read().splitlines() @@ -489,9 +490,9 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): #-- check that GIA data file is present in file system input_file = os.path.expanduser(input_file) if not os.access(input_file, os.F_OK): - raise FileNotFoundError('{0} not found'.format(input_file)) + raise FileNotFoundError(f'{input_file} not found') #-- log GIA file if debugging - logging.debug('Reading GIA file: {0}'.format(input_file)) + logging.debug(f'Reading GIA file: {input_file}') #-- The file starts with a header. 
#-- converting to numerical array (note 64 bit floating point) gia_data = np.loadtxt(input_file, skiprows=1, dtype='f8') @@ -524,9 +525,9 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): #-- check that GIA data file is present in file system input_file = os.path.expanduser(input_file) if not os.access(input_file, os.F_OK): - raise FileNotFoundError('{0} not found'.format(input_file)) + raise FileNotFoundError(f'{input_file} not found') #-- log GIA file if debugging - logging.debug('Reading GIA file: {0}'.format(input_file)) + logging.debug(f'Reading GIA file: {input_file}') #-- The file starts with a header. #-- converting to numerical array (note 64 bit floating point) dtype = {'names':('l','m','Ylms'),'formats':('i','i','f8')} @@ -554,9 +555,9 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): #-- check that GIA data file is present in file system input_file = os.path.expanduser(input_file) if not os.access(input_file, os.F_OK): - raise FileNotFoundError('{0} not found'.format(input_file)) + raise FileNotFoundError(f'{input_file} not found') #-- log GIA file if debugging - logging.debug('Reading GIA file: {0}'.format(input_file)) + logging.debug(f'Reading GIA file: {input_file}') #-- opening gia data file and read contents with open(input_file, mode='r', encoding='utf8') as f: gia_data = f.read().splitlines() @@ -610,7 +611,7 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): # ascii: reformatted GIA in ascii format elif (GIA == 'ascii'): #-- log GIA file if debugging - logging.debug('Reading GIA file: {0}'.format(input_file)) + logging.debug(f'Reading GIA file: {input_file}') #-- reading GIA data from reformatted (simplified) ascii files Ylms = gravity_toolkit.harmonics().from_ascii(input_file, date=False) Ylms.truncate(LMAX) @@ -625,7 +626,7 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): # HDF5: reformatted GIA in HDF5 format elif GIA in 
('netCDF4','HDF5'): #-- log GIA file if debugging - logging.debug('Reading GIA file: {0}'.format(input_file)) + logging.debug(f'Reading GIA file: {input_file}') #-- reading GIA data from reformatted netCDF4 and HDF5 files Ylms = gravity_toolkit.harmonics().from_file(input_file, format=GIA, date=False) @@ -644,17 +645,17 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): #-- IJ05-R2: Ivins R2 GIA Models #-- adding file specific earth parameters parameters, = re.findall(file_pattern,os.path.basename(input_file)) - gia_Ylms['title'] = '{0}_{1}'.format(prefix,parameters) + gia_Ylms['title'] = f'{prefix}_{parameters}' elif (GIA == 'ICE6G'): #-- ICE6G: ICE-6G GIA Models #-- adding file specific earth parameters parameters, = re.findall(file_pattern,os.path.basename(input_file)) - gia_Ylms['title'] = '{0}_{1}'.format(prefix,parameters) + gia_Ylms['title'] = f'{prefix}_{parameters}' elif (GIA == 'W12a'): #-- W12a: Whitehouse GIA Models #-- for Whitehouse W12a (BEST, LOWER, UPPER): model = re.findall(file_pattern,os.path.basename(input_file)).pop() - gia_Ylms['title'] = '{0}_{1}'.format(prefix,parameters[model]) + gia_Ylms['title'] = f'{prefix}_{parameters[model]}' elif (GIA == 'SM09'): #-- SM09: Simpson/Milne GIA Models #-- making parameters in the file similar to IJ05 @@ -662,7 +663,7 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): #-- upper mantle viscosity and lower mantle viscosity LTh,UMV,LMV=re.findall(file_pattern,os.path.basename(input_file)).pop() #-- formatting rheology parameters similar to IJ05 models - gia_Ylms['title'] = '{0}_{1}_.{2}_{3}'.format(prefix,LTh,UMV,LMV) + gia_Ylms['title'] = f'{prefix}_{LTh}_.{UMV}_{LMV}' elif (GIA == 'Wu10'): #-- Wu10: Wu (2010) GIA Correction gia_Ylms['title'] = 'Wu_2010' @@ -673,17 +674,17 @@ def read_GIA_model(input_file, GIA=None, MMAX=None, DATAFORM=None, **kwargs): #-- ICE6G-D: ICE-6G Version-D GIA Models #-- adding file specific earth parameters m1,p1,p2 = 
re.findall(file_pattern,os.path.basename(input_file)).pop() - gia_Ylms['title'] = '{0}_{1}{2}'.format(prefix,p1,p2) + gia_Ylms['title'] = f'{prefix}_{p1}{p2}' elif (GIA == 'AW13-ICE6G'): #-- AW13-ICE6G: Geruo A ICE-6G GIA Models #-- extract the ice history and case flags hist,case,sf=re.findall(file_pattern,os.path.basename(input_file)).pop() - gia_Ylms['title'] = '{0}_{1}_{2}'.format(prefix,hist,case) + gia_Ylms['title'] = f'{prefix}_{hist}_{case}' elif (GIA == 'AW13-IJ05'): #-- AW13-IJ05: Geruo A IJ05-R2 GIA Models #-- adding file specific earth parameters vrs,param,aux=re.findall(file_pattern,os.path.basename(input_file)).pop() - gia_Ylms['title'] = '{0}_{1}_{2}'.format(prefix,vrs,param) + gia_Ylms['title'] = f'{prefix}_{vrs}_{param}' #-- output harmonics to ascii, netCDF4 or HDF5 file if DATAFORM in ('ascii', 'netCDF4', 'HDF5'): diff --git a/gravity_toolkit/read_GRACE_harmonics.py b/gravity_toolkit/read_GRACE_harmonics.py index ae5f21d8..ceebb3ca 100644 --- a/gravity_toolkit/read_GRACE_harmonics.py +++ b/gravity_toolkit/read_GRACE_harmonics.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" read_GRACE_harmonics.py -Written by Tyler Sutterley (10/2022) +Written by Tyler Sutterley (11/2022) Contributions by Hugo Lecomte Reads GRACE files and extracts spherical harmonic data and drift rates (RL04) @@ -42,6 +42,7 @@ time.py: utilities for calculating time operations UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 10/2022: make keyword arguments part of kwargs dictionary Updated 05/2022: updated comments Updated 04/2022: updated docstrings to numpy documentation format @@ -195,7 +196,7 @@ def read_GRACE_harmonics(input_file, LMAX, **kwargs): #-- extract GRACE and GRACE-FO file headers #-- replace colons in header if within quotations head = [re.sub(r'\"(.*?)\:\s(.*?)\"',r'"\1, \2"',l) for l in file_contents - if not re.match(r'{0}|GRDOTA'.format(FLAG),l)] + if not re.match(rf'{FLAG}|GRDOTA',l)] if SFX in ('.gfc',): #-- 
extract parameters from header header_parameters = ['modelname','earth_gravity_constant','radius', @@ -331,7 +332,7 @@ def extract_file(input_file, compressed): input_file = os.path.expanduser(input_file) #-- check that data file is present in file system if not os.access(input_file, os.F_OK): - raise FileNotFoundError('{0} not found'.format(input_file)) + raise FileNotFoundError(f'{input_file} not found') #-- check if file is uncompressed byteIO object if isinstance(input_file, io.IOBase) and not compressed: #-- extract spherical harmonic coefficients diff --git a/gravity_toolkit/read_SLR_harmonics.py b/gravity_toolkit/read_SLR_harmonics.py index 328d6f31..9fcdcf05 100644 --- a/gravity_toolkit/read_SLR_harmonics.py +++ b/gravity_toolkit/read_SLR_harmonics.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" read_SLR_harmonics.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (11/2022) Reads in low-degree spherical harmonic coefficients calculated from Satellite Laser Ranging (SLR) measurements @@ -50,6 +50,7 @@ time.py: utilities for calculating time operations UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 04/2022: updated docstrings to numpy documentation format include utf-8 encoding in reads to be windows compliant Updated 12/2021: added function for converting from 7-day arcs @@ -90,7 +91,7 @@ def read_SLR_harmonics(SLR_file, **kwargs): elif bool(re.search(r'CSR_Monthly_5x5_Gravity_Harmonics',SLR_file,re.I)): return read_CSR_monthly_6x1(SLR_file, **kwargs) else: - raise Exception('Unknown SLR file format {0}'.format(SLR_file)) + raise Exception(f'Unknown SLR file format {SLR_file}') #-- PURPOSE: read monthly degree harmonic data from Satellite Laser Ranging (SLR) def read_CSR_monthly_6x1(SLR_file, SCALE=1e-10, HEADER=True): diff --git a/gravity_toolkit/read_love_numbers.py b/gravity_toolkit/read_love_numbers.py index 5d4f2103..7371e96d 100755 --- a/gravity_toolkit/read_love_numbers.py +++ 
b/gravity_toolkit/read_love_numbers.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" read_love_numbers.py -Written by Tyler Sutterley (09/2022) +Written by Tyler Sutterley (11/2022) Reads sets of load Love numbers from PREM and applies isomorphic parameters Linearly interpolates load love numbers for missing degrees @@ -56,6 +56,7 @@ 103(B12), 30205-30229, (1998) UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 09/2022: use logging for debugging level verbose output Updated 04/2022: updated docstrings to numpy documentation format added wrapper function for reading load Love numbers from file @@ -239,7 +240,7 @@ def read_love_numbers(love_numbers_file, LMAX=None, HEADER=2, #-- Center of Mass of Solid Earth alpha = 0.0 else: - raise Exception('Invalid Reference Frame {0}'.format(REFERENCE)) + raise Exception(f'Invalid Reference Frame {REFERENCE}') #-- apply isomorphic parameters for n in ('hl','kl','ll'): love[n][1] -= alpha @@ -268,7 +269,7 @@ def extract_love_numbers(love_numbers_file): love_numbers_file = os.path.expanduser(love_numbers_file) #-- check that load love number data file is present in file system if not os.access(love_numbers_file, os.F_OK): - raise FileNotFoundError('{0} not found'.format(love_numbers_file)) + raise FileNotFoundError(f'{love_numbers_file} not found') #-- Input load love number data file and read contents with open(love_numbers_file, mode='r', encoding='utf8') as f: return f.read().splitlines() @@ -358,7 +359,7 @@ def load_love_numbers(LMAX, LOVE_NUMBERS=0, REFERENCE='CF', FORMAT='tuple'): header = 1 columns = ['l','hl','ll','kl','nl','nk'] #-- log load love numbers file if debugging - logging.debug('Reading Love numbers file: {0}'.format(love_numbers_file)) + logging.debug(f'Reading Love numbers file: {love_numbers_file}') #-- LMAX of load love numbers from Han and Wahr (1995) is 696. 
#-- from Wahr (2007) linearly interpolating kl works #-- however, as we are linearly extrapolating out, do not make diff --git a/gravity_toolkit/sea_level_equation.py b/gravity_toolkit/sea_level_equation.py index ba9ca6b0..44c13006 100644 --- a/gravity_toolkit/sea_level_equation.py +++ b/gravity_toolkit/sea_level_equation.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" -sea_level_equation.py (04/2022) +sea_level_equation.py (11/2022) Solves the sea level equation with the option of including polar motion feedback Uses a Clenshaw summation to calculate the spherical harmonic summation @@ -89,6 +89,7 @@ https://doi.org/10.1029/JB090iB11p09363 UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 04/2022: updated docstrings to numpy documentation format Updated 10/2021: using python logging for handling verbose output can set custom values for BODY_TIDE_LOVE and FLUID_LOVE @@ -344,8 +345,8 @@ def sea_level_equation(loadClm, loadSlm, glon, glat, land_function, LMAX=0, sea_height = -tmass/rho_water/rad_e**2/ocean_area #-- if verbose output: print ocean area and uniform sea level height - logging.info('Total Ocean Area: {0:0.10g}'.format(ocean_area)) - logging.info('Uniform Ocean Height: {0:0.10g}'.format(sea_height)) + logging.info(f'Total Ocean Area: {ocean_area:0.10g}') + logging.info(f'Uniform Ocean Height: {sea_height:0.10g}') #-- distribute sea height over ocean harmonics height_Ylms = ocean_Ylms.scale(sea_height) @@ -404,9 +405,9 @@ def sea_level_equation(loadClm, loadSlm, glon, glat, land_function, LMAX=0, sea_height = (-tmass/rho_water/rad_e**2 - rmass)/ocean_area #-- if verbose output: print iteration, mass and anomaly for convergence - logging.info('Iteration: {0:d}'.format(n_iter)) - logging.info('Integrated Ocean Height: {0:0.10g}'.format(rmass)) - logging.info('Difference from Initial Height: {0:0.10g}'.format(sea_height)) + logging.info(f'Iteration: {n_iter:d}') + logging.info(f'Integrated Ocean Height: 
{rmass:0.10g}') + logging.info(f'Difference from Initial Height: {sea_height:0.10g}') #-- geoid component is split into two parts (Kendall 2005) #-- this part is the spatially uniform shift in the geoid that is diff --git a/gravity_toolkit/spatial.py b/gravity_toolkit/spatial.py index 23b94693..544fc7b7 100644 --- a/gravity_toolkit/spatial.py +++ b/gravity_toolkit/spatial.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" spatial.py -Written by Tyler Sutterley (08/2022) +Written by Tyler Sutterley (11/2022) Data class for reading, writing and processing spatial data @@ -20,6 +20,7 @@ time.py: utilities for calculating time operations UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 08/2022: fix output latitude HDF5 and netCDF4 attributes place index filename within try/except statement Updated 04/2022: updated docstrings to numpy documentation format @@ -145,7 +146,7 @@ def case_insensitive_filename(self,filename): directory = os.path.dirname(os.path.expanduser(filename)) f = [f for f in os.listdir(directory) if re.match(basename,f,re.I)] if not f: - errmsg = '{0} not found in file system'.format(filename) + errmsg = f'{filename} not found in file system' raise FileNotFoundError(errmsg) self.filename = os.path.join(directory,f.pop()) #-- print filename @@ -1234,8 +1235,8 @@ def subset(self, months): #-- check that all months are available months_check = list(set(months) - set(self.month)) if months_check: - m = ','.join(['{0:03d}'.format(m) for m in months_check]) - raise IOError('GRACE/GRACE-FO months {0} not Found'.format(m)) + m = ','.join([f'{m:03d}' for m in months_check]) + raise IOError(f'GRACE/GRACE-FO months {m} not Found') #-- indices to sort data objects months_list = [i for i,m in enumerate(self.month) if m in months] #-- output spatial object diff --git a/gravity_toolkit/time.py b/gravity_toolkit/time.py index f817ba54..5516508a 100644 --- a/gravity_toolkit/time.py +++ b/gravity_toolkit/time.py @@ -1,7 +1,7 @@
#!/usr/bin/env python u""" time.py -Written by Tyler Sutterley (10/2022) +Written by Tyler Sutterley (11/2022) Utilities for calculating time operations PYTHON DEPENDENCIES: @@ -11,6 +11,7 @@ https://dateutil.readthedocs.io/en/stable/ UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 10/2022: added more time parsing for longer periods Updated 08/2022: added file parsing functions from GRACE date utilities added function to dynamically select newest version of granules @@ -89,7 +90,7 @@ def parse_date_string(date_string): #-- split the date string into units and epoch units, epoch = split_date_string(date_string) if units not in _to_sec.keys(): - raise ValueError('Invalid units: {0}'.format(units)) + raise ValueError(f'Invalid units: {units}') #-- return the epoch (as list) and the time unit conversion factors return (datetime_to_list(epoch), _to_sec[units]) @@ -106,7 +107,7 @@ def split_date_string(date_string): try: units,_,epoch = date_string.split(None,2) except ValueError: - raise ValueError('Invalid format: {0}'.format(date_string)) + raise ValueError(f'Invalid format: {date_string}') else: return (units.lower(),dateutil.parser.parse(epoch)) @@ -707,8 +708,8 @@ def convert_julian(JD, **kwargs): deprecated_keywords = dict(ASTYPE='astype', FORMAT='format') for old,new in deprecated_keywords.items(): if old in kwargs.keys(): - warnings.warn("""Deprecated keyword argument {0}. - Changed to '{1}'""".format(old,new), + warnings.warn(f"""Deprecated keyword argument {old}. 
+ Changed to '{new}'""", DeprecationWarning) #-- set renamed argument to not break workflows kwargs[new] = copy.copy(kwargs[old]) diff --git a/gravity_toolkit/tools.py b/gravity_toolkit/tools.py index 3a28c1c4..84d74b00 100644 --- a/gravity_toolkit/tools.py +++ b/gravity_toolkit/tools.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" tools.py -Written by Tyler Sutterley (06/2022) +Written by Tyler Sutterley (11/2022) Jupyter notebook, user interface and plotting tools PYTHON DEPENDENCIES: @@ -27,6 +27,7 @@ utilities.py: download and management utilities for files UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 06/2022: place ipython and tkinter imports within try/except Updated 05/2022: adjusted mask oceans function to be able to output mask Updated 04/2022: updated docstrings to numpy documentation format @@ -898,7 +899,7 @@ def levels(self): def label(self): """return tick labels for Matplotlib colorbars """ - return ['{0:0.0f}'.format(ct) for ct in self.levels] + return [f'{ct:0.0f}' for ct in self.levels] def from_cpt(filename, use_extremes=True, **kwargs): """ @@ -1072,7 +1073,7 @@ def custom_colormap(N, map_name, **kwargs): for i in range(N): color_map[i,:] = colorsys.hsv_to_rgb(h[i],s[i],v[i]) else: - raise ValueError('Incorrect color map specified ({0})'.format(map_name)) + raise ValueError(f'Incorrect color map specified ({map_name})') # output RGB lists containing normalized location and colors Xnorm = len(color_map) - 1.0 @@ -1224,7 +1225,7 @@ def mask_oceans(xin, yin, data=None, order=0, lakes=False, masked data grid """ # read in land/sea mask - lsmask = get_data_path(['data','landsea_{0}.nc'.format(resolution)]) + lsmask = get_data_path(['data',f'landsea_{resolution}.nc']) #-- Land-Sea Mask with Antarctica from Rignot (2017) and Greenland from GEUS #-- 0=Ocean, 1=Land, 2=Lake, 3=Small Island, 4=Ice Shelf #-- Open the land-sea NetCDF file for reading diff --git a/gravity_toolkit/utilities.py 
b/gravity_toolkit/utilities.py index 0d03d138..1c01e6c9 100644 --- a/gravity_toolkit/utilities.py +++ b/gravity_toolkit/utilities.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" utilities.py -Written by Tyler Sutterley (08/2022) +Written by Tyler Sutterley (11/2022) Download and management utilities for syncing time and auxiliary files PYTHON DEPENDENCIES: @@ -9,6 +9,7 @@ https://pypi.python.org/pypi/lxml UPDATE HISTORY: + Updated 11/2022: add CMR queries for collection metadata Updated 08/2022: add regular expression function for finding files Updated 07/2022: add s3 endpoints and buckets for Earthdata Cumulus Updated 05/2022: function for extracting bucket name from presigned url @@ -239,7 +240,7 @@ def copy(source, destination, move=False, **kwargs): source = os.path.abspath(os.path.expanduser(source)) destination = os.path.abspath(os.path.expanduser(destination)) #-- log source and destination - logging.info('{0} -->\n\t{1}'.format(source,destination)) + logging.info(f'{source} -->\n\t{destination}') shutil.copyfile(source, destination) shutil.copystat(source, destination) if move: @@ -268,7 +269,7 @@ def create_unique_file(filename): else: return os.fdopen(fd, 'w+') #-- new filename adds counter the between fileBasename and fileExtension - filename = '{0}_{1:d}{2}'.format(fileBasename, counter, fileExtension) + filename = f'{fileBasename}_{counter:d}{fileExtension}' counter += 1 #-- PURPOSE: check ftp connection @@ -333,8 +334,8 @@ def ftp_list(HOST, username=None, password=None, timeout=None, #-- try to connect to ftp host try: ftp = ftplib.FTP(HOST[0],timeout=timeout) - except (socket.gaierror,IOError): - raise RuntimeError('Unable to connect to {0}'.format(HOST[0])) + except (socket.gaierror,IOError) as e: + raise RuntimeError(f'Unable to connect to {HOST[0]}') else: ftp.login(username,password) #-- list remote path @@ -345,7 +346,7 @@ def ftp_list(HOST, username=None, password=None, timeout=None, for i,f in enumerate(output): try: #-- try sending 
modification time command - mdtm = ftp.sendcmd('MDTM {0}'.format(f)) + mdtm = ftp.sendcmd(f'MDTM {f}') except ftplib.error_perm: #-- directories will return with an error pass @@ -417,15 +418,15 @@ def from_ftp(HOST, username=None, password=None, timeout=None, try: #-- try to connect to ftp host ftp = ftplib.FTP(HOST[0],timeout=timeout) - except (socket.gaierror,IOError): - raise RuntimeError('Unable to connect to {0}'.format(HOST[0])) + except (socket.gaierror,IOError) as e: + raise RuntimeError(f'Unable to connect to {HOST[0]}') else: ftp.login(username,password) #-- remote path ftp_remote_path = posixpath.join(*HOST[1:]) #-- copy remote file contents to bytesIO object remote_buffer = io.BytesIO() - ftp.retrbinary('RETR {0}'.format(ftp_remote_path), + ftp.retrbinary(f'RETR {ftp_remote_path}', remote_buffer.write, blocksize=chunk) remote_buffer.seek(0) #-- save file basename with bytesIO object @@ -433,7 +434,7 @@ def from_ftp(HOST, username=None, password=None, timeout=None, #-- generate checksum hash for remote file remote_hash = hashlib.md5(remote_buffer.getvalue()).hexdigest() #-- get last modified date of remote file and convert into unix time - mdtm = ftp.sendcmd('MDTM {0}'.format(ftp_remote_path)) + mdtm = ftp.sendcmd(f'MDTM {ftp_remote_path}') remote_mtime = get_unix_time(mdtm[4:], format="%Y%m%d%H%M%S") #-- compare checksums if local and (hash != remote_hash): @@ -586,7 +587,7 @@ def from_http(HOST, timeout=None, context=ssl.SSLContext(), #-- Create and submit request. 
request = urllib2.Request(posixpath.join(*HOST)) response = urllib2.urlopen(request,timeout=timeout,context=context) - except (urllib2.HTTPError, urllib2.URLError): + except (urllib2.HTTPError, urllib2.URLError) as e: raise Exception('Download error from {0}'.format(posixpath.join(*HOST))) else: #-- copy remote file contents to bytesIO object @@ -668,9 +669,9 @@ def attempt_login(urs, context=ssl.SSLContext(), pass # if username or password are not available if not username: - username = builtins.input('Username for {0}: '.format(urs)) + username = builtins.input(f'Username for {urs}: ') if not password: - prompt = 'Password for {0}@{1}: '.format(username, urs) + prompt = f'Password for {username}@{urs}: ' password = getpass.getpass(prompt=prompt) # for each retry for retry in range(kwargs['retries']): @@ -691,7 +692,7 @@ def attempt_login(urs, context=ssl.SSLContext(), else: return opener # reattempt login - username = builtins.input('Username for {0}: '.format(urs)) + username = builtins.input(f'Username for {urs}: ') password = getpass.getpass(prompt=prompt) # reached end of available retries raise RuntimeError('End of Retries: Check NASA Earthdata credentials') @@ -752,7 +753,7 @@ def build_opener(username, password, context=ssl.SSLContext(), #-- Encode username/password for request authorization headers #-- add Authorization header to opener if authorization_header: - b64 = base64.b64encode('{0}:{1}'.format(username,password).encode()) + b64 = base64.b64encode(f'{username}:{password}'.encode()) opener.addheaders = [("Authorization","Basic {0}".format(b64.decode()))] #-- Now all calls to urllib2.urlopen use our opener. 
urllib2.install_opener(opener) @@ -778,7 +779,8 @@ def build_opener(username, password, context=ssl.SSLContext(), 'lpdaac': 'lp-prod-protected', 'nsidc': 'nsidc-cumulus-prod-protected', 'ornldaac': 'ornl-cumulus-prod-protected', - 'podaac': 'podaac-ops-cumulus-protected' + 'podaac': 'podaac-ops-cumulus-protected', + 'podaac-doc': 'podaac-ops-cumulus-docs' } #-- PURPOSE: get AWS s3 client for PO.DAAC Cumulus @@ -1210,7 +1212,7 @@ def cmr_filter_json(search_results, endpoint="data"): #-- check that there are urls for request if ('feed' not in search_results) or ('entry' not in search_results['feed']): return (granule_names,granule_urls) - # descriptor links for each endpoint + #-- descriptor links for each endpoint rel = {} rel['data'] = "http://esipfed.org/ns/fedsearch/1.1/data#" rel['s3'] = "http://esipfed.org/ns/fedsearch/1.1/s3#" @@ -1226,6 +1228,45 @@ def cmr_filter_json(search_results, endpoint="data"): #-- return the list of urls, granule ids and modified times return (granule_names,granule_urls,granule_mtimes) +#-- PURPOSE: filter the CMR json response for desired metadata files +def cmr_metadata_json(search_results, endpoint="data"): + """ + Filter the CMR json response for desired metadata files + + Parameters + ---------- + search_results: dict + json response from CMR query + endpoint: str, default 'data' + url endpoint type + + - ``'documentation'``: PO.DAAC documentation archive + - ``'data'``: PO.DAAC https archive + - ``'s3'``: PO.DAAC Cumulus AWS S3 bucket + + Returns + ------- + collection_urls: list + urls from collection of endpoint type + """ + #-- output list of collection urls + collection_urls = [] + #-- check that there are urls for request + if ('feed' not in search_results) or ('entry' not in search_results['feed']): + return collection_urls + #-- descriptor links for each endpoint + rel = {} + rel['documentation'] = "http://esipfed.org/ns/fedsearch/1.1/documentation#" + rel['data'] = "http://esipfed.org/ns/fedsearch/1.1/data#" + rel['s3'] 
= "http://esipfed.org/ns/fedsearch/1.1/s3#" + #-- iterate over references and get cmr location + for entry in search_results['feed']['entry']: + for link in entry['links']: + if (link['rel'] == rel[endpoint]): + collection_urls.append(link['href']) + #-- return the list of urls + return collection_urls + #-- PURPOSE: cmr queries for GRACE/GRACE-FO products def cmr(mission=None, center=None, release=None, level='L2', product=None, solution='BA01', version='0', start_date=None, end_date=None, @@ -1290,36 +1331,37 @@ def cmr(mission=None, center=None, release=None, level='L2', product=None, #-- create "opener" (OpenerDirector instance) opener = urllib2.build_opener(*handler) #-- build CMR query + cmr_query_type = 'granules' cmr_format = 'json' cmr_page_size = 2000 CMR_HOST = ['https://cmr.earthdata.nasa.gov','search', - 'granules.{0}'.format(cmr_format)] + f'{cmr_query_type}.{cmr_format}'] #-- build list of CMR query parameters CMR_KEYS = [] - CMR_KEYS.append('?provider={0}'.format(provider)) + CMR_KEYS.append(f'?provider={provider}') CMR_KEYS.append('&sort_key[]=start_date') CMR_KEYS.append('&sort_key[]=producer_granule_id') CMR_KEYS.append('&scroll=true') - CMR_KEYS.append('&page_size={0}'.format(cmr_page_size)) + CMR_KEYS.append(f'&page_size={cmr_page_size}') #-- dictionary of product shortnames short_names = cmr_product_shortname(mission, center, release, level=level, version=version) for short_name in short_names: - CMR_KEYS.append('&short_name={0}'.format(short_name)) + CMR_KEYS.append(f'&short_name={short_name}') #-- append keys for start and end time #-- verify that start and end times are in ISO format start_date = isoformat(start_date) if start_date else '' end_date = isoformat(end_date) if end_date else '' - CMR_KEYS.append('&temporal={0},{1}'.format(start_date, end_date)) + CMR_KEYS.append(f'&temporal={start_date},{end_date}') #-- append keys for querying specific products CMR_KEYS.append("&options[readable_granule_name][pattern]=true") 
CMR_KEYS.append("&options[spatial][or]=true") readable_granule = cmr_readable_granules(product, level=level, solution=solution, version=version) - CMR_KEYS.append("&readable_granule_name[]={0}".format(readable_granule)) + CMR_KEYS.append(f"&readable_granule_name[]={readable_granule}") #-- full CMR query url cmr_query_url = "".join([posixpath.join(*CMR_HOST),*CMR_KEYS]) - logging.info('CMR request={0}'.format(cmr_query_url)) + logging.info(f'CMR request={cmr_query_url}') #-- output list of granule names and urls granule_names = [] granule_urls = [] @@ -1346,6 +1388,92 @@ def cmr(mission=None, center=None, release=None, level='L2', product=None, #-- return the list of granule ids, urls and modification times return (granule_names, granule_urls, granule_mtimes) +#-- PURPOSE: cmr queries for GRACE/GRACE-FO auxiliary data and documentation +def cmr_metadata(mission=None, center=None, release=None, level='L2', + version='0', provider='POCLOUD', endpoint='data', pattern='', + verbose=False, fid=sys.stdout): + """ + Query the NASA Common Metadata Repository (CMR) for GRACE/GRACE-FO + auxiliary data and documentation + + Parameters + ---------- + mission: str or NoneType, default None + GRACE (``'grace'``) or GRACE Follow-On (``'grace-fo'``) + center: str or NoneType, default None + GRACE/GRACE-FO processing center + release: str or NoneType, default None + GRACE/GRACE-FO data release + level: str, default 'L2' + GRACE/GRACE-FO product level + version: str, default '0' + GRACE/GRACE-FO Level-2 data version + provider: str, default 'POCLOUD' + CMR data provider + + - ``'PODAAC'``: PO.DAAC Drive + - ``'POCLOUD'``: PO.DAAC Cumulus + endpoint: str, default 'data' + url endpoint type + + - ``'documentation'``: PO.DAAC documentation archive + - ``'data'``: PO.DAAC https archive + - ``'s3'``: PO.DAAC Cumulus AWS S3 bucket + pattern: str, default '' + regular expression pattern for reducing list + verbose: bool, default False + print CMR query information + fid: obj, default 
sys.stdout + open file object to print if verbose + + Returns + ------- + collection_urls: list + urls from collection of endpoint type + """ + #-- create logger + loglevel = logging.INFO if verbose else logging.CRITICAL + logging.basicConfig(stream=fid, level=loglevel) + #-- build urllib2 opener with SSL context + #-- https://docs.python.org/3/howto/urllib2.html#id5 + handler = [] + #-- Create cookie jar for storing cookies + cookie_jar = CookieJar() + handler.append(urllib2.HTTPCookieProcessor(cookie_jar)) + handler.append(urllib2.HTTPSHandler(context=ssl.SSLContext())) + #-- create "opener" (OpenerDirector instance) + opener = urllib2.build_opener(*handler) + #-- build CMR query + cmr_query_type = 'collections' + cmr_format = 'json' + CMR_HOST = ['https://cmr.earthdata.nasa.gov','search', + f'{cmr_query_type}.{cmr_format}'] + #-- build list of CMR query parameters + CMR_KEYS = [] + CMR_KEYS.append(f'?provider={provider}') + #-- dictionary of product shortnames + short_names = cmr_product_shortname(mission, center, release, + level=level, version=version) + for short_name in short_names: + CMR_KEYS.append(f'&short_name={short_name}') + #-- full CMR query url + cmr_query_url = "".join([posixpath.join(*CMR_HOST),*CMR_KEYS]) + logging.info(f'CMR request={cmr_query_url}') + #-- query CMR for collection metadata + req = urllib2.Request(cmr_query_url) + response = opener.open(req) + #-- read the CMR search as JSON + search_page = json.loads(response.read().decode('utf8')) + #-- filter the JSON response for desired endpoint links + collection_urls = cmr_metadata_json(search_page, endpoint=endpoint) + #-- reduce using regular expression pattern + if pattern: + i = [i for i,f in enumerate(collection_urls) if re.search(pattern,f)] + #-- reduce list of collection_urls + collection_urls = [collection_urls[indice] for indice in i] + #-- return the list of collection urls + return collection_urls + #-- PURPOSE: create and compile regular expression operator to find GRACE files 
def compile_regex_pattern(PROC, DREL, DSET, mission=None, solution=r'BA01', version=r'\d+'): @@ -1388,11 +1516,11 @@ def compile_regex_pattern(PROC, DREL, DSET, mission=None, """ #-- verify inputs if mission and mission not in ('GRAC','GRFO'): - raise ValueError('Unknown mission {0}'.format(mission)) + raise ValueError(f'Unknown mission {mission}') if PROC not in ('CNES','CSR','GFZ','JPL'): - raise ValueError('Unknown processing center {0}'.format(PROC)) + raise ValueError(f'Unknown processing center {PROC}') if DSET not in ('GAA','GAB','GAC','GAD','GSM'): - raise ValueError('Unknown Level-2 product {0}'.format(DSET)) + raise ValueError(f'Unknown Level-2 product {DSET}') if isinstance(version, int): version = str(version).zfill(2) #-- compile regular expression operator for inputs @@ -1563,7 +1691,7 @@ def to_figshare(files,username=None,password=None,directory=None, os.path.basename(local_file)) #-- open local file and send bytes with open(os.path.expanduser(local_file),'rb') as fp: - ftps.storbinary('STOR {0}'.format(ftp_remote_path), fp, + ftps.storbinary(f'STOR {ftp_remote_path}', fp, blocksize=chunk, callback=None, rest=None) #-- PURPOSE: download satellite laser ranging files from CSR @@ -1755,7 +1883,7 @@ def icgem_list(host='http://icgem.gfz-potsdam.de/tom_longtime',timeout=None, request = urllib2.Request(host) tree = lxml.etree.parse(urllib2.urlopen(request,timeout=timeout),parser) except: - raise Exception('List error from {0}'.format(host)) + raise Exception(f'List error from {host}') else: #-- read and parse request for files colfiles = tree.xpath('//td[@class="tom-cell-modelfile"]//a/@href') diff --git a/gravity_toolkit/version.py b/gravity_toolkit/version.py index 22c7525a..a85116a2 100644 --- a/gravity_toolkit/version.py +++ b/gravity_toolkit/version.py @@ -8,4 +8,4 @@ # get version version = get_distribution("gravity_toolkit").version # append "v" before the version -full_version = "v{0}".format(version) +full_version = f"v{version}" diff --git 
a/scripts/aod1b_geocenter.py b/scripts/aod1b_geocenter.py index d36f1b88..d91005bd 100644 --- a/scripts/aod1b_geocenter.py +++ b/scripts/aod1b_geocenter.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" aod1b_geocenter.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (11/2022) Contributions by Hugo Lecomte (03/2021) Reads GRACE/GRACE-FO level-1b dealiasing data files for a specific product @@ -34,6 +34,7 @@ utilities.py: download and management utilities for files UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 04/2022: use argparse descriptions within documentation Updated 12/2021: can use variable loglevels for verbose output Updated 11/2021: use gravity_toolkit geocenter class for operations @@ -102,7 +103,7 @@ def aod1b_geocenter(base_dir, fx = re.compile(r'AOD1B_\d+-\d+-(\d+)_X_\d+.asc(.gz)?$', re.VERBOSE) #-- compile regular expressions operator for the clm/slm headers #-- for the specific AOD1b product - hx = re.compile(r'^DATA.*SET.*{0}'.format(DSET), re.VERBOSE) + hx = re.compile(rf'^DATA.*SET.*{DSET}', re.VERBOSE) #-- compile regular expression operator to find numerical instances #-- will extract the data from the file regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?' 
@@ -131,10 +132,10 @@ def aod1b_geocenter(base_dir, #-- AOD1B data products product = {} - product['atm'] = 'Atmospheric loading from {0}'.format(ATMOSPHERE) - product['ocn'] = 'Oceanic loading from {0}'.format(OCEAN_MODEL) + product['atm'] = f'Atmospheric loading from {ATMOSPHERE}' + product['ocn'] = f'Oceanic loading from {OCEAN_MODEL}' product['glo'] = 'Global atmospheric and oceanic loading' - product['oba'] = 'Ocean bottom pressure from {0}'.format(OCEAN_MODEL) + product['oba'] = f'Ocean bottom pressure from {OCEAN_MODEL}' #-- AOD1B directory and output geocenter directory grace_dir = os.path.join(base_dir,'AOD1B',DREL) @@ -151,7 +152,7 @@ def aod1b_geocenter(base_dir, YY,MM,SFX = tx.findall(i).pop() YY,MM = np.array([YY,MM], dtype=np.int64) #-- output monthly geocenter file - FILE = 'AOD1B_{0}_{1}_{2:4d}_{3:02d}.txt'.format(DREL,DSET,YY,MM) + FILE = f'AOD1B_{DREL}_{DSET}_{YY:4d}_{MM:02d}.txt' #-- if output file exists: check if input tar file is newer TEST = False OVERWRITE = ' (clobber)' diff --git a/scripts/aod1b_oblateness.py b/scripts/aod1b_oblateness.py index 2de05eef..701d0918 100644 --- a/scripts/aod1b_oblateness.py +++ b/scripts/aod1b_oblateness.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" aod1b_oblateness.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (11/2022) Contributions by Hugo Lecomte (03/2021) Reads GRACE/GRACE-FO level-1b dealiasing data files for a specific product @@ -36,6 +36,7 @@ utilities.py: download and management utilities for files UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 04/2022: use argparse descriptions within documentation Updated 12/2021: can use variable loglevels for verbose output Updated 10/2021: using python logging for handling verbose output @@ -102,7 +103,7 @@ def aod1b_oblateness(base_dir, fx = re.compile(r'AOD1B_\d+-\d+-(\d+)_X_\d+.asc(.gz)?$', re.VERBOSE) #-- compile regular expressions operator for the clm/slm headers #-- for the specific 
AOD1b product - hx = re.compile(r'^DATA.*SET.*{0}'.format(DSET), re.VERBOSE) + hx = re.compile(rf'^DATA.*SET.*{DSET}', re.VERBOSE) #-- compile regular expression operator to find numerical instances #-- will extract the data from the file regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?' @@ -131,10 +132,10 @@ def aod1b_oblateness(base_dir, #-- AOD1B data products product = {} - product['atm'] = 'Atmospheric loading from {0}'.format(ATMOSPHERE) - product['ocn'] = 'Oceanic loading from {0}'.format(OCEAN_MODEL) + product['atm'] = f'Atmospheric loading from {ATMOSPHERE}' + product['ocn'] = f'Oceanic loading from {OCEAN_MODEL}' product['glo'] = 'Global atmospheric and oceanic loading' - product['oba'] = 'Ocean bottom pressure from {0}'.format(OCEAN_MODEL) + product['oba'] = f'Ocean bottom pressure from {OCEAN_MODEL}' #-- AOD1B directory and output oblateness directory grace_dir = os.path.join(base_dir,'AOD1B',DREL) @@ -151,7 +152,7 @@ def aod1b_oblateness(base_dir, YY,MM,SFX = tx.findall(i).pop() YY,MM = np.array([YY,MM], dtype=np.int64) #-- output monthly oblateness file - FILE = 'AOD1B_{0}_{1}_{2:4d}_{3:02d}.txt'.format(DREL,DSET,YY,MM) + FILE = f'AOD1B_{DREL}_{DSET}_{YY:4d}_{MM:02d}.txt' #-- if output file exists: check if input tar file is newer TEST = False OVERWRITE = ' (clobber)' diff --git a/scripts/calc_degree_one.py b/scripts/calc_degree_one.py index 8f569141..23c19cdf 100755 --- a/scripts/calc_degree_one.py +++ b/scripts/calc_degree_one.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" calc_degree_one.py -Written by Tyler Sutterley (09/2022) +Written by Tyler Sutterley (11/2022) Calculates degree 1 variations using GRACE coefficients of degree 2 and greater, and ocean bottom pressure variations from ECCO and OMCT/MPIOM @@ -156,6 +156,7 @@ https://doi.org/10.1029/2007JB005338 UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 09/2022: add option to replace degree 4 zonal harmonics with SLR Updated 
08/2022: set default land-sea mask file in arguments Updated 07/2022: set plot tick formatter to not use offsets @@ -261,10 +262,10 @@ def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) - logging.info('module name: {0}'.format(__name__)) + logging.info(f'module name: {__name__}') if hasattr(os, 'getppid'): - logging.info('parent process: {0:d}'.format(os.getppid())) - logging.info('process id: {0:d}'.format(os.getpid())) + logging.info(f'parent process: {os.getppid():d}') + logging.info(f'process id: {os.getpid():d}') #-- PURPOSE: import GRACE/GRACE-FO GSM files for a given months range def load_grace_GSM(base_dir, PROC, DREL, START, END, MISSING, LMAX, @@ -375,26 +376,26 @@ def calc_degree_one(base_dir, PROC, DREL, MODEL, LMAX, RAD, slf_str = '_SLF' if FINGERPRINT else '' #-- output flag for low-degree harmonic replacements if SLR_21 in ('CSR','GFZ','GSFC'): - C21_str = '_w{0}_21'.format(SLR_21) + C21_str = f'_w{SLR_21}_21' else: C21_str = '' if SLR_22 in ('CSR','GSFC'): - C22_str = '_w{0}_22'.format(SLR_22) + C22_str = f'_w{SLR_22}_22' else: C22_str = '' if SLR_C30 in ('GSFC',): #-- C30 replacement now default for all solutions C30_str = '' elif SLR_C30 in ('CSR','GFZ','LARES'): - C30_str = '_w{0}_C30'.format(SLR_C30) + C30_str = f'_w{SLR_C30}_C30' else: C30_str = '' if SLR_C40 in ('CSR','GSFC','LARES'): - C40_str = '_w{0}_C40'.format(SLR_C40) + C40_str = f'_w{SLR_C40}_C40' else: C40_str = '' if SLR_C50 in ('CSR','GSFC','LARES'): - C50_str = '_w{0}_C50'.format(SLR_C50) + C50_str = f'_w{SLR_C50}_C50' else: C50_str = '' #-- combine satellite laser ranging flags @@ -1497,7 +1498,7 @@ def main(): #-- if there has been an error exception #-- print the type, value, and stack trace of the #-- current exception being handled - logging.critical('process id {0:d} failed'.format(os.getpid())) + logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) if args.log:#-- write failed job completion log file 
output_error_log_file(args) diff --git a/scripts/calc_mascon.py b/scripts/calc_mascon.py index 10140d98..a86cb880 100644 --- a/scripts/calc_mascon.py +++ b/scripts/calc_mascon.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" calc_mascon.py -Written by Tyler Sutterley (09/2022) +Written by Tyler Sutterley (11/2022) Calculates a time-series of regional mass anomalies through a least-squares mascon procedure from GRACE/GRACE-FO time-variable gravity data @@ -156,6 +156,7 @@ https://doi.org/10.1029/2005GL025305 UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 09/2022: add option to replace degree 4 zonal harmonics with SLR Updated 04/2022: use wrapper function for reading load Love numbers include utf-8 encoding in reads to be windows compliant @@ -256,10 +257,10 @@ def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) - logging.info('module name: {0}'.format(__name__)) + logging.info(f'module name: {__name__}') if hasattr(os, 'getppid'): - logging.info('parent process: {0:d}'.format(os.getppid())) - logging.info('process id: {0:d}'.format(os.getpid())) + logging.info(f'parent process: {os.getppid():d}') + logging.info(f'process id: {os.getpid():d}') #-- PURPOSE: calculate a regional time-series through a least #-- squares mascon process @@ -327,17 +328,17 @@ def calc_mascon(base_dir, PROC, DREL, DSET, LMAX, RAD, rad_e = factors.rad_e #-- for datasets not GSM: will add a label for the dataset - dset_str = '' if (DSET == 'GSM') else '_{0}'.format(DSET) + dset_str = '' if (DSET == 'GSM') else f'_{DSET}' #-- atmospheric ECMWF "jump" flag (if ATM) atm_str = '_wATM' if ATM else '' #-- output string for both LMAX==MMAX and LMAX != MMAX cases MMAX = np.copy(LMAX) if not MMAX else MMAX - order_str = 'M{0:d}'.format(MMAX) if (MMAX != LMAX) else '' + order_str = f'M{MMAX:d}' if (MMAX != LMAX) else '' #-- Calculating the Gaussian smoothing for radius RAD if (RAD != 0): wt = 2.0*np.pi*gauss_weights(RAD,LMAX) - 
gw_str = '_r{0:0.0f}km'.format(RAD) + gw_str = f'_r{RAD:0.0f}km' else: #-- else = 1 wt = np.ones((LMAX+1)) @@ -501,7 +502,7 @@ def calc_mascon(base_dir, PROC, DREL, DSET, LMAX, RAD, #-- if lower case, will capitalize mascon_base = mascon_base.upper() #-- if mascon name contains degree and order info, remove - mascon_name.append(mascon_base.replace('_L{0:d}'.format(LMAX),'')) + mascon_name.append(mascon_base.replace(f'_L{LMAX:d}', '')) #-- create single harmonics object from list mascon_Ylms = harmonics().from_list(mascon_list, date=False) @@ -1004,7 +1005,7 @@ def main(): #-- if there has been an error exception #-- print the type, value, and stack trace of the #-- current exception being handled - logging.critical('process id {0:d} failed'.format(os.getpid())) + logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) if args.log:#-- write failed job completion log file output_error_log_file(args) diff --git a/scripts/calc_sensitivity_kernel.py b/scripts/calc_sensitivity_kernel.py index c9e5021c..b13be33c 100644 --- a/scripts/calc_sensitivity_kernel.py +++ b/scripts/calc_sensitivity_kernel.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" calc_sensitivity_kernel.py -Written by Tyler Sutterley (07/2022) +Written by Tyler Sutterley (11/2022) Calculates spatial sensitivity kernels through a least-squares mascon procedure @@ -82,6 +82,7 @@ https://doi.org/10.1029/2009GL039401 UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 07/2022: create mask for output gridded variables made creating the spatial outputs optional to improve compute time Updated 04/2022: use wrapper function for reading load Love numbers @@ -153,10 +154,10 @@ def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) - logging.info('module name: {0}'.format(__name__)) + logging.info(f'module name: {__name__}') if hasattr(os, 'getppid'): - logging.info('parent process: {0:d}'.format(os.getppid())) - 
logging.info('process id: {0:d}'.format(os.getpid())) + logging.info(f'parent process: {os.getppid():d}') + logging.info(f'process id: {os.getpid():d}') #-- PURPOSE: calculate a regional time-series through a least #-- squares mascon process @@ -204,12 +205,12 @@ def calc_sensitivity_kernel(LMAX, RAD, #-- input/output string for both LMAX==MMAX and LMAX != MMAX cases MMAX = np.copy(LMAX) if not MMAX else MMAX - order_str = 'M{0:d}'.format(MMAX) if (MMAX != LMAX) else '' + order_str = f'M{MMAX:d}' if (MMAX != LMAX) else '' #-- Calculating the Gaussian smoothing for radius RAD if (RAD != 0): wt = 2.0*np.pi*gauss_weights(RAD,LMAX) - gw_str = '_r{0:0.0f}km'.format(RAD) + gw_str = f'_r{RAD:0.0f}km' else: #-- else = 1 wt = np.ones((LMAX+1)) @@ -261,7 +262,7 @@ def calc_sensitivity_kernel(LMAX, RAD, #-- if lower case, will capitalize mascon_base = mascon_base.upper() #-- if mascon name contains degree and order info, remove - mascon_name.append(mascon_base.replace('_L{0:d}'.format(LMAX),'')) + mascon_name.append(mascon_base.replace(f'_L{LMAX:d}', '')) #-- create single harmonics object from list mascon_Ylms = harmonics().from_list(mascon_list, date=False) @@ -609,7 +610,7 @@ def main(): #-- if there has been an error exception #-- print the type, value, and stack trace of the #-- current exception being handled - logging.critical('process id {0:d} failed'.format(os.getpid())) + logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) if args.log:#-- write failed job completion log file output_error_log_file(args) diff --git a/scripts/cnes_grace_sync.py b/scripts/cnes_grace_sync.py index b501ba9a..648557bc 100755 --- a/scripts/cnes_grace_sync.py +++ b/scripts/cnes_grace_sync.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" cnes_grace_sync.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (11/2022) CNES/GRGS GRACE data download program for gravity field products https://grace.obs-mip.fr/ @@ -35,6 +35,7 @@ utilities.py: 
download and management utilities for syncing files UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 04/2022: use argparse descriptions within documentation Updated 12/2021: can use variable loglevels for verbose output Updated 10/2021: using python logging for handling verbose output @@ -168,10 +169,10 @@ def cnes_grace_sync(DIRECTORY, DREL=[], TIMEOUT=None, LOG=False, #-- output to log file #-- format: CNES_sync_2002-04-01.log today = time.strftime('%Y-%m-%d',time.localtime()) - LOGFILE = 'CNES_sync_{0}.log'.format(today) + LOGFILE = f'CNES_sync_{today}.log' fid1 = open(os.path.join(DIRECTORY,LOGFILE),'w') logging.basicConfig(stream=fid1,level=logging.INFO) - logging.info('CNES Sync Log ({0})'.format(today)) + logging.info(f'CNES Sync Log ({today})') else: #-- standard output (terminal output) logging.basicConfig(level=logging.INFO) @@ -181,7 +182,7 @@ def cnes_grace_sync(DIRECTORY, DREL=[], TIMEOUT=None, LOG=False, for rl in DREL: #-- datasets (GSM, GAA, GAB) for ds in DSET[rl]: - logging.info('CNES/{0}/{1}'.format(rl, ds)) + logging.info(f'CNES/{rl}/{ds}') #-- specific GRACE directory local_dir = os.path.join(DIRECTORY, 'CNES', rl, ds) #-- check if GRACE directory exists and recursively create if not @@ -204,7 +205,7 @@ def cnes_grace_sync(DIRECTORY, DREL=[], TIMEOUT=None, LOG=False, response = gravity_toolkit.utilities.urllib2.urlopen(request, timeout=TIMEOUT) #-- change modification time to remote - time_string=response.headers['last-modified'] + time_string = response.headers['last-modified'] remote_mtime=gravity_toolkit.utilities.get_unix_time(time_string, format='%a, %d %b %Y %H:%M:%S %Z') #-- keep remote modification time of file and local access time @@ -219,7 +220,7 @@ def cnes_grace_sync(DIRECTORY, DREL=[], TIMEOUT=None, LOG=False, for member in member_list: #-- local gzipped version of the file fi = os.path.basename(member.name) - local_file = os.path.join(local_dir,'{0}.gz'.format(fi)) + local_file = 
os.path.join(local_dir, f'{fi}.gz') gzip_copy_file(tar, member, local_file, CLOBBER, MODE) #-- close the tar file tar.close() @@ -264,8 +265,8 @@ def gzip_copy_file(tar, member, local_file, CLOBBER, MODE): #-- if file does not exist, is to be overwritten, or CLOBBERed if TEST or CLOBBER: #-- Printing files copied from tar file to new compressed file - logging.info('{0}/{1} --> '.format(tar.name,member.name)) - logging.info('\t{0}{1}\n'.format(local_file,OVERWRITE)) + logging.info(f'{tar.name}/{member.name} --> ') + logging.info(f'\t{local_file}{OVERWRITE}\n') #-- extract file contents to new compressed file f_in = tar.extractfile(member) with gzip.GzipFile(local_file, 'wb', 9, None, file1_mtime) as f_out: diff --git a/scripts/combine_harmonics.py b/scripts/combine_harmonics.py index 2035091d..68753bcc 100644 --- a/scripts/combine_harmonics.py +++ b/scripts/combine_harmonics.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" combine_harmonics.py -Written by Tyler Sutterley (07/2022) +Written by Tyler Sutterley (11/2022) Converts a file from the spherical harmonic domain into the spatial domain CALLING SEQUENCE: @@ -68,6 +68,7 @@ utilities.py: download and management utilities for files UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 07/2022: create mask for output gridded variables Updated 04/2022: use wrapper function for reading load Love numbers use argparse descriptions within sphinx documentation @@ -115,10 +116,10 @@ def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) - logging.info('module name: {0}'.format(__name__)) + logging.info(f'module name: {__name__}') if hasattr(os, 'getppid'): - logging.info('parent process: {0:d}'.format(os.getppid())) - logging.info('process id: {0:d}'.format(os.getpid())) + logging.info(f'parent process: {os.getppid():d}') + logging.info(f'process id: {os.getpid():d}') #-- PURPOSE: converts from the spherical harmonic domain into the spatial domain def 
combine_harmonics(INPUT_FILE, OUTPUT_FILE, @@ -255,7 +256,7 @@ def combine_harmonics(INPUT_FILE, OUTPUT_FILE, #-- 5: mbar, millibars equivalent surface pressure dfactor = units(lmax=LMAX).harmonic(hl,kl,ll).mbar else: - raise ValueError('Invalid units code {0:d}'.format(UNITS)) + raise ValueError(f'Invalid units code {UNITS:d}') #-- Computing plms for converting to spatial domain theta = (90.0-grid.lat)*np.pi/180.0 @@ -415,7 +416,7 @@ def main(): #-- if there has been an error exception #-- print the type, value, and stack trace of the #-- current exception being handled - logging.critical('process id {0:d} failed'.format(os.getpid())) + logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) #-- run main program diff --git a/scripts/convert_harmonics.py b/scripts/convert_harmonics.py index 9beb170b..736cba28 100644 --- a/scripts/convert_harmonics.py +++ b/scripts/convert_harmonics.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" convert_harmonics.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (11/2022) Converts a file from the spatial domain into the spherical harmonic domain CALLING SEQUENCE: @@ -59,6 +59,7 @@ utilities.py: download and management utilities for files UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 04/2022: use wrapper function for reading load Love numbers use argparse descriptions within sphinx documentation Updated 12/2021: can use variable loglevels for verbose output @@ -97,10 +98,10 @@ def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) - logging.info('module name: {0}'.format(__name__)) + logging.info(f'module name: {__name__}') if hasattr(os, 'getppid'): - logging.info('parent process: {0:d}'.format(os.getppid())) - logging.info('process id: {0:d}'.format(os.getpid())) + logging.info(f'parent process: {os.getppid():d}') + logging.info(f'process id: {os.getpid():d}') #-- PURPOSE: converts from the spatial domain 
into the spherical harmonic domain def convert_harmonics(INPUT_FILE, OUTPUT_FILE, @@ -287,7 +288,7 @@ def main(): #-- if there has been an error exception #-- print the type, value, and stack trace of the #-- current exception being handled - logging.critical('process id {0:d} failed'.format(os.getpid())) + logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) #-- run main program diff --git a/scripts/esa_costg_swarm_sync.py b/scripts/esa_costg_swarm_sync.py index a26ab6a8..586e92e2 100644 --- a/scripts/esa_costg_swarm_sync.py +++ b/scripts/esa_costg_swarm_sync.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" esa_costg_swarm_sync.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (11/2022) Syncs Swarm gravity field products from the ESA Swarm Science Server https://earth.esa.int/eogateway/missions/swarm/data https://www.esa.int/Applications/Observing_the_Earth/Swarm @@ -29,6 +29,7 @@ https://numpy.org/doc/stable/user/numpy-for-matlab-users.html UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 04/2022: use argparse descriptions within documentation Updated 10/2021: using python logging for handling verbose output Written 09/2021 @@ -62,10 +63,10 @@ def esa_costg_swarm_sync(DIRECTORY, RELEASE=None, TIMEOUT=None, LOG=False, #-- output to log file #-- format: ESA_Swarm_sync_2002-04-01.log today = time.strftime('%Y-%m-%d',time.localtime()) - LOGFILE = 'ESA_Swarm_sync_{0}.log'.format(today) + LOGFILE = f'ESA_Swarm_sync_{today}.log' logging.basicConfig(filename=os.path.join(DIRECTORY,LOGFILE), level=logging.INFO) - logging.info('ESA Swarm Sync Log ({0})'.format(today)) + logging.info(f'ESA Swarm Sync Log ({today})') else: #-- standard output (terminal output) logging.basicConfig(level=logging.INFO) @@ -101,7 +102,7 @@ def esa_costg_swarm_sync(DIRECTORY, RELEASE=None, TIMEOUT=None, LOG=False, #-- to list maxfiles number of files at position parameters = 
gravity_toolkit.utilities.urlencode({'maxfiles':prevmax, 'pos':pos,'file':posixpath.join('swarm','Level2longterm','EGF')}) - url=posixpath.join(HOST,'?do=list&{0}'.format(parameters)) + url=posixpath.join(HOST,f'?do=list&{parameters}') request = gravity_toolkit.utilities.urllib2.Request(url=url) response = gravity_toolkit.utilities.urllib2.urlopen(request, timeout=TIMEOUT) @@ -124,7 +125,7 @@ def esa_costg_swarm_sync(DIRECTORY, RELEASE=None, TIMEOUT=None, LOG=False, parameters = gravity_toolkit.utilities.urlencode({'file': posixpath.join('swarm','Level2longterm','EGF',colnames[i])}) remote_file = posixpath.join(HOST, - '?do=download&{0}'.format(parameters)) + f'?do=download&{parameters}') local_file = os.path.join(local_dir,colnames[i]) #-- check that file is not in file system unless overwriting http_pull_file(remote_file, collastmod[i], local_file, @@ -164,7 +165,7 @@ def http_pull_file(remote_file, remote_mtime, local_file, TIMEOUT=120, #-- compare checksums if (local_hash != remote_hash): TEST = True - OVERWRITE = ' (checksums: {0} {1})'.format(local_hash,remote_hash) + OVERWRITE = f' (checksums: {local_hash} {remote_hash})' elif os.access(local_file, os.F_OK): #-- check last modification time of local file local_mtime = os.stat(local_file).st_mtime @@ -179,8 +180,8 @@ def http_pull_file(remote_file, remote_mtime, local_file, TIMEOUT=120, #-- if file does not exist locally, is to be overwritten, or CLOBBER is set if TEST or CLOBBER: #-- Printing files transferred - logging.info('{0} --> '.format(remote_file)) - logging.info('\t{0}{1}\n'.format(local_file,OVERWRITE)) + logging.info(f'{remote_file} --> ') + logging.info(f'\t{local_file}{OVERWRITE}\n') #-- if executing copy command (not only printing the files) if not LIST: #-- chunked transfer encoding size diff --git a/scripts/geocenter_compare_tellus.py b/scripts/geocenter_compare_tellus.py index 1ac96ff8..1c8bf4e8 100644 --- a/scripts/geocenter_compare_tellus.py +++ b/scripts/geocenter_compare_tellus.py @@ 
-1,7 +1,7 @@ #!/usr/bin/env python u""" geocenter_compare_tellus.py -Written by Tyler Sutterley (05/2022) +Written by Tyler Sutterley (11/2022) Plots the GRACE/GRACE-FO geocenter time series for different GRACE/GRACE-FO processing centers comparing with the JPL GRACE Tellus product @@ -17,6 +17,7 @@ -M X, --missing X: Missing GRACE months in time series UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 05/2022: use argparse descriptions within documentation Updated 12/2021: adjust minimum x limit based on starting GRACE month Updated 11/2021: use gravity_toolkit geocenter class for operations @@ -123,7 +124,7 @@ def geocenter_compare_tellus(grace_dir,DREL,START_MON,END_MON,MISSING): label='GFZ GravIS') #-- Running function read_tellus_geocenter.py - grace_file = 'TN-13_GEOC_{0}_{1}.txt'.format(pr,DREL) + grace_file = f'TN-13_GEOC_{pr}_{DREL}.txt' DEG1 = geocenter().from_tellus(os.path.join(grace_dir,grace_file),JPL=True) #-- indices for mean months kk, = np.nonzero((DEG1.month >= START_MON) & (DEG1.month <= 176)) @@ -193,7 +194,7 @@ def geocenter_compare_tellus(grace_dir,DREL,START_MON,END_MON,MISSING): #-- adjust locations of subplots fig.subplots_adjust(left=0.06,right=0.98,bottom=0.12,top=0.94,wspace=0.05) #-- save figure to file - OUTPUT_FIGURE = 'TN13_SV19_{0}_{1}.pdf'.format(pr,DREL) + OUTPUT_FIGURE = f'TN13_SV19_{pr}_{DREL}.pdf' plt.savefig(os.path.join(grace_dir,OUTPUT_FIGURE), format='pdf', dpi=300) plt.clf() diff --git a/scripts/geocenter_monte_carlo.py b/scripts/geocenter_monte_carlo.py index f562d6cc..5f13aae5 100644 --- a/scripts/geocenter_monte_carlo.py +++ b/scripts/geocenter_monte_carlo.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" geocenter_monte_carlo.py -Written by Tyler Sutterley (05/2022) +Written by Tyler Sutterley (11/2022) CALLING SEQUENCE: python geocenter_monte_carlo.py --start 4 --end 237 @@ -15,6 +15,7 @@ -M X, --missing X: Missing GRACE months in time series UPDATE HISTORY: + Updated 
11/2022: use f-strings for formatting verbose or ascii output Updated 05/2022: use argparse descriptions within documentation Updated 12/2021: adjust minimum x limit based on starting GRACE month Written 11/2021 @@ -140,11 +141,11 @@ def geocenter_monte_carlo(grace_dir,PROC,DREL,START_MON,END_MON,MISSING): tick.label.set_fontsize(14) #-- labels and set limits - ax[0].set_ylabel('{0} Geocenter Variation [mm]'.format(PROC), fontsize=14) + ax[0].set_ylabel(f'{PROC} Geocenter Variation [mm]', fontsize=14) #-- adjust locations of subplots fig.subplots_adjust(left=0.06,right=0.98,bottom=0.12,top=0.94,wspace=0.05) #-- save figure to file - OUTPUT_FIGURE = 'SV19_{0}_{1}_monte_carlo.pdf'.format(PROC,DREL) + OUTPUT_FIGURE = f'SV19_{PROC}_{DREL}_monte_carlo.pdf' plt.savefig(os.path.join(grace_dir,OUTPUT_FIGURE), format='pdf', dpi=300) plt.clf() diff --git a/scripts/geocenter_ocean_models.py b/scripts/geocenter_ocean_models.py index 9b44f886..7d356f5f 100644 --- a/scripts/geocenter_ocean_models.py +++ b/scripts/geocenter_ocean_models.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" geocenter_ocean_models.py -Written by Tyler Sutterley (05/2022) +Written by Tyler Sutterley (11/2022) Plots the GRACE/GRACE-FO geocenter time series comparing results using different ocean bottom pressure estimates @@ -19,6 +19,7 @@ -O X, --ocean X: ocean bottom pressure products to use UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 05/2022: use argparse descriptions within documentation Updated 12/2021: adjust minimum x limit based on starting GRACE month Updated 11/2021: use gravity_toolkit geocenter class for operations @@ -134,7 +135,7 @@ def geocenter_ocean_models(grace_dir,PROC,DREL,MODEL,START_MON,END_MON,MISSING): #-- adjust locations of subplots fig.subplots_adjust(left=0.06,right=0.98,bottom=0.12,top=0.94,wspace=0.05) #-- save figure to file - OUTPUT_FIGURE = 'SV19_{0}_{1}_ocean_models.pdf'.format(PROC,DREL) + OUTPUT_FIGURE = 
f'SV19_{PROC}_{DREL}_ocean_models.pdf' plt.savefig(os.path.join(grace_dir,OUTPUT_FIGURE), format='pdf', dpi=300) plt.clf() diff --git a/scripts/geocenter_processing_centers.py b/scripts/geocenter_processing_centers.py index eb324c06..1a733c24 100644 --- a/scripts/geocenter_processing_centers.py +++ b/scripts/geocenter_processing_centers.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" geocenter_processing_centers.py -Written by Tyler Sutterley (05/2022) +Written by Tyler Sutterley (11/2022) Plots the GRACE/GRACE-FO geocenter time series for different GRACE/GRACE-FO processing centers @@ -17,6 +17,7 @@ -M X, --missing X: Missing GRACE months in time series UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 05/2022: use argparse descriptions within documentation Updated 12/2021: adjust minimum x limit based on starting GRACE month make the list of processing centers an option @@ -150,7 +151,7 @@ def geocenter_processing_centers(grace_dir,PROC,DREL,START_MON,END_MON,MISSING): #-- adjust locations of subplots fig.subplots_adjust(left=0.06,right=0.98,bottom=0.12,top=0.94,wspace=0.05) #-- save figure to file - OUTPUT_FIGURE = 'SV19_{0}_centers.pdf'.format(DREL) + OUTPUT_FIGURE = f'SV19_{DREL}_centers.pdf' plt.savefig(os.path.join(grace_dir,OUTPUT_FIGURE), format='pdf', dpi=300) plt.clf() diff --git a/scripts/gfz_icgem_costg_ftp.py b/scripts/gfz_icgem_costg_ftp.py index f02d4726..4e95b001 100644 --- a/scripts/gfz_icgem_costg_ftp.py +++ b/scripts/gfz_icgem_costg_ftp.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" gfz_icgem_costg_ftp.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (11/2022) Syncs GRACE/GRACE-FO/Swarm COST-G data from the GFZ International Centre for Global Earth Models (ICGEM) @@ -38,6 +38,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 04/2022: use argparse descriptions within 
documentation Updated 10/2021: using python logging for handling verbose output Written 09/2021 @@ -64,7 +65,7 @@ def compile_regex_pattern(MISSION, DSET): elif ((DSET != 'GSM') and (MISSION == 'Swarm')): regex=r'(GAA|GAB|GAC|GAD)_Swarm_(\d+)_(\d{2})_(\d{4})(\.gfc|\.ZIP)' else: - regex=r'{0}-2_(.*?)\.gfc$'.format(DSET) + regex=rf'{DSET}-2_(.*?)\.gfc$' #-- return the compiled regular expression operator used to find files return re.compile(regex, re.VERBOSE) @@ -94,21 +95,21 @@ def gfz_icgem_costg_ftp(DIRECTORY, MISSION=[], RELEASE=None, TIMEOUT=None, #-- output to log file #-- format: GFZ_ICGEM_COST-G_sync_2002-04-01.log today = time.strftime('%Y-%m-%d',time.localtime()) - LOGFILE = 'GFZ_ICGEM_COST-G_sync_{0}.log'.format(today) + LOGFILE = f'GFZ_ICGEM_COST-G_sync_{today}.log' logging.basicConfig(filename=os.path.join(DIRECTORY,LOGFILE), level=logging.INFO) - logging.info('GFZ ICGEM COST-G Sync Log ({0})'.format(today)) + logging.info(f'GFZ ICGEM COST-G Sync Log ({today})') else: #-- standard output (terminal output) logging.basicConfig(level=logging.INFO) #-- find files for a particular mission - logging.info('{0} Spherical Harmonics:'.format(MISSION)) + logging.info(f'{MISSION} Spherical Harmonics:') #-- Sync gravity field dealiasing products for ds in DSET[MISSION]: #-- print string of exact data product - logging.info('{0}/{1}/{2}'.format(MISSION,RELEASE,ds)) + logging.info(f'{MISSION}/{RELEASE}/{ds}') #-- local directory for exact data product local_dir = os.path.join(DIRECTORY,LOCAL[MISSION],RELEASE,ds) #-- check if directory exists and recursively create if not @@ -178,7 +179,7 @@ def ftp_mirror_file(ftp,remote_path,remote_mtime,local_file, #-- compare checksums if (local_hash != remote_hash): TEST = True - OVERWRITE = ' (checksums: {0} {1})'.format(local_hash,remote_hash) + OVERWRITE = f' (checksums: {local_hash} {remote_hash})' elif os.access(local_file, os.F_OK): #-- check last modification time of local file local_mtime = os.stat(local_file).st_mtime @@ 
-193,8 +194,9 @@ def ftp_mirror_file(ftp,remote_path,remote_mtime,local_file, #-- if file does not exist locally, is to be overwritten, or CLOBBER is set if TEST or CLOBBER: #-- Printing files transferred - arg=(posixpath.join('ftp://',*remote_path),local_file,OVERWRITE) - logging.info('{0} -->\n\t{1}{2}\n'.format(*arg)) + remote_ftp_url = posixpath.join('ftp://',*remote_path) + logging.info(f'{remote_ftp_url} -->') + logging.info(f'\t{local_file}{OVERWRITE}\n') #-- if executing copy command (not only printing the files) if not LIST: #-- copy file from ftp server or from bytesIO object @@ -208,7 +210,7 @@ def ftp_mirror_file(ftp,remote_path,remote_mtime,local_file, remote_file = posixpath.join(*remote_path[1:]) #-- copy remote file contents to local file with open(local_file, 'wb') as f: - ftp.retrbinary('RETR {0}'.format(remote_file), f.write) + ftp.retrbinary(f'RETR {remote_file}', f.write) #-- keep remote modification time of file and local access time os.utime(local_file, (os.stat(local_file).st_atime, remote_mtime)) os.chmod(local_file, MODE) diff --git a/scripts/gfz_isdc_dealiasing_ftp.py b/scripts/gfz_isdc_dealiasing_ftp.py index 608dcf0d..fb4b80fd 100644 --- a/scripts/gfz_isdc_dealiasing_ftp.py +++ b/scripts/gfz_isdc_dealiasing_ftp.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" gfz_isdc_dealiasing_ftp.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (11/2022) Syncs GRACE Level-1b dealiasing products from the GFZ Information System and Data Center (ISDC) Optionally outputs as monthly tar files @@ -30,6 +30,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 04/2022: use argparse descriptions within documentation Updated 10/2021: using python logging for handling verbose output Updated 07/2021: added option to sync only specific months @@ -67,10 +68,10 @@ def gfz_isdc_dealiasing_ftp(base_dir, DREL, YEAR=None, MONTHS=None, 
TAR=False, #-- output to log file #-- format: GFZ_AOD1B_sync_2002-04-01.log today = time.strftime('%Y-%m-%d',time.localtime()) - LOGFILE = 'GFZ_AOD1B_sync_{0}.log'.format(today) + LOGFILE = f'GFZ_AOD1B_sync_{today}.log' logging.basicConfig(filename=os.path.join(base_dir,LOGFILE), level=logging.INFO) - logging.info('GFZ AOD1b Sync Log ({0})'.format(today)) + logging.info(f'GFZ AOD1b Sync Log ({today})') else: #-- standard output (terminal output) logging.basicConfig(level=logging.INFO) @@ -84,9 +85,9 @@ def gfz_isdc_dealiasing_ftp(base_dir, DREL, YEAR=None, MONTHS=None, TAR=False, if YEAR is None: regex_years = r'\d{4}' else: - regex_years = r'|'.join(r'{0:d}'.format(y) for y in YEAR) + regex_years = r'|'.join(rf'{y:d}' for y in YEAR) #-- compile regular expression operator for years to sync - R1 = re.compile(r'({0})'.format(regex_years), re.VERBOSE) + R1 = re.compile(rf'({regex_years})', re.VERBOSE) #-- suffix for each data release SUFFIX = dict(RL04='tar.gz',RL05='tar.gz',RL06='tgz') @@ -170,11 +171,12 @@ def ftp_mirror_file(ftp,remote_path,remote_mtime,local_file, #-- if file does not exist locally, is to be overwritten, or CLOBBER is set if TEST or CLOBBER: #-- Printing files transferred - arg = (posixpath.join('ftp://',*remote_path),local_file,OVERWRITE) - logging.info('{0} -->\n\t{1}{2}\n'.format(*arg)) + remote_ftp_url = posixpath.join('ftp://',*remote_path) + logging.info(f'{remote_ftp_url} -->') + logging.info(f'\t{local_file}{OVERWRITE}\n') #-- copy remote file contents to local file with open(local_file, 'wb') as f: - ftp.retrbinary('RETR {0}'.format(remote_file), f.write) + ftp.retrbinary(f'RETR {remote_file}', f.write) #-- keep remote modification time of file and local access time os.utime(local_file, (os.stat(local_file).st_atime, remote_mtime)) os.chmod(local_file, MODE) diff --git a/scripts/gfz_isdc_grace_ftp.py b/scripts/gfz_isdc_grace_ftp.py index 0bb454de..3828c82c 100644 --- a/scripts/gfz_isdc_grace_ftp.py +++ b/scripts/gfz_isdc_grace_ftp.py @@ 
-1,7 +1,7 @@ #!/usr/bin/env python u""" gfz_isdc_grace_ftp.py -Written by Tyler Sutterley (10/2022) +Written by Tyler Sutterley (11/2022) Syncs GRACE/GRACE-FO data from the GFZ Information System and Data Center (ISDC) Syncs CSR/GFZ/JPL files for RL06 GAA/GAB/GAC/GAD/GSM GAA and GAB are GFZ/JPL only @@ -40,6 +40,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 10/2022: fix version check for mission Updated 08/2022: moved regular expression function to utilities Dynamically select newest version of granules for index @@ -99,10 +100,10 @@ def gfz_isdc_grace_ftp(DIRECTORY, PROC=[], DREL=[], VERSION=[], #-- output to log file #-- format: GFZ_ISDC_sync_2002-04-01.log today = time.strftime('%Y-%m-%d',time.localtime()) - LOGFILE = 'GFZ_ISDC_sync_{0}.log'.format(today) + LOGFILE = f'GFZ_ISDC_sync_{today}.log' logging.basicConfig(filename=os.path.join(DIRECTORY,LOGFILE), level=logging.INFO) - logging.info('GFZ ISDC Sync Log ({0})'.format(today)) + logging.info(f'GFZ ISDC Sync Log ({today})') logging.info('CENTERS={0}'.format(','.join(PROC))) logging.info('RELEASES={0}'.format(','.join(DREL))) else: @@ -196,10 +197,10 @@ def gfz_isdc_grace_ftp(DIRECTORY, PROC=[], DREL=[], VERSION=[], os.makedirs(local_dir,MODE) if not os.path.exists(local_dir) else None #-- for each satellite mission (grace, grace-fo) for i,mi in enumerate(['grace','grace-fo']): - logging.info('{0} Newsletters:'.format(mi)) + logging.info(f'{mi} Newsletters:') #-- compile regular expression operator for remote files NAME = mi.upper().replace('-','_') - R1 = re.compile(r'{0}_SDS_NL_(\d+).pdf'.format(NAME), re.VERBOSE) + R1 = re.compile(rf'{NAME}_SDS_NL_(\d+).pdf', re.VERBOSE) #-- find years for GRACE/GRACE-FO newsletters years,_ = gravity_toolkit.utilities.ftp_list( [ftp.host,mi,'DOCUMENTS','NEWSLETTER'], timeout=TIMEOUT, @@ -238,13 +239,13 @@ def gfz_isdc_grace_ftp(DIRECTORY, PROC=[], 
DREL=[], VERSION=[], for i,mi in enumerate(['grace','grace-fo']): #-- modifiers for intermediate data releases if (int(VERSION[i]) > 0): - drel_str = '{0}.{1}'.format(rl,VERSION[i]) + drel_str = f'{rl}.{VERSION[i]}' else: drel_str = copy.copy(rl) #-- print string of exact data product - logging.info('{0}/{1}/{2}/{3}'.format(mi, pr, drel_str, ds)) + logging.info(f'{mi}/{pr}/{drel_str}/{ds}') #-- compile the regular expression operator to find files - R1 = re.compile(r'({0}-(.*?)(gz|txt|dif))'.format(ds)) + R1 = re.compile(rf'({ds}-(.*?)(gz|txt|dif))') #-- get filenames from remote directory remote_files,remote_mtimes = gravity_toolkit.utilities.ftp_list( [ftp.host,mi,'Level-2',pr,drel_str], timeout=TIMEOUT, @@ -269,7 +270,7 @@ def gfz_isdc_grace_ftp(DIRECTORY, PROC=[], DREL=[], VERSION=[], #-- outputting GRACE/GRACE-FO filenames to index with open(os.path.join(local_dir,'index.txt'),'w') as fid: for fi in sorted(grace_files): - print('{0}'.format(fi), file=fid) + print(fi, file=fid) #-- change permissions of index file os.chmod(os.path.join(local_dir,'index.txt'), MODE) @@ -300,7 +301,7 @@ def ftp_mirror_file(ftp,remote_path,remote_mtime,local_file, #-- compare checksums if (local_hash != remote_hash): TEST = True - OVERWRITE = ' (checksums: {0} {1})'.format(local_hash,remote_hash) + OVERWRITE = f' (checksums: {local_hash} {remote_hash})' elif os.access(local_file, os.F_OK): #-- check last modification time of local file local_mtime = os.stat(local_file).st_mtime @@ -315,8 +316,9 @@ def ftp_mirror_file(ftp,remote_path,remote_mtime,local_file, #-- if file does not exist locally, is to be overwritten, or CLOBBER is set if TEST or CLOBBER: #-- Printing files transferred - arg=(posixpath.join('ftp://',*remote_path),local_file,OVERWRITE) - logging.info('{0} -->\n\t{1}{2}\n'.format(*arg)) + remote_ftp_url = posixpath.join('ftp://',*remote_path) + logging.info(f'{remote_ftp_url} -->') + logging.info(f'\t{local_file}{OVERWRITE}\n') #-- if executing copy command (not only 
printing the files) if not LIST: #-- copy file from ftp server or from bytesIO object @@ -330,7 +332,7 @@ def ftp_mirror_file(ftp,remote_path,remote_mtime,local_file, remote_file = posixpath.join(*remote_path[1:]) #-- copy remote file contents to local file with open(local_file, 'wb') as f: - ftp.retrbinary('RETR {0}'.format(remote_file), f.write) + ftp.retrbinary(f'RETR {remote_file}', f.write) #-- keep remote modification time of file and local access time os.utime(local_file, (os.stat(local_file).st_atime, remote_mtime)) os.chmod(local_file, MODE) diff --git a/scripts/grace_mean_harmonics.py b/scripts/grace_mean_harmonics.py index c3ebbcd3..40ef6681 100644 --- a/scripts/grace_mean_harmonics.py +++ b/scripts/grace_mean_harmonics.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" grace_mean_harmonics.py -Written by Tyler Sutterley (09/2022) +Written by Tyler Sutterley (11/2022) Calculates the temporal mean of the GRACE/GRACE-FO spherical harmonics for a given date range from a set of parameters @@ -72,6 +72,7 @@ utilities.py: download and management utilities for files UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 09/2022: add option to replace degree 4 zonal harmonics with SLR Updated 04/2022: use argparse descriptions within documentation Updated 12/2021: can use variable loglevels for verbose output @@ -122,10 +123,10 @@ def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) - logging.info('module name: {0}'.format(__name__)) + logging.info(f'module name: {__name__}') if hasattr(os, 'getppid'): - logging.info('parent process: {0:d}'.format(os.getppid())) - logging.info('process id: {0:d}'.format(os.getpid())) + logging.info(f'parent process: {os.getppid():d}') + logging.info(f'process id: {os.getpid():d}') #-- PURPOSE: import GRACE/GRACE-FO files for a given months range #-- calculate the mean of the spherical harmonics and output to file @@ -152,7 +153,7 @@ def grace_mean_harmonics(base_dir, 
PROC, DREL, DSET, LMAX, #-- output string for both LMAX==MMAX and LMAX != MMAX cases MMAX = np.copy(LMAX) if not MMAX else MMAX - order_str = 'M{0:d}'.format(MMAX) if (MMAX != LMAX) else '' + order_str = f'M{MMAX:d}' if (MMAX != LMAX) else '' #-- data formats for output: ascii, netCDF4, HDF5, gfc suffix = dict(ascii='txt',netCDF4='nc',HDF5='H5',gfc='gfc')[MEANFORM] @@ -481,7 +482,7 @@ def main(): #-- if there has been an error exception #-- print the type, value, and stack trace of the #-- current exception being handled - logging.critical('process id {0:d} failed'.format(os.getpid())) + logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) if args.log:#-- write failed job completion log file output_error_log_file(args) diff --git a/scripts/grace_spatial_error.py b/scripts/grace_spatial_error.py index 259b5d1e..7440b660 100755 --- a/scripts/grace_spatial_error.py +++ b/scripts/grace_spatial_error.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" grace_spatial_error.py -Written by Tyler Sutterley (09/2022) +Written by Tyler Sutterley (11/2022) Calculates the GRACE/GRACE-FO errors following Wahr et al. 
(2006) @@ -116,6 +116,7 @@ http://dx.doi.org/10.1029/2005GL025305 UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 09/2022: add option to replace degree 4 zonal harmonics with SLR Updated 04/2022: use wrapper function for reading load Love numbers use argparse descriptions within sphinx documentation @@ -174,10 +175,10 @@ def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) - logging.info('module name: {0}'.format(__name__)) + logging.info(f'module name: {__name__}') if hasattr(os, 'getppid'): - logging.info('parent process: {0:d}'.format(os.getppid())) - logging.info('process id: {0:d}'.format(os.getpid())) + logging.info(f'parent process: {os.getppid():d}') + logging.info(f'process id: {os.getpid():d}') #-- PURPOSE: import GRACE files for a given months range #-- Estimates the GRACE/GRACE-FO errors applying the specified procedures @@ -230,7 +231,7 @@ def grace_spatial_error(base_dir, PROC, DREL, DSET, LMAX, RAD, #-- Calculating the Gaussian smoothing for radius RAD if (RAD != 0): wt = 2.0*np.pi*gauss_weights(RAD,LMAX) - gw_str = '_r{0:0.0f}km'.format(RAD) + gw_str = f'_r{RAD:0.0f}km' else: #-- else = 1 wt = np.ones((LMAX+1)) @@ -238,7 +239,7 @@ def grace_spatial_error(base_dir, PROC, DREL, DSET, LMAX, RAD, #-- flag for spherical harmonic order MMAX = np.copy(LMAX) if not MMAX else MMAX - order_str = 'M{0:d}'.format(MMAX) if (MMAX != LMAX) else '' + order_str = f'M{MMAX:d}' if (MMAX != LMAX) else '' #-- atmospheric ECMWF "jump" flag (if ATM) atm_str = '_wATM' if ATM else '' @@ -705,7 +706,7 @@ def main(): #-- if there has been an error exception #-- print the type, value, and stack trace of the #-- current exception being handled - logging.critical('process id {0:d} failed'.format(os.getpid())) + logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) if args.log:#-- write failed job completion log file output_error_log_file(args) diff --git 
a/scripts/grace_spatial_maps.py b/scripts/grace_spatial_maps.py index 0242646c..a52f6e31 100755 --- a/scripts/grace_spatial_maps.py +++ b/scripts/grace_spatial_maps.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" grace_spatial_maps.py -Written by Tyler Sutterley (09/2022) +Written by Tyler Sutterley (11/2022) Reads in GRACE/GRACE-FO spherical harmonic coefficients and exports monthly spatial fields @@ -147,6 +147,7 @@ utilities.py: download and management utilities for files UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 09/2022: add option to replace degree 4 zonal harmonics with SLR Updated 07/2022: create mask for output gridded variables Updated 04/2022: use wrapper function for reading load Love numbers @@ -198,10 +199,10 @@ def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) - logging.info('module name: {0}'.format(__name__)) + logging.info(f'module name: {__name__}') if hasattr(os, 'getppid'): - logging.info('parent process: {0:d}'.format(os.getppid())) - logging.info('process id: {0:d}'.format(os.getpid())) + logging.info(f'parent process: {os.getppid():d}') + logging.info(f'process id: {os.getpid():d}') #-- PURPOSE: import GRACE/GRACE-FO files for a given months range #-- Converts the GRACE/GRACE-FO harmonics applying the specified procedures @@ -260,7 +261,7 @@ def grace_spatial_maps(base_dir, PROC, DREL, DSET, LMAX, RAD, #-- Calculating the Gaussian smoothing for radius RAD if (RAD != 0): wt = 2.0*np.pi*gauss_weights(RAD,LMAX) - gw_str = '_r{0:0.0f}km'.format(RAD) + gw_str = f'_r{RAD:0.0f}km' else: #-- else = 1 wt = np.ones((LMAX+1)) @@ -268,7 +269,7 @@ def grace_spatial_maps(base_dir, PROC, DREL, DSET, LMAX, RAD, #-- flag for spherical harmonic order MMAX = np.copy(LMAX) if not MMAX else MMAX - order_str = 'M{0:d}'.format(MMAX) if (MMAX != LMAX) else '' + order_str = f'M{MMAX:d}' if (MMAX != LMAX) else '' #-- reading GRACE months for input date range #-- replacing low-degree 
harmonics with SLR values if specified @@ -424,7 +425,7 @@ def grace_spatial_maps(base_dir, PROC, DREL, DSET, LMAX, RAD, #-- 5: mbar, millibars equivalent surface pressure dfactor = units(lmax=LMAX).harmonic(hl,kl,ll).mbar else: - raise ValueError('Invalid units code {0:d}'.format(UNITS)) + raise ValueError(f'Invalid units code {UNITS:d}') #-- output file format file_format = '{0}{1}_L{2:d}{3}{4}{5}_{6:03d}.{7}' @@ -782,7 +783,7 @@ def main(): #-- if there has been an error exception #-- print the type, value, and stack trace of the #-- current exception being handled - logging.critical('process id {0:d} failed'.format(os.getpid())) + logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) if args.log:#-- write failed job completion log file output_error_log_file(args) diff --git a/scripts/itsg_graz_grace_sync.py b/scripts/itsg_graz_grace_sync.py index f86500b0..99a9b871 100755 --- a/scripts/itsg_graz_grace_sync.py +++ b/scripts/itsg_graz_grace_sync.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" itsg_graz_grace_sync.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (11/2022) Syncs GRACE/GRACE-FO and auxiliary data from the ITSG GRAZ server CALLING SEQUENCE: @@ -39,6 +39,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 04/2022: use argparse descriptions within documentation Updated 10/2021: using python logging for handling verbose output Written 09/2021 @@ -66,12 +67,12 @@ def itsg_graz_grace_sync(DIRECTORY, RELEASE=None, LMAX=None, TIMEOUT=0, #-- output to log file #-- format: ITSG_GRAZ_GRACE_sync_2002-04-01.log today = time.strftime('%Y-%m-%d',time.localtime()) - LOGFILE = 'ITSG_GRAZ_GRACE_sync_{0}.log'.format(today) + LOGFILE = f'ITSG_GRAZ_GRACE_sync_{today}.log' logging.basicConfig(filename=os.path.join(DIRECTORY,LOGFILE), level=logging.INFO) - logging.info('ITSG GRAZ GRACE Sync Log 
({0})'.format(today)) - logging.info('Release: {0}'.format(RELEASE)) - logging.info('LMAX: {0:d}'.format(LMAX)) + logging.info(f'ITSG GRAZ GRACE Sync Log ({today})') + logging.info(f'Release: {RELEASE}') + logging.info(f'LMAX: {LMAX:d}') else: #-- standard output (terminal output) logging.basicConfig(level=logging.INFO) @@ -79,7 +80,7 @@ def itsg_graz_grace_sync(DIRECTORY, RELEASE=None, LMAX=None, TIMEOUT=0, #-- ITSG GRAZ server HOST = ['http://ftp.tugraz.at','outgoing','ITSG','GRACE'] #-- open connection with ITSG GRAZ server at remote directory - release_directory = 'ITSG-{0}'.format(RELEASE) + release_directory = f'ITSG-{RELEASE}' #-- regular expression operators for ITSG data and models itsg_products = [] itsg_products.append(r'atmosphere') @@ -127,7 +128,7 @@ def itsg_graz_grace_sync(DIRECTORY, RELEASE=None, LMAX=None, TIMEOUT=0, TIMEOUT=TIMEOUT, LIST=LIST, CLOBBER=CLOBBER, MODE=MODE) #-- sync ITSG GRAZ data for truncation - subdir = 'monthly_n{0:d}'.format(LMAX) + subdir = f'monthly_n{LMAX:d}' REMOTE = [*HOST,release_directory,'monthly',subdir] files,mtimes = gravity_toolkit.utilities.http_list(REMOTE, timeout=TIMEOUT,pattern=R1,sort=True) @@ -186,8 +187,8 @@ def http_pull_file(remote_file,remote_mtime,local_file, #-- if file does not exist locally, is to be overwritten, or CLOBBER is set if TEST or CLOBBER: #-- Printing files transferred - logging.info('{0} --> '.format(remote_file)) - logging.info('\t{0}{1}\n'.format(local_file,OVERWRITE)) + logging.info(f'{remote_file} --> ') + logging.info(f'\t{local_file}{OVERWRITE}\n') #-- if executing copy command (not only printing the files) if not LIST: #-- Create and submit request. 
There are a wide range of exceptions diff --git a/scripts/make_grace_index.py b/scripts/make_grace_index.py index 22d22c45..9ebd5f6e 100644 --- a/scripts/make_grace_index.py +++ b/scripts/make_grace_index.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" make_grace_index.py -Written by Tyler Sutterley (08/2022) +Written by Tyler Sutterley (11/2022) Creates index files of GRACE/GRACE-FO Level-2 data CALLING SEQUENCE: @@ -23,6 +23,7 @@ https://numpy.org/doc/stable/user/numpy-for-matlab-users.html UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 08/2022: make the data product optional Written 08/2022 """ @@ -58,7 +59,7 @@ def make_grace_index(DIRECTORY, PROC=[], DREL=[], DSET=[], #-- for each satellite mission (grace, grace-fo) for i,mi in enumerate(['grace','grace-fo']): #-- print string of exact data product - logging.info('{0} {1}/{2}/{3}'.format(mi, pr, rl, ds)) + logging.info(f'{mi} {pr}/{rl}/{ds}') #-- regular expression operator for data product rx = compile_regex_pattern(pr, rl, ds, mission=shortname[mi], version=VERSION[i]) @@ -70,7 +71,7 @@ def make_grace_index(DIRECTORY, PROC=[], DREL=[], DSET=[], #-- outputting GRACE/GRACE-FO filenames to index with open(os.path.join(local_dir,'index.txt'),'w') as fid: for fi in sorted(grace_files): - print('{0}'.format(fi), file=fid) + print(fi, file=fid) #-- change permissions of index file os.chmod(os.path.join(local_dir,'index.txt'), MODE) diff --git a/scripts/mascon_reconstruct.py b/scripts/mascon_reconstruct.py index 66ecc52f..42e4084a 100644 --- a/scripts/mascon_reconstruct.py +++ b/scripts/mascon_reconstruct.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" mascon_reconstruct.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (11/2022) Calculates the equivalent spherical harmonics from a mascon time series @@ -74,6 +74,7 @@ utilities.py: download and management utilities for files UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or 
ascii output Updated 04/2022: use wrapper function for reading load Love numbers include utf-8 encoding in reads to be windows compliant use argparse descriptions within sphinx documentation @@ -128,10 +129,10 @@ def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) - logging.info('module name: {0}'.format(__name__)) + logging.info(f'module name: {__name__}') if hasattr(os, 'getppid'): - logging.info('parent process: {0:d}'.format(os.getppid())) - logging.info('process id: {0:d}'.format(os.getpid())) + logging.info(f'parent process: {os.getppid():d}') + logging.info(f'process id: {os.getpid():d}') #-- PURPOSE: tilde-compress a file path string def tilde_compress(file_path): @@ -158,17 +159,17 @@ def mascon_reconstruct(DSET, LMAX, RAD, MODE=0o775): #-- for datasets not GSM: will add a label for the dataset - dset_str = '' if (DSET == 'GSM') else '_{0}'.format(DSET) + dset_str = '' if (DSET == 'GSM') else f'_{DSET}' #-- atmospheric ECMWF "jump" flag (if ATM) atm_str = '_wATM' if ATM else '' #-- Gaussian smoothing string for radius RAD - gw_str = '_r{0:0.0f}km'.format(RAD) if (RAD != 0) else '' + gw_str = f'_r{RAD:0.0f}km' if (RAD != 0) else '' #-- input GIA spherical harmonic datafiles GIA_Ylms_rate = read_GIA_model(GIA_FILE,GIA=GIA,LMAX=LMAX,MMAX=MMAX) gia_str = '_{0}'.format(GIA_Ylms_rate['title']) if GIA else '' #-- output string for both LMAX==MMAX and LMAX != MMAX cases MMAX = np.copy(LMAX) if not MMAX else MMAX - order_str = 'M{0:d}'.format(MMAX) if (MMAX != LMAX) else '' + order_str = f'M{MMAX:d}' if (MMAX != LMAX) else '' #-- filter grace coefficients flag ds_str = '_FL' if DESTRIPE else '' #-- output filename suffix @@ -237,7 +238,7 @@ def mascon_reconstruct(DSET, LMAX, RAD, #-- if lower case, will capitalize mascon_base = mascon_base.upper() #-- if mascon name contains degree and order info, remove - mascon_name = mascon_base.replace('_L{0:d}'.format(LMAX),'') + mascon_name = mascon_base.replace(f'_L{LMAX:d}', '') #-- input filename 
format (for both LMAX==MMAX and LMAX != MMAX cases): #-- mascon name, GRACE dataset, GIA model, LMAX, (MMAX,) @@ -422,7 +423,7 @@ def main(): #-- if there has been an error exception #-- print the type, value, and stack trace of the #-- current exception being handled - logging.critical('process id {0:d} failed'.format(os.getpid())) + logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) #-- run main program diff --git a/scripts/monte_carlo_degree_one.py b/scripts/monte_carlo_degree_one.py index 8a13aafc..21136b1e 100644 --- a/scripts/monte_carlo_degree_one.py +++ b/scripts/monte_carlo_degree_one.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" monte_carlo_degree_one.py -Written by Tyler Sutterley (09/2022) +Written by Tyler Sutterley (11/2022) Calculates degree 1 errors using GRACE coefficients of degree 2 and greater, and ocean bottom pressure variations from OMCT/MPIOM in a Monte Carlo scheme @@ -146,6 +146,7 @@ https://doi.org/10.1029/2005GL025305 UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 09/2022: add option to replace degree 4 zonal harmonics with SLR Updated 08/2022: set default land-sea mask file in arguments Updated 07/2022: set plot tick formatter to not use offsets @@ -219,10 +220,10 @@ def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) - logging.info('module name: {0}'.format(__name__)) + logging.info(f'module name: {__name__}') if hasattr(os, 'getppid'): - logging.info('parent process: {0:d}'.format(os.getppid())) - logging.info('process id: {0:d}'.format(os.getpid())) + logging.info(f'parent process: {os.getppid():d}') + logging.info(f'process id: {os.getpid():d}') #-- PURPOSE: model the seasonal component of an initial degree 1 model #-- using preliminary estimates of annual and semi-annual variations from LWM @@ -295,7 +296,7 @@ def monte_carlo_degree_one(base_dir, PROC, DREL, LMAX, RAD, #-- delta coefficients flag for monte carlo run 
delta_str = '_monte_carlo' #-- output string for both LMAX==MMAX and LMAX != MMAX cases - order_str = 'M{0:d}'.format(MMAX) if MMAX and (MMAX != LMAX) else '' + order_str = f'M{MMAX:d}' if MMAX and (MMAX != LMAX) else '' #-- atmospheric ECMWF "jump" flag (if ATM) atm_str = '_wATM' if ATM else '' #-- ocean model string @@ -304,26 +305,26 @@ def monte_carlo_degree_one(base_dir, PROC, DREL, LMAX, RAD, slf_str = '_SLF' if FINGERPRINT else '' #-- output flag for low-degree harmonic replacements if SLR_21 in ('CSR','GFZ','GSFC'): - C21_str = '_w{0}_21'.format(SLR_21) + C21_str = f'_w{SLR_21}_21' else: C21_str = '' if SLR_22 in ('CSR','GSFC'): - C22_str = '_w{0}_22'.format(SLR_22) + C22_str = f'_w{SLR_22}_22' else: C22_str = '' if SLR_C30 in ('GSFC',): #-- C30 replacement now default for all solutions C30_str = '' elif SLR_C30 in ('CSR','GFZ','LARES'): - C30_str = '_w{0}_C30'.format(SLR_C30) + C30_str = f'_w{SLR_C30}_C30' else: C30_str = '' if SLR_C40 in ('CSR','GSFC','LARES'): - C40_str = '_w{0}_C40'.format(SLR_C40) + C40_str = f'_w{SLR_C40}_C40' else: C40_str = '' if SLR_C50 in ('CSR','GSFC','LARES'): - C50_str = '_w{0}_C50'.format(SLR_C50) + C50_str = f'_w{SLR_C50}_C50' else: C50_str = '' #-- combine satellite laser ranging flags @@ -1320,7 +1321,7 @@ def main(): #-- if there has been an error exception #-- print the type, value, and stack trace of the #-- current exception being handled - logging.critical('process id {0:d} failed'.format(os.getpid())) + logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) if args.log:#-- write failed job completion log file output_error_log_file(args) diff --git a/scripts/podaac_cumulus.py b/scripts/podaac_cumulus.py index ca2bc058..1b9ed515 100644 --- a/scripts/podaac_cumulus.py +++ b/scripts/podaac_cumulus.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" podaac_cumulus.py -Written by Tyler Sutterley (08/2022) +Written by Tyler Sutterley (11/2022) Syncs GRACE/GRACE-FO data from NASA JPL PO.DAAC 
Cumulus AWS S3 bucket S3 Cumulus syncs are only available in AWS instances in us-west-2 @@ -51,6 +51,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 11/2022: added CMR queries for GRACE/GRACE-FO technical notes Updated 08/2022: moved regular expression function to utilities Dynamically select newest version of granules for index Updated 04/2022: added option for GRACE/GRACE-FO Level-2 data version @@ -80,8 +81,6 @@ def podaac_cumulus(client, DIRECTORY, PROC=[], DREL=[], VERSION=[], #-- check if directory exists and recursively create if not os.makedirs(DIRECTORY,MODE) if not os.path.exists(DIRECTORY) else None - #-- PO.DAAC cumulus bucket - bucket = 'podaac-ops-cumulus-protected' #-- mission shortnames shortname = {'grace':'GRAC', 'grace-fo':'GRFO'} #-- datasets for each processing center @@ -93,17 +92,69 @@ def podaac_cumulus(client, DIRECTORY, PROC=[], DREL=[], VERSION=[], #-- create log file with list of synchronized files (or print to terminal) if LOG: #-- format: PODAAC_sync_2002-04-01.log - today = time.strftime('%Y-%m-%d',time.localtime()) - LOGFILE = 'PODAAC_sync_{0}.log'.format(today) + today = time.strftime('%Y-%m-%d', time.localtime()) + LOGFILE = f'PODAAC_sync_{today}.log' logging.basicConfig(filename=os.path.join(DIRECTORY,LOGFILE), level=logging.INFO) - logging.info('PO.DAAC Cumulus Sync Log ({0})'.format(today)) + logging.info(f'PO.DAAC Cumulus Sync Log ({today})') logging.info('CENTERS={0}'.format(','.join(PROC))) logging.info('RELEASES={0}'.format(','.join(DREL))) else: #-- standard output (terminal output) logging.basicConfig(level=logging.INFO) + #-- Degree 1 (geocenter) coefficients + logging.info('Degree 1 Coefficients:') + #-- SLR C2,0 and C3,0 coefficients + logging.info('C2,0 and C3,0 Coefficients:') + #-- compile regular expression operator for remote files + R1 = re.compile(r'TN-13_GEOC_(CSR|GFZ|JPL)_(.*?).txt', re.VERBOSE) + R2 = re.compile(r'TN-(14)_C30_C20_GSFC_SLR.txt', re.VERBOSE) + #-- 
current time stamp to use for local files + mtime = time.time() + #-- for each processing center (CSR, GFZ, JPL) + for pr in PROC: + #-- for each data release (RL04, RL05, RL06) + for rl in DREL: + #-- for each unique version of data to sync + for version in set(VERSION): + #-- query CMR for product metadata + urls = gravity_toolkit.utilities.cmr_metadata( + mission='grace-fo', center=pr, release=rl, + version=version, provider='POCLOUD', + endpoint=ENDPOINT) + + #-- TN-13 JPL degree 1 files + url, = [url for url in urls if R1.search(url)] + granule = gravity_toolkit.utilities.url_split(url)[-1] + local_file = os.path.join(DIRECTORY,'geocenter',granule) + #-- access auxiliary data from endpoint + if (ENDPOINT == 'data'): + http_pull_file(url, mtime, local_file, + GZIP=GZIP, TIMEOUT=TIMEOUT, + CLOBBER=CLOBBER, MODE=MODE) + elif (ENDPOINT == 's3'): + bucket = gravity_toolkit.utilities.s3_bucket(url) + key = gravity_toolkit.utilities.s3_key(url) + response = client.get_object(Bucket=bucket, Key=key) + s3_pull_file(response, mtime, local_file, + GZIP=GZIP, CLOBBER=CLOBBER, MODE=MODE) + + #-- TN-14 SLR C2,0 and C3,0 files + url, = [url for url in urls if R2.search(url)] + granule = gravity_toolkit.utilities.url_split(url)[-1] + local_file = os.path.join(DIRECTORY,granule) + #-- access auxiliary data from endpoint + if (ENDPOINT == 'data'): + http_pull_file(url, mtime, local_file, + GZIP=GZIP, TIMEOUT=TIMEOUT, + CLOBBER=CLOBBER, MODE=MODE) + elif (ENDPOINT == 's3'): + bucket = gravity_toolkit.utilities.s3_bucket(url) + key = gravity_toolkit.utilities.s3_key(url) + response = client.get_object(Bucket=bucket, Key=key) + s3_pull_file(response, mtime, local_file, + GZIP=GZIP, CLOBBER=CLOBBER, MODE=MODE) #-- GRACE/GRACE-FO AOD1B dealiasing products if AOD1B: @@ -111,7 +162,7 @@ def podaac_cumulus(client, DIRECTORY, PROC=[], DREL=[], VERSION=[], #-- for each data release (RL04, RL05, RL06) for rl in DREL: #-- print string of exact data product - 
logging.info('{0}/{1}/{2}'.format('GFZ','AOD1B',rl)) + logging.info(f'GFZ/AOD1B/{rl}') #-- local directory for exact data product local_dir = os.path.join(DIRECTORY,'AOD1B',rl) #-- check if directory exists and recursively create if not @@ -132,6 +183,7 @@ def podaac_cumulus(client, DIRECTORY, PROC=[], DREL=[], VERSION=[], http_pull_file(url, mtime, local_file, TIMEOUT=TIMEOUT, CLOBBER=CLOBBER, MODE=MODE) elif (ENDPOINT == 's3'): + bucket = gravity_toolkit.utilities.s3_bucket(url) key = gravity_toolkit.utilities.s3_key(url) response = client.get_object(Bucket=bucket, Key=key) s3_pull_file(response, mtime, local_file, @@ -155,11 +207,12 @@ def podaac_cumulus(client, DIRECTORY, PROC=[], DREL=[], VERSION=[], #-- for each satellite mission (grace, grace-fo) for i,mi in enumerate(['grace','grace-fo']): #-- print string of exact data product - logging.info('{0} {1}/{2}/{3}'.format(mi, pr, rl, ds)) + logging.info(f'{mi} {pr}/{rl}/{ds}') #-- query CMR for dataset ids,urls,mtimes = gravity_toolkit.utilities.cmr( mission=mi, center=pr, release=rl, product=ds, - version=VERSION[i], provider='POCLOUD', endpoint=ENDPOINT) + version=VERSION[i], provider='POCLOUD', + endpoint=ENDPOINT) #-- regular expression operator for data product rx = gravity_toolkit.utilities.compile_regex_pattern( pr, rl, ds, mission=shortname[mi]) @@ -168,14 +221,14 @@ def podaac_cumulus(client, DIRECTORY, PROC=[], DREL=[], VERSION=[], #-- retrieve GRACE/GRACE-FO files granule = gravity_toolkit.utilities.url_split(url)[-1] suffix = '.gz' if GZIP else '' - local_file = os.path.join(local_dir, - '{0}{1}'.format(granule, suffix)) + local_file = os.path.join(local_dir, f'{granule}{suffix}') #-- access data from endpoint if (ENDPOINT == 'data'): http_pull_file(url, mtime, local_file, GZIP=GZIP, TIMEOUT=TIMEOUT, CLOBBER=CLOBBER, MODE=MODE) elif (ENDPOINT == 's3'): + bucket = gravity_toolkit.utilities.s3_bucket(url) key = gravity_toolkit.utilities.s3_key(url) response = client.get_object(Bucket=bucket, Key=key) 
s3_pull_file(response, mtime, local_file, @@ -190,7 +243,7 @@ def podaac_cumulus(client, DIRECTORY, PROC=[], DREL=[], VERSION=[], #-- outputting GRACE/GRACE-FO filenames to index with open(os.path.join(local_dir,'index.txt'),'w') as fid: for fi in sorted(grace_files): - print('{0}'.format(fi), file=fid) + print(fi, file=fid) #-- change permissions of index file os.chmod(os.path.join(local_dir,'index.txt'), MODE) @@ -220,8 +273,8 @@ def http_pull_file(remote_file, remote_mtime, local_file, #-- if file does not exist locally, is to be overwritten, or CLOBBER is set if TEST or CLOBBER: #-- Printing files transferred - logging.info('{0} --> '.format(remote_file)) - logging.info('\t{0}{1}\n'.format(local_file,OVERWRITE)) + logging.info(f'{remote_file} -->') + logging.info(f'\t{local_file}{OVERWRITE}\n') #-- chunked transfer encoding size CHUNK = 16 * 1024 #-- Create and submit request. @@ -263,7 +316,7 @@ def s3_pull_file(response, remote_mtime, local_file, #-- if file does not exist locally, is to be overwritten, or CLOBBER is set if TEST or CLOBBER: #-- Printing files transferred - logging.info('{0}{1}'.format(local_file, OVERWRITE)) + logging.info(f'{local_file}{OVERWRITE}') #-- chunked transfer encoding size CHUNK = 16 * 1024 #-- copy remote file contents to local file diff --git a/scripts/podaac_grace_sync.py b/scripts/podaac_grace_sync.py index 7bdd8230..e75cd316 100644 --- a/scripts/podaac_grace_sync.py +++ b/scripts/podaac_grace_sync.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" podaac_grace_sync.py -Written by Tyler Sutterley (08/2022) +Written by Tyler Sutterley (11/2022) Syncs GRACE/GRACE-FO and auxiliary data from the NASA JPL PO.DAAC Drive Server Syncs CSR/GFZ/JPL files for RL04/RL05/RL06 GAA/GAB/GAC/GAD/GSM @@ -70,6 +70,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 08/2022: moved regular expression function to utilities Dynamically 
select newest version of granules for index Updated 04/2022: added option for GRACE/GRACE-FO Level-2 data version @@ -214,10 +215,10 @@ def podaac_grace_sync(DIRECTORY, PROC=[], DREL=[], VERSION=[], if LOG: #-- format: PODAAC_sync_2002-04-01.log today = time.strftime('%Y-%m-%d',time.localtime()) - LOGFILE = 'PODAAC_sync_{0}.log'.format(today) + LOGFILE = f'PODAAC_sync_{today}.log' logging.basicConfig(filename=os.path.join(DIRECTORY,LOGFILE), level=logging.INFO) - logging.info('PO.DAAC Sync Log ({0})'.format(today)) + logging.info(f'PO.DAAC Sync Log ({today})') logging.info('CENTERS={0}'.format(','.join(PROC))) logging.info('RELEASES={0}'.format(','.join(DREL))) else: @@ -226,7 +227,7 @@ def podaac_grace_sync(DIRECTORY, PROC=[], DREL=[], VERSION=[], #-- Degree 1 (geocenter) coefficients logging.info('Degree 1 Coefficients:') - PATH = [HOST,'drive','files','allData','tellus','L2','degree_1'] + PATH = [HOST,'drive','files','allData','gracefo','docs'] remote_dir = posixpath.join(*PATH) local_dir = os.path.join(DIRECTORY,'geocenter') #-- check if geocenter directory exists and recursively create if not @@ -315,12 +316,12 @@ def podaac_grace_sync(DIRECTORY, PROC=[], DREL=[], VERSION=[], os.makedirs(local_dir,MODE) if not os.path.exists(local_dir) else None #-- for each satellite mission (grace, grace-fo) for i,mi in enumerate(['grace','grace-fo']): - logging.info('{0} Newsletters:'.format(mi)) + logging.info(f'{mi} Newsletters:') PATH = [HOST,'drive','files','allData',*newsletter_sub[mi]] remote_dir = posixpath.join(*PATH) #-- compile regular expression operator for remote files NAME = mi.upper().replace('-','_') - R1 = re.compile(r'{0}_SDS_NL_(\d+).pdf'.format(NAME), re.VERBOSE) + R1 = re.compile(rf'{NAME}_SDS_NL_(\d+).pdf', re.VERBOSE) #-- open connection with PO.DAAC drive server at remote directory files,mtimes = gravity_toolkit.utilities.drive_list(PATH, timeout=TIMEOUT,build=False,parser=parser,pattern=R1,sort=True) @@ -339,7 +340,7 @@ def 
podaac_grace_sync(DIRECTORY, PROC=[], DREL=[], VERSION=[], #-- for each data release (RL04, RL05, RL06) for rl in DREL: #-- print string of exact data product - logging.info('{0}/{1}/{2}/{3}'.format('L1B','GFZ','AOD1B',rl)) + logging.info(f'GFZ/AOD1B/{rl}') #-- remote and local directory for exact data product local_dir = os.path.join(DIRECTORY,'AOD1B',rl) #-- check if AOD1B directory exists and recursively create if not @@ -375,7 +376,7 @@ def podaac_grace_sync(DIRECTORY, PROC=[], DREL=[], VERSION=[], #-- for each satellite mission (grace, grace-fo) for i,mi in enumerate(['grace','grace-fo']): #-- print string of exact data product - logging.info('{0} {1}/{2}/{3}'.format(mi, pr, rl, ds)) + logging.info(f'{mi} {pr}/{rl}/{ds}') #-- query CMR for dataset ids,urls,mtimes = gravity_toolkit.utilities.cmr( mission=mi, center=pr, release=rl, product=ds, @@ -400,7 +401,7 @@ def podaac_grace_sync(DIRECTORY, PROC=[], DREL=[], VERSION=[], #-- outputting GRACE/GRACE-FO filenames to index with open(os.path.join(local_dir,'index.txt'),'w') as fid: for fi in sorted(grace_files): - print('{0}'.format(fi), file=fid) + print(fi, file=fid) #-- change permissions of index file os.chmod(os.path.join(local_dir,'index.txt'), MODE) @@ -433,7 +434,7 @@ def http_pull_file(remote_file, remote_mtime, local_file, TIMEOUT=120, #-- compare checksums if (local_hash != remote_hash): TEST = True - OVERWRITE = ' (checksums: {0} {1})'.format(local_hash,remote_hash) + OVERWRITE = f' (checksums: {local_hash} {remote_hash})' elif os.access(local_file, os.F_OK): #-- check last modification time of local file local_mtime = os.stat(local_file).st_mtime @@ -448,8 +449,8 @@ def http_pull_file(remote_file, remote_mtime, local_file, TIMEOUT=120, #-- if file does not exist locally, is to be overwritten, or CLOBBER is set if TEST or CLOBBER: #-- Printing files transferred - logging.info('{0} --> '.format(remote_file)) - logging.info('\t{0}{1}\n'.format(local_file,OVERWRITE)) + logging.info(f'{remote_file} --> ') 
+ logging.info(f'\t{local_file}{OVERWRITE}\n') #-- if executing copy command (not only printing the files) if not LIST: #-- chunked transfer encoding size @@ -564,11 +565,11 @@ def main(): except: #-- check that NASA Earthdata credentials were entered if not args.user: - prompt = 'Username for {0}: '.format(HOST) + prompt = f'Username for {HOST}: ' args.user = builtins.input(prompt) #-- enter WebDAV password securely from command-line if not args.webdav: - prompt = 'Password for {0}@{1}: '.format(args.user,HOST) + prompt = f'Password for {args.user}@{HOST}: ' args.webdav = getpass.getpass(prompt) #-- build a urllib opener for PO.DAAC Drive @@ -577,7 +578,7 @@ def main(): #-- check internet connection before attempting to run program #-- check JPL PO.DAAC Drive credentials before attempting to run program - DRIVE = 'https://{0}/drive/files'.format(HOST) + DRIVE = f'https://{HOST}/drive/files' if gravity_toolkit.utilities.check_credentials(DRIVE): podaac_grace_sync(args.directory, PROC=args.center, DREL=args.release, VERSION=args.version, diff --git a/scripts/podaac_webdav.py b/scripts/podaac_webdav.py index ec75d663..b38799b7 100644 --- a/scripts/podaac_webdav.py +++ b/scripts/podaac_webdav.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" podaac_webdav.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (11/2022) Retrieves and prints a user's PO.DAAC Drive WebDAV credentials @@ -46,6 +46,7 @@ utilities.py: download and management utilities for syncing files UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 04/2022: use argparse descriptions within documentation Updated 05/2021: use try/except for retrieving netrc credentials Updated 04/2021: set a default netrc file and check access @@ -85,7 +86,7 @@ def podaac_webdav(USER, PASSWORD, parser): ) #-- retrieve cookies from NASA Earthdata URS request = gravity_toolkit.utilities.urllib2.Request( - 
url=posixpath.join(URS,'oauth','authorize?{0}'.format(parameters))) + url=posixpath.join(URS,'oauth',f'authorize?{parameters}')) gravity_toolkit.utilities.urllib2.urlopen(request) #-- read and parse request for webdav password request = gravity_toolkit.utilities.urllib2.Request(url=HOST) @@ -137,11 +138,11 @@ def main(): except: #-- check that NASA Earthdata credentials were entered if not args.user: - prompt = 'Username for {0}: '.format(URS) + prompt = f'Username for {URS}: ' args.user = builtins.input(prompt) #-- enter password securely from command-line if not args.password: - prompt = 'Password for {0}@{1}: '.format(args.user,URS) + prompt = f'Password for {args.user}@{URS}: ' args.password = getpass.getpass(prompt) #-- check internet connection before attempting to run program @@ -150,14 +151,13 @@ def main(): #-- compile HTML parser for lxml WEBDAV = podaac_webdav(args.user, args.password, lxml.etree.HTMLParser()) #-- output to terminal or append to netrc file - a = (args.user,HOST,WEBDAV) if args.append: #-- append to netrc file and set permissions level with open(args.netrc,'a+') as f: - f.write('machine {1} login {0} password {2}\n'.format(*a)) + f.write(f'machine {args.user} login {HOST} password {WEBDAV}\n') os.chmod(args.netrc, 0o600) else: - print('\nWebDAV Password for {0}@{1}:\n\t{2}'.format(*a)) + print(f'\nWebDAV Password for {args.user}@{HOST}:\n\t{WEBDAV}') #-- run main program if __name__ == '__main__': diff --git a/scripts/regress_grace_maps.py b/scripts/regress_grace_maps.py index 43bf0482..d9402eb3 100755 --- a/scripts/regress_grace_maps.py +++ b/scripts/regress_grace_maps.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" regress_grace_maps.py -Written by Tyler Sutterley (04/2022) +Written by Tyler Sutterley (11/2022) Reads in GRACE/GRACE-FO spatial files from grace_spatial_maps.py and fits a regression model at each grid point @@ -60,6 +60,7 @@ utilities.py: download and management utilities for files UPDATE HISTORY: + Updated 11/2022: use 
f-strings for formatting verbose or ascii output Updated 04/2022: use argparse descriptions within documentation Updated 12/2021: can use variable loglevels for verbose output Updated 10/2021: using python logging for handling verbose output @@ -103,10 +104,10 @@ def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) - logging.info('module name: {0}'.format(__name__)) + logging.info(f'module name: {__name__}') if hasattr(os, 'getppid'): - logging.info('parent process: {0:d}'.format(os.getppid())) - logging.info('process id: {0:d}'.format(os.getpid())) + logging.info(f'parent process: {os.getppid():d}') + logging.info(f'process id: {os.getpid():d}') #-- program module to run with specified parameters def regress_grace_maps(LMAX, RAD, @@ -132,9 +133,9 @@ def regress_grace_maps(LMAX, RAD, suffix = dict(ascii='txt', netCDF4='nc', HDF5='H5')[DATAFORM] #-- flag for spherical harmonic order - order_str = 'M{0:d}'.format(MMAX) if MMAX and (MMAX != LMAX) else '' + order_str = f'M{MMAX:d}' if MMAX and (MMAX != LMAX) else '' #-- Calculating the Gaussian smoothing for radius RAD - gw_str = '_r{0:0.0f}km'.format(RAD) if (RAD != 0) else '' + gw_str = f'_r{RAD:0.0f}km' if (RAD != 0) else '' #-- destriped GRACE/GRACE-FO coefficients ds_str = '_FL' if DESTRIPE else '' #-- distributing removed mass uniformly over ocean @@ -571,7 +572,7 @@ def main(): #-- if there has been an error exception #-- print the type, value, and stack trace of the #-- current exception being handled - logging.critical('process id {0:d} failed'.format(os.getpid())) + logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) if args.log:#-- write failed job completion log file output_error_log_file(args) diff --git a/scripts/run_sea_level_equation.py b/scripts/run_sea_level_equation.py index 3b64776d..d0b800f6 100644 --- a/scripts/run_sea_level_equation.py +++ b/scripts/run_sea_level_equation.py @@ -1,6 +1,6 @@ #!/usr/bin/env python u""" 
-run_sea_level_equation.py (04/2022) +run_sea_level_equation.py (11/2022) Solves the sea level equation with the option of including polar motion feedback Uses a Clenshaw summation to calculate the spherical harmonic summation @@ -67,6 +67,7 @@ Bollettino di Geodesia e Scienze (1982) UPDATE HISTORY: + Updated 11/2022: use f-strings for formatting verbose or ascii output Updated 04/2022: use wrapper function for reading load Love numbers use argparse descriptions within sphinx documentation Updated 12/2021: can use variable loglevels for verbose output @@ -119,10 +120,10 @@ def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) - logging.info('module name: {0}'.format(__name__)) + logging.info(f'module name: {__name__}') if hasattr(os, 'getppid'): - logging.info('parent process: {0:d}'.format(os.getppid())) - logging.info('process id: {0:d}'.format(os.getpid())) + logging.info(f'parent process: {os.getppid():d}') + logging.info(f'process id: {os.getpid():d}') #-- PURPOSE: Computes Sea Level Fingerprints including polar motion feedback def run_sea_level_equation(INPUT_FILE, OUTPUT_FILE, @@ -327,7 +328,7 @@ def main(): #-- if there has been an error exception #-- print the type, value, and stack trace of the #-- current exception being handled - logging.critical('process id {0:d} failed'.format(os.getpid())) + logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) #-- run main program diff --git a/scripts/scale_grace_maps.py b/scripts/scale_grace_maps.py index 7d08ad87..98c61620 100644 --- a/scripts/scale_grace_maps.py +++ b/scripts/scale_grace_maps.py @@ -1,7 +1,7 @@ #!/usr/bin/env python u""" scale_grace_maps.py -Written by Tyler Sutterley (09/2022) +Written by Tyler Sutterley (11/2022) Reads in GRACE/GRACE-FO spherical harmonic coefficients and exports monthly scaled spatial fields, estimated scaling errors, @@ -155,6 +155,7 @@ https://doi.org/10.1029/2005GL025305 UPDATE HISTORY: + Updated 11/2022: use 
f-strings for formatting verbose or ascii output Updated 09/2022: add option to replace degree 4 zonal harmonics with SLR Updated 04/2022: use wrapper function for reading load Love numbers use argparse descriptions within sphinx documentation @@ -200,10 +201,10 @@ def info(args): logging.info(os.path.basename(sys.argv[0])) logging.info(args) - logging.info('module name: {0}'.format(__name__)) + logging.info(f'module name: {__name__}') if hasattr(os, 'getppid'): - logging.info('parent process: {0:d}'.format(os.getppid())) - logging.info('process id: {0:d}'.format(os.getpid())) + logging.info(f'parent process: {os.getppid():d}') + logging.info(f'process id: {os.getpid():d}') #-- PURPOSE: import GRACE/GRACE-FO files for a given months range @@ -268,7 +269,7 @@ def scale_grace_maps(base_dir, PROC, DREL, DSET, LMAX, RAD, atm_str = '_wATM' if ATM else '' #-- output string for both LMAX==MMAX and LMAX != MMAX cases MMAX = np.copy(LMAX) if not MMAX else MMAX - order_str = 'M{0:d}'.format(MMAX) if (MMAX != LMAX) else '' + order_str = f'M{MMAX:d}' if (MMAX != LMAX) else '' #-- output spatial units unit_str = 'cmwe' unit_name = 'Equivalent Water Thickness' @@ -278,7 +279,7 @@ def scale_grace_maps(base_dir, PROC, DREL, DSET, LMAX, RAD, #-- Calculating the Gaussian smoothing for radius RAD if (RAD != 0): wt = 2.0*np.pi*gauss_weights(RAD,LMAX) - gw_str = '_r{0:0.0f}km'.format(RAD) + gw_str = f'_r{RAD:0.0f}km' else: #-- else = 1 wt = np.ones((LMAX+1)) @@ -937,7 +938,7 @@ def main(): #-- if there has been an error exception #-- print the type, value, and stack trace of the #-- current exception being handled - logging.critical('process id {0:d} failed'.format(os.getpid())) + logging.critical(f'process id {os.getpid():d} failed') logging.error(traceback.format_exc()) if args.log:#-- write failed job completion log file output_error_log_file(args) diff --git a/setup.py b/setup.py index 06c2042f..d88c2257 100644 --- a/setup.py +++ b/setup.py @@ -39,10 +39,11 @@ 'Topic :: 
Scientific/Engineering :: Physics', 'License :: OSI Approved :: MIT License', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', ], keywords=keywords, packages=find_packages(),