From cdd4c1de299a420c329944affc790822fcace194 Mon Sep 17 00:00:00 2001 From: tsutterley Date: Thu, 19 Mar 2020 10:58:13 -0700 Subject: [PATCH] jupyter notebook on binder update documents and readme rename notebook updated program documentation add isabella's paper to readme fix references more reference fixes PREM Update read_tellus_geocenter.md --- GRACE-Spatial-Maps.ipynb | 782 ++++++++++++++++++++ README.md | 86 ++- doc/aod1b_geocenter.md | 28 + doc/combine_harmonics.md | 28 + doc/convert_calendar_decimal.md | 25 + doc/convert_julian.md | 28 + doc/destripe_harmonics.md | 25 + doc/gauss_weights.md | 18 + doc/geocenter.md | 22 + doc/grace_date.md | 29 + doc/grace_find_months.md | 26 + doc/grace_input_months.md | 57 ++ doc/hdf5_read.md | 31 + doc/hdf5_read_stokes.md | 29 + doc/hdf5_stokes.md | 31 + doc/hdf5_write.md | 30 + doc/ncdf_read.md | 31 + doc/ncdf_read_stokes.md | 29 + doc/ncdf_stokes.md | 31 + doc/ncdf_write.md | 30 + doc/plm_colombo.md | 22 + doc/plm_holmes.md | 22 + doc/plm_mohlenkamp.md | 22 + doc/read_CSR_monthly_6x1.md | 24 + doc/read_GRACE_harmonics.md | 34 + doc/read_SLR_C20.md | 28 + doc/read_SLR_C30.md | 26 + doc/read_SLR_geocenter.md | 31 + doc/read_love_numbers.md | 25 + doc/read_tellus_geocenter.md | 30 + environment.yml | 6 + gravity_toolkit/__init__.py | 27 + gravity_toolkit/aod1b_geocenter.py | 261 +++++++ gravity_toolkit/combine_harmonics.py | 79 ++ gravity_toolkit/convert_calendar_decimal.py | 167 +++++ gravity_toolkit/convert_julian.py | 106 +++ gravity_toolkit/destripe_harmonics.py | 247 +++++++ gravity_toolkit/gauss_weights.py | 69 ++ gravity_toolkit/geocenter.py | 51 ++ gravity_toolkit/grace_date.py | 241 ++++++ gravity_toolkit/grace_find_months.py | 66 ++ gravity_toolkit/grace_input_months.py | 399 ++++++++++ gravity_toolkit/hdf5_read.py | 107 +++ gravity_toolkit/hdf5_read_stokes.py | 124 ++++ gravity_toolkit/hdf5_stokes.py | 174 +++++ gravity_toolkit/hdf5_write.py | 122 +++ gravity_toolkit/ncdf_read.py | 122 +++ 
gravity_toolkit/ncdf_read_stokes.py | 130 ++++ gravity_toolkit/ncdf_stokes.py | 183 +++++ gravity_toolkit/ncdf_write.py | 125 ++++ gravity_toolkit/plm_colombo.py | 86 +++ gravity_toolkit/plm_holmes.py | 139 ++++ gravity_toolkit/plm_mohlenkamp.py | 105 +++ gravity_toolkit/read_CSR_monthly_6x1.py | 146 ++++ gravity_toolkit/read_GRACE_harmonics.py | 6 +- gravity_toolkit/read_SLR_C20.py | 280 +++++++ gravity_toolkit/read_SLR_C30.py | 166 +++++ gravity_toolkit/read_SLR_geocenter.py | 214 ++++++ gravity_toolkit/read_love_numbers.py | 98 +++ gravity_toolkit/read_tellus_geocenter.py | 161 ++++ requirements.txt | 4 + setup.py | 5 +- 62 files changed, 5840 insertions(+), 36 deletions(-) create mode 100644 GRACE-Spatial-Maps.ipynb create mode 100644 doc/aod1b_geocenter.md create mode 100644 doc/combine_harmonics.md create mode 100644 doc/convert_calendar_decimal.md create mode 100644 doc/convert_julian.md create mode 100644 doc/destripe_harmonics.md create mode 100644 doc/gauss_weights.md create mode 100644 doc/geocenter.md create mode 100644 doc/grace_date.md create mode 100644 doc/grace_find_months.md create mode 100644 doc/grace_input_months.md create mode 100644 doc/hdf5_read.md create mode 100644 doc/hdf5_read_stokes.md create mode 100644 doc/hdf5_stokes.md create mode 100644 doc/hdf5_write.md create mode 100644 doc/ncdf_read.md create mode 100644 doc/ncdf_read_stokes.md create mode 100644 doc/ncdf_stokes.md create mode 100644 doc/ncdf_write.md create mode 100644 doc/plm_colombo.md create mode 100644 doc/plm_holmes.md create mode 100644 doc/plm_mohlenkamp.md create mode 100644 doc/read_CSR_monthly_6x1.md create mode 100644 doc/read_GRACE_harmonics.md create mode 100644 doc/read_SLR_C20.md create mode 100644 doc/read_SLR_C30.md create mode 100644 doc/read_SLR_geocenter.md create mode 100644 doc/read_love_numbers.md create mode 100644 doc/read_tellus_geocenter.md create mode 100644 gravity_toolkit/aod1b_geocenter.py create mode 100755 gravity_toolkit/combine_harmonics.py 
create mode 100644 gravity_toolkit/convert_calendar_decimal.py create mode 100644 gravity_toolkit/convert_julian.py create mode 100644 gravity_toolkit/destripe_harmonics.py create mode 100755 gravity_toolkit/gauss_weights.py create mode 100644 gravity_toolkit/geocenter.py create mode 100644 gravity_toolkit/grace_date.py create mode 100644 gravity_toolkit/grace_find_months.py create mode 100644 gravity_toolkit/grace_input_months.py create mode 100755 gravity_toolkit/hdf5_read.py create mode 100755 gravity_toolkit/hdf5_read_stokes.py create mode 100755 gravity_toolkit/hdf5_stokes.py create mode 100755 gravity_toolkit/hdf5_write.py create mode 100755 gravity_toolkit/ncdf_read.py create mode 100755 gravity_toolkit/ncdf_read_stokes.py create mode 100755 gravity_toolkit/ncdf_stokes.py create mode 100755 gravity_toolkit/ncdf_write.py create mode 100755 gravity_toolkit/plm_colombo.py create mode 100755 gravity_toolkit/plm_holmes.py create mode 100755 gravity_toolkit/plm_mohlenkamp.py create mode 100644 gravity_toolkit/read_CSR_monthly_6x1.py create mode 100644 gravity_toolkit/read_SLR_C20.py create mode 100644 gravity_toolkit/read_SLR_C30.py create mode 100644 gravity_toolkit/read_SLR_geocenter.py create mode 100755 gravity_toolkit/read_love_numbers.py create mode 100644 gravity_toolkit/read_tellus_geocenter.py diff --git a/GRACE-Spatial-Maps.ipynb b/GRACE-Spatial-Maps.ipynb new file mode 100644 index 00000000..af67f352 --- /dev/null +++ b/GRACE-Spatial-Maps.ipynb @@ -0,0 +1,782 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "## GRACE/GRACE-FO Harmonic Processing Program\n", + "\n", + "```bash\n", + "pip3 install --user ipywidgets\n", + "jupyter nbextension enable --py --user widgetsnbextension\n", + "jupyter-notebook\n", + "```" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "import os\n", + "import numpy as np\n", + "import matplotlib\n", + 
"matplotlib.rcParams['mathtext.default'] = 'regular'\n", + "matplotlib.rcParams[\"animation.html\"] = \"jshtml\"\n", + "matplotlib.rcParams[\"animation.embed_limit\"] = 40\n", + "import matplotlib.pyplot as plt\n", + "import matplotlib.colors as colors\n", + "import matplotlib.animation as animation\n", + "import cartopy.crs as ccrs\n", + "import ipywidgets as widgets\n", + "from IPython.display import HTML\n", + "\n", + "from gravity_toolkit.grace_find_months import grace_find_months\n", + "from gravity_toolkit.grace_input_months import grace_input_months\n", + "from gravity_toolkit.read_love_numbers import read_love_numbers\n", + "from gravity_toolkit.plm_holmes import plm_holmes\n", + "from gravity_toolkit.gauss_weights import gauss_weights\n", + "from gravity_toolkit.destripe_harmonics import destripe_harmonics\n", + "from gravity_toolkit.combine_harmonics import combine_harmonics\n", + "from gravity_toolkit.ncdf_write import ncdf_write\n", + "from gravity_toolkit.hdf5_write import hdf5_write" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Set the GRACE/GRACE-FO Data Directory" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# set the directory with GRACE/GRACE-FO data\n", + "dirText = widgets.Text(\n", + " value=os.getcwd(),\n", + " description='Directory:',\n", + " disabled=False\n", + ")\n", + "display(dirText)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Set GRACE/GRACE-FO Parameters\n", + "- GRACE/GRACE-FO Processing Center\n", + "- GRACE/GRACE-FO Data Release\n", + "- GRACE/GRACE-FO Data Product\n", + "- GRACE/GRACE-FO Date Range" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": { + "scrolled": true + }, + "outputs": [], + "source": [ + "# dropdown menu for setting processing center\n", + "proc_list = ['CSR', 'GFZ', 'JPL', 'CNES']\n", + "proc_default = 'CSR'\n", + "procDropdown = 
widgets.Dropdown(\n", + " options=proc_list,\n", + " value=proc_default,\n", + " description='Center:',\n", + " disabled=False,\n", + ")\n", + "\n", + "# dropdown menu for setting data release\n", + "drel_list = ['RL04', 'RL05', 'RL06']\n", + "drel_default = 'RL06'\n", + "drelDropdown = widgets.Dropdown(\n", + " description='Release:',\n", + " options=drel_list,\n", + " value=drel_default,\n", + " disabled=False,\n", + ")\n", + "\n", + "# dropdown menu for setting data product\n", + "dset_list = ['GAC', 'GAD', 'GSM']\n", + "dset_default = 'GSM'\n", + "dsetDropdown = widgets.Dropdown(\n", + " description='Product:',\n", + " options=dset_list,\n", + " value=dset_default, \n", + " disabled=False,\n", + ")\n", + "\n", + "# extract directory value from widget\n", + "base_dir = os.path.expanduser(dirText.value)\n", + "# find available months for data product\n", + "total_months = grace_find_months(base_dir, procDropdown.value,\n", + " drelDropdown.value, DSET=dsetDropdown.value)\n", + "# select months to run\n", + "# https://tsutterley.github.io/data/GRACE-Months.html\n", + "options=['{0:03d}'.format(m) for m in total_months['months']]\n", + "monthsSelect = widgets.SelectMultiple(\n", + " options=options,\n", + " value=options,\n", + " description='Months:',\n", + " disabled=False\n", + ")\n", + "\n", + "# function for setting the data release\n", + "def set_release(sender):\n", + " if (procDropdown.value == 'CNES'):\n", + " drel_list = ['RL01', 'RL02', 'RL03']\n", + " drel_default = 'RL03'\n", + " else:\n", + " drel_list = ['RL04', 'RL05', 'RL06']\n", + " drel_default = 'RL06'\n", + " drelDropdown.options=drel_list\n", + " drelDropdown.value=drel_default\n", + "\n", + "# function for setting the data product\n", + "def set_product(sender):\n", + " if ((procDropdown.value == 'CNES') and (drelDropdown.value == 'RL01')):\n", + " dset_list = ['GAC', 'GSM']\n", + " elif ((procDropdown.value == 'CNES') and (drelDropdown.value == 'RL02')):\n", + " dset_list = ['GAA', 'GAB', 
'GSM']\n", + " elif ((procDropdown.value == 'CNES') and (drelDropdown.value == 'RL03')):\n", + " dset_list = ['GSM']\n", + " elif (procDropdown.value == 'CSR'):\n", + " dset_list = ['GAC', 'GAD', 'GSM']\n", + " else:\n", + " dset_list = ['GAA', 'GAB', 'GAC', 'GAD', 'GSM']\n", + " dsetDropdown.options=dset_list\n", + " dsetDropdown.value=dset_default\n", + " \n", + "# function for updating the available month\n", + "def update_months(sender):\n", + " total_months = grace_find_months(base_dir, procDropdown.value,\n", + " drelDropdown.value, DSET=dsetDropdown.value)\n", + " options=['{0:03d}'.format(m) for m in total_months['months']]\n", + " monthsSelect.options=options\n", + " monthsSelect.value=options\n", + "\n", + "# watch widgets for changes\n", + "procDropdown.observe(set_release)\n", + "drelDropdown.observe(set_product)\n", + "procDropdown.observe(set_product)\n", + "procDropdown.observe(update_months)\n", + "drelDropdown.observe(update_months)\n", + "\n", + "# display widgets for setting GRACE/GRACE-FO parameters\n", + "widgets.VBox([procDropdown,drelDropdown,dsetDropdown,monthsSelect])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Set Parameters for Reading GRACE/GRACE-FO Data\n", + "- Maximum Degree and Order\n", + "- Geocenter\n", + "- Oblateness\n", + "- Low Degree Zonals\n", + "- Pole Tide Correction\n", + "- Atmospheric Correction" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# set the spherical harmonic truncation parameters\n", + "lmax_default = {}\n", + "lmax_default['CNES'] = {'RL01':50,'RL02':50,'RL03':80}\n", + "# CSR RL04/5/6 at LMAX 60\n", + "lmax_default['CSR'] = {'RL04':60,'RL05':60,'RL06':60}\n", + "# GFZ RL04/5 at LMAX 90\n", + "# GFZ RL06 at LMAX 60\n", + "lmax_default['GFZ'] = {'RL04':90,'RL05':90,'RL06':60}\n", + "# JPL RL04/5/6 at LMAX 60\n", + "lmax_default['JPL'] = {'RL04':60,'RL05':60,'RL06':60}\n", + "# text entry for spherical 
harmonic degree\n", + "lmaxText = widgets.BoundedIntText(\n", + " min=0,\n", + " max=lmax_default[procDropdown.value][drelDropdown.value],\n", + " value=lmax_default[procDropdown.value][drelDropdown.value],\n", + " step=1,\n", + " description='max:',\n", + " disabled=False\n", + ")\n", + "\n", + "# text entry for spherical harmonic order\n", + "mmaxText = widgets.BoundedIntText(\n", + " min=0,\n", + " max=lmax_default[procDropdown.value][drelDropdown.value],\n", + " value=lmax_default[procDropdown.value][drelDropdown.value],\n", + " step=1, \n", + " description='mmax:',\n", + " disabled=False\n", + ")\n", + "\n", + "# dropdown menu for setting geocenter\n", + "# Tellus: GRACE/GRACE-FO TN-13 from PO.DAAC\n", + "# https://grace.jpl.nasa.gov/data/get-data/geocenter/\n", + "# SLR: satellite laser ranging from CSR\n", + "# ftp://ftp.csr.utexas.edu/pub/slr/geocenter/\n", + "# SLF: Sutterley and Velicogna, Remote Sensing (2019)\n", + "# https://www.mdpi.com/2072-4292/11/18/2108\n", + "geocenter_list = ['[none]', 'Tellus', 'SLR', 'SLF']\n", + "geocenter_default = 'SLF' if (dsetDropdown.value == 'GSM') else '[none]'\n", + "geocenterDropdown = widgets.Dropdown(\n", + " options=geocenter_list,\n", + " value=geocenter_default,\n", + " description='Geocenter:',\n", + " disabled=False,\n", + ")\n", + "\n", + "# SLR C20\n", + "C20_list = ['[none]','CSR','GSFC']\n", + "C20_default = 'GSFC' if (dsetDropdown.value == 'GSM') else '[none]'\n", + "C20Dropdown = widgets.Dropdown(\n", + " options=C20_list,\n", + " value=C20_default,\n", + " description='SLR C20:',\n", + " disabled=False,\n", + ")\n", + "\n", + "# SLR C30\n", + "C30_list = ['[none]','CSR','GSFC']\n", + "C30_default = 'GSFC' if (dsetDropdown.value == 'GSM') else '[none]'\n", + "C30Dropdown = widgets.Dropdown(\n", + " options=C30_list,\n", + " value=C30_default,\n", + " description='SLR C30:',\n", + " disabled=False,\n", + ")\n", + "\n", + "# Pole Tide Drift (Wahr et al., 2015) for Release-5\n", + "poletide_default = True 
if ((drelDropdown.value == 'RL05')\n", + " and (dsetDropdown.value == 'GSM')) else False\n", + "poletideCheckbox = widgets.Checkbox(\n", + " value=poletide_default,\n", + " description='Pole Tide Corrections',\n", + " disabled=False\n", + ")\n", + "\n", + "\n", + "# ECMWF Atmospheric Jump Corrections for Release-5\n", + "atm_default = True if (dsetDropdown.value == 'RL05') else False\n", + "atmCheckbox = widgets.Checkbox(\n", + " value=atm_default,\n", + " description='ATM Corrections',\n", + " disabled=False\n", + ")\n", + "\n", + "# functions for setting the spherical harmonic truncation\n", + "def set_SHdegree(sender):\n", + " lmaxText.max=lmax_default[procDropdown.value][drelDropdown.value]\n", + " lmaxText.value=lmax_default[procDropdown.value][drelDropdown.value]\n", + "\n", + "def set_SHorder(sender):\n", + " mmaxText.max=lmaxText.value\n", + " mmaxText.value=lmaxText.value\n", + "\n", + "# functions for setting pole tide and atmospheric corrections for Release-5\n", + "def set_pole_tide(sender):\n", + " poletideCheckbox.value = True if ((drelDropdown.value == 'RL05')\n", + " and (dsetDropdown.value == 'GSM')) else False\n", + "\n", + "def set_atm_corr(sender):\n", + " atmCheckbox.value = True if (drelDropdown.value == 'RL05') else False\n", + "\n", + "# watch processing center widget for changes\n", + "procDropdown.observe(set_SHdegree)\n", + "# watch data release widget for changes\n", + "drelDropdown.observe(set_SHdegree)\n", + "drelDropdown.observe(set_pole_tide)\n", + "drelDropdown.observe(set_atm_corr)\n", + "# watch data product widget for changes\n", + "dsetDropdown.observe(set_pole_tide)\n", + "# watch spherical harmonic degree widget for changes\n", + "lmaxText.observe(set_SHorder)\n", + " \n", + "# display widgets for setting GRACE/GRACE-FO read parameters\n", + "widgets.VBox([lmaxText,mmaxText,geocenterDropdown,\n", + " C20Dropdown,C30Dropdown,poletideCheckbox,atmCheckbox])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ 
+ "### Read GRACE/GRACE-FO data\n", + "- Extract Data Parameters" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# extract values from widgets\n", + "PROC = procDropdown.value\n", + "DREL = drelDropdown.value\n", + "DSET = dsetDropdown.value\n", + "months = [int(m) for m in monthsSelect.value]\n", + "LMAX = lmaxText.value\n", + "MMAX = mmaxText.value\n", + "DEG1 = geocenterDropdown.value\n", + "SLR_C20 = C20Dropdown.value\n", + "SLR_C30 = C30Dropdown.value\n", + "POLE_TIDE = poletideCheckbox.value\n", + "ATM = atmCheckbox.value\n", + "\n", + "# read GRACE/GRACE-FO data for parameters\n", + "start_mon = np.min(months)\n", + "end_mon = np.max(months)\n", + "missing = sorted(set(np.arange(start_mon,end_mon+1)) - set(months))\n", + "GRACE_Ylms = grace_input_months(base_dir, PROC, DREL, DSET,\n", + " LMAX, start_mon, end_mon, missing, SLR_C20, DEG1,\n", + " MMAX=MMAX, SLR_C30=SLR_C30, POLE_TIDE=POLE_TIDE,\n", + " ATM=ATM, MEAN=True)\n", + "# mid-date times in year-decimal\n", + "tdec = GRACE_Ylms['time'].copy()\n", + "# number of time steps\n", + "nt = len(months)\n", + "# flag for spherical harmonic order\n", + "order_str = 'M{0:d}'.format(MMAX) if (MMAX != LMAX) else ''" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Set Parameters to Run Specific Analyses\n", + "- Gaussian Smoothing Radius in kilometers \n", + "- Filter (destripe) harmonics (Swenson et al., 2006) \n", + "- Spatial degree spacing \n", + "- Spatial degree interval \n", + "1) (-180:180,90:-90) \n", + "2) (degree spacing)/2 \n", + "- Output spatial units \n", + "1) equivalent water thickness (cm) \n", + "2) geoid height (mm) \n", + "3) elastic crustal deformation (mm) \n", + "4) gravitational perturbation (μGal) \n", + "5) equivalent surface pressure (Pa) " + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# text entry for Gaussian Smoothing 
Radius in km\n", + "gaussianText = widgets.BoundedFloatText(\n", + " value=0,\n", + " min=0,\n", + " max=1000.0,\n", + " step=50,\n", + " description='Gaussian:',\n", + " disabled=False\n", + ")\n", + "\n", + "# Destripe Spherical Harmonics\n", + "destripeCheckbox = widgets.Checkbox(\n", + " value=False,\n", + " description='Destripe',\n", + " disabled=False\n", + ")\n", + "\n", + "# text entry for Degree Spacing\n", + "spacingText = widgets.BoundedFloatText(\n", + " value=1.0,\n", + " min=0,\n", + " max=360.0,\n", + " step=0.5,\n", + " description='Spacing:',\n", + " disabled=False\n", + ")\n", + "\n", + "# dropdown menu for setting output data format\n", + "interval_list = ['(-180:180,90:-90)', '(Degree spacing)/2']\n", + "interval_default = '(Degree spacing)/2'\n", + "intervalDropdown = widgets.Dropdown(\n", + " options=interval_list,\n", + " value=interval_default,\n", + " description='Interval:',\n", + " disabled=False,\n", + ")\n", + "\n", + "# dropdown menu for setting units\n", + "# 1: cm of water thickness\n", + "# 2: mm of geoid height\n", + "# 3: mm of elastic crustal deformation\n", + "# 4: microGal gravitational perturbation\n", + "# 5: millibar of equivalent surface pressure\n", + "unit_list = ['cmwe', 'mmGH', 'mmCU', u'\\u03BCGal', 'mbar']\n", + "unit_label = ['cm', 'mm', 'mm', u'\\u03BCGal', 'mb']\n", + "unit_name = ['Equivalent Water Thickness', 'Geoid Height',\n", + " 'Elastic Crustal Uplift', 'Gravitational Undulation',\n", + " 'Equivalent Surface Pressure']\n", + "unit_default = 'cmwe'\n", + "unitsDropdown = widgets.Dropdown(\n", + " options=unit_list,\n", + " value=unit_default,\n", + " description='Units:',\n", + " disabled=False,\n", + ")\n", + "\n", + "# dropdown menu for setting output data format\n", + "format_list = ['[None]','netCDF4', 'HDF5']\n", + "format_default = '[None]'\n", + "formatDropdown = widgets.Dropdown(\n", + " options=format_list,\n", + " value=format_default,\n", + " description='Format:',\n", + " disabled=False,\n", + 
")\n", + "\n", + "# display widgets for setting GRACE/GRACE-FO read parameters\n", + "widgets.VBox([gaussianText,destripeCheckbox,spacingText,\n", + " intervalDropdown,unitsDropdown,formatDropdown])" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# Output degree spacing\n", + "dlon = spacingText.value\n", + "dlat = spacingText.value\n", + "# Output Degree Interval\n", + "INTERVAL = intervalDropdown.index + 1\n", + "if (INTERVAL == 1):\n", + " # (-180:180,90:-90)\n", + " nlon = np.int((360.0/dlon)+1.0)\n", + " nlat = np.int((180.0/dlat)+1.0)\n", + " glon = -180 + dlon*np.arange(0,nlon)\n", + " glat = 90.0 - dlat*np.arange(0,nlat)\n", + "elif (INTERVAL == 2):\n", + " # (Degree spacing)/2\n", + " glon = np.arange(-180+dlon/2.0,180+dlon/2.0,dlon)\n", + " glat = np.arange(90.0-dlat/2.0,-90.0-dlat/2.0,-dlat)\n", + " nlon = len(glon)\n", + " nlat = len(glat)\n", + "\n", + "# Computing plms for converting to spatial domain\n", + "theta = (90.0-glat)*np.pi/180.0\n", + "PLM,dPLM = plm_holmes(LMAX,np.cos(theta))\n", + "\n", + "# read load love numbers file\n", + "love_numbers_file = os.path.join(base_dir,'love_numbers')\n", + "# LMAX of load love numbers from Han and Wahr (1995) is 696.\n", + "# from Wahr (2007) linearly interpolating kl works\n", + "# however, as we are linearly extrapolating out, do not make\n", + "# LMAX too much larger than 696\n", + "if (LMAX > 696):\n", + " # Creates arrays of kl, hl, and ll Love Numbers\n", + " hl = np.zeros((LMAX+1))\n", + " kl = np.zeros((LMAX+1))\n", + " ll = np.zeros((LMAX+1))\n", + " hl[:697],kl[:697],ll[:697] = read_love_numbers(love_numbers_file)\n", + " # for degrees greater than 696\n", + " for l in range(697,LMAX+1):\n", + " hl[l] = 2.0*hl[l-1] - hl[l-2]# linearly extrapolating hl\n", + " kl[l] = 2.0*kl[l-1] - kl[l-2]# linearly extrapolating kl\n", + " ll[l] = 2.0*ll[l-1] - ll[l-2]# linearly extrapolating ll\n", + "else:\n", + " # read arrays of kl, hl, and 
ll Love Numbers\n", + " hl,kl,ll = read_love_numbers(love_numbers_file)\n", + "# setting Load Love Number (kl) to 0.027\n", + "# following Wahr (1998) and Trupin (1992)\n", + "kl[1] = -(hl[1]+2.0*ll[1])/3.0\n", + "\n", + "# gaussian smoothing radius in km (Jekeli, 1981)\n", + "RAD = gaussianText.value\n", + "if (RAD != 0):\n", + " wt = 2.0*np.pi*gauss_weights(RAD,LMAX)\n", + " gw_str = '_r{0:0.0f}km'.format(RAD)\n", + "else:\n", + " # else = 1\n", + " wt = np.ones((LMAX+1))\n", + " gw_str = ''\n", + "\n", + "# destriping the GRACE/GRACE-FO harmonics\n", + "ds_str = '_FL' if destripeCheckbox.value else ''\n", + " \n", + "# Earth Parameters\n", + "# Average Density of the Earth [g/cm^3]\n", + "rho_e = 5.517\n", + "# Average Radius of the Earth [cm]\n", + "rad_e = 6.371e8\n", + "# WGS84 Gravitational Constant of the Earth [cm^3/s^2]\n", + "GM_e = 3986004.418e14\n", + "# Gravitational Constant of the Earth's atmosphere\n", + "GM_atm = 3.5e14\n", + "# Gravitational Constant of the Earth (w/o atm)\n", + "GM = GM_e - GM_atm\n", + "# standard gravitational acceleration (World Meteorological Organization)\n", + "g_wmo = 9.80665\n", + "\n", + "# Setting units factor for output\n", + "UNITS = unitsDropdown.index + 1\n", + "# dfactor computes the degree dependent coefficients\n", + "l = np.arange(0,LMAX+1)\n", + "if (UNITS == 1):\n", + " # 1: cmwe, centimeters water equivalent\n", + " dfactor = rho_e*rad_e*(2.0*l+1.0)/(1.0 +kl[l])/3.0\n", + "elif (UNITS == 2):\n", + " # 2: mmGH, millimeters geoid height\n", + " dfactor = np.ones(LMAX+1)*(10.0*rad_e)\n", + "elif (UNITS == 3):\n", + " # 3: mmCU, millimeters elastic crustal deformation\n", + " dfactor = 10.0*rad_e*hl[l]/(1.0 +kl[l])\n", + "elif (UNITS == 4):\n", + " # 4: micGal, microGal gravity perturbations\n", + " dfactor = 1.e6*GM*(l+1.0)/(rad_e**2.0)\n", + "elif (UNITS == 5):\n", + " # 5: mbar, millibar equivalent surface pressure\n", + " dfactor = g_wmo*rho_e*rad_e*(2.0*l+1.0)/(1.0 +kl[l])/30.0\n", + "\n", + "# output 
spatial grid\n", + "spatial = np.zeros((nlat,nlon,nt))\n", + "# converting harmonics to truncated, smoothed coefficients in units\n", + "# combining harmonics to calculate output spatial fields\n", + "for i,t in enumerate(tdec):\n", + " # spherical harmonics for time t\n", + " clm = np.zeros((LMAX+1,MMAX+1))\n", + " slm = np.zeros((LMAX+1,MMAX+1))\n", + " # GRACE/GRACE-FO harmonics for time t\n", + " if destripeCheckbox.value:\n", + " Ylms = destripe_harmonics(GRACE_Ylms['clm'][:,:,i], \n", + " GRACE_Ylms['slm'][:,:,i], LMAX=LMAX, MMAX=MMAX)\n", + " grace_clm = Ylms['clm'][:,:MMAX+1].copy()\n", + " grace_slm = Ylms['slm'][:,:MMAX+1].copy()\n", + " else:\n", + " grace_clm = GRACE_Ylms['clm'][:,:MMAX+1,i].copy()\n", + " grace_slm = GRACE_Ylms['slm'][:,:MMAX+1,i].copy()\n", + " for l in range(LMAX+1):# LMAX+1 to include LMAX\n", + " clm[l,:] = grace_clm[l,:]*dfactor[l]*wt[l]\n", + " slm[l,:] = grace_slm[l,:]*dfactor[l]*wt[l]\n", + " # convert spherical harmonics to output spatial grid\n", + " spatial[:,:,i] = combine_harmonics(clm, slm, glon, glat,\n", + " LMAX=LMAX, MMAX=MMAX, PLM=PLM).T\n", + "\n", + "# output to netCDF4 or HDF5\n", + "file_format = '{0}_{1}_{2}{3}{4}_{5}_L{6:d}{7}{8}{9}_{10:03d}-{11:03d}.{12}'\n", + "if (formatDropdown.value == 'netCDF4'):\n", + " args = (PROC,DREL,DSET,'',GRACE_Ylms['title'],unit_list[UNITS-1],\n", + " LMAX,order_str,gw_str,ds_str,months[0],months[-1],'nc')\n", + " FILE = os.path.join(GRACE_Ylms['directory'],file_format.format(*args))\n", + " ncdf_write(spatial, glon, glat, tdec, FILENAME=FILE,\n", + " VARNAME='z', LONNAME='lon', LATNAME='lat', TIMENAME='time',\n", + " UNITS=unit_list[UNITS-1], LONGNAME=unit_name[UNITS-1],\n", + " TIME_UNITS='year', TIME_LONGNAME='Date_in_Decimal_Years',\n", + " TITLE='GRACE/GRACE-FO Spatial Data', VERBOSE=True)\n", + "elif (formatDropdown.value == 'HDF5'):\n", + " args = (PROC,DREL,DSET,'',GRACE_Ylms['title'],unit_list[UNITS-1],\n", + " LMAX,order_str,gw_str,ds_str,months[0],months[-1],'H5')\n", 
+ " FILE = os.path.join(GRACE_Ylms['directory'],file_format.format(*args)) \n", + " hdf5_write(spatial, glon, glat, tdec, FILENAME=FILE,\n", + " VARNAME='z', LONNAME='lon', LATNAME='lat', TIMENAME='time',\n", + " UNITS=unit_list[UNITS-1], LONGNAME=unit_name[UNITS-1],\n", + " TIME_UNITS='year', TIME_LONGNAME='Date_in_Decimal_Years',\n", + " TITLE='GRACE/GRACE-FO Spatial Data', VERBOSE=True)" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Set parameters for creating animation" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "# slider for the plot min and max for normalization\n", + "vmin = np.min(spatial).astype(np.int)\n", + "vmax = np.ceil(np.max(spatial)).astype(np.int)\n", + "rangeSlider = widgets.IntRangeSlider(\n", + " value=[vmin,vmax],\n", + " min=vmin,\n", + " max=vmax,\n", + " step=1,\n", + " description='Plot Range:',\n", + " disabled=False,\n", + " continuous_update=False,\n", + " orientation='horizontal',\n", + " readout=True,\n", + ")\n", + "\n", + "# slider for steps in color bar\n", + "stepSlider = widgets.IntSlider(\n", + " value=1,\n", + " min=0,\n", + " max=vmax-vmin,\n", + " step=1,\n", + " description='Plot Step:',\n", + " disabled=False,\n", + " continuous_update=False,\n", + " orientation='horizontal',\n", + " readout=True,\n", + ")\n", + "\n", + "# all listed color maps in matplotlib version\n", + "cmap_set = set(plt.cm.datad.keys()) | set(plt.cm.cmaps_listed.keys())\n", + "# color maps available in this program\n", + "# (no reversed, qualitative or miscellaneous)\n", + "cmaps = {}\n", + "cmaps['Perceptually Uniform Sequential'] = ['viridis',\n", + " 'plasma','inferno','magma','cividis']\n", + "cmaps['Sequential'] = ['Greys','Purples','Blues','Greens',\n", + " 'Oranges','Reds','YlOrBr','YlOrRd','OrRd','PuRd','RdPu',\n", + " 'BuPu','GnBu','PuBu','YlGnBu','PuBuGn','BuGn','YlGn']\n", + "cmaps['Sequential (2)'] = ['binary','gist_yarg','gist_gray', 
\n", + " 'gray','bone','pink','spring','summer','autumn','winter',\n", + " 'cool','Wistia','hot','afmhot','gist_heat','copper']\n", + "cmaps['Diverging'] = ['PiYG','PRGn','BrBG','PuOr','RdGy','RdBu',\n", + " 'RdYlBu','RdYlGn','Spectral','coolwarm', 'bwr','seismic']\n", + "cmaps['Cyclic'] = ['twilight','twilight_shifted','hsv']\n", + "# create list of available color maps in program\n", + "cmap_list = []\n", + "for key,val in cmaps.items():\n", + " cmap_list.extend(val)\n", + "# reduce color maps to available in program and matplotlib\n", + "cmap_set &= set(cmap_list)\n", + "# dropdown menu for setting color map\n", + "cmapDropdown = widgets.Dropdown(\n", + " options=sorted(cmap_set),\n", + " value='viridis',\n", + " description='Color Map:',\n", + " disabled=False,\n", + ")\n", + "\n", + "# Reverse the color map\n", + "cmapCheckbox = widgets.Checkbox(\n", + " value=False,\n", + " description='Reverse Color Map',\n", + " disabled=False\n", + ")\n", + "\n", + "# display widgets for setting GRACE/GRACE-FO plot parameters\n", + "widgets.VBox([rangeSlider,stepSlider,cmapDropdown,cmapCheckbox])" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "### Create animation of GRACE/GRACE-FO months" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [ + "fig, ax1 = plt.subplots(num=1, nrows=1, ncols=1, figsize=(10.375,6.625),\n", + " subplot_kw=dict(projection=ccrs.PlateCarree()))\n", + "\n", + "# levels and normalization for plot range\n", + "vmin,vmax = rangeSlider.value\n", + "levels = np.arange(vmin,vmax+stepSlider.value,stepSlider.value)\n", + "norm = colors.Normalize(vmin=vmin,vmax=vmax)\n", + "cmap_reverse_flag = '_r' if cmapCheckbox.value else ''\n", + "cmap = plt.cm.get_cmap(cmapDropdown.value + cmap_reverse_flag)\n", + "im = ax1.imshow(np.zeros((nlat,nlon)), interpolation='nearest',\n", + " norm=norm, cmap=cmap, transform=ccrs.PlateCarree(),\n", + " extent=(-180,180,-90,90), 
origin='upper', animated=True)\n", + "ax1.coastlines('50m')\n", + "\n", + "# add date label (year-calendar month e.g. 2002-01)\n", + "time_text = ax1.text(0.025, 0.025, '', transform=fig.transFigure,\n", + " color='k', size=24, ha='left', va='baseline')\n", + "\n", + "# Add horizontal colorbar and adjust size\n", + "# extend = add extension triangles to upper and lower bounds\n", + "# options: neither, both, min, max\n", + "# pad = distance from main plot axis\n", + "# shrink = percent size of colorbar\n", + "# aspect = lengthXwidth aspect of colorbar\n", + "cbar = plt.colorbar(im, ax=ax1, extend='both', extendfrac=0.0375,\n", + " orientation='horizontal', pad=0.025, shrink=0.85,\n", + " aspect=22, drawedges=False)\n", + "# rasterized colorbar to remove lines\n", + "cbar.solids.set_rasterized(True)\n", + "# Add label to the colorbar\n", + "cbar.ax.set_xlabel(unit_name[UNITS-1], labelpad=10, fontsize=24)\n", + "cbar.ax.set_ylabel(unit_label[UNITS-1], fontsize=24, rotation=0)\n", + "cbar.ax.yaxis.set_label_coords(1.045, 0.1)\n", + "# Set the tick levels for the colorbar\n", + "cbar.set_ticks(levels)\n", + "cbar.set_ticklabels(['{0:d}'.format(ct) for ct in levels])\n", + "# ticks lines all the way across\n", + "cbar.ax.tick_params(which='both', width=1, length=26, labelsize=24,\n", + " direction='in')\n", + " \n", + "# stronger linewidth on frame\n", + "ax1.outline_patch.set_linewidth(2.0)\n", + "ax1.outline_patch.set_capstyle('projecting')\n", + "# adjust subplot within figure\n", + "fig.subplots_adjust(left=0.02,right=0.98,bottom=0.05,top=0.98)\n", + " \n", + "# animate frames\n", + "def animate_frames(i):\n", + " # set image\n", + " im.set_data(spatial[:,:,i])\n", + " # add date label (year-calendar month e.g. 
2002-01)\n", + " year = np.floor(tdec[i]).astype(np.int)\n", + " month = np.int((months[i]-1) % 12) + 1\n", + " time_text.set_text(u'{0:4d}\\u2013{1:02d}'.format(year,month))\n", + "\n", + "# set animation\n", + "anim = animation.FuncAnimation(fig, animate_frames, frames=nt)\n", + "%matplotlib inline\n", + "HTML(anim.to_jshtml())" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.6.9" + } + }, + "nbformat": 4, + "nbformat_minor": 4 +} diff --git a/README.md b/README.md index 9e7e1a0d..a0a6fbe6 100644 --- a/README.md +++ b/README.md @@ -6,8 +6,9 @@ read-GRACE-harmonics [![Binder](https://mybinder.org/badge_logo.svg)](https://mybinder.org/v2/gh/tsutterley/read-GRACE-harmonics/master) [![Binder](https://binder.pangeo.io/badge.svg)](https://binder.pangeo.io/v2/gh/tsutterley/read-GRACE-harmonics/master) -Reads Level-2 spherical harmonic coefficients from the NASA/DLR Gravity Recovery and Climate Experiment (GRACE) and the NASA/GFZ Gravity Recovery and Climate Experiment Follow-On (GRACE-FO) missions +Python tools for obtaining and working with Level-2 spherical harmonic coefficients from the NASA/DLR Gravity Recovery and Climate Experiment (GRACE) and the NASA/GFZ Gravity Recovery and Climate Experiment Follow-On (GRACE-FO) missions +#### Resources - [NASA GRACE mission site](http://www.nasa.gov/mission_pages/Grace/index.html) - [JPL GRACE Tellus site](http://grace.jpl.nasa.gov/) - [JPL GRACE-FO site](https://gracefo.jpl.nasa.gov/) @@ -15,38 +16,61 @@ Reads Level-2 spherical harmonic coefficients from the NASA/DLR Gravity Recovery - [GRACE at the 
NASA Physical Oceanography Distributed Active Archive Center (PO.DAAC)](https://podaac.jpl.nasa.gov/grace) - [GRACE at the GFZ Information System and Data Center](http://isdc.gfz-potsdam.de/grace-isdc/) -#### Calling Sequence -``` -from gravity_toolkit.read_GRACE_harmonics import read_GRACE_harmonics -CSR_L2_input = read_GRACE_harmonics('GSM-2_2002095-2002120_0021_UTCSR_0060_0005.gz',60) -GFZ_L2_input = read_GRACE_harmonics('GSM-2_2002094-2002120_0024_EIGEN_G---_005a.gz',90) -JPL_L2_input = read_GRACE_harmonics('GSM-2_2002091-2002120_0018_JPLEM_0001_0005.gz',60) -JPLMSC_input = read_GRACE_harmonics('GSM-2_2003001-2003031_0029_JPLMSC_0719_0005',719) -``` - -#### Inputs - 1. full path to input GRACE file - 2. spherical harmonic degree of truncation (`LMAX`) - -#### Options - - `MMAX`: spherical harmonic order of truncation (default is `LMAX`) - - `POLE_TIDE`: correct GSM data for pole tide drift following [Wahr et al. (2015)](https://doi.org/10.1002/2015JB011986) - -#### Outputs - - `time`: mid-month date of GRACE file in year-decimal - - `start`: start date of range as Julian day - - `end`: end date of range as Julian day - - `clm`: cosine spherical harmonics of input data - - `slm`: sine spherical harmonics of input data - - `eclm`: cosine spherical harmonic uncalibrated standard deviations - - `eslm`: sine spherical harmonic uncalibrated standard deviations - - `header`: text header of the GRACE file (will parse new YAML headers) +#### Programs +- [`aod1b_geocenter`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/aod1b_geocenter.md) - Creates monthly files of geocenter variations due to non-tidal atmospheric or oceanic variation at 6-hour intervals +- [`combine_harmonics`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/combine_harmonics.md) - Returns the spatial field for a series of spherical harmonics +- 
[`convert_calendar_decimal`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/convert_calendar_decimal.md) - Converts from calendar date into decimal years taking into account leap years +- [`convert_julian`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/convert_julian.md) - Return the calendar date and time given Julian date +- [`destripe_harmonics`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/destripe_harmonics.md) - Filters spherical harmonic coefficients for correlated "striping" errors +- [`gauss_weights`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/gauss_weights.md) - Computes the Gaussian weights as a function of degree +- [`geocenter`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/geocenter.md) - Converts degree 1 spherical harmonic coefficients to geocenter variations +- [`grace_date`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/grace_date.md) - Calculates dates of each GRACE/GRACE-FO file and assigns the month number +- [`grace_find_months`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/grace_find_months.md) - Finds the months available for a GRACE/GRACE-FO product +- [`grace_input_months`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/grace_input_months.md) - Reads GRACE/GRACE-FO files for a specified spherical harmonic degree and order and for a specified date range +- [`hdf5_read_stokes`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/hdf5_read_stokes.md) - Reads spherical harmonic data from HDF5 files +- [`hdf5_read`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/hdf5_read.md) - Reads spatial data from HDF5 files +- [`hdf5_stokes`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/hdf5_stokes.md) - Writes spherical harmonic data to HDF5 files +- 
[`hdf5_write`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/hdf5_write.md) - Writes spatial data to HDF5 files +- [`ncdf_read_stokes`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/ncdf_read_stokes.md) - Reads spherical harmonic data from netCDF4 files +- [`ncdf_read`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/ncdf_read.md) - Reads spatial data from netCDF4 files +- [`ncdf_stokes`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/ncdf_stokes.md) - Writes spherical harmonic data to netCDF4 files +- [`ncdf_write`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/ncdf_write.md) - Writes spatial data to netCDF4 files +- [`plm_colombo`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/plm_colombo.md) - Computes fully-normalized associated Legendre Polynomials using the Colombo (1981) recursion relation +- [`plm_holmes`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/plm_holmes.md) - Computes fully-normalized associated Legendre Polynomials using the Holmes and Featherstone (2002) recursion relation +- [`plm_mohlenkamp`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/plm_mohlenkamp.md) - Computes fully-normalized associated Legendre Polynomials using Martin Mohlenkamp's recursion relation +- [`read_CSR_monthly_6x1`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/read_CSR_monthly_6x1.md) - Reads the monthly low-degree spherical harmonic data files from satellite laser ranging (SLR) +- [`read_GRACE_harmonics`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/read_GRACE_harmonics.md) - Reads GRACE/GRACE-FO files and extracts spherical harmonic data +- [`read_love_numbers`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/read_love_numbers.md) - Reads sets of load Love numbers output from the Preliminary Reference Earth Model (PREM) +- 
[`read_SLR_C20`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/read_SLR_C20.md) - Reads monthly oblateness spherical harmonic data files from satellite laser ranging (SLR) +- [`read_SLR_C30`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/read_SLR_C30.md) - Reads monthly degree 3 zonal spherical harmonic data files from satellite laser ranging (SLR) +- [`read_SLR_geocenter`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/read_SLR_geocenter.md) - Reads monthly geocenter spherical harmonic data files from satellite laser ranging (SLR) +- [`read_tellus_geocenter`](https://github.com/tsutterley/read-GRACE-harmonics/blob/master/doc/read_tellus_geocenter.md) - Reads monthly geocenter spherical harmonic data files from GRACE Tellus Technical Notes #### Dependencies - - [numpy: Scientific Computing Tools For Python](http://www.numpy.org) - - [PyYAML: YAML parser and emitter for Python](https://github.com/yaml/pyyaml) - - [lxml: processing XML and HTML in Python](https://pypi.python.org/pypi/lxml) - - [future: Compatibility layer between Python 2 and Python 3](http://python-future.org/) +- [numpy: Scientific Computing Tools For Python](http://www.numpy.org) +- [PyYAML: YAML parser and emitter for Python](https://github.com/yaml/pyyaml) +- [lxml: processing XML and HTML in Python](https://pypi.python.org/pypi/lxml) +- [future: Compatibility layer between Python 2 and Python 3](http://python-future.org/) +- [matplotlib: Python 2D plotting library](http://matplotlib.org/) +- [cartopy: Python package designed for geospatial data processing](https://scitools.org.uk/cartopy/docs/latest/) +- [netCDF4: Python interface to the netCDF C library](https://unidata.github.io/netcdf4-python/) +- [h5py: Python interface for Hierarchical Data Format 5 (HDF5)](https://www.h5py.org/) +- [read-GRACE-geocenter: Python reader for GRACE/GRACE-FO geocenter data](https://github.com/tsutterley/read-GRACE-geocenter/) + +#### References +I. 
Velicogna, Y. Mohajerani, G. A, F. Landerer, J. Mouginot, B. Noël, +E. Rignot, T. C. Sutterley, M. van den Broeke, J. M. van Wessem, and D. Wiese, +"Continuity of ice sheet mass loss in Greenland and Antarctica from the GRACE +and GRACE Follow‐On missions", *Geophysical Research Letters*, 47, +(2020). [doi:10.1029/2020GL087291]( https://doi.org/10.1029/2020GL087291) + +T. C. Sutterley, I. Velicogna, and C.-W. Hsu, "Self‐Consistent Ice Mass Balance +and Regional Sea Level From Time‐Variable Gravity", *Earth and Space Science*, 7, +(2020). [doi:10.1029/2019EA000860](https://doi.org/10.1029/2019EA000860) + +T. C. Sutterley and I. Velicogna, "Improved estimates of geocenter variability +from time-variable gravity and ocean model outputs", *Remote Sensing*, 11(18), +2108, (2019). [doi:10.3390/rs11182108](https://doi.org/10.3390/rs11182108) #### Download The program homepage is: diff --git a/doc/aod1b_geocenter.md b/doc/aod1b_geocenter.md new file mode 100644 index 00000000..2404b033 --- /dev/null +++ b/doc/aod1b_geocenter.md @@ -0,0 +1,28 @@ +aod1b_geocenter.py +================== + +- Reads GRACE/GRACE-FO level-1b dealiasing data files for a specific product + - `atm`: atmospheric loading from ECMWF + - `ocn`: oceanic loading from OMCT/MPIOM + - `glo`: global atmospheric and oceanic loading + - `oba`: ocean bottom pressure from OMCT/MPIOM +- Creates monthly files of geocenter variations at 6-hour intervals + +#### Calling Sequence +``` +from gravity_toolkit.aod1b_geocenter import aod1b_geocenter +aod1b_geocenter(base_dir, DREL='RL06', DSET='glo', CLOBBER=True) +``` + +#### Inputs + 1. 
`base_dir`: working data directory + +#### Options + - `DREL`: GRACE/GRACE-FO data release (RL05 or RL06) + - `DSET`: GRACE/GRACE-FO dataset (atm, ocn, glo, oba) + - `CLOBBER`: overwrite existing data + - `MODE`: Permission mode of directories and files + - `VERBOSE`: Output information for each output file + +#### Dependencies + - `geocenter.py`: converts degree 1 spherical harmonic coefficients to geocenter variations diff --git a/doc/combine_harmonics.md b/doc/combine_harmonics.md new file mode 100644 index 00000000..64a6c2c6 --- /dev/null +++ b/doc/combine_harmonics.md @@ -0,0 +1,28 @@ +combine_harmonics.py +==================== + + - Returns the spatial field for a series of spherical harmonics + +#### Calling Sequence +``` +from gravity_toolkit.combine_harmonics import combine_harmonics +spatial = combine_harmonics(clm,slm,lon,lat,LMAX=60) +``` + +#### Inputs: + 1. `clm`: cosine spherical harmonic coefficients + 2. `slm`: sine spherical harmonic coefficients + 3. `lon`: longitude + 4. `lat`: latitude + +#### Options: + - `LMIN`: Lower bound of Spherical Harmonic Degrees + - `LMAX`: Upper bound of Spherical Harmonic Degrees + - `MMAX`: Upper bound of Spherical Harmonic Orders + - `PLM`: Fully-normalized associated Legendre polynomials + +#### Outputs: + - `spatial`: spatial field [lon,lat] + +#### Dependencies + - `plm_holmes.py`: Computes fully-normalized associated Legendre polynomials diff --git a/doc/convert_calendar_decimal.md b/doc/convert_calendar_decimal.md new file mode 100644 index 00000000..7114bc8d --- /dev/null +++ b/doc/convert_calendar_decimal.md @@ -0,0 +1,25 @@ +convert_calendar_decimal.py +=========================== + + - Converts from calendar date into decimal years taking into account leap years + +#### Calling Sequence +``` +from gravity_toolkit.convert_calendar_decimal import convert_calendar_decimal +t_date = convert_calendar_decimal(year, month, DAY=day, \ + HOUR=hour, MINUTE=minute, SECOND=second) +``` + +#### Inputs + 1. 
`year`: calendar year + 2. `month`: calendar month (1 = January, ..., 12 = December) + +#### Options + - `DAY`: Number of day of the month + - `HOUR`: hour of the day + - `MINUTE`: minute of the hour + - `SECOND`: second (and fractions of a second) of the minute + - `DofY`: day of the year (January 1 = 1) + +#### Outputs + - `t_date`: date in decimal format (years) diff --git a/doc/convert_julian.md b/doc/convert_julian.md new file mode 100644 index 00000000..090cb1db --- /dev/null +++ b/doc/convert_julian.md @@ -0,0 +1,28 @@ +convert_julian.py +================= + + - Return the calendar date and time given Julian date + +#### Calling Sequence +``` +from gravity_toolkit.convert_julian import convert_julian +YEAR,MONTH,DAY,HOUR,MINUTE,SECOND = convert_julian(JD, FORMAT='tuple') +``` + +#### Inputs + 1. `JD`: Julian Day of the specified calendar date (days since -4712-01-01T12:00:00) + +#### Options + - `ASTYPE`: convert output to variable type + - `FORMAT`: format of output variables + - `'dict'`: dictionary with variable keys as listed above + - `'tuple'`: tuple with variable order YEAR,MONTH,DAY,HOUR,MINUTE,SECOND + - `'zip'`: aggregated variable sets + +#### Outputs + - `year`: Number of the desired year + - `month`: Number of the desired month (1 = January, ..., 12 = December) + - `day`: Number of day of the month + - `hour`: hour of the day + - `minute`: minute of the hour + - `second`: second (and fractions of a second) of the minute diff --git a/doc/destripe_harmonics.md b/doc/destripe_harmonics.md new file mode 100644 index 00000000..6e09b82b --- /dev/null +++ b/doc/destripe_harmonics.md @@ -0,0 +1,25 @@ +destripe_harmonics.py +===================== + + - Filters spherical harmonic coefficients for correlated "striping" errors following [Swenson and Wahr (2006)](http://dx.doi.org/10.1029/2005GL025285) + +#### Calling Sequence +``` +from gravity_toolkit.destripe_harmonics import destripe_harmonics +Ylms = destripe_harmonics(clm,slm,LMAX=60) +``` + +#### 
Inputs + 1. `clm`: cosine spherical harmonic coefficients + 2. `slm`: sine spherical harmonic coefficients + +#### Options + - `LMIN`: Lower bound of Spherical Harmonic Degrees + - `LMAX`: Upper bound of Spherical Harmonic Degrees + - `MMAX`: Upper bound of Spherical Harmonic Orders + - `ROUND`: use round to find nearest even (True) or use floor (False) + - `NARROW`: Clm=Slm=0 if number of points is less than window size (False) + +#### Outputs + - `Wclm`: filtered cosine spherical harmonic coefficients + - `Wslm`: filtered sine spherical harmonic coefficients diff --git a/doc/gauss_weights.md b/doc/gauss_weights.md new file mode 100644 index 00000000..94f3ef06 --- /dev/null +++ b/doc/gauss_weights.md @@ -0,0 +1,18 @@ +gauss_weights.py +================ + + - Computes the Gaussian weights as a function of degree + - A normalized version of [Christopher Jekeli's Gaussian averaging function](http://www.geology.osu.edu/~jekeli.1/OSUReports/reports/report_327.pdf) + +#### Calling Sequence +``` +from gravity_toolkit.gauss_weights import gauss_weights +wl = 2.0*np.pi*gauss_weights(hw,LMAX) +``` + +#### Inputs + 1. `hw`: Gaussian smoothing radius in km + 2. `LMAX`: Upper bound of Spherical Harmonic Degrees + +#### Outputs + - `wl`: Gaussian weights for each degree `l` diff --git a/doc/geocenter.md b/doc/geocenter.md new file mode 100644 index 00000000..2ea19e72 --- /dev/null +++ b/doc/geocenter.md @@ -0,0 +1,22 @@ +geocenter.py +============ + + - Calculates the geocenter variation (in mm) from degree 1 Stokes Coefficients + - Calculates the Degree 1 Stokes Coefficients of a geocenter variation (in mm) + +#### Calling Sequence +``` +from gravity_toolkit.geocenter import geocenter +xyz = geocenter(C10=C10, C11=C11, S11=S11) +Ylms = geocenter(X=x, Y=y, Z=z, INVERSE=True) +``` + +#### Options + 1. `C10`: Cosine spherical harmonic of degree 1 and order 0 + 2. `C11`: Cosine spherical harmonic of degree 1 and order 1 + 3. 
`S11`: Sine spherical harmonic of degree 1 and order 1 + 4. `X`: X-component of geocenter variation + 5. `Y`: Y-component of geocenter variation + 6. `Z`: Z-component of geocenter variation + 7. `RADIUS`: Earth's radius for calculating spherical harmonics + 8. `INVERSE`: calculates the Stokes Coefficients from geocenter diff --git a/doc/grace_date.md b/doc/grace_date.md new file mode 100644 index 00000000..aed65d78 --- /dev/null +++ b/doc/grace_date.md @@ -0,0 +1,29 @@ +grace_date.py +============= + + - Finds GRACE/GRACE-FO index file from `podaac_grace_sync.py` or `gfz_isdc_grace_ftp.py` + - Reads dates of each GRACE/GRACE-FO file and assigns the month number + - Creates an index of dates for GRACE/GRACE-FO files + +#### Calling Sequence +``` +from gravity_toolkit.grace_date import grace_date +grace_files = grace_date(base_dir, PROC=PROC, DREL=DREL, DSET=DSET) +``` + +#### Inputs + 1. Working data directory for GRACE/GRACE-FO data + +#### Options + - `PROC`: GRACE data processing center (CSR/CNES/JPL/GFZ) + - `DREL`: GRACE data release + - `DSET`: GRACE dataset (GAA/GAB/GAC/GAD/GSM) + - `GAA` is the non-tidal atmospheric correction + - `GAB` is the non-tidal oceanic correction + - `GAC` is the combined non-tidal atmospheric and oceanic correction + - `GAD` is the GRACE ocean bottom pressure product + - `GSM` is corrected monthly GRACE/GRACE-FO static field product + - `OUTPUT`: create index of dates for GRACE/GRACE-FO data + +#### Outputs + - dictionary of files mapped by GRACE/GRACE-FO month diff --git a/doc/grace_find_months.md b/doc/grace_find_months.md new file mode 100644 index 00000000..155edb27 --- /dev/null +++ b/doc/grace_find_months.md @@ -0,0 +1,26 @@ +grace_find_months.py +==================== + + - Finds the months available for a GRACE/GRACE-FO product + - Finds all months missing from the product + +#### Calling Sequence +``` +from gravity_toolkit.grace_find_months import grace_find_months +grace_months = grace_find_months(base_dir, PROC, DREL, 
DSET=DSET) +``` + +#### Inputs + - `base_dir`: Working data directory for GRACE/GRACE-FO data + - `PROC`: GRACE/GRACE-FO data processing center (CSR, CNES, JPL, GFZ) + - `DREL`: GRACE/GRACE-FO data release (RL04, RL05, RL06) + +#### Options + - `DSET`: GRACE dataset (GSM, GAC, GAD, GAB, GAA) + +#### Outputs + - `start`: First month in a GRACE/GRACE-FO dataset + - `end`: Last month in a GRACE/GRACE-FO dataset + - `missing`: missing months in a GRACE/GRACE-FO dataset + - `months`: all available months in a GRACE/GRACE-FO dataset + - `time`: center dates of all available months in a GRACE/GRACE-FO dataset diff --git a/doc/grace_input_months.md b/doc/grace_input_months.md new file mode 100644 index 00000000..06656f06 --- /dev/null +++ b/doc/grace_input_months.md @@ -0,0 +1,57 @@ +grace_input_months.py +===================== + + - Reads GRACE/GRACE-FO files for a specified spherical harmonic degree and order and for a specified date range + - Replaces Degree 1 with input values (if specified) + - Replaces C20 with SLR values (if specified) + - Replaces C30 with SLR values for months 179+ (if specified) + - Corrects for ECMWF atmospheric "jumps" using the GAE, GAF and GAG files following [Fagiolini et al. (2015)](https://doi.org/10.1093/gji/ggv276) + - Corrects for Pole Tide drift following [Wahr et al. 
(2015)](https://doi.org/10.1002/2015JB011986) + - Removes a temporal average gravity field to get geopotential anomalies + +#### Calling Sequence +``` +from gravity_toolkit.grace_input_months import grace_input_months +GRACE_Ylms = grace_input_months(base_dir, PROC, DREL, DSET, LMAX, + start_mon, end_mon, missing, SLR_C20, DEG1, SLR_C30=SLR_C30) +``` + +#### Inputs + - `base_dir`: Working data directory for GRACE/GRACE-FO data + - `PROC`: GRACE/GRACE-FO data processing center (CSR, CNES, JPL, GFZ) + - `DREL`: GRACE/GRACE-FO data release (RL04, RL05, RL06) + - `DSET`: GRACE/GRACE-FO data product (GAA, GAB, GAC, GAD, GSM) + - `LMAX`: Upper bound of Spherical Harmonic Degrees + - `start_mon`: starting month to consider in analysis + - `end_mon`: ending month to consider in analysis + - `missing`: missing months to not consider in analysis + - `SLR_C20`: Replaces C20 with values from Satellite Laser Ranging (SLR) + - `None`: use original values + - `CSR`: use values from CSR (TN-07, TN-09, TN-11) + - `GSFC`: use values from GSFC (TN-14) + - `DEG1`: Use Degree 1 coefficients + - `None`: No degree 1 + - `Tellus`: [GRACE/GRACE-FO TN-13 coefficients from PO.DAAC](https://grace.jpl.nasa.gov/data/get-data/geocenter/) + - `SLR`: [Satellite laser ranging coefficients from CSR](ftp://ftp.csr.utexas.edu/pub/slr/geocenter/) + - `SLF`: [Sutterley and Velicogna coefficients, Remote Sensing (2019)](https://doi.org/10.6084/m9.figshare.7388540) + +#### Options + - `MMAX`: Upper bound of Spherical Harmonic Orders + - `SLR_C30`: Replaces C30 with values from Satellite Laser Ranging (SLR) + - `None`: use original values + - `CSR`: use values from CSR (5x5 with 6,1) + - `GSFC`: use values from GSFC (TN-14) + - `POLE_TIDE`: correct GSM data for pole tide drift + - `ATM`: correct data with ECMWF "jump" corrections GAE, GAF and GAG + - `MODEL_DEG1`: least-squares model missing degree 1 coefficients + - `DEG1_GIA`: GIA-correction used when calculating degree 1 coefficients + - `MEAN`: remove 
mean of harmonics + +#### Outputs + - `clm`: GRACE/GRACE-FO cosine spherical harmonics + - `slm`: GRACE/GRACE-FO sine spherical harmonics + - `time`: time of each GRACE measurement (mid-month) + - `month`: GRACE/GRACE-FO months of input datasets + - `title`: string denoting low degree zonals replacement, geocenter usage and corrections + - `mean`: mean spherical harmonic fields as a dictionary with fields clm/slm + - `directory`: directory of exact GRACE/GRACE-FO product diff --git a/doc/hdf5_read.md b/doc/hdf5_read.md new file mode 100644 index 00000000..18e888d3 --- /dev/null +++ b/doc/hdf5_read.md @@ -0,0 +1,31 @@ +hdf5_read.py +============ + + - Reads spatial data from HDF5 files + +#### Calling Sequence +``` +from gravity_toolkit.hdf5_read import hdf5_read +file_inp = hdf5_read(filename, DATE=True, VERBOSE=False) +``` + +#### Inputs + - `filename`: HDF5 file to be opened and read + +#### Options + - `DATE`: HDF5 file has date information + - `MISSING`: HDF5 variables have missing values + - `VERBOSE`: will print to screen the HDF5 structure parameters + - `VARNAME`: z variable name in HDF5 file + - `LONNAME`: longitude variable name in HDF5 file + - `LATNAME`: latitude variable name in HDF5 file + - `TIMENAME`: time variable name in HDF5 file + - `ATTRIBUTES`: HDF5 variables contain attribute parameters + - `TITLE`: HDF5 file contains description attribute parameter + +#### Outputs + - `data`: z value of dataset + - `lon`: longitudinal array + - `lat`: latitudinal array + - `time`: time value of dataset (if specified by DATE) + - `attributes`: HDF5 attributes (for variables and title) diff --git a/doc/hdf5_read_stokes.md b/doc/hdf5_read_stokes.md new file mode 100644 index 00000000..edcce45b --- /dev/null +++ b/doc/hdf5_read_stokes.md @@ -0,0 +1,29 @@ +hdf5_read_stokes.py +=================== + + - Reads spherical harmonic data from HDF5 files + +#### Calling Sequence +``` +from gravity_toolkit.hdf5_read_stokes import hdf5_read_stokes +file_inp = 
hdf5_read_stokes(filename, DATE=True, VERBOSE=False) +``` + +#### Inputs + - `filename`: HDF5 file to be opened and read + +#### Options + - `DATE`: HDF5 file has date information + - `VERBOSE`: will print to screen the HDF5 structure parameters + +#### Outputs + - `clm`: Cosine spherical harmonic coefficients + - `slm`: Sine spherical harmonic coefficients + - `l`: spherical harmonic degree + - `m`: spherical harmonic order + - `time`: time of measurement (if specified by DATE) + - `month`: GRACE/GRACE-FO month (if specified by DATE) + - `attributes`: HDF5 attributes for: + - spherical harmonics (`clm`,`slm`) + - variables (`l`,`m`,`time`,`month`) + - title diff --git a/doc/hdf5_stokes.md b/doc/hdf5_stokes.md new file mode 100644 index 00000000..36e06abb --- /dev/null +++ b/doc/hdf5_stokes.md @@ -0,0 +1,31 @@ +hdf5_stokes.py +============== + + - Writes spherical harmonic coefficients to HDF5 files + +#### Calling Sequence +``` +from gravity_toolkit.hdf5_stokes import hdf5_stokes +hdf5_stokes(clm, slm, linp, minp, tinp, month, FILENAME=output_HDF5_file) +``` + +#### Inputs + - `clm`: Cosine spherical harmonic coefficients + - `slm`: Sine spherical harmonic coefficients + - `linp`: spherical harmonic degree (l) + - `minp`: spherical harmonic order (m) + - `tinp`: date of measurement + - `month`: GRACE/GRACE-FO month + +#### Options + - `FILENAME`: output filename HDF5 + - `UNITS`: spherical harmonic units + - `TIME_UNITS`: time variable units + - `TIME_LONGNAME`: time variable description + - `MONTHS_NAME`: name of months variable within HDF5 file + - `MONTHS_UNITS`: months variable units + - `MONTHS_LONGNAME`: months variable description + - `TITLE`: title attribute of dataset + - `CLOBBER`: will overwrite an existing HDF5 file + - `VERBOSE`: will print to screen the HDF5 structure parameters + - `DATE`: harmonics have date information diff --git a/doc/hdf5_write.md b/doc/hdf5_write.md new file mode 100644 index 00000000..63c1ede7 --- /dev/null +++ 
b/doc/hdf5_write.md @@ -0,0 +1,30 @@ +hdf5_write.py +============= + + - Writes spatial data to HDF5 files + +#### Calling Sequence +``` +from gravity_toolkit.hdf5_write import hdf5_write +hdf5_write(data, lon, lat, tim, FILENAME=output_HDF5_file) +``` + +#### Inputs + - `data`: z data + - `lon`: longitude array + - `lat`: latitude array + - `tim`: time array + +#### Options + - `FILENAME`: output HDF5 filename + - `VARNAME`: z variable name in HDF5 file + - `LONNAME`: longitude variable name in HDF5 file + - `LATNAME`: latitude variable name in HDF5 file + - `UNITS`: z variable units + - `LONGNAME`: z variable description + - `FILL_VALUE`: missing value for z variable + - `TIME_UNITS`: time variable units + - `TIME_LONGNAME`: time variable description + - `TITLE`: title attribute of dataset + - `CLOBBER`: will overwrite an existing HDF5 file + - `VERBOSE`: will print to screen the HDF5 structure parameters diff --git a/doc/ncdf_read.md b/doc/ncdf_read.md new file mode 100644 index 00000000..07d837e4 --- /dev/null +++ b/doc/ncdf_read.md @@ -0,0 +1,31 @@ +ncdf_read.py +============ + + - Reads spatial data from COARDS-compliant netCDF4 files + +#### Calling Sequence +``` +from gravity_toolkit.ncdf_read import ncdf_read +file_inp = ncdf_read(filename, DATE=True, VERBOSE=False) +``` + +#### Inputs + - `filename`: netCDF4 file to be opened and read + +#### Options + - `DATE`: netCDF4 file has date information + - `MISSING`: netCDF4 variables have missing values + - `VERBOSE`: will print to screen the netCDF4 structure parameters + - `VARNAME`: z variable name in netCDF4 file + - `LONNAME`: longitude variable name in netCDF4 file + - `LATNAME`: latitude variable name in netCDF4 file + - `TIMENAME`: time variable name in netCDF4 file + - `ATTRIBUTES`: netCDF4 variables contain attribute parameters + - `TITLE`: netCDF4 file contains description attribute parameter + +#### Outputs + - `data`: z value of dataset + - `lon`: longitudinal array + - `lat`: latitudinal array 
+ - `time`: time value of dataset (if specified by DATE) + - `attributes`: netCDF4 attributes (for variables and title) diff --git a/doc/ncdf_read_stokes.md b/doc/ncdf_read_stokes.md new file mode 100644 index 00000000..6ad2089e --- /dev/null +++ b/doc/ncdf_read_stokes.md @@ -0,0 +1,29 @@ +ncdf_read_stokes.py +=================== + +- Reads spherical harmonic data from netCDF4 files + +#### Calling Sequence +``` +from gravity_toolkit.ncdf_read_stokes import ncdf_read_stokes +file_inp = ncdf_read_stokes(filename, DATE=True, VERBOSE=False) +``` + +#### Inputs + - `filename`: netCDF4 file to be opened and read + +#### Options + - `DATE`: netCDF4 file has date information + - `VERBOSE`: will print to screen the netCDF4 structure parameters + +#### Outputs + - `clm`: Cosine spherical harmonic coefficients + - `slm`: Sine spherical harmonic coefficients + - `l`: spherical harmonic degree + - `m`: spherical harmonic order + - `time`: time of measurement (if specified by DATE) + - `month`: GRACE/GRACE-FO month (if specified by DATE) + - `attributes`: netCDF4 attributes for: + - spherical harmonics (`clm`,`slm`) + - variables (`l`,`m`,`time`,`month`) + - title diff --git a/doc/ncdf_stokes.md b/doc/ncdf_stokes.md new file mode 100644 index 00000000..b847b421 --- /dev/null +++ b/doc/ncdf_stokes.md @@ -0,0 +1,31 @@ +ncdf_stokes.py +============== + + - Writes spherical harmonic coefficients to netCDF4 files + +#### Calling Sequence +``` +from gravity_toolkit.ncdf_stokes import ncdf_stokes +ncdf_stokes(clm, slm, linp, minp, tinp, month, FILENAME=output_netcdf4_file) +``` + +#### Inputs + - `clm`: Cosine spherical harmonic coefficients + - `slm`: Sine spherical harmonic coefficients + - `linp`: spherical harmonic degree (l) + - `minp`: spherical harmonic order (m) + - `tinp`: date of measurement + - `month`: GRACE/GRACE-FO month + +#### Options + - `FILENAME`: output filename netCDF4 + - `UNITS`: spherical harmonic units + - `TIME_UNITS`: time variable units + - `TIME_LONGNAME`: 
time variable description + - `MONTHS_NAME`: name of months variable within netCDF4 file + - `MONTHS_UNITS`: months variable units + - `MONTHS_LONGNAME`: months variable description + - `TITLE`: title attribute of dataset + - `CLOBBER`: will overwrite an existing netCDF4 file + - `VERBOSE`: will print to screen the netCDF4 structure parameters + - `DATE`: harmonics have date information diff --git a/doc/ncdf_write.md b/doc/ncdf_write.md new file mode 100644 index 00000000..2dc0511a --- /dev/null +++ b/doc/ncdf_write.md @@ -0,0 +1,30 @@ +ncdf_write.py +============= + + - Writes spatial data to COARDS-compliant NetCDF4 files + +#### Calling Sequence +``` +from gravity_toolkit.ncdf_write import ncdf_write +ncdf_write(data, lon, lat, tim, FILENAME=output_netcdf4_file) +``` + +#### Inputs + - `data`: z data + - `lon`: longitude array + - `lat`: latitude array + - `tim`: time array + +#### Options + - `FILENAME`: output netCDF4 filename + - `VARNAME`: z variable name in netCDF4 file + - `LONNAME`: longitude variable name in netCDF4 file + - `LATNAME`: latitude variable name in netCDF4 file + - `UNITS`: z variable units + - `LONGNAME`: z variable description + - `FILL_VALUE`: missing value for z variable + - `TIME_UNITS`: time variable units + - `TIME_LONGNAME`: time variable description + - `TITLE`: title attribute of dataset + - `CLOBBER`: will overwrite an existing netCDF4 file + - `VERBOSE`: will print to screen the netCDF4 structure parameters diff --git a/doc/plm_colombo.md b/doc/plm_colombo.md new file mode 100644 index 00000000..0d230717 --- /dev/null +++ b/doc/plm_colombo.md @@ -0,0 +1,22 @@ +plm_colombo.py +============== + + - Computes fully-normalized associated Legendre Polynomials for a vector of x values using a standard forward column method + - Uses the Colombo (1981) recursion relation listed in the [Geoid Cookbook](http://mitgcm.org/~mlosch/geoidcookbook.pdf) and [Holmes-Featherstone (2002)](https://doi.org/10.1007/s00190-002-0216-2) + +#### Calling 
Sequence +``` +from gravity_toolkit.plm_colombo import plm_colombo +plm,dplm = plm_colombo(LMAX, x) +``` + +#### Inputs + - `LMAX`: Upper bound of Spherical Harmonic Degrees + - `x`: typically cos(theta), where theta is the colatitude in radians + +#### Options + - `ASTYPE`: output variable type. Default is 64-bit floating point + +#### Outputs + - `plms`: Legendre polynomials of x (geodesy normalization) + - `dplms`: first differentials of Legendre polynomials of x diff --git a/doc/plm_holmes.md b/doc/plm_holmes.md new file mode 100644 index 00000000..4a670ffa --- /dev/null +++ b/doc/plm_holmes.md @@ -0,0 +1,22 @@ +plm_holmes.py +============= + + - Computes fully-normalized associated Legendre Polynomials for a vector of x values using the [Holmes and Featherstone (2002)](https://doi.org/10.1007/s00190-002-0216-2) recursion relation + - Recursion relation is stable up to very high degree and order + +#### Calling Sequence +``` +from gravity_toolkit.plm_holmes import plm_holmes +plm,dplm = plm_holmes(LMAX, x) +``` + +#### Inputs + - `LMAX`: Upper bound of Spherical Harmonic Degrees + - `x`: typically cos(theta), where theta is the colatitude in radians + +#### Options + - `ASTYPE`: output variable type. 
Default is 64-bit floating point + +#### Outputs + - `plms`: Legendre polynomials of x (geodesy normalization) + - `dplms`: first differentials of Legendre polynomials of x diff --git a/doc/plm_mohlenkamp.md b/doc/plm_mohlenkamp.md new file mode 100644 index 00000000..4079ccdf --- /dev/null +++ b/doc/plm_mohlenkamp.md @@ -0,0 +1,22 @@ +plm_mohlenkamp.py +================= + + - Computes fully-normalized associated Legendre Polynomials for a vector of x values using Martin Mohlenkamp's recursion relation as listed in his [Guide to Spherical Harmonics](http://www.ohio.edu/people/mohlenka/research/uguide.pdf) + - Derived from [Gabor Szegö (1939)](https://people.math.osu.edu/nevai.1/AT/SZEGO/szego=szego1975=ops=OCR.pdf) recurrence formula for Jacobi Polynomials (Pg 71) + + +#### Calling Sequence +``` +from gravity_toolkit.plm_mohlenkamp import plm_mohlenkamp +plm = plm_mohlenkamp(LMAX, x) +``` + +#### Inputs + - `LMAX`: Upper bound of Spherical Harmonic Degrees + - `x`: typically cos(theta), where theta is the colatitude in radians + +#### Options + - `MMAX`: Upper bound of Spherical Harmonic Orders (default = LMAX) + +#### Outputs + - `plms`: Legendre polynomials of x (geodesy normalization) diff --git a/doc/read_CSR_monthly_6x1.md b/doc/read_CSR_monthly_6x1.md new file mode 100644 index 00000000..6af93324 --- /dev/null +++ b/doc/read_CSR_monthly_6x1.md @@ -0,0 +1,24 @@ +read_CSR_monthly_6x1.py +======================= + + - Reads in monthly 5x5 spherical harmonic coefficients with 1 coefficient from degree 6 all calculated from satellite laser ranging (SLR) measurements calculated by the [University of Texas Center for Space Research (CSR)](https://doi.org/10.1029/2010JB000850) + +#### Calling Sequence +``` +from gravity_toolkit.read_CSR_monthly_6x1 import read_CSR_monthly_6x1 +Ylms = read_CSR_monthly_6x1(input_file) +``` + +#### Inputs + - `input_file`: input satellite laser ranging file + +#### Options + - `HEADER`: file contains header text to be skipped (default: 
True) + +#### Outputs + - `clm`: Cosine spherical harmonic coefficients + - `slm`: Sine spherical harmonic coefficients + - `error/clm`: Cosine spherical harmonic coefficient uncertainty + - `error/slm`: Sine spherical harmonic coefficients uncertainty + - `MJD`: output date as Modified Julian Day + - `date`: output date in year-decimal diff --git a/doc/read_GRACE_harmonics.md b/doc/read_GRACE_harmonics.md new file mode 100644 index 00000000..6f125067 --- /dev/null +++ b/doc/read_GRACE_harmonics.md @@ -0,0 +1,34 @@ +read_GRACE_harmonics.py +======================= + + - Reads GRACE/GRACE-FO files and extracts spherical harmonic data and drift rates (RL04) + - Adds drift rates to clm and slm for release 4 harmonics + - Correct GSM data for drift in pole tide following Wahr et al. (2015) + - Extracts date of GRACE/GRACE-FO files and calculates mean of range + +#### Calling Sequence +``` +from gravity_toolkit.read_GRACE_harmonics import read_GRACE_harmonics +CSR_L2_input = read_GRACE_harmonics('GSM-2_2002095-2002120_0021_UTCSR_0060_0005.gz',60) +GFZ_L2_input = read_GRACE_harmonics('GSM-2_2002094-2002120_0024_EIGEN_G---_005a.gz',90) +JPL_L2_input = read_GRACE_harmonics('GSM-2_2002091-2002120_0018_JPLEM_0001_0005.gz',60) +JPLMSC_input = read_GRACE_harmonics('GSM-2_2003001-2003031_0029_JPLMSC_0719_0005',719) +``` + +#### Inputs + 1. full path to input GRACE file + 2. spherical harmonic degree of truncation (`LMAX`) + +#### Options + - `MMAX`: spherical harmonic order of truncation (default is `LMAX`) + - `POLE_TIDE`: correct GSM data for pole tide drift following [Wahr et al. 
(2015)](https://doi.org/10.1002/2015JB011986) + +#### Outputs + - `time`: mid-month date of GRACE file in year-decimal + - `start`: start date of range as Julian day + - `end`: end date of range as Julian day + - `clm`: cosine spherical harmonics of input data + - `slm`: sine spherical harmonics of input data + - `eclm`: cosine spherical harmonic uncalibrated standard deviations + - `eslm`: sine spherical harmonic uncalibrated standard deviations + - `header`: text header of the GRACE file (will parse new YAML headers) diff --git a/doc/read_SLR_C20.md b/doc/read_SLR_C20.md new file mode 100644 index 00000000..da178de8 --- /dev/null +++ b/doc/read_SLR_C20.md @@ -0,0 +1,28 @@ +read_SLR_C20.py +=============== + + - Reads monthly oblateness (degree 2 zonal) spherical harmonic data files from satellite laser ranging (SLR) + +#### Calling Sequence +``` +from gravity_toolkit.read_SLR_C20 import read_SLR_C20 +SLR_C20 = read_SLR_C20(SLR_file) +``` + +#### Inputs + - `SLR_file`: oblateness file from satellite laser ranging + - RL04: TN-05_C20_SLR.txt + - RL05: TN-07_C20_SLR.txt + - RL06: TN-11_C20_SLR.txt + - CSR: C20_RL05.txt + - GSFC: TN-14_C30_C30_GSFC_SLR.txt + +#### Options + - `HEADER`: file contains header text to be skipped (default: True) + - `AOD`: remove background De-aliasing product from the SLR solution (for CSR) + +#### Outputs + - `data`: cosine degree 2 order 0 spherical harmonic coefficients (C20) + - `error`: cosine degree 2 order 0 spherical harmonic coefficient errors (eC20) + - `month`: GRACE/GRACE-FO month of measurement + - `time`: date of SLR measurement diff --git a/doc/read_SLR_C30.md b/doc/read_SLR_C30.md new file mode 100644 index 00000000..d92008cf --- /dev/null +++ b/doc/read_SLR_C30.md @@ -0,0 +1,26 @@ +read_SLR_C30.py +=============== + + - Reads monthly degree 3 zonal spherical harmonic data files from satellite laser ranging (SLR) + +#### Calling Sequence +``` +from gravity_toolkit.read_SLR_C30 import read_SLR_C30 +SLR_C30 = 
read_SLR_C30(SLR_file) +``` + +#### Inputs + - `SLR_file`: low degree zonal file from satellite laser ranging + - CSR: CSR_Monthly_5x5_Gravity_Harmonics.txt + - GSFC: TN-14_C30_C30_GSFC_SLR.txt + - LARES: C30_LARES_filtered.txt + +#### Options + - `HEADER`: file contains header text to be skipped (default: True) + - `C30_MEAN`: mean C30 to add to LARES C30 anomalies + +#### Outputs + - `data`: cosine degree 3 order 0 spherical harmonic coefficients (C30) + - `error`: cosine degree 3 order 0 spherical harmonic coefficient errors (eC30) + - `month`: GRACE/GRACE-FO month of measurement + - `time`: date of SLR measurement diff --git a/doc/read_SLR_geocenter.md b/doc/read_SLR_geocenter.md new file mode 100644 index 00000000..191d6db6 --- /dev/null +++ b/doc/read_SLR_geocenter.md @@ -0,0 +1,31 @@ +read_SLR_geocenter.py +===================== + + - Reads monthly geocenter spherical harmonic data files from [satellite laser ranging (SLR)](ftp://ftp.csr.utexas.edu/pub/slr/geocenter/) + +#### Calling Sequence +``` +from gravity_toolkit.read_SLR_geocenter import read_SLR_geocenter +deg1_input = read_SLR_geocenter(geocenter_file) +``` + +#### Inputs + - `geocenter_file`: degree 1 file + - RL04: GCN_RL04.txt + - RL05: GCN_RL05.txt + - RL06: GCN_RL06.txt + - CF-CM: GCN_L1_L2_30d_CF-CM.txt + +#### Options + - `RADIUS`: Earth's radius for calculating spherical harmonics from SLR data + - `skiprows`: Rows of data to skip when importing data + +#### Outputs + - `C10`: Cosine degree 1, order 0 spherical harmonic coefficients + - `C11`: Cosine degree 1, order 1 spherical harmonic coefficients + - `S11`: Sine degree 1, order 1 spherical harmonic coefficients + - `eC10`: Cosine degree 1, order 0 spherical harmonic coefficients Error + - `eC11`: Cosine degree 1, order 1 spherical harmonic coefficients Error + - `eS11`: Sine degree 1, order 1 spherical harmonic coefficients Error + - `month`: GRACE/GRACE-FO month (Apr 2002 = 004) + - `time`: date of GRACE/GRACE-FO month in decimal format 
diff --git a/doc/read_love_numbers.md b/doc/read_love_numbers.md new file mode 100644 index 00000000..f7c8f54b --- /dev/null +++ b/doc/read_love_numbers.md @@ -0,0 +1,25 @@ +read_love_numbers.py +==================== + + - Reads sets of load Love numbers computed using outputs from the Preliminary Reference Earth Model (PREM) as described by [Han and Wahr (1995)](https://10.1111/j.1365-246X.1995.tb01819.x) + +#### Calling Sequence +``` +from gravity_toolkit.read_love_numbers import read_love_numbers +hl,kl,ll = read_love_numbers(love_numbers_file, FORMAT='tuple') +``` + +#### Inputs + - `love_numbers_file`: Elastic load Love numbers file + +#### Options + - `HEADER`: file contains header text to be skipped (default: True) + - `FORMAT`: format of output variables + - `'dict'`: dictionary with variable keys as listed above + - `'tuple'`: tuple with variable order hl,kl,ll + - `'zip'`: aggregated variable sets + +#### Outputs + - `kl`: Love number of Gravitational Potential + - `hl`: Love number of Vertical Displacement + - `ll`: Love number of Horizontal Displacement diff --git a/doc/read_tellus_geocenter.md b/doc/read_tellus_geocenter.md new file mode 100644 index 00000000..b125c638 --- /dev/null +++ b/doc/read_tellus_geocenter.md @@ -0,0 +1,30 @@ +read_tellus_geocenter.py +======================== + +- Reads monthly geocenter spherical harmonic data files from [GRACE Tellus Technical Notes (TN-13)](https://podaac-tools.jpl.nasa.gov/drive/files/allData/tellus/L2/degree_1) calculated following [Swenson et al. 
(2008)](https://doi.org/10.1029/2007JB005338) + +#### Calling Sequence +``` +from gravity_toolkit.read_tellus_geocenter import read_tellus_geocenter +deg1_input = read_tellus_geocenter(geocenter_file, JPL=True) +``` + +#### Inputs + - `geocenter_file`: degree 1 file + - CSR: TN-13_GEOC_CSR_RL06.txt + - GFZ: TN-13_GEOC_GFZ_RL06.txt + - JPL: TN-13_GEOC_JPL_RL06.txt + +#### Options + - `HEADER`: file contains header text to be skipped (default: True) + - `JPL`: use JPL TN-13 geocenter files calculated following [Sun et al., (2016)](https://doi.org/10.1007/s00190-015-0852-y) + +#### Outputs + - `C10`: Cosine degree 1, order 0 spherical harmonic coefficients + - `C11`: Cosine degree 1, order 1 spherical harmonic coefficients + - `S11`: Sine degree 1, order 1 spherical harmonic coefficients + - `eC10`: Cosine degree 1, order 0 spherical harmonic coefficients Error + - `eC11`: Cosine degree 1, order 1 spherical harmonic coefficients Error + - `eS11`: Sine degree 1, order 1 spherical harmonic coefficients Error + - `month`: GRACE/GRACE-FO month (Apr 2002 = 004) + - `time`: date of GRACE/GRACE-FO month in decimal format diff --git a/environment.yml b/environment.yml index c55c0fc1..1f28a130 100644 --- a/environment.yml +++ b/environment.yml @@ -8,3 +8,9 @@ dependencies: - pyyaml - lxml - future + - matplotlib + - cartopy + - netCDF4 + - h5py + - pip: + - git+https://github.com/tsutterley/read-GRACE-geocenter.git diff --git a/gravity_toolkit/__init__.py b/gravity_toolkit/__init__.py index 7f16252f..54018e6b 100644 --- a/gravity_toolkit/__init__.py +++ b/gravity_toolkit/__init__.py @@ -1 +1,28 @@ +from gravity_toolkit.convert_julian import convert_julian +from gravity_toolkit.convert_calendar_decimal import convert_calendar_decimal +from gravity_toolkit.grace_date import grace_date +from gravity_toolkit.grace_find_months import grace_find_months +from gravity_toolkit.grace_input_months import grace_input_months +from gravity_toolkit.geocenter import geocenter +from 
gravity_toolkit.aod1b_geocenter import aod1b_geocenter +from gravity_toolkit.read_tellus_geocenter import read_tellus_geocenter +from gravity_toolkit.read_SLR_geocenter import read_SLR_geocenter +from gravity_toolkit.read_SLR_C20 import read_SLR_C20 +from gravity_toolkit.read_SLR_C30 import read_SLR_C30 +from gravity_toolkit.read_CSR_monthly_6x1 import read_CSR_monthly_6x1 from gravity_toolkit.read_GRACE_harmonics import read_GRACE_harmonics +from gravity_toolkit.read_love_numbers import read_love_numbers +from gravity_toolkit.destripe_harmonics import destripe_harmonics +from gravity_toolkit.gauss_weights import gauss_weights +from gravity_toolkit.plm_colombo import plm_colombo +from gravity_toolkit.plm_holmes import plm_holmes +from gravity_toolkit.plm_mohlenkamp import plm_mohlenkamp +from gravity_toolkit.combine_harmonics import combine_harmonics +from gravity_toolkit.ncdf_read import ncdf_read +from gravity_toolkit.hdf5_read import hdf5_read +from gravity_toolkit.ncdf_write import ncdf_write +from gravity_toolkit.hdf5_write import hdf5_write +from gravity_toolkit.ncdf_read_stokes import ncdf_read_stokes +from gravity_toolkit.hdf5_read_stokes import hdf5_read_stokes +from gravity_toolkit.ncdf_stokes import ncdf_stokes +from gravity_toolkit.hdf5_stokes import hdf5_stokes diff --git a/gravity_toolkit/aod1b_geocenter.py b/gravity_toolkit/aod1b_geocenter.py new file mode 100644 index 00000000..abec622a --- /dev/null +++ b/gravity_toolkit/aod1b_geocenter.py @@ -0,0 +1,261 @@ +#!/usr/bin/env python +u""" +aod1b_geocenter.py +Written by Tyler Sutterley (06/2019) + +Reads GRACE/GRACE-FO level-1b dealiasing data files for a specific product + atm: atmospheric loading from ECMWF + ocn: oceanic loading from OMCT/MPIOM + glo: global atmospheric and oceanic loading + oba: ocean bottom pressure from OMCT/MPIOM + +Creates monthly files of geocenter variations at 6-hour intervals + +NOTE: this reads the GFZ AOD1B files downloaded from PO.DAAC 
+https://podaac-uat.jpl.nasa.gov/drive/files/allData/grace/L1B/GFZ/AOD1B/RL06/ + +CALLING SEQUENCE: + aod1b_geocenter(base_dir, DREL='RL06', DSET='glo', CLOBBER=True) + +INPUTS: + base_dir: working data directory + +OPTIONS: + DREL: GRACE/GRACE-FO data release (RL05 or RL06) + DSET: GRACE/GRACE-FO dataset (atm, ocn, glo, oba) + CLOBBER: overwrite existing data + MODE: Permission mode of directories and files + VERBOSE: Output information for each output file + +COMMAND LINE OPTIONS: + -D X, --directory=X: Working Data Directory + -R X, --release=X: GRACE Data Release (RL05 or RL06) + -C, --clobber: Overwrite existing data + -M X, --mode=X: Permission mode of directories and files + -V, --verbose: Output information for each output file + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + +PROGRAM DEPENDENCIES: + geocenter.py: converts degree 1 Stokes Coefficients to geocenter variations + +UPDATED HISTORY: + Updated 06/2019: using python3 compatible regular expression patterns + Updated 10/2018: using future division for python3 Compatibility + Updated 08/2018: using full release string (RL05 instead of 5) + Updated 06/2018: can read RL06 AOD1B files from PO.DAAC + Updated 03/2018: can read tar files from GFZ_AOD1b_sync.py + Updated 04/2017: slight modifications to the regular expression patterns + to verify that the suffix is the end of a given filename (no .xml files) + added more significant digits to match spherical harmonic precision + Updated 02/2017: using getopt to set parameters and data directory + do not extract tar files to temp, extract contents of files to memory + Updated 05-06/2016: oba=ocean bottom pressure, absolute import of shutil + Written 05/2016 +""" +from __future__ import print_function, division + +import sys +import os +import re +import gzip +import getopt +import tarfile +import numpy as np +from gravity_toolkit.geocenter import geocenter + +#-- aod1b data products +product = {} +product['atm'] 
= 'Atmospheric loading from ECMWF' +product['ocn'] = 'Oceanic loading from OMCT' +product['glo'] = 'Global atmospheric and oceanic loading' +product['oba'] = 'Ocean bottom pressure from OMCT' + +#-- program module to read the degree 1 coefficients of the AOD1b data +def aod1b_geocenter(base_dir, DREL='', DSET='', CLOBBER=False, MODE=0o775, + VERBOSE=False): + + #-- compile regular expressions operators for file dates + #-- will extract the year and month from the tar file (.tar.gz) + tx = re.compile(r'AOD1B_(\d+)-(\d+)_\d+\.(tar\.gz|tgz)$', re.VERBOSE) + #-- and the calendar day from the ascii file (.asc or gzipped .asc.gz) + fx = re.compile(r'AOD1B_\d+-\d+-(\d+)_X_\d+.asc(.gz)?$', re.VERBOSE) + #-- compile regular expressions operator for the clm/slm headers + #-- for the specific AOD1b product + hx = re.compile(r'^DATA.*SET.*{0}'.format(DSET), re.VERBOSE) + #-- compile regular expression operator to find numerical instances + #-- will extract the data from the file + regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?' 
+ rx = re.compile(regex_pattern, re.VERBOSE) + + #-- Maximum spherical harmonic degree (LMAX) + LMAX = 100 + #-- Calculating the number of cos and sin harmonics up to LMAX + n_harm = (LMAX**2 + 3*LMAX)//2 + 1 + + #-- AOD1B directory and output geocenter directory + grace_dir = os.path.join(base_dir,'AOD1B',DREL) + output_dir = os.path.join(grace_dir,'geocenter') + if not os.access(output_dir, os.F_OK): + os.mkdir(output_dir, MODE) + + #-- finding all of the tar files in the AOD1b directory + input_tar_files = [tf for tf in os.listdir(grace_dir) if tx.match(tf)] + + #-- for each tar file + for i in sorted(input_tar_files): + #-- extract the year and month from the file + YY,MM,SFX = tx.findall(i).pop() + YY,MM = np.array([YY,MM], dtype=np.int) + #-- output monthly geocenter file + FILE = 'AOD1B_{0}_{1}_{2:4d}_{3:02d}.txt'.format(DREL,DSET,YY,MM) + #-- if output file exists: check if input tar file is newer + TEST = False + OVERWRITE = ' (clobber)' + #-- check if output file exists + if os.access(os.path.join(output_dir,FILE), os.F_OK): + #-- check last modification time of input and output files + input_mtime = os.stat(os.path.join(grace_dir,i)).st_mtime + output_mtime = os.stat(os.path.join(output_dir,FILE)).st_mtime + #-- if input tar file is newer: overwrite the output file + if (input_mtime > output_mtime): + TEST = True + OVERWRITE = ' (overwrite)' + else: + TEST = True + OVERWRITE = ' (new)' + #-- As there are so many files.. 
this will only read the new files + #-- or will rewrite if CLOBBER is set (if wanting something changed) + if TEST or CLOBBER: + #-- if verbose: output information about the geocenter file + if VERBOSE: + print('{0}{1}'.format(os.path.join(output_dir,FILE),OVERWRITE)) + #-- open output monthly geocenter file + f = open(os.path.join(output_dir,FILE), 'w') + args = ('Geocenter time series',DREL,DSET) + print('# {0} from {1} AOD1b {2} Product'.format(*args), file=f) + print('# {0}'.format(product[DSET]), file=f) + args = ('ISO-Time','X','Y','Z') + print('# {0:^15} {1:^12} {2:^12} {3:^12}'.format(*args), file=f) + + #-- open the AOD1B monthly tar file + tar = tarfile.open(name=os.path.join(grace_dir,i), mode='r:gz') + + #-- Iterate over every member within the tar file + for member in tar.getmembers(): + #-- get calendar day from file + DD,SFX = fx.findall(member.name).pop() + DD = np.int(DD) + #-- open data file for day + if (SFX == '.gz'): + fid = gzip.GzipFile(fileobj=tar.extractfile(member)) + else: + fid = tar.extractfile(member) + #-- degree 1 spherical harmonics for day and hours + C10 = np.zeros((4)) + C11 = np.zeros((4)) + S11 = np.zeros((4)) + hours = np.zeros((4),dtype=np.int) + + #-- create counter for hour in dataset + c = 0 + #-- while loop ends when dataset is read + while (c < 4): + #-- read line + file_contents = fid.readline().decode('ISO-8859-1') + #-- find file header for data product + if bool(hx.search(file_contents)): + #-- extract hour from header and convert to float + HH, = re.findall(r'(\d+):\d+:\d+',file_contents) + hours[c] = np.int(HH) + #-- read each line of spherical harmonics + for k in range(0,n_harm): + file_contents = fid.readline().decode('ISO-8859-1') + #-- find numerical instances in the data line + line_contents = rx.findall(file_contents) + #-- spherical harmonic degree and order + l1 = np.int(line_contents[0]) + m1 = np.int(line_contents[1]) + if (l1 == 1) and (m1 == 0): + C10[c] = np.float(line_contents[2]) + elif (l1 == 1) 
and (m1 == 1): + C11[c] = np.float(line_contents[2]) + S11[c] = np.float(line_contents[3]) + #-- add 1 to hour counter + c += 1 + #-- close the input file for day + fid.close() + #-- convert from spherical harmonics into geocenter + XYZ = geocenter(C10=C10, C11=C11, S11=S11) + #-- write to file for each hour (iterates each 6-hour block) + for h,X,Y,Z in zip(hours,XYZ['x'],XYZ['y'],XYZ['z']): + print(('{0:4d}-{1:02d}-{2:02d}T{3:02d}:00:00 {4:12.8f} ' + '{5:12.8f} {6:12.8f}').format(YY,MM,DD,h,X,Y,Z), file=f) + + #-- close the tar file + tar.close() + #-- close the output file + f.close() + #-- set the permissions mode of the output file + os.chmod(os.path.join(output_dir,FILE), MODE) + +#-- PURPOSE: help module to describe the optional input parameters +def usage(): + print('\nHelp: {}'.format(os.path.basename(sys.argv[0]))) + print(' -D X, --directory=X\tWorking Data Directory') + print(' -R X, --release=X\tGRACE Data Release') + print(' -C, --clobber\t\tOverwrite existing data') + print(' -M X, --mode=X\t\tPermission mode of directories and files') + print(' -V, --verbose\t\tOutput information for each output file\n') + +#-- Main program that calls aod1b_geocenter() +def main(): + #-- Read the system arguments listed after the program + long_options = ['help','directory=','release=','clobber','mode=','verbose'] + optlist,arglist = getopt.getopt(sys.argv[1:],'hD:R:CM:V',long_options) + + #-- working data directory + base_dir = os.getcwd() + #-- Data release + DREL = 'RL06' + #-- clobber will overwrite the existing data + CLOBBER = False + #-- permissions mode of the local directories and files (number in octal) + MODE = 0o775 + #-- verbose will output information about each output file + VERBOSE = False + for opt, arg in optlist: + if opt in ('-h','--help'): + usage() + sys.exit() + elif opt in ("-D","--directory"): + base_dir = os.path.expanduser(arg) + elif opt in ("-R","--release"): + DREL = arg + elif opt in ("-C","--clobber"): + CLOBBER = True + elif opt in 
("-M","--mode"): + MODE = int(arg, 8) + elif opt in ("-V","--verbose"): + VERBOSE = True + + #-- check that DSET was entered as system argument + if not arglist: + raise Exception('No System Arguments Listed') + + for DSET in arglist: + #-- verify case + DSET = DSET.lower() if DSET.isupper() else DSET + #-- check that DSET was correctly entered + if DSET not in product.keys(): + for key,val in product.items(): + print('{0}: {1}'.format(key, val)) + raise ValueError('Incorrect Data Product Entered (atm,glo,ocn,oba)') + #-- run AOD1b geocenter program with parameters + aod1b_geocenter(base_dir,DREL=DREL,DSET=DSET,CLOBBER=CLOBBER,MODE=MODE, + VERBOSE=VERBOSE) + +#-- run main program +if __name__ == '__main__': + main() diff --git a/gravity_toolkit/combine_harmonics.py b/gravity_toolkit/combine_harmonics.py new file mode 100755 index 00000000..d74f49c8 --- /dev/null +++ b/gravity_toolkit/combine_harmonics.py @@ -0,0 +1,79 @@ +#!/usr/bin/env python +u""" +combine_harmonics.py +Written by Tyler Sutterley (05/2015) + +Returns the spatial field for a series of spherical harmonics + +CALLING SEQUENCE: + spatial = combine_harmonics(clm1, slm1, lon, lat, LMIN=0, LMAX=60) + +INPUTS: + clm: cosine spherical harmonic coefficients + slm: sine spherical harmonic coefficients + lon: longitude + lat: latitude + +OPTIONS: + LMIN: Lower bound of Spherical Harmonic Degrees + LMAX: Upper bound of Spherical Harmonic Degrees + MMAX: Upper bound of Spherical Harmonic Orders (default = LMAX) + PLM: plm coefficients (if computed outside the function) + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + +PROGRAM DEPENDENCIES: + plm_holmes.py: Computes fully-normalized associated Legendre polynomials + +UPDATE HISTORY: + Updated 05/2015: added parameter MMAX for MMAX != LMAX. 
+ Written 05/2013 +""" +import numpy as np +from gravity_toolkit.plm_holmes import plm_holmes + +def combine_harmonics(clm1,slm1,lon,lat,LMIN=0,LMAX=0,MMAX=None,PLM=None): + + #-- if LMAX is not specified, will use the size of the input harmonics + if (LMAX == 0): + LMAX = np.shape(clm1)[0]-1 + #-- upper bound of spherical harmonic orders (default = LMAX) + if MMAX is None: + MMAX = np.copy(LMAX) + + #-- Longitude in radians + phi = (np.squeeze(lon)*np.pi/180.0)[np.newaxis,:] + #-- Colatitude in radians + th = (90.0 - np.squeeze(lat))*np.pi/180.0 + thmax = len(th) + + #-- Calculate fourier coefficients from legendre coefficients + d_cos = np.zeros((MMAX+1,thmax))#-- [m,th] + d_sin = np.zeros((MMAX+1,thmax))#-- [m,th] + if PLM is None: + #-- if plms are not pre-computed: calculate Legendre polynomials + PLM,dPLM = plm_holmes(LMAX,np.cos(th)) + + #-- Truncating harmonics to degree and order LMAX + #-- removing coefficients below LMIN and above MMAX + mm = np.arange(0,MMAX+1) + clm = np.zeros((LMAX+1,MMAX+1)) + slm = np.zeros((LMAX+1,MMAX+1)) + clm[LMIN:LMAX+1,mm] = clm1[LMIN:LMAX+1,mm] + slm[LMIN:LMAX+1,mm] = slm1[LMIN:LMAX+1,mm] + for k in range(0,thmax): + #-- summation over all spherical harmonic degrees + d_cos[:,k] = np.sum(PLM[:,mm,k]*clm[:,mm],axis=0) + d_sin[:,k] = np.sum(PLM[:,mm,k]*slm[:,mm],axis=0) + + #-- Final signal recovery from fourier coefficients + m = np.arange(0,MMAX+1)[:,np.newaxis] + #-- Calculating cos(m*phi) and sin(m*phi) + ccos = np.cos(np.dot(m,phi)) + ssin = np.sin(np.dot(m,phi)) + #-- summation of cosine and sine harmonics + s = np.dot(np.transpose(ccos),d_cos) + np.dot(np.transpose(ssin),d_sin) + + #-- return output data + return s diff --git a/gravity_toolkit/convert_calendar_decimal.py b/gravity_toolkit/convert_calendar_decimal.py new file mode 100644 index 00000000..58dfa051 --- /dev/null +++ b/gravity_toolkit/convert_calendar_decimal.py @@ -0,0 +1,167 @@ +#!/usr/bin/env python +u""" +convert_calendar_decimal.py +Written by Tyler 
Sutterley (05/2015) + +Converts from calendar date into decimal years +Converts year, month (day, hour, minute, second) + into decimal years taking into account leap years + +CALLING SEQUENCE: + t_date = convert_calendar_decimal(year, month) + t_date = convert_calendar_decimal(year, month, DAY=day, \ + HOUR=hour, MINUTE=minute, SECOND=second) + +INPUTS: + year: can be a single value or an array of dates + month: can be a single value or an array of dates + +OPTION: + DAY: can be a single value or an array of dates + HOUR: can be a single value or an array of dates + MINUTE: can be a single value or an array of dates + SECOND: can be a single value or an array of dates + DofY: day of the year (January 1 = 1) + can be a single value or an array of dates + +OUTPUTS: + t_date: date in decimal format (years) + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python + (http://www.numpy.org) + +NOTES: + Dershowitz, N. and E.M. Reingold. 2008. Calendrical Calculations. + Cambridge: Cambridge University Press. 
+ +UPDATE HISTORY: + Updated 05/2015: updated comments and minor update to nonzero statement + Updated 05/2014: added option for day of year + Updated 04/2014: new code from convert_J2000.py + Updated 04/2014: updated comments and improved rules + for leap years to include mod 100 and mod 400 + Written 04/2014 +""" +import numpy as np + +def convert_calendar_decimal(year, month, DAY=None, HOUR=None, MINUTE=None, + SECOND=None, DofY=None): + + #-- number of dates + if (np.ndim(np.squeeze(year)) == 0): + #-- single date entered + n_dates = 1 + else: + #-- array of dates entered + n_dates = len(np.squeeze(year)) + + #-- create arrays for calendar date variables + cal_date = {} + cal_date['year'] = np.zeros((n_dates)) + cal_date['month'] = np.zeros((n_dates)) + cal_date['day'] = np.zeros((n_dates)) + cal_date['hour'] = np.zeros((n_dates)) + cal_date['minute'] = np.zeros((n_dates)) + cal_date['second'] = np.zeros((n_dates)) + #-- day of the year + cal_date['DofY'] = np.zeros((n_dates)) + + #-- remove singleton dimensions and use year and month + cal_date['year'][:] = np.squeeze(year) + cal_date['month'][:] = np.squeeze(month) + + #-- create output date variable + t_date = np.zeros((n_dates)) + + #-- days per month in a leap and a standard year + #-- only difference is February (29 vs. 
28) + dpm_leap=np.array([31,29,31,30,31,30,31,31,30,31,30,31], dtype=np.float) + dpm_stnd=np.array([31,28,31,30,31,30,31,31,30,31,30,31], dtype=np.float) + + #-- Rules in the Gregorian calendar for a year to be a leap year: + #-- divisible by 4, but not by 100 unless divisible by 400 + #-- True length of the year is about 365.2422 days + #-- Adding a leap day every four years ==> average 365.25 + #-- Subtracting a leap year every 100 years ==> average 365.24 + #-- Adding a leap year back every 400 years ==> average 365.2425 + #-- Subtracting a leap year every 4000 years ==> average 365.24225 + m4 = (cal_date['year'] % 4) + m100 = (cal_date['year'] % 100) + m400 = (cal_date['year'] % 400) + m4000 = (cal_date['year'] % 4000) + #-- find indices for standard years and leap years using criteria + leap, = np.nonzero((m4 == 0) & (m100 != 0) | (m400 == 0) & (m4000 != 0)) + stnd, = np.nonzero((m4 != 0) | (m100 == 0) & (m400 != 0) | (m4000 == 0)) + + #-- calculate the day of the year + if DofY is not None: + #-- if entered directly as an input + #-- remove 1 so day 1 (Jan 1st) = 0.0 in decimal format + cal_date['DofY'][:] = np.squeeze(DofY)-1 + else: + #-- use calendar month and day of the month to calculate day of the year + #-- month minus 1: January = 0, February = 1, etc (indice of month) + #-- in decimal form: January = 0.0 + month_m1 = np.array(cal_date['month'],dtype=np.int) - 1 + + #-- day of month + if DAY is not None: + #-- remove 1 so 1st day of month = 0.0 in decimal format + cal_date['day'][:] = np.squeeze(DAY)-1.0 + else: + #-- if not entering days as an input + #-- will use the mid-month value + cal_date['day'][leap] = dpm_leap[month_m1[leap]]/2.0 + cal_date['day'][stnd] = dpm_stnd[month_m1[stnd]]/2.0 + + #-- create matrix with the lower half = 1 + #-- this matrix will be used in a matrix multiplication + #-- to calculate the total number of days for prior months + #-- the -1 will make the diagonal == 0 + #-- i.e. 
first row == all zeros and the + #-- last row == ones for all but the last element + mon_mat=np.tri(12,12,-1) + #-- using a dot product to calculate total number of days + #-- for the months before the input date + #-- basically is sum(i*dpm) + #-- where i is 1 for all months < the month of interest + #-- and i is 0 for all months >= the month of interest + #-- month of interest is zero as the exact days will be + #-- used to calculate the date + + #-- calculate the day of the year for leap and standard + #-- use total days of all months before date + #-- and add number of days before date in month + cal_date['DofY'][stnd] = cal_date['day'][stnd] + \ + np.dot(mon_mat[month_m1[stnd],:],dpm_stnd) + cal_date['DofY'][leap] = cal_date['day'][leap] + \ + np.dot(mon_mat[month_m1[leap],:],dpm_leap) + + #-- hour of day (else is zero) + if HOUR is not None: + cal_date['hour'][:] = np.squeeze(HOUR) + + #-- minute of hour (else is zero) + if MINUTE is not None: + cal_date['minute'][:] = np.squeeze(MINUTE) + + #-- second in minute (else is zero) + if SECOND is not None: + cal_date['second'][:] = np.squeeze(SECOND) + + #-- calculate decimal date + #-- convert hours, minutes and seconds into days + #-- convert calculated fractional days into decimal fractions of the year + #-- Leap years + t_date[leap] = cal_date['year'][leap] + \ + (cal_date['DofY'][leap] + cal_date['hour'][leap]/24. + \ + cal_date['minute'][leap]/1440. + \ + cal_date['second'][leap]/86400.)/np.sum(dpm_leap) + #-- Standard years + t_date[stnd] = cal_date['year'][stnd] + \ + (cal_date['DofY'][stnd] + cal_date['hour'][stnd]/24. + \ + cal_date['minute'][stnd]/1440. 
+ \ + cal_date['second'][stnd]/86400.)/np.sum(dpm_stnd) + + return t_date diff --git a/gravity_toolkit/convert_julian.py b/gravity_toolkit/convert_julian.py new file mode 100644 index 00000000..357a1c2c --- /dev/null +++ b/gravity_toolkit/convert_julian.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python +u""" +convert_julian.py +Written by Tyler Sutterley (10/2017) + +Return the calendar date and time given Julian date. + +CALLING SEQUENCE: + YEAR,MONTH,DAY,HOUR,MINUTE,SECOND = convert_julian(JD, FORMAT='tuple') + +INPUTS: + JD: Julian Day of the specified calendar date. + +OUTPUTS: + year: Number of the desired year + month: Number of the desired month (1 = January, ..., 12 = December) + day: Number of day of the month + hour: hour of the day + minute: minute of the hour + second: second (and fractions of a second) of the minute + +OPTIONS: + ASTYPE: convert output to variable type (e.g. int). Default is float + FORMAT: format of output variables + 'dict': dictionary with variable keys as listed above + 'tuple': tuple with variable order YEAR,MONTH,DAY,HOUR,MINUTE,SECOND + 'zip': aggregated variable sets + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + +NOTES: + Translated from caldat in "Numerical Recipes in C", by William H. Press, + Brian P. Flannery, Saul A. Teukolsky, and William T. Vetterling. + Cambridge University Press, 1988 (second printing). + Hatcher, D. A., "Simple Formulae for Julian Day Numbers and Calendar Dates", + Quarterly Journal of the Royal Astronomical Society, 25(1), 1984. + +UPDATE HISTORY: + Updated 10/2017: updated comments and formatting of algorithm + Updated 06/2017: added option FORMAT to change the output variables format + Updated 06/2016: added option to convert output to variable type (e.g. 
int) + Updated 11/2015: extracting the values from singleton dimension cases + Updated 03/2015: remove singleton dimensions if initially importing value + Updated 03/2014: updated to be able to convert arrays + Written 05/2013 +""" +import numpy as np + +def convert_julian(JD, ASTYPE=None, FORMAT='dict'): + #-- convert to array if only a single value was imported + if (np.ndim(JD) == 0): + JD = np.array([JD]) + SINGLE_VALUE = True + else: + SINGLE_VALUE = False + + JDO = np.floor(JD + 0.5) + C = np.zeros_like(JD) + #-- calculate C for dates before and after the switch to Gregorian + IGREG = 2299161.0 + ind1, = np.nonzero(JDO < IGREG) + C[ind1] = JDO[ind1] + 1524.0 + ind2, = np.nonzero(JDO >= IGREG) + B = np.floor((JDO[ind2] - 1867216.25)/36524.25) + C[ind2] = JDO[ind2] + B - np.floor(B/4.0) + 1525.0 + #-- calculate coefficients for date conversion + D = np.floor((C - 122.1)/365.25) + E = np.floor((365.0 * D) + np.floor(D/4.0)) + F = np.floor((C - E)/30.6001) + #-- calculate day, month, year and hour + DAY = np.floor(C - E + 0.5) - np.floor(30.6001*F) + MONTH = F - 1.0 - 12.0*np.floor(F/14.0) + YEAR = D - 4715.0 - np.floor((7.0+MONTH)/10.0) + HOUR = np.floor(24.0*(JD + 0.5 - JDO)) + #-- calculate minute and second + G = (JD + 0.5 - JDO) - HOUR/24.0 + MINUTE = np.floor(G*1440.0) + SECOND = (G - MINUTE/1440.0) * 86400.0 + + #-- convert all variables to output type (from float) + if ASTYPE is not None: + YEAR = YEAR.astype(ASTYPE) + MONTH = MONTH.astype(ASTYPE) + DAY = DAY.astype(ASTYPE) + HOUR = HOUR.astype(ASTYPE) + MINUTE = MINUTE.astype(ASTYPE) + SECOND = SECOND.astype(ASTYPE) + + #-- if only a single value was imported initially: remove singleton dims + if SINGLE_VALUE: + YEAR = YEAR.item(0) + MONTH = MONTH.item(0) + DAY = DAY.item(0) + HOUR = HOUR.item(0) + MINUTE = MINUTE.item(0) + SECOND = SECOND.item(0) + + #-- return date variables in output format (default python dictionary) + if (FORMAT == 'dict'): + return dict(year=YEAR, month=MONTH, day=DAY, + hour=HOUR, 
minute=MINUTE, second=SECOND) + elif (FORMAT == 'tuple'): + return (YEAR, MONTH, DAY, HOUR, MINUTE, SECOND) + elif (FORMAT == 'zip'): + return zip(YEAR, MONTH, DAY, HOUR, MINUTE, SECOND) diff --git a/gravity_toolkit/destripe_harmonics.py b/gravity_toolkit/destripe_harmonics.py new file mode 100644 index 00000000..ca2aaa1e --- /dev/null +++ b/gravity_toolkit/destripe_harmonics.py @@ -0,0 +1,247 @@ +#!/usr/bin/env python +u""" +destripe_harmonics.py +Original Fortran program remove_errors.f written by Isabella Velicogna +Adapted by Chia-Wei Hsu (05/2018) +Updated by Tyler Sutterley (03/2020) + +Filters spherical harmonic coefficients for correlated "striping" errors + +ALGORITHM: + clm1, slm1 are the spherical harmonic coefficients + after the mean field has been removed + Smooth values over l, for l=even and l=odd separately + by fitting a quadratic function to every 7 points + Remove those smoothed values + +CALLING SEQUENCE: + Ylms = destripe_harmonics(clm,slm,LMAX=60) + Wclm = WYlms['clm'] + Wslm = WYlms['slm'] + +INPUTS: + clm1: cosine spherical harmonic coefficients (matrix 2 dims) + slm1: sine spherical harmonic coefficients (matrix 2 dims) + clm1 and slm1 are matrix with 2 dimensions + the dimensions are in the following order [l,m] + +OUTPUTS: + Wclm: filtered cosine spherical harmonic coefficients + Wslm: filtered sine spherical harmonic coefficients + +OPTIONS: + LMIN: Lower bound of Spherical Harmonic Degrees (default = 2) + LMAX: Upper bound of Spherical Harmonic Degrees (default = 60) + MMAX: Upper bound of Spherical Harmonic Orders (default = LMAX) + ROUND: use round to find nearest even (True) or use floor (False) + NARROW: Clm=Slm=0 if number of points is less than window size (False) + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + +REFERENCE: + Sean Swenson and John Wahr, "Post-processing removal of correlated errors in + GRACE data", Geophysical Research Letters, 33(L08402), 2006 + 
http://dx.doi.org/10.1029/2005GL025285 + +UPDATE HISTORY: + Updated 03/2020: Updated for public release + Updated 05/2018: using __future__ print and updated flags comments + Updated 08/2015: changed from sys.exit to raise ValueError + Updated 05/2015: added parameter MMAX for MMAX != LMAX + Updated 02/2014: generalization for GRACE GUI and other routines +""" +from __future__ import print_function +import numpy as np + +def destripe_harmonics(clm1, slm1, LMIN=2, LMAX=60, MMAX=None, + ROUND=True, NARROW=False): + + #-- tests if spherical harmonics have been imported + if (clm1.shape[0] == 1) or (slm1.shape[0] == 1): + raise ValueError('Input harmonics need to be matrices') + + #-- upper bound of spherical harmonic orders (default = LMAX) + if MMAX is None: + MMAX = np.copy(LMAX) + + #-- output filtered coefficients (copy to not modify input) + Wclm = clm1.copy() + Wslm = slm1.copy() + #-- matrix size declarations + clmeven = np.zeros((LMAX), dtype=np.float64) + slmeven = np.zeros((LMAX), dtype=np.float64) + clmodd = np.zeros((LMAX+1), dtype=np.float64) + slmodd = np.zeros((LMAX+1), dtype=np.float64) + clmsm = np.zeros((LMAX+1,MMAX+1), dtype=np.float64) + slmsm = np.zeros((LMAX+1,MMAX+1), dtype=np.float64) + + #-- start of the smoothing over orders (m) + for m in range(int(MMAX+1)): + smooth = np.exp(-np.float64(m)/10.0)*15.0 + if ROUND: + #-- round(smooth) to nearest even instead of int(smooth) + nsmooth = np.around(smooth) + else: + #-- Sean's method for finding nsmooth (use floor of smooth) + nsmooth = np.int64(smooth) + + if (nsmooth < 2): + #-- Isabella's method of picking nsmooth sets minimum to 2 + nsmooth = np.int64(2) + + rmat = np.zeros((3,3), dtype=np.float64) + lll = np.arange(np.float64(nsmooth)*2.+1.)-np.float64(nsmooth) + #-- create design matrix to have the following form: + # [ 1 ll ll^2 ] + # [ ll ll^2 ll^3 ] + # [ ll^2 ll^3 ll^4 ] + for i,ill in enumerate(lll): + rmat[0,0] += 1.0 + rmat[0,1] += ill + rmat[0,2] += ill**2 + + rmat[1,0] += ill + 
rmat[1,1] += ill**2 + rmat[1,2] += ill**3 + + rmat[2,0] += ill**2 + rmat[2,1] += ill**3 + rmat[2,2] += ill**4 + + #-- put the even and odd l's into their own arrays + ieven = -1 + iodd = -1 + leven = np.zeros((LMAX), dtype=np.int) + lodd = np.zeros((LMAX), dtype=np.int) + + for l in range(int(m),int(LMAX+1)): + #-- check if degree is odd or even + if np.remainder(l,2).astype(np.bool): + iodd += 1 + lodd[iodd] = l + clmodd[iodd] = clm1[l,m].copy() + slmodd[iodd] = slm1[l,m].copy() + else: + ieven += 1 + leven[ieven] = l + clmeven[ieven] = clm1[l,m].copy() + slmeven[ieven] = slm1[l,m].copy() + + #-- smooth, by fitting a quadratic polynomial to 7 points at a time + #-- deal with even stokes coefficients + l1 = 0 + l2 = ieven + + if (l1 > (l2-2*nsmooth)): + for l in range(l1,l2+1): + if NARROW: + #-- Sean's method + #-- Clm=Slm=0 if number of points is less than window size + clmsm[leven[l],m] = 0.0 + slmsm[leven[l],m] = 0.0 + else: + #-- Isabella's method + #-- Clm and Slm passed through unaltered + clmsm[leven[l],m] = clm1[leven[l],m].copy() + slmsm[leven[l],m] = slm1[leven[l],m].copy() + else: + for l in range(int(l1+nsmooth),int(l2-nsmooth+1)): + rhsc = np.zeros((3), dtype=np.float64) + rhss = np.zeros((3), dtype=np.float64) + for ll in range(int(-nsmooth),int(nsmooth+1)): + rhsc[0] += clmeven[l+ll] + rhsc[1] += clmeven[l+ll]*np.float64(ll) + rhsc[2] += clmeven[l+ll]*np.float64(ll**2) + rhss[0] += slmeven[l+ll] + rhss[1] += slmeven[l+ll]*np.float64(ll) + rhss[2] += slmeven[l+ll]*np.float64(ll**2) + + #-- fit design matrix to coefficients + #-- to get beta parameters + bhsc = np.linalg.lstsq(rmat,rhsc.T,rcond=-1)[0] + bhss = np.linalg.lstsq(rmat,rhss.T,rcond=-1)[0] + + #-- all other l is assigned as bhsc + clmsm[leven[l],m] = bhsc[0].copy() + #-- all other l is assigned as bhss + slmsm[leven[l],m] = bhss[0].copy() + + if (l == (l1+nsmooth)): + #-- deal with l=l1+nsmooth + for ll in range(int(-nsmooth),0): + clmsm[leven[l+ll],m] = bhsc[0]+bhsc[1]*np.float64(ll) + \ + 
bhsc[2]*np.float64(ll**2) + slmsm[leven[l+ll],m] = bhss[0]+bhss[1]*np.float64(ll) + \ + bhss[2]*np.float64(ll**2) + + if (l == (l2-nsmooth)): + #-- deal with l=l2-nsmnooth + for ll in range(1,int(nsmooth+1)): + clmsm[leven[l+ll],m] = bhsc[0]+bhsc[1]*np.float64(ll) + \ + bhsc[2]*np.float64(ll**2) + slmsm[leven[l+ll],m] = bhss[0]+bhss[1]*np.float64(ll) + \ + bhss[2]*np.float64(ll**2) + + #-- deal with odd stokes coefficients + l1 = 0 + l2 = iodd + + if (l1 > (l2-2*nsmooth)): + for l in range(l1,l2+1): + if NARROW: + #-- Sean's method + #-- Clm=Slm=0 if number of points is less than window size + clmsm[lodd[l],m] = 0.0 + slmsm[lodd[l],m] = 0.0 + else: + #-- Isabella's method + #-- Clm and Slm passed through unaltered + clmsm[lodd[l],m] = clm1[lodd[l],m].copy() + slmsm[lodd[l],m] = slm1[lodd[l],m].copy() + else: + for l in range(int(l1+nsmooth),int(l2-nsmooth+1)): + rhsc = np.zeros((3), dtype=np.float64) + rhss = np.zeros((3), dtype=np.float64) + for ll in range(int(-nsmooth),int(nsmooth+1)): + rhsc[0] += clmodd[l+ll] + rhsc[1] += clmodd[l+ll]*np.float64(ll) + rhsc[2] += clmodd[l+ll]*np.float64(ll**2) + rhss[0] += slmodd[l+ll] + rhss[1] += slmodd[l+ll]*np.float64(ll) + rhss[2] += slmodd[l+ll]*np.float64(ll**2) + + #-- fit design matrix to coefficients + #-- to get beta parameters + bhsc = np.linalg.lstsq(rmat,rhsc.T,rcond=-1)[0] + bhss = np.linalg.lstsq(rmat,rhss.T,rcond=-1)[0] + + #-- all other l is assigned as bhsc + clmsm[lodd[l],m] = bhsc[0].copy() + #-- all other l is assigned as bhss + slmsm[lodd[l],m] = bhss[0].copy() + + if (l == (l1+nsmooth)): + #-- deal with l=l1+nsmooth + for ll in range(int(-nsmooth),0): + clmsm[lodd[l+ll],m] = bhsc[0]+bhsc[1]*np.float64(ll) + \ + bhsc[2]*np.float64(ll**2) + slmsm[lodd[l+ll],m] = bhss[0]+bhss[1]*np.float64(ll) + \ + bhss[2]*np.float64(ll**2) + + if (l == (l2-nsmooth)): + #-- deal with l=l2-nsmnooth + for ll in range(1,int(nsmooth+1)): + clmsm[lodd[l+ll],m] = bhsc[0]+bhsc[1]*np.float64(ll) + \ + bhsc[2]*np.float64(ll**2) + 
slmsm[lodd[l+ll],m] = bhss[0]+bhss[1]*np.float64(ll) + \ + bhss[2]*np.float64(ll**2) + + #-- deal with m greater than or equal to 5 + for l in range(int(m),int(LMAX+1)): + if (m >= 5): + #-- remove smoothed clm/slm from original spherical harmonics + Wclm[l,m] -= clmsm[l,m] + Wslm[l,m] -= slmsm[l,m] + + return {'clm':Wclm,'slm':Wslm} diff --git a/gravity_toolkit/gauss_weights.py b/gravity_toolkit/gauss_weights.py new file mode 100755 index 00000000..e45099f1 --- /dev/null +++ b/gravity_toolkit/gauss_weights.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python +u""" +gauss_weights.py +Original IDL code gauss_weights.pro written by Sean Swenson +Adapted by Tyler Sutterley (05/2013) + +Computes the Gaussian weights as a function of degree +A normalized version of Jekeli's Gaussian averaging function + +Christopher Jekeli (1981) +Alternative Methods to Smooth the Earth's Gravity Field +http://www.geology.osu.edu/~jekeli.1/OSUReports/reports/report_327.pdf + +CALLING SEQUENCE: + wl = gauss_weights(hw, LMAX) + +INPUTS: + hw: Gaussian smoothing radius in kilometers + Radius r corresponds to the distance at which the weight + drops to half its peak value at the shortest wavelength + LMAX: Maximum degree of Stokes coefficients + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + +NOTES: + Differences from recurs function in combine.mac.f: + weighting from gauss_weights is normalized outside of the function + wt = 2.0*pi*gauss_weights(rad,LMAX) + weighting from recurs is normalized inside of the function + call recurs(alpha,bcoef) calculates bcoef up to LMAX 150 (=wt[0:150]) + alpha = alog(2.)/(1.-cos(rad/6371.)) +""" +import numpy as np + +def gauss_weights(hw, LMAX): + #-- allocate for output weights + wl = np.zeros((LMAX+1)) + #-- radius of the Earth in km + rad_e = 6371.0 + if (hw < 1.0e-10): + #-- distance is smaller than cutoff + wl[:]=1.0/(2.0*np.pi) + else: + #-- calculate gaussian weights using recursion + b = np.log(2.0)/(1.0 - 
np.cos(hw/rad_e)) + #-- weight for degree 0 + wl[0] = 1.0/(2.0*np.pi) + #-- weight for degree 1 + wl[1] = wl[0]*((1.0 +np.exp(-2.0*b))/(1. -np.exp(-2.0*b))-1.0/b) + #-- valid flag + valid = True + #-- spherical harmonic degree + l = 2 + #-- while valid (within cutoff) + #-- and spherical harmonic degree is less than LMAX + while (valid and (l <= LMAX)): + #-- calculate weight with recursion + wl[l] = (1.0-2.0*l)/b*wl[l-1]+wl[l-2] + #-- weight is less than cutoff + if (np.abs(wl[l]) < 1.0e-10): + #-- set all weights to cutoff + wl[l:LMAX+1] = 1.0e-10 + #-- set valid flag + valid = False + #-- add 1 to l + l += 1 + #-- return the gaussian weights + return wl diff --git a/gravity_toolkit/geocenter.py b/gravity_toolkit/geocenter.py new file mode 100644 index 00000000..f3b68706 --- /dev/null +++ b/gravity_toolkit/geocenter.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python +u""" +geocenter.py +Written by Tyler Sutterley (06/2019) + +Calculates the geocenter variation (in mm) from degree 1 Stokes Coefficients +Calculates the Degree 1 Stokes Coefficients of a geocenter variation (in mm) + +CALLING SEQUENCE: + xyz = geocenter(C10=C10, C11=C11, S11=S11) + Ylms = geocenter(X=x, Y=y, Z=z, INVERSE=True) + +OPTIONS: + RADIUS: Earth's radius for calculating spherical harmonics from SLR data + INVERSE: calculates the Stokes Coefficients from geocenter (True/False) + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + +UPDATE HISTORY: + Updated 06/2019: added option RADIUS to manually set the Earth's radius + Updated 04/2017: changed option from INV to INVERSE and made True/False + Updated 04/2015: calculate radius of the Earth directly in program + Updated 02/2014: minor update to if statement + Updated 03/2013: converted to python +""" +import numpy as np + +def geocenter(C10=0,C11=0,S11=0,X=0,Y=0,Z=0,RADIUS=None,INVERSE=False): + if RADIUS is None: + #-- WGS84 ellipsoid parameters + a_axis = 6378137.0#-- [m] semimajor axis of the ellipsoid + 
flat = 1.0/298.257223563#-- flattening of the ellipsoid + #-- Mean Earth's Radius in mm having the same volume as WGS84 ellipsoid + #-- (4pi/3)R^3 = (4pi/3)(a^2)b = (4pi/3)(a^3)(1D -f) + rad_e = 1000.0*a_axis*(1.0 -flat)**(1.0/3.0) + else: + rad_e = np.copy(RADIUS) + #-- check if calculating spherical harmonics or geocenter coefficients + if INVERSE: + #-- inverse: geocenter to Stokes Coefficients + C10 = Z/(rad_e*np.sqrt(3.0)) + C11 = X/(rad_e*np.sqrt(3.0)) + S11 = Y/(rad_e*np.sqrt(3.0)) + else: + #-- Stokes Coefficients to geocenter + Z = C10*rad_e*np.sqrt(3.0) + X = C11*rad_e*np.sqrt(3.0) + Y = S11*rad_e*np.sqrt(3.0) + + return {'C10':C10, 'C11':C11, 'S11':S11, 'x':X, 'y':Y, 'z':Z} diff --git a/gravity_toolkit/grace_date.py b/gravity_toolkit/grace_date.py new file mode 100644 index 00000000..912ac04e --- /dev/null +++ b/gravity_toolkit/grace_date.py @@ -0,0 +1,241 @@ +#!/usr/bin/env python +u""" +grace_date.py +Written by Tyler Sutterley (03/2020) + +Reads dates of each GRACE/GRACE-FO data file and assigns the month number + reads the start and end date from the filename, + calculates the mean date in decimal format (correcting for leap years) +Creates an index of dates for GRACE/GRACE-FO data if specified + +INPUTS: + base_dir: Working data directory for GRACE/GRACE-FO data + +OPTIONS: + PROC: GRACE data processing center (CSR/CNES/JPL/GFZ) + DREL: GRACE data release (RL03 for CNES) (RL06 for CSR/GFZ/JPL) + DSET: GRACE dataset (GAA/GAB/GAC/GAD/GSM) + GAA is the non-tidal atmospheric correction + GAB is the non-tidal oceanic correction + GAC is the combined non-tidal atmospheric and oceanic correction + GAD is the GRACE ocean bottom pressure product + GSM is corrected monthly GRACE/GRACE-FO static field product + OUTPUT: create index of dates for GRACE/GRACE-FO data + +OUTPUTS: + dictionary of files mapped by GRACE/GRACE-FO month + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + future: Compatibility layer between 
Python 2 and Python 3 + (http://python-future.org/) + +PROGRAM DEPENDENCIES: + convert_julian.py: converts a julian date into a calendar date + +UPDATE HISTORY: + Updated 03/2020 for public release +""" +from __future__ import print_function + +import sys +import os +import re +import getopt +import numpy as np +from gravity_toolkit.convert_julian import convert_julian + +def grace_date(base_dir, PROC='', DREL='', DSET='', OUTPUT=True, MODE=0o775): + #-- Directory of exact product + grace_dir = os.path.join(base_dir, PROC, DREL, DSET) + #-- input index file containing GRACE data filenames + with open(os.path.join(grace_dir, 'index.txt'),'r') as f: + input_files = f.read().splitlines() + + #-- number of lines in input_files + n_files = len(input_files) + + #-- define date variables + start_yr = np.zeros((n_files))#-- year start date + end_yr = np.zeros((n_files))#-- year end date + start_day = np.zeros((n_files))#-- day number start date + end_day = np.zeros((n_files))#-- day number end date + mid_day = np.zeros((n_files))#-- mid-month day + JD = np.zeros((n_files))#-- Julian date of mid-month + tot_days = np.zeros((n_files))#-- number of days since Jan 2002 + tdec = np.zeros((n_files))#-- tdec is the date in decimal form + mon = np.zeros((n_files,),dtype=np.int)#-- GRACE/GRACE-FO month number + + #-- compile numerical expression operator for parameters from files + #-- will work with previous releases and releases for GRACE-FO + #-- UTCSR: The University of Texas at Austin Center for Space Research + #-- EIGEN: GFZ German Research Center for Geosciences (RL01-RL05) + #-- GFZOP: GFZ German Research Center for Geosciences (RL06+GRACE-FO) + #-- JPLEM: NASA Jet Propulsion Laboratory (harmonic solutions) + #-- JPLMSC: NASA Jet Propulsion Laboratory (mascon solutions) + regex_pattern = ('(.*?)-2_(\d+)-(\d+)_(.*?)_({0})_(.*?)_(\d+)(.*?)' + '(\.gz|\.gfc)?$').format('UTCSR|EIGEN|GFZOP|JPLEM|JPLMSC') + rx = re.compile(regex_pattern, re.VERBOSE) + + #-- Output GRACE date 
ascii file + if OUTPUT: + date_file = '{0}_{1}_DATES.txt'.format(PROC, DREL) + fid = open(os.path.join(grace_dir,date_file), 'w') + #-- date file header information + args = ('Mid-date','Month','Start_Day','End_Day','Total_Days') + print('{0} {1:>10} {2:>11} {3:>10} {4:>13}'.format(*args),file=fid) + + #-- create python dictionary mapping input file names with GRACE months + grace_files = {} + + #-- for each data file + for t, infile in enumerate(input_files): + #-- extract parameters from input filename + PFX,start_date,end_date,AUX,PRC,F1,DRL,F2,SFX = rx.findall(infile).pop() + #-- find start date, end date and number of days + start_yr[t] = np.float(start_date[:4]) + end_yr[t] = np.float(end_date[:4]) + start_day[t] = np.float(start_date[4:]) + end_day[t] = np.float(end_date[4:]) + #-- end_day (will be changed if the month crosses 2 years) + end_plus = np.copy(end_day[t]) + + #-- calculate mid-month date taking into account if measurements are + #-- on different years + if ((start_yr[t] % 4) == 0):#-- Leap Year (% = modulus) + dpy = 366.0 + else:#-- Standard Year + dpy = 365.0 + #-- For data that crosses years + if (start_yr[t] != end_yr[t]): + #-- end_yr - start_yr should be 1 + end_plus = (end_yr[t]-start_yr[t])*dpy + end_day[t] + #-- Calculation of Mid-month value + mid_day[t] = np.mean([start_day[t], end_plus]) + + #-- Calculation of the Julian date from start_yr and mid_day + JD[t] = np.float(367.0*start_yr[t] - \ + np.floor(7.0*(start_yr[t] + np.floor(10.0/12.0))/4.0) - \ + np.floor(3.0*(np.floor((start_yr[t] - 8.0/7.0)/100.0) + 1.0)/4.0) +\ + np.floor(275.0/9.0) + mid_day[t] + 1721028.5) + #-- convert the julian date into calendar dates (hour, day, month, year) + cal_date = convert_julian(JD[t]) + + #-- Calculating the mid-month date in decimal form + tdec[t] = start_yr[t] + mid_day[t]/dpy + + #-- Calculation of total days since start of campaign + count = 0 + n_yrs = np.int(start_yr[t]-2002) + #-- for each of the GRACE years up to the file year + for iyr 
in range(n_yrs): + #-- year i + year = 2002 + iyr + #-- number of days in year i (if leap year or standard year) + if ((year % 4) == 0): + #-- Leap Year + dpm=[31,29,31,30,31,30,31,31,30,31,30,31] + else: + #-- Standard Year + dpm=[31,28,31,30,31,30,31,31,30,31,30,31] + #-- add all days from prior years to count + count += np.sum(dpm) + + #-- calculating the total number of days since 2002 + tot_days[t] = np.mean([count+start_day[t], count+end_plus]) + + #-- Calculates the month number (or 10-day number for CNES RL01,RL02) + if ((PROC == 'CNES') and (DREL in ('RL01','RL02'))): + mon[t] = np.round(1.0+(tot_days[t]-tot_days[0])/10.0) + else: + #-- calculate the GRACE/GRACE-FO month (Apr02 == 004) + #-- https://grace.jpl.nasa.gov/data/grace-months/ + #-- Notes on special months (e.g. 119, 120) below + mon[t] = 12*(cal_date['year']-2002) + cal_date['month'] + + #-- The 'Special Months' (Nov 2011, Dec 2011 and April 2012) with + #-- Accelerometer shutoffs make this relation between month number + #-- and date more complicated as days from other months are used + #-- For CSR and GFZ: Nov11 (month 119) is centered in Oct11 (118) + #-- For JPL: Dec 2011 (month 120) is centered in Jan12 (121) + #-- For all: May15 (month 161) is centered in Apr15 (160) + if PROC in ('CSR','GFZ') and (mon[t] == mon[t-1]) and (mon[t-1] == 118): + mon[t] = mon[t-1] + 1 + elif (mon[t] == mon[t-1]) and (mon[t-1] == 160): + mon[t] = mon[t-1] + 1 + elif PROC in ('JPL') and (mon[t-1] == 119): + mon[t] = mon[t-1] + 1 + + #-- add file to python dictionary mapped to GRACE/GRACE-FO month + grace_files[mon[t]] = os.path.join(grace_dir,infile) + #-- print to GRACE DATES ascii file (NOTE: tot_days will be rounded up) + if OUTPUT: + print(('{0:13.8f} {1:03d} {2:8.0f} {3:03.0f} {4:8.0f} {5:03.0f} ' + '{6:8.0f}').format(tdec[t],mon[t],start_yr[t],start_day[t], + end_yr[t],end_day[t],tot_days[t]), file=fid) + + #-- close date file + #-- set permissions level of output date file + if OUTPUT: + fid.close() + 
os.chmod(os.path.join(grace_dir, date_file), MODE) + + #-- return the python dictionary that maps GRACE months with GRACE files + return grace_files + +#-- PURPOSE: help module to describe the optional input parameters +def usage(): + print('\nHelp: {}'.format(os.path.basename(sys.argv[0]))) + print(' --directory=X\t\tGRACE/GRACE-FO working directory') + print(' -C X, --center=X\tGRACE/GRACE-FO Processing Center (CSR,GFZ,JPL)') + print(' -R X, --release=X\tGRACE/GRACE-FO data releases (RL04,RL05,RL06)') + print(' -D X, --dataset=X\tGRACE/GRACE-FO dataset (GAC,GAD,GSM)') + print(' -O, --output\t\tOutput GRACE/GRACE-FO ascii date file') + print(' -M X, --mode=X\t\tPermissions mode of output files\n') + +#-- PURPOSE: program that calls grace_date() with set parameters +def main(): + #-- Read the system arguments listed after the program + long_options = ['help','directory=','center=','release=','dataset=', + 'output','mode='] + optlist,arglist = getopt.getopt(sys.argv[1:],'hC:R:D:OM:',long_options) + + #-- GRACE/GRACE-FO directory + base_dir = os.getcwd() + #-- GRACE/GRACE-FO Processing Centers to run + PROC = ['CSR','GFZ','JPL'] + #-- Data release + DREL = ['RL06'] + #-- Dataset + DSET = ['GAC','GAD','GSM'] + #-- output GRACE/GRACE-FO ascii date file + OUTPUT = False + #-- permissions mode of output files (e.g. 
0o775) + MODE = 0o775 + for opt, arg in optlist: + if opt in ('-h','--help'): + usage() + sys.exit() + elif opt in ("--directory"): + base_dir = os.path.expanduser(arg) + elif opt in ("-C","--center"): + PROC = arg.upper().split(',') + elif opt in ("-R","--release"): + DREL = arg.upper().split(',') + elif opt in ("-D","--dataset"): + DSET = arg.upper().split(',') + elif opt in ("-O","--output"): + OUTPUT = True + elif opt in ("-M","--mode"): + MODE = int(arg, 8) + + #-- run GRACE/GRACE-FO date program + for pr in PROC: + for rl in DREL: + for ds in DSET: + grace_date(base_dir, PROC=pr, DREL=rl, DSET=ds, + OUTPUT=OUTPUT, MODE=MODE) + +#-- run main program +if __name__ == '__main__': + main() diff --git a/gravity_toolkit/grace_find_months.py b/gravity_toolkit/grace_find_months.py new file mode 100644 index 00000000..7c360c40 --- /dev/null +++ b/gravity_toolkit/grace_find_months.py @@ -0,0 +1,66 @@ +#!/usr/bin/env python +u""" +grace_find_months.py +Written by Tyler Sutterley (03/2020) + +Finds the months available for a GRACE/GRACE-FO product +Finds the all months missing from the product + +INPUTS: + base_dir: Working data directory for GRACE/GRACE-FO data + PROC: GRACE/GRACE-FO data processing center (CSR, CNES, JPL, GFZ) + DREL: GRACE/GRACE-FO data release (RL04, RL05, RL06) + +OPTIONS: + DSET: GRACE dataset (GSM, GAC, GAD, GAB, GAA) + +OUTPUTS: + start: First month in a GRACE/GRACE-FO dataset + end: Last month in a GRACE/GRACE-FO dataset + missing: missing months in a GRACE/GRACE-FO dataset + months: all available months in a GRACE/GRACE-FO dataset + time: center dates of all available months in a GRACE/GRACE-FO dataset + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + +PROGRAM DEPENDENCIES: + grace_date.py: reads GRACE index file and calculates dates for each month + +UPDATE HISTORY: + Updated 03/2020 for public release +""" +from __future__ import print_function + +import os +import numpy as np +from 
gravity_toolkit.grace_date import grace_date + +def grace_find_months(base_dir, PROC, DREL, DSET='GSM'): + #-- Directory of exact product (using date index from GSM) + grace_dir = os.path.join(base_dir, PROC, DREL, DSET) + + #-- check that GRACE/GRACE-FO date file exists + date_file = os.path.join(grace_dir,'{0}_{1}_DATES.txt'.format(PROC, DREL)) + if not os.access(date_file, os.F_OK): + grace_date(base_dir,PROC=PROC,DREL=DREL,DSET=DSET,OUTPUT=True) + + #-- read GRACE/GRACE-FO date ascii file from grace_date.py + #-- skip the header row and extract dates (decimal format) and months + date_input = np.loadtxt(date_file, skiprows=1) + tdec = date_input[:,0] + months = date_input[:,1].astype(np.int) + + #-- array of all possible months (or in case of CNES RL01/2: 10-day sets) + all_months = np.arange(1,months.max(),dtype=np.int) + #-- missing months (values in all_months but not in months) + missing = sorted(set(all_months)-set(months)) + #-- If CNES RL01/2: simply convert into numpy array + #-- else: remove months 1-3 and convert into numpy array + if ((PROC == 'CNES') & (DREL in ('RL01','RL02'))): + missing = np.array(missing,dtype=np.int) + else: + missing = np.array(missing[3:],dtype=np.int) + + return {'time':tdec, 'start':months[0], 'end':months[-1], 'months':months, + 'missing':missing} diff --git a/gravity_toolkit/grace_input_months.py b/gravity_toolkit/grace_input_months.py new file mode 100644 index 00000000..a80f65a0 --- /dev/null +++ b/gravity_toolkit/grace_input_months.py @@ -0,0 +1,399 @@ +#!/usr/bin/env python +u""" +grace_input_months.py +Written by Tyler Sutterley (03/2020) + +Reads GRACE/GRACE-FO files for a specified spherical harmonic degree and order + and for a specified date range +Replaces Degree 1 with with input values (if specified) +Replaces C20 with SLR values (if specified) +Replaces C30 with SLR values for months 179+ (if specified) +Corrects for ECMWF atmospheric "jumps" using the GAE, GAF and GAG files +Corrects for Pole Tide drift 
following Wahr et al. (2015) +Removes a temporal average gravity field to get geopotential anomalies + +INPUTS: + base_dir: Working data directory for GRACE/GRACE-FO data + PROC: (CSR/CNES/JPL/GFZ) data processing center + DREL: (RL01,RL02,RL03,RL04,RL05,RL06) data release + DSET: (GAA/GAB/GAC/GAD/GSM) data product + LMAX: Upper bound of Spherical Harmonic Degrees (e.g. 60) + start_mon: starting month to consider in analysis + end_mon: ending month to consider in analysis + missing: missing months to not consider in analysis + SLR_C20: Replaces C20 with SLR values + N: use original values + CSR: use values from CSR (TN-07,TN-09,TN-11) + GSFC: use values from GSFC (TN-14) + DEG1: Use Degree 1 coefficients + None: No degree 1 + Tellus: GRACE/GRACE-FO TN-13 coefficients from PO.DAAC + https://grace.jpl.nasa.gov/data/get-data/geocenter/ + SLR: satellite laser ranging coefficients from CSR + ftp://ftp.csr.utexas.edu/pub/slr/geocenter/ + SLF: Sutterley and Velicogna coefficients, Remote Sensing (2019) + https://doi.org/10.6084/m9.figshare.7388540 + +OUTPUTS: + clm: GRACE/GRACE-FO cosine spherical harmonic with lmax: LMAX + slm: GRACE/GRACE-FO sine spherical harmonic with lmax: LMAX + time: time of each GRACE/GRACE-FO measurement (mid-month) + month: GRACE/GRACE-FO months of input datasets + title: string denoting low degree zonals, geocenter and corrections + mean: mean spherical harmonic fields as a dictionary with fields clm/slm + directory: directory of exact GRACE/GRACE-FO product + +OPTIONS: + MMAX: Upper bound of Spherical Harmonic Orders (default=LMAX) + SLR_C30: replaces C30 with SLR values + None: use original values + CSR: use values from CSR (5x5 with 6,1) + GSFC: use values from GSFC (TN-14) + POLE_TIDE: correct GSM data with pole tides following Wahr et al (2015) + ATM: correct data with ECMWF "jump" corrections GAE, GAF and GAG + MODEL_DEG1: least-squares model missing degree 1 coefficients (True/False) + DEG1_GIA: GIA-correction used when calculating 
degree 1 coefficients + MEAN: remove mean of harmonics (True/False) + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + scipy: Scientific Tools for Python (http://www.scipy.org/) + PyYAML: YAML parser and emitter for Python (https://github.com/yaml/pyyaml) + +PROGRAM DEPENDENCIES: + grace_date.py: reads GRACE index file and calculates dates for each month + read_SLR_C20.py: reads C20 files from satellite laser ranging (CSR or GSFC) + read_SLR_C30.py: reads C30 files from satellite laser ranging (CSR or GSFC) + convert_julian.py: Return the calendar date and time given Julian date + read_tellus_geocenter.py: reads PO.DAAC degree 1 files + read_SLR_geocenter.py: reads degree 1 files from Satellite Laser Ranging + read_GRACE_geocenter.py: reads degree 1 files from Sutterley et al. (2019) + read_GRACE_harmonics.py: reads an input GRACE data file and calculates date + +UPDATE HISTORY: + Updated 03/2020 for public release +""" +from __future__ import print_function, division + +import os +import re +import gzip +import numpy as np +from gravity_toolkit.grace_date import grace_date +from gravity_toolkit.read_SLR_C20 import read_SLR_C20 +from gravity_toolkit.read_SLR_C30 import read_SLR_C30 +from gravity_toolkit.read_tellus_geocenter import read_tellus_geocenter +from gravity_toolkit.read_SLR_geocenter import aod_corrected_SLR_geocenter +from read_GRACE_geocenter.read_GRACE_geocenter import read_GRACE_geocenter +from gravity_toolkit.read_GRACE_harmonics import read_GRACE_harmonics + +def grace_input_months(base_dir, PROC, DREL, DSET, LMAX, + start_mon, end_mon, missing, SLR_C20, DEG1, MMAX=None, + SLR_C30='', MODEL_DEG1=False, DEG1_GIA='', ATM=False, + POLE_TIDE=False, MEAN=True): + + #-- Directory of exact GRACE product + grace_dir = os.path.join(base_dir, PROC, DREL, DSET) + + #-- upper bound of spherical harmonic orders (default = LMAX) + MMAX = np.copy(LMAX) if (MMAX is None) else MMAX + + #-- Replacing C2,0 with SLR C2,0 + 
#-- Running function read_SLR_C20.py + #-- reading SLR C2,0 file for given release if specified + if (SLR_C20 == 'CSR'): + if (DREL == 'RL04'): + SLR_file = os.path.join(base_dir,'TN-05_C20_SLR.txt') + elif (DREL == 'RL05'): + SLR_file = os.path.join(base_dir,'TN-07_C20_SLR.txt') + elif (DREL == 'RL06'): + SLR_file = os.path.join(base_dir,'TN-11_C20_SLR.txt') + C20_input = read_SLR_C20(SLR_file) + C20_str = '_wCSR_C20' + elif (SLR_C20 == 'GSFC'): + SLR_file=os.path.join(base_dir,'TN-14_C30_C20_GSFC_SLR.txt') + C20_input = read_SLR_C20(SLR_file) + C20_str = '_wGSFC_C20' + else: + C20_str = '' + + #-- Replacing C3,0 with SLR C3,0 + #-- Running function read_SLR_C30.py + if (SLR_C30 == 'CSR'): + SLR_file=os.path.join(base_dir,'CSR_Monthly_5x5_Gravity_Harmonics.txt') + C30_input = read_SLR_C30(SLR_file) + C30_str = '_wCSR_C30' + elif (SLR_C30 == 'LARES'): + SLR_file=os.path.join(base_dir,'C30_LARES_filtered.txt') + C30_input = read_SLR_C30(SLR_file) + C30_str = '_wLARES_C30' + elif (SLR_C30 == 'GSFC'): + SLR_file=os.path.join(base_dir,'TN-14_C30_C20_GSFC_SLR.txt') + C30_input = read_SLR_C30(SLR_file) + C30_str = '_wGSFC_C30' + else: + C30_str = '' + + #-- Correcting for Degree 1 (geocenter variations) + #-- reading degree 1 file for given release if specified + if (DEG1 == 'Tellus'): + #-- Tellus (PO.DAAC) degree 1 + if DREL in ('RL04','RL05'): + DEG1_file = os.path.join(base_dir,'geocenter', + 'deg1_coef_{0}.txt'.format(DREL)) + JPL = False + else: + DEG1_file = os.path.join(base_dir,'geocenter', + 'TN-13_GEOC_{0}_{1}.txt'.format(PROC,DREL)) + JPL = True + #-- Running function read_tellus_geocenter.py + DEG1_input = read_tellus_geocenter(DEG1_file,JPL=JPL) + DEG1_str = '_w{0}_DEG1'.format(DEG1) + elif (DEG1 == 'SLR'): + #-- CSR Satellite Laser Ranging (SLR) degree 1 + # #-- SLR-derived degree-1 mass variations + # #-- ftp://ftp.csr.utexas.edu/pub/slr/geocenter/ + # DEG1_file=os.path.join(base_dir,'geocenter','GCN_{0}.txt'.format(DREL)) + # 
DEG1_input=aod_corrected_slr_deg1(DEG1_file,DREL,skiprows=16) + + #-- new CF-CM file of degree-1 mass variations + #-- https://cddis.nasa.gov/lw20/docs/2016/papers/14-Ries_paper.pdf + #-- ftp://ftp.csr.utexas.edu/pub/slr/geocenter/GCN_L1_L2_30d_CF-CM.txt + DEG1_file = os.path.join(base_dir,'geocenter','GCN_L1_L2_30d_CF-CM.txt') + DEG1_input = aod_corrected_slr_deg1(DEG1_file,DREL,skiprows=111) + DEG1_str = '_w{0}_DEG1'.format(DEG1) + elif (DEG1 == 'SLF'): + #-- read iterated degree one files from Sutterley and Velicogna (2019) + #-- that includes self-attraction and loading effects + #-- include flag for datasets with different GIA corrections + MODEL = dict(RL04='OMCT', RL05='OMCT', RL06='MPIOM') + args = (PROC,DREL,MODEL[DREL],'SLF_iter',DEG1_GIA) + DEG1_file = os.path.join(base_dir,'geocenter', + '{0}_{1}_{2}_{3}{4}.txt'.format(*args)) + DEG1_input = read_GRACE_geocenter(DEG1_file) + DEG1_str = '_w{0}_DEG1'.format(DEG1) + else:#-- not using a degree 1 file (non-GSM or only using degree 2+) + DEG1_str = '' + + #-- atmospheric flag if correcting ECMWF "jumps" (using GAE/GAF/GAG files) + atm_str = '_wATM' if ATM else '' + #-- pole tide flag if correcting for pole tide drift (Wahr et al. 
2015) + pt_str = '_wPT' if POLE_TIDE else '' + #-- full output string (C20, C30, geocenter and atmospheric flags) + out_str = C20_str + C30_str + DEG1_str + atm_str + pt_str + + #-- Range of months from start_mon to end_mon (end_mon+1 to include end_mon) + #-- Removing the missing months and months not to consider + months = sorted(set(np.arange(start_mon,end_mon+1)) - set(missing)) + #-- number of months to consider in analysis + n_cons = len(months) + + #-- Initializing input data matrices + grace_clm = np.zeros((LMAX+1,MMAX+1,n_cons)) + grace_slm = np.zeros((LMAX+1,MMAX+1,n_cons)) + tdec = np.zeros((n_cons)) + mon = np.zeros((n_cons),dtype=np.int) + + #-- associate GRACE/GRACE-FO files with each GRACE/GRACE-FO month + grace_files=grace_date(base_dir,PROC=PROC,DREL=DREL,DSET=DSET,OUTPUT=False) + + #-- importing data from GRACE/GRACE-FO files + for i,grace_month in enumerate(months): + #-- Effects of Pole tide drift will be compensated if specified + infile = grace_files[grace_month] + Ylms = read_GRACE_harmonics(infile,LMAX,MMAX=MMAX,POLE_TIDE=POLE_TIDE) + grace_clm[:,:,i] = Ylms['clm'][0:LMAX+1,0:MMAX+1] + grace_slm[:,:,i] = Ylms['slm'][0:LMAX+1,0:MMAX+1] + tdec[i] = Ylms['time'] + mon[i] = np.int(grace_month) + + #-- Replace C20 with SLR coefficients + if SLR_C20 in ('CSR','GSFC'): + #-- verify that there are replacement C20 months for specified range + months_test = sorted(set(months) - set(C20_input['month'])) + if months_test: + gm = ','.join('{0:03d}'.format(gm) for gm in months_test) + raise IOError('No Matching C20 Months ({0})'.format(gm)) + #-- replace C20 with SLR coefficients + for i,grace_month in enumerate(months): + count = np.count_nonzero(C20_input['month'] == grace_month) + if (count != 0): + k, = np.nonzero(C20_input['month'] == grace_month) + grace_clm[2,0,i] = C20_input['data'][k] + + #-- Replace C30 with SLR coefficients for single-accelerometer months + if SLR_C30 in ('CSR','GSFC','LARES'): + #-- verify that there are replacement C30 months 
for specified range + months_test = sorted(set(mon[mon > 176]) - set(C30_input['month'])) + if months_test: + gm = ','.join('{0:03d}'.format(gm) for gm in months_test) + raise IOError('No Matching C30 Months ({0})'.format(gm)) + #-- replace C30 with SLR coefficients + for i,grace_month in enumerate(months): + count = np.count_nonzero(C30_input['month'] == grace_month) + if (count != 0) and (grace_month > 176): + k, = np.nonzero(C30_input['month'] == grace_month) + grace_clm[3,0,i] = C30_input['data'][k] + + #-- Use Degree 1 coefficients + #-- Tellus: Tellus Degree 1 (PO.DAAC following Sun et al., 2016) + #-- SLR: CSR Satellite Laser Ranging (SLR) Degree 1 - GRACE AOD + #-- SLF: OMCT/MPIOM coefficients with Sea Level Fingerprint land-water mass + if DEG1 in ('Tellus','SLR','SLF'): + #-- check if modeling degree 1 or if all months are available + if MODEL_DEG1: + #-- least-squares modeling the degree 1 coefficients + #-- fitting annual, semi-annual, linear and quadratic terms + C10_model = regress_model(DEG1_input['time'], DEG1_input['C10'], + tdec, ORDER=2, CYCLES=[0.5,1.0]) + C11_model = regress_model(DEG1_input['time'], DEG1_input['C11'], + tdec, ORDER=2, CYCLES=[0.5,1.0]) + S11_model = regress_model(DEG1_input['time'], DEG1_input['S11'], + tdec, ORDER=2, CYCLES=[0.5,1.0]) + else: + #-- check that all months are available for a given geocenter + months_test = sorted(set(months) - set(DEG1_input['month'])) + if months_test: + gm = ','.join('{0:03d}'.format(gm) for gm in months_test) + raise IOError('No Matching Geocenter Months ({0})'.format(gm)) + #-- for each considered date + for i,grace_month in enumerate(months): + k, = np.nonzero(DEG1_input['month'] == grace_month) + count = np.count_nonzero(DEG1_input['month'] == grace_month) + #-- Degree 1 is missing for particular month + if (count == 0) and MODEL_DEG1: + #-- using least-squares modeled coefficients from + #-- lsq_model_degree_one.py + grace_clm[1,0,i] = C10_model[i] + grace_clm[1,1,i] = C11_model[i] + 
grace_slm[1,1,i] = S11_model[i] + else:#-- using coefficients from data file + grace_clm[1,0,i] = DEG1_input['C10'][k] + grace_clm[1,1,i] = DEG1_input['C11'][k] + grace_slm[1,1,i] = DEG1_input['S11'][k] + + #-- read and add/remove the GAE and GAF atmospheric correction coefficients + if ATM: + #-- read ECMWF correction files from Fagiolini et al. (2015) + atm_corr = read_ecmwf_corrections(base_dir,LMAX,months,MMAX=MMAX) + #-- Removing GAE/GAF/GAG from RL05 GSM Products + if (DSET == 'GSM'): + for m in range(0,MMAX+1):#-- MMAX+1 to include l + for l in range(m,LMAX+1):#-- LMAX+1 to include LMAX + grace_clm[l,m,:] -= atm_corr['clm'][l,m,:] + grace_slm[l,m,:] -= atm_corr['slm'][l,m,:] + #-- Adding GAE/GAF/GAG to RL05 Atmospheric Products (GAA,GAC) + elif DSET in ('GAC','GAA'): + for m in range(0,MMAX+1):#-- MMAX+1 to include l + for l in range(m,LMAX+1):#-- LMAX+1 to include LMAX + grace_clm[l,m,:] += atm_corr['clm'][l,m,:] + grace_slm[l,m,:] += atm_corr['slm'][l,m,:] + + #-- Computing the mean gravitational field + mean_Ylms = {} + mean_Ylms['clm'] = np.zeros((LMAX+1,MMAX+1)) + mean_Ylms['slm'] = np.zeros((LMAX+1,MMAX+1)) + #-- Computes the mean of each spectral degree and order for + #-- the imported months + for m in range(0,MMAX+1):#-- MMAX+1 to include l + for l in range(m,LMAX+1):#-- LMAX+1 to include LMAX + #-- calculate static field using mean of input months + mean_Ylms['clm'][l,m] = np.mean(grace_clm[l,m,:]) + mean_Ylms['slm'][l,m] = np.mean(grace_slm[l,m,:]) + #-- Removing the mean from the Stokes coefficients + #-- removes the static component of gravity + if MEAN: + #-- Calculating the time-variable gravity field + grace_clm[l,m,:] -= mean_Ylms['clm'][l,m] + grace_slm[l,m,:] -= mean_Ylms['slm'][l,m] + + return {'clm':grace_clm, 'slm':grace_slm, 'time':tdec, 'month':mon, + 'mean':mean_Ylms, 'title':out_str, 'directory':grace_dir} + +#-- PURPOSE: read atmospheric jump corrections from Fagiolini et al. 
(2015) +def read_ecmwf_corrections(base_dir, LMAX, months, MMAX=None): + #-- correction files + corr_file = {} + corr_file['GAE'] = 'TN-08_GAE-2_2006032-2010031_0000_EIGEN_G---_0005.gz' + corr_file['GAF'] = 'TN-09_GAF-2_2010032-2015131_0000_EIGEN_G---_0005.gz' + corr_file['GAG'] = 'TN-10_GAG-2_2015132-2099001_0000_EIGEN_G---_0005.gz' + #-- atmospheric correction coefficients + atm_corr_clm = {} + atm_corr_slm = {} + #-- number of months to consider in analysis + n_cons = len(months) + #-- set maximum order if not equal to maximum degree + MMAX = LMAX if (MMAX is None) else MMAX + #-- iterate through python dictionary keys (GAE, GAF, GAG) + for key, val in corr_file.items(): + #-- allocate for clm and slm of atmospheric corrections + atm_corr_clm[key] = np.zeros((LMAX+1,MMAX+1)) + atm_corr_slm[key] = np.zeros((LMAX+1,MMAX+1)) + #-- GRACE correction files are compressed gz files + with gzip.open(os.path.join(base_dir, val),'rb') as f: + file_contents = f.read().decode('ISO-8859-1').splitlines() + #-- for each line in the GRACE correction file + for line in file_contents: + #-- find if line starts with GRCOF2 + if bool(re.match('GRCOF2',line)): + #-- split the line into individual components + line_contents = line.split() + #-- degree and order for the line + l1 = np.int(line_contents[1]) + m1 = np.int(line_contents[2]) + #-- if degree and order are below the truncation limits + if ((l1 <= LMAX) and (m1 <= MMAX)): + atm_corr_clm[key][l1,m1] = np.float(line_contents[3]) + atm_corr_slm[key][l1,m1] = np.float(line_contents[4]) + + #-- create output atmospheric corrections to be removed/added to data + atm_corr = {} + atm_corr['clm'] = np.zeros((LMAX+1,LMAX+1,n_cons)) + atm_corr['slm'] = np.zeros((LMAX+1,LMAX+1,n_cons)) + #-- for each considered date + for i,grace_month in enumerate(months): + #-- remove correction based on dates + if (grace_month >= 50) & (grace_month <= 97): + atm_corr['clm'][:,:,i] = atm_corr_clm['GAE'][:,:] + atm_corr['slm'][:,:,i] = 
atm_corr_slm['GAE'][:,:] + elif (grace_month >= 98) & (grace_month <= 161): + atm_corr['clm'][:,:,i] = atm_corr_clm['GAF'][:,:] + atm_corr['slm'][:,:,i] = atm_corr_slm['GAF'][:,:] + elif (grace_month > 161): + atm_corr['clm'][:,:,i] = atm_corr_clm['GAG'][:,:] + atm_corr['slm'][:,:,i] = atm_corr_slm['GAG'][:,:] + + #-- return the atmospheric corrections + return atm_corr + +#-- PURPOSE: calculate a regression model for extrapolating values +def regress_model(t_in, d_in, t_out, ORDER=2, CYCLES=None, RELATIVE=None): + + #-- remove singleton dimensions + t_in = np.squeeze(t_in) + d_in = np.squeeze(d_in) + t_out = np.squeeze(t_out) + #-- check dimensions of output + if (np.ndim(t_out) == 0): + t_out = np.array([t_out]) + + #-- CREATING DESIGN MATRIX FOR REGRESSION + DMAT = [] + MMAT = [] + #-- add polynomial orders (0=constant, 1=linear, 2=quadratic) + for o in range(ORDER+1): + DMAT.append((t_in-RELATIVE)**o) + MMAT.append((t_out-RELATIVE)**o) + #-- add cyclical terms (0.5=semi-annual, 1=annual) + for c in CYCLES: + DMAT.append(np.sin(2.0*np.pi*t_in/np.float(c))) + DMAT.append(np.cos(2.0*np.pi*t_in/np.float(c))) + MMAT.append(np.sin(2.0*np.pi*t_out/np.float(c))) + MMAT.append(np.cos(2.0*np.pi*t_out/np.float(c))) + + #-- Calculating Least-Squares Coefficients + #-- Standard Least-Squares fitting (the [0] denotes coefficients output) + beta_mat = np.linalg.lstsq(np.transpose(DMAT), d_in, rcond=-1)[0] + + #-- return modeled time-series + return np.dot(np.transpose(MMAT),beta_mat) diff --git a/gravity_toolkit/hdf5_read.py b/gravity_toolkit/hdf5_read.py new file mode 100755 index 00000000..ef4bb702 --- /dev/null +++ b/gravity_toolkit/hdf5_read.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python +u""" +hdf5_read.py +Written by Tyler Sutterley (10/2019) + +Reads spatial data from HDF5 files + +CALLING SEQUENCE: + file_inp = hdf5_read(filename, DATE=False, VERBOSE=False) + +INPUTS: + filename: HDF5 file to be opened and read + +OUTPUTS: + data: z value of dataset + lon: longitudinal 
array + lat: latitudinal array + time: time value of dataset (if specified by DATE) + attributes: HDF5 attributes (for variables and title) + +OPTIONS: + DATE: HDF5 file has date information + MISSING: HDF5 dataset has missing values + VERBOSE: will print to screen the HDF5 structure parameters + VARNAME: z variable name in HDF5 file + LONNAME: longitude variable name in HDF5 file + LATNAME: latitude variable name in HDF5 file + TIMENAME: time variable name in HDF5 file + ATTRIBUTES: HDF5 variables contain attribute parameters + TITLE: HDF5 file contains description attribute parameter + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + h5py: Python interface for Hierarchal Data Format 5 (HDF5) + (http://h5py.org) + +UPDATE HISTORY: + Updated 10/2019: changing Y/N flags to True/False + Updated 03/2019: print variables keys in list for Python3 compatibility + Updated 06/2018: extract fill_value and title without variable attributes + Updated 06/2016: using __future__ print function + Updated 05/2016: will only transpose if data is 2 dimensional (not 3) + added parameter to read the TITLE variable + Updated 05/2015: added parameter TIMENAME for time variable name + Updated 11/2014: got back to writing this + in working condition with updated attributes as in netcdf equivalent + Updated 12/2013: converted ncdf code to HDF5 code (alternative data type) + Updated 07/2013: switched from Scientific Python to Scipy + Updated 05/2013: converted to Python + Updated 03/2013: converted to Octave + Updated 01/2013: adding time variable + Written 07/2012 for GMT and for archiving datasets + Motivation for archival: netCDF files are much smaller than ascii + files and more portable/transferable than IDL .sav files + (possible to connect with geostatistics packages in R?) 
+""" +from __future__ import print_function + +import h5py +import numpy as np + +def hdf5_read(filename, DATE=False, MISSING=False, VERBOSE=False, VARNAME='z', + LONNAME='lon', LATNAME='lat', TIMENAME='time', ATTRIBUTES=True, TITLE=True): + #-- Open the HDF5 file for reading + fileID = h5py.File(filename, 'r') + #-- allocate python dictionary for output variables + dinput = {} + + #-- Output HDF5 file information + if VERBOSE: + print(fileID.filename) + print(list(fileID.keys())) + + #-- Getting the data from each HDF5 variable + dinput['lon'] = fileID[LONNAME][:] + dinput['lat'] = fileID[LATNAME][:] + dinput['data'] = fileID[VARNAME][:] + if DATE: + dinput['time'] = fileID[TIMENAME][:] + #-- switching data array to lon/lat if lat/lon + sz = dinput['data'].shape + if (np.ndim(dinput['data']) == 2) and (len(dinput['lat']) == sz[0]): + dinput['data'] = np.transpose(dinput['data']) + + #-- Getting attributes of included variables + dinput['attributes'] = {} + if ATTRIBUTES: + dinput['attributes']['lon'] = [fileID[LONNAME].attrs['units'], \ + fileID[LONNAME].attrs['long_name']] + dinput['attributes']['lat'] = [fileID[LATNAME].attrs['units'], \ + fileID[LATNAME].attrs['long_name']] + dinput['attributes']['data'] = [fileID[VARNAME].attrs['units'], \ + fileID[VARNAME].attrs['long_name']] + #-- time attributes + if DATE: + dinput['attributes']['time'] = [fileID['time'].attrs['units'], \ + fileID['time'].attrs['long_name']] + #-- missing data fill value + if MISSING: + dinput['attributes']['_FillValue'] = fileID[VARNAME].attrs['_FillValue'] + #-- Global attribute description + if TITLE: + dinput['attributes']['title'] = fileID.attrs['description'] + + #-- Closing the HDF5 file + fileID.close() + return dinput diff --git a/gravity_toolkit/hdf5_read_stokes.py b/gravity_toolkit/hdf5_read_stokes.py new file mode 100755 index 00000000..802b9807 --- /dev/null +++ b/gravity_toolkit/hdf5_read_stokes.py @@ -0,0 +1,124 @@ +#!/usr/bin/env python +u""" +hdf5_read_stokes.py +Written by 
Tyler Sutterley (10/2019) + +Reads spherical harmonic data from HDF5 files + +CALLING SEQUENCE: + file_inp = hdf5_read_stokes(filename, DATE=True, VERBOSE=False) + +INPUTS: + filename: HDF5 file to be opened and read + +OUTPUTS: + clm: Cosine Stokes Coefficient + slm: Sine Stokes Coefficient + l: degree (l) + m: order (m) + time: time of measurement (if specified by DATE) + month: GRACE/GRACE-FO month (if specified by DATE) + attributes: HDF5 attributes for: + spherical harmonics (clm,slm), variables (l,m,time,month), and title + +OPTIONS: + DATE: HDF5 file has date information + VERBOSE: will print to screen the HDF5 structure parameters + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + h5py: Python interface for Hierarchal Data Format 5 (HDF5) + (http://h5py.org) + +UPDATE HISTORY: + Updated 10/2019: changing Y/N flags to True/False. check if time is array + Updated 03/2019: print variables keys in list for Python3 compatibility + Updated 06/2016: using __future__ print function + Updated 02/2016: capitalized LMAX and MMAX variables to match other programs + Updated 05/2015: minor change for MMAX != LMAX + Updated 11/2014: got back to writing this + in working condition with updated attributes as in netcdf equivalent + Updated 12/2013: converted ncdf code to HDF5 code (alternative data type) + Updated 07/2013: switched from Scientific Python to Scipy + Updated 03/2013: switched I/O to column arrays instead of matrix + Written 07/2012 +""" +from __future__ import print_function + +import h5py +import numpy as np + +def hdf5_read_stokes(filename, DATE=True, VERBOSE=False): + #-- Open the HDF5 file for reading + fileID = h5py.File(filename, 'r') + #-- allocate python dictionary for output variables + dinput = {} + + #-- Output HDF5 file information + if VERBOSE: + print(fileID.filename) + print(list(fileID.keys())) + + #-- Getting the data from each HDF5 variable + #-- converting HDF5 objects into numpy arrays + ll = 
np.array(fileID['l'][:]) + mm = np.array(fileID['m'][:]) + #-- Spherical harmonic files have date information + if DATE: + dinput['time'] = fileID['time'][:].copy() + dinput['month'] = fileID['month'][:].copy() + n_time = len(dinput['time']) + else: + n_time = 0 + + #-- Restructuring input array back into matrix format + LMAX = np.max(ll) + MMAX = np.max(mm) + #-- LMAX+1 to include LMAX (LMAX+1 elements) + dinput['l'] = np.arange(0,LMAX+1) + dinput['m'] = np.arange(0,MMAX+1) + #-- convert input clm/slm to numpy arrays + CLM = np.array(fileID['clm'][:]) + SLM = np.array(fileID['slm'][:]) + #-- size of the input grids + n_harm, = fileID['l'].shape + #-- import spherical harmonic data + if (DATE and (n_time > 1)): + #-- contains multiple dates + dinput['clm'] = np.zeros((LMAX+1,MMAX+1,n_time)) + dinput['slm'] = np.zeros((LMAX+1,MMAX+1,n_time)) + for lm in range(n_harm): + dinput['clm'][ll[lm],mm[lm],:] = CLM[lm,:] + dinput['slm'][ll[lm],mm[lm],:] = SLM[lm,:] + else: + #-- contains either no dates or a single date + dinput['clm'] = np.zeros((LMAX+1,MMAX+1)) + dinput['slm'] = np.zeros((LMAX+1,MMAX+1)) + for lm in range(n_harm): + dinput['clm'][ll[lm],mm[lm]] = CLM[lm] + dinput['slm'][ll[lm],mm[lm]] = SLM[lm] + + #-- Getting attributes of clm/slm and included variables + dinput['attributes'] = {} + dinput['attributes']['l'] = [fileID['l'].attrs['units'], \ + fileID['l'].attrs['long_name']] + dinput['attributes']['m'] = [fileID['m'].attrs['units'], \ + fileID['m'].attrs['long_name']] + dinput['attributes']['clm'] = [fileID['clm'].attrs['units'], \ + fileID['clm'].attrs['long_name']] + dinput['attributes']['slm'] = [fileID['slm'].attrs['units'], \ + fileID['slm'].attrs['long_name']] + #-- time attributes + if DATE: + dinput['attributes']['time'] = [fileID['time'].attrs['units'], \ + fileID['time'].attrs['long_name']] + dinput['attributes']['month'] = [fileID['month'].attrs['units'], \ + fileID['month'].attrs['long_name']] + #-- Global attribute description + 
dinput['attributes']['title'] = fileID.attrs['description'] + + #-- Closing the HDF5 file + fileID.close() + + #-- return the output variable + return dinput diff --git a/gravity_toolkit/hdf5_stokes.py b/gravity_toolkit/hdf5_stokes.py new file mode 100755 index 00000000..171424e6 --- /dev/null +++ b/gravity_toolkit/hdf5_stokes.py @@ -0,0 +1,174 @@ +#!/usr/bin/env python +u""" +hdf5_stokes.py +Written by Tyler Sutterley (10/2019) + +Writes spherical harmonic coefficients to HDF5 files + +CALLING SEQUENCE: + hdf5_stokes(clm, slm, linp, minp, tinp, month, FILENAME=output_HDF5_file) + +INPUTS: + clm: Cosine Stokes Coefficient + slm: Sine Stokes Coefficient + linp: degree (l) + minp: order (m) + tinp: date of measurement + month: GRACE/GRACE-FO month + +OPTIONS: + FILENAME: output filename HDF5 + UNITS: spherical harmonic units + TIME_UNITS: time variable units + TIME_LONGNAME: time variable description + MONTHS_NAME: name of months variable within HDF5 file + MONTHS_UNITS: months variable units + MONTHS_LONGNAME: months variable description + TITLE: title attribute of dataset + CLOBBER: will overwrite an existing HDF5 file + VERBOSE: will print to screen the HDF5 structure parameters + DATE: harmonics have date information + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + h5py: Python interface for Hierarchal Data Format 5 (HDF5) + (http://h5py.org) + +UPDATE HISTORY: + Updated 10/2019: changing Y/N flags to True/False + Updated 08/2019: don't include time (HH:MM:SS) in creation date + Updated 07/2019: added creation date as a global attribute + Updated 03/2019: print variables keys in list for Python3 compatibility + Updated 12/2018: using python dictionaries to improve readability + Updated 10/2018: using future division for python3 Compatibility + Updated 02/2017: added MONTHS_UNITS, MONTHS_LONGNAME, MONTHS_NAME parameters + aligned TIME_LONGNAME and TIME_UNITS with attributes + can output a HDF5 file with multiple dates 
similar to the netcdf program + Updated 06/2016: using __future__ print function + Updated 03/2016: direct calculation of number of harmonics n_harm + Updated 05/2015: minor change for MMAX != LMAX + Updated 11/2014: got back to writing this + in working condition with updated attributes as in netcdf equivalent + Updated 12/2013: converted ncdf code to HDF5 code (alternative data type) + Updated 07/2013: switched from Scientific Python to Scipy + Updated 05/2013 made UNITS an option in case converting the units to + mass harmonics or other harmonic variant + Updated 03/2013: added units to clm and slm as 'Geodesy Normalization' + switched I/O to column arrays for smaller file sizes and compatibility + between languages + made date an option for datasets that have no date + Updated 01/2013 to add time and GRACE/GRACE-FO month number + Written 07/2012 +""" +from __future__ import print_function, division + +import time +import h5py +import numpy as np + +def hdf5_stokes(clm1, slm1, linp, minp, tinp, month, FILENAME=None, + UNITS='Geodesy_Normalization', TIME_UNITS=None, TIME_LONGNAME=None, + MONTHS_NAME='month', MONTHS_UNITS='number', MONTHS_LONGNAME='GRACE_month', + TITLE=None, DATE=True, CLOBBER=True, VERBOSE=False): + + #-- setting HDF5 clobber attribute + if CLOBBER: + clobber = 'w' + else: + clobber = 'w-' + + #-- opening HDF5 file for writing + fileID = h5py.File(FILENAME, clobber) + + #-- Maximum spherical harmonic degree (LMAX) and order (MMAX) + LMAX = np.max(linp) + MMAX = np.max(minp) + #-- Calculating the number of cos and sin harmonics up to LMAX + #-- taking into account MMAX (if MMAX == LMAX then LMAX-MMAX=0) + n_harm = (LMAX**2 + 3*LMAX - (LMAX-MMAX)**2 - (LMAX-MMAX))//2 + 1 + + #-- Restructuring output matrix to array format + #-- will reduce matrix size and insure compatibility between platforms + if DATE: + if (np.ndim(tinp) == 0): + n_time = 1 + clm = np.zeros((n_harm)) + slm = np.zeros((n_harm)) + else: + n_time = len(tinp) + clm = 
np.zeros((n_harm,n_time)) + slm = np.zeros((n_harm,n_time)) + else: + n_time = 0 + clm = np.zeros((n_harm)) + slm = np.zeros((n_harm)) + + #-- restructured degree and order + lout = np.zeros((n_harm,), dtype=np.int32) + mout = np.zeros((n_harm,), dtype=np.int32) + #-- create counter variable lm + lm = 0 + for m in range(0,MMAX+1):#-- MMAX+1 to include MMAX + for l in range(m,LMAX+1):#-- LMAX+1 to include LMAX + lout[lm] = np.int(l) + mout[lm] = np.int(m) + if (DATE and (n_time > 1)): + clm[lm,:] = clm1[l,m,:] + slm[lm,:] = slm1[l,m,:] + else: + clm[lm] = clm1[l,m] + slm[lm] = slm1[l,m] + #-- add 1 to lm counter variable + lm += 1 + + #-- Defining the HDF5 dataset variables + h5 = {} + h5['l'] = fileID.create_dataset('l', (n_harm,), \ + data=lout, dtype=np.int, compression='gzip') + h5['m'] = fileID.create_dataset('m', (n_harm,), \ + data=mout, dtype=np.int, compression='gzip') + if DATE: + h5['time'] = fileID.create_dataset('time', (n_time,), \ + data=tinp, dtype=np.float, compression='gzip') + h5['month'] = fileID.create_dataset(MONTHS_NAME, (n_time,), \ + data=month, dtype=np.int, compression='gzip') + #-- if more than 1 date in file + if (n_time > 1): + h5['clm'] = fileID.create_dataset('clm', (n_harm,n_time,), \ + data=clm, dtype=np.float, compression='gzip') + h5['slm'] = fileID.create_dataset('slm', (n_harm,n_time,), \ + data=slm, dtype=np.float, compression='gzip') + else: + h5['clm'] = fileID.create_dataset('clm', (n_harm,), \ + data=clm, dtype=np.float, compression='gzip') + h5['slm'] = fileID.create_dataset('slm', (n_harm,), \ + data=slm, dtype=np.float, compression='gzip') + + #-- filling HDF5 dataset attributes + #-- Defining attributes for degree and order + h5['l'].attrs['long_name'] = 'spherical_harmonic_degree'#-- degree long name + h5['l'].attrs['units'] = 'Wavenumber'#-- SH degree units + h5['m'].attrs['long_name'] = 'spherical_harmonic_order'#-- order long name + h5['m'].attrs['units'] = 'Wavenumber'#-- SH order units + #-- Defining attributes 
for dataset + h5['clm'].attrs['long_name'] = 'cosine_spherical_harmonics' + h5['clm'].attrs['units'] = UNITS + h5['slm'].attrs['long_name'] = 'sine_spherical_harmonics' + h5['slm'].attrs['units'] = UNITS + if DATE: + #-- Defining attributes for date and month (or integer date) + h5['time'].attrs['long_name'] = TIME_LONGNAME + h5['time'].attrs['units'] = TIME_UNITS + h5['month'].attrs['long_name'] = MONTHS_LONGNAME + h5['month'].attrs['units'] = MONTHS_UNITS + #-- description of file + fileID.attrs['description'] = TITLE + fileID.attrs['date_created'] = time.strftime('%Y-%m-%d',time.localtime()) + + #-- Output HDF5 structure information + if VERBOSE: + print(FILENAME) + print(list(fileID.keys())) + + #-- Closing the HDF5 file + fileID.close() diff --git a/gravity_toolkit/hdf5_write.py b/gravity_toolkit/hdf5_write.py new file mode 100755 index 00000000..73bbef17 --- /dev/null +++ b/gravity_toolkit/hdf5_write.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python +u""" +hdf5_write.py +Written by Tyler Sutterley (09/2019) + +Writes spatial data to HDF5 files + +CALLING SEQUENCE: + hdf5_write(data, lon, lat, tim, FILENAME=output_HDF5_file) + +INPUTS: + data: z data + lon: longitude array + lat: latitude array + tim: time array + +OPTIONS: + FILENAME: output filename HDF5 + VARNAME: z variable name in HDF5 file + LONNAME: longitude variable name in HDF5 file + LATNAME: latitude variable name in HDF5 file + UNITS: z variable units + LONGNAME: z variable description + FILL_VALUE: missing value for z variable + TIME_UNITS: time variable units + TIME_LONGNAME: time variable description + TITLE: title attribute of dataset + CLOBBER: will overwrite an existing HDF5 file + VERBOSE: will print to screen the HDF5 structure parameters + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + h5py: Python interface for Hierarchal Data Format 5 (HDF5) + (http://h5py.org) + +UPDATE HISTORY: + Updated 10/2019: changing Y/N flags to True/False + Updated 
09/2019 for public release + Updated 08/2019: don't include time (HH:MM:SS) in creation date + Updated 07/2019: added creation date as a global attribute + Updated 03/2019: print variables keys in list for Python3 compatibility + Updated 08/2018: use n_time variable for output HDF5 dimensions + Updated 03/2018: added option TIMENAME to specify the variable name of time + Updated 02/2017: TIME_LONGNAME and TIME_UNITS with attributes, + updated TIME_LONGNAME to Date_in_Decimal_Years + Updated 06/2016: using __future__ print function + Updated 05/2016: output data types same as input data types + Updated 11/2014: got back to writing this + in working condition with updated attributes as in netcdf equivalent + Updated 12/2013: converted ncdf code to HDF5 code (alternative data type) + Updated 07/2013: switched from Scientific Python to Scipy + Updated 01/2013: adding time as a variable + Updated 10/2012: changed from variable names x and y to lon and lat. + Written 07/2012 +""" +from __future__ import print_function + +import time +import h5py +import numpy as np + +def hdf5_write(data, lon, lat, tim, FILENAME=None, VARNAME='z', LONNAME='lon', + LATNAME='lat', TIMENAME='time', UNITS=None, LONGNAME=None, FILL_VALUE=None, + TIME_UNITS=None, TIME_LONGNAME=None, TITLE=None, CLOBBER=True, + VERBOSE=False): + + #-- setting HDF5 clobber attribute + if CLOBBER in ('Y','y'): + clobber = 'w' + else: + clobber = 'w-' + + #-- Dimensions of time parameters + n_time = 1 if (np.ndim(tim) == 0) else len(tim) + + #-- opening HDF5 file for writing + fileID = h5py.File(FILENAME, clobber) + #-- Defining the HDF5 dataset variables + h5 = {} + h5[LONNAME] = fileID.create_dataset(LONNAME, lon.shape, data=lon, + dtype=lon.dtype, compression='gzip') + h5[LATNAME] = fileID.create_dataset(LATNAME, lat.shape, data=lat, + dtype=lat.dtype, compression='gzip') + h5[TIMENAME] = fileID.create_dataset(TIMENAME, (n_time,), data=tim, + dtype=np.float, compression='gzip') + h5[VARNAME] = 
fileID.create_dataset(VARNAME, data.shape, data=data, + dtype=data.dtype, fillvalue=FILL_VALUE, compression='gzip') + #-- add dimensions + h5[VARNAME].dims[0].label=LATNAME + h5[VARNAME].dims[0].attach_scale(h5[LATNAME]) + h5[VARNAME].dims[1].label=LONNAME + #-- if more than 1 date in file + if (n_time > 1): + h5[VARNAME].dims[2].label=TIMENAME + h5[VARNAME].dims[2].attach_scale(h5[TIMENAME]) + + #-- filling HDF5 dataset attributes + #-- Defining attributes for longitude and latitude + h5[LONNAME].attrs['long_name'] = 'longitude' + h5[LONNAME].attrs['units'] = 'degrees_east' + h5[LATNAME].attrs['long_name'] = 'latitude' + h5[LATNAME].attrs['units'] = 'degrees_north' + #-- Defining attributes for dataset + h5[VARNAME].attrs['long_name'] = LONGNAME + h5[VARNAME].attrs['units'] = UNITS + #-- Dataset contains missing values + if (FILL_VALUE is not None): + h5[VARNAME].attrs['_FillValue'] = FILL_VALUE + #-- Defining attributes for date + h5[TIMENAME].attrs['long_name'] = TIME_LONGNAME + h5[TIMENAME].attrs['units'] = TIME_UNITS + #-- description of file + fileID.attrs['description'] = TITLE + fileID.attrs['date_created'] = time.strftime('%Y-%m-%d',time.localtime()) + + #-- Output HDF5 structure information + if VERBOSE in ('Y','y'): + print(FILENAME) + print(list(fileID.keys())) + + #-- Closing the HDF5 file + fileID.close() diff --git a/gravity_toolkit/ncdf_read.py b/gravity_toolkit/ncdf_read.py new file mode 100755 index 00000000..182b6c01 --- /dev/null +++ b/gravity_toolkit/ncdf_read.py @@ -0,0 +1,122 @@ +#!/usr/bin/env python +u""" +ncdf_read.py +Written by Tyler Sutterley (10/2019) + +Reads spatial data from COARDS-compliant netCDF4 files + +CALLING SEQUENCE: + file_inp = ncdf_read(filename, DATE=False, VERBOSE=False) + +INPUTS: + filename: netCDF4 file to be opened and read + +OUTPUTS: + data: z value of dataset + lon: longitudinal array + lat: latitudinal array + time: time value of dataset (if specified by DATE) + attributes: netCDF4 attributes (for variables and 
title) + +OPTIONS: + DATE: netCDF4 file has date information + MISSING: netCDF4 dataset has missing values + VERBOSE: will print to screen the netCDF4 structure parameters + VARNAME: z variable name in netCDF4 file + LONNAME: longitude variable name in netCDF4 file + LATNAME: latitude variable name in netCDF4 file + TIMENAME: time variable name in netCDF4 file + ATTRIBUTES: netCDF4 variables contain attribute parameters + TITLE: netCDF4 file contains title attribute parameter + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + netCDF4: Python interface to the netCDF C library + (https://unidata.github.io/netcdf4-python/netCDF4/index.html) + +UPDATE HISTORY: + Updated 10/2019: changing Y/N flags to True/False + Updated 03/2019: print variables keys in list for Python3 compatibility + Updated 06/2018: extract fill_value and title without variable attributes + Updated 07-09/2016: using netCDF4-python + Updated 06/2016: using __future__ print, output filename if VERBOSE + Updated 05/2016: will only transpose if data is 2 dimensional (not 3) + added parameter to read the TITLE variable + Updated 07/2015: updated read title for different cases with regex + Updated 05/2015: added parameter TIMENAME for time variable name + Updated 04/2015: fix attribute outputs (forgot to copy to new dictionary) + Updated 02/2015: added copy for variable outputs + fixes new error flag from mmap=True + Updated 11/2014: new parameters for variable names and attributes + all variables in a single python dictionary + Updated 05/2014: new parameter for missing value + new outputs: all attributes, fill value + added try for TITLE attribute + converting time to numpy array + Updated 02/2014: minor update to if statements + Updated 07/2013: switched from Scientific Python to Scipy + Updated 01/2013: adding time variable + Written 07/2012 +""" +from __future__ import print_function + +import netCDF4 +import numpy as np +import re + +def 
ncdf_read(filename, DATE=False, MISSING=False, VERBOSE=False, VARNAME='z', + LONNAME='lon', LATNAME='lat', TIMENAME='time', ATTRIBUTES=True, TITLE=True): + #-- Open the NetCDF file for reading + fileID = netCDF4.Dataset(filename, 'r') + #-- create python dictionary for output variables + dinput = {} + + #-- Output NetCDF file information + if VERBOSE: + print(fileID.filepath()) + print(list(fileID.variables.keys())) + + #-- netcdf variable names + NAMES = {} + NAMES['lon'] = LONNAME + NAMES['lat'] = LATNAME + NAMES['data'] = VARNAME + if DATE: + NAMES['time'] = TIMENAME + #-- for each variable + for key in NAMES.keys(): + #-- Getting the data from each NetCDF variable + #-- filling numpy arrays with NetCDF objects + nc_variable = fileID.variables[NAMES[key]][:].copy() + dinput[key] = np.asarray(nc_variable).squeeze() + + #-- switching data array to lon/lat if lat/lon + sz = dinput['data'].shape + if (np.ndim(dinput['data']) == 2) and (len(dinput['lat']) == sz[0]): + dinput['data'] = np.transpose(dinput['data']) + + #-- getting attributes of included variables + dinput['attributes'] = {} + if ATTRIBUTES: + #-- create python dictionary for variable attributes + attributes = {} + #-- for each variable + #-- get attributes for the included variables + for key in NAMES.keys(): + attributes[key] = [fileID.variables[NAMES[key]].units, \ + fileID.variables[NAMES[key]].long_name] + #-- put attributes in output python dictionary + dinput['attributes'] = attributes + #-- missing data fill value + if MISSING: + dinput['attributes']['_FillValue']=fileID.variables[VARNAME]._FillValue + #-- Global attribute (title of dataset) + if TITLE: + rx = re.compile('TITLE',re.IGNORECASE) + title, = [st for st in dir(fileID) if rx.match(st)] + dinput['attributes']['title'] = getattr(fileID, title) + + #-- Closing the NetCDF file + fileID.close() + #-- return the output variable + return dinput diff --git a/gravity_toolkit/ncdf_read_stokes.py b/gravity_toolkit/ncdf_read_stokes.py new file 
mode 100755 index 00000000..85b5b73c --- /dev/null +++ b/gravity_toolkit/ncdf_read_stokes.py @@ -0,0 +1,130 @@ +#!/usr/bin/env python +u""" +ncdf_read_stokes.py +Written by Tyler Sutterley (10/2019) + +Reads spherical harmonic data from netCDF4 files + +CALLING SEQUENCE: + file_inp = ncdf_read_stokes(filename, DATE=True, VERBOSE=False) + +INPUTS: + filename: netCDF4 file to be opened and read + +OUTPUTS: + clm: Cosine Stokes Coefficient + slm: Sine Stokes Coefficient + l: degree (l) + m: order (m) + time: time of measurement (if specified by DATE) + month: GRACE/GRACE-FO month (if specified by DATE) + attributes: netCDF4 attributes for: + spherical harmonics (clm,slm), variables (l,m,time,month), and title + +OPTIONS: + DATE: netCDF4 file has date information + VERBOSE: will print to screen the netCDF4 structure parameters + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + netCDF4: Python interface to the netCDF C library + (https://unidata.github.io/netcdf4-python/netCDF4/index.html) + +UPDATE HISTORY: + Updated 10/2019: changing Y/N flags to True/False + Updated 03/2019: print variables keys in list for Python3 compatibility + Updated 09/2016: slicing of clm and slm on numpy arrays not netcdf variables + Updated 07/2016: using netCDF4-python + Updated 06/2016: using __future__ print, output filename if VERBOSE + Updated 02/2016: capitalized LMAX and MMAX variables to match other programs + Updated 07/2015: updated read title for different cases with regex + Updated 06/2015: can input a single netcdf with multiple dates + Updated 05/2015: minor change for MMAX != LMAX + Updated 02/2015: simplified attributes with for loop + Updated 01/2015: added copy for variable outputs + fixes new error flag from mmap=True + Updated 11/2014: all variables in a single python dictionary + Updated 05/2014: converted time and month to numpy arrays + Updated 05/2014: output all attributes under single variable + added try for TITLE 
attribute + Updated 02/2014: minor update to if statements + Updated 07/2013: switched from Scientific Python to Scipy + Updated 07/2013: switched from Scientific Python to Scipy + Updated 03/2013: switched I/O to column arrays instead of matrix + Written 07/2012 +""" +from __future__ import print_function + +import netCDF4 +import numpy as np +import re + +def ncdf_read_stokes(filename, DATE=True, VERBOSE=False): + #-- Open the NetCDF file for reading + fileID = netCDF4.Dataset(filename, 'r') + #-- create python dictionary for output variables + dinput = {} + #-- create python dictionary for variable attributes + attributes = {} + + #-- Output NetCDF file information + if VERBOSE: + print(fileID.filepath()) + print(list(fileID.variables.keys())) + + #-- Getting the data from each NetCDF variable + #-- converting NetCDF objects into numpy arrays + ll = fileID.variables['l'][:].copy() + mm = fileID.variables['m'][:].copy() + clm = fileID.variables['clm'][:].copy() + slm = fileID.variables['slm'][:].copy() + #-- save date variables if specified + if DATE: + dinput['time'] = fileID.variables['time'][:].copy() + dinput['month'] = fileID.variables['month'][:].copy() + n_time = len(dinput['time']) + else: + n_time = 0 + + #-- Restructuring input array back into matrix format + LMAX = np.max(ll) + MMAX = np.max(mm) + #-- output spherical harmonic degree and order + #-- LMAX+1 to include LMAX (LMAX+1 elements) + dinput['l'] = np.arange(0,LMAX+1) + dinput['m'] = np.arange(0,MMAX+1) + #-- number of harmonics + n_harm, = fileID.variables['l'].shape + #-- import spherical harmonic data + if (DATE and (n_time > 1)): + #-- contains multiple dates + dinput['clm'] = np.zeros((LMAX+1,MMAX+1,n_time)) + dinput['slm'] = np.zeros((LMAX+1,MMAX+1,n_time)) + for lm in range(n_harm): + dinput['clm'][ll[lm],mm[lm],:] = clm[lm,:] + dinput['slm'][ll[lm],mm[lm],:] = slm[lm,:] + else: + #-- contains either no dates or a single date + dinput['clm'] = np.zeros((LMAX+1,MMAX+1)) + dinput['slm'] = 
np.zeros((LMAX+1,MMAX+1)) + for lm in range(n_harm): + dinput['clm'][ll[lm],mm[lm]] = clm[lm] + dinput['slm'][ll[lm],mm[lm]] = slm[lm] + + #-- for each variable + #-- get attributes for the included variables + for key in dinput.keys(): + attributes[key] = [fileID.variables[key].units, \ + fileID.variables[key].long_name] + #-- put attributes in output python dictionary + dinput['attributes'] = attributes + #-- Global attribute (title of dataset) + rx = re.compile('TITLE',re.IGNORECASE) + title, = [st for st in dir(fileID) if rx.match(st)] + dinput['attributes']['title'] = getattr(fileID, title) + + #-- Closing the NetCDF file + fileID.close() + + #-- return output variable + return dinput diff --git a/gravity_toolkit/ncdf_stokes.py b/gravity_toolkit/ncdf_stokes.py new file mode 100755 index 00000000..88583e1d --- /dev/null +++ b/gravity_toolkit/ncdf_stokes.py @@ -0,0 +1,183 @@ +#!/usr/bin/env python +u""" +ncdf_stokes.py +Written by Tyler Sutterley (10/2019) + +Writes spherical harmonic coefficients to netCDF4 files + +CALLING SEQUENCE: + ncdf_stokes(clm, slm, linp, minp, tinp, month, FILENAME=output_netcdf4_file) + +INPUTS: + clm: Cosine Stokes Coefficient + slm: Sine Stokes Coefficient + linp: degree (l) + minp: order (m) + tinp: date of measurement + month: GRACE/GRACE-FO month + +OPTIONS: + FILENAME: output netCDF4 filename + UNITS: spherical harmonic units + TIME_UNITS: time variable units + TIME_LONGNAME: time variable description + MONTHS_NAME: name of months variable within netCDF4 file + MONTHS_UNITS: months variable units + MONTHS_LONGNAME: months variable description + TITLE: title attribute of dataset + CLOBBER: will overwrite an existing netCDF4 file + VERBOSE: will print to screen the netCDF4 structure parameters + DATE: harmonics have date information + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + netCDF4: Python interface to the netCDF C library + 
(https://unidata.github.io/netcdf4-python/netCDF4/index.html) + +UPDATE HISTORY: + Updated 10/2019: changing Y/N flags to True/False + Updated 08/2019: don't include time (HH:MM:SS) in creation date + Updated 07/2019: added creation date as a global attribute + Updated 03/2019: print variables keys in list for Python3 compatibility + Updated 12/2018: using python dictionaries to improve readability + Updated 10/2018: using future division for python3 Compatibility + Updated 02/2017: added MONTHS_UNITS, MONTHS_LONGNAME, MONTHS_NAME parameters + aligned TIME_LONGNAME and TIME_UNITS with attributes + Updated 07/2016: using netCDF4-python + Updated 06/2016: using __future__ print function + Updated 03/2016: direct calculation of number of harmonics n_harm + Updated 07/2015: forgot to add case for DATE=False + Updated 06/2015: can output single netcdf with multiple dates + Updated 05/2015: minor change for MMAX != LMAX + Updated 05/2014: new parameters for time attributes + Updated 02/2014: minor update to if statements + Updated 07/2013: switched from Scientific Python to Scipy + Updated 05/2013 made UNITS an option in case converting the units to + mass harmonics or other harmonic variant + Updated 03/2013: added units to clm and slm as 'Geodesy Normalization' + switched I/O to column arrays for smaller file sizes and compatibility + between languages + made date an option for datasets that have no date (e.g. 
GIA) + Updated 01/2013 to add time and GRACE/GRACE-FO month number + Written 07/2012 +""" +from __future__ import print_function, division + +import time +import netCDF4 +import numpy as np + +def ncdf_stokes(clm1, slm1, linp, minp, tinp, month, FILENAME=None, + UNITS='Geodesy_Normalization', TIME_UNITS=None, TIME_LONGNAME=None, + MONTHS_NAME='month', MONTHS_UNITS='number', MONTHS_LONGNAME='GRACE_month', + TITLE=None, DATE=True, CLOBBER=True, VERBOSE=False): + + #-- setting netCDF clobber attribute + if CLOBBER: + clobber = 'w' + else: + clobber = 'a' + + #-- opening netCDF file for writing + fileID = netCDF4.Dataset(FILENAME, clobber, format="NETCDF4") + + #-- Maximum spherical harmonic degree (LMAX) and order (MMAX) + LMAX = np.max(linp) + MMAX = np.max(minp) + #-- Calculating the number of cos and sin harmonics up to LMAX + #-- taking into account MMAX (if MMAX == LMAX then LMAX-MMAX=0) + n_harm = (LMAX**2 + 3*LMAX - (LMAX-MMAX)**2 - (LMAX-MMAX))//2 + 1 + + #-- Restructuring output matrix to array format + #-- will reduce matrix size and insure compatibility between platforms + if DATE: + if (np.ndim(tinp) == 0): + n_time = 1 + clm = np.zeros((n_harm)) + slm = np.zeros((n_harm)) + else: + n_time = len(tinp) + clm = np.zeros((n_harm,n_time)) + slm = np.zeros((n_harm,n_time)) + else: + n_time = 0 + clm = np.zeros((n_harm)) + slm = np.zeros((n_harm)) + + #-- restructured degree and order + lout = np.zeros((n_harm,), dtype=np.int32) + mout = np.zeros((n_harm,), dtype=np.int32) + #-- create counter variable lm + lm = 0 + for m in range(0,MMAX+1):#-- MMAX+1 to include MMAX + for l in range(m,LMAX+1):#-- LMAX+1 to include LMAX + lout[lm] = np.int(l) + mout[lm] = np.int(m) + if (DATE and (n_time > 1)): + clm[lm,:] = clm1[l,m,:] + slm[lm,:] = slm1[l,m,:] + else: + clm[lm] = clm1[l,m] + slm[lm] = slm1[l,m] + #-- add 1 to lm counter variable + lm += 1 + + #-- Defining the netCDF dimensions + fileID.createDimension('lm', n_harm) + if DATE: + fileID.createDimension('time', 
n_time) + + #-- defining the netCDF variables + nc = {} + #-- degree and order + nc['l'] = fileID.createVariable('l', 'i', ('lm',)) + nc['m'] = fileID.createVariable('m', 'i', ('lm',)) + #-- spherical harmonics + if (DATE and (n_time > 1)): + nc['clm'] = fileID.createVariable('clm', 'd', ('lm','time',)) + nc['slm'] = fileID.createVariable('slm', 'd', ('lm','time',)) + else: + nc['clm'] = fileID.createVariable('clm', 'd', ('lm',)) + nc['slm'] = fileID.createVariable('slm', 'd', ('lm',)) + if DATE: + #-- time (in decimal form) + nc['time'] = fileID.createVariable('time', 'd', ('time',)) + #-- GRACE/GRACE-FO month (or integer date) + nc['month'] = fileID.createVariable(MONTHS_NAME, 'i', ('time',)) + + #-- filling netCDF variables + nc['l'][:] = lout.copy() + nc['m'][:] = mout.copy() + nc['clm'][:] = clm.copy() + nc['slm'][:] = slm.copy() + if DATE: + nc['time'][:] = tinp + nc['month'][:] = month + + #-- Defining attributes for degree and order + nc['l'].long_name = 'spherical_harmonic_degree'#-- SH degree long name + nc['l'].units = 'Wavenumber'#-- SH degree units + nc['m'].long_name = 'spherical_harmonic_order'#-- SH order long name + nc['m'].units = 'Wavenumber'#-- SH order units + #-- Defining attributes for harmonics + nc['clm'].long_name = 'cosine_spherical_harmonics' + nc['clm'].units = UNITS + nc['slm'].long_name = 'sine_spherical_harmonics' + nc['slm'].units = UNITS + if DATE: + #-- Defining attributes for date and month + nc['time'].long_name = TIME_LONGNAME + nc['time'].units = TIME_UNITS + nc['month'].long_name = MONTHS_LONGNAME + nc['month'].units = MONTHS_UNITS + #-- global variable of netCDF file + fileID.TITLE = TITLE + fileID.date_created = time.strftime('%Y-%m-%d',time.localtime()) + + #-- Output netCDF structure information + if VERBOSE: + print(FILENAME) + print(list(fileID.variables.keys())) + + #-- Closing the netCDF file + fileID.close() diff --git a/gravity_toolkit/ncdf_write.py b/gravity_toolkit/ncdf_write.py new file mode 100755 index 
00000000..3c73d7e3 --- /dev/null +++ b/gravity_toolkit/ncdf_write.py @@ -0,0 +1,125 @@ +#!/usr/bin/env python +u""" +ncdf_write.py +Written by Tyler Sutterley (10/2019) + +Writes spatial data to COARDS-compliant NetCDF4 files + +CALLING SEQUENCE: + ncdf_write(data, lon, lat, tim, FILENAME=output_netcdf4_file) + +INPUTS: + data: z data + lon: longitude array + lat: latitude array + tim: time array + +OPTIONS: + FILENAME: output netCDF4 filename + VARNAME: z variable name in netCDF4 file + LONNAME: longitude variable name in netCDF4 file + LATNAME: latitude variable name in netCDF4 file + UNITS: z variable units + LONGNAME: z variable description + FILL_VALUE: missing value for z variable + TIME_UNITS: time variable units + TIME_LONGNAME: time variable description + TITLE: title attribute of dataset + CLOBBER: will overwrite an existing netCDF4 file + VERBOSE: will print to screen the netCDF4 structure parameters + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + netCDF4: Python interface to the netCDF C library + (https://unidata.github.io/netcdf4-python/netCDF4/index.html) + +UPDATE HISTORY: + Updated 10/2019: changing Y/N flags to True/False + Updated 09/2019 for public release + Updated 08/2019: don't include time (HH:MM:SS) in creation date + Updated 07/2019: added creation date as a global attribute + Updated 03/2019: print variables keys in list for Python3 compatibility + Updated 03/2018: added option TIMENAME to specify the variable name of time + Updated 02/2017: TIME_LONGNAME and TIME_UNITS with attributes, + updated TIME_LONGNAME to Date_in_Decimal_Years + Updated 07/2016: using netCDF4-python with zlib compression + Updated 06/2016: using __future__ print function + Updated 05/2016: output data types same as input data types + Updated 11/2014: new parameters for variable names and attributes + Updated 05/2014: new parameters for time attributes, and missing values + Updated 02/2014: minor update to if 
statements + Updated 07/2013: switched from Scientific Python to Scipy + Updated 01/2013: adding time as a variable + Updated 10/2012: changed from variable names x and y to lon and lat. + Written 07/2012 +""" +from __future__ import print_function + +import time +import netCDF4 +import numpy as np + +def ncdf_write(data, lon, lat, tim, FILENAME=None, VARNAME='z', LONNAME='lon', + LATNAME='lat', TIMENAME='time', UNITS=None, LONGNAME=None, FILL_VALUE=None, + TIME_UNITS=None, TIME_LONGNAME=None, TITLE=None, CLOBBER=True, + VERBOSE=False): + + #-- setting NetCDF clobber attribute + if CLOBBER: + clobber = 'w' + else: + clobber = 'a' + + #-- opening NetCDF file for writing + #-- Create the NetCDF file + fileID = netCDF4.Dataset(FILENAME, clobber, format="NETCDF4") + + #-- Defining the NetCDF dimensions + n_time = 1 if (np.ndim(tim) == 0) else len(tim) + fileID.createDimension(LONNAME, len(lon)) + fileID.createDimension(LATNAME, len(lat)) + fileID.createDimension(TIMENAME, n_time) + + #-- defining the NetCDF variables + nc = {} + #-- lat and lon + nc[LONNAME] = fileID.createVariable(LONNAME, lon.dtype, (LONNAME,)) + nc[LATNAME] = fileID.createVariable(LATNAME, lat.dtype, (LATNAME,)) + #-- spatial data + if (n_time > 1): + nc[VARNAME] = fileID.createVariable(VARNAME, data.dtype, + (LATNAME,LONNAME,TIMENAME,), fill_value=FILL_VALUE, zlib=True) + else: + nc[VARNAME] = fileID.createVariable(VARNAME, data.dtype, + (LATNAME,LONNAME,), fill_value=FILL_VALUE, zlib=True) + #-- time + nc[TIMENAME] = fileID.createVariable(TIMENAME, 'f8', (TIMENAME,)) + + #-- filling NetCDF variables + nc[LONNAME][:] = lon + nc[LATNAME][:] = lat + nc[VARNAME][:,:] = data + nc[TIMENAME][:] = tim + + #-- Defining attributes for longitude and latitude + nc[LONNAME].long_name = 'longitude' + nc[LONNAME].units = 'degrees_east' + nc[LATNAME].long_name = 'latitude' + nc[LATNAME].units = 'degrees_north' + #-- Defining attributes for dataset + nc[VARNAME].long_name = LONGNAME + nc[VARNAME].units = UNITS + 
#-- Defining attributes for date + nc[TIMENAME].long_name = TIME_LONGNAME + nc[TIMENAME].units = TIME_UNITS + #-- global variable of NetCDF file + fileID.TITLE = TITLE + fileID.date_created = time.strftime('%Y-%m-%d',time.localtime()) + + #-- Output NetCDF structure information + if VERBOSE: + print(FILENAME) + print(list(fileID.variables.keys())) + + #-- Closing the NetCDF file + fileID.close() diff --git a/gravity_toolkit/plm_colombo.py b/gravity_toolkit/plm_colombo.py new file mode 100755 index 00000000..a69873c4 --- /dev/null +++ b/gravity_toolkit/plm_colombo.py @@ -0,0 +1,86 @@ +#!/usr/bin/env python +u""" +plm_colombo.py +Written by Tyler Sutterley (07/2017) + +Computes fully-normalized associated Legendre Polynomials + for a vector of x values (can also be singular) +Uses the Colombo (1981) recursion relation + Listed in the Geoid Cookbook and Holmes-Featherstone (2002) + as the most popular recursive algorithm used for computing + fully-normalized Legendre Polynomials in Geodesy +This is a Standard forward column method + +Geoid Cookbook +http://mitgcm.org/~mlosch/geoidcookbook.pdf + +CALLING SEQUENCE: + plm,dplm = plm_colombo(LMAX, np.cos(theta)) + +INPUTS: + LMAX: Upper bound of Spherical Harmonic Degrees + x: typically cos(theta), where theta is the colatitude in radians + +OUTPUT: + plms: Legendre polynomials of x (geodesy normalization) + dplms: first differentials of Legendre polynomials of x + +OPTIONS: + ASTYPE: output variable type (e.g. np.float128). 
import numpy as np

def plm_colombo(LMAX, x, ASTYPE=np.float64):
    """
    Computes fully-normalized associated Legendre Polynomials and their
    first derivative using the Colombo (1981) recursion relation
    (standard forward column method)

    Arguments
    ---------
    LMAX: Upper bound of Spherical Harmonic Degrees (must be >= 1)
    x: typically cos(theta), where theta is the colatitude in radians;
        derivative terms divide by sin(theta), so x = +/-1 produces
        division by zero

    Keyword arguments
    -----------------
    ASTYPE: retained for interface compatibility; output arrays are
        allocated as float64 (as in the original implementation, which
        did not use this parameter)

    Returns
    -------
    plm: Legendre polynomials of x (geodesy normalization)
    dplm: first differentials of Legendre polynomials of x
    """
    #-- fix: np.int() was removed in NumPy 1.24
    LMAX = int(LMAX)

    if (np.ndim(x) > 0):
        #-- length of the x array
        jm = np.shape(x)[0]
    else:
        jm = 1

    #-- allocating for the plm matrix and differentials
    plm = np.zeros((LMAX+1, LMAX+1, jm))
    dplm = np.zeros((LMAX+1, LMAX+1, jm))
    #-- removing singleton dimensions of x
    x = np.squeeze(x)
    u = np.sqrt(1.0 - x**2)#-- for x=cos(th): u=sin(th)

    #-- Calculating the initial polynomials for the recursion
    plm[0,0,:] = 1.0
    plm[1,0,:] = np.sqrt(3.0)*x
    plm[1,1,:] = np.sqrt(3.0)*u
    #-- calculating first derivatives for harmonics of degree 1
    dplm[1,0,:] = (1.0/u)*(x*plm[1,0,:] - np.sqrt(3)*plm[0,0,:])
    dplm[1,1,:] = (x/u)*plm[1,1,:]
    for l in range(2, LMAX+1):
        for m in range(0, l):#-- Zonal and Tesseral harmonics (non-sectorial)
            #-- Computes the non-sectorial terms from previously computed
            #-- sectorial terms.
            alm = np.sqrt(((2.0*l-1.0)*(2.0*l+1.0))/((l-m)*(l+m)))
            blm = np.sqrt(((2.0*l+1.0)*(l+m-1.0)*(l-m-1.0))/((l-m)*(l+m)*(2.0*l-3.0)))
            #-- if (m == l-1): plm[l-2,m,:] will be 0
            plm[l,m,:] = alm*x*plm[l-1,m,:] - blm*plm[l-2,m,:]
            #-- calculate first derivatives
            flm = np.sqrt(((l**2.0 - m**2.0)*(2.0*l + 1.0))/(2.0*l - 1.0))
            dplm[l,m,:] = (1.0/u)*(l*x*plm[l,m,:] - flm*plm[l-1,m,:])

        #-- Sectorial harmonics
        #-- The sectorial harmonics serve as seed values for the recursion
        #-- starting with P00 and P11 (outside the loop)
        plm[l,l,:] = u*np.sqrt((2.0*l+1.0)/(2.0*l))*np.squeeze(plm[l-1,l-1,:])
        #-- calculate first derivatives for sectorial harmonics
        #-- fix: np.float128 is unavailable on some platforms and the result
        #-- is stored into a float64 array; float(l) is exact for integer l
        dplm[l,l,:] = float(l)*(x/u)*plm[l,l,:]

    #-- return the legendre polynomials and their first derivative
    return plm, dplm
+ +Journal of Geodesy (2002) 76: 279-299 +DOI: 10.1007/s00190-002-0216-2 +http://link.springer.com/10.1007/s00190-002-0216-2 + +Geoid Cookbook +http://mitgcm.org/~mlosch/geoidcookbook.pdf + +CALLING SEQUENCE: + plm,dplm = plm_holmes(LMAX, np.cos(theta)) + +INPUTS: + LMAX: Upper bound of Spherical Harmonic Degrees + x: typically cos(theta), where theta is the colatitude in radians + must be -1 <= x <= +1 + +OUTPUT: + plms: Legendre polynomials of x (geodesy normalization) + dplms: first differentials of Legendre polynomials of x + +OPTIONS: + ASTYPE: output variable type (e.g. np.float128). Default is np.float64 + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + +UPDATE HISTORY: + Updated 10/2018: using future division for python3 Compatibility + Updated 07/2017: output first differential of legendre polynomials + Written 05/2015 +""" +from __future__ import division +import numpy as np + +def plm_holmes(LMAX, x, ASTYPE=np.float): + + if (np.ndim(x) > 0): + #-- length of the x array + jm = np.shape(x)[0] + else: + jm = 1 + + LMAX = np.int(LMAX) + #-- removing singleton dimensions of x + x = np.squeeze(x).astype(ASTYPE) + #-- scaling factor + scalef = 1.0e-280 + + #-- allocate for multiplicative factors, and plms + f1 = np.zeros(((LMAX+1)*(LMAX+2)//2),dtype=ASTYPE) + f2 = np.zeros(((LMAX+1)*(LMAX+2)//2),dtype=ASTYPE) + p = np.zeros(((LMAX+1)*(LMAX+2)//2,jm),dtype=ASTYPE) + plm = np.zeros((LMAX+1,LMAX+1,jm),dtype=ASTYPE) + dplm = np.zeros((LMAX+1,LMAX+1,jm),dtype=ASTYPE) + + #-- Precompute multiplicative factors used in recursion relationships + #-- Note that prefactors are not used for the case when m=l and m=l-1, + #-- as a different recursion is used for these two values. 
+ k = 2#-- k = l*(l+1)/2 + m + for l in range(2, LMAX+1): + k += 1 + f1[k] = np.sqrt(2.0*l-1.0)*np.sqrt(2.0*l+1.0)/np.float128(l) + f2[k] = np.float128(l-1.0)*np.sqrt(2.0*l+1.0)/(np.sqrt(2.0*l-3.0)*np.float128(l)) + for m in range(1, l-1): + k += 1 + f1[k] = np.sqrt(2.0*l+1.0)*np.sqrt(2.0*l-1.0)/(np.sqrt(l+m)*np.sqrt(l-m)) + f2[k] = np.sqrt(2.0*l+1.0)*np.sqrt(l-m-1.0)*np.sqrt(l+m-1.0)/ \ + (np.sqrt(2.0*l-3.0)*np.sqrt(l+m)*np.sqrt(l-m)) + k += 2 + + #-- u is sine of colatitude (cosine of latitude) so that 0 <= s <= 1 + u = np.sqrt(1.0 - x**2)#-- for x=cos(th): u=sin(th) + + #-- Calculate P(l,0). These are not scaled. + p[0,:] = 1.0 + p[1,:] = np.sqrt(3.0)*x + k = 1 + for l in range(2, LMAX+1): + k += l + p[k,:] = f1[k]*x*p[k-l,:] - f2[k]*p[k-2*l+1,:] + + #-- Calculate P(m,m), P(m+1,m), and P(l,m) + pmm = np.sqrt(2.0)*scalef + rescalem = 1.0/scalef + kstart = 0 + + for m in range(1, LMAX): + rescalem = rescalem * u + #-- Calculate P(m,m) + kstart += m+1 + pmm = pmm * np.sqrt(2*m+1)/np.sqrt(2*m) + p[kstart,:] = pmm + #-- Calculate P(m+1,m) + k = kstart+m+1 + p[k,:] = x*np.sqrt(2*m+3)*pmm + #-- Calculate P(l,m) + for l in range(m+2, LMAX+1): + k += l + p[k,:] = x*f1[k]*p[k-l,:] - f2[k]*p[k-2*l+1,:] + p[k-2*l+1,:] = p[k-2*l+1,:] * rescalem + #-- rescale + p[k,:] = p[k,:] * rescalem + p[k-LMAX,:] = p[k-LMAX,:] * rescalem + + #-- Calculate P(LMAX,LMAX) + rescalem = rescalem * u + kstart += m+2 + p[kstart,:] = pmm * np.sqrt(2*LMAX+1) / np.sqrt(2*LMAX) * rescalem + #-- reshape Legendre polynomials to output dimensions + for m in range(LMAX+1): + for l in range(m,LMAX+1): + lm = (l*(l+1))//2 + m + plm[l,m,:] = p[lm,:] + #-- calculate first derivatives + if (l == m): + dplm[l,m,:] = np.float128(m)*(x/u)*plm[l,m,:] + else: + flm = np.sqrt(((l**2.0 - m**2.0)*(2.0*l + 1.0))/(2.0*l - 1.0)) + dplm[l,m,:]= (1.0/u)*(l*x*plm[l,m,:] - flm*plm[l-1,m,:]) + + #-- return the legendre polynomials and their first derivative + return plm,dplm diff --git a/gravity_toolkit/plm_mohlenkamp.py 
import numpy as np

def plm_mohlenkamp(LMAX, x, MMAX=None):
    """
    Computes fully-normalized associated Legendre Polynomials using
    Martin Mohlenkamp's recursion relation derived from the Szego (1939)
    recurrence formula for Jacobi Polynomials

    The associated Legendre functions are constructed as an amplitude
    times a Jacobi polynomial: P[l,m](cos(theta)) =
    (sin(theta)^2)*J[l-m,m,m](cos(theta))

    Arguments
    ---------
    LMAX: Upper bound of Spherical Harmonic Degrees
    x: typically cos(theta), where theta is the colatitude in radians

    Keyword arguments
    -----------------
    MMAX: Upper bound of Spherical Harmonic Orders (default = LMAX)

    Returns
    -------
    plm: Legendre polynomials (geodesy normalization)

    Notes
    -----
    For large spherical harmonic degrees this recurrence relation is
    poorly conditioned; orders above ~1000 can cause overflows (per the
    original implementation notes).
    """
    #-- fix: np.int() was removed in NumPy 1.24
    LMAX = int(LMAX)
    #-- upper bound of spherical harmonic orders (default = LMAX)
    if MMAX is None:
        MMAX = LMAX

    #-- size of the x array
    if (np.ndim(x) > 0):
        #-- x is an array
        sx = np.shape(x)[0]
    else:
        #-- x is a single value
        sx = 1

    #-- Initialize the output Legendre polynomials
    plm = np.zeros((LMAX+1, MMAX+1, sx))
    #-- Jacobi polynomial for the recurrence relation
    jlmm = np.zeros((LMAX+1, MMAX+1, sx))
    #-- for x=cos(th): rsin = sin(th)
    rsin = np.sqrt(1.0 - x**2)

    #-- for all spherical harmonic orders of interest
    for mm in range(0, MMAX+1):#-- equivalent to 0:MMAX
        #-- Initialize the recurrence relation
        #-- J-1,m,m Term == 0
        #-- J0,m,m Term
        if (mm > 0):
            #-- j ranges from 1 to mm for the product
            j = np.arange(0, mm) + 1.0
            jlmm[0,mm,:] = np.prod(np.sqrt(1.0 + 1.0/(2.0*j)))/np.sqrt(2.0)
        else:#-- if mm == 0: jlmm = 1/sqrt(2)
            jlmm[0,mm,:] = 1.0/np.sqrt(2.0)
        #-- Jk,m,m Terms
        for k in range(1, LMAX+1):#-- computation for SH degrees
            #-- Initialization begins at -1
            #-- this is to make the formula parallel the function written in
            #-- Martin Mohlenkamp's Guide to Spherical Harmonics
            #-- Jacobi General Terms
            if (k == 1):#-- for degree 1 terms
                jlmm[k,mm,:] = 2.0*x * jlmm[k-1,mm,:] * \
                    np.sqrt(1.0 + (mm - 0.5)/k) * \
                    np.sqrt(1.0 - (mm - 0.5)/(k + 2.0*mm))
            else:#-- for all other spherical harmonic degrees
                jlmm[k,mm,:] = 2.0*x * jlmm[k-1,mm,:] * \
                    np.sqrt(1.0 + (mm - 0.5)/k) * \
                    np.sqrt(1.0 - (mm - 0.5)/(k + 2.0*mm)) - \
                    jlmm[k-2,mm,:] * np.sqrt(1.0 + 4.0/(2.0*k + 2.0*mm - 3.0)) * \
                    np.sqrt(1.0 - (1.0/k)) * np.sqrt(1.0 - 1.0/(k + 2.0*mm))
        #-- Normalization is geodesy convention
        for l in range(mm, LMAX+1):#-- equivalent to mm:LMAX
            if (mm == 0):#-- Geodesy normalization (m=0) == sqrt(2)*sin(th)^0
                #-- rsin^mm term is dropped as rsin^0 = 1
                plm[l,mm,:] = np.sqrt(2.0)*jlmm[l-mm,mm,:]
            else:#-- Geodesy normalization all others == 2*sin(th)^mm
                plm[l,mm,:] = 2.0*(rsin**mm)*jlmm[l-mm,mm,:]
    return plm
+u""" +read_CSR_monthly_6x1.py +Written by Tyler Sutterley (07/2019) + +Reads in monthly 5x5 spherical harmonic coefficients with 1 + coefficient from degree 6 all calculated from SLR measurements + +Dataset distributed by UTCSR + ftp://ftp.csr.utexas.edu/outgoing/cheng/slrgeo.5d561_187_naod + +OPTIONS: + HEADER: file contains header text to be skipped (default: True) + +OUTPUTS: + clm: Cosine spherical harmonic coefficients + slm: Sine spherical harmonic coefficients + error/clm: Cosine spherical harmonic coefficient uncertainty + error/slm: Sine spherical harmonic coefficients uncertainty + MJD: output date as Modified Julian Day + time: output date in year-decimal + +REFERENCE: + Cheng, M., J. C. Ries, and B. D. Tapley, 'Variations of the Earth's Figure + Axis from Satellite Laser Ranging and GRACE', J. Geophys. Res., 116, B01409, + 2011, DOI:10.1029/2010JB000850. + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + +PROGRAM DEPENDENCIES: + convert_calendar_decimal.py: converts from calendar dates to decimal years + +UPDATE HISTORY: + Updated 07/2019: following new format with mean field in header and no C6,0 + Updated 10/2018: using future division for python3 Compatibility + Updated 10/2017: include the 6,0 and 6,1 coefficients in output Ylms + Written 10/2017 +""" +from __future__ import print_function, division + +import os +import re +import numpy as np +from gravity_toolkit.convert_calendar_decimal import convert_calendar_decimal + +#-- PURPOSE: read low degree harmonic data from Satellite Laser Ranging (SLR) +def read_CSR_monthly_6x1(input_file, HEADER=True): + #-- read the file and get contents + with open(os.path.expanduser(input_file),'r') as f: + file_contents = f.read().splitlines() + file_lines = len(file_contents) + + #-- spherical harmonic degree range (full 5x5 with 6,1) + LMIN = 1 + LMAX = 6 + n_harm = (LMAX**2 + 3*LMAX - LMIN**2 - LMIN)//2 - 5 + + #-- counts the number of lines in the header + count = 
0 + #-- Reading over header text + while HEADER: + #-- file line at count + line = file_contents[count] + #-- find end within line to set HEADER flag to False when found + HEADER = not bool(re.match(r'end\sof\sheader',line)) + if bool(re.match(80*r'=',line)): + indice = count + 1 + #-- add 1 to counter + count += 1 + + #-- number of dates within the file + n_dates = (file_lines - count)//(n_harm + 1) + + #-- read mean fields from the header + mean_Ylms = {} + mean_Ylm_error = {} + mean_Ylms['clm'] = np.zeros((LMAX+1,LMAX+1)) + mean_Ylms['slm'] = np.zeros((LMAX+1,LMAX+1)) + mean_Ylm_error['clm'] = np.zeros((LMAX+1,LMAX+1)) + mean_Ylm_error['slm'] = np.zeros((LMAX+1,LMAX+1)) + for i in range(n_harm+1): + #-- split the line into individual components + line = file_contents[indice+i].split() + #-- degree and order for the line + l1 = np.int(line[0]) + m1 = np.int(line[1]) + #-- fill mean field Ylms + mean_Ylms['clm'][l1,m1] = np.float(line[2].replace('D','E')) + mean_Ylms['slm'][l1,m1] = np.float(line[3].replace('D','E')) + mean_Ylm_error['clm'][l1,m1] = np.float(line[4].replace('D','E')) + mean_Ylm_error['slm'][l1,m1] = np.float(line[5].replace('D','E')) + + #-- output spherical harmonic fields + Ylms = {} + Ylms['error'] = {} + Ylms['MJD'] = np.zeros((n_dates)) + Ylms['time'] = np.zeros((n_dates)) + Ylms['clm'] = np.zeros((LMAX+1,LMAX+1,n_dates)) + Ylms['slm'] = np.zeros((LMAX+1,LMAX+1,n_dates)) + Ylms['error']['clm'] = np.zeros((LMAX+1,LMAX+1,n_dates)) + Ylms['error']['slm'] = np.zeros((LMAX+1,LMAX+1,n_dates)) + #-- input spherical harmonic anomalies and errors + Ylm_anomalies = {} + Ylm_anomaly_error = {} + Ylm_anomalies['clm'] = np.zeros((LMAX+1,LMAX+1,n_dates)) + Ylm_anomalies['slm'] = np.zeros((LMAX+1,LMAX+1,n_dates)) + Ylm_anomaly_error['clm'] = np.zeros((LMAX+1,LMAX+1,n_dates)) + Ylm_anomaly_error['slm'] = np.zeros((LMAX+1,LMAX+1,n_dates)) + #-- for each date + for d in range(n_dates): + #-- split the date line into individual components + line_contents = 
file_contents[count].split() + #-- modified Julian date of the middle of the month + Ylms['MJD'][d] = np.mean(np.array(line_contents[5:7],dtype=np.float)) + #-- date of the mid-point of the arc given in years + YY,MM = np.array(line_contents[3:5]) + Ylms['time'][d] = convert_calendar_decimal(YY,MM) + #-- add 1 to counter + count += 1 + + #-- read the anomaly field + for i in range(n_harm): + #-- split the line into individual components + line = file_contents[count].split() + #-- degree and order for the line + l1 = np.int(line[0]) + m1 = np.int(line[1]) + #-- fill anomaly field Ylms (variations and sigmas scaled by 1.0e10) + Ylm_anomalies['clm'][l1,m1,d] = np.float(line[2])*1e-10 + Ylm_anomalies['slm'][l1,m1,d] = np.float(line[3])*1e-10 + Ylm_anomaly_error['clm'][l1,m1,d] = np.float(line[6])*1e-10 + Ylm_anomaly_error['slm'][l1,m1,d] = np.float(line[7])*1e-10 + #-- add 1 to counter + count += 1 + + #-- calculate full coefficients and full errors + Ylms['clm'][:,:,d] = Ylm_anomalies['clm'][:,:,d] + mean_Ylms['clm'][:,:] + Ylms['slm'][:,:,d] = Ylm_anomalies['slm'][:,:,d] + mean_Ylms['slm'][:,:] + Ylms['error']['clm'][:,:,d]=np.sqrt(Ylm_anomaly_error['clm'][:,:,d]**2 + + mean_Ylm_error['clm'][:,:]**2) + Ylms['error']['slm'][:,:,d]=np.sqrt(Ylm_anomaly_error['slm'][:,:,d]**2 + + mean_Ylm_error['slm'][:,:]**2) + + #-- return spherical harmonic fields and date information + return Ylms diff --git a/gravity_toolkit/read_GRACE_harmonics.py b/gravity_toolkit/read_GRACE_harmonics.py index 080e6bc7..be0ef132 100644 --- a/gravity_toolkit/read_GRACE_harmonics.py +++ b/gravity_toolkit/read_GRACE_harmonics.py @@ -3,13 +3,13 @@ read_GRACE_harmonics.py Written by Tyler Sutterley (08/2019) -Reads GRACE datafile and extracts spherical harmonic data and drift rates (RL04) +Reads GRACE files and extracts spherical harmonic data and drift rates (RL04) Adds drift rates to clm and slm for release 4 harmonics Correct GSM data for drift in pole tide following Wahr et al. 
(2015) -Extracts date of GRACE datafile from file name and calculates mean of range +Extracts date of GRACE/GRACE-FO files and calculates mean of range INPUTS: - input_file: GRACE Level-2 spherical harmonic datafile + input_file: GRACE Level-2 spherical harmonic data file LMAX: Maximum degree of spherical harmonics (degree of truncation) OPTIONS: diff --git a/gravity_toolkit/read_SLR_C20.py b/gravity_toolkit/read_SLR_C20.py new file mode 100644 index 00000000..64e64710 --- /dev/null +++ b/gravity_toolkit/read_SLR_C20.py @@ -0,0 +1,280 @@ +#!/usr/bin/env python +u""" +read_SLR_C20.py +Written by Tyler Sutterley (08/2019) + +Reads in C20 spherical harmonic coefficients derived from SLR measurements + +Dataset distributed by NASA PO.DAAC + https://podaac-tools.jpl.nasa.gov/drive/files/GeodeticsGravity/grace/docs + TN-05_C20_SLR.txt + TN-07_C20_SLR.txt + TN-11_C20_SLR.txt + TN-14_C30_C30_GSFC_SLR.txt +Additional dataset distributed by UTCSR + ftp://ftp.csr.utexas.edu/pub/slr/degree_2/C20_RL05.txt + +REFERENCE: + Cheng, M. and Tapley, B. D., "Variations in the Earth's oblateness during + the past 28 years", Journal of Geophysical Research: Solid Earth, + 109(B9), B09402, 2004. 10.1029/2004JB003028 + +CALLING SEQUENCE: + SLR_C20 = read_SLR_C20(SLR_file) + +INPUTS: + SLR_file: + RL04: TN-05_C20_SLR.txt + RL05: TN-07_C20_SLR.txt + RL06: TN-11_C20_SLR.txt + CSR: C20_RL05.txt + +OUTPUTS: + data: SLR degree 2 order 0 cosine stokes coefficients (C20) + error: SLR degree 2 order 0 cosine stokes coefficient error (eC20) + month: GRACE/GRACE-FO month of measurement (Apr. 
2002 = 004) + date: date of SLR measurement + +OPTIONS: + AOD: remove background De-aliasing product from the SLR solution (for CSR) + HEADER: file contains header text to be skipped (default: True) + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + +PROGRAM DEPENDENCIES: + convert_julian.py: returns the calendar date and time given a Julian date + convert_calendar_decimal.py: Return the decimal year for a calendar date + +UPDATE HISTORY: + Updated 08/2019: add catch to verify input SLR file exists + Updated 07/2019: added tilde-expansion of input SLR file + Updated 06/2019: added new GRACE-FO special month (October 2018) + Updated 11/2018: new TN-11 files only list GRACE months available + Updated 06/2016: added option HEADER for files that do not have header text + Updated 05/2016: added option AOD to not remove the AOD correction + Updated 03/2016: minor update to read PO.DAAC + Updated 05/2015: minor change to file determination (only regular expressions) + Updated 02/2015: updated UT/CSR portion and comments + Updated 09/2014: rewrite of the TN-07 read program + using regular expressions and convert_calendar_decimal + Updated 01/2014: updated to use UT/CSR monthly time-series + as an alternative to PO.DAAC as it is updated more regularly + Updated 05/2013: adapted for python + Updated 09/2012: Changed month scheme to output. 
+ Used to remove the GRACE missing months in this program by feeding in the GRACE months + BUT, as the new SLR files start with an earlier date, decided to parallel + the degree-1 read program, and remove the missing months in the read_grace program + Updated 06/2012: OVERHAUL of dating and modification for 'special' GRACE months + Initiated from an incorrect date tag in the SLR data file + New dating will convert from the MJD file into date fraction + Some GRACE 'months' have the accelerometer turned off + for half the month to preserve battery power + These months use half of the prior month in the GRACE global gravity solution + For these months the SLR file has a second dataline for the modified period + Will use these marked (*) data to replace the GRACE C2,0 + ALSO converted the mon and slrdate inputs into options + Updated 01/2012: Updated to feed in SLR file from outside + Update makes this program universal for each computer + Won't have to update file on each computer pointing to the SLR file + Will accommodate upcoming GRACE RL05, which will use different SLR files + Written 12/2011 +""" +import os +import re +import numpy as np +from gravity_toolkit.convert_julian import convert_julian +from gravity_toolkit.convert_calendar_decimal import convert_calendar_decimal + +#-- PURPOSE: read oblateness data from Satellite Laser Ranging (SLR) +def read_SLR_C20(SLR_file, HEADER=True, AOD=True): + + #-- check that SLR file exists + if not os.access(os.path.expanduser(SLR_file), os.F_OK): + raise IOError('SLR file not found in file system') + + #-- determine if imported file is from PO.DAAC or CSR + if bool(re.search('C20_RL\d+',SLR_file)): + #-- SLR C20 file from CSR + #-- Just for checking new months when TN series isn't up to date as the + #-- SLR estimates always use the full set of days in each calendar month. 
+ #-- format of the input file (note 64 bit floating point for C20) + #-- Column 1: Approximate mid-point of monthly solution (years) + #-- Column 2: C20 from SLR (normalized) + #-- Column 3: Delta C20 relative to a mean value of -4.841694723127E-4 (1E-10) + #-- Column 4: Solution sigma (1E-10) + #-- Column 5: Mean value of Atmosphere-Ocean De-aliasing model (1E-10) + #-- Columns 6-7: Start and end dates of data used in solution + dtype = {} + dtype['names'] = ('time','C20','delta','sigma','AOD','start','end') + dtype['formats'] = ('f','f8','f','f','f','f','f') + #-- header text is commented and won't be read + file_input = np.loadtxt(os.path.expanduser(SLR_file),dtype=dtype) + #-- date and GRACE/GRACE-FO month + tdec = file_input['time'] + grace_month = 1 + np.floor((tdec-2002.)*12.) + C20 = file_input['C20'] + eC20 = file_input['sigma']*1e-10 + #-- Background gravity model includes solid earth and ocean tides, solid + #-- earth and ocean pole tides, and the Atmosphere-Ocean De-aliasing + #-- product. The monthly mean of the AOD model has been restored. 
+ if AOD:#-- Removing AOD product that was restored in the solution + C20 -= file_input['AOD']*1e-10 + elif bool(re.search('TN-(11|14)',SLR_file)): + #-- SLR C20 RL06 file from PO.DAAC + with open(os.path.expanduser(SLR_file),'r') as f: + file_contents = f.read().splitlines() + #-- number of lines contained in the file + file_lines = len(file_contents) + + #-- counts the number of lines in the header + count = 0 + #-- Reading over header text + while HEADER: + #-- file line at count + line = file_contents[count] + #-- find PRODUCT: within line to set HEADER flag to False when found + HEADER = not bool(re.match('PRODUCT:+',line,re.IGNORECASE)) + #-- add 1 to counter + count += 1 + + #-- number of months within the file + n_mon = file_lines - count + date_conv = np.zeros((n_mon)) + C20_input = np.zeros((n_mon)) + eC20_input = np.zeros((n_mon)) + mon = np.zeros((n_mon),dtype=np.int) + #-- time count + t = 0 + #-- for every other line: + for line in file_contents[count:]: + #-- find numerical instances in line including exponents, + #-- decimal points and negatives + line_contents = re.findall('[-+]?\d*\.\d*(?:[eE][-+]?\d+)?',line) + #-- check for empty lines as there are + #-- slight differences in RL04 TN-05_C20_SLR.txt + #-- with blanks between the PRODUCT: line and the data + count = len(line_contents) + #-- if count is greater than 0 + if (count > 0): + #-- modified julian date for line + MJD = np.float(line_contents[0]) + #-- converting from MJD into month, day and year + YY,MM,DD,hh,mm,ss = convert_julian(MJD+2400000.5,FORMAT='tuple') + #-- converting from month, day, year into decimal year + date_conv[t] = convert_calendar_decimal(YY, MM, DAY=DD, HOUR=hh) + #-- Spherical Harmonic data for line + C20_input[t] = np.float(line_contents[2]) + eC20_input[t] = np.float(line_contents[4])*1e-10 + #-- GRACE/GRACE-FO month of SLR solutions + mon[t] = 1 + np.round((date_conv[t]-2002.)*12.) 
+ #-- The GRACE/GRACE-FO 'Special Months' + #-- (November 2011, December 2011, May 2015, October 2018) + #-- Accelerometer shutoffs make the relation between month number + #-- and date more complicated as days from other months are used + #-- Nov11 (month 119) is centered in Oct11 (118) + #-- May15 (month 161) is centered in Apr15 (160) + #-- Oct18 (month 202) is centered in Nov18 (203) + if (mon[t] == mon[t-1]) and (mon[t-1] == 118): + mon[t] = mon[t-1] + 1 + elif (mon[t] == mon[t-1]) and (mon[t-1] == 121): + mon[t-1] = mon[t] - 1 + elif (mon[t] == mon[t-1]) and (mon[t-1] == 160): + mon[t] = mon[t-1] + 1 + elif (mon[t] == mon[t-1]) and (mon[t-1] == 203): + mon[t-1] = mon[t] - 1 + #-- add to t count + t += 1 + #-- convert to output variables and truncate if necessary + tdec = date_conv[:t] + C20 = C20_input[:t] + eC20 = eC20_input[:t] + grace_month = mon[:t] + else: + #-- SLR C20 file from PO.DAAC + with open(os.path.expanduser(SLR_file),'r') as f: + file_contents = f.read().splitlines() + #-- number of lines contained in the file + file_lines = len(file_contents) + + #-- counts the number of lines in the header + count = 0 + #-- Reading over header text + while HEADER: + #-- file line at count + line = file_contents[count] + #-- find PRODUCT: within line to set HEADER flag to False when found + HEADER = not bool(re.match('PRODUCT:+',line)) + #-- add 1 to counter + count += 1 + + #-- number of months within the file + n_mon = file_lines - count + date_conv = np.zeros((n_mon)) + C20_input = np.zeros((n_mon)) + eC20_input = np.zeros((n_mon)) + slr_flag = np.zeros((n_mon),dtype=np.bool) + #-- time count + t = 0 + #-- for every other line: + for line in file_contents[count:]: + #-- find numerical instances in line including exponents, + #-- decimal points and negatives + line_contents = re.findall('[-+]?\d*\.\d*(?:[eE][-+]?\d+)?',line) + #-- check for empty lines as there are + #-- slight differences in RL04 TN-05_C20_SLR.txt + #-- with blanks between the PRODUCT:
line and the data + count = len(line_contents) + #-- if count is greater than 0 + if (count > 0): + #-- modified julian date for line + MJD = np.float(line_contents[0]) + #-- converting from MJD into month, day and year + YY,MM,DD,hh,mm,ss = convert_julian(MJD+2400000.5,FORMAT='tuple') + #-- converting from month, day, year into decimal year + date_conv[t] = convert_calendar_decimal(YY, MM, DAY=DD, HOUR=hh) + #-- Spherical Harmonic data for line + C20_input[t] = np.float(line_contents[2]) + eC20_input[t] = np.float(line_contents[4])*1e-10 + #-- line has * flag + if bool(re.search('\*',line)): + slr_flag[t] = True + #-- add to t count + t += 1 + + #-- truncate for RL04 if necessary + date_conv = date_conv[:t] + C20_input = C20_input[:t] + eC20_input = eC20_input[:t] + slr_flag = slr_flag[:t] + + #-- GRACE/GRACE-FO month of SLR solutions + mon = 1 + np.round((date_conv-2002.)*12.) + #-- number of unique months + grace_month = np.unique(mon) + n_uniq = len(grace_month) + #-- Removing overlapping months to use the data for + #-- months with limited GRACE accelerometer use + tdec = np.zeros((n_uniq)) + C20 = np.zeros((n_uniq)) + eC20 = np.zeros((n_uniq)) + #-- New SLR datasets have * flags for the modified GRACE periods + #-- these GRACE months use half of a prior month in their solution + #-- this will find these months (marked above with slr_flag) + for t in range(n_uniq): + count = np.count_nonzero(mon == grace_month[t]) + #-- there is only one solution for the month + if (count == 1): + i = np.nonzero(mon == grace_month[t]) + tdec[t] = date_conv[i] + C20[t] = C20_input[i] + eC20[t] = eC20_input[i] + #-- there is a special solution for the month + #-- will use the solution flagged with slr_flag + elif (count == 2): + i = np.nonzero((mon == grace_month[t]) & slr_flag) + tdec[t] = date_conv[i] + C20[t] = C20_input[i] + eC20[t] = eC20_input[i] + + return {'data':C20, 'error':eC20, 'month':grace_month, 'time':tdec} diff --git a/gravity_toolkit/read_SLR_C30.py
b/gravity_toolkit/read_SLR_C30.py new file mode 100644 index 00000000..64b36790 --- /dev/null +++ b/gravity_toolkit/read_SLR_C30.py @@ -0,0 +1,166 @@ +#!/usr/bin/env python +u""" +read_SLR_C30.py +Written by Yara Mohajerani and Tyler Sutterley (08/2019) + +Reads monthly degree 3 zonal spherical harmonic data files from SLR + https://neptune.gsfc.nasa.gov/gngphys/index.php?section=519 + +Dataset distributed by NASA PO.DAAC + https://podaac-tools.jpl.nasa.gov/drive/files/GeodeticsGravity/gracefo/docs + TN-14_C30_C30_GSFC_SLR.txt + ftp://ftp.csr.utexas.edu/pub/slr/degree_5/ + CSR_Monthly_5x5_Gravity_Harmonics.txt + +REFERENCE: + Loomis, B. D., Rachlin, K. E., and Luthcke, S. B., "Improved Earth + Oblateness Rate Reveals Increased Ice Sheet Losses and Mass-Driven Sea + Level Rise", Geophysical Research Letters, 46(12), 6910-6917, 2019. + https://doi.org/10.1029/2019GL082929 + +CALLING SEQUENCE: + SLR_C30 = read_SLR_C30(SLR_file) + +INPUTS: + SLR_file: + GSFC: TN-14_C30_C30_GSFC_SLR.txt + CSR: CSR_Monthly_5x5_Gravity_Harmonics.txt + LARES: C30_LARES_filtered.txt + +OUTPUTS: + data: SLR degree 3 order 0 cosine stokes coefficients (C30) + error: SLR degree 3 order 0 cosine stokes coefficient error (eC30) + month: GRACE/GRACE-FO month of measurement (Apr. 
2002 = 004) + time: date of SLR measurement + +OPTIONS: + HEADER: file contains header text to be skipped (default: True) + C30_MEAN: mean C30 to add to LARES C30 anomalies + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + +PROGRAM DEPENDENCIES: + convert_julian.py: returns the calendar date and time given a Julian date + convert_calendar_decimal.py: Return the decimal year for a calendar date + read_CSR_monthly_6x1.py: reads monthly 5x5 spherical harmonic coefficients + +UPDATE HISTORY: + Updated 08/2019: new GSFC format with more columns + add catch to verify input SLR file exists + added LARES filtered C30 files from John Ries (C30_LARES_filtered.txt) + add C30 mean (9.5717395773300e-07) to LARES solutions + Updated 07/2019: added SLR C3,0 files from PO.DAAC (GSFC) + read CSR monthly 5x5 file and extract C3,0 coefficients + Written 05/2019 +""" +import os +import re +import numpy as np +from gravity_toolkit.convert_julian import convert_julian +from gravity_toolkit.convert_calendar_decimal import convert_calendar_decimal +from gravity_toolkit.read_CSR_monthly_6x1 import read_CSR_monthly_6x1 + +#-- PURPOSE: read Degree 3 zonal data from Satellite Laser Ranging (SLR) +def read_SLR_C30(SLR_file, HEADER=True, C30_MEAN=9.5717395773300e-07): + + #-- check that SLR file exists + if not os.access(os.path.expanduser(SLR_file), os.F_OK): + raise IOError('SLR file not found in file system') + #-- output dictionary with input data + dinput = {} + + if bool(re.search('TN-(14)',SLR_file)): + + #-- SLR C30 RL06 file from PO.DAAC + with open(os.path.expanduser(SLR_file),'r') as f: + file_contents = f.read().splitlines() + #-- number of lines contained in the file + file_lines = len(file_contents) + + #-- counts the number of lines in the header + count = 0 + #-- Reading over header text + while HEADER: + #-- file line at count + line = file_contents[count] + #-- find PRODUCT: within line to set HEADER flag to False when found + 
HEADER = not bool(re.match('Product:+',line)) + #-- add 1 to counter + count += 1 + + #-- number of months within the file + n_mon = file_lines - count + date_conv = np.zeros((n_mon)) + C30_input = np.zeros((n_mon)) + eC30_input = np.zeros((n_mon)) + mon = np.zeros((n_mon),dtype=np.int) + #-- time count + t = 0 + #-- for every other line: + for line in file_contents[count:]: + #-- find numerical instances in line including exponents, + #-- decimal points and negatives + line_contents = re.findall('[-+]?\d*\.\d*(?:[eE][-+]?\d+)?',line) + count = len(line_contents) + #-- only read lines where C30 data exists (don't read NaN lines) + if (count > 7): + #-- modified julian date for line + MJD = np.float(line_contents[0]) + #-- converting from MJD into month, day and year + YY,MM,DD,hh,mm,ss = convert_julian(MJD+2400000.5,FORMAT='tuple') + #-- converting from month, day, year into decimal year + date_conv[t] = convert_calendar_decimal(YY, MM, DAY=DD, HOUR=hh) + #-- Spherical Harmonic data for line + C30_input[t] = np.float(line_contents[5]) + eC30_input[t] = np.float(line_contents[7])*1e-10 + #-- GRACE/GRACE-FO month of SLR solutions + mon[t] = 1 + np.round((date_conv[t]-2002.)*12.) 
+ #-- The GRACE/GRACE-FO 'Special Months' + #-- (November 2011, May 2015, October 2018) + #-- Accelerometer shutoffs make relation between month number + #-- and date more complicated as days from other months are used + #-- Nov11 (month 119) is centered in Oct11 (118) + #-- May15 (month 161) is centered in Apr15 (160) + #-- Oct18 (month 202) is centered in Nov18 (203) + if (mon[t] == mon[t-1]) and (mon[t-1] == 118): + mon[t] = mon[t-1] + 1 + elif (mon[t] == mon[t-1]) and (mon[t-1] == 160): + mon[t] = mon[t-1] + 1 + elif (mon[t] == mon[t-1]) and (mon[t-1] == 203): + mon[t-1] = mon[t] - 1 + #-- add to t count + t += 1 + #-- verify that there are imported C30 solutions + #-- (TN-14 data format has changed in the past) + if (t == 0): + raise Exception('No GSFC C30 data imported') + #-- convert to output variables and truncate if necessary + dinput['time'] = date_conv[:t] + dinput['data'] = C30_input[:t] + dinput['error'] = eC30_input[:t] + dinput['month'] = mon[:t] + elif bool(re.search('C30_LARES',SLR_file)): + #-- read LARES filtered values + LARES_input = np.loadtxt(SLR_file,skiprows=1) + dinput['time'] = LARES_input[:,0].copy() + #-- convert C30 from anomalies to absolute + dinput['data'] = 1e-10*LARES_input[:,1] + C30_MEAN + #-- filtered data does not have errors + dinput['error'] = np.zeros_like(LARES_input[:,1]) + #-- calculate GRACE/GRACE-FO month + dinput['month'] = 1 + np.array(12.0*(LARES_input[:,0]-2002.0),dtype='i') + else: + #-- CSR 5x5 + 6,1 file from CSR and extract C3,0 coefficients + Ylms = read_CSR_monthly_6x1(SLR_file, HEADER=True) + #-- extract dates, C30 harmonics and errors + dinput['time'] = Ylms['time'].copy() + dinput['data'] = Ylms['clm'][3,0,:].copy() + dinput['error'] = Ylms['error']['clm'][3,0,:].copy() + #-- converting from MJD into month, day and year + YY,MM,DD,hh,mm,ss = convert_julian(Ylms['MJD']+2400000.5,FORMAT='tuple') + #-- calculate GRACE/GRACE-FO month + dinput['month'] = np.array(12.0*(YY - 2002.)
+ MM, dtype=np.int) + + #-- return the input C30 data, year-decimal date, and GRACE/GRACE-FO month + return dinput diff --git a/gravity_toolkit/read_SLR_geocenter.py b/gravity_toolkit/read_SLR_geocenter.py new file mode 100644 index 00000000..3bb45e92 --- /dev/null +++ b/gravity_toolkit/read_SLR_geocenter.py @@ -0,0 +1,214 @@ +#!/usr/bin/env python +u""" +read_SLR_geocenter.py +Written by Tyler Sutterley (08/2019) + +Reads monthly geocenter spherical harmonic data files from SLR provided by CSR + ftp://ftp.csr.utexas.edu/pub/slr/geocenter/ + RL04: GCN_RL04.txt + RL05: GCN_RL05.txt +New CF-CM geocenter dataset to reflect the true degree-1 mass variations + ftp://ftp.csr.utexas.edu/pub/slr/geocenter/README_L1_L2 + ftp://ftp.csr.utexas.edu/pub/slr/geocenter/GCN_L1_L2_30d_CF-CM.txt + +CALLING SEQUENCE: + geocenter = read_SLR_geocenter(geocenter_file) + +INPUTS: + geocenter_file: degree 1 file + +OPTIONS: + RADIUS: Earth's radius for calculating spherical harmonics from SLR data + skiprows: rows of data to skip when importing data + +OUTPUTS: + C10: Cosine d1/o0 Stokes Coefficients + C11: Cosine d1/o1 Stokes Coefficients + S11: Sine d1/o1 Stokes Coefficients + month: GRACE/GRACE-FO month (Apr 2002 = 004) + time: date of GRACE/GRACE-FO month in decimal format + eC10: Cosine d1/o0 Stokes Coefficients Error + eC11: Cosine d1/o1 Stokes Coefficients Error + eS11: Sine d1/o1 Stokes Coefficients Error + +UPDATE HISTORY: + Updated 08/2019: add catch to verify input geocenter file exists + Updated 06/2019: added option RADIUS for setting the Earth's radius + Updated 08/2018: using full release string (RL05 instead of 5) + Updated 04/2017: parallels updates to geocenter function INVERSE option + use enumerate to iterate over dates. 
added option skiprows for headers + Updated 06/2016: using __future__ print function + Updated 05/2016: use geocenter files from 6-hour AOD1b glo Ylms calculated + in aod1b_geocenter.py + Updated 09/2015: added second function for AOD corrected geocenter values + Updated 04/2015: using Julian dates to calculate GRACE/GRACE-FO month + Written 08/2013 +""" +from __future__ import print_function + +import os +import re +import time +import numpy as np +from gravity_toolkit.geocenter import geocenter +from gravity_toolkit.convert_julian import convert_julian + +#-- PURPOSE: read geocenter data from Satellite Laser Ranging (SLR) +def read_SLR_geocenter(geocenter_file, RADIUS=None, skiprows=0): + + #-- check that geocenter file exists + if not os.access(os.path.expanduser(geocenter_file), os.F_OK): + raise IOError('Geocenter file not found in file system') + + #-- Input degree 1 file and skip header text (if skiprows) + file_contents = np.loadtxt(os.path.expanduser(geocenter_file), + skiprows=skiprows) + ndate = np.shape(file_contents)[0] + + #-- first column of data = date + date = file_contents[:,0] + #-- initializing output data + #-- Degree 1 Stokes Coefficients + C10 = np.zeros((ndate)) + C11 = np.zeros((ndate)) + S11 = np.zeros((ndate)) + #-- Degree 1 Stokes Coefficient Errors + eC10 = np.zeros((ndate)) + eC11 = np.zeros((ndate)) + eS11 = np.zeros((ndate)) + #-- Date information + JD = np.zeros((ndate)) + mon = np.zeros((ndate), dtype=np.int32) + + #-- for each date + for t,tdec in enumerate(date): + #-- converting from geocenter into spherical harmonics + CS1 = geocenter(X=file_contents[t,1], Y=file_contents[t,2], + Z=file_contents[t,3], RADIUS=RADIUS, INVERSE=True) + dCS1 = geocenter(X=file_contents[t,4], Y=file_contents[t,5], + Z=file_contents[t,6], RADIUS=RADIUS, INVERSE=True) + #-- output harmonics + C10[t],C11[t],S11[t] = (CS1['C10'], CS1['C11'], CS1['S11']) + eC10[t],eC11[t],eS11[t] = (dCS1['C10'], dCS1['C11'], dCS1['S11']) + + #-- calendar year of date + 
year = np.floor(tdec) + #-- check if year is a leap year + if ((year % 4) == 0): + #-- Leap Year + dpy = 366.0 + else: + #-- Standard Year + dpy = 365.0 + #-- calculation of day of the year (with decimals for fraction of day) + DofY = dpy*(tdec % 1) + #-- Calculation of the Julian date from year and DofY + JD[t] = np.float(367.0*year - \ + np.floor(7.0*(year + np.floor(10.0/12.0))/4.0) - \ + np.floor(3.0*(np.floor((year - 8.0/7.0)/100.0) + 1.0)/4.0) + \ + np.floor(275.0/9.0) + DofY + 1721028.5) + #-- convert the julian date into calendar dates (hour, day, month, year) + cal_date = convert_julian(JD[t]) + #-- calculate the GRACE/GRACE-FO month (Apr02 == 004) + #-- https://grace.jpl.nasa.gov/data/grace-months/ + mon[t] = 12*(cal_date['year']-2002) + cal_date['month'] + + return {'C10':C10, 'C11':C11, 'S11':S11, 'eC10':eC10, 'eC11':eC11, + 'eS11':eS11, 'month':mon, 'time':date} + +#-- special function for outputting AOD corrected SLR geocenter values +#-- need to run aod1b_geocenter.py to calculate the monthly geocenter dealiasing +def aod_corrected_SLR_geocenter(geocenter_file, DREL, RADIUS=None, skiprows=0): + #-- directory setup for AOD1b data starting with input degree 1 file + #-- this will verify that the input paths work + AOD1B_dir = os.path.abspath(os.path.join(geocenter_file,os.path.pardir, + os.path.pardir,'AOD1B',DREL,'geocenter')) + + #-- Input degree 1 file and skip header text (if skiprows) + file_contents = np.loadtxt(os.path.expanduser(geocenter_file), + skiprows=skiprows) + ndate = np.shape(file_contents)[0] + + #-- first column of data = date + date = file_contents[:,0] + #-- initializing output data + #-- Degree 1 Stokes Coefficients + C10 = np.zeros((ndate)) + C11 = np.zeros((ndate)) + S11 = np.zeros((ndate)) + #-- Degree 1 Stokes Coefficient Errors + eC10 = np.zeros((ndate)) + eC11 = np.zeros((ndate)) + eS11 = np.zeros((ndate)) + #-- Date information + JD = np.zeros((ndate)) + mon = np.zeros((ndate), dtype=np.int32) + + #-- for each date + for 
t,tdec in enumerate(date): + #-- converting from geocenter into spherical harmonics + CS1 = geocenter(X=file_contents[t,1], Y=file_contents[t,2], + Z=file_contents[t,3], RADIUS=RADIUS, INVERSE=True) + dCS1 = geocenter(X=file_contents[t,4], Y=file_contents[t,5], + Z=file_contents[t,6], RADIUS=RADIUS, INVERSE=True) + + #-- calendar year of date + year = np.floor(tdec) + #-- check if year is a leap year + if ((year % 4) == 0): + #-- Leap Year + dpy = 366.0 + else: + #-- Standard Year + dpy = 365.0 + #-- calculation of day of the year (with decimals for fraction of day) + DofY = dpy*(tdec % 1) + #-- Calculation of the Julian date from year and DofY + JD[t] =np.float(367.*year - np.floor(7.*(year + np.floor(10./12.))/4.) - + np.floor(3.0*(np.floor((year - 8.0/7.0)/100.0) + 1.0)/4.0) + + np.floor(275.0/9.0) + DofY + 1721028.5) + #-- convert the julian date into calendar dates (hour, day, month, year) + cal_date = convert_julian(JD[t], ASTYPE=np.int) + #-- full path to AOD geocenter for month (using glo coefficients) + args = (DREL, 'glo', cal_date['year'], cal_date['month']) + AOD1B_file = 'AOD1B_{0}_{1}_{2:4d}_{3:02d}.txt'.format(*args) + Ylms = read_AOD1b_geocenter(os.path.join(AOD1B_dir,AOD1B_file), + cal_date['month']) + #-- remove AOD from output harmonics + C10[t] = CS1['C10'] - Ylms['C10'] + C11[t] = CS1['C11'] - Ylms['C11'] + S11[t] = CS1['S11'] - Ylms['S11'] + eC10[t],eC11[t],eS11[t] = (dCS1['C10'], dCS1['C11'], dCS1['S11']) + #-- calculate the GRACE/GRACE-FO month (Apr02 == 004) + #-- https://grace.jpl.nasa.gov/data/grace-months/ + mon[t] = 12*(cal_date['year']-2002) + cal_date['month'] + + return {'C10':C10, 'C11':C11, 'S11':S11, 'eC10':eC10, 'eC11':eC11, + 'eS11':eS11, 'month':mon, 'time':date} + +#-- PURPOSE: read AOD1b geocenter for month and calculate the mean harmonics +#-- need to run aod1b_geocenter.py to write these monthly geocenter files +def read_AOD1b_geocenter(AOD1B_file, calendar_month): + #-- check that file exists + if not os.access(AOD1B_file, 
os.F_OK): + raise IOError('AOD1b File {0} not in File System'.format(AOD1B_file)) + #-- read AOD1b geocenter skipping over commented header text + with open(AOD1B_file, 'r') as f: + file_contents=[i for i in f.read().splitlines() if not re.match('#',i)] + #-- extract X,Y,Z from each line in the file + #-- first column: ISO-formatted date and time + #-- second-fourth columns: X, Y and Z geocenter variations + n_lines = len(file_contents) + X = np.zeros((n_lines)) + Y = np.zeros((n_lines)) + Z = np.zeros((n_lines)) + month = np.zeros((n_lines),dtype=np.int) + for i,line in enumerate(file_contents): + line_contents = line.split() + AOD1B_time = time.strptime(line_contents[0],'%Y-%m-%dT%H:%M:%S') + month[i] = AOD1B_time.tm_mon + X[i],Y[i],Z[i] = np.array(line_contents[1:],dtype=np.float) + #-- use only dates within month (should be all) + ii, = np.nonzero(month == calendar_month) + #-- convert mean X,Y,Z into spherical harmonics + return geocenter(X=X[ii].mean(),Y=Y[ii].mean(),Z=Z[ii].mean(),INVERSE=True) diff --git a/gravity_toolkit/read_love_numbers.py b/gravity_toolkit/read_love_numbers.py new file mode 100755 index 00000000..e3bcfee8 --- /dev/null +++ b/gravity_toolkit/read_love_numbers.py @@ -0,0 +1,98 @@ +#!/usr/bin/env python +u""" +read_love_numbers.py +Written by Tyler Sutterley (03/2020) + +Reads sets of load Love numbers output from PREM + +INPUTS: + love_numbers_file: Elastic load Love numbers computed using Preliminary + Reference Earth Model (PREM) outputs as described by Han and Wahr (1995) + +OUTPUTS: + kl: Love number of Gravitational Potential + hl: Love number of Vertical Displacement + ll: Love number of Horizontal Displacement + +OPTIONS: + HEADER: file contains header text to be skipped (default: True) + FORMAT: format of output variables + 'dict': dictionary with variable keys as listed above + 'tuple': tuple with variable order hl,kl,ll + 'zip': aggregated variable sets + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python 
(http://www.numpy.org) + +NOTES: + love_numbers file must be in the base directory + for l=1 coordinate system center is the center of mass of the system + to change to a center of figure reference frame: + replace kl[1] with -(hl[1]+2.0*ll[1])/3.0 + following Wahr (1998) and Trupin (1992) + +UPDATE HISTORY: + Updated 03/2020 for public release +""" +import os +import re +import numpy as np + +#-- PURPOSE: read load love numbers from PREM +def read_love_numbers(love_numbers_file, HEADER=True, FORMAT='tuple'): + + #-- check that load love number data file is present in file system + if not os.access(os.path.expanduser(love_numbers_file), os.F_OK): + #-- raise error if love_numbers file is not found in path + raise IOError('{0} not found'.format(love_numbers_file)) + + #-- Input load love number data file and read contents + with open(os.path.expanduser(love_numbers_file),'r') as f: + file_contents = f.read().splitlines() + + #-- counts the number of lines in the header + count = 0 + #-- Reading over header text + while HEADER: + #-- file line at count + line = file_contents[count] + #-- find the final line within the header text + #-- to set HEADER flag to False when found + HEADER = not bool(re.match('\*\*\*',line)) + #-- add 1 to counter + count += 1 + + #-- compile regular expression operator to find numerical instances + regex_pattern = '[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?' 
+ rx = re.compile(regex_pattern, re.VERBOSE) + + #-- maximum spherical harmonic degree in file + #-- from the final line + LMAX = np.int(rx.findall(file_contents[-1])[0]) + + #-- vertical displacement hl + hl = np.zeros((LMAX+1)) + #-- gravitational potential kl + kl = np.zeros((LMAX+1)) + #-- horizontal displacement ll + ll = np.zeros((LMAX+1)) + #-- for each line in the file (skipping the 2 header lines) + for file_line in file_contents[count:]: + #-- find numerical instances in line + #-- Replacing IDL double precision exponential with + #-- standard E exponential for kl and ll + love_numbers = rx.findall(file_line.replace('D','E')) + #-- spherical harmonic degree + l = np.int(love_numbers[0]) + #-- convert love numbers to float + hl[l] = np.float(love_numbers[1]) + kl[l] = np.float(love_numbers[2]) + ll[l] = np.float(love_numbers[3]) + + #-- return love numbers in output format (default python dictionary) + if (FORMAT == 'dict'): + return {'kl':kl, 'hl':hl, 'll':ll} + elif (FORMAT == 'tuple'): + return (hl, kl, ll) + elif (FORMAT == 'zip'): + return zip(hl, kl, ll) diff --git a/gravity_toolkit/read_tellus_geocenter.py b/gravity_toolkit/read_tellus_geocenter.py new file mode 100644 index 00000000..45d2c293 --- /dev/null +++ b/gravity_toolkit/read_tellus_geocenter.py @@ -0,0 +1,161 @@ +#!/usr/bin/env python +u""" +read_tellus_geocenter.py +Written by Tyler Sutterley (08/2019) + +Reads monthly geocenter spherical harmonic data files from GRACE Tellus + Technical Notes (TN-13) calculated using GRACE/GRACE-FO measurements and + Ocean Models of Degree 1 + +Datasets distributed by NASA PO.DAAC +https://podaac-tools.jpl.nasa.gov/drive/files/allData/tellus/L2/degree_1 + +Swenson, S., D. Chambers, and J. Wahr, "Estimating geocenter variations + from a combination of GRACE and ocean model output", J. Geophys. Res., + 113(B08410), 2008. doi:10.1029/2007JB005338 + +Sun, Y., R. Riva, and P. 
Ditmar, "Observed changes in the Earth's dynamic + oblateness from GRACE data and geophysical models", J. Geodesy., + 90(1), 81-89, 2016. doi:10.1007/s00190-015-0852-y + +CALLING SEQUENCE: + geocenter = read_tellus_geocenter(file) + +INPUTS: + file: degree 1 file + +OUTPUTS: + C10: Cosine d1/o0 Stokes Coefficients + C11: Cosine d1/o1 Stokes Coefficients + S11: Sine d1/o1 Stokes Coefficients + eC10: Cosine d1/o0 Stokes Coefficients Error + eC11: Cosine d1/o1 Stokes Coefficients Error + eS11: Sine d1/o1 Stokes Coefficients Error + month: GRACE/GRACE-FO month (Apr 2002 = 004) + time: date of each month in year-decimal + +OPTIONS: + HEADER: file contains header text to be skipped (default: True) + JPL: use JPL TN-13 geocenter files with self-attraction and loading + +PYTHON DEPENDENCIES: + numpy: Scientific Computing Tools For Python (http://www.numpy.org) + +UPDATE HISTORY: + Updated 08/2019: add catch to verify input geocenter file exists + Updated 07/2019: month adjustments for new TN-13 geocenter files + calculate GRACE/GRACE-FO month based on mean time for JPL TN-13 data files + Updated 06/2019: can use the new JPL TN-13 geocenter files from Tellus + Updated 10/2018: using future division for python3 Compatibility + UPDATED 06/2016: added option HEADER for files that do not have header text + UPDATED 04/2015: added time output with convert_calendar_decimal + UPDATED 03/2015: minor update to read and regular expression + UPDATED 10/2014: rewrote with general code updates. 
#-- PURPOSE: read geocenter data from PO.DAAC
def read_tellus_geocenter(geocenter_file, HEADER=True, JPL=False):
    """
    Read monthly geocenter spherical harmonic data files from GRACE
    Tellus Technical Notes (TN-13)

    Arguments
    ---------
    geocenter_file: degree 1 data file (CSR-style or JPL TN-13)

    Keyword arguments
    -----------------
    HEADER: file contains header text to be skipped (default: True)
    JPL: use JPL TN-13 geocenter files with self-attraction and loading

    Returns
    -------
    dict with keys:
        C10, C11, S11: degree 1 Stokes coefficients
        eC10, eC11, eS11: degree 1 Stokes coefficient errors
        month: GRACE/GRACE-FO month (Apr 2002 = 004)
        time: date of each month in year-decimal

    Raises
    ------
    IOError: if the input geocenter file does not exist
    """
    #-- tilde-expand once and verify that the geocenter file exists
    geocenter_file = os.path.expanduser(geocenter_file)
    if not os.access(geocenter_file, os.F_OK):
        raise IOError('Geocenter file not found in file system')

    #-- read degree 1 file and get contents
    with open(geocenter_file, 'r') as f:
        file_contents = f.read().splitlines()
    #-- number of lines contained in the file
    file_lines = len(file_contents)

    #-- counts the number of lines in the header
    count = 0
    #-- Reading over header text
    #-- JPL TN-13 headers end with "end of header"; CSR-style files end
    #-- with a Fortran format line beginning with '(a6,
    header_flag = r"end\sof\sheader" if JPL else r"'\(a6,"
    while HEADER:
        #-- file line at count
        line = file_contents[count]
        #-- find header_flag within line to set HEADER flag to False when found
        HEADER = not bool(re.match(header_flag, line))
        #-- add 1 to counter
        count += 1

    #-- number of months within the file (2 data lines per month:
    #-- one for order 0 and one for order 1)
    n_mon = (file_lines - count)//2
    #-- calendar dates
    year = np.zeros((n_mon))
    month = np.zeros((n_mon))
    #-- GRACE/GRACE-FO month of data line
    #-- NOTE: np.int was removed in NumPy 1.24; use the builtin int
    mon = np.zeros((n_mon), dtype=int)
    tdec = np.zeros((n_mon))
    #-- spherical harmonic data (column 0: order 0, column 1: order 1)
    C1 = np.zeros((n_mon,2))
    S1 = np.zeros((n_mon,2))
    eC1 = np.zeros((n_mon,2))
    eS1 = np.zeros((n_mon,2))

    #-- compile numerical expression operator as a raw string so the
    #-- \d and \. sequences are not treated as (invalid) string escapes
    #-- matches integers, decimals and exponentials (with sign)
    regex_pattern = r'[-+]?(?:(?:\d*\.\d+)|(?:\d+\.?))(?:[Ee][+-]?\d+)?'
    rx = re.compile(regex_pattern, re.VERBOSE)

    #-- time count
    t = 0
    #-- for each data line after the header
    for line in file_contents[count:]:
        #-- find numerical instances in line including integers, exponents,
        #-- decimal points and negatives
        line_contents = rx.findall(line)
        #-- calendar year and month
        if JPL:
            #-- start and end dates of month in YYYYMMDD form
            start_yr = float(line_contents[7][0:4])
            start_mon = float(line_contents[7][4:6])
            start_day = float(line_contents[7][6:8])
            end_yr = float(line_contents[8][0:4])
            end_mon = float(line_contents[8][4:6])
            end_day = float(line_contents[8][6:8])
            #-- convert date to year decimal
            t_start = convert_calendar_decimal(start_yr,start_mon,DAY=start_day)
            t_end = convert_calendar_decimal(end_yr,end_mon,DAY=end_day)
            #-- calculate mean time of the month
            tdec[t] = np.mean([t_start,t_end])
            year[t] = np.floor(tdec[t])
            month[t] = int(12*(tdec[t] % 1) + 1)
        else:
            #-- date given directly as YYYYMM in the first column
            year[t] = float(line_contents[0][0:4])
            month[t] = float(line_contents[0][4:6])
            #-- convert date to year decimal
            tdec[t], = convert_calendar_decimal(year[t],month[t])
        #-- GRACE/GRACE-FO month (Apr 2002 = 004)
        mon[t] = int(12.0*(year[t] - 2002.) + month[t])
        #-- Accelerometer shutoffs complicate the relation between
        #-- calendar date and GRACE month number: if this month would
        #-- repeat one of the known shutoff months, advance it by one
        if (mon[t] == mon[t-1]) and (mon[t-1] in (118,123,160,169,185,205)):
            mon[t] = mon[t-1] + 1
        #-- spherical harmonic order of this line (0 or 1)
        m = int(line_contents[2])
        #-- extract spherical harmonic data and errors for order m
        C1[t,m] = float(line_contents[3])
        S1[t,m] = float(line_contents[4])
        eC1[t,m] = float(line_contents[5])
        eS1[t,m] = float(line_contents[6])
        #-- will only advance in time after reading the
        #-- order 1 coefficients (an order 0 line leaves t unchanged)
        t += m

    #-- reforming outputs to be individual variables
    C10 = np.squeeze(C1[:,0])
    C11 = np.squeeze(C1[:,1])
    S11 = np.squeeze(S1[:,1])
    eC10 = np.squeeze(eC1[:,0])
    eC11 = np.squeeze(eC1[:,1])
    eS11 = np.squeeze(eS1[:,1])

    return {'month':mon, 'C10':C10, 'C11':C11, 'S11':S11, \
        'eC10':eC10, 'eC11':eC11, 'eS11':eS11, 'time':tdec}