From 71a62be9e667efbbe9e6c9ee10493cbb99b88396 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing Date: Tue, 25 Oct 2022 16:42:21 -0400 Subject: [PATCH 01/38] MAINT: migrate to pyproject.toml --- pyproject.toml | 82 ++++++++++++++++++++++++++++++++++++++++++++++++++ setup.cfg | 68 ----------------------------------------- setup.py | 17 ----------- tox.ini | 6 ++++ 4 files changed, 88 insertions(+), 85 deletions(-) create mode 100644 pyproject.toml delete mode 100644 setup.cfg delete mode 100644 setup.py create mode 100644 tox.ini diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..33f9ab53 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,82 @@ +[build-system] +requires = ["setuptools", "pip >= 10"] +build-backend = "setuptools.build_meta" + +[project] +name = "pysatNASA" +version = "0.0.4.1" +description = "pysat support for NASA Instruments" +readme = "README.md" +requires-python = ">=3.8" +license = {file = "LICENSE"} +authors = [ + {name = "Jeff Klenzing, et al", email = "pysat.developers@gmail.com"}, +] +classifiers = [ + "Development Status :: 3 - Alpha", + "Topic :: Scientific/Engineering :: Astronomy", + "Topic :: Scientific/Engineering :: Physics", + "Topic :: Scientific/Engineering :: Atmospheric Science", + "Intended Audience :: Science/Research", + "License :: OSI Approved :: BSD License", + "Natural Language :: English", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Operating System :: POSIX :: Linux", + "Operating System :: MacOS :: MacOS X", + "Operating System :: Microsoft :: Windows" +] +keywords = [ + "pysat", + "ionosphere" +] +dependencies = [ + "netCDF4", + "requests", + "beautifulsoup4", + "lxml", + "cdflib >= 0.4.4", + "numpy", + "pandas", + "pysat >= 3.0.2", + "xarray" +] + +[project.optional-dependencies] +pysatcdf = ["pysatCDF"] +test = [ + "coveralls<3.3", + "pytest", + "pytest-cov", + "pytest-ordering" +] +doc = [ + "flake8", + "flake8-docstrings", + "hacking>=1.0", + "ipython", + "m2r2", + "numpydoc", + "sphinx", + "sphinx_rtd_theme" +] + +[project.urls] +Documentation = "https://pysatnasa.readthedocs.io/en/latest/" +Source = "https://github.com/pysat/pysatNASA" + +[tool.coverage.report] +omit = ["*/instruments/templates/"] + +[tool.pytest.ini_options] +addopts = "-vs --cov=pysatNASA" +markers = [ + "all_inst", + "download", + "no_download", + "load_options", + "first", + "second" +] diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 49912451..00000000 --- a/setup.cfg +++ /dev/null @@ -1,68 +0,0 @@ -[metadata] -name = pysatNASA -version = file: pysatNASA/version.txt -url = https://github.com/pysat/pysatNASA -author = Jeff Klenzing -author_email = jeffrey.klenzing@nasa.gov -description = 'pysat support for NASA Instruments' -keywords = - pysat - ionosphere -classifiers = - Development Status :: 3 - Alpha - Topic :: Scientific/Engineering :: Physics - Topic :: Scientific/Engineering :: Atmospheric Science - Intended Audience :: Science/Research - License :: OSI Approved :: BSD License - Natural Language :: English - Programming Language :: Python :: 3.5 - Programming Language :: Python :: 3.6 - Programming Language :: Python :: 3.7 - Operating System :: MacOS :: MacOS X - Operating System :: POSIX :: Linux -license_file = LICENSE -long_description = file: README.md -long_description_content_type = text/markdown - -[options] -python_requires = >= 3.5 -setup_requires = - setuptools >= 38.6 - pip >= 10 
-include_package_data = True -zip_safe = False -packages = find: -install_requires = - netCDF4 - requests - beautifulsoup4 - lxml - cdflib - numpy - pandas - xarray - pysat - -[options.extras_require] -all = - pysatCDF - -[coverage:report] -omit = - */instruments/templates/ - -[flake8] -max-line-length = 80 -ignore = - D200 - D202 - W503 - -[tool:pytest] -markers = - all_inst: tests all instruments - download: tests for downloadable instruments - no_download: tests for instruments without download support - load_options: tests for instruments with additional options - first: first tests to run - second: second tests to run diff --git a/setup.py b/setup.py deleted file mode 100644 index 4c0bbd8e..00000000 --- a/setup.py +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/env python -# -*- coding: utf-8 -*- -# Copyright (C) 2020, Authors -# Full license can be found in License.md and AUTHORS.md -# ----------------------------------------------------------------------------- -"""Setup routines for pysatNASA. - -Note ----- -Package metadata stored in setup.cfg - -""" - -from setuptools import setup - -# Run setup -setup() diff --git a/tox.ini b/tox.ini new file mode 100644 index 00000000..ccced987 --- /dev/null +++ b/tox.ini @@ -0,0 +1,6 @@ +[flake8] +max-line-length = 80 +ignore = + D200 + D202 + W503 From 03eefb013360669894c4006592c1f242d0223332 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing Date: Tue, 25 Oct 2022 16:42:33 -0400 Subject: [PATCH 02/38] MAINT: one version --- pysatNASA/__init__.py | 9 +++------ pysatNASA/version.txt | 1 - 2 files changed, 3 insertions(+), 7 deletions(-) delete mode 100644 pysatNASA/version.txt diff --git a/pysatNASA/__init__.py b/pysatNASA/__init__.py index 1df92789..2f8f3370 100644 --- a/pysatNASA/__init__.py +++ b/pysatNASA/__init__.py @@ -6,13 +6,10 @@ """ -import os +import pkg_resources + from pysatNASA import constellations # noqa F401 from pysatNASA import instruments # noqa F401 # set version -here = os.path.abspath(os.path.dirname(__file__)) -version_filename = os.path.join(here, 'version.txt') -with open(version_filename, 'r') as version_file: - __version__ = version_file.read().strip() -del here, version_filename, version_file +__version__ = pkg_resources.get_distribution('pysatNASA').version diff --git a/pysatNASA/version.txt b/pysatNASA/version.txt deleted file mode 100644 index bcab45af..00000000 --- a/pysatNASA/version.txt +++ /dev/null @@ -1 +0,0 @@ -0.0.3 From 16c924a8479c55c079bc7a077f77554211028cd8 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing Date: Tue, 25 Oct 2022 16:42:43 -0400 Subject: [PATCH 03/38] DOC: update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index d3ec7e0d..84469a77 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -20,6 +20,7 @@ This project adheres to [Semantic Versioning](https://semver.org/). 
* Include flake8 linting of docstrings and style in Github Actions * Move OMNI HRO custom functions to a methods module * Deprecate OMNI HRO custom functions in instrument module + * Use pyproject.toml to manage setup * Documentation * New logo added From e71fce49b75769064360da1ba83730a5a9b7f695 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing Date: Tue, 25 Oct 2022 16:42:55 -0400 Subject: [PATCH 04/38] TST: try new docs install --- .github/workflows/docs.yml | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 2f8cd626..fed56ae4 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -25,8 +25,7 @@ jobs: - name: Install dependencies run: | python -m pip install --upgrade pip - pip install -r test_requirements.txt - pip install -r requirements.txt + pip install .[doc] - name: Set up pysat run: | From 172c325d5ce0bb463f43694b29f0d03368e51c97 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing Date: Tue, 25 Oct 2022 17:03:27 -0400 Subject: [PATCH 05/38] STY: docstring --- pysatNASA/instruments/ses14_gold.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pysatNASA/instruments/ses14_gold.py b/pysatNASA/instruments/ses14_gold.py index 05435fe2..e5f29ac8 100644 --- a/pysatNASA/instruments/ses14_gold.py +++ b/pysatNASA/instruments/ses14_gold.py @@ -68,7 +68,7 @@ def init(self): Runs once upon instantiation. Parameters - ----------- + ---------- self : pysat.Instrument Instrument class object From f6a774a3934192aad4c02e7913cd7b1f4bdad8cf Mon Sep 17 00:00:00 2001 From: Jeff Klenzing Date: Tue, 25 Oct 2022 17:08:26 -0400 Subject: [PATCH 06/38] DOC/BUG: update docs --- README.md | 8 ++++---- docs/conf.py | 9 ++++----- docs/figures/{logo.png => pysatnasa_logo.png} | Bin docs/installation.rst | 13 ++++++------- docs/overview.rst | 2 +- pyproject.toml | 3 ++- 6 files changed, 17 insertions(+), 18 deletions(-) rename docs/figures/{logo.png => pysatnasa_logo.png} (100%) diff --git a/README.md b/README.md index f5782a5e..1fceb954 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,6 @@
- pysatNASA + pysatNASA
# pysatNASA: pysat support for NASA Space Science instruments @@ -40,17 +40,17 @@ Currently, the main way to get pysatNASA is through github. git clone https://github.com/pysat/pysatNASA.git ``` -Change directories into the repository folder and run the setup.py file. For +Change directories into the repository folder and build the project. For a local install use the "--user" flag after "install". ``` cd pysatNASA/ -python setup.py install +pip install . ``` Note: pre-1.0.0 version ----------------------- -pysatNASA is currently in an initial development phase and requires pysat 3.0.0. +pysatNASA is currently in an initial development phase and requires pysat 3.0.4. # Using with pysat diff --git a/docs/conf.py b/docs/conf.py index adff053a..4a6c06f2 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -10,6 +10,7 @@ import json import os +import pkg_resources import sys sys.path.insert(0, os.path.abspath('..')) @@ -62,9 +63,7 @@ # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. -doc_dir = os.path.abspath(os.path.dirname(__file__)) -with open(os.path.join(doc_dir, "..", project, "version.txt"), "r") as fin: - version = fin.read().strip() +version = pkg_resources.get_distribution('pysatNASA').version release = '{:s}-alpha'.format(version) # Include alpha/beta/rc tags. # The language for content autogenerated by Sphinx. Refer to documentation @@ -72,7 +71,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = 'en' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. @@ -96,7 +95,7 @@ # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. -html_logo = os.path.join(os.path.abspath('.'), 'figures', 'pysatnasa_logo.jpg') +html_logo = os.path.join(os.path.abspath('.'), 'figures', 'pysatnasa_logo.png') html_theme_options = {'logo_only': True} # Add any paths that contain custom static files (such as style sheets) here, diff --git a/docs/figures/logo.png b/docs/figures/pysatnasa_logo.png similarity index 100% rename from docs/figures/logo.png rename to docs/figures/pysatnasa_logo.png diff --git a/docs/installation.rst b/docs/installation.rst index c149522c..5975d708 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -40,24 +40,23 @@ Installation Options 2. Install pysatNASA: - Change directories into the repository folder and run the setup.py file. + Change directories into the repository folder and build the project. There are a few ways you can do this: A. Install on the system (root privileges required):: - sudo python setup.py install + sudo pip install . + B. Install at the user level:: - python setup.py install --user - C. Install with the intent to develop locally:: + pip install . + C. Install with the intent to develop locally:: - python setup.py develop --user -.. extras-require:: all - :setup.cfg: + pip install -e . .. _post-install: diff --git a/docs/overview.rst b/docs/overview.rst index 5cd631ad..4c8a8d4e 100644 --- a/docs/overview.rst +++ b/docs/overview.rst @@ -5,7 +5,7 @@ This is a library of ``pysat`` instrument modules and methods designed to suppor NASA instruments and missions archived at the Community Data Analysis Web portal. -.. 
image:: figures/pysatnasa_logo.jpg +.. image:: figures/pysatnasa_logo.png :width: 400px :align: center :alt: pysatNASA Logo, a blue planet with red orbiting python and the module name superimposed diff --git a/pyproject.toml b/pyproject.toml index 33f9ab53..591ff662 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -40,7 +40,7 @@ dependencies = [ "cdflib >= 0.4.4", "numpy", "pandas", - "pysat >= 3.0.2", + "pysat >= 3.0.4", "xarray" ] @@ -53,6 +53,7 @@ test = [ "pytest-ordering" ] doc = [ + "extras_require", "flake8", "flake8-docstrings", "hacking>=1.0", From 4f6bad4c5178a3de68f4f33830d9bfdac29d0bac Mon Sep 17 00:00:00 2001 From: Jeff Klenzing Date: Tue, 25 Oct 2022 17:09:30 -0400 Subject: [PATCH 07/38] TST: update main job --- .github/workflows/main.yml | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index b0ea9322..57b084e7 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -31,13 +31,12 @@ jobs: if: ${{ matrix.numpy_ver != 'latest'}} run: | pip install --no-binary :numpy: numpy==${{ matrix.numpy_ver }} - pip install "xarray<2022.06" + pip install .[test,pysatcdf] - name: Install standard dependencies + if: ${{ matrix.numpy_ver == 'latest'}} run: | - pip install -r requirements.txt - pip install pysatCDF --no-binary=pysatCDF - pip install -r test_requirements.txt + pip install .[test,pysatcdf] - name: Set up pysat run: | @@ -51,8 +50,7 @@ jobs: run: flake8 . --count --exit-zero --max-complexity=10 --statistics - name: Test with pytest - run: | - pytest -vs --cov=pysatNASA/ + run: pytest - name: Publish results to coveralls env: From 2e42767ebd457c2831b22219e530283179f21953 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing Date: Tue, 25 Oct 2022 17:14:16 -0400 Subject: [PATCH 08/38] TST: remove pysatcdf --- .github/workflows/main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 57b084e7..e532b1bb 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -31,12 +31,12 @@ jobs: if: ${{ matrix.numpy_ver != 'latest'}} run: | pip install --no-binary :numpy: numpy==${{ matrix.numpy_ver }} - pip install .[test,pysatcdf] + pip install .[test] - name: Install standard dependencies if: ${{ matrix.numpy_ver == 'latest'}} run: | - pip install .[test,pysatcdf] + pip install .[test] - name: Set up pysat run: | From ab26d38256ea5890f948caa380af29c8e4502ab2 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing Date: Tue, 25 Oct 2022 17:15:36 -0400 Subject: [PATCH 09/38] MAINT: drop requirements files --- requirements.txt | 9 --------- test_requirements.txt | 13 ------------- 2 files changed, 22 deletions(-) delete mode 100644 requirements.txt delete mode 100644 test_requirements.txt diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index a7141a59..00000000 --- a/requirements.txt +++ /dev/null @@ -1,9 +0,0 @@ -netCDF4 -requests -beautifulsoup4 -lxml -cdflib>=0.4.4 -numpy -pandas -pysat>=3.0.2 -xarray diff --git a/test_requirements.txt b/test_requirements.txt deleted file mode 100644 index 6c5e6eff..00000000 --- a/test_requirements.txt +++ /dev/null @@ -1,13 +0,0 @@ -coveralls<3.3 -extras_require -flake8 -flake8-docstrings -hacking>=1.0 -ipython -m2r2 -numpydoc -pytest -pytest-cov -pytest-ordering -sphinx -sphinx_rtd_theme From 4ac41675d03ffab950c0cd45d4b07dad8937dbab Mon Sep 17 00:00:00 2001 From: Jeff Klenzing Date: Tue, 25 Oct 2022 17:21:37 -0400 Subject: [PATCH 10/38] BUG: install 
flake8 --- .github/workflows/main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index e532b1bb..da6f8cfa 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -31,12 +31,12 @@ jobs: if: ${{ matrix.numpy_ver != 'latest'}} run: | pip install --no-binary :numpy: numpy==${{ matrix.numpy_ver }} - pip install .[test] + pip install .[doc,test] - name: Install standard dependencies if: ${{ matrix.numpy_ver == 'latest'}} run: | - pip install .[test] + pip install .[doc,test] - name: Set up pysat run: | From 64496cf6e250dee2991ce8e46316070ade32b490 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing Date: Tue, 25 Oct 2022 17:29:46 -0400 Subject: [PATCH 11/38] STY: whitespace --- pyproject.toml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 591ff662..33571708 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -47,7 +47,7 @@ dependencies = [ [project.optional-dependencies] pysatcdf = ["pysatCDF"] test = [ - "coveralls<3.3", + "coveralls < 3.3", "pytest", "pytest-cov", "pytest-ordering" @@ -56,7 +56,7 @@ doc = [ "extras_require", "flake8", "flake8-docstrings", - "hacking>=1.0", + "hacking >= 1.0", "ipython", "m2r2", "numpydoc", From 1301eeeec9e3606227624f5f2cfd39e148c56ec5 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing Date: Tue, 25 Oct 2022 17:36:04 -0400 Subject: [PATCH 12/38] STY: group optional dependencies --- .github/workflows/main.yml | 4 ++-- pyproject.toml | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index da6f8cfa..e532b1bb 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -31,12 +31,12 @@ jobs: if: ${{ matrix.numpy_ver != 'latest'}} run: | pip install --no-binary :numpy: numpy==${{ matrix.numpy_ver }} - pip install .[doc,test] + pip install .[test] - name: Install standard dependencies if: ${{ matrix.numpy_ver == 'latest'}} run: | - pip install .[doc,test] + pip install .[test] - name: Set up pysat run: | diff --git a/pyproject.toml b/pyproject.toml index 33571708..9148089c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -48,15 +48,15 @@ dependencies = [ pysatcdf = ["pysatCDF"] test = [ "coveralls < 3.3", + "flake8", + "flake8-docstrings", + "hacking >= 1.0", "pytest", "pytest-cov", "pytest-ordering" ] doc = [ "extras_require", - "flake8", - "flake8-docstrings", - "hacking >= 1.0", "ipython", "m2r2", "numpydoc", From 2788454288da08f1598b9783b394e3cd4be0169d Mon Sep 17 00:00:00 2001 From: Jeff Klenzing Date: Tue, 25 Oct 2022 17:36:41 -0400 Subject: [PATCH 13/38] DOC: show optional requirements --- docs/installation.rst | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/docs/installation.rst b/docs/installation.rst index 5975d708..0431da34 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -58,6 +58,15 @@ Installation Options pip install -e . +.. extras-require:: pysatcdf + :pyproject: + +.. extras-require:: test + :pyproject: + +.. extras-require:: doc + :pyproject: + .. 
_post-install: Post Installation From 2a8b50c99fbd56e3075141e279edb6d17c646865 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing Date: Tue, 25 Oct 2022 17:40:20 -0400 Subject: [PATCH 14/38] TST: improve nep29 --- .github/workflows/main.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index e532b1bb..09c22e1c 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -31,7 +31,7 @@ jobs: if: ${{ matrix.numpy_ver != 'latest'}} run: | pip install --no-binary :numpy: numpy==${{ matrix.numpy_ver }} - pip install .[test] + pip install --upgrade-strategy only-if-needed .[test] - name: Install standard dependencies if: ${{ matrix.numpy_ver == 'latest'}} From 4b2a31756e03918e2fc9958589ab7d27b1adf10e Mon Sep 17 00:00:00 2001 From: Jeff Klenzing Date: Tue, 25 Oct 2022 17:40:29 -0400 Subject: [PATCH 15/38] TST: optional pysatcdf --- .github/workflows/main.yml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 09c22e1c..52735d1a 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -38,6 +38,9 @@ jobs: run: | pip install .[test] + - name: Install pysatCDF + run: pip install pysatCDF --no-binary=pysatCDF + - name: Set up pysat run: | mkdir pysatData From cd7a86f170385024e4c14695c651b0b637735f89 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing <19592220+jklenzing@users.noreply.github.com> Date: Tue, 25 Oct 2022 22:14:26 -0400 Subject: [PATCH 16/38] Update main.yml --- .github/workflows/main.yml | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 52735d1a..4d00d2ac 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -31,6 +31,8 @@ jobs: if: ${{ matrix.numpy_ver != 'latest'}} run: | pip install --no-binary :numpy: numpy==${{ matrix.numpy_ver }} + # Force install version compatible with NEP29 + pip install "pandas<1.5" pip install --upgrade-strategy only-if-needed .[test] - name: Install standard dependencies @@ -58,4 +60,4 @@ jobs: - name: Publish results to coveralls env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - run: coveralls --rcfile=setup.cfg --service=github + run: coveralls --rcfile=pyproject.toml --service=github From d48ff062177573271d554366f18b4f60628ad183 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing <19592220+jklenzing@users.noreply.github.com> Date: Wed, 26 Oct 2022 10:48:12 -0400 Subject: [PATCH 17/38] STY: cleanup --- .github/workflows/main.yml | 1 + docs/installation.rst | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 0def20f7..b4f19747 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -41,6 +41,7 @@ jobs: pip install .[test] - name: Install pysatCDF + # Need custom install until pysatCDF is updated run: pip install pysatCDF --no-binary=pysatCDF - name: Set up pysat diff --git a/docs/installation.rst b/docs/installation.rst index 0431da34..7ba6d0ba 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -51,7 +51,7 @@ Installation Options B. Install at the user level:: - pip install . + pip install --user . C. 
Install with the intent to develop locally:: From e22664b9a002ef0e6f5a7cfe822eb7909ecf7360 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing Date: Wed, 26 Oct 2022 11:02:16 -0400 Subject: [PATCH 18/38] BUG: optional pysatCDF --- .github/workflows/main.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index b4f19747..fec0dc3f 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -41,6 +41,8 @@ jobs: pip install .[test] - name: Install pysatCDF + # Optional package, continue with tests if install fails + continue-on-error: true # Need custom install until pysatCDF is updated run: pip install pysatCDF --no-binary=pysatCDF From 9faf534865a7e858e13fa79c71ec807ca2cbfabe Mon Sep 17 00:00:00 2001 From: Jeff Klenzing Date: Thu, 6 Apr 2023 14:05:29 -0400 Subject: [PATCH 19/38] Merge branch 'develop' into sty/pyproject --- .github/ISSUE_TEMPLATE/bug_report.md | 34 +- .github/ISSUE_TEMPLATE/feature_request.md | 24 +- .github/ISSUE_TEMPLATE/question.md | 19 ++ .github/pull_request_template.md | 13 +- .github/workflows/docs.yml | 6 +- .github/workflows/main.yml | 21 +- .github/workflows/pip_rc_install.yml | 30 ++ .github/workflows/pysat_rc.yml | 51 +++ .gitignore | 3 + .zenodo.json | 5 + CHANGELOG.md | 46 ++- CONTRIBUTING.md | 55 +-- LICENSE | 2 +- README.md | 46 ++- docs/conf.py | 11 +- docs/installation.rst | 2 +- docs/supported_constellations.rst | 2 + docs/supported_instruments.rst | 64 ++++ pysatNASA/constellations/de2.py | 23 +- pysatNASA/constellations/icon.py | 22 +- pysatNASA/instruments/__init__.py | 10 +- pysatNASA/instruments/ace_epam_l2.py | 106 ++++++ pysatNASA/instruments/ace_mag_l2.py | 110 ++++++ pysatNASA/instruments/ace_sis_l2.py | 100 ++++++ pysatNASA/instruments/ace_swepam_l2.py | 104 ++++++ pysatNASA/instruments/cnofs_ivm.py | 45 ++- pysatNASA/instruments/cnofs_plp.py | 22 +- pysatNASA/instruments/cnofs_vefi.py | 22 +- pysatNASA/instruments/de2_fpi.py | 107 ++++++ pysatNASA/instruments/de2_lang.py | 21 +- pysatNASA/instruments/de2_nacs.py | 23 +- pysatNASA/instruments/de2_rpa.py | 48 ++- pysatNASA/instruments/de2_vefi.py | 105 ++++++ pysatNASA/instruments/de2_wats.py | 23 +- pysatNASA/instruments/dmsp_ssusi.py | 123 +++++++ pysatNASA/instruments/formosat1_ivm.py | 11 +- pysatNASA/instruments/icon_euv.py | 27 +- pysatNASA/instruments/icon_fuv.py | 34 +- pysatNASA/instruments/icon_ivm.py | 29 +- pysatNASA/instruments/icon_mighti.py | 56 ++- pysatNASA/instruments/iss_fpmu.py | 9 +- pysatNASA/instruments/jpl_gps.py | 22 +- pysatNASA/instruments/methods/__init__.py | 5 + pysatNASA/instruments/methods/ace.py | 54 +++ pysatNASA/instruments/methods/cdaweb.py | 326 +++++++++++++++--- pysatNASA/instruments/methods/de2.py | 17 +- pysatNASA/instruments/methods/dmsp.py | 16 + pysatNASA/instruments/methods/general.py | 4 + pysatNASA/instruments/methods/gold.py | 24 -- pysatNASA/instruments/methods/jhuapl.py | 395 ++++++++++++++++++++++ pysatNASA/instruments/methods/ses14.py | 24 ++ pysatNASA/instruments/methods/timed.py | 33 ++ pysatNASA/instruments/omni_hro.py | 58 +--- pysatNASA/instruments/ses14_gold.py | 78 ++--- pysatNASA/instruments/timed_guvi.py | 197 +++++++++++ pysatNASA/instruments/timed_saber.py | 44 +-- pysatNASA/instruments/timed_see.py | 53 +-- pysatNASA/tests/test_instruments.py | 16 +- pysatNASA/tests/test_methods_cdaweb.py | 37 +- pysatNASA/tests/test_methods_platform.py | 129 +++++++ pysatNASA/tests/test_omni_hro.py | 15 +- requirements.txt | 10 + tox.ini => setup.cfg | 0 63 files changed, 2603 
insertions(+), 568 deletions(-) create mode 100644 .github/ISSUE_TEMPLATE/question.md create mode 100644 .github/workflows/pip_rc_install.yml create mode 100644 .github/workflows/pysat_rc.yml create mode 100644 pysatNASA/instruments/ace_epam_l2.py create mode 100644 pysatNASA/instruments/ace_mag_l2.py create mode 100644 pysatNASA/instruments/ace_sis_l2.py create mode 100644 pysatNASA/instruments/ace_swepam_l2.py create mode 100644 pysatNASA/instruments/de2_fpi.py create mode 100644 pysatNASA/instruments/de2_vefi.py create mode 100644 pysatNASA/instruments/dmsp_ssusi.py create mode 100644 pysatNASA/instruments/methods/ace.py create mode 100644 pysatNASA/instruments/methods/dmsp.py delete mode 100644 pysatNASA/instruments/methods/gold.py create mode 100644 pysatNASA/instruments/methods/jhuapl.py create mode 100644 pysatNASA/instruments/methods/ses14.py create mode 100644 pysatNASA/instruments/methods/timed.py create mode 100644 pysatNASA/instruments/timed_guvi.py create mode 100644 pysatNASA/tests/test_methods_platform.py create mode 100644 requirements.txt rename tox.ini => setup.cfg (100%) diff --git a/.github/ISSUE_TEMPLATE/bug_report.md b/.github/ISSUE_TEMPLATE/bug_report.md index 23d236d6..57640182 100644 --- a/.github/ISSUE_TEMPLATE/bug_report.md +++ b/.github/ISSUE_TEMPLATE/bug_report.md @@ -1,35 +1,29 @@ --- name: Bug report -about: Create a report to help us improve +about: Create a report to report a problem that needs to be fixed +labels: bug +title: "BUG: " --- -**Describe the bug** -A clear and concise description of what the bug is. +# Description +A clear and concise description of what the bug is, including a description +of what you expected the outcome to be. -**To Reproduce** +# To Reproduce this bug: Steps to reproduce the behavior: 1. Go to '...' 2. Click on '....' 3. Scroll down to '....' 4. See error -or +Consider including images or test files to help others reproduce the bug and +solve the problem. -``` -# test code here -``` - -**Expected behavior** -A clear and concise description of what you expected to happen. - -**Screenshots** -If applicable, add screenshots to help explain your problem. - -**Desktop (please complete the following information):** - - OS: [e.g. iOS] - - Version [e.g. Python 3.7] +## Test configuration + - OS: [e.g. Hal] + - Version [e.g. Python 3.47] - Other details about your setup that could be relevant -**Additional context** -Add any other context about the problem here. +# Additional context +Add any other context about the problem here, including expected behaviour. diff --git a/.github/ISSUE_TEMPLATE/feature_request.md b/.github/ISSUE_TEMPLATE/feature_request.md index 066b2d92..d02da2ef 100644 --- a/.github/ISSUE_TEMPLATE/feature_request.md +++ b/.github/ISSUE_TEMPLATE/feature_request.md @@ -1,17 +1,27 @@ --- name: Feature request about: Suggest an idea for this project +title: "ENH: " +labels: enhancement --- -**Is your feature request related to a problem? Please describe.** -A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] +# Description +A clear and concise description of the new feature or behaviour you would like. -**Describe the solution you'd like** +## Potential impact + +- Is the feature related to an existing problem? +- How critical is this feature to your workflow? +- How wide of an impact to you anticipate this enhancement having? +- Would this break any existing functionality? + +## Potential solution(s) A clear and concise description of what you want to happen. 
-**Describe alternatives you've considered** -A clear and concise description of any alternative solutions or features you've considered. +# Alternatives +A clear description of any alternative solutions or features you've considered. -**Additional context** -Add any other context or screenshots about the feature request here. +# Additional context +Add any other context or screenshots about the feature request here, potentially +including your operational configuration. diff --git a/.github/ISSUE_TEMPLATE/question.md b/.github/ISSUE_TEMPLATE/question.md new file mode 100644 index 00000000..da43edc7 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/question.md @@ -0,0 +1,19 @@ +--- +name: Question +about: A question about this project +title: "QUEST: " +labels: question + +--- + +# Description +A clear and concise summary of your query + +## Example code (optional) +If relevant, include sample code, images, or files so that others can understand +the full context of your question. + +## Configuration + - OS: [e.g. Hal] + - Version: [e.g. Python 3.47] + - Other details about your setup that could be relevant diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index 5331eb30..e1d2dbe5 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -1,12 +1,12 @@ # Description -Addresses # (issue) +Addresses #(issue) Please include a summary of the change and which issue is fixed. Please also include relevant motivation and context. List any dependencies that are required for this change. Please see ``CONTRIBUTING.md`` for more guidelines. -## Type of change +# Type of change Please delete options that are not relevant. @@ -25,9 +25,9 @@ your test configuration - Test A - Test B -## Test Configuration -* Operating system: [Os Type] -* Version number: [Python 2.9] +**Test Configuration**: +* Operating system: Hal +* Version number: Python 3.X * Any details about your local setup that are relevant # Checklist: @@ -43,3 +43,6 @@ your test configuration - [ ] Any dependent changes have been merged and published in downstream modules - [ ] Add a note to ``CHANGELOG.md``, summarizing the changes - [ ] Update zenodo.json file for new code contributors + +If this is a release PR, replace the first item of the above checklist with the release +checklist on the wiki: https://github.com/pysat/pysat/wiki/Checklist-for-Release diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml index 4baa1b71..fe6d6ca4 100644 --- a/.github/workflows/docs.yml +++ b/.github/workflows/docs.yml @@ -1,4 +1,4 @@ -# This workflow will install Python dependencies, run tests and lint with a variety of Python versions +# This workflow will install Python dependencies and check the sphinx build, links in the docs, and the readability of the zenodo file # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions name: Documentation Check @@ -8,11 +8,11 @@ on: [push, pull_request] jobs: build: - runs-on: ubuntu-latest + runs-on: ["ubuntu-latest"] strategy: fail-fast: false matrix: - python-version: [3.9] + python-version: ["3.10"] # Keep this version at the highest supported Python version name: Documentation tests steps: diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index fec0dc3f..80643508 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -10,13 +10,13 @@ jobs: strategy: fail-fast: false matrix: - os: [ubuntu-latest, windows-latest] + os: ["ubuntu-latest", 
"macos-latest", "windows-latest"] python-version: ["3.9", "3.10"] - numpy_ver: [latest] + numpy_ver: ["latest"] include: - python-version: "3.8" - numpy_ver: "1.20" - os: ubuntu-latest + numpy_ver: "1.21" + os: "ubuntu-latest" name: Python ${{ matrix.python-version }} on ${{ matrix.os }} with numpy ${{ matrix.numpy_ver }} runs-on: ${{ matrix.os }} @@ -27,25 +27,22 @@ jobs: with: python-version: ${{ matrix.python-version }} + - name: Reinstall fortran on MacOS + if: ${{ matrix.os == 'macos-latest' }} + run: brew reinstall gcc + - name: Install NEP29 dependencies if: ${{ matrix.numpy_ver != 'latest'}} run: | pip install --no-binary :numpy: numpy==${{ matrix.numpy_ver }} - # Force install version compatible with NEP29 - pip install "pandas<1.5" pip install --upgrade-strategy only-if-needed .[test] + pip install pysatCDF --no-binary=pysatCDF - name: Install standard dependencies if: ${{ matrix.numpy_ver == 'latest'}} run: | pip install .[test] - - name: Install pysatCDF - # Optional package, continue with tests if install fails - continue-on-error: true - # Need custom install until pysatCDF is updated - run: pip install pysatCDF --no-binary=pysatCDF - - name: Set up pysat run: | mkdir pysatData diff --git a/.github/workflows/pip_rc_install.yml b/.github/workflows/pip_rc_install.yml new file mode 100644 index 00000000..45d835e5 --- /dev/null +++ b/.github/workflows/pip_rc_install.yml @@ -0,0 +1,30 @@ +# This workflow will install Python dependencies and the latest RC of pysatNASA from test pypi. +# This test should be manually run before a pysatNASA RC is officially approved and versioned. +# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions + +name: Test install of latest RC from pip + +on: [workflow_dispatch] + +jobs: + build: + strategy: + fail-fast: false + matrix: + os: ["ubuntu-latest", "macos-latest", "windows-latest"] + python-version: ["3.10"] # Keep this version at the highest supported Python version + + name: Python ${{ matrix.python-version }} on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install standard dependencies + run: pip install -r requirements.txt + + - name: Install pysatNASA RC + run: pip install --no-deps -i https://test.pypi.org/simple/ pysatNASA diff --git a/.github/workflows/pysat_rc.yml b/.github/workflows/pysat_rc.yml new file mode 100644 index 00000000..c73bcf07 --- /dev/null +++ b/.github/workflows/pysat_rc.yml @@ -0,0 +1,51 @@ +# This workflow will install Python dependencies and the latest RC of pysat from test pypi. +# All unit tests for pysatNASA will be run using the pysat RC. +# This test should be manually run before a pysat RC is officially approved and versioned. 
+# For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions + +name: Test with latest pysat RC + +on: [workflow_dispatch] + +jobs: + build: + strategy: + fail-fast: false + matrix: + os: ["ubuntu-latest", "macos-latest", "windows-latest"] + python-version: ["3.10"] # Keep this version at the highest supported Python version + + name: Python ${{ matrix.python-version }} on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + steps: + - uses: actions/checkout@v3 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Reinstall fortran on MacOS + if: ${{ matrix.os == 'macos-latest' }} + run: brew reinstall gcc + + - name: Install pysat RC + run: pip install --no-deps -i https://test.pypi.org/simple/ pysat + + - name: Install standard dependencies + run: | + pip install -r requirements.txt + pip install pysatCDF --no-binary=pysatCDF + pip install -r test_requirements.txt + + - name: Set up pysat + run: | + mkdir pysatData + python -c "import pysat; pysat.params['data_dirs'] = 'pysatData'" + + - name: Test with pytest + run: pytest -vs --cov=pysatNASA/ + + - name: Publish results to coveralls + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + run: coveralls --rcfile=setup.cfg --service=github diff --git a/.gitignore b/.gitignore index 6db464db..c8350919 100644 --- a/.gitignore +++ b/.gitignore @@ -70,3 +70,6 @@ custom_lint.sh # IDEs .idea/ + +# vscode +.vscode diff --git a/.zenodo.json b/.zenodo.json index a5bcff9a..1052a817 100644 --- a/.zenodo.json +++ b/.zenodo.json @@ -27,6 +27,11 @@ { "name": "Spence, Carey", "orcid": "0000-0001-8340-5625" + }, + { + "affilitation":"University of Colorado at Boulder", + "name": "Navarro, Luis", + "orcid": "0000-0002-6362-6575" } ] } diff --git a/CHANGELOG.md b/CHANGELOG.md index e1761132..d06311f0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,7 +2,46 @@ All notable changes to this project will be documented in this file. This project adheres to [Semantic Versioning](https://semver.org/). -## [0.0.4] - 2022-XX-XX +## [0.X.X] - 2023-XX-XX +* New Instruments + * ACE EPAM + * ACE MAG + * ACE SIS + * ACE SWEPAM + * DE2 Fabry-Perot Interferometer (FPI) + * DE2 Vector Electric Field Instrument (VEFI) and magnetometer + * DMSP SSUSI EDR-Aurora data + * TIMED GUVI +* Add TIMED GUVI platform to support L1C intensity datasets. + * Type of sensor source handled by inst_id with options of + spectrograph, imaging + * Resolution of dataset handled by tag with + low, high +* Added CDAWeb methods that can use cdasws to get the remote file list +* Bug Fixes + * Updated CDAWeb routines to allow for data stored by year/day-of-year + * Updated GOLD nmax to sort scans by time. 
+ * Added 1 usec to GOLD nmax channel B times to ensure uniqueness +* Documentation + * Added TIMED-GUVI platform + * Added missing sub-module imports + * Added discussion of ICON constellation to docstrings, including caveats +* Enhancements + * Updated platform methods to follow a consistent style and work with the + general `init` function + * Added unit tests for the different platform method attributes + * xarray support for TIMED SEE +* Maintenance + * Removed duplicate tests if pysatCDF not isntalled + * Only test pysatCDF on GitHub Actions for older numpy versions + * Updated actions and templates based on pysatEcosystem docs + * Remove pandas cap on NEP29 tests + * Updated dosctring style for consistency + * Removed version cap for xarray + * Added manual workflow to check that latest RC is installable through test pip + * Use pyproject.toml to manage setup + +## [0.0.4] - 2022-11-07 * Update instrument tests with new test class * Support xarray datasets through cdflib * Preferentially loads data into pandas using pysatCDF if installed @@ -22,7 +61,10 @@ This project adheres to [Semantic Versioning](https://semver.org/). * Move OMNI HRO custom functions to a methods module * Deprecate OMNI HRO custom functions in instrument module * Update GitHub actions to the latest versions - * Use pyproject.toml to manage setup + * Added downstream test to test code with pysat RC + * Remove deprecated `convert_timestamp_to_datetime` calls + * Remove deprecated pandas syntax + * Added version cap for xarray 2022.11 * Documentation * New logo added diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5e9b2a04..45943ed4 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -78,33 +78,44 @@ To set up `pysatNASA` for local development: 4. When you're done making changes, run all the checks to ensure that nothing - is broken on your local system, as well as check for flake8 compliance: + is broken on your local system: - ``` - pytest -vs --flake8 pysatNASA - ``` + ``` + pytest pysatNASA + ``` + +5. You should also check for flake8 style compliance: + + ``` + flake8 . --count --select=D,E,F,H,W --show-source --statistics + ``` -5. Update/add documentation (in ``docs``), if relevant + Note that pysat uses the `flake-docstrings` and `hacking` packages to ensure + standards in docstring formatting. -6. Add your name to the .zenodo.json file as an author -7. Commit your changes: - ``` - git add . - git commit -m "AAA: Brief description of your changes" - ``` - Where AAA is a standard shorthand for the type of change (eg, BUG or DOC). - `pysat` follows the [numpy development workflow](https://numpy.org/doc/stable/dev/development_workflow.html), - see the discussion there for a full list of this shorthand notation. +6. Update/add documentation (in ``docs``), if relevant -8. Once you are happy with the local changes, push to Github: - ``` - git push origin name-of-your-bugfix-or-feature - ``` - Note that each push will trigger the Continuous Integration workflow. +7. Add your name to the .zenodo.json file as an author + +8. Commit your changes: + ``` + git add . + git commit -m "AAA: Brief description of your changes" + ``` + Where AAA is a standard shorthand for the type of change (eg, BUG or DOC). + `pysat` follows the [numpy development workflow](https://numpy.org/doc/stable/dev/development_workflow.html), + see the discussion there for a full list of this shorthand notation. + +9. 
Once you are happy with the local changes, push to Github: + ``` + git push origin name-of-your-bugfix-or-feature + ``` + Note that each push will trigger the Continuous Integration workflow. -9. Submit a pull request through the GitHub website. Pull requests should be - made to the ``develop`` branch. +10. Submit a pull request through the GitHub website. Pull requests should be + made to the ``develop`` branch. Note that automated tests will be run on + github actions, but these must be initialized by a member of the pysat team. Pull Request Guidelines ----------------------- @@ -160,3 +171,5 @@ These include: * Block and inline comments should use proper English grammar and punctuation with the exception of single sentences in a block, which may then omit the final period +* When casting is necessary, use `np.int64` and `np.float64` to ensure operating + system agnosticism diff --git a/LICENSE b/LICENSE index c9a10064..666d8f5d 100644 --- a/LICENSE +++ b/LICENSE @@ -1,6 +1,6 @@ BSD 3-Clause License -Copyright (c) 2020, pysat +Copyright (c) 2023, pysat All rights reserved. Redistribution and use in source and binary forms, with or without diff --git a/README.md b/README.md index 1fceb954..9bdce9c5 100644 --- a/README.md +++ b/README.md @@ -22,20 +22,23 @@ pysatNASA uses common Python modules, as well as modules developed by and for the Space Physics community. This module officially supports Python 3.8+. -| Common modules | Community modules | -| ---------------- | ----------------- | -| beautifulsoup4 | cdflib | -| lxml | pysat>=3.0.4 | -| netCDF4 | | -| numpy | | -| pandas | | -| requests | | -| xarray | | +| Common modules | Community modules | Optional Modules | +| ---------------- | ----------------- |------------------| +| beautifulsoup4 | cdflib | pysatCDF | +| lxml | pysat>=3.0.4 | | +| netCDF4 | | | +| numpy<1.24 | | | +| pandas | | | +| requests | | | +| xarray | | | + +## PyPi Installation +``` +pip install pysatNASA +``` ## GitHub Installation -Currently, the main way to get pysatNASA is through github. - ``` git clone https://github.com/pysat/pysatNASA.git ``` @@ -51,6 +54,7 @@ pip install . Note: pre-1.0.0 version ----------------------- pysatNASA is currently in an initial development phase and requires pysat 3.0.4. +Feedback and contributions are appreciated. # Using with pysat @@ -62,7 +66,9 @@ from pysatNASA.instruments import icon_ivm ivm = pysat.Instrument(inst_module=icon_ivm, inst_id='a') ``` -Another way to use the instruments in an external repository is to register the instruments. This only needs to be done the first time you load an instrument. Afterward, pysat will identify them using the `platform` and `name` keywords. +Another way to use the instruments in an external repository is to register the +instruments. This only needs to be done the first time you load an instrument. +Afterward, pysat will identify them using the `platform` and `name` keywords. ``` import pysat @@ -70,3 +76,19 @@ import pysat pysat.utils.registry.register(['pysatNASA.instruments.icon_ivm']) ivm = pysat.Instrument('icon', 'ivm', inst_id='a') ``` + +# CDF Integration +For data products stored as CDF files, this package can use either `cdflib` or +`pysatCDF`. Note that `cdflib` is a pure python package and more readily +deployable across systems, whereas `pysatCDF` interfaces with the fortran. +This is a faster approach for loading data, but may not install on all systems. +There are known issues with `numpy`>=1.24. Therefore, `pysatCDF` is optional +rather than required. 
+ +You can specify which load routine to use via the optional `use_cdflib` kwarg. +If no kwarg is specified, `pysatNASA` will default to `pysatCDF` if it is +successfully installed, and default to `cdflib` otherwise. + +``` +ivm = pysat.Instrument('cnofs', 'ivm', use_cdflib=True) +``` diff --git a/docs/conf.py b/docs/conf.py index 4a6c06f2..43245c9d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -53,11 +53,12 @@ master_doc = 'index' # General information about the project. -zenodo = json.loads(open('../.zenodo.json').read()) project = 'pysatNASA' -author = ', '.join([creator['name'] for creator in zenodo['creators']]) -manual_copyright = ', '.join(['2021', author]) title = '{:s} Documentation'.format(project) +zenodo = json.loads(open('../.zenodo.json').read()) +author = ', '.join([creator['name'] for creator in zenodo['creators']]) +manual_copyright = ', '.join(['2023', author]) +category = 'Space Physics' description = 'Tools for NASA CDAWeb instruments.' # The version info for the project you're documenting, acts as replacement for @@ -147,8 +148,8 @@ # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) -texinfo_documents = [(master_doc, project, title, author, project, description, - 'Space Physics')] +texinfo_documents = [(master_doc, project, title, author, project, + description, category)] # -- Options for Epub output ---------------------------------------------- diff --git a/docs/installation.rst b/docs/installation.rst index 7ba6d0ba..ae8eedda 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -25,7 +25,7 @@ Python 3.8+ and pysat 3.0.4+. numpy pandas requests - xarray + xarray<2022.11 ================== ================= diff --git a/docs/supported_constellations.rst b/docs/supported_constellations.rst index f5ebf71f..d7e7eaa3 100644 --- a/docs/supported_constellations.rst +++ b/docs/supported_constellations.rst @@ -6,10 +6,12 @@ DE2 The Dynamics Explorer 2 spacecraft. Includes the instruments +- :ref:`de2_fpi` - :ref:`de2_lang` - :ref:`de2_nacs` - :ref:`de2_rpa` - :ref:`de2_wats` +- :ref:`de2_vefi` .. automodule:: pysatNASA.constellations.de2 :members: diff --git a/docs/supported_instruments.rst b/docs/supported_instruments.rst index 054bc982..99bbe094 100644 --- a/docs/supported_instruments.rst +++ b/docs/supported_instruments.rst @@ -1,6 +1,38 @@ Supported Instruments ===================== +.. _ace_epam: + +ACE EPAM +-------- + +.. automodule:: pysatNASA.instruments.ace_epam_l2 + :members: + +.. _ace_mag: + +ACE MAG +------- + +.. automodule:: pysatNASA.instruments.ace_mag_l2 + :members: + +.. _ace_sis: + +ACE SIS +------- + +.. automodule:: pysatNASA.instruments.ace_sis_l2 + :members: + +.. _ace_swepam: + +ACE SWEPAM +---------- + +.. automodule:: pysatNASA.instruments.ace_swepam_l2 + :members: + .. _cnofs_ivm: C/NOFS IVM @@ -25,6 +57,14 @@ C/NOFS VEFI .. automodule:: pysatNASA.instruments.cnofs_vefi :members: +.. _de2_fpi: + +DE2 FPI +-------- + +.. automodule:: pysatNASA.instruments.de2_fpi + :members: + .. _de2_lang: DE2 LANG @@ -49,6 +89,14 @@ DE2 RPA .. automodule:: pysatNASA.instruments.de2_rpa :members: +.. _de2_vefi: + +DE2 VEFI +-------- + +.. automodule:: pysatNASA.instruments.de2_vefi + :members: + .. _de2_wats: DE2 WATS @@ -57,6 +105,14 @@ DE2 WATS .. automodule:: pysatNASA.instruments.de2_wats :members: +.. _dmsp_ssusi: + +DMSP SSUSI +---------- + +.. automodule:: pysatNASA.instruments.dmsp_ssusi + :members: + .. 
_formosat1_ivm: FORMOSAT-1 IVM @@ -130,6 +186,14 @@ SES14 GOLD .. automodule:: pysatNASA.instruments.ses14_gold :members: +.. _timed_guvi: + +TIMED GUVI +---------- + +.. automodule:: pysatNASA.instruments.timed_guvi + :members: + .. _timed_saber: TIMED SABER diff --git a/pysatNASA/constellations/de2.py b/pysatNASA/constellations/de2.py index eca99b6a..2a5ad572 100644 --- a/pysatNASA/constellations/de2.py +++ b/pysatNASA/constellations/de2.py @@ -1,13 +1,32 @@ -"""Creates a constellation from the NASA DE2 satellite platform.""" +"""Creates a constellation from the NASA DE2 satellite platform. + +Includes the core supported instruments. + + +Examples +-------- +:: + + import pysat + import pysatNASA + + de2 = pysat.Constellation(const_module=pysatNASA.constellations.de2) + + de2.load(1983, 1) + + +""" import pysat from pysatNASA import instruments +fpi = pysat.Instrument(inst_module=instruments.de2_fpi) lang = pysat.Instrument(inst_module=instruments.de2_lang) nacs = pysat.Instrument(inst_module=instruments.de2_nacs) rpa = pysat.Instrument(inst_module=instruments.de2_rpa) wats = pysat.Instrument(inst_module=instruments.de2_wats) +vefi = pysat.Instrument(inst_module=instruments.de2_vefi) -instruments = [lang, nacs, rpa, wats] +instruments = [fpi, lang, nacs, rpa, wats, vefi] diff --git a/pysatNASA/constellations/icon.py b/pysatNASA/constellations/icon.py index 3df198a0..42cd6462 100644 --- a/pysatNASA/constellations/icon.py +++ b/pysatNASA/constellations/icon.py @@ -1,4 +1,24 @@ -"""Creates a constellation from NASA the ICON satellite platform.""" +"""Creates a constellation from NASA the ICON satellite platform. + +Includes the core instruments without the line of sight winds. + +Note that IVM A and B are nominally never active at the same time. This +constellation should be initialized with `common_index=False`. This forgoes +the pysat check that ensures all instruments load data. + +Examples +-------- +:: + + import pysat + import pysatNASA + + icon = pysat.Constellation(const_module=pysatNASA.constellations.icon, + common_index=False) + + icon.load(2020, 1) + +""" import pysat diff --git a/pysatNASA/instruments/__init__.py b/pysatNASA/instruments/__init__.py index c3a8bfae..1b028313 100644 --- a/pysatNASA/instruments/__init__.py +++ b/pysatNASA/instruments/__init__.py @@ -4,13 +4,15 @@ Each instrument is contained within a subpackage of this set. """ +from pysatNASA.instruments import methods # noqa F401 -__all__ = ['cnofs_ivm', 'cnofs_plp', 'cnofs_vefi', - 'de2_lang', 'de2_nacs', 'de2_rpa', 'de2_wats', - 'formosat1_ivm', +__all__ = ['ace_epam_l2', 'ace_mag_l2', 'ace_sis_l2', 'ace_swepam_l2', + 'cnofs_ivm', 'cnofs_plp', 'cnofs_vefi', 'de2_fpi', + 'de2_lang', 'de2_nacs', 'de2_rpa', 'de2_vefi', 'de2_wats', + 'dmsp_ssusi', 'formosat1_ivm', 'icon_euv', 'icon_fuv', 'icon_ivm', 'icon_mighti', 'iss_fpmu', 'jpl_gps', 'omni_hro', 'ses14_gold', - 'timed_saber', 'timed_see'] + 'timed_guvi', 'timed_saber', 'timed_see'] for inst in __all__: exec("from pysatNASA.instruments import {x}".format(x=inst)) diff --git a/pysatNASA/instruments/ace_epam_l2.py b/pysatNASA/instruments/ace_epam_l2.py new file mode 100644 index 00000000..670ed3ad --- /dev/null +++ b/pysatNASA/instruments/ace_epam_l2.py @@ -0,0 +1,106 @@ +# -*- coding: utf-8 -*- +"""Module for the Advanced Composition Explorer (ACE) EPAM instrument. + +Properties +---------- +platform + 'ace' +name + 'epam_l2' +tag + 'base' or 'key' +inst_id + '12sec', '5min', '1hr' + +References +---------- +- Stone, E., Frandsen, A., Mewaldt, R. et al. 
The Advanced Composition Explorer. + Space Science Reviews 86, 1–22 (1998). https://doi.org/10.1023/A:1005082526237 +- Gold, R., Krimigis, S., Hawkins, S. et al. Electron, Proton, and Alpha Monitor + on the Advanced Composition Explorer spacecraft. Space Science Reviews 86, + 541–562 (1998). https://doi.org/10.1023/A:1005088115759 + +Note +---- +- Level 1 ACE data is maintained at pysatSpaceWeather. +- Release notes at + https://cdaweb.gsfc.nasa.gov/pub/data/ace/epam/epam_level2_release_notes.txt + +Warnings +-------- +- The cleaning parameters for the instrument are still under development. + +""" + +import datetime as dt +import functools + +from pysat.instruments.methods import general as mm_gen + +from pysatNASA.instruments.methods import ace as mm_ace +from pysatNASA.instruments.methods import cdaweb as cdw +from pysatNASA.instruments.methods import general as mm_nasa + +# ---------------------------------------------------------------------------- +# Instrument attributes + +platform = 'ace' +name = 'epam_l2' +tags = {'base': 'ACE/EPAM Solar Energetic Particle Base Data', + 'key': 'ACE/EPAM Solar Energetic Particle Key Parameters'} +inst_ids = {'12sec': ['base'], + '5min': ['key', 'base'], + '1hr': ['key', 'base']} +pandas_format = False + +# ---------------------------------------------------------------------------- +# Instrument test attributes + +_test_dates = {id: {tag: dt.datetime(2022, 1, 1) for tag in inst_ids[id]} + for id in inst_ids.keys()} + +# ---------------------------------------------------------------------------- +# Instrument methods + + +# Use standard init routine +init = functools.partial(mm_nasa.init, module=mm_ace, name=name) + +# Use default ace clean +clean = mm_ace.clean + +# ---------------------------------------------------------------------------- +# Instrument functions +# + +# Use the default CDAWeb and pysat methods + +# Set the list_files routine +strid = {'12sec': {'base': 'h3'}, + '5min': {'base': 'h1', 'key': 'k0'}, + '1hr': {'base': 'h2', 'key': 'k1'}} +fname = ''.join(('ac_{sid:s}_epm_{{year:4d}}{{month:02d}}{{day:02d}}_', + 'v{{version:02d}}.cdf')) +supported_tags = {id: {tag: fname.format(sid=strid[id][tag]) + for tag in inst_ids[id]} + for id in inst_ids.keys()} +list_files = functools.partial(mm_gen.list_files, + supported_tags=supported_tags) + +# Set the load routine +meta_translation = {'CATDESC': 'desc', 'FILLVAL': 'fill', + 'LABLAXIS': 'plot_label', 'VALIDMAX': 'value_max', + 'VALIDMIN': 'value_min', 'VAR_NOTES': 'notes'} +load = functools.partial(cdw.load, pandas_format=pandas_format, + meta_translation=meta_translation) + +# Set the download routine +download_tags = {'12sec': {'base': 'AC_H3_EPM'}, + '5min': {'base': 'AC_H1_EPM', 'key': 'AC_K0_EPM'}, + '1hr': {'base': 'AC_H2_EPM', 'key': 'AC_K1_EPM'}} + +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) + +# Set the list_remote_files routine +list_remote_files = functools.partial(cdw.cdas_list_remote_files, + supported_tags=download_tags) diff --git a/pysatNASA/instruments/ace_mag_l2.py b/pysatNASA/instruments/ace_mag_l2.py new file mode 100644 index 00000000..8b5c4ea6 --- /dev/null +++ b/pysatNASA/instruments/ace_mag_l2.py @@ -0,0 +1,110 @@ +# -*- coding: utf-8 -*- +"""Module for the Advanced Composition Explorer (ACE) MAG instrument. + +Properties +---------- +platform + 'ace' +name + 'mag_l2' +tag + 'base' or 'key' +inst_id + '1sec', '16sec', '4min', '5min', '1hr' + +References +---------- +- Stone, E., Frandsen, A., Mewaldt, R. et al. 
The Advanced Composition Explorer. + Space Science Reviews 86, 1–22 (1998). https://doi.org/10.1023/A:1005082526237 +- Smith, C., L'Heureux, J., Ness, N. et al. The ACE Magnetic Fields Experiment. + Space Science Reviews 86, 613–632 (1998). + https://doi.org/10.1023/A:1005092216668 + +Note +---- +- Level 1 ACE data is maintained at pysatSpaceWeather. +- Release notes at + https://cdaweb.gsfc.nasa.gov/pub/data/ace/mag/mag_level2_release_notes.txt + +Warnings +-------- +- The cleaning parameters for the instrument are still under development. + +""" + +import datetime as dt +import functools + +from pysat.instruments.methods import general as mm_gen + +from pysatNASA.instruments.methods import ace as mm_ace +from pysatNASA.instruments.methods import cdaweb as cdw +from pysatNASA.instruments.methods import general as mm_nasa + +# ---------------------------------------------------------------------------- +# Instrument attributes + +platform = 'ace' +name = 'mag_l2' +tags = {'base': 'ACE Magnetic Field Base Data', + 'key': 'ACE Magnetic Field Key Parameters'} +inst_ids = {'1sec': ['base'], + '16sec': ['base', 'key'], + '4min': ['base'], + '5min': ['key'], + '1hr': ['base', 'key']} +pandas_format = False + +# ---------------------------------------------------------------------------- +# Instrument test attributes + +_test_dates = {id: {tag: dt.datetime(2022, 1, 1) for tag in inst_ids[id]} + for id in inst_ids.keys()} + +# ---------------------------------------------------------------------------- +# Instrument methods + +# Use standard init routine +init = functools.partial(mm_nasa.init, module=mm_ace, name=name) + +# Use default ace clean +clean = mm_ace.clean + +# ---------------------------------------------------------------------------- +# Instrument functions +# +# Use the default CDAWeb and pysat methods + +# Set the list_files routine +strid = {'1sec': {'base': 'h3'}, + '16sec': {'base': 'h0', 'key': 'k1'}, + '4min': {'base': 'h1'}, + '5min': {'key': 'k0'}, + '1hr': {'base': 'h2', 'key': 'k2'}} +fname = ''.join(('ac_{sid:s}_mfi_{{year:4d}}{{month:02d}}{{day:02d}}_', + 'v{{version:02d}}.cdf')) +supported_tags = {id: {tag: fname.format(sid=strid[id][tag]) + for tag in inst_ids[id]} + for id in inst_ids.keys()} +list_files = functools.partial(mm_gen.list_files, + supported_tags=supported_tags) + +# Set the load routine +meta_translation = {'CATDESC': 'desc', 'FILLVAL': 'fill', + 'LABLAXIS': 'plot_label', 'VALIDMAX': 'value_max', + 'VALIDMIN': 'value_min', 'VAR_NOTES': 'notes'} +load = functools.partial(cdw.load, pandas_format=pandas_format, + meta_translation=meta_translation) + +# Set the download routine +download_tags = {'1sec': {'base': 'AC_H3_MFI'}, + '16sec': {'base': 'AC_H0_MFI', 'key': 'AC_K1_MFI'}, + '4min': {'base': 'AC_H1_MFI'}, + '5min': {'key': 'AC_K0_MFI'}, + '1hr': {'base': 'AC_H2_MFI', 'key': 'AC_K2_MFI'}} + +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) + +# Set the list_remote_files routine +list_remote_files = functools.partial(cdw.cdas_list_remote_files, + supported_tags=download_tags) diff --git a/pysatNASA/instruments/ace_sis_l2.py b/pysatNASA/instruments/ace_sis_l2.py new file mode 100644 index 00000000..f69a4308 --- /dev/null +++ b/pysatNASA/instruments/ace_sis_l2.py @@ -0,0 +1,100 @@ +# -*- coding: utf-8 -*- +"""Module for the Advanced Composition Explorer (ACE) SIS instrument. 
+ +Properties +---------- +platform + 'ace' +name + 'sis_l2' +tag + 'base' or 'key' +inst_id + '256sec' or '1hr' + +References +---------- +- Stone, E., Frandsen, A., Mewaldt, R. et al. The Advanced Composition Explorer. + Space Science Reviews 86, 1–22 (1998). https://doi.org/10.1023/A:1005082526237 +- Stone, E., Cohen, C., Cook, W. et al. The Solar Isotope Spectrometer for the + Advanced Composition Explorer. Space Science Reviews 86, 357–408 (1998). + https://doi.org/10.1023/A:1005027929871 + +Note +---- +- Level 1 ACE data is maintained at pysatSpaceWeather. + +Warnings +-------- +- The cleaning parameters for the instrument are still under development. + +""" + +import datetime as dt +import functools + +from pysat.instruments.methods import general as mm_gen + +from pysatNASA.instruments.methods import ace as mm_ace +from pysatNASA.instruments.methods import cdaweb as cdw +from pysatNASA.instruments.methods import general as mm_nasa + +# ---------------------------------------------------------------------------- +# Instrument attributes + +platform = 'ace' +name = 'sis_l2' +tags = {'base': 'ACE/SIS Solar Isotope Spectrometer Base Data', + 'key': 'ACE/SIS Solar Isotope Spectrometer Key Parameters'} +inst_ids = {'256sec': ['base'], + '1hr': ['base', 'key']} +pandas_format = False + +# ---------------------------------------------------------------------------- +# Instrument test attributes + +_test_dates = {id: {tag: dt.datetime(2022, 1, 1) for tag in inst_ids[id]} + for id in inst_ids.keys()} + +# ---------------------------------------------------------------------------- +# Instrument methods + + +# Use standard init routine +init = functools.partial(mm_nasa.init, module=mm_ace, name=name) + +# Use default ace clean +clean = mm_ace.clean + +# ---------------------------------------------------------------------------- +# Instrument functions +# +# Use the default CDAWeb and pysat methods + +# Set the list_files routine +strid = {'256sec': {'base': 'h1'}, + '1hr': {'base': 'h2', 'key': 'k0'}} +fname = ''.join(('ac_{sid:s}_sis_{{year:4d}}{{month:02d}}{{day:02d}}_', + 'v{{version:02d}}.cdf')) +supported_tags = {id: {tag: fname.format(sid=strid[id][tag]) + for tag in inst_ids[id]} + for id in inst_ids.keys()} +list_files = functools.partial(mm_gen.list_files, + supported_tags=supported_tags) + +# Set the load routine +meta_translation = {'CATDESC': 'desc', 'FILLVAL': 'fill', + 'LABLAXIS': 'plot_label', 'VALIDMAX': 'value_max', + 'VALIDMIN': 'value_min', 'VAR_NOTES': 'notes'} +load = functools.partial(cdw.load, pandas_format=pandas_format, + meta_translation=meta_translation) + +# Set the download routine +download_tags = {'256sec': {'base': 'AC_H1_SIS'}, + '1hr': {'base': 'AC_H2_SIS', 'key': 'AC_K0_SIS'}} + +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) + +# Set the list_remote_files routine +list_remote_files = functools.partial(cdw.cdas_list_remote_files, + supported_tags=download_tags) diff --git a/pysatNASA/instruments/ace_swepam_l2.py b/pysatNASA/instruments/ace_swepam_l2.py new file mode 100644 index 00000000..a4a5ae20 --- /dev/null +++ b/pysatNASA/instruments/ace_swepam_l2.py @@ -0,0 +1,104 @@ +# -*- coding: utf-8 -*- +"""Module for the Advanced Composition Explorer (ACE) EPAM instrument. + +Properties +---------- +platform + 'ace' +name + 'swepam_l2' +tag + 'base' or 'key' +inst_id + '64sec', '5min', '1hr' + +References +---------- +- Stone, E., Frandsen, A., Mewaldt, R. et al. The Advanced Composition Explorer. 
+ Space Science Reviews 86, 1–22 (1998). https://doi.org/10.1023/A:1005082526237 +- McComas, D., Bame, S., Barker, P. et al. Solar Wind Electron Proton Alpha + Monitor (SWEPAM) for the Advanced Composition Explorer. Space Science Reviews + 86, 563–612 (1998). https://doi.org/10.1023/A:1005040232597 + +Note +---- +- Level 1 ACE data is maintained at pysatSpaceWeather. +- Release notes at + https://cdaweb.gsfc.nasa.gov/pub/data/ace/swepam/swepam_level2_release_notes.txt + +Warnings +-------- +- The cleaning parameters for the instrument are still under development. + +""" + +import datetime as dt +import functools + +from pysat.instruments.methods import general as mm_gen + +from pysatNASA.instruments.methods import ace as mm_ace +from pysatNASA.instruments.methods import cdaweb as cdw +from pysatNASA.instruments.methods import general as mm_nasa + +# ---------------------------------------------------------------------------- +# Instrument attributes + +platform = 'ace' +name = 'swepam_l2' +tags = {'base': 'ACE/SWEPAM Solar Wind Experiment Base Data', + 'key': 'ACE/SWEPAM Solar Wind Experiment Key Parameters'} +inst_ids = {'64sec': ['base'], + '5min': ['key'], + '1hr': ['key', 'base']} +pandas_format = False + +# ---------------------------------------------------------------------------- +# Instrument test attributes + +_test_dates = {id: {tag: dt.datetime(2021, 1, 1) for tag in inst_ids[id]} + for id in inst_ids.keys()} + +# ---------------------------------------------------------------------------- +# Instrument methods + +# Use standard init routine +init = functools.partial(mm_nasa.init, module=mm_ace, name=name) + +# Use default ace clean +clean = mm_ace.clean + +# ---------------------------------------------------------------------------- +# Instrument functions +# +# Use the default CDAWeb and pysat methods + +# Set the list_files routine +strid = {'64sec': {'base': 'h0'}, + '5min': {'key': 'k0'}, + '1hr': {'base': 'h2', 'key': 'k1'}} +fname = ''.join(('ac_{sid:s}_swe_{{year:4d}}{{month:02d}}{{day:02d}}_', + 'v{{version:02d}}.cdf')) +supported_tags = {id: {tag: fname.format(sid=strid[id][tag]) + for tag in inst_ids[id]} + for id in inst_ids.keys()} +list_files = functools.partial(mm_gen.list_files, + supported_tags=supported_tags) + +# Set the load routine +meta_translation = {'CATDESC': 'desc', 'FILLVAL': 'fill', + 'LABLAXIS': 'plot_label', 'VALIDMAX': 'value_max', + 'VALIDMIN': 'value_min', 'VAR_NOTES': 'notes'} +load = functools.partial(cdw.load, pandas_format=pandas_format, + meta_translation=meta_translation) + +# Set the download routine +download_tags = {'64sec': {'base': 'AC_H0_SWE'}, + '5min': {'key': 'AC_K0_SWE'}, + '1hr': {'base': 'AC_H2_SWE', 'key': 'AC_K1_SWE'}} + +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) + +# Set the list_remote_files routine +list_remote_files = functools.partial(cdw.cdas_list_remote_files, + supported_tags=download_tags) diff --git a/pysatNASA/instruments/cnofs_ivm.py b/pysatNASA/instruments/cnofs_ivm.py index a7693648..59dbf57b 100644 --- a/pysatNASA/instruments/cnofs_ivm.py +++ b/pysatNASA/instruments/cnofs_ivm.py @@ -16,23 +16,6 @@ motion of the satellite the angle is converted into ion motion along two orthogonal directions, perpendicular to the satellite track. -References ----------- -A brief discussion of the C/NOFS mission and instruments can be found at -de La Beaujardière, O., et al. (2004), C/NOFS: A mission to forecast -scintillations, J. Atmos. Sol. Terr. 
Phys., 66, 1573–1591, -doi:10.1016/j.jastp.2004.07.030. - -Discussion of cleaning parameters for ion drifts can be found in: -Burrell, Angeline G., Equatorial topside magnetic field-aligned ion drifts -at solar minimum, The University of Texas at Dallas, ProQuest -Dissertations Publishing, 2012. 3507604. - -Discussion of cleaning parameters for ion temperature can be found in: -Hairston, M. R., W. R. Coley, and R. A. Heelis (2010), Mapping the -duskside topside ionosphere with CINDI and DMSP, J. Geophys. Res.,115, -A08324, doi:10.1029/2009JA015051. - Properties ---------- @@ -45,6 +28,7 @@ inst_id None supported + Warnings -------- - The sampling rate of the instrument changes on July 29th, 2010. @@ -52,6 +36,24 @@ - The cleaning parameters for the instrument are still under development. + +References +---------- +A brief discussion of the C/NOFS mission and instruments can be found at +de La Beaujardière, O., et al. (2004), C/NOFS: A mission to forecast +scintillations, J. Atmos. Sol. Terr. Phys., 66, 1573–1591, +doi:10.1016/j.jastp.2004.07.030. + +Discussion of cleaning parameters for ion drifts can be found in: +Burrell, Angeline G., Equatorial topside magnetic field-aligned ion drifts +at solar minimum, The University of Texas at Dallas, ProQuest +Dissertations Publishing, 2012. 3507604. + +Discussion of cleaning parameters for ion temperature can be found in: +Hairston, M. R., W. R. Coley, and R. A. Heelis (2010), Mapping the +duskside topside ionosphere with CINDI and DMSP, J. Geophys. Res.,115, +A08324, doi:10.1029/2009JA015051. + """ import datetime as dt @@ -245,12 +247,9 @@ def clean(self): # Set the load routine load = cdw.load -# Set the download routine -basic_tag = {'remote_dir': '/pub/data/cnofs/cindi/ivm_500ms_cdf/{year:4d}/', - 'fname': fname} -download_tags = {'': {'': basic_tag}} -download = functools.partial(cdw.download, supported_tags=download_tags) +download_tags = {'': {'': 'CNOFS_CINDI_IVM_500MS'}} +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) # Set the list_remote_files routine -list_remote_files = functools.partial(cdw.list_remote_files, +list_remote_files = functools.partial(cdw.cdas_list_remote_files, supported_tags=download_tags) diff --git a/pysatNASA/instruments/cnofs_plp.py b/pysatNASA/instruments/cnofs_plp.py index fb43ab9e..71eb1064 100644 --- a/pysatNASA/instruments/cnofs_plp.py +++ b/pysatNASA/instruments/cnofs_plp.py @@ -29,12 +29,6 @@ The data is PRELIMINARY, and as such, is intended for BROWSE PURPOSES ONLY. -References ----------- -A brief discussion of the C/NOFS mission and instruments can be found at -de La Beaujardière, O., et al. (2004), C/NOFS: A mission to forecast -scintillations, J. Atmos. Sol. Terr. Phys., 66, 1573–1591, -doi:10.1016/j.jastp.2004.07.030. Properties ---------- @@ -54,6 +48,14 @@ - Currently no cleaning routine. - Module not written by PLP team. + +References +---------- +A brief discussion of the C/NOFS mission and instruments can be found at +de La Beaujardière, O., et al. (2004), C/NOFS: A mission to forecast +scintillations, J. Atmos. Sol. Terr. Phys., 66, 1573–1591, +doi:10.1016/j.jastp.2004.07.030. 
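# ----------------------------------------------------------------------------
# Editor's note: with ``list_remote_files`` now bound to
# ``cdw.cdas_list_remote_files`` above, the files available from CDAWeb can
# be checked before downloading.  An illustrative sketch (not part of the
# patch) using the pysat Instrument interface; the dates are arbitrary and
# network access is assumed.
import datetime as dt

import pysat
from pysatNASA.instruments import cnofs_ivm

ivm = pysat.Instrument(inst_module=cnofs_ivm)
remote_files = ivm.remote_file_list(start=dt.datetime(2009, 1, 1),
                                    stop=dt.datetime(2009, 1, 3))
print(remote_files)
# ----------------------------------------------------------------------------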
+ """ import datetime as dt @@ -119,11 +121,9 @@ def clean(self): load = cdw.load # Set the download routine -basic_tag = {'remote_dir': '/pub/data/cnofs/plp/plasma_1sec/{year:4d}/', - 'fname': fname} -download_tags = {'': {'': basic_tag}} -download = functools.partial(cdw.download, supported_tags=download_tags) +download_tags = {'': {'': 'CNOFS_PLP_PLASMA_1SEC'}} +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) # Set the list_remote_files routine -list_remote_files = functools.partial(cdw.list_remote_files, +list_remote_files = functools.partial(cdw.cdas_list_remote_files, supported_tags=download_tags) diff --git a/pysatNASA/instruments/cnofs_vefi.py b/pysatNASA/instruments/cnofs_vefi.py index 78d54e36..f2e4bf24 100644 --- a/pysatNASA/instruments/cnofs_vefi.py +++ b/pysatNASA/instruments/cnofs_vefi.py @@ -26,12 +26,6 @@ The data is PRELIMINARY, and as such, is intended for BROWSE PURPOSES ONLY. -References ----------- -A brief discussion of the C/NOFS mission and instruments can be found at -de La Beaujardière, O., et al. (2004), C/NOFS: A mission to forecast -scintillations, J. Atmos. Sol. Terr. Phys., 66, 1573–1591, -doi:10.1016/j.jastp.2004.07.030. Properties ---------- @@ -56,6 +50,14 @@ - Limited cleaning routine. - Module not written by VEFI team. + +References +---------- +A brief discussion of the C/NOFS mission and instruments can be found at +de La Beaujardière, O., et al. (2004), C/NOFS: A mission to forecast +scintillations, J. Atmos. Sol. Terr. Phys., 66, 1573–1591, +doi:10.1016/j.jastp.2004.07.030. + """ import datetime as dt @@ -122,11 +124,9 @@ def clean(self): load = cdw.load # Set the download routine -basic_tag = {'remote_dir': '/pub/data/cnofs/vefi/bfield_1sec/{year:4d}/', - 'fname': fname} -download_tags = {'': {'dc_b': basic_tag}} -download = functools.partial(cdw.download, supported_tags=download_tags) +download_tags = {'': {'dc_b': 'CNOFS_VEFI_BFIELD_1SEC'}} +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) # Set the list_remote_files routine -list_remote_files = functools.partial(cdw.list_remote_files, +list_remote_files = functools.partial(cdw.cdas_list_remote_files, supported_tags=download_tags) diff --git a/pysatNASA/instruments/de2_fpi.py b/pysatNASA/instruments/de2_fpi.py new file mode 100644 index 00000000..ad5e064d --- /dev/null +++ b/pysatNASA/instruments/de2_fpi.py @@ -0,0 +1,107 @@ +# -*- coding: utf-8 -*- +"""The DE2 FPI instrument. + +Supports the Fabry-Perot Interferometer (FPI) instrument on Dynamics Explorer 2 +(DE2). + +From CDAWeb: + +The Fabry-Perot Interferometer (FPI) was a high-resolution remote sensing +instrument designed to measure the thermospheric temperature, meridional wind, +and density of the following metastable atoms: atomic oxygen (singlet S and D) +and the 2P state of ionic atomic oxygen. The FPI performed a wavelength analysis +on the light detected from the thermospheric emission features by spatially +scanning the interference fringe plane with a multichannel array detector. The +wavelength analysis characterized the Doppler line profile of the emitting +species. A sequential altitude scan performed by a commandable horizon scan +mirror provided a cross-sectional view of the thermodynamic and dynamic state of +the thermosphere below the DE 2 orbit. 
The information obtained from this +investigation was used to study the dynamic response of the thermosphere to the +energy sources caused by magnetospheric electric fields and the absorption of +solar ultraviolet light in the thermosphere. The instrument was based on the +visible airglow experiment (VAE) used in the AE program. The addition of a +scanning mirror, the Fabry-Perot etalon, an image plane detector, and a +calibration lamp were the principal differences. Interference filters isolated +lines at (in Angstroms) 5577, 6300, 7320, 5896, and 5200. The FPI had a field of +view of 0.53 deg (half-cone angle). More details are found in P. B. Hays et al., +Space Sci. Instrum., v. 5, n. 4, p. 395, 1981. From February 16, 1982 to +September 11, 1982 the DE satellite was inverted and the FPI measured galactic +emissions. + +Properties +---------- +platform + 'de2' +name + 'fpi' +inst_id + None Supported +tag + None Supported + + +Warnings +-------- +- Currently no cleaning routine. + + +References +---------- +Hays, P B, Killeen, T L, and Kennedy, B C. "Fabry-Perot interferometer on +Dynamics Explorer". Space Sci. Instrum., 5, 395-416, 1981. + +""" + +import datetime as dt +import functools + +from pysat.instruments.methods import general as mm_gen + +from pysatNASA.instruments.methods import cdaweb as cdw +from pysatNASA.instruments.methods import de2 as mm_de2 +from pysatNASA.instruments.methods import general as mm_nasa + +# ---------------------------------------------------------------------------- +# Instrument attributes + +platform = 'de2' +name = 'fpi' +tags = {'': '8 s cadence Fabry-Perot Interferometer data'} +inst_ids = {'': ['']} + +# ---------------------------------------------------------------------------- +# Instrument test attributes + +_test_dates = {'': {'': dt.datetime(1983, 1, 1)}} + +# ---------------------------------------------------------------------------- +# Instrument methods + + +# Use standard init routine +init = functools.partial(mm_nasa.init, module=mm_de2, name=name) + +# No cleaning, use standard warning function instead +clean = mm_nasa.clean_warn + +# ---------------------------------------------------------------------------- +# Instrument functions +# +# Use the default CDAWeb and pysat methods + +# Set the list_files routine +fname = 'de2_neutral8s_fpi_{year:04d}{month:02d}{day:02d}_v{version:02d}.cdf' +supported_tags = {'': {'': fname}} +list_files = functools.partial(mm_gen.list_files, + supported_tags=supported_tags) + +# Use the default CDAWeb method +load = cdw.load + +# Support download routine +download_tags = {'': {'': 'DE2_NEUTRAL8S_FPI'}} +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) + +# Support listing files currently on CDAWeb +list_remote_files = functools.partial(cdw.cdas_list_remote_files, + supported_tags=download_tags) diff --git a/pysatNASA/instruments/de2_lang.py b/pysatNASA/instruments/de2_lang.py index 07430357..5afeb803 100644 --- a/pysatNASA/instruments/de2_lang.py +++ b/pysatNASA/instruments/de2_lang.py @@ -25,12 +25,6 @@ or correct the inflight processed data. Time resolution was 0.5 seconds. -References ----------- -J. P. Krehbiel, L. H. Brace, R. F. Theis, W. H. Pinkus, and R. B. Kaplan, -"The Dynamics Explorer 2 Langmuir Probe (LANG)", Space Sci. Instrum., 5, -493-502, 1981. - Properties ---------- platform @@ -48,6 +42,12 @@ - Currently no cleaning routine. +References +---------- +J. P. Krehbiel, L. H. Brace, R. F. Theis, W. H. Pinkus, and R. B. 
Kaplan, +"The Dynamics Explorer 2 Langmuir Probe (LANG)", Space Sci. Instrum., 5, +493-502, 1981. + """ import datetime as dt @@ -97,12 +97,9 @@ load = cdw.load # Set the download routine -basic_tag = {'remote_dir': ''.join(('/pub/data/de/de2/plasma_lang', - '/plasma500ms_lang_cdaweb/{year:4d}/')), - 'fname': fname} -download_tags = {'': {'': basic_tag}} -download = functools.partial(cdw.download, supported_tags=download_tags) +download_tags = {'': {'': 'DE2_PLASMA500MS_LANG'}} +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) # Set the list_remote_files routine -list_remote_files = functools.partial(cdw.list_remote_files, +list_remote_files = functools.partial(cdw.cdas_list_remote_files, supported_tags=download_tags) diff --git a/pysatNASA/instruments/de2_nacs.py b/pysatNASA/instruments/de2_nacs.py index b03212fe..e6b9fa3a 100644 --- a/pysatNASA/instruments/de2_nacs.py +++ b/pysatNASA/instruments/de2_nacs.py @@ -50,12 +50,6 @@ were lost between 12 March 1982 and 31 March 1982 when the counter overflowed. -References ----------- -G. R. Carrignan, B. P. Block, J. C. Maurer, A. E. Hedin, C. A. Reber, -N. W. Spencer, "The neutral mass spectrometer on Dynamics Explorer B", -Space Sci. Instrum., 5, 429-441, 1981. - Properties ---------- platform @@ -67,10 +61,18 @@ tag None Supported + Warnings -------- - Currently no cleaning routine. + +References +---------- +G. R. Carrignan, B. P. Block, J. C. Maurer, A. E. Hedin, C. A. Reber, +N. W. Spencer, "The neutral mass spectrometer on Dynamics Explorer B", +Space Sci. Instrum., 5, 429-441, 1981. + """ import datetime as dt @@ -120,12 +122,9 @@ load = cdw.load # Support download routine -basic_tag = {'remote_dir': ''.join(('/pub/data/de/de2/neutral_gas_nacs', - '/neutral1s_nacs_cdaweb/{year:4d}/')), - 'fname': fname} -download_tags = {'': {'': basic_tag}} -download = functools.partial(cdw.download, supported_tags=download_tags) +download_tags = {'': {'': 'DE2_NEUTRAL1S_NACS'}} +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) # Support listing files currently on CDAWeb -list_remote_files = functools.partial(cdw.list_remote_files, +list_remote_files = functools.partial(cdw.cdas_list_remote_files, supported_tags=download_tags) diff --git a/pysatNASA/instruments/de2_rpa.py b/pysatNASA/instruments/de2_rpa.py index 912dd66f..c9161118 100644 --- a/pysatNASA/instruments/de2_rpa.py +++ b/pysatNASA/instruments/de2_rpa.py @@ -26,21 +26,11 @@ spectrum; and the concentration of H+, He+, O+, and Fe+, and of molecular ions near perigee. -It includes the DUCT portion of the high resolutiondata from the Dynamics -Explorer 2 (DE-2) Retarding Potential Analyzer (RPA) for the whole DE-2 mission -time period in ASCII format. This version was generated at NSSDC from the -PI-provided binary data (SPIO-00232). The DUCT files include RPA measurements -ofthe total ion concentration every 64 times per second. Due to a failure in -the instrument memory system RPA data are not available from 81317 06:26:40 UT -to 82057 13:16:00 UT. This data set is based on the revised version of the RPA -files that was submitted by the PI team in June of 1995. The revised RPA data -include a correction to the spacecraft potential. +Due to a failure in the instrument memory system RPA data are not available +from 81317 06:26:40 UT to 82057 13:16:00 UT. This data set is based on the +revised version of the RPA files that was submitted by the PI team in June of +1995. The revised RPA data include a correction to the spacecraft potential. 
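# ----------------------------------------------------------------------------
# Editor's note: the DE2 modules above wire their module-level ``download``
# through ``functools.partial``, baking the CDAWeb dataset ID in once so
# pysat only supplies the date range and data path later.  A standalone
# sketch of the pattern with a stand-in function (illustrative only, not
# part of the patch).
import functools


def _demo_download(date_array, tag='', inst_id='', supported_tags=None,
                   data_path=None):
    """Stand-in with the same call signature as ``cdw.cdas_download``."""
    print('dataset:', supported_tags[inst_id][tag], '->', data_path)


download_tags = {'': {'': 'DE2_NEUTRAL1S_NACS'}}
download = functools.partial(_demo_download, supported_tags=download_tags)

# pysat later calls the bound function with only the remaining arguments.
download([], tag='', inst_id='', data_path='/tmp/pysat_demo')
# ----------------------------------------------------------------------------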
-References ----------- -W. B. Hanson, R. A. Heelis, R. A. Power, C. R. Lippincott, D. R. Zuccaro, -B. J. Holt, L. H. Harmon, and S. Sanatani, “The retarding potential analyzer -for dynamics explorer-B,” Space Sci. Instrum. 5, 503–510 (1981). Properties ---------- @@ -53,10 +43,18 @@ tag None Supported + Warnings -------- - Currently no cleaning routine. + +References +---------- +W. B. Hanson, R. A. Heelis, R. A. Power, C. R. Lippincott, D. R. Zuccaro, +B. J. Holt, L. H. Harmon, and S. Sanatani, “The retarding potential analyzer +for dynamics explorer-B,” Space Sci. Instrum. 5, 503–510 (1981). + """ import datetime as dt @@ -73,13 +71,13 @@ platform = 'de2' name = 'rpa' -tags = {'': '2 sec cadence RPA data'} # this is the default cadence -inst_ids = {'': ['']} +tags = {'': '2 sec cadence RPA data'} +inst_ids = {'': [tag for tag in tags]} # ---------------------------------------------------------------------------- # Instrument test attributes -_test_dates = {'': {'': dt.datetime(1983, 1, 1)}} +_test_dates = {'': {tag: dt.datetime(1983, 1, 1) for tag in tags}} # ---------------------------------------------------------------------------- # Instrument methods @@ -97,8 +95,11 @@ # Use the default CDAWeb and pysat methods # Set the list_files routine -fname = 'de2_ion2s_rpa_{year:04d}{month:02d}{day:02d}_v{version:02d}.cdf' -supported_tags = {'': {'': fname}} +datestr = '{year:04d}{month:02d}{day:02d}_v{version:02d}' +dataproduct = {'': 'ion2s'} +fname = 'de2_{dp:s}_rpa_{datestr:s}.cdf' +supported_tags = {'': {tag: fname.format(dp=dataproduct[tag], datestr=datestr) + for tag in tags}} list_files = functools.partial(mm_gen.list_files, supported_tags=supported_tags) @@ -106,12 +107,9 @@ load = cdw.load # Set the download routine -basic_tag = {'remote_dir': ''.join(('/pub/data/de/de2/plasma_rpa', - '/ion2s_cdaweb/{year:4d}/')), - 'fname': fname} -download_tags = {'': {'': basic_tag}} -download = functools.partial(cdw.download, supported_tags=download_tags) +download_tags = {'': {'': 'DE2_ION2S_RPA'}} +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) # Set the list_remote_files routine -list_remote_files = functools.partial(cdw.list_remote_files, +list_remote_files = functools.partial(cdw.cdas_list_remote_files, supported_tags=download_tags) diff --git a/pysatNASA/instruments/de2_vefi.py b/pysatNASA/instruments/de2_vefi.py new file mode 100644 index 00000000..45bec0ad --- /dev/null +++ b/pysatNASA/instruments/de2_vefi.py @@ -0,0 +1,105 @@ +"""Module for the DE2 VEFI instrument. + +From CDAWeb (adpated): +This directory gathers data for the VEFI instrument that flew on the DE 2 +spacecraft which was launched on 3 August 1981 into an elliptical orbit with +an altitude range of 300 km to 1000 km and re-entered the atmosphere on +19 February 1983. + +dca (NSSDC ID: 81-070B-02C) + +This data set contains the averaged (2 samples per second) DC electric fields in +spacecraft coordinates and orbit information in ASCII format. + +ac (NSSDC ID: 81-070B-02E) + +This data set contains the averaged AC electric field data (1 or 2 points per +second) and orbit information. + +References +---------- +Maynard, N. C., E. A. Bielecki, H. G. Burdick, Instrumentation for vector +electric field measurements from DE-B, Space Sci. Instrum., 5, 523, 1981. + +Properties +---------- +platform + 'de2' +name + 'vefi' +inst_id + None Supported +tag + 'dca' or 'ac' + + +Warnings +-------- +- Currently no cleaning routine. 
+ + +""" + +import datetime as dt +import functools + +from pysat.instruments.methods import general as mm_gen +from pysatNASA.instruments.methods import cdaweb as cdw +from pysatNASA.instruments.methods import de2 as mm_de2 +from pysatNASA.instruments.methods import general as mm_nasa + +# ---------------------------------------------------------------------------- +# Instrument attributes + +platform = 'de2' +name = 'vefi' +tags = {'': '62 ms combination of Electric Field and Magnetometer', + 'dca': '500 ms cadence DC Averaged Electric Field data', + 'ac': '500 ms cadence AC Electric Field data'} +inst_ids = {'': [tag for tag in tags]} + +# ---------------------------------------------------------------------------- +# Instrument test attributes + +_test_dates = {'': {tag: dt.datetime(1983, 1, 1) for tag in tags}} + + +# ---------------------------------------------------------------------------- +# Instrument methods + +# Use standard init routine +init = functools.partial(mm_nasa.init, module=mm_de2, name=name) + +# No cleaning, use standard warning function instead +clean = mm_nasa.clean_warn + +# ---------------------------------------------------------------------------- +# Instrument functions +# +# Use the default CDAWeb and pysat methods + +# Set the list_files routine +datestr = '{year:04d}{month:02d}{day:02d}_v{version:02d}' +fid = {'': '62ms_vefimagb', + 'ac': 'ac500ms_vefi', + 'dca': 'dca500ms_vefi'} +fname = 'de2_{fid:s}_{datestr:s}.cdf' +supported_tags = {'': {tag: fname.format(fid=fid[tag], datestr=datestr) + for tag in tags}} +list_files = functools.partial(mm_gen.list_files, + supported_tags=supported_tags) + +# Set the load routine +# Forcing use of cdflib as default since pysatCDF has a known issue with vefi +# data. See pysat/pysatCDF#48 +load = functools.partial(cdw.load, use_cdflib=True) + +# Set the download routine +download_tags = {'': {'': 'DE2_62MS_VEFIMAGB', + 'ac': 'DE2_AC500MS_VEFI', + 'dca': 'DE2_DCA500MS_VEFI'}} +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) + +# Set the list_remote_files routine +list_remote_files = functools.partial(cdw.cdas_list_remote_files, + supported_tags=download_tags) diff --git a/pysatNASA/instruments/de2_wats.py b/pysatNASA/instruments/de2_wats.py index 1345841a..cdcd7ef2 100644 --- a/pysatNASA/instruments/de2_wats.py +++ b/pysatNASA/instruments/de2_wats.py @@ -47,12 +47,6 @@ about the processing done at NSSDC is given in WATS_NSSDC_PRO_DE.DOC. -References ----------- -N. W. Spencer, L. E. Wharton, H. B. Niemann, A. E. Hedin, G. R. Carrignan, -J. C. Maurer, "The Dynamics Explorer Wind and Temperature Spectrometer", -Space Sci. Instrum., 5, 417-428, 1981. - Properties ---------- platform @@ -64,10 +58,18 @@ tag None Supported + Warnings -------- - Currently no cleaning routine. + +References +---------- +N. W. Spencer, L. E. Wharton, H. B. Niemann, A. E. Hedin, G. R. Carrignan, +J. C. Maurer, "The Dynamics Explorer Wind and Temperature Spectrometer", +Space Sci. Instrum., 5, 417-428, 1981. 
+ """ import datetime as dt @@ -117,12 +119,9 @@ load = cdw.load # Set the download routine -basic_tag = {'remote_dir': ''.join(('/pub/data/de/de2/neutral_gas_wats', - '/wind2s_wats_cdaweb/{year:4d}/')), - 'fname': fname} -download_tags = {'': {'': basic_tag}} -download = functools.partial(cdw.download, supported_tags=download_tags) +download_tags = {'': {'': 'DE2_WIND2S_WATS'}} +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) # Set the list_remote_files routine -list_remote_files = functools.partial(cdw.list_remote_files, +list_remote_files = functools.partial(cdw.cdas_list_remote_files, supported_tags=download_tags) diff --git a/pysatNASA/instruments/dmsp_ssusi.py b/pysatNASA/instruments/dmsp_ssusi.py new file mode 100644 index 00000000..65f3fa4b --- /dev/null +++ b/pysatNASA/instruments/dmsp_ssusi.py @@ -0,0 +1,123 @@ +# -*- coding: utf-8 -*- +"""Module for the DMSP SSUSI instrument. + +Supports the Special Sensor Ultraviolet Spectrographic Imager (SSUSI) +instrument on Defense Meteorological Satellite Program (DMSP). + +From JHU APL: + +SSUSI was designed for the DMSP Block 5D-3 satellites. These satellites are +placed into nearly polar, sun-synchronous orbits at an altitude of about 850 km. +SSUSI is a remote-sensing instrument which measures ultraviolet (UV) emissions +in five different wavelength bands from the Earth's upper atmosphere. SSUSI is +mounted on a nadir-looking panel of the satellite. The multicolor images from +SSUSI cover the visible Earth disk from horizon to horizon and the anti-sunward +limb up to an altitude of approximately 520 km. + +The UV images and the derived environmental data provide the Air Force Weather +Agency (Offutt Air Force Base, Bellevue, NE) with near real-time information +that can be utilized in a number of applications, such as maintenance of high +frequency (HF) communication links and related systems and assessment of the +environmental hazard to astronauts on the Space Station. + + +Properties +---------- +platform + 'dmsp' +name + 'ssusi' +tag + 'edr-aurora' +inst_id + 'f16', 'f17', 'f18', 'f19' + + +Warnings +-------- +- Currently no cleaning routine. + + +References +---------- +Larry J. Paxton, Daniel Morrison, Yongliang Zhang, Hyosub Kil, Brian Wolven, +Bernard S. Ogorzalek, David C. Humm, and Ching-I. Meng "Validation of remote +sensing products produced by the Special Sensor Ultraviolet Scanning Imager +(SSUSI): a far UV-imaging spectrograph on DMSP F-16", Proc. 
SPIE 4485, Optical +Spectroscopic Techniques, Remote Sensing, and Instrumentation for Atmospheric +and Space Research IV, (30 January 2002); doi:10.1117/12.454268 + +""" + +import datetime as dt +import functools + +from pysat.instruments.methods import general as mm_gen + +from pysatNASA.instruments.methods import cdaweb as cdw +from pysatNASA.instruments.methods import dmsp as mm_dmsp +from pysatNASA.instruments.methods import general as mm_nasa +from pysatNASA.instruments.methods import jhuapl + +# ---------------------------------------------------------------------------- +# Instrument attributes + +platform = 'dmsp' +name = 'ssusi' +tags = {'edr-aurora': ''.join(['Electron energy flux and mean energy, auroral', + ' boundaries, identified discrete auroral arcs,', + ' hemispheric power, and magnetic field lines ', + 'traced to 4 Earth radii'])} +inst_ids = {sat_id: list(tags.keys()) + for sat_id in ['f16', 'f17', 'f18', 'f19']} + +pandas_format = False +multi_file_day = True + +# ---------------------------------------------------------------------------- +# Instrument test attributes + +_test_dates = {inst_id: {tag: dt.datetime(2015, 1, 1) for tag in tags.keys()} + for inst_id in inst_ids.keys()} + +# ---------------------------------------------------------------------------- +# Instrument methods + + +# Use standard init routine +init = functools.partial(mm_nasa.init, module=mm_dmsp, name=name) + +# No cleaning, use standard warning function instead +clean = mm_nasa.clean_warn + +# ---------------------------------------------------------------------------- +# Instrument functions +# +# Use the default CDAWeb and pysat methods + +# Set the list_files routine +fname = ''.join(['dmsp{inst_id:s}_ssusi_{tag:s}_{{year:04d}}{{day:03d}}T', + '{{hour:02d}}{{minute:02d}}{{second:02d}}-???????T??????-REV', + '?????_vA{{version:1d}}.?.?r{{cycle:03d}}.nc']) +supported_tags = {sat_id: {tag: fname.format(tag=tag, inst_id=sat_id) + for tag in tags.keys()} + for sat_id in inst_ids.keys()} +list_files = functools.partial(mm_gen.list_files, + supported_tags=supported_tags) + +# Set the load routine +load = functools.partial(jhuapl.load_edr_aurora, pandas_format=pandas_format) + +# Set the download routine +basic_tag = {'remote_dir': ''.join(('/pub/data/dmsp/dmsp{inst_id:s}/ssusi/', + '/data/{tag:s}/{{year:4d}}/{{day:03d}}/')), + 'fname': fname} +download_tags = { + sat_id: {tag: {btag: basic_tag[btag].format(tag=tag, inst_id=sat_id) + for btag in basic_tag.keys()} for tag in tags.keys()} + for sat_id in inst_ids.keys()} +download = functools.partial(cdw.download, supported_tags=download_tags) + +# Set the list_remote_files routine +list_remote_files = functools.partial(cdw.list_remote_files, + supported_tags=download_tags) diff --git a/pysatNASA/instruments/formosat1_ivm.py b/pysatNASA/instruments/formosat1_ivm.py index 24be65bd..6eb49a92 100644 --- a/pysatNASA/instruments/formosat1_ivm.py +++ b/pysatNASA/instruments/formosat1_ivm.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -"""Module for the ICON EUV instrument. +"""Module for the Formosat-1 IVM instrument. Supports the Ion Velocity Meter (IVM) onboard the Formosat-1 (formerly ROCSAT-1) mission. 
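# ----------------------------------------------------------------------------
# Editor's note: in the SSUSI file template above, single-brace fields such
# as {inst_id:s} and {tag:s} are filled immediately, while double-brace
# fields such as {{year:04d}} are escaped so they survive for pysat to fill
# when listing files.  A standalone check of that escaping (illustrative
# only, not part of the patch).
fname = ''.join(['dmsp{inst_id:s}_ssusi_{tag:s}_{{year:04d}}{{day:03d}}T',
                 '{{hour:02d}}{{minute:02d}}{{second:02d}}-???????T??????-REV',
                 '?????_vA{{version:1d}}.?.?r{{cycle:03d}}.nc'])
print(fname.format(tag='edr-aurora', inst_id='f18'))
# -> 'dmspf18_ssusi_edr-aurora_{year:04d}{day:03d}T{hour:02d}...' and so on,
#    with the remaining fields left for pysat's file parser.
# ----------------------------------------------------------------------------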
Downloads data from the NASA Coordinated Data Analysis @@ -92,12 +92,9 @@ def init(self): load = cdw.load # Set the download routine -basic_tag = {'remote_dir': ''.join(('/pub/data/formosat-rocsat/formosat-1', - '/ipei/{year:4d}/')), - 'fname': fname} -download_tags = {'': {'': basic_tag}} -download = functools.partial(cdw.download, supported_tags=download_tags) +download_tags = {'': {'': 'RS_K0_IPEI'}} +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) # Set the list_remote_files routine -list_remote_files = functools.partial(cdw.list_remote_files, +list_remote_files = functools.partial(cdw.cdas_list_remote_files, supported_tags=download_tags) diff --git a/pysatNASA/instruments/icon_euv.py b/pysatNASA/instruments/icon_euv.py index 5599856b..c16e8e7c 100644 --- a/pysatNASA/instruments/icon_euv.py +++ b/pysatNASA/instruments/icon_euv.py @@ -78,13 +78,12 @@ def preprocess(self, keep_original_names=False): Parameters ---------- - keep_original_names : boolean + keep_original_names : bool if True then the names as given in the netCDF ICON file will be used as is. If False, a preamble is removed. (default=False) """ - mm_gen.convert_timestamp_to_datetime(self, sec_mult=1.0e-3) if not keep_original_names: mm_gen.remove_leading_text(self, target='ICON_L26_') return @@ -128,13 +127,11 @@ def clean(self): supported_tags=supported_tags) # Set the download routine -basic_tag = {'remote_dir': '/pub/data/icon/l2/l2-6_euv/{year:04d}/', - 'fname': fname} -download_tags = {'': {'': basic_tag}} -download = functools.partial(cdw.download, supported_tags=download_tags) +download_tags = {'': {'': 'ICON_L2-6_EUV'}} +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) # Set the list_remote_files routine -list_remote_files = functools.partial(cdw.list_remote_files, +list_remote_files = functools.partial(cdw.cdas_list_remote_files, supported_tags=download_tags) @@ -148,7 +145,7 @@ def filter_metadata(meta_dict): Returns ------- - dict + meta_dict : dict Filtered EUV metadata """ @@ -169,7 +166,7 @@ def filter_metadata(meta_dict): return meta_dict -def load(fnames, tag=None, inst_id=None, keep_original_names=False): +def load(fnames, tag='', inst_id='', keep_original_names=False): """Load ICON EUV data into `xarray.Dataset` object and `pysat.Meta` objects. This routine is called as needed by pysat. It is not intended @@ -180,13 +177,13 @@ def load(fnames, tag=None, inst_id=None, keep_original_names=False): fnames : array-like Iterable of filename strings, full path, to data files to be loaded. This input is nominally provided by pysat itself. - tag : string + tag : str Tag name used to identify particular data set to be loaded. - This input is nominally provided by pysat itself. (default=None) - inst_id : string - Satellite ID used to identify particular data set to be loaded. - This input is nominally provided by pysat itself. (default=None) - keep_original_names : boolean + This input is nominally provided by pysat itself. (default='') + inst_id : str + Instrument ID used to identify particular data set to be loaded. + This input is nominally provided by pysat itself. (default='') + keep_original_names : bool If True then the names as given in the netCDF ICON file will be used as is. If False, a preamble is removed. 
(default=False) diff --git a/pysatNASA/instruments/icon_fuv.py b/pysatNASA/instruments/icon_fuv.py index 6e74f63f..c5333ee9 100644 --- a/pysatNASA/instruments/icon_fuv.py +++ b/pysatNASA/instruments/icon_fuv.py @@ -79,13 +79,12 @@ def preprocess(self, keep_original_names=False): Parameters ---------- - keep_original_names : boolean + keep_original_names : bool if True then the names as given in the netCDF ICON file will be used as is. If False, a preamble is removed. (default=False) """ - mm_gen.convert_timestamp_to_datetime(self, sec_mult=1.0e-3) if not keep_original_names: mm_icon.remove_preamble(self) return @@ -120,16 +119,13 @@ def clean(self): supported_tags=supported_tags) # Set the download routine -basic_tag24 = {'remote_dir': '/pub/data/icon/l2/l2-4_fuv_day/{year:04d}/', - 'fname': fname24} -basic_tag25 = {'remote_dir': '/pub/data/icon/l2/l2-5_fuv_night/{year:04d}/', - 'fname': fname25} -download_tags = {'': {'day': basic_tag24, 'night': basic_tag25}} +download_tags = {'': {'day': 'ICON_L2-4_FUV_DAY', + 'night': 'ICON_L2-5_FUV_NIGHT'}} -download = functools.partial(cdw.download, supported_tags=download_tags) +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) # Set the list_remote_files routine -list_remote_files = functools.partial(cdw.list_remote_files, +list_remote_files = functools.partial(cdw.cdas_list_remote_files, supported_tags=download_tags) @@ -143,7 +139,7 @@ def filter_metadata(meta_dict): Returns ------- - dict + meta_dict : dict Filtered FUV metadata """ @@ -169,7 +165,7 @@ def filter_metadata(meta_dict): return meta_dict -def load(fnames, tag=None, inst_id=None, keep_original_names=False): +def load(fnames, tag='', inst_id='', keep_original_names=False): """Load ICON FUV data into xarray.Dataset object and pysat.Meta objects. This routine is called as needed by pysat. It is not intended @@ -180,15 +176,15 @@ def load(fnames, tag=None, inst_id=None, keep_original_names=False): fnames : array-like iterable of filename strings, full path, to data files to be loaded. This input is nominally provided by pysat itself. - tag : string - tag name used to identify particular data set to be loaded. - This input is nominally provided by pysat itself. - inst_id : string - Satellite ID used to identify particular data set to be loaded. - This input is nominally provided by pysat itself. - keep_original_names : boolean + tag : str + Tag name used to identify particular data set to be loaded. + This input is nominally provided by pysat itself. (default='') + inst_id : str + Instrument ID used to identify particular data set to be loaded. + This input is nominally provided by pysat itself. (default='') + keep_original_names : bool if True then the names as given in the netCDF ICON file - will be used as is. If False, a preamble is removed. + will be used as is. If False, a preamble is removed. (default=False) Returns ------- diff --git a/pysatNASA/instruments/icon_ivm.py b/pysatNASA/instruments/icon_ivm.py index e9a55a58..6df533a0 100644 --- a/pysatNASA/instruments/icon_ivm.py +++ b/pysatNASA/instruments/icon_ivm.py @@ -87,7 +87,7 @@ def preprocess(self, keep_original_names=False): Parameters ---------- - keep_original_names : boolean + keep_original_names : bool if True then the names as given in the netCDF ICON file will be used as is. If False, a preamble is removed. 
(default=False) @@ -192,13 +192,12 @@ def clean(self): # Set the download routine dirstr = '/pub/data/icon/l2/l2-7_ivm-{id:s}/{{year:4d}}/' -download_tags = {id: {'': {'remote_dir': dirstr.format(id=id), - 'fname': supported_tags[id]['']}} - for id in ['a', 'b']} -download = functools.partial(cdw.download, supported_tags=download_tags) +download_tags = {'a': {'': 'ICON_L2-7_IVM-A'}, 'b': {'': 'ICON_L2-7_IVM-B'}} + +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) # Set the list_remote_files routine -list_remote_files = functools.partial(cdw.list_remote_files, +list_remote_files = functools.partial(cdw.cdas_list_remote_files, supported_tags=download_tags) @@ -229,7 +228,7 @@ def filter_metadata(meta_dict): return meta_dict -def load(fnames, tag=None, inst_id=None, keep_original_names=False): +def load(fnames, tag='', inst_id='', keep_original_names=False): """Load ICON IVM data into `pandas.DataFrame` and `pysat.Meta` objects. This routine is called as needed by pysat. It is not intended @@ -240,15 +239,15 @@ def load(fnames, tag=None, inst_id=None, keep_original_names=False): fnames : array-like iterable of filename strings, full path, to data files to be loaded. This input is nominally provided by pysat itself. - tag : string - tag name used to identify particular data set to be loaded. - This input is nominally provided by pysat itself. - inst_id : string - Satellite ID used to identify particular data set to be loaded. - This input is nominally provided by pysat itself. - keep_original_names : boolean + tag : str + Tag name used to identify particular data set to be loaded. + This input is nominally provided by pysat itself. (default='') + inst_id : str + Instrument ID used to identify particular data set to be loaded. + This input is nominally provided by pysat itself. (default='') + keep_original_names : bool if True then the names as given in the netCDF ICON file - will be used as is. If False, a preamble is removed. + will be used as is. If False, a preamble is removed. (default=False) Returns ------- diff --git a/pysatNASA/instruments/icon_mighti.py b/pysatNASA/instruments/icon_mighti.py index 584fd3d4..d2f11ab1 100644 --- a/pysatNASA/instruments/icon_mighti.py +++ b/pysatNASA/instruments/icon_mighti.py @@ -99,13 +99,12 @@ def preprocess(self, keep_original_names=False): Parameters ---------- - keep_original_names : boolean + keep_original_names : bool if True then the names as given in the netCDF ICON file will be used as is. If False, a preamble is removed. 
(default=False) """ - mm_gen.convert_timestamp_to_datetime(self, sec_mult=1.0e-3) if not keep_original_names: mm_icon.remove_preamble(self) return @@ -227,30 +226,23 @@ def _clean_vars(var_list, flag, min_level): supported_tags=supported_tags) # Set the download routine -dirstr1 = '/pub/data/icon/l2/l2-1_mighti-{{id:s}}_los-wind-{color:s}/' -dirstr2 = '/pub/data/icon/l2/l2-2_mighti_vector-wind-{color:s}/' -dirstr3 = '/pub/data/icon/l2/l2-3_mighti-{id:s}_temperature/' -dirnames = {'los_wind_green': dirstr1.format(color='green'), - 'los_wind_red': dirstr1.format(color='red'), - 'vector_wind_green': dirstr2.format(color='green'), - 'vector_wind_red': dirstr2.format(color='red'), - 'temperature': dirstr3} - -download_tags = {} -for inst_id in supported_tags.keys(): - download_tags[inst_id] = {} - for tag in supported_tags[inst_id].keys(): - fname = supported_tags[inst_id][tag] - - download_tags[inst_id][tag] = { - 'remote_dir': ''.join((dirnames[tag].format(id=inst_id), - '{year:04d}/')), - 'fname': fname} - -download = functools.partial(cdw.download, supported_tags=download_tags) +download_tags = {'vector': + {'vector_wind_green': 'ICON_L2-2_MIGHTI_VECTOR-WIND-GREEN', + 'vector_wind_red': 'ICON_L2-2_MIGHTI_VECTOR-WIND-RED'}, + 'a': + {'los_wind_green': 'ICON_L2-1_MIGHTI-A_LOS-WIND-GREEN', + 'los_wind_red': 'ICON_L2-1_MIGHTI-A_LOS-WIND-RED', + 'temperature': 'ICON_L2-3_MIGHTI-A_TEMPERATURE'}, + 'b': + {'los_wind_green': 'ICON_L2-1_MIGHTI-B_LOS-WIND-GREEN', + 'los_wind_red': 'ICON_L2-1_MIGHTI-B_LOS-WIND-RED', + 'temperature': 'ICON_L2-3_MIGHTI-B_TEMPERATURE'}} + + +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) # Set the list_remote_files routine -list_remote_files = functools.partial(cdw.list_remote_files, +list_remote_files = functools.partial(cdw.cdas_list_remote_files, supported_tags=download_tags) @@ -264,7 +256,7 @@ def filter_metadata(meta_dict): Returns ------- - dict + meta_dict : dict Filtered FUV metadata """ @@ -283,7 +275,7 @@ def filter_metadata(meta_dict): return meta_dict -def load(fnames, tag=None, inst_id=None, keep_original_names=False): +def load(fnames, tag='', inst_id='', keep_original_names=False): """Load ICON MIGHTI data into `xarray.Dataset` and `pysat.Meta` objects. This routine is called as needed by pysat. It is not intended @@ -294,12 +286,12 @@ def load(fnames, tag=None, inst_id=None, keep_original_names=False): fnames : array-like iterable of filename strings, full path, to data files to be loaded. This input is nominally provided by pysat itself. - tag : str or NoneType - tag name used to identify particular data set to be loaded. - This input is nominally provided by pysat itself. (default=None) - inst_id : str or NoneType - Satellite ID used to identify particular data set to be loaded. - This input is nominally provided by pysat itself. (default=None) + tag : str + Tag name used to identify particular data set to be loaded. + This input is nominally provided by pysat itself. (default='') + inst_id : str + Instrument ID used to identify particular data set to be loaded. + This input is nominally provided by pysat itself. (default='') keep_original_names : bool if True then the names as given in the netCDF ICON file will be used as is. If False, a preamble is removed. 
(default=False) diff --git a/pysatNASA/instruments/iss_fpmu.py b/pysatNASA/instruments/iss_fpmu.py index b85c6aa5..3fc44673 100644 --- a/pysatNASA/instruments/iss_fpmu.py +++ b/pysatNASA/instruments/iss_fpmu.py @@ -115,12 +115,9 @@ def clean(self): load = cdw.load # Set the download routine -basic_tag = {'remote_dir': ''.join(('/pub/data/international_space_station_iss', - '/sp_fpmu/{year:4d}/')), - 'fname': fname} -download_tags = {'': {'': basic_tag}} -download = functools.partial(cdw.download, supported_tags=download_tags) +download_tags = {'': {'': 'ISS_SP_FPMU'}} +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) # Set the list_remote_files routine -list_remote_files = functools.partial(cdw.list_remote_files, +list_remote_files = functools.partial(cdw.cdas_list_remote_files, supported_tags=download_tags) diff --git a/pysatNASA/instruments/jpl_gps.py b/pysatNASA/instruments/jpl_gps.py index ddbe66cf..63787548 100644 --- a/pysatNASA/instruments/jpl_gps.py +++ b/pysatNASA/instruments/jpl_gps.py @@ -16,16 +16,6 @@ the global network of International GNSS Service and the regional network of Continuous Operating Reference Station (CORS). -References ----------- -Pi, X., A. J. Mannucci, U. J. Lindqwister, and C. M. Ho, Monitoring of global -ionospheric irregularities using the worldwide GPS network, Geophys. Res. -Lett., 24, 2283, 1997. - -Pi, X., F. J. Meyer, K. Chotoo, Anthony Freeman, R. G. Caton, and C. T. -Bridgwood, Impact of ionospheric scintillation on Spaceborne SAR observations -studied using GNSS, Proc. ION-GNSS, pp.1998-2006, 2012. - Properties ---------- @@ -38,10 +28,22 @@ inst_id None supported + Warnings -------- - The cleaning parameters for the instrument are still under development. + +References +---------- +Pi, X., A. J. Mannucci, U. J. Lindqwister, and C. M. Ho, Monitoring of global +ionospheric irregularities using the worldwide GPS network, Geophys. Res. +Lett., 24, 2283, 1997. + +Pi, X., F. J. Meyer, K. Chotoo, Anthony Freeman, R. G. Caton, and C. T. +Bridgwood, Impact of ionospheric scintillation on Spaceborne SAR observations +studied using GNSS, Proc. ION-GNSS, pp.1998-2006, 2012. 
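# ----------------------------------------------------------------------------
# Editor's note: the ICON loaders above accept ``keep_original_names`` as a
# custom load keyword.  A sketch of passing it through the pysat constructor
# (illustrative only, not part of the patch; assumes pysat's custom
# instrument-keyword support, a configured data directory, downloaded files,
# and an arbitrary date).
import datetime as dt

import pysat
from pysatNASA.instruments import icon_mighti

mighti = pysat.Instrument(inst_module=icon_mighti, inst_id='vector',
                          tag='vector_wind_green', keep_original_names=True)
mighti.load(date=dt.datetime(2020, 1, 2))
# ----------------------------------------------------------------------------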
+ """ import datetime as dt diff --git a/pysatNASA/instruments/methods/__init__.py b/pysatNASA/instruments/methods/__init__.py index 5bde9a41..8542fcbc 100644 --- a/pysatNASA/instruments/methods/__init__.py +++ b/pysatNASA/instruments/methods/__init__.py @@ -4,6 +4,11 @@ from pysatNASA.instruments.methods import cdaweb # noqa F401 from pysatNASA.instruments.methods import cnofs # noqa F401 from pysatNASA.instruments.methods import de2 # noqa F401 +from pysatNASA.instruments.methods import dmsp # noqa F401 from pysatNASA.instruments.methods import general # noqa F401 +from pysatNASA.instruments.methods import gps # noqa F401 from pysatNASA.instruments.methods import icon # noqa F401 +from pysatNASA.instruments.methods import jhuapl # noqa F401 from pysatNASA.instruments.methods import omni # noqa F401 +from pysatNASA.instruments.methods import ses14 # noqa F401 +from pysatNASA.instruments.methods import timed # noqa F401 diff --git a/pysatNASA/instruments/methods/ace.py b/pysatNASA/instruments/methods/ace.py new file mode 100644 index 00000000..69e70e7e --- /dev/null +++ b/pysatNASA/instruments/methods/ace.py @@ -0,0 +1,54 @@ +# -*- coding: utf-8 -*- +"""Provides non-instrument specific routines for ACE data.""" + +import numpy as np + +ackn_str = ' '.join(("Please acknowledge the NASA National Space Science Data", + "Center, the Space Physics Data Facility, and the ACE", + "Principal Investigator, Edward C. Stone of the", + "California Institute of Technology, for usage of ACE", + "data from this site in publications and presentations.")) + +refs = {'mission': ' '.join(('Stone, E., Frandsen, A., Mewaldt, R. et al.', + 'The Advanced Composition Explorer. Space Science', + 'Reviews 86, 1–22 (1998).', + 'https://doi.org/10.1023/A:1005082526237')), + 'epam_l2': ' '.join(('Gold, R., Krimigis, S., Hawkins, S. et al.', + 'Electron, Proton, and Alpha Monitor on the', + 'Advanced Composition Explorer spacecraft.', + 'Space Science Reviews 86, 541–562 (1998).', + 'https://doi.org/10.1023/A:1005088115759')), + 'mag_l2': ' '.join(("Smith, C., L'Heureux, J., Ness, N. et al. The ACE", + "Magnetic Fields Experiment. Space Science Reviews", + "86, 613–632 (1998).", + "https://doi.org/10.1023/A:1005092216668")), + 'sis_l2': ' '.join(('Stone, E., Cohen, C., Cook, W. et al. The Solar', + 'Isotope Spectrometer for the Advanced Composition', + 'Explorer. Space Science Reviews 86, 357–408', + '(1998). https://doi.org/10.1023/A:1005027929871')), + 'swepam_l2': ' '.join(('McComas, D., Bame, S., Barker, P. et al. Solar', + 'Wind Electron Proton Alpha Monitor (SWEPAM)', + 'for the Advanced Composition Explorer. Space', + 'Science Reviews 86, 563–612 (1998).', + 'https://doi.org/10.1023/A:1005040232597')) + } + + +def clean(self): + """Clean ACE data to the specified level. 
+ + Note + ---- + Basic cleaning to replace fill values with NaN + + """ + + for key in self.variables: + if key != 'time': + fill = self.meta[key, self.meta.labels.fill_val] + + # Replace fill with nan + fill_mask = self[key] == fill + self[key] = self.data[key].where(~fill_mask) + self.meta[key] = {self.meta.labels.fill_val: np.nan} + return diff --git a/pysatNASA/instruments/methods/cdaweb.py b/pysatNASA/instruments/methods/cdaweb.py index cb2f3986..acfbc92a 100644 --- a/pysatNASA/instruments/methods/cdaweb.py +++ b/pysatNASA/instruments/methods/cdaweb.py @@ -9,6 +9,7 @@ import cdflib import datetime as dt +import numpy as np import os import pandas as pds import requests @@ -16,6 +17,7 @@ import xarray as xr from bs4 import BeautifulSoup +from cdasws import CdasWs import pysat from pysat.instruments.methods import general @@ -31,6 +33,35 @@ auto_CDF = libCDF +def try_inst_dict(inst_id, tag, supported_tags): + """Check that the inst_id and tag combination is valid. + + Parameters + ---------- + tag : str + Data product tag (default='') + inst_id : str + Instrument ID (default='') + supported_tags : dict + dict of dicts. Keys are supported tag names for download. Value is + a dict with 'remote_dir', 'fname'. Inteded to be + pre-set with functools.partial then assigned to new instrument code. + (default=None) + + Returns + ------- + inst_dict : dict or str + dictionary containing file location in spdf archive, or dataset ID for + cdasws + """ + try: + inst_dict = supported_tags[inst_id][tag] + except KeyError: + raise ValueError('inst_id / tag combo unknown.') + + return inst_dict + + def load(fnames, tag='', inst_id='', file_cadence=dt.timedelta(days=1), flatten_twod=True, pandas_format=True, epoch_name='Epoch', meta_processor=None, meta_translation=None, drop_meta_labels=None, @@ -42,9 +73,9 @@ def load(fnames, tag='', inst_id='', file_cadence=dt.timedelta(days=1), fnames : pandas.Series Series of filenames tag : str - tag or None (default='') + Data product tag (default='') inst_id : str - satellite id or None (default='') + Instrument ID (default='') file_cadence : dt.timedelta or pds.DateOffset pysat assumes a daily file cadence, but some instrument data files contain longer periods of time. This parameter allows the specification @@ -93,8 +124,10 @@ def load(fnames, tag='', inst_id='', file_cadence=dt.timedelta(days=1), Note ---- - This routine is intended to be used by pysat instrument modules supporting - a particular NASA CDAWeb dataset + - This routine is intended to be used by pysat instrument modules supporting + a particular NASA CDAWeb dataset + - pysatCDF (as of v0.3.2) does not support numpy>=1.24. Load errors may + arise. See https://github.com/pysat/pysatCDF/issues/46 """ @@ -110,6 +143,7 @@ def load(fnames, tag='', inst_id='', file_cadence=dt.timedelta(days=1), data, meta = load_xarray(fnames, tag=tag, inst_id=inst_id, epoch_name=epoch_name, + file_cadence=file_cadence, meta_processor=meta_processor, meta_translation=meta_translation, drop_meta_labels=drop_meta_labels) @@ -125,9 +159,9 @@ def load_pandas(fnames, tag='', inst_id='', file_cadence=dt.timedelta(days=1), fnames : pandas.Series Series of filenames tag : str - tag or None (default='') + Data product tag (default='') inst_id : str - satellite id or None (default='') + Instrument ID (default='') file_cadence : dt.timedelta or pds.DateOffset pysat assumes a daily file cadence, but some instrument data files contain longer periods of time. 
This parameter allows the specification @@ -224,13 +258,12 @@ def load_pandas(fnames, tag='', inst_id='', file_cadence=dt.timedelta(days=1), def load_xarray(fnames, tag='', inst_id='', - labels={'units': ('units', str), 'name': ('long_name', str), - 'notes': ('notes', str), 'desc': ('desc', str), - 'plot': ('plot_label', str), 'axis': ('axis', str), - 'scale': ('scale', str), - 'min_val': ('value_min', float), - 'max_val': ('value_max', float), - 'fill_val': ('fill', float)}, + file_cadence=dt.timedelta(days=1), + labels={'units': ('Units', str), 'name': ('Long_Name', str), + 'notes': ('Var_Notes', str), 'desc': ('CatDesc', str), + 'min_val': ('ValidMin', float), + 'max_val': ('ValidMax', float), + 'fill_val': ('FillVal', float)}, epoch_name='Epoch', meta_processor=None, meta_translation=None, drop_meta_labels=None): """Load NASA CDAWeb CDF files into an xarray Dataset. @@ -240,9 +273,14 @@ def load_xarray(fnames, tag='', inst_id='', fnames : pandas.Series Series of filenames tag : str - tag or (default='') + Data product tag (default='') inst_id : str - satellite id (default='') + Instrument ID (default='') + file_cadence : dt.timedelta or pds.DateOffset + pysat assumes a daily file cadence, but some instrument data files + contain longer periods of time. This parameter allows the specification + of regular file cadences greater than or equal to a day (e.g., weekly, + monthly, or yearly). (default=dt.timedelta(days=1)) labels : dict Dict where keys are the label attribute names and the values are tuples that have the label values and value types in that order. @@ -308,7 +346,16 @@ def load_xarray(fnames, tag='', inst_id='', # metadata for pysat using some assumptions. Depending upon your needs # the resulting pandas DataFrame may need modification. ldata = [] - for lfname in fnames: + + # Find unique files for monthly / yearly cadence. + # Arbitrary timestamp needed for comparison. + t0 = dt.datetime(2009, 1, 1) + if (t0 + file_cadence) > (t0 + dt.timedelta(days=1)): + lfnames = list(np.unique([fname[:-11] for fname in fnames])) + else: + lfnames = fnames + + for lfname in lfnames: temp_data = cdflib.cdf_to_xarray(lfname, to_datetime=True) ldata.append(temp_data) @@ -396,9 +443,10 @@ def load_xarray(fnames, tag='', inst_id='', return data, meta +# TODO(#103): Include support to unzip / untar files after download. def download(date_array, tag='', inst_id='', supported_tags=None, remote_url='https://cdaweb.gsfc.nasa.gov', data_path=None): - """Download NASA CDAWeb CDF data. + """Download NASA CDAWeb data. This routine is intended to be used by pysat instrument modules supporting a particular NASA CDAWeb dataset. @@ -408,9 +456,9 @@ def download(date_array, tag='', inst_id='', supported_tags=None, date_array : array-like Array of datetimes to download data for. Provided by pysat. tag : str - tag or None (default='') + Data product tag (default='') inst_id : str - satellite id or None (default='') + Instrument ID (default='') supported_tags : dict dict of dicts. Keys are supported tag names for download. Value is a dict with 'remote_dir', 'fname'. 
Inteded to be pre-set with @@ -427,7 +475,7 @@ def download(date_array, tag='', inst_id='', supported_tags=None, -------- :: - # download support added to cnofs_vefi.py using code below + # Download support added to cnofs_vefi.py using code below fn = 'cnofs_vefi_bfield_1sec_{year:4d}{month:02d}{day:02d}_v05.cdf' dc_b_tag = {'remote_dir': ''.join(('/pub/data/cnofs/vefi/bfield_1sec', '/{year:4d}/')), @@ -439,10 +487,7 @@ def download(date_array, tag='', inst_id='', supported_tags=None, """ - try: - inst_dict = supported_tags[inst_id][tag] - except KeyError: - raise ValueError('inst_id / tag combo unknown.') + inst_dict = try_inst_dict(inst_id, tag, supported_tags) # Naming scheme for files on the CDAWeb server remote_dir = inst_dict['remote_dir'] @@ -455,14 +500,23 @@ def download(date_array, tag='', inst_id='', supported_tags=None, stop=date_array[-1]) # Download only requested files that exist remotely - for date, fname in remote_files.iteritems(): + for date, fname in remote_files.items(): # Format files for specific dates and download location - formatted_remote_dir = remote_dir.format(year=date.year, - month=date.month, - day=date.day, - hour=date.hour, - min=date.minute, - sec=date.second) + # Year and day found in remote_dir: day is assumed to be day of year + if 'day' in remote_dir and 'month' not in remote_dir: + doy = date.timetuple().tm_yday + formatted_remote_dir = remote_dir.format(year=date.year, + day=doy, + hour=date.hour, + min=date.minute, + sec=date.second) + else: + formatted_remote_dir = remote_dir.format(year=date.year, + month=date.month, + day=date.day, + hour=date.hour, + min=date.minute, + sec=date.second) remote_path = '/'.join((remote_url.strip('/'), formatted_remote_dir.strip('/'), fname)) @@ -488,7 +542,102 @@ def download(date_array, tag='', inst_id='', supported_tags=None, return -def list_remote_files(tag=None, inst_id=None, start=None, stop=None, +def cdas_download(date_array, tag='', inst_id='', supported_tags=None, + data_path=None): + """Download NASA CDAWeb CDF data using cdasws. + + This routine is intended to be used by pysat instrument modules supporting + a particular NASA CDAWeb dataset. + + Parameters + ---------- + date_array : array-like + Array of datetimes to download data for. Provided by pysat. + tag : str + Data product tag (default='') + inst_id : str + Instrument ID (default='') + supported_tags : dict + dict of dicts. Keys are supported tag names for download. Value is + a dict with 'remote_dir', 'fname'. Inteded to be pre-set with + functools.partial then assigned to new instrument code. + (default=None) + data_path : str or NoneType + Path to data directory. If None is specified, the value previously + set in Instrument.files.data_path is used. (default=None) + + Note + ---- + Supported tags for this function use the cdaweb dataset naming convention. + You can find the data set names on CDAWeb or you can use cdasws. + + Starting from scratch using cdasws + :: + from cdasws import CdasWs + cdas = CdasWs() + + # Get list of available observatories/platforms. + cdas.get_observatories() + + # Once your observatory is located, list the available instruments. + cdas.get_instruments(observatory=‘observatory_name’) + + # Now list the available data sets for one instrument. + cdas.get_datasets(observatory=‘observatory_name’, + instrument=‘instrument_name’) + + # You can also list all of the datasets for an observatory. 
+ cdas.get_datasets(observatory=‘observatory_name’) + + Alternatively + :: + Visit https://cdaweb.gsfc.nasa.gov/ + Select the observatory you want from the list and press submit. + The following page will have a list of the data sets. + The bolded names are in the format that cdasws uses. + + Examples + -------- + :: + # Download support added to cnofs_vefi.py using code below + download_tags = {'': {'dc_b': 'CNOFS_VEFI_BFIELD_1SEC'}} + download = functools.partial(cdw.cdas_download, + supported_tags=download_tags) + + """ + + start = date_array[0] + stop = date_array[-1] + remote_files = cdas_list_remote_files(tag=tag, inst_id=inst_id, + start=start, stop=stop, + supported_tags=supported_tags, + series_out=False) + + for file in remote_files: + + fname = file.split('/')[-1] + saved_local_fname = os.path.join(data_path, fname) + + # Perform download + logger.info(' '.join(('Attempting to download file: ', + file))) + try: + with requests.get(file) as req: + if req.status_code != 404: + with open(saved_local_fname, 'wb') as open_f: + open_f.write(req.content) + logger.info('Successfully downloaded {:}.'.format( + saved_local_fname)) + else: + logger.info(' '.join(('File: "', file, + '" is not available'))) + except requests.exceptions.RequestException as exception: + logger.info(' '.join((str(exception), '- File: "', file, + '" Is not available'))) + return + + +def list_remote_files(tag='', inst_id='', start=None, stop=None, remote_url='https://cdaweb.gsfc.nasa.gov', supported_tags=None, two_digit_year_break=None, delimiter=None): @@ -500,11 +649,9 @@ def list_remote_files(tag=None, inst_id=None, start=None, stop=None, Parameters ---------- tag : str - Denotes type of file to load. Accepted types are . - (default='') + Data product tag (default='') inst_id : str - Specifies the satellite ID for a constellation. - (default='') + Instrument ID (default='') start : dt.datetime or NoneType Starting time for file list. A None value will start with the first file found. 
@@ -553,10 +700,7 @@ def list_remote_files(tag=None, inst_id=None, start=None, stop=None, """ - try: - inst_dict = supported_tags[inst_id][tag] - except KeyError: - raise ValueError('inst_id / tag combo unknown.') + inst_dict = try_inst_dict(inst_id, tag, supported_tags) # Naming scheme for files on the CDAWeb server format_str = '/'.join((inst_dict['remote_dir'].strip('/'), @@ -601,18 +745,27 @@ def list_remote_files(tag=None, inst_id=None, start=None, stop=None, stop = dt.datetime.now() if (stop is None) else stop if 'year' in search_dir['keys']: + url_list = [] if 'month' in search_dir['keys']: search_times = pds.date_range(start, stop + pds.DateOffset(months=1), freq='M') + for time in search_times: + subdir = format_dir.format(year=time.year, month=time.month) + url_list.append('/'.join((remote_url, subdir))) else: - search_times = pds.date_range(start, - stop + pds.DateOffset(years=1), - freq='Y') - url_list = [] - for time in search_times: - subdir = format_dir.format(year=time.year, month=time.month) - url_list.append('/'.join((remote_url, subdir))) + if 'day' in search_dir['keys']: + search_times = pds.date_range(start, stop + + pds.DateOffset(days=1), + freq='D') + else: + search_times = pds.date_range(start, stop + + pds.DateOffset(years=1), + freq='Y') + for time in search_times: + doy = int(time.strftime('%j')) + subdir = format_dir.format(year=time.year, day=doy) + url_list.append('/'.join((remote_url, subdir))) try: for top_url in url_list: for level in range(n_layers + 1): @@ -656,3 +809,82 @@ def list_remote_files(tag=None, inst_id=None, start=None, stop=None, stored_list = stored_list[mask] return stored_list + + +def cdas_list_remote_files(tag='', inst_id='', start=None, stop=None, + supported_tags=None, series_out=True): + """Return a list of every file for chosen remote data. + + This routine is intended to be used by pysat instrument modules supporting + a particular NASA CDAWeb dataset. + + Parameters + ---------- + tag : str + Data product tag (default='') + inst_id : str + Instrument ID (default='') + start : dt.datetime or NoneType + Starting time for file list. A None value will start with the first + file found. + (default=None) + stop : dt.datetime or NoneType + Ending time for the file list. A None value will stop with the last + file found. + (default=None) + supported_tags : dict + dict of dicts. Keys are supported tag names for download. Value is + a dict with 'remote_dir', 'fname'. Inteded to be + pre-set with functools.partial then assigned to new instrument code. + (default=None) + series_out : bool + boolean to determine output type. True for pandas series of file names, + and False for a list of the full web address. + + Returns + ------- + file_list : list + A list containing the verified available files + + Note + ---- + Supported tags for this function use the cdaweb dataset naming convention. + You can find the dataset names on cdaweb or you can use cdasws. 
+ + Examples + -------- + :: + download_tags = {'': {'dc_b': 'CNOFS_VEFI_BFIELD_1SEC'}} + list_remote_files = functools.partial(cdw.cdas_list_remote_files, + supported_tags=download_tags) + + download_tags = {'': {'': 'CNOFS_CINDI_IVM_500MS'}} + list_remote_files = functools.partial(cdw.cdas_list_remote_files, + supported_tags=download_tags) + """ + cdas = CdasWs() + dataset = try_inst_dict(inst_id, tag, supported_tags) + + if start is None and stop is None: + # Use the topmost directory without variables + start = cdas.get_inventory(identifier=dataset)[0].start + stop = cdas.get_inventory(identifier=dataset)[-1].end + elif stop is None: + stop = start + dt.timedelta(days=1) + elif start == stop: + stop = start + dt.timedelta(days=1) + + if isinstance(start, pds._libs.tslibs.timestamps.Timestamp): + start = start.tz_localize('utc') + stop = stop.tz_localize('utc') + + og_files = cdas.get_original_files(dataset=dataset, start=start, end=stop) + + if series_out: + name_list = [os.path.basename(f['Name']) for f in og_files[1]] + t_stamp = [pds.Timestamp(f['StartTime'][:10]) for f in og_files[1]] + file_list = pds.Series(data=name_list, index=t_stamp) + else: + file_list = [f['Name'] for f in og_files[1]] + + return file_list diff --git a/pysatNASA/instruments/methods/de2.py b/pysatNASA/instruments/methods/de2.py index ea33e23b..8e44575d 100644 --- a/pysatNASA/instruments/methods/de2.py +++ b/pysatNASA/instruments/methods/de2.py @@ -3,7 +3,10 @@ ackn_str = "The Dynamics Explorer 2 satellite data is provided through CDAWeb" -refs = {'lang': ' '.join(('J. P. Krehbiel, L. H. Brace, R. F. Theis, W. H.', +refs = {'fpi': ' '.join(('Hays, P B, Killeen, T L, and Kennedy, B C.', + '"Fabry-Perot interferometer on Dynamics Explorer".', + 'Space Sci. Instrum., v. 5, p. 395-416, 1981.')), + 'lang': ' '.join(('J. P. Krehbiel, L. H. Brace, R. F. Theis, W. H.', 'Pinkus, and R. B. Kaplan, The Dynamics Explorer 2', 'Langmuir Probe (LANG), Space Sci. Instrum., v. 5,', 'n. 4, p. 493, 1981.')), @@ -15,9 +18,17 @@ 'Lippincott, D. R. Zuccaro, B. J. Holt, L. H. Harmon,', 'and S. Sanatani, The retarding potential analyzer', 'for dynamics explorer-B, Space Sci. Instrum. 5,', - '503–510 (1981).')), + '503–510 (1981).\n', + 'Heelis, R. A., W. B. Hanson, C. R. Lippincott, D. R.', + 'Zuccaro, L. L. Harmon, B. J. Holt, J. E. Doherty, R.', + 'A. Power, The ion drift meter for Dynamics', + 'Explorer-B, Space Sci. Instrum., 5, 511, 1981.')), 'wats': ' '.join(('N. W. Spencer, L. E. Wharton, H. B. Niemann, A. E.', 'Hedin, G. R. Carrignan, J. C. Maurer, The', 'Dynamics Explorer Wind and Temperature Spectrometer', - 'Space Sci. Instrum., v. 5, n. 4, p. 417, 1981.')) + 'Space Sci. Instrum., v. 5, n. 4, p. 417, 1981.')), + 'vefi': ' '.join(('Maynard, N. C., E. A. Bielecki, H. G. Burdick,', + 'Instrumentation for vector electric field', + 'measurements from DE-B, Space Sci. Instrum., 5,', + '523, 1981.')) } diff --git a/pysatNASA/instruments/methods/dmsp.py b/pysatNASA/instruments/methods/dmsp.py new file mode 100644 index 00000000..3f930c0b --- /dev/null +++ b/pysatNASA/instruments/methods/dmsp.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +"""Provides non-instrument specific routines for the DMSP data.""" + +ackn_str = "".join(["This Defense Meteorological Satellite Program (DMSP) ", + "satellite data is provided through CDAWeb"]) + +refs = {'ssusi': ''.join(('Larry J. Paxton, Daniel Morrison, Yongliang Zhang,', + ' Hyosub Kil, Brian Wolven, Bernard S. Ogorzalek, ', + 'David C. Humm, and Ching-I. 
Meng "Validation of ', + 'remote sensing products produced by the Special ', + 'Sensor Ultraviolet Scanning Imager (SSUSI): a far ', + 'UV-imaging spectrograph on DMSP F-16", Proc. SPIE ', + '4485, Optical Spectroscopic Techniques, Remote ', + 'Sensing, and Instrumentation for Atmospheric and ', + 'Space Research IV, (30 January 2002); ', + 'doi:10.1117/12.454268'))} diff --git a/pysatNASA/instruments/methods/general.py b/pysatNASA/instruments/methods/general.py index 60ead57f..013e8c7c 100644 --- a/pysatNASA/instruments/methods/general.py +++ b/pysatNASA/instruments/methods/general.py @@ -23,6 +23,10 @@ def init(self, module, name): # Set acknowledgements self.acknowledgements = getattr(module, 'ackn_str') + + if hasattr(module, 'rules_url'): + self.acknowledgements.format(getattr(module, 'rules_url')[name]) + pysat.logger.info(self.acknowledgements) # Set references diff --git a/pysatNASA/instruments/methods/gold.py b/pysatNASA/instruments/methods/gold.py deleted file mode 100644 index 9ac51312..00000000 --- a/pysatNASA/instruments/methods/gold.py +++ /dev/null @@ -1,24 +0,0 @@ -# -*- coding: utf-8 -*- -"""Provides non-instrument specific routines for GOLD data.""" - -ack_str = ' '.join(('This is a data product from the NASA Global-scale', - 'Observations of the Limb and Disk (GOLD) mission, an', - 'Heliophysics Explorer mission of opportunity launched', - 'in January 2018.\n Responsibility of the mission', - 'science falls to the Principal Investigator, Dr.', - 'Richard Eastes at University of Colorado/LASP.\n', - 'Validation of the L1B data products falls to the', - 'instrument lead investigators/scientists.\n* EUV', - 'Dr. Bill McClintock\nValidation of the L2 data', - 'products falls to Computational Physics, Inc.\n* Dr.', - 'Jerry Lumpe\n (https://gold.cs.ucf.edu/).\nOverall', - 'validation of the products is overseen by the GOLD', - 'Project Scientist Dr. Alan Burns.\nUsers of these', - 'data should contact and acknowledge the Principal', - 'Investigator Dr. Richard Eastes and the party', - 'directly responsible for the data product and the', - 'NASA Explorers Project Office.')) -ref_str = ' '.join(('Eastes, R.W., McClintock, W.E., Burns, A.G. et al.', - 'The Global-Scale Observations of the Limb and Disk', - '(GOLD) Mission. Space Sci Rev 212, 383–408 (2017).', - 'https://doi.org/10.1007/s11214-017-0392-2')) diff --git a/pysatNASA/instruments/methods/jhuapl.py b/pysatNASA/instruments/methods/jhuapl.py new file mode 100644 index 00000000..e11971ea --- /dev/null +++ b/pysatNASA/instruments/methods/jhuapl.py @@ -0,0 +1,395 @@ +# -*- coding: utf-8 -*- +"""Module for data sets created by JHU APL.""" + +import datetime as dt +import numpy as np +import pandas as pds +import xarray as xr + +from pysat.utils.io import load_netcdf + + +def build_dtimes(data, var, epoch=None, epoch_var='time'): + """Build datetime objects from standard JHU APL time variables. 
+ + Parameters + ---------- + data : xr.Dataset + Xarray dataset with time variables + var : str + Common string to identify desired year, day of year, and seconds of day + epoch : dt.datetime or NoneType + Epoch to subtract from data or NoneType to get seconds of day from + `data` (default=None) + epoch_var : str + Epoch variable containing time data that seconds of day will be + obtained from if `epoch` != None (default='time') + + Returns + ------- + dtimes : list-like + List of datetime objects + + """ + ykey = 'YEAR{:s}'.format(var) + dkey = 'DOY{:s}'.format(var) + skey = 'TIME{:s}'.format(var) + + if epoch is None: + hours = [int(np.floor(sec / 3600.0)) for sec in data[skey].values] + mins = [int(np.floor((sec - hours[i] * 3600) / 60.0)) + for i, sec in enumerate(data[skey].values)] + secs = [int(np.floor((sec - hours[i] * 3600 - mins[i] * 60))) + for i, sec in enumerate(data[skey].values)] + dtimes = [ + dt.datetime.strptime( + "{:4d}-{:03d}-{:02d}-{:02d}-{:02d}-{:06.0f}".format( + int(data[ykey].values[i]), int(data[dkey].values[i]), + hours[i], mins[i], secs[i], + (sec - hours[i] * 3600 - mins[i] * 60 - secs[i]) * 1.0e6), + '%Y-%j-%H-%M-%S-%f') + for i, sec in enumerate(data[skey].values)] + else: + dtimes = [ + dt.datetime.strptime("{:4d}-{:03d}".format( + int(data[ykey].values[i]), int(data[dkey].values[i])), '%Y-%j') + + (pds.to_datetime(etime).to_pydatetime() - epoch) + for i, etime in enumerate(data[epoch_var].values)] + + return dtimes + + +def expand_coords(data_list, mdata, dims_equal=False): + """Ensure that dimensions do not vary from file to file. + + Parameters + ---------- + data_list : list-like + List of xr.Dataset objects with the same dimensions and variables + mdata : pysat.Meta + Metadata for the data in `data_list` + dims_equal : bool + Assert that all xr.Dataset objects have the same dimensions if True + (default=False) + + Returns + ------- + out_list : list-like + List of xr.Dataset objects with the same dimensions and variables, + now with dimensions that all have the same values and data padded + when needed. 
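The padding that `expand_coords` performs can be pictured with a small, self-contained sketch; the variable and dimension names below are illustrative only and do not come from the GUVI files::

    import numpy as np
    import xarray as xr

    # Two files yield the same variable with different 'nCross' lengths.
    short = xr.Dataset({'rad': (('nCross',), np.array([1.0, 2.0]))})
    full = xr.Dataset({'rad': (('nCross',), np.array([3.0, 4.0, 5.0]))})

    # Pad the shorter variable with a fill value so both datasets share the
    # largest 'nCross' size, mirroring the approach described above.
    fill_val = -999.0
    new_dat = np.full(3, fill_val)
    new_dat[:short.sizes['nCross']] = short['rad'].values
    short = xr.Dataset({'rad': (('nCross',), new_dat)})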
+ + """ + # Get a list of all the dimensions + if dims_equal: + dims = list(data_list[0].dims.keys()) if len(data_list) > 0 else [] + else: + dims = list() + for sdata in data_list: + if len(dims) == 0: + dims = list(sdata.dims.keys()) + else: + for dim in list(sdata.dims.keys()): + if dim not in dims: + dims.append(dim) + + # After loading all the data, determine which dimensions may need to be + # expanded, as they could differ in dimensions from file to file + combo_dims = {dim: max([sdata.dims[dim] for sdata in data_list + if dim in sdata.dims]) for dim in dims} + + # Expand the data so that all dimensions are the same shape + out_list = list() + for i, sdata in enumerate(data_list): + # Determine which dimensions need to be updated + fix_dims = [dim for dim in sdata.dims.keys() + if sdata.dims[dim] < combo_dims[dim]] + + new_data = {} + update_new = False + for dvar in sdata.data_vars.keys(): + # See if any dimensions need to be updated + update_dims = list(set(sdata[dvar].dims) & set(fix_dims)) + + # Save the old data as is, or pad it to have the right dims + if len(update_dims) > 0: + update_new = True + new_shape = list(sdata[dvar].values.shape) + old_slice = [slice(0, ns) for ns in new_shape] + + for dim in update_dims: + idim = list(sdata[dvar].dims).index(dim) + new_shape[idim] = combo_dims[dim] + + # Set the new data for output + new_dat = np.full(shape=new_shape, fill_value=mdata[ + dvar, mdata.labels.fill_val]) + new_dat[tuple(old_slice)] = sdata[dvar].values + new_data[dvar] = (sdata[dvar].dims, new_dat) + else: + new_data[dvar] = sdata[dvar] + + # Get the updated dataset + out_list.append(xr.Dataset(new_data) if update_new else sdata) + + return out_list + + +def load_edr_aurora(fnames, tag='', inst_id='', pandas_format=False): + """Load JHU APL EDR Aurora data and meta data. + + Parameters + ---------- + fnames : array-like + Iterable of filename strings, full path, to data files to be loaded. + tag : str + Tag name used to identify particular data set to be loaded (default='') + inst_id : str + Instrument ID name used to identify different instrument carriers + (default='') + pandas_format : bool + False for xarray format, True for pandas (default=False) + + Returns + ------- + data : pds.DataFrame or xr.Dataset + Data to be assigned to the pysat.Instrument.data object. + mdata : pysat.Meta + Pysat Meta data for each data variable. + + Note + ---- + Logger warning 'Epoch label: TIME is not a dimension.' is raised due to + the data format and pysat file expectations. + + Examples + -------- + :: + + inst = pysat.Instrument('timed', 'guvi', tag='edr-aur') + inst.load(2003, 1) + + """ + # Define the input variables + labels = {'units': ('UNITS', str), 'desc': ('TITLE', str)} + + # CDAWeb stores these files in the NetCDF format instead of the CDF format + single_data = list() + for fname in fnames: + # There are multiple files per day, with time as a variable rather + # than a dimension or coordinate. Additionally, no coordinates + # are assigned. + sdata, mdata = load_netcdf(fname, epoch_name='TIME', epoch_unit='s', + labels=labels, pandas_format=pandas_format) + + # Calculate the time for this data file. 
The pysat `load_netcdf` routine + # converts the 'TIME' parameter (seconds of day) into datetime using + # the UNIX epoch as the date offset + ftime = dt.datetime.strptime( + "{:4d}-{:03d}".format( + sdata['YEAR'].values.astype(int), + sdata['DOY'].values.astype(int)), '%Y-%j') + ( + pds.to_datetime(sdata['time'].values).to_pydatetime() + - dt.datetime(1970, 1, 1)) + + # Assign a datetime variable, making indexing possible + sdata['time'] = ftime + sdata = sdata.assign_coords( + {'time': sdata['time']}).expand_dims(dim='time') + + # Save the data in the file list + single_data.append(sdata) + + # Update the meta data + # TODO(https://github.com/pysat/pysat/issues/1078): Update the metadata by + # removing 'TIME', once possible + for var in mdata.keys(): + # Update the fill value, using information from the global header + mdata[var] = {mdata.labels.fill_val: mdata.header.NO_DATA_IN_BIN_VALUE} + + # After loading all the data, determine which dimensions need to be + # expanded. Pad the data so that all dimensions are the same shape + single_data = expand_coords(single_data, mdata, dims_equal=True) + + # Combine all the data, indexing along time + data = xr.combine_by_coords(single_data) + + return data, mdata + + +def load_sdr_aurora(fnames, tag='', inst_id='', pandas_format=False, + combine_times=False): + """Load JHU APL SDR data and meta data. + + Parameters + ---------- + fnames : array-like + Iterable of filename strings, full path, to data files to be loaded. + tag : str + Tag name used to identify particular data set to be loaded (default='') + inst_id : str + Instrument ID name used to identify different instrument carriers + (default='') + pandas_format : bool + False for xarray format, True for pandas (default=False) + combine_times : bool + For SDR data, optionally combine the different datetime coordinates + into a single time coordinate (default=False) + + Returns + ------- + data : pds.DataFrame or xr.Dataset + Data to be assigned to the pysat.Instrument.data object. + mdata : pysat.Meta + Pysat Meta data for each data variable. + + Note + ---- + Logger warning 'Epoch label: TIME is not a dimension.' is raised due to + the data format and pysat file expectations. 
+ + Examples + -------- + :: + + inst = pysat.Instrument('timed', 'guvi', tag='edr-aur') + inst.load(2003, 1) + + """ + # Define the input variables and working variables + labels = {'units': ('UNITS', str), 'desc': ('TITLE', str)} + load_time = 'TIME_DAY' + time_vars = ['YEAR_DAY', 'DOY_DAY', 'TIME_EPOCH_DAY', 'YEAR_NIGHT', + 'DOY_NIGHT', 'TIME_NIGHT', 'TIME_EPOCH_NIGHT'] + coords = ['PIERCEPOINT_NIGHT_LATITUDE', 'PIERCEPOINT_NIGHT_LONGITUDE', + 'PIERCEPOINT_NIGHT_ALTITUDE', 'PIERCEPOINT_NIGHT_SZA', + 'PIERCEPOINT_DAY_LATITUDE', 'PIERCEPOINT_DAY_LONGITUDE', + 'PIERCEPOINT_DAY_ALTITUDE', 'PIERCEPOINT_DAY_SZA'] + time_dims = ['time'] + rename_dims = {'nAlongDay': 'nAlong', 'nAlongNight': 'nAlong'} + + if tag == 'sdr-imaging': + time_vars.extend(["YEAR_DAY_AURORAL", "DOY_DAY_AURORAL", + "TIME_DAY_AURORAL", "TIME_EPOCH_DAY_AURORAL"]) + coords.extend(['PIERCEPOINT_DAY_LATITUDE_AURORAL', + 'PIERCEPOINT_DAY_LONGITUDE_AURORAL', + 'PIERCEPOINT_DAY_ALTITUDE_AURORAL', + 'PIERCEPOINT_DAY_SZA_AURORAL']) + time_dims.append('time_auroral') + rename_dims['nCrossDay'] = 'nCross' + rename_dims['nCrossNight'] = 'nCross' + rename_dims['nAlongDayAur'] = 'time_auroral' + elif tag == 'sdr-spectrograph': + coords.extend(['PIERCEPOINT_NIGHT_ZENITH_ANGLE', + 'PIERCEPOINT_NIGHT_SAZIMUTH', + 'PIERCEPOINT_DAY_ZENITH_ANGLE', + 'PIERCEPOINT_DAY_SAZIMUTH']) + + if inst_id == 'low_res': + time_vars.extend(["YEAR_GAIM_DAY", "DOY_GAIM_DAY", "TIME_GAIM_DAY", + "TIME_GAIM_NIGHT", "YEAR_GAIM_NIGHT", + "DOY_GAIM_NIGHT"]) + time_dims.extend(['time_gaim_day', 'time_gaim_night']) + rename_dims['nAlongGAIMDay'] = 'time_gaim_day' + rename_dims['nAlongGAIMNight'] = 'time_gaim_night' + + # CDAWeb stores these files in the NetCDF format instead of the CDF format + inners = None + for fname in fnames: + # There are multiple files per day, with time as a variable rather + # than a dimension or coordinate. Additionally, no coordinates + # are assigned. + sdata, mdata = load_netcdf(fname, epoch_name=load_time, epoch_unit='s', + labels=labels, pandas_format=pandas_format) + + # Calculate the time for this data file. 
The pysat `load_netcdf` routine + # converts the 'TIME' parameter (seconds of day) into datetime using + # the UNIX epoch as the date offset + ftime = build_dtimes(sdata, '_DAY', dt.datetime(1970, 1, 1)) + + # Ensure identical day and night dimensions + if sdata.dims['nAlongDay'] != sdata.dims['nAlongNight']: + raise ValueError('Along-track day and night dimensions differ') + + if 'nCrossDay' in rename_dims.keys(): + if sdata.dims['nCrossDay'] != sdata.dims['nCrossNight']: + raise ValueError('Cross-track day and night dimensions differ') + + # Combine identical dimensions and rename 'nAlong' to 'time' + sdata = sdata.rename_dims(rename_dims) + + if tag == 'sdr-imaging': + sdata = sdata.assign(time_auroral=build_dtimes(sdata, + '_DAY_AURORAL')) + elif tag == 'sdr-spectrograph' and inst_id == 'low_res': + sdata = sdata.assign(time_gaim_day=build_dtimes( + sdata, '_GAIM_DAY'), time_gaim_night=build_dtimes( + sdata, '_GAIM_NIGHT')) + + # Test that day and night times are consistent to the nearest second + for i, ntime in enumerate(build_dtimes(sdata, '_NIGHT')): + if abs(ntime - ftime[i]).total_seconds() > 1.0: + raise ValueError('Day and night times differ') + + # Remove redundant time variables and rname the 'nAlong' dimension + sdata = sdata.drop_vars(time_vars).swap_dims({'nAlong': 'time'}) + + # Assign time as a coordinate for combining files indexing + sdata['time'] = ftime + + # Separate into inner datasets + inner_keys = {dim: [key for key in sdata.keys() + if dim in sdata[key].dims] for dim in time_dims} + inner_dat = {dim: sdata.get(inner_keys[dim]) for dim in time_dims} + + # Add 'single_var's into 'time' dataset to keep track + sv_keys = [val.name for val in sdata.values() + if 'single_var' in val.dims] + singlevar_set = sdata.get(sv_keys) + inner_dat['time'] = xr.merge([inner_dat['time'], singlevar_set]) + + # Concatenate along desired dimension with previous files' data + if inners is None: + # No previous data, assign the data separated by dimension + inners = dict(inner_dat) + else: + # Concatenate with existing data + inners = {dim: xr.concat([inners[dim], inner_dat[dim]], dim=dim) + for dim in time_dims} + + # Update the meta data + # TODO(https://github.com/pysat/pysat/issues/1078): Update the metadata by + # removing dimensions and time, once possible + for var in mdata.keys(): + # Update the fill value, using information from the global header + mdata[var] = {mdata.labels.fill_val: mdata.header.NO_DATA_IN_BIN_VALUE} + + # Combine all time dimensions + if combine_times: + data_list = expand_coords([inners[dim] if dim == 'time' else + inners[dim].rename_dims({dim: 'time'}) + for dim in time_dims], mdata, + dims_equal=False) + else: + data_list = [inners[dim] for dim in time_dims] + + # Combine all the data, indexing along time + data = xr.merge(data_list) + + # Set additional coordinates + data = data.set_coords(coords).assign_coords({'time': data['time']}) + if tag == 'sdr-imaging': + data = data.assign_coords( + {'nchan': ["121.6nm", "130.4nm", "135.6nm", "LBHshort", "LBHlong"], + "nchanAur": ["121.6nm", "130.4nm", "135.6nm", "LBHshort", + "LBHlong"], + "nCross": sdata.nCross.data, + "nCrossDayAur": sdata.nCrossDayAur.data}) + elif tag == 'sdr-spectrograph': + data = data.assign_coords({"nchan": ["121.6nm", "130.4nm", "135.6nm", + "LBHshort", "LBHlong", "?"]}) + + # Ensure the data is ordered correctly + data = data.sortby('time') + + return data, mdata diff --git a/pysatNASA/instruments/methods/ses14.py b/pysatNASA/instruments/methods/ses14.py new file mode 100644 index 
00000000..5307d592 --- /dev/null +++ b/pysatNASA/instruments/methods/ses14.py @@ -0,0 +1,24 @@ +# -*- coding: utf-8 -*- +"""Provides non-instrument specific routines for SES14 instruments.""" + +ackn_str = ' '.join(('This is a data product from the NASA Global-scale', + 'Observations of the Limb and Disk (GOLD) mission, an', + 'Heliophysics Explorer mission of opportunity launched', + 'in January 2018.\n Responsibility of the mission', + 'science falls to the Principal Investigator, Dr.', + 'Richard Eastes at University of Colorado/LASP.\n', + 'Validation of the L1B data products falls to the', + 'instrument lead investigators/scientists.\n* EUV', + 'Dr. Bill McClintock\nValidation of the L2 data', + 'products falls to Computational Physics, Inc.\n* Dr.', + 'Jerry Lumpe\n (https://gold.cs.ucf.edu/).\nOverall', + 'validation of the products is overseen by the GOLD', + 'Project Scientist Dr. Alan Burns.\nUsers of these', + 'data should contact and acknowledge the Principal', + 'Investigator Dr. Richard Eastes and the party', + 'directly responsible for the data product and the', + 'NASA Explorers Project Office.')) +refs = {'gold': ' '.join(('Eastes, R.W., McClintock, W.E., Burns, A.G. et', + 'al., The Global-Scale Observations of the Limb', + 'and Disk (GOLD) Mission. Space Sci Rev 212,', + '383–408 (2017). doi:10.1007/s11214-017-0392-2'))} diff --git a/pysatNASA/instruments/methods/timed.py b/pysatNASA/instruments/methods/timed.py new file mode 100644 index 00000000..e6d4e2fa --- /dev/null +++ b/pysatNASA/instruments/methods/timed.py @@ -0,0 +1,33 @@ +# -*- coding: utf-8 -*- +"""Provides non-instrument specific routines for the TIMED data.""" + +rules_url = {'guvi': 'http://guvitimed.jhuapl.edu/home_guvi-datausage', + 'saber': 'https://saber.gats-inc.com/data_services.php', + 'see': 'https://www.timed.jhuapl.edu/WWW/scripts/mdc_rules.pl'} + +ackn_str = "".join(["This Thermosphere Ionosphere Mesosphere Energetics ", + "Dynamics (TIMED) satellite data is provided through ", + "CDAWeb. Please see the Rules of the Road at {:s}"]) + +refs = {'guvi': ''.join(['Larry J. Paxton, Andrew B. Christensen, David C. ', + 'Humm, Bernard S. Ogorzalek, C. Thompson Pardoe, ', + 'Daniel Morrison, Michele B. Weiss, W. Crain, ', + 'Patricia H. Lew, Dan J. Mabry, John O. Goldsten, ', + 'Stephen A. Gary, David F. Persons, Mark J. Harold, ', + 'E. Brian Alvarez, Carl J. Ercol, Douglas J. ', + 'Strickland, and Ching-I. Meng "Global ultraviolet ', + 'imager (GUVI): measuring composition and energy ', + 'inputs for the NASA Thermosphere Ionosphere ', + 'Mesosphere Energetics and Dynamics (TIMED) mission",', + 'Proc. SPIE 3756, Optical Spectroscopic Techniques ', + 'and Instrumentation for Atmospheric and Space ', + 'Research III, (20 October 1999); ', + 'doi:10.1117/12.366380']), + 'saber': '', + 'see': ' '.join(('Woods, T. N., Eparvier, F. G., Bailey,', + 'S. M., Chamberlin, P. C., Lean, J.,', + 'Rottman, G. J., Solomon, S. C., Tobiska,', + 'W. K., and Woodraska, D. L. (2005),', + 'Solar EUV Experiment (SEE): Mission', + 'overview and first results, J. 
Geophys.', + 'Res., 110, A01312, doi:10.1029/2004JA010765.'))} diff --git a/pysatNASA/instruments/omni_hro.py b/pysatNASA/instruments/omni_hro.py index d709c1b5..aad7d05d 100644 --- a/pysatNASA/instruments/omni_hro.py +++ b/pysatNASA/instruments/omni_hro.py @@ -46,7 +46,6 @@ from pysat.instruments.methods import general as mm_gen from pysat import logger -from pysat.utils import time as pysat_time from pysatNASA.instruments.methods import cdaweb as cdw from pysatNASA.instruments.methods import omni as mm_omni @@ -140,59 +139,12 @@ def clean(self): file_cadence=pds.DateOffset(months=1)) # Set the list_remote_files routine -remote_dir = '/pub/data/omni/omni_cdaweb/hro_{tag:s}/{{year:4d}}/' -download_tags = {inst_id: {tag: {'remote_dir': remote_dir.format(tag=tag), - 'fname': supported_tags[inst_id][tag]} - for tag in tags.keys()} - for inst_id in inst_ids.keys()} -list_remote_files = functools.partial(cdw.list_remote_files, - supported_tags=download_tags) - - -# Set the download routine -def download(date_array, tag, inst_id, data_path, update_files=False): - """Download OMNI HRO data from CDAWeb. - - Parameters - ---------- - date_array : array-like - Sequence of dates for which files will be downloaded. - tag : str - Denotes type of file to load. - inst_id : str - Specifies the satellite ID for a constellation. - data_path : str - Path to data directory. - update_files : bool - Re-download data for files that already exist if True (default=False) - - Raises - ------ - IOError - If a problem is encountered connecting to the gateway or retrieving - data from the repository. - - Warnings - -------- - Only able to download current forecast data, not archived forecasts. - - Note - ---- - Called by pysat. Not intended for direct use by user. - - """ - - # Set the download tags - - # Adjust the date_array for monthly downloads - if date_array.freq != 'MS': - date_array = pysat_time.create_date_range( - dt.datetime(date_array[0].year, date_array[0].month, 1), - date_array[-1], freq='MS') +download_tags = {'': {'1min': 'OMNI_HRO_1MIN', '5min': 'OMNI_HRO_5MIN'}} +download = functools.partial(cdw.cdas_download, + supported_tags=download_tags) - cdw.download(date_array, tag=tag, inst_id=inst_id, - supported_tags=download_tags, data_path=data_path) - return +list_remote_files = functools.partial(cdw.cdas_list_remote_files, + supported_tags=download_tags) # Set the load routine diff --git a/pysatNASA/instruments/ses14_gold.py b/pysatNASA/instruments/ses14_gold.py index e5f29ac8..7da4c7df 100644 --- a/pysatNASA/instruments/ses14_gold.py +++ b/pysatNASA/instruments/ses14_gold.py @@ -14,9 +14,13 @@ Warnings -------- -- The cleaning parameters for the instrument are still under development. -- strict_time_flag must be set to False +The cleaning parameters for the instrument are still under development. +Note +---- +In roughly 0.3% of daily files, Channel A and Channel B scans begin at the same +time. One microsecond is added to Channel B to ensure uniqueness in the xarray +index. The nominal scan rate for each channel is every 30 minutes. 
Examples -------- @@ -24,8 +28,7 @@ import datetime as dt import pysat - nmax = pysat.Instrument(platform='ses14', name='gold', tag='nmax' - strict_time_flag=False) + nmax = pysat.Instrument(platform='ses14', name='gold', tag='nmax') nmax.download(dt.datetime(2020, 1, 1), dt.datetime(2020, 1, 31)) nmax.load(2020, 1) @@ -36,12 +39,11 @@ import numpy as np from pysat.instruments.methods import general as ps_gen -from pysat import logger from pysat.utils.io import load_netcdf from pysatNASA.instruments.methods import cdaweb as cdw from pysatNASA.instruments.methods import general as mm_nasa -from pysatNASA.instruments.methods import gold as mm_gold +from pysatNASA.instruments.methods import ses14 as mm_gold # ---------------------------------------------------------------------------- # Instrument attributes @@ -61,35 +63,11 @@ # ---------------------------------------------------------------------------- # Instrument methods - -def init(self): - """Initialize the Instrument object with instrument specific values. - - Runs once upon instantiation. - - Parameters - ---------- - self : pysat.Instrument - Instrument class object - - """ - - logger.info(mm_gold.ack_str) - logger.warning(' '.join(('Time stamps may be non-unique because Channel A', - 'and B are different instruments. An upgrade to', - 'the pysat.Constellation object is required to', - 'solve this issue. See pysat issue #614 for more', - 'info.'))) - self.acknowledgements = mm_gold.ack_str - self.references = mm_gold.ref_str - - return - +init = functools.partial(mm_nasa.init, module=mm_gold, name=name) # No cleaning, use standard warning function instead clean = mm_nasa.clean_warn - # ---------------------------------------------------------------------------- # Instrument functions # @@ -104,19 +82,15 @@ def init(self): supported_tags=supported_tags) # Set the download routine -download_tags = {inst_id: - {tag: {'remote_dir': ''.join(('/pub/data/gold/level2/', tag, - '/{year:4d}/')), - 'fname': supported_tags[''][tag]} - for tag in tags.keys()} for inst_id in inst_ids.keys()} -download = functools.partial(cdw.download, supported_tags=download_tags) +download_tags = {'': {'nmax': 'GOLD_L2_NMAX'}} +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) # Set the list_remote_files routine -list_remote_files = functools.partial(cdw.list_remote_files, +list_remote_files = functools.partial(cdw.cdas_list_remote_files, supported_tags=download_tags) -def load(fnames, tag=None, inst_id=None): +def load(fnames, tag='', inst_id=''): """Load GOLD NMAX data into `xarray.Dataset` and `pysat.Meta` objects. This routine is called as needed by pysat. It is not intended @@ -127,12 +101,12 @@ def load(fnames, tag=None, inst_id=None): fnames : array-like iterable of filename strings, full path, to data files to be loaded. This input is nominally provided by pysat itself. - tag : string - tag name used to identify particular data set to be loaded. - This input is nominally provided by pysat itself. - inst_id : string - Satellite ID used to identify particular data set to be loaded. - This input is nominally provided by pysat itself. + tag : str + Tag name used to identify particular data set to be loaded. + This input is nominally provided by pysat itself. (default='') + inst_id : str + Instrument ID used to identify particular data set to be loaded. + This input is nominally provided by pysat itself. 
(default='') **kwargs : extra keywords Passthrough for additional keyword arguments specified when instantiating an Instrument object. These additional keywords @@ -185,9 +159,17 @@ def load(fnames, tag=None, inst_id=None): drop_meta_labels='FILLVAL') if tag == 'nmax': - # Add time coordinate from scan_start_time - data['time'] = [dt.datetime.strptime(str(val), "b'%Y-%m-%dT%H:%M:%SZ'") - for val in data['scan_start_time'].values] + # Add time coordinate from scan_start_time. + time = [dt.datetime.strptime(str(val), "b'%Y-%m-%dT%H:%M:%SZ'") + for val in data['scan_start_time'].values] + + # Add a delta of 1 microsecond for channel B. + delta_time = [1 if ch == b'CHB' else 0 for ch in data['channel'].values] + data['time'] = [time[i] + dt.timedelta(microseconds=delta_time[i]) + for i in range(0, len(time))] + + # Sort times to ensure monotonic increase. + data = data.sortby('time') # Update coordinates with dimensional data data = data.assign_coords({'nlats': data['nlats'], diff --git a/pysatNASA/instruments/timed_guvi.py b/pysatNASA/instruments/timed_guvi.py new file mode 100644 index 00000000..36b8111b --- /dev/null +++ b/pysatNASA/instruments/timed_guvi.py @@ -0,0 +1,197 @@ +# -*- coding: utf-8 -*- +"""Module for the TIMED GUVI instrument. + +Supports the Global UltraViolet Imager (GUVI) instrument on the Thermosphere +Ionosphere Mesosphere Energetics Dynamics (TIMED) satellite data from the +NASA Coordinated Data Analysis Web (CDAWeb). + +From JHU APL: + +The Global Ultraviolet Imager (GUVI) is one of four instruments that constitute +the TIMED spacecraft, the first mission of the NASA Solar Connections program. +The TIMED spacecraft is being built by Johns Hopkins University Applied Physics +Laboratory and GUVI is a joint collaboration between JHU/APL and the Aerospace +Corporation. TIMED will be used to study the energetics and dynamics of the +Mesosphere and lower Thermosphere between an altitude of approximately 60 to 180 +kilometers. + +References +---------- +Larry J. Paxton, Andrew B. Christensen, David C. Humm, Bernard S. Ogorzalek, C. +Thompson Pardoe, Daniel Morrison, Michele B. Weiss, W. Crain, Patricia H. Lew, +Dan J. Mabry, John O. Goldsten, Stephen A. Gary, David F. Persons, Mark J. +Harold, E. Brian Alvarez, Carl J. Ercol, Douglas J. Strickland, and Ching-I. +Meng "Global ultraviolet imager (GUVI): measuring composition and energy inputs +for the NASA Thermosphere Ionosphere Mesosphere Energetics and Dynamics (TIMED) +mission", Proc. SPIE 3756, Optical Spectroscopic Techniques and Instrumentation +for Atmospheric and Space Research III, (20 October 1999); +https://doi.org/10.1117/12.366380 + +Properties +---------- +platform + 'timed' +name + 'guvi' +tag + 'edr-aur' + 'sdr-imaging' + 'sdr-spectrograph' +inst_id + '' + 'high_res' + 'low_res' + +Warnings +-------- +- Currently no cleaning routine. 
+ +Example +------- +:: + + import pysat + guvi = pysat.Instrument(platform='timed', name='guvi', + inst_id='sdr-imaging', tag='low_res') + guvi.download(dt.datetime(2005, 6, 28), dt.datetime(2005, 6, 29)) + guvi.load(date=dt.datetime(2005, 6, 28)) + +""" + +import datetime as dt +import functools + +from pysat.instruments.methods import general as mm_gen + +from pysatNASA.instruments.methods import cdaweb as cdw +from pysatNASA.instruments.methods import general as mm_nasa +from pysatNASA.instruments.methods import jhuapl +from pysatNASA.instruments.methods import timed as mm_timed + +# ---------------------------------------------------------------------------- +# Instrument attributes + +platform = 'timed' +name = 'guvi' +tags = {'edr-aur': 'Level 2 Auroral disk imaging mode', + 'sdr-imaging': 'Level 1C imaging data', + 'sdr-spectrograph': 'Level 1C spectrograph data'} +inst_ids = {'': ['edr-aur'], + 'high_res': ['sdr-imaging', 'sdr-spectrograph'], + 'low_res': ['sdr-imaging', 'sdr-spectrograph']} + +pandas_format = False +multi_file_day = True + +# ---------------------------------------------------------------------------- +# Instrument test attributes + +_test_dates = {iid: {tag: dt.datetime(2005, 6, 28) for tag in inst_ids[iid]} + for iid in inst_ids.keys()} +_test_load_opt = {iid: {tag: {'combine_times': True} + for tag in inst_ids[iid]} for iid in ['high_res', + 'low_res']} + +# ---------------------------------------------------------------------------- +# Instrument methods + +# Use standard init routine +init = functools.partial(mm_nasa.init, module=mm_timed, name=name) + +# No cleaning, use standard warning function instead +clean = mm_nasa.clean_warn + +# ---------------------------------------------------------------------------- +# Instrument functions +# +# Use the default CDAWeb and pysat methods + +# Set the list_files routine +fname = ''.join(('TIMED_GUVI_{lvl:s}{mode:s}_{{year:04d}}{{day:03d}}', + '{{hour:02d}}{{minute:02d}}{{second:02d}}-?????????????_REV', + '??????_Av{{version:02d}}-??r{{revision:03d}}.nc')) +file_lvl = {'low_res': 'L1C-2-disk', 'high_res': 'L1C-disk', '': 'L2B'} +mode = {'sdr-imaging': '-IMG', 'sdr-spectrograph': '-SPECT', + 'edr-aur': '-edr-aur-IMG'} +supported_tags = {inst_id: {tag: fname.format(lvl=file_lvl[inst_id], + mode=mode[tag]) + for tag in tags.keys()} + for inst_id in inst_ids.keys()} +list_files = functools.partial(mm_gen.list_files, supported_tags=supported_tags) + +# Set the download routine +url = ''.join(('/pub/data/timed/guvi/levels_v13/{lvl:s}/{mode:s}/', + '{{year:4d}}/{{day:03d}}/')) +url_lvl = {'sdr-imaging': 'level1c', 'sdr-spectrograph': 'level1c', + 'edr-aur': 'level2b'} +url_mode = {tag: 'imaging/edr-aur' if tag == 'edr-aur' else tag.split('-')[1] + for tag in tags.keys()} +download_tags = {iid: {tag: {'remote_dir': url.format(lvl=url_lvl[tag], + mode=url_mode[tag]), + 'fname': fname.format(lvl=file_lvl[iid], + mode=mode[tag])} + for tag in tags.keys()} for iid in inst_ids.keys()} +download = functools.partial(cdw.download, supported_tags=download_tags) + +# Set the list_remote_files routine +list_remote_files = functools.partial(cdw.list_remote_files, + supported_tags=download_tags) + + +# Set the load routine +def load(fnames, tag='', inst_id='', combine_times=False): + """Load TIMED GUVI data into `xarray.DataSet` and `pysat.Meta` objects. + + This routine is called as needed by pysat. It is not intended + for direct user interaction. 
+ + Parameters + ---------- + fnames : array-like + iterable of filename strings, full path, to data files to be loaded. + This input is nominally provided by pysat itself. + tag : str + tag name used to identify particular data set to be loaded. + This input is nominally provided by pysat itself. + inst_id : str + Satellite ID used to identify particular data set to be loaded. + This input is nominally provided by pysat itself. + combine_times : bool + For SDR data, optionally combine the different datetime coordinates + into a single time coordinate (default=False) + + Returns + ------- + data : xr.DataSet + A xarray DataSet with data prepared for the pysat.Instrument + meta : pysat.Meta + Metadata formatted for a pysat.Instrument object. + + Raises + ------ + ValueError + If temporal dimensions are not consistent + + Note + ---- + Any additional keyword arguments passed to pysat.Instrument + upon instantiation are passed along to this routine. + + Examples + -------- + :: + + inst = pysat.Instrument('timed', 'guvi', + inst_id='high_res', tag='sdr-imaging') + inst.load(2005, 179) + + """ + if tag == 'edr-aur': + data, meta = jhuapl.load_edr_aurora(fnames, tag, inst_id, + pandas_format=pandas_format) + else: + data, meta = jhuapl.load_sdr_aurora(fnames, tag, inst_id, + pandas_format=pandas_format, + combine_times=combine_times) + + return data, meta diff --git a/pysatNASA/instruments/timed_saber.py b/pysatNASA/instruments/timed_saber.py index 038bc3cc..16357673 100644 --- a/pysatNASA/instruments/timed_saber.py +++ b/pysatNASA/instruments/timed_saber.py @@ -7,17 +7,19 @@ Properties ---------- -platform : string +platform : str 'timed' -name : string +name : str 'saber' -tag : string +tag : str None supported -inst_id : string +inst_id : str None supported Note ---- +Note on Temperature Errors: https://saber.gats-inc.com/temp_errors.php + SABER "Rules of the Road" for DATA USE Users of SABER data are asked to respect the following guidelines @@ -36,9 +38,9 @@ - Pre-prints of publications and conference abstracts should be widely distributed to interested parties within the mission and related projects. + Warnings -------- -- Note on Temperature Errors: http://saber.gats-inc.com/temp_errors.php - No cleaning routine """ @@ -48,10 +50,10 @@ # CDAWeb methods prewritten for pysat from pysat.instruments.methods import general as mm_gen -from pysat import logger from pysatNASA.instruments.methods import cdaweb as cdw from pysatNASA.instruments.methods import general as mm_nasa +from pysatNASA.instruments.methods import timed as mm_timed # ---------------------------------------------------------------------------- # Instrument attributes @@ -76,33 +78,18 @@ # ---------------------------------------------------------------------------- # Instrument methods - -def init(self): - """Initialize the Instrument object with instrument specific values. - - Runs once upon instantiation. - - """ - - rules_url = 'https://saber.gats-inc.com/data_services.php' - ackn_str = ' '.join(('Please see the Rules of the Road at', rules_url)) - - logger.info(ackn_str) - self.acknowledgements = ackn_str - self.references = '' - - return - +init = functools.partial(mm_nasa.init, module=mm_timed, name=name) # No cleaning, use standard warning function instead clean = mm_nasa.clean_warn - # ---------------------------------------------------------------------------- # Instrument functions # # Use the default CDAWeb and pysat methods +# TODO(#104): Switch to netCDF4 files once unzip (#103) is supported. 
+ # Set the list_files routine fname = ''.join(('timed_l2a_saber_{year:04d}{month:02d}{day:02d}', '{hour:02d}{minute:02d}_v{version:02d}-{revision:02d}-', @@ -115,12 +102,9 @@ def init(self): load = cdw.load # Set the download routine -basic_tag = {'remote_dir': ''.join(('/pub/data/timed/saber/level2a_cdf', - '/{year:4d}/{month:02d}/')), - 'fname': fname} -download_tags = {'': {'': basic_tag}} -download = functools.partial(cdw.download, supported_tags=download_tags) +download_tags = {'': {'': 'TIMED_L2A_SABER'}} +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) # Set the list_remote_files routine -list_remote_files = functools.partial(cdw.list_remote_files, +list_remote_files = functools.partial(cdw.cdas_list_remote_files, supported_tags=download_tags) diff --git a/pysatNASA/instruments/timed_see.py b/pysatNASA/instruments/timed_see.py index 903058ab..1980443e 100644 --- a/pysatNASA/instruments/timed_see.py +++ b/pysatNASA/instruments/timed_see.py @@ -17,19 +17,12 @@ None inst_id None supported -flatten_twod - If True, then two dimensional data is flattened across - columns. Name mangling is used to group data, first column - is 'name', last column is 'name_end'. In between numbers are - appended 'name_1', 'name_2', etc. All data for a given 2D array - may be accessed via, data.loc[:, 'item':'item_end'] - If False, then 2D data is stored as a series of DataFrames, - indexed by Epoch. data.loc[0, 'item'] - (default=True) Note ---- - no tag required +- cdflib load routine raises ISTP Compliance Warnings for several variables. + This is due to how the Epoch is listed in the original files. Warnings -------- @@ -42,10 +35,10 @@ import pandas as pds from pysat.instruments.methods import general as mm_gen -from pysat import logger from pysatNASA.instruments.methods import cdaweb as cdw from pysatNASA.instruments.methods import general as mm_nasa +from pysatNASA.instruments.methods import timed as mm_timed # ---------------------------------------------------------------------------- # Instrument attributes @@ -54,6 +47,7 @@ name = 'see' tags = {'': ''} inst_ids = {'': [tag for tag in tags.keys()]} +pandas_format = False # ---------------------------------------------------------------------------- # Instrument test attributes @@ -63,39 +57,18 @@ # ---------------------------------------------------------------------------- # Instrument methods - -def init(self): - """Initialize the Instrument object with instrument specific values. - - Runs once upon instantiation. - - """ - - rules_url = 'https://www.timed.jhuapl.edu/WWW/scripts/mdc_rules.pl' - ackn_str = ' '.join(('Please see the Rules of the Road at', rules_url)) - logger.info(ackn_str) - self.acknowledgements = ackn_str - self.references = ' '.join(('Woods, T. N., Eparvier, F. G., Bailey,', - 'S. M., Chamberlin, P. C., Lean, J.,', - 'Rottman, G. J., Solomon, S. C., Tobiska,', - 'W. K., and Woodraska, D. L. (2005),', - 'Solar EUV Experiment (SEE): Mission', - 'overview and first results, J. Geophys.', - 'Res., 110, A01312,', - 'doi:10.1029/2004JA010765.')) - - return - +init = functools.partial(mm_nasa.init, module=mm_timed, name=name) # No cleaning, use standard warning function instead clean = mm_nasa.clean_warn - # ---------------------------------------------------------------------------- # Instrument functions # # Use the default CDAWeb and pysat methods +# TODO(#104): Switch to netCDF4 files once unzip (#103) is supported. 
+ # Set the list_files routine fname = 'timed_l3a_see_{year:04d}{month:02d}{day:02d}_v{version:02d}.cdf' supported_tags = {'': {'': fname}} @@ -104,15 +77,13 @@ def init(self): file_cadence=pds.DateOffset(months=1)) # Set the load routine -load = functools.partial(cdw.load, file_cadence=pds.DateOffset(months=1)) +load = functools.partial(cdw.load, pandas_format=pandas_format, + file_cadence=pds.DateOffset(months=1)) # Set the download routine -basic_tag = {'remote_dir': ''.join(('/pub/data/timed/see/data/level3a_cdf', - '/{year:4d}/{month:02d}/')), - 'fname': fname} -download_tags = {'': {'': basic_tag}} -download = functools.partial(cdw.download, supported_tags=download_tags) +download_tags = {'': {'': 'TIMED_L3A_SEE'}} +download = functools.partial(cdw.cdas_download, supported_tags=download_tags) # Set the list_remote_files routine -list_remote_files = functools.partial(cdw.list_remote_files, +list_remote_files = functools.partial(cdw.cdas_list_remote_files, supported_tags=download_tags) diff --git a/pysatNASA/tests/test_instruments.py b/pysatNASA/tests/test_instruments.py index 07e850f4..25776e41 100644 --- a/pysatNASA/tests/test_instruments.py +++ b/pysatNASA/tests/test_instruments.py @@ -16,6 +16,15 @@ # Import the test classes from pysat from pysat.tests.classes import cls_instrument_library as clslib +try: + import pysatCDF # noqa: F401 + # If this successfully imports, tests need to be run with both pysatCDF + # and cdflib + cdflib_only = False +except ImportError: + # pysatCDF is not present, standard tests default to cdflib. + cdflib_only = True + # Tell the standard tests which instruments to run each test on. # Need to return instrument list for custom tests. @@ -27,7 +36,9 @@ for inst in instruments['download']: fname = inst['inst_module'].supported_tags[inst['inst_id']][inst['tag']] if '.cdf' in fname: - instruments['cdf'].append(inst) + temp_inst, _ = clslib.initialize_test_inst_and_date(inst) + if temp_inst.pandas_format: + instruments['cdf'].append(inst) class TestInstruments(clslib.InstLibTests): @@ -42,6 +53,9 @@ class TestInstruments(clslib.InstLibTests): @pytest.mark.second @pytest.mark.parametrize("inst_dict", instruments['cdf']) + @pytest.mark.skipif(cdflib_only, + reason=" ".join(("Additional load tests not required", + "when pysatCDF not installed"))) def test_load_cdflib(self, inst_dict): """Test that instruments load at each cleaning level. 
diff --git a/pysatNASA/tests/test_methods_cdaweb.py b/pysatNASA/tests/test_methods_cdaweb.py index cae3c747..f601517d 100644 --- a/pysatNASA/tests/test_methods_cdaweb.py +++ b/pysatNASA/tests/test_methods_cdaweb.py @@ -1,6 +1,7 @@ """Unit tests for the cdaweb instrument methods.""" import datetime as dt +import pandas as pds import requests import pytest @@ -13,14 +14,14 @@ class TestCDAWeb(object): """Unit tests for `pysat.instrument.methods.cdaweb`.""" - def setup(self): + def setup_method(self): """Set up the unit test environment for each method.""" - self.download_tags = pysatNASA.instruments.cnofs_plp.download_tags + self.download_tags = pysatNASA.instruments.timed_guvi.download_tags self.kwargs = {'tag': None, 'inst_id': None} return - def teardown(self): + def teardown_method(self): """Clean up the unit test environment after each method.""" del self.download_tags, self.kwargs @@ -31,7 +32,7 @@ def test_remote_file_list_connection_error_append(self): with pytest.raises(Exception) as excinfo: # Giving a bad remote_site address yields similar ConnectionError - cdw.list_remote_files(tag='', inst_id='', + cdw.list_remote_files(tag='sdr-imaging', inst_id='high_res', supported_tags=self.download_tags, remote_url='https://bad/path') @@ -80,11 +81,33 @@ def test_bad_kwarg_list_remote_files(self, bad_key, bad_val, err_msg): assert str(excinfo.value).find(err_msg) >= 0 return - def test_remote_file_list_all(self): - """Test that remote_file_list works if start/stop dates unspecified.""" + @pytest.mark.parametrize("start, stop", + [(None, None), + (dt.datetime(2009, 1, 1), None), + (dt.datetime(2009, 1, 1), + dt.datetime(2009, 1, 1)), + (pds.Timestamp(2009, 1, 1), + pds.Timestamp(2009, 1, 2))]) + def test_remote_file_list_all(self, start, stop): + """Test that remote_file_list works for all start and stop cases.""" self.module = pysatNASA.instruments.cnofs_plp self.test_inst = pysat.Instrument(inst_module=self.module) - files = self.test_inst.remote_file_list() + files = self.test_inst.remote_file_list(start, stop) assert len(files) > 0 return + + @pytest.mark.parametrize("series_out", [(True), (False)]) + def test_cdas_remote_files(self, series_out): + """Test that cdas_list_remote_files can return pandas series.""" + start = dt.datetime(2009, 1, 1) + stop = dt.datetime(2009, 1, 2) + self.module = pysatNASA.instruments.cnofs_plp + self.test_inst = pysat.Instrument(inst_module=self.module) + files = self.test_inst.remote_file_list(start, stop, + series_out=series_out) + if series_out is True: + assert isinstance(files, pds.Series) + else: + assert isinstance(files, list) + return diff --git a/pysatNASA/tests/test_methods_platform.py b/pysatNASA/tests/test_methods_platform.py new file mode 100644 index 00000000..a1cd6611 --- /dev/null +++ b/pysatNASA/tests/test_methods_platform.py @@ -0,0 +1,129 @@ +"""Unit tests for the common NASA platform method attributes.""" + + +from pysatNASA.instruments import methods + + +class TestTIMEDMethods(object): + """Unit tests for `pysat.instruments.methods.timed`.""" + + def setup_method(self): + """Set up the unit test environment for each method.""" + self.names = ['see', 'saber', 'guvi'] + self.module = methods.timed + self.platform_str = '(TIMED)' + return + + def teardown_method(self): + """Clean up the unit test environment after each method.""" + + del self.names, self.module, self.platform_str + return + + def test_ack(self): + """Test that the acknowledgements reference the correct platform.""" + + assert self.module.ackn_str.find(self.platform_str) 
>= 0 + return + + def test_rules(self): + """Test that the rules of the road exist for each instrument.""" + + if hasattr(self.module, "rules_url"): + for name in self.names: + assert name in self.module.rules_url.keys( + ), "No rules URL for {:}".format(name) + return + + def test_ref(self): + """Test that all instruments have references.""" + + for name in self.names: + assert name in self.module.refs.keys( + ), "No reference for {:}".format(name) + return + + +class TestDMSPMethods(TestTIMEDMethods): + """Unit tests for `pysat.instruments.methods.dmsp`.""" + + def setup_method(self): + """Set up the unit test environment for each method.""" + self.names = ['ssusi'] + self.module = methods.dmsp + self.platform_str = '(DMSP)' + return + + def teardown_method(self): + """Clean up the unit test environment after each method.""" + + del self.names, self.module, self.platform_str + return + + +class TestCNOFSMethods(TestTIMEDMethods): + """Unit tests for `pysat.instruments.methods.cnofs`.""" + + def setup_method(self): + """Set up the unit test environment for each method.""" + self.names = ['ivm', 'plp', 'vefi'] + self.module = methods.cnofs + self.platform_str = '(C/NOFS)' + return + + def teardown_method(self): + """Clean up the unit test environment after each method.""" + + del self.names, self.module, self.platform_str + return + + +class TestDE2Methods(TestTIMEDMethods): + """Unit tests for `pysat.instruments.methods.de2`.""" + + def setup_method(self): + """Set up the unit test environment for each method.""" + self.names = ['lang', 'nacs', 'rpa', 'wats'] + self.module = methods.de2 + self.platform_str = 'Dynamics Explorer 2' + return + + def teardown_method(self): + """Clean up the unit test environment after each method.""" + + del self.names, self.module, self.platform_str + return + + +class TestSES14Methods(TestTIMEDMethods): + """Unit tests for `pysat.instruments.methods.ses14`.""" + + def setup_method(self): + """Set up the unit test environment for each method.""" + self.names = ['gold'] + self.module = methods.ses14 + self.platform_str = 'Global-scale Observations of the Limb and Disk' + return + + def teardown_method(self): + """Clean up the unit test environment after each method.""" + + del self.names, self.module, self.platform_str + return + + +class TestGPSMethods(TestTIMEDMethods): + """Unit tests for `pysat.instruments.methods.gps`.""" + + def setup_method(self): + """Set up the unit test environment for each method.""" + self.names = ['roti15min_jpl'] + self.module = methods.gps + self.platform_str = 'GPS Total Electron Content' + return + + def teardown_method(self): + """Clean up the unit test environment after each method.""" + + del self.names, self.module, self.platform_str + return diff --git a/pysatNASA/tests/test_omni_hro.py b/pysatNASA/tests/test_omni_hro.py index 5a3c3f92..b3032024 100644 --- a/pysatNASA/tests/test_omni_hro.py +++ b/pysatNASA/tests/test_omni_hro.py @@ -15,7 +15,7 @@ class TestOMNICustom(object): """Unit tests for `pysat.instrument.methods.omni`.""" - def setup(self): + def setup_method(self): """Set up the unit test environment for each method.""" # Load a test instrument @@ -25,10 +25,9 @@ def setup(self): self.test_inst.load(2009, 1) # Recast time in minutes rather than seconds - self.test_inst.data.index = \ - pds.Series([t + dt.timedelta(seconds=(60 - i)) - + dt.timedelta(minutes=i) - for i, t in enumerate(self.test_inst.data.index)]) + self.test_inst.data.index = pds.Series( + [t + dt.timedelta(seconds=(60 - i)) + 
dt.timedelta(minutes=i) + for i, t in enumerate(self.test_inst.data.index)]) # Add IMF data self.test_inst['BX_GSM'] = pds.Series([3.17384966, 5.98685138, @@ -63,7 +62,7 @@ def setup(self): return - def teardown(self): + def teardown_method(self): """Clean up the unit test environment after each method.""" del self.test_inst @@ -168,7 +167,7 @@ def test_dayside_recon(self): class TestDeprecation(object): """Unit tests for deprecation warnings in `pysat.instrument.omni_hro`.""" - def setup(self): + def setup_method(self): """Set up the unit test environment for each method.""" # Use an empty instrument to test redirect @@ -178,7 +177,7 @@ def setup(self): return - def teardown(self): + def teardown_method(self): """Clean up test environment after each method.""" del self.test_inst diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 00000000..a1dafc41 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,10 @@ +netCDF4 +requests +beautifulsoup4 +lxml +cdflib>=0.4.4 +numpy +pandas +pysat>=3.0.4 +cdasws +xarray diff --git a/tox.ini b/setup.cfg similarity index 100% rename from tox.ini rename to setup.cfg From e38bcd5b45bba1ae560c1a1867a3b8894391f9ca Mon Sep 17 00:00:00 2001 From: Jeff Klenzing Date: Thu, 6 Apr 2023 14:09:37 -0400 Subject: [PATCH 20/38] STY: clean up reqs --- pyproject.toml | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 9148089c..5c14a2a2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "pysatNASA" -version = "0.0.4.1" +version = "0.0.5" description = "pysat support for NASA Instruments" readme = "README.md" requires-python = ">=3.8" @@ -33,14 +33,15 @@ keywords = [ "ionosphere" ] dependencies = [ - "netCDF4", - "requests", "beautifulsoup4", - "lxml", + "cdasws", "cdflib >= 0.4.4", + "lxml", + "netCDF4", "numpy", "pandas", "pysat >= 3.0.4", + "requests", "xarray" ] From fc14e6207e57e67b006f16f53e8f094c8ae7f394 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing Date: Thu, 6 Apr 2023 14:09:49 -0400 Subject: [PATCH 21/38] STY: restore backup requirement lists --- requirements.txt | 8 ++++---- test_requirements.txt | 13 +++++++++++++ 2 files changed, 17 insertions(+), 4 deletions(-) create mode 100644 test_requirements.txt diff --git a/requirements.txt b/requirements.txt index a1dafc41..6aedff4e 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,10 +1,10 @@ -netCDF4 -requests beautifulsoup4 -lxml +cdasws cdflib>=0.4.4 +lxml +netCDF4 numpy pandas pysat>=3.0.4 -cdasws +requests xarray diff --git a/test_requirements.txt b/test_requirements.txt new file mode 100644 index 00000000..6c5e6eff --- /dev/null +++ b/test_requirements.txt @@ -0,0 +1,13 @@ +coveralls<3.3 +extras_require +flake8 +flake8-docstrings +hacking>=1.0 +ipython +m2r2 +numpydoc +pytest +pytest-cov +pytest-ordering +sphinx +sphinx_rtd_theme From 1e7e65a5f6852c33ecec8f44b8e31d179d310331 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing Date: Thu, 6 Apr 2023 16:08:04 -0400 Subject: [PATCH 22/38] MAINT: delete version --- pysatNASA/version.txt | 1 - 1 file changed, 1 deletion(-) delete mode 100644 pysatNASA/version.txt diff --git a/pysatNASA/version.txt b/pysatNASA/version.txt deleted file mode 100644 index 81340c7e..00000000 --- a/pysatNASA/version.txt +++ /dev/null @@ -1 +0,0 @@ -0.0.4 From 92846cb1b31376dfe09ab9df32c6eabc2a357445 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing <19592220+jklenzing@users.noreply.github.com> Date: Fri, 5 May 2023 14:30:48 -0400 Subject: [PATCH 
23/38] Apply suggestions from code review Co-authored-by: Angeline Burrell --- CHANGELOG.md | 2 +- pyproject.toml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 9952469a..23d73a47 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -47,7 +47,7 @@ This project adheres to [Semantic Versioning](https://semver.org/). * Removed version cap for xarray * Added manual workflow to check that latest RC is installable through test pip * Update meta label type for instruments - * Use pyproject.toml to manage setup + * Use pyproject.toml to manage installation and metadata ## [0.0.4] - 2022-11-07 * Update instrument tests with new test class diff --git a/pyproject.toml b/pyproject.toml index 5c14a2a2..a5628f4d 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -7,10 +7,10 @@ name = "pysatNASA" version = "0.0.5" description = "pysat support for NASA Instruments" readme = "README.md" -requires-python = ">=3.8" +requires-python = ">=3.6" license = {file = "LICENSE"} authors = [ - {name = "Jeff Klenzing, et al", email = "pysat.developers@gmail.com"}, + {name = "Jeff Klenzing, et al.", email = "pysat.developers@gmail.com"}, ] classifiers = [ "Development Status :: 3 - Alpha", From 08000ab7323517a3498e122365bbee94253e3092 Mon Sep 17 00:00:00 2001 From: jklenzing Date: Fri, 5 May 2023 16:30:04 -0400 Subject: [PATCH 24/38] DOC: update docs --- README.md | 4 ++-- docs/installation.rst | 4 ++-- pyproject.toml | 6 +++++- 3 files changed, 9 insertions(+), 5 deletions(-) diff --git a/README.md b/README.md index 9bdce9c5..c0235964 100644 --- a/README.md +++ b/README.md @@ -20,14 +20,14 @@ some examples on how to use the routines pysatNASA uses common Python modules, as well as modules developed by and for the Space Physics community. This module officially supports -Python 3.8+. +Python 3.6+. | Common modules | Community modules | Optional Modules | | ---------------- | ----------------- |------------------| | beautifulsoup4 | cdflib | pysatCDF | | lxml | pysat>=3.0.4 | | | netCDF4 | | | -| numpy<1.24 | | | +| numpy | | | | pandas | | | | requests | | | | xarray | | | diff --git a/docs/installation.rst b/docs/installation.rst index ae8eedda..f74c7e4b 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -53,10 +53,10 @@ Installation Options pip install --user . - C. Install with the intent to develop locally:: + C. Install with the intent to change the code:: - pip install -e . + pip install --user -e . .. 
extras-require:: pysatcdf :pyproject: diff --git a/pyproject.toml b/pyproject.toml index a5628f4d..7d65f625 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,6 +21,7 @@ classifiers = [ "License :: OSI Approved :: BSD License", "Natural Language :: English", "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", @@ -30,7 +31,10 @@ classifiers = [ ] keywords = [ "pysat", - "ionosphere" + "ionosphere", + "magnetosphere", + "solar wind", + "thermosphere" ] dependencies = [ "beautifulsoup4", From 23da8f8c968ee1ce9d82ab71a06748ff9f151265 Mon Sep 17 00:00:00 2001 From: jklenzing Date: Fri, 5 May 2023 16:31:41 -0400 Subject: [PATCH 25/38] TST: update tested versions --- .github/workflows/main.yml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 80643508..899b2005 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -11,12 +11,15 @@ jobs: fail-fast: false matrix: os: ["ubuntu-latest", "macos-latest", "windows-latest"] - python-version: ["3.9", "3.10"] + python-version: ["3.10", "3.11"] numpy_ver: ["latest"] include: - - python-version: "3.8" + - python-version: "3.9" numpy_ver: "1.21" - os: "ubuntu-latest" + os: ubuntu-latest + - python-version: "3.6.8" + numpy_ver: "1.19.5" + os: "ubuntu-20.04" name: Python ${{ matrix.python-version }} on ${{ matrix.os }} with numpy ${{ matrix.numpy_ver }} runs-on: ${{ matrix.os }} From a8567676688d3f2f585443102c8313ad1dc3a862 Mon Sep 17 00:00:00 2001 From: jklenzing Date: Fri, 5 May 2023 16:32:08 -0400 Subject: [PATCH 26/38] DOC: update meta --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 7d65f625..0f5cc26f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -22,9 +22,9 @@ classifiers = [ "Natural Language :: English", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.6", - "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", "Operating System :: POSIX :: Linux", "Operating System :: MacOS :: MacOS X", "Operating System :: Microsoft :: Windows" From 1d48f497a8ff6c0d0a2962d60a58dbb70605e77e Mon Sep 17 00:00:00 2001 From: Jeff Klenzing <19592220+jklenzing@users.noreply.github.com> Date: Tue, 16 May 2023 13:51:31 +0900 Subject: [PATCH 27/38] BUG: operational tests --- .github/workflows/main.yml | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 45a03045..2b614b9f 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -34,8 +34,15 @@ jobs: if: ${{ matrix.os == 'macos-latest' }} run: brew reinstall gcc + - name: Install Operational dependencies + if: ${{ matrix.numpy_ver == "1.19.5"}} + run: | + pip install --no-cache-dir numpy==${{ matrix.numpy_ver }} + pip install -r requirements.txt + pip install -r test_requirements.txt + - name: Install NEP29 dependencies - if: ${{ matrix.numpy_ver != 'latest'}} + if: ${{ matrix.numpy_ver == "1.21"}} run: | pip install --no-cache-dir numpy==${{ matrix.numpy_ver }} pip install --upgrade-strategy only-if-needed .[test] From 859635bedd790341e1d0fe47ea4b60da019a74fb Mon Sep 17 00:00:00 2001 From: Jeff Klenzing <19592220+jklenzing@users.noreply.github.com> 
Date: Tue, 16 May 2023 13:55:20 +0900 Subject: [PATCH 28/38] Update main.yml --- .github/workflows/main.yml | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 2b614b9f..2fec7b86 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -13,13 +13,16 @@ jobs: os: ["ubuntu-latest", "macos-latest", "windows-latest"] python-version: ["3.10", "3.11"] numpy_ver: ["latest"] + test_config: ["latest"] include: - python-version: "3.9" numpy_ver: "1.21" os: ubuntu-latest + test_config: "NEP29" - python-version: "3.6.8" numpy_ver: "1.19.5" os: "ubuntu-20.04" + test_config: "Ops" name: Python ${{ matrix.python-version }} on ${{ matrix.os }} with numpy ${{ matrix.numpy_ver }} runs-on: ${{ matrix.os }} @@ -35,20 +38,20 @@ jobs: run: brew reinstall gcc - name: Install Operational dependencies - if: ${{ matrix.numpy_ver == "1.19.5"}} + if: ${{ matrix.test_config == "Ops"}} run: | pip install --no-cache-dir numpy==${{ matrix.numpy_ver }} pip install -r requirements.txt pip install -r test_requirements.txt - name: Install NEP29 dependencies - if: ${{ matrix.numpy_ver == "1.21"}} + if: ${{ matrix.test_config == "NEP29"}} run: | pip install --no-cache-dir numpy==${{ matrix.numpy_ver }} pip install --upgrade-strategy only-if-needed .[test] - name: Install standard dependencies - if: ${{ matrix.numpy_ver == 'latest'}} + if: ${{ matrix.test_config == 'latest'}} run: | pip install .[test] From 0149729268778969209262c836455065f175c51c Mon Sep 17 00:00:00 2001 From: Jeff Klenzing <19592220+jklenzing@users.noreply.github.com> Date: Tue, 16 May 2023 13:58:47 +0900 Subject: [PATCH 29/38] BUG: quotes --- .github/workflows/main.yml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 2fec7b86..357befc7 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -38,14 +38,14 @@ jobs: run: brew reinstall gcc - name: Install Operational dependencies - if: ${{ matrix.test_config == "Ops"}} + if: ${{ matrix.test_config == 'Ops'}} run: | pip install --no-cache-dir numpy==${{ matrix.numpy_ver }} pip install -r requirements.txt pip install -r test_requirements.txt - name: Install NEP29 dependencies - if: ${{ matrix.test_config == "NEP29"}} + if: ${{ matrix.test_config == 'NEP29'}} run: | pip install --no-cache-dir numpy==${{ matrix.numpy_ver }} pip install --upgrade-strategy only-if-needed .[test] From e0ffbad65f36789f2ffd706d6da9dd7e69cd99c5 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing <19592220+jklenzing@users.noreply.github.com> Date: Tue, 16 May 2023 14:18:28 +0900 Subject: [PATCH 30/38] Update main.yml --- .github/workflows/main.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 357befc7..2bb51850 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -43,6 +43,7 @@ jobs: pip install --no-cache-dir numpy==${{ matrix.numpy_ver }} pip install -r requirements.txt pip install -r test_requirements.txt + pip install . 
- name: Install NEP29 dependencies if: ${{ matrix.test_config == 'NEP29'}} From f554299b8ab7e8032ee646034697f800d689b6a8 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing <19592220+jklenzing@users.noreply.github.com> Date: Thu, 18 May 2023 08:58:05 +0900 Subject: [PATCH 31/38] Update __init__.py --- pysatNASA/__init__.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/pysatNASA/__init__.py b/pysatNASA/__init__.py index 2f8f3370..31d763a5 100644 --- a/pysatNASA/__init__.py +++ b/pysatNASA/__init__.py @@ -6,10 +6,10 @@ """ -import pkg_resources +import importlib from pysatNASA import constellations # noqa F401 from pysatNASA import instruments # noqa F401 # set version -__version__ = pkg_resources.get_distribution('pysatNASA').version +__version__ = importlib.metadata.version('pysatNASA') From edd0c41b08340f733944a5774ec441752c501ea5 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing <19592220+jklenzing@users.noreply.github.com> Date: Thu, 18 May 2023 09:01:43 +0900 Subject: [PATCH 32/38] Update __init__.py --- pysatNASA/__init__.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/pysatNASA/__init__.py b/pysatNASA/__init__.py index 31d763a5..ea9f8ff2 100644 --- a/pysatNASA/__init__.py +++ b/pysatNASA/__init__.py @@ -7,9 +7,14 @@ """ import importlib +import importlib_metadata from pysatNASA import constellations # noqa F401 from pysatNASA import instruments # noqa F401 # set version -__version__ = importlib.metadata.version('pysatNASA') +try: + __version__ = importlib.metadata.version('pysatNASA') +except AttributeError: + # Python 3.6 requires a different version + __version__ = importlib_metadata.version('pysatNASA') From ba59f29e7f5ce5d89f09b1339d1198677361b331 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing <19592220+jklenzing@users.noreply.github.com> Date: Thu, 18 May 2023 09:55:18 +0900 Subject: [PATCH 33/38] Update pyproject.toml --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 0f5cc26f..55fc7da7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools", "pip >= 10"] +requires = ["setuptools >= 38.6", "pip >= 10"] build-backend = "setuptools.build_meta" [project] From 771939dad1059957e4b76e9e526aab2b0f91aa78 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing <19592220+jklenzing@users.noreply.github.com> Date: Thu, 18 May 2023 10:02:46 +0900 Subject: [PATCH 34/38] Update pyproject.toml --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 55fc7da7..420becd5 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [build-system] requires = ["setuptools >= 38.6", "pip >= 10"] -build-backend = "setuptools.build_meta" +build-backend = "setuptools.build_meta:__legacy__" [project] name = "pysatNASA" From 879f3a503cd1ec61ff40510d19ac8865a904d5a9 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing <19592220+jklenzing@users.noreply.github.com> Date: Thu, 18 May 2023 10:42:47 +0900 Subject: [PATCH 35/38] Update setup.cfg --- setup.cfg | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/setup.cfg b/setup.cfg index ccced987..17163008 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,3 +1,7 @@ +[metadata] +name = "pysatNASA" +version = "0.0.5" + [flake8] max-line-length = 80 ignore = From 5e0cd34a855ba5d09bbf1fa83193b5566ebe39c6 Mon Sep 17 00:00:00 2001 From: Jeff Klenzing <19592220+jklenzing@users.noreply.github.com> Date: Thu, 18 May 2023 10:57:12 +0900 Subject: [PATCH 36/38] Update 
setup.cfg

---
 setup.cfg | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/setup.cfg b/setup.cfg
index 17163008..b507df90 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,6 +1,6 @@
 [metadata]
-name = "pysatNASA"
-version = "0.0.5"
+name = pysatNASA
+version = 0.0.5
 [flake8]
 max-line-length = 80

From f86fd9d3fea6072350f2387b047c48b2d449d520 Mon Sep 17 00:00:00 2001
From: jklenzing
Date: Tue, 30 May 2023 13:36:08 -0400
Subject: [PATCH 37/38] STY: update meta

---
 pyproject.toml | 5 ++---
 setup.cfg      | 1 -
 2 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/pyproject.toml b/pyproject.toml
index 420becd5..fe9c33c5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,10 +1,9 @@
 [build-system]
-requires = ["setuptools >= 38.6", "pip >= 10"]
-build-backend = "setuptools.build_meta:__legacy__"
+requires = ["setuptools", "pip >= 10"]
+build-backend = "setuptools.build_meta"
 [project]
 name = "pysatNASA"
-version = "0.0.5"
 description = "pysat support for NASA Instruments"
 readme = "README.md"
 requires-python = ">=3.6"
diff --git a/setup.cfg b/setup.cfg
index b507df90..400e122a 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,5 +1,4 @@
 [metadata]
-name = pysatNASA
 version = 0.0.5
 [flake8]
 max-line-length = 80

From 2b41e01dbbd5e662a1cd3d945a0e67128fadb64d Mon Sep 17 00:00:00 2001
From: jklenzing
Date: Tue, 30 May 2023 13:40:07 -0400
Subject: [PATCH 38/38] BUG: fix metadata

---
 pyproject.toml | 1 +
 setup.cfg      | 3 +++
 2 files changed, 4 insertions(+)

diff --git a/pyproject.toml b/pyproject.toml
index fe9c33c5..0f5cc26f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -4,6 +4,7 @@ build-backend = "setuptools.build_meta"
 [project]
 name = "pysatNASA"
+version = "0.0.5"
 description = "pysat support for NASA Instruments"
 readme = "README.md"
 requires-python = ">=3.6"
diff --git a/setup.cfg b/setup.cfg
index 400e122a..08661e10 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,4 +1,7 @@
+# name and version must be maintained here as well for python 3.6 compatibility
+
 [metadata]
+name = pysatNASA
 version = 0.0.5
 [flake8]
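
Patches 35 through 38 above end up carrying the package name and version in two places at once: pyproject.toml for modern pip and setuptools, and setup.cfg for installs on Python 3.6, as the comment added in patch 38 notes. Both copies therefore have to be bumped together on every release. A minimal consistency check, assuming it is run from the repository root on Python 3.11+ (for the standard-library tomllib parser), could look like the sketch below; the script is illustrative only and is not part of the patch series.

    # Sketch: verify that the hand-maintained version strings in pyproject.toml
    # and setup.cfg agree.  Assumes Python 3.11+ (tomllib) and the file layout
    # shown in the diffs above.
    import configparser
    import tomllib

    with open("pyproject.toml", "rb") as fh:
        pyproject_version = tomllib.load(fh)["project"]["version"]

    config = configparser.ConfigParser()
    config.read("setup.cfg")
    setup_cfg_version = config["metadata"]["version"]

    # Fail loudly if the duplicated metadata has drifted apart.
    assert pyproject_version == setup_cfg_version, (pyproject_version,
                                                    setup_cfg_version)

Wiring a check like this into the test suite or a release checklist would catch the kind of drift that patch 38 ("BUG: fix metadata") repairs by hand.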
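
Patches 31 and 32 swap the pkg_resources version lookup in pysatNASA/__init__.py for importlib.metadata, with the importlib_metadata backport as a fallback on Python 3.6. One common way to write the same fallback, shown here purely as a sketch rather than the package's actual code, is to branch on the import instead of catching AttributeError:

    # Sketch: resolve the installed package version at runtime.  Uses the
    # stdlib importlib.metadata on Python 3.8+ and assumes the
    # importlib_metadata backport is installed on older interpreters.
    try:
        from importlib.metadata import PackageNotFoundError, version
    except ImportError:  # Python < 3.8
        from importlib_metadata import PackageNotFoundError, version

    try:
        __version__ = version("pysatNASA")
    except PackageNotFoundError:
        # Not installed as a distribution, e.g. running from a bare checkout.
        __version__ = "unknown"

Either spelling reads the version from the installed distribution metadata, which is what allows the duplicated version.txt file to be deleted earlier in the series.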
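
The repeated setup/teardown to setup_method/teardown_method renames in the test patches above follow pytest's xunit-style hook names; recent pytest releases deprecate the bare setup and teardown spellings, while the *_method hooks still run before and after every test method in a class. A toy example of the pattern, using a made-up class and attribute rather than anything from the package, is:

    class TestExample(object):
        """Toy test class using the renamed per-method pytest hooks."""

        def setup_method(self):
            """Run before each test method in this class."""
            self.value = 42
            return

        def teardown_method(self):
            """Run after each test method in this class."""
            del self.value
            return

        def test_value(self):
            """Check the attribute created in setup_method."""
            assert self.value == 42
            return

Because the hooks run once per method, every test starts from freshly built attributes, mirroring how the instrument modules and download_tags are rebuilt for each test in the patches above.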