From 463c64ab13351fcfc401e2b4423719a21d1bcff4 Mon Sep 17 00:00:00 2001 From: matt-long Date: Sun, 29 Jan 2023 09:29:20 -0700 Subject: [PATCH] init --- .github/dependabot.yml | 11 + .github/workflows/ci.yaml | 67 ++ .github/workflows/pypi-release.yaml | 78 +++ .github/workflows/upstream-dev-ci.yaml | 89 +++ .gitignore | 135 ++++ .pre-commit-config.yaml | 47 ++ .prettierrc.toml | 3 + CHANGELOG.md | 1 + LICENSE | 201 ++++++ MANIFEST.in | 15 + README.md | 24 + ci/environment-docs.yml | 16 + ci/environment.yml | 26 + ci/upstream-dev-environment.yml | 7 + codecov.yml | 20 + docs/Makefile | 180 +++++ docs/make.bat | 242 +++++++ docs/source/changelog.md | 3 + docs/source/conf.py | 140 ++++ docs/source/explanation/index.md | 1 + docs/source/how-to/index.md | 11 + docs/source/how-to/install-xdev-project.md | 33 + docs/source/index.md | 34 + docs/source/reference/index.md | 1 + docs/source/tutorials/index.md | 0 ocean_c_lab/__init__.py | 10 + ocean_c_lab/box_models.py | 552 +++++++++++++++ ocean_c_lab/co2calc.py | 738 +++++++++++++++++++++ ocean_c_lab/csys_diag_definitions.yml | 30 + ocean_c_lab/forcing_variables.yml | 53 ++ ocean_c_lab/gasex.py | 69 ++ ocean_c_lab/glodap.py | 184 +++++ ocean_c_lab/state_variables.yml | 6 + pyproject.toml | 7 + readthedocs.yml | 7 + requirements.txt | 0 setup.cfg | 23 + setup.py | 51 ++ tests/test_box_model_simulation.py | 21 + tests/test_gasex.py | 21 + 40 files changed, 3157 insertions(+) create mode 100644 .github/dependabot.yml create mode 100644 .github/workflows/ci.yaml create mode 100644 .github/workflows/pypi-release.yaml create mode 100644 .github/workflows/upstream-dev-ci.yaml create mode 100644 .gitignore create mode 100644 .pre-commit-config.yaml create mode 100644 .prettierrc.toml create mode 100644 CHANGELOG.md create mode 100644 LICENSE create mode 100644 MANIFEST.in create mode 100644 README.md create mode 100644 ci/environment-docs.yml create mode 100644 ci/environment.yml create mode 100644 ci/upstream-dev-environment.yml create mode 100644 codecov.yml create mode 100644 docs/Makefile create mode 100644 docs/make.bat create mode 100644 docs/source/changelog.md create mode 100644 docs/source/conf.py create mode 100644 docs/source/explanation/index.md create mode 100644 docs/source/how-to/index.md create mode 100644 docs/source/how-to/install-xdev-project.md create mode 100644 docs/source/index.md create mode 100644 docs/source/reference/index.md create mode 100644 docs/source/tutorials/index.md create mode 100644 ocean_c_lab/__init__.py create mode 100644 ocean_c_lab/box_models.py create mode 100644 ocean_c_lab/co2calc.py create mode 100644 ocean_c_lab/csys_diag_definitions.yml create mode 100644 ocean_c_lab/forcing_variables.yml create mode 100644 ocean_c_lab/gasex.py create mode 100644 ocean_c_lab/glodap.py create mode 100644 ocean_c_lab/state_variables.yml create mode 100644 pyproject.toml create mode 100644 readthedocs.yml create mode 100644 requirements.txt create mode 100644 setup.cfg create mode 100644 setup.py create mode 100644 tests/test_box_model_simulation.py create mode 100644 tests/test_gasex.py diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..b4b3fa4 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,11 @@ +version: 2 +updates: + # - package-ecosystem: pip + # directory: "/" + # schedule: + # interval: daily + - package-ecosystem: 'github-actions' + directory: '/' + schedule: + # Check for updates once a week + interval: 'weekly' diff --git a/.github/workflows/ci.yaml 
b/.github/workflows/ci.yaml
new file mode 100644
index 0000000..53d2e10
--- /dev/null
+++ b/.github/workflows/ci.yaml
@@ -0,0 +1,67 @@
+name: CI
+on:
+  push:
+  pull_request:
+  schedule:
+    - cron: '0 0 * * *' # Daily “At 00:00”
+  workflow_dispatch: # allows you to trigger manually
+
+jobs:
+  skip-duplicate-jobs:
+    runs-on: ubuntu-latest
+    if: |
+      github.repository == 'c-worthy-ocean/ocean-c-lab'
+    outputs:
+      should_skip: ${{ steps.skip_check.outputs.should_skip }}
+    steps:
+      - id: skip_check
+        uses: fkirc/skip-duplicate-actions@v4.0.0
+        with:
+          # For workflows which are triggered concurrently with the same
+          # contents, attempt to execute them exactly once.
+          concurrent_skipping: 'same_content_newer'
+          paths_ignore: '["**/doc/**"]'
+
+  build:
+    name: python-${{ matrix.python-version }}
+    needs: skip-duplicate-jobs
+    if: ${{ needs.skip-duplicate-jobs.outputs.should_skip != 'true' }}
+    runs-on: ubuntu-latest
+    defaults:
+      run:
+        shell: bash -l {0}
+    strategy:
+      fail-fast: false
+      matrix:
+        python-version: ['3.8', '3.9', '3.10']
+    steps:
+      - uses: actions/checkout@v3
+      - uses: conda-incubator/setup-miniconda@v2
+        with:
+          channels: conda-forge,nodefaults
+          channel-priority: strict
+          activate-environment: c_worthy
+          auto-update-conda: false
+          python-version: ${{ matrix.python-version }}
+          environment-file: ci/environment.yml
+          mamba-version: '*'
+          use-mamba: true
+          miniforge-variant: Mambaforge
+
+      - name: Install ocean-c-lab
+        run: |
+          python -m pip install -e . --no-deps --force-reinstall
+          conda list
+
+      - name: Run Tests
+        run: |
+          python -m pytest --cov=ocean_c_lab --cov-report=xml
+
+      - name: Upload code coverage to Codecov
+        uses: codecov/codecov-action@v3.1.0
+        with:
+          file: ./coverage.xml
+          flags: unittests
+          env_vars: OS,PYTHON
+          name: codecov-umbrella
+          fail_ci_if_error: false
diff --git a/.github/workflows/pypi-release.yaml b/.github/workflows/pypi-release.yaml
new file mode 100644
index 0000000..87e9605
--- /dev/null
+++ b/.github/workflows/pypi-release.yaml
@@ -0,0 +1,78 @@
+name: Build distribution
+on:
+  release:
+    types:
+      - published
+  push:
+
+jobs:
+  build-artifacts:
+    runs-on: ubuntu-latest
+    if: github.repository == 'c-worthy-ocean/ocean-c-lab'
+    steps:
+      - uses: actions/checkout@v3
+        with:
+          fetch-depth: 0
+      - uses: actions/setup-python@v3.1.2
+        name: Install Python
+        with:
+          python-version: 3.8
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          python -m pip install build setuptools setuptools-scm wheel twine check-manifest
+
+      - name: Build tarball and wheels
+        run: |
+          git clean -xdf
+          git restore -SW .
+          python -m build --sdist --wheel .
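+      # Note: the check below assumes the package version is derived from git
+      # tags via setuptools-scm (hence `fetch-depth: 0` in the checkout step);
+      # a dist/ocean-c-lab-0.0.0.tar.gz artifact indicates no tag was found.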
+ + - name: Check built artifacts + run: | + python -m twine check dist/* + pwd + if [ -f dist/ocean-c-lab-0.0.0.tar.gz ]; then + echo "❌ INVALID VERSION NUMBER" + exit 1 + else + echo "✅ Looks good" + fi + - uses: actions/upload-artifact@v3 + with: + name: releases + path: dist + + test-built-dist: + needs: build-artifacts + runs-on: ubuntu-latest + steps: + - uses: actions/setup-python@v3.1.2 + name: Install Python + with: + python-version: 3.8 + - uses: actions/download-artifact@v3 + with: + name: releases + path: dist + - name: List contents of built dist + run: | + ls -ltrh + ls -ltrh dist + + upload-to-pypi: + needs: test-built-dist + if: github.event_name == 'release' + runs-on: ubuntu-latest + steps: + - uses: actions/download-artifact@v3 + with: + name: releases + path: dist + - name: Publish package to PyPI + uses: pypa/gh-action-pypi-publish@v1.5.0 + with: + user: __token__ + password: ${{ secrets.PYPI_TOKEN }} + verbose: true diff --git a/.github/workflows/upstream-dev-ci.yaml b/.github/workflows/upstream-dev-ci.yaml new file mode 100644 index 0000000..9d7ac9d --- /dev/null +++ b/.github/workflows/upstream-dev-ci.yaml @@ -0,0 +1,89 @@ +name: Upstream CI +on: + push: + schedule: + - cron: '0 0 * * *' # Daily “At 00:00” UTC + workflow_dispatch: # allows you to trigger the workflow run manually + +jobs: + upstream-dev: + name: upstream-dev + runs-on: ubuntu-latest + defaults: + run: + shell: bash -l {0} + strategy: + fail-fast: false + matrix: + python-version: ['3.10'] + steps: + - uses: actions/checkout@v3 + - uses: conda-incubator/setup-miniconda@v2 + id: conda + with: + channels: conda-forge,nodefaults + channel-priority: strict + activate-environment: c-worthy-upstream + auto-update-conda: false + python-version: ${{ matrix.python-version }} + environment-file: ci/upstream-dev-environment.yml + mamba-version: '*' + use-mamba: true + miniforge-variant: Mambaforge + + - name: Install ocean-c-lab + id: install + run: | + python -m pip install -e . 
--no-deps --force-reinstall
+          conda list
+
+      - name: Run Tests
+        id: test
+        run: |
+          python -m pytest
+
+      - name: Report Status
+        if: |
+          always()
+          && (steps.conda.outcome != 'success' || steps.install.outcome != 'success' || steps.test.outcome != 'success')
+
+        uses: actions/github-script@v6
+        with:
+          script: |
+            const title = '⚠️ Upstream CI Failed ⚠️'
+            const creator = 'github-actions[bot]'
+            const issueLabel = 'CI'
+            const workflow_url = `https://github.com/${process.env.GITHUB_REPOSITORY}/actions/runs/${process.env.GITHUB_RUN_ID}`
+            const issue_body = `[Workflow Run URL](${workflow_url})\n\n`
+            let foundIssue = false
+            const issues = await github.rest.issues.listForRepo({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+            })
+            for (let issue of issues.data) {
+              if (
+                issue.user.login === creator &&
+                issue.state === 'open' &&
+                issue.labels.some((label) => label.name === issueLabel)
+              ) {
+                github.rest.issues.update({
+                  owner: context.repo.owner,
+                  repo: context.repo.repo,
+                  issue_number: issue.number,
+                  body: issue_body,
+                })
+                core.info(`Updated an existing issue: ${issue.number}.`)
+                foundIssue = true
+                break
+              }
+            }
+            if (!foundIssue) {
+              await github.rest.issues.create({
+                owner: context.repo.owner,
+                repo: context.repo.repo,
+                title: title,
+                body: issue_body,
+                labels: [issueLabel],
+              })
+              core.info('Opened a new issue')
+            }
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..405e18a
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,135 @@
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+pip-wheel-metadata/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+.python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g.
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# Dask +dask-worker-space/ + +# Vscode +.vscode/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 0000000..04ab34a --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,47 @@ +ci: + autoupdate_schedule: monthly + +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.1.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-docstring-first + - id: check-json + - id: check-yaml + - id: double-quote-string-fixer + - id: debug-statements + - id: mixed-line-ending + + - repo: https://github.com/asottile/pyupgrade + rev: v2.31.1 + hooks: + - id: pyupgrade + args: + - '--py37-plus' + + - repo: https://github.com/psf/black + rev: 22.3.0 + hooks: + - id: black-jupyter + + - repo: https://github.com/keewis/blackdoc + rev: v0.3.4 + hooks: + - id: blackdoc + + - repo: https://github.com/PyCQA/flake8 + rev: 4.0.1 + hooks: + - id: flake8 + + - repo: https://github.com/PyCQA/isort + rev: 5.10.1 + hooks: + - id: isort + + - repo: https://github.com/pre-commit/mirrors-prettier + rev: v2.6.2 + hooks: + - id: prettier diff --git a/.prettierrc.toml b/.prettierrc.toml new file mode 100644 index 0000000..addd6d3 --- /dev/null +++ b/.prettierrc.toml @@ -0,0 +1,3 @@ +tabWidth = 2 +semi = false +singleQuote = true diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..825c32f --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1 @@ +# Changelog diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..a350f8d --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2022-Onwards NCAR Xdev Team Developers + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000..eecc217
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,15 @@
+include CHANGELOG.md
+include CONTRIBUTING.md
+include LICENSE
+include README.md
+include requirements.txt
+include pyproject.toml
+
+recursive-include docs/source *
+include docs/Makefile docs/make.bat
+recursive-include ocean_c_lab *.yml *.yaml
+recursive-include ocean_c_lab *.py
+recursive-exclude * __pycache__
+recursive-exclude * *.py[co]
+prune tests*
+prune ci*
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..8113972
--- /dev/null
+++ b/README.md
@@ -0,0 +1,24 @@
+# python-project-template
+
+| CI          | [![GitHub Workflow Status][github-ci-badge]][github-ci-link] [![Code Coverage Status][codecov-badge]][codecov-link] [![pre-commit.ci status][pre-commit.ci-badge]][pre-commit.ci-link] |
+| :---------- | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: |
+| **Docs**    | [![Documentation Status][rtd-badge]][rtd-link] |
+| **Package** | [![Conda][conda-badge]][conda-link] [![PyPI][pypi-badge]][pypi-link] |
+| **License** | [![License][license-badge]][repo-link] |
+
+An Xdev template for developing a Python project/package
+
+[github-ci-badge]: https://img.shields.io/github/workflow/status/ncar-xdev/python-project-template/CI?label=CI&logo=github
+[github-ci-link]: https://github.com/ncar-xdev/xdev-project/actions?query=workflow%3ACI
+[codecov-badge]: https://img.shields.io/codecov/c/github/ncar-xdev/xdev-project.svg?logo=codecov
+[codecov-link]: https://codecov.io/gh/ncar-xdev/xdev-project
+[rtd-badge]: https://img.shields.io/readthedocs/xdev-project/latest.svg
+[rtd-link]: https://xdev-project.readthedocs.io/en/latest/?badge=latest
+[pypi-badge]: https://img.shields.io/pypi/v/xdev-project?logo=pypi
+[pypi-link]: https://pypi.org/project/xdev-project
+[conda-badge]: https://img.shields.io/conda/vn/conda-forge/xdev-project?logo=anaconda
+[conda-link]: https://anaconda.org/conda-forge/xdev-project
+[license-badge]: https://img.shields.io/github/license/ncar-xdev/python-project-template
+[repo-link]: https://github.com/ncar-xdev/python-project-template
+[pre-commit.ci-badge]: https://results.pre-commit.ci/badge/github/ncar-xdev/python-project-template/main.svg
+[pre-commit.ci-link]: https://results.pre-commit.ci/latest/github/ncar-xdev/python-project-template/main
diff --git a/ci/environment-docs.yml b/ci/environment-docs.yml
new file mode 100644
index 0000000..222ba38
--- /dev/null
+++ b/ci/environment-docs.yml
@@ -0,0 +1,16 @@
+name: c-worthy-docs
+channels:
+  - conda-forge
+  - nodefaults
+dependencies:
+  - furo
+  - jupyterlab
+  - myst-nb
+  - pip
+  - python=3.10
+  - sphinx-autobuild
+  - sphinx-copybutton
+  - sphinx-inline-tabs
+  - pip:
+      - sphinxext-opengraph
+      - -e ..
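+      # `-e ..` above installs this package itself in editable mode so that
+      # Sphinx autodoc can import it when building the docs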
diff --git a/ci/environment.yml b/ci/environment.yml new file mode 100644 index 0000000..97ecf23 --- /dev/null +++ b/ci/environment.yml @@ -0,0 +1,26 @@ +name: c-worthy +channels: + - conda-forge +dependencies: + - codecov + - dask + - jupyter + - jupyter-book + - jupyter_client==6.1.12 + - jupyterlab + - matplotlib + - myst-nb + - nbformat + - nbsphinx + - nc-time-axis + - netcdf4 + - numpydoc + - pip + - pre-commit + - pytest + - pytest-cov + - scipy + - xarray + - pip: + - sphinx-click + - sphinxcontrib-autoyaml diff --git a/ci/upstream-dev-environment.yml b/ci/upstream-dev-environment.yml new file mode 100644 index 0000000..2395610 --- /dev/null +++ b/ci/upstream-dev-environment.yml @@ -0,0 +1,7 @@ +name: c-worthy-upstream +channels: + - conda-forge + - nodefaults +dependencies: + - pytest-cov + - pre-commit diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 0000000..aa1da5f --- /dev/null +++ b/codecov.yml @@ -0,0 +1,20 @@ +codecov: + require_ci_to_pass: no + max_report_age: off + +comment: false + +ignore: + - 'tests/*.py' + - 'setup.py' + +coverage: + precision: 2 + round: down + status: + project: + default: + target: 95 + informational: true + patch: off + changes: off diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 0000000..3032ce6 --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,180 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# User-friendly check for sphinx-build +ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) +$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) +endif + +# Internal variables. 
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  singlehtml to make a single large HTML file"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  devhelp    to make HTML files and a Devhelp project"
	@echo "  epub       to make an epub"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
	@echo "  text       to make text files"
	@echo "  man        to make manual pages"
	@echo "  texinfo    to make Texinfo files"
	@echo "  info       to make Texinfo files and run them through makeinfo"
	@echo "  gettext    to make PO message catalogs"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  xml        to make Docutils-native XML files"
	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
+
+clean:
	rm -rf $(BUILDDIR)/*
+
+html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."
+
+json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/complexity.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/complexity.qhc"
+
+devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/complexity"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/complexity"
	@echo "# devhelp"
+
+epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	      "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+latexpdfja:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through platex and dvipdfmx..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo
	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
	@echo "Run \`make' in that directory to run these through makeinfo" \
	      "(use \`make info' here to do that automatically)."
+
+info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	make -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
	@echo
	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	      "results in $(BUILDDIR)/doctest/output.txt."
+
+xml:
	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
	@echo
	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+pseudoxml:
	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
	@echo
	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
+
+livehtml:
	sphinx-autobuild $(ALLSPHINXOPTS) $(BUILDDIR)/html
diff --git a/docs/make.bat b/docs/make.bat
new file mode 100644
index 0000000..2df9a8c
--- /dev/null
+++ b/docs/make.bat
@@ -0,0 +1,242 @@
+@ECHO OFF
+
+REM Command file for Sphinx documentation
+
+if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
+)
+set BUILDDIR=_build
+set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
+set I18NSPHINXOPTS=%SPHINXOPTS% .
+if NOT "%PAPER%" == "" (
	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
	set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
+)
+
+if "%1" == "" goto help
+
+if "%1" == "help" (
	:help
	echo.Please use `make ^<target^>` where ^<target^> is one of
	echo.  html       to make standalone HTML files
	echo.  dirhtml    to make HTML files named index.html in directories
	echo.  singlehtml to make a single large HTML file
	echo.  pickle     to make pickle files
	echo.  json       to make JSON files
	echo.  htmlhelp   to make HTML files and a HTML help project
	echo.  qthelp     to make HTML files and a qthelp project
	echo.  devhelp    to make HTML files and a Devhelp project
	echo.  epub       to make an epub
	echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
	echo.  text       to make text files
	echo.  man        to make manual pages
	echo.  texinfo    to make Texinfo files
	echo.  gettext    to make PO message catalogs
	echo.  changes    to make an overview over all changed/added/deprecated items
	echo.  xml        to make Docutils-native XML files
	echo.  pseudoxml  to make pseudoxml-XML files for display purposes
	echo.  linkcheck  to check all external links for integrity
	echo.  doctest    to run all doctests embedded in the documentation if enabled
	goto end
+)
+
+if "%1" == "clean" (
	for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
	del /q /s %BUILDDIR%\*
	goto end
+)
+
+
+%SPHINXBUILD% 2> nul
+if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
+)
+
+if "%1" == "html" (
	%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/html.
	goto end
+)
+
+if "%1" == "dirhtml" (
	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
	goto end
+)
+
+if "%1" == "singlehtml" (
	%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
	goto end
+)
+
+if "%1" == "pickle" (
	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the pickle files.
	goto end
+)
+
+if "%1" == "json" (
	%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the JSON files.
	goto end
+)
+
+if "%1" == "htmlhelp" (
	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run HTML Help Workshop with the ^
+.hhp project file in %BUILDDIR%/htmlhelp.
	goto end
+)
+
+if "%1" == "qthelp" (
	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run "qcollectiongenerator" with the ^
+.qhcp project file in %BUILDDIR%/qthelp, like this:
	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\complexity.qhcp
	echo.To view the help file:
	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\complexity.qhc
	goto end
+)
+
+if "%1" == "devhelp" (
	%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished.
	goto end
+)
+
+if "%1" == "epub" (
	%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The epub file is in %BUILDDIR%/epub.
	goto end
+)
+
+if "%1" == "latex" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
	goto end
+)
+
+if "%1" == "latexpdf" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	cd %BUILDDIR%/latex
	make all-pdf
	cd %BUILDDIR%/..
	echo.
	echo.Build finished; the PDF files are in %BUILDDIR%/latex.
	goto end
+)
+
+if "%1" == "latexpdfja" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	cd %BUILDDIR%/latex
	make all-pdf-ja
	cd %BUILDDIR%/..
	echo.
	echo.Build finished; the PDF files are in %BUILDDIR%/latex.
	goto end
+)
+
+if "%1" == "text" (
	%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The text files are in %BUILDDIR%/text.
	goto end
+)
+
+if "%1" == "man" (
	%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The manual pages are in %BUILDDIR%/man.
	goto end
+)
+
+if "%1" == "texinfo" (
	%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
	goto end
+)
+
+if "%1" == "gettext" (
	%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
	goto end
+)
+
+if "%1" == "changes" (
	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
	if errorlevel 1 exit /b 1
	echo.
	echo.The overview file is in %BUILDDIR%/changes.
	goto end
+)
+
+if "%1" == "linkcheck" (
	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
	if errorlevel 1 exit /b 1
	echo.
	echo.Link check complete; look for any errors in the above output ^
+or in %BUILDDIR%/linkcheck/output.txt.
	goto end
+)
+
+if "%1" == "doctest" (
	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
	if errorlevel 1 exit /b 1
	echo.
	echo.Testing of doctests in the sources finished, look at the ^
+results in %BUILDDIR%/doctest/output.txt.
	goto end
+)
+
+if "%1" == "xml" (
	%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The XML files are in %BUILDDIR%/xml.
	goto end
+)
+
+if "%1" == "pseudoxml" (
	%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
	goto end
+)
+
+:end
diff --git a/docs/source/changelog.md b/docs/source/changelog.md
new file mode 100644
index 0000000..4d6e28c
--- /dev/null
+++ b/docs/source/changelog.md
@@ -0,0 +1,3 @@
+```{include} ../../CHANGELOG.md
+
+```
diff --git a/docs/source/conf.py b/docs/source/conf.py
new file mode 100644
index 0000000..7704b5d
--- /dev/null
+++ b/docs/source/conf.py
@@ -0,0 +1,140 @@
+import datetime
+
+import ocean_c_lab
+
+extensions = [
+    'sphinx.ext.autodoc',
+    'sphinx.ext.viewcode',
+    'sphinx.ext.autosummary',
+    'sphinx.ext.doctest',
+    'sphinx.ext.intersphinx',
+    'sphinx.ext.extlinks',
+    'sphinx.ext.napoleon',
+    'myst_nb',
+    'sphinxext.opengraph',
+    'sphinx_copybutton',
+    'sphinx_inline_tabs',
+]
+
+autodoc_member_order = 'groupwise'
+
+# MyST config
+myst_enable_extensions = ['amsmath', 'colon_fence', 'deflist', 'html_image']
+myst_url_schemes = ['http', 'https', 'mailto']
+
+# sphinx-copybutton configurations
+copybutton_prompt_text = r'>>> |\.\.\. |\$ |In \[\d*\]: | {2,5}\.\.\.: | {5,8}: '
+copybutton_prompt_is_regexp = True
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# Autosummary pages will be generated by sphinx-autogen instead of sphinx-build
+autosummary_generate = []
+autodoc_typehints = 'none'
+
+# Napoleon configurations
+
+napoleon_google_docstring = False
+napoleon_numpy_docstring = True
+napoleon_use_param = False
+napoleon_use_rtype = False
+napoleon_preprocess_types = False
+
+
+jupyter_execute_notebooks = 'cache'
+execution_timeout = 600
+execution_allow_errors = True
+
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+current_year = datetime.datetime.now().year
+project = 'xdev-project'
+copyright = f'{current_year}, xdev-project developers'
+author = 'xdev-project developers'
+
+
+# The short X.Y version.
+version = ocean_c_lab.__version__.split('+')[0]
+# The full version, including alpha/beta/rc tags.
+release = ocean_c_lab.__version__
+
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build', '**.ipynb_checkpoints', 'Thumbs.db', '.DS_Store']
+
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+html_theme = 'furo'
+html_title = ''
+
+html_context = {
+    'github_user': 'ncar-xdev',
+    'github_repo': 'xdev-project',
+    'github_version': 'main',
+    'doc_path': 'docs',
+}
+html_theme_options = dict(
+    # analytics_id=''  this is configured in rtfd.io
+    # canonical_url="",
+)
+
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+# html_logo = '../_static/images/NSF_4-Color_bitmap_Logo.png'
+
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+html_last_updated_fmt = '%b %d, %Y'
+
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'xdev-projectdoc'
+
+
+# -- Options for LaTeX output --------------------------------------------------
+
+latex_elements = {
+    # The paper size ('letterpaper' or 'a4paper').
+    # 'papersize': 'letterpaper',
+    # The font size ('10pt', '11pt' or '12pt').
+    # 'pointsize': '10pt',
+    # Additional stuff for the LaTeX preamble.
+ # 'preamble': '', +} + + +latex_documents = [('index', 'xdev-project.tex', 'xdev-project Documentation', author, 'manual')] + +man_pages = [('index', 'xdev-project', 'xdev-project Documentation', [author], 1)] + +texinfo_documents = [ + ( + 'index', + 'xdev-project', + 'xdev-project Documentation', + author, + 'xdev-project', + 'One line description of project.', + 'Miscellaneous', + ) +] + + +intersphinx_mapping = { + 'python': ('https://docs.python.org/3/', None), + 'xarray': ('http://xarray.pydata.org/en/stable/', None), + 'pandas': ('https://pandas.pydata.org/pandas-docs/stable/', None), +} diff --git a/docs/source/explanation/index.md b/docs/source/explanation/index.md new file mode 100644 index 0000000..b1303b4 --- /dev/null +++ b/docs/source/explanation/index.md @@ -0,0 +1 @@ +# Explanation diff --git a/docs/source/how-to/index.md b/docs/source/how-to/index.md new file mode 100644 index 0000000..d3fdf06 --- /dev/null +++ b/docs/source/how-to/index.md @@ -0,0 +1,11 @@ +# How to + +How to: + +```{toctree} +--- +maxdepth: 1 +--- +install-xdev-project.md + +``` diff --git a/docs/source/how-to/install-xdev-project.md b/docs/source/how-to/install-xdev-project.md new file mode 100644 index 0000000..def2b46 --- /dev/null +++ b/docs/source/how-to/install-xdev-project.md @@ -0,0 +1,33 @@ +# Install xdev-project + +xdev-project can be installed in three ways: + +```{eval-rst} +.. tab:: pip + + Using the `pip `__ package manager: + + .. code:: bash + + $ python -m pip install xdev-project + +.. tab:: conda + + Using the `conda `__ package manager that comes with the + Anaconda/Miniconda distribution: + + .. code:: bash + + $ conda install xdev-project --channel conda-forge + +.. tab:: Development version + + To install a development version from source: + + .. code:: bash + + $ git clone https://github.com/ncar-xdev/xdev-project + $ cd xdev-project + $ python -m pip install -e . + +``` diff --git a/docs/source/index.md b/docs/source/index.md new file mode 100644 index 0000000..52bd493 --- /dev/null +++ b/docs/source/index.md @@ -0,0 +1,34 @@ +# Welcome to xdev-project's documentation! + +xdev-project ... + +## Get in touch + +- If you encounter any errors or problems with **xdev-project**, please open an issue at the GitHub [main repository](http://github.com/ncar-xdev/xdev-project/issues). +- If you have a question like "How do I find x?", ask on [GitHub discussions](https://github.com/ncar-xdev/xdev-project/discussions). Please include a self-contained reproducible example if possible. 
+
+```{toctree}
+---
+maxdepth: 1
+hidden:
+---
+
+tutorials/index.md
+how-to/index.md
+explanation/index.md
+reference/index.md
+
+```
+
+```{toctree}
+---
+maxdepth: 2
+caption: Contribute to xdev-project
+hidden:
+---
+
+changelog.md
+GitHub Repo <https://github.com/ncar-xdev/xdev-project>
+GitHub discussions <https://github.com/ncar-xdev/xdev-project/discussions>
+
+```
diff --git a/docs/source/reference/index.md b/docs/source/reference/index.md
new file mode 100644
index 0000000..b0d5c88
--- /dev/null
+++ b/docs/source/reference/index.md
@@ -0,0 +1 @@
+# API Reference
diff --git a/docs/source/tutorials/index.md b/docs/source/tutorials/index.md
new file mode 100644
index 0000000..e69de29
diff --git a/ocean_c_lab/__init__.py b/ocean_c_lab/__init__.py
new file mode 100644
index 0000000..44f5727
--- /dev/null
+++ b/ocean_c_lab/__init__.py
@@ -0,0 +1,10 @@
+from pkg_resources import DistributionNotFound, get_distribution
+
+try:
+    __version__ = get_distribution(__name__).version
+except DistributionNotFound:  # pragma: no cover
+    __version__ = '0.0.0'  # pragma: no cover
+
+from .box_models import *
+from .co2calc import *
+from .glodap import open_glodap
diff --git a/ocean_c_lab/box_models.py b/ocean_c_lab/box_models.py
new file mode 100644
index 0000000..bfbe97d
--- /dev/null
+++ b/ocean_c_lab/box_models.py
@@ -0,0 +1,552 @@
+import os
+import yaml
+
+import cftime
+
+from scipy.optimize import fsolve
+import numpy as np
+import dask
+import xarray as xr
+
+from . import gasex
+from . import co2calc
+
+path_to_here = os.path.dirname(os.path.realpath(__file__))
+
+s_per_d = 86400.0
+
+
+@dask.delayed
+def sim_single_box(nday, ic_data, do_spinup=False, **init_kwargs):
+    """run simulation with single_box"""
+
+    # instantiate model
+    m = box_model_simulation(
+        model=single_box,
+        **init_kwargs,
+    )
+
+    # optionally find a steady-state solution
+    if do_spinup:
+        ds_eq = m.spinup(ic_data)
+        ic_data = {k: ds_eq[k] for k in ic_data.keys()}
+
+    # run the model and return output dataset
+    m.run(
+        nday=nday,
+        ic_data=ic_data,
+    )
+    return m.ds
+
+
+class indexing_type(object):
+    def __init__(self, **kwargs):
+        for k, v in kwargs.items():
+            assert isinstance(v, int)
+            self.__dict__[k] = v
+
+    def __getitem__(self, key):
+        return self.__dict__[key]
+
+
+class box_model_simulation(object):
+    def __init__(
+        self,
+        model,
+        forcing=None,
+        calendar='noleap',
+        **init_kwargs,
+    ):
+        """Run box model integrations.
+
+        Parameters
+        ----------
+        model : obj
+          Box model to integrate.
+
+        forcing : xarray.Dataset
+          Forcing data to run the model.
+
+        calendar : string
+          String describing the CF-conventions calendar. See:
+          http://cfconventions.org/cf-conventions/cf-conventions#calendar
+          This must match the calendar used in `forcing`.
+
+        init_kwargs : dict, optional
+          Keyword arguments to pass to `model`.
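+
+        Examples
+        --------
+        A minimal usage sketch; the initial-condition values below are
+        illustrative only:
+
+        >>> from ocean_c_lab import box_models
+        >>> forcing = box_models.gen_forcing_dataset(nday=365)
+        >>> m = box_models.box_model_simulation(
+        ...     model=box_models.single_box,
+        ...     forcing=forcing,
+        ... )
+        >>> m.run(nday=365, ic_data=dict(dic=2000.0, alk=2300.0))
+        >>> ds = m.ds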
+ + """ + + self.calendar = calendar + self._init_forcing(forcing) + self.obj = model( + **init_kwargs, + ) + + def _init_forcing(self, forcing): + """initialize forcing Dataset""" + + if forcing is None: + self.forcing = None + + else: + assert ( + forcing.time.encoding['calendar'] == self.calendar + ), "forcing calendar and simulation calendar mismatch" + + self.forcing = forcing.copy() + + # determine the data_vars for interpolation + not_data_vars = [] + tb_var = None + if 'bounds' in forcing.time.attrs: + tb_var = forcing.time.attrs['bounds'] + not_data_vars.append(tb_var) + self.forcing_data_vars = list( + filter(lambda v: v not in not_data_vars, forcing.data_vars), + ) + + def _init_state(self, ic_data): + """initialize model state variables""" + for v in self.obj.state_names: + i = self.obj.sind[v] + self.obj.state_data[i, :] = ic_data[v] + + def _init_output_arrays(self, start_date, nt): + + time, time_bnds = gen_daily_cftime_coord( + start_date, + nt, + calendar=self.calendar, + ) + + # integration time set to beginning of interval + self.time = time_bnds[:, 0].data + + self._ds = xr.Dataset() + self._ds[time.bounds] = time_bnds + for v, attrs in self.obj.diag_attrs.items(): + self._ds[v] = xr.DataArray( + np.zeros((nt, self.obj.nx)), + dims=("time", "nx"), + attrs=attrs, + coords={"time": time}, + ) + for v, attrs in self.obj.state_attrs.items(): + self._ds[v] = xr.DataArray( + np.zeros((nt, self.obj.nx)), + dims=("time", "nx"), + attrs=attrs, + coords={"time": time}, + ) + + def _forcing_t(self, t): + + if self.forcing is None: + return None + + interp_time = t + if interp_time <= self.forcing.time[0]: + return self.forcing[self.forcing_data_vars].isel(time=0) + elif interp_time >= self.forcing.time[-1]: + return self.forcing[self.forcing_data_vars].isel(time=-1) + else: + return self.forcing[self.forcing_data_vars].interp( + time=interp_time, + kwargs=dict(bounds_error=True), + ) + + def _post_data(self, n, state_t): + for i, v in enumerate(self.obj.state_names): + self._ds[v][n, :] = state_t[i, :] + + for i, v in enumerate(self.obj.diag_names): + self._ds[v][n, :] = self.obj.diag_data[i, :] + + @property + def ds(self): + """Data comprising the output from ``box_model_instance``.""" + return self._ds + + def _compute_tendency(self, t, state_t, run_kwargs): + """Return the feisty time tendency.""" + return self.obj.compute_tendencies( + state_data=state_t, + forcing_t_ds=self._forcing_t(t), + **run_kwargs, + ) + + def _solve(self, nt, method, run_kwargs): + """Call a numerical ODE solver to integrate the feisty model in time.""" + + state_t = self.obj.state_data + + if method == "euler": + self._solve_foward_euler(nt, state_t, run_kwargs) + + elif method in ["Radau", "RK45"]: + # TODO: make input arguments + self._solve_scipy(nt, state_t, method, run_kwargs) + else: + raise ValueError(f"unknown method: {method}") + + def _solve_foward_euler(self, nt, state_t, run_kwargs): + """use forward-euler to integrate model""" + for n in range(nt): + dsdt = self._compute_tendency(self.time[n], state_t, run_kwargs) * s_per_d + state_t[:, :] = state_t[:, :] + dsdt[:, :] * self.dt + self._post_data(n, state_t) + + def _solve_scipy(self, nt, state_t, method, run_kwargs): + """use a SciPy solver to integrate the model equation.""" + raise NotImplementedError("scipy solvers not implemented") + + def run( + self, + nday, + ic_data, + start_date='0001-01-01', + file_out=None, + method="euler", + run_kwargs={}, + ): + """Integrate the FEISTY model. 
+
+        Parameters
+        ----------
+        nday : integer
+          Number of days to run.
+
+        ic_data : dict_like
+          Dataset or dictionary that includes data for each of the model's
+          state variables.
+
+        start_date : string, optional
+          Date to start the model integration; must have format 'YYYY-MM-DD'.
+
+        file_out : string, optional
+          File name to write model output data.
+
+        method : string
+          Method of integrating model equations. Options: ['euler', 'Radau', 'RK45'].
+
+          .. note::
+             Only ``method='euler'`` is supported currently.
+        """
+
+        # time step
+        self.dt = 1.0  # day
+        nt = nday
+
+        self._init_state(ic_data)
+        self._init_output_arrays(start_date, nt)
+        self._solve(nt, method, run_kwargs)
+        self._shutdown(file_out)
+
+    def _shutdown(self, file_out):
+        """Close out integration:
+        Tasks:
+         - write output
+        """
+        if file_out is not None:
+            self._ds.to_netcdf(file_out)
+
+    def spinup(self, ic_data, nday=1, run_kwargs={}):
+        """use scipy.optimize.fsolve to find an equilibrium solution"""
+
+        assert set(self.obj.state_names) == set(ic_data.keys())
+
+        ntracers, nx = self.obj.state_data.shape
+
+        def wrap_model(state_in_flat):
+            state_in = state_in_flat.reshape((ntracers, nx))
+            self.run(
+                nday=nday,
+                ic_data={k: state_in[i, :] for i, k in enumerate(self.obj.state_names)},
+                run_kwargs=run_kwargs,
+            )
+            state_out = self.obj.state_data.ravel()
+
+            # fsolve finds roots, so return the residual itself; squaring it
+            # would make the Jacobian singular at the solution
+            return state_out - state_in_flat
+
+        # setup dataset for output
+        state_equil = xr.Dataset()
+        for v, attrs in self.obj.state_attrs.items():
+            state_equil[v] = xr.DataArray(
+                np.zeros((self.obj.nx)),
+                dims=("nx"),
+                attrs=attrs,
+            )
+
+        # initial guess
+        state0_flat = np.vstack([ic_data[k] for k in self.obj.state_names]).ravel()
+        statef_flat = fsolve(wrap_model, state0_flat, xtol=1e-7, maxfev=2000)
+        statef = statef_flat.reshape((ntracers, nx))
+
+        for i, v in enumerate(self.obj.state_names):
+            state_equil[v].data[:] = statef[i, :]
+
+        return state_equil
+
+
+class single_box(object):
+    """
+    A box model
+    """
+
+    def __init__(self, **kwargs):
+        """Initialize model"""
+
+        # static attributes
+        self.boxes = [
+            "surface",
+        ]
+        self.state_names = [
+            "dic",
+            "alk",
+        ]
+        self.settings = [
+            'lapply_alk_flux',
+            'lventilate',
+            'tracer_boundary_conc',
+            'diag_list',
+        ]
+        self.forcing_vars = [
+            'salt',
+            'temp',
+            'fice',
+            'patm',
+            'u10',
+            'h',
+            'area',
+            'Xco2atm',
+            'ventilation_flow',
+            'alk_flux',
+        ]
+
+        # validate kwargs
+        unknown_args = set(kwargs.keys()) - set(self.settings + self.forcing_vars)
+        assert not unknown_args, f'Unknown keyword argument(s): {unknown_args}'
+
+        # initialization sequence
+        self._init_state()
+        self._init_diag(diag_list=kwargs.pop('diag_list', None))
+        self._init_model(**kwargs)
+        self._init_forcing(**kwargs)
+        self._pH0 = 8.0
+
+    def _init_state(self):
+        """initialize model state"""
+        with open(f"{path_to_here}/state_variables.yml") as fid:
+            tracer_attrs = yaml.safe_load(fid)
+
+        self.nx = len(self.boxes)
+        self.ntracers = len(self.state_names)
+        self.state_attrs = {k: tracer_attrs[k] for k in self.state_names}
+
+    def _init_diag(self, diag_list=None):
+        """initialize model diagnostics"""
+        diag_attrs_files = [
+            f"{path_to_here}/csys_diag_definitions.yml",
+        ]
+
+        diag_defs = {}
+        for file in diag_attrs_files:
+            with open(file) as fid:
+                diag_defs.update(yaml.safe_load(fid))
+
+        self.diag_attrs = {k: v['attrs'] for k, v in diag_defs.items()}
+        if diag_list is not None:
+            diag_defs = {k: v for k, v in diag_defs.items() if k in diag_list}
+            self.diag_attrs = {k: v for k, v in self.diag_attrs.items() if k in diag_list}
+
+        self.diag_units_convert_factor = {}
+        for k, v in diag_defs.items():
+
+
+class single_box(object):
+    """
+    A single-box model of the surface-ocean carbonate system.
+    """
+
+    def __init__(self, **kwargs):
+        """Initialize model"""
+
+        # static attributes
+        self.boxes = [
+            "surface",
+        ]
+        self.state_names = [
+            "dic",
+            "alk",
+        ]
+        self.settings = [
+            'lapply_alk_flux',
+            'lventilate',
+            'tracer_boundary_conc',
+            'diag_list',
+        ]
+        self.forcing_vars = [
+            'salt',
+            'temp',
+            'fice',
+            'patm',
+            'u10',
+            'h',
+            'area',
+            'Xco2atm',
+            'ventilation_flow',
+            'alk_flux',
+        ]
+
+        # validate kwargs
+        unknown_args = set(kwargs.keys()) - set(self.settings + self.forcing_vars)
+        assert not unknown_args, f'Unknown keyword argument(s): {unknown_args}'
+
+        # initialization sequence
+        self._init_state()
+        self._init_diag(diag_list=kwargs.pop('diag_list', None))
+        self._init_model(**kwargs)
+        self._init_forcing(**kwargs)
+        self._pH0 = 8.0
+
+    def _init_state(self):
+        """initialize model state"""
+        with open(f"{path_to_here}/state_variables.yml") as fid:
+            tracer_attrs = yaml.safe_load(fid)
+
+        self.nx = len(self.boxes)
+        self.ntracers = len(self.state_names)
+        self.state_attrs = {k: tracer_attrs[k] for k in self.state_names}
+
+    def _init_diag(self, diag_list=None):
+        """initialize model diagnostics"""
+        diag_attrs_files = [
+            f"{path_to_here}/csys_diag_definitions.yml",
+        ]
+
+        diag_defs = {}
+        for file in diag_attrs_files:
+            with open(file) as fid:
+                diag_defs.update(yaml.safe_load(fid))
+
+        self.diag_attrs = {k: v['attrs'] for k, v in diag_defs.items()}
+        if diag_list is not None:
+            diag_defs = {k: v for k, v in diag_defs.items() if k in diag_list}
+            self.diag_attrs = {k: v for k, v in self.diag_attrs.items() if k in diag_list}
+
+        self.diag_units_convert_factor = {}
+        for k, v in diag_defs.items():
+            try:
+                self.diag_units_convert_factor[k] = v['units_convert_factor']
+            except KeyError:
+                self.diag_units_convert_factor[k] = 1.0
+
+        self.diag_names = list(self.diag_attrs.keys())
+        self.ndiag = len(self.diag_names)
+
+    def _init_model(self, **kwargs):
+        """initialize memory and model settings"""
+
+        # parse the settings
+        self.lapply_alk_flux = kwargs.pop('lapply_alk_flux', False)
+        self.lventilate = kwargs.pop('lventilate', False)
+        tracer_boundary_conc_dict = kwargs.pop('tracer_boundary_conc', None)
+
+        # initialize arrays
+        self.state_data = np.zeros((self.ntracers, self.nx))
+        self.tendency_data = np.zeros((self.ntracers, self.nx))
+        self.diag_data = np.zeros((self.ndiag, self.nx))
+        self.tracer_boundary_conc = np.zeros((self.ntracers, self.nx))
+
+        if self.lventilate:
+            assert (
+                tracer_boundary_conc_dict is not None
+            ), 'lventilate=True requires the `tracer_boundary_conc` keyword to be set'
+            for i, v in enumerate(self.state_names):
+                self.tracer_boundary_conc[i, :] = tracer_boundary_conc_dict[v]
+
+        # initialize indexers
+        self.sind = indexing_type(
+            **{k: i for i, k in enumerate(self.state_names)},
+        )
+        self.dind = indexing_type(
+            **{k: i for i, k in enumerate(self.diag_names)},
+        )
+
+    def _init_forcing(self, **kwargs):
+        """initialize forcing data"""
+        self.forcing_constant = {v: kwargs.pop(v, None) for v in self.forcing_vars}
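With the settings and forcing variables enumerated above, a box is configured entirely through keyword arguments: any forcing variable passed to the constructor becomes a constant, and anything omitted is read from the time-varying forcing at run time. A sketch with hypothetical values:

    from ocean_c_lab.box_models import single_box

    box = single_box(
        lventilate=True,
        tracer_boundary_conc={'dic': 2150.0, 'alk': 2350.0},  # mmol/m^3, meq/m^3
        ventilation_flow=1.0e6,  # m^3/s, constant-forcing override
        temp=18.0,               # deg C, constant-forcing override
    )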
+
+    def compute_tendencies(self, state_data, forcing_t_ds=None, **kwargs):
+        """compute tendencies"""
+
+        # unpack inputs into local variables
+        dic_data = state_data[self.sind.dic, :]
+        alk_data = state_data[self.sind.alk, :]
+
+        forcing_data = {}
+        for v in self.forcing_vars:
+            if self.forcing_constant[v] is not None:
+                forcing_data[v] = self.forcing_constant[v]
+            else:
+                forcing_data[v] = forcing_t_ds[v].data
+
+        salt_data = forcing_data['salt']
+        temp_data = forcing_data['temp']
+        fice_data = forcing_data['fice']
+        patm_data = forcing_data['patm']
+        u10_data = forcing_data['u10']
+        Xco2atm_data = forcing_data['Xco2atm']
+        h_data = forcing_data['h']
+        area_data = forcing_data['area']
+        alk_flux_data = forcing_data['alk_flux']
+        ventilation_flow_data = forcing_data['ventilation_flow']
+
+        vol = area_data * h_data
+
+        # initialize tendency
+        self.tendency_data[:, :] = 0.0
+
+        # compute dic tendency terms
+        co2sol = co2calc.co2sol(salt_data, temp_data)  # mmol/m^3/atm
+        thermodyn = co2calc.co2_eq_const(salt_data, temp_data)
+
+        # solve carbonate system
+        co2aq, pH = co2calc.calc_csys_iter(
+            dic_data, alk_data, salt_data, temp_data, pH0=self._pH0, thermodyn=thermodyn
+        )
+
+        # carbonate system diagnostics
+        self.diag_data[self.dind.pH, :] = pH
+        self._pH0 = pH
+
+        self.diag_data[self.dind.pco2, :] = 1.0e6 * co2aq / co2sol
+
+        rf, ddicdco2 = co2calc.rf_ddicdco2(
+            salt_data, temp_data, dic_data, co2aq, pH, thermodyn=thermodyn
+        )
+        self.diag_data[self.dind.revelle_factor, :] = rf
+        self.diag_data[self.dind.dDICdCO2, :] = ddicdco2
+
+        # compute gas exchange
+        k_gas = gasex.gas_transfer_velocity(u10_data, temp_data)  # m/s
+        self.diag_data[self.dind.xkw, :] = k_gas
+
+        co2atm = patm_data * (Xco2atm_data * 1.0e-6) * co2sol  # mmol/m^3
+        gasex_co2 = (1.0 - fice_data) * k_gas * (co2atm - co2aq)  # mmol/m^2/s
+        self.diag_data[self.dind.fgco2, :] = gasex_co2 * self.diag_units_convert_factor['fgco2']
+        self.tendency_data[self.sind.dic, :] += gasex_co2 * area_data  # mmol/s
+
+        # apply alk forcing
+        if self.lapply_alk_flux:
+            self.tendency_data[self.sind.alk, :] += alk_flux_data * area_data  # mmol/s
+
+        # apply boundary flow; use the state passed in (not self.state_data)
+        # so that trial states from the integrator are handled consistently
+        if self.lventilate:
+            self.tendency_data[:, :] += (
+                ventilation_flow_data * self.tracer_boundary_conc
+                - ventilation_flow_data * state_data
+            )  # m^3/s * mmol/m^3 --> mmol/s
+
+        # normalize by volume
+        self.tendency_data /= vol  # mmol/m^3/s
+
+        return self.tendency_data
+
+
+def get_forcing_defaults(values_only=False):
+    with open(f"{path_to_here}/forcing_variables.yml") as fid:
+        forcing_var_defs = yaml.safe_load(fid)
+    if values_only:
+        return {k: d['default_value'] for k, d in forcing_var_defs.items()}
+    else:
+        return forcing_var_defs
+
+
+def gen_forcing_dataset(nday, start_date='0001-01-01', calendar='noleap', **kwargs):
+
+    time, time_bnds = gen_daily_cftime_coord(start_date, nday, calendar=calendar)
+
+    forcing_var_defs = get_forcing_defaults()
+
+    forcing_values = {k: v['default_value'] for k, v in forcing_var_defs.items()}
+    unknown_vars = set(kwargs.keys()) - set(forcing_values.keys())
+    assert not unknown_vars, f'unknown forcing variable(s): {unknown_vars}'
+
+    forcing_values.update(kwargs)
+
+    forcing = xr.Dataset()
+    for v in forcing_values.keys():
+        if np.isscalar(forcing_values[v]):
+            data = forcing_values[v] * np.ones(nday)
+        else:
+            data = forcing_values[v][:]
+
+        forcing[v] = xr.DataArray(
+            data,
+            dims=("time"),
+            attrs=forcing_var_defs[v]['attrs'],
+            coords={'time': time},
+        )
+    forcing[time.bounds] = time_bnds
+    return forcing
+
+
+def gen_daily_cftime_coord(
+    start_date,
+    nday,
+    units='days since 0001-01-01 00:00:00',
+    calendar='gregorian',
+):
+    time = xr.cftime_range(start=start_date, periods=nday, freq='D', calendar=calendar)
+
+    num_time = cftime.date2num(time, units, calendar=calendar)
+    time_bounds_data = np.vstack((num_time, num_time + 1)).T
+    time_data = cftime.num2date(time_bounds_data.mean(axis=1), units, calendar=calendar)
+
+    time = xr.DataArray(time_data, dims=('time'), name='time')
+    time.encoding['units'] = units
+    time.encoding['calendar'] = calendar
+    time.encoding['dtype'] = np.float64
+    time.encoding['_FillValue'] = None
+
+    time.attrs['bounds'] = 'time_bnds'
+
+    time_bnds = xr.DataArray(
+        cftime.num2date(time_bounds_data, units, calendar),
+        dims=('time', 'd2'),
+        coords={'time': time},
+        name='time_bnds',
+    )
+
+    time_bnds.encoding['dtype'] = np.float64
+    time_bnds.encoding['_FillValue'] = None
+
+    return time, time_bnds
diff --git a/ocean_c_lab/co2calc.py b/ocean_c_lab/co2calc.py
new file mode 100644
index 0000000..2975b4d
--- /dev/null
+++ b/ocean_c_lab/co2calc.py
@@ -0,0 +1,738 @@
+import numpy as np
+
+T0_Kelvin = 273.15
+rho_ref = 1026.0
+
+co2_chem_tol = 1.0e-12
+co2_ph_high = 10.0
+co2_ph_low = 6.0
+
+
+def _mmolm3_to_molkg(value):
+    return value * 1.0e-3 / rho_ref
+
+
+def co2_eq_const(S, T):
+    """Compute carbonate system equilibrium constants"""
+
+    t_kel = T + T0_Kelvin
+    t_sca = t_kel * 0.01
+    t_sq = t_sca * t_sca
+    t_inv = 1.0 / t_kel
+    t_log = np.log(t_kel)
+
+    s_sq = S * S
+    s_sqrt = np.sqrt(S)
+    s_1p5 = S ** 1.5
+    s_cl = S / 1.80655
+
+    s_sc = 19.924 * S / (1000.0 - 1.005 * S)
+    s_sc_sq = s_sc * s_sc
+    s_sc_sqrt = np.sqrt(s_sc)
+
+    eq = {}
+    # K1, K2: Mehrbach et al. (1973) refit by Lueker et al. (2000), total pH scale
+    eq["k_h2co3"] = 10.0 ** (
+        -3633.86 / t_kel + 61.2172 - 9.6777 * t_log + 0.011555 * S - 0.0001152 * s_sq
+    )
+
+    eq["k_hco3"] = 10.0 ** (
+        -471.78 / t_kel - 25.9290 + 3.16967 * t_log + 0.01781 * S - 0.0001122 * s_sq
+    )
+
+    eq["k_h3po4"] = np.exp(
+        -4576.752 * t_inv
+        + 115.540
+        - 18.453 * t_log
+        + (-106.736 * t_inv + 0.69171) * s_sqrt
+        + (-0.65643 * t_inv - 0.01844) * S
+    )
+
+    eq["k_h2po4"] = np.exp(
+        -8814.715 * t_inv
+        + 172.0883
+        - 27.927 * t_log
+        + (-160.340 * t_inv + 1.3566) * s_sqrt
+        + (0.37335 * t_inv - 0.05778) * S
+    )
+
+    eq["k_hpo4"] = np.exp(
+        -3070.75 * t_inv
+        - 18.126
+        + (17.27039 * t_inv + 2.81197) * s_sqrt
+        + (-44.99486 * t_inv - 0.09984) * S
+    )
+
+    eq["k_sioh4"] = np.exp(
+        -8904.2 * t_inv
+        + 117.385
+        - 19.334 * t_log
+        + (-458.79 * t_inv + 3.5913) * s_sc_sqrt
+        + (188.74 * t_inv - 1.5998) * s_sc
+        + (-12.1652 * t_inv + 0.07871) * s_sc_sq
+        + np.log(1.0 - 0.001005 * S)
+    )
+
+    # following Zeebe and Wolf-Gladrow
+    eq["k_oh"] = np.exp(
+        -13847.26 * t_inv
+        + 148.96502
+        - 23.6521 * t_log
+        + (118.67 * t_inv - 5.977 + 1.0495 * t_log) * s_sqrt
+        - 0.01615 * S
+    )
+
+    eq["k_hso4"] = np.exp(
+        -4276.1 * t_inv
+        + 141.328
+        - 23.093 * t_log
+        + (-13856.0 * t_inv + 324.57 - 47.986 * t_log) * s_sc_sqrt
+        + (35474 * t_inv - 771.54 + 114.723 * t_log) * s_sc
+        - 2698 * t_inv * s_sc ** 1.5
+        + 1776 * t_inv * s_sc_sq
+        + np.log(1.0 - 0.001005 * S)
+    )
+
+    eq["boron_total"] = 0.000232 * s_cl / 10.811
+    eq["sulfate"] = 0.14 * s_cl / 96.062
+    eq["fluoride"] = 0.000067 * s_cl / 18.9984
+
+    eq["k_hf"] = np.exp(
+        1590.2 * t_inv
+        - 12.641
+        + 1.525 * s_sc_sqrt
+        + np.log(1.0 - 0.001005 * S)
+        + np.log(1.0 + (eq["sulfate"] / eq["k_hso4"]))
+    )
+
+    eq["k_hbo2"] = np.exp(
+        (-8966.90 - 2890.53 * s_sqrt - 77.942 * S + 1.728 * s_1p5 - 0.0996 * s_sq) * t_inv
+        + (148.0248 + 137.1942 * s_sqrt + 1.62142 * S)
+        + (-24.4344 - 25.085 * s_sqrt - 0.2474 * S) * t_log
+        + 0.053105 * s_sqrt * t_kel
+    )
+    return eq
+
+
+def newton_safe(funcd, x_guess, x1, x2, xacc, **kwargs):
+    MAXIT = 100
+    fl, df = funcd(x1, **kwargs)
+    fh, df = funcd(x2, **kwargs)
+
+    xl = np.where(fl < 0, x1, x2)
+    xh = np.where(fl < 0, x2, x1)
+
+    rts = x_guess
+    dxold = abs(x2 - x1)
+    dx = dxold
+    f, df = funcd(rts, **kwargs)
+
+    for j in range(0, MAXIT):
+        if ((rts - xh) * df - f) * ((rts - xl) * df - f) >= 0 or abs(2.0 * f) > abs(dxold * df):
+            dxold = dx
+            dx = 0.5 * (xh - xl)
+            rts = xl + dx
+            if xl == rts:
+                return rts
+        else:
+            dxold = dx
+            dx = f / df
+            temp = rts
+            rts = rts - dx
+            if temp == rts:
+                return rts
+        if abs(dx) < xacc:
+            return rts
+
+        f, df = funcd(rts, **kwargs)
+
+        if f < 0:
+            xl = rts
+        else:
+            xh = rts
+
+    return rts
+
+
+def _calc_pH_from_alk_pco2(
+    h,
+    alk,
+    pco2,
+    boron_total,
+    fluoride,
+    phosphate,
+    silicate,
+    sulfate,
+    k0,
+    k_h2co3,
+    k_hco3,
+    k_hbo2,
+    k_h3po4,
+    k_h2po4,
+    k_hpo4,
+    k_hf,
+    k_hso4,
+    k_oh,
+    k_sioh4,
+):
+
+    h_2 = h * h
+    h_3 = h_2 * h
+    k_01 = k0 * k_h2co3
+    k_012 = k_01 * k_hco3
+    k_12 = k_h2co3 * k_hco3
+    k_12p = k_h3po4 * k_h2po4
+    k_123p = k_12p * k_hpo4
+    c = 1.0 + sulfate / k_hso4 + fluoride / k_hf
+    a = h_3 + k_h3po4 * h_2 + k_12p * h + k_123p
+    a2 = a * a
+    da = 3.0 * h_2 + 2.0 * k_h3po4 * h + k_12p
+    b = h_2 + k_h2co3 * h + k_12
+
+    # Calculate F:
+    # F = HCO3 + CO3 + Borate + OH + HPO4 + 2 * PO4 + Silicate + HFREE
+    #     + HSO4 + HF + H3PO4 - TA
+    f = (
+        k_01 * pco2 / h
+        + 2 * k_012 * pco2 / h_2
+        + boron_total / (1.0 + h / k_hbo2)
+        + k_oh / h
+        + (k_12p * h + 2.0 * k_123p - h_3) * phosphate / a
+        + silicate / (1.0 + h / k_sioh4)
+        - h / c
+        - sulfate / (1.0 + k_hso4 * c / h)
+        - fluoride / (1.0 + k_hf * c / h)
+        - alk
+    )
+
+    # calculate df=df/dh
+
+    df = (
+        -k_01 * pco2 / h_2
+        - 4 * k_012 * pco2 / h_3
+        - boron_total / (k_hbo2 * (1.0 + h / k_hbo2) ** 2)
+        - k_oh / h_2
+        + (k_12p * (a - h * da) - 2.0 * k_123p * da - h_2 * (3.0 * a - h * da)) * phosphate / a2
+        - silicate / (k_sioh4 * (1.0 + h / k_sioh4) ** 2)
+        - 1.0 / c
+        - sulfate / (1.0 + k_hso4 * c / h) ** 2.0 * (k_hso4 * c / h_2)
+        - fluoride / (1.0 + k_hf * c / h) ** 2.0 * (k_hf * c / h_2)
+    )
+
+    return f, df
+
+
+def _calc_pH_from_dic_alk(
+    x,
+    dic,
+    alk,
+    boron_total,
+    fluoride,
+    phosphate,
+    silicate,
+    sulfate,
+    k_h2co3,
+    k_hco3,
+    k_hbo2,
+    k_h3po4,
+    k_h2po4,
+    k_hpo4,
+    k_hf,
+    k_hso4,
+    k_oh,
+    k_sioh4,
+):
+
+    x_2 = x * x
+    x_3 = x_2 * x
+    k_12 = k_h2co3 * k_hco3
+    k_12p = k_h3po4 * k_h2po4
+    k_123p = k_12p * k_hpo4
+    c = 1.0 + sulfate / k_hso4 + fluoride / k_hf
+    a = x_3 + k_h3po4 * x_2 + k_12p * x + k_123p
+    a2 = a * a
+    da = 3.0 * x_2 + 2.0 * k_h3po4 * x + k_12p
+    b = x_2 + k_h2co3 * x + k_12
+    b2 = b * b
+    db = 2.0 * x + k_h2co3
+
+    # Calculate F:
+    # F = HCO3 + CO3 + Borate + OH + HPO4 + 2 * PO4 + Silicate + HFREE
+    #     + HSO4 + HF + H3PO4 - TA
+    f = (
+        (k_h2co3 * x + 2.0 * k_12) * dic / b
+        + boron_total / (1.0 + x / k_hbo2)
+        + k_oh / x
+        + (k_12p * x + 2.0 * k_123p - x_3) * phosphate / a
+        + silicate / (1.0 + x / k_sioh4)
+        - x / c
+        - sulfate / (1.0 + k_hso4 * c / x)
+        - fluoride / (1.0 + k_hf * c / x)
+        - alk
+    )
+
+    # calculate df = dF/dx
+
+    df = (
+        ((b - x * db) * k_h2co3 - 2.0 * k_12 * db) * dic / b2
+        - boron_total / (k_hbo2 * (1.0 + x / k_hbo2) ** 2)
+        - k_oh / x_2
+        + (k_12p * (a - x * da) - 2.0 * k_123p * da - x_2 * (3.0 * a - x * da)) * phosphate / a2
+        - silicate / (k_sioh4 * (1.0 + x / k_sioh4) ** 2)
+        - 1.0 / c
+        - sulfate / (1.0 + k_hso4 * c / x) ** 2.0 * (k_hso4 * c / x_2)
+        - fluoride / (1.0 + k_hf * c / x) ** 2.0 * (k_hf * c / x_2)
+    )
+
+    return f, df
+
+
+def calc_pH_from_dic_alk(
+    DIC,
+    ALK,
+    S,
+    T,
+    PO4=0.5,
+    SiO3=10.0,
+    input_in_gravimetric_units=False,
+    pH0=8.0,
+    thermodyn=None,
+):
+
+    if thermodyn is None:
+        thermodyn = co2_eq_const(S, T)
+
+    if not input_in_gravimetric_units:
+        dic_loc = _mmolm3_to_molkg(DIC)
+        alk_loc = _mmolm3_to_molkg(ALK)
+        phosphate_loc = _mmolm3_to_molkg(PO4)
+        silicate_loc = _mmolm3_to_molkg(SiO3)
+    else:
+        # assume units are µmol/kg, convert to mol/kg
+        dic_loc = 1e-6 * DIC
+        alk_loc = 1e-6 * ALK
+        phosphate_loc = 1e-6 * PO4
+        silicate_loc = 1e-6 * SiO3
+
+    h_total = np.vectorize(newton_safe)(
+        _calc_pH_from_dic_alk,
+        10.0 ** (-pH0),
+        10.0 ** (-co2_ph_low),
+        10.0 ** (-co2_ph_high),
+        co2_chem_tol,
+        dic=dic_loc,
+        alk=alk_loc,
+        phosphate=phosphate_loc,
+        silicate=silicate_loc,
+        **thermodyn,
+    )
+    return -1.0 * np.log10(h_total)
+
+
+def calc_co2(
+    DIC,
+    ALK,
+    S,
+    T,
+    PO4=0.5,
+    SiO3=10.0,
+    input_in_gravimetric_units=False,
+    pH0=8.0,
+    thermodyn=None,
+):
+    """
+    Compute CO2aq from DIC and ALK
+    """
+
+    if thermodyn is None:
+        thermodyn = co2_eq_const(S, T)
+
+    pH = calc_pH_from_dic_alk(
+        DIC,
+        ALK,
+        S,
+        T,
+        PO4,
+        SiO3,
+        input_in_gravimetric_units,
+        pH0,
+        thermodyn=thermodyn,
+    )
+
+    if input_in_gravimetric_units:
+        # assume units are µmol/kg, convert to mol/kg
+        dic_loc = 1.0e-6 * DIC
+    else:
+        dic_loc = _mmolm3_to_molkg(DIC)
+
+    h_total = 10.0 ** (-1.0 * pH)
+    h2 = h_total * h_total
+
+    co2aq = (
+        dic_loc
+        * h2
+        / (h2 + thermodyn["k_h2co3"] * h_total + thermodyn["k_h2co3"] * thermodyn["k_hco3"])
+    )
+    if input_in_gravimetric_units:
+        return co2aq * 1.0e6  # µmol/kg
+    else:
+        return co2aq * rho_ref * 1.0e3  # convert to mmol/m^3
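Typical usage in the default volumetric units; the numbers are illustrative surface-ocean values, not reference results:

    from ocean_c_lab import co2calc

    pH = co2calc.calc_pH_from_dic_alk(DIC=2000.0, ALK=2300.0, S=35.0, T=20.0)
    co2aq = co2calc.calc_co2(DIC=2000.0, ALK=2300.0, S=35.0, T=20.0)  # mmol/m^3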
+
+
+def calc_csys_iter(
+    DIC,
+    ALK,
+    S,
+    T,
+    PO4=0.5,
+    SiO3=10.0,
+    pH0=8.0,
+    thermodyn=None,
+):
+    """
+    Compute CO2aq and pH from DIC and ALK, iterating from the prior pH estimate `pH0`
+    """
+
+    if thermodyn is None:
+        thermodyn = co2_eq_const(S, T)
+
+    dic_loc = _mmolm3_to_molkg(DIC)
+    alk_loc = _mmolm3_to_molkg(ALK)
+    phosphate_loc = _mmolm3_to_molkg(PO4)
+    silicate_loc = _mmolm3_to_molkg(SiO3)
+
+    h_total = np.vectorize(newton_safe)(
+        _calc_pH_from_dic_alk,
+        10.0 ** (-pH0),
+        10.0 ** (-co2_ph_low),
+        10.0 ** (-co2_ph_high),
+        co2_chem_tol,
+        dic=dic_loc,
+        alk=alk_loc,
+        phosphate=phosphate_loc,
+        silicate=silicate_loc,
+        **thermodyn,
+    )
+    pH = -1.0 * np.log10(h_total)
+    h2 = h_total * h_total
+
+    co2aq = (
+        (
+            dic_loc
+            * h2
+            / (h2 + thermodyn["k_h2co3"] * h_total + thermodyn["k_h2co3"] * thermodyn["k_hco3"])
+        )
+        * rho_ref
+        * 1.0e3
+    )  # convert to mmol/m^3
+
+    return co2aq, pH
+
+
+def calc_dic(
+    ALK,
+    pCO2,
+    S,
+    T,
+    PO4=0.5,
+    SiO3=10.0,
+    input_in_gravimetric_units=False,
+    pH0=8.0,
+    thermodyn=None,
+):
+
+    if thermodyn is None:
+        thermodyn = co2_eq_const(S, T)
+
+    if not input_in_gravimetric_units:
+        alk_loc = _mmolm3_to_molkg(ALK)
+        phosphate_loc = _mmolm3_to_molkg(PO4)
+        silicate_loc = _mmolm3_to_molkg(SiO3)
+    else:
+        # assume units are µmol/kg, convert to mol/kg
+        alk_loc = 1e-6 * ALK
+        phosphate_loc = 1e-6 * PO4
+        silicate_loc = 1e-6 * SiO3
+
+    pco2_loc = pCO2 * 1e-6
+    k0 = 1e-6 * co2sol(S, T, return_in_gravimetric_units=True)
+
+    h_total = np.vectorize(newton_safe)(
+        _calc_pH_from_alk_pco2,
+        10.0 ** (-pH0),
+        10.0 ** (-co2_ph_low),
+        10.0 ** (-co2_ph_high),
+        co2_chem_tol,
+        alk=alk_loc,
+        pco2=pco2_loc,
+        phosphate=phosphate_loc,
+        silicate=silicate_loc,
+        k0=k0,
+        **thermodyn,
+    )
+
+    # Solve carbonate chemistry in surface
+    h2 = h_total * h_total
+    co2aq = k0 * pco2_loc  # mol kg^{-1}
+
+    dic = co2aq * (
+        1.0 + thermodyn["k_h2co3"] / h_total + thermodyn["k_h2co3"] * thermodyn["k_hco3"] / h2
+    )
+
+    if not input_in_gravimetric_units:
+        return dic * 1.0e3 * rho_ref  # convert to mmol/m^3
+    else:
+        return dic * 1.0e6  # µmol/kg
+
+
+def co2sol(S, T, return_in_gravimetric_units=False):
+    """
+    Solubility of CO2 in sea water
+    INPUT:
+    S = salinity [PSS]
+    T = temperature [degree C]
+
+    OUTPUT: solubility of CO2 [mmol/m^3/atm] (µmol/kg/atm if return_in_gravimetric_units)
+    Weiss & Price (1980, Mar. Chem., 8, 347-359;
+    Eq 13 with table 6 values)
+    """
+
+    a = np.array([-162.8301, 218.2968, 90.9241, -1.47696])
+    b = np.array([0.025695, -0.025225, 0.0049867])
+
+    T_sc = (T + T0_Kelvin) * 0.01
+    T_sq = T_sc * T_sc
+    T_inv = 1.0 / T_sc
+    log_T = np.log(T_sc)
+    d0 = b[2] * T_sq + b[1] * T_sc + b[0]
+
+    # compute CO2 solubility in mol.kg^{-1}.atm^{-1}
+    co2_sol = np.exp(a[0] + a[1] * T_inv + a[2] * log_T + a[3] * T_sq + d0 * S)
+
+    if return_in_gravimetric_units:
+        return 1.0e6 * co2_sol  # µmol/kg/atm
+    else:
+        # convert to mmol/m^3/atm
+        return co2_sol * rho_ref * 1.0e3
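Inverting for DIC at a prescribed pCO2 follows the same pattern (illustrative values again):

    from ocean_c_lab import co2calc

    dic = co2calc.calc_dic(ALK=2300.0, pCO2=400.0, S=35.0, T=20.0)  # mmol/m^3
    k0 = co2calc.co2sol(35.0, 20.0)  # CO2 solubility, mmol/m^3/atm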
+
+
+def revelle_factor(S, T, dic, co2, pH, gamma=0.0, thermodyn=None):
+    """Compute the Revelle Factor
+
+    Reference:
+
+    CO2 in seawater: Equilibrium, kinetics, isotopes
+    Richard E. Zeebe and Dieter Wolf-Gladrow, Elsevier Oceanography Series, Volume 65
+    https://www.sciencedirect.com/bookseries/elsevier-oceanography-series/vol/65
+
+    See page 78
+
+    Parameters
+    ----------
+
+    S : numeric
+      Salinity
+
+    T : numeric
+      Temperature
+
+    dic : numeric
+      Dissolved inorganic carbon concentration (same units as `co2`; only the ratio enters)
+
+    co2 : numeric
+      Aqueous CO2 concentration (same units as `dic`)
+
+    pH : numeric
+      pH
+
+    gamma : numeric, scalar
+      gamma is the rain ratio parameter
+
+    thermodyn : dict_like, optional
+      Thermodynamic constants computed from `co2_eq_const`
+    """
+
+    if thermodyn is None:
+        thermodyn = co2_eq_const(S, T)
+
+    h = 10.0 ** (-pH)
+    h2 = h * h
+    h3 = h * h * h
+    k1 = thermodyn['k_h2co3']
+    k2 = thermodyn['k_hco3']
+    k1k2 = k1 * k2
+    kb = thermodyn['k_hbo2']
+    bt = thermodyn['boron_total']
+    kb_p_h_sq = (kb + h) ** 2
+    kw = thermodyn['k_oh']
+
+    # dDIC/d[CO2], pH = constant
+    Ds = 1 + k1 / h + k1k2 / h2
+
+    # dDIC/d[H+], [CO2] = constant
+    Dh = -co2 * (k1 / h2 + 2 * k1k2 / h3)
+
+    # dAlk/d[CO2], pH = constant
+    As = k1 / h + 2 * k1k2 / h2
+
+    # dAlk/d[H+], [CO2] = constant
+    Ah = -co2 * (k1 / h2 + 4 * k1k2 / h3) - kb * bt / kb_p_h_sq - kw / h2 - 1
+
+    # dhdco2 = -As .* ( Ah ** (-1) );
+    # ddicdco2 = Ds - Dh .* As ./ Ah;
+    #
+    # rf0 = ( ddicdco2 .* co2 ./ dic ) ** -1;
+
+    d = Dh * As - Ds * Ah
+    c0 = -(dic / co2) * (Ah / d)
+    c1 = (dic / co2) * (2 * Dh / d)
+
+    return c0 + gamma * c1
+
+
+def ddicdco2(S, T, dic, co2, pH, thermodyn=None):
+    """
+    Compute the partial derivative of DIC wrt CO2
+
+    Parameters
+    ----------
+
+    S : numeric
+      Salinity
+
+    T : numeric
+      Temperature
+
+    dic : numeric
+      Dissolved inorganic carbon concentration (same units as `co2`; only the ratio enters)
+
+    co2 : numeric
+      Aqueous CO2 concentration (same units as `dic`)
+
+    pH : numeric
+      pH
+
+    thermodyn : dict_like, optional
+      Thermodynamic constants computed from `co2_eq_const`
+
+    Returns
+    -------
+
+    dDICdCO2 : numeric
+      Partial derivative of DIC wrt CO2 (unitless)
+
+    """
+
+    if thermodyn is None:
+        thermodyn = co2_eq_const(S, T)
+
+    k1 = thermodyn['k_h2co3']
+    k2 = thermodyn['k_hco3']
+    kb = thermodyn['k_hbo2']
+    bt = thermodyn['boron_total']
+    kw = thermodyn['k_oh']
+
+    # preliminaries
+    h = 10 ** (-pH)
+    h2 = h * h
+    h3 = h * h * h
+    k1k2 = k1 * k2
+    kb_p_h_sq = (kb + h) ** 2
+
+    # dDIC/d[CO2], pH = constant
+    Ds = 1 + k1 / h + k1k2 / h2
+
+    # dDIC/d[H+], [CO2] = constant
+    Dh = -co2 * (k1 / h2 + 2 * k1k2 / h3)
+
+    # dAlk/d[CO2], pH = constant
+    As = k1 / h + 2 * k1k2 / h2
+
+    # dAlk/d[H+], [CO2] = constant
+    Ah = -co2 * (k1 / h2 + 4 * k1k2 / h3) - kb * bt / kb_p_h_sq - kw / h2 - 1
+
+    # the result
+    return Ds - Dh * As / Ah
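Both derivatives can be evaluated offline by chaining the iterative solver output; since only the dic/co2 ratio enters, volumetric inputs are fine (illustrative values):

    from ocean_c_lab import co2calc

    co2aq, pH = co2calc.calc_csys_iter(2000.0, 2300.0, 35.0, 20.0)
    rf = co2calc.revelle_factor(35.0, 20.0, 2000.0, co2aq, pH)
    ddic = co2calc.ddicdco2(35.0, 20.0, 2000.0, co2aq, pH)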
+
+
+def rf_ddicdco2(S, T, dic, co2, pH, gamma=0.0, thermodyn=None):
+    """Compute the Revelle Factor and dDIC/dCO2 in one pass
+
+    Reference:
+
+    CO2 in seawater: Equilibrium, kinetics, isotopes
+    Richard E. Zeebe and Dieter Wolf-Gladrow, Elsevier Oceanography Series, Volume 65
+    https://www.sciencedirect.com/bookseries/elsevier-oceanography-series/vol/65
+
+    See page 78
+
+    Parameters
+    ----------
+
+    S : numeric
+      Salinity
+
+    T : numeric
+      Temperature
+
+    dic : numeric
+      Dissolved inorganic carbon concentration (same units as `co2`; only the ratio enters)
+
+    co2 : numeric
+      Aqueous CO2 concentration (same units as `dic`)
+
+    pH : numeric
+      pH
+
+    gamma : numeric, scalar
+      gamma is the rain ratio parameter
+
+    thermodyn : dict_like, optional
+      Thermodynamic constants computed from `co2_eq_const`
+
+    Returns
+    -------
+
+    revelle_factor : numeric
+      The Revelle Factor
+
+    ddicdco2 : numeric
+      Partial derivative of DIC wrt CO2 (unitless)
+    """
+
+    if thermodyn is None:
+        thermodyn = co2_eq_const(S, T)
+
+    h = 10.0 ** (-pH)
+    h2 = h * h
+    h3 = h * h * h
+    k1 = thermodyn['k_h2co3']
+    k2 = thermodyn['k_hco3']
+    k1k2 = k1 * k2
+    kb = thermodyn['k_hbo2']
+    bt = thermodyn['boron_total']
+    kb_p_h_sq = (kb + h) ** 2
+    kw = thermodyn['k_oh']
+
+    # dDIC/d[CO2], pH = constant
+    Ds = 1 + k1 / h + k1k2 / h2
+
+    # dDIC/d[H+], [CO2] = constant
+    Dh = -co2 * (k1 / h2 + 2 * k1k2 / h3)
+
+    # dAlk/d[CO2], pH = constant
+    As = k1 / h + 2 * k1k2 / h2
+
+    # dAlk/d[H+], [CO2] = constant
+    Ah = -co2 * (k1 / h2 + 4 * k1k2 / h3) - kb * bt / kb_p_h_sq - kw / h2 - 1
+
+    ddicdco2 = Ds - Dh * As / Ah
+
+    d = Dh * As - Ds * Ah
+    c0 = -(dic / co2) * (Ah / d)
+    c1 = (dic / co2) * (2 * Dh / d)
+
+    revelle_factor = c0 + gamma * c1
+
+    return revelle_factor, ddicdco2
diff --git a/ocean_c_lab/csys_diag_definitions.yml b/ocean_c_lab/csys_diag_definitions.yml
new file mode 100644
index 0000000..75d9a6c
--- /dev/null
+++ b/ocean_c_lab/csys_diag_definitions.yml
@@ -0,0 +1,30 @@
+pco2:
+  attrs:
+    long_name: "pCO$_2$"
+    units: ppm
+
+fgco2:
+  units_convert_factor: 31536.0 # (86400 s/day * 365 day/yr) / (1e3 mmol/mol)
+  attrs:
+    long_name: "Air-sea CO$_2$ flux"
+    units: "mol m$^{-2}$ yr$^{-1}$"
+
+pH:
+  attrs:
+    long_name: pH
+    units: ''
+
+revelle_factor:
+  attrs:
+    long_name: Revelle Factor
+    units: ''
+
+dDICdCO2:
+  attrs:
+    long_name: dDIC/dCO$_2$
+    units: ''
+
+xkw:
+  attrs:
+    long_name: Gas transfer velocity
+    units: m/s
diff --git a/ocean_c_lab/forcing_variables.yml b/ocean_c_lab/forcing_variables.yml
new file mode 100644
index 0000000..746c65d
--- /dev/null
+++ b/ocean_c_lab/forcing_variables.yml
@@ -0,0 +1,53 @@
+u10:
+  default_value: 7.5
+  attrs:
+    units: "m/s"
+    long_name: "Wind speed"
+
+salt:
+  default_value: 35.0
+  attrs:
+    units: "psu"
+    long_name: "Salinity"
+
+temp:
+  default_value: 20.0
+  attrs:
+    units: "deg C"
+    long_name: Temperature
+
+patm:
+  default_value: 1.0
+  attrs:
+    units: "atm"
+    long_name: "Atmospheric pressure"
+
+alk_flux:
+  default_value: 0.0
+  attrs:
+    units: "mmol/m^2/s"
+    long_name: "Alkalinity surface forcing"
+
+fice:
+  default_value: 0.0
+  attrs:
+    units: "fraction"
+    long_name: "Ice fraction"
+
+h:
+  default_value: 100.0
+  attrs:
+    units: "m"
+    long_name: "Mixed layer depth (MLD)"
+
+Xco2atm:
+  default_value: 425.0
+  attrs:
+    units: "ppm"
+    long_name: "Atmospheric CO2 mole fraction"
+
+ventilation_flow:
+  default_value: 0.0
+  attrs:
+    units: "m^3/s"
+    long_name: "Ventilation flow"
+
+area:
+  default_value: 10000000000.0
+  attrs:
+    units: m^2
+    long_name: area
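These defaults feed `gen_forcing_dataset` (defined in box_models.py above), which broadcasts scalars onto a daily time axis; any entry can be overridden by keyword (values illustrative):

    from ocean_c_lab.box_models import gen_forcing_dataset

    forcing = gen_forcing_dataset(nday=365, Xco2atm=420.0, u10=6.0)
    print(forcing['Xco2atm'].attrs['units'])  # ppm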
diff --git a/ocean_c_lab/gasex.py b/ocean_c_lab/gasex.py
new file mode 100644
index 0000000..368a807
--- /dev/null
+++ b/ocean_c_lab/gasex.py
@@ -0,0 +1,69 @@
+import numpy as np
+
+
+# gas exchange coefficient of Wanninkhof (2014)
+xkw_coef_cm_per_hr = 0.251
+
+# (cm/hr (m/s)^-2) --> (m/s (m/s)^-2); 1 cm/hr = 1e-2 m / 3600 s
+xkw_coef = xkw_coef_cm_per_hr / 3.6e5
+
+
+def gas_transfer_velocity(u10, temp):
+    """
+    Compute gas transfer velocity:
+    k = 0.251 * <u10^2> * (Sc/660)^(-1/2) (Wanninkhof 2014), converted to m/s
+
+    Parameters
+    ----------
+
+    u10 : numeric
+      Wind speed [m/s]
+
+    temp : numeric
+      Sea surface temperature [°C]
+
+    Returns
+    -------
+
+    k : numeric
+      Gas transfer velocity [m/s]
+    """
+    sc = schmidt_co2(temp)
+    u10sq = u10 * u10
+    return xkw_coef * u10sq * np.sqrt(660.0 / sc)  # (Sc/660)^(-1/2)
+
+
+def schmidt_co2(sst):
+    """
+    Compute Schmidt number of CO2 in seawater as function of SST.
+
+    Range of validity of fit is -2:40 °C
+    Reference:
+    Wanninkhof 2014, Relationship between wind speed
+    and gas exchange over the ocean revisited,
+    Limnol. Oceanogr.: Methods, 12,
+    doi:10.4319/lom.2014.12.351
+
+    Check value at 20°C = 668.344
+
+    Parameters
+    ----------
+
+    sst : numeric
+      Temperature
+
+    Returns
+    -------
+
+    sc : numeric
+      Schmidt number
+    """
+    a = 2116.8
+    b = -136.25
+    c = 4.7353
+    d = -0.092307
+    e = 0.0007555
+
+    # enforce bounds
+    sst_loc = np.where(sst < -2.0, -2.0, np.where(sst > 40.0, 40.0, sst))
+
+    return a + sst_loc * (b + sst_loc * (c + sst_loc * (d + sst_loc * e)))
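A quick check against the Schmidt-number value documented above:

    from ocean_c_lab import gasex

    sc = gasex.schmidt_co2(20.0)                # 668.344
    k = gasex.gas_transfer_velocity(7.5, 20.0)  # m/s, at the default wind speed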
diff --git a/ocean_c_lab/glodap.py b/ocean_c_lab/glodap.py
new file mode 100644
index 0000000..b9b4590
--- /dev/null
+++ b/ocean_c_lab/glodap.py
@@ -0,0 +1,184 @@
+import os
+from glob import glob
+from subprocess import check_call
+import urllib.request
+
+import numpy as np
+import xarray as xr
+
+cache_dir = os.environ.get('TMPDIR', '/tmp')
+
+known_products = [
+    "GLODAPv2.2016b_MappedClimatologies",
+]
+
+depth_bnds = xr.DataArray(
+    np.array(
+        [
+            [-5.0, 5.0],
+            [5.0, 15.0],
+            [15.0, 25.0],
+            [25.0, 40.0],
+            [40.0, 62.5],
+            [62.5, 87.5],
+            [87.5, 112.5],
+            [112.5, 137.5],
+            [137.5, 175.0],
+            [175.0, 225.0],
+            [225.0, 275.0],
+            [275.0, 350.0],
+            [350.0, 450.0],
+            [450.0, 550.0],
+            [550.0, 650.0],
+            [650.0, 750.0],
+            [750.0, 850.0],
+            [850.0, 950.0],
+            [950.0, 1050.0],
+            [1050.0, 1150.0],
+            [1150.0, 1250.0],
+            [1250.0, 1350.0],
+            [1350.0, 1450.0],
+            [1450.0, 1625.0],
+            [1625.0, 1875.0],
+            [1875.0, 2250.0],
+            [2250.0, 2750.0],
+            [2750.0, 3250.0],
+            [3250.0, 3750.0],
+            [3750.0, 4250.0],
+            [4250.0, 4750.0],
+            [4750.0, 5250.0],
+            [5250.0, 5750.0],
+        ]
+    ),
+    dims=("depth", "bnds"),
+)
+
+
+def _ensure_datafiles(product_name="GLODAPv2.2016b_MappedClimatologies"):
+    """
+    get data files from website and return dictionary
+
+    product_name='GLODAPv2.2016b_MappedClimatologies'
+    Variables returned = 'Cant', 'NO3', 'OmegaA', 'OmegaC', 'PI_TCO2', 'PO4',
+                         'TAlk', 'TCO2', 'oxygen', 'pHts25p0', 'pHtsinsitutp',
+                         'salinity', 'silicate', 'temperature'
+
+    Alternative to default:
+    product_name='GLODAPv2_Mapped_Climatologies'
+    Variables returned = 'OmegaAinsitu', 'OmegaCinsitu', 'nitrate', 'oxygen',
+                         'pHts25p0', 'pHtsinsitu', 'phosphate', 'salinity',
+                         'silicate', 'talk', 'tco2', 'theta'
+    """
+
+    url = "https://www.nodc.noaa.gov/archive/arc0107/0162565/2.2/data/0-data/mapped"
+
+    filename = (
+        "GLODAPv2_Mapped_Climatology.tar.gz"
+        if product_name == "GLODAPv2_Mapped_Climatologies"
+        else f"{product_name}.tar.gz"
+    )
+
+    files = sorted(glob(f"{cache_dir}/{product_name}/*.nc"))
+    if not files:
+        os.makedirs(cache_dir, exist_ok=True)
+        local_file = f"{cache_dir}/{filename}"
+        urllib.request.urlretrieve(f"{url}/{filename}", local_file)
+        check_call(["gunzip", local_file])
+        check_call(["tar", "-xvf", local_file.replace(".gz", ""), "-C", cache_dir])
+        files = sorted(glob(f"{cache_dir}/{product_name}/*.nc"))
+
+    return {f.split(".")[-2]: f for f in files}
+
+
+def open_glodap(product="GLODAPv2.2016b_MappedClimatologies"):
+    """return GLODAP dataset"""
+    assert product in known_products
+
+    obs_files = _ensure_datafiles(product)
+    ds_list = []
+    for varname, file_in in obs_files.items():
+        ds = xr.open_dataset(file_in)
+        depth = "Depth" if "Depth" in ds else "depth"
+        ds_list.append(ds[[depth, varname]])
+    ds = xr.merge(ds_list)
+    ds = ds.rename({"Depth": "depth"})
+    ds = ds.rename({"depth_surface": "depth"}).set_coords("depth")
+    ds = ds.rename(
+        {
+            "TAlk": "ALK",
+            "TCO2": "DIC",
+            "oxygen": "O2",
+            "silicate": "SiO3",
+            "temperature": "TEMP",
+            "salinity": "SALT",
+        }
+    )
+    for v in ds.data_vars:
+        if 'units' in ds[v].attrs and ds[v].attrs['units'] == 'micro-mol kg-1':
+            ds[v].attrs['units'] = 'µmol kg$^{-1}$'
+
+    ds.DIC.attrs['long_name'] = 'DIC'
+    ds.ALK.attrs['long_name'] = 'Alkalinity'
+
+    ds["area"] = compute_grid_area(ds)
+    ds["depth_bnds"] = depth_bnds
+    ds["dz"] = depth_bnds.diff("bnds").squeeze()
+    if 'Comment' in ds.attrs:
+        del ds.attrs['Comment']
+    return ds
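First use downloads and unpacks the archive into the cache directory, so it requires network access; subsequent calls reuse the cached files:

    from ocean_c_lab import glodap

    ds = glodap.open_glodap()
    surface_dic = ds.DIC.isel(depth=0)  # µmol/kg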
+ + """ + + radius_earth = 6.37122e6 # m, radius of Earth + area_earth = 4.0 * np.pi * radius_earth ** 2 # area of earth [m^2]e + + lon_name = "lon" + lat_name = "lat" + + weights = lat_weights_regular_grid(ds[lat_name]) + area = weights + 0.0 * ds[lon_name] # add 'lon' dimension + area = (area_earth / area.sum(dim=(lat_name, lon_name))) * area + + if check_total: + np.testing.assert_approx_equal(np.sum(area), area_earth) + + return xr.DataArray( + area, dims=(lat_name, lon_name), attrs={"units": "m^2", "long_name": "area"} + ) diff --git a/ocean_c_lab/state_variables.yml b/ocean_c_lab/state_variables.yml new file mode 100644 index 0000000..f3ea9b6 --- /dev/null +++ b/ocean_c_lab/state_variables.yml @@ -0,0 +1,6 @@ +dic: + long_name: Dissolved inorganic carbon + units: mmol/m^3 +alk: + long_name: Alkalinity + units: meq/m^3 diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..fff3350 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,7 @@ +[tool.black] +line-length = 100 +target-version = ['py38'] +skip-string-normalization = true + +[build-system] +requires = ["setuptools>=45", "wheel", "setuptools_scm>=6.2"] diff --git a/readthedocs.yml b/readthedocs.yml new file mode 100644 index 0000000..68e5895 --- /dev/null +++ b/readthedocs.yml @@ -0,0 +1,7 @@ +version: 2 +conda: + environment: ci/environment-docs.yml +build: + os: 'ubuntu-20.04' + tools: + python: 'mambaforge-4.10' diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..e69de29 diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..0f5b2f3 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,23 @@ +[flake8] +exclude = docs +ignore = E203,E266,E501,W503,E722,E402,C901,E731 +max-line-length = 100 +max-complexity = 18 +select = B,C,E,F,W,T4,B9 + + +[isort] +known_first_party=ocean_c_lab +known_third_party= +multi_line_output=3 +include_trailing_comma=True +force_grid_wrap=0 +combine_as_imports=True +line_length=100 +skip= + docs/source/conf.py + setup.py + +[tool:pytest] +console_output_style = count +addopts = --cov=./ --cov-report=xml --verbose diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..7215b23 --- /dev/null +++ b/setup.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python3 + +"""The setup script.""" + +from setuptools import find_packages, setup + +with open('requirements.txt') as f: + install_requires = f.read().strip().split('\n') + +with open('README.md') as f: + long_description = f.read() + + +CLASSIFIERS = [ + 'Development Status :: 4 - Beta', + 'License :: OSI Approved :: Apache Software License', + 'Operating System :: OS Independent', + 'Intended Audience :: Science/Research', + 'Programming Language :: Python', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + 'Topic :: Scientific/Engineering', +] + +setup( + name='ocean-C-lab', + description='Tools for ocean carbon cycle calcs', + long_description=long_description, + long_description_content_type='text/markdown', + python_requires='>=3.8', + maintainer='[C]worthy', + maintainer_email='mclong@ucar.edu', + classifiers=CLASSIFIERS, + url='https://ocean-c-lab.readthedocs.io', + project_urls={ + 'Documentation': 'https://ocean-c-lab.readthedocs.io', + 'Source': 'https://github.com/c-worthy-ocean/ocean-c-lab', + 'Tracker': 'https://github.com/c-worthy-ocean/ocean-c-lab/issues', + }, + packages=find_packages(exclude=('tests',)), + package_dir={'ocean-c-lab': 'ocean-c-lab'}, + 
diff --git a/tests/test_box_model_simulation.py b/tests/test_box_model_simulation.py
new file mode 100644
index 0000000..e6d1faa
--- /dev/null
+++ b/tests/test_box_model_simulation.py
@@ -0,0 +1,21 @@
+import ocean_c_lab
+
+
+class dummy_class(object):
+    def __init__(self, **kwargs):
+        pass
+
+    def compute_tendencies(self):
+        pass
+
+
+def test_init():
+    bm_sim = ocean_c_lab.box_model_simulation(
+        dummy_class,
+    )
+    assert isinstance(bm_sim, ocean_c_lab.box_model_simulation)
+
+
+def test_single_box_init():
+    obj = ocean_c_lab.single_box()
+    assert isinstance(obj, ocean_c_lab.single_box)
diff --git a/tests/test_gasex.py b/tests/test_gasex.py
new file mode 100644
index 0000000..7a56172
--- /dev/null
+++ b/tests/test_gasex.py
@@ -0,0 +1,21 @@
+import numpy as np
+
+import ocean_c_lab.gasex as gasex
+
+
+def test_schmidt_co2():
+    # check value
+    np.testing.assert_almost_equal(gasex.schmidt_co2(20.0), 668.344)
+
+    # values outside the bounds [-2.0, 40.0] should resolve to the function value at the bounds
+    np.testing.assert_equal(
+        gasex.schmidt_co2(np.array([-3.0, 42.0])),
+        gasex.schmidt_co2(np.array([-2.0, 40.0])),
+    )
+
+
+def test_gas_transfer_velocity():
+    np.testing.assert_almost_equal(
+        gasex.gas_transfer_velocity(np.array([1.0, 5.0, 10.0]), 20.0),
+        np.array([6.9285629e-07, 1.7321407e-05, 6.9285629e-05]),
+    )