diff --git a/.circleci/config.yml b/.circleci/config.yml index 5957a5e7e3..82492e724f 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -158,7 +158,7 @@ jobs: test_installation_from_source_test_mode: # Test installation from source docker: - - image: condaforge/mambaforge:latest + - image: condaforge/miniforge3:latest resource_class: large steps: - test_installation_from_source: @@ -167,7 +167,7 @@ jobs: test_installation_from_source_develop_mode: # Test development installation docker: - - image: condaforge/mambaforge:latest + - image: condaforge/miniforge3:latest resource_class: large steps: - test_installation_from_source: @@ -179,7 +179,7 @@ jobs: # purpose of this test to discover backward-incompatible changes early on in # the development cycle. docker: - - image: condaforge/mambaforge:latest + - image: condaforge/miniforge3:latest resource_class: large steps: - run: @@ -216,8 +216,8 @@ jobs: conda activate esmvaltool mkdir -p ~/climate_data esmvaltool config get_config_user - echo "search_esgf: when_missing" >> ~/.esmvaltool/config-user.yml - cat ~/.esmvaltool/config-user.yml + echo "search_esgf: when_missing" >> ~/.config/esmvaltool/config-user.yml + cat ~/.config/esmvaltool/config-user.yml for recipe in esmvaltool/recipes/testing/recipe_*.yml; do esmvaltool run "$recipe" done @@ -233,7 +233,7 @@ jobs: build_documentation: # Test building documentation docker: - - image: condaforge/mambaforge:latest + - image: condaforge/miniforge3:latest resource_class: medium steps: - checkout @@ -257,7 +257,7 @@ jobs: test_installation_from_conda: # Test conda package installation docker: - - image: condaforge/mambaforge:latest + - image: condaforge/miniforge3:latest resource_class: large steps: - run: diff --git a/.github/workflows/create-condalock-file.yml b/.github/workflows/create-condalock-file.yml index a88f919c17..7babd2a456 100644 --- a/.github/workflows/create-condalock-file.yml +++ b/.github/workflows/create-condalock-file.yml @@ -27,9 +27,8 @@ 
jobs: with: auto-update-conda: true activate-environment: esmvaltool-fromlock - python-version: "3.11" + python-version: "3.12" miniforge-version: "latest" - miniforge-variant: Mambaforge use-mamba: true - name: Show conda config run: | @@ -37,7 +36,9 @@ jobs: conda --version # setup-miniconda@v3 installs an old conda and mamba # forcing a modern mamba updates both mamba and conda - conda install -c conda-forge "mamba>=1.4.8" + # unpin mamba after conda-lock=3 release + # see github.com/ESMValGroup/ESMValTool/issues/3782 + conda install -c conda-forge "mamba>=1.4.8,<2" conda config --show-sources conda config --show conda --version diff --git a/.github/workflows/install-from-conda.yml b/.github/workflows/install-from-conda.yml index b08390040d..185add02a8 100644 --- a/.github/workflows/install-from-conda.yml +++ b/.github/workflows/install-from-conda.yml @@ -20,14 +20,13 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.10", "3.11"] + python-version: ["3.10", "3.11", "3.12"] name: Linux Python ${{ matrix.python-version }} steps: - uses: conda-incubator/setup-miniconda@v3 with: python-version: ${{ matrix.python-version }} miniforge-version: "latest" - miniforge-variant: Mambaforge use-mamba: true - run: mkdir -p conda_install_linux_artifacts_python_${{ matrix.python-version }} - name: Record versions diff --git a/.github/workflows/install-from-condalock-file.yml b/.github/workflows/install-from-condalock-file.yml index a03e297a80..0f11cddc6e 100644 --- a/.github/workflows/install-from-condalock-file.yml +++ b/.github/workflows/install-from-condalock-file.yml @@ -30,7 +30,7 @@ jobs: runs-on: "ubuntu-latest" strategy: matrix: - python-version: ["3.10", "3.11"] + python-version: ["3.10", "3.11", "3.12"] fail-fast: false name: Linux Python ${{ matrix.python-version }} steps: diff --git a/.github/workflows/install-from-source.yml b/.github/workflows/install-from-source.yml index 3d7456337b..018fcb2a0a 100644 --- a/.github/workflows/install-from-source.yml 
+++ b/.github/workflows/install-from-source.yml @@ -19,7 +19,7 @@ jobs: runs-on: "ubuntu-latest" strategy: matrix: - python-version: ["3.10", "3.11"] + python-version: ["3.10", "3.11", "3.12"] fail-fast: false name: Linux Python ${{ matrix.python-version }} steps: @@ -32,7 +32,6 @@ jobs: environment-file: environment.yml python-version: ${{ matrix.python-version }} miniforge-version: "latest" - miniforge-variant: Mambaforge use-mamba: true - run: mkdir -p source_install_linux_artifacts_python_${{ matrix.python-version }} - name: Record versions diff --git a/.github/workflows/pypi-build-and-deploy.yml b/.github/workflows/pypi-build-and-deploy.yml index 4dff1e4d69..d6df3626e6 100644 --- a/.github/workflows/pypi-build-and-deploy.yml +++ b/.github/workflows/pypi-build-and-deploy.yml @@ -17,10 +17,10 @@ jobs: - uses: actions/checkout@v4 with: fetch-depth: 0 - - name: Set up Python 3.11 + - name: Set up Python 3.12 uses: actions/setup-python@v1 with: - python-version: "3.11" + python-version: "3.12" - name: Install pep517 run: >- python -m diff --git a/.github/workflows/run-tests-monitor.yml b/.github/workflows/run-tests-monitor.yml index 168d8940e5..1fc657e387 100644 --- a/.github/workflows/run-tests-monitor.yml +++ b/.github/workflows/run-tests-monitor.yml @@ -23,7 +23,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.10", "3.11"] + python-version: ["3.10", "3.11", "3.12"] name: Linux Python ${{ matrix.python-version }} steps: - uses: actions/checkout@v4 @@ -35,7 +35,6 @@ jobs: environment-file: environment.yml python-version: ${{ matrix.python-version }} miniforge-version: "latest" - miniforge-variant: Mambaforge use-mamba: true - run: mkdir -p test_linux_artifacts_python_${{ matrix.python-version }} - name: Record versions @@ -67,7 +66,7 @@ jobs: runs-on: "macos-latest" strategy: matrix: - python-version: ["3.10", "3.11"] + python-version: ["3.10", "3.11", "3.12"] architecture: ["x64"] # need to force Intel, arm64 builds have issues fail-fast: false 
name: OSX Python ${{ matrix.python-version }} @@ -82,7 +81,6 @@ jobs: environment-file: environment_osx.yml python-version: ${{ matrix.python-version }} miniforge-version: "latest" - miniforge-variant: Mambaforge use-mamba: true # - name: Install libomp with homebrew # run: brew install libomp diff --git a/.github/workflows/test-development.yml b/.github/workflows/test-development.yml index 2dba36577d..f6718a866e 100644 --- a/.github/workflows/test-development.yml +++ b/.github/workflows/test-development.yml @@ -26,7 +26,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.10", "3.11"] + python-version: ["3.10", "3.11", "3.12"] name: Linux Python ${{ matrix.python-version }} steps: - uses: actions/checkout@v4 @@ -38,7 +38,6 @@ jobs: environment-file: environment.yml python-version: ${{ matrix.python-version }} miniforge-version: "latest" - miniforge-variant: Mambaforge use-mamba: true - run: mkdir -p develop_test_linux_artifacts_python_${{ matrix.python-version }} - name: Record versions diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index f3822e5449..8b3c9ceb39 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -20,7 +20,7 @@ jobs: strategy: fail-fast: false matrix: - python-version: ["3.10", "3.11"] + python-version: ["3.10", "3.11", "3.12"] name: Linux Python ${{ matrix.python-version }} steps: - uses: actions/checkout@v4 @@ -32,7 +32,6 @@ jobs: environment-file: environment.yml python-version: ${{ matrix.python-version }} miniforge-version: "latest" - miniforge-variant: Mambaforge use-mamba: true - run: mkdir -p test_linux_artifacts_python_${{ matrix.python-version }} - name: Record versions @@ -45,6 +44,8 @@ jobs: run: conda list - name: Install ESMValTool run: pip install -e .[develop] 2>&1 | tee test_linux_artifacts_python_${{ matrix.python-version }}/install.txt + - name: Examine conda environment + run: conda list - name: Install Julia dependencies run: esmvaltool install Julia - name: Export 
Python minor version @@ -72,7 +73,7 @@ jobs: runs-on: "macos-latest" strategy: matrix: - python-version: ["3.10", "3.11"] + python-version: ["3.10", "3.11", "3.12"] architecture: ["x64"] # need to force Intel, arm64 builds have issues fail-fast: false name: OSX Python ${{ matrix.python-version }} @@ -87,7 +88,6 @@ jobs: environment-file: environment_osx.yml python-version: ${{ matrix.python-version }} miniforge-version: "latest" - miniforge-variant: Mambaforge use-mamba: true # - name: Install libomp with homebrew # run: brew install libomp diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 071686d373..974ac2ee78 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -7,20 +7,13 @@ version: 2 # Set the version of Python and other tools you might need build: - os: ubuntu-22.04 + os: ubuntu-lts-latest tools: - # updated and deployed from Aug 1, 2023 - python: "mambaforge-22.9" + # try miniforge3 when available? see github.com/ESMValGroup/ESMValTool/issues/3779 + # DO NOT use mambaforge-*; that is currently sunsetted + python: "miniconda-latest" jobs: - pre_create_environment: - # update mamba just in case - - mamba update --yes --quiet --name=base mamba 'zstd=1.5.2' - - mamba --version - - mamba list --name=base post_create_environment: - - conda run -n ${CONDA_DEFAULT_ENV} mamba list - # use conda run executable wrapper to have all env variables - - conda run -n ${CONDA_DEFAULT_ENV} mamba --version - conda run -n ${CONDA_DEFAULT_ENV} pip install . 
--no-deps # Declare the requirements required to build your docs diff --git a/CITATION.cff b/CITATION.cff index f66cd861d5..ab158d2436 100644 --- a/CITATION.cff +++ b/CITATION.cff @@ -291,6 +291,11 @@ authors: family-names: Phillips given-names: Adam orcid: "https://orcid.org/0000-0003-4859-8585" + - + affiliation: "ACCESS-NRI, Australia" + family-names: Proft + given-names: Max + orcid: "https://orcid.org/0009-0003-1611-9516" - affiliation: "University of Arizona, USA" family-names: Russell diff --git a/README.md b/README.md index b196f7fbb8..4ac7d694ee 100644 --- a/README.md +++ b/README.md @@ -6,11 +6,11 @@ [![CircleCI](https://circleci.com/gh/ESMValGroup/ESMValTool/tree/main.svg?style=svg)](https://circleci.com/gh/ESMValGroup/ESMValTool/tree/main) [![Test in Full Development Mode](https://github.com/ESMValGroup/ESMValTool/actions/workflows/test-development.yml/badge.svg)](https://github.com/ESMValGroup/ESMValTool/actions/workflows/test-development.yml) [![Codacy Badge](https://app.codacy.com/project/badge/Grade/79bf6932c2e844eea15d0fb1ed7e415c)](https://app.codacy.com/gh/ESMValGroup/ESMValTool/dashboard?utm_source=gh&utm_medium=referral&utm_content=&utm_campaign=Badge_grade) -[![Docker Build Status](https://img.shields.io/docker/cloud/build/esmvalgroup/esmvaltool.svg)](https://hub.docker.com/r/esmvalgroup/esmvaltool/) +[![Docker Build Status](https://img.shields.io/docker/automated/esmvalgroup/esmvaltool)](https://hub.docker.com/r/esmvalgroup/esmvaltool/) [![Anaconda-Server Badge](https://img.shields.io/conda/vn/conda-forge/ESMValTool?color=blue&label=conda-forge&logo=conda-forge&logoColor=white)](https://anaconda.org/conda-forge/esmvaltool) ![stand with Ukraine](https://badgen.net/badge/stand%20with/UKRAINE/?color=0057B8&labelColor=FFD700) -![esmvaltoollogo](https://raw.githubusercontent.com/ESMValGroup/ESMValTool/main/doc/sphinx/source/figures/ESMValTool-logo-2.png) 
+![esmvaltoollogo](https://raw.githubusercontent.com/ESMValGroup/ESMValTool/main/doc/sphinx/source/figures/ESMValTool-logo-2-glow.png) - [**Documentation**](https://docs.esmvaltool.org/en/latest/) - [**ESMValTool Website**](https://www.esmvaltool.org/) diff --git a/conda-linux-64.lock b/conda-linux-64.lock index af2625f1b7..a3ad9b680c 100644 --- a/conda-linux-64.lock +++ b/conda-linux-64.lock @@ -1,48 +1,59 @@ # Generated by conda-lock. # platform: linux-64 -# input_hash: 6e839dcc54104cc7c8d7d0b0165df84d0b927a0baf129e4169a57ac283fe3f98 +# input_hash: fafc256cb40a5d6ebcbc180cb08e91d1bd9ca77a04c258188faad5c05c60f1b9 @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/_py-xgboost-mutex-2.0-gpu_0.tar.bz2#7702188077361f43a4d61e64c694f850 https://conda.anaconda.org/conda-forge/noarch/_r-mutex-1.0.1-anacondar_1.tar.bz2#19f9db5f4f1b7f5ef5f6d67207f25f38 -https://conda.anaconda.org/conda-forge/noarch/_sysroot_linux-64_curr_repodata_hack-3-h69a702a_16.conda#1c005af0c6ff22814b7c52ee448d4bea https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda#c27d1c142233b5bc9ca570c6e2e0c244 https://conda.anaconda.org/conda-forge/noarch/cuda-version-11.8-h70ddcb2_3.conda#670f0e1593b8c1d84f57ad5fe5256799 https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb -https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_2.conda#cbbe59391138ea5ad3658c76912e147f -https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-hf3520f5_7.conda#b80f2f396ca2c28b8c14c437a4ed1e74 
-https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.4-ha770c72_0.conda#61c94057aaa5ae6145137ce1fddb2c04 +https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda#49023d73832ef61042f6a237cb2687e7 +https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-3.10.0-he073ed8_18.conda#ad8527bf134a90e1c9ed35fa0b64318c +https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.5-ha770c72_0.conda#2889e6b9c666c3a564ab90cedc5832fd https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda#d8d7293c5b37f39b2ac32940621c6592 -https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.11-5_cp311.conda#139a8d40c8a2f430df31048949e450de -https://conda.anaconda.org/conda-forge/noarch/tzdata-2024a-h8827d51_1.conda#8bfdead4e0fff0383ae4c9c50d0531bd -https://conda.anaconda.org/conda-forge/linux-64/xorg-imake-1.0.7-0.tar.bz2#23acfc5a339a6a34cc2241f64e4111be +https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.12-5_cp312.conda#0424ae29b104430108f5218a66db7260 +https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda#8ac3367aafb1cc0a068483c580af8015 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 -https://conda.anaconda.org/conda-forge/noarch/kernel-headers_linux-64-3.10.0-h4a8ded7_16.conda#ff7f38675b226cfb855aebfc32a13e31 -https://conda.anaconda.org/conda-forge/noarch/libgcc-devel_linux-64-14.1.0-h5d3d1c9_101.conda#713834677de996ac1bc1b0b305ba46ba -https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.1.0-h77fa898_1.conda#23c255b008c4f2ae008f81edcabaca89 -https://conda.anaconda.org/conda-forge/noarch/libstdcxx-devel_linux-64-14.1.0-h5d3d1c9_101.conda#e007246a554aaf42f73fbfd4be8db3e4 +https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_2.conda#048b02e3962f066da18efe3a21b77672 +https://conda.anaconda.org/conda-forge/noarch/libgcc-devel_linux-64-14.2.0-h41c2201_101.conda#fb126e22f5350c15fec6ddbd062f4871 
+https://conda.anaconda.org/conda-forge/linux-64/libgomp-14.2.0-h77fa898_1.conda#cc3573974587f12dda90d96e3e55a702 +https://conda.anaconda.org/conda-forge/noarch/libstdcxx-devel_linux-64-14.2.0-h41c2201_101.conda#60b9a16fd147f7184b5a964aa08f3b0f +https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.17-h4a8ded7_18.conda#0ea96f90a10838f58412aa84fdd9df09 https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2#73aaf86a425cc6e73fcf236a5a46396d +https://conda.anaconda.org/conda-forge/linux-64/binutils_impl_linux-64-2.43-h4bf12b8_2.conda#cf0c5521ac2a20dfa6c662a4009eeef6 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab -https://conda.anaconda.org/conda-forge/noarch/sysroot_linux-64-2.17-h4a8ded7_16.conda#223fe8a3ff6d5e78484a9d58eb34d055 -https://conda.anaconda.org/conda-forge/linux-64/binutils_impl_linux-64-2.40-ha1999f0_7.conda#3f840c7ed70a96b5ebde8044b2f36f32 -https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.1.0-h77fa898_1.conda#002ef4463dd1e2b44a94a4ace468f5d2 +https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda#3cb76c3f10d3bc7f1105b2fc9db984df +https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.28-hb9d3cd8_0.conda#1b53af320b24547ce0fb8196d2604542 +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.3-heb4867d_0.conda#09a6c610d002e54e18353c06ef61a253 https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda#41b599ed2b02abcfdd84302bff174b23 -https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.3-h5888daf_0.conda#59f4c43bb1b5ef1c71946ff2cbf59524 -https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.1.0-h69a702a_1.conda#1efc0ad219877a73ef977af7dbb51f17 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.1.0-hc5f4f2c_1.conda#10a0cef64b784d6ab6da50ebca4e984d 
-https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.1.0-hc0a3c3a_1.conda#9dbb9699ea467983ba8a4ba89b08b066 -https://conda.anaconda.org/conda-forge/linux-64/make-4.4.1-hb9d3cd8_1.conda#cd0fbfe1f70b630a94e40007dae3328d +https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda#db833e03127376d461e1e13e76f09b6c +https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda#e39480b9ca41323497b05492a63bc35b +https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda#9822b874ea29af082e5d36098d25427d +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda#234a5554c53625688d51062645337328 +https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 +https://conda.anaconda.org/conda-forge/linux-64/make-4.4.1-hb9d3cd8_2.conda#33405d2a66b1411db9f7242c8b97c9e7 https://conda.anaconda.org/conda-forge/linux-64/openssl-3.3.2-hb9d3cd8_0.conda#4d638782050ab6faa27275bed57e9b4e +https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e https://conda.anaconda.org/conda-forge/linux-64/tzcode-2024b-hb9d3cd8_0.conda#db124840386e1f842f93372897d1b857 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.9.15-hd590300_0.conda#ad8955a300fd09e97e76c38638ac7157 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hb9d3cd8_1.conda#19608a9656912805b2b9a2f6bd257b04 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hb9d3cd8_1.conda#77cbc488235ebbaab2b6e912d3934bae +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb9d3cd8_0.conda#8035c64cb77ed555e3f150b7b3972480 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-hb9d3cd8_1004.conda#bc4cd53a083b6720d61a1519a1900878 +https://conda.anaconda.org/conda-forge/linux-64/xorg-xorgproto-2024.1-hb9d3cd8_1.conda#7c21106b851ec72c037b162c216d8f05 
+https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.7.4-hfd43aa1_1.conda#f301eb944d297fc879c441fffe461d8a +https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.19-h756ea98_1.conda#5e08c385a1b8a79b52012b74653bbb99 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.19-h756ea98_3.conda#bfe6623096906d2502c78ccdbfc3bc7a +https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.18-h756ea98_11.conda#eadcc12bedac44f13223a2909c0e5bcc https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda#62ee74e96c5ebb0af99386de58cf9553 -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.33.1-heb4867d_0.conda#0d3c60291342c0c025db231353376dfb https://conda.anaconda.org/conda-forge/linux-64/dav1d-1.2.1-hd590300_0.conda#418c6ca5929a611cbd69204907a83995 -https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.3-h5888daf_0.conda#6595440079bed734b113de44ffd3cd0a +https://conda.anaconda.org/conda-forge/linux-64/expat-2.6.4-h5888daf_0.conda#1d6afef758879ef5ee78127eb4cd2c4a https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/gettext-tools-0.22.5-he02047a_3.conda#fcd2016d1d299f654f81021e27496818 +https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-h5888daf_1005.conda#d411fc29e338efb48c5fd4576d71d881 +https://conda.anaconda.org/conda-forge/linux-64/ghostscript-10.04.0-h5888daf_0.conda#3b8d7a2df810ad5109a51472b23dbd8e https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.2-hd590300_0.conda#3bf7b9fd5a7136126e0234db4b87c8b6 https://conda.anaconda.org/conda-forge/linux-64/jbig-2.1-h7f98852_2003.tar.bz2#1aa0cee79792fa97b7ff4545110b60bf https://conda.anaconda.org/conda-forge/linux-64/json-c-0.17-h1220068_1.conda#f8f0f0c4338bad5c34a4e9e11460481d @@ -50,490 +61,459 @@ https://conda.anaconda.org/conda-forge/linux-64/jxrlib-1.1-hd590300_3.conda#5aea 
https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda#9566f0bd264fbd463002e759b8a82401 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda#06f70867945ea6a84d35836af780f1de -https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.19-hd590300_0.conda#1635570038840ee3f9c71d22aa5b8b6d +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.21-h4bc722e_0.conda#36ce76665bf67f5aac36be7a0d21b7f3 https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 https://conda.anaconda.org/conda-forge/linux-64/libgettextpo-0.22.5-he02047a_3.conda#efab66b82ec976930b96d62a976de8e7 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.1.0-h69a702a_1.conda#591e631bc1ae62c64f2ab4f66178c097 +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.2.0-h69a702a_1.conda#f1fd30127802683586f768875127a987 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda#d66573916ffcf376178462f1b61c941e -https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-hd590300_1.conda#323e90742f0f48fc22bea908735f55e6 -https://conda.anaconda.org/conda-forge/linux-64/libnl-3.10.0-h4bc722e_0.conda#6221e705f55cf0533f0777ae54ad86c6 +https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 +https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.4-h7f98852_1002.tar.bz2#e728e874159b042d92b90238a3cb0dc2 
https://conda.anaconda.org/conda-forge/linux-64/libopenlibm4-0.8.1-hd590300_1.conda#e6af610e01d04927a5060c95ce4e0875 -https://conda.anaconda.org/conda-forge/linux-64/libsanitizer-14.1.0-hcba0ae0_1.conda#b56e6664bb9a57a29fd91df582223409 +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.44-hadc24fc_0.conda#f4cc49d7aa68316213e4b12be35308d1 +https://conda.anaconda.org/conda-forge/linux-64/libsanitizer-14.2.0-h2a3dede_1.conda#160623b9425f5c04941586da43bd1a9c https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.20-h4ab18f5_0.conda#a587892d3c13b6621a6091be690dbca2 -https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.1.0-h4852527_1.conda#bd2598399a70bb86d8218e95548d735e -https://conda.anaconda.org/conda-forge/linux-64/libtool-2.4.7-he02047a_1.conda#2ca22c3c01cf286675450d3c455c717e +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.0-hadc24fc_1.conda#b6f02b52a174e612e89548f4663ce56a +https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe +https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.8.0-h166bdaf_0.tar.bz2#ede4266dc02e875fe1ea77b25dd43747 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.2-hd590300_1.conda#049b7df8bae5e184d1de42cdf64855f8 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.4.0-hd590300_0.conda#b26e8aa824079e1be0294e7152ca4559 +https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 https://conda.anaconda.org/conda-forge/linux-64/libxcrypt-4.4.36-hd590300_1.conda#5aa797f8787fe7a17d1b0821485b5adc 
-https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-h4ab18f5_1.conda#57d7dc60e9325e3de37ff8dffd18e814 https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-hd590300_1001.conda#ec7398d21e2651e0dcb0044d03b9a339 -https://conda.anaconda.org/conda-forge/linux-64/metis-5.1.0-h59595ed_1007.conda#40ccb8318df2500f83bd868dd8fcd201 +https://conda.anaconda.org/conda-forge/linux-64/metis-5.1.0-hd0bcaf9_1007.conda#28eb714416de4eb83e2cbc47e99a1b45 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda#70caf8bb6cf39a0b6b7efc885f51c0fe +https://conda.anaconda.org/conda-forge/linux-64/nspr-4.36-h5888daf_0.conda#de9cd5bca9e4918527b9b72b6e2e1409 https://conda.anaconda.org/conda-forge/linux-64/pkg-config-0.29.2-h4bc722e_1009.conda#1bee70681f504ea424fb07cdb090c001 -https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2#22dad4df6e8630e8dff2428f6f6a7036 https://conda.anaconda.org/conda-forge/linux-64/rav1e-0.6.6-he8a937b_2.conda#77d9955b4abddb811cb8ab1aa7d743e4 -https://conda.anaconda.org/conda-forge/linux-64/s2n-1.4.12-h06160fa_0.conda#bf1899cfd6dea061a220fa7e96a1f4bd +https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.5-h3931f03_0.conda#334dba9982ab9f5d62033c61698a8683 https://conda.anaconda.org/conda-forge/linux-64/sed-4.8-he412f7d_0.tar.bz2#7362f0042e95681f5d371c46c83ebd08 -https://conda.anaconda.org/conda-forge/linux-64/xorg-inputproto-2.3.2-h7f98852_1002.tar.bz2#bcd1b3396ec6960cbc1d2855a9e60b2b -https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2#4b230e8381279d76131116660f5a241a -https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hd590300_0.conda#b462a33c0be1421532f28bfe8f4a7514 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda#2c80dc38fface310c9bd81b17037fee5 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2#be93aabceefa2fac576e971aef407908 
-https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2#06feff3d2634e3097ce2fe681474b534 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda#bce9f945da8ad2ae9b1d7165a64d0f87 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2#b4a4381d54784606820704f7b5f05a15 +https://conda.anaconda.org/conda-forge/linux-64/svt-av1-2.3.0-h5888daf_0.conda#355898d24394b2af353eb96358db9fdd +https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc +https://conda.anaconda.org/conda-forge/linux-64/xorg-imake-1.0.10-h5888daf_0.conda#040f0ca9f518151897759ad09ea98b2d https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae -https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.0.7-h0b41bf4_0.conda#49e8329110001f04923fe7e864990b0c +https://conda.anaconda.org/conda-forge/linux-64/zfp-1.0.1-h5888daf_2.conda#e0409515c467b87176b070bff5d9442e +https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda#c9f075ab2f33b3bbee9e62d4ad0a6cd8 +https://conda.anaconda.org/conda-forge/linux-64/zlib-ng-2.2.2-h5888daf_0.conda#135fd3c66bccad3d2254f50f9809e86a https://conda.anaconda.org/conda-forge/linux-64/aom-3.9.1-hac33072_0.conda#346722a0be40f6edc53f12640d301338 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.6.11-heb1d5e4_0.conda#98784bb35b316e2ba8698f4a75326e9a -https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.2.18-hce8ee76_3.conda#b19224a5179ecb512c4aac9f8a6d57a7 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.1.15-hce8ee76_3.conda#0c4f0205a1ae4ca6c89af922ec54271c 
-https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.1.18-hce8ee76_3.conda#9aa734a17b9b0b793c7696435fe7789a +https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.18-h2af50b2_12.conda#700f1883f5a0a28c30fd98c43d4d946f https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_2.conda#c63b5e52939e795ba8d26e35d767a843 +https://conda.anaconda.org/conda-forge/linux-64/bwidget-1.9.14-ha770c72_1.tar.bz2#5746d6202ba2abad4a4707f2a2462795 https://conda.anaconda.org/conda-forge/linux-64/charls-2.4.2-h59595ed_0.conda#4336bd67920dd504cd8c6761d6a99645 -https://conda.anaconda.org/conda-forge/linux-64/gcc_impl_linux-64-14.1.0-h3c94d91_1.conda#4e32ec060bf4a30c6fff81a920dc0ec9 -https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.0-h59595ed_0.conda#3fdf79ef322c8379ae83be491d805369 -https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-he1b5a44_1004.tar.bz2#cddaf2c63ea4a5901cf09524c490ecdc -https://conda.anaconda.org/conda-forge/linux-64/ghostscript-10.03.1-h59595ed_0.conda#be973b4541601270b77232bc46249a3a +https://conda.anaconda.org/conda-forge/linux-64/fmt-11.0.2-h434a139_0.conda#995f7e13598497691c1dc476d889bc04 +https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb +https://conda.anaconda.org/conda-forge/linux-64/gcc_impl_linux-64-14.2.0-h6b349bd_1.conda#0551d01d65027359bf011c049f9c6401 +https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.2-he02047a_1.conda#aab9195bc018b82dc77a84584b36cce9 +https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.1-hbabe93e_0.conda#ff862eebdfeb2fd048ae9dc92510baca https://conda.anaconda.org/conda-forge/linux-64/gmp-6.3.0-hac33072_2.conda#c94a5994ef49749880a8139cf9afcbe1 https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h59595ed_1003.conda#f87c7b7c2cb45f323ffbce941c78ab7c https://conda.anaconda.org/conda-forge/linux-64/gtest-1.14.0-h434a139_2.conda#89971b339bb4dfbf3759f1f2528d81b1 
-https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda#c3e9338e15d90106f467377017352b97 -https://conda.anaconda.org/conda-forge/linux-64/icu-73.2-h59595ed_0.conda#cc47e1facc155f91abd89b11e48e72ff +https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 +https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda#8b189310083baabfb622af68fd9d3ae3 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240116.2-cxx17_he02047a_1.conda#c48fc56ec03229f294176923c3265c05 https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda#5e97e271911b8b2001a8b71860c32faa https://conda.anaconda.org/conda-forge/linux-64/libasprintf-0.22.5-he8f35ee_3.conda#4fab9799da9571266d05ca5503330655 https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2#c965a5aa0d5c1c37ffc62dff36e28400 +https://conda.anaconda.org/conda-forge/linux-64/libde265-1.0.15-h00ab1b0_0.conda#407fee7a5d7ab2dca12c9ca7f62310ad https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libgettextpo-devel-0.22.5-he02047a_3.conda#9aba7960731e6b4547b3a52f812ed801 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.1.0-h69a702a_1.conda#16cec94c5992d7f42ae3f9fa8b25df8d +https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.2.0-h69a702a_1.conda#0a7f4cd238267c88e5d69f7826a407eb +https://conda.anaconda.org/conda-forge/linux-64/libhwy-1.1.0-h00ab1b0_0.conda#88928158ccfe797eac29ef5e03f7d23d https://conda.anaconda.org/conda-forge/linux-64/libllvm14-14.0.6-hcd5def8_4.conda#73301c133ded2bf71906aa2104edae8b -https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.58.0-h47da74e_1.conda#700ac6ea6d53d5510591c4344d5c989a 
-https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.43-h2797004_0.conda#009981dd9cfcaa4dbfa25ffaed86bcae -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.46.1-hadc24fc_0.conda#36f79405ab16bf271edb55b213836dac -https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe -https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.19.0-hb90f79a_1.conda#8cdb7d41faa0260875ba92414c487e2d +https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda#19e57602824042dfd0446292ef90488b +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_1.conda#62857b389e42b36b686331bec0922050 +https://conda.anaconda.org/conda-forge/linux-64/libopenblas-ilp64-0.3.28-pthreads_h3e26593_1.conda#9d5c316d93ee4c5effd9afda8e8af823 +https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.20.0-h0e7cc3e_1.conda#d0ed81c4591775b70384f4cc78e05cd1 https://conda.anaconda.org/conda-forge/linux-64/libunwind-1.6.2-h9c3ff4c_0.tar.bz2#a730b2badd586580c5752cc73842e068 -https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda#33277193f5b92bad9fdd230eb700929c -https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_3.conda#ac79812548e7e8cf61f7b0abdef01d3b +https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda#a7b27c075c9b7f459f1c022090697cba https://conda.anaconda.org/conda-forge/linux-64/libzopfli-1.0.3-h9c3ff4c_0.tar.bz2#c66fe2d123249af7651ebde8984c51c2 https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/mbedtls-3.5.1-h59595ed_0.conda#a7b444a6e008b804b35521895e3440e2 -https://conda.anaconda.org/conda-forge/linux-64/nccl-2.22.3.1-hee583db_1.conda#f6ec6886214a80beace66f0b9fdf7e4b -https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda#da0ec11a6454ae19bff5b02ed881a2b1 
+https://conda.anaconda.org/conda-forge/linux-64/nccl-2.23.4.1-h03a54cd_2.conda#a08604ac3f9c3dbd128bb24e089dee5f +https://conda.anaconda.org/conda-forge/linux-64/nss-3.106-hdf54f9c_0.conda#efe735c7dc47dddbb14b3433d11c6feb https://conda.anaconda.org/conda-forge/linux-64/openlibm-0.8.1-hd590300_1.conda#6eba22eb06d69e53d0ca01eef42bc675 https://conda.anaconda.org/conda-forge/linux-64/p7zip-16.02-h9c3ff4c_1001.tar.bz2#941066943c0cac69d5aa52189451aa5f -https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2#69e2c796349cd9b273890bee0febfe1b +https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda#df359c09c41cd186fffb93a2d87aa6f5 https://conda.anaconda.org/conda-forge/linux-64/perl-5.32.1-7_hd590300_perl5.conda#f2cfec9406850991f4e3d960cc9e3321 https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.2-h59595ed_0.conda#71004cbf7924e19c02746ccde9fd7123 https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda#353823361b1d27eb3960efb076dfcaf6 -https://conda.anaconda.org/conda-forge/linux-64/rdma-core-53.0-he02047a_0.conda#d60e9a23682287a041a4428927ea7aa5 https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 -https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-hdb0a2a9_1.conda#78b8b85bdf1f42b8a2b3cb577d8742d1 -https://conda.anaconda.org/conda-forge/linux-64/svt-av1-2.2.1-h5888daf_0.conda#0d9c441855be3d8dfdb2e800fe755059 -https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc +https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-ha2e4443_0.conda#6b7dcc7349efd123d493d2dbe85a045f +https://conda.anaconda.org/conda-forge/linux-64/tktable-2.10-h8bc8fbc_6.conda#dff3627fec2c0584ded391205295abf0 https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 
https://conda.anaconda.org/conda-forge/linux-64/uriparser-0.9.8-hac33072_0.conda#d71d3a66528853c0a1ac2c02d79a0284 -https://conda.anaconda.org/conda-forge/linux-64/xorg-fixesproto-5.0-h7f98852_1002.tar.bz2#65ad6e1eb4aed2b0611855aff05e04f6 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda#93ee23f12bc2e684548181256edd2cf6 +https://conda.anaconda.org/conda-forge/linux-64/x265-3.5-h924138e_3.tar.bz2#e7f6ed84d4623d52ee581325c1587a6b +https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-he73a12e_1.conda#05a8ea5f446de33006171a7afe6ae857 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_0.conda#0b666058a179b744a622d0a4a0c56353 https://conda.anaconda.org/conda-forge/linux-64/xorg-makedepend-1.0.9-h59595ed_0.conda#71c756cfcc6649ed7614eb07712bfce0 -https://conda.anaconda.org/conda-forge/linux-64/zfp-1.0.1-hac33072_1.conda#df96b7266e49529d82de467b23977452 -https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-h4ab18f5_1.conda#9653f1bf3766164d0e65fa723cabbc54 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.14.7-hbfbeace_6.conda#d6382461de9a91a2665e964f92d8da0a -https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda#009521b7ed97cca25f8f997f9e745976 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.3-h235a6dd_1.conda#c05358e3a231195f7f0b3f592078bb0c +https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.9-h5e77a74_0.conda#d7714013c40363f45850a25113e2cb05 +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda#54fe76ab3d0189acaef95156874db7f9 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f -https://conda.anaconda.org/conda-forge/linux-64/bwidget-1.9.14-ha770c72_1.tar.bz2#5746d6202ba2abad4a4707f2a2462795 
-https://conda.anaconda.org/conda-forge/linux-64/c-blosc2-2.12.0-hb4ffafa_0.conda#1a9b16afb84d734a1bb2d196c308d477 +https://conda.anaconda.org/conda-forge/linux-64/c-blosc2-2.15.1-hc57e6cf_0.conda#5f84961d86d0ef78851cb34f9d5e31fe https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_hf1063bd_110.conda#ee3e687b78b778db7b304e5b00a4dca6 -https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb -https://conda.anaconda.org/conda-forge/linux-64/gfortran_impl_linux-64-14.1.0-he4a1faa_1.conda#0ae35a9298e2475dc877da9adaa8e490 -https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.1-hbabe93e_0.conda#ff862eebdfeb2fd048ae9dc92510baca -https://conda.anaconda.org/conda-forge/linux-64/gxx_impl_linux-64-14.1.0-h8d00ecb_1.conda#6ae4069622b29253444c3326613a8e1a -https://conda.anaconda.org/conda-forge/linux-64/hdfeos2-2.20-hebf79cf_1003.conda#23bb57b64a629bc3b33379beece7f0d7 +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee +https://conda.anaconda.org/conda-forge/linux-64/gfortran_impl_linux-64-14.2.0-hc73f493_1.conda#131a59b3bb1dbbfc63ec0f21eb0e8c65 +https://conda.anaconda.org/conda-forge/linux-64/gxx_impl_linux-64-14.2.0-h2c03514_1.conda#41664acd4c99ef4d192e12950ff68ca6 +https://conda.anaconda.org/conda-forge/linux-64/hdfeos2-2.20-h3e53b52_1004.conda#c21dc684e0e8efa507aba61a030f65e7 https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 https://conda.anaconda.org/conda-forge/linux-64/libasprintf-devel-0.22.5-he8f35ee_3.conda#1091193789bb830127ed067a9e01ac57 -https://conda.anaconda.org/conda-forge/linux-64/libavif16-1.1.1-h104a339_1.conda#9ef052c2eee74c792833ac2e820e481e -https://conda.anaconda.org/conda-forge/linux-64/libgit2-1.7.1-hca3a8ce_0.conda#6af97ac284ffaf76d8f63cc1f9d64f7a 
+https://conda.anaconda.org/conda-forge/linux-64/libavif16-1.1.1-h1909e37_2.conda#21e468ed3786ebcb2124b123aa2484b7 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-25_linux64_openblas.conda#8ea26d42ca88ec5258802715fe1ee10b +https://conda.anaconda.org/conda-forge/linux-64/libgit2-1.8.4-hd24f944_0.conda#94887b4deb460378a34e1533beaacfd5 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda#13e8e54035ddd2b91875ba399f0f7c04 +https://conda.anaconda.org/conda-forge/linux-64/libjxl-0.11.0-hdb8da77_2.conda#9c4554fafc94db681543804037e65de2 https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hf539b9f_1021.conda#e8c7620cc49de0c6a2349b6dd6e39beb -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.27-pthreads_hac2b453_1.conda#ae05ece66d3924ac3d48b4aa3fa96cec -https://conda.anaconda.org/conda-forge/linux-64/libopenblas-ilp64-0.3.28-pthreads_h3e26593_0.conda#2bd7dc48907a3b6bf766ed87867f3459 -https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-4.25.3-h08a7969_0.conda#6945825cebd2aeb16af4c69d97c32c13 +https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-4.25.3-hd5b35b9_1.conda#06def97690ef90781a91b786cb48a0a9 https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2023.09.01-h5a48ba9_2.conda#41c69fba59d495e8cf5ffda48a607e35 -https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-hb58d41b_14.conda#264f9a3a4ea52c8f4d3e8ae1213a3335 -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.6.0-h29866fb_1.conda#4e9afd30f4ccb2f98645e51005f82236 -https://conda.anaconda.org/conda-forge/linux-64/libxgboost-2.1.1-cuda118_h09a87be_2.conda#1ef0261ebd8ecdab6ca149ef568ba0bf -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.12.7-h4c95cb1_3.conda#0ac9aff6010a7751961c8e4b863a40e7 +https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-hc670b87_16.conda#3d9f3a2e5d7213c34997e4464d2f938c 
+https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-h6565414_0.conda#80eaf80d84668fa5620ac9ec1b4bf56f +https://conda.anaconda.org/conda-forge/linux-64/libxgboost-2.1.2-cuda118_h09a87be_0.conda#d59c3f95f80071f24ebce434494ead0a +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.4-hb346dea_2.conda#69b90b70c434b916abf5a1d5ee5d55fb https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.7-h401b404_0.conda#4474532a312b2245c5c77f1176989b46 https://conda.anaconda.org/conda-forge/linux-64/mpfr-4.2.1-h90cbb55_3.conda#2eeb50cab6652538eee8fc0bc3340c81 -https://conda.anaconda.org/conda-forge/linux-64/nss-3.104-hd34e28f_0.conda#0664e59f6937a660eba9f3d2f9123fa8 -https://conda.anaconda.org/conda-forge/linux-64/python-3.11.10-hc5c86c4_0_cpython.conda#43a02ff0a2dafe8a8a1b6a9eacdbd2cc +https://conda.anaconda.org/conda-forge/linux-64/openblas-ilp64-0.3.28-pthreads_h3d04fff_1.conda#fdaa89df7b34f5c904f8f1348e5a62a5 +https://conda.anaconda.org/conda-forge/linux-64/python-3.12.7-hc5c86c4_0_cpython.conda#0515111a9cdf69f83278f7c197db9807 https://conda.anaconda.org/conda-forge/linux-64/s2geometry-0.10.0-h8413349_4.conda#d19f88cf8812836e6a4a2a7902ed0e77 -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.46.1-h9eae976_0.conda#b2b3e737da0ae347e16ef1970a5d3f14 -https://conda.anaconda.org/conda-forge/linux-64/tktable-2.10-h8bc8fbc_6.conda#dff3627fec2c0584ded391205295abf0 -https://conda.anaconda.org/conda-forge/linux-64/ucx-1.15.0-ha691c75_8.conda#3f9bc6137b240642504a6c9b07a10c25 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.9-h8ee46fc_0.conda#077b6e8ad6a3ddb741fce2496dd01bec +https://conda.anaconda.org/conda-forge/linux-64/spdlog-1.14.1-hed91bc2_1.conda#909188c8979846bac8e586908cf1ca6a +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.0-h9eae976_1.conda#53abf1ef70b9ae213b22caa5350f97a9 +https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-h988505b_2.conda#9dda9667feba914e0e80b95b82f7402b 
+https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.1-hb9d3cd8_0.conda#4bdb303603e9821baf5fe5fdff1dc8f8 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hb9d3cd8_1.conda#a7a49a8b85122b49214798321e2e96b4 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxt-1.3.0-hb9d3cd8_2.conda#d8602724ac0d276c380b97e9eb0f814b https://conda.anaconda.org/conda-forge/noarch/affine-2.4.0-pyhd8ed1ab_0.conda#ae5f4ad87126c55ba3f690ef07f81d64 -https://conda.anaconda.org/conda-forge/noarch/aiohappyeyeballs-2.4.0-pyhd8ed1ab_0.conda#0482cd2217e27b3ce47676d570ac3d45 +https://conda.anaconda.org/conda-forge/noarch/aiohappyeyeballs-2.4.3-pyhd8ed1ab_0.conda#ec763b0a58960558ca0ad7255a51a237 https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_0.conda#7d78a232029458d0077ede6cda30ed0c -https://conda.anaconda.org/conda-forge/noarch/antlr-python-runtime-4.11.1-pyhd8ed1ab_0.tar.bz2#15109c4977d39ad7aa3423f57243e286 https://conda.anaconda.org/conda-forge/noarch/asciitree-0.3.3-py_2.tar.bz2#c0481c9de49f040272556e2cedf42816 +https://conda.anaconda.org/conda-forge/linux-64/astroid-3.3.5-py312h7900ff3_0.conda#e1ed4d572a4a16b97368ab00fd646487 +https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda#6732fa52eb8e66e5afeb32db8701a791 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.4.2-h01f5eca_8.conda#afb85fc0f01032d115c57c961950e7d8 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.8.1-hdb68c23_10.conda#cb6065938167da2d2f078c2f08473b84 -https://conda.anaconda.org/conda-forge/linux-64/backports.zoneinfo-0.2.1-py311h38be061_9.conda#6ba5ba862ef1fa30e87292df09e6b73b 
-https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py311hfdbb021_2.conda#d21daab070d76490cb39a8f1d1729d79 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.30-hec5e740_0.conda#bc1b9f70ea7fa533aefa6a8b6fbe8da7 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.5-h0009854_0.conda#d393d0a6c9b993771fbc67a998fccf6c +https://conda.anaconda.org/conda-forge/linux-64/backports.zoneinfo-0.2.1-py312h7900ff3_9.conda#6df4f61b215587c40ec93810734778ca +https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py312h2ec8cdc_2.conda#b0b867af6fc74b2a0aa206da29c0f3cf https://conda.anaconda.org/conda-forge/linux-64/brunsli-0.1-h9c3ff4c_0.tar.bz2#c1ac6229d0bfd14f8354ff9ad2a26cad +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-hebfffa5_3.conda#fceaedf1cdbcb02df9699a0d9b005292 https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda#12f7d00853807b0531775e9be891cb11 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.3.2-pyhd8ed1ab_0.conda#7f4a9e3fcff3f6356ae99244a014da6a +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda#a374efa97290b8799046df7c5ca17164 https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.0.0-pyhd8ed1ab_0.conda#753d29fe41bb881e4b9c004f0abf973f +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_1.conda#c88ca2bb7099167912e3b26463fff079 https://conda.anaconda.org/conda-forge/noarch/codespell-2.3.0-pyhd8ed1ab_0.conda#6e67fa19bedafa7eb7d6ea91de53e03d https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 
https://conda.anaconda.org/conda-forge/noarch/config-0.5.1-pyhd8ed1ab_0.tar.bz2#97275d4898af65967b1ad57923cef770 https://conda.anaconda.org/conda-forge/noarch/configargparse-1.7-pyhd8ed1ab_0.conda#0d07dc29b1c1cc973f76b74beb44915f https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 -https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.11-py311hfdbb021_2.conda#e0ee31128372cd4c6873372a756964bb +https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.27-h54b06d7_7.conda#dce22f70b4e5a407ce88f2be046f4ceb +https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.11-py312h8fd2918_3.conda#21e433caf1bb1e4c95832f8bb731d64c https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2#961b3a227b437d82ad7054484cfa71b2 -https://conda.anaconda.org/conda-forge/noarch/dill-0.3.8-pyhd8ed1ab_0.conda#78745f157d56877a2c6e7b386f66f3e2 -https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.8-pyhd8ed1ab_0.conda#db16c66b759a64dc5183d69cc3745a52 +https://conda.anaconda.org/conda-forge/noarch/dill-0.3.9-pyhd8ed1ab_0.conda#27faec84454995f6774786c7e5833cd6 +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda#fe521c1608280cc2803ebd26dc252212 https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_0.conda#e8cd5d629f65bdf0f3bb312cde14659e https://conda.anaconda.org/conda-forge/noarch/dodgy-0.2.1-py_0.tar.bz2#62a69d073f7446c90f417b0787122f5b https://conda.anaconda.org/conda-forge/noarch/ecmwf-api-client-1.6.3-pyhd8ed1ab_0.tar.bz2#15621abf59053e184114d3e1d4f9d01e https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2#3cf04868fee0a029769bd41f4b2fbf2d -https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-1.1.0-pyhd8ed1ab_0.conda#a2f2138597905eaa72e561d8efb42cf3 +https://conda.anaconda.org/conda-forge/noarch/et_xmlfile-2.0.0-pyhd8ed1ab_0.conda#cdcdbe90dfab4075fc1f3c4cf2e4b4e5 
https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda#d02ae936e42063ca46af6cdad2dbd1e0 https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda#15dda3cdbf330abfe9f555d22f66db46 https://conda.anaconda.org/conda-forge/noarch/fasteners-0.17.3-pyhd8ed1ab_0.tar.bz2#348e27e78a5e39090031448c72f66d5e -https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.0-pyhd8ed1ab_0.conda#ec288789b07ae3be555046e099798a56 +https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_0.conda#916f8ec5dd4128cd5f207a3c4c07b2c6 https://conda.anaconda.org/conda-forge/noarch/findlibs-0.0.5-pyhd8ed1ab_0.conda#8f325f63020af6f7acbe2c4cb4c920db -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda#0f69b688f52ff6da70bccb7ff7001d1d https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h743c826_0.conda#12e6988845706b2cfbc3bc35c9a61a95 -https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.4.1-py311h9ecbd09_1.conda#4605a44155b0c25da37e8f40318c78a4 -https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.9.0-pyhff2d567_0.conda#ace4329fbff4c69ab0309db6da182987 +https://conda.anaconda.org/conda-forge/linux-64/frozenlist-1.5.0-py312h66e93f0_0.conda#f98e36c96b2c66d9043187179ddb04f4 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.10.0-pyhff2d567_0.conda#816dbc4679a64e4417cd1385d661bb31 +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_0.tar.bz2#6b1f32359fc5d2ab7b491d0029bfffeb https://conda.anaconda.org/conda-forge/linux-64/gettext-0.22.5-he02047a_3.conda#c7f243bbaea97cd6ea1edd693270100e +https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2#914d6646c4dbb1fd3ff539830a12fd71 
-https://conda.anaconda.org/conda-forge/noarch/humanfriendly-10.0-pyhd8ed1ab_6.conda#2ed1fe4b9079da97c44cfe9c2e5078fd +https://conda.anaconda.org/conda-forge/noarch/humanfriendly-10.0-pyhd81877a_7.conda#74fbff91ca7c1b9a36b15903f2242f86 https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2#9f765cbfab6870c8435b9eefecd7a1f4 -https://conda.anaconda.org/conda-forge/noarch/idna-3.8-pyhd8ed1ab_0.conda#99e164522f6bdf23c177c8d9ae63f975 +https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_0.conda#7ba2ede0e7c795ff95088daf0dc59753 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 +https://conda.anaconda.org/conda-forge/noarch/isodate-0.7.2-pyhd8ed1ab_0.conda#d68d25aca67d1a06bf6f5b43aea9430d https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_0.conda#ff7ca04134ee8dde1d7cf491a78ef7c7 -https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py311hd18a35c_0.conda#be34c90cce87090d24da64a7c239ca96 -https://conda.anaconda.org/conda-forge/linux-64/lazy-object-proxy-1.10.0-py311h459d7ec_0.conda#d39020c78fd00ed774ff9c876e8aba07 -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-h7f713cb_2.conda#9ab79924a3760f85a799f21bc99bd655 +https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py312h68727a3_0.conda#444266743652a4f1538145e9362f6d3b +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 +https://conda.anaconda.org/conda-forge/noarch/legacy-cgi-2.6.1-pyh5b84bb0_3.conda#f258b7f54b5d9ddd02441f10c4dca2ac https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.4-hfca40fe_0.conda#32ddb97f897740641d8d46a829ce1704 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-23_linux64_openblas.conda#96c8450a40aa2b9733073a9460de972c 
+https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-25_linux64_openblas.conda#5dbd1b0fc0d01ec5e0e1fbe667281a11 https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.9.1-hdb1bdb2_0.conda#7da1d242ca3591e174a3c7d82230d3c0 -https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.1-default_hecaa2ac_1000.conda#f54aeebefb5c5ff84eca4fb05ca8aa3a -https://conda.anaconda.org/conda-forge/linux-64/libllvm16-16.0.6-hb3ce162_3.conda#a4d48c40dd5c60edbab7fd69c9a88967 -https://conda.anaconda.org/conda-forge/linux-64/libpq-16.4-h2d7952a_1.conda#7e3173fd1299939a02ebf9ec32aa77c4 -https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.3.2-hdffd6e0_0.conda#a8661c87c873d8c8f90479318ebf0a17 +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30ee3a29c84cf7b842a8c5828c4b7c13 +https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.0-ha6d2627_1004.conda#df069bea331c8486ac21814969301c1f +https://conda.anaconda.org/conda-forge/linux-64/libheif-1.18.2-gpl_hffcb242_100.conda#76ac2c07b62d45c192940f010eea11fa +https://conda.anaconda.org/conda-forge/linux-64/libhwloc-2.11.2-default_he43201b_1000.conda#36247217c4e1018085bd9db41eb3526a +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-25_linux64_openblas.conda#4dc03a53fc69371a6158d0ed37214cd3 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-1.4.0-h2c329e2_0.conda#80030debaa84cfc31755d53742df3ca6 https://conda.anaconda.org/conda-forge/linux-64/libxslt-1.1.39-h76b75d6_0.conda#e71f31f8cfb0a91439f2086fc8aa0461 -https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.43.0-py311h9c9ff8c_1.conda#9ab40f5700784bf16ff7cf8012a646e8 +https://conda.anaconda.org/conda-forge/linux-64/llvmlite-0.43.0-py312h374181b_1.conda#ed6ead7e9ab9469629c6cfb363b5c6e2 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 -https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py311h2cbdf9a_1.conda#867a4aa23ae6c0e9c84cf9aa4f2df0fe 
-https://conda.anaconda.org/conda-forge/linux-64/markupsafe-2.1.5-py311h9ecbd09_1.conda#c30e9e5aef9e9ff7fb593736ce2a4546 +https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py312hb3f7f12_1.conda#b99d90ef4e77acdab74828f79705a919 +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py312h178313f_0.conda#a755704ea0e2503f8c227d84829a8e81 https://conda.anaconda.org/conda-forge/noarch/mccabe-0.7.0-pyhd8ed1ab_0.tar.bz2#34fc335fc50eef0b5ea708f2b5f54e0c https://conda.anaconda.org/conda-forge/noarch/mistune-3.0.2-pyhd8ed1ab_0.conda#5cbee699846772cc939bef23a0d524ed -https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.0-py311hd18a35c_0.conda#682f76920687f7d9283039eb542fdacf -https://conda.anaconda.org/conda-forge/linux-64/multidict-6.1.0-py311h9ecbd09_0.conda#afada76949d16eb7d7128ca1dc7d2f10 -https://conda.anaconda.org/conda-forge/noarch/munch-4.0.0-pyhd8ed1ab_0.conda#376b32e8f9d3eacbd625f37d39bd507d +https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.0-py312h68727a3_0.conda#5c9b020a3f86799cdc6115e55df06146 +https://conda.anaconda.org/conda-forge/linux-64/multidict-6.1.0-py312h178313f_1.conda#e397d9b841c37fc3180b73275ce7e990 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/noarch/mypy_extensions-1.0.0-pyha770c72_0.conda#4eccaeba205f0aed9ac3a9ea58568ca3 https://conda.anaconda.org/conda-forge/noarch/natsort-8.4.0-pyhd8ed1ab_0.conda#70959cd1db3cf77b2a27a0836cfd08a7 -https://conda.anaconda.org/conda-forge/noarch/networkx-3.3-pyhd8ed1ab_1.conda#d335fd5704b46f4efb89a6774e81aef0 -https://conda.anaconda.org/conda-forge/linux-64/openblas-ilp64-0.3.28-pthreads_h3d04fff_0.conda#eb2736b14329cf5650917caa43a549c6 +https://conda.anaconda.org/conda-forge/noarch/networkx-3.4.2-pyhd8ed1ab_1.conda#1d4c088869f206413c59acdd309908b7 https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.2-h488ebb8_0.conda#7f2e286780f072ed750df46dc2631138 
-https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.0-h1e5e2c1_0.conda#53e8f030579d34e1a36a735d527c021f +https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.2-h669347b_0.conda#1e6c10f7d749a490612404efeb179eb8 https://conda.anaconda.org/conda-forge/noarch/packaging-24.1-pyhd8ed1ab_0.conda#cbe1bb1f21567018ce595d9c2be0f0db https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2#457c2c8c08e54905d6954e79cb5b5db9 https://conda.anaconda.org/conda-forge/noarch/pathspec-0.12.1-pyhd8ed1ab_0.conda#17064acba08d3686f1135b5ec1b32b12 https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda#405678b942f2481cecdb3e010f4925d9 -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.2-pyhd8ed1ab_0.conda#e1a2dfcd5695f0744f1bcd3bbfe02523 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_0.conda#fd8f2b18b65bbf62e8f653100690c8d2 https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda#d3483c8fc2dc2cc3f5cf43e26d60cabf -https://conda.anaconda.org/conda-forge/linux-64/psutil-6.0.0-py311h9ecbd09_1.conda#493e283ab843404fa36add81fcc49f6c -https://conda.anaconda.org/conda-forge/noarch/pycodestyle-2.9.1-pyhd8ed1ab_0.tar.bz2#0191dd7efe1a94262812770183b68892 +https://conda.anaconda.org/conda-forge/linux-64/propcache-0.2.0-py312h66e93f0_2.conda#2c6c0c68f310bc33972e7c83264d7786 +https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.0-py312h66e93f0_0.conda#0524eb91d3d78d76d671c6e3cd7cee82 +https://conda.anaconda.org/conda-forge/noarch/pycodestyle-2.12.1-pyhd8ed1ab_0.conda#72453e39709f38d0494d096bb5f678b7 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda#844d9eb3b43095b031874477f7d70088 -https://conda.anaconda.org/conda-forge/noarch/pyflakes-2.5.0-pyhd8ed1ab_0.tar.bz2#1b3bef4313288ae8d35b1dfba4cd84a3 +https://conda.anaconda.org/conda-forge/noarch/pyflakes-3.2.0-pyhd8ed1ab_0.conda#0cf7fef6aa123df28adb21a590065e3d 
https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda#b7f5c092b8f9800150d998a71b76d5a1 -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.1.4-pyhd8ed1ab_0.conda#4d91352a50949d049cf9714c8563d433 +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_1.conda#035c17fbf099f50ff60bf2eb303b0a83 https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.20.0-pyhd8ed1ab_0.conda#b98d2018c01ce9980c03ee2850690fab -https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.1-pyhd8ed1ab_0.conda#98206ea9954216ee7540f0c773f2104d -https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.5.0-py311h9ecbd09_1.conda#b1796d741ca619dbacb79917b20e5a05 -https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda#3eeeeb9e4827ace8c0c1419c85d590ad -https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py311h9ecbd09_1.conda#abeb54d40f439b86f75ea57045ab8496 +https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_0.conda#986287f89929b2d629bd6ef6497dc307 +https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.5.0-py312h66e93f0_1.conda#39aed2afe4d0cf76ab3d6b09eecdbea7 +https://conda.anaconda.org/conda-forge/noarch/pytz-2024.2-pyhd8ed1ab_0.conda#260009d03c9d5c0f111904d851f053dc +https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py312h66e93f0_1.conda#549e5930e768548a89c23f595dac5a95 https://conda.anaconda.org/conda-forge/linux-64/re2-2023.09.01-h7f4b329_2.conda#8f70e36268dea8eb666ef14c29bd3cda -https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.20.0-py311h9e33e62_1.conda#3989f9a93796221aff20be94300e3b93 -https://conda.anaconda.org/conda-forge/linux-64/ruamel.yaml.clib-0.2.8-py311h459d7ec_0.conda#7865c897d89a39abc0056d89e37bd9e9 
+https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.21.0-py312h12e396e_0.conda#37f4ad7cb4214c799f32e5f411c6c69f +https://conda.anaconda.org/conda-forge/linux-64/ruamel.yaml.clib-0.2.8-py312h66e93f0_1.conda#532c3e5d0280be4fea52396ec1fa7d5d https://conda.anaconda.org/conda-forge/noarch/semver-3.0.2-pyhd8ed1ab_0.conda#5efb3fccda53974aed800b6d575f72ed https://conda.anaconda.org/conda-forge/noarch/setoptconf-tmp-0.3.1-pyhd8ed1ab_0.tar.bz2#af3e36d4effb85b9b9f93cd1db0963df -https://conda.anaconda.org/conda-forge/noarch/setuptools-73.0.1-pyhd8ed1ab_0.conda#f0b618d7673d1b2464f600b34d912f6f -https://conda.anaconda.org/conda-forge/linux-64/simplejson-3.19.3-py311h9ecbd09_1.conda#b208b9b6336362211c787547f92a5464 +https://conda.anaconda.org/conda-forge/noarch/setuptools-75.3.0-pyhd8ed1ab_0.conda#2ce9825396daf72baabaade36cee16da +https://conda.anaconda.org/conda-forge/linux-64/simplejson-3.19.3-py312h66e93f0_1.conda#c8d1a609d5f3358d715c2273011d9f4d https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 https://conda.anaconda.org/conda-forge/noarch/smmap-5.0.0-pyhd8ed1ab_0.tar.bz2#62f26a3d1387acee31322208f0cfa3e0 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda#3f144b2c34f8cb5a9abd9ed23a39c561 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 -https://conda.anaconda.org/conda-forge/noarch/sqlparse-0.5.1-pyhd8ed1ab_0.conda#e8af29e73e8b5906d8882c1f67222d34 https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda#04eedddeb68ad39871c8127dd1c21f4f https://conda.anaconda.org/conda-forge/noarch/tenacity-9.0.0-pyhd8ed1ab_0.conda#42af51ad3b654ece73572628ad2882ae 
-https://conda.anaconda.org/conda-forge/noarch/termcolor-2.4.0-pyhd8ed1ab_0.conda#a5033708ad9283907c3b1bc1f90d0d0d +https://conda.anaconda.org/conda-forge/noarch/termcolor-2.5.0-pyhd8ed1ab_0.conda#29a5d22565b850099cd9959862d1b154 https://conda.anaconda.org/conda-forge/noarch/threadpoolctl-3.5.0-pyhc1e730c_0.conda#df68d78237980a159bd7149f33c0e8fd -https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.16.3-hf0b6e87_3.conda#1e28da846782f91a696af3952a2472f9 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 -https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.1-pyhd8ed1ab_0.tar.bz2#5844808ffab9ebdb694585b50ba02a96 +https://conda.anaconda.org/conda-forge/noarch/tomli-2.0.2-pyhd8ed1ab_0.conda#e977934e00b355ff55ed154904044727 https://conda.anaconda.org/conda-forge/noarch/tomlkit-0.13.2-pyha770c72_0.conda#0062a5f3347733f67b0f33ca48cc21dd -https://conda.anaconda.org/conda-forge/noarch/toolz-0.12.1-pyhd8ed1ab_0.conda#2fcb582444635e2c402e8569bb94e039 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py311h9ecbd09_1.conda#616fed0b6f5c925250be779b05d1d7f7 +https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_0.conda#34feccdd4177f2d3d53c73fc44fd9a37 +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py312h66e93f0_1.conda#af648b62462794649066366af4ecd5b0 https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_0.conda#3df84416a021220d8b5700c613af2dc5 -https://conda.anaconda.org/conda-forge/noarch/trove-classifiers-2024.7.2-pyhd8ed1ab_0.conda#2b9f52c7ecb8d017e50f91852aead307 +https://conda.anaconda.org/conda-forge/noarch/trove-classifiers-2024.10.21.16-pyhd8ed1ab_0.conda#501f6d3288160a31d99a2f1321e77393 https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda#ebe6952715e1d5eb567eeebf25250fa7 -https://conda.anaconda.org/conda-forge/linux-64/ujson-5.10.0-py311hfdbb021_1.conda#273cf8bedf58f24aec8d960831f89c5a 
+https://conda.anaconda.org/conda-forge/linux-64/ujson-5.10.0-py312h2ec8cdc_1.conda#96226f62dddc63226472b7477d783967 +https://conda.anaconda.org/conda-forge/linux-64/unicodedata2-15.1.0-py312h66e93f0_1.conda#588486a61153f94c7c13816f7069e440 https://conda.anaconda.org/conda-forge/noarch/untokenize-0.1.1-pyhd8ed1ab_1.conda#6042b782b893029aa40335782584a092 https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda#daf5160ff9cde3a468556965329085b9 -https://conda.anaconda.org/conda-forge/noarch/webob-1.8.8-pyhd8ed1ab_0.conda#ae69b699c308c3bd20388219764235b0 -https://conda.anaconda.org/conda-forge/noarch/wheel-0.44.0-pyhd8ed1ab_0.conda#d44e3b085abcaef02983c6305b84b584 -https://conda.anaconda.org/conda-forge/linux-64/wrapt-1.16.0-py311h9ecbd09_1.conda#810ae646bcc50a017380336d874e4014 +https://conda.anaconda.org/conda-forge/noarch/wheel-0.45.0-pyhd8ed1ab_0.conda#f9751d7c71df27b2d29f5cab3378982e https://conda.anaconda.org/conda-forge/noarch/xlsxwriter-3.2.0-pyhd8ed1ab_0.conda#a1f7264726115a2f8eac9773b1f27eba -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda#82b6df12252e6f32402b96dacc656fec -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-5.0.3-h7f98852_1004.tar.bz2#e9a21aa4d5e3e5f1aed71e8cefd46b6a -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda#ed67c36f215b310412b2af935bf3e530 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxt-1.3.0-hd590300_1.conda#ae92aab42726eb29d16488924f7312cb +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxmu-1.2.1-hb9d3cd8_1.conda#f35a9a2da717ade815ffa70c0e8bdfbd https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda#156c91e778c1d4d57b709f8c5333fd06 https://conda.anaconda.org/conda-forge/noarch/yapf-0.32.0-pyhd8ed1ab_0.tar.bz2#177cba0b4bdfacad5c5fbb0ed31504c4 
-https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-ha4adb4c_5.conda#e8372041ebb377237db9d0d24c7b5962 +https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h3b0a872_6.conda#113506c8d2d558e733f5c38f6bf08c50 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.20.1-pyhd8ed1ab_0.conda#74a4befb4b38897e19a107693e49da20 +https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_0.conda#fee389bf8a4843bd7a2248ce11b7f188 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_0.conda#1bb1ef9806a9a20872434f58b3e7fc1a https://conda.anaconda.org/conda-forge/noarch/aiosignal-1.3.1-pyhd8ed1ab_0.tar.bz2#d1e1eb7e21a9e2c74279d87dafb68156 -https://conda.anaconda.org/conda-forge/noarch/asgiref-3.8.1-pyhd8ed1ab_0.conda#b5c2e1034ccc76fb14031637924880eb -https://conda.anaconda.org/conda-forge/linux-64/astroid-2.15.8-py311h38be061_0.conda#46d70fcb74472aab178991f0231ee3c6 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.7.17-he0b1f16_2.conda#ea6d998135d5f8932cffc91381104690 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.10.3-h50844eb_4.conda#e72fdd8942f266ea79c70ec085661d6c -https://conda.anaconda.org/conda-forge/noarch/babel-2.14.0-pyhd8ed1ab_0.conda#9669586875baeced8fc30c0826c3270e +https://conda.anaconda.org/conda-forge/linux-64/arpack-3.9.1-nompi_h77f6705_101.conda#ff39030debb47f6b53b45bada38e0903 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.6.5-hbaf354b_4.conda#2cefeb144de7712995d1b52cc6a3864c +https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.13.0-h935415a_0.conda#debd1677c2fea41eb2233a260f48a298 +https://conda.anaconda.org/conda-forge/noarch/babel-2.16.0-pyhd8ed1ab_0.conda#6d4e9ecca8d88977147e109fc7053184 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda#332493000404d8411859539a5a630865 
-https://conda.anaconda.org/conda-forge/noarch/bleach-6.1.0-pyhd8ed1ab_0.conda#0ed9d7c0e9afa7c025807a9a8136ea3e -https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py311hf29c0ef_0.conda#55553ecd5328336368db611f350b7039 -https://conda.anaconda.org/conda-forge/linux-64/cfitsio-4.3.0-hbdc6101_0.conda#797554b8b7603011e8677884381fbcc5 +https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyhd8ed1ab_0.conda#461bcfab8e65c166e297222ae919a2d4 +https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py312h06ac9bb_0.conda#a861504bbea4161a9170b85d4d2be840 +https://conda.anaconda.org/conda-forge/linux-64/cfitsio-4.4.1-hf8ad068_0.conda#1b7a01fd02d11efe0eb5a676842a7b7d https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2#4fd2c6b53934bd7d96d1f3fdaf99b79f https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2#a29b7c141d6b2de4bb67788a5f107734 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.1-py311h9ecbd09_1.conda#a36ccf0f3d2eb95a0ecc293f5f56e080 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.4-py312h178313f_0.conda#a32fbd2322865ac80c7db74c553f5306 https://conda.anaconda.org/conda-forge/linux-64/curl-8.9.1-h18eb788_0.conda#2e7dedf73dfbfcee662e2a0f6175e4bb -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-0.12.3-py311h459d7ec_0.conda#13d385f635d7fbe9acc93600f67a6cb4 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.0-py312h66e93f0_1.conda#a921e2fe122e7f38417b9b17c7a13343 https://conda.anaconda.org/conda-forge/noarch/docformatter-1.7.5-pyhd8ed1ab_0.conda#3a941b6083e945aa87e739a9b85c82e9 https://conda.anaconda.org/conda-forge/noarch/docrep-0.3.2-pyh44b312d_0.tar.bz2#235523955bc1bfb019d7ec8a2bb58f9a -https://conda.anaconda.org/conda-forge/noarch/fire-0.6.0-pyhd8ed1ab_0.conda#e9ed10aa8fa1dd6782940b95c942a6ae -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.53.1-py311h9ecbd09_1.conda#89ed1820af1523df84171049199ed915 
+https://conda.anaconda.org/conda-forge/noarch/fire-0.7.0-pyhd8ed1ab_0.conda#c8eefdf1e822c56a6034602e67bc92a5 +https://conda.anaconda.org/conda-forge/noarch/flake8-7.1.1-pyhd8ed1ab_0.conda#a25e5df6b26be3c2d64be307c1ef0b37 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.54.1-py312h178313f_1.conda#bbbf5fa5cab622c33907bc8d7eeea9f7 +https://conda.anaconda.org/conda-forge/linux-64/freeglut-3.2.2-ha6d2627_3.conda#84ec3f5b46f3076be49f2cf3f1cfbf02 https://conda.anaconda.org/conda-forge/noarch/geopy-2.4.1-pyhd8ed1ab_1.conda#358c17429c97883b2cb9ab5f64bc161b +https://conda.anaconda.org/conda-forge/linux-64/git-2.46.0-pl5321hb5640b7_0.conda#825d146359bc8b85083d92259d0a0e1b https://conda.anaconda.org/conda-forge/noarch/gitdb-4.0.11-pyhd8ed1ab_0.conda#623b19f616f2ca0c261441067e18ae40 +https://conda.anaconda.org/conda-forge/linux-64/gsl-2.7-he838d99_0.tar.bz2#fec079ba39c9cca093bf4c00001825de https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2#b748fbf7060927a6e82df7cb5ee8f097 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda#76b32dcf243444aea9c6b804bcfa40b8 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_hdf9ad27_105.conda#7e1729554e209627636a0f6fabcdd115 -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.4.0-pyha770c72_0.conda#6e3dbc422d3749ad72659243d6ac8b2b +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-7.2.1-pyha770c72_0.conda#b9f5330c0853ccabc39a9878c6f1a2ab https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_0.conda#c808991d29b9838fb4d96ce8267ec9ec -https://conda.anaconda.org/conda-forge/noarch/isodate-0.6.1-pyhd8ed1ab_0.tar.bz2#4a62c93c1b5c0b920508ae3fd285eaf5 https://conda.anaconda.org/conda-forge/noarch/isort-5.13.2-pyhd8ed1ab_0.conda#1d25ed2b95b92b026aaa795eabec8d91 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda#7b86ecb7d3557821c649b3c31e3eb9f2 
https://conda.anaconda.org/conda-forge/noarch/joblib-1.4.2-pyhd8ed1ab_0.conda#25df261d4523d9f9783bcdb7208d872f -https://conda.anaconda.org/conda-forge/linux-64/jupyter_core-5.7.2-py311h38be061_0.conda#f85e78497dfed6f6a4b865191f42de2e +https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh31011fe_1.conda#0a2980dada0dd7fd0998f0342308b1b1 https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_1.conda#afcd1b53bcac8844540358e33f33d28f https://conda.anaconda.org/conda-forge/noarch/latexcodec-2.0.1-pyh9f0ad1d_0.tar.bz2#8d67904973263afd2985ba56aa2d6bb4 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-23_linux64_openblas.conda#eede29b40efa878cbe5bdcb767e97310 -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-he9388d3_8.conda#f3abc6e6ab60fa404c23094f5a03ec9b -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.78.1-hebfc3b9_0.conda#ddd09e8904fde46b85f41896621803e6 -https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.0-hac7e632_1003.conda#50c389a09b6b7babaef531eb7cb5e0ca https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.62.2-h15f2491_0.conda#8dabe607748cb3d7002ad73cd06f1325 -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-23_linux64_openblas.conda#2af0879961951987e464722fd00ec1e0 https://conda.anaconda.org/conda-forge/noarch/logilab-common-1.7.3-py_0.tar.bz2#6eafcdf39a7eb90b6d951cfff59e8d3b -https://conda.anaconda.org/conda-forge/linux-64/lxml-5.3.0-py311hcfaa980_1.conda#b76d6a1a47942ad2021a9d3d7fe527bd +https://conda.anaconda.org/conda-forge/linux-64/lxml-5.3.0-py312he28fd5a_2.conda#3acf38086326f49afed094df4ba7c9d9 https://conda.anaconda.org/conda-forge/noarch/nested-lookup-0.2.25-pyhd8ed1ab_1.tar.bz2#2f59daeb14581d41b1e2dda0895933b2 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda#dfe0528d0f1c16c1f7c528ea5536ab30 -https://conda.anaconda.org/conda-forge/linux-64/openpyxl-3.1.5-py311h50c5138_1.conda#7d777fcd827bbd67fd1b8b01b7f8f333 
+https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.4-py312heda63a1_0.conda#d8285bea2a350f63fab23bf460221f3f +https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.8-hedd0468_0.conda#dcd0ed5147d8876b0848a552b416ce76 +https://conda.anaconda.org/conda-forge/linux-64/openpyxl-3.1.5-py312h710cb58_1.conda#69a8838436435f59d72ddcb8dfd24a28 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 -https://conda.anaconda.org/conda-forge/linux-64/pillow-10.0.1-py311h8aef010_1.conda#4d66ee2081a7cd444ff6f30d95873eef -https://conda.anaconda.org/conda-forge/noarch/pip-24.2-pyh8b19718_1.conda#6c78fbb8ddfd64bcb55b5cbafd2d2c43 -https://conda.anaconda.org/conda-forge/noarch/plotly-5.24.0-pyhd8ed1ab_0.conda#80a4a0867ded2a66687e78bca0bc70fc -https://conda.anaconda.org/conda-forge/linux-64/postgresql-16.4-hb2eb5c0_1.conda#1aaec5dbae29b3f0a2c20eeb84e9e38a -https://conda.anaconda.org/conda-forge/linux-64/proj-9.3.0-h1d62c97_2.conda#b5e57a0c643da391bef850922963eece +https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py312h7b63e92_0.conda#385f46a4df6f97892503a841121a9acf +https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh8b19718_0.conda#5dd546fe99b44fda83963d15f84263b7 +https://conda.anaconda.org/conda-forge/noarch/plotly-5.24.1-pyhd8ed1ab_0.conda#81bb643d6c3ab4cbeaf724e9d68d0a6a +https://conda.anaconda.org/conda-forge/linux-64/poppler-24.08.0-h47131b8_1.conda#0854b9ff0cc10a1f6f67b0f352b8e75a +https://conda.anaconda.org/conda-forge/linux-64/proj-9.4.1-h54d7996_1.conda#e479d1991c725e1a355f33c0e40dbc66 https://conda.anaconda.org/conda-forge/noarch/pydocstyle-6.3.0-pyhd8ed1ab_0.conda#7e23a61a7fbaedfef6eb0e1ac775c8e5 -https://conda.anaconda.org/conda-forge/noarch/pyproject_hooks-1.1.0-pyhd8ed1ab_0.conda#03736d8ced74deece64e54be348ddd3e +https://conda.anaconda.org/conda-forge/noarch/pyproject_hooks-1.2.0-pyh7850678_0.conda#5003da197661e40a2509e9c4651f1eea 
https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda#c03d61f31f38fdb9facf70c29958bf7a https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0-pyhd8ed1ab_0.conda#2cf4264fffb9e6eff6031c5b6884d61c -https://conda.anaconda.org/conda-forge/noarch/python-utils-3.8.2-pyhd8ed1ab_0.conda#89703b4f38bd1c0353881f085bc8fdaa -https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.0-py311h7deb3e3_2.conda#5d3fc8b5c5765e1f207c53554a713907 +https://conda.anaconda.org/conda-forge/noarch/python-utils-3.9.0-pyhff2d567_0.conda#ae8d4e318695c0d3e3464ed95cc8b385 +https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.0-py312hbf22597_3.conda#746ce19f0829ec3e19c93007b1a224d3 +https://conda.anaconda.org/conda-forge/noarch/rdflib-7.1.1-pyh0610db2_0.conda#325219de79481bcf5b6446d327e3d492 https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda#0fc8b52192a8898627c3efae1003e9f6 +https://conda.anaconda.org/conda-forge/noarch/requirements-detector-1.3.1-pyhd8ed1ab_0.conda#f921ea6a1138cc7edee77de8ed12b226 https://conda.anaconda.org/conda-forge/noarch/retrying-1.3.3-pyhd8ed1ab_3.conda#1f7482562f2082f1b2abf8a3e2a41b63 -https://conda.anaconda.org/conda-forge/linux-64/ruamel.yaml-0.18.6-py311h459d7ec_0.conda#4dccc0bc3bb4d6e5c30bccbd053c4f90 -https://conda.anaconda.org/conda-forge/linux-64/tbb-2021.13.0-h84d6215_0.conda#ee6f7fd1e76061ef1fa307d41fa86a96 -https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.3.0-pyhd8ed1ab_0.conda#8662629d9a05f9cff364e31ca106c1ac -https://conda.anaconda.org/conda-forge/noarch/tqdm-4.66.5-pyhd8ed1ab_0.conda#c6e94fc2b2ec71ea33fe7c7da259acb4 +https://conda.anaconda.org/conda-forge/linux-64/ruamel.yaml-0.18.6-py312h66e93f0_1.conda#28ed869ade5601ee374934a31c9d628e +https://conda.anaconda.org/conda-forge/linux-64/tbb-2022.0.0-hceb3a55_0.conda#79f0161f3ca73804315ca980f65d9c60 +https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.4.0-pyhd8ed1ab_0.conda#f1acf5fdefa8300de697982bcb1761c9 
+https://conda.anaconda.org/conda-forge/noarch/tqdm-4.67.0-pyhd8ed1ab_0.conda#196a9e6ab4e036ceafa516ea036619b0 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda#52d648bd608f5737b123f510bb5514b5 https://conda.anaconda.org/conda-forge/noarch/url-normalize-1.4.3-pyhd8ed1ab_0.tar.bz2#7c4076e494f0efe76705154ac9302ba6 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.26.4-pyhd8ed1ab_0.conda#14c15fa7def506fe7d1a0e3abdc212d6 -https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-hac6953d_0.conda#63b80ca78d29380fe69e69412dcbe4ac -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.7.10-h4bc722e_1.conda#749baebe7e2ff3360630e069175e528b -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxmu-1.1.3-h4ab18f5_1.conda#4d6c9925cdcda27e9d022e40eb3eac05 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxpm-3.5.17-hd590300_0.conda#12bf78e12f71405775e1c092902959d3 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.1-pyhd8ed1ab_0.conda#dae21509d62aa7bf676279ced3edcb3f +https://conda.anaconda.org/conda-forge/noarch/webob-1.8.9-pyhd8ed1ab_0.conda#ff98f23ad74d2a3256debcd9df65d37d +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxpm-3.5.17-hb9d3cd8_1.conda#f35bec7fface97f67f44ca952fc740b7 https://conda.anaconda.org/conda-forge/noarch/yamale-5.2.1-pyhca7485f_0.conda#c089f90a086b6214c5606368d0d3bad0 https://conda.anaconda.org/conda-forge/noarch/yamllint-1.35.1-pyhd8ed1ab_0.conda#a1240b99a7ccd953879dc63111823986 -https://conda.anaconda.org/conda-forge/linux-64/yarl-1.11.1-py311h9ecbd09_0.conda#3dfc4a6fef3ef9683494e3266fca27ea -https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.10.5-py311h61187de_0.conda#4b255c4b54de2a41bc8dc63ee78098e4 -https://conda.anaconda.org/conda-forge/linux-64/arpack-3.7.0-hdefa2d7_2.tar.bz2#8763fe86163198ef1778d1d8d22bb078 -https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-hd4edc92_1.tar.bz2#6c72ec3e660a51736913ef6ea68c454b 
-https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.5.7-hb7bd14b_1.conda#82bd3d7da86d969c62ff541bab19526a -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-h3faef2a_0.conda#f907bb958910dc404647326ca80c263e -https://conda.anaconda.org/conda-forge/noarch/cattrs-24.1.0-pyhd8ed1ab_0.conda#1e5ac693650d3312e6421e766a5abadd -https://conda.anaconda.org/conda-forge/linux-64/cryptography-43.0.1-py311hafd3f86_0.conda#2653b58a992032d6c3ff4d82fc1c6c82 -https://conda.anaconda.org/conda-forge/noarch/django-5.1.1-pyhd8ed1ab_0.conda#d1e2ab198eca6bf9fcd81f6fd790e2c5 -https://conda.anaconda.org/conda-forge/noarch/flake8-5.0.4-pyhd8ed1ab_0.tar.bz2#8079ea7dec0a917dd0cb6c257f7ea9ea -https://conda.anaconda.org/conda-forge/linux-64/freeglut-3.2.2-hac7e632_2.conda#6e553df297f6e64668efb54302e0f139 +https://conda.anaconda.org/conda-forge/linux-64/yarl-1.16.0-py312h66e93f0_0.conda#c3f4a6b56026c22319bf31514662b283 +https://conda.anaconda.org/conda-forge/linux-64/aiohttp-3.10.10-py312h178313f_0.conda#d2f9e490ab2eae3e661b281346618a82 +https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.28.2-h6c0439f_6.conda#4e472c316d08af60faeb71f86d7563e1 +https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.8.0-hd126650_2.conda#36df3cf05459de5d0a41c77c4329634b +https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.7.0-h10ac4d7_1.conda#ab6d507ad16dbe2157920451d662e4a1 +https://conda.anaconda.org/conda-forge/noarch/cattrs-24.1.2-pyhd8ed1ab_0.conda#ac582de2324988b79870b50c89c91c75 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py312hc0a28a1_1.conda#990033147b0a998e756eaaed6b28f48d +https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyh24bf2e0_0.tar.bz2#b73afa0d009a51cabd3ec99c4d2ef4f3 +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py312h68727a3_2.conda#ff28f374b31937c048107521c814791e 
+https://conda.anaconda.org/conda-forge/linux-64/cryptography-43.0.3-py312hda17c39_0.conda#2abada8c216dd6e32514535a3fa245d4 +https://conda.anaconda.org/conda-forge/noarch/eofs-1.4.1-pyhd8ed1ab_1.conda#5fc43108dee4106f23050acc7a101233 +https://conda.anaconda.org/conda-forge/noarch/flake8-polyfill-1.0.2-py_0.tar.bz2#a53db35e3d07f0af2eccd59c2a00bffe https://conda.anaconda.org/conda-forge/noarch/funcargparse-0.2.5-pyhd8ed1ab_0.tar.bz2#e557b70d736251fa0bbb7c4497852a92 -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.10-h6c15284_3.conda#06f97c8b69157d91993af0c4f2e16bdc -https://conda.anaconda.org/conda-forge/linux-64/geotiff-1.7.1-hee599c5_13.conda#8c55dacddd589be64b2bd6a5d4264be6 -https://conda.anaconda.org/conda-forge/linux-64/git-2.42.0-pl5321h86e50cf_0.conda#96ad24c67e0056d171385859c43218a2 +https://conda.anaconda.org/conda-forge/linux-64/geotiff-1.7.3-hf7fa9e8_2.conda#1d6bdc6b2c62c8cc90c67b50142d7b7f https://conda.anaconda.org/conda-forge/noarch/gitpython-3.1.43-pyhd8ed1ab_0.conda#0b2154c1818111e17381b1df5b4b0176 -https://conda.anaconda.org/conda-forge/linux-64/gsl-2.7-he838d99_0.tar.bz2#fec079ba39c9cca093bf4c00001825de -https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/linux-64/hdfeos5-5.1.16-hf1a501a_15.conda#d2e16a32f41d67c7d280da11b2846328 -https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-8.4.0-hd8ed1ab_0.conda#01b7411c765c3d863dcc920207f258bd -https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2023.12.1-pyhd8ed1ab_0.conda#a0e4efb5f35786a05af4809a2fb1f855 +https://conda.anaconda.org/conda-forge/linux-64/imagecodecs-2024.6.1-py312h6d9a048_4.conda#a810fadedc4edc06b4282d1222467837 +https://conda.anaconda.org/conda-forge/noarch/imageio-2.36.0-pyh12aca89_1.conda#36349844ff73fcd0140ee7f30745f0bf +https://conda.anaconda.org/conda-forge/noarch/importlib_metadata-7.2.1-hd8ed1ab_0.conda#d6c936d009aa63e5f82d216c95cdcaee 
+https://conda.anaconda.org/conda-forge/linux-64/jasper-4.2.4-h536e39c_0.conda#9518ab7016cf4564778aef08b6bd8792 +https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_0.conda#720745920222587ef942acfbc578b584 +https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_0.conda#a14218cfb29662b4a19ceb04e93e298e https://conda.anaconda.org/conda-forge/linux-64/kealib-1.5.3-hf8d3e68_2.conda#ffe68c611ae0ccfda4e7a605195e22b3 https://conda.anaconda.org/conda-forge/noarch/lazy-loader-0.4-pyhd8ed1ab_1.conda#4809b9f4c6ce106d443c3f90b8e10db2 -https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.22.0-h9be4e54_1.conda#4b4e36a91e7dabf7345b82d85767a7c3 +https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.28.0-h26d7fe4_0.conda#2c51703b4d775f8943c08a361788131b https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h135f659_114.conda#a908e463c710bd6b10a9eaa89fdf003c -https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h090f1da_1.conda#9a2d6acaa8ce6d53a150248e7b11165e -https://conda.anaconda.org/conda-forge/linux-64/numpy-1.26.4-py311h64a7726_0.conda#a502d7aad449a1206efb366d6a12c52d +https://conda.anaconda.org/conda-forge/linux-64/libpq-17.0-h04577a9_4.conda#392cae2a58fbcb9db8c2147c6d6d1620 +https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h15fa968_9.conda#4957a903bd6a68cc2e53e47476f9c6f4 +https://conda.anaconda.org/conda-forge/noarch/magics-python-1.5.8-pyhd8ed1ab_1.conda#3fd7e3db129f12362642108f23fde521 +https://conda.anaconda.org/conda-forge/linux-64/numba-0.60.0-py312h83e6fd3_0.conda#e064ca33edf91ac117236c4b5dee207a +https://conda.anaconda.org/conda-forge/linux-64/numcodecs-0.13.1-py312hf9745cd_0.conda#33c27209bfd7af6766211facd24839ce +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.4-py312hfb8ada1_0.conda#d0745ae74c2b26571b692ddde112eebb 
+https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h4c5309f_1.conda#7df02e445367703cd87a574046e3a6f0 +https://conda.anaconda.org/conda-forge/noarch/patsy-0.5.6-pyhd8ed1ab_0.conda#a5b55d1cb110cdcedc748b5c3e16e687 https://conda.anaconda.org/conda-forge/noarch/progressbar2-4.5.0-pyhd8ed1ab_0.conda#6f9eb38d0a87898cf5a7c91adaccd691 https://conda.anaconda.org/conda-forge/noarch/pybtex-0.24.0-pyhd8ed1ab_2.tar.bz2#2099b86a7399c44c0c61cdb6de6915ba -https://conda.anaconda.org/conda-forge/noarch/pylint-2.17.7-pyhd8ed1ab_0.conda#3cab6aee60038b3f621bce3e50f52bed -https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py311h1facc83_4.conda#75d504c6787edc377ebdba087a26a61b -https://conda.anaconda.org/conda-forge/noarch/pytest-cov-5.0.0-pyhd8ed1ab_0.conda#c54c0107057d67ddf077751339ec2c63 -https://conda.anaconda.org/conda-forge/noarch/pytest-env-1.1.4-pyhd8ed1ab_0.conda#638cfd3bf6904125e868176d89c2ae0b +https://conda.anaconda.org/conda-forge/noarch/pylint-3.3.1-pyhd8ed1ab_0.conda#2a3426f75e2172c932131f4e3d51bcf4 +https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.6.1-py312h9211aeb_9.conda#173afeb0d112c854fd1a9fcac4b5cce3 +https://conda.anaconda.org/conda-forge/linux-64/pys2index-0.1.5-py312hfb10629_0.conda#325cc5f0e0dc36562f3de2a4dbded572 +https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_0.conda#cb8a11b6d209e3d85e5094bdbd9ebd9c +https://conda.anaconda.org/conda-forge/noarch/pytest-env-1.1.5-pyhd8ed1ab_0.conda#ecd5e850bcd3eca02143e7df030ee50f https://conda.anaconda.org/conda-forge/noarch/pytest-metadata-3.1.1-pyhd8ed1ab_0.conda#52b91ecba854d55b28ad916a8b10da24 https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_0.conda#4b9b5e086812283c052a9105ab1e254e https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda#b39568655c127a9c4a44d178ac99b6d0 -https://conda.anaconda.org/conda-forge/noarch/python-build-1.2.2-pyhd8ed1ab_0.conda#7309d5de1e4e866df29bcd8ea5550035 
-https://conda.anaconda.org/conda-forge/noarch/rdflib-7.0.0-pyhd8ed1ab_0.conda#44d14ef95495b3d4438f28998e0296a9 -https://conda.anaconda.org/conda-forge/noarch/requirements-detector-1.2.2-pyhd8ed1ab_0.conda#6626918380d99292df110f3c91b6e5ec -https://conda.anaconda.org/conda-forge/linux-64/suitesparse-5.10.1-h5a4f163_3.conda#f363554b9084fb9d5e3366fbbc0d18e0 -https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py311hd18a35c_5.conda#4e8447ca8558a203ec0577b4730073f3 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxaw-1.0.14-h7f98852_1.tar.bz2#45b68dc2fc7549c16044d533ceaf340e -https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py311hbc35293_1.conda#aec590674ba365e50ae83aa2d6e1efae -https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.26.6-hf567797_4.conda#ffb662b31aef333e68a00dd17fda2027 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py311h9f3472d_1.conda#2c3c4f115d28ed9e001a271d5d8585aa -https://conda.anaconda.org/conda-forge/noarch/colorspacious-1.1.2-pyh24bf2e0_0.tar.bz2#b73afa0d009a51cabd3ec99c4d2ef4f3 -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.0-py311hd18a35c_1.conda#f709f23e2b1b93b3b6a20e9e7217a258 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.8.2-pyhd8ed1ab_0.conda#8e7524a2fb561506260db789806c7ee9 -https://conda.anaconda.org/conda-forge/noarch/eofs-1.4.1-pyhd8ed1ab_1.conda#5fc43108dee4106f23050acc7a101233 -https://conda.anaconda.org/conda-forge/noarch/flake8-polyfill-1.0.2-py_0.tar.bz2#a53db35e3d07f0af2eccd59c2a00bffe -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-8.3.0-h3d44ed6_0.conda#5a6f6c00ef982a9bc83558d9ac8f64a0 -https://conda.anaconda.org/conda-forge/noarch/identify-2.6.0-pyhd8ed1ab_0.conda#f80cc5989f445f23b1622d6c455896d9 -https://conda.anaconda.org/conda-forge/linux-64/imagecodecs-2023.9.18-py311h9b38416_0.conda#67bed2bd92ffa76b20506d83427706ae 
-https://conda.anaconda.org/conda-forge/noarch/imageio-2.35.1-pyh12aca89_0.conda#b03ff3631329c8ef17bae35d2bb216f7 -https://conda.anaconda.org/conda-forge/linux-64/jasper-4.0.0-h32699f2_1.conda#fdde5424ecef5f7ad310b4242229291c +https://conda.anaconda.org/conda-forge/noarch/python-build-1.2.2.post1-pyhff2d567_0.conda#bd5ae3c630d5eed353badb091fd3e603 +https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.7.0-py312hc0a28a1_2.conda#8300d634adec4a6aed35a87e90e9cb07 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py312h62794b6_1.conda#b43233a9e2f62fb94affe5607ea79473 +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py312h6cab151_1.conda#5be02e05e1adaa42826cc6800ce399bc +https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_1.conda#5abeaa41ec50d4d1421a8bc8fbc93054 +https://conda.anaconda.org/conda-forge/linux-64/suitesparse-7.8.3-hb42a789_0.conda#216922e19843f5662a2b260f905640cb +https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py312h68727a3_5.conda#f9664ee31aed96c85b7319ab0a693341 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxaw-1.0.16-hb9d3cd8_0.conda#7c0a9bf62d573409d12ad14b362a96e5 +https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py312hef9b889_1.conda#8b7069e9792ee4e5b4919a7a306d2e67 +https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.379-h5a9005d_9.conda#5dc18b385893b7991a3bbeb135ad7c3e +https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.12.0-hd2e3451_0.conda#61f1c193452f0daa582f39634627ea33 +https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.1-pyhd8ed1ab_0.conda#e88d74bb7b9b89d4c9764286ceb94cc9 +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py312hc0a28a1_0.conda#8b5b812d4c18cb37bda7a7c8d3a6acb3 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.11.0-pyhd8ed1ab_0.conda#75c96f0655908f596a57be60251b78d4 
+https://conda.anaconda.org/conda-forge/linux-64/eccodes-2.38.3-h8bb6dbc_1.conda#73265d4acc551063cc5c5beab37f33c5 +https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h6470451_5.conda#1483ba046164be27df7f6eddbcec3a12 +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.2-pyhd8ed1ab_0.conda#636950f839e065401e2031624a414f0b +https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_0.conda#da304c192ad59975202859b367d0f6a2 -https://conda.anaconda.org/conda-forge/linux-64/julia-1.9.3-h06b7c97_0.conda#6214d0563598ae0cc9b954344b9f9c10 -https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.2-pyhd8ed1ab_0.conda#3cdbb2fa84490e5fd44c9f9806c0d292 +https://conda.anaconda.org/conda-forge/linux-64/julia-1.10.4-hf18f99d_1.conda#cc0ef9c191bab16211970a29b6787d69 https://conda.anaconda.org/conda-forge/noarch/lazy_loader-0.4-pyhd8ed1ab_1.conda#ec6f70b8a5242936567d4f886726a372 -https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.22.0-hc7a4891_1.conda#7811f043944e010e54640918ea82cecd -https://conda.anaconda.org/conda-forge/noarch/magics-python-1.5.8-pyhd8ed1ab_1.conda#3fd7e3db129f12362642108f23fde521 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.9.2-h353785f_1.conda#c363d0b330b4b21b4c1b10e0981d3a99 +https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.28.0-ha262f82_0.conda#9e7960f0b9ab3895ef73d92477c47dae +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-hc0ffecb_0.conda#83f045969988f5c7a65f3950b95a8b35 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py312hd3ec401_2.conda#2380c9ba933ffaac9ad16d8eac8e3318 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_h22f9119_106.conda#5b911bfe75855326bae6857451268e59 
-https://conda.anaconda.org/conda-forge/linux-64/numba-0.60.0-py311h4bc866e_0.conda#e32a210e9caf97383c35685fd2343512 -https://conda.anaconda.org/conda-forge/linux-64/numcodecs-0.13.0-py311h044e617_0.conda#9d783b29b6fc53e4d9a94f5befdfd34b -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.1.4-py311h320fe9a_0.conda#e44ccb61b6621bf3f8053ae66eba7397 -https://conda.anaconda.org/conda-forge/noarch/patsy-0.5.6-pyhd8ed1ab_0.conda#a5b55d1cb110cdcedc748b5c3e16e687 -https://conda.anaconda.org/conda-forge/linux-64/poppler-23.08.0-hf2349cb_2.conda#fb75401ae7e2e3f354dff72e9da95cae -https://conda.anaconda.org/conda-forge/noarch/pylint-plugin-utils-0.7-pyhd8ed1ab_0.tar.bz2#1657976383aee04dbb3ae3bdf654bb58 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.1-nompi_py312h21d6d8e_102.conda#9049ba34261ce7106220711d313fcf61 +https://conda.anaconda.org/conda-forge/noarch/pep8-naming-0.10.0-pyh9f0ad1d_0.tar.bz2#b3c5536e4f9f58a4b16adb6f1e11732d +https://conda.anaconda.org/conda-forge/linux-64/postgresql-17.0-h1122569_4.conda#028ea131f116f13bb2a4a382b5863a04 +https://conda.anaconda.org/conda-forge/noarch/pylint-plugin-utils-0.8.2-pyhd8ed1ab_0.conda#84377261c09c02182d76fbe79e69c9bf https://conda.anaconda.org/conda-forge/noarch/pyopenssl-24.2.1-pyhd8ed1ab_2.conda#85fa2fdd26d5a38792eb57bc72463f07 -https://conda.anaconda.org/conda-forge/linux-64/pys2index-0.1.5-py311h92ebd52_0.conda#ee757dff4cdb96bb972200c85b37f9e8 https://conda.anaconda.org/conda-forge/noarch/pytest-html-4.1.1-pyhd8ed1ab_0.conda#4d2040212307d18392a2687772b3a96d -https://conda.anaconda.org/conda-forge/linux-64/pywavelets-1.7.0-py311h07ce7c0_0.conda#73a9996e4b765455696b53bf74865b09 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py311he1f765f_0.conda#eb7e2a849cd47483d7e9eeb728c7a8c5 -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.2-py311he06c224_0.conda#c90e2469d7512f3bba893533a82d7a02 
-https://conda.anaconda.org/conda-forge/noarch/snuggs-1.4.7-pyhd8ed1ab_1.conda#5abeaa41ec50d4d1421a8bc8fbc93054 +https://conda.anaconda.org/conda-forge/linux-64/r-base-4.2.3-h32f4cee_16.conda#feee98a221344be7a447b80b410df867 +https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.5.2-py312h7a48858_1.conda#6b5f4c68483bd0c22bca9094dafc606b +https://conda.anaconda.org/conda-forge/noarch/seawater-3.3.5-pyhd8ed1ab_0.conda#8e1b01f05e8f97b0fcc284f957175903 +https://conda.anaconda.org/conda-forge/noarch/sparse-0.15.4-pyh267e887_1.conda#40d80cd9fa4cc759c6dba19ea96642db +https://conda.anaconda.org/conda-forge/linux-64/statsmodels-0.14.4-py312hc0a28a1_0.conda#97dc960f3d9911964d73c2cf240baea5 https://conda.anaconda.org/conda-forge/linux-64/tempest-remap-2.2.0-h13910d2_3.conda#7f10762cd62c8ad03323c4dc3ee544b1 -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.2-pyhd8ed1ab_1.conda#e804c43f58255e977093a2298e442bb8 -https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.267-hbf3e495_6.conda#a6caf5a0d9ca940d95f21d40afe8f857 -https://conda.anaconda.org/conda-forge/noarch/bokeh-3.5.2-pyhd8ed1ab_0.conda#38d785787ec83d0431b3855328395113 -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.2.0-py311h9f3472d_6.conda#ac7dc7f70f8d2c1d96ecb7e4cb196498 -https://conda.anaconda.org/conda-forge/noarch/distributed-2024.8.2-pyhd8ed1ab_0.conda#44d22b5d98a219a4c35cafe9bf3b9ce2 -https://conda.anaconda.org/conda-forge/linux-64/eccodes-2.32.1-h35c6de3_0.conda#09d044f9206700e021916675a16d1e4d -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.6.1-nompi_h4441c20_3.conda#1afc1e85414e228916732df2b8c5d93b -https://conda.anaconda.org/conda-forge/noarch/imagehash-4.3.1-pyhd8ed1ab_0.tar.bz2#132ad832787a2156be1f1b309835001a -https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.7.2-h6238fc3_5.conda#2fef4283b2bb45a66f8b81099d36721e -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py311h74b4f7c_0.conda#de8e36c9792f14eed7e11e672f03fbf0 
+https://conda.anaconda.org/conda-forge/noarch/tifffile-2024.9.20-pyhd8ed1ab_0.conda#6de55c7859ed314159eaf2b7b4f19cc7 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda#6b55867f385dd762ed99ea687af32a69 +https://conda.anaconda.org/conda-forge/noarch/xarray-2024.10.0-pyhd8ed1ab_0.conda#53e365732dfa053c4d19fc6b927392c4 +https://conda.anaconda.org/conda-forge/noarch/zarr-2.18.3-pyhd8ed1ab_0.conda#41abde21508578e02e3fd492e82a05cd +https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.11.0-h325d260_1.conda#11d926d1f4a75a1b03d1c053ca20424b +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.23.0-py312hf9745cd_2.conda#cc3ecff140731b46b970a7c4787b1823 +https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.10.0-pyhd8ed1ab_0.conda#9437cfe346eab83b011b4def99f0e879 +https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_0.conda#53df00540de0348ed1b2a62684dd912b +https://conda.anaconda.org/conda-forge/noarch/distributed-2024.11.0-pyhd8ed1ab_0.conda#497f3535cbb69cd2f02158e2e18ee0bb +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.4.2-nompi_h9e768e6_3.conda#c330e87e698bae8e7381c0315cf25dd0 +https://conda.anaconda.org/conda-forge/linux-64/gdal-3.9.2-py312h1299960_7.conda#9cf27e3f9d97ea13f250db9253a25dc8 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-fits-3.9.2-h2db6552_7.conda#524e64f1aa0ebc87230109e684f392f4 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-grib-3.9.2-hc3b29a1_7.conda#56a7436a66a1a4636001ce4b621a3a33 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf4-3.9.2-hd5ecb85_7.conda#9c8431dc0b83d5fe9c12a2c0b6861a72 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf5-3.9.2-h6283f77_7.conda#c8c82df3aece4e23804d178a8a8b308a 
+https://conda.anaconda.org/conda-forge/linux-64/libgdal-jp2openjpeg-3.9.2-h1b2c38e_7.conda#f0f86f8cb8835bb91acb8c7fa2c350b0 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-pdf-3.9.2-h600f43f_7.conda#567066db0820f4983a6741e429c651d1 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-pg-3.9.2-h5e77dd0_7.conda#e86b26f53ae868565e95fde5b10753d3 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-postgisraster-3.9.2-h5e77dd0_7.conda#3392965ffc4e8b7c66a532750ce0e91f +https://conda.anaconda.org/conda-forge/linux-64/libgdal-xls-3.9.2-h03c987c_7.conda#165f12373452e8d17889e9c877431acf +https://conda.anaconda.org/conda-forge/linux-64/magics-4.15.4-h24e9adf_1.conda#9731bb0d2a3917cab718fd7c90dea857 https://conda.anaconda.org/conda-forge/noarch/myproxyclient-2.1.1-pyhd8ed1ab_0.conda#bcdbeb2b693eba886583a907840c6421 https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_0.conda#0b57b5368ab7fc7cdc9e3511fa867214 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.1-nompi_py311hae66bec_102.conda#87b59caea7db5b79766e0776953d8c66 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.50.14-ha41ecd1_2.conda#1a66c10f6a0da3dbd2f3a68127e7f6a0 -https://conda.anaconda.org/conda-forge/noarch/pep8-naming-0.10.0-pyh9f0ad1d_0.tar.bz2#b3c5536e4f9f58a4b16adb6f1e11732d -https://conda.anaconda.org/conda-forge/noarch/pre-commit-3.8.0-pyha770c72_1.conda#004cff3a7f6fafb0a041fb575de85185 +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda#5971cc64048943605f352f7f8612de6c +https://conda.anaconda.org/conda-forge/linux-64/psyplot-1.5.1-py312h7900ff3_1.conda#f110e71421e5c86e50232cc027c6d85c +https://conda.anaconda.org/conda-forge/noarch/py-xgboost-2.1.2-cuda118_pyh40095f8_0.conda#aa5881b02bd9555a7b06c709aa33bd20 
https://conda.anaconda.org/conda-forge/noarch/pylint-celery-0.3-py_1.tar.bz2#e29456a611a62d3f26105a2f9c68f759 -https://conda.anaconda.org/conda-forge/noarch/pylint-django-2.5.3-pyhd8ed1ab_0.tar.bz2#00d8853fb1f87195722ea6a582cc9b56 +https://conda.anaconda.org/conda-forge/noarch/pylint-django-2.6.1-pyhd8ed1ab_0.conda#d1023ccf92d8235cd4808ef53e274a5e https://conda.anaconda.org/conda-forge/noarch/pylint-flask-0.6-py_0.tar.bz2#5a9afd3d0a61b08d59eed70fab859c1b -https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py311h9f3472d_3.conda#a7c4169b1c920361597ddacb461350fd -https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda#5ede4753180c7a550a443c430dc8ab52 -https://conda.anaconda.org/conda-forge/linux-64/scikit-learn-1.5.1-py311hd632256_0.conda#f3928b428ad924ecb8f0e9b71124ed7f -https://conda.anaconda.org/conda-forge/noarch/seawater-3.3.5-pyhd8ed1ab_0.conda#8e1b01f05e8f97b0fcc284f957175903 -https://conda.anaconda.org/conda-forge/noarch/sparse-0.15.4-pyhd8ed1ab_0.conda#846d12530687ba836791dd54db1f45c5 -https://conda.anaconda.org/conda-forge/linux-64/statsmodels-0.14.2-py311h18e1886_0.conda#82c29bf38b3fb66da09736106609b5fe -https://conda.anaconda.org/conda-forge/noarch/tifffile-2024.8.30-pyhd8ed1ab_0.conda#330700f370f15c7c5660ef6865e9cc43 -https://conda.anaconda.org/conda-forge/noarch/xarray-2024.7.0-pyhd8ed1ab_0.conda#a7d4ff4bf1502eaba3fbbaeba66969ec -https://conda.anaconda.org/conda-forge/noarch/zarr-2.18.3-pyhd8ed1ab_0.conda#41abde21508578e02e3fd492e82a05cd -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.23.0-py311h14de704_1.conda#27e5956e552c6e71f56cb1ec042617a8 -https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.9.4-pyhd8ed1ab_0.conda#c8b6a3126f659e311d3b5c61be254d95 -https://conda.anaconda.org/conda-forge/noarch/chart-studio-1.1.0-pyh9f0ad1d_0.tar.bz2#acd9a12a35e5a0221bdf39eb6e4811dc -https://conda.anaconda.org/conda-forge/noarch/cmocean-4.0.3-pyhd8ed1ab_0.conda#53df00540de0348ed1b2a62684dd912b 
-https://conda.anaconda.org/conda-forge/noarch/dask-jobqueue-0.9.0-pyhd8ed1ab_0.conda#a201de7d36907f2355426e019168d337 -https://conda.anaconda.org/conda-forge/noarch/esmpy-8.6.1-pyhc1e730c_0.conda#25a9661177fd68bfdb4314fd658e5c3b -https://conda.anaconda.org/conda-forge/linux-64/gdal-3.7.2-py311h815a124_5.conda#84a14fd830b72b09ef886a23de557a16 -https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h90689f9_2.tar.bz2#957a0255ab58aaf394a91725d73ab422 -https://conda.anaconda.org/conda-forge/linux-64/libarrow-15.0.2-h176673d_2_cpu.conda#c130ba0c765437749dbc37fa9de85ce5 -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.56.3-he3f83f7_1.conda#03bd1ddcc942867a19528877143b9852 -https://conda.anaconda.org/conda-forge/linux-64/magics-4.14.2-haee2765_1.conda#0c46d548472ee1b043c65d4ab4ad6a83 -https://conda.anaconda.org/conda-forge/noarch/multiurl-0.3.1-pyhd8ed1ab_0.conda#4dff4abb5728f7662ecaaa8bee3a0260 -https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda#15b51397e0fe8ea7d7da60d83eb76ebc -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/linux-64/ncl-6.6.2-he3b17a9_50.conda#a37fcb5a2da31cfebe6734b0fda20bd5 -https://conda.anaconda.org/conda-forge/linux-64/nco-5.2.8-hf7c1f58_0.conda#6cd18a9c6b8269b0cd101ba9cc3d02ab -https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda#8dab97d8a9616e07d779782995710aed -https://conda.anaconda.org/conda-forge/noarch/prospector-1.10.3-pyhd8ed1ab_0.conda#f551d4d859a1d70c6abff8310a655481 -https://conda.anaconda.org/conda-forge/linux-64/psyplot-1.5.1-py311h38be061_0.conda#b980793f61c0dc532b62faa0a0f0a271 -https://conda.anaconda.org/conda-forge/noarch/py-xgboost-2.1.1-cuda118_pyhf54b869_2.conda#35d99c71383da3c2f88a97d471f79e1f -https://conda.anaconda.org/conda-forge/noarch/pyroma-4.2-pyhd8ed1ab_0.conda#fe2aca9a5d4cb08105aefc451ef96950 
-https://conda.anaconda.org/conda-forge/linux-64/r-base-4.2.3-h0887e52_8.conda#34cb3750c8a6da10a490e470f87e670b -https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.3.9-py311h40fbdff_0.conda#dcee6ba4d1ac6af18827d0941b6a1b42 -https://conda.anaconda.org/conda-forge/noarch/requests-cache-1.2.1-pyhd8ed1ab_0.conda#c6089540fed51a9a829aa19590fa925b -https://conda.anaconda.org/conda-forge/linux-64/scikit-image-0.24.0-py311h044e617_2.conda#5ea04101a9da03787ba90e9c741eb818 -https://conda.anaconda.org/conda-forge/noarch/seaborn-base-0.13.2-pyhd8ed1ab_2.conda#b713b116feaf98acdba93ad4d7f90ca1 -https://conda.anaconda.org/conda-forge/noarch/cads-api-client-1.3.2-pyhd8ed1ab_0.conda#3d0aba33db35ed85eb23ee6948ff79a0 -https://conda.anaconda.org/conda-forge/linux-64/cdo-2.3.0-h24bcfa3_0.conda#238311a432a8e49943d3348e279af714 -https://conda.anaconda.org/conda-forge/noarch/esgf-pyclient-0.3.1-pyhca7485f_3.conda#1d43833138d38ad8324700ce45a7099a -https://conda.anaconda.org/conda-forge/linux-64/fiona-1.9.5-py311hbac4ec9_0.conda#786d3808394b1bdfd3f41f2e2c67279e -https://conda.anaconda.org/conda-forge/linux-64/graphviz-8.1.0-h28d9a01_0.conda#33628e0e3de7afd2c8172f76439894cb -https://conda.anaconda.org/conda-forge/noarch/iris-3.10.0-pyha770c72_1.conda#b7212cd8247ce909631fdcb77015914a -https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-15.0.2-hac33072_2_cpu.conda#12951edff85582aedcd2db0b79393102 -https://conda.anaconda.org/conda-forge/linux-64/libarrow-flight-15.0.2-hd42f311_2_cpu.conda#dcc3a1e12157bbbbae96029d9d34fd0e -https://conda.anaconda.org/conda-forge/linux-64/libarrow-gandiva-15.0.2-hd4ab825_2_cpu.conda#a4aa5cd69e0d1959f7c965437e7ac93d -https://conda.anaconda.org/conda-forge/linux-64/libparquet-15.0.2-h6a7eafb_2_cpu.conda#b06caaa4ef20db071dc832323701e5e3 -https://conda.anaconda.org/conda-forge/noarch/lime-0.2.0.1-pyhd8ed1ab_1.tar.bz2#789ce01416721a5533fb74aa4361fd13 
-https://conda.anaconda.org/conda-forge/noarch/mapgenerator-1.0.7-pyhd8ed1ab_0.conda#d18db96ef2a920b0ecefe30282b0aecf -https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1.conda#e2d2abb421c13456a9a9f80272fdf543 -https://conda.anaconda.org/conda-forge/linux-64/psy-simple-1.5.1-py311h38be061_0.conda#65a408ecf84afc51b1d437f888d8e80f -https://conda.anaconda.org/conda-forge/noarch/py-cordex-0.8.0-pyhd8ed1ab_0.conda#fba377622e74ee0bbeb8ccae9fa593d3 +https://conda.anaconda.org/conda-forge/linux-64/python-eccodes-2.37.0-py312hc0a28a1_0.conda#476b0357e207e10d2b7b13ed82156e6d +https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py312hc0a28a1_3.conda#81bbcb20ea4a53b05a8cf51f31496038 https://conda.anaconda.org/conda-forge/noarch/r-abind-1.4_5-r42hc72bb7e_1005.conda#f2744985b083b1bbffd4df19437cf1e8 https://conda.anaconda.org/conda-forge/linux-64/r-backports-1.5.0-r42hb1dbf0f_0.conda#d879e1fbd80113312364a5db3682c789 https://conda.anaconda.org/conda-forge/noarch/r-bigmemory.sri-0.1.8-r42hc72bb7e_0.conda#383f36b5a0b7dd7c467aa1b6b5fe7307 @@ -543,7 +523,6 @@ https://conda.anaconda.org/conda-forge/linux-64/r-colorspace-2.1_0-r42h57805ef_1 https://conda.anaconda.org/conda-forge/linux-64/r-contfrac-1.1_12-r42h57805ef_1004.conda#bc308888aa4b4fb4e37a7a17fdc911c9 https://conda.anaconda.org/conda-forge/noarch/r-cpp11-0.4.7-r42hc72bb7e_0.conda#941d7bcf2b94a682419ea1fbf6789d1f https://conda.anaconda.org/conda-forge/noarch/r-crayon-1.5.3-r42hc72bb7e_0.conda#4a74a6114bbea1ad8d488e99b83df3da -https://conda.anaconda.org/conda-forge/noarch/r-dbi-1.2.3-r42hc72bb7e_0.conda#b283bb5431a4b960cfa3f82043d1437b https://conda.anaconda.org/conda-forge/linux-64/r-desolve-1.40-r42hd9ac46e_0.conda#7232f8b5707fc9739cb2f8fdc5b4b64d https://conda.anaconda.org/conda-forge/linux-64/r-digest-0.6.36-r42ha18555a_0.conda#332551d9a37018826d528cf16701bd2b https://conda.anaconda.org/conda-forge/noarch/r-docopt-0.7.1-r42hc72bb7e_3.conda#99be998b67c40ef6eb1a5af90e307c1d @@ 
-554,13 +533,12 @@ https://conda.anaconda.org/conda-forge/linux-64/r-farver-2.1.2-r42ha18555a_0.con https://conda.anaconda.org/conda-forge/noarch/r-functional-0.6-r42ha770c72_1004.conda#9e27c34589b883accd340d651bdeaa02 https://conda.anaconda.org/conda-forge/noarch/r-generics-0.1.3-r42hc72bb7e_2.conda#c492355d73e184353c82b62f5087a601 https://conda.anaconda.org/conda-forge/noarch/r-geomapdata-2.0_2-r42hc72bb7e_0.conda#799a671bad7a89ac1d9da5cb98f75367 -https://conda.anaconda.org/conda-forge/linux-64/r-git2r-0.33.0-r42hbae1c7c_0.conda#2cdc8746b3283f02e5ba387bcfc51aa1 +https://conda.anaconda.org/conda-forge/linux-64/r-git2r-0.30.1-r42hf72769b_1.tar.bz2#f64adeea481006f0cb22bdcc066680df https://conda.anaconda.org/conda-forge/linux-64/r-glue-1.7.0-r42h57805ef_0.conda#eab803a28d66337ae3732b04c5f5604f https://conda.anaconda.org/conda-forge/linux-64/r-goftest-1.2_3-r42h57805ef_2.conda#4210e40893bbac7533714429ac4d0fe9 https://conda.anaconda.org/conda-forge/linux-64/r-isoband-0.2.7-r42ha503ecb_2.conda#44979df954a15195470f336cd18b5eb6 https://conda.anaconda.org/conda-forge/noarch/r-iterators-1.0.14-r42hc72bb7e_2.conda#616ab7b008326d3d76d59ba35b3fb592 https://conda.anaconda.org/conda-forge/linux-64/r-jsonlite-1.8.8-r42h57805ef_0.conda#d0b27ba963de139270a7b53f897afdf6 -https://conda.anaconda.org/conda-forge/linux-64/r-kernsmooth-2.23_24-r42hc2011d3_0.conda#aac4c7efaa5f2f7013cff5dabe0255eb https://conda.anaconda.org/conda-forge/noarch/r-labeling-0.4.3-r42hc72bb7e_0.conda#b9b940011dd81d8b60859fcd0d9775f4 https://conda.anaconda.org/conda-forge/linux-64/r-lattice-0.22_6-r42h57805ef_0.conda#93cee3961cc5277443a3e437f6991010 https://conda.anaconda.org/conda-forge/linux-64/r-lazyeval-0.2.2-r42h57805ef_4.conda#109112b1c26d932414daa139a45d3a69 @@ -574,7 +552,6 @@ https://conda.anaconda.org/conda-forge/noarch/r-nbclust-3.0.1-r42hc72bb7e_2.cond https://conda.anaconda.org/conda-forge/linux-64/r-ncdf4-1.22-r42h5647f33_0.conda#d23e6cd8fe41079eb1421b6a6d1f1c67 
https://conda.anaconda.org/conda-forge/linux-64/r-pcict-0.5_4.4-r42h57805ef_1.conda#6e5770da5c174a2617096cbc2b8d96f4 https://conda.anaconda.org/conda-forge/noarch/r-pkgconfig-2.0.3-r42hc72bb7e_3.conda#469b66f84a5d234689b423c9821b188c -https://conda.anaconda.org/conda-forge/linux-64/r-proxy-0.4_27-r42h57805ef_2.conda#1d2ea39d52acbcc9d7db8a0abe5fdf7b https://conda.anaconda.org/conda-forge/linux-64/r-ps-1.7.6-r42h57805ef_0.conda#3a592c79e0fade3a0c3574696fa143a3 https://conda.anaconda.org/conda-forge/noarch/r-r.methodss3-1.8.2-r42hc72bb7e_2.conda#305fe9f97f7598d9722c76d6be7bf794 https://conda.anaconda.org/conda-forge/noarch/r-r6-2.5.1-r42hc72bb7e_2.conda#1473a12b55128f8ac776ae5595a4d0cb @@ -593,21 +570,32 @@ https://conda.anaconda.org/conda-forge/noarch/r-withr-3.0.0-r42hc72bb7e_0.conda# https://conda.anaconda.org/conda-forge/linux-64/r-xfun-0.45-r42ha18555a_0.conda#9e13c392bfcee4a261e4b513d6d862e7 https://conda.anaconda.org/conda-forge/noarch/r-xmlparsedata-1.0.5-r42hc72bb7e_2.conda#2f3614450b54f222c1eff786ec2a45ec https://conda.anaconda.org/conda-forge/linux-64/r-yaml-2.3.8-r42h57805ef_0.conda#97f60a93ca12f4fdd5f44049dcee4345 -https://conda.anaconda.org/conda-forge/noarch/seaborn-0.13.2-hd8ed1ab_2.conda#a79d8797f62715255308d92d3a91ef2e -https://conda.anaconda.org/conda-forge/noarch/xesmf-0.8.7-pyhd8ed1ab_0.conda#42301f78a4c6d2500f891b9723160d5c -https://conda.anaconda.org/conda-forge/noarch/xgboost-2.1.1-cuda118_pyh98e67c5_2.conda#8c61e30dd8325ea1598e9d0af3eb2582 -https://conda.anaconda.org/conda-forge/noarch/cdsapi-0.7.2-pyhd8ed1ab_1.conda#0b896fef433a120a80f37e4ad57a3850 -https://conda.anaconda.org/conda-forge/linux-64/imagemagick-7.1.1_19-pl5321h7e74ff9_0.conda#a4a0ce7caba20cae61aac9aeacbd76c2 -https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-15.0.2-hac33072_2_cpu.conda#48c711b4e07664ec7b245a9664be60a1 -https://conda.anaconda.org/conda-forge/linux-64/libarrow-flight-sql-15.0.2-h9241762_2_cpu.conda#97e46f0f20157e19487ca3e65100247a 
-https://conda.anaconda.org/conda-forge/noarch/nbconvert-pandoc-7.16.4-hd8ed1ab_1.conda#37cec2cf68f4c09563d8bc833791096b -https://conda.anaconda.org/conda-forge/linux-64/psy-maps-1.5.0-py311h38be061_1.conda#d7901c26884613539e958c10e9973413 -https://conda.anaconda.org/conda-forge/linux-64/psy-reg-1.5.0-py311h38be061_1.conda#1077e7fc4aa594c5896cf8b8fa672f88 -https://conda.anaconda.org/conda-forge/linux-64/pydot-3.0.1-py311h38be061_0.conda#036ce626484c4458cc99b6d55bb036eb -https://conda.anaconda.org/conda-forge/noarch/python-cdo-1.6.0-pyhd8ed1ab_0.conda#3fd1a0b063c1fbbe4b7bd5a5a7601e84 +https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda#5ede4753180c7a550a443c430dc8ab52 +https://conda.anaconda.org/conda-forge/linux-64/scikit-image-0.24.0-py312hf9745cd_3.conda#3612f99c589d51c363c8b90c0bcf3a18 +https://conda.anaconda.org/conda-forge/noarch/seaborn-base-0.13.2-pyhd8ed1ab_2.conda#b713b116feaf98acdba93ad4d7f90ca1 +https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.26.0-h86fa3b2_0.conda#061175d9d4c046a1cf8bffe95a359fab +https://conda.anaconda.org/conda-forge/linux-64/cdo-2.4.1-h9fe33b1_1.conda#a326dab3d2a1a8e32c2a6f792fac3161 +https://conda.anaconda.org/conda-forge/noarch/cfgrib-0.9.14.1-pyhd8ed1ab_0.conda#1870fe8c9bd8967429e227be28ab94d2 +https://conda.anaconda.org/conda-forge/noarch/chart-studio-1.1.0-pyh9f0ad1d_0.tar.bz2#acd9a12a35e5a0221bdf39eb6e4811dc +https://conda.anaconda.org/conda-forge/noarch/dask-jobqueue-0.9.0-pyhd8ed1ab_0.conda#a201de7d36907f2355426e019168d337 +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.4.2-pyhc1e730c_4.conda#ddcf387719b2e44df0cc4dd467643951 +https://conda.anaconda.org/conda-forge/linux-64/imagemagick-7.1.1_39-imagemagick_hcfc5581_1.conda#1144fe07cf76921ec664b868453027d3 +https://conda.anaconda.org/conda-forge/noarch/iris-3.10.0-pyha770c72_2.conda#5d8984ceb5fdf85110ca7108114ecc18 +https://conda.anaconda.org/conda-forge/linux-64/libarrow-17.0.0-h8d2e343_13_cpu.conda#dc379f362829d5df5ce6722565110029 
+https://conda.anaconda.org/conda-forge/linux-64/libgdal-kea-3.9.2-h1df15e4_7.conda#c693e703649051ee9db0fabd4fcd0483 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-netcdf-3.9.2-hf2d2f32_7.conda#4015ef020928219acc0b5c9edbce8d30 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-tiledb-3.9.2-h4a3bace_2.conda#c3fac34ecba2fcf9d5d31a03b975d5a1 +https://conda.anaconda.org/conda-forge/noarch/lime-0.2.0.1-pyhd8ed1ab_1.tar.bz2#789ce01416721a5533fb74aa4361fd13 +https://conda.anaconda.org/conda-forge/noarch/multiurl-0.3.2-pyhd8ed1ab_0.conda#9b6cf42ef472b332970282ec87d2e5d4 +https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda#15b51397e0fe8ea7d7da60d83eb76ebc +https://conda.anaconda.org/conda-forge/linux-64/nco-5.2.8-hf7c1f58_0.conda#6cd18a9c6b8269b0cd101ba9cc3d02ab +https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda#8dab97d8a9616e07d779782995710aed +https://conda.anaconda.org/conda-forge/noarch/prospector-1.12.1-pyhd8ed1ab_0.conda#8621ba9cf057da26d371b87cd2264259 +https://conda.anaconda.org/conda-forge/linux-64/psy-simple-1.5.1-py312h7900ff3_0.conda#683ec8787a523de54b02c885e2c2aefa +https://conda.anaconda.org/conda-forge/linux-64/pydot-3.0.2-py312h7900ff3_0.conda#a972ba77217a2cac592c41dd3cc56dfd +https://conda.anaconda.org/conda-forge/noarch/pyroma-4.2-pyhd8ed1ab_0.conda#fe2aca9a5d4cb08105aefc451ef96950 https://conda.anaconda.org/conda-forge/linux-64/r-bigmemory-4.6.4-r42ha503ecb_0.conda#12b6fa8fe80a6494a948c6ea2f34340d https://conda.anaconda.org/conda-forge/linux-64/r-checkmate-2.3.1-r42h57805ef_0.conda#9febce7369c72d991e2399d7d28f3390 -https://conda.anaconda.org/conda-forge/linux-64/r-class-7.3_22-r42h57805ef_1.conda#97476afece904fbbe73762b9cf8c4d83 https://conda.anaconda.org/conda-forge/linux-64/r-climdex.pcic-1.1_11-r42ha503ecb_2.conda#cff1d95fe315f109a1f01a7ef112fdd6 https://conda.anaconda.org/conda-forge/noarch/r-desc-1.4.3-r42hc72bb7e_0.conda#8c535581a9a3a1e2a0f5ef6d7e4d6a7f 
https://conda.anaconda.org/conda-forge/linux-64/r-ellipsis-0.3.2-r42h57805ef_2.conda#1673236a1895ca5cce15c888435ad2f9 @@ -628,17 +616,27 @@ https://conda.anaconda.org/conda-forge/noarch/r-rex-1.2.1-r42hc72bb7e_2.conda#b4 https://conda.anaconda.org/conda-forge/linux-64/r-sp-2.1_4-r42hb1dbf0f_0.conda#681bb0a7290d86f9f8bf8dc816f114c0 https://conda.anaconda.org/conda-forge/linux-64/r-spam-2.10_0-r42h9f9f741_0.conda#159d8ab59a2777a26a739f8090b5a80c https://conda.anaconda.org/conda-forge/linux-64/r-timechange-0.3.0-r42ha503ecb_0.conda#3d62906e9c1fecf61370a3ad6e808e5e -https://conda.anaconda.org/conda-forge/linux-64/r-units-0.8_5-r42ha503ecb_0.conda#90b4c99051df9db2f825d6259dcf12cd -https://conda.anaconda.org/conda-forge/linux-64/r-wk-0.9.1-r42ha503ecb_0.conda#3c5ea742d2069f956ea6ff02a2aadce1 https://conda.anaconda.org/conda-forge/linux-64/r-xml2-1.3.6-r42hbfba7a4_1.conda#5c3d7a89a2d5e1c0885f92d1aa6fde30 https://conda.anaconda.org/conda-forge/linux-64/r-zoo-1.8_12-r42h57805ef_1.conda#5367d265c0c9c151dea85f1ccb515ec1 -https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-15.0.2-h9241762_2_cpu.conda#c18bbb60ed10774dbf9ea86484728a74 -https://conda.anaconda.org/conda-forge/noarch/nbconvert-7.16.4-hd8ed1ab_1.conda#ab83e3b9ca2b111d8f332e9dc8b2170f +https://conda.anaconda.org/conda-forge/noarch/requests-cache-1.2.1-pyhd8ed1ab_0.conda#c6089540fed51a9a829aa19590fa925b +https://conda.anaconda.org/conda-forge/noarch/seaborn-0.13.2-hd8ed1ab_2.conda#a79d8797f62715255308d92d3a91ef2e +https://conda.anaconda.org/conda-forge/noarch/xgboost-2.1.2-cuda118_pyh256f914_0.conda#2dcf3e60ef65fd4cb95048f2491f6a89 +https://conda.anaconda.org/conda-forge/noarch/cads-api-client-1.5.2-pyhd8ed1ab_0.conda#e7005effa79f1493a51404873d6eb5a0 +https://conda.anaconda.org/conda-forge/noarch/esgf-pyclient-0.3.1-pyhd8ed1ab_4.conda#f481c17430f801e68ee3b57cc30ecd2e +https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-17.0.0-h5888daf_13_cpu.conda#b654d072b8d5da807495e49b28a0b884 
+https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.9.2-ha770c72_7.conda#63779711c7afd4fcf9cea67538baa67a +https://conda.anaconda.org/conda-forge/linux-64/libparquet-17.0.0-h39682fd_13_cpu.conda#49c60a8dc089d8127b9368e9eb6c1a77 +https://conda.anaconda.org/conda-forge/noarch/mapgenerator-1.0.7-pyhd8ed1ab_0.conda#d18db96ef2a920b0ecefe30282b0aecf +https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1.conda#e2d2abb421c13456a9a9f80272fdf543 https://conda.anaconda.org/conda-forge/noarch/prov-2.0.0-pyhd3deb0d_0.tar.bz2#aa9b3ad140f6c0668c646f32e20ccf82 +https://conda.anaconda.org/conda-forge/linux-64/psy-maps-1.5.0-py312h7900ff3_1.conda#080bc8f34a9cb0ab81ae0369fd43b7ab +https://conda.anaconda.org/conda-forge/linux-64/psy-reg-1.5.0-py312h7900ff3_1.conda#ea719cfcc2e5b815b137b7082ece8aeb +https://conda.anaconda.org/conda-forge/noarch/py-cordex-0.8.0-pyhd8ed1ab_0.conda#fba377622e74ee0bbeb8ccae9fa593d3 +https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-17.0.0-py312h01725c0_2_cpu.conda#add603bfa43d9bf3f06783f780e1a817 +https://conda.anaconda.org/conda-forge/noarch/python-cdo-1.6.0-pyhd8ed1ab_0.conda#3fd1a0b063c1fbbe4b7bd5a5a7601e84 https://conda.anaconda.org/conda-forge/linux-64/r-akima-0.6_3.4-r42h61816a4_2.conda#8536251313f441c4d70ff11ad976d294 https://conda.anaconda.org/conda-forge/noarch/r-callr-3.7.6-r42hc72bb7e_0.conda#4fb1765d6dc531936db81af3f6be316a https://conda.anaconda.org/conda-forge/noarch/r-doparallel-1.0.17-r42hc72bb7e_2.conda#1cddfbaade4416f0234670391bb31ba2 -https://conda.anaconda.org/conda-forge/linux-64/r-e1071-1.7_14-r42ha503ecb_0.conda#6e147da5592263573409bce2e9c39b3c https://conda.anaconda.org/conda-forge/noarch/r-gtable-0.3.5-r42hc72bb7e_0.conda#b5cff9c0564c9fcd8b62632430a0cee5 https://conda.anaconda.org/conda-forge/noarch/r-hypergeo-1.2_13-r42hc72bb7e_1004.conda#7a207a992c606168044d13dcffd80ad4 https://conda.anaconda.org/conda-forge/noarch/r-knitr-1.47-r42hc72bb7e_0.conda#0a20a2f6546bc0cde246c53a92a7964d @@ 
-647,12 +645,16 @@ https://conda.anaconda.org/conda-forge/linux-64/r-lubridate-1.9.3-r42h57805ef_0. https://conda.anaconda.org/conda-forge/linux-64/r-mgcv-1.9_1-r42h316c678_0.conda#5c3d738118f5948f6cc29ccb63d6e2ff https://conda.anaconda.org/conda-forge/noarch/r-r.utils-2.12.3-r42hc72bb7e_0.conda#81f505dec8850e227d9b2a7e88fa505f https://conda.anaconda.org/conda-forge/linux-64/r-reshape-0.8.9-r42hc72bb7e_2.conda#17e75917161bf824248cc54a412b4394 -https://conda.anaconda.org/conda-forge/linux-64/r-s2-1.1.6-r42h5eac2b3_0.conda#c3835d051156c3eacce21caec8061594 https://conda.anaconda.org/conda-forge/noarch/r-scales-1.3.0-r42hc72bb7e_0.conda#0af4021fe6d0047bbf7a34bf21c50bdd https://conda.anaconda.org/conda-forge/linux-64/r-specsverification-0.5_3-r42h7525677_2.tar.bz2#1521b8a303852af0496245e368d3c61c +https://conda.anaconda.org/conda-forge/linux-64/r-splancs-2.01_45-r42hbcb9c34_0.conda#bcd96dc088f54514a54d57e6b8ed51b6 https://conda.anaconda.org/conda-forge/linux-64/r-vctrs-0.6.5-r42ha503ecb_0.conda#5689030c60302fb5bb7a48b54c11dbe8 -https://conda.anaconda.org/conda-forge/linux-64/pyarrow-15.0.2-py311h78dcc79_2_cpu.conda#6f20003320c613f2505cf248bfce48f6 -https://conda.anaconda.org/conda-forge/linux-64/r-classint-0.4_10-r42h61816a4_0.conda#668a2f3e36b373878e698b1387bea45b +https://conda.anaconda.org/conda-forge/noarch/xesmf-0.8.7-pyhd8ed1ab_0.conda#42301f78a4c6d2500f891b9723160d5c +https://conda.anaconda.org/conda-forge/noarch/cdsapi-0.7.4-pyhd8ed1ab_0.conda#67a29b663023b8c0e3d8a73013ea3e23 +https://conda.anaconda.org/conda-forge/linux-64/fiona-1.10.1-py312h5aa26c2_1.conda#4a30f4277a1894928a7057d0e14c1c95 +https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-17.0.0-h5888daf_13_cpu.conda#cd2c36e8865b158b82f61c6aac28b7e1 +https://conda.anaconda.org/conda-forge/noarch/nbconvert-pandoc-7.16.4-hd8ed1ab_1.conda#37cec2cf68f4c09563d8bc833791096b +https://conda.anaconda.org/conda-forge/linux-64/ncl-6.6.2-h7cb714c_54.conda#7363202c15302898deb49e82ca3e5f58 
https://conda.anaconda.org/conda-forge/noarch/r-cyclocomp-1.1.1-r42hc72bb7e_0.conda#6bd41a85dc43541400311eca03d4e2d4 https://conda.anaconda.org/conda-forge/noarch/r-gridextra-2.3-r42hc72bb7e_1005.conda#da116b29105a8d48571975a185e9bb94 https://conda.anaconda.org/conda-forge/noarch/r-lmomco-2.5.1-r42hc72bb7e_0.conda#6efbdfe5d41b3ef5652be1ea2e0a6e3c @@ -660,30 +662,32 @@ https://conda.anaconda.org/conda-forge/noarch/r-multiapply-2.1.4-r42hc72bb7e_1.c https://conda.anaconda.org/conda-forge/noarch/r-pillar-1.9.0-r42hc72bb7e_1.conda#07d5ce8e710897745f14c951ff947cdd https://conda.anaconda.org/conda-forge/linux-64/r-purrr-1.0.2-r42h57805ef_0.conda#7985dada48799b7814ca069794d0b1a3 https://conda.anaconda.org/conda-forge/noarch/r-r.cache-0.16.0-r42hc72bb7e_2.conda#34daac4e8faee056f15abdee858fc721 -https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.13-pyhd8ed1ab_0.conda#b77166a6032a2b8e52b3fee90d62ea4d -https://conda.anaconda.org/conda-forge/noarch/pyarrow-hotfix-0.6-pyhd8ed1ab_0.conda#ccc06e6ef2064ae129fab3286299abda +https://conda.anaconda.org/conda-forge/linux-64/rasterio-1.3.11-py312hd177ed6_1.conda#246c5f31c607ecfe1ece1e8cc6ecc9c5 +https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-17.0.0-hf54134d_13_cpu.conda#46f41533959eee8826c09e55976b8c06 +https://conda.anaconda.org/conda-forge/noarch/nbconvert-7.16.4-hd8ed1ab_1.conda#ab83e3b9ca2b111d8f332e9dc8b2170f https://conda.anaconda.org/conda-forge/noarch/r-climprojdiags-0.3.3-r42hc72bb7e_0.conda#f34d40a3f0f9160fdd2bccaae8e185d1 https://conda.anaconda.org/conda-forge/noarch/r-lintr-3.1.2-r42hc72bb7e_0.conda#ef49cc606b94a9d5f30b9c48f5f68848 -https://conda.anaconda.org/conda-forge/linux-64/r-sf-1.0_14-r42h85a8d9e_1.conda#ad59b523759f3e8acc6fd623cfbfb5a9 https://conda.anaconda.org/conda-forge/linux-64/r-tibble-3.2.1-r42h57805ef_2.conda#b1278a5148c9e52679bb72112770cdc3 -https://conda.anaconda.org/conda-forge/noarch/dask-2024.8.2-pyhd8ed1ab_0.conda#3adbad9b363bd0163ef2ac59f095cc13 
+https://conda.anaconda.org/conda-forge/linux-64/pyarrow-17.0.0-py312h9cebb41_2.conda#5f7d505626cb057e1320bbd46dd02ef2 https://conda.anaconda.org/conda-forge/noarch/r-ggplot2-3.5.1-r42hc72bb7e_0.conda#77cc0254e0dc92e5e7791ce20a170f74 https://conda.anaconda.org/conda-forge/noarch/r-rematch2-2.1.2-r42hc72bb7e_3.conda#5ccfee6f3b94e6b247c7e1929b24f1cc -https://conda.anaconda.org/conda-forge/noarch/iris-esmf-regrid-0.11.0-pyhd8ed1ab_0.conda#b30cbc09f81d9dbaf8b74f2c8eacddc5 +https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.17-pyhd8ed1ab_0.conda#4f75a3a76e9f693fc33be59485f46fcf https://conda.anaconda.org/conda-forge/noarch/r-styler-1.10.3-r42hc72bb7e_0.conda#1b2b8fa85a9d0556773abac4763d8ef9 https://conda.anaconda.org/conda-forge/linux-64/r-tlmoments-0.7.5.3-r42ha503ecb_1.conda#6aa1414e06dfffc39d3b5ca78b60b377 https://conda.anaconda.org/conda-forge/noarch/r-viridis-0.6.5-r42hc72bb7e_0.conda#959f69b6dfd4b620a15489975fa27670 -https://conda.anaconda.org/conda-forge/noarch/esmvalcore-2.11.0-pyhd8ed1ab_0.conda#ae2c9a927475f5519d0164c542cde378 +https://conda.anaconda.org/conda-forge/noarch/dask-2024.11.0-pyhd8ed1ab_0.conda#9a25bf7e2a910e85209218896f2adeb9 https://conda.anaconda.org/conda-forge/linux-64/r-fields-15.2-r42h61816a4_0.conda#d84fe2f9e893e92089370b195e2263a0 https://conda.anaconda.org/conda-forge/noarch/r-spei-1.8.1-r42hc72bb7e_1.conda#7fe060235dac0fc0b3d387f98e79d128 -https://conda.anaconda.org/conda-forge/linux-64/r-geomap-2.5_5-r42h57805ef_0.conda#e58ccf961b56e57d7c1e50995005b0bd +https://conda.anaconda.org/conda-forge/noarch/iris-esmf-regrid-0.11.0-pyhd8ed1ab_1.conda#86286b197e33e3b034416c18ba0f574c +https://conda.anaconda.org/conda-forge/linux-64/r-geomap-2.5_0-r42h57805ef_2.conda#020534c6abdee4f1253c221e926a5341 +https://conda.anaconda.org/conda-forge/noarch/esmvalcore-2.11.0-pyhd8ed1ab_0.conda#ae2c9a927475f5519d0164c542cde378 https://conda.anaconda.org/conda-forge/noarch/r-s2dverification-2.10.3-r42hc72bb7e_2.conda#8079a86a913155fe2589ec0b76dc9f5e 
-https://conda.anaconda.org/conda-forge/noarch/autodocsumm-0.2.13-pyhd8ed1ab_0.conda#b2f4f2f3923646802215b040e63d042e +https://conda.anaconda.org/conda-forge/noarch/autodocsumm-0.2.14-pyhd8ed1ab_0.conda#351a11ac1215eb4f6c5b82e30070277a https://conda.anaconda.org/conda-forge/noarch/nbsphinx-0.9.5-pyhd8ed1ab_0.conda#b808b8a0494c5cca76200c73e260a060 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.15.4-pyhd8ed1ab_0.conda#c7c50dd5192caa58a05e6a4248a27acb +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.0-pyhd8ed1ab_0.conda#344261b0e77f5d2faaffb4eac225eeb7 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_0.conda#9075bd8c033f0257122300db914e49c9 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_0.conda#b3bcc38c471ebb738854f52a36059b48 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_0.conda#e25640d692c02e8acfff0372f547e940 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_0.conda#d6e5ea5fe00164ac6c2dcc5d76a42192 -https://conda.anaconda.org/conda-forge/noarch/sphinx-8.0.2-pyhd8ed1ab_0.conda#625004bdab1b171dfd1e29ebb30c40dd +https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_0.conda#05706dd5a145a9c91861495cd435409a https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_0.conda#e507335cb4ca9cff4c3d0fa9cdab255e diff --git a/config-user-example.yml b/config-user-example.yml deleted file mode 100644 index c102928db9..0000000000 --- a/config-user-example.yml +++ /dev/null @@ -1,274 +0,0 @@ -############################################################################### -# Example user configuration file for ESMValTool -############################################################################### -# -# Note for users: -# -------------- -# Site-specific entries for different HPC centers are given at the bottom of -# this file. 
Comment out/replace as needed. This default version of the file -# can be used in combination with the command line argument -# ``search_esgf=when_missing``. If only certain values are allowed for an -# option, these are listed after ``---``. The option in square brackets is the -# default value, i.e., the one that is used if this option is omitted in the -# file. -# -############################################################################### -# -# Note for developers: -# ------------------- -# Two identical copies of this file (``ESMValTool/config-user-example.yml`` and -# ``ESMValCore/esmvalcore/config-user.yml``) exist. If you change one of it, -# make sure to apply the changes to the other. -# -############################################################################### ---- - -# Destination directory where all output will be written -# Includes log files and performance stats. -output_dir: ~/esmvaltool_output - -# Auxiliary data directory -# Used by some recipes to look for additional datasets. -auxiliary_data_dir: ~/auxiliary_data - -# Automatic data download from ESGF --- [never]/when_missing/always -# Use automatic download of missing CMIP3, CMIP5, CMIP6, CORDEX, and obs4MIPs -# data from ESGF. ``never`` disables this feature, which is useful if you are -# working on a computer without an internet connection, or if you have limited -# disk space. ``when_missing`` enables the automatic download for files that -# are not available locally. ``always`` will always check ESGF for the latest -# version of a file, and will only use local files if they correspond to that -# latest version. -search_esgf: never - -# Directory for storing downloaded climate data -# Make sure to use a directory where you can store multiple GBs of data. Your -# home directory on a HPC is usually not suited for this purpose, so please -# change the default value in this case! -download_dir: ~/climate_data - -# Run at most this many tasks in parallel --- [null]/1/2/3/4/... 
-# Set to ``null`` to use the number of available CPUs. If you run out of -# memory, try setting max_parallel_tasks to ``1`` and check the amount of -# memory you need for that by inspecting the file ``run/resource_usage.txt`` in -# the output directory. Using the number there you can increase the number of -# parallel tasks again to a reasonable number for the amount of memory -# available in your system. -max_parallel_tasks: null - -# Log level of the console --- debug/[info]/warning/error -# For much more information printed to screen set log_level to ``debug``. -log_level: info - -# Exit on warning --- true/[false] -# # Only used in NCL diagnostic scripts. -exit_on_warning: false - -# Plot file format --- [png]/pdf/ps/eps/epsi -output_file_type: png - -# Remove the ``preproc`` directory if the run was successful --- [true]/false -# By default this option is set to ``true``, so all preprocessor output files -# will be removed after a successful run. Set to ``false`` if you need those -# files. -remove_preproc_dir: true - -# Use netCDF compression --- true/[false] -compress_netcdf: false - -# Save intermediary cubes in the preprocessor --- true/[false] -# Setting this to ``true`` will save the output cube from each preprocessing -# step. These files are numbered according to the preprocessing order. -save_intermediary_cubes: false - -# Path to custom ``config-developer.yml`` file -# This can be used to customise project configurations. See -# ``config-developer.yml`` for an example. Set to ``null`` to use the default. -config_developer_file: null - -# Use a profiling tool for the diagnostic run --- [false]/true -# A profiler tells you which functions in your code take most time to run. -# Only available for Python diagnostics. -profile_diagnostic: false - -# Rootpaths to the data from different projects -# This default setting will work if files have been downloaded by ESMValTool -# via ``search_esgf``. Lists are also possible. 
For site-specific entries and -# more examples, see below. Comment out these when using a site-specific path. -rootpath: - default: ~/climate_data - -# Directory structure for input data --- [default]/ESGF/BADC/DKRZ/ETHZ/etc. -# This default setting will work if files have been downloaded by ESMValTool -# via ``search_esgf``. See ``config-developer.yml`` for definitions. Comment -# out/replace as per needed. -drs: - CMIP3: ESGF - CMIP5: ESGF - CMIP6: ESGF - CORDEX: ESGF - obs4MIPs: ESGF - -# Example rootpaths and directory structure that showcases the different -# projects and also the use of lists -# For site-specific entries, see below. -#rootpath: -# CMIP3: [~/cmip3_inputpath1, ~/cmip3_inputpath2] -# CMIP5: [~/cmip5_inputpath1, ~/cmip5_inputpath2] -# CMIP6: [~/cmip6_inputpath1, ~/cmip6_inputpath2] -# OBS: ~/obs_inputpath -# OBS6: ~/obs6_inputpath -# obs4MIPs: ~/obs4mips_inputpath -# ana4mips: ~/ana4mips_inputpath -# native6: ~/native6_inputpath -# RAWOBS: ~/rawobs_inputpath -# default: ~/default_inputpath -#drs: -# CMIP3: default -# CMIP5: default -# CMIP6: default -# CORDEX: default -# obs4MIPs: default - -# Directory tree created by automatically downloading from ESGF -# Uncomment the lines below to locate data that has been automatically -# downloaded from ESGF (using ``search_esgf``). -#rootpath: -# CMIP3: ~/climate_data -# CMIP5: ~/climate_data -# CMIP6: ~/climate_data -# CORDEX: ~/climate_data -# obs4MIPs: ~/climate_data -#drs: -# CMIP3: ESGF -# CMIP5: ESGF -# CMIP6: ESGF -# CORDEX: ESGF -# obs4MIPs: ESGF - -# Site-specific entries: JASMIN -# Uncomment the lines below to locate data on JASMIN. 
-#auxiliary_data_dir: /gws/nopw/j04/esmeval/aux_data/AUX -#rootpath: -# CMIP6: /badc/cmip6/data/CMIP6 -# CMIP5: /badc/cmip5/data/cmip5/output1 -# CMIP3: /badc/cmip3_drs/data/cmip3/output -# OBS: /gws/nopw/j04/esmeval/obsdata-v2 -# OBS6: /gws/nopw/j04/esmeval/obsdata-v2 -# obs4MIPs: /gws/nopw/j04/esmeval/obsdata-v2 -# ana4mips: /gws/nopw/j04/esmeval/obsdata-v2 -# CORDEX: /badc/cordex/data/CORDEX/output -#drs: -# CMIP6: BADC -# CMIP5: BADC -# CMIP3: BADC -# CORDEX: BADC -# OBS: default -# OBS6: default -# obs4MIPs: default -# ana4mips: default - -# Site-specific entries: DKRZ-Levante -# For bd0854 members a shared download directory is available -#search_esgf: when_missing -#download_dir: /work/bd0854/DATA/ESMValTool2/download -# Uncomment the lines below to locate data on Levante at DKRZ. -#auxiliary_data_dir: /work/bd0854/DATA/ESMValTool2/AUX -#rootpath: -# CMIP6: /work/bd0854/DATA/ESMValTool2/CMIP6_DKRZ -# CMIP5: /work/bd0854/DATA/ESMValTool2/CMIP5_DKRZ -# CMIP3: /work/bd0854/DATA/ESMValTool2/CMIP3 -# CORDEX: /work/ik1017/C3SCORDEX/data/c3s-cordex/output -# OBS: /work/bd0854/DATA/ESMValTool2/OBS -# OBS6: /work/bd0854/DATA/ESMValTool2/OBS -# obs4MIPs: /work/bd0854/DATA/ESMValTool2/OBS -# ana4mips: /work/bd0854/DATA/ESMValTool2/OBS -# native6: /work/bd0854/DATA/ESMValTool2/RAWOBS -# RAWOBS: /work/bd0854/DATA/ESMValTool2/RAWOBS -#drs: -# CMIP6: DKRZ -# CMIP5: DKRZ -# CMIP3: DKRZ -# CORDEX: BADC -# obs4MIPs: default -# ana4mips: default -# OBS: default -# OBS6: default -# native6: default - -# Site-specific entries: ETHZ -# Uncomment the lines below to locate data at ETHZ. -#rootpath: -# CMIP6: /net/atmos/data/cmip6 -# CMIP5: /net/atmos/data/cmip5 -# CMIP3: /net/atmos/data/cmip3 -# OBS: /net/exo/landclim/PROJECTS/C3S/datadir/obsdir/ -#drs: -# CMIP6: ETHZ -# CMIP5: ETHZ -# CMIP3: ETHZ - -# Site-specific entries: IPSL -# Uncomment the lines below to locate data on Ciclad at IPSL. 
-#rootpath: -# IPSLCM: / -# CMIP5: /bdd/CMIP5/output -# CMIP6: /bdd/CMIP6 -# CMIP3: /bdd/CMIP3 -# CORDEX: /bdd/CORDEX/output -# obs4MIPs: /bdd/obs4MIPS/obs-CFMIP/observations -# ana4mips: /not_yet -# OBS: /not_yet -# OBS6: /not_yet -# RAWOBS: /not_yet -#drs: -# CMIP6: DKRZ -# CMIP5: DKRZ -# CMIP3: IPSL -# CORDEX: BADC -# obs4MIPs: IPSL -# ana4mips: default -# OBS: not_yet -# OBS6: not_yet - -# Site-specific entries: Met Office -# Uncomment the lines below to locate data at the Met Office. -#rootpath: -# CMIP5: /project/champ/data/cmip5/output1 -# CMIP6: /project/champ/data/CMIP6 -# CORDEX: /project/champ/data/cordex/output -# OBS: /data/users/esmval/ESMValTool/obs -# OBS6: /data/users/esmval/ESMValTool/obs -# obs4MIPs: /data/users/esmval/ESMValTool/obs -# ana4mips: /project/champ/data/ana4MIPs -# native6: /data/users/esmval/ESMValTool/rawobs -# RAWOBS: /data/users/esmval/ESMValTool/rawobs -#drs: -# CMIP5: BADC -# CMIP6: BADC -# CORDEX: BADC -# OBS: default -# OBS6: default -# obs4MIPs: default -# ana4mips: BADC -# native6: default - -# Site-specific entries: NCI -# Uncomment the lines below to locate data at NCI. 
-#rootpath: -# CMIP6: [/g/data/oi10/replicas/CMIP6, /g/data/fs38/publications/CMIP6, /g/data/xp65/public/apps/esmvaltool/replicas/CMIP6] -# CMIP5: [/g/data/r87/DRSv3/CMIP5, /g/data/al33/replicas/CMIP5/combined, /g/data/rr3/publications/CMIP5/output1, /g/data/xp65/public/apps/esmvaltool/replicas/cmip5/output1] -# CMIP3: /g/data/r87/DRSv3/CMIP3 -# OBS: /g/data/ct11/access-nri/replicas/esmvaltool/obsdata-v2 -# OBS6: /g/data/ct11/access-nri/replicas/esmvaltool/obsdata-v2 -# obs4MIPs: /g/data/ct11/access-nri/replicas/esmvaltool/obsdata-v2 -# ana4mips: /g/data/ct11/access-nri/replicas/esmvaltool/obsdata-v2 -# native6: /g/data/xp65/public/apps/esmvaltool/native6 -# -#drs: -# CMIP6: NCI -# CMIP5: NCI -# CMIP3: NCI -# CORDEX: ESGF -# obs4MIPs: default -# ana4mips: default diff --git a/doc/sphinx/source/community/dataset.rst b/doc/sphinx/source/community/dataset.rst index 424d4d4694..7a24e7c923 100644 --- a/doc/sphinx/source/community/dataset.rst +++ b/doc/sphinx/source/community/dataset.rst @@ -42,14 +42,15 @@ and run the recipe, to make sure the CMOR checks pass without warnings or errors To test a pull request for a new CMORizer script: -#. Download the data following the instructions included in the script and place - it in the ``RAWOBS`` path specified in your ``config-user.yml`` +#. Download the data following the instructions included in the script and + place it in the ``RAWOBS`` ``rootpath`` specified in your + :ref:`configuration ` #. If available, use the downloading script by running ``esmvaltool data download --config_file `` #. Run the cmorization by running ``esmvaltool data format `` #. Copy the resulting data to the ``OBS`` (for CMIP5 compliant data) or ``OBS6`` - (for CMIP6 compliant data) path specified in your - ``config-user.yml`` + (for CMIP6 compliant data) ``rootpath`` specified in your + :ref:`configuration ` #. 
Run ``recipes/examples/recipe_check_obs.yml`` with the new dataset to check that the data can be used diff --git a/doc/sphinx/source/community/diagnostic.rst b/doc/sphinx/source/community/diagnostic.rst index 285815f7cf..1be820f7b8 100644 --- a/doc/sphinx/source/community/diagnostic.rst +++ b/doc/sphinx/source/community/diagnostic.rst @@ -64,7 +64,7 @@ If it is just a few simple scripts or packaging is not possible (i.e. for NCL) y and paste the source code into the ``esmvaltool/diag_scripts`` directory. If you have existing code in a compiled language like -C, C++, or Fortran that you want to re-use, the recommended way to proceed is to add Python bindings and publish +C, C++, or Fortran that you want to reuse, the recommended way to proceed is to add Python bindings and publish the package on PyPI so it can be installed as a Python dependency. You can then call the functions it provides using a Python diagnostic. @@ -134,9 +134,8 @@ Diagnostic output Typically, diagnostic scripts create plots, but any other output such as e.g. text files or tables is also possible. Figures should be saved in the ``plot_dir``, either in both ``.pdf`` and -``.png`` format (preferred), or -respect the ``output_file_type`` specified in the -:ref:`esmvalcore:user configuration file`. +``.png`` format (preferred), or respect the :ref:`configuration option +` ``output_file_type`` . Data should be saved in the ``work_dir``, preferably as a ``.nc`` (`NetCDF `__) file, following the `CF-Conventions `__ as much as possible. @@ -181,7 +180,7 @@ human inspection. In addition to provenance information, a caption is also added to the plots. Provenance information from the recipe is automatically recorded by ESMValCore, whereas -diagnostic scripts must include code specifically to record provenance. See below for +diagnostic scripts must include code specifically to record provenance. See below for documentation of provenance attributes that can be included in a recipe. 
When contributing a diagnostic, please make sure it records the provenance, and that no warnings related to provenance are generated when running the recipe. @@ -252,7 +251,7 @@ for example plot_types: errorbar: error bar plot -To use these items, include them in the provenance record dictionary in the form +To use these items, include them in the provenance record dictionary in the form :code:`key: [value]` i.e. for the example above as :code:`'plot_types': ['errorbar']`. @@ -275,8 +274,8 @@ Always use :func:`esmvaltool.diag_scripts.shared.run_diagnostic` at the end of y with run_diagnostic() as config: main(config) -Create a ``provenance_record`` for each diagnostic file (i.e. image or data -file) that the diagnostic script outputs. The ``provenance_record`` is a +Create a ``provenance_record`` for each diagnostic file (i.e. image or data +file) that the diagnostic script outputs. The ``provenance_record`` is a dictionary of provenance items, for example: .. code-block:: python @@ -296,15 +295,15 @@ dictionary of provenance items, for example: 'statistics': ['mean'], } -To save a matplotlib figure, use the convenience function -:func:`esmvaltool.diag_scripts.shared.save_figure`. Similarly, to save Iris cubes use +To save a matplotlib figure, use the convenience function +:func:`esmvaltool.diag_scripts.shared.save_figure`. Similarly, to save Iris cubes use :func:`esmvaltool.diag_scripts.shared.save_data`. Both of these functions take ``provenance_record`` as an argument and log the provenance accordingly. Have a look at the example Python diagnostic in `esmvaltool/diag_scripts/examples/diagnostic.py `_ for a complete example. -For any other files created, you will need to make use of a +For any other files created, you will need to make use of a :class:`esmvaltool.diag_scripts.shared.ProvenanceLogger` to log provenance. 
Include the following code directly after the file is saved: @@ -489,7 +488,7 @@ This includes the following items: * In-code documentation (comments, docstrings) * Code quality (e.g. no hardcoded pathnames) * No Codacy errors reported -* Re-use of existing functions whenever possible +* Reuse of existing functions whenever possible * Provenance implemented Run recipe diff --git a/doc/sphinx/source/community/release_strategy/detailed_release_procedure.rst b/doc/sphinx/source/community/release_strategy/detailed_release_procedure.rst index a73643f454..d0d7f74672 100644 --- a/doc/sphinx/source/community/release_strategy/detailed_release_procedure.rst +++ b/doc/sphinx/source/community/release_strategy/detailed_release_procedure.rst @@ -49,7 +49,7 @@ and attach it in the release testing issue; to record the environment in a yaml Modifications to configuration files need to be documented as well. To test recipes, it is recommended to only use the default options and DKRZ data directories, simply by uncommenting -the DKRZ-Levante block of a newly generated ``config-user.yml`` file. +the DKRZ-Levante block of a :ref:`newly generated configuration file `. Submit run scripts - test recipe runs ------------------------------------- @@ -61,7 +61,7 @@ You will have to set the name of your environment, your email address (if you wa More information on running jobs with SLURM on DKRZ/Levante can be found in the DKRZ `documentation `_. -You can also specify the path to your ``config-user.yml`` file where ``max_parallel_tasks`` can be set. The script was found to work well with ``max_parallel_tasks=8``. Some recipes need to be run with ``max_parallel_tasks=1`` (large memory requirements, CMIP3 data, diagnostic issues, ...). These recipes are listed in `ONE_TASK_RECIPES`. +You can also specify the path to your configuration directory where ``max_parallel_tasks`` can be set in a YAML file. The script was found to work well with ``max_parallel_tasks=8``. 
Some recipes need to be run with ``max_parallel_tasks=1`` (large memory requirements, CMIP3 data, diagnostic issues, ...). These recipes are listed in `ONE_TASK_RECIPES`. Some recipes need other job requirements, you can add their headers in the `SPECIAL_RECIPES` dictionary. Otherwise the header will be written following the template that is written in the lines below. If you want to exclude recipes, you can do so by uncommenting the `exclude` lines. diff --git a/doc/sphinx/source/community/release_strategy/release_strategy.rst b/doc/sphinx/source/community/release_strategy/release_strategy.rst index b95bab67b1..72c55266dd 100644 --- a/doc/sphinx/source/community/release_strategy/release_strategy.rst +++ b/doc/sphinx/source/community/release_strategy/release_strategy.rst @@ -53,7 +53,20 @@ With the following release schedule, we strive to have three releases per year a Upcoming releases ^^^^^^^^^^^^^^^^^ -- 2.12.0 (TBD) +- 2.12.0 (Release Manager: `Saskia Loosveldt Tomas`_) + ++------------+------------+----------------------------------------+-------------------------------------+ +| Planned | Done | Event | Changelog | ++============+============+========================================+=====================================+ +| 2025-01-13 | | ESMValCore `Feature Freeze`_ | | ++------------+------------+----------------------------------------+-------------------------------------+ +| 2025-01-20 | | ESMValCore Release 2.12.0 | | ++------------+------------+----------------------------------------+-------------------------------------+ +| 2025-01-27 | | ESMValTool `Feature Freeze`_ | | ++------------+------------+----------------------------------------+-------------------------------------+ +| 2025-02-03 | | ESMValTool Release 2.12.0 | | ++------------+------------+----------------------------------------+-------------------------------------+ + Past releases ^^^^^^^^^^^^^ diff --git a/doc/sphinx/source/community/upgrading.rst 
b/doc/sphinx/source/community/upgrading.rst index 9ed7f8b5b1..9a9b37f178 100644 --- a/doc/sphinx/source/community/upgrading.rst +++ b/doc/sphinx/source/community/upgrading.rst @@ -145,7 +145,7 @@ Many operations previously performed by the diagnostic scripts, are now included The backend operations are fully controlled by the ``preprocessors`` section in the recipe. Here, a number of preprocessor sets can be defined, with different options for each of the operations. The sets defined in this section are applied in the ``diagnostics`` section to preprocess a given variable. -It is recommended to proceed step by step, porting and testing each operation separately before proceeding with the next one. A useful setting in the user configuration file (``config-private.yml``) called ``write_intermediary_cube`` allows writing out the variable field after each preprocessing step, thus facilitating the comparison with the old version (e.g., after CMORization, level selection, after regridding, etc.). The CMORization step of the new backend exactly corresponds to the operation performed by the old backend (and stored in the ``climo`` directory, now called ``preprec``): this is the very first step to be checked, by simply comparing the intermediary file produced by the new backend after CMORization with the output of the old backend in the ``climo`` directorsy (see "Testing" below for instructions). +It is recommended to proceed step by step, porting and testing each operation separately before proceeding with the next one. A useful setting in the configuration called ``write_intermediary_cube`` allows writing out the variable field after each preprocessing step, thus facilitating the comparison with the old version (e.g., after CMORization, level selection, after regridding, etc.). 
The CMORization step of the new backend exactly corresponds to the operation performed by the old backend (and stored in the ``climo`` directory, now called ``preprec``): this is the very first step to be checked, by simply comparing the intermediary file produced by the new backend after CMORization with the output of the old backend in the ``climo`` directory (see "Testing" below for instructions). The new backend also performs variable derivation, replacing the ``calculate`` function in the ``variable_defs`` scripts. If the recipe which is being ported makes use of derived variables, the corresponding calculation must be ported from the ``./variable_defs/.ncl`` file to ``./esmvaltool/preprocessor/_derive.py``. @@ -159,7 +159,7 @@ In the new version, all settings are centralized in the recipe, completely repla Make sure the diagnostic script writes NetCDF output ====================================================== -Each diagnostic script is required to write the output of the anaylsis in one or more NetCDF files. This is to give the user the possibility to further look into the results, besides the plots, but (most importantly) for tagging purposes when publishing the data in a report and/or on a website. +Each diagnostic script is required to write the output of the analysis in one or more NetCDF files. This is to give the user the possibility to further look into the results, besides the plots, but (most importantly) for tagging purposes when publishing the data in a report and/or on a website. For each of the plot produced by the diagnostic script a single NetCDF file has to be generated. The variable saved in this file should also contain all the necessary metadata that documents the plot (dataset names, units, statistical methods, etc.). The files have to be saved in the work directory (defined in `cfg['work_dir']` and `config_user_info@work_dir`, for the python and NCL diagnostics, respectively).
@@ -209,7 +209,7 @@ Before submitting a pull request, the code should be cleaned to adhere to the co Update the documentation ======================== -If necessary, add or update the documentation for your recipes in the corrsponding rst file, which is now in ``doc\sphinx\source\recipes``. Do not forget to also add the documentation file to the list in ``doc\sphinx\source\annex_c`` to make sure it actually appears in the documentation. +If necessary, add or update the documentation for your recipes in the corresponding rst file, which is now in ``doc\sphinx\source\recipes``. Do not forget to also add the documentation file to the list in ``doc\sphinx\source\annex_c`` to make sure it actually appears in the documentation. Open a pull request =================== diff --git a/doc/sphinx/source/conf.py b/doc/sphinx/source/conf.py index 1af560b576..de7feb4775 100644 --- a/doc/sphinx/source/conf.py +++ b/doc/sphinx/source/conf.py @@ -168,8 +168,13 @@ # `conf.py` file.Be aware that `navigation_with_keys = True` has negative # accessibility implications: # https://github.com/pydata/pydata-sphinx-theme/issues/1492" -html_theme_options = {"navigation_with_keys": False} - +html_theme_options = { + "navigation_with_keys": False, + "logo": { + "image_light": "figures/ESMValTool-logo-2.png", + "image_dark": "figures/ESMValTool-logo-2-dark.png", + }, +} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] @@ -192,7 +197,7 @@ # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = [] +html_static_path = ["figures/ESMValTool-logo-2-dark.png"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. 
These files are copied diff --git a/doc/sphinx/source/develop/dataset.rst b/doc/sphinx/source/develop/dataset.rst index f3c168a17c..f624a44feb 100644 --- a/doc/sphinx/source/develop/dataset.rst +++ b/doc/sphinx/source/develop/dataset.rst @@ -76,7 +76,7 @@ for downloading (e.g. providing contact information, licence agreements) and using the observations. The unformatted (raw) observations should then be stored in the appropriate of these three folders. -For each additional dataset, an entry needs to be made to the file +For each additional dataset, an entry needs to be made to the file `datasets.yml `_. The dataset entry should contain: @@ -92,10 +92,10 @@ of the cmorizing script (see Section `4. Create a cmorizer for the dataset`_). 3.1 Downloader script (optional) -------------------------------- -A Python script can be written to download raw observations +A Python script can be written to download raw observations from source and store the data in the appropriate tier subdirectory of the folder ``RAWOBS`` automatically. -There are many downloading scripts available in +There are many downloading scripts available in `/esmvaltool/cmorizers/data/downloaders/datasets/ `_ where several data download mechanisms are provided: @@ -108,18 +108,18 @@ Note that the name of this downloading script has to be identical to the name of the dataset. Depending on the source server, the downloading script needs to contain paths to -raw observations, filename patterns and various necessary fields to retrieve +raw observations, filename patterns and various necessary fields to retrieve the data. -Default ``start_date`` and ``end_date`` can be provided in cases where raw data +Default ``start_date`` and ``end_date`` can be provided in cases where raw data are stored in daily, monthly, and yearly files. The downloading script for the given dataset can be run with: .. 
code-block:: console - esmvaltool data download --config_file + esmvaltool data download --config_dir -The options ``--start`` and ``--end`` can be added to the command above to +The options ``--start`` and ``--end`` can be added to the command above to restrict the download of raw data to a time range. They will be ignored if a specific dataset does not support it (i.e. because it is provided as a single file). Valid formats are ``YYYY``, ``YYYYMM`` and ``YYYYMMDD``. By default, already downloaded data are not overwritten @@ -128,7 +128,7 @@ unless the option ``--overwrite=True`` is used. 4. Create a cmorizer for the dataset ==================================== -There are many cmorizing scripts available in +There are many cmorizing scripts available in `/esmvaltool/cmorizers/data/formatters/datasets/ `_ where solutions to many kinds of format issues with observational data are @@ -158,7 +158,7 @@ configuration file: `MTE.yml `_ in the directory ``ESMValTool/esmvaltool/cmorizers/data/cmor_config/``. Note that both the name of this configuration file and the cmorizing script have to be -identical to the name of your dataset. +identical to the name of your dataset. It is recommended that you set ``project`` to ``OBS6`` in the configuration file. That way, the variables defined in the CMIP6 CMOR table, augmented with the custom variables described above, are available to your script. @@ -188,7 +188,8 @@ The main body of the CMORizer script must contain a function called with this exact call signature. Here, ``in_dir`` corresponds to the input directory of the raw files, ``out_dir`` to the output directory of final reformatted data set, ``cfg`` to the dataset-specific configuration file, -``cfg_user`` to the user configuration file, ``start_date`` to the start +``cfg_user`` to the configuration object (which behaves basically like a +dictionary), ``start_date`` to the start of the period to format, and ``end_date`` to the end of the period to format. 
If not needed, the last three arguments can be ignored using underscores. The return value of this function is ignored. All @@ -256,9 +257,9 @@ The cmorizing script for the given dataset can be run with: .. code-block:: console - esmvaltool data format --config_file + esmvaltool data format --config_dir -The options ``--start`` and ``--end`` can be added to the command above to +The options ``--start`` and ``--end`` can be added to the command above to restrict the formatting of raw data to a time range. They will be ignored if a specific dataset does not support it (i.e. because it is provided as a single file). Valid formats are ``YYYY``, ``YYYYMM`` and ``YYYYMMDD``. @@ -267,12 +268,12 @@ does not support it (i.e. because it is provided as a single file). Valid format The output path given in the configuration file is the path where your cmorized dataset will be stored. The ESMValTool will create a folder - with the correct tier information + with the correct tier information (see Section `2. Edit your configuration file`_) if that tier folder is not - already available, and then a folder named after the dataset. + already available, and then a folder named after the dataset. In this folder the cmorized data set will be stored as a NetCDF file. The cmorized dataset will be automatically moved to the correct tier - subfolder of your OBS or OBS6 directory if the option + subfolder of your OBS or OBS6 directory if the option ``--install=True`` is used in the command above and no such directory was already created. @@ -284,9 +285,9 @@ the cmorizing scripts can be run in a single command with: .. code-block:: console - esmvaltool data prepare --config_file + esmvaltool data prepare --config_dir -Note that options from the ```esmvaltool data download`` and +Note that options from the ```esmvaltool data download`` and ``esmvaltool data format`` commands can be passed to the above command. 6. 
Naming convention of the observational data files diff --git a/doc/sphinx/source/faq.rst b/doc/sphinx/source/faq.rst index 10c72bd2cb..43251a801b 100644 --- a/doc/sphinx/source/faq.rst +++ b/doc/sphinx/source/faq.rst @@ -59,12 +59,17 @@ This is a useful functionality because it allows the user to `fix` things on-the quitting the Ipython console, code execution continues as per normal. -Use multiple config-user.yml files -================================== +Using multiple configuration directories +======================================== + +By default, ESMValTool will read YAML configuration files from the user +configuration directory ``~/.config/esmvaltool``, which can be changed with the +``ESMVALTOOL_CONFIG_DIR`` environment variable. +If required, users can specify the command line option ``--config_dir`` to +select another configuration directory, which is read **in addition** to the +user configuration directory. +See the section on configuration :ref:`config_yaml_files` for details on this. -The user selects the configuration yaml file at run time. It's possible to -have several configurations files. For instance, it may be practical to have one -config file for debugging runs and another for production runs.
Create a symbolic link to the latest output directory ===================================================== diff --git a/doc/sphinx/source/figures/ESMValTool-logo-2-dark.png b/doc/sphinx/source/figures/ESMValTool-logo-2-dark.png new file mode 100644 index 0000000000..e120b2e731 Binary files /dev/null and b/doc/sphinx/source/figures/ESMValTool-logo-2-dark.png differ diff --git a/doc/sphinx/source/figures/ESMValTool-logo-2-glow.png b/doc/sphinx/source/figures/ESMValTool-logo-2-glow.png new file mode 100644 index 0000000000..14aef201ee Binary files /dev/null and b/doc/sphinx/source/figures/ESMValTool-logo-2-glow.png differ diff --git a/doc/sphinx/source/figures/ESMValTool-logo-2.png b/doc/sphinx/source/figures/ESMValTool-logo-2.png index e876219038..aaaa3578a5 100644 Binary files a/doc/sphinx/source/figures/ESMValTool-logo-2.png and b/doc/sphinx/source/figures/ESMValTool-logo-2.png differ diff --git a/doc/sphinx/source/functionalities.rst b/doc/sphinx/source/functionalities.rst index 5b49c118a2..0098d95ded 100644 --- a/doc/sphinx/source/functionalities.rst +++ b/doc/sphinx/source/functionalities.rst @@ -12,9 +12,9 @@ that it can: - execute the workflow; and - output the desired collective data and media. -To facilitate these four steps, the user has control over the tool via -two main input files: the :ref:`user configuration file ` -and the :ref:`recipe `. The configuration file sets +To facilitate these four steps, the user has control over the tool via the +:ref:`configuration ` and the :ref:`recipe +`. The configuration sets user and site-specific parameters (like input and output paths, desired output graphical formats, logging level, etc.), whereas the recipe file sets data, preprocessing and diagnostic-specific parameters (data @@ -27,7 +27,7 @@ recyclable; the recipe file can be used for a large number of applications, since it may include as many datasets, preprocessors and diagnostics sections as the user deems useful. 
-Once the user configuration files and the recipe are at hand, the user +Once the configuration files and the recipe are at hand, the user can start the tool. A schematic overview of the ESMValTool workflow is depicted in the figure below. diff --git a/doc/sphinx/source/input.rst b/doc/sphinx/source/input.rst index 798b2ceb27..fbc16b45ec 100644 --- a/doc/sphinx/source/input.rst +++ b/doc/sphinx/source/input.rst @@ -76,7 +76,7 @@ For example, run to run the default example recipe and automatically download the required data to the directory ``~/climate_data``. -The data only needs to be downloaded once, every following run will re-use +The data only needs to be downloaded once, every following run will reuse previously downloaded data stored in this directory. See :ref:`esmvalcore:config-esgf` for a more in depth explanation and the available configuration options. @@ -117,7 +117,7 @@ OBS and OBS6 data is stored in the `esmeval` Group Workspace (GWS), and to be gr GWS, one must apply at https://accounts.jasmin.ac.uk/services/group_workspaces/esmeval/ ; after permission has been granted, the user is encouraged to use the data locally, and not move it elsewhere, to minimize both data transfers and stale disk usage; to note that Tier 3 data is subject to data protection restrictions; for further inquiries, -the GWS is adminstered by [Valeriu Predoi](mailto:valeriu.predoi@ncas.ac.uk). +the GWS is administered by [Valeriu Predoi](mailto:valeriu.predoi@ncas.ac.uk). Using a CMORizer script ----------------------- @@ -193,8 +193,8 @@ To CMORize one or more datasets, run: esmvaltool data format --config_file [CONFIG_FILE] [DATASET_LIST] -The path to the raw data to be CMORized must be specified in the :ref:`user -configuration file` as RAWOBS. +The ``rootpath`` to the raw data to be CMORized must be specified in the +:ref:`configuration ` as ``RAWOBS``. 
Within this path, the data are expected to be organized in subdirectories corresponding to the data tier: Tier2 for freely-available datasets (other than obs4MIPs and ana4mips) and Tier3 for restricted datasets (i.e., dataset which @@ -269,6 +269,8 @@ A list of the datasets for which a CMORizers is available is provided in the fol +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | CLOUDSAT-L2 | clw, clivi, clwvi, lwp (Amon) | 3 | NCL | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| CMAP | pr (Amon) | 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | CowtanWay | tasa (Amon) | 2 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | CRU | tas, tasmin, tasmax, pr, clt (Amon), evspsblpot (Emon) | 2 | Python | @@ -298,7 +300,17 @@ A list of the datasets for which a CMORizers is available is provided in the fol +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | ESACCI-FIRE | burntArea (Lmon) | 2 | NCL | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| ESACCI-LANDCOVER | baresoilFrac, cropFrac, grassFrac, shrubFrac, treeFrac (Lmon) | 2 | NCL | +| ESACCI-LANDCOVER v1.6.1 | baresoilFrac, cropFrac, grassFrac, shrubFrac, treeFrac (Lmon) | 2 | NCL | +| | | | (CMORizer | +| | | | available until | +| | | | ESMValTool | +| | | | v2.11.0) | 
++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| ESACCI-LANDCOVER v2.0.8 | baresoilFrac, cropFrac, grassFrac, shrubFrac, treeFrac (Lmon, frequency=yr) | 2 | Python | +| | | | (CMORizer | +| | | | available since | +| | | | ESMValTool | +| | | | v2.12.0) | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | ESACCI-LST | ts (Amon) | 2 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ @@ -356,6 +368,8 @@ A list of the datasets for which a CMORizers is available is provided in the fol +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | JRA-25 | clt, hus, prw, rlut, rlutcs, rsut, rsutcs (Amon) | 2 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ +| JRA-55 | cli, clivi, clw, clwvi, clt, prw, rlus, rlut, rlutcs, rsus, rsuscs, rsut, rsutcs, ta, tas, wap (Amon)| 2 | Python | ++------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | Kadow2020 | tasa (Amon) | 2 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | LAI3g | lai (Lmon) | 3 | Python | @@ -392,13 +406,13 @@ A list of the datasets for which a CMORizers is available is provided in the fol | | tasmax, tasmin, ts, ua, va, wap, zg (Amon) | | | | | pr, rlut, ua, va (day) | | | 
+------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| NCEP-DOE-R2 | clt, hur, prw, ta, wap (Amon) | 2 | Python | +| NCEP-DOE-R2 | clt, hur, prw, ta, wap, pr, tauu, tauv, tos (Amon) | 2 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | NDP | cVeg (Lmon) | 3 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | NIWA-BS | toz, tozStderr (Amon) | 3 | NCL | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ -| NOAA-CIRES-20CR-V2 | clt, clwvi, hus, prw, rlut, rsut (Amon) | 2 | Python | +| NOAA-CIRES-20CR-V2 | clt, clwvi, hus, prw, rlut, rsut, pr, tauu, tauv (Amon) | 2 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ | NOAA-CIRES-20CR-V3 | clt, clwvi, hus, prw, rlut, rlutcs, rsut, rsutcs (Amon) | 2 | Python | +------------------------------+------------------------------------------------------------------------------------------------------+------+-----------------+ @@ -480,8 +494,8 @@ A list of all currently supported native datasets is :ref:`provided here A detailed description of how to include new native datasets is given :ref:`here `. -To use this functionality, users need to provide a path in the -:ref:`esmvalcore:user configuration file` for the ``native6`` project data +To use this functionality, users need to provide a ``rootpath`` in the +:ref:`configuration ` for the ``native6`` project data and/or the dedicated project used for the native dataset, e.g., ``ICON``. 
Then, in the recipe, they can refer to those projects. For example: diff --git a/doc/sphinx/source/quickstart/configuration.rst b/doc/sphinx/source/quickstart/configuration.rst index 34c29aac5c..9cea6413b6 100644 --- a/doc/sphinx/source/quickstart/configuration.rst +++ b/doc/sphinx/source/quickstart/configuration.rst @@ -1,4 +1,4 @@ -.. _config-user: +.. _config: ************* Configuration @@ -7,22 +7,23 @@ Configuration The ``esmvaltool`` command is provided by the ESMValCore package, the documentation on configuring ESMValCore can be found :ref:`here `. -In particular, it is recommended to read the section on the -:ref:`User configuration file ` -and the section on +An overview of all configuration options can be found +:ref:`here `. +In particular, it is recommended to read the section on how to :ref:`specify +configuration options ` and the section on :ref:`Finding data `. -To install the default configuration file in the default location, run +To install the default configuration in the default location, run .. code:: bash esmvaltool config get_config_user -Note that this file needs to be customized using the instructions above, so +Note that this needs to be customized using the instructions above, so the ``esmvaltool`` command can find the data on your system, before it can run a recipe. There is a lesson available in the `ESMValTool tutorial `_ -that describes how to personalize the configuration file. It can be found +that describes how to personalize the configuration. It can be found `at this site `_. diff --git a/doc/sphinx/source/quickstart/installation.rst b/doc/sphinx/source/quickstart/installation.rst index 4fb75b2f4f..9f66c1f670 100644 --- a/doc/sphinx/source/quickstart/installation.rst +++ b/doc/sphinx/source/quickstart/installation.rst @@ -72,15 +72,15 @@ https://mamba.readthedocs.io/en/latest/installation.html. installation. First download the installation file for -`Linux `_ +`Linux `_ or -`MacOSX `_. +`MacOSX `_. 
After downloading the installation file from one of the links above, execute it by running (Linux example): .. code-block:: bash - bash Mambaforge-Linux-x86_64.sh + bash Miniforge3-Linux-x86_64.sh and follow the instructions on your screen. @@ -99,7 +99,7 @@ later by running: source /etc/profile.d/conda.sh where ```` is the installation location of mamba (e.g. -``/home/$USER/mambaforge`` if you chose the default installation path). +``/home/$USER/miniforge3`` if you chose the default installation path). If you use another shell than Bash, have a look at the available configurations in the ``/etc/profile.d`` directory. @@ -111,7 +111,7 @@ You can check that mamba installed correctly by running which mamba this should show the path to your mamba executable, e.g. -``~/mambaforge/bin/mamba``. +``~/miniforge3/bin/mamba``. It is recommended to update both mamba and conda after installing: @@ -489,7 +489,7 @@ To check that the installation was successful, run this should show the directory of the source code that you just downloaded. If the command above shows a directory inside your conda environment instead, -e.g. ``~/mambaforge/envs/esmvaltool/lib/python3.11/site-packages/esmvalcore``, +e.g. ``~/miniforge3/envs/esmvaltool/lib/python3.11/site-packages/esmvalcore``, you may need to manually remove that directory and run ``pip install --editable '.[develop]'`` again. diff --git a/doc/sphinx/source/quickstart/output.rst b/doc/sphinx/source/quickstart/output.rst index 4a33e8ca42..33836f1c9a 100644 --- a/doc/sphinx/source/quickstart/output.rst +++ b/doc/sphinx/source/quickstart/output.rst @@ -5,8 +5,9 @@ Output ****** ESMValTool automatically generates a new output directory with every run. The -location is determined by the output_dir option in the config-user.yml file, -the recipe name, and the date and time, using the the format: YYYYMMDD_HHMMSS. 
+location is determined by the :ref:`configuration option +` ``output_dir``, the recipe name, and the date and +time, using the format: YYYYMMDD_HHMMSS. For instance, a typical output location would be: output_directory/recipe_ocean_amoc_20190118_1027/ @@ -33,13 +34,15 @@ The preprocessed datasets will be stored to the preproc/ directory. Each variable in each diagnostic will have its own the `metadata.yml`_ interface files saved in the preproc directory. -If the option ``save_intermediary_cubes`` is set to ``true`` in the -config-user.yml file, then the intermediary cubes will also be saved here. -This option is set to false in the default ``config-user.yml`` file. +If the :ref:`configuration option ` +``save_intermediary_cubes`` is set to ``true``, then the intermediary cubes +will also be saved here. +This option is set to ``false`` by default. -If the option ``remove_preproc_dir`` is set to ``true`` in the config-user.yml -file, then the preproc directory will be deleted after the run completes. This -option is set to true in the default ``config-user.yml`` file. +If the :ref:`configuration option ` +``remove_preproc_dir`` is set to ``true``, then the preproc directory will be +deleted after the run completes. +This option is set to ``true`` by default. Run @@ -70,8 +73,8 @@ Plots ===== The plots directory is where diagnostics save their output figures. These -plots are saved in the format requested by the option `output_file_type` in the -config-user.yml file. +plots are saved in the format requested by the :ref:`configuration option +` ``output_file_type``. Settings.yml @@ -81,10 +84,10 @@ The settings.yml file is automatically generated by ESMValCore. For each diagnos a unique settings.yml file will be produced. The settings.yml file passes several global level keys to diagnostic scripts.
-This includes several flags from the config-user.yml file (such as -'write_netcdf', 'write_plots', etc...), several paths which are specific to the -diagnostic being run (such as 'plot_dir' and 'run_dir') and the location on -disk of the metadata.yml file (described below). +This includes several flags from the configuration (such as +``write_netcdf``, ``write_plots``, etc...), several paths which are specific to +the diagnostic being run (such as ``plot_dir`` and ``run_dir``) and the +location on disk of the metadata.yml file (described below). .. code-block:: yaml @@ -147,5 +150,5 @@ As you can see, this is effectively a dictionary with several items including data paths, metadata and other information. There are several tools available in python which are built to read and parse -these files. The tools are avaialbe in the shared directory in the diagnostics +these files. The tools are available in the shared directory in the diagnostics directory. diff --git a/doc/sphinx/source/quickstart/running.rst b/doc/sphinx/source/quickstart/running.rst index 7f9cadbaa1..20cb8620b0 100644 --- a/doc/sphinx/source/quickstart/running.rst +++ b/doc/sphinx/source/quickstart/running.rst @@ -39,20 +39,20 @@ from ESGF to the local directory ``~/climate_data``, run The ``--search_esgf=when_missing`` option tells ESMValTool to search for and download the necessary climate data files, if they cannot be found locally. -The data only needs to be downloaded once, every following run will re-use +The data only needs to be downloaded once, every following run will reuse previously downloaded data. If you have all required data available locally, you can run the tool with ``--search_esgf=never`` argument (the default). Note that in that case the required data should be located in the directories -specified in your user configuration file. +specified in the configuration (see :ref:`esmvalcore:config_option_rootpath`). A third option ``--search_esgf=always`` is available. 
With this option, the tool will first check the ESGF for the needed data, regardless of any local data availability; if the data found on ESGF is newer than the local data (if any) or the user specifies a version of the data that is available only from the ESGF, then that data will be downloaded; otherwise, local data will be used. -Recall that the chapter :ref:`Configuring ESMValTool ` -provides an explanation of how to create your own config-user.yml file. +Recall that the chapter on :ref:`configuring ESMValTool ` +provides an explanation of how to set up the configuration. See :ref:`running esmvaltool ` in the ESMValCore documentation for a more complete introduction to the ``esmvaltool`` command. diff --git a/doc/sphinx/source/recipes/recipe_carvalhais14nat.rst b/doc/sphinx/source/recipes/recipe_carvalhais14nat.rst index dc26a745e2..b551bbbdc5 100644 --- a/doc/sphinx/source/recipes/recipe_carvalhais14nat.rst +++ b/doc/sphinx/source/recipes/recipe_carvalhais14nat.rst @@ -73,7 +73,7 @@ The settings needed for loading the observational dataset in all diagnostics are provided in the recipe through `obs_info` within `obs_details` section. * ``obs_data_subdir``: subdirectory of auxiliary_data_dir (set in - config-user file) where observation data are stored {e.g., + configuration) where observation data are stored {e.g., data_ESMValTool_Carvalhais2014}. * ``source_label``: source data label {'Carvalhais2014'}. * ``variant_label``: variant of the observation {'BE'} for best estimate. @@ -112,7 +112,7 @@ Script land_carbon_cycle/diag_global_turnover.py * ``y0``: {``float``, 1.0} Y - coordinate of the upper edge of the figure. * ``wp``: {``float``, 1 / number of models} - width of each map. * ``hp``: {``float``, = wp} - height of each map. - * ``xsp``: {``float``, 0} - spacing betweeen maps in X - direction. + * ``xsp``: {``float``, 0} - spacing between maps in X - direction. * ``ysp``: {``float``, -0.03} - spacing between maps in Y -direction. 
Negative to reduce the spacing below default. * ``aspect_map``: {``float``, 0.5} - aspect of the maps. @@ -217,10 +217,10 @@ Due to inherent dependence of the diagnostic on uncertainty estimates in observation, the data needed for each diagnostic script are processed at different spatial resolutions (as in Carvalhais et al., 2014), and provided in 11 different resolutions (see Table 1). Note that the uncertainties were -estimated at the resolution of the selected models, and, thus, only the -pre-processed observed data can be used with the recipe. -It is not possible to use regridding functionalities of ESMValTool to regrid -the observational data to other spatial resolutions, as the uncertainty +estimated at the resolution of the selected models, and, thus, only the +pre-processed observed data can be used with the recipe. +It is not possible to use regridding functionalities of ESMValTool to regrid +the observational data to other spatial resolutions, as the uncertainty estimates cannot be regridded. Table 1. A summary of the observation datasets at different resolutions. @@ -309,7 +309,7 @@ Example plots Comparison of latitudinal (zonal) variations of pearson correlation between turnover time and climate: turnover time and precipitation, controlled for - temperature (left) and vice-versa (right). Reproduces figures 2c and 2d in + temperature (left) and vice-versa (right). Reproduces figures 2c and 2d in `Carvalhais et al. (2014)`_. .. _fig_carvalhais14nat_2: @@ -320,7 +320,7 @@ Example plots Comparison of observation-based and modelled ecosystem carbon turnover time. Along the diagnonal, tau_ctotal are plotted, above the bias, and below - density plots. The inset text in density plots indicate the correlation. + density plots. The inset text in density plots indicate the correlation. .. _fig_carvalhais14nat_3: @@ -328,11 +328,11 @@ Example plots :align: center :width: 80% - Global distributions of multimodel bias and model agreement. 
Multimodel bias - is calculated as the ratio of multimodel median turnover time and that from - observation. Stippling indicates the regions where only less than one - quarter of the models fall within the range of observational uncertainties - (`5^{th}` and `95^{th}` percentiles). Reproduces figure 3 in `Carvalhais et + Global distributions of multimodel bias and model agreement. Multimodel bias + is calculated as the ratio of multimodel median turnover time and that from + observation. Stippling indicates the regions where only less than one + quarter of the models fall within the range of observational uncertainties + (`5^{th}` and `95^{th}` percentiles). Reproduces figure 3 in `Carvalhais et al. (2014)`_. .. _fig_carvalhais14nat_4: @@ -341,7 +341,7 @@ Example plots :align: center :width: 80% - Comparison of latitudinal (zonal) variations of observation-based and - modelled ecosystem carbon turnover time. The zonal turnover time is - calculated as the ratio of zonal `ctotal` and `gpp`. Reproduces figures 2a + Comparison of latitudinal (zonal) variations of observation-based and + modelled ecosystem carbon turnover time. The zonal turnover time is + calculated as the ratio of zonal `ctotal` and `gpp`. Reproduces figures 2a and 2b in `Carvalhais et al. (2014)`_. diff --git a/doc/sphinx/source/recipes/recipe_climwip.rst b/doc/sphinx/source/recipes/recipe_climwip.rst index 0928ba939f..900698b85a 100644 --- a/doc/sphinx/source/recipes/recipe_climwip.rst +++ b/doc/sphinx/source/recipes/recipe_climwip.rst @@ -43,9 +43,9 @@ Using shapefiles for cutting scientific regions To use shapefiles for selecting SREX or AR6 regions by name it is necessary to download them, e.g., from the sources below and reference the file using the `shapefile` parameter. This can either be a -absolute or a relative path. In the example recipes they are stored in a subfolder `shapefiles` -in the `auxiliary_data_dir` (with is specified in the -`config-user.yml `_). +absolute or a relative path. 
In the example recipes they are stored in a subfolder `shapefiles` +in the :ref:`configuration option ` +``auxiliary_data_dir``. SREX regions (AR5 reference regions): http://www.ipcc-data.org/guidelines/pages/ar5_regions.html @@ -249,7 +249,7 @@ Brunner et al. (2020) recipe and example independence weighting The recipe uses an additional step between pre-processor and weight calculation to calculate anomalies relative to the global mean (e.g., tas_ANOM = tas_CLIM - global_mean(tas_CLIM)). This means we do not use the absolute temperatures of a model as performance criterion but rather the horizontal temperature distribution (see `Brunner et al. 2020 `_ for a discussion). -This recipe also implements a somewhat general independence weighting for CMIP6. In contrast to model performance (which should be case specific) model independence can largely be seen as only dependet on the multi-model ensemble in use but not the target variable or region. This means that the configuration used should be valid for similar subsets of CMIP6 as used in this recipe: +This recipe also implements a somewhat general independence weighting for CMIP6. In contrast to model performance (which should be case specific) model independence can largely be seen as only dependent on the multi-model ensemble in use but not the target variable or region. This means that the configuration used should be valid for similar subsets of CMIP6 as used in this recipe: .. 
code-block:: yaml diff --git a/doc/sphinx/source/recipes/recipe_gier20bg.rst b/doc/sphinx/source/recipes/recipe_gier20bg.rst index bb11770a24..b8f8fb9b8e 100644 --- a/doc/sphinx/source/recipes/recipe_gier20bg.rst +++ b/doc/sphinx/source/recipes/recipe_gier20bg.rst @@ -53,7 +53,7 @@ User settings in recipe * Optional diag_script_info attributes: * ``styleset``: styleset for color coding panels - * ``output_file_type``: output file type for plots, default: config_user -> png + * ``output_file_type``: output file type for plots, default: png * ``var_plotname``: NCL string formatting how variable should be named in plots defaults to short_name if not assigned. @@ -64,7 +64,7 @@ User settings in recipe amplitude contour plot * Optional diag_script_info attributes: - * ``output_file_type``: output file type for plots, default: config_user -> png + * ``output_file_type``: output file type for plots, default: png #. Script xco2_analysis/main.ncl: @@ -77,7 +77,7 @@ User settings in recipe accounting for the ensemble member named in "ensemble_refs" * Optional diag_script_info attributes: - * ``output_file_type``: output file type for plots, default: config_user -> png + * ``output_file_type``: output file type for plots, default: png * ``ensemble_refs``: list of model-ensemble pairs to denote which ensemble member to use for calculating multi-model mean. required if ensemble_mean = true @@ -97,17 +97,17 @@ User settings in recipe * ``plot_var2_mean``: If True adds mean of seasonal cycle to panel as string. * Optional diag_script_info attributes: - * ``output_file_type``: output file type for plots, default: config_user -> png + * ``output_file_type``: output file type for plots, default: png * ``var_plotname``: String formatting how variable should be named in plots defaults to short_name if not assigned #. 
Script xco2_analysis/sat_masks.ncl: * Optional diag_script_info attributes: - * ``output_file_type``: output file type for plots, default: config_user -> png + * ``output_file_type``: output file type for plots, default: png * ``var_plotname``: String formatting how variable should be named in plots defaults to short_name if not assigned - * ``c3s_plots``: Missing value plots seperated by timeseries of c3s satellites + * ``c3s_plots``: Missing value plots separated by timeseries of c3s satellites #. Script xco2_analysis/station_comparison.ncl: @@ -116,7 +116,7 @@ User settings in recipe first, then 2D variable, followed by surface stations * Optional diag_script_info attributes: - * ``output_file_type``: output file type for plots, default: config_user -> png + * ``output_file_type``: output file type for plots, default: png * ``var_plotnames``: String formatting how variables should be named in plots defaults to short_name if not assigned * ``overwrite_altitudes``: Give other altitude values than the ones attached in diff --git a/doc/sphinx/source/recipes/recipe_hydrology.rst b/doc/sphinx/source/recipes/recipe_hydrology.rst index d0e2e0bcb3..995a70b3ae 100644 --- a/doc/sphinx/source/recipes/recipe_hydrology.rst +++ b/doc/sphinx/source/recipes/recipe_hydrology.rst @@ -62,13 +62,13 @@ Diagnostics are stored in esmvaltool/diag_scripts/hydrology * wflow.py * lisflood.py * hype.py - * globwat.py + * globwat.py User settings in recipe ----------------------- -All hydrological recipes require a shapefile as an input to produce forcing data. This shapefile determines the shape of the basin for which the data will be cut out and processed. All recipes are tested with `the shapefiles `_ that are used for the eWaterCycle project. In principle any shapefile can be used, for example, the freely available basin shapefiles from the `HydroSHEDS project `_. +All hydrological recipes require a shapefile as an input to produce forcing data. 
This shapefile determines the shape of the basin for which the data will be cut out and processed. All recipes are tested with `the shapefiles `_ that are used for the eWaterCycle project. In principle any shapefile can be used, for example, the freely available basin shapefiles from the `HydroSHEDS project `_. #. recipe_pcrglobwb.yml @@ -87,7 +87,7 @@ All hydrological recipes require a shapefile as an input to produce forcing data *extract_shape:* - * shapefile: Meuse.shp (MARRMoT is a hydrological Lumped model that needs catchment-aggregated forcing data. The catchment is provided as a shapefile, the path can be relative to ``auxiliary_data_dir`` as defined in config-user.yml.). + * shapefile: Meuse.shp (MARRMoT is a hydrological Lumped model that needs catchment-aggregated forcing data. The catchment is provided as a shapefile, the path can be relative to :ref:`configuration option ` ``auxiliary_data_dir``). * method: contains * crop: true @@ -107,7 +107,7 @@ All hydrological recipes require a shapefile as an input to produce forcing data * dem_file: netcdf file containing a digital elevation model with elevation in meters and coordinates latitude and longitude. A wflow example dataset is available at: https://github.com/openstreams/wflow/tree/master/examples/wflow_rhine_sbm - The example dem_file can be obtained from https://github.com/openstreams/wflow/blob/master/examples/wflow_rhine_sbm/staticmaps/wflow_dem.map + The example dem_file can be obtained from https://github.com/openstreams/wflow/blob/master/examples/wflow_rhine_sbm/staticmaps/wflow_dem.map * regrid: the regridding scheme for regridding to the digital elevation model. Choose ``area_weighted`` (slow) or ``linear``. #. 
recipe_lisflood.yml diff --git a/doc/sphinx/source/recipes/recipe_ipccwg1ar6ch3.rst b/doc/sphinx/source/recipes/recipe_ipccwg1ar6ch3.rst index 42bedcec09..718c345b19 100644 --- a/doc/sphinx/source/recipes/recipe_ipccwg1ar6ch3.rst +++ b/doc/sphinx/source/recipes/recipe_ipccwg1ar6ch3.rst @@ -6,7 +6,7 @@ IPCC AR6 Chapter 3 (selected figures) Overview -------- -This recipe collects selected diagnostics used in IPCC AR6 WGI Chapter 3: +This recipe collects selected diagnostics used in IPCC AR6 WGI Chapter 3: Human influence on the climate system (`Eyring et al., 2021`_). Plots from IPCC AR6 can be readily reproduced and compared to previous versions. The aim is to be able to start with what was available now the next time allowing us to focus @@ -15,7 +15,8 @@ on developing more innovative analysis methods rather than constantly having to Processing of CMIP3 models currently works only in serial mode, due to an issue in the input data still under investigation. To run the recipe for Fig 3.42a -and Fig. 3.43 set "max_parallel_tasks: 1" in the config-user.yml file. +and Fig. 3.43 set the :ref:`configuration option ` +``max_parallel_tasks: 1``. The plots are produced collecting the diagnostics from individual recipes. The following figures from `Eyring et al. (2021)`_ can currently be reproduced: @@ -43,10 +44,9 @@ To reproduce Fig. 3.9 you need the shapefile of the `AR6 reference regions (`Iturbide et al., 2020 `_). Please download the file `IPCC-WGI-reference-regions-v4_shapefile.zip `_, -unzip and store it in `/IPCC-regions/` (the `auxiliary_data_dir` -is defined in the `config-user.yml -`_ -file). +unzip and store it in `/IPCC-regions/` (where +``auxiliary_data_dir`` is given as :ref:`configuration option +`). .. _`Eyring et al., 2021`: https://www.ipcc.ch/report/ar6/wg1/chapter/chapter-3/ .. _`Eyring et al. 
(2021)`: https://www.ipcc.ch/report/ar6/wg1/chapter/chapter-3/ @@ -179,7 +179,7 @@ User settings in recipe * start_year: start year in figure * end_year: end year in figure - * panels: list of variable blocks for each panel + * panels: list of variable blocks for each panel *Optional settings for script* @@ -205,7 +205,7 @@ User settings in recipe * plot_units: variable unit for plotting * y-min: set min of y-axis * y-max: set max of y-axis - * order: order in which experiments should be plotted + * order: order in which experiments should be plotted * stat_shading: if true: shading of statistic range * ref_shading: if true: shading of reference period @@ -225,7 +225,7 @@ User settings in recipe * plot_legend: if true, plot legend will be plotted * plot_units: variable unit for plotting - * multi_model_mean: if true, multi-model mean and uncertaintiy will be + * multi_model_mean: if true, multi-model mean and uncertainty will be plotted *Optional settings for variables* @@ -304,7 +304,7 @@ User settings in recipe * labels: List of labels for each variable on the x-axis * model_spread: if True, model spread is shaded * plot_median: if True, median is plotted - * project_order: give order of projects + * project_order: give order of projects Variables @@ -452,7 +452,7 @@ Example plots 2013). For line colours see the legend of Figure 3.4. Additionally, the multi-model mean (red) and standard deviation (grey shading) are shown. Observational and model datasets were detrended by removing the - least-squares quadratic trend. + least-squares quadratic trend. .. figure:: /recipes/figures/ipccwg1ar6ch3/tas_anom_damip_global_1850-2020.png :align: center @@ -467,7 +467,7 @@ Example plots anomalies are shown relative to 1950-2010 for Antarctica and relative to 1850-1900 for other continents. CMIP6 historical simulations are expanded by the SSP2-4.5 scenario simulations. All available ensemble members were used. - Regions are defined by Iturbide et al. (2020). 
+ Regions are defined by Iturbide et al. (2020). .. figure:: /recipes/figures/ipccwg1ar6ch3/model_bias_pr_annualclim_CMIP6.png :align: center @@ -487,7 +487,7 @@ Example plots show a change greater than the variability threshold; crossed lines indicate regions with conflicting signal, where >=66% of models show change greater than the variability threshold and <80% of all models agree on the sign of - change. + change. .. figure:: /recipes/figures/ipccwg1ar6ch3/precip_anom_1950-2014.png :align: center @@ -511,7 +511,7 @@ Example plots forcings (brown) and natural forcings only (blue). Observed trends for each observational product are shown as horizontal lines. Panel (b) shows annual mean precipitation rate (mm day-1) of GHCN version 2 for the years 1950-2014 - over land areas used to compute the plots. + over land areas used to compute the plots. .. figure:: /recipes/figures/ipccwg1ar6ch3/zonal_westerly_winds.png :align: center diff --git a/doc/sphinx/source/recipes/recipe_kcs.rst b/doc/sphinx/source/recipes/recipe_kcs.rst index fa07f0a167..1ed117ecb6 100644 --- a/doc/sphinx/source/recipes/recipe_kcs.rst +++ b/doc/sphinx/source/recipes/recipe_kcs.rst @@ -30,7 +30,7 @@ In the second diagnostic, for both the control and future periods, the N target 2. Further constrain the selection by picking samples that represent either high or low changes in summer precipitation and summer and winter temperature, by limiting the remaining samples to certain percentile ranges: relatively wet/cold in the control and dry/warm in the future, or vice versa. The percentile ranges are listed in table 1 of Lenderink 2014's supplement. This should result is approximately 50 remaining samples for each scenario, for both control and future. 3. Use a Monte-Carlo method to make a final selection of 8 resamples with minimal reuse of the same ensemble member/segment. -Datasets have been split in two parts: the CMIP datasets and the target model datasets. 
An example use case for this recipe is to compare between CMIP5 and CMIP6, for example. The recipe can work with a target model that is not part of CMIP, provided that the data are CMOR compatible, and using the same data referece syntax as the CMIP data. Note that you can specify :ref:`multiple data paths` in the user configuration file. +Datasets have been split in two parts: the CMIP datasets and the target model datasets. An example use case for this recipe is to compare between CMIP5 and CMIP6, for example. The recipe can work with a target model that is not part of CMIP, provided that the data are CMOR compatible, and using the same data reference syntax as the CMIP data. Note that you can specify :ref:`multiple data paths` in the configuration. Available recipes and diagnostics @@ -128,7 +128,7 @@ AND highlighting the selected steering parameters and resampling periods: .. figure:: /recipes/figures/kcs/global_matching.png :align: center -The diagnostic ``local_resampling`` procudes a number of output files: +The diagnostic ``local_resampling`` produces a number of output files: * ``season_means_.nc``: intermediate results, containing the season means for each segment of the original target model ensemble. * ``top1000_.csv``: intermediate results, containing the 1000 combinations that have been selected based on winter mean precipitation. diff --git a/doc/sphinx/source/recipes/recipe_model_evaluation.rst b/doc/sphinx/source/recipes/recipe_model_evaluation.rst index 9e199815e0..c61f34aa62 100644 --- a/doc/sphinx/source/recipes/recipe_model_evaluation.rst +++ b/doc/sphinx/source/recipes/recipe_model_evaluation.rst @@ -35,9 +35,9 @@ User settings ------------- It is recommended to use a vector graphic file type (e.g., SVG) for the output -format when running this recipe, i.e., run the recipe with the command line -option ``--output_file_type=svg`` or use ``output_file_type: svg`` in your -:ref:`esmvalcore:user configuration file`. 
+format when running this recipe, i.e., run the recipe with the +:ref:`configuration options ` ``output_file_type: +svg``. Note that map and profile plots are rasterized by default. Use ``rasterize: false`` in the recipe to disable this. diff --git a/doc/sphinx/source/recipes/recipe_monitor.rst b/doc/sphinx/source/recipes/recipe_monitor.rst index ee3b9b44fa..8f4893fc12 100644 --- a/doc/sphinx/source/recipes/recipe_monitor.rst +++ b/doc/sphinx/source/recipes/recipe_monitor.rst @@ -36,9 +36,9 @@ User settings ------------- It is recommended to use a vector graphic file type (e.g., SVG) for the output -files when running this recipe, i.e., run the recipe with the command line -option ``--output_file_type=svg`` or use ``output_file_type: svg`` in your -:ref:`esmvalcore:user configuration file`. +format when running this recipe, i.e., run the recipe with the +:ref:`configuration options ` ``output_file_type: +svg``. Note that map and profile plots are rasterized by default. Use ``rasterize_maps: false`` or ``rasterize: false`` (see `Recipe settings`_) in the recipe to disable this. diff --git a/doc/sphinx/source/recipes/recipe_oceans.rst b/doc/sphinx/source/recipes/recipe_oceans.rst index d8bf3143e1..17552b39fa 100644 --- a/doc/sphinx/source/recipes/recipe_oceans.rst +++ b/doc/sphinx/source/recipes/recipe_oceans.rst @@ -458,7 +458,7 @@ and a latitude and longitude coordinates. This diagnostic also includes the optional arguments, `maps_range` and `diff_range` to manually define plot ranges. Both arguments are a list of two floats -to set plot range minimun and maximum values respectively for Model and Observations +to set plot range minimum and maximum values respectively for Model and Observations maps (Top panels) and for the Model minus Observations panel (bottom left). Note that if input data have negative values the Model over Observations map (bottom right) is not produced. 
@@ -491,14 +491,14 @@ diagnostic_maps_multimodel.py The diagnostic_maps_multimodel.py_ diagnostic makes model(s) vs observations maps and if data are not provided it draws only model field. -It is always nessary to define the overall layout trough the argument `layout_rowcol`, +It is always necessary to define the overall layout through the argument `layout_rowcol`, which is a list of two integers indicating respectively the number of rows and columns to organize the plot. Observations has not be accounted in here as they are automatically added at the top of the figure. This diagnostic also includes the optional arguments, `maps_range` and `diff_range` to manually define plot ranges. Both arguments are a list of two floats -to set plot range minimun and maximum values respectively for variable data and +to set plot range minimum and maximum values respectively for variable data and the Model minus Observations range. Note that this diagnostic assumes that the preprocessors do the bulk of the @@ -748,7 +748,7 @@ These tools are: - bgc_units: converts to sensible units where appropriate (ie Celsius, mmol/m3) - timecoord_to_float: Converts time series to decimal time ie: Midnight on January 1st 1970 is 1970.0 - add_legend_outside_right: a plotting tool, which adds a legend outside the axes. -- get_image_format: loads the image format, as defined in the global user config.yml. +- get_image_format: loads the image format, as defined in the global configuration. - get_image_path: creates a path for an image output. - make_cube_layer_dict: makes a dictionary for several layers of a cube. @@ -762,8 +762,8 @@ A note on the auxiliary data directory Some of these diagnostic scripts may not function on machines with no access to the internet, as cartopy may try to download the shape files. 
The solution to this issue is the put the relevant cartopy shapefiles in a directory which -is visible to esmvaltool, then link that path to ESMValTool via -the `auxiliary_data_dir` variable in your config-user.yml file. +is visible to esmvaltool, then link that path to ESMValTool via the +:ref:`configuration option ` ``auxiliary_data_dir``. The cartopy masking files can be downloaded from: https://www.naturalearthdata.com/downloads/ diff --git a/doc/sphinx/source/recipes/recipe_rainfarm.rst b/doc/sphinx/source/recipes/recipe_rainfarm.rst index d6c06c6f7a..aeb7cd0638 100644 --- a/doc/sphinx/source/recipes/recipe_rainfarm.rst +++ b/doc/sphinx/source/recipes/recipe_rainfarm.rst @@ -32,7 +32,7 @@ User settings * nf: number of subdivisions for downscaling (e.g. 8 will produce output fields with linear resolution increased by a factor 8) * conserv_glob: logical, if to conserve precipitation over full domain * conserv_smooth: logical, if to conserve precipitation using convolution (if neither conserv_glob or conserv_smooth is chosen, box conservation is used) -* weights_climo: set to false or omit if no orographic weights are to be used, else set it to the path to a fine-scale precipitation climatology file. If a relative file path is used, `auxiliary_data_dir` will be searched for this file. The file is expected to be in NetCDF format and should contain at least one precipitation field. If several fields at different times are provided, a climatology is derived by time averaging. Suitable climatology files could be for example a fine-scale precipitation climatology from a high-resolution regional climate model (see e.g. Terzago et al. 2018), a local high-resolution gridded climatology from observations, or a reconstruction such as those which can be downloaded from the WORLDCLIM (http://www.worldclim.org) or CHELSA (http://chelsa-climate.org) websites. 
The latter data will need to be converted to NetCDF format before being used (see for example the GDAL tools (https://www.gdal.org). +* weights_climo: set to false or omit if no orographic weights are to be used, else set it to the path to a fine-scale precipitation climatology file. If a relative file path is used, ``auxiliary_data_dir`` will be searched for this file. The file is expected to be in NetCDF format and should contain at least one precipitation field. If several fields at different times are provided, a climatology is derived by time averaging. Suitable climatology files could be for example a fine-scale precipitation climatology from a high-resolution regional climate model (see e.g. Terzago et al. 2018), a local high-resolution gridded climatology from observations, or a reconstruction such as those which can be downloaded from the WORLDCLIM (http://www.worldclim.org) or CHELSA (http://chelsa-climate.org) websites. The latter data will need to be converted to NetCDF format before being used (see for example the GDAL tools (https://www.gdal.org). Variables @@ -60,4 +60,4 @@ Example plots .. figure:: /recipes/figures/rainfarm/rainfarm.png :width: 14cm - Example of daily cumulated precipitation from the CMIP5 EC-EARTH model on a specific day, downscaled using RainFARM from its original resolution (1.125°) (left panel), increasing spatial resolution by a factor of 8 to 0.14°; Two stochastic realizations are shown (central and right panel). A fixed spectral slope of s=1.7 was used. Notice how the downscaled fields introduce fine scale precipitation structures, while still maintaining on average the original coarse-resolution precipitation. Different stochastic realizations are shown to demonstrate how an ensemble of realizations can be used to reproduce unresolved subgrid variability. (N.B.: this plot was not produced by ESMValTool - the recipe output is netcdf only). 
+ Example of daily cumulated precipitation from the CMIP5 EC-EARTH model on a specific day, downscaled using RainFARM from its original resolution (1.125°) (left panel), increasing spatial resolution by a factor of 8 to 0.14°; Two stochastic realizations are shown (central and right panel). A fixed spectral slope of s=1.7 was used. Notice how the downscaled fields introduce fine scale precipitation structures, while still maintaining on average the original coarse-resolution precipitation. Different stochastic realizations are shown to demonstrate how an ensemble of realizations can be used to reproduce unresolved subgrid variability. (N.B.: this plot was not produced by ESMValTool - the recipe output is netcdf only). diff --git a/doc/sphinx/source/recipes/recipe_shapeselect.rst b/doc/sphinx/source/recipes/recipe_shapeselect.rst index 63afbcae6c..12da974c28 100644 --- a/doc/sphinx/source/recipes/recipe_shapeselect.rst +++ b/doc/sphinx/source/recipes/recipe_shapeselect.rst @@ -29,7 +29,7 @@ User settings in recipe *Required settings (scripts)* - * shapefile: path to the user provided shapefile. A relative path is relative to the auxiliary_data_dir as configured in config-user.yml. + * shapefile: path to the user provided shapefile. A relative path is relative to the :ref:`configuration option ` ``auxiliary_data_dir``. * weighting_method: the preferred weighting method 'mean_inside' - mean of all grid points inside polygon; 'representative' - one point inside or close to the polygon is used to represent the complete area. diff --git a/doc/sphinx/source/recipes/recipe_wenzel14jgr.rst b/doc/sphinx/source/recipes/recipe_wenzel14jgr.rst index 3c7fa86a3a..4faa05c2a9 100644 --- a/doc/sphinx/source/recipes/recipe_wenzel14jgr.rst +++ b/doc/sphinx/source/recipes/recipe_wenzel14jgr.rst @@ -28,8 +28,8 @@ User settings .. note:: - Make sure to run this recipe setting ``max_parallel_tasks: 1`` in the ``config_user.yml`` - file or using the CLI flag ``--max_parallel_tasks=1``. 
+ Make sure to run this recipe with the :ref:`configuration option + ` ``max_parallel_tasks: 1``. User setting files (cfg files) are stored in nml/cfg_carbon/ diff --git a/doc/sphinx/source/recipes/recipe_wenzel16nat.rst b/doc/sphinx/source/recipes/recipe_wenzel16nat.rst index 03bb822545..a661844e70 100644 --- a/doc/sphinx/source/recipes/recipe_wenzel16nat.rst +++ b/doc/sphinx/source/recipes/recipe_wenzel16nat.rst @@ -35,9 +35,8 @@ User settings .. note:: - Make sure to run this recipe setting ``output_file_type: pdf`` in the ``config_user.yml`` - file or using the CLI flag ``--output_file_type=pdf``. - + Make sure to run this recipe with the :ref:`configuration option + ` ``max_parallel_tasks: 1``. #. Script carbon_beta.ncl @@ -58,7 +57,7 @@ User settings none -#. Script carbon_co2_cycle.ncl +#. Script carbon_co2_cycle.ncl *Required Settings (scripts)* @@ -72,7 +71,7 @@ User settings *Required settings (variables)* - * reference_dataset: name of reference datatset (observations) + * reference_dataset: name of reference dataset (observations) *Optional settings (variables)* @@ -102,15 +101,15 @@ Example plots ------------- .. figure:: /recipes/figures/wenzel16nat/fig_1.png - :width: 12 cm + :width: 12 cm :align: center - + Comparison of CO\ :sub:`2` seasonal amplitudes for CMIP5 historical simulations and observations showing annual mean atmospheric CO\ :sub:`2` versus the amplitudes of the CO\ :sub:`2` seasonal cycle at Pt. Barrow, Alaska (produced with carbon_co2_cycle.ncl, similar to Fig. 1a from Wenzel et al. (2016)). - + .. figure:: /recipes/figures/wenzel16nat/fig_2.png - :width: 12 cm + :width: 12 cm :align: center - + Barchart showing the gradient of the linear correlations for the comparison of CO\ :sub:`2` seasonal amplitudes for CMIP5 historical for at Pt. Barrow, Alaska (produced with carbon_co2_cycle.ncl, similar to Fig. 1b from Wenzel et al. (2016)). .. 
figure:: /recipes/figures/wenzel16nat/fig_3.png diff --git a/doc/sphinx/source/utils/utils.rst b/doc/sphinx/source/utils/utils.rst index 962f66f0b0..97916c9edd 100644 --- a/doc/sphinx/source/utils/utils.rst +++ b/doc/sphinx/source/utils/utils.rst @@ -135,10 +135,11 @@ This suite is configured to work with versions of cylc older than 8.0.0 . To prepare for using this tool: #. Log in to a system that uses `slurm `_ -#. Make sure the required CMIP and observational datasets are available and configured in config-user.yml +#. Make sure the required CMIP and observational datasets are available and + their ``rootpath`` and ``drs`` is properly set up in the :ref:`configuration + ` #. Make sure the required auxiliary data is available (see :ref:`recipe documentation `) #. Install ESMValTool -#. Update config-user.yml so it points to the right data locations Next, get started with `cylc `_: @@ -152,67 +153,6 @@ Next, get started with `cylc `. -Using Rose and cylc -------------------- -It is possible to run more than one recipe in one go: currently this relies on the user -having access to a HPC that has ``rose`` and ``cylc`` installed since the procedure involves -installing and submitting a Rose suite. The utility that allows you to do this is -``esmvaltool/utils/rose-cylc/esmvt_rose_wrapper.py``. - -Base suite -.......... -The base suite to run esmvaltool via rose-cylc is `u-bd684`; you can find -this suite in the Met Office Rose repository at: - -https://code.metoffice.gov.uk/svn/roses-u/b/d/6/8/4/trunk/ - -When ``rose`` will be working with python3.x, this location will become -default and the pipeline will aceess it independently of user, unless, of -course the user will specify ``-s $SUITE_LOCATION``; until then the user needs -to grab a copy of it in ``$HOME`` or specify the default location via ``-s`` option. - -Environment -........... 
-We will move to a unified and centrally-installed esmvaltool environment; -until then, the user will have to alter the env_setup script: - -``u-bd684/app/esmvaltool/env_setup`` - -with the correct pointers to esmvaltool installation, if desired. - -To be able to submit to cylc, you need to have the `/metomi/` suite in path -AND use a `python2.7` environment. Use the Jasmin-example below for guidance. - -Jasmin-example -.............. -This shows how to interact with rose-cylc and run esmvaltool under cylc -using this script: - -.. code:: bash - - export PATH=/apps/contrib/metomi/bin:$PATH - export PATH=/home/users/valeriu/miniconda2/bin:$PATH - mkdir esmvaltool_rose - cd esmvaltool_rose - cp ESMValTool/esmvaltool/utils/rose-cylc/esmvt_rose_wrapper.py . - svn checkout https://code.metoffice.gov.uk/svn/roses-u/b/d/6/8/4/trunk/ ~/u-bd684 - [enter Met Office password] - [configure ~/u-bd684/rose_suite.conf] - [configure ~/u-bd684/app/esmvaltool/env_setup] - python esmvt_rose_wrapper.py -c config-user.yml \ - -r recipe_autoassess_stratosphere.yml recipe_OceanPhysics.yml \ - -d $HOME/esmvaltool_rose - rose suite-run u-bd684 - -Note that you need to pass FULL PATHS to cylc, no `.` or `..` because all -operations are done remotely on different nodes. - -A practical actual example of running the tool can be found on JASMIN: -``/home/users/valeriu/esmvaltool_rose``. -There you will find the run shell: ``run_example``, as well as an example -how to set the configuration file. If you don't have Met Office credentials, -a copy of `u-bd684` is always located in ``/home/users/valeriu/roses/u-bd684`` on Jasmin. - .. _utils_batch_jobs: Using the scripts in `utils/batch-jobs` @@ -238,11 +178,11 @@ The following parameters have to be set in the script in order to make it run: * ``submit``, *bool*: Whether or not to automatically submit the job after creating the launch script. Default value is ``False``. * ``account``, *str*: Name of the DKRZ account in which the job will be billed. 
* ``outputs``, *str*: Name of the directory in which the job outputs (.out and .err files) are going to be saved. The outputs will be saved in `/home/user/`. -* ``conda_path``, *str*: Full path to the `mambaforge/etc/profile.d/conda.sh` executable. +* ``conda_path``, *str*: Full path to the `miniforge3/etc/profile.d/conda.sh` executable. Optionally, the following parameters can be edited: -* ``config_file``, *str*: Path to ``config-user.yml`` if default ``~/.esmvaltool/config-user.yml`` not used. +* ``config_dir``, *str*: Path to :ref:`configuration directory `, by default ``~/.config/esmvaltool/``. * ``partition``, *str*: Name of the DKRZ partition used to run jobs. Default is ``interactive`` to minimize computing cost compared to ``compute`` for which nodes cannot be shared. * ``memory``, *str*: Amount of memory requested for each run. Default is ``64G`` to allow to run 4 recipes on the same node in parallel. * ``time``, *str*: Time limit. Default is ``04:00:00`` to increase the job priority. Jobs can run for up to 8 hours and 12 hours on the compute and interactive partitions, respectively. @@ -291,7 +231,7 @@ script as well as a list of all available recipes. To generate the list, run the for recipe in $(esmvaltool recipes list | grep '\.yml$'); do echo $(basename "$recipe"); done > all_recipes.txt -To keep the script execution fast, it is recommended to use ``log_level: info`` in your config-user.yml file so that SLURM +To keep the script execution fast, it is recommended to use ``log_level: info`` in the configuration so that SLURM output files are rather small. .. _overview_page: @@ -443,63 +383,6 @@ klaus.zimmermann@smhi.se .. _pygithub: https://pygithub.readthedocs.io/en/latest/introduction.html -Recipe filler -============= - -If you need to fill in a blank recipe with additional datasets, you can do that with -the command `recipe_filler`. 
This runs a tool to obtain a set of additional datasets when -given a blank recipe, and you can give an arbitrary number of data parameters. The blank recipe -should contain, to the very least, a list of diagnostics, each with their variable(s). -Example of running the tool: - -.. code-block:: bash - - recipe_filler recipe.yml - -where `recipe.yml` is the recipe that needs to be filled with additional datasets; a minimal -example of this recipe could be: - -.. code-block:: yaml - - diagnostics: - diagnostic: - variables: - ta: - mip: Amon # required - start_year: 1850 # required - end_year: 1900 # required - - -Key features ------------- - -- you can add as many variable parameters as are needed; if not added, the - tool will use the ``"*"`` wildcard and find all available combinations; -- you can restrict the number of datasets to be looked for with the ``dataset:`` - key for each variable, pass a list of datasets as value, e.g. - ``dataset: [MPI-ESM1-2-LR, MPI-ESM-LR]``; -- you can specify a pair of experiments, e.g. 
``exp: [historical, rcp85]`` - for each variable; this will look for each available dataset per experiment - and assemble an aggregated data stretch from each experiment to complete - for the total data length specified by ``start_year`` and ``end_year``; equivalent to - ESMValTool's syntax on multiple experiments; this option needs an ensemble - to be declared explicitly; it will return no entry if there are gaps in data; -- ``start_year`` and ``end_year`` are required and are used to filter out the - datasets that don't have data in the interval; as noted above, the tool will not - return datasets with partial coverage from ``start_year`` to ``end_year``; - if you want all possible years hence no filtering on years just use ``"*"`` - for start and end years; -- ``config-user: rootpath: CMIPX`` may be a list, rootpath lists are supported; -- all major DRS paths (including ``default``, ``BADC``, ``ETHZ`` etc) are supported; -- speedup is achieved through CMIP mip tables lookup, so ``mip`` is required in recipe; - -Caveats -------- - -- the tool doesn't yet work with derived variables; it will not return any available datasets; -- operation restricted to CMIP data only, OBS lookup is not available yet. - - Extracting a list of input files from the provenance ==================================================== diff --git a/docker/Dockerfile b/docker/Dockerfile index 9ee3ddf0f8..9670028c7b 100644 --- a/docker/Dockerfile +++ b/docker/Dockerfile @@ -1,6 +1,6 @@ # To build this container, go to ESMValTool root folder and execute: # docker build -t esmvaltool:latest . -f docker/Dockerfile -FROM condaforge/mambaforge +FROM condaforge/miniforge3 WORKDIR /src/ESMValTool COPY environment.yml . diff --git a/docker/Dockerfile.dev b/docker/Dockerfile.dev index 65f1a34ea5..b7204abaa7 100644 --- a/docker/Dockerfile.dev +++ b/docker/Dockerfile.dev @@ -1,6 +1,6 @@ # To build this container, go to ESMValTool root folder and execute: # docker build -t esmvaltool:development . 
-f docker/Dockerfile.dev -FROM condaforge/mambaforge +FROM condaforge/miniforge3 WORKDIR /src/ESMValTool RUN apt update && DEBIAN_FRONTEND=noninteractive apt install -y curl git ssh && apt clean diff --git a/docker/Dockerfile.exp b/docker/Dockerfile.exp index a522995fc4..062a64b8ab 100644 --- a/docker/Dockerfile.exp +++ b/docker/Dockerfile.exp @@ -1,6 +1,6 @@ # To build this container, go to ESMValTool root folder and execute: # docker build -t esmvaltool:experimental . -f docker/Dockerfile.exp -FROM condaforge/mambaforge +FROM condaforge/miniforge3 RUN apt update && apt install -y git && apt clean WORKDIR /src/ESMValTool diff --git a/environment.yml b/environment.yml index 54aa73bcf0..72ccf127f6 100644 --- a/environment.yml +++ b/environment.yml @@ -14,20 +14,23 @@ dependencies: - cdo >=2.3.0 - cdsapi - cf-units + - cfgrib - cftime - cmocean + - curl <8.10 # https://github.com/ESMValGroup/ESMValTool/issues/3758 - cython - dask !=2024.8.0 # https://github.com/dask/dask/issues/11296 - distributed - ecmwf-api-client - eofs - - esmpy >=8.6.0 # github.com/SciTools-incubator/iris-esmf-regrid/pull/342 + - esmpy - esmvalcore 2.11.* - fiona - fire - fsspec - - gdal - - iris >=3.6.1 + - gdal >=3.9.0 + - importlib_metadata <8 # https://github.com/ESMValGroup/ESMValTool/issues/3699 only for Python 3.10/11 and esmpy<8.6 + - iris >=3.11 - iris-esmf-regrid >=0.10.0 # github.com/SciTools-incubator/iris-esmf-regrid/pull/342 - jinja2 - joblib @@ -41,21 +44,21 @@ dependencies: - numpy !=1.24.3 # severe masking bug - openpyxl - packaging - - pandas !=2.2.0,!=2.2.1,!=2.2.2 # github.com/ESMValGroup/ESMValCore/pull/2305 + - pandas==2.1.4 # unpin when ESMValCore released with https://github.com/ESMValGroup/ESMValCore/pull/2529 - pip !=21.3 - progressbar2 - prov - - psyplot - - psy-maps - - psy-reg - - psy-simple + - psyplot >=1.5.0 + - psy-maps >=1.5.0 + - psy-reg >=1.5.0 + - psy-simple >=1.5.0 - pyproj >=2.1 - pys2index # only from conda-forge - - python >=3.10 + - python >=3.10,<3.13 - 
python-cdo - python-dateutil - pyyaml - - rasterio + - rasterio >=1.3.10 - requests - ruamel.yaml - scikit-image @@ -63,14 +66,14 @@ dependencies: - scipy - seaborn - seawater - - shapely >=2 + - shapely >=2.0.2 - xarray >=0.12.0 - xesmf >=0.7.1 - xgboost >1.6.1 # github.com/ESMValGroup/ESMValTool/issues/2779 - xlsxwriter - zarr # Python packages needed for unit testing - - flake8 ==5.0.4 + - flake8 >=6 - pytest >=3.9,!=6.0.0rc1,!=6.0.0 - pytest-cov - pytest-env @@ -89,14 +92,14 @@ dependencies: - imagehash - isort ==5.13.2 - pre-commit - - prospector + - prospector >=1.12 # earliest support for Python 3.12 - pyroma # - vprof not on conda-forge - yamllint ==1.35.1 - yapf ==0.32.0 # NCL and dependencies - - ncl + - ncl >=6.6.2 - cdo - imagemagick - nco diff --git a/environment_osx.yml b/environment_osx.yml index d89556b593..242f0a4f56 100644 --- a/environment_osx.yml +++ b/environment_osx.yml @@ -14,6 +14,7 @@ dependencies: - cdo >=2.3.0 - cdsapi - cf-units + - cfgrib - cftime - cmocean - cython @@ -21,13 +22,14 @@ dependencies: - distributed - ecmwf-api-client - eofs - - esmpy >=8.6.0 # github.com/SciTools-incubator/iris-esmf-regrid/pull/342 + - esmpy - esmvalcore 2.11.* - fiona - fire - fsspec - - gdal - - iris >=3.6.1 + - gdal >=3.9.0 + - importlib_metadata <8 # https://github.com/ESMValGroup/ESMValTool/issues/3699 only for Python 3.10/11 and esmpy<8.6 + - iris >=3.11 - iris-esmf-regrid >=0.10.0 # github.com/SciTools-incubator/iris-esmf-regrid/pull/342 - jinja2 - joblib @@ -41,21 +43,21 @@ dependencies: - numpy !=1.24.3 # severe masking bug - openpyxl - packaging - - pandas !=2.2.0,!=2.2.1,!=2.2.2 # github.com/ESMValGroup/ESMValCore/pull/2305 + - pandas==2.1.4 # unpin when ESMValCore released with https://github.com/ESMValGroup/ESMValCore/pull/2529 - pip !=21.3 - progressbar2 - prov - - psyplot - - psy-maps - - psy-reg - - psy-simple + - psyplot >=1.5.0 + - psy-maps >=1.5.0 + - psy-reg >=1.5.0 + - psy-simple >=1.5.0 - pyproj >=2.1 - - pys2index # only from 
conda-forge - - python >=3.10 + - pys2index >=0.1.5 # only from conda-forge; https://github.com/ESMValGroup/ESMValTool/pull/3792 + - python >=3.10,<3.13 - python-cdo - python-dateutil - pyyaml - - rasterio + - rasterio >=1.3.10 - requests - ruamel.yaml - scikit-image @@ -63,14 +65,14 @@ dependencies: - scipy - seaborn - seawater - - shapely >=2 + - shapely >=2.0.2 - xarray >=0.12.0 - xesmf >=0.7.1 - xgboost >1.6.1 # github.com/ESMValGroup/ESMValTool/issues/2779 - xlsxwriter - zarr # Python packages needed for unit testing - - flake8 ==5.0.4 + - flake8 >=6 - pytest >=3.9,!=6.0.0rc1,!=6.0.0 - pytest-cov - pytest-env @@ -89,7 +91,7 @@ dependencies: - imagehash - isort ==5.13.2 - pre-commit - - prospector + - prospector >=1.12 # earliest support for Python 3.12 - pyroma # - vprof not on conda-forge - yamllint ==1.35.1 diff --git a/esmvaltool/cmorizers/data/cmor_config/CMAP.yml b/esmvaltool/cmorizers/data/cmor_config/CMAP.yml new file mode 100644 index 0000000000..eef1861f08 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/CMAP.yml @@ -0,0 +1,21 @@ +--- +# Global attributes of NetCDF file +attributes: + dataset_id: CMAP + project_id: OBS6 + tier: 2 + version: "v1" + modeling_realm: reanaly + source: "https://psl.noaa.gov/data/gridded/data.cmap.html" + reference: "cmap" + comment: | + '' + +# Variables to CMORize +variables: + # monthly frequency + pr_month: + short_name: pr + mip: Amon + raw: precip + file: "precip.mon.mean.nc" diff --git a/esmvaltool/cmorizers/data/cmor_config/ESACCI-LANDCOVER.yml b/esmvaltool/cmorizers/data/cmor_config/ESACCI-LANDCOVER.yml new file mode 100644 index 0000000000..925057dc12 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/ESACCI-LANDCOVER.yml @@ -0,0 +1,32 @@ +attributes: + project_id: 'OBS' + dataset_id: 'ESACCI-LANDCOVER' + tier: 2 + modeling_realm: sat + institution: 'Universite catholique de Louvain' + reference: 'esacci-landcover' + source: 'ftp://anon-ftp.ceda.ac.uk/neodc/esacci/land_cover/data/pft' + title: 'ESA 
CCI Land Cover' + version: 'v2.0.8' + comment: '' +filename: ESACCI-LC-L4-PFT-Map-300m-P1Y-{year}-v2.0.8.nc +variables: + baresoilFrac: + mip: Lmon + long_name: 'BARE' + frequency: yr + cropFrac: + mip: Lmon + long_name: 'GRASS-MAN' + frequency: yr + grassFrac: + mip: Lmon + long_name: 'GRASS-NAT' + frequency: yr + shrubFrac: + mip: Lmon + frequency: yr + treeFrac: + mip: Lmon + frequency: yr + diff --git a/esmvaltool/cmorizers/data/cmor_config/JRA-55.yml b/esmvaltool/cmorizers/data/cmor_config/JRA-55.yml new file mode 100644 index 0000000000..a4f4c8b379 --- /dev/null +++ b/esmvaltool/cmorizers/data/cmor_config/JRA-55.yml @@ -0,0 +1,103 @@ +--- +# Common global attributes for Cmorizer output +attributes: + dataset_id: JRA-55 + version: '1' + tier: 2 + modeling_realm: reanaly + project_id: OBS6 + source: 'https://rda.ucar.edu/datasets/ds628.1/' + reference: 'jra_55' + comment: | + '' + +# Variables to cmorize +variables: + cli: + short_name: cli + mip: Amon + file: fcst_p125.229_ciwc.{year}01_{year}12.grb + + clivi: + short_name: clivi + mip: Amon + file: fcst_column125.058_cice.{year}01_{year}12.grb + + clw: + short_name: clw + mip: Amon + file: fcst_p125.228_clwc.{year}01_{year}12.grb + + clwvi: + short_name: clwvi + mip: Amon + operator: sum + files: + - 'fcst_column125.058_cice.{year}01_{year}12.grb' + - 'fcst_column125.227_cw.{year}01_{year}12.grb' + + clt: + short_name: clt + mip: Amon + file: fcst_surf125.071_tcdc.{year}01_{year}12.grb + + prw: + short_name: prw + mip: Amon + file: fcst_column125.054_pwat.{year}01_{year}12.grb + + rlus: + short_name: rlus + mip: Amon + typeOfLevel: surface + file: fcst_phy2m125.212_ulwrf.{year}01_{year}12.grb + + rlut: + short_name: rlut + mip: Amon + typeOfLevel: nominalTop + file: fcst_phy2m125.212_ulwrf.{year}01_{year}12.grb + + rlutcs: + short_name: rlutcs + mip: Amon + file: fcst_phy2m125.162_csulf.{year}01_{year}12.grb + + rsus: + short_name: rsus + mip: Amon + typeOfLevel: surface + file: 
fcst_phy2m125.211_uswrf.{year}01_{year}12.grb + + rsuscs: + short_name: rsuscs + mip: Amon + typeOfLevel: surface + file: fcst_phy2m125.160_csusf.{year}01_{year}12.grb + + rsut: + short_name: rsut + mip: Amon + typeOfLevel: nominalTop + file: fcst_phy2m125.211_uswrf.{year}01_{year}12.grb + + rsutcs: + short_name: rsutcs + mip: Amon + typeOfLevel: nominalTop + file: fcst_phy2m125.160_csusf.{year}01_{year}12.grb + + ta: + short_name: ta + mip: Amon + file: anl_p125.011_tmp.{year}01_{year}12.grb + + tas: + short_name: tas + mip: Amon + file: anl_surf125.011_tmp.{year}01_{year}12.grb + + wap: + short_name: wap + mip: Amon + file: anl_p125.039_vvel.{year}01_{year}12.grb diff --git a/esmvaltool/cmorizers/data/cmor_config/NCEP-DOE-R2.yml b/esmvaltool/cmorizers/data/cmor_config/NCEP-DOE-R2.yml index e0768cf354..f18f76f5a9 100644 --- a/esmvaltool/cmorizers/data/cmor_config/NCEP-DOE-R2.yml +++ b/esmvaltool/cmorizers/data/cmor_config/NCEP-DOE-R2.yml @@ -39,3 +39,25 @@ variables: mip: Amon raw: omega file: 'omega\.mon\.mean\.nc' + pr_month: + short_name: pr + mip: Amon + raw: prate + file: 'prate.sfc.mon.mean.nc' + tauu_month: + short_name: tauu + mip: Amon + raw: uflx + file: 'uflx.sfc.mon.mean.nc' + make_negative: true + tauv_month: + short_name: tauv + mip: Amon + raw: vflx + file: 'vflx.sfc.mon.mean.nc' + make_negative: true + tos_month: + short_name: tos + mip: Amon + raw: skt + file: 'skt.sfc.mon.mean.nc' diff --git a/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR-V2.yml b/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR-V2.yml index 7591e99257..faded8f9d6 100644 --- a/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR-V2.yml +++ b/esmvaltool/cmorizers/data/cmor_config/NOAA-CIRES-20CR-V2.yml @@ -44,3 +44,21 @@ variables: mip: Amon raw: uswrf file: 'uswrf.ntat.mon.mean.nc' + pr_month: + short_name: pr + mip: Amon + raw: prate + file: 'prate.mon.mean.nc' + tauu_month: + short_name: tauu + mip: Amon + raw: uflx + file: 'uflx.mon.mean.nc' + make_negative: true 
+ tauv_month: + short_name: tauv + mip: Amon + raw: vflx + file: 'vflx.mon.mean.nc' + make_negative: true + diff --git a/esmvaltool/cmorizers/data/cmorizer.py b/esmvaltool/cmorizers/data/cmorizer.py index 16b7666350..5e66b7a70f 100755 --- a/esmvaltool/cmorizers/data/cmorizer.py +++ b/esmvaltool/cmorizers/data/cmorizer.py @@ -10,6 +10,7 @@ import os import shutil import subprocess +import warnings from pathlib import Path import esmvalcore @@ -18,13 +19,14 @@ from esmvalcore.config import CFG from esmvalcore.config._logging import configure_logging +from esmvaltool import ESMValToolDeprecationWarning from esmvaltool.cmorizers.data.utilities import read_cmor_config logger = logging.getLogger(__name__) datasets_file = os.path.join(os.path.dirname(__file__), 'datasets.yml') -class Formatter(): +class _Formatter(): """ Class to manage the download and formatting of datasets. @@ -39,26 +41,40 @@ def __init__(self, info): self.datasets_info = info self.config = '' - def start(self, command, datasets, config_file, options): + def start(self, command, datasets, config_file, config_dir, options): """Read configuration and set up formatter for data processing. Parameters ---------- command: str - Name of the command to execute + Name of the command to execute. datasets: str - List of datasets to process, comma separated + List of datasets to process, comma separated. config_file: str - Config file to use + Config file to use. Option will be removed in v2.14.0. + config_dir: str + Config directory to use. options: dict() - Extra options to overwrite config user file + Extra options to overwrite configuration. 
+ """ if isinstance(datasets, str): self.datasets = datasets.split(',') else: self.datasets = datasets - CFG.load_from_file(config_file) + if config_file is not None: # remove in v2.14.0 + CFG.load_from_file(config_file) + elif config_dir is not None: + config_dir = Path( + os.path.expandvars(config_dir) + ).expanduser().absolute() + if not config_dir.is_dir(): + raise NotADirectoryError( + f"Invalid --config_dir given: {config_dir} is not an " + f"existing directory" + ) + CFG.update_from_dirs([config_dir]) CFG.update(options) self.config = CFG.start_session(f'data_{command}') @@ -199,8 +215,9 @@ def format(self, start, end, install): failed_datasets.append(dataset) if failed_datasets: - raise Exception( - f'Format failed for datasets {" ".join(failed_datasets)}') + raise RuntimeError( + f'Format failed for datasets {" ".join(failed_datasets)}' + ) @staticmethod def has_downloader(dataset): @@ -400,7 +417,7 @@ class DataCommand(): def __init__(self): with open(datasets_file, 'r', encoding='utf8') as data: self._info = yaml.safe_load(data) - self.formatter = Formatter(self._info) + self.formatter = _Formatter(self._info) def _has_downloader(self, dataset): return 'Yes' if self.formatter.has_downloader(dataset) else "No" @@ -441,28 +458,48 @@ def download(self, start=None, end=None, overwrite=False, + config_dir=None, **kwargs): """Download datasets. Parameters ---------- - datasets : list(str) + datasets: list(str) List of datasets to format - config_file : str, optional - Path to ESMValTool's config user file, by default None - start : str, optional + config_file: str, optional + Path to ESMValTool's config user file, by default None. + + .. deprecated:: 2.12.0 + This option has been deprecated in ESMValTool version 2.12.0 + and is scheduled for removal in version 2.14.0. Please use the + option `config_dir` instead. + start: str, optional Start of the interval to process, by default None. Valid formats are YYYY, YYYYMM and YYYYMMDD. 
- end : str, optional + end: str, optional End of the interval to process, by default None. Valid formats are YYYY, YYYYMM and YYYYMMDD. - overwrite : bool, optional + overwrite: bool, optional If true, download already present data again + config_dir: str, optional + Path to additional ESMValTool configuration directory. See + :ref:`esmvalcore:config_yaml_files` for details. + """ + if config_file is not None: + msg = ( + "The option `config_file` has been deprecated in ESMValTool " + "version 2.12.0 and is scheduled for removal in version " + "2.14.0. Please use the option ``config_dir`` instead." + ) + warnings.warn(msg, ESMValToolDeprecationWarning) + start = self._parse_date(start) end = self._parse_date(end) - self.formatter.start('download', datasets, config_file, kwargs) + self.formatter.start( + 'download', datasets, config_file, config_dir, kwargs + ) self.formatter.download(start, end, overwrite) def format(self, @@ -471,6 +508,7 @@ def format(self, start=None, end=None, install=False, + config_dir=None, **kwargs): """Format datasets. @@ -480,6 +518,11 @@ def format(self, List of datasets to format config_file : str, optional Path to ESMValTool's config user file, by default None + + .. deprecated:: 2.12.0 + This option has been deprecated in ESMValTool version 2.12.0 + and is scheduled for removal in version 2.14.0. Please use the + option `config_dir` instead. start : str, optional Start of the interval to process, by default None. Valid formats are YYYY, YYYYMM and YYYYMMDD. @@ -488,11 +531,25 @@ def format(self, are YYYY, YYYYMM and YYYYMMDD. install : bool, optional If true, move processed data to the folder, by default False + config_dir: str, optional + Path to additional ESMValTool configuration directory. See + :ref:`esmvalcore:config_yaml_files` for details. + """ + if config_file is not None: + msg = ( + "The option `config_file` has been deprecated in ESMValTool " + "version 2.12.0 and is scheduled for removal in version " + "2.14.0. 
Please use the option ``config_dir`` instead." + ) + warnings.warn(msg, ESMValToolDeprecationWarning) + start = self._parse_date(start) end = self._parse_date(end) - self.formatter.start('formatting', datasets, config_file, kwargs) + self.formatter.start( + 'formatting', datasets, config_file, config_dir, kwargs + ) self.formatter.format(start, end, install) def prepare(self, @@ -502,6 +559,7 @@ def prepare(self, end=None, overwrite=False, install=False, + config_dir=None, **kwargs): """Download and format a set of datasets. @@ -511,6 +569,11 @@ def prepare(self, List of datasets to format config_file : str, optional Path to ESMValTool's config user file, by default None + + .. deprecated:: 2.12.0 + This option has been deprecated in ESMValTool version 2.12.0 + and is scheduled for removal in version 2.14.0. Please use the + option `config_dir` instead. start : str, optional Start of the interval to process, by default None. Valid formats are YYYY, YYYYMM and YYYYMMDD. @@ -521,11 +584,25 @@ def prepare(self, If true, move processed data to the folder, by default False overwrite : bool, optional If true, download already present data again + config_dir: str, optional + Path to additional ESMValTool configuration directory. See + :ref:`esmvalcore:config_yaml_files` for details. + """ + if config_file is not None: + msg = ( + "The option `config_file` has been deprecated in ESMValTool " + "version 2.12.0 and is scheduled for removal in version " + "2.14.0. Please use the option ``config_dir`` instead." 
+ ) + warnings.warn(msg, ESMValToolDeprecationWarning) + start = self._parse_date(start) end = self._parse_date(end) - self.formatter.start('preparation', datasets, config_file, kwargs) + self.formatter.start( + 'preparation', datasets, config_file, config_dir, kwargs + ) if self.formatter.download(start, end, overwrite): self.formatter.format(start, end, install) else: diff --git a/esmvaltool/cmorizers/data/datasets.yml b/esmvaltool/cmorizers/data/datasets.yml index dabe314025..4c7c168009 100644 --- a/esmvaltool/cmorizers/data/datasets.yml +++ b/esmvaltool/cmorizers/data/datasets.yml @@ -17,16 +17,16 @@ datasets: analyses covering analysis of monthly rainfall. The dataset provides consistent temporal and spatial analyses across Australia for each observed data variable. This accounts for spatial and temporal gaps in observations. Where possible, the gridded analysis techniques provide useful estimates in data-sparse regions - such as central Australia. - + such as central Australia. + Time coverage: Site-based data are used to provide gridded climate data at the monthly timescale for rainfall (1900+). Reference: Evans, A., Jones, D.A., Smalley, R., and Lellyett, S. 2020. An enhanced gridded rainfall analysis scheme for Australia. Bureau of Meteorology Research Report. No. 41. National Computational Infrastructure (NCI) - Catalogue Record: http://dx.doi.org/10.25914/6009600786063. - Data from NCI (National Computing Infrastructure Australia https://nci.org.au/), + Data from NCI (National Computing Infrastructure Australia https://nci.org.au/), requires an NCI account and access to Gadi(Supercomputer in Canberra) and the project found in catalogue record. Access can be requested through NCI. 
NCI is an ESGF node (https://esgf.nci.org.au/projects/esgf-nci/) - + ANUClimate: tier: 3 source: "https://dx.doi.org/10.25914/60a10aa56dd1b" @@ -35,7 +35,7 @@ datasets: Data from NCI project requiring an NCI account and access to GADI ANUClimate 2.0 consists of gridded daily and monthly climate variables across the terrestrial landmass of Australia - from at least 1970 to the present. Rainfall grids are generated from 1900 to the present. The underpinning spatial + from at least 1970 to the present. Rainfall grids are generated from 1900 to the present. The underpinning spatial models have been developed at the Fenner School of Environment and Society of the Australian National University. APHRO-MA: @@ -264,6 +264,15 @@ datasets: named like the year (e.g. 2007), no subdirectories with days etc. + CMAP: + tier: 2 + source: https://psl.noaa.gov/data/gridded/data.cmap.html + last_access: 2024-09-09 + info: | + To facilitate the download, the links to the https server are provided. + https://downloads.psl.noaa.gov/Datasets/cmap/enh/ + precip.mon.mean.nc + CowtanWay: tier: 2 source: https://www-users.york.ac.uk/~kdc3/papers/coverage2013/series.html @@ -301,7 +310,7 @@ datasets: last_access: 2020-03-23 info: | Create a new empty directory ``$RAWOBSPATH/Tier2/CT2019`` (where - ``$RAWOBSPATH`` is given by your user configuration file) where the raw + ``$RAWOBSPATH`` is given by your configuration) where the raw data will be stored. The download of the data is automatically handled by this script. If data is already present in this directory, the download is skipped (to force a new download delete your old files). @@ -473,24 +482,16 @@ datasets: ESACCI-LANDCOVER: tier: 2 - source: ftp://anon-ftp.ceda.ac.uk/neodc/esacci/land_cover/data/land_cover_maps/ - last_access: 2019-01-10 + source: ftp://anon-ftp.ceda.ac.uk/neodc/esacci/land_cover/data/pft/v2.0.8/ + last_access: 2024-07-11 info: | - Download the 3 NetCDF files for 2000, 2005 and 2010. 
- Download the CCI-LC Tools from: - http://maps.elie.ucl.ac.be/CCI/viewer/download/lc-user-tools-3.14.zip - Unpack and run the CCI-LC Tools on each of the NetCDF files as follows: - bash lc-user-tools-3.14/bin/aggregate-map.sh \ - -PgridName=GEOGRAPHIC_LAT_LON -PnumMajorityClasses=1 \ - -PoutputAccuracy=false -PoutputPFTClasses=true \ - -PoutputLCCSClasses=false -PnumRows=360 - Put the resulting processed data in input_dir_path. - - Caveats - The CCI-LC Tools must be applied before running this script. - The CCI-LC Tools require Java Version 7 or higher. - The input data are available for a single year and are copied over to - generate a time series over their time range of validity. + Download and processing instructions: + Use the following CLI to download all the files: + esmvaltool data download ESACCI-LANDCOVER + The underlying downloader is located here: + /ESMValTool/esmvaltool/cmorizers/data/downloaders/datasets/esacci_landcover.py + and it will download all the files currently available on CEDA (1992-2020) + under a single directory as follow: ${RAWOBS}/Tier2/ESACCI-LANDCOVER ESACCI-LST: tier: 2 @@ -562,7 +563,7 @@ datasets: source: https://wui.cmsaf.eu/safira/action/viewDoiDetails?acronym=COMBI_V001 last_access: 2024-02-21 info: | - CDR2 requires registration at EUMETSAT CM SAF, the information on how to + CDR2 requires registration at EUMETSAT CM SAF, the information on how to download the order will be emailed once the order is ready. All files need to be in one directory, not in yearly subdirectories. @@ -816,6 +817,15 @@ datasets: mon/atmos/rsut/rsut_Amon_reanalysis_JRA-25_197901-201312.nc mon/atmos/rsutcs/rsutcs_Amon_reanalysis_JRA-25_197901-201312.nc + JRA-55: + tier: 2 + source: https://rda.ucar.edu/datasets/ds628.1/ + last_access: 2023-03-22 + info: | + Create an account on the research data archive (RDA) in order to be able + to download the data (1.25 degree, pressure levels). See + https://rda.ucar.edu/login/register/ for more details. 
+ Kadow2020: tier: 2 source: http://users.met.fu-berlin.de/~ChristopherKadow/ @@ -902,11 +912,11 @@ datasets: Select "Data Access" -> "Subset/Get Data" -> "Get Data" and follow the "Instructions for downloading". All *.he5 files need to be saved in the $RAWOBS/Tier3/MLS-AURA directory, where $RAWOBS refers to the RAWOBS - directory defined in the user configuration file. Apply this procedure to - both links provided above. The temperature fields are necessary for quality + directory defined in the configuration. Apply this procedure to both + links provided above. The temperature fields are necessary for quality control of the RHI data (see Data Quality Document for MLS-AURA for more information). - A registration is required + A registration is required. MOBO-DIC_MPIM: tier: 2 @@ -960,9 +970,14 @@ datasets: pressure/ rhum.mon.mean.nc air.mon.mean.nc + omega.mon.mean.nc https://downloads.psl.noaa.gov/Datasets/ncep.reanalysis2/Monthlies/ gaussian_grid tcdc.eatm.mon.mean.nc + prate.sfc.mon.mean.nc + uflx.sfc.mon.mean.nc + vflx.sfc.mon.mean.nc + skt.sfc.mon.mean.nc https://downloads.psl.noaa.gov/Datasets/ncep.reanalysis2/Monthlies/ surface pr_wtr.eatm.mon.mean.nc @@ -1053,7 +1068,9 @@ datasets: gaussian/monolevel/tcdc.eatm.mon.mean.nc gaussian/monolevel/ulwrf.ntat.mon.mean.nc gaussian/monolevel/uswrf.ntat.mon.mean.nc - + gaussian/monolevel/prate.mon.mean.nc + gaussian/monolevel/uflx.mon.mean.nc + gaussian/monolevel/vflx.mon.mean.nc NOAA-CIRES-20CR-V3: tier: 2 source: ftp.cdc.noaa.gov/Projects/20thC_ReanV3/Monthlies/ @@ -1075,7 +1092,7 @@ datasets: last_access: 2023-12-04 info: | Download the following files: - ersst.yyyymm.nc + ersst.yyyymm.nc for years 1854 to 2020 NOAA-ERSSTv5: @@ -1084,7 +1101,7 @@ datasets: last_access: 2023-12-04 info: | Download the following files: - ersst.v5.yyyymm.nc + ersst.v5.yyyymm.nc for years 1854 onwards NOAAGlobalTemp: @@ -1111,13 +1128,13 @@ datasets: Download daily data from: https://nsidc.org/data/NSIDC-0116 Login required for 
download, and also requires citation only to use - + NSIDC-G02202-sh: tier: 3 source: https://polarwatch.noaa.gov/erddap/griddap/nsidcG02202v4shmday last_access: 2023-05-13 info: | - Download monthly data. + Download monthly data. Login required for download, and also requires citation only to use OceanSODA-ETHZ: diff --git a/esmvaltool/cmorizers/data/download_scripts/download_era_interim.py b/esmvaltool/cmorizers/data/download_scripts/download_era_interim.py index 72cf8d98af..374c750ef6 100644 --- a/esmvaltool/cmorizers/data/download_scripts/download_era_interim.py +++ b/esmvaltool/cmorizers/data/download_scripts/download_era_interim.py @@ -12,8 +12,13 @@ 4. Copy/paste the text in https://api.ecmwf.int/v1/key/ into a blank text file and save it as $HOME/.ecmwfapirc -5. Use ESMValCore/esmvalcore/config-user.yml as an template -and set the rootpath of the output directory in RAWOBS +5. Copy the default configuration file with + +```bash +esmvaltool config get_config_user --path=config-user.yml +``` + +and set the ``rootpath`` for the RAWOBS project. 6. Check the description of the variables at https://apps.ecmwf.int/codes/grib/param-db diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/cmap.py b/esmvaltool/cmorizers/data/downloaders/datasets/cmap.py new file mode 100644 index 0000000000..5fd58b5ac1 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/cmap.py @@ -0,0 +1,38 @@ +"""Script to download CMAP (CPC Merged Analysis of Precipitation).""" + +import logging + +from esmvaltool.cmorizers.data.downloaders.ftp import FTPDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = FTPDownloader( + config=config, + server="ftp2.psl.noaa.gov", + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.connect() + + downloader.download_file("/Datasets/cmap/enh/precip.mon.mean.nc") diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/esacci_landcover.py b/esmvaltool/cmorizers/data/downloaders/datasets/esacci_landcover.py new file mode 100644 index 0000000000..efffa2aaaa --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/esacci_landcover.py @@ -0,0 +1,52 @@ +"""Script to download ESACCI-LANDCOVER pft data from the CEDA.""" + +from datetime import datetime + +from esmvaltool.cmorizers.data.downloaders.ftp import CCIDownloader + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + # Default start and end dates if not provided + if not start_date: + start_date = datetime(1992, 1, 1) + if not end_date: + end_date = datetime(2020, 12, 31) + + # Initialize the downloader + downloader = CCIDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + downloader.ftp_name = 'land_cover' + downloader.connect() + + # Set current working directory to the main directory with the files + downloader.set_cwd('/pft/v2.0.8/') + + # Create a regex pattern to match any .nc files + year_range = '|'.join(str(year) for year in range(start_date.year, + end_date.year + 1)) + pattern = rf".*-(?:{year_range}).*\.nc$" + + # Download all .nc files in the directory + downloader.download_folder('.', filter_files=pattern) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/jra_55.py b/esmvaltool/cmorizers/data/downloaders/datasets/jra_55.py new file mode 100644 index 0000000000..7a9e374136 --- /dev/null +++ b/esmvaltool/cmorizers/data/downloaders/datasets/jra_55.py @@ -0,0 +1,113 @@ +"""Script to download JRA-55 from RDA.""" +import logging +import os +from datetime import datetime + +from dateutil import relativedelta + +from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader + +logger = logging.getLogger(__name__) + + +def download_dataset(config, dataset, dataset_info, start_date, end_date, + overwrite): + """Download dataset. 
+ + Parameters + ---------- + config : dict + ESMValTool's user configuration + dataset : str + Name of the dataset + dataset_info : dict + Dataset information from the datasets.yml file + start_date : datetime + Start of the interval to download + end_date : datetime + End of the interval to download + overwrite : bool + Overwrite already downloaded files + """ + downloader = WGetDownloader( + config=config, + dataset=dataset, + dataset_info=dataset_info, + overwrite=overwrite, + ) + + os.makedirs(downloader.local_folder, exist_ok=True) + + user = os.environ.get("rda-user") + if user is None: + user = str(input("RDA user name? ")) + if user == "": + errmsg = ("A RDA account is required to download JRA-55 data." + " Please visit https://rda.ucar.edu/login/register/" + " to create an account at the Research Data Archive" + " (RDA) if needed.") + logger.error(errmsg) + raise ValueError + + passwd = os.environ.get("rda-passwd") + if passwd is None: + passwd = str(input("RDA password? ")) + + if start_date is None: + start_date = datetime(1958, 1, 1) + if end_date is None: + end_date = datetime(2022, 12, 31) + loop_date = start_date + + options = ["-O", "Authentication.log", "--save-cookies=auth.rda_ucar_edu", + f"--post-data=\"email={user}&passwd={passwd}&action=login\""] + + # login to Research Data Archive (RDA) + + downloader.login("https://rda.ucar.edu/cgi-bin/login", options) + + # download files + + url = "https://data.rda.ucar.edu/ds628.1" + download_options = ["--load-cookies=auth.rda_ucar_edu"] + + # define variables to download + + var = [["011_tmp", "anl_p125"], + ["011_tmp", "anl_surf125"], + ["039_vvel", "anl_p125"], + ["071_tcdc", "fcst_surf125"], + ["054_pwat", "fcst_column125"], + ["058_cice", "fcst_column125"], + ["160_csusf", "fcst_phy2m125"], + ["162_csulf", "fcst_phy2m125"], + ["211_uswrf", "fcst_phy2m125"], + ["212_ulwrf", "fcst_phy2m125"], + ["227_cw", "fcst_column125"], + ["228_clwc", "fcst_p125"], + ["229_ciwc", "fcst_p125"]] + + # download 
data + + while loop_date <= end_date: + year = loop_date.year + + for item in var: + varname = item[0] + channel = item[1] + fname = f"{channel}.{varname}.{year}01_{year}12" + # download file + downloader.download_file(url + f"/{channel}/{year}/" + + fname, download_options) + # add file extension ".grb" + os.rename(downloader.local_folder + "/" + fname, + downloader.local_folder + "/" + fname + ".grb") + + loop_date += relativedelta.relativedelta(years=1) + + # clean up temporary files + + if os.path.exists("Authentication.log"): + os.remove("Authentication.log") + if os.path.exists("auth.rda_ucar_edu"): + os.remove("auth.rda_ucar_edu") diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/ncep_doe_r2.py b/esmvaltool/cmorizers/data/downloaders/datasets/ncep_doe_r2.py index 704493554f..2d691e710d 100644 --- a/esmvaltool/cmorizers/data/downloaders/datasets/ncep_doe_r2.py +++ b/esmvaltool/cmorizers/data/downloaders/datasets/ncep_doe_r2.py @@ -48,3 +48,11 @@ def download_dataset(config, dataset, dataset_info, start_date, end_date, wget_options=[]) downloader.download_file(url + "surface/pr_wtr.eatm.mon.mean.nc", wget_options=[]) + downloader.download_file(url + "gaussian_grid/prate.sfc.mon.mean.nc", + wget_options=[]) + downloader.download_file(url + "gaussian_grid/uflx.sfc.mon.mean.nc", + wget_options=[]) + downloader.download_file(url + "gaussian_grid/vflx.sfc.mon.mean.nc", + wget_options=[]) + downloader.download_file(url + "gaussian_grid/skt.sfc.mon.mean.nc", + wget_options=[]) diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr_v2.py b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr_v2.py index fb2d733f06..bbbd708293 100644 --- a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr_v2.py +++ b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_cires_20cr_v2.py @@ -34,7 +34,7 @@ def download_dataset(config, dataset, dataset_info, start_date, end_date, ) downloader.connect() - 
downloader.set_cwd("Projects/20thC_ReanV2/Monthlies/") + downloader.set_cwd("/Projects/20thC_ReanV2/Monthlies/") downloader.download_file("monolevel/cldwtr.eatm.mon.mean.nc", sub_folder='surface') downloader.download_file("monolevel/pr_wtr.eatm.mon.mean.nc", @@ -47,3 +47,9 @@ def download_dataset(config, dataset, dataset_info, start_date, end_date, sub_folder='surface_gauss') downloader.download_file("gaussian/monolevel/uswrf.ntat.mon.mean.nc", sub_folder='surface_gauss') + downloader.download_file("gaussian/monolevel/prate.mon.mean.nc", + sub_folder='surface_gauss') + downloader.download_file("gaussian/monolevel/uflx.mon.mean.nc", + sub_folder='surface_gauss') + downloader.download_file("gaussian/monolevel/vflx.mon.mean.nc", + sub_folder='surface_gauss') diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv3b.py b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv3b.py index 0ac6a3e012..5a54080be4 100644 --- a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv3b.py +++ b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv3b.py @@ -1,6 +1,7 @@ """Script to download NOAA-ERSST-v3b.""" import logging from datetime import datetime + from dateutil import relativedelta from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv5.py b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv5.py index f995f9d2c7..7dbeccfe12 100644 --- a/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv5.py +++ b/esmvaltool/cmorizers/data/downloaders/datasets/noaa_ersstv5.py @@ -1,6 +1,7 @@ """Script to download NOAA-ERSST-V5.""" import logging from datetime import datetime + from dateutil import relativedelta from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader diff --git a/esmvaltool/cmorizers/data/downloaders/datasets/nsidc_g02202_sh.py b/esmvaltool/cmorizers/data/downloaders/datasets/nsidc_g02202_sh.py index 798decda96..8c3c02c410 100644 
--- a/esmvaltool/cmorizers/data/downloaders/datasets/nsidc_g02202_sh.py +++ b/esmvaltool/cmorizers/data/downloaders/datasets/nsidc_g02202_sh.py @@ -1,6 +1,7 @@ """Script to download NSIDC-G02202-sh.""" import logging from datetime import datetime + from dateutil import relativedelta from esmvaltool.cmorizers.data.downloaders.wget import WGetDownloader diff --git a/esmvaltool/cmorizers/data/downloaders/wget.py b/esmvaltool/cmorizers/data/downloaders/wget.py index 8544e1d727..2afcca1d5a 100644 --- a/esmvaltool/cmorizers/data/downloaders/wget.py +++ b/esmvaltool/cmorizers/data/downloaders/wget.py @@ -54,6 +54,20 @@ def download_file(self, server_path, wget_options): logger.debug(command) subprocess.check_output(command) + def login(self, server_path, wget_options): + """Login. + + Parameters + ---------- + server_path: str + Path to remote file + wget_options: list(str) + Extra options for wget + """ + command = ['wget'] + wget_options + [server_path] + logger.debug(command) + subprocess.check_output(command) + @property def overwrite_options(self): """Get overwrite options as configured in downloader.""" diff --git a/esmvaltool/cmorizers/data/formatters/datasets/agcd.py b/esmvaltool/cmorizers/data/formatters/datasets/agcd.py index a8b138f7b9..f0d6b290ef 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/agcd.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/agcd.py @@ -80,7 +80,7 @@ def _extract_variable(cmor_info, attrs, filepath, out_dir): utils.fix_var_metadata(cube, cmor_info) - utils.fix_coords(cube) + cube = utils.fix_coords(cube) bounds = get_time_bounds(cube.coords('time')[0], 'mon') cube.coords('time')[0].bounds = bounds utils.set_global_atts(cube, attrs) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/anuclimate.py b/esmvaltool/cmorizers/data/formatters/datasets/anuclimate.py index 0077bd17a4..f82ad295ca 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/anuclimate.py +++ 
b/esmvaltool/cmorizers/data/formatters/datasets/anuclimate.py @@ -87,7 +87,7 @@ def _extract_variable(cmor_info, attrs, filepaths, out_dir): for cbls in [cbls_1, cbls_2]: iris.util.equalise_attributes(cbls) cubesave = cbls.concatenate_cube() - utils.fix_coords(cubesave) + cubesave = utils.fix_coords(cubesave) logger.info("Saving file") utils.save_variable(cubesave, diff --git a/esmvaltool/cmorizers/data/formatters/datasets/aphro_ma.py b/esmvaltool/cmorizers/data/formatters/datasets/aphro_ma.py index 002c83662d..1e1f9dbc4b 100755 --- a/esmvaltool/cmorizers/data/formatters/datasets/aphro_ma.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/aphro_ma.py @@ -94,7 +94,7 @@ def _extract_variable(short_name, var, cfg, filepath, out_dir, version): # fix coordinates if 'height2m' in cmor_info.dimensions: utils.add_height2m(cube) - utils.fix_coords(cube) + cube = utils.fix_coords(cube) # Fix metadata attrs = cfg['attributes'].copy() @@ -124,7 +124,7 @@ def _extract_variable(short_name, var, cfg, filepath, out_dir, version): attrs['mip'] = 'Amon' # Fix coordinates - utils.fix_coords(cube) + cube = utils.fix_coords(cube) # Save variable utils.save_variable(cube, diff --git a/esmvaltool/cmorizers/data/formatters/datasets/berkeleyearth.py b/esmvaltool/cmorizers/data/formatters/datasets/berkeleyearth.py index 81e4909584..c2be3dce7e 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/berkeleyearth.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/berkeleyearth.py @@ -172,7 +172,7 @@ def _extr_var_n_calc_abs_tas(short_name, var, cfg, filepath, out_dir): for s_name, cube in zip(short_names, [cube_abs, cube_anom]): cmor_info = cfg['cmor_table'].get_variable(var['mip'], s_name) - utils.fix_coords(cube) + cube = utils.fix_coords(cube) if 'height2m' in cmor_info.dimensions: utils.add_height2m(cube) @@ -209,7 +209,7 @@ def _extr_var_n_calc_abs_tas(short_name, var, cfg, filepath, out_dir): cube_sftlf = cubes.extract(NameConstraint(var_name=raw_var_sftlf))[0] # fix 
coordinates - utils.fix_coords(cube_sftlf) + cube_sftlf = utils.fix_coords(cube_sftlf) # cmorize sftlf units cmor_info_sftlf = cfg['cmor_table'].get_variable(var['rawsftlf_mip'], diff --git a/esmvaltool/cmorizers/data/formatters/datasets/ceres_ebaf.py b/esmvaltool/cmorizers/data/formatters/datasets/ceres_ebaf.py index c63f72170a..e02332130d 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/ceres_ebaf.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/ceres_ebaf.py @@ -50,7 +50,7 @@ def _extract_variable(short_name, var, cfg, filepath, out_dir): utils.convert_timeunits(cube, 1950) # Fix coordinates - utils.fix_coords(cube) + cube = utils.fix_coords(cube) # Fix metadata attrs = cfg['attributes'] diff --git a/esmvaltool/cmorizers/data/formatters/datasets/cmap.py b/esmvaltool/cmorizers/data/formatters/datasets/cmap.py new file mode 100644 index 0000000000..fecd2b128e --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/cmap.py @@ -0,0 +1,69 @@ +"""ESMValTool CMORizer for CMAP (CPC Merged Analysis of Precipitation) data. + +Tier + Tier 2: other freely-available dataset. + +Source + https://psl.noaa.gov/data/gridded/data.cmap.html + +Last access + 20240909 + +Download and processing instructions + To facilitate the download, the links to the ftp server are provided. 
+ + https://downloads.psl.noaa.gov/Datasets/cmap/enh/ + precip.mon.mean.nc + +Caveats + +""" + +import logging +import re +from copy import deepcopy +from pathlib import Path + +import iris + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _extract_variable(short_name, var, cfg, raw_filepath, out_dir): + cmor_info = cfg["cmor_table"].get_variable(var["mip"], short_name) + attributes = deepcopy(cfg["attributes"]) + attributes["mip"] = var["mip"] + + cubes = iris.load(raw_filepath) + for cube in cubes: + assert cube.units == "mm/day", f"unknown units:{cube.units}" + # convert data from mm/day to kg m-2 s-1 + # mm/day ~ density_water * mm/day + # = 1000 kg m-3 * 1/(1000*86400) m s-1 = 1/86400 kg m-2 s-1 + cube = cube / 86400 + cube.units = "kg m-2 s-1" + + utils.fix_var_metadata(cube, cmor_info) + cube = utils.fix_coords(cube) + utils.set_global_atts(cube, attributes) + + logger.info("Saving file") + utils.save_variable(cube, short_name, out_dir, attributes, + unlimited_dimensions=["time"]) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + for short_name, var in cfg["variables"].items(): + logger.info("CMORizing variable '%s'", short_name) + short_name = var["short_name"] + raw_filenames = Path(in_dir).rglob("*.nc") + filenames = [] + for raw_filename in raw_filenames: + if re.search(var["file"], str(raw_filename)) is not None: + filenames.append(raw_filename) + + for filename in sorted(filenames): + _extract_variable(short_name, var, cfg, filename, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/cowtanway.py b/esmvaltool/cmorizers/data/formatters/datasets/cowtanway.py index 76c9d525c8..dc2073f825 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/cowtanway.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/cowtanway.py @@ -43,7 +43,7 @@ def _extract_variable(short_name, var, vkey, version, cfg, filepath, out_dir): 
utils.convert_timeunits(cube, 1950) # Fix coordinates - utils.fix_coords(cube) + cube = utils.fix_coords(cube) if 'height2m' in cmor_info.dimensions: utils.add_height2m(cube) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/cru.py b/esmvaltool/cmorizers/data/formatters/datasets/cru.py index 03d1ac77f4..28d1f9fb7e 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/cru.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/cru.py @@ -72,7 +72,7 @@ def _extract_variable(short_name, var, cfg, filepath, out_dir): Unit("days since 1950-1-1 00:00:00", calendar="gregorian")) # Fix coordinates - utils.fix_coords(cube) + cube = utils.fix_coords(cube) if "height2m" in cmor_info.dimensions: utils.add_height2m(cube) if version not in ["TS4.02"]: diff --git a/esmvaltool/cmorizers/data/formatters/datasets/ct2019.py b/esmvaltool/cmorizers/data/formatters/datasets/ct2019.py index 395d78e25d..64f64f4e82 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/ct2019.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/ct2019.py @@ -11,7 +11,7 @@ Download and processing instructions Create a new empty directory ``$RAWOBSPATH/Tier2/CT2019`` (where - ``$RAWOBSPATH`` is given by your user configuration file) where the raw + ``$RAWOBSPATH`` is given in the configuration) where the raw data will be stored. The download of the data is automatically handled by this script. If data is already present in this directory, the download is skipped (to force a new download delete your old files). 
@@ -39,7 +39,7 @@ def _add_aux_coords(cube, input_files, coords_to_add): logger.info("Adding auxiliary coordinate '%s' to '%s'", coord_name, cube.var_name) coord_cube = _load_cube(input_files, coord_name) - utils.fix_coords(coord_cube) + coord_cube = utils.fix_coords(coord_cube) dim_coords = [c.name() for c in coord_cube.coords(dim_coords=True)] if 'boundary' in dim_coords: (points, bounds) = _interpolate_center(coord_cube) @@ -166,7 +166,7 @@ def _extract_variable(short_name, var, cfg, input_files, out_dir): utils.convert_timeunits(cube, 1950) # Fix coordinates - utils.fix_coords(cube) + cube = utils.fix_coords(cube) # Fix metadata attrs = cfg['attributes'] diff --git a/esmvaltool/cmorizers/data/formatters/datasets/duveiller2018.py b/esmvaltool/cmorizers/data/formatters/datasets/duveiller2018.py index 8e070a3ae0..a793f8cbb1 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/duveiller2018.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/duveiller2018.py @@ -111,13 +111,9 @@ def extract_variable(var_info, raw_info, out_dir, attrs): # Fix metadata fix_var_metadata(cube, var_info) # Fix coords - fix_coords(cube) + cube = fix_coords(cube) # Now set the time coordinate properly fix_time_coord_duveiller2018(cube) - # Latitude has to be increasing so flip it - # (this is not fixed in fix_coords) - logger.info("Flipping dimensional coordinate latitude") - cube = cube[:, ::-1, :] # Global attributes set_global_atts(cube, attrs) save_variable(cube, var, out_dir, attrs, local_keys=['positive']) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/eppley_vgpm_modis.py b/esmvaltool/cmorizers/data/formatters/datasets/eppley_vgpm_modis.py index 6a6d15d267..6fae2d2d1e 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/eppley_vgpm_modis.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/eppley_vgpm_modis.py @@ -54,7 +54,7 @@ def extract_variable(var_info, raw_info, out_dir, attrs): for cube in cubes: if cube.var_name == rawvar: fix_var_metadata(cube, 
var_info) - fix_coords(cube) + cube = fix_coords(cube) _fix_data(cube, var) set_global_atts(cube, attrs) save_variable( diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esacci_landcover.ncl b/esmvaltool/cmorizers/data/formatters/datasets/esacci_landcover.ncl deleted file mode 100644 index 8472cef6fb..0000000000 --- a/esmvaltool/cmorizers/data/formatters/datasets/esacci_landcover.ncl +++ /dev/null @@ -1,217 +0,0 @@ -; ############################################################################# -; ESMValTool CMORizer for ESACCI-LANDCOVER data -; ############################################################################# -; -; Tier -; Tier 2: other freely-available dataset. -; -; Source -; ftp://anon-ftp.ceda.ac.uk/neodc/esacci/land_cover/data/land_cover_maps/ -; -; Last access -; 20190110 -; -; Download and processing instructions -; Download the 3 NetCDF files for 2000, 2005 and 2010. -; Download the CCI-LC Tools from: -; http://maps.elie.ucl.ac.be/CCI/viewer/download/lc-user-tools-3.14.zip -; Unpack and run the CCI-LC Tools on each of the NetCDF files as follows: -; bash lc-user-tools-3.14/bin/aggregate-map.sh \ -; -PgridName=GEOGRAPHIC_LAT_LON -PnumMajorityClasses=1 \ -; -PoutputAccuracy=false -PoutputPFTClasses=true \ -; -PoutputLCCSClasses=false -PnumRows=360 -; Put the resulting processed data in input_dir_path. -; -; Caveat -; The CCI-LC Tools must be applied before running this script. -; The CCI-LC Tools require Java Version 7 or higher. -; The input data are available for a single year and are copied over to -; generate a time series over their time range of validity. -; -; Modification history -; 20190110-righi_mattia: rewritten in NCL for v2. -; 20160714-benjamin_mueller: written. 
-; -; ############################################################################# -loadscript(getenv("esmvaltool_root") + \ - "/data/formatters/interface.ncl") - -begin - - ; Script name (for logger) - DIAG_SCRIPT = "esacci_landcover.ncl" - - ; Source name - OBSNAME = "ESACCI-LANDCOVER" - - ; Tier - TIER = 2 - - ; Years - YEARS = (/2000, 2005, 2010/) - - ; Variable names - VAR = \ - (/"baresoilFrac", "cropFrac", "grassFrac", "shrubFrac", "treeFrac"/) - - ; Corresponding aggregation classes in the raw data - CLASSES = [/"Bare_Soil", \ - "Managed_Grass", \ - "Natural_Grass", \ - (/"Shrub_Broadleaf_Deciduous", \ - "Shrub_Broadleaf_Evergreen", \ - "Shrub_Needleleaf_Evergreen"/), \ - (/"Tree_Broadleaf_Deciduous", \ - "Tree_Broadleaf_Evergreen", \ - "Tree_Needleleaf_Deciduous", \ - "Tree_Needleleaf_Evergreen"/)/] - - ; MIPs - MIP = (/"Lmon", "Lmon", "Lmon", "Lmon", "Lmon"/) - - ; Frequency - FREQ = (/"mon", "mon", "mon", "mon", "mon"/) - - ; CMOR table - CMOR_TABLE = getenv("cmor_tables") + "/cmip5/Tables/CMIP5_Lmon" - - ; Type - TYPE = "sat" - - ; Version - VERSION = "L4-LCCS-Map-300m-P5Y-aggregated-0.500000Deg" - - ; Global attributes - SOURCE = "ftp://anon-ftp.ceda.ac.uk/neodc/esacci/land_cover/data/" - REF = "Defourny, P.: ESA Land Cover Climate Change Initiative " + \ - "(Land_Cover_cci): Global Land Cover Maps, Version 1.6.1. " + \ - "Centre for Environmental Data Analysis, " + \ - "http://catalogue.ceda.ac.uk/uuid/4761751d7c844e228ec2f5fe11b2e3b0, 2016." 
- COMMENT = "" - -end - -begin - - do yy = 0, dimsizes(YEARS) - 1 - - fname = \ - input_dir_path + "ESACCI-LC-" + VERSION + "-" + YEARS(yy) + "-v1.6.1.nc" - - f = addfile(fname, "r") - - ; Create time coordinate - YEAR1 = YEARS(yy) - 2 - YEAR2 = YEARS(yy) + 2 - time = create_timec(YEAR1, YEAR2) - - do vv = 0, dimsizes(VAR) - 1 - - log_info("Processing " + VAR(vv) + " (" + MIP(vv) + ")") - - ; Set classes to be added up - class = CLASSES[vv] - - ; Save mask before adding up classes - do cc = 0, dimsizes(class) - 1 - qq = f->$class(cc)$ - replace_ieeenan(qq, FILL, 0) - qq@_FillValue = FILL - tmp = ismissing(qq) - delete(qq) - if (cc.eq.0) then - lmask = tmp - else - lmask := lmask .and. tmp - end if - delete(tmp) - end do - - ; Add up classes - do cc = 0, dimsizes(class) - 1 - log_info(" adding class " + class(cc)) - tmp = f->$class(cc)$ - replace_ieeenan(tmp, FILL, 0) - tmp@_FillValue = FILL - tmp = where(ismissing(tmp), 0., tmp) - if (cc.eq.0) then - xx = tmp - else - xx = xx + tmp - end if - delete(tmp) - end do - delete(class) - - ; Reapply mask of missing values - xx = where(lmask, xx@_FillValue, xx) - - ; Define output array - output = \ - new((/dimsizes(time), dimsizes(xx&lat), dimsizes(xx&lon)/), float) - output!0 = "time" - output&time = time - output!1 = "lat" - output&lat = xx&lat - output!2 = "lon" - output&lon = xx&lon - output = conform(output, xx, (/1, 2/)) - delete(xx) - - ; Set standard fill value - output@_FillValue = FILL - - ; Convert units - output = output * 100 - output@units = "%" - - ; Format coordinates - output!0 = "time" - output!1 = "lat" - output!2 = "lon" - format_coords(output, YEAR1 + "0101", YEAR2 + "1231", FREQ(vv)) - - ; Set variable attributes - tmp = format_variable(output, VAR(vv), CMOR_TABLE) - delete(output) - output = tmp - delete(tmp) - - ; Calculate coordinate bounds - bounds = guess_coord_bounds(output, FREQ(vv)) - - ; Set global attributes - gAtt = set_global_atts(OBSNAME, TIER, SOURCE, REF, COMMENT) - - ; Output file - 
DATESTR = YEAR1 + "01-" + YEAR2 + "12" - fout = output_dir_path + \ - str_join((/"OBS", OBSNAME, TYPE, VERSION, \ - MIP(vv), VAR(vv), DATESTR/), "_") + ".nc" - - ; Special case for baresoilFrac: add auxiliary coordinate - if (VAR(vv).eq."baresoilFrac") then - output@coordinates = "type" - end if - - ; Write variable - write_nc(fout, VAR(vv), output, bounds, gAtt) - delete(gAtt) - delete(output) - delete(bounds) - - ; Special case for baresoilFrac: add auxiliary coordinate - if (VAR(vv).eq."baresoilFrac") then - type = tochar("bare_ground") - type!0 = "strlen" - type@long_name = "surface type" - type@standard_name = "area_type" - w = addfile(fout, "w") - w->type = type - delete(w) - end if - - end do - end do - -end diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esacci_landcover.py b/esmvaltool/cmorizers/data/formatters/datasets/esacci_landcover.py new file mode 100644 index 0000000000..d0e4d9d722 --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/esacci_landcover.py @@ -0,0 +1,190 @@ +"""ESMValTool CMORizer for ESACCI-LANDCOVER pft data. + +Tier + Tier 2: other freely-available dataset. + +Source + ftp://anon-ftp.ceda.ac.uk/neodc/esacci/land_cover/data/pft/ + +Last access + 20240626 + +Download and processing instructions + Download the data from: + pft/v2.0.8/ + Put all files under a single directory (no subdirectories with years). + in ${RAWOBS}/Tier2/ESACCI-LANDCOVER + +""" + +import os +import glob +import logging +from datetime import datetime +import iris +import numpy as np + +from esmvaltool.cmorizers.data.utilities import ( + fix_coords, + fix_var_metadata, + set_global_atts, + add_typebare, + save_variable, +) + +# Configure logging +logging.basicConfig(level=logging.INFO) +logger = logging.getLogger(__name__) + +# Enable the new split-attributes handling mode +iris.FUTURE.save_split_attrs = True + + +def average_block(data, block_size): + """Average the data within each block of size block_size. 
+ + Parameters + ---------- + data : numpy.ndarray + The input data array to be block averaged. + block_size : int + The size of the block used for averaging. The data is averaged + within non-overlapping blocks of this size along the spatial dimensions + (latitude and longitude). + + Returns + ------- + numpy.ndarray + The block-averaged data array. + """ + shape = data.shape + reshaped_data = data.reshape(shape[0], shape[1] // block_size, + block_size, shape[2] // block_size, + block_size) + averaged_data = reshaped_data.mean(axis=(2, 4)) + return averaged_data + + +def regrid_iris(cube): + """Regrid the cubes using block averaging. + + Parameters + ---------- + cube : iris.cube.Cube + The input data cube to be regridded. + + Returns + ------- + iris.cube.Cube + The regridded data cube. + + Notes + ----- + The block size is set to 100, which means the data will be averaged within + non-overlapping blocks of 100x100 grid cells along the spatial dimensions. + """ + logger.info("Regridding using block averaging") + + block_size = 100 # Number of grid cells to average in each block + + combined_data = average_block(cube.data, block_size) + + # Define target latitude and longitude ranges + target_lats = np.linspace(90 - 0.5 * (180 / combined_data.shape[1]), + -90 + 0.5 * (180 / combined_data.shape[1]), + combined_data.shape[1]) + target_lons = np.linspace(-180 + 0.5 * (360 / combined_data.shape[2]), + 180 - 0.5 * (360 / combined_data.shape[2]), + combined_data.shape[2]) + + combined_cube = iris.cube.Cube(combined_data, + dim_coords_and_dims=[ + (cube.coord('time'), 0), + (iris.coords.DimCoord( + target_lats, + standard_name='latitude', + units='degrees'), 1), + (iris.coords.DimCoord( + target_lons, + standard_name='longitude', + units='degrees'), 2)]) + + combined_cube.coord('latitude').guess_bounds() + combined_cube.coord('longitude').guess_bounds() + + return combined_cube + + +def regrid_fix(cube, glob_attrs, var_name, var_info): + """Regrid cube and fixes. 
+ + Regrids the cube, fixes metadata, coordinates and glob_attrs. + + Parameters + ---------- + cube: iris.cube.Cube + Data cube to be regridded. + + vals: dict + Variable long_name. + + glob_attrs: dict + Dictionary holding cube metadata attributes. + + var_name: str + Variable name. + + var_info: dict + Dictionary holding cube metadata attributes. + + Returns + ------- + cube: iris.cube.Cube + data cube regridded and with fixed coordinates. + """ + logger.info("Regridding cube for %s", var_name) + regridded_cube = regrid_iris(cube) + fix_var_metadata(regridded_cube, var_info) + regridded_cube = fix_coords(regridded_cube) + set_global_atts(regridded_cube, glob_attrs) + + return regridded_cube + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorize data.""" + glob_attrs = cfg['attributes'] + if not start_date: + start_date = datetime(1992, 1, 1) + if not end_date: + end_date = datetime(2020, 12, 31) + + for year in range(start_date.year, end_date.year + 1): + inpfile_pattern = os.path.join(in_dir, cfg['filename']) + year_inpfile_pattern = inpfile_pattern.format(year=year) + inpfiles = sorted(glob.glob(year_inpfile_pattern)) + for inpfile in inpfiles: + cubes = iris.load(inpfile) + for var_name, vals in cfg['variables'].items(): + var_info = cfg['cmor_table'].get_variable(vals['mip'], + var_name) + glob_attrs['mip'] = vals['mip'] + glob_attrs['frequency'] = vals['frequency'] + if var_name == 'shrubFrac': + cube = cubes.extract_cube('SHRUBS-BD') + \ + cubes.extract_cube('SHRUBS-BE') + \ + cubes.extract_cube('SHRUBS-ND') + \ + cubes.extract_cube('SHRUBS-NE') + elif var_name == 'treeFrac': + cube = cubes.extract_cube('TREES-BD') + \ + cubes.extract_cube('TREES-BE') + \ + cubes.extract_cube('TREES-ND') + \ + cubes.extract_cube('TREES-NE') + else: + cube = cubes.extract_cube(vals['long_name']) + regridded_cube = regrid_fix(cube, glob_attrs, + var_name, var_info) + if var_name == 'baresoilFrac': + add_typebare(regridded_cube) + 
save_variable(regridded_cube, var_name, out_dir, glob_attrs, + unlimited_dimensions=['time']) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esacci_oc.py b/esmvaltool/cmorizers/data/formatters/datasets/esacci_oc.py index 9ac8ac1a76..c267222c5c 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/esacci_oc.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/esacci_oc.py @@ -114,7 +114,7 @@ def extract_variable(var_info, raw_info, out_dir, attrs): if cube.var_name == rawvar: fix_var_metadata(cube, var_info) _fix_time(cube, var_info.frequency) - fix_coords(cube, overwrite_time_bounds=False) + cube = fix_coords(cube, overwrite_time_bounds=False) cube = _add_depth_coord(cube) _fix_data(cube, var) set_global_atts(cube, attrs) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esacci_sst.py b/esmvaltool/cmorizers/data/formatters/datasets/esacci_sst.py index 8e55296f9e..c009b96ffb 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/esacci_sst.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/esacci_sst.py @@ -62,7 +62,7 @@ def extract_variable(var_info, raw_info, attrs, year): # Fix cube fix_var_metadata(cube, var_info) convert_timeunits(cube, year) - fix_coords(cube) + cube = fix_coords(cube) set_global_atts(cube, attrs) return cube diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esacci_watervapour.py b/esmvaltool/cmorizers/data/formatters/datasets/esacci_watervapour.py index d4901007cc..d662f0c752 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/esacci_watervapour.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/esacci_watervapour.py @@ -53,7 +53,7 @@ def extract_variable(var_info, raw_info, attrs, year): # Fix cube fix_var_metadata(cube, var_info) convert_timeunits(cube, year) - fix_coords(cube, overwrite_time_bounds=False) + cube = fix_coords(cube, overwrite_time_bounds=False) set_global_atts(cube, attrs) # Remove dysfunctional ancillary data without sandard name for ancillary_variable in 
cube.ancillary_variables(): diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esdc.py b/esmvaltool/cmorizers/data/formatters/datasets/esdc.py index bf473f53be..529f497396 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/esdc.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/esdc.py @@ -47,7 +47,7 @@ def _fix_cube(var, cube, cfg): logger.info("Converting time units to gregorian") cube.coord('time').units = cf_units.Unit(old_unit.origin, calendar='gregorian') - utils.fix_coords(cube) + cube = utils.fix_coords(cube) cube.convert_units(cmor_info.units) if 'height2m' in cmor_info.dimensions: utils.add_height2m(cube) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/esrl.py b/esmvaltool/cmorizers/data/formatters/datasets/esrl.py index a0343e3417..ab9e0930e9 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/esrl.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/esrl.py @@ -187,7 +187,7 @@ def _extract_variable(short_name, var, cfg, out_dir, station_dic): # Fix metadata utils.convert_timeunits(cube, 1950) - utils.fix_coords(cube) + cube = utils.fix_coords(cube) cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) cube.convert_units(cmor_info.units) attrs = cfg['attributes'] diff --git a/esmvaltool/cmorizers/data/formatters/datasets/fluxcom.py b/esmvaltool/cmorizers/data/formatters/datasets/fluxcom.py index 93a41fffd4..3e25d8a894 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/fluxcom.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/fluxcom.py @@ -29,7 +29,6 @@ import re import iris -import numpy as np from esmvaltool.cmorizers.data import utilities as utils @@ -66,11 +65,8 @@ def _extract_variable(cmor_info, attrs, filepath, out_dir): cube.coord('lon').standard_name = 'longitude' utils.fix_var_metadata(cube, cmor_info) utils.convert_timeunits(cube, 1950) - utils.fix_coords(cube) + cube = utils.fix_coords(cube) utils.set_global_atts(cube, attrs) - utils.flip_dim_coord(cube, 'latitude') - coord = 
cube.coord('latitude') - coord.bounds = np.flip(coord.bounds, axis=1) logger.info("Saving file") utils.save_variable(cube, var, diff --git a/esmvaltool/cmorizers/data/formatters/datasets/ghcn_cams.py b/esmvaltool/cmorizers/data/formatters/datasets/ghcn_cams.py index 5b343eed18..2f3eff6bdd 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/ghcn_cams.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/ghcn_cams.py @@ -35,7 +35,7 @@ def _extract_variable(short_name, var, cfg, filepath, out_dir): utils.convert_timeunits(cube, 1950) # Fix coordinates - utils.fix_coords(cube) + cube = utils.fix_coords(cube) if 'height2m' in cmor_info.dimensions: utils.add_height2m(cube) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/gistemp.py b/esmvaltool/cmorizers/data/formatters/datasets/gistemp.py index 81beb56c91..01366a0c06 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/gistemp.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/gistemp.py @@ -33,7 +33,7 @@ def _extract_variable(short_name, var, cfg, filepath, out_dir): utils.convert_timeunits(cube, 1950) # Fix coordinates - utils.fix_coords(cube) + cube = utils.fix_coords(cube) if 'height2m' in cmor_info.dimensions: utils.add_height2m(cube) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/glodap.py b/esmvaltool/cmorizers/data/formatters/datasets/glodap.py index c96f0a1771..0323f8b800 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/glodap.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/glodap.py @@ -118,7 +118,7 @@ def extract_variable(in_files, out_dir, attrs, raw_info, cmor_table): bounds=[0., 12.]), 0) fix_var_metadata(cube, var_info) - fix_coords(cube) + cube = fix_coords(cube) _fix_data(cube, var) set_global_atts(cube, attrs) save_variable(cube, var, out_dir, attrs, unlimited_dimensions=['time']) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/hwsd.py b/esmvaltool/cmorizers/data/formatters/datasets/hwsd.py index 30e2a8975b..68c894f39b 100644 --- 
a/esmvaltool/cmorizers/data/formatters/datasets/hwsd.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/hwsd.py @@ -50,7 +50,7 @@ def _extract_variable(short_name, var, cfg, filepath, out_dir): long_name='time') cube.add_dim_coord(time_dim, 0) utils.convert_timeunits(cube, 1950) - utils.fix_coords(cube) + cube = utils.fix_coords(cube) # Fix units if 'kg C' in cube.units.origin: diff --git a/esmvaltool/cmorizers/data/formatters/datasets/jma_transcom.py b/esmvaltool/cmorizers/data/formatters/datasets/jma_transcom.py index 6ac33cb8b5..bd41512294 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/jma_transcom.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/jma_transcom.py @@ -66,7 +66,7 @@ def _extract_variable(cmor_info, attrs, in_dir, out_dir, ctl): # Fix metadata utils.fix_var_metadata(cube, cmor_info) utils.convert_timeunits(cube, 1950) - utils.fix_coords(cube) + cube = utils.fix_coords(cube) utils.set_global_atts(cube, attrs) utils.save_variable(cube, cmor_info.short_name, diff --git a/esmvaltool/cmorizers/data/formatters/datasets/jra_55.py b/esmvaltool/cmorizers/data/formatters/datasets/jra_55.py new file mode 100644 index 0000000000..16125d4c2f --- /dev/null +++ b/esmvaltool/cmorizers/data/formatters/datasets/jra_55.py @@ -0,0 +1,176 @@ +""" +ESMValTool CMORizer for JRA-55 data. + +Tier + Tier 2: other freely-available dataset. 
+ +Source + Research Data Archive (RDA): + https://rda.ucar.edu/datasets/ds628.1/ + +Last access + 20230322 + +Download and processing instructions + see download script cmorizers/data/downloaders/datasets/jra_55.py +""" + +import copy +import logging +import os +import xarray as xr + +from cf_units import Unit + +import iris + +from esmvaltool.cmorizers.data import utilities as utils + +logger = logging.getLogger(__name__) + + +def _load_jra55_grib(filenames, var): + """Load data from GRIB file and return list of cubes.""" + leveltype = var.get('typeOfLevel') + cubelist = [] + if leveltype is not None: + dataset = xr.open_mfdataset(filenames, engine="cfgrib", + filter_by_keys={'typeOfLevel': leveltype}) + else: + dataset = xr.open_mfdataset(filenames, engine="cfgrib") + varnames = list(dataset.data_vars) + for varname in varnames: + da_tmp = dataset[varname] + # conversion to Iris cubes requires a valid standard_name + da_tmp.attrs['standard_name'] = var['standard_name'] + cube = da_tmp.to_iris() + # remove auxiliary coordinate 'time' + cube.remove_coord('time') + # rename coordinate from 'forecast_reference_time' to 'time + timecoord = cube.dim_coords[0] + timecoord.rename("time") + # convert unit string to cf_unit object + # (calendar (calendar=coord.units.calendar) must be irgnored + # or conversion fails + timecoord.units = Unit(timecoord.units) + # add forecast period to time coordinate to get the actual time + # for which the data are valid + forecast = cube.coord('forecast_period') # forecast period in hours + timecoord.points = timecoord.points + forecast.points * 3600 + # remove unneeded scalar variables to prevent warnings + auxcoordnames = ['step', 'entireAtmosphere', 'number', 'isobaricLayer', + 'surface', 'nominalTop', 'heightAboveGround'] + for aux_coord in cube.coords(dim_coords=False): + if aux_coord.var_name in auxcoordnames: + cube.remove_coord(aux_coord) + cubelist.append(cube) + + return cubelist + + +def _extract_variable(short_name, var, 
in_files, cfg, out_dir): + """Extract variable.""" + # load data (returns a list of cubes) + cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) + var['standard_name'] = cmor_info.standard_name + cubes = _load_jra55_grib(in_files, var) + + # apply operators (if any) + if len(cubes) > 1: + if var.get('operator', '') == 'sum': + # Multiple variables case using sum operation + cube = None + for in_cube in cubes: + if cube is None: + cube = in_cube + else: + cube += in_cube + elif var.get('operator', '') == 'diff': + # two variables case using diff operation + if len(cubes) != 2: + errmsg = (f'operator diff selected for variable {short_name} ' + f'expects exactly two input variables and two input ' + f'files') + raise ValueError(errmsg) + cube = cubes[0] - cubes[1] + else: + oper = var.get('operator') + raise ValueError( + f'multiple input files found for variable {short_name} ' + f'with unknown operator {oper}') + else: + cube = cubes[0] + + # Fix metadata + attrs = copy.deepcopy(cfg['attributes']) + attrs['mip'] = var['mip'] + utils.fix_var_metadata(cube, cmor_info) + + if cube.var_name in ['hfls', 'hfss', 'rlus', 'rlut', 'rlutcs', 'rsus', + 'rsuscs', 'rsut', 'rsutcs']: + attrs['positive'] = 'up' + + if cube.var_name in ['rlds', 'rldscs', 'rsds', 'rsdscs', 'rsdt', 'rtmt', + 'tauu', 'tauv']: + attrs['positive'] = 'down' + + # fix longitudes and z-coordinate (if present) + for coord in cube.dim_coords: + coord_type = iris.util.guess_coord_axis(coord) + if coord_type == 'X': + # -> shift longitude coordinate by one grid box + # to match obs4mips/CREATE-IP grid + coord.points = coord.points + 360 / len(coord.points) + if coord_type == 'Z': + coord.standard_name = 'air_pressure' + coord.long_name = 'pressure' + coord.var_name = 'plev' + coord.attributes['positive'] = 'down' + if coord.units == "hPa": + coord.convert_units('Pa') + utils.flip_dim_coord(cube, coord.standard_name) + + utils.fix_dim_coordnames(cube) + utils.fix_coords(cube) + if 'height2m' in 
cmor_info.dimensions: + utils.add_height2m(cube) + utils.set_global_atts(cube, attrs) + + # Save variable + utils.save_variable(cube, + short_name, + out_dir, + attrs, + unlimited_dimensions=['time'], + local_keys=['positive']) + + +def cmorization(in_dir, out_dir, cfg, cfg_user, start_date, end_date): + """Cmorization func call.""" + # Run the cmorization + if start_date is None: + start_date = 1958 + else: + start_date = start_date.year + if end_date is None: + end_date = 2022 + else: + end_date = end_date.year + for (short_name, var) in cfg['variables'].items(): + short_name = var['short_name'] + filename = [] + for year in range(start_date, end_date + 1): + if 'file' in var: + filename.append(os.path.join(in_dir, + var['file'].format(year=year))) + elif 'files' in var: + for file in var['files']: + filename.append(os.path.join(in_dir, + file.format(year=year))) + else: + raise ValueError(f"No input file(s) specified for variable " + f"{short_name}.") + + logger.info("CMORizing variable '%s' from file '%s'", short_name, + filename) + _extract_variable(short_name, var, filename, cfg, out_dir) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/lai3g.py b/esmvaltool/cmorizers/data/formatters/datasets/lai3g.py index 218a22a0cf..1db260d13d 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/lai3g.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/lai3g.py @@ -92,9 +92,7 @@ def _extract_variable(cmor_info, attrs, in_dir, out_dir, cfg): final_cube = cubes.concatenate_cube() utils.fix_var_metadata(final_cube, cmor_info) utils.convert_timeunits(final_cube, 1950) - utils.fix_coords(final_cube) - if not cfg.get('regrid'): - utils.flip_dim_coord(final_cube, 'latitude') + final_cube = utils.fix_coords(final_cube) utils.set_global_atts(final_cube, attrs) utils.save_variable(final_cube, cmor_info.short_name, diff --git a/esmvaltool/cmorizers/data/formatters/datasets/landflux_eval.py b/esmvaltool/cmorizers/data/formatters/datasets/landflux_eval.py index 
f1e516a89d..f8b0a3ad7c 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/landflux_eval.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/landflux_eval.py @@ -38,7 +38,7 @@ def _extract_variable(raw_var, cmor_info, attrs, filepath, out_dir): _fix_time_coord(cube) utils.fix_var_metadata(cube, cmor_info) utils.convert_timeunits(cube, 1950) - utils.fix_coords(cube) + cube = utils.fix_coords(cube) utils.set_global_atts(cube, attrs) utils.save_variable(cube, var, diff --git a/esmvaltool/cmorizers/data/formatters/datasets/landschuetzer2016.py b/esmvaltool/cmorizers/data/formatters/datasets/landschuetzer2016.py index e7984bb23a..306c4f8f27 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/landschuetzer2016.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/landschuetzer2016.py @@ -74,7 +74,7 @@ def extract_variable(var_info, raw_info, out_dir, attrs): for cube in cubes: if cube.var_name == rawvar: fix_var_metadata(cube, var_info) - fix_coords(cube) + cube = fix_coords(cube) _fix_data(cube, var) set_global_atts(cube, attrs) save_variable( diff --git a/esmvaltool/cmorizers/data/formatters/datasets/landschuetzer2020.py b/esmvaltool/cmorizers/data/formatters/datasets/landschuetzer2020.py index d5739cb8f1..e8419b320b 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/landschuetzer2020.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/landschuetzer2020.py @@ -103,7 +103,7 @@ def _extract_variable(var_info, cmor_info, attrs, filepath, out_dir): # Fix coordinates _fix_climatological_time(cube) - utils.fix_coords( + cube = utils.fix_coords( cube, overwrite_lat_bounds=False, overwrite_lon_bounds=False, diff --git a/esmvaltool/cmorizers/data/formatters/datasets/merra.ncl b/esmvaltool/cmorizers/data/formatters/datasets/merra.ncl index b57bca6a09..d9fbf761df 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/merra.ncl +++ b/esmvaltool/cmorizers/data/formatters/datasets/merra.ncl @@ -14,7 +14,7 @@ ; Download and processing instructions ; 
(requires EarthData login; see https://urs.earthdata.nasa.gov/) ; Use ESMValTool automatic download: -; esmvaltool data download --config_file MERRA +; esmvaltool data download MERRA ; ; Modification history ; 20230818-lauer_axel: added output of clwvi (iwp + lwp) @@ -209,7 +209,7 @@ begin delete(tmp) - ; calcuation of outgoing fluxes: out = in - net + ; calculation of outgoing fluxes: out = in - net if ((VAR(vv) .eq. "rsut") .or. (VAR(vv) .eq. "rsutcs")) then tmp = f->SWTDN if (isatt(tmp, "scale_factor") .or. isatt(tmp, "add_offset")) then @@ -220,7 +220,8 @@ begin delete(tmp) end if - ; calcuation of total precipitation flux = large-scale+convective+anvil + ; calculation of total precipitation flux = + ; large-scale+convective+anvil if (VAR(vv) .eq. "pr") then tmp = f->PRECCON ; surface precipitation flux from convection if (isatt(tmp, "scale_factor") .or. isatt(tmp, "add_offset")) then diff --git a/esmvaltool/cmorizers/data/formatters/datasets/mls_aura.py b/esmvaltool/cmorizers/data/formatters/datasets/mls_aura.py index 1cde246026..0a5031b243 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/mls_aura.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/mls_aura.py @@ -14,7 +14,7 @@ Select "Data Access" -> "Subset/Get Data" -> "Get Data" and follow the "Instructions for downloading". All *.he5 files need to be saved in the $RAWOBS/Tier3/MLS-AURA directory, where $RAWOBS refers to the RAWOBS - directory defined in the user configuration file. Apply this procedure to + directory defined in the configuration. Apply this procedure to both links provided above. The temperature fields are necessary for quality control of the RHI data (see Data Quality Document for MLS-AURA for more information). 
@@ -312,7 +312,7 @@ def _save_cube(cube, cmor_info, attrs, out_dir): cube.coord('air_pressure').convert_units('Pa') utils.fix_var_metadata(cube, cmor_info) utils.convert_timeunits(cube, 1950) - utils.fix_coords(cube) + cube = utils.fix_coords(cube) utils.set_global_atts(cube, attrs) utils.save_variable(cube, cmor_info.short_name, diff --git a/esmvaltool/cmorizers/data/formatters/datasets/mobo_dic_mpim.py b/esmvaltool/cmorizers/data/formatters/datasets/mobo_dic_mpim.py index 9ae096104f..7b10ef0b5e 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/mobo_dic_mpim.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/mobo_dic_mpim.py @@ -171,7 +171,7 @@ def _extract_variable(var_info, cmor_info, attrs, filepath, out_dir): elif cube.coords('Julian Day'): # MOBO-DIC2004-2019 _fix_time(cube) cube.coord('depth').units = 'm' - utils.fix_coords(cube, overwrite_time_bounds=False) + cube = utils.fix_coords(cube, overwrite_time_bounds=False) # Fix global metadata utils.set_global_atts(cube, attrs) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/mte.py b/esmvaltool/cmorizers/data/formatters/datasets/mte.py index 78ee04636b..e82baab967 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/mte.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/mte.py @@ -57,9 +57,8 @@ def _extract_variable(raw_var, cmor_info, attrs, filepath, out_dir): _fix_time_coord(cube) utils.fix_var_metadata(cube, cmor_info) utils.convert_timeunits(cube, 1950) - utils.fix_coords(cube) + cube = utils.fix_coords(cube) utils.set_global_atts(cube, attrs) - utils.flip_dim_coord(cube, 'latitude') utils.save_variable(cube, var, out_dir, diff --git a/esmvaltool/cmorizers/data/formatters/datasets/ncep_ncar_r1.py b/esmvaltool/cmorizers/data/formatters/datasets/ncep_ncar_r1.py index 5e2829af07..c0f33286d5 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/ncep_ncar_r1.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/ncep_ncar_r1.py @@ -84,10 +84,7 @@ def _fix_units(cube, 
definition): def _fix_coordinates(cube, definition, cmor_info): - # fix flipped latitude - utils.flip_dim_coord(cube, 'latitude') - # fix other coordinates - utils.fix_coords(cube) + cube = utils.fix_coords(cube) if 'height2m' in cmor_info.dimensions: utils.add_height2m(cube) @@ -146,6 +143,9 @@ def _extract_variable(short_name, var, cfg, raw_filepath, out_dir): cube = _fix_coordinates(cube, definition, cmor_info) + if var.get("make_negative"): + cube.data = -1 * cube.data + utils.save_variable( cube, short_name, diff --git a/esmvaltool/cmorizers/data/formatters/datasets/ndp.py b/esmvaltool/cmorizers/data/formatters/datasets/ndp.py index 76d82cdf27..0e393a452b 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/ndp.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/ndp.py @@ -61,7 +61,7 @@ def _extract_variable(cmor_info, attrs, var_file, out_dir, cfg): cube.convert_units('kg m-2') utils.fix_var_metadata(cube, cmor_info) utils.convert_timeunits(cube, 1950) - utils.fix_coords(cube) + cube = utils.fix_coords(cube) utils.set_global_atts(cube, attrs) utils.save_variable(cube, cmor_info.short_name, diff --git a/esmvaltool/cmorizers/data/formatters/datasets/noaa_ersstv5.py b/esmvaltool/cmorizers/data/formatters/datasets/noaa_ersstv5.py index b9f6421e63..c01783724c 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/noaa_ersstv5.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/noaa_ersstv5.py @@ -69,7 +69,7 @@ def _extract_variable(raw_var, cmor_info, attrs, filepaths, out_dir): cube = iris.util.squeeze(cube) utils.fix_var_metadata(cube, cmor_info) - utils.fix_coords(cube) + cube = utils.fix_coords(cube) utils.set_global_atts(cube, attrs) utils.save_variable(cube, diff --git a/esmvaltool/cmorizers/data/formatters/datasets/oceansoda_ethz.py b/esmvaltool/cmorizers/data/formatters/datasets/oceansoda_ethz.py index a818af0424..2e8baf2c8f 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/oceansoda_ethz.py +++ 
b/esmvaltool/cmorizers/data/formatters/datasets/oceansoda_ethz.py @@ -42,12 +42,14 @@ def _fix_coords(cube, cmor_info): time_coord.points = time_coord.units.date2num(new_dates) cube.coord('lat').standard_name = 'latitude' cube.coord('lon').standard_name = 'longitude' - utils.fix_coords(cube) + cube = utils.fix_coords(cube) # Scalar coordinates if cmor_info.short_name in ('fgco2', 'spco2'): utils.add_scalar_depth_coord(cube) + return cube + def _fix_data(cube, var): """Fix data.""" @@ -109,7 +111,7 @@ def _extract_variable(var_info, cmor_info, attrs, filepath, out_dir): _fix_var_metadata(var_info, cmor_info, attrs, cube) # Fix coordinates - _fix_coords(cube, cmor_info) + cube = _fix_coords(cube, cmor_info) # Fix global metadata utils.set_global_atts(cube, attrs) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/persiann_cdr.py b/esmvaltool/cmorizers/data/formatters/datasets/persiann_cdr.py index 323422b9a5..1b72aaddb5 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/persiann_cdr.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/persiann_cdr.py @@ -113,7 +113,7 @@ def _extract_variable(short_name, var, cfg, input_files, out_dir): cube.units = 'kg m-2 s-1' # Fix coordinates - utils.fix_coords(cube) + cube = utils.fix_coords(cube) # Fix metadata attrs = cfg['attributes'] diff --git a/esmvaltool/cmorizers/data/formatters/datasets/phc.py b/esmvaltool/cmorizers/data/formatters/datasets/phc.py index a554ebff7c..84a924d48d 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/phc.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/phc.py @@ -101,7 +101,7 @@ def extract_variable(var_info, raw_info, out_dir, attrs): cube = _fix_data(xr_time, var) fix_var_metadata(cube, var_info) - fix_coords(cube) + cube = fix_coords(cube) set_global_atts(cube, attrs) print(out_dir) if var != "areacello": diff --git a/esmvaltool/cmorizers/data/formatters/datasets/regen.py b/esmvaltool/cmorizers/data/formatters/datasets/regen.py index a26971f8a8..f38424ae20 
100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/regen.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/regen.py @@ -44,7 +44,7 @@ def _extract_variable(short_name, var, cfg, file_path, out_dir): utils.convert_timeunits(cube, 1950) # Fix coordinates - utils.fix_coords(cube) + cube = utils.fix_coords(cube) # Fix metadata attrs = cfg['attributes'] @@ -72,7 +72,7 @@ def _extract_variable(short_name, var, cfg, file_path, out_dir): attrs['mip'] = 'Amon' # Fix coordinates - utils.fix_coords(cube) + cube = utils.fix_coords(cube) # Save variable utils.save_variable(cube, diff --git a/esmvaltool/cmorizers/data/formatters/datasets/scripps_co2_kum.py b/esmvaltool/cmorizers/data/formatters/datasets/scripps_co2_kum.py index bae7423e86..6a3ccf6ac0 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/scripps_co2_kum.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/scripps_co2_kum.py @@ -93,7 +93,7 @@ def _extract_variable(short_name, var, cfg, filepath, out_dir): # Fix metadata utils.convert_timeunits(cube, 1950) - utils.fix_coords(cube) + cube = utils.fix_coords(cube) cmor_info = cfg['cmor_table'].get_variable(var['mip'], short_name) cube.convert_units(cmor_info.units) attrs = cfg['attributes'] diff --git a/esmvaltool/cmorizers/data/formatters/datasets/wfde5.py b/esmvaltool/cmorizers/data/formatters/datasets/wfde5.py index b61a043f04..0cc467e161 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/wfde5.py +++ b/esmvaltool/cmorizers/data/formatters/datasets/wfde5.py @@ -72,7 +72,7 @@ def _extract_variable(var, cfg, filenames, out_dir): _fix_time_coord(cube, var) # Fix coordinates - utils.fix_coords(cube) + cube = utils.fix_coords(cube) if 'height2m' in cmor_info.dimensions: utils.add_height2m(cube) diff --git a/esmvaltool/cmorizers/data/formatters/datasets/woa.py b/esmvaltool/cmorizers/data/formatters/datasets/woa.py index cac388a0fd..35db6d810d 100644 --- a/esmvaltool/cmorizers/data/formatters/datasets/woa.py +++ 
b/esmvaltool/cmorizers/data/formatters/datasets/woa.py @@ -110,7 +110,7 @@ def extract_variable(in_files, out_dir, attrs, raw_info, cmor_table): calendar='gregorian') fix_var_metadata(cube, var_info) - fix_coords(cube) + cube = fix_coords(cube) _fix_data(cube, var, attrs['version']) set_global_atts(cube, attrs) save_variable(cube, var, out_dir, attrs, unlimited_dimensions=['time']) diff --git a/esmvaltool/cmorizers/data/utilities.py b/esmvaltool/cmorizers/data/utilities.py index 853ebd8526..ed8b9a9af9 100644 --- a/esmvaltool/cmorizers/data/utilities.py +++ b/esmvaltool/cmorizers/data/utilities.py @@ -94,6 +94,21 @@ def add_scalar_height_coord(cube: Cube, height: float = 2.0) -> None: cube.add_aux_coord(height_coord, ()) +def add_typebare(cube, value='bare_ground'): + """Add scalar coordinate 'typebare' with value of `value`.""" + logger.debug("Adding typebare coordinate (%s)", value) + typebare_coord = iris.coords.AuxCoord(value, + var_name='typebare', + standard_name='area_type', + long_name='surface type', + units=Unit('no unit')) + try: + cube.coord('area_type') + except iris.exceptions.CoordinateNotFoundError: + cube.add_aux_coord(typebare_coord, ()) + return cube + + @contextmanager def constant_metadata(cube): """Do cube math without modifying units, attributes etc. @@ -204,14 +219,7 @@ def fix_coords(cube, if cube_coord.var_name == 'lon': logger.info("Fixing longitude...") if cube_coord.ndim == 1: - if cube_coord.points[0] < 0. and \ - cube_coord.points[-1] < 181.: - cube_coord.points = \ - cube_coord.points + 180. - cube.attributes['geospatial_lon_min'] = 0. - cube.attributes['geospatial_lon_max'] = 360. 
- nlon = len(cube_coord.points) - roll_cube_data(cube, nlon // 2, -1) + cube = cube.intersection(longitude=(0.0, 360.0)) if overwrite_lon_bounds or not cube_coord.has_bounds(): fix_bounds(cube, cube_coord) @@ -220,6 +228,8 @@ def fix_coords(cube, logger.info("Fixing latitude...") if overwrite_lat_bounds or not cube.coord('latitude').has_bounds(): fix_bounds(cube, cube.coord('latitude')) + if cube_coord.core_points()[0] > cube_coord.core_points()[-1]: + cube = iris.util.reverse(cube, cube_coord) # fix depth if cube_coord.var_name == 'lev': @@ -326,7 +336,10 @@ def save_variable(cube, var, outdir, attrs, **kwargs): except iris.exceptions.CoordinateNotFoundError: time_suffix = None else: - if len(time.points) == 1 and "mon" not in cube.attributes.get('mip'): + if ( + len(time.points) == 1 and + "mon" not in cube.attributes.get('mip') + ) or cube.attributes.get("frequency") == "yr": year = str(time.cell(0).point.year) time_suffix = '-'.join([year + '01', year + '12']) else: diff --git a/esmvaltool/cmorizers/mip_convert/config-mipconv-user.yml b/esmvaltool/cmorizers/mip_convert/config-mipconv-user.yml deleted file mode 100644 index 93362f92d7..0000000000 --- a/esmvaltool/cmorizers/mip_convert/config-mipconv-user.yml +++ /dev/null @@ -1,22 +0,0 @@ -############################################################################### -# User's configuration file for the ESMValTool with mip_convert -# For further details see the README document; current sections are -# mandatory and should be populated with valid entries. -# Author: V. 
Predoi / UREAD / November 2018 -############################################################################### ---- -# root to directory where mip_convert rose suites will be run -# make this different than your usual /roses/ dir -ROSES_ROOT: "/home/users/$USER/roses_mipconv" -# root to directory where mip_convert rose suites will write output -ROSES_OUTPUT: "/home/users/$USER/roses_mipconv_output" -# map dataset name to relevant UM suite -DATASET_TO_SUITE: {"UKESM1-0-LL": "u-ar766a"} -# map variable standard name to stream definition -STREAM_MAP: {"ps": "ap4", "ta": "ap4", "va": "ap4", "ua": "ap5", "mrsos": "ap5", "toz":"apm"} -# root directory where PP data lives -# this directory is in Jasmin/Archer structure; this one here -# is an actual directory with data -INPUT_DIR: "/group_workspaces/jasmin4/ncas_cms/valeriu/MASS_DATA" -# map streams to realm components -STREAM_COMPONENTS: {"ap4": ["atmos-physics", "land"], "apm": ["atmos-physics"], "ap5": ["land"]} diff --git a/esmvaltool/cmorizers/mip_convert/esmvt_mipconv_setup.py b/esmvaltool/cmorizers/mip_convert/esmvt_mipconv_setup.py deleted file mode 100644 index 8868827d5d..0000000000 --- a/esmvaltool/cmorizers/mip_convert/esmvt_mipconv_setup.py +++ /dev/null @@ -1,527 +0,0 @@ -""" -Run the first communication between esmvaltool's recipe and mip_convert. - -Description: ------------- - -This script sets up the correct rose suite directories to run mip_convert -on different UM suite data. 
You can run this tool in three different ways: - - (with -m --mode option) setup-only: will set up the mip convert rose - directories only; it will use the -c configuration file for user options; - - (with -m --mode option) setup-run-suites: will set up the mip convert rose - suites and will go ahead and submit them to cylc via rose suite-run; - - (with -m --mode option) postproc: will symlink newly created netCDF data - into a directory per esmvaltool recipe; note that for now, there is no - DRS-like path set up in that directory; - -Usage: ------- --c --config-file: [REQUIRED] user specific configuration file; --r --recipe-file: [REQUIRED] single or multiple (space-sep) recipe files; --m --mode: [OPTIONAL] running mode (setup-only, setup-run-suites, - postproc), default=setup-only --l --log-level: [OPTIONAL] log level, default=info - -Environment ------------ -current JASMIN rose/cyclc need python2.7; esmvaltool needs python3.x -So it is impossible at the moment to run this script as executable from an -esmvaltool environment. Instead, you can run it as a stand-alone tool in a -python 2.7 environment, intwo stages: - -[set up mip_convert suites and run them] -python esmvt_mipconv_setup.py -c config.yml -r recipe.yml -m setup-run-suites -[check succesful completion of mip_convert suites] -[run the symlinking] -python esmvt_mipconv_setup.py -c config.yml -r recipe.yml -m postproc - -A practical example of running the tool can be found on JASMIN: -/home/users/valeriu/esmvaltool_mip_convert -There you will find the two component shells: run_conversion -and run_symlink, as well as an example how to set the configuration file. - -The suite used is now on MOSRS (as of 3 December 2018): u-bd681 -You can use the default location on Jasmin: -DEFAULT_SUITE_LOCATION = "/home/users/valeriu/roses/u-bd681" -alternatively this can be turned off, should you want to check out the suite -off MOSRS and use it locally. 
- -Contact: --------- -author: Valeriu Predoi (UREAD, valeriu.predoi@ncas.ac.uk) -""" -import argparse -import configparser -import datetime -import logging -import os -import shutil -import subprocess -import socket - -import yaml - -#################### -# global variables # -#################### - -# the tool uses a specially tailored mip_convert Rose suite -# locations of the suite depends on the host -host_name = socket.gethostname().split('.') -if len(host_name) > 1: - if host_name[1] == 'ceda': - # default location for mip_convert suite on JASMIN: - # previous suite: u-ak283_esmvt; new one u-bd681 - # DEFAULT_SUITE_LOCATION = "/home/users/valeriu/roses/u-ak283_esmvt" - DEFAULT_SUITE_LOCATION = "/home/users/valeriu/roses/u-bd681" - # note that you can fcm checkout it straight from the MOSRS - -# stream mapping; taken from hadsdk.streams -# these are used to set defaults if not overrides -STREAM_MAP = { - 'CMIP5': { - '3hr': 'apk', - '6hrPlev': 'apc', - '6hrlev': 'apg', - 'Amon': 'apm', - 'Lmon': 'apm', - 'LImon': 'apm', - 'Oday': 'opa', - 'Omon': 'opm', - 'Oyr': 'opy', - 'CF3hr': 'apk', - 'CFday': 'apa', - 'CFmon': 'apm', - 'CFsubhr': 'ape', - 'day': 'apa' - }, - 'CMIP6': { - '3hr': 'ap8', - '6hrLev': 'ap7', - '6hrPlev': 'ap7', - '6hrPlevPt': 'ap7', - 'AERday': 'ap6', - 'AERhr': 'ap9', - 'AERmon': 'ap4', - 'AERmonZ': 'ap4', - 'Amon': 'ap5', - 'CF3hr': 'ap8', - 'CFday': 'ap6', - 'CFmon': 'ap5', - 'E1hr': 'ap9', - 'E1hrClimMon': 'ap9', - 'E3hr': 'ap8', - 'E3hrPt': 'ap8', - 'E6hrZ': 'ap7', - 'Eday': 'ap6', - 'EdayZ': 'ap6', - 'Efx': 'ancil', - 'Emon': 'ap5', - 'EmonZ': 'ap5', - 'Esubhr': 'ap8', - 'Eyr': 'ap5', - 'LImon': 'ap5', - 'Lmon': 'ap5', - 'Oday': 'ond', - 'Ofx': 'ancil', - 'Omon': 'onm', - 'SIday': 'ind', - 'SImon': 'inm', - 'day': 'ap6', - 'fx': 'ancil', - 'prim1hrpt': 'ap9', - 'prim3hr': 'ap8', - 'prim3hrpt': 'ap8', - 'prim6hr': 'ap7', - 'prim6hrpt': 'ap7', - 'primDay': 'ap6', - 'primMon': 'ap5', - 'primSIday': 'ap6' - } -} - -# set up logging -logger 
= logging.getLogger(__name__) - -# print the header -HEADER = r""" -______________________________________________________________________ - - ESMValTool + mip_convert: linking mip_convert to ESMValTool -______________________________________________________________________ - -""" + __doc__ - - -def get_args(): - """Define the `esmvaltool` command line.""" - # parse command line args - parser = argparse.ArgumentParser( - description=HEADER, - formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument( - '-c', - '--config-file', - default=os.path.join(os.path.dirname(__file__), 'config-user.yml'), - help='Configuration file') - parser.add_argument( - '-r', - '--recipe-files', - type=str, - nargs='+', - help='Recipe files (list or single file)') - parser.add_argument( - '-m', - '--mode', - default='setup-only', - choices=['setup-only', 'setup-run-suites', 'postproc'], - help='How to run: setup: sets up mipconvert suites only;\n' + - 'or setup-run-suites: sets up suites and runs them as well;\n' + - 'or postproc: grab the output from mip_convert and use it.') - parser.add_argument( - '-l', - '--log-level', - default='info', - choices=['debug', 'info', 'warning', 'error']) - args = parser.parse_args() - return args - - -def _set_logger(logging, out_dir, log_file, log_level): - # set logging for screen and file output - root_logger = logging.getLogger() - out_fmt = "%(asctime)s %(levelname)-8s %(name)s,%(lineno)s\t%(message)s" - logging.basicConfig( - filename=os.path.join(out_dir, log_file), - filemode='a', - format=out_fmt, - datefmt='%H:%M:%S', - level=logging.DEBUG) - root_logger.setLevel(log_level.upper()) - logfmt = logging.Formatter(out_fmt) - console_handler = logging.StreamHandler() - console_handler.setFormatter(logfmt) - root_logger.addHandler(console_handler) - - -def read_yaml_file(yaml_file): - """Read recipe into a dictionary.""" - with open(yaml_file, 'r') as yfile: - loaded_file = yaml.safe_load(yfile) - return loaded_file - - -def 
map_var_to_stream(diagnostics, stream_map): - """Map variable standard name to stream string.""" - stream_list = [] - for _, diag in diagnostics.items(): - for var in diag['variables']: - stream = stream_map[var] - stream_list.append(stream) - stream_list = list(set(stream_list)) - return stream_list - - -def write_rose_conf(rose_config_template, recipe_file, config_file, log_level): - """Write the new rose conf file per suite.""" - # Build the ConfigParser object - config = configparser.ConfigParser() - config.optionxform = str - config.read(rose_config_template) - recipe_object = read_yaml_file(recipe_file) - conf_file = read_yaml_file(config_file) - datasets = recipe_object['datasets'] - - # check if dataset needs analysis - datasets_to_analyze = [] - for dataset in datasets: - if dataset['dataset'] not in conf_file['DATASET_TO_SUITE']: - logger.warning("Dataset %s has no mapping to suite", - dataset['dataset']) - logger.warning("Assuming data retrival from elsewhere.") - else: - datasets_to_analyze.append(dataset) - diagnostics = recipe_object['diagnostics'] - active_streams = map_var_to_stream(diagnostics, conf_file['STREAM_MAP']) - - # set stream overrides to None and set components - # also set CYCLING_FREQUENCIES to P1Y overall - stream_overrides = {} - stream_components = {} - cycling_frequencies = {} - for stream in active_streams: - stream_overrides[stream] = 'None' - stream_components[stream] = conf_file['STREAM_COMPONENTS'][stream] - cycling_frequencies[stream] = 'P1Y' - - # set the logger to start outputting - if not os.path.exists(conf_file['ROSES_OUTPUT']): - os.makedirs(conf_file['ROSES_OUTPUT']) - _set_logger(logging, conf_file['ROSES_OUTPUT'], 'rose_suites_setup.log', - log_level) - logger.info(HEADER) - - # store the rose suite locations - rose_suite_locations = [] - - # loop through datasets (different suites for different datasets) - for dataset in datasets_to_analyze: - - # set correct paths - rose_suite = os.path.join( - 
conf_file['ROSES_ROOT'], - conf_file['DATASET_TO_SUITE'][dataset['dataset']]) - rose_suite_locations.append(rose_suite) - rose_output = os.path.join( - conf_file['ROSES_OUTPUT'], - conf_file['DATASET_TO_SUITE'][dataset['dataset']]) - if os.path.exists(rose_suite): - shutil.rmtree(rose_suite) - if os.path.exists(DEFAULT_SUITE_LOCATION): - shutil.copytree(DEFAULT_SUITE_LOCATION, rose_suite) - else: - logger.error("Default Suite Location not found: %s", - DEFAULT_SUITE_LOCATION) - break - if not os.path.exists(rose_output): - os.makedirs(rose_output) - new_mipconv_config = os.path.join(rose_suite, 'mip_convert_config') - - # start logging - logger.info("Working on dataset: %s", dataset) - logger.info("Mapping dataset to suite: %s", rose_suite) - logger.info("Output and logs written to: %s", rose_output) - logger.info("Creating rose suite directories...") - logger.info("Use rose-suite.conf template %s", rose_config_template) - logger.info("Use user config file %s", config_file) - - # write the file - config.set('jinja2:suite.rc', 'INPUT_DIR', - '"' + conf_file['INPUT_DIR'] + '"') - config.set('jinja2:suite.rc', 'OUTPUT_DIR', '"' + rose_output + '"') - config.set('jinja2:suite.rc', 'CDDS_DIR', - '"' + DEFAULT_SUITE_LOCATION + '"') - config.set('jinja2:suite.rc', 'MIP_CONVERT_CONFIG_DIR', - '"' + new_mipconv_config + '"') - config.set('jinja2:suite.rc', 'ACTIVE_STREAMS', str(active_streams)) - config.set('jinja2:suite.rc', 'STREAM_TIME_OVERRIDES', - str(stream_overrides)) - config.set('jinja2:suite.rc', 'FIRST_YEAR', str(dataset['start_year'])) - config.set('jinja2:suite.rc', 'REF_YEAR', str(dataset['start_year'])) - config.set('jinja2:suite.rc', 'FINAL_YEAR', str(dataset['end_year'])) - config.set('jinja2:suite.rc', 'STREAM_COMPONENTS', - str(stream_components)) - config.set('jinja2:suite.rc', 'CYCLING_FREQUENCIES', - str(cycling_frequencies)) - config.set( - 'jinja2:suite.rc', 'TARGET_SUITE_NAME', - '"' + conf_file['DATASET_TO_SUITE'][dataset['dataset']] + '"') - with 
open(os.path.join(rose_suite, 'rose-suite.conf'), 'w') as r_c: - logger.info("Writing rose-suite.conf file %s", - os.path.join(rose_suite, 'rose-suite.conf')) - config.write(r_c) - - # now that we have to conf file set up we need to - # edit the mip_convert configuration file with the correct data - for key, values in conf_file['STREAM_COMPONENTS'].items(): - for comp in values: - mipconv_config = os.path.join(new_mipconv_config, - 'mip_convert.cfg.' + comp) - _edit_mip_convert_config(mipconv_config, conf_file, dataset, - key) - - return rose_suite_locations - - -def _edit_mip_convert_config(mipconv_config, conf_file, dataset, stream): - """Edit the mip_convert file for correct runs.""" - # set the correct variables - base_date = str(dataset['start_year']) + '-01-01-00-00-00' - suite_id = conf_file['DATASET_TO_SUITE'][dataset['dataset']] - cdds_dir = os.path.join(DEFAULT_SUITE_LOCATION, 'mip_convert_aux') - - # Build the ConfigParser object - config = configparser.ConfigParser() - config.optionxform = str - config.read(mipconv_config) - - # set the correct fields - config.set('COMMON', 'cdds_dir', cdds_dir) - config.set('request', 'base_date', base_date) - config.set('request', 'suite_id', suite_id) - stream_section = '_'.join(['stream', stream]) - # add the section if not there already - if not config.has_section(stream_section): - config.add_section(stream_section) - if 'mip' not in dataset: - # can work without any mip in dataset - # will not take it from diagnostic (will assemble - # all possible mappings instead) - logger.warning("No mip in the recipe dataset section.") - logger.warning("Assigning mapping from default dictionary.") - stream_map_default = STREAM_MAP[dataset['project']] - variables = [] - cmip_types = [] - for key, val in conf_file['STREAM_MAP'].items(): - for key_def, val_def in stream_map_default.items(): - if val == val_def: - cmip_types.append('_'.join([dataset['project'], key_def])) - variables.append(key) - str_variables = ' 
'.join(list(set([v for v in variables]))) - if variables: - for cmip_type in cmip_types: - config.set(stream_section, cmip_type, str_variables) - else: - cmip_type = '_'.join([dataset['project'], dataset['mip']]) - all_vars = conf_file['STREAM_MAP'].keys() - str_variables = ' '.join( - [v for v in all_vars if conf_file['STREAM_MAP'][v] == stream]) - config.set(stream_section, cmip_type, str_variables) - - # write to file - with open(mipconv_config, 'w') as r_c: - logger.info("Writing mip_convert config file %s", mipconv_config) - config.write(r_c) - - -def _put_in_env(env_script): - """Put new system vars in environment.""" - logger.info("Setting environment for suite submission...") - - # First make it executable. - chmod_command = ["chmod", "+x", env_script] - proc = subprocess.Popen(chmod_command, stdout=subprocess.PIPE) - proc.communicate() - logger.info("Script %s is now executable.", env_script) - - # set the environment - for line in open(env_script, 'r'): - if line.split("=")[0] == 'export PATH': - logger.info("Appending %s to path...", - line.split("=")[1].strip("\n")) - add_path = line.split("=")[1].strip("\n").strip(":$PATH") - os.environ["PATH"] += os.pathsep + add_path - elif line.split("=")[0] == 'export PYTHONPATH': - logger.info("Exporting %s as PYTHONPATH...", - line.split("=")[1].strip("\n")) - os.environ["PYTHONPATH"] = line.split("=")[1].strip("\n") - - # print and check - logger.info("New path: %s", str(os.environ["PATH"])) - logger.info("mip_convert PYTHONPATH: %s", str(os.environ["PYTHONPATH"])) - proc = subprocess.Popen(["which", "rose"], stdout=subprocess.PIPE) - out, err = proc.communicate() - logger.info("rose: %s %s", out, err) - proc = subprocess.Popen(["which", "mip_convert"], stdout=subprocess.PIPE) - out, err = proc.communicate() - logger.info("mip_convert: %s %s", out, err) - - -def _source_envs(suite): - """Source relevant environments.""" - # source the Met Office rose/cylc environment - # and the suite specific environment - 
suite_env = os.path.join(suite, 'env_setup_command_line.sh') # suite env - env_file_mo = os.path.join(suite, 'sourcepaths.sh') # metomi env - _put_in_env(suite_env) - _put_in_env(env_file_mo) - - -def _run_suite(suite): - """Run the mip_convert suite.""" - os.chdir(suite) - logger.info("Submitting suite from %s", suite) - proc = subprocess.Popen(["rose", "suite-run"], stdout=subprocess.PIPE) - out, err = proc.communicate() - logger.info("Rose communications: %s %s", str(out), str(err)) - - -def symlink_data(recipe_file, config_file, log_level): - """Grab the mip_converted output and manage it for ESMValTool.""" - # get configuration and recipe - recipe_object = read_yaml_file(recipe_file) - conf_file = read_yaml_file(config_file) - datasets = recipe_object['datasets'] - - # create directory that stores all the output netCDF files - now = datetime.datetime.utcnow().strftime("%Y%m%d_%H%M%S") - new_subdir = '_'.join((recipe_file.strip('.yml'), now)) - sym_output_dir = os.path.join(conf_file['ROSES_OUTPUT'], - 'mip_convert_symlinks', new_subdir) - if not os.path.exists(sym_output_dir): - os.makedirs(sym_output_dir) - - # set the logger to start outputting - _set_logger(logging, conf_file['ROSES_OUTPUT'], 'file_simlink.log', - log_level) - logger.info(HEADER) - - # loop through all datasets to symlink output - for dataset in datasets: - rose_output = os.path.join( - conf_file['ROSES_OUTPUT'], - conf_file['DATASET_TO_SUITE'][dataset['dataset']]) - logger.info("Working on dataset: %s", dataset) - logger.info("Output and logs written to: %s", rose_output) - - # create the dataset dir - dataset_output = os.path.join(sym_output_dir, dataset['dataset']) - if os.path.exists(dataset_output): - shutil.rmtree(dataset_output) - os.makedirs(dataset_output) - - # loop through files - for root, _, files in os.walk(rose_output): - for xfile in files: - real_file = os.path.join(root, xfile) - imag_file = os.path.join(dataset_output, xfile) - - # symlink it if nc file - if 
real_file.endswith('.nc') and \ - xfile.split('_')[2] == dataset['dataset']: - if not os.path.islink(imag_file): - logger.info("File to symlink: %s", real_file) - logger.info("Symlinked file: %s", imag_file) - os.symlink(real_file, imag_file) - else: - logger.info("Symlinked file exists...") - logger.info("Original file: %s", real_file) - logger.info("Symlinked file: %s", imag_file) - - -def main(): - """Run the the meat of the code.""" - logger.info("Running main function...") - args = get_args() - rose_config_template = os.path.join( - os.path.dirname(__file__), "rose-suite-template.conf") - - # make sure the file is retrieved nonetheless - if not os.path.isfile(rose_config_template): - logger.info("Fetching rose template config from suite %s", - DEFAULT_SUITE_LOCATION) - rose_config_template = os.path.join(DEFAULT_SUITE_LOCATION, - "rose-suite-template.conf") - - recipe_files = args.recipe_files - config_file = args.config_file - log_level = args.log_level - for recipe_file in recipe_files: - if args.mode == 'setup-only': - # set up the rose suites - write_rose_conf(rose_config_template, recipe_file, config_file, - log_level) - elif args.mode == 'setup-run-suites': - # setup roses - roses = write_rose_conf(rose_config_template, recipe_file, - config_file, log_level) - # set up the environment and submit - for rose in roses: - _source_envs(rose) - _run_suite(rose) - elif args.mode == 'postproc': - symlink_data(recipe_file, config_file, log_level) - - -if __name__ == '__main__': - main() diff --git a/esmvaltool/cmorizers/mip_convert/recipe_mip_convert.yml b/esmvaltool/cmorizers/mip_convert/recipe_mip_convert.yml deleted file mode 100644 index 8d5168a975..0000000000 --- a/esmvaltool/cmorizers/mip_convert/recipe_mip_convert.yml +++ /dev/null @@ -1,51 +0,0 @@ -#### summary -# Example of ESMValTool recipe that can be used with the mip_convert capability -# Data for this recipe exists in pp format on JASMIN, ready for mip_convert-ion -# The recipe is no different than 
any typical ESMValTool recipes, but can be used -# for a test run of mip_convert capability; see the README document and the included -# config-mipconv-user.yml configuration file. -# Author: V. Predoi (Uni Reading, valeriu.predoi@ncas.ac.uk) -# Date: first draft/November 2018 -########################################################################################################### ---- - -datasets: - - {dataset: UKESM1-0-LL, project: CMIP6, mip: Amon, exp: piControl-spinup, ensemble: r1i1p1f1_gn, start_year: 1850, end_year: 1860} - -preprocessors: - pp_rad: - regrid: - target_grid: 1x1 - scheme: linear - -diagnostics: - validation_mip_convert: - description: "Test with mip convert" - variables: - # mapping of standard_name to stream for CMIP6 - # see the associated config file for input - # "ps": "ap4", "ta": "ap4", "va": "ap4", "ua": "ap5", "mrsos": "ap5", "toz":"apm" - ps: - preprocessor: pp_rad - field: T2Ms - ta: - preprocessor: pp_rad - field: T2Ms - va: - preprocessor: pp_rad - field: T2Ms - ua: - preprocessor: pp_rad - field: T2Ms - toz: - preprocessor: pp_rad - field: T2Ms - scripts: - meridional_mean: - script: validation.py - title: "" - control_model: UKESM1-0-LL - exper_model: UKESM1-0-LL - analysis_type: meridional_mean - seasonal_analysis: True - diff --git a/esmvaltool/cmorizers/mip_convert/rose-suite-template.conf b/esmvaltool/cmorizers/mip_convert/rose-suite-template.conf deleted file mode 100644 index 5562333fed..0000000000 --- a/esmvaltool/cmorizers/mip_convert/rose-suite-template.conf +++ /dev/null @@ -1,20 +0,0 @@ -[jinja2:suite.rc] -ACTIVE_STREAMS = -CONCATENATE = "FALSE" -CYCLING_FREQUENCIES = -DUMMY_RUN = "FALSE" -FINAL_YEAR = -FIRST_YEAR = -REF_YEAR = -INPUT_DIR = -LOCATION = "LOTUS" -MEMORY = "70000" -MIP_CONVERT_CONFIG_DIR = -OUTPUT_DIR = -PARALLEL_TASKS = "20" -NTHREADS_CONCATENATE = "6" -CDDS_DIR = -STREAM_COMPONENTS = -STREAM_TIME_OVERRIDES = -TARGET_SUITE_NAME = -WALL_TIME = "6:00:00" diff --git a/esmvaltool/config-references.yml 
b/esmvaltool/config-references.yml index 199dc671e0..79a85c9866 100644 --- a/esmvaltool/config-references.yml +++ b/esmvaltool/config-references.yml @@ -336,6 +336,11 @@ authors: name: Lillis, Jon institute: MetOffice, UK orcid: + lindenlaub_lukas: + name: Lindenlaub, Lukas + institute: University of Bremen, Germany + orcid: https://orcid.org/0000-0001-6349-9118 + github: lukruh little_bill: name: Little, Bill institute: MetOffice, UK @@ -466,11 +471,6 @@ authors: rol_evert: name: Rol, Evert orcid: https://orcid.org/0000-0001-8357-4453 - ruhe_lukas: - name: Ruhe, Lukas - institute: University of Bremen, Germany - orcid: https://orcid.org/0000-0001-6349-9118 - github: lukruh russell_joellen: name: Russell, Joellen institute: Univ. of Arizona, USA diff --git a/esmvaltool/diag_scripts/climate_metrics/feedback_parameters.py b/esmvaltool/diag_scripts/climate_metrics/feedback_parameters.py index db350982a2..d6bd28b0fb 100644 --- a/esmvaltool/diag_scripts/climate_metrics/feedback_parameters.py +++ b/esmvaltool/diag_scripts/climate_metrics/feedback_parameters.py @@ -365,7 +365,7 @@ def _create_regression_plot(tas_cube, y_reg = reg.slope * x_reg + reg.intercept # Plot data - title = (f'{FEEDBACK_PARAMETERS.get(var,var)} TOA radiance for ' + title = (f'{FEEDBACK_PARAMETERS.get(var, var)} TOA radiance for ' f'{dataset_name}') filename = f'{var}_regression_{dataset_name}' if description is not None: diff --git a/esmvaltool/diag_scripts/kcs/local_resampling.py b/esmvaltool/diag_scripts/kcs/local_resampling.py index 9eb2ea28ed..0bf6260d65 100644 --- a/esmvaltool/diag_scripts/kcs/local_resampling.py +++ b/esmvaltool/diag_scripts/kcs/local_resampling.py @@ -292,7 +292,7 @@ def select_final_subset(cfg, subsets, prov=None): Final set of eight samples should have with minimal reuse of the same ensemble member for the same period. 
From 10.000 randomly - selected sets of 8 samples, count and penalize re-used segments (1 + selected sets of 8 samples, count and penalize reused segments (1 for 3*reuse, 5 for 4*reuse). Choose the set with the lowest penalty. """ n_samples = cfg['n_samples'] @@ -387,7 +387,7 @@ def _get_climatology(cfg, scenario_name, table, prov=None): resampled_control = _recombine(segments_control, table['control']) resampled_future = _recombine(segments_future, table['future']) - # Store the resampled contol climates + # Store the resampled control climates filename = get_diagnostic_filename(f'resampled_control_{scenario_name}', cfg, extension='nc') diff --git a/esmvaltool/diag_scripts/monitor/compute_eofs.py b/esmvaltool/diag_scripts/monitor/compute_eofs.py index dea5d63b9a..a07ca835c0 100644 --- a/esmvaltool/diag_scripts/monitor/compute_eofs.py +++ b/esmvaltool/diag_scripts/monitor/compute_eofs.py @@ -24,10 +24,10 @@ Path to the folder to store figures. Defaults to ``{plot_dir}/../../{dataset}/{exp}/{modeling_realm}/{real_name}``. All tags (i.e., the entries in curly brackets, e.g., ``{dataset}``, are - replaced with the corresponding tags). ``{plot_dir}`` is replaced with the + replaced with the corresponding tags). ``{plot_dir}`` is replaced with the default ESMValTool plot directory (i.e., ``output_dir/plots/diagnostic_name/script_name/``, see - :ref:`esmvalcore:user configuration file`). + :ref:`esmvalcore:outputdata`). rasterize_maps: bool, optional (default: True) If ``True``, use `rasterization `_ for diff --git a/esmvaltool/diag_scripts/monitor/monitor.py b/esmvaltool/diag_scripts/monitor/monitor.py index 59e37b9842..dda5aa4f3d 100644 --- a/esmvaltool/diag_scripts/monitor/monitor.py +++ b/esmvaltool/diag_scripts/monitor/monitor.py @@ -52,10 +52,10 @@ Path to the folder to store figures. Defaults to ``{plot_dir}/../../{dataset}/{exp}/{modeling_realm}/{real_name}``. 
All tags (i.e., the entries in curly brackets, e.g., ``{dataset}``, are - replaced with the corresponding tags). ``{plot_dir}`` is replaced with the + replaced with the corresponding tags). ``{plot_dir}`` is replaced with the default ESMValTool plot directory (i.e., ``output_dir/plots/diagnostic_name/script_name/``, see - :ref:`esmvalcore:user configuration file`). + :ref:`esmvalcore:outputdata`). rasterize_maps: bool, optional (default: True) If ``True``, use `rasterization `_ for diff --git a/esmvaltool/diag_scripts/monitor/multi_datasets.py b/esmvaltool/diag_scripts/monitor/multi_datasets.py index 879346954c..41f238a64e 100644 --- a/esmvaltool/diag_scripts/monitor/multi_datasets.py +++ b/esmvaltool/diag_scripts/monitor/multi_datasets.py @@ -100,10 +100,10 @@ Path to the folder to store figures. Defaults to ``{plot_dir}/../../{dataset}/{exp}/{modeling_realm}/{real_name}``. All tags (i.e., the entries in curly brackets, e.g., ``{dataset}``, are - replaced with the corresponding tags). ``{plot_dir}`` is replaced with the + replaced with the corresponding tags). ``{plot_dir}`` is replaced with the default ESMValTool plot directory (i.e., ``output_dir/plots/diagnostic_name/script_name/``, see - :ref:`esmvalcore:user configuration file`). + :ref:`esmvalcore:outputdata`). savefig_kwargs: dict, optional Optional keyword arguments for :func:`matplotlib.pyplot.savefig`. By default, uses ``bbox_inches: tight, dpi: 300, orientation: landscape``. 
@@ -608,6 +608,7 @@ from pprint import pformat import cartopy.crs as ccrs +import dask.array as da import iris import matplotlib as mpl import matplotlib.dates as mdates @@ -1176,7 +1177,17 @@ def _plot_map_with_ref(self, plot_func, dataset, ref_dataset): axes_data = fig.add_subplot(gridspec[0:2, 0:2], projection=projection) plot_kwargs['axes'] = axes_data - plot_data = plot_func(cube, **plot_kwargs) + if plot_func is iris.plot.contourf: + # see https://github.com/SciTools/cartopy/issues/2457 + # and https://github.com/SciTools/cartopy/issues/2468 + plot_kwargs['transform_first'] = True + npx = da if cube.has_lazy_data() else np + cube_to_plot = cube.copy( + npx.ma.filled(cube.core_data(), np.nan) + ) + else: + cube_to_plot = cube + plot_data = plot_func(cube_to_plot, **plot_kwargs) axes_data.coastlines() if gridline_kwargs is not False: axes_data.gridlines(**gridline_kwargs) @@ -1193,7 +1204,17 @@ def _plot_map_with_ref(self, plot_func, dataset, ref_dataset): if self.plots[plot_type]['common_cbar']: plot_kwargs.setdefault('vmin', plot_data.get_clim()[0]) plot_kwargs.setdefault('vmax', plot_data.get_clim()[1]) - plot_ref = plot_func(ref_cube, **plot_kwargs) + if plot_func is iris.plot.contourf: + # see https://github.com/SciTools/cartopy/issues/2457 + # and https://github.com/SciTools/cartopy/issues/2468 + plot_kwargs['transform_first'] = True + npx = da if ref_cube.has_lazy_data() else np + ref_cube_to_plot = ref_cube.copy( + npx.ma.filled(ref_cube.core_data(), np.nan) + ) + else: + ref_cube_to_plot = ref_cube + plot_ref = plot_func(ref_cube_to_plot, **plot_kwargs) axes_ref.coastlines() if gridline_kwargs is not False: axes_ref.gridlines(**gridline_kwargs) @@ -1212,7 +1233,17 @@ def _plot_map_with_ref(self, plot_func, dataset, ref_dataset): plot_kwargs_bias = self._get_plot_kwargs(plot_type, dataset, bias=True) plot_kwargs_bias['axes'] = axes_bias - plot_bias = plot_func(bias_cube, **plot_kwargs_bias) + if plot_func is iris.plot.contourf: + # see 
https://github.com/SciTools/cartopy/issues/2457 + # and https://github.com/SciTools/cartopy/issues/2468 + plot_kwargs_bias['transform_first'] = True + npx = da if bias_cube.has_lazy_data() else np + bias_cube_to_plot = bias_cube.copy( + npx.ma.filled(bias_cube.core_data(), np.nan) + ) + else: + bias_cube_to_plot = bias_cube + plot_bias = plot_func(bias_cube_to_plot, **plot_kwargs_bias) axes_bias.coastlines() if gridline_kwargs is not False: axes_bias.gridlines(**gridline_kwargs) @@ -1268,7 +1299,17 @@ def _plot_map_without_ref(self, plot_func, dataset): axes = fig.add_subplot(projection=self._get_map_projection()) plot_kwargs = self._get_plot_kwargs(plot_type, dataset) plot_kwargs['axes'] = axes - plot_map = plot_func(cube, **plot_kwargs) + if plot_func is iris.plot.contourf: + # see https://github.com/SciTools/cartopy/issues/2457 + # and https://github.com/SciTools/cartopy/issues/2468 + plot_kwargs['transform_first'] = True + npx = da if cube.has_lazy_data() else np + cube_to_plot = cube.copy( + npx.ma.filled(cube.core_data(), np.nan) + ) + else: + cube_to_plot = cube + plot_map = plot_func(cube_to_plot, **plot_kwargs) axes.coastlines() gridline_kwargs = self._get_gridline_kwargs(plot_type) if gridline_kwargs is not False: @@ -2535,7 +2576,11 @@ def create_hovmoeller_time_vs_lat_or_lon_plot(self, datasets): # Provenance tracking provenance_record = { 'ancestors': ancestors, - 'authors': ['schlund_manuel', 'kraft_jeremy', 'ruhe_lukas'], + 'authors': [ + 'schlund_manuel', + 'kraft_jeremy', + 'lindenlaub_lukas' + ], 'caption': caption, 'plot_types': ['zonal'], 'long_names': [dataset['long_name']], diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6a.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6a.ncl index bd672ed3cf..0f1b49c224 100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6a.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6a.ncl @@ -151,10 +151,8 @@ begin fx_variable = "volcello" error_msg("f", 
"russell18jgr-fig6.ncl", " ", "volcello file for " \ + vo_datasets(iii) \ - + " not found in the metadata file, please add "\ - + "'fx_files: [volcello]' to the variable dictionary in the " \ - + "recipe or add the location of file to input directory " \ - + "in config-user.yml ") + + " not found in the metadata file, please specify " \ + + "'volcello' as supplementary variable in the recipe.") end if dataset_so_time = read_data(so_items[iii]) diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6b.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6b.ncl index 6b019625f0..71323f411d 100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6b.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig6b.ncl @@ -45,10 +45,10 @@ ; ; Caveats ; -; - MIROC-ESM and BNU-ESM doesnot work as depth variable is not called lev. -; - MRI_ESM1 doesnot work as the data is ofset by 80 degrees in longitude +; - MIROC-ESM and BNU-ESM does not work as depth variable is not called lev. +; - MRI_ESM1 does not work as the data is offset by 80 degrees in longitude ; and causes problem in interpolation. -; - CCSM4 ans CESM1-CAM5 dont work as the units for so is 1, not accepted +; - CCSM4 and CESM1-CAM5 dont work as the units for so is 1, not accepted ; by ESMValTool. ; - Transport is very small in case of NorESM1-M and ME as volcello ; values look incorrect(very small). 
@@ -153,11 +153,10 @@ begin if (all(ismissing(fx_var))) then fx_variable = "volcello" - error_msg("f", "russell_fig-7i.ncl", " ", "areacello file for " + \ + error_msg("f", "russell_fig-7i.ncl", " ", "volcello file for " + \ vo_datasets(iii) \ - + " not found in the metadata file, please " + \ - "add 'fx_files: [volcello]' to the variable dictionary in" + \ - " the recipe or add the location of file to config-user.yml") + + " not found in the metadata file, please specify " \ + + "'volcello' as supplementary variable in the recipe.") end if dataset_so_time = read_data(so_items[iii]) diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7i.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7i.ncl index 86ce4bee70..cf14857a7b 100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7i.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig7i.ncl @@ -156,9 +156,8 @@ begin fx_variable = "areacello" error_msg("f", "russell_fig-7i.ncl", " ", "areacello file for " + \ datasetnames(iii) + " not found in the metadata file," + \ - " please add 'fx_files: [areacello]' to the variable " + \ - "dictionary in the recipe or add the location of " + \ - " file to config-user.yml") + + " not found in the metadata file, please specify " \ + + "'areacello' as supplementary variable in the recipe.") end if areacello_2d = fx_var delete(fx_var) @@ -212,9 +211,9 @@ begin "lgPerimOn" : False ; no perimeter "lgItemCount" : dimsizes(annots) ; how many "lgLineLabelStrings" : annots ; labels - "lgLabelsOn" : False ; no default lables + "lgLabelsOn" : False ; no default labsels "lgLineLabelFontHeightF" : 0.0085 ; font height - "lgDashIndexes" : dashes ; line paterns + "lgDashIndexes" : dashes ; line patterns "lgLineColors" : colors "lgMonoLineLabelFontColor" : True ; one label color end create diff --git a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9c.ncl b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9c.ncl index 2fe0cc3e4a..017b70103a 
100644 --- a/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9c.ncl +++ b/esmvaltool/diag_scripts/russell18jgr/russell18jgr-fig9c.ncl @@ -227,9 +227,8 @@ begin if (all(ismissing(fx_var))) then error_msg("f", "russell18jgr-fig9c.ncl", " ", "areacello file for " + \ datasetnames(iii) + " not found in the metadata file, " + \ - "please add 'fx_files: [areacello]' to the variable " + \ - "dictionary in the recipe or add the location of " + \ - " file to config-user.yml ") + + " not found in the metadata file, please specify " \ + + "'areacello' as supplementary variable in the recipe.") end if areacello_2d = fx_var @@ -304,9 +303,9 @@ begin "lgPerimOn" : False ; no perimeter "lgItemCount" : dimsizes(annots) ; how many "lgLabelStrings" : annots ; labels - "lgLabelsOn" : True ; no default lables + "lgLabelsOn" : True ; no default labels "lgLabelFontHeightF" : 0.001 ; font height - "lgItemType" : "markers" ; line paterns + "lgItemType" : "markers" ; line patterns "lgMarkerColors" : colors "lgMarkerIndexes" : markers ; one label color end create diff --git a/esmvaltool/diag_scripts/shared/_supermeans.py b/esmvaltool/diag_scripts/shared/_supermeans.py index 7099ba4725..8543ca99cf 100644 --- a/esmvaltool/diag_scripts/shared/_supermeans.py +++ b/esmvaltool/diag_scripts/shared/_supermeans.py @@ -13,7 +13,6 @@ import cf_units import iris import iris.coord_categorisation -from iris.coord_categorisation import _pt_date import numpy as np @@ -206,6 +205,28 @@ def add_start_hour(cube, coord, name='diurnal_sampling_hour'): _add_categorised_coord(cube, name, coord, start_hour_from_bounds) +# lifted from iris==3.10 last iris to have it in iris.coord_categorisation +# Private "helper" function +def _pt_date(coord, time): + """Return the datetime of a time-coordinate point. + + Parameters + ---------- + coord : Coord + Coordinate (must be Time-type). + time : float + Value of a coordinate point. 
+ + Returns + ------- + cftime.datetime + + """ + # NOTE: All of the currently defined categorisation functions are + # calendar operations on Time coordinates. + return coord.units.num2date(time, only_use_cftime_datetimes=True) + + def start_hour_from_bounds(coord, _, bounds): """Add hour from bounds.""" return np.array([_pt_date(coord, _bounds[0]).hour for _bounds in bounds]) diff --git a/esmvaltool/diag_scripts/shared/plot/_plot.py b/esmvaltool/diag_scripts/shared/plot/_plot.py index d7db4e1b14..092479a999 100644 --- a/esmvaltool/diag_scripts/shared/plot/_plot.py +++ b/esmvaltool/diag_scripts/shared/plot/_plot.py @@ -4,6 +4,7 @@ from copy import deepcopy import cartopy.crs as ccrs +import dask.array as da import iris.quickplot import matplotlib.colors as colors import matplotlib.pyplot as plt @@ -230,7 +231,15 @@ def global_contourf(cube, kwargs['levels'] = levels axes = plt.axes(projection=ccrs.Robinson(central_longitude=10)) plt.sca(axes) - map_plot = iris.plot.contourf(cube, **kwargs) + + # see https://github.com/SciTools/cartopy/issues/2457 + # and https://github.com/SciTools/cartopy/issues/2468 + kwargs['transform_first'] = True + npx = da if cube.has_lazy_data() else np + map_plot = iris.plot.contourf( + cube.copy(npx.ma.filled(cube.core_data(), np.nan)), + **kwargs, + ) # Appearance axes.gridlines(color='lightgrey', alpha=0.5) diff --git a/esmvaltool/interface_scripts/logging.ncl b/esmvaltool/interface_scripts/logging.ncl index 6333479f96..35c3167341 100644 --- a/esmvaltool/interface_scripts/logging.ncl +++ b/esmvaltool/interface_scripts/logging.ncl @@ -61,9 +61,9 @@ procedure log_debug(output_string[*]:string) ; output_string: the text to be output as message on screen ; ; Description -; Write a debug message to the log file (only if log_level = debug in -; config-user.yml). If the input is an array, each element will be -; written on different lines. +; Write a debug message to the log file (only if log_level = debug in the +; configuration). 
If the input is an array, each element will be written on +; different lines. ; ; Caveats ; diff --git a/esmvaltool/recipes/examples/recipe_check_obs.yml b/esmvaltool/recipes/examples/recipe_check_obs.yml index b3cca9e028..880aef831a 100644 --- a/esmvaltool/recipes/examples/recipe_check_obs.yml +++ b/esmvaltool/recipes/examples/recipe_check_obs.yml @@ -61,6 +61,16 @@ diagnostics: scripts: null + CMAP: + description: CMAP check + variables: + pr: + additional_datasets: + - {project: OBS6, dataset: CMAP, mip: Amon, tier: 2, + type: reanaly, version: v1} + scripts: null + + CRU: description: CRU check variables: @@ -235,8 +245,8 @@ diagnostics: treeFrac: additional_datasets: - {dataset: ESACCI-LANDCOVER, project: OBS, mip: Lmon, tier: 2, - type: sat, version: L4-LCCS-Map-300m-P5Y-aggregated-0.500000Deg, - start_year: 1998, end_year: 2012} + type: sat, version: v2.0.8, frequency: yr, + start_year: 1992, end_year: 2020} scripts: null ESACCI-LST: @@ -548,6 +558,30 @@ diagnostics: type: reanaly, version: 1, start_year: 1979, end_year: 2007} scripts: null + JRA-55: + description: JRA-55 check + variables: + cli: + clivi: + clw: + clwvi: + clt: + prw: + rlus: + rlut: + rlutcs: + rsus: + rsuscs: + rsut: + rsutcs: + ta: + tas: + wap: + additional_datasets: + - {dataset: JRA-55, project: OBS6, mip: Amon, tier: 2, + type: reanaly, version: 1, start_year: 1958, end_year: 2022} + scripts: null + Kadow2020: description: Kadow2020 check variables: @@ -675,6 +709,10 @@ diagnostics: prw: ta: wap: + pr: + tauu: + tauv: + tos: additional_datasets: - {dataset: NCEP-DOE-R2, project: OBS6, mip: Amon, tier: 2, type: reanaly, version: 2, start_year: 1979, end_year: 2022} @@ -690,6 +728,9 @@ diagnostics: prw: rlut: rsut: + pr: + tauu: + tauv: additional_datasets: - {dataset: NOAA-CIRES-20CR-V2, project: OBS6, mip: Amon, tier: 2, type: reanaly, version: v2, start_year: 1871, end_year: 2012} diff --git a/esmvaltool/recipes/examples/recipe_extract_shape.yml 
b/esmvaltool/recipes/examples/recipe_extract_shape.yml index 79f04371b5..08d1bab490 100644 --- a/esmvaltool/recipes/examples/recipe_extract_shape.yml +++ b/esmvaltool/recipes/examples/recipe_extract_shape.yml @@ -7,7 +7,7 @@ documentation: The example shapefile(s) can be copied from esmvaltool/diag_scripts/shapeselect/testdata/Elbe.* and - placed in the auxiliary_data_dir defined in config-user.yml. + placed in the auxiliary_data_dir defined in the configuration. title: Example recipe extracting precipitation in the Elbe catchment. diff --git a/esmvaltool/recipes/hydrology/recipe_hydro_forcing.yml b/esmvaltool/recipes/hydrology/recipe_hydro_forcing.yml index f68a597733..925d9bd420 100644 --- a/esmvaltool/recipes/hydrology/recipe_hydro_forcing.yml +++ b/esmvaltool/recipes/hydrology/recipe_hydro_forcing.yml @@ -9,7 +9,7 @@ documentation: used to: 1. Plot a timeseries of the raw daily data - 2. Plot monthly aggregrated data over a certain period + 2. Plot monthly aggregated data over a certain period 3. 
Plot the monthly climate statistics over a certain period authors: @@ -33,7 +33,7 @@ datasets: preprocessors: daily: extract_shape: &extract_shape - # In aux (config-user.yml) + # Relative to auxiliary_data_dir defined in configuration shapefile: Lorentz_Basin_Shapefiles/Meuse/Meuse.shp method: contains crop: true diff --git a/esmvaltool/recipes/hydrology/recipe_lisflood.yml b/esmvaltool/recipes/hydrology/recipe_lisflood.yml index ffecbc37be..3acb4be481 100644 --- a/esmvaltool/recipes/hydrology/recipe_lisflood.yml +++ b/esmvaltool/recipes/hydrology/recipe_lisflood.yml @@ -37,7 +37,8 @@ preprocessors: scheme: linear extract_shape: # Perhaps a single shapefile needs to be created covering multiple basins - shapefile: Lorentz_Basin_Shapefiles/Meuse/Meuse.shp # (config-user, aux) + # Relative to auxiliary_data_dir defined in configuration + shapefile: Lorentz_Basin_Shapefiles/Meuse/Meuse.shp method: contains crop: true # set to false to keep the entire globe (memory intensive!) daily_water: diff --git a/esmvaltool/recipes/hydrology/recipe_marrmot.yml b/esmvaltool/recipes/hydrology/recipe_marrmot.yml index dd6eef0a49..e85a66d9b9 100644 --- a/esmvaltool/recipes/hydrology/recipe_marrmot.yml +++ b/esmvaltool/recipes/hydrology/recipe_marrmot.yml @@ -28,7 +28,8 @@ preprocessors: daily: &daily extract_shape: # Lumped model: needs catchment-aggregated input data - shapefile: Meuse/Meuse.shp # In aux (config-user.yml) + # Relative to auxiliary_data_dir defined in configuration + shapefile: Meuse/Meuse.shp method: contains crop: true diff --git a/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_42_a.yml b/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_42_a.yml index 20b0402a23..55c53147ec 100644 --- a/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_42_a.yml +++ b/esmvaltool/recipes/ipccwg1ar6ch3/recipe_ipccwg1ar6ch3_fig_3_42_a.yml @@ -10,7 +10,7 @@ documentation: Contribution to the Sixth Assessment Report: Chapter 3 Processing of CMIP3 models 
currently works only in serial mode, due to an issue in the input data still under investigation. To run the recipe - set: max_parallel_tasks: 1 in the config-user.yml file. + set the configuration option ``max_parallel_tasks: 1``. authors: - bock_lisa diff --git a/esmvaltool/recipes/monitor/recipe_monitor_with_refs.yml b/esmvaltool/recipes/monitor/recipe_monitor_with_refs.yml index 48c5153287..4277313428 100644 --- a/esmvaltool/recipes/monitor/recipe_monitor_with_refs.yml +++ b/esmvaltool/recipes/monitor/recipe_monitor_with_refs.yml @@ -10,7 +10,7 @@ documentation: - heuer_helge - kraft_jeremy - kuehbacher_birgit - - ruhe_lukas + - lindenlaub_lukas - sarauer_ellen - winterstein_franziska maintainer: diff --git a/esmvaltool/recipes/recipe_carvalhais14nat.yml b/esmvaltool/recipes/recipe_carvalhais14nat.yml index 9ec0811c00..63bfbb1edd 100644 --- a/esmvaltool/recipes/recipe_carvalhais14nat.yml +++ b/esmvaltool/recipes/recipe_carvalhais14nat.yml @@ -8,7 +8,7 @@ documentation: Carvalhais et al., 2014, Nature. The data required in the obs_details section can be obtained at http://www.bgc-jena.mpg.de/geodb/BGI/tau4ESMValTool.php - and have to be stored in the auxiliary_data_dir defined i config-user.yml, + and have to be stored in the auxiliary_data_dir defined in the configuration in a subdirectory obs_data_subdir specified in the obs_details section below. diff --git a/esmvaltool/recipes/recipe_runoff_et.yml b/esmvaltool/recipes/recipe_runoff_et.yml index 6924321c7c..0a83213caa 100644 --- a/esmvaltool/recipes/recipe_runoff_et.yml +++ b/esmvaltool/recipes/recipe_runoff_et.yml @@ -8,7 +8,7 @@ documentation: water balance components for different catchments and compares the results against observations. Currently, the required catchment mask needs to be downloaded manually at https://doi.org/10.5281/zenodo.2025776 and saved in - the auxiliary_data_dir defined in config-user.yml. + the auxiliary_data_dir defined in configuration. 
authors: - hagemann_stefan diff --git a/esmvaltool/recipes/recipe_sea_surface_salinity.yml b/esmvaltool/recipes/recipe_sea_surface_salinity.yml index 4e670eec7f..43ec0e6b5e 100644 --- a/esmvaltool/recipes/recipe_sea_surface_salinity.yml +++ b/esmvaltool/recipes/recipe_sea_surface_salinity.yml @@ -20,8 +20,7 @@ documentation: preprocessors: timeseries: extract_shape: - # Relative paths are relative to 'auxiliary_data_dir' as configured in - # the config-user.yml file. + # Relative paths are relative to the configuration option 'auxiliary_data_dir'. # The example shapefile can be downloaded from # https://marineregions.org/download_file.php?name=World_Seas_IHO_v3.zip # but any shapefile can be used @@ -50,7 +49,7 @@ datasets: - {<<: *cmip6, dataset: MPI-ESM1-2-HR, alias: MPI-ESM1-2-HR} - {<<: *cmip6, dataset: NorESM2-MM, alias: NorESM2-MM} - {<<: *cmip6, dataset: GISS-E2-2-H, alias: GISS-E2-2-H, institute: NASA-GISS} - + diagnostics: compare_salinity: diff --git a/esmvaltool/recipes/recipe_shapeselect.yml b/esmvaltool/recipes/recipe_shapeselect.yml index 0fb22c0d5d..b463f09df8 100644 --- a/esmvaltool/recipes/recipe_shapeselect.yml +++ b/esmvaltool/recipes/recipe_shapeselect.yml @@ -11,7 +11,7 @@ documentation: - berg_peter maintainer: - - ruhe_lukas + - lindenlaub_lukas projects: - c3s-magic @@ -36,8 +36,7 @@ diagnostics: script: shapeselect/diag_shapeselect.py # Example shapefiles can be found in: # esmvaltool/diag_scripts/shapeselect/testdata/ - # Relative paths are relative to 'auxiliary_data_dir' as configured in - # the config-user.yml file. + # Relative paths are relative to configuration option 'auxiliary_data_dir'. 
shapefile: 'Thames.shp' weighting_method: 'mean_inside' write_xlsx: true diff --git a/esmvaltool/references/esacci-landcover.bibtex b/esmvaltool/references/esacci-landcover.bibtex index ca6380e61b..44757b1d04 100644 --- a/esmvaltool/references/esacci-landcover.bibtex +++ b/esmvaltool/references/esacci-landcover.bibtex @@ -1,7 +1,8 @@ @misc{esacci-landcover, - url = {http://catalogue.ceda.ac.uk/uuid/4761751d7c844e228ec2f5fe11b2e3b0}, - title = {IPSL IPSL-CM6A-LR model output prepared for CMIP6 CMIP abrupt-4xCO2}, - publisher = {ESA Land Cover Climate Change Initiative (Land_Cover_cci): Global Land Cover Maps, Version 1.6.1.}, - year = {2016}, - author = {P. Defourny} + doi = {10.5194/essd-15-1465-2023}, + url = {https://catalogue.ceda.ac.uk/uuid/26a0f46c95ee4c29b5c650b129aab788/}, + title = {A 29-year time series of annual 300 m resolution plant-functional-type maps for climate models}, + publisher = {Earth System Science Data}, + year = {2023}, + author = { Kandice L. Harper, Céline Lamarche, Andrew Hartley, Philippe Peylin, Catherine Ottlé, Vladislav Bastrikov, Rodrigo San Martín, Sylvia I. Bohnenstengel, Grit Kirches, Martin Boettcher, Roman Shevchuk, Carsten Brockmann, and Pierre Defourny } } diff --git a/esmvaltool/references/jra_55.bibtex b/esmvaltool/references/jra_55.bibtex new file mode 100644 index 0000000000..d979a6c9cc --- /dev/null +++ b/esmvaltool/references/jra_55.bibtex @@ -0,0 +1,10 @@ +@article{jra_55, + doi = {https://doi.org/10.5065/D60G3H5B}, + title={The JRA-55 Reanalysis: General Specifications and Basic Characteristics}, + author={Kobayashi, S. and Y. Ota and Y. Harada and A. Ebita and M. Moriya and H. Onoda and K. Onogi and H. Kamahori and C. Kobayashi and H. Endo and K. Miyaoka and K. Takahashi}, + journal={J. Met. Soc. 
Jap.}, + volume={93}, + number={1}, + pages={5-48}, + year={2015} +} diff --git a/esmvaltool/utils/batch-jobs/generate.py b/esmvaltool/utils/batch-jobs/generate.py index afba37906f..428229b6eb 100644 --- a/esmvaltool/utils/batch-jobs/generate.py +++ b/esmvaltool/utils/batch-jobs/generate.py @@ -9,7 +9,7 @@ - conda_path 2) If needed, edit optional parameters: - outputs -- config_file +- config_dir 3) SLURM settings This script is configured to optimize the computing footprint of the recipe testing. It is not necessary to edit @@ -46,14 +46,14 @@ memory = '64G' # Default walltime time = '04:00:00' -# Full path to the mambaforge/etc/profile.d/conda.sh executable +# Full path to the miniforge3/etc/profile.d/conda.sh executable # Set the path to conda -conda_path = 'PATH_TO/mambaforge/etc/profile.d/conda.sh' -# Full path to config_file -# If none, ~/.esmvaltool/config-user.yml is used -config_file = '' +conda_path = 'PATH_TO/miniforge3/etc/profile.d/conda.sh' +# Full path to configuration directory +# If none, ~/.config/esmvaltool/ +config_dir = '' # Set max_parallel_tasks -# If none, read from config_file +# If none, read from configuration default_max_parallel_tasks = 8 # List of recipes that require non-default SLURM options set above @@ -315,11 +315,11 @@ def generate_submit(): file.write(f'. {conda_path}\n') file.write(f'conda activate {env}\n') file.write('\n') - if not config_file: + if not config_dir: file.write(f'esmvaltool run {str(recipe)}') else: - file.write(f'esmvaltool run --config_file ' - f'{str(config_file)} {str(recipe)}') + file.write(f'esmvaltool run --config_dir ' + f'{str(config_dir)} {str(recipe)}') # set max_parallel_tasks max_parallel_tasks = MAX_PARALLEL_TASKS.get( recipe.stem, diff --git a/esmvaltool/utils/recipe_filler.py b/esmvaltool/utils/recipe_filler.py deleted file mode 100755 index 40f637c6d5..0000000000 --- a/esmvaltool/utils/recipe_filler.py +++ /dev/null @@ -1,914 +0,0 @@ -""" -Fill in a blank recipe with additional datasets. 
- -Tool to obtain a set of additional datasets when given a blank recipe. -The blank recipe should contain, to the very least, a list of diagnostics -each with their variable(s). Example of minimum settings: - -diagnostics: - diagnostic: - variables: - ta: - mip: Amon - start_year: 1850 - end_year: 1900 - -Note that the tool will exit if any of these minimum settings are missing! - -Key features: - -- you can add as many variable parameters as are needed; if not added, the - tool will use the "*" wildcard and find all available combinations; -- you can restrict the number of datasets to be looked for with the `dataset:` - key for each variable, pass a list of datasets as value, e.g. - `dataset: [MPI-ESM1-2-LR, MPI-ESM-LR]`; -- you can specify a pair of experiments eg `exp: [rcp26, rcp85]` - for each variable; this will look for each available dataset per experiment - and assemble an aggregated data stretch from each experiment; equivalent to - esmvaltool's syntax of multiple experiments; this option needs an ensemble - to be declared explicitly; it will return no entry if there are gaps in data -- `start_year` and `end_year` are mandatory and are used to filter out the - datasets that don't have data in the interval; if you want all possible years - hence no filtering on years just use "*" for start and end years; -- `config-user: rootpath: CMIPX` may be a list, rootpath lists are supported; - -Caveats: - -- the tool doesn't yet work for derived variables; -- operation restricted to CMIP data. - -Have fun! 
-""" -import argparse -import datetime -import itertools -import logging -import logging.config -import os -import shutil -import time -from glob import glob -from pathlib import Path - -import esmvalcore -import yaml - -from esmvalcore import __version__ as core_ver -from esmvalcore.cmor.table import CMOR_TABLES, read_cmor_tables -from packaging import version as pkg_version -from ruamel.yaml import YAML - -logger = logging.getLogger(__name__) - -CFG = {} - - -def _purge_file_handlers(cfg: dict) -> None: - """Remove handlers with filename set. - - This is used to remove file handlers which require an output - directory to be set. - """ - cfg['handlers'] = { - name: handler - for name, handler in cfg['handlers'].items() - if 'filename' not in handler - } - prev_root = cfg['root']['handlers'] - cfg['root']['handlers'] = [ - name for name in prev_root if name in cfg['handlers'] - ] - - -def _update_stream_level(cfg: dict, level=None): - """Update the log level for the stream handlers.""" - handlers = cfg['handlers'] - - for handler in handlers.values(): - if level is not None and 'stream' in handler: - if handler['stream'] in ('ext://sys.stdout', 'ext://sys.stderr'): - handler['level'] = level.upper() - - -def _get_log_files(cfg: dict, output_dir: str = None) -> list: - """Initialize log files for the file handlers.""" - log_files = [] - - handlers = cfg['handlers'] - - for handler in handlers.values(): - filename = handler.get('filename', None) - - if filename: - if not os.path.isabs(filename): - handler['filename'] = os.path.join(output_dir, filename) - log_files.append(handler['filename']) - - return log_files - - -def configure_logging(cfg_file: str = None, - output_dir: str = None, - console_log_level: str = None) -> list: - """Configure logging. - - Parameters - ---------- - cfg_file : str, optional - Logging config file. If `None`, defaults to `configure-logging.yml` - output_dir : str, optional - Output directory for the log files. 
If `None`, log only to the console. - console_log_level : str, optional - If `None`, use the default (INFO). - - Returns - ------- - log_files : list - Filenames that will be logged to. - """ - if cfg_file is None: - cfg_loc = Path(esmvalcore.__file__ + "esmvalcore") - if pkg_version.parse(core_ver) < pkg_version.parse('2.8.0'): - cfg_file = cfg_loc.parents[0] / '_config' / 'config-logging.yml' - else: - cfg_file = cfg_loc.parents[0] / 'config' / 'config-logging.yml' - - cfg_file = Path(cfg_file).absolute() - - with open(cfg_file) as file_handler: - cfg = yaml.safe_load(file_handler) - - if output_dir is None: - _purge_file_handlers(cfg) - - log_files = _get_log_files(cfg, output_dir=output_dir) - _update_stream_level(cfg, level=console_log_level) - - logging.config.dictConfig(cfg) - logging.Formatter.converter = time.gmtime - logging.captureWarnings(True) - - return log_files - - -def read_config_developer_file(cfg_file=None): - """Read the developer's configuration file.""" - if cfg_file is None: - cfg_loc = Path(esmvalcore.__file__ + "esmvalcore") - cfg_file = cfg_loc.parents[0] / 'config-developer.yml' - - with open(cfg_file, 'r') as file: - cfg = yaml.safe_load(file) - - return cfg - - -def _normalize_path(path): - """Normalize paths. - - Expand ~ character and environment variables and convert path to absolute. 
- - Parameters - ---------- - path: str - Original path - - Returns - ------- - str: - Normalized path - """ - if path is None: - return None - return os.path.abspath(os.path.expanduser(os.path.expandvars(path))) - - -def read_config_user_file(config_file, folder_name, options=None): - """Read config user file and store settings in a dictionary.""" - if not config_file: - config_file = '~/.esmvaltool/config-user.yml' - config_file = os.path.abspath( - os.path.expandvars(os.path.expanduser(config_file))) - # Read user config file - if not os.path.exists(config_file): - print(f"ERROR: Config file {config_file} does not exist") - - with open(config_file, 'r') as file: - cfg = yaml.safe_load(file) - - if options is None: - options = dict() - for key, value in options.items(): - cfg[key] = value - - # set defaults - defaults = { - 'compress_netcdf': False, - 'exit_on_warning': False, - 'output_file_type': 'png', - 'output_dir': 'esmvaltool_output', - 'auxiliary_data_dir': 'auxiliary_data', - 'save_intermediary_cubes': False, - 'remove_preproc_dir': True, - 'max_parallel_tasks': None, - 'run_diagnostic': True, - 'profile_diagnostic': False, - 'config_developer_file': None, - 'drs': {}, - } - - for key in defaults: - if key not in cfg: - logger.info( - "No %s specification in config file, " - "defaulting to %s", key, defaults[key]) - cfg[key] = defaults[key] - - cfg['output_dir'] = _normalize_path(cfg['output_dir']) - cfg['auxiliary_data_dir'] = _normalize_path(cfg['auxiliary_data_dir']) - - cfg['config_developer_file'] = _normalize_path( - cfg['config_developer_file']) - - for key in cfg['rootpath']: - root = cfg['rootpath'][key] - if isinstance(root, str): - cfg['rootpath'][key] = [_normalize_path(root)] - else: - cfg['rootpath'][key] = [_normalize_path(path) for path in root] - - # insert a directory date_time_recipe_usertag in the output paths - now = datetime.datetime.utcnow().strftime("%Y%m%d_%H%M%S") - new_subdir = '_'.join((folder_name, now)) - cfg['output_dir'] = 
os.path.join(cfg['output_dir'], new_subdir) - - # create subdirectories - cfg['preproc_dir'] = os.path.join(cfg['output_dir'], 'preproc') - cfg['work_dir'] = os.path.join(cfg['output_dir'], 'work') - cfg['plot_dir'] = os.path.join(cfg['output_dir'], 'plots') - cfg['run_dir'] = os.path.join(cfg['output_dir'], 'run') - - # Read developer configuration file - read_cmor_tables(cfg['config_developer_file']) - - return cfg - - -HEADER = r""" -______________________________________________________________________ - _____ ____ __ ____ __ _ _____ _ - | ____/ ___|| \/ \ \ / /_ _| |_ _|__ ___ | | - | _| \___ \| |\/| |\ \ / / _` | | | |/ _ \ / _ \| | - | |___ ___) | | | | \ V / (_| | | | | (_) | (_) | | - |_____|____/|_| |_| \_/ \__,_|_| |_|\___/ \___/|_| -______________________________________________________________________ - -""" + __doc__ - -dataset_order = [ - 'dataset', 'project', 'exp', 'mip', 'ensemble', 'grid', 'start_year', - 'end_year' -] - -# cmip eras -cmip_eras = ["CMIP5", "CMIP6"] - -# The base dictionairy (all wildcards): -base_dict = { - 'institute': '*', - 'dataset': '*', - 'project': '*', - 'exp': '*', - 'frequency': '*', - 'ensemble': '*', - 'mip': '*', - 'modeling_realm': '*', - 'short_name': '*', - 'grid': '*', - 'start_year': '*', - 'end_year': '*', - 'activity': '*', -} - - -def _get_download_dir(yamlconf, cmip_era): - """Get the Download Directory from user config file.""" - if 'download_dir' in yamlconf: - return os.path.join(yamlconf['download_dir'], cmip_era) - return False - - -def _get_site_rootpath(cmip_era): - """Get site (drs) from config-user.yml.""" - config_yml = get_args().config_file - with open(config_yml, 'r') as yamf: - yamlconf = yaml.safe_load(yamf) - drs = yamlconf['drs'][cmip_era] - - download_dir = _get_download_dir(yamlconf, cmip_era) - rootdir = [yamlconf['rootpath'][cmip_era], ] - - if download_dir: - rootdir.append(download_dir) - logger.debug("%s root directory %s", cmip_era, rootdir) - if drs == 'default' and 'default' in 
yamlconf['rootpath']: - rootdir = [yamlconf['rootpath']['default'], ] - if download_dir: - rootdir.append(download_dir) - - logger.debug("Using drs default and " - "default: %s data directory", rootdir) - - return drs, rootdir - - -def _get_input_dir(cmip_era): - """Get input_dir from config-developer.yml.""" - site = _get_site_rootpath(cmip_era)[0] - yamlconf = read_config_developer_file() - - return yamlconf[cmip_era]['input_dir'][site] - - -def _get_input_file(cmip_era): - """Get input_file from config-developer.yml.""" - yamlconf = read_config_developer_file() - return yamlconf[cmip_era]['input_file'] - - -def _determine_basepath(cmip_era): - """Determine a basepath.""" - if isinstance(_get_site_rootpath(cmip_era)[1], list): - rootpaths = _get_site_rootpath(cmip_era)[1] - else: - rootpaths = [_get_site_rootpath(cmip_era)[1]] - - basepaths = [] - for rootpath in rootpaths: - if _get_input_dir(cmip_era) != os.path.sep: - basepath = os.path.join(rootpath, _get_input_dir(cmip_era), - _get_input_file(cmip_era)) - else: - basepath = os.path.join(rootpath, _get_input_file(cmip_era)) - basepath = basepath.replace('//', '/') - basepaths.append(basepath) - logger.debug("We will look for files of patterns %s", basepaths) - - return basepaths - - -def _overlapping_datasets(files, all_years, start_year, end_year): - """Process overlapping datasets and check for avail data in time range.""" - valid_files = [] - ay_sorted = sorted(all_years) - if ay_sorted[0] <= start_year and ay_sorted[-1] >= end_year: - yr_pairs = sorted( - [all_years[i:i + 2] for i in range(0, len(all_years), 2)]) - yr_pairs = list(k for k, _ in itertools.groupby(yr_pairs)) - d_y = [ - yr_pairs[j][1] - yr_pairs[j + 1][0] - for j in range(len(yr_pairs) - 1) - ] - gaps = [c for c in d_y if c < -1] - if not gaps: - valid_files = files - logger.info("Contiguous data from multiple experiments.") - else: - logger.warning("Data from multiple exps has >1 year gaps! 
") - logger.debug("Start %s/end %s requested - " - "files covering %s found.", - start_year, end_year, yr_pairs) - - return valid_files - - -def filter_years(files, start_year, end_year, overlap=False): - """ - Filter out files that are outside requested time range. - - Nifty function that takes a list of files and two years - as arguments; it will build a series of filter dictionaries - and check if data is available for the entire interval; - it will return a single file per dataset, the first file - in the list of files that cover the specified interval; - optional argument `overlap` used if multiple experiments are - used and overlap between datasets is present. - - Parameters - ---------- - files: list - A list of files that need filtering by requested time range. - - start_year: int - Integer start year of requested range. - - end_year: int - Integer end year of requested range. - - overlap: bool - Flag if datasets overlap; defaults to False. - - Returns - ------- - list - List of files which have been identified as falling in - the requested time range; if multiple files within time range - per dataset, the first file will be returned. 
- - """ - valid_files = [] - available_years = {} - - if start_year == "*" and end_year == "*": - return files - - if not files: - return valid_files - - all_files_roots = [("").join(fil.split("_")[0:-1]) for fil in files] - for fil in files: - available_years[("").join(fil.split("_")[0:-1])] = [] - for fil in files: - available_years[("").join(fil.split("_")[0:-1])].append( - fil.split("_")[-1].strip(".nc").split("-")) - - all_years = [] - for root, yr_list in available_years.items(): - actual_years = [] - yr_list = list(itertools.chain.from_iterable(yr_list)) - for year in yr_list: - if len(year) == 4: - actual_years.append(int(year)) - else: - actual_years.append(int(year[0:4])) - actual_years = sorted(actual_years) - all_years.extend(actual_years) - if not overlap: - actual_years = sorted(list(set(actual_years))) - if actual_years[0] <= start_year and actual_years[-1] >= end_year: - idx = all_files_roots.index(root) - valid_files.append(files[idx]) - - # multiple experiments to complete each other - if overlap: - valid_files = _overlapping_datasets(files, all_years, start_year, - end_year) - - if not valid_files: - logger.warning("No data found to fully cover start " - "%s / end %s as requested!", start_year, end_year) - - return valid_files - - -def _resolve_latestversion(dirname_template): - """Resolve the 'latestversion' tag.""" - for version_separator in ['{latestversion}', '{version}']: - if version_separator in dirname_template: - break - else: - return dirname_template - - # Find latest version - part1, part2 = dirname_template.split(version_separator) - part2 = part2.lstrip(os.sep) - part1_contents = glob(part1) - if part1_contents: - versions = os.listdir(part1_contents[0]) - versions.sort(reverse=True) - for version in ['latest'] + versions: - dirname = os.path.join(part1, version, part2) - if glob(dirname): - return dirname - - return dirname_template - - -def list_all_files(file_dict, cmip_era): - """ - List all files that match the dataset 
dictionary. - - Function that returns all files that are determined by a - file_dict dictionary; file_dict is keyed on usual parameters - like `dataset`, `project`, `mip` etc; glob.glob is used - to find files; speedup is achieved by replacing wildcards - with values from CMOR tables. - - Parameters - ---------- - file_dict: dict - Dictionary to hold dataset specifications. - - cmip_era: str - Either CMIP5 or CMIP6. - - Returns - ------- - list: - List of found files. - - """ - mip = file_dict['mip'] - short_name = file_dict['short_name'] - try: - frequency = CMOR_TABLES[cmip_era].get_variable(mip, - short_name).frequency - realms = CMOR_TABLES[cmip_era].get_variable(mip, - short_name).modeling_realm - except AttributeError: - logger.warning("Could not find %s CMOR table " - "for variable %s with mip %s", - cmip_era, short_name, mip) - return [] - file_dict['frequency'] = frequency - - basepaths = _determine_basepath(cmip_era) - all_files = [] - - for basepath in basepaths: - new_path = basepath[:] - - # could have multiple realms - for realm in realms: - file_dict['modeling_realm'] = realm - - # load all the files in the custom dict - for key, value in file_dict.items(): - new_path = new_path.replace('{' + key + '}', str(value)) - new_path = _resolve_latestversion(new_path) - if new_path.startswith("~"): - new_path = os.path.expanduser(new_path) - if not new_path.startswith(os.sep): - raise ValueError( - "Could not expand ~ to user home dir " - "please expand it in the config user file!") - logger.info("Expanding path to %s", new_path) - - # Globs all the wildcards into a list of files. - files = glob(new_path) - all_files.extend(files) - if not all_files: - logger.warning("Could not find any file for data specifications.") - - return all_files - - -def _file_to_recipe_dataset(fn_path, cmip_era, file_dict): - """Convert a filename to an recipe ready dataset.""" - # Add the obvious ones - ie the one you requested! 
- output_dataset = {} - output_dataset['project'] = cmip_era - for key, value in file_dict.items(): - if value == '*': - continue - if key in dataset_order: - output_dataset[key] = value - - # Split file name and base path into directory structure and filenames. - basefiles = _determine_basepath(cmip_era) - _, fnfile = os.path.split(fn_path) - - for basefile in basefiles: - _, basefile = os.path.split(basefile) - # Some of the key words include the splitting character '_' ! - basefile = basefile.replace('short_name', 'shortname') - basefile = basefile.replace('start_year', 'startyear') - basefile = basefile.replace('end_year', 'endyear') - - # Assume filename is separated by '_' - basefile_split = [key.replace("{", "") for key in basefile.split('_')] - basefile_split = [key.replace("}", "") for key in basefile_split] - fnfile_split = fnfile.split('_') - - # iterate through directory structure looking for useful bits. - for base_key, fn_key in zip(basefile_split, fnfile_split): - if base_key == '*.nc': - fn_key = fn_key.replace('.nc', '') - start_year, end_year = fn_key.split('-') - output_dataset['start_year'] = start_year - output_dataset['end_year'] = end_year - elif base_key == "ensemble*.nc": - output_dataset['ensemble'] = fn_key - elif base_key == "grid*.nc": - output_dataset['grid'] = fn_key - elif base_key == "shortname": - pass - else: - output_dataset[base_key] = fn_key - if "exp" in file_dict: - if isinstance(file_dict["exp"], list): - output_dataset["exp"] = file_dict["exp"] - - return output_dataset - - -def _remove_duplicates(add_datasets): - """ - Remove accidental duplicates. - - Close to 0% chances this will ever be used. - May be used when there are actual duplicates in data - storage, we've seen these before, but seldom. 
- """ - datasets = [] - seen = set() - - for dataset in add_datasets: - orig_exp = dataset["exp"] - dataset["exp"] = str(dataset["exp"]) - tup_dat = tuple(dataset.items()) - if tup_dat not in seen: - seen.add(tup_dat) - dataset["exp"] = orig_exp - datasets.append(dataset) - - return datasets - - -def _check_recipe(recipe_dict): - """Perform a quick recipe check for mandatory fields.""" - do_exit = False - if "diagnostics" not in recipe_dict: - logger.error("Recipe missing diagnostics section.") - do_exit = True - for diag_name, diag in recipe_dict["diagnostics"].items(): - if "variables" not in diag: - logger.error("Diagnostic %s missing variables.", diag_name) - do_exit = True - for var_name, var_pars in diag["variables"].items(): - if "mip" not in var_pars: - logger.error("Variable %s missing mip.", var_name) - do_exit = True - if "start_year" not in var_pars: - logger.error("Variable %s missing start_year.", var_name) - do_exit = True - if "end_year" not in var_pars: - logger.error("Variable %s missing end_year.", var_name) - do_exit = True - if "exp" in var_pars: - if isinstance(var_pars["exp"], - list) and "ensemble" not in var_pars: - logger.error("Asking for experiments list for ") - logger.error("variable %s - you need to ", var_name) - logger.error("define an ensemble for this case.") - do_exit = True - if do_exit: - raise ValueError("Please fix the issues in recipe and rerun") - - -def _check_config_file(user_config_file): - """Perform a quick recipe check for mandatory fields.""" - do_exit = False - if "rootpath" not in user_config_file: - logger.error("Config file missing rootpath section.") - do_exit = True - if "drs" not in user_config_file: - logger.error("Config file missing drs section.") - do_exit = True - for proj in cmip_eras: - if proj not in user_config_file["rootpath"].keys(): - logger.error("Config file missing rootpath for %s", proj) - do_exit = True - if proj not in user_config_file["drs"].keys(): - logger.error("Config file missing drs 
for %s", proj) - do_exit = True - if do_exit: - raise ValueError("Please fix issues in config file and rerun") - - -def _parse_recipe_to_dicts(yamlrecipe): - """Parse a recipe's variables into a dictionary of dictionairies.""" - output_dicts = {} - for diag in yamlrecipe['diagnostics']: - for variable, var_dict in yamlrecipe['diagnostics'][diag][ - 'variables'].items(): - new_dict = base_dict.copy() - for var_key, var_value in var_dict.items(): - if var_key in new_dict: - new_dict[var_key] = var_value - output_dicts[(diag, variable)] = new_dict - - return output_dicts - - -def _add_datasets_into_recipe(additional_datasets, output_recipe): - """Add the datasets into a new recipe.""" - yaml = YAML() - yaml.default_flow_style = False - with open(output_recipe, 'r') as yamlfile: - cur_yaml = yaml.load(yamlfile) - for diag_var, add_dat in additional_datasets.items(): - if add_dat: - if 'additional_datasets' in cur_yaml['diagnostics']: - cur_yaml['diagnostics'][diag_var[0]]['variables'][ - diag_var[1]]['additional_datasets'].extend(add_dat) - else: - cur_yaml['diagnostics'][diag_var[0]]['variables'][ - diag_var[1]]['additional_datasets'] = add_dat - if cur_yaml: - with open(output_recipe, 'w') as yamlfile: - yaml.dump(cur_yaml, yamlfile) - - -def _find_all_datasets(recipe_dict, cmip_eras): - """Find all datasets explicitly.""" - datasets = [] - for cmip_era in cmip_eras: - if cmip_era == "CMIP6": - activity = "CMIP" - else: - activity = "" - drs, site_path = _get_site_rootpath(cmip_era) - if drs in ["default", "SMHI"]: - logger.info("DRS is %s; filter on dataset disabled.", drs) - datasets = ["*"] - else: - if not isinstance(site_path, list): - site_path = [site_path] - for site_pth in site_path: - if drs in ["BADC", "DKRZ", "CP4CDS"]: - institutes_path = os.path.join(site_pth, activity) - elif drs in ["ETHZ", "RCAST"]: - exp = recipe_dict["exp"][0] - if exp == "*": - exp = "piControl" # all institutes have piControl - mip = recipe_dict["mip"] - var = 
recipe_dict["short_name"] - institutes_path = os.path.join(site_pth, exp, mip, var) - - if not os.path.isdir(institutes_path): - logger.warning("Path to data %s " - "does not exist; will look everywhere.", - institutes_path) - datasets = ["*"] - return datasets - - institutes = os.listdir(institutes_path) - if drs in ["BADC", "DKRZ", "CP4CDS"]: - for institute in institutes: - datasets.extend( - os.listdir(os.path.join(institutes_path, - institute))) - else: - datasets.extend(institutes) - - return datasets - - -def _get_exp(recipe_dict): - """Get the correct exp as list of single or multiple exps.""" - if isinstance(recipe_dict["exp"], list): - exps_list = recipe_dict["exp"] - logger.info("Multiple %s experiments requested", exps_list) - else: - exps_list = [recipe_dict["exp"]] - logger.info("Single %s experiment requested", exps_list) - - return exps_list - - -def _get_datasets(recipe_dict, cmip_eras): - """Get the correct datasets as list if needed.""" - if recipe_dict["dataset"] == "*": - datasets = _find_all_datasets(recipe_dict, cmip_eras) - return datasets - if isinstance(recipe_dict['dataset'], list): - datasets = recipe_dict['dataset'] - logger.info("Multiple %s datasets requested", datasets) - else: - datasets = [recipe_dict['dataset']] - logger.info("Single %s dataset requested", datasets) - - return datasets - - -def get_args(): - """Parse command line arguments.""" - parser = argparse.ArgumentParser( - description=__doc__, - formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument('recipe', help='Path/name of yaml pilot recipe file') - parser.add_argument('-c', - '--config-file', - default=os.path.join(os.environ["HOME"], '.esmvaltool', - 'config-user.yml'), - help='User configuration file') - - parser.add_argument('-o', - '--output', - default=os.path.join(os.getcwd(), - 'recipe_autofilled.yml'), - help='Output recipe, default recipe_autofilled.yml') - - args = parser.parse_args() - return args - - -def 
_get_timefiltered_files(recipe_dict, exps_list, cmip_era): - """Obtain all files that correspond to requested time range.""" - # multiple experiments allowed, complement data from each exp - if len(exps_list) > 1: - files = [] - for exp in exps_list: - recipe_dict["exp"] = exp - files.extend(list_all_files(recipe_dict, cmip_era)) - files = filter_years(files, - recipe_dict["start_year"], - recipe_dict["end_year"], - overlap=True) - recipe_dict["exp"] = exps_list - - else: - files = list_all_files(recipe_dict, cmip_era) - files = filter_years(files, recipe_dict["start_year"], - recipe_dict["end_year"]) - - return files - - -def run(): - """Run the `recipe_filler` tool. Help in __doc__ and via --help.""" - # Get arguments - args = get_args() - input_recipe = args.recipe - output_recipe = args.output - cmip_eras = ["CMIP5", "CMIP6"] - - # read the config file - config_user = read_config_user_file(args.config_file, - 'recipe_filler', - options={}) - - # configure logger - run_dir = os.path.join(config_user['output_dir'], 'recipe_filler') - if not os.path.isdir(run_dir): - os.makedirs(run_dir) - log_files = configure_logging(output_dir=run_dir, - console_log_level=config_user['log_level']) - logger.info(HEADER) - logger.info("Using user configuration file: %s", args.config_file) - logger.info("Using pilot recipe file: %s", input_recipe) - logger.info("Writing filled out recipe to: %s", output_recipe) - log_files = "\n".join(log_files) - logger.info("Writing program log files to:\n%s", log_files) - - # check config user file - _check_config_file(config_user) - - # parse recipe - with open(input_recipe, 'r') as yamlfile: - yamlrecipe = yaml.safe_load(yamlfile) - _check_recipe(yamlrecipe) - recipe_dicts = _parse_recipe_to_dicts(yamlrecipe) - - # Create a list of additional_datasets for each diagnostic/variable. 
- additional_datasets = {} - for (diag, variable), recipe_dict in recipe_dicts.items(): - logger.info("Looking for data for " - "variable %s in diagnostic %s", variable, diag) - new_datasets = [] - if "short_name" not in recipe_dict: - recipe_dict['short_name'] = variable - elif recipe_dict['short_name'] == "*": - recipe_dict['short_name'] = variable - - # adjust cmip era if needed - if recipe_dict['project'] != "*": - cmip_eras = [recipe_dict['project']] - - # get datasets depending on user request; always a list - datasets = _get_datasets(recipe_dict, cmip_eras) - - # get experiments depending on user request; always a list - exps_list = _get_exp(recipe_dict) - - # loop through datasets - for dataset in datasets: - recipe_dict['dataset'] = dataset - logger.info("Seeking data for dataset: %s", dataset) - for cmip_era in cmip_eras: - files = _get_timefiltered_files(recipe_dict, exps_list, - cmip_era) - - # assemble in new recipe - add_datasets = [] - for fn in sorted(files): - fn_dir = os.path.dirname(fn) - logger.info("Data directory: %s", fn_dir) - out = _file_to_recipe_dataset(fn, cmip_era, recipe_dict) - logger.info("New recipe entry: %s", out) - if out is None: - continue - add_datasets.append(out) - new_datasets.extend(add_datasets) - additional_datasets[(diag, variable, cmip_era)] = \ - _remove_duplicates(new_datasets) - - # add datasets to recipe as additional_datasets - shutil.copyfile(input_recipe, output_recipe, follow_symlinks=True) - _add_datasets_into_recipe(additional_datasets, output_recipe) - logger.info("Finished recipe filler. Go get some science done now!") - - -if __name__ == "__main__": - run() diff --git a/esmvaltool/utils/rose-cylc/esmvt_rose_wrapper.py b/esmvaltool/utils/rose-cylc/esmvt_rose_wrapper.py deleted file mode 100644 index 5965877717..0000000000 --- a/esmvaltool/utils/rose-cylc/esmvt_rose_wrapper.py +++ /dev/null @@ -1,258 +0,0 @@ -r""" -Install and run u-bd684 - the esmvaltool rose-cylc suite. 
- -Usage: ------- --c --config-file: [REQUIRED] user specific configuration file; --r --recipe-file: [REQUIRED] single or multiple (space-sep) recipe files; --d --main-dir: [OPTIONAL] main run dir name (full path); - defaults to $HOME/ESMVALTOOL_ROSE; --s --suite-dir [OPTIONAL] u-bd684 dir full path; can be set by user; - defaults to $HOME/u-bd684; --n --no-submit [OPTIONAL] if specified, will not submit suite to cylc; --l --log-level: [OPTIONAL] log level, default=info - -Example: --------- -python esmvt_rose_wrapper.py -c /home/users/valeriu/input/config-user.yml \ - -r /home/users/valeriu/recipes/recipe1.yml \ - /home/users/valeriu/recipes/recipe2.yml \ - -d /home/users/valeriu/esmvat_WRAPPER \ - -s /home/users/valeriu/u-bd684/ \ - -n - -Base suite: ------------ -The base suite to run esmvaltool via rose-cylc is u-bd684; you can find -this suite in the Met Office Rose repository at: - -https://code.metoffice.gov.uk/svn/roses-u/b/d/6/8/4/trunk/ - -When rose (exec.) will be working with python3.x, this location will become -default and the pipeline will aceess it independently of user, unless, of -course the user will specify -s $SUITE_LOCATION; until then the user needs -to grab a copy of it in $HOME or specify the default location via -s option. - -Environment: ------------- -We will move to a unified and centrally-installed esmvaltool environment; -until then, the user will have to alter the env_setup script: - -u-bd684/app/esmvaltool/env_setup - -with the correct pointers to esmvaltool installation, if desired; -NOTE that the defaults are working pointers for an install on CEDA-Jasmin. - -To be able to submit to cylc, you need to have the /metomi/ suite in path -AND use a python2.7 environment. Use the Jasmin-example below for guidance. 
- -Jasmin-example: ---------------- -This shows how to interact with rose-cylc and run esmvaltool under cylc -using this script: - -export PATH=/apps/contrib/metomi/bin:$PATH -export PATH=/home/users/valeriu/miniconda2/bin:$PATH -mkdir esmvaltool_rose -cd esmvaltool_rose -cp $esmvaltool/utils/rose-cylc/esmvt_rose_wrapper.py . -[get u-bd684 in $HOME, get your recipes and the config] -python esmvt_rose_wrapper.py -c config-user.yml \ --r recipe_autoassess_stratosphere.yml recipe_OceanPhysics.yml \ --d $HOME/esmvaltool_rose - -Note that you need to pass FULL PATHS to cylc, no . or .. because all -operations are done remotely on different nodes. - -A practical actual example of running the tool can be found on JASMIN: -/home/users/valeriu/esmvaltool_rose -There you will find the run shell: run_example, as well as an example -how to set the configuration file. A copy of u-bd684 is always located -in /home/users/valeriu/roses/u-bd684. - -Contact: --------- -author: Valeriu Predoi (UREAD, valeriu.predoi@ncas.ac.uk) -""" -import argparse -import configparser -import logging -import os -import subprocess -import shutil - -import yaml - - -# set up logging -logger = logging.getLogger(__name__) - -# print the header -HEADER = r""" -______________________________________________________________________ - - ESMValTool Rose-Cylc Wrapper -______________________________________________________________________ - -""" + __doc__ - - -def get_args(): - """Define the `esmvaltool` command line.""" - # parse command line args - parser = argparse.ArgumentParser( - description=HEADER, - formatter_class=argparse.RawDescriptionHelpFormatter) - parser.add_argument( - '-c', - '--config-file', - default=os.path.join(os.path.dirname(__file__), 'config-user.yml'), - help='Configuration file') - parser.add_argument( - '-r', - '--recipe-files', - type=str, - nargs='+', - help='Recipe files (list or single file)') - parser.add_argument( - '-d', - '--main-dir', - 
default=os.path.join(os.environ['HOME'], 'ESMVALTOOL_ROSE'), - help='Main analysis directory; default to $HOME/ESMVALTOOL_ROSE') - parser.add_argument( - '-s', - '--suite-dir', - default=os.path.join(os.environ['HOME'], 'u-bd684'), - help='u-bd684 suite directory; default to $HOME/u-bd684') - parser.add_argument( - '-n', - '--no-submit', - action='store_true', - help="Flag to NOT submit the Rose suite.") - parser.add_argument( - '-l', - '--log-level', - default='info', - choices=['debug', 'info', 'warning', 'error']) - args = parser.parse_args() - return args - - -def _set_logger(logging, out_dir, log_file, log_level): - # set logging for screen and file output - root_logger = logging.getLogger() - out_fmt = "%(asctime)s %(levelname)-8s %(name)s,%(lineno)s\t%(message)s" - logging.basicConfig( - filename=os.path.join(out_dir, log_file), - filemode='a', - format=out_fmt, - datefmt='%H:%M:%S', - level=logging.DEBUG) - root_logger.setLevel(log_level.upper()) - logfmt = logging.Formatter(out_fmt) - console_handler = logging.StreamHandler() - console_handler.setFormatter(logfmt) - root_logger.addHandler(console_handler) - - -def read_yaml_file(yaml_file): - """Read recipe into a dictionary.""" - with open(yaml_file, 'r') as yfile: - loaded_file = yaml.safe_load(yfile) - return loaded_file - - -def _setup_work(rose_config_template, recipe_files, - config_file, main_dir, default_suite, log_level): - """Write the new rose conf file per suite.""" - # Build the ConfigParser object - config = configparser.ConfigParser() - config.optionxform = str - config.read(rose_config_template) - - # set the main work dir - if not os.path.exists(main_dir): - os.makedirs(main_dir) - - # assemble work tree - if not os.path.isfile(os.path.join(main_dir, config_file)): - shutil.copy2(config_file, main_dir) - if not os.path.exists(os.path.join(main_dir, 'recipes')): - os.makedirs(os.path.join(main_dir, 'recipes')) - if not os.path.exists(os.path.join(main_dir, - os.path.basename(config_file))): 
- shutil.copy2(config_file, main_dir) - recipes_field = [] - for recipe in recipe_files: - if not os.path.exists(os.path.join(main_dir, 'recipes', - os.path.basename(recipe))): - shutil.copy2(recipe, os.path.join(main_dir, 'recipes')) - recipes_field.append(os.path.basename(recipe).strip('.yml')) - rose_suite = os.path.join(main_dir, 'u-bd684') - if os.path.exists(rose_suite): - shutil.rmtree(rose_suite) - shutil.copytree(default_suite, rose_suite) - out_dir = os.path.join(main_dir, 'output') - if not os.path.exists(out_dir): - os.makedirs(out_dir) - - # set logging - _set_logger(logging, out_dir, 'setup.log', log_level) - logger.info(HEADER) - - # start logging - logger.info("Main working directory: %s", main_dir) - logger.info("Using Rose-Cylc suite base: %s", default_suite) - logger.info("Output and logs written to: %s", out_dir) - logger.info("Creating rose suite directories...") - logger.info("Use rose-suite.conf template %s", rose_config_template) - logger.info("Use user config file %s", config_file) - - # write the file - config.set('jinja2:suite.rc', 'INPUT_DIR', - '"' + main_dir + '"') - config.set('jinja2:suite.rc', 'OUTPUT_DIR', '"' + out_dir + '"') - config.set('jinja2:suite.rc', 'RECIPES', str(recipes_field)) - with open(os.path.join(rose_suite, 'rose-suite.conf'), 'w') as r_c: - logger.info("Writing rose-suite.conf file %s", - os.path.join(rose_suite, 'rose-suite.conf')) - config.write(r_c) - - return rose_suite - - -def _run_suite(suite): - """Run the mip_convert suite.""" - os.chdir(suite) - logger.info("Submitting suite from %s", suite) - proc = subprocess.Popen(["rose", "suite-run"], stdout=subprocess.PIPE) - out, err = proc.communicate() - logger.info("Rose communications: %s %s", str(out), str(err)) - - -def main(): - """Run the the meat of the code.""" - logger.info("Running main function...") - args = get_args() - # rose suite default location - if args.suite_dir: - default_suite = args.suite_dir - rose_config_template = 
os.path.join(default_suite, "rose-suite.conf") - - # get command line arguments - recipe_files = args.recipe_files - config_file = args.config_file - main_dir = args.main_dir - log_level = args.log_level - - # setup rose suite - run_rose = _setup_work(rose_config_template, recipe_files, - config_file, main_dir, default_suite, log_level) - - # submit to cylc - if not args.no_submit: - _run_suite(run_rose) - - -if __name__ == '__main__': - main() diff --git a/setup.py b/setup.py index bbc50df9ef..af741a64f6 100755 --- a/setup.py +++ b/setup.py @@ -25,6 +25,7 @@ 'cdo', 'cdsapi', 'cf-units', + 'cfgrib', 'cftime', 'cmocean', 'dask!=2024.8.0', # https://github.com/dask/dask/issues/11296 @@ -50,23 +51,23 @@ 'numpy!=1.24.3', # severe masking bug 'openpyxl', 'packaging', - 'pandas!=2.2.0,!=2.2.1,!=2.2.2', # ESMValCore PR2305 + 'pandas==2.1.4', # see note in environment.yml 'progressbar2', - 'psyplot', - 'psy-maps', - 'psy-reg', - 'psy-simple', + 'psyplot>=1.5.0', # psy*<1.5.0 are not py312 compat + 'psy-maps>=1.5.0', + 'psy-reg>=1.5.0', + 'psy-simple>=1.5.0', 'pyproj>=2.1', 'pys2index', 'python-dateutil', 'pyyaml', - 'rasterio', + 'rasterio>=1.3.10', 'requests', 'ruamel.yaml', 'scikit-image', 'scikit-learn>=1.4.0', # github.com/ESMValGroup/ESMValTool/issues/3504 'scipy', - 'scitools-iris>=3.6.1', + 'scitools-iris>=3.11', 'seaborn', 'seawater', 'shapely>=2', @@ -107,7 +108,7 @@ 'imagehash', 'isort', 'pre-commit', - 'prospector[with_pyroma]!=1.1.6.3,!=1.1.6.4', + 'prospector[with_pyroma]>=1.12', 'vprof', 'yamllint', 'yapf', @@ -227,6 +228,7 @@ def read_description(filename): 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', + 'Programming Language :: Python :: 3.12', 'Topic :: Scientific/Engineering', 'Topic :: Scientific/Engineering :: Atmospheric Science', 'Topic :: Scientific/Engineering :: GIS', @@ -249,13 +251,9 @@ def read_description(filename): }, entry_points={ 'console_scripts': [ - 
'mip_convert_setup = ' - 'esmvaltool.cmorizers.mip_convert.esmvt_mipconv_setup:main', 'nclcodestyle = esmvaltool.utils.nclcodestyle.nclcodestyle:_main', 'test_recipe = ' 'esmvaltool.utils.testing.recipe_settings.install_expand_run:main', - 'recipe_filler = ' - 'esmvaltool.utils.recipe_filler:run' ], 'esmvaltool_commands': [ 'colortables = ' diff --git a/tests/integration/test_cmorizer.py b/tests/integration/test_cmorizer.py index 11bade4190..48f75b951a 100644 --- a/tests/integration/test_cmorizer.py +++ b/tests/integration/test_cmorizer.py @@ -4,6 +4,7 @@ import os import sys +import esmvalcore import iris import iris.coord_systems import iris.coords @@ -13,7 +14,9 @@ import pytest import yaml from cf_units import Unit +from packaging import version +from esmvaltool import ESMValToolDeprecationWarning from esmvaltool.cmorizers.data.cmorizer import DataCommand @@ -28,8 +31,8 @@ def keep_cwd(): os.chdir(curr_path) -def write_config_user_file(dirname): - """Replace config_user file values for testing.""" +def write_config_file(dirname): + """Replace configuration values for testing.""" config_file = dirname / 'config-user.yml' cfg = { 'output_dir': str(dirname / 'output_dir'), @@ -143,14 +146,59 @@ def arguments(*args): sys.argv = backup -def test_cmorize_obs_woa_no_data(tmp_path): +@pytest.mark.skipif( + version.parse(esmvalcore.__version__) >= version.parse("2.14.0"), + reason='ESMValCore >= v2.14.0', +) +def test_cmorize_obs_woa_no_data_config_file(tmp_path): """Test for example run of cmorize_obs command.""" + config_file = write_config_file(tmp_path) + os.makedirs(os.path.join(tmp_path, 'raw_stuff', 'Tier2')) + os.makedirs(os.path.join(tmp_path, 'output_dir')) + with keep_cwd(): + with pytest.raises(RuntimeError): + with pytest.warns(ESMValToolDeprecationWarning): + DataCommand().format('WOA', config_file=config_file) + + log_dir = os.path.join(tmp_path, 'output_dir') + log_file = os.path.join(log_dir, + os.listdir(log_dir)[0], 'run', 'main_log.txt') + 
check_log_file(log_file, no_data=True) + + +@pytest.mark.skipif( + version.parse(esmvalcore.__version__) >= version.parse("2.14.0"), + reason='ESMValCore >= v2.14.0', +) +def test_cmorize_obs_woa_data_config_file(tmp_path): + """Test for example run of cmorize_obs command.""" + config_file = write_config_file(tmp_path) + data_path = os.path.join(tmp_path, 'raw_stuff', 'Tier2', 'WOA') + put_dummy_data(data_path) + with keep_cwd(): + with pytest.warns(ESMValToolDeprecationWarning): + DataCommand().format('WOA', config_file=config_file) - config_user_file = write_config_user_file(tmp_path) + log_dir = os.path.join(tmp_path, 'output_dir') + log_file = os.path.join(log_dir, + os.listdir(log_dir)[0], 'run', 'main_log.txt') + check_log_file(log_file, no_data=False) + output_path = os.path.join(log_dir, os.listdir(log_dir)[0], 'Tier2', 'WOA') + check_output_exists(output_path) + check_conversion(output_path) + + +@pytest.mark.skipif( + version.parse(esmvalcore.__version__) < version.parse("2.12.0"), + reason='ESMValCore < v2.12.0', +) +def test_cmorize_obs_woa_no_data(tmp_path): + """Test for example run of cmorize_obs command.""" + write_config_file(tmp_path) os.makedirs(os.path.join(tmp_path, 'raw_stuff', 'Tier2')) with keep_cwd(): - with pytest.raises(Exception): - DataCommand().format('WOA', config_user_file) + with pytest.raises(RuntimeError): + DataCommand().format('WOA', config_dir=str(tmp_path)) log_dir = os.path.join(tmp_path, 'output_dir') log_file = os.path.join(log_dir, @@ -158,14 +206,17 @@ def test_cmorize_obs_woa_no_data(tmp_path): check_log_file(log_file, no_data=True) +@pytest.mark.skipif( + version.parse(esmvalcore.__version__) < version.parse("2.12.0"), + reason='ESMValCore < v2.12.0', +) def test_cmorize_obs_woa_data(tmp_path): """Test for example run of cmorize_obs command.""" - - config_user_file = write_config_user_file(tmp_path) + write_config_file(tmp_path) data_path = os.path.join(tmp_path, 'raw_stuff', 'Tier2', 'WOA') put_dummy_data(data_path) 
with keep_cwd(): - DataCommand().format('WOA', config_user_file) + DataCommand().format('WOA', config_dir=str(tmp_path)) log_dir = os.path.join(tmp_path, 'output_dir') log_file = os.path.join(log_dir, diff --git a/tests/integration/test_diagnostic_run.py b/tests/integration/test_diagnostic_run.py index b0c606f4ee..670f7088dd 100644 --- a/tests/integration/test_diagnostic_run.py +++ b/tests/integration/test_diagnostic_run.py @@ -5,12 +5,14 @@ from pathlib import Path from textwrap import dedent +import esmvalcore import pytest import yaml from esmvalcore._main import run +from packaging import version -def write_config_user_file(dirname): +def write_config_file(dirname): config_file = dirname / 'config-user.yml' cfg = { 'output_dir': str(dirname / 'output_dir'), @@ -68,10 +70,13 @@ def check(result_file): ] +@pytest.mark.skipif( + version.parse(esmvalcore.__version__) >= version.parse("2.14.0"), + reason='ESMValCore >= v2.14.0', +) @pytest.mark.installation @pytest.mark.parametrize('script_file', SCRIPTS) -def test_diagnostic_run(tmp_path, script_file): - +def test_diagnostic_run_config_file(tmp_path, script_file): local_script_file = Path(__file__).parent / script_file recipe_file = tmp_path / 'recipe_test.yml' @@ -96,12 +101,58 @@ def test_diagnostic_run(tmp_path, script_file): """.format(script_file, result_file)) recipe_file.write_text(str(recipe)) - config_user_file = write_config_user_file(tmp_path) + config_file = write_config_file(tmp_path) with arguments( 'esmvaltool', 'run', '--config_file', - config_user_file, + config_file, + str(recipe_file), + ): + run() + + check(result_file) + + +@pytest.mark.skipif( + version.parse(esmvalcore.__version__) < version.parse("2.12.0"), + reason='ESMValCore < v2.12.0', +) +@pytest.mark.installation +@pytest.mark.parametrize('script_file', SCRIPTS) +def test_diagnostic_run(tmp_path, script_file): + local_script_file = Path(__file__).parent / script_file + + recipe_file = tmp_path / 'recipe_test.yml' + script_file = 
tmp_path / script_file + result_file = tmp_path / 'result.yml' + config_dir = tmp_path / 'config' + config_dir.mkdir(exist_ok=True, parents=True) + + shutil.copy(local_script_file, script_file) + + # Create recipe + recipe = dedent(""" + documentation: + title: Test recipe + description: Recipe with no data. + authors: [andela_bouwe] + + diagnostics: + diagnostic_name: + scripts: + script_name: + script: {} + setting_name: {} + """.format(script_file, result_file)) + recipe_file.write_text(str(recipe)) + + write_config_file(config_dir) + with arguments( + 'esmvaltool', + 'run', + '--config_dir', + str(config_dir), str(recipe_file), ): run() diff --git a/tests/integration/test_recipe_filler.py b/tests/integration/test_recipe_filler.py deleted file mode 100644 index b78ac8c5f8..0000000000 --- a/tests/integration/test_recipe_filler.py +++ /dev/null @@ -1,211 +0,0 @@ -"""Tests for _data_finder.py.""" -import contextlib -import os -import shutil -import sys -import tempfile - -import pytest -import yaml - -from esmvaltool.utils.recipe_filler import run - - -# Load test configuration -with open(os.path.join(os.path.dirname(__file__), - 'recipe_filler.yml')) as file: - CONFIG = yaml.safe_load(file) - - -@contextlib.contextmanager -def arguments(*args): - backup = sys.argv - sys.argv = list(args) - yield - sys.argv = backup - - -def print_path(path): - """Print path.""" - txt = path - if os.path.isdir(path): - txt += '/' - if os.path.islink(path): - txt += ' -> ' + os.readlink(path) - print(txt) - - -def tree(path): - """Print path, similar to the the `tree` command.""" - print_path(path) - for dirpath, dirnames, filenames in os.walk(path): - for dirname in dirnames: - print_path(os.path.join(dirpath, dirname)) - for filename in filenames: - print_path(os.path.join(dirpath, filename)) - - -def create_file(filename): - """Create an empty file.""" - dirname = os.path.dirname(filename) - if not os.path.exists(dirname): - os.makedirs(dirname) - - with open(filename, 'a'): - 
pass - - -def create_tree(path, filenames=None, symlinks=None): - """Create directory structure and files.""" - for filename in filenames or []: - create_file(os.path.join(path, filename)) - - for symlink in symlinks or []: - link_name = os.path.join(path, symlink['link_name']) - os.symlink(symlink['target'], link_name) - - -def write_config_user_file(dirname, file_path, drs): - config_file = dirname / 'config-user.yml' - cfg = { - 'log_level': 'info', - 'output_dir': str(dirname / 'recipe_filler_output'), - 'rootpath': { - 'CMIP5': str(dirname / file_path), - 'CMIP6': str(dirname / file_path), - }, - 'drs': { - 'CMIP5': drs, - 'CMIP6': drs, - }, - } - config_file.write_text(yaml.safe_dump(cfg, encoding=None)) - return str(config_file) - - -def write_recipe(dirname, recipe_dict): - recipe_file = dirname / 'recipe.yml' - diags = {'diagnostics': recipe_dict} - recipe_file.write_text(yaml.safe_dump(diags, encoding=None)) - return str(recipe_file) - - -@pytest.fixture -def root(): - """Root function for tests.""" - dirname = tempfile.mkdtemp() - yield os.path.join(dirname, 'output1') - print("Directory structure was:") - tree(dirname) - shutil.rmtree(dirname) - - -def setup_files(tmp_path, root, cfg): - """Create config, recipe ,output recipe etc.""" - user_config_file = write_config_user_file(tmp_path, root, cfg['drs']) - diagnostics = {} - diagnostics["test_diagnostic"] = {} - diagnostics["test_diagnostic"]["variables"] = {} - diagnostics["test_diagnostic"]["variables"]["test_var"] = cfg["variable"] - recipe = write_recipe(tmp_path, diagnostics) - output_recipe = str(tmp_path / "recipe_auto.yml") - - return user_config_file, recipe, output_recipe - - -@pytest.mark.parametrize('cfg', CONFIG['has_additional_datasets']) -def test_adding_datasets(tmp_path, root, cfg): - """Test retrieving additional datasets.""" - create_tree(root, cfg.get('available_files'), - cfg.get('available_symlinks')) - - user_config_file, recipe, output_recipe = setup_files(tmp_path, root, cfg) - 
- with arguments( - 'recipe_filler', - recipe, - '-c', - user_config_file, - '-o', - output_recipe, - ): - run() - - with open(output_recipe, 'r') as file: - autofilled_recipe = yaml.safe_load(file) - diag = autofilled_recipe["diagnostics"]["test_diagnostic"] - var = diag["variables"]["test_var"] - assert "additional_datasets" in var - - -@pytest.mark.parametrize('cfg', CONFIG['no_additional_datasets']) -def test_not_adding_datasets(tmp_path, root, cfg): - """Test retrieving no additional datasets.""" - create_tree(root, cfg.get('available_files'), - cfg.get('available_symlinks')) - - user_config_file, recipe, output_recipe = setup_files(tmp_path, root, cfg) - - with arguments( - 'recipe_filler', - recipe, - '-c', - user_config_file, - '-o', - output_recipe, - ): - run() - - with open(output_recipe, 'r') as file: - autofilled_recipe = yaml.safe_load(file) - diag = autofilled_recipe["diagnostics"]["test_diagnostic"] - var = diag["variables"]["test_var"] - assert "additional_datasets" not in var - - -def test_bad_var(tmp_path, root): - """Test a bad variable in the works.""" - cfg = CONFIG['bad_variable'][0] - user_config_file, recipe, output_recipe = setup_files(tmp_path, root, cfg) - - # this doesn't fail and it shouldn't since it can go on - # and look for data for other valid variables - with arguments( - 'recipe_filler', - recipe, - '-c', - user_config_file, - '-o', - output_recipe, - ): - run() - - with open(output_recipe, 'r') as file: - autofilled_recipe = yaml.safe_load(file) - diag = autofilled_recipe["diagnostics"]["test_diagnostic"] - var = diag["variables"]["test_var"] - assert "additional_datasets" not in var - - -def test_no_short_name(tmp_path, root): - """Test a bad variable in the works.""" - cfg = CONFIG['no_short_name'][0] - user_config_file, recipe, output_recipe = setup_files(tmp_path, root, cfg) - - # this doesn't fail and it shouldn't since it can go on - # and look for data for other valid variables - with arguments( - 'recipe_filler', - 
recipe, - '-c', - user_config_file, - '-o', - output_recipe, - ): - run() - - with open(output_recipe, 'r') as file: - autofilled_recipe = yaml.safe_load(file) - diag = autofilled_recipe["diagnostics"]["test_diagnostic"] - var = diag["variables"]["test_var"] - assert "additional_datasets" not in var diff --git a/tests/system/__init__.py b/tests/system/__init__.py deleted file mode 100644 index 5f7877c08d..0000000000 --- a/tests/system/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Test running esmvaltool""" diff --git a/tests/system/config-test.yml b/tests/system/config-test.yml deleted file mode 100644 index ec25dec23d..0000000000 --- a/tests/system/config-test.yml +++ /dev/null @@ -1,44 +0,0 @@ -############################################################################### -# Diagnostic test configuration file for the ESMValTool -# -# './setup.py test' will look for this file in the following locations -# and use the first config-test.yml file found: -# - current working directory -# - ~/.esmvaltool/ -# - ESMValTool/tests/test_diagnostics/ -# -############################################################################### ---- - -test: - # Execute system/diagnostic tests [false]/true - run: false - # Simulate input data using the dummydata module [true]/false - simulate_input: true - # Limit testing/generating reference data to the following recipes - # An empty list means any recipe in esmvaltool/nml - recipes: [ - recipe_MyVar.yml, - ] - -# Reference data configuration -reference: - # Directory containing reference output - output: ~/esmvaltool_reference_output - # Generate reference data instead of checking [false]/true - generate: false - -# Template for the user configuration file -user: - log_level: warning - exit_on_warning: false - output_file_type: pdf - save_intermediary_cubes: true - - rootpath: - CMIP5: ~/esmvaltool_simulated_input - OBS: ~/esmvaltool_simulated_input - default: ~/esmvaltool_simulated_input - - drs: - CMIP5: default diff --git 
a/tests/system/data_simulator.py b/tests/system/data_simulator.py deleted file mode 100644 index 203816ca0e..0000000000 --- a/tests/system/data_simulator.py +++ /dev/null @@ -1,114 +0,0 @@ -"""Simulate test data for `esmvaltool`.""" -import os -import sys -import tempfile -import time - -import numpy as np - -from esmvalcore import __version__ as core_ver -from packaging import version -if version.parse(core_ver) < version.parse('2.8.0'): - from esmvalcore._config import read_config_user_file -else: - from esmvalcore.config import CFG -if version.parse(core_ver) <= version.parse('2.7.1'): - from esmvalcore._recipe import read_recipe_file -else: - from esmvalcore._recipe.recipe import read_recipe_file - - -def get_input_filename(variable, rootpath, drs): - """Get a valid input filename.""" - # TODO: implement this according to esmvalcore._data_finder.py - # or patch get_input_filelist there. - return tempfile.NamedTemporaryFile().name + '.nc' - - -def write_data_file(short_name, filename, field, start_year, end_year): - """Write a file containing simulated data.""" - from dummydata.model2 import Model2 - from dummydata.model3 import Model3 - - if 'T2M' in field: - writer = Model2 - elif 'T3M' in field: - writer = Model3 - else: - raise NotImplementedError( - "Cannot create a model from field {}".format(field)) - - # TODO: Maybe this should be made configurable per diagnostic or model - cfg = { - 'ta': { - 'method': 'gaussian_blobs', - 'low': 223, - 'high': 303, - }, - 'pr': { - 'method': 'gaussian_blobs', - 'low': 1e-7, - 'high': 2e-4, - } - } - - kwargs = cfg[short_name] if short_name in cfg else {} - - writer( - var=short_name, - oname=filename, - start_year=start_year, - stop_year=end_year, - **kwargs) - - -def simulate_input_data(recipe_file, config_user_file=None): - """Simulate data for variables defined in recipe""" - if config_user_file: - if version.parse(core_ver) <= version.parse('2.8.0'): - user_config = read_config_user_file( - 
config_file=config_user_file, recipe_name='') - else: - user_config = CFG.load_from_file( - config_file=config_user_file, recipe_name='') - else: - user_config = { - 'rootpath': { - 'default': '.', - }, - 'drs': {}, - } - - recipe = read_recipe_file(recipe_file, user_config, initialize_tasks=False) - - start_time = time.time() - - for diagnostic in recipe.diagnostics.values(): - np.random.seed(0) - for variables in diagnostic['variables'].values(): - for variable in variables: - filename = get_input_filename( - variable=variable, - rootpath=user_config['rootpath'], - drs=user_config['drs']) - dirname = os.path.dirname(filename) - if not os.path.exists(dirname): - print("Creating {}".format(dirname)) - os.makedirs(dirname) - - print("Writing {}".format(filename)) - write_data_file( - short_name=variable['short_name'], - filename=filename, - field=variable['field'], - start_year=variable['start_year'], - end_year=variable['end_year'], - ) - - print( - "Simulating data took {:.0f} seconds".format(time.time() - start_time)) - - -if __name__ == '__main__': - for path in sys.argv[1:]: - simulate_input_data(recipe_file=path, config_user_file=None) diff --git a/tests/system/esmvaltool_testlib.py b/tests/system/esmvaltool_testlib.py deleted file mode 100644 index f73c639a89..0000000000 --- a/tests/system/esmvaltool_testlib.py +++ /dev/null @@ -1,227 +0,0 @@ -"""Provide a class for testing esmvaltool.""" - -import glob -import os -import shutil -import sys -from unittest import SkipTest - -import numpy as np -import yaml -# from easytest import EasyTest - -import esmvaltool - - -def _load_config(filename=None): - """Load test configuration""" - if filename is None: - # look in default locations for config-test.yml - config_file = 'config-test.yml' - default_locations = [ - '.', - '~/.esmvaltool', - os.path.dirname(__file__), - ] - for path in default_locations: - filepath = os.path.join(os.path.expanduser(path), config_file) - if os.path.exists(filepath): - filename = 
os.path.abspath(filepath) - break - - with open(filename, 'r') as file: - cfg = yaml.safe_load(file) - - cfg['configfile'] = filename - cfg['reference']['output'] = os.path.abspath( - os.path.expanduser(cfg['reference']['output'])) - - if cfg['test'].get('recipes', []) == []: - script_root = esmvaltool.get_script_root() - recipe_glob = os.path.join(script_root, 'nml', 'recipe_*.yml') - cfg['test']['recipes'] = glob.glob(recipe_glob) - - return cfg - - -_CFG = _load_config() - -RECIPES = _CFG['test']['recipes'] - - -def _create_config_user_file(output_directory): - """Write a config-user.yml file. - - Write a configuration file for running ESMValTool - such that it writes all output to `output_directory`. - """ - cfg = _CFG['user'] - - cfg['output_dir'] = output_directory - - # write to file - filename = os.path.join(output_directory, 'config-user.yml') - with open(filename, 'w') as file: - yaml.safe_dump(cfg, file) - - return filename - - -class ESMValToolTest: # was ESMValToolTest(EasyTest) - """Main class for ESMValTool test runs.""" - - def __init__(self, recipe, output_directory, ignore='', **kwargs): - """ - Create ESMValToolTest instance - - recipe: str - The filename of the recipe that should be tested. - output_directory : str - The name of a directory where results can be stored. - ignore: str or iterable of str - Glob patterns of files to be ignored when testing. - """ - if not _CFG['test']['run']: - raise SkipTest("System tests disabled in {}".format( - _CFG['configfile'])) - - self.ignore = (ignore, ) if isinstance(ignore, str) else ignore - - script_root = esmvaltool.get_script_root() - - # Set recipe path - if not os.path.exists(recipe): - recipe = os.path.join( - os.path.dirname(script_root), 'recipes', recipe) - self.recipe_file = os.path.abspath(recipe) - - # Simulate input data? - self.simulate_input = _CFG['test']['simulate_input'] - - # Create reference output? 
- self.create_reference_output = _CFG['reference']['generate'] - - # Define reference output path - reference_dir = os.path.join( - _CFG['reference']['output'], - os.path.splitext(os.path.basename(self.recipe_file))[0]) - - # If reference data is neither available nor should be generated, skip - if not (os.path.exists(reference_dir) or self.create_reference_output): - raise SkipTest( - "No reference data available for recipe {} in {}".format( - recipe, _CFG['reference']['output'])) - - # Write ESMValTool configuration file - self.config_user_file = _create_config_user_file(output_directory) - - super(ESMValToolTest, self).__init__( - exe='esmvaltool', - args=['-n', self.recipe_file, '-c', self.config_user_file], - output_directory=output_directory, - refdirectory=reference_dir, - **kwargs) - - def run(self, **kwargs): - """Run tests or generate reference data.""" - if self.simulate_input: - from .data_simulator import simulate_input_data - simulate_input_data( - recipe_file=self.recipe_file, - config_user_file=self.config_user_file) - - if self.create_reference_output: - self.generate_reference_output() - raise SkipTest("Generated reference data instead of running test") - else: - super(ESMValToolTest, self).run_tests(**kwargs) - - def generate_reference_output(self): - """Generate reference output. - - Generate reference data by executing the recipe and then moving - results to the output directory. - """ - if not os.path.exists(self.refdirectory): - self._execute() - shutil.move(self.output_directory, - os.path.dirname(self.refdirectory)) - else: - print("Warning: not generating reference data, reference " - "directory {} already exists.".format(self.refdirectory)) - - def _execute(self): - """Execute ESMValTool - - Override the _execute method because we want to run in our own - Python instance to get coverage reporting and we want to update - the location of `self.output_directory` afterwards. 
- """ - # run ESMValTool - sys.argv[1:] = self.args - esmvaltool.main.run() - - # Update the output directory to point to the output of the run - output_directory = self.output_directory # noqa - - output = [] - for path in os.listdir(output_directory): - path = os.path.join(output_directory, path) - if os.path.isdir(path): - output.append(path) - - if not output: - raise OSError( - "Output directory not found in location {}. " - "Probably ESMValTool failed to create any output.".format( - output_directory)) - - if len(output) > 1: - print("Warning: found multiple output directories:\n{}\nin output " - "location {}\nusing the first one.".format( - output, output_directory)) - - self.output_directory = output[0] + os.sep # noqa - - def _get_files_from_refdir(self): - """Get a list of files from reference directory. - - Ignore files that match patterns in self.ignore. - - Override this method of easytest.EasyTest to be able to ignore certain - files. - """ - from fnmatch import fnmatchcase - - matches = [] - for root, _, filenames in os.walk(self.refdirectory): - for filename in filenames: - path = os.path.join(root, filename) - relpath = os.path.relpath(path, start=self.refdirectory) - for pattern in self.ignore: - if fnmatchcase(relpath, pattern): - break - else: - matches.append(path) - - return matches - - def _compare_netcdf_values(self, f1, f2, allow_subset=False): - """Compare two netCDF4 Dataset instances. - - Check if dataset2 contains the same variable values as dataset1. - - Override this method of easytest.EasyTest because it is broken - for the case where value1 and value2 have no length. 
- """ - if allow_subset: # allow that only a subset of data is compared - raise NotImplementedError - - for key in f1.variables: - values1 = f1.variables[key][:] - values2 = f2.variables[key][:] - - if not np.array_equal(values1, values2): - return False - - return True diff --git a/tests/system/test_recipes.py b/tests/system/test_recipes.py deleted file mode 100644 index 0825707bd4..0000000000 --- a/tests/system/test_recipes.py +++ /dev/null @@ -1,35 +0,0 @@ -"""Test script to compare the output of ESMValTool against previous runs.""" - -import shutil -import tempfile - -import pytest - -from .esmvaltool_testlib import RECIPES, ESMValToolTest - - -@pytest.fixture -def output_directory(): - """Create a directory for storing ESMValTool output.""" - tmp = tempfile.mkdtemp() - yield tmp - shutil.rmtree(tmp, ignore_errors=True) - - -@pytest.mark.parametrize("recipe", RECIPES) -def test_recipe(output_directory, recipe): # noqa - """Create a test for each recipe in RECIPES and run those.""" - test = ESMValToolTest( - recipe=recipe, - output_directory=output_directory, - ignore=['tmp/*/*', '*log*.txt', '*.log'], - checksum_exclude=['pdf', 'ps', 'png', 'eps', 'epsi', 'nc']) - - test.run( - graphics=None, - files='all', - check_size_gt_zero=True, - checksum_files='all', - check_file_content=['nc']) - - assert test.sucess diff --git a/tests/unit/cmorizers/test_utilities.py b/tests/unit/cmorizers/test_utilities.py index 156a3507c0..f5823aa734 100644 --- a/tests/unit/cmorizers/test_utilities.py +++ b/tests/unit/cmorizers/test_utilities.py @@ -8,6 +8,7 @@ import iris.coords import iris.cube import iris.fileformats +import iris.util import numpy as np import pytest from cf_units import Unit @@ -194,8 +195,9 @@ def test_fix_coords(): cube.coord("longitude").units = "m" cube.coord("latitude").units = "K" cube_2 = cube.copy() - cube_2.coord("depth").bounds = [[0., 2.5], [2.5, 25.], [25., 250.]] - utils.fix_coords(cube) + + cube = utils.fix_coords(cube) + assert 
cube.coord("time").var_name == "time" assert cube.coord("longitude").var_name == "lon" assert cube.coord("latitude").var_name == "lat" @@ -217,24 +219,31 @@ def test_fix_coords(): # both cf-units <= 3.1.0 and later versions, we list both variants in the # following assertion. assert cube.coord("time").units.calendar in ("standard", "gregorian") - assert cube.coord("longitude").points[0] == 178.5 - assert cube.coord("longitude").points[1] == 179.5 + assert cube.coord("longitude").points[0] == 358.5 + assert cube.coord("longitude").points[1] == 359.5 assert cube.coord("longitude").has_bounds() - assert cube.coord("longitude").bounds[1][1] == 180. - assert cube.data[1, 1, 1, 0] == 22. + assert cube.coord("longitude").bounds[1][1] == 360.0 + assert cube.data[1, 1, 1, 1] == 22. assert cube.coord("latitude").has_bounds() assert cube.coord("depth").has_bounds() assert cube.coord('latitude').coord_system is None assert cube.coord('longitude').coord_system is None - utils.fix_coords(cube_2, - overwrite_time_bounds=False, - overwrite_lon_bounds=False, - overwrite_lat_bounds=False, - overwrite_lev_bounds=False) + + cube_2.coord("depth").bounds = [[0., 2.5], [2.5, 25.], [25., 250.]] + cube_2 = iris.util.reverse(cube_2, "latitude") + np.testing.assert_allclose(cube_2.coord('latitude').points, [2.5, 1.5]) + cube_2 = utils.fix_coords( + cube_2, + overwrite_time_bounds=False, + overwrite_lon_bounds=False, + overwrite_lat_bounds=False, + overwrite_lev_bounds=False, + ) assert cube_2.coord("time").bounds[0][1] == 30. - assert cube_2.coord("longitude").bounds[1][1] == 180. + assert cube_2.coord("longitude").bounds[1][1] == 360.0 assert cube_2.coord("latitude").bounds[1][1] == 3. assert cube_2.coord("depth").bounds[1][1] == 25. + np.testing.assert_allclose(cube_2.coord('latitude').points, [1.5, 2.5]) def test_fix_var_metadata():