From 1b77d1fba0a9de9d731283ea9de69488476e7841 Mon Sep 17 00:00:00 2001 From: Gillian Petro <96886803+gspetro-NOAA@users.noreply.github.com> Date: Thu, 14 Mar 2024 18:40:03 -0400 Subject: [PATCH] [develop]: Update documentation to reflect recent (Feb '24) changes (#72) * add extlinks & UFS/EPIC logo * add linkcheck to doc Makefile * add/update requirements files * update linkcheck redirects/ignores * update tech overview; minor conf.py edits * reorganize/add to Tech Overview * rm comments * update build/run ch w/config info * update configuration/run/status check info in build/run ch * move info on data * minor edits * update BASEDIR path in land_analysis.yaml * add FAQ; rm info on log/err files from build/run ch * change docs to doc * update Glossary, uwtools desc; misc * update RTD YAML * minor fix to FAQ * update LANDDAROOT path * minor edits from chj * add run w/o rocot info & jjobs info * update auto_doc.yaml to use doc instead of docs * update dir structure * add NCEP/NCO terms * update UW tools link * complete sentence * update dir str diagram * update log file info and com dir str diagram * minor build/run updates * add RTD integration to repo? 
--- .github/workflows/auto_doc.yml | 4 +- .readthedocs.yaml | 4 +- {docs => doc}/Makefile | 18 +- {docs => doc}/README_git | 0 {docs => doc}/make.bat | 70 ++--- docs/requirements.txt => doc/requirements.in | 5 +- doc/requirements.txt | 76 +++++ .../source/BackgroundInfo/Introduction.rst | 2 +- .../BackgroundInfo/TechnicalOverview.rst | 230 ++++++++++++++ {docs => doc}/source/BackgroundInfo/index.rst | 0 .../BuildingRunningTesting/BuildRunLandDA.rst | 295 ++++++++++++++++++ .../BuildingRunningTesting/Container.rst | 0 .../BuildingRunningTesting/TestingLandDA.rst | 0 .../source/BuildingRunningTesting/index.rst | 0 .../CustomizingTheWorkflow/DASystem.rst | 0 .../source/CustomizingTheWorkflow/Model.rst | 0 .../source/CustomizingTheWorkflow/index.rst | 0 doc/source/Reference/FAQ.rst | 41 +++ {docs => doc}/source/Reference/Glossary.rst | 15 + {docs => doc}/source/Reference/index.rst | 1 + {docs => doc}/source/_static/custom.css | 0 .../source/_static/theme_overrides.css | 0 doc/source/conf.py | 129 ++++++++ {docs => doc}/source/index.rst | 0 {docs => doc}/source/references.bib | 2 +- .../BackgroundInfo/TechnicalOverview.rst | 158 ---------- .../BuildingRunningTesting/BuildRunLandDA.rst | 254 --------------- docs/source/conf.py | 88 ------ 28 files changed, 845 insertions(+), 547 deletions(-) rename {docs => doc}/Makefile (63%) rename {docs => doc}/README_git (100%) rename {docs => doc}/make.bat (95%) rename docs/requirements.txt => doc/requirements.in (52%) create mode 100644 doc/requirements.txt rename {docs => doc}/source/BackgroundInfo/Introduction.rst (91%) create mode 100644 doc/source/BackgroundInfo/TechnicalOverview.rst rename {docs => doc}/source/BackgroundInfo/index.rst (100%) create mode 100644 doc/source/BuildingRunningTesting/BuildRunLandDA.rst rename {docs => doc}/source/BuildingRunningTesting/Container.rst (100%) rename {docs => doc}/source/BuildingRunningTesting/TestingLandDA.rst (100%) rename {docs => doc}/source/BuildingRunningTesting/index.rst (100%) 
rename {docs => doc}/source/CustomizingTheWorkflow/DASystem.rst (100%) rename {docs => doc}/source/CustomizingTheWorkflow/Model.rst (100%) rename {docs => doc}/source/CustomizingTheWorkflow/index.rst (100%) create mode 100644 doc/source/Reference/FAQ.rst rename {docs => doc}/source/Reference/Glossary.rst (89%) rename {docs => doc}/source/Reference/index.rst (92%) rename {docs => doc}/source/_static/custom.css (100%) rename {docs => doc}/source/_static/theme_overrides.css (100%) create mode 100644 doc/source/conf.py rename {docs => doc}/source/index.rst (100%) rename {docs => doc}/source/references.bib (99%) delete mode 100644 docs/source/BackgroundInfo/TechnicalOverview.rst delete mode 100644 docs/source/BuildingRunningTesting/BuildRunLandDA.rst delete mode 100644 docs/source/conf.py diff --git a/.github/workflows/auto_doc.yml b/.github/workflows/auto_doc.yml index 0a657ea6..218ed04e 100644 --- a/.github/workflows/auto_doc.yml +++ b/.github/workflows/auto_doc.yml @@ -20,11 +20,11 @@ jobs: uses: actions/upload-artifact@v3 with: name: html-docs - path: docs/build/html/ + path: doc/build/html/ - name: Deploy uses: peaceiris/actions-gh-pages@v3 if: github.ref == 'refs/heads/develop' with: github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: docs/build/html/ + publish_dir: doc/build/html/ diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 95b6d849..5899d5be 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -17,7 +17,7 @@ build: # Build documentation in the docs/ directory with Sphinx sphinx: - configuration: docs/source/conf.py + configuration: doc/source/conf.py # If using Sphinx, optionally build your docs in additional formats such as PDF # formats: @@ -26,6 +26,6 @@ sphinx: # Optionally declare the Python requirements required to build your docs python: install: - - requirements: docs/requirements.txt + - requirements: doc/requirements.txt diff --git a/docs/Makefile b/doc/Makefile similarity index 63% rename from docs/Makefile rename to 
doc/Makefile index d0c3cbf1..358cb1ba 100644 --- a/docs/Makefile +++ b/doc/Makefile @@ -3,18 +3,30 @@ # You can set these variables from the command line, and also # from the environment for the first two. -SPHINXOPTS ?= +SPHINXOPTS = -a -n #-W SPHINXBUILD ?= sphinx-build SOURCEDIR = source BUILDDIR = build +LINKCHECKDIR = $(BUILDDIR)/linkcheck # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -.PHONY: help Makefile +.PHONY: help Makefile linkcheck + +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) + +doc: + make clean + $(MAKE) linkcheck + $(MAKE) html + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(SPHINXOPTS) $(SOURCEDIR) $(LINKCHECKDIR) # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) -w $(BUILDDIR)/warnings.log diff --git a/docs/README_git b/doc/README_git similarity index 100% rename from docs/README_git rename to doc/README_git diff --git a/docs/make.bat b/doc/make.bat similarity index 95% rename from docs/make.bat rename to doc/make.bat index 747ffb7b..dc1312ab 100644 --- a/docs/make.bat +++ b/doc/make.bat @@ -1,35 +1,35 @@ -@ECHO OFF - -pushd %~dp0 - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=sphinx-build -) -set SOURCEDIR=source -set BUILDDIR=build - -%SPHINXBUILD% >NUL 2>NUL -if errorlevel 9009 ( - echo. - echo.The 'sphinx-build' command was not found. Make sure you have Sphinx - echo.installed, then set the SPHINXBUILD environment variable to point - echo.to the full path of the 'sphinx-build' executable. Alternatively you - echo.may add the Sphinx directory to PATH. - echo. 
- echo.If you don't have Sphinx installed, grab it from - echo.https://www.sphinx-doc.org/ - exit /b 1 -) - -if "%1" == "" goto help - -%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% -goto end - -:help -%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% - -:end -popd +@ECHO OFF + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/requirements.txt b/doc/requirements.in similarity index 52% rename from docs/requirements.txt rename to doc/requirements.in index 18ba1911..26c778f4 100644 --- a/docs/requirements.txt +++ b/doc/requirements.in @@ -1,4 +1,3 @@ -sphinxcontrib-bibtex +sphinx>=6.0.0 sphinx_rtd_theme -docutils>=0.18.1 -urllib3==1.26.15 +sphinxcontrib-bibtex diff --git a/doc/requirements.txt b/doc/requirements.txt new file mode 100644 index 00000000..5725f809 --- /dev/null +++ b/doc/requirements.txt @@ -0,0 +1,76 @@ +# +# This file is autogenerated by pip-compile with Python 3.11 +# by the following command: +# +# pip-compile requirements.in +# +alabaster==0.7.16 + # via sphinx +babel==2.14.0 + # via sphinx +certifi==2024.2.2 + # via requests +charset-normalizer==3.3.2 + # via requests +docutils==0.20.1 + # via + # pybtex-docutils + # sphinx + # sphinx-rtd-theme + # sphinxcontrib-bibtex 
+idna==3.6 + # via requests +imagesize==1.4.1 + # via sphinx +jinja2==3.1.3 + # via sphinx +latexcodec==2.0.1 + # via pybtex +markupsafe==2.1.5 + # via jinja2 +packaging==23.2 + # via sphinx +pybtex==0.24.0 + # via + # pybtex-docutils + # sphinxcontrib-bibtex +pybtex-docutils==1.0.3 + # via sphinxcontrib-bibtex +pygments==2.17.2 + # via sphinx +pyyaml==6.0.1 + # via pybtex +requests==2.31.0 + # via sphinx +six==1.16.0 + # via + # latexcodec + # pybtex +snowballstemmer==2.2.0 + # via sphinx +sphinx==7.2.6 + # via + # -r requirements.in + # sphinx-rtd-theme + # sphinxcontrib-bibtex + # sphinxcontrib-jquery +sphinx-rtd-theme==2.0.0 + # via -r requirements.in +sphinxcontrib-applehelp==1.0.8 + # via sphinx +sphinxcontrib-bibtex==2.6.2 + # via -r requirements.in +sphinxcontrib-devhelp==1.0.6 + # via sphinx +sphinxcontrib-htmlhelp==2.0.5 + # via sphinx +sphinxcontrib-jquery==4.1 + # via sphinx-rtd-theme +sphinxcontrib-jsmath==1.0.1 + # via sphinx +sphinxcontrib-qthelp==1.0.7 + # via sphinx +sphinxcontrib-serializinghtml==1.1.10 + # via sphinx +urllib3==1.26.15 + # via requests diff --git a/docs/source/BackgroundInfo/Introduction.rst b/doc/source/BackgroundInfo/Introduction.rst similarity index 91% rename from docs/source/BackgroundInfo/Introduction.rst rename to doc/source/BackgroundInfo/Introduction.rst index e71d4361..dd457d03 100644 --- a/docs/source/BackgroundInfo/Introduction.rst +++ b/doc/source/BackgroundInfo/Introduction.rst @@ -80,7 +80,7 @@ When posting a question, it is recommended that users provide the following info Bug Reports ============ -If users (especially new users) believe they have identified a bug in the system, it is recommended that they first ask about the problem in `GitHub Discussions `__, since many "bugs" do not require a code change/fix --- instead, the user may be unfamiliar with the system and/or may have misunderstood some component of the system or the instructions, which is causing the problem. 
Asking for assistance in a `GitHub Discussion `__ post can help clarify whether there is a simple adjustment to fix the problem or whether there is a genuine bug in the code. Users are also encouraged to search `open issues `__ to see if their bug has already been identified. If there is a genuine bug, and there is no open issue to address it, users can report the bug by filing a `GitHub Issue `__. +If users (especially new users) believe they have identified a bug in the system, it is recommended that they first ask about the problem in :land-wflow-repo:`GitHub Discussions `, since many "bugs" do not require a code change/fix --- instead, the user may be unfamiliar with the system and/or may have misunderstood some component of the system or the instructions, which is causing the problem. Asking for assistance in a :land-wflow-repo:`GitHub Discussion ` post can help clarify whether there is a simple adjustment to fix the problem or whether there is a genuine bug in the code. Users are also encouraged to search :land-wflow-repo:`open issues ` to see if their bug has already been identified. If there is a genuine bug, and there is no open issue to address it, users can report the bug by filing a :land-wflow-repo:`GitHub Issue `. Feature Requests and Enhancements ================================== diff --git a/doc/source/BackgroundInfo/TechnicalOverview.rst b/doc/source/BackgroundInfo/TechnicalOverview.rst new file mode 100644 index 00000000..b4d774ab --- /dev/null +++ b/doc/source/BackgroundInfo/TechnicalOverview.rst @@ -0,0 +1,230 @@ +.. _TechOverview: + +********************* +Technical Overview +********************* + +.. _prerequisites: + +Prerequisites +*************** + +Minimum System Requirements +============================== + +:term:`UFS` applications, models, and components require a UNIX-based operating system (i.e., Linux or MacOS). 
+ +Additionally, users will need: + + * Disk space: ~23GB (11GB for Land DA System [or 6.5GB for Land DA container], 11GB for Land DA data, and ~1GB for staging and output) + * 6 CPU cores (or option to run with "oversubscribe") + +Software Prerequisites +======================== + +The Land DA System requires: + + * An :term:`MPI` implementation + * A Fortran compiler + * Python + * :term:`NetCDF` + * Lmod + * `spack-stack `__ + * `jedi-bundle `__ (Skylab v4.0) + +These software prerequisites are pre-installed in the Land DA :term:`container` and on other Level 1 systems (see :ref:`below ` for details). However, users on non-Level 1 systems will need to install them. + +Before using the Land DA container, users will need to install `Singularity/Apptainer `__ and an **Intel** MPI (available `free here `__). + + +.. _LevelsOfSupport: + +Supported Systems for Running Land DA +**************************************** + +Four levels of support have been defined for :term:`UFS` applications, and the Land DA System operates under this paradigm: + +* **Level 1** *(Pre-configured)*: Prerequisite software libraries are pre-built and available in a central location; code builds; full testing of model. +* **Level 2** *(Configurable)*: Prerequisite libraries are not available in a centralized location but are expected to install successfully; code builds; full testing of model. +* **Level 3** *(Limited-test platforms)*: Libraries and code build on these systems, but there is limited testing of the model. +* **Level 4** *(Build-only platforms)*: Libraries and code build, but running the model is not tested. + +Level 1 Systems +================== +Preconfigured (Level 1) systems for Land DA already have the required external libraries available in a central location via :term:`spack-stack` and the ``jedi-bundle`` (Skylab v4.0). 
Land DA is expected to build and run out-of-the-box on these systems, and users can download the Land DA code without first installing prerequisite software. With the exception of the Land DA container, users must have access to these Level 1 systems in order to use them. + +.. COMMENT: Update spack-stack to 1.5.1 + ++-----------+-----------------------------------+-----------------------------------------------------------------+ +| Platform | Compiler/MPI | spack-stack & jedi-bundle Installations | ++===========+===================================+=================================================================+ +| Hera | intel/2022.1.2 / | /scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.3.0 | +| | | | +| | impi/2022.1.2 | /scratch2/NAGAPE/epic/UFS_Land-DA/jedi/jedi-bundle | ++-----------+-----------------------------------+-----------------------------------------------------------------+ +| Orion | intel/2022.1.2 / | /work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.3.0 | +| | | | +| | impi/2022.1.2 | /work/noaa/epic/UFS_Land-DA/jedi/jedi-bundle | ++-----------+-----------------------------------+-----------------------------------------------------------------+ +| Container | intel-oneapi-compilers/2021.8.0 / | /opt/spack-stack/ (inside the container) | +| | | | +| | intel-oneapi-mpi/2021.8.0 | /opt/jedi-bundle (inside the container) | ++-----------+-----------------------------------+-----------------------------------------------------------------+ + +Level 2-4 Systems +=================== + +On non-Level 1 platforms, the Land DA System can be run within a container that includes the prerequisite software; otherwise, the required libraries will need to be installed as part of the Land DA build process. Once these prerequisite libraries are installed, applications and models should build and run successfully. 
However, users may need to perform additional troubleshooting on Level 3 or 4 systems since little or no pre-release testing has been conducted on these systems. + +.. _repos-dir-structure: + +Code Repositories and Directory Structure +******************************************** + +.. _components: + +Hierarchical Repository Structure +=================================== + +The main repository for the Land DA System is named ``land-DA_workflow``; +it is available on GitHub at https://github.com/ufs-community/land-DA_workflow. +This :term:`umbrella repository` uses Git submodules and an ``app_build.sh`` file to pull in the appropriate versions of external repositories associated with the Land DA System. :numref:`Table %s ` describes the various subrepositories that form the UFS Land DA System. + +.. _LandDAComponents: + +.. list-table:: UFS Land DA System Components + :header-rows: 1 + + * - Land DA Submodule Name + - Repository Name + - Repository Description + - Authoritative Repository URL + * - DA_update + - land-DA + - Contains scripts and components for performing data assimilation (DA) procedures. + - https://github.com/ufs-community/land-DA/ + * - *-- add_jedi_incr* + - *-- land-apply_jedi_incr* + - Contains code that applies the JEDI-generated DA increment to UFS ``sfc_data`` restart + - https://github.com/NOAA-PSL/land-apply_jedi_incr + * - ufsLand.fd + - ufs-land-driver-emc-dev + - Repository for the UFS Land Driver + - https://github.com/NOAA-EPIC/ufs-land-driver-emc-dev + * - *-- ccpp-physics* + - *-- ccpp-physics* + - Repository for the Common Community Physics Package (CCPP) + - https://github.com/ufs-community/ccpp-physics/ + * - ufs_model.fd + - ufs-weather-model + - Repository for the UFS Weather Model (WM). This repository contains a number of subrepositories, which are documented :doc:`in the WM User's `. 
+ - https://github.com/ufs-community/ufs-weather-model/ + * - vector2tile_converter.fd + - land-vector2tile + - Contains code to map between the vector format used by the Noah-MP offline driver, and the tile format used by the UFS atmospheric model. + - https://github.com/NOAA-PSL/land-vector2tile + * - N/A + - uwtools + - Repository for the Unified Workflow (UW) Toolkit. This repository is not a Git submodule, but the build script installs UW tools, if desired, as part of the build. + - https://github.com/ufs-community/uwtools + +.. note:: + The prerequisite libraries (including NCEP Libraries and external libraries) are not included in the UFS Land DA System repository. The `spack-stack `__ repository assembles these prerequisite libraries. Spack-stack has already been built on `preconfigured (Level 1) platforms `__. However, it must be built on other systems. See the :doc:`spack-stack Documentation ` for details on installing spack-stack. + +.. _file-dir-structure: + +File & Directory Structure +============================ + +The ``land-DA_workflow`` is evolving to follow the :term:`NCEP` Central Operations (NCO) :nco:`WCOSS Implementation Standards `. When the ``develop`` branch of the ``land-DA_workflow`` repository is cloned with the ``--recursive`` argument, the specific GitHub repositories described in ``/sorc/app_build.sh`` are cloned into ``sorc``. The diagram below illustrates the file and directory structure of the Land DA System. Directories in parentheses () are only visible after the build step. Some files and directories have been removed for brevity. + +.. 
code-block:: console + + land-offline_workflow + ├── doc + ├── (exec) + ├── jobs + ├── (lib*) + ├── modulefiles + ├── parm + │ ├── check_release_outputs.sh + │ ├── land_analysis__.yaml + │ └── run_without_rocoto.sh + ├── sorc + │ ├── (build) + │ ├── cmake + │ │ ├── compiler_flags_* + │ │ └── landda_compiler_flags.cmake + │ ├── (conda) + │ ├── DA_update + │ │ ├── add_jedi_incr + │ │ ├── jedi/fv3-jedi + │ │ └── do_LandDA.sh + │ ├── test + │ ├── tile2tile_converter.fd + │ │ ├── cmake + │ │ └── config + │ ├── ufsLand.fd + │ │ └── ccpp-physics + │ ├── ufs_model.fd + │ ├── vector2tile_converter.fd + │ │ ├── cmake + │ │ └── config + │ ├── CMakeLists.txt + │ └── app_build.sh + ├── LICENSE + ├── README.md + ├── datm_cdeps_lnd_gswp3_rst + ├── do_submit_cycle.sh + ├── fv3_run + ├── incdate.sh + ├── land_mods + ├── module_check.sh + ├── release.environment + ├── run_container_executable.sh + ├── settings_DA_* + └── submit_cycle.sh + +:numref:`Table %s ` describes the contents of the most important Land DA subdirectories. :numref:`Section %s ` describes the Land DA System components. Users can reference the :nco:`NCO Implementation Standards ` (p. 19) for additional details on repository structure in NCO-compliant repositories. + +.. _Subdirectories: + +.. 
list-table:: *Subdirectories of the land-DA_workflow repository* + :widths: 20 50 + :header-rows: 1 + + * - Directory Name + - Description + * - doc + - Repository documentation + * - exec + - Binary executables + * - jobs + - :term:`J-job ` scripts launched by Rocoto + * - lib + - Model-specific libraries + * - modulefiles + - Files that load the modules required for building and running the workflow + * - parm + - Parameter files used to configure the model, physics, workflow, and various components + * - scripts + - Scripts launched by the :term:`J-jobs` + * - sorc + - External source code used to build the Land DA System + * - versions + - Contain run.ver and build.ver, which are files that get automatically sourced in order to track package versions at run and compile time respectively. + +.. _land-component: + +The UFS Land Component +========================= + +The UFS Land DA System has been updated to build the UFS Noah-MP land component as part of the build process. +Updates allowing the Land DA System to run with the land component are underway. + +The land component makes use of a National Unified Operational Prediction Capability (:term:`NUOPC`) cap to interface with a coupled modeling system. +Unlike the standalone Noah-MP land driver, the Noah-MP :term:`NUOPC cap` is able to create an :term:`ESMF` multi-tile grid by reading in a mosaic grid file. For the domain, the :term:`FMS` initializes reading and writing of the cubed-sphere tiled output. Then, the Noah-MP land component reads static information and initial conditions (e.g., surface albedo) and interpolates the data to the date of the simulation. The solar zenith angle is calculated based on the time information. + +Unified Workflow (UW) Tools +============================ +The Unified Workflow (UW) is a set of tools intended to unify the workflow for various UFS applications under one framework. 
The UW toolkit currently includes rocoto, template, and configuration (config) tools, which are being incorporated into the Land DA workflow. Additional tools are under development. More details about UW tools can be found in the `uwtools ` GitHub repository and in the :uw:`UW Documentation <>`. \ No newline at end of file diff --git a/docs/source/BackgroundInfo/index.rst b/doc/source/BackgroundInfo/index.rst similarity index 100% rename from docs/source/BackgroundInfo/index.rst rename to doc/source/BackgroundInfo/index.rst diff --git a/doc/source/BuildingRunningTesting/BuildRunLandDA.rst b/doc/source/BuildingRunningTesting/BuildRunLandDA.rst new file mode 100644 index 00000000..92f4c0eb --- /dev/null +++ b/doc/source/BuildingRunningTesting/BuildRunLandDA.rst @@ -0,0 +1,295 @@ +.. _BuildRunLandDA: + +************************************ +Land DA Workflow (Hera & Orion) +************************************ + +This chapter provides instructions for building and running basic Land DA cases for the Unified Forecast System (:term:`UFS`) Land DA System. Users can choose between two options: + + * A Dec. 21, 2019 00z sample case using ERA5 data with the UFS Land Driver (``land_analysis_era5_``) + * A Jan. 3, 2000 00z sample case using GSWP3 data with the UFS Noah-MP land component (``land_analysis_gswp3_``). + +.. attention:: + + These steps are designed for use on :ref:`Level 1 ` systems (i.e., Hera and Orion) and may require significant changes on other systems. It is recommended that users on other systems run the containerized version of Land DA. Users may reference :numref:`Chapter %s: Containerized Land DA Workflow ` for instructions. + +Create a Working Directory +***************************** + +Create a directory for the Land DA experiment (``$LANDDAROOT``): + +.. code-block:: console + + mkdir /path/to/landda + cd /path/to/landda + export LANDDAROOT=`pwd` + +where ``/path/to/landda`` is the path to the directory where the user plans to run Land DA experiments. 
+ +.. _GetCode: + +Get Code +*********** + +Clone the Land DA repository. To clone the ``develop`` branch, run: + +.. code-block:: console + + git clone -b develop --recursive https://github.com/ufs-community/land-DA_workflow.git + +To clone the most recent release, run the same command with |branch| in place of ``develop``: + +.. code-block:: console + + git clone -b release/public-v1.2.0 --recursive https://github.com/ufs-community/land-DA_workflow.git + +.. _build-land-da: + +Build the Land DA System +*************************** + +#. Navigate to the ``sorc`` directory. + + .. code-block:: console + + cd $LANDDAROOT/land-DA_workflow/sorc + +#. Run the build script ``app_build.sh``: + + .. code-block:: console + + ./app_build.sh + + If the code successfully compiles, the console output should end with: + + .. code-block:: console + + [100%] Completed 'ufs_model.fd' + [100%] Built target ufs_model.fd + ... Moving pre-compiled executables to designated location ... + + Additionally, the ``exec`` directory will contain the following executables: + + * ``apply_incr.exe`` + * ``ufsLand.exe`` + * ``vector2tile_converter.exe`` + * ``tile2tile_converter.exe`` + * ``ufs_model`` + +.. _config-wflow: + +Configure an Experiment +************************* + +.. _load-env: + +Load the Workflow Environment +=============================== + +To load the workflow environment, run: + +.. code-block:: console + + cd $LANDDAROOT/land-DA_workflow + source versions/build.ver_ + module use modulefiles + module load wflow_ + conda activate land_da + +where ```` is ``hera`` or ``orion``. + +Modify the Workflow Configuration YAML +======================================== + +The ``develop`` branch includes two default experiments: + + * A Dec. 21, 2019 00z sample case using the UFS Land Driver. + * A Jan. 3, 2000 00z sample case using the UFS Noah-MP land component. + +Copy the experiment settings into ``land_analysis.yaml``: + +.. 
code-block:: console + + cd $LANDDAROOT/land-DA_workflow/parm + cp land_analysis__.yaml land_analysis.yaml + +where: + + * ```` is either ``gswp3`` or ``era5`` forcing data. + * ```` is ``hera`` or ``orion``. + +Users will need to configure certain elements of their experiment in ``land_analysis.yaml``: + + * ``MACHINE:`` A valid machine name (i.e., ``hera`` or ``orion``) + * ``ACCOUNT:`` A valid account name. Hera, Orion, and most NOAA RDHPCS systems require a valid account name; other systems may not + * ``EXP_BASEDIR:`` The full path to the directory where land-DA_workflow was cloned (i.e., ``$LANDDAROOT``) + * ``JEDI_INSTALL:`` The full path to the system's ``jedi-bundle`` installation + * ``LANDDA_INPUTS:`` The full path to the experiment data. See :ref:`Data ` below for information on prestaged data on Level 1 platforms. + * ``OUTDIR:`` The full path to the directory where experiment will write its output. By default, this is set to ``"&EXP_BASEDIR;/landda_expts/DA__test"``, but users can change the ``DA__test`` portion to a name of their choice. If users do not change the name, the new experiment will overwrite data from the previous experiment. + +.. note:: + + To determine an appropriate ``ACCOUNT`` field for Level 1 systems that use the Slurm job scheduler, run ``saccount_params``. On other systems, running ``groups`` will return a list of projects that the user has permissions for. Not all listed projects/groups have an HPC allocation, but those that do are potentially valid account names. + +Users may configure other elements of an experiment in ``land_analysis.yaml`` if desired. The ``land_analysis_*`` files contain reasonable default values for running a Land DA experiment. Users who wish to run a more complex experiment may change the values in these files and the files they reference using information in Sections :numref:`%s ` & :numref:`%s `. + +.. 
_GetData: + +Data +------ + +:numref:`Table %s ` shows the locations of pre-staged data on NOAA :term:`RDHPCS` (i.e., Hera and Orion). + +.. _Level1Data: + +.. table:: Level 1 RDHPCS Data + + +-----------+--------------------------------------------------+ + | Platform | Data Location | + +===========+==================================================+ + | Hera | /scratch2/NAGAPE/epic/UFS_Land-DA/inputs | + +-----------+--------------------------------------------------+ + | Orion | /work/noaa/epic/UFS_Land-DA/inputs | + +-----------+--------------------------------------------------+ + +Users who have difficulty accessing the data on Hera or Orion may download it according to the instructions in :numref:`Section %s ` and set ``LANDDA_INPUTS`` to point to the location of the downloaded data. Similarly, users with access to data for additional experiments may set the path to that data in ``LANDDA_INPUTS``. + +.. _generate-wflow: + +Generate the Rocoto XML File +============================== + +Generate the workflow with ``uwtools`` by running: + +.. code-block:: console + + uw rocoto realize --input-file land_analysis.yaml --output-file land_analysis.xml + +If the command runs without problems, ``uwtools`` will output a "0 errors found" message similar to the following: + +.. code-block:: console + + [2024-03-01T20:36:03] INFO 0 UW schema-validation errors found + [2024-03-01T20:36:03] INFO 0 Rocoto validation errors found + +Run the Experiment +******************** + +.. _wflow-overview: + +Workflow Overview +================== + +Each Land DA experiment includes multiple tasks that must be run in order to satisfy the dependencies of later tasks. These tasks are housed in the :term:`J-job ` scripts contained in the ``jobs`` directory. + +.. 
list-table:: *J-job Tasks in the Land DA Workflow* + :header-rows: 1 + + * - J-job Task + - Description + * - JLANDDA_PREP_EXP + - Sets up the experiment + * - JLANDDA_PREP_OBS + - Sets up the observation files + * - JLANDDA_PREP_BMAT + - Sets up the :term:`JEDI` run + * - JLANDDA_RUN_ANA + - Runs JEDI + * - JLANDDA_RUN_FCST + - Runs forecast + +Users may run these tasks :ref:`using the Rocoto workflow manager ` or :ref:`using a batch script `. + +.. _run-w-rocoto: + +Run With Rocoto +================= + +.. note:: + + Users who do not have Rocoto installed on their system can view :numref:`Section %s: Run Without Rocoto `. + +To run the experiment, issue a ``rocotorun`` command from the ``parm`` directory: + +.. code-block:: console + + rocotorun -w land_analysis.xml -d land_analysis.db + +.. _VerifySuccess: + +Track Experiment Status +------------------------- + +To view the experiment status, run: + +.. code-block:: console + + rocotostat -w land_analysis.xml -d land_analysis.db + +If ``rocotorun`` was successful, the ``rocotostat`` command will print a status report to the console. For example: + +.. code-block:: console + + CYCLE TASK JOBID STATE EXIT STATUS TRIES DURATION + ====================================================================================================== + 200001030000 prepexp druby://hfe08:41879 SUBMITTING - 2 0.0 + 200001030000 prepobs - - - - - + 200001030000 prepbmat - - - - - + 200001030000 runana - - - - - + 200001030000 runfcst - - - - - + +Users will need to issue the ``rocotorun`` command multiple times. The tasks must run in order, and ``rocotorun`` initiates the next task once its dependencies have completed successfully. Note that the status table printed by ``rocotostat`` only updates after each ``rocotorun`` command. For each task, a log file is generated. These files are stored in ``$LANDDAROOT/com/output/logs/run_``, where ```` is either ``gswp3`` or ``era5``. 
+ +The experiment has successfully completed when all tasks say SUCCEEDED under STATE. Other potential statuses are: QUEUED, SUBMITTING, RUNNING, and DEAD. Users may view the log files to determine why a task may have failed. + +.. _run-batch-script: + +Run Without Rocoto +-------------------- + +Users may choose not to run the workflow with uwtools and Rocoto. To run the :term:`J-jobs` scripts in the ``jobs`` directory, navigate to the ``parm`` directory and edit ``run_without_rocoto.sh`` (e.g., using vim or preferred command line editor). Users will likely need to change the ``MACHINE``, ``ACCOUNT``, and ``EXP_BASEDIR`` variables to match their system. Then, run ``run_without_rocoto.sh``: + +.. code-block:: console + + cd $LANDDAROOT/land-DA_workflow/parm + sbatch run_without_rocoto.sh + +Check Experiment Output +========================= + +As the experiment progresses, it will generate a number of directories to hold intermediate and output files. The directory structure for those files and directories appears below: + +.. code-block:: console + + $LANDDAROOT: Base directory + ├── land-DA_workflow(): Home directory of the land DA workflow + ├── com + │ ├── landda () + │ │ └── vX.Y.Z () + │ │ └── DA_ () + │ │ ├── DA: Directory containing the output files of JEDI run + │ │ │ ├── hofx + │ │ │ ├── jedi_incr + │ │ │ └── logs + │ │ └── mem000: Directory containing the output files + │ └── output + │ └── logs + │ └── run_ (): Directory containing the log file of the Rocoto workflow + └── workdir() + └── run_ + └── mem000: Working directory + +```` refers to the type of forcing data used (``gswp3`` or ``era5``). Each variable in parentheses and angle brackets (e.g., ``()``) is the name for the directory defined in the file ``land_analysis.yaml``. In the future, this directory structure will be further modified to meet the :nco:`NCO Implementation Standards<>`. + +Check for the background and analysis files in the experiment directory: + +.. 
code-block:: console + + ls -l $LANDDAROOT/com/landda/v1.2.1/run_/mem000/restarts/ + +where: + + * ```` is either ``era5`` or ``gswp3``, and + * ```` is either ``vector`` or ``tile`` depending on whether ERA5 or GSWP3 forcing data was used, respectively. + +The experiment should generate several restart files. diff --git a/docs/source/BuildingRunningTesting/Container.rst b/doc/source/BuildingRunningTesting/Container.rst similarity index 100% rename from docs/source/BuildingRunningTesting/Container.rst rename to doc/source/BuildingRunningTesting/Container.rst diff --git a/docs/source/BuildingRunningTesting/TestingLandDA.rst b/doc/source/BuildingRunningTesting/TestingLandDA.rst similarity index 100% rename from docs/source/BuildingRunningTesting/TestingLandDA.rst rename to doc/source/BuildingRunningTesting/TestingLandDA.rst diff --git a/docs/source/BuildingRunningTesting/index.rst b/doc/source/BuildingRunningTesting/index.rst similarity index 100% rename from docs/source/BuildingRunningTesting/index.rst rename to doc/source/BuildingRunningTesting/index.rst diff --git a/docs/source/CustomizingTheWorkflow/DASystem.rst b/doc/source/CustomizingTheWorkflow/DASystem.rst similarity index 100% rename from docs/source/CustomizingTheWorkflow/DASystem.rst rename to doc/source/CustomizingTheWorkflow/DASystem.rst diff --git a/docs/source/CustomizingTheWorkflow/Model.rst b/doc/source/CustomizingTheWorkflow/Model.rst similarity index 100% rename from docs/source/CustomizingTheWorkflow/Model.rst rename to doc/source/CustomizingTheWorkflow/Model.rst diff --git a/docs/source/CustomizingTheWorkflow/index.rst b/doc/source/CustomizingTheWorkflow/index.rst similarity index 100% rename from docs/source/CustomizingTheWorkflow/index.rst rename to doc/source/CustomizingTheWorkflow/index.rst diff --git a/doc/source/Reference/FAQ.rst b/doc/source/Reference/FAQ.rst new file mode 100644 index 00000000..6b6d28fa --- /dev/null +++ b/doc/source/Reference/FAQ.rst @@ -0,0 +1,41 @@ +.. 
_FAQ: + +********************************* +Frequently Asked Questions (FAQ) +********************************* + +.. contents:: + :depth: 2 + :local: + +.. _RestartTask: + +How do I restart a DEAD task? +============================= + +On platforms that utilize Rocoto workflow software (including Hera and Orion), if something goes wrong with the workflow, a task may end up in the DEAD state: + +.. code-block:: console + + rocotostat -w land_analysis.xml -d land_analysis.db + CYCLE TASK JOBID STATE EXIT STATUS TRIES DURATION + ============================================================================= + 200001030000 prepexp 16779414 SUCCEEDED 0 1 11.0 + 200001030000 prepobs 16779415 SUCCEEDED 0 1 0.0 + 200001030000 prepbmat 16779416 SUCCEEDED 0 1 9.0 + 200001030000 runana 16779434 SUCCEEDED 0 1 68.0 + 200001030000 runfcst - DEAD 256 1 2186.0 + + +This means that the dead task has not completed successfully, so the workflow has stopped. Once the issue has been identified and fixed (by referencing the log files), users can re-run the failed task using the ``rocotorewind`` command: + +.. COMMENT: Where are the log files actually? + +.. code-block:: console + + rocotorewind -w land_analysis.xml -d land_analysis.db -v 10 -c 200001030000 -t runfcst + +where ``-c`` specifies the cycle date (first column of ``rocotostat`` output) and ``-t`` represents the task name +(second column of ``rocotostat`` output). After using ``rocotorewind``, the next time ``rocotorun`` is used to +advance the workflow, the job will be resubmitted. + diff --git a/docs/source/Reference/Glossary.rst b/doc/source/Reference/Glossary.rst similarity index 89% rename from docs/source/Reference/Glossary.rst rename to doc/source/Reference/Glossary.rst index 97009da0..a8d1f111 100644 --- a/docs/source/Reference/Glossary.rst +++ b/doc/source/Reference/Glossary.rst @@ -24,6 +24,9 @@ Glossary ESMF `Earth System Modeling Framework `__. 
The ESMF defines itself as "a suite of software tools for developing high-performance, multi-component Earth science modeling applications." It is a community-developed software infrastructure for building and coupling models. + ex-scripts + Scripting layer (contained in ``land-DA_workflow/scripts/``) that should be called by a :term:`J-job ` for each workflow component to run a specific task or sub-task in the workflow. The different scripting layers are described in detail in the :nco:`NCO Implementation Standards document `. + FMS The Flexible Modeling System (`FMS `__) is a software framework for supporting the efficient development, construction, execution, and scientific interpretation of atmospheric, @@ -38,6 +41,9 @@ Glossary GSWP3 The Global Soil Wetness Project Phase 3 (`GSWP3 `__) dataset is a century-long comprehensive set of data documenting several variables for hydro-energy-eco systems. + J-jobs + Scripting layer (contained in ``land-DA_workflow/jobs/``) that should be directly called for each workflow component (either on the command line or by the workflow manager) to run a specific task in the workflow. The different scripting layers are described in detail in the :nco:`NCO Implementation Standards document `. + JEDI The Joint Effort for Data assimilation Integration (`JEDI `__) is a unified and versatile data assimilation (DA) system for Earth System Prediction. It aims to enable efficient research and accelerated transition from research to operations by providing a framework that takes into account all components of the Earth system in a consistent manner. The JEDI software package can run on a variety of platforms and for a variety of purposes, and it is designed to readily accommodate new atmospheric and oceanic models and new observation systems. The `JEDI User's Guide `__ contains extensive information on the software.
@@ -55,6 +61,12 @@ Glossary netCDF NetCDF (`Network Common Data Form `__) is a file format and community standard for storing multidimensional scientific data. It includes a set of software libraries and machine-independent data formats that support the creation, access, and sharing of array-oriented scientific data. + NCEP + National Centers for Environmental Prediction (NCEP) is an arm of the National Weather Service consisting of nine centers. More information can be found at https://www.ncep.noaa.gov. + + NCO + :term:`NCEP` Central Operations. Visit the `NCO website `__ for more information. + NUOPC National Unified Operational Prediction Capability The `National Unified Operational Prediction Capability `__ is a consortium of Navy, NOAA, and Air Force modelers and their research partners. It aims to advance the weather modeling systems used by meteorologists, mission planners, and decision makers. NUOPC partners are working toward a common model architecture --- a standard way of building models --- in order to make it easier to collaboratively build modeling systems. @@ -82,5 +94,8 @@ Glossary UFS The Unified Forecast System (UFS) is a community-based, coupled, comprehensive Earth modeling system consisting of several applications (apps). These apps span regional to global domains and sub-hourly to seasonal time scales. The UFS is designed to support the :term:`Weather Enterprise` and to be the source system for NOAA's operational numerical weather prediction applications. For more information, visit https://ufscommunity.org/. + Umbrella repository + A repository that houses external code, or “externals,” from additional repositories. + Weather Enterprise Individuals and organizations from public, private, and academic sectors that contribute to the research, development, and production of weather forecast products; primary consumers of these weather forecast products. 
\ No newline at end of file diff --git a/docs/source/Reference/index.rst b/doc/source/Reference/index.rst similarity index 92% rename from docs/source/Reference/index.rst rename to doc/source/Reference/index.rst index f2c320a6..82b5f7b5 100644 --- a/docs/source/Reference/index.rst +++ b/doc/source/Reference/index.rst @@ -6,4 +6,5 @@ Reference .. toctree:: :maxdepth: 3 + FAQ Glossary diff --git a/docs/source/_static/custom.css b/doc/source/_static/custom.css similarity index 100% rename from docs/source/_static/custom.css rename to doc/source/_static/custom.css diff --git a/docs/source/_static/theme_overrides.css b/doc/source/_static/theme_overrides.css similarity index 100% rename from docs/source/_static/theme_overrides.css rename to doc/source/_static/theme_overrides.css diff --git a/doc/source/conf.py b/doc/source/conf.py new file mode 100644 index 00000000..d10d8877 --- /dev/null +++ b/doc/source/conf.py @@ -0,0 +1,129 @@ +# Configuration file for the Sphinx documentation builder. +# +# For the full list of built-in configuration values, see the documentation: +# https://www.sphinx-doc.org/en/master/usage/configuration.html + +# -- Project information ----------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information + +project = "UFS Offline Land DA User's Guide" +copyright = '2023, ' +author = ' ' + +# The short X.Y version +version = 'v1.2' +# The full version, including alpha/beta/rc tags +release = 'v1.2.0' + +numfig = True + +# -- General configuration --------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration + +extensions = [ + 'sphinx_rtd_theme', + 'sphinx.ext.intersphinx', + 'sphinx.ext.extlinks', + 'sphinxcontrib.bibtex', +] + +# File with bibliographic info +bibtex_bibfiles = ['references.bib'] + +# Add any paths that contain templates here, relative to this directory. 
+templates_path = ['_templates'] + +# The master toctree document. +master_doc = 'index' + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +exclude_patterns = [] + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# Documentation-wide substitutions + +rst_prolog = """ +.. |latestr| replace:: v1.2.0 +.. |tag| replace:: ``ufs-land-da-v1.2.0`` +.. |branch| replace:: ``release/public-v1.2.0`` +""" + +# -- Linkcheck options ------------------------------------------------- + +# Avoid a 403 Forbidden error when accessing certain links (e.g., noaa.gov) +# Can be found using navigator.userAgent inside a browser console. +user_agent = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/121.0.0.0 Safari/537.36" + +# Ignore working links that cause a linkcheck 403 error. +linkcheck_ignore = [r'https://www\.intel\.com/content/www/us/en/developer/tools/oneapi/hpc\-toolkit\-download\.html', + r'https://doi.org/10.1029/.*', + r'https://doi.org/10.1002/.*', + ] + +# Ignore anchor tags for Land DA data bucket. Shows Not Found even when they exist. 
+linkcheck_anchors_ignore = [] + +linkcheck_allowed_redirects = {r"https://github.com/ufs-community/land-DA_workflow/wiki/.*": + r"https://raw.githubusercontent.com/wiki/ufs-community/land-DA_workflow/.*", + r"https://github.com/ufs-community/land-DA_workflow/issues/new": + r"https://github.com/login.*", + r"https://doi.org/10.5281/zenodo.*": + r"https://zenodo.org/records/*", + r"https://doi.org/10.25923/RB19-0Q26": + r"https://repository.library.noaa.gov/view/noaa/22752", + r"https://doi.org/10.1016/j.physd.2006.11.008": + r"https://linkinghub.elsevier.com/retrieve/pii/S0167278906004647", + r"https://doi.org/.*/.*": + r"https://journals.ametsoc.org:443/view/journals/.*/.*/.*/.*", + } + + +# -- Options for HTML output ------------------------------------------------- +# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output + +# The theme to use for HTML and HTML Help pages. +html_theme = 'sphinx_rtd_theme' +html_theme_path = ["_themes", ] +html_logo= "https://github.com/ufs-community/ufs/wiki/images/ufs-epic-logo.png" + +# Theme options are theme-specific and customize the look and feel of a theme +# further. +html_theme_options = { + "body_max_width": "none", + 'navigation_depth': 6, + } + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". 
+html_static_path = ['_static'] +html_context = {} + +def setup(app): + app.add_css_file('custom.css') # may also be an URL + app.add_css_file('theme_overrides.css') # may also be a URL + +# -- Options for intersphinx extension --------------------------------------- + +intersphinx_mapping = { + 'jedi': ('https://jointcenterforsatellitedataassimilation-jedi-docs.readthedocs-hosted.com/en/1.7.0', None), + 'spack-stack': ('https://spack-stack.readthedocs.io/en/1.3.0/', None), + 'ufs-wm': ('https://ufs-weather-model.readthedocs.io/en/develop/', None), + 'gswp3': ('https://hydro.iis.u-tokyo.ac.jp/GSWP3/', None), +} + +# -- Options for extlinks extension --------------------------------------- + +extlinks_detect_hardcoded_links = True +extlinks = {'github-docs': ('https://docs.github.com/en/%s', '%s'), + 'nco': ('https://www.nco.ncep.noaa.gov/idsb/implementation_standards/%s', '%s'), + 'rst': ('https://www.sphinx-doc.org/en/master/usage/restructuredtext/%s', '%s'), + 'rtd': ('https://readthedocs.org/projects/land-da-workflow/%s', '%s'), + 'land-wflow-repo': ('https://github.com/ufs-community/land-DA_workflow/%s', '%s'), + 'land-wflow-wiki': ('https://github.com/ufs-community/land-DA_workflow/wiki/%s','%s'), + 'uw': ('https://uwtools.readthedocs.io/en/main/%s', '%s'), + } diff --git a/docs/source/index.rst b/doc/source/index.rst similarity index 100% rename from docs/source/index.rst rename to doc/source/index.rst diff --git a/docs/source/references.bib b/doc/source/references.bib similarity index 99% rename from docs/source/references.bib rename to doc/source/references.bib index fd1c2d88..50c565bc 100644 --- a/docs/source/references.bib +++ b/doc/source/references.bib @@ -228,7 +228,7 @@ @article{Brasnett1999 volume={38}, number={6}, pages={726--740}, - doi={10.1175/1520-0450(1999)038<0726:AGAOSD>2.0.CO;2}, + doi={10.1175/1520-0450(1999)038%3C0726:AGAOSD%3E2.0.CO;2}, } @techreport{Jordan1991, diff --git a/docs/source/BackgroundInfo/TechnicalOverview.rst 
b/docs/source/BackgroundInfo/TechnicalOverview.rst deleted file mode 100644 index ca1b9a3b..00000000 --- a/docs/source/BackgroundInfo/TechnicalOverview.rst +++ /dev/null @@ -1,158 +0,0 @@ -.. _TechOverview: - -********************* -Technical Overview -********************* - -Prerequisites -*************** - -Minimum System Requirements -============================== - -:term:`UFS` applications, models, and components require a UNIX-based operating system (i.e., Linux or MacOS). - -Additionally, users will need: - - * Disk space: ~23GB (11GB for Land DA System [or 6.5GB for Land DA container], 11GB for Land DA data, and ~1GB for staging and output) - * 6 CPU cores (or option to run with "oversubscribe") - -Software Prerequisites -======================== - -The Land DA System requires: - - * An :term:`MPI` implementation - * A Fortran compiler - * Python - * :term:`NetCDF` - * Lmod - * `spack-stack `__ - * `jedi-bundle `__ (Skylab v4.0) - -These software prerequisites are pre-installed in the Land DA :term:`container` and on other Level 1 systems (see :ref:`below ` for details). However, users on non-Level 1 systems will need to install them. - -Before using the Land DA container, users will need to install `Singularity/Apptainer `__ and an **Intel** MPI (available `free here `__). - - -.. _LevelsOfSupport: - -Supported Systems for Running Land DA -**************************************** - -Four levels of support have been defined for :term:`UFS` applications, and the Land DA System operates under this paradigm: - -* **Level 1** *(Pre-configured)*: Prerequisite software libraries are pre-built and available in a central location; code builds; full testing of model. -* **Level 2** *(Configurable)*: Prerequisite libraries are not available in a centralized location but are expected to install successfully; code builds; full testing of model. 
-* **Level 3** *(Limited-test platforms)*: Libraries and code build on these systems, but there is limited testing of the model. -* **Level 4** *(Build-only platforms)*: Libraries and code build, but running the model is not tested. - -Level 1 Systems -================== -Preconfigured (Level 1) systems for Land DA already have the required external libraries available in a central location via :term:`spack-stack` and the ``jedi-bundle`` (Skylab v4.0). Land DA is expected to build and run out-of-the-box on these systems, and users can download the Land DA code without first installing prerequisite software. With the exception of the Land DA container, users must have access to these Level 1 systems in order to use them. - -+-----------+-----------------------------------+-----------------------------------------------------------------+ -| Platform | Compiler/MPI | spack-stack & jedi-bundle Installations | -+===========+===================================+=================================================================+ -| Hera | intel/2022.1.2 / | /scratch1/NCEPDEV/nems/role.epic/spack-stack/spack-stack-1.3.0 | -| | | | -| | impi/2022.1.2 | /scratch2/NAGAPE/epic/UFS_Land-DA/jedi/jedi-bundle | -+-----------+-----------------------------------+-----------------------------------------------------------------+ -| Orion | intel/2022.1.2 / | /work/noaa/epic/role-epic/spack-stack/orion/spack-stack-1.3.0 | -| | | | -| | impi/2022.1.2 | /work/noaa/epic/UFS_Land-DA/jedi/jedi-bundle | -+-----------+-----------------------------------+-----------------------------------------------------------------+ -| Container | intel-oneapi-compilers/2021.8.0 / | /opt/spack-stack/ (inside the container) | -| | | | -| | intel-oneapi-mpi/2021.8.0 | /opt/jedi-bundle (inside the container) | -+-----------+-----------------------------------+-----------------------------------------------------------------+ - -Level 2-4 Systems -=================== - -On non-Level 1 platforms, the Land DA 
System can be run within a container that includes the prerequisite software; otherwise, the required libraries will need to be installed as part of the Land DA build process. Once these prerequisite libraries are installed, applications and models should build and run successfully. However, users may need to perform additional troubleshooting on Level 3 or 4 systems since little or no pre-release testing has been conducted on these systems. - -Code Repositories and Directory Structure -******************************************** - -File & Directory Structure -============================ - -The main repository for the Land DA System is named ``land-DA_workflow``; -it is available on GitHub at https://github.com/ufs-community/land-DA_workflow. -The ``land-DA_workflow`` repository contains a few nested submodules. -When the ``develop`` branch of the ``land-DA_workflow`` repository -is cloned with the ``--recursive`` argument, the basic directory structure will be -similar to the example below. Some files and directories have been removed for brevity. -Directories in parentheses () are only visible after the build step. - -.. code-block:: console - - land-offline_workflow - ├── DA_update - │ ├── add_jedi_incr - │ ├── jedi/fv3-jedi - │ └── do_LandDA.sh - ├── cmake - ├── configures - ├── docs - ├── modulefiles - ├── test - ├── tile2tile - ├── ufs-land-driver-emc-dev - │ └── ccpp-physics - ├── (ufs-weather-model) - ├── vector2tile - ├── CMakeLists.txt - ├── README.md - ├── LICENSE - ├── check_* - ├── do_submit_cycle.sh - ├── release.environment - ├── settings_DA_* - ├── submit_cycle.sh - └── template.* - -Land DA Components -===================== - -:numref:`Table %s ` describes the various subrepositories that form -the UFS Land DA System. - -.. _LandDAComponents: - -.. 
table:: UFS Land DA System Components - - +--------------------------+-----------------------------------------+------------------------------------------------------+ - | Repository Name | Repository Description | Authoritative repository URL | - +==========================+=========================================+======================================================+ - | DA_update | Contains scripts and components for | https://github.com/ufs-community/land-DA/ | - | | performing data assimilation (DA) | | - | | procedures. | | - +--------------------------+-----------------------------------------+------------------------------------------------------+ - | *-- land-apply_jedi_incr*| Contains code that applies the | https://github.com/NOAA-PSL/land-apply_jedi_incr | - | | JEDI-generated DA increment to UFS | | - | | ``sfc_data`` restart | | - +--------------------------+-----------------------------------------+------------------------------------------------------+ - | ufs-land-driver-emc-dev | Repository for the UFS Land | https://github.com/NOAA-EPIC/ufs-land-driver-emc-dev | - | | Driver | | - +--------------------------+-----------------------------------------+------------------------------------------------------+ - | *-- ccpp-physics* | Repository for the Common | https://github.com/ufs-community/ccpp-physics/ | - | | Community Physics Package (CCPP) | | - | | | | - +--------------------------+-----------------------------------------+------------------------------------------------------+ - | land-vector2tile | Contains code to map between the vector | https://github.com/NOAA-PSL/land-vector2tile | - +--------------------------+-----------------------------------------+------------------------------------------------------+ - -The UFS Land Component -========================= - -The UFS Land DA System has been updated to build the UFS Noah-MP land component as part of the build process. 
-Updates allowing the Land DA System to run with the land component are underway. - -The land component makes use of a National Unified Operational Prediction Capability (:term:`NUOPC`) cap to interface with a coupled modeling system. -Unlike the standalone Noah-MP land driver, the Noah-MP :term:`NUOPC cap` is able to create an :term:`ESMF` multi-tile grid by reading in a mosaic grid file. For the domain, the :term:`FMS` initializes reading and writing of the cubed-sphere tiled output. Then, the Noah-MP land component reads static information and initial conditions (e.g., surface albedo) and interpolates the data to the date of the simulation. The solar zenith angle is calculated based on the time information. - - - - diff --git a/docs/source/BuildingRunningTesting/BuildRunLandDA.rst b/docs/source/BuildingRunningTesting/BuildRunLandDA.rst deleted file mode 100644 index 9e61b705..00000000 --- a/docs/source/BuildingRunningTesting/BuildRunLandDA.rst +++ /dev/null @@ -1,254 +0,0 @@ -.. _BuildRunLandDA: - -************************************ -Land DA Workflow (Hera & Orion) -************************************ - -This chapter provides instructions for building and running basic Land DA cases for the Unified Forecast System (:term:`UFS`) Land DA System. Users can choose between two options: - - * A Dec. 21, 2019 00z sample case using ERA5 data with the UFS Land Driver (``settings_DA_cycle_era5``) - * A Jan. 3, 2000 00z sample case using GSWP3 data with the UFS Noah-MP land component (``settings_DA_cycle_gswp3``). - -.. attention:: - - These steps are designed for use on :ref:`Level 1 ` systems (i.e., Hera and Orion) and may require significant changes on other systems. It is recommended that users on other systems run the containerized version of Land DA. Users may reference :numref:`Chapter %s: Containerized Land DA Workflow ` for instructions. 
- -Create a Working Directory -***************************** - -Create a directory for the Land DA experiment (``$LANDDAROOT``): - -.. code-block:: console - - mkdir /path/to/landda - cd /path/to/landda - export LANDDAROOT=`pwd` - -where ``/path/to/landda`` is the path to the directory where the user plans to run Land DA experiments. - -.. _GetData: - -Get Data -*********** - -:numref:`Table %s ` shows the locations of pre-staged data on NOAA :term:`RDHPCS` (i.e., Hera and Orion). - -.. _Level1Data: - -.. table:: Level 1 RDHPCS Data - - +-----------+--------------------------------------------------+ - | Platform | Data Location | - +===========+==================================================+ - | Hera | /scratch2/NAGAPE/epic/UFS_Land-DA/inputs | - +-----------+--------------------------------------------------+ - | Orion | /work/noaa/epic/UFS_Land-DA/inputs | - +-----------+--------------------------------------------------+ - -Users can either set the ``LANDDA_INPUTS`` environment variable to the location of their system's pre-staged data or use a soft link to the data. For example, on Hera, users may set: - -.. code-block:: console - - export LANDDA_INPUTS=/scratch2/NAGAPE/epic/UFS_Land-DA/inputs - -Alternatively, users can add a soft link to the data. For example, on Orion: - -.. code-block:: console - - cd $LANDDAROOT - ln -fs /work/noaa/epic/UFS_Land-DA/inputs - -Users who have difficulty accessing the data on Hera or Orion may download it according to the instructions in :numref:`Section %s `. Users with access to data for additional experiments may use the same process described above to point or link to that data by modifying the path to the data appropriately. - -Users who are not using Land DA on Hera or Orion should view :numref:`Chapter %s ` for instructions on running the containerized version of Land DA. :numref:`Section %s ` explains options for downloading the sample data onto their system. 
- -Get Code -*********** - -Clone the Land DA repository. To clone the ``develop`` branch, run: - -.. code-block:: console - - git clone -b develop --recursive https://github.com/ufs-community/land-DA_workflow.git - -To clone the most recent release, run the same command with |branch| in place of ``develop``: - -.. code-block:: console - - git clone -b release/public-v1.2.0 --recursive https://github.com/ufs-community/land-DA_workflow.git - -.. _build-land-da: - -Build the Land DA System -*************************** - -#. Navigate to the ``sorc`` directory. - - .. code-block:: console - - cd $LANDDAROOT/land-DA_workflow/sorc - -#. Run the build script ``app_build.sh``: - - .. code-block:: console - - ./app_build.sh - - If the code successfully compiles, the console output should end with: - - .. code-block:: console - - [100%] Completed 'ufs-weather-model' - [100%] Built target ufs-weather-model - - Additionally, the ``exec`` directory will contain the following executables: - - * ``apply_incr.exe`` - * ``ufsLand.exe`` - * ``vector2tile_converter.exe`` - * ``tile2tile_converter.exe`` - * ``ufs_model`` - - -Configure the Experiment -*************************** - -The ``develop`` branch includes two scripts with default experiment settings: - - * ``settings_DA_cycle_era5`` for running a Dec. 21, 2019 00z sample case with the UFS Land Driver. - * ``settings_DA_cycle_gswp3`` for running a Jan. 3, 2000 00z sample case with the UFS Noah-MP land component. - -To configure an experiment: - -#. Navigate back to the ``land-DA_workflow`` directory and check that the account, queue, and partition are correct in ``submit_cycle.sh``. - - .. code-block:: console - - cd .. - vi submit_cycle.sh - - If necessary, modify lines 3 and 4 to include the correct account and queue (qos) for the system. It may also be necessary to add the following line to the script to specify the partition: - - .. 
code-block:: console - - #SBATCH --partition=my_partition - - where ``my_partition`` is the name of the partition on the user's system. - - When using the GSWP3 forcing option, users will need to update line 7 to say ``#SBATCH --cpus-per-task=4``. Users can perform this change manually in a code editor or run: - - .. code-block:: console - - sed -i 's/--cpus-per-task=1/--cpus-per-task=4/g' submit_cycle.sh - - -#. When using GSWP3 forcing option, users may also have to alter ``MACHINE_ID`` in line 8 of ``settings_DA_cycle_gswp3``. The default value is ``hera``, but ``orion`` is another option: - - .. code-block:: console - - export MACHINE_ID=orion - - Users running the ERA5 case do not need to make this change. - -#. Configure other elements of the experiment if desired. The ``settings_*`` files contain reasonable default values for running a Land DA experiment. Users who wish to run a more complex experiment may change the values in these files and the files they reference using information in Sections :numref:`%s ` & :numref:`%s `. - -Run an Experiment -******************** - -The Land DA System uses a script-based workflow that is launched using the ``do_submit_cycle.sh`` script. This script requires a ``settings_DA_cycle_*`` input file that details all the specifics of a given experiment. For example, to run the ERA5 case, users would run: - -.. code-block:: console - - ./do_submit_cycle.sh settings_DA_cycle_era5 - -Users can replace ``settings_DA_cycle_era5`` with a different settings file to run a different default experiment. Regardless of the file selected, the system will output a message such as ``Submitted batch job ########``, indicating that the job was successfully submitted. If all goes well, one full cycle will run with data assimilation (DA) and a forecast. - -.. _VerifySuccess: - -Check Progress -***************** - -To check on the experiment status, users on a system with a Slurm job scheduler may run: - -.. 
code-block:: console - - squeue -u $USER - -To view progress, users can open the ``log*`` and ``err*`` files once they have been generated: - -.. code-block:: console - - tail -f log* err* - -Users will need to type ``Ctrl+C`` to exit the files. For examples of what the log and error files should look like in a successful experiment, reference :ref:`ERA5 Experiment Logs ` or :ref:`GSWP3 Experiment Logs ` below. - -.. attention:: - - If the log file contains a NetCDF error (e.g., ``ModuleNotFoundError: No module named 'netCDF4'``), run: - - .. code-block:: console - - python -m pip install netCDF4 - - Then, resubmit the job (``sbatch submit_cycle.sh``). - -Next, check for the background and analysis files in the test directory. - -.. code-block:: console - - ls -l ../landda_expts/DA__test/mem000/restarts/`` - -where: - - * ```` is either ``era5`` or ``gswp3``, and - * ```` is either ``vector`` or ``tile`` depending on whether ERA5 or GSWP3 forcing data was used, respectively. - -The experiment should generate several files. - -.. _era5-log-output: - -ERA5 Experiment Logs -===================== - -For the ERA5 experiment, the ``log*`` file for a successful experiment will a message like: - -.. code-block:: console - - Creating: .//ufs_land_restart.2019-12-22_00-00-00.nc - Searching for forcing at time: 2019-12-22 01:00:00 - -The ``err*`` file for a successful experiment will end with something similar to: - -.. code-block:: console - - + THISDATE=2019122200 - + date_count=1 - + '[' 1 -lt 1 ']' - + '[' 2019122200 -lt 2019122200 ']' - -.. _gswp3-log-output: - -GSWP3 Experiment Logs -======================= - -For the GSWP3 experiment, the ``log*`` file for a successful experiment will end with a list of resource statistics. For example: - -.. 
code-block:: console - - Number of times filesystem performed OUTPUT = 250544 - Number of Voluntary Context Switches = 3252 - Number of InVoluntary Context Switches = 183 - *****************END OF RESOURCE STATISTICS************************* - -The ``err*`` file for a successful experiment will end with something similar to: - -.. code-block:: console - - + echo 'do_landDA: calling apply snow increment' - + [[ '' =~ hera\.internal ]] - + /apps/intel-2022.1.2/intel-2022.1.2/mpi/2021.5.1/bin/mpiexec -n 6 /path/to/land-DA_workflow/build/bin/apply_incr.exe /path/to/landda_expts/DA_GSWP3_test/DA/logs//apply_incr.log - + [[ 0 != 0 ]] - + '[' YES == YES ']' - + '[' YES == YES ']' - + cp /path/to/workdir/mem000/jedi/20000103.000000.xainc.sfc_data.tile1.nc /path/to/workdir/mem000/jedi/20000103.000000.xainc.sfc_data.tile2.nc /path/to/workdir/mem000/jedi/20000103.000000.xainc.sfc_data.tile3.nc /path/to/workdir/mem000/jedi/20000103.000000.xainc.sfc_data.tile4.nc /path/to/workdir/mem000/jedi/20000103.000000.xainc.sfc_data.tile5.nc /path/to/workdir/mem000/jedi/20000103.000000.xainc.sfc_data.tile6.nc /path/to/landda_expts/DA_GSWP3_test/DA/jedi_incr/ - + [[ YES == \N\O ]] diff --git a/docs/source/conf.py b/docs/source/conf.py deleted file mode 100644 index baa156c3..00000000 --- a/docs/source/conf.py +++ /dev/null @@ -1,88 +0,0 @@ -# Configuration file for the Sphinx documentation builder. 
-# -# For the full list of built-in configuration values, see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Project information ----------------------------------------------------- -# https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information - -project = "UFS Offline Land DA User's Guide" -copyright = '2023, ' -author = ' ' - -# The short X.Y version -version = 'v1.2' -# The full version, including alpha/beta/rc tags -release = 'v1.2.0' - -numfig = True - -# -- General configuration --------------------------------------------------- -# https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration - -extensions = [ - 'sphinx_rtd_theme', - 'sphinx.ext.intersphinx', - 'sphinxcontrib.bibtex', -] - -# File with bibliographic info -bibtex_bibfiles = ['references.bib'] - -# Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] - -# The master toctree document. -master_doc = 'index' - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = [] - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' - -# Documentation-wide substitutions - -rst_prolog = """ -.. |latestr| replace:: v1.2.0 -.. |tag| replace:: ``ufs-land-da-v1.2.0`` -.. |branch| replace:: ``release/public-v1.2.0`` -""" - -# -- Options for HTML output ------------------------------------------------- -# https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = 'sphinx_rtd_theme' -html_theme_path = ["_themes", ] - -# Theme options are theme-specific and customize the look and feel of a theme -# further. 
For a list of options available for each theme, see the -# documentation. -# -html_theme_options = { - "body_max_width": "none", - 'navigation_depth': 6, - } - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] -html_context = {} - -def setup(app): - app.add_css_file('custom.css') # may also be an URL - app.add_css_file('theme_overrides.css') # may also be a URL - -# Example configuration for intersphinx: refer to the Python standard library. -intersphinx_mapping = { - 'jedi': ('https://jointcenterforsatellitedataassimilation-jedi-docs.readthedocs-hosted.com/en/1.7.0', None), - 'spack-stack': ('https://spack-stack.readthedocs.io/en/1.3.0/', None), - 'ufs-wm': ('https://ufs-weather-model.readthedocs.io/en/latest/', None), - 'gswp3': ('https://hydro.iis.u-tokyo.ac.jp/GSWP3/', None), -}