diff --git a/.github/workflows/mdtf_tests.yml b/.github/workflows/mdtf_tests.yml index 80699f751..053fe0e2b 100644 --- a/.github/workflows/mdtf_tests.yml +++ b/.github/workflows/mdtf_tests.yml @@ -19,30 +19,29 @@ jobs: strategy: matrix: os: [ubuntu-latest, macos-13] - json-file: ["tests/github_actions_test_ubuntu_set1.jsonc","tests/github_actions_test_macos_set1.jsonc"] - json-file-set2: ["tests/github_actions_test_ubuntu_set2.jsonc", "tests/github_actions_test_macos_set2.jsonc"] - json-file-set3: ["tests/github_actions_test_ubuntu_set3.jsonc", "tests/github_actions_test_macos_set3.jsonc"] + json-file-1a: ["tests/github_actions_test_ubuntu_1a.jsonc","tests/github_actions_test_macos_1a.jsonc"] + json-file-1b: ["tests/github_actions_test_ubuntu_1b.jsonc","tests/github_actions_test_macos_1b.jsonc"] + json-file-2: ["tests/github_actions_test_ubuntu_2.jsonc", "tests/github_actions_test_macos_2.jsonc"] + json-file-3: ["tests/github_actions_test_ubuntu_3.jsonc", "tests/github_actions_test_macos_3.jsonc"] # if experimental is true, other jobs to run if one fails experimental: [false] exclude: - os: ubuntu-latest - json-file: "tests/github_actions_test_macos_set1.jsonc" + json-file-1a: "tests/github_actions_test_macos_1a.jsonc" - os: ubuntu-latest - json-file-set2: "tests/github_actions_test_macos_set2.jsonc" + json-file-1b: "tests/github_actions_test_macos_1b.jsonc" - os: ubuntu-latest - json-file-set3: "tests/github_actions_test_macos_set3.jsonc" - - os: macos-12 - json-file: "tests/github_actions_test_ubuntu_set1.jsonc" - - os: macos-12 - json-file-set2: "tests/github_actions_test_ubuntu_set2.jsonc" - - os: macos-12 - json-file-set3: "tests/github_actions_test_ubuntu_set3.jsonc" + json-file-2: "tests/github_actions_test_macos_2.jsonc" + - os: ubuntu-latest + json-file-3: "tests/github_actions_test_macos_3.jsonc" + - os: macos-13 + json-file-1a: "tests/github_actions_test_ubuntu_1a.jsonc" - os: macos-13 - json-file: "tests/github_actions_test_ubuntu_set1.jsonc" + 
json-file-1b: "tests/github_actions_test_ubuntu_1b.jsonc" - os: macos-13 - json-file-set2: "tests/github_actions_test_ubuntu_set2.jsonc" + json-file-2: "tests/github_actions_test_ubuntu_2.jsonc" - os: macos-13 - json-file-set3: "tests/github_actions_test_ubuntu_set3.jsonc" + json-file-3: "tests/github_actions_test_ubuntu_3.jsonc" max-parallel: 3 steps: - uses: actions/checkout@v3 @@ -62,19 +61,13 @@ jobs: condarc: | channels: - conda-forge - - - name: Install XQuartz if macOS - if: ${{ matrix.os == 'macos-12' || matrix.os == 'macos-13'}} + - name: Set conda environment variables for macOS + if: ${{ matrix.os == 'macos-13' }} run: | - echo "Installing XQuartz" - brew install --cask xquartz echo "CONDA_ROOT=$(echo /Users/runner/micromamba)" >> $GITHUB_ENV echo "MICROMAMBA_EXE=$(echo /Users/runner/micromamba-bin/micromamba)" >> $GITHUB_ENV echo "CONDA_ENV_DIR=$(echo /Users/runner/micromamba/envs)" >> $GITHUB_ENV - - name: Set environment variables - run: | - echo "POD_OUTPUT=$(echo $PWD/../wkdir)" >> $GITHUB_ENV - - name: Set conda vars + - name: Set conda environment variables for ubuntu if: ${{ matrix.os == 'ubuntu-latest' }} run: | echo "MICROMAMBA_EXE=$(echo /home/runner/micromamba-bin/micromamba)" >> $GITHUB_ENV @@ -84,7 +77,7 @@ jobs: run: | echo "Installing Conda Environments" echo "conda root ${CONDA_ROOT}" - echo "env dir ${CONDA_ENV_DIR}" + echo "env dir ${CONDA_ENV_DIR}" # MDTF-specific setup: install all conda envs ./src/conda/micromamba_env_setup.sh --all --micromamba_root ${CONDA_ROOT} --micromamba_exe ${MICROMAMBA_EXE} --env_dir ${CONDA_ENV_DIR} echo "Creating the _MDTF_synthetic_data environment" @@ -128,17 +121,21 @@ jobs: tar -xvf Wheeler_Kiladis_obs_data.tar # clean up tarballs rm -f *.tar - - name: Run diagnostic tests set 1 + - name: Run diagnostic tests set 1a run: | - echo "POD_OUTPUT is: " + echo "POD_OUTPUT=$(echo $PWD/../wkdir)" >> $GITHUB_ENV + echo "POD_OUTPUT is " echo "${POD_OUTPUT}" micromamba activate _MDTF_base # trivial check that 
install script worked ./mdtf_framework.py --help # run the test PODs - ./mdtf -f ${{matrix.json-file}} + timeout 5m ./mdtf -f ${{matrix.json-file-1a}} # Debug POD log(s) # cat ${POD_OUTPUT}/MDTF_NCAR.Synthetic_1975_1981/Wheeler_Kiladis/Wheeler_Kiladis.log + - name: Run diagnostic tests set 1b + run: | + ./mdtf -f ${{matrix.json-file-1b}} - name: Get observational data for set 2 run: | echo "${PWD}" @@ -162,7 +159,7 @@ jobs: run: | micromamba activate _MDTF_base # run the test PODs - ./mdtf -f ${{matrix.json-file-set2}} + timeout 5m ./mdtf -f ${{matrix.json-file-2}} # Uncomment the following line for debugging #cat ../wkdir/MDTF_GFDL.Synthetic_1_10/MJO_prop_amp/MJO_prop_amp.log - name: Get observational data for set 3 @@ -201,7 +198,7 @@ jobs: run: | micromamba activate _MDTF_base # run the test PODs - ./mdtf -f ${{matrix.json-file-set3}} + timeout 5m ./mdtf -f ${{matrix.json-file-3}} #- name: Run unit tests # run: | # micromamba activate _MDTF_base diff --git a/data/fieldlist_GFDL.jsonc b/data/fieldlist_GFDL.jsonc index 2c9ffb1fb..59d4a95af 100644 --- a/data/fieldlist_GFDL.jsonc +++ b/data/fieldlist_GFDL.jsonc @@ -214,7 +214,7 @@ "units": "kg m-2 s-1", "ndim": 3 }, - "prw": { + "wvp": { "standard_name": "atmosphere_mass_content_of_water_vapor", "long_name": "Water Vapor Path", "realm": "atmos", diff --git a/diagnostics/stc_eddy_heat_fluxes/settings.jsonc b/diagnostics/stc_eddy_heat_fluxes/settings.jsonc index 3cf0e6136..7b5eb081f 100644 --- a/diagnostics/stc_eddy_heat_fluxes/settings.jsonc +++ b/diagnostics/stc_eddy_heat_fluxes/settings.jsonc @@ -27,9 +27,6 @@ "python3": ["matplotlib", "numpy", "pandas", "xarray", "xesmf"] } }, - "data": { - "realm" : "atmos" - }, "dimensions": { "lat": { diff --git a/src/pod_setup.py b/src/pod_setup.py index 46f3d5f67..e50c5adee 100644 --- a/src/pod_setup.py +++ b/src/pod_setup.py @@ -148,6 +148,7 @@ def verify_pod_settings(self): value[0]) from exc def verify_runtime_reqs(runtime_reqs: dict): + pod_env = "" for k, v in 
runtime_reqs.items(): if any(v): pod_env = k @@ -172,6 +173,7 @@ def verify_runtime_reqs(runtime_reqs: dict): pass else: self.log.info(f"Checking {e} for {self.name} package requirements") + conda_root = self.pod_env_vars['CONDA_ROOT'] if os.path.exists(os.path.join(conda_root, "bin/conda")): args = [os.path.join(conda_root, "bin/conda"), 'list', @@ -300,12 +302,13 @@ def setup_pod(self, runtime_config: util.NameSpace, # Translate the varlistEntries from the POD convention to the data convention if desired and the pod # convention does not match the case convention data_convention = case_dict.convention.lower() - if runtime_config.translate_data and pod_convention != data_convention: - self.log.info(f'Translating POD variables from {pod_convention} to {data_convention}') - else: + if not runtime_config.translate_data: data_convention = 'no_translation' - self.log.info(f'POD convention and data convention are both {pod_convention}. ' + self.log.info(f'Runtime option translate_data is set to .false.' 
f'No data translation will be performed for case {case_name}.') + if pod_convention != data_convention: + self.log.info(f'Translating POD variables from {pod_convention} to {data_convention}') + # A 'noTranslationFieldlist' will be defined for the varlistEntry translation attribute for v in pod_input.varlist.keys(): for v_entry in cases[case_name].varlist.iter_vars(): diff --git a/src/preprocessor.py b/src/preprocessor.py index 9c727da45..a54a4c276 100644 --- a/src/preprocessor.py +++ b/src/preprocessor.py @@ -241,8 +241,9 @@ def execute(self, var, ds, **kwargs): """ tv = var.translation # abbreviate # convert dependent variable + # Note: may need to define src_unit = ds[tv.name].units or similar ds = units.convert_dataarray( - ds, tv.name, src_unit=None, dest_unit=var.units, log=var.log + ds, tv.name, src_unit=None, dest_unit=var.units.units, log=var.log ) tv.units = var.units @@ -251,8 +252,13 @@ def execute(self, var, ds, **kwargs): if c.axis == 'T': continue # TODO: separate function to handle calendar conversion dest_c = var.axes[c.axis] + src_units = None + for v in ds.variables: + if hasattr(ds[v], 'standard_name'): + if ds[v].standard_name == dest_c.standard_name: + src_units = ds[v].units ds = units.convert_dataarray( - ds, c.standard_name, src_unit=None, dest_unit=dest_c.units, log=var.log + ds, c.standard_name, src_unit=src_units, dest_unit=dest_c.units, log=var.log ) if c.has_bounds and c.bounds_var.name in ds: ds = units.convert_dataarray( @@ -719,7 +725,7 @@ def check_time_bounds(self, ds, var: translation.TranslatedVarlistEntry, freq: s `__ objects so that they can be compared with the model data's time axis. 
""" - # TODO make time bound checks less restrictive for mon and longer data + dt_range = var.T.range ds_decode = xr.decode_cf(ds, use_cftime=True) t_coord = ds_decode[var.T.name] @@ -922,12 +928,20 @@ def query_catalog(self, for var in case_d.varlist.iter_vars(): realm_regex = var.realm + '*' + var_id = var.translation.name + standard_name = var.translation.standard_name + date_range = var.translation.T.range + if var.translation.convention == 'no_translation': + date_range = var.T.range + var_id = var.name + standard_name = var.standard_name if var.is_static: date_range = None freq = "fx" else: freq = var.T.frequency - date_range = var.translation.T.range + if freq == 'hr': + freq = '1hr' if not isinstance(freq, str): freq = freq.format_local() # define initial query dictionary with variable settings requirements that do not change if @@ -935,8 +949,8 @@ def query_catalog(self, case_d.query['frequency'] = freq case_d.query['path'] = [path_regex] case_d.query['realm'] = realm_regex - case_d.query['standard_name'] = var.translation.standard_name - case_d.query['variable_id'] = var.translation.name + case_d.query['standard_name'] = standard_name + case_d.query['variable_id'] = var_id # change realm key name if necessary if cat.df.get('modeling_realm', None) is not None: @@ -945,7 +959,7 @@ def query_catalog(self, # search catalog for convention specific query object var.log.info("Querying %s for variable %s for case %s.", data_catalog, - var.translation.name, + var_id, case_name) cat_subset = cat.search(**case_d.query) if cat_subset.df.empty: @@ -984,7 +998,7 @@ def query_catalog(self, f"configuration file.") else: raise util.DataRequestError( - f"Unable to find match or alternate for {var.translation.name}" + f"Unable to find match or alternate for {var_id}" f" for case {case_name} in {data_catalog}") # Get files in specified date range @@ -993,7 +1007,7 @@ def query_catalog(self, cat_subset.esmcat._df = self.check_group_daterange(cat_subset.df, date_range) if 
cat_subset.df.empty: raise util.DataRequestError( - f"check_group_daterange returned empty data frame for {var.translation.name}" + f"check_group_daterange returned empty data frame for {var_id}" f" case {case_name} in {data_catalog}, indicating issues with data continuity") # v.log.debug("Read %d mb for %s.", cat_subset.esmcat._df.dtypes.nbytes / (1024 * 1024), v.full_name) # convert subset catalog to an xarray dataset dict @@ -1046,10 +1060,13 @@ def query_catalog(self, cat_dict[case_name] = xr.merge([cat_dict[case_name], var_xr], compat='no_conflicts') # check that start and end times include runtime startdate and enddate if not var.is_static: + var_obj = var.translation + if var.translation.convention == 'no_translation': + var_obj = var try: - self.check_time_bounds(cat_dict[case_name], var.translation, freq) + self.check_time_bounds(cat_dict[case_name], var_obj, freq) except LookupError: - var.log.error(f'Data not found in catalog query for {var.translation.name}' + var.log.error(f'Data not found in catalog query for {var_id}' f' for requested date_range.') raise SystemExit("Terminating program") return cat_dict @@ -1387,9 +1404,12 @@ def write_pp_catalog(self, for case_name, case_dict in cases.items(): ds_match = input_catalog_ds[case_name] for var in case_dict.varlist.iter_vars(): - ds_var = ds_match.data_vars.get(var.translation.name, None) + var_name = var.translation.name + if var.translation.convention == 'no_translation': + var_name = var.name + ds_var = ds_match.data_vars.get(var_name, None) if ds_var is None: - log.error(f'No var {var.translation.name}') + log.error(f'No var {var_name}') d = dict.fromkeys(columns, "") for key, val in ds_match.attrs.items(): if 'intake_esm_attrs' in key: @@ -1405,7 +1425,7 @@ def write_pp_catalog(self, d.update({'end_time': util.cftime_to_str(input_catalog_ds[case_name].time.values[-1])}) cat_entries.append(d) - # create a Pandas dataframe romthe catalog entries + # create a Pandas dataframe from the catalog entries 
cat_df = pd.DataFrame(cat_entries) cat_df.head() diff --git a/src/translation.py b/src/translation.py index 4f45c70df..ec5f961f1 100644 --- a/src/translation.py +++ b/src/translation.py @@ -154,7 +154,7 @@ def to_CF_standard_name(self, standard_name: str, if var_dict['standard_name'] == standard_name\ and var_dict['realm'] == realm\ and var_dict['modifier'] == modifier: - if not var_dict['long_name'] or var_dict['long_name'].lower() == long_name.lower(): + # if not var_dict['long_name'] or var_dict['long_name'].lower() == long_name.lower(): return var_name else: if var_dict['standard_name'] in precip_vars and standard_name in precip_vars: @@ -331,13 +331,16 @@ def translate_coord(self, coord, class_dict=None, log=_log) -> dict: new_coord = v break else: - new_coord = [lut1.values()][0] + new_coord = [lut1[k] for k in lut1.keys()][0] # should return ordered dict if hasattr(coord, 'is_scalar') and coord.is_scalar: coord_name = "" - if new_coord.get('name', None): + if hasattr(new_coord, 'name'): coord_name = new_coord['name'] - elif new_coord.get('out_name', None): + elif hasattr(new_coord, 'out_name'): coord_name = new_coord['out_name'] + else: + coord_name = [k for k in lut1.keys()][0] + coord_copy = copy.deepcopy(new_coord) coord_copy['value'] = units.convert_scalar_coord(coord, coord_copy['units'], diff --git a/src/units.py b/src/units.py index fc3956cad..7929110ca 100644 --- a/src/units.py +++ b/src/units.py @@ -106,7 +106,7 @@ def units_equal(*args, rtol=None): """Returns True if and only if all unit-ful quantities in *args* are strictly equal (:func:`units_equivalent` is True and :func:`conversion_factor` = 1). - .. note:: + . note:: rtol, atol tolerances on floating-point equality are not currently implemented in cfunits, so we use :func:`relative_tol`. @@ -186,7 +186,8 @@ def convert_dataarray(ds, da_name: str, src_unit=None, dest_unit=None, log=_log) Dataset *ds*, with *da_name* modified in-place. 
""" da = ds.get(da_name, None) - var_name = da_name + + search_attrs = ['standard_name', 'long_name'] if da is None: # search attributes for standard_name or long_name that matches input name @@ -200,7 +201,7 @@ def convert_dataarray(ds, da_name: str, src_unit=None, dest_unit=None, log=_log) if isinstance(att, str): if att == da_name: da = dset - var_name = var + da_name = var break # try to find approximate matches to input name in standard_name and long_name attributes if da is None: @@ -216,7 +217,7 @@ def convert_dataarray(ds, da_name: str, src_unit=None, dest_unit=None, log=_log) log.info("Found approximate match for %s in dataset %s attribute %s", da_name, attr, att_value) da = dset - var_name = var + da_name = var break if da is None: raise ValueError(f"convert_dataarray: '{da_name}' not found in dataset.") @@ -236,8 +237,14 @@ def convert_dataarray(ds, da_name: str, src_unit=None, dest_unit=None, log=_log) std_name = f"{da.attrs['standard_name']}" elif 'long_name' in da.attrs: std_name = f"{da.attrs['long_name']}" - ds[var_name].attrs['standard_name'] = std_name.replace(' ', '_') - + ds[da_name].attrs['standard_name'] = std_name.replace(' ', '_') + + # udunits does not recognize mb == hPa, so hardcode correction + if src_unit == 'mb': + ds[da_name].attrs['units'] = 'hPa' + src_unit = 'hPa' + if dest_unit == 'mb': + dest_unit = 'hPa' if units_equal(src_unit, dest_unit): log.debug(("Source, dest units of '%s'%s identical (%s); no conversion " "done."), da.name, std_name, dest_unit) diff --git a/src/util/datelabel.py b/src/util/datelabel.py index 4c9539431..9d816c77d 100644 --- a/src/util/datelabel.py +++ b/src/util/datelabel.py @@ -20,12 +20,12 @@ Properties and use of :class:`DateRange`, :class:`Date` and :class:`DateFrequency` objects are best illustrated by examples: -.. code-block:: python +. 
code-block:: python >>> Date('20001215').month 12 - >>> Date('200012') == datetime(2000, 12, 1) + >>> Date('200012') == datetime.datetime(2000, 12, 1) True >>> DateRange('2010-2020') in DateRange('2008-2019') @@ -56,6 +56,8 @@ # match-case statement to give date format # input can be int or str + + def date_fmt(date: str): date_digits = len(date) match date_digits: @@ -72,6 +74,8 @@ def date_fmt(date: str): return fmt # convert a string to a cftime object + + def str_to_cftime(time_str: str, fmt=None, calendar=None): if fmt is None: fmt = date_fmt(time_str) @@ -1175,7 +1179,7 @@ def _parse_input_string(cls, quantity, unit): s = 'wk' elif s in ['daily', 'day', 'days', 'dy', 'd', 'diurnal', 'diurnally']: s = 'day' - elif s in ['hourly', 'hour', 'hours', 'hr', 'h']: + elif s in ['hourly', 'hour', 'hours', 'hr', 'h', '1hr']: s = 'hr' elif s in ['minutes', 'minute', 'min']: s = 'min' diff --git a/src/xr_parser.py b/src/xr_parser.py index 01aab0dbd..40e4dce82 100644 --- a/src/xr_parser.py +++ b/src/xr_parser.py @@ -757,8 +757,10 @@ def compare_attr(self, our_attr_tuple, ds_attr_tuple, comparison_func=None, - False: Change *ds* to match *our_var*. 
""" # unpack tuples + our_var, our_attr_name, our_attr = our_attr_tuple ds_var, ds_attr_name, ds_attr = ds_attr_tuple + if comparison_func is None: comparison_func = (lambda x, y: x == y) @@ -821,8 +823,8 @@ def compare_attr(self, our_attr_tuple, ds_attr_tuple, comparison_func=None, else: comparison_func = self.approximate_attribute_value(our_attr, ds_attr) if not comparison_func: - raise util.MetadataEvent((f"Unexpected {our_attr_name} for variable " - f"'{our_var.name}': '{ds_attr}' (expected '{our_attr}').")) + self.log.warning(f"Unexpected {our_attr_name} for variable " + f"'{our_var.name}': '{ds_attr}' (expected '{our_attr}').") else: self.log.warning(f"Could not find exact match for {our_var.name} attribute {our_attr_name}" f"{our_attr}; data processing will proceed with approximate match {ds_attr}") @@ -946,6 +948,7 @@ def reconcile_units(self, our_var, ds_var): # will raise UnitsUndefinedError or log warning if unit attribute missing self.check_metadata(ds_var, 'units') # Check equivalence of units: if units are not equivalent, raise MetadataEvent + self.reconcile_attr(our_var, ds_var, 'units', comparison_func=units.units_equivalent, fill_ours=True, fill_ds=True @@ -1268,6 +1271,9 @@ def check_metadata(self, ds_var, *attr_names): """Wrapper for :meth:`~DefaultDatasetParser.normalize_attr`, specialized to the case of getting a variable's standard_name. 
""" + delete_chars = re.compile(r"[\".,'*]") + ds_var.attrs = {delete_chars.sub('', k): v for k, v in ds_var.attrs.items()} + ds_var.encoding = {delete_chars.sub('', k): v for k, v in ds_var.encoding.items()} for attr in attr_names: if attr not in ds_var.attrs: if attr in ds_var.encoding: diff --git a/tests/esm_catalog_test_macos.csv b/tests/esm_catalog_test_macos.csv index 8c57d8a7d..bd856631a 100644 --- a/tests/esm_catalog_test_macos.csv +++ b/tests/esm_catalog_test_macos.csv @@ -7,8 +7,8 @@ CMIP,,,,,,day,,,,,,,,,,wind_speed,,m s-1,atmos,,,,,,sfcWind,,,1,,1990-01-01 00:0 CMIP,,,,,,day,,,,,,,,,,air_temperature,,K ,atmos,,,,,,tas,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/day/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.tas.day.nc,v0 CMIP,,,,,,day,,,,,,,,,,surface_temperature,,K,atmos,,,,,,ts,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/day/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.ts.day.nc,v0 CMIP,,,,,,day,,,,,,,,,,geopotential_height,,m,atmos,,,,,,zg500,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/day/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.zg500.day.nc,v0 -CMIP,,,,,,mon,,,,,,,,,,cell_area,,m2,atmos,,,,,,areacella,,,1,,,,,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.areacella.mon.nc,v0 -CMIP,,,,,,mon,,,,,,,,,,call_area,,m2,ocean,,,,,,areacello,,,1,,,,,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.areacello.mon.nc,v0 
+CMIP,,,,,,fx,,,,,,,,,,cell_area,,m2,atmos,,,,,,areacella,,,1,,,,,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.areacella.mon.nc,v0 +CMIP,,,,,,fx,,,,,,,,,,call_area,,m2,ocean,,,,,,areacello,,,1,,,,,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.areacello.mon.nc,v0 CMIP,,,,,,mon,,,,,,,,,,surface_upward_latent_heat_flux,,W m-2,atmos,,,,,,hfls,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.hfls.mon.nc,v0 CMIP,,,,,,mon,,,,,,,,,,surface_upward_sensible_heat_flux,,W m-2,atmos,,,,,,hfss,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.hfss.mon.nc,v0 CMIP,,,,,,mon,,,,,,,,,,specific_humidity,,1,atmos,,,,,,hus,,,32,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.hus.mon.nc,v0 @@ -33,13 +33,13 @@ CMIP,,,,,,mon,,,,,,,,,,northward_wind,,m s-1,atmos,,,,,,va,,,32,,1990-01-01 00:0 CMIP,,,,,,mon,,,,,,,,,,lagrangian_tendency_of_air_pressure,,Pa s-1,atmos,,,,,,wap,,,32,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.wap.mon.nc,v0 CMIP,,,,,,mon,,,,,,,,,,geopotential_height,,m,atmos,,,,,,zg,,,32,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 
00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.zg.mon.nc,v0 CMIP,,,,,,mon,,,,,,,,,,sea_surface_height_above_geoid,,m,ocean,,,,,,zos,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.zos.mon.nc,v0 -GFDL,,,,,,day,,,,,,,,,,atmosphere_mass_content_of_water_vapor,,kg m-2,atmos,,,,,,WVP,,,1,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.WVP.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,precipitation_flux,,kg m-2 s-1,atmos,,,,,,precip,,,1,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.precip.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,toa_outgoing_longwave_flux,,W m-2,atmos,,,,,,rlut,,,1,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.rlut.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,specific_humidity,,1,atmos,,,,,,sphum,,,1,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.sphum.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,eastward_wind,,m s-1,atmos,,,,,,ua,,,19,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.ua.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,northward_wind,,m s-1,atmos,,,,,,va,,,19,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 
00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.va.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,lagrangian_tendency_of_air_pressure,,Pa s-1,atmos,,,,,,wap,,,19,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.wap.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,atmosphere_mass_content_of_water_vapor,,kg m-2,atmos,,,,,,wvp,,,1,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.WVP.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,precipitation_flux,,kg m-2 s-1,atmos,,,,,,precip,,,1,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.precip.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,toa_outgoing_longwave_flux,,W m-2,atmos,,,,,,rlut,,,1,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.rlut.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,specific_humidity,,1,atmos,,,,,,sphum,,,1,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.sphum.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,eastward_wind,,m s-1,atmos,,,,,,ua,,,19,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.ua.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,northward_wind,,m s-1,atmos,,,,,,va,,,19,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.va.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,lagrangian_tendency_of_air_pressure,,Pa s-1,atmos,,,,,,wap,,,19,,0001-01-01 
00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.wap.day.nc,v0 CESM,,,,,,1hr,,,,,,,,,,precipitation_rate,,m s-1,atmos,,,,,,PRECT,,,1,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/NCAR.Synthetic/1hr/NCAR.Synthetic.PRECT.1hr.nc,v0 CESM,,,,,,1hr,,,,,,,,,,atmosphere_mass_content_of_water_vapor,,kg m-2,atmos,,,,,,prw,,,1,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/NCAR.Synthetic/1hr/NCAR.Synthetic.prw.1hr.nc,v0 CESM,,,,,,1hr,,,,,,,,,,specific_humidity,,1,atmos,,,,,,qsat,,,1,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/Users/runner/work/MDTF-diagnostics/mdtf_test_data/NCAR.Synthetic/1hr/NCAR.Synthetic.qsat_int.1hr.nc,v0 diff --git a/tests/esm_catalog_test_ubuntu.csv b/tests/esm_catalog_test_ubuntu.csv index 2fe35eff9..10f413688 100644 --- a/tests/esm_catalog_test_ubuntu.csv +++ b/tests/esm_catalog_test_ubuntu.csv @@ -7,8 +7,8 @@ CMIP,,,,,,day,,,,,,,,,,wind_speed,,m s-1,atmos,,,,,,sfcWind,,,1,,1990-01-01 00:0 CMIP,,,,,,day,,,,,,,,,,air_temperature,,K ,atmos,,,,,,tas,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/day/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.tas.day.nc,v0 CMIP,,,,,,day,,,,,,,,,,surface_temperature,,K,atmos,,,,,,ts,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/day/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.ts.day.nc,v0 CMIP,,,,,,day,,,,,,,,,,geopotential_height,,m,atmos,,,,,,zg500,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 
00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/day/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.zg500.day.nc,v0 -CMIP,,,,,,mon,,,,,,,,,,cell_area,,m2,atmos,,,,,,areacella,,,1,,,,,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.areacella.mon.nc,v0 -CMIP,,,,,,mon,,,,,,,,,,call_area,,m2,ocean,,,,,,areacello,,,1,,,,,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.areacello.mon.nc,v0 +CMIP,,,,,,fx,,,,,,,,,,cell_area,,m2,atmos,,,,,,areacella,,,1,,,,,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.areacella.mon.nc,v0 +CMIP,,,,,,fx,,,,,,,,,,call_area,,m2,ocean,,,,,,areacello,,,1,,,,,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.areacello.mon.nc,v0 CMIP,,,,,,mon,,,,,,,,,,surface_upward_latent_heat_flux,,W m-2,atmos,,,,,,hfls,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.hfls.mon.nc,v0 CMIP,,,,,,mon,,,,,,,,,,surface_upward_sensible_heat_flux,,W m-2,atmos,,,,,,hfss,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.hfss.mon.nc,v0 CMIP,,,,,,mon,,,,,,,,,,specific_humidity,,1,atmos,,,,,,hus,,,32,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 
00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.hus.mon.nc,v0 @@ -33,13 +33,13 @@ CMIP,,,,,,mon,,,,,,,,,,northward_wind,,m s-1,atmos,,,,,,va,,,32,,1990-01-01 00:0 CMIP,,,,,,mon,,,,,,,,,,lagrangian_tendency_of_air_pressure,,Pa s-1,atmos,,,,,,wap,,,32,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.wap.mon.nc,v0 CMIP,,,,,,mon,,,,,,,,,,geopotential_height,,m,ocean,,,,,,zg,,,32,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.zg.mon.nc,v0 CMIP,,,,,,mon,,,,,,,,,,sea_surface_height_above_geoid,,m,ocean,,,,,,zos,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231/mon/CMIP_Synthetic_r1i1p1f1_gr1_19900101-20091231.zos.mon.nc,v0 -GFDL,,,,,,day,,,,,,,,,,atmosphere_mass_content_of_water_vapor,,kg m-2,atmos,,,,,,WVP,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.WVP.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,precipitation_flux,,kg m-2 s-1,atmos,,,,,,precip,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.precip.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,toa_outgoing_longwave_flux,,W m-2,atmos,,,,,,rlut,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 
00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.rlut.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,specific_humidity,,1,atmos,,,,,,sphum,,,1,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.sphum.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,eastward_wind,,m s-1,atmos,,,,,,ua,,,19,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.ua.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,northward_wind,,m s-1,atmos,,,,,,va,,,19,,1990-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.va.day.nc,v0 -GFDL,,,,,,day,,,,,,,,,,lagrangian_tendency_of_air_pressure,,Pa s-1,atmos,,,,,,wap,,,19,,0001-01-01 00:00:00,2009-12-31 00:00:00,1990-01-01 00:00:00-2009-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.wap.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,atmosphere_mass_content_of_water_vapor,,kg m-2,atmos,,,,,,wvp,,,1,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.WVP.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,precipitation_flux,,kg m-2 s-1,atmos,,,,,,precip,,,1,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.precip.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,toa_outgoing_longwave_flux,,W m-2,atmos,,,,,,rlut,,,1,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.rlut.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,specific_humidity,,1,atmos,,,,,,sphum,,,1,,0001-01-01 00:00:00,0010-12-31 
00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.sphum.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,eastward_wind,,m s-1,atmos,,,,,,ua,,,19,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.ua.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,northward_wind,,m s-1,atmos,,,,,,va,,,19,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.va.day.nc,v0 +GFDL,,,,,,day,,,,,,,,,,lagrangian_tendency_of_air_pressure,,Pa s-1,atmos,,,,,,wap,,,19,,0001-01-01 00:00:00,0010-12-31 00:00:00,0001-01-01 00:00:00-0010-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/GFDL.Synthetic/day/GFDL.Synthetic.wap.day.nc,v0 CESM,,,,,,1hr,,,,,,,,,,precipitation_rate,,m s-1,atmos,,,,,,PRECT,,,1,,1990-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/NCAR.Synthetic/1hr/NCAR.Synthetic.PRECT.1hr.nc,v0 CESM,,,,,,1hr,,,,,,,,,,atmosphere_mass_content_of_water_vapor,,kg m-2,atmos,,,,,,prw,,,1,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/NCAR.Synthetic/1hr/NCAR.Synthetic.prw.1hr.nc,v0 CESM,,,,,,1hr,,,,,,,,,,specific_humidity,,1,atmos,,,,,,qsat_int,,,1,,1975-01-01 00:00:00,1981-12-31 00:00:00,1975-01-01 00:00:00-1981-12-31 00:00:00,/home/runner/work/MDTF-diagnostics/mdtf_test_data/NCAR.Synthetic/1hr/NCAR.Synthetic.qsat_int.1hr.nc,v0 diff --git a/tests/github_actions_test_macos_set1.jsonc b/tests/github_actions_test_macos_1a.jsonc similarity index 97% rename from tests/github_actions_test_macos_set1.jsonc rename to tests/github_actions_test_macos_1a.jsonc index 61e6813c9..a706a5188 100644 --- a/tests/github_actions_test_macos_set1.jsonc +++ 
b/tests/github_actions_test_macos_1a.jsonc @@ -5,10 +5,7 @@ "pod_list": [ //"convective_transition_diag", //"Wheeler_Kiladis", - //"MJO_suite", - "MJO_teleconnection", "precip_diurnal_cycle" - //"EOF_500hPa" ], "case_list": { "NCAR.Synthetic": { diff --git a/tests/github_actions_test_macos_1b.jsonc b/tests/github_actions_test_macos_1b.jsonc new file mode 100644 index 000000000..4a1419058 --- /dev/null +++ b/tests/github_actions_test_macos_1b.jsonc @@ -0,0 +1,81 @@ +// Configuration for MDTF-diagnostics driver script self-test using the macOS github action. +// All text to the right of an unquoted "//" is a comment and ignored, as well +// as blank lines (JSONC quasi-standard.) +{ + "pod_list": [ + "MJO_suite", + "MJO_teleconnection" + //"EOF_500hPa" + ], + "case_list": { + "NCAR.Synthetic": { + "convention" : "CESM", + "startdate" : "19750101", + "enddate" : "19811231" + } + }, + // PATHS --------------------------------------------------------------------- + // Location of supporting data downloaded when the framework was installed. + + // If a relative path is given, it's resolved relative to the MDTF-diagnostics + // code directory. Environment variables (eg, $HOME) can be referenced with a + // "$" and will be expanded to their current values when the framework runs. + "DATA_CATALOG": "./tests/esm_catalog_test_macos.json", + // Parent directory containing observational data used by individual PODs. + "OBS_DATA_ROOT": "../inputdata/obs_data", + + // Working directory. Defaults to working directory if blank. + "WORK_DIR": "../wkdir", + + // Directory to write output. The results of each run of the framework will be + // put in a subdirectory of this directory. + "OUTPUT_DIR": "../wkdir", + + // Location of the Anaconda/miniconda installation to use for managing + // dependencies (path returned by running `conda info --base`.) If empty, + // framework will attempt to determine location of system's conda installation. 
+ "conda_root": "/Users/runner/micromamba", + + "micromamba_exe": "/Users/runner/micromamba-bin/micromamba", + + + // Directory containing the framework-specific conda environments. This should + // be equal to the "--env_dir" flag passed to conda_env_setup.sh. If left + // blank, the framework will look for its environments in the system default + // location. + "conda_env_root": "/Users/runner/micromamba/envs", + + // SETTINGS ------------------------------------------------------------------ + // Any command-line option recognized by the mdtf script + // can be set here, in the form "flag name": "desired setting". + + // Settings affecting what output is generated: + // Set to true to run the preprocessor; default true: + "run_pp": true, + + // Set to true to perform data translation; default false: + "translate_data": true, + + // Set to true to have PODs save postscript figures in addition to bitmaps. + "save_ps": false, + + // Set to true for files > 4 GB + "large_file": false, + + // If true, leave pp data in OUTPUT_DIR after preprocessing; if false, delete pp data after PODs + // run to completion + "save_pp_data": true, + + // Set to true to save HTML and bitmap plots in a .tar file. + "make_variab_tar": false, + + // Generate html output for multiple figures per case + "make_multicase_figure_html": false, + + // Set to true to overwrite results in OUTPUT_DIR; otherwise results saved + // under a unique name. 
+ "overwrite": false, + // List with custom preprocessing script(s) to run on data + // Place these scripts in the user_scripts directory of your copy of the MDTF-diagnostics repository + "user_pp_scripts" : [] +} diff --git a/tests/github_actions_test_macos_set2.jsonc b/tests/github_actions_test_macos_2.jsonc similarity index 100% rename from tests/github_actions_test_macos_set2.jsonc rename to tests/github_actions_test_macos_2.jsonc diff --git a/tests/github_actions_test_macos_set3.jsonc b/tests/github_actions_test_macos_3.jsonc similarity index 97% rename from tests/github_actions_test_macos_set3.jsonc rename to tests/github_actions_test_macos_3.jsonc index a7490eda0..5af660168 100644 --- a/tests/github_actions_test_macos_set3.jsonc +++ b/tests/github_actions_test_macos_3.jsonc @@ -6,10 +6,10 @@ "pod_list": [ //"temp_extremes_distshape",// needs matplotlib 3.7.3, but not avail on conda yet //"tropical_pacific_sea_level", - "ocn_surf_flux_diag", - "mixed_layer_depth", - "seaice_suite", - "stc_eddy_heat_fluxes" + "ocn_surf_flux_diag" + //"mixed_layer_depth", + //"seaice_suite", + //"stc_eddy_heat_fluxes" // "albedofb" ], "case_list" : { diff --git a/tests/github_actions_test_ubuntu_set1.jsonc b/tests/github_actions_test_ubuntu_1a.jsonc similarity index 95% rename from tests/github_actions_test_ubuntu_set1.jsonc rename to tests/github_actions_test_ubuntu_1a.jsonc index 1b2d4bcec..ea1b1b937 100644 --- a/tests/github_actions_test_ubuntu_set1.jsonc +++ b/tests/github_actions_test_ubuntu_1a.jsonc @@ -4,13 +4,10 @@ { "pod_list": [ //"convective_transition_diag", - //"Wheeler_Kiladis", - //"MJO_suite", - "MJO_teleconnection", - "precip_diurnal_cycle" - //"EOF_500hPa" + "Wheeler_Kiladis" + // "precip_diurnal_cycle" ], - "case_list" : { + "case_list" : { "NCAR.Synthetic": { "convention" : "CESM", "startdate" : "19750101", diff --git a/tests/github_actions_test_ubuntu_1b.jsonc b/tests/github_actions_test_ubuntu_1b.jsonc new file mode 100644 index 000000000..eec5cbcbb --- 
/dev/null +++ b/tests/github_actions_test_ubuntu_1b.jsonc @@ -0,0 +1,80 @@ +// Configuration for MDTF-diagnostics driver script self-test. +// All text to the right of an unquoted "//" is a comment and ignored, as well +// as blank lines (JSONC quasi-standard.) +{ + "pod_list": [ + "MJO_suite", + "MJO_teleconnection" + //"EOF_500hPa" + ], + "case_list" : { + "NCAR.Synthetic": { + "convention" : "CESM", + "startdate" : "19750101", + "enddate" : "19811231" + } + }, + // PATHS --------------------------------------------------------------------- + // Location of supporting data downloaded when the framework was installed. + + // If a relative path is given, it's resolved relative to the MDTF-diagnostics + // code directory. Environment variables (eg, $HOME) can be referenced with a + // "$" and will be expanded to their current values when the framework runs. + "DATA_CATALOG": "./tests/esm_catalog_test_ubuntu.json", + // Parent directory containing observational data used by individual PODs. + "OBS_DATA_ROOT": "../inputdata/obs_data", + + // Working directory. Defaults to working directory if blank. + "WORK_DIR": "../wkdir", + + // Directory to write output. The results of each run of the framework will be + // put in a subdirectory of this directory. + "OUTPUT_DIR": "../wkdir", + + // Location of the Anaconda/miniconda installation to use for managing + // dependencies (path returned by running `conda info --base`.) If empty, + // framework will attempt to determine location of system's conda installation. + //"conda_root": "/usr/share/miniconda3", + "conda_root": "/home/runner/micromamba", + + "micromamba_exe": "/home/runner/micromamba-bin/micromamba", + // Directory containing the framework-specific conda environments. This should + // be equal to the "--env_dir" flag passed to conda_env_setup.sh. If left + // blank, the framework will look for its environments in the system default + // location. 
+ "conda_env_root": "/home/runner/micromamba/envs", + + // SETTINGS ------------------------------------------------------------------ + // Any command-line option recognized by the mdtf script + // can be set here, in the form "flag name": "desired setting". + + // Settings affecting what output is generated: + // Set to true to run the preprocessor; default true: + "run_pp": true, + + // Set to true to perform data translation; default false: + "translate_data": true, + + // Set to true to have PODs save postscript figures in addition to bitmaps. + "save_ps": false, + + // Set to true for files > 4 GB + "large_file": false, + + // If true, leave pp data in OUTPUT_DIR after preprocessing; if false, delete pp data after PODs + // run to completion + "save_pp_data": true, + + // Set to true to save HTML and bitmap plots in a .tar file. + "make_variab_tar": false, + + // Generate html output for multiple figures per case + "make_multicase_figure_html": false, + + // Set to true to overwrite results in OUTPUT_DIR; otherwise results saved + // under a unique name. 
+ "overwrite": false, + // List with custom preprocessing script(s) to run on data + // Place these scripts in the user_scripts directory of your copy of the MDTF-diagnostics repository + "user_pp_scripts" : [] +} diff --git a/tests/github_actions_test_ubuntu_set2.jsonc b/tests/github_actions_test_ubuntu_2.jsonc similarity index 100% rename from tests/github_actions_test_ubuntu_set2.jsonc rename to tests/github_actions_test_ubuntu_2.jsonc diff --git a/tests/github_actions_test_ubuntu_set3.jsonc b/tests/github_actions_test_ubuntu_3.jsonc similarity index 97% rename from tests/github_actions_test_ubuntu_set3.jsonc rename to tests/github_actions_test_ubuntu_3.jsonc index 376548366..0f3dba986 100644 --- a/tests/github_actions_test_ubuntu_set3.jsonc +++ b/tests/github_actions_test_ubuntu_3.jsonc @@ -5,10 +5,10 @@ "pod_list": [ //"temp_extremes_distshape",// needs matplotlib 3.7.3, but not avail on conda yet //"tropical_pacific_sea_level", - "ocn_surf_flux_diag", - "mixed_layer_depth", - "seaice_suite", - "stc_eddy_heat_fluxes" + "ocn_surf_flux_diag" + //"mixed_layer_depth", + //"seaice_suite", + //"stc_eddy_heat_fluxes" // "albedofb" ], "case_list" : {