|
| 1 | +# Copyright Iris contributors |
| 2 | +# |
| 3 | +# This file is part of Iris and is released under the LGPL license. |
| 4 | +# See COPYING and COPYING.LESSER in the root of the repository for full |
| 5 | +# licensing details. |
| 6 | +""" |
| 7 | +Generate FF, PP and NetCDF files based on a minimal synthetic FF file. |
| 8 | +
|
| 9 | +NOTE: uses the Mule package, so depends on an environment with Mule installed. |
| 10 | +""" |
| 11 | + |
| 12 | + |
def _create_um_files(
    len_x: int, len_y: int, len_z: int, len_t: int, compress, save_paths: dict
) -> None:
    """
    Generate an FF object of given shape and compression, save to FF/PP/NetCDF.

    This is run externally
    (:func:`benchmarks.generate_data.run_function_elsewhere`), so all imports
    are self-contained and input parameters are simple types.

    Parameters: ``len_x``/``len_y`` are the horizontal grid dimensions,
    ``len_z`` the number of vertical levels, ``len_t`` the number of time
    steps, ``compress`` the WGDOS packing switch (also used as the NetCDF
    ``zlib`` flag), and ``save_paths`` a dict mapping any of
    ``"FF"``/``"PP"``/``"NetCDF"`` to an output file path.
    """
    from copy import deepcopy
    from datetime import datetime
    from tempfile import NamedTemporaryFile

    from mo_pack import compress_wgdos as mo_pack_compress
    from mule import ArrayDataProvider, Field3, FieldsFile
    from mule.pp import fields_to_pp_file
    import numpy as np

    from iris import load_cube
    from iris import save as save_cube

    def packing_patch(*compress_args, **compress_kwargs) -> bytes:
        """
        Force conversion from returned :class:`memoryview` to :class:`bytes`.

        Downstream uses of :func:`mo_pack.compress_wgdos` were written
        for the ``Python2`` behaviour, where the returned buffer had a
        different ``__len__`` value to the current :class:`memoryview`.
        Unable to fix directly in Mule, so monkey patching for now.
        """
        return mo_pack_compress(*compress_args, **compress_kwargs).tobytes()

    import mo_pack

    mo_pack.compress_wgdos = packing_patch

    ########

    # Minimal FieldsFile header template: dataset_type 3 == fields file,
    # grid_staggering 3 == new dynamics (Arakawa C) staggering.
    template = {
        "fixed_length_header": {"dataset_type": 3, "grid_staggering": 3},
        "integer_constants": {
            "num_p_levels": len_z,
            "num_cols": len_x,
            "num_rows": len_y,
        },
        "real_constants": {},
        "level_dependent_constants": {"dims": (len_z + 1, None)},
    }
    new_ff = FieldsFile.from_template(deepcopy(template))

    # Field data must be (rows, columns) to agree with the lbrow/lbnpt
    # lookup entries set below, i.e. shape (len_y, len_x).
    # NOTE: previously reshaped to (len_x, len_y), which transposed the data
    # relative to its own headers whenever len_x != len_y.
    data_array = np.arange(len_x * len_y).reshape(len_y, len_x)
    array_provider = ArrayDataProvider(data_array)

    def add_field(level_: int, time_step_: int) -> None:
        """
        Add a minimal field to the new :class:`~mule.FieldsFile`.

        Includes the minimum information to allow Mule saving and Iris
        loading, as well as incrementation for vertical levels and time
        steps to allow generation of z and t dimensions.
        """
        new_field = Field3.empty()
        # To correspond to the header-release 3 class used.
        new_field.lbrel = 3
        # Mule uses the first element of the lookup to test for
        # unpopulated fields (and skips them), so the first element should
        # be set to something. The year will do.
        new_field.raw[1] = datetime.now().year

        # Horizontal.
        new_field.lbcode = 1
        new_field.lbnpt = len_x
        new_field.lbrow = len_y
        new_field.bdx = new_ff.real_constants.col_spacing
        new_field.bdy = new_ff.real_constants.row_spacing
        # Zeroth-point origins sit half a grid spacing before the start
        # coordinates, per the UM convention.
        new_field.bzx = new_ff.real_constants.start_lon - 0.5 * new_field.bdx
        new_field.bzy = new_ff.real_constants.start_lat - 0.5 * new_field.bdy

        # Hemisphere.
        new_field.lbhem = 32
        # Processing.
        new_field.lbproc = 0

        # Vertical.
        # Hybrid height values by simulating sequences similar to those in a
        # theta file.
        new_field.lbvc = 65
        if level_ == 0:
            # 9999 marks the surface level.
            new_field.lblev = 9999
        else:
            new_field.lblev = level_

        level_1 = level_ + 1
        six_rec = 20 / 3
        three_rec = six_rec / 2

        # Quadratic-ish level heights, loosely mimicking a real theta file.
        new_field.blev = level_1 ** 2 * six_rec - six_rec
        new_field.brsvd1 = (
            level_1 ** 2 * six_rec + (six_rec * level_1) - three_rec
        )

        # Sigma-like values decreasing with height; bhrlev lags brsvd2 by
        # `shift` levels (padded with ones at the bottom).
        brsvd2_simulated = np.linspace(0.995, 0, len_z)
        shift = min(len_z, 2)
        bhrlev_simulated = np.concatenate(
            [np.ones(shift), brsvd2_simulated[:-shift]]
        )
        new_field.brsvd2 = brsvd2_simulated[level_]
        new_field.bhrlev = bhrlev_simulated[level_]

        # Time.
        new_field.lbtim = 11

        # Use the year alone to step time; all finer components zeroed.
        new_field.lbyr = time_step_
        for attr_name in ["lbmon", "lbdat", "lbhr", "lbmin", "lbsec"]:
            setattr(new_field, attr_name, 0)

        new_field.lbyrd = time_step_ + 1
        for attr_name in ["lbmond", "lbdatd", "lbhrd", "lbmind", "lbsecd"]:
            setattr(new_field, attr_name, 0)

        # Data and packing.
        new_field.lbuser1 = 1
        # lbpack 1 == WGDOS packing, 0 == unpacked.
        new_field.lbpack = int(compress)
        new_field.bacc = 0
        new_field.bmdi = -1
        new_field.lbext = 0
        new_field.set_data_provider(array_provider)

        new_ff.fields.append(new_field)

    for time_step in range(len_t):
        for level in range(len_z):
            add_field(level, time_step + 1)

    ff_path = save_paths.get("FF", None)
    pp_path = save_paths.get("PP", None)
    nc_path = save_paths.get("NetCDF", None)

    if ff_path:
        new_ff.to_file(ff_path)
    if pp_path:
        fields_to_pp_file(str(pp_path), new_ff.fields)
    if nc_path:
        temp_ff_path = None
        # Need an Iris Cube from the FF content.
        if ff_path:
            # Use the existing file.
            ff_cube = load_cube(ff_path)
        else:
            # Make a temporary file.
            temp_ff_path = NamedTemporaryFile()
            new_ff.to_file(temp_ff_path.name)
            ff_cube = load_cube(temp_ff_path.name)

        save_cube(ff_cube, nc_path, zlib=compress)
        if temp_ff_path:
            temp_ff_path.close()
| 172 | + |
| 173 | +FILE_EXTENSIONS = {"FF": "", "PP": ".pp", "NetCDF": ".nc"} |
| 174 | + |
| 175 | + |
def create_um_files(
    len_x: int,
    len_y: int,
    len_z: int,
    len_t: int,
    compress: bool,
    file_types: list,
) -> dict:
    """
    Generate FF-based FF / PP / NetCDF files with specified shape and compression.

    All files representing a given shape are saved in a dedicated directory. A
    dictionary of the saved paths is returned.

    If the required files exist, they are re-used, unless
    :const:`benchmarks.REUSE_DATA` is ``False``.

    ``file_types`` is a list of keys from :const:`FILE_EXTENSIONS`; the
    returned dict maps each requested type to its saved path (as ``str``).
    """
    # Self contained imports to avoid linting confusion with _create_um_files().
    from . import BENCHMARK_DATA, REUSE_DATA, run_function_elsewhere

    # One directory per data shape, e.g. "UM_100_100_2_5"; both compression
    # variants of each file type live together in it.
    save_name_sections = ["UM", len_x, len_y, len_z, len_t]
    save_name = "_".join(str(section) for section in save_name_sections)
    save_dir = BENCHMARK_DATA / save_name
    # exist_ok avoids the race between a separate is_dir() check and mkdir().
    save_dir.mkdir(parents=True, exist_ok=True)

    save_paths = {}
    files_exist = True
    for file_type in file_types:
        file_ext = FILE_EXTENSIONS[file_type]
        # File stem records the compression setting, e.g. "True.pp".
        save_path = (save_dir / f"{compress}").with_suffix(file_ext)
        files_exist = files_exist and save_path.is_file()
        save_paths[file_type] = str(save_path)

    # Generation runs in an external environment with Mule installed, so only
    # do it when the files are missing or re-use is disabled.
    if not REUSE_DATA or not files_exist:
        _ = run_function_elsewhere(
            _create_um_files, len_x, len_y, len_z, len_t, compress, save_paths
        )

    return save_paths
0 commit comments