diff --git a/CHANGELOG.md b/CHANGELOG.md index 3934ab68..3c90bbd2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,11 @@ # CHANGELOG +## 0.#.# + +### Changed +- Non-standard "sample_{phi,omega,...}" groups in NXsample made optional and their NX_class now set to NXtransformations instead of NXpositioner. + ## 0.9.2 diff --git a/src/nexgen/nxs_copy/copy_utils.py b/src/nexgen/nxs_copy/copy_utils.py index d4238421..0f44e4fe 100644 --- a/src/nexgen/nxs_copy/copy_utils.py +++ b/src/nexgen/nxs_copy/copy_utils.py @@ -133,17 +133,25 @@ def convert_scan_axis( ax_range (ArrayLike): Scan points. If passed, axis_increment_set and axis_end will also be written.\ Defaults to None """ - del nxsample["transformations/" + ax] - nxsample["transformations/" + ax] = nxdata[ax] - name = ( - "sample_" + ax + "/" + ax if "sam" not in ax else "sample_" + ax[-1] + "/" + ax - ) - del nxsample[name] - nxsample[name] = nxdata[ax] + del nxsample[f"transformations/{ax}"] + nxsample[f"transformations/{ax}"] = nxdata[ax] + grp_name = f"sample_{ax}" if "sam" not in ax else f"sample_{ax[-1]}" + old_exists = grp_name in list(nxsample.keys()) + if old_exists: + del nxsample[grp_name] + nxsample[f"{grp_name}/{ax}"] = nxdata[ax] if ax_range is not None and "sam" not in ax: increment = round(ax_range[1] - ax_range[0], 3) - nxsample["sample_" + ax].create_dataset(ax + "_increment_set", data=increment) - nxsample["sample_" + ax].create_dataset(ax + "_end", data=ax_range + increment) + end = ax_range + increment + nxsample["transformations"].create_dataset( + f"{ax}_increment_set", data=increment + ) + nxsample["transformations"].create_dataset(f"{ax}_end", data=end) + if old_exists: + nxsample[f"{grp_name}/{ax}_increment_set"] = nxsample[ + f"transformations/{ax}_increment_set" + ] + nxsample[f"{grp_name}/{ax}_end"] = nxsample[f"transformations/{ax}_end"] def check_and_fix_det_axis(nxs_in: h5py.File): diff --git a/src/nexgen/nxs_write/nxclass_writers.py b/src/nexgen/nxs_write/nxclass_writers.py index 62145463..b67f41e3 100644 --- a/src/nexgen/nxs_write/nxclass_writers.py +++ b/src/nexgen/nxs_write/nxclass_writers.py @@ -31,6 +31,7 @@ ) from .write_utils import ( TSdset, + add_sample_axis_groups, calculate_origin, create_attributes, mask_and_flatfield_writer, @@ -73,10 +74,8 @@ def write_NXentry(nxsfile: h5py.File, definition: str = "NXmx") -> h5py.Group: def write_NXdata( nxsfile: h5py.File, datafiles: List[Path], - goniometer_axes: List[Axis], data_type: str, - osc_scan: Dict[str, ArrayLike], - transl_scan: Dict[str, ArrayLike] = None, + osc_axis: str = "omega", entry_key: str = "data", ): """ @@ -85,15 +84,13 @@ def write_NXdata( Args: nxsfile (h5py.File): NeXus file handle. datafiles (List[Path]): List of Path objects pointing to HDF5 data files. - goniometer_axes (List[Axis]): List of goniometer axes. data_type (str): Images or events. - osc_scan (Dict[str, ArrayLike]): Rotation scan. If writing events, this is just a (start, end) tuple. - transl_scan (Dict[str, ArrayLike], optional): Scan along the xy axes at sample. Defaults to None. + osc_axis (str, optional): Rotation scan axis name. Defaults to omega. entry_key (str): Entry key to create the external links to the data files. Defaults to data. Raises: OSError: If no data is passed. - ValueError: If the data typs is neither "images" nor "events". + ValueError: If the data type is neither "images" nor "events". """ NXclass_logger.info("Start writing NXdata.") # Check that a valid datafile_list has been passed.
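For reference, a minimal sketch of how the slimmed-down `write_NXdata` is now called, mirroring the `nxmx_writer.py` changes further down: only the rotation-axis name is passed, since the scan dataset is written by `write_NXtransformations` and linked into `/entry/data` by `write_NXsample`. File names below are hypothetical, and it is assumed the external links do not require the data files to exist yet.

```python
# Sketch only: the new write_NXdata call, with hypothetical file names.
from pathlib import Path

import h5py

from nexgen.nxs_write.nxclass_writers import write_NXdata

osc = {"omega": [0.0, 0.1, 0.2]}  # rotation scan built elsewhere by the writer

with h5py.File("example.nxs", "w") as nxs:
    # Before: write_NXdata(nxs, datafiles, goniometer.axes_list, "images", osc, transl)
    # Now only the rotation axis name is needed.
    write_NXdata(nxs, [Path("example_000001.h5")], "images", list(osc.keys())[0])
```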
@@ -102,9 +99,6 @@ def write_NXdata( "No HDF5 data filenames have been found. Please pass at least one." ) - # This assumes that a rotation scan is always passed - osc_axis, osc_range = list(osc_scan.items())[0] - # Create NXdata group, unless it already exists, in which case just open it. nxdata = nxsfile.require_group("/entry/data") create_attributes( @@ -149,43 +143,66 @@ def write_NXdata( "Unknown data type. Please pass one value for data_type from : [images, events]" ) - # Write rotation axis dataset - ax = nxdata.create_dataset(osc_axis, data=osc_range) - idx = [n for n, ax in enumerate(goniometer_axes) if ax.name == osc_axis][0] - dep = set_dependency( - goniometer_axes[idx].depends, path="/entry/sample/transformations/" - ) - # Write attributes for axis +# NXtransformations +def write_NXtransformations( + parent_group: h5py.Group, + axes: List[Axis], + scan: Optional[Dict[str, ArrayLike]] = None, + collection_type: str = "images", +): + """Write NXtransformations group. + + This group can be written either in /entry/sample/ for the goniometer or in \ /entry/instrument/detector for the detector axes. In the latter case, the scan \ should always be None. + + Args: + parent_group (h5py.Group): Handle to HDF5 group where NXtransformations \ should be written. + axes (List[Axis]): List of Axes to write to the NXtransformations group. + scan (Optional[Dict[str, ArrayLike]], optional): All the scan axes, both \ rotation and translation. Defaults to None. + collection_type (str, optional): Collection type, could be images or \ events. Defaults to "images". + """ + NXclass_logger.debug(f"Start writing NXtransformations group in {parent_group}.") + nxtransformations = parent_group.require_group("transformations") create_attributes( - ax, - ("depends_on", "transformation_type", "units", "vector"), - ( - dep, - goniometer_axes[idx].transformation_type, - goniometer_axes[idx].units, - goniometer_axes[idx].vector, - ), + nxtransformations, + ("NX_class",), + ("NXtransformations",), ) - # If present, add linear/grid scan details - if transl_scan: - for k, v in transl_scan.items(): - ax_dset = nxdata.create_dataset(k, data=v) - ax_idx = [n for n, ax in enumerate(goniometer_axes) if ax.name == k][0] - ax_dep = set_dependency( - goniometer_axes[ax_idx].depends, path="/entry/sample/transformations/" - ) - create_attributes( - ax_dset, - ("depends_on", "transformation_type", "units", "vector"), - ( - ax_dep, - goniometer_axes[ax_idx].transformation_type, - goniometer_axes[ax_idx].units, - goniometer_axes[ax_idx].vector, - ), - ) + for ax in axes: + # Dataset + data = ( + scan[ax.name] + if scan and ax.name in scan.keys() + else np.array([ax.start_pos]) + ) + # Dependency + ax_dep = set_dependency(ax.depends, path=nxtransformations.name) + + nxax = nxtransformations.create_dataset(ax.name, data=data) + create_attributes( + nxax, + ("depends_on", "transformation_type", "units", "vector"), + (ax_dep, ax.transformation_type, ax.units, ax.vector), + ) + + # Write _increment_set and _end for rotation axis + if scan and collection_type == "images": + if ax.name in scan.keys() and ax.transformation_type == "rotation": + NXclass_logger.debug( + f"Adding increment_set and end for axis {ax.name}."
+ ) + nxtransformations.create_dataset( + f"{ax.name}_increment_set", data=ax.increment + ) + increment_set = np.repeat(ax.increment, len(scan[ax.name])) + ax_end = scan[ax.name] + increment_set + nxtransformations.create_dataset(f"{ax.name}_end", data=ax_end) # NXsample @@ -197,6 +214,7 @@ def write_NXsample( transl_scan: Dict[str, ArrayLike] = None, sample_depends_on: str = None, sample_details: Dict[str, Any] = None, + add_nonstandard_fields: bool = True, ): """ Write NXsample group at /entry/sample. @@ -207,10 +225,13 @@ def write_NXsample( data_type (str): Images or events. osc_scan (Dict[str, ArrayLike]): Rotation scan. If writing events, this is just a (start, end) tuple. transl_scan (Dict[str, ArrayLike], optional): Scan along the xy axes at sample. Defaults to None. - sample_depends_on (str, optional): Axis on which the sample depends on. If absent, the depends_on field will be set to the last axis listed in the goniometer. Defaults to None. + sample_depends_on (str, optional): Axis on which the sample depends on. If absent, the depends_on field \ + will be set to the last axis listed in the goniometer. Defaults to None. sample_details (Dict[str, Any], optional): General information about the sample, eg. name, temperature. + add_nonstandard_fields (bool, optional): Choose whether to add the old "sample_{x,phi,...}/{x,phi,...}" to the group. \ + These fields are non-standard but may be needed for processing to run. Defaults to True. """ - NXclass_logger.info("Start writing NXsample and NXtransformations.") + NXclass_logger.info("Start writing NXsample.") # Create NXsample group, unless it already exists, in which case just open it. nxsample = nxsfile.require_group("/entry/sample") create_attributes( @@ -219,128 +240,35 @@ def write_NXsample( ("NXsample",), ) - # Create NXtransformations group: /entry/sample/transformations - nxtransformations = nxsample.require_group("transformations") - create_attributes( - nxtransformations, - ("NX_class",), - ("NXtransformations",), - ) + # Merge the scan dictionaries + full_scan = osc_scan if transl_scan is None else osc_scan | transl_scan - # Get rotation details - osc_axis, osc_range = list(osc_scan.items())[0] + # Create NXtransformations group: /entry/sample/transformations + write_NXtransformations(nxsample, goniometer_axes, full_scan, data_type) + if add_nonstandard_fields: + add_sample_axis_groups(nxsample, goniometer_axes) # Save sample depends_on if sample_depends_on: nxsample.create_dataset( "depends_on", - data=set_dependency(sample_depends_on, path=nxtransformations.name), + data=set_dependency( + sample_depends_on, path=nxsample["transformations"].name + ), ) else: nxsample.create_dataset( "depends_on", - data=set_dependency(goniometer_axes[-1].name, path=nxtransformations.name), + data=set_dependency( + goniometer_axes[-1].name, path=nxsample["transformations"].name + ), ) - # Get xy details if passed - scan_axes = [] - if transl_scan: - for k in transl_scan.keys(): - scan_axes.append(k) - - # Create sample_{axisname} groups - for idx, ax in enumerate(goniometer_axes): - axis_name = ax.name - grp_name = ( - f"sample_{axis_name[-1]}" if "sam_" in axis_name else f"sample_{axis_name}" - ) - nxsample_ax = nxsample.create_group(grp_name) - create_attributes(nxsample_ax, ("NX_class",), ("NXpositioner",)) - if axis_name == osc_axis: - # If we're dealing with the scan axis - if ( - "data" in nxsfile["/entry"].keys() - and axis_name in nxsfile["/entry/data"].keys() - ): - nxsample_ax[axis_name] = 
nxsfile[nxsfile["/entry/data"][axis_name].name] - nxtransformations[axis_name] = nxsfile[ - nxsfile["/entry/data"][axis_name].name - ] - else: - nxax = nxsample_ax.create_dataset(axis_name, data=osc_range) - _dep = set_dependency( - goniometer_axes[idx].depends, path="/entry/sample/transformations/" - ) - create_attributes( - nxax, - ("depends_on", "transformation_type", "units", "vector"), - ( - _dep, - goniometer_axes[idx].transformation_type, - goniometer_axes[idx].units, - goniometer_axes[idx].vector, - ), - ) - nxtransformations[axis_name] = nxsfile[nxax.name] - # Write {axisname}_increment_set and {axis_name}_end datasets - if data_type == "images": - increment_set = np.repeat( - goniometer_axes[idx].increment, len(osc_range) - ) - nxsample_ax.create_dataset( - axis_name + "_increment_set", - data=goniometer_axes[idx].increment, - ) # increment_set - nxsample_ax.create_dataset( - axis_name + "_end", data=osc_range + increment_set - ) - elif axis_name in scan_axes: - # For translations - if ( - "data" in nxsfile["/entry"].keys() - and axis_name in nxsfile["/entry/data"].keys() - ): - nxsample_ax[axis_name] = nxsfile[nxsfile["/entry/data"][axis_name].name] - nxtransformations[axis_name] = nxsfile[ - nxsfile["/entry/data"][axis_name].name - ] - else: - nxax = nxsample_ax.create_dataset( - axis_name, data=transl_scan[axis_name] - ) - _dep = set_dependency( - goniometer_axes[idx].depends, path="/entry/sample/transformations/" - ) - create_attributes( - nxax, - ("depends_on", "transformation_type", "units", "vector"), - ( - _dep, - goniometer_axes[idx].transformation_type, - goniometer_axes[idx].units, - goniometer_axes[idx].vector, - ), - ) - nxtransformations[axis_name] = nxsfile[nxax.name] - else: - # For all other axes - nxax = nxsample_ax.create_dataset( - axis_name, data=np.array([goniometer_axes[idx].start_pos]) - ) - _dep = set_dependency( - goniometer_axes[idx].depends, path="/entry/sample/transformations/" - ) - create_attributes( - nxax, - ("depends_on", "transformation_type", "units", "vector"), - ( - _dep, - goniometer_axes[idx].transformation_type, - goniometer_axes[idx].units, - goniometer_axes[idx].vector, - ), - ) - nxtransformations[axis_name] = nxsfile[nxax.name] + # Add scan axes datasets to NXdata + nxdata = nxsfile.require_group("/entry/data") + for ax in goniometer_axes: + if ax.name in full_scan.keys(): + nxdata[ax.name] = nxsfile[f"/entry/sample/transformations/{ax.name}"] # Look for nxbeam in file, if it's there make link try: @@ -602,57 +530,37 @@ def write_NXdetector( ) # Write NXtransformations: entry/instrument/detector/transformations/detector_z and two_theta - nxtransformations = nxdetector.require_group("transformations") - create_attributes(nxtransformations, ("NX_class",), ("NXtransformations",)) - - # Create groups for detector_z and any other detector axis (eg. two_theta) if present - # This assumes that the detector axes are fixed. - for idx, ax in enumerate(detector.detector_axes): - if ax.name == "det_z": - grp_name = "detector_z" - dist = units_of_length(str(detector.detector_axes[idx].start_pos) + "mm") - else: - grp_name = ax.name - - # It shouldn't be too much of an issue but just in case ... 
- if detector.detector_axes[idx].depends == "det_z": - grp_dep = "detector_z" - else: - grp_dep = detector.detector_axes[idx].depends - _dep = set_dependency( - detector.detector_axes[idx].depends, - nxtransformations.name + f"/{grp_dep}/", - ) + write_NXtransformations(nxdetector, detector.detector_axes) + # NXdetector depends on the last (often only) axis in the list + det_dep = set_dependency( + detector.detector_axes[-1].name, + path="/entry/instrument/detector/transformations", + ) + nxdetector.create_dataset("depends_on", data=det_dep) - nxgrp_ax = nxtransformations.create_group(grp_name) - create_attributes(nxgrp_ax, ("NX_class",), ("NXpositioner",)) - nxdet_ax = nxgrp_ax.create_dataset( - ax.name, data=np.array([detector.detector_axes[idx].start_pos]) - ) - create_attributes( - nxdet_ax, - ("depends_on", "transformation_type", "units", "vector"), - ( - _dep, - detector.detector_axes[idx].transformation_type, - detector.detector_axes[idx].units, - detector.detector_axes[idx].vector, - ), - ) - if ax.name == detector.detector_axes[-1].name: - # Detector depends_on - nxdetector.create_dataset( - "depends_on", - data=set_dependency(ax.name, path=nxgrp_ax.name), - ) + # Just a det_z check + if "det_z" not in list(nxdetector["transformations"].keys()): + NXclass_logger.error("No det_z field in nexus file.") + return - # Write a soft link for detector_z - if "detector_z" in list(nxtransformations.keys()): - nxdetector["detector_z"] = nxsfile[ - "/entry/instrument/detector/transformations/detector_z" - ] + # Write a soft link for detector_z, workaround for autoPROC + # TODO see https://github.com/DiamondLightSource/nexgen/issues/140 + nxdetector.create_group("detector_z") + create_attributes( + nxdetector["detector_z"], + ("NX_class",), + ("NXtransformations",), # NXtransformations instead of NXpositioner. TOBETESTED + ) + nxdetector["detector_z/det_z"] = nxsfile[ + "/entry/instrument/detector/transformations/det_z" + ] # Detector distance + det_z_idx = [ + n for n, ax in enumerate(detector.detector_axes) if ax.name == "det_z" + ][0] + dist = units_of_length(str(detector.detector_axes[det_z_idx].start_pos) + "mm") + nxdetector.create_dataset("distance", data=dist.to("m").magnitude) create_attributes( nxdetector["distance"], ("units",), (format(dist.to("m").units, "~")) @@ -712,7 +620,7 @@ def write_NXdetector_module( "vector", ), ( - "/entry/instrument/detector/transformations/detector_z/det_z", + "/entry/instrument/detector/transformations/det_z", offsets[0], "mm", "translation", @@ -776,7 +684,7 @@ def write_NXdetector_module( "vector", ), ( - "/entry/instrument/detector/transformations/detector_z/det_z", + "/entry/instrument/detector/transformations/det_z", [0, 0, 0], "mm", "translation", diff --git a/src/nexgen/nxs_write/nxmx_writer.py b/src/nexgen/nxs_write/nxmx_writer.py index 73795ac2..387e3351 100644 --- a/src/nexgen/nxs_write/nxmx_writer.py +++ b/src/nexgen/nxs_write/nxmx_writer.py @@ -141,6 +141,7 @@ def write( start_time: datetime | str | None = None, est_end_time: datetime | str | None = None, write_mode: str = "x", + add_non_standard: bool = True, ): """Write the NXmx format NeXus file. @@ -159,6 +160,8 @@ def write( in the format "%Y-%m-%dT%H:%M:%SZ". Defaults to None. write_mode (str, optional): String indicating writing mode for the output NeXus file. \ Accepts any valid h5py file opening mode. Defaults to "x". + add_non_standard (bool, optional): Flag if non-standard NXsample fields should be added \ + for processing to work. Defaults to True, will change in the future. 
""" metafile = self._get_meta_file(image_filename) if metafile: @@ -192,10 +195,8 @@ def write( write_NXdata( nxs, datafiles, - self.goniometer.axes_list, "images", - osc, - transl, + list(osc.keys())[0], ) # NXinstrument: entry/instrument @@ -234,6 +235,7 @@ def write( osc, transl, sample_depends_on=None, # TODO + add_nonstandard_fields=add_non_standard, ) def write_vds( @@ -332,6 +334,7 @@ def write( self, start_time: datetime | str | None = None, write_mode: str = "x", + add_non_standard: bool = False, ): """Write a NXmx-like NeXus file for event mode data collections. @@ -342,6 +345,8 @@ def write( Defaults to None. write_mode (str, optional): String indicating writing mode for the output NeXus file. Accepts any valid \ h5py file opening mode. Defaults to "x". + add_non_standard (bool, optional): Flag if non-standard NXsample fields should be added \ + for processing to work. Defaults to False. """ # Get metafile # No data files, just link to meta @@ -365,9 +370,8 @@ def write( write_NXdata( nxs, [metafile], - self.goniometer.axes_list, "events", - osc, + list(osc.keys())[0], ) # NXinstrument: entry/instrument @@ -404,6 +408,7 @@ def write( "events", osc, sample_depends_on=None, # TODO + add_nonstandard_fields=add_non_standard, ) @@ -524,9 +529,8 @@ def write( write_NXdata( nxs, datafiles, - self.goniometer.axes_list, "images", - osc, + list(osc.keys())[0], entry_key=data_entry_key, ) diff --git a/src/nexgen/nxs_write/write_utils.py b/src/nexgen/nxs_write/write_utils.py index 2d2d532b..c1d99324 100644 --- a/src/nexgen/nxs_write/write_utils.py +++ b/src/nexgen/nxs_write/write_utils.py @@ -15,6 +15,8 @@ from hdf5plugin import Bitshuffle, Blosc from numpy.typing import ArrayLike +from ..nxs_utils import Axis + # Logger NXclassUtils_logger = logging.getLogger("nexgen.NXclass_writers.utils") NXclassUtils_logger.setLevel(logging.DEBUG) @@ -28,7 +30,8 @@ def create_attributes(nxs_obj: h5py.Group | h5py.Dataset, names: Tuple, values: Create or overwrite attributes with additional metadata information. Args: - nxs_obj (h5py.Group | h5py.Dataset): NeXus object to which the attributes should be attached. + nxs_obj (h5py.Group | h5py.Dataset): NeXus object to which the \ + attributes should be attached. names (Tuple): The names of the new attributes. values (Tuple): The attribute values asociated to the names. """ @@ -42,11 +45,14 @@ def create_attributes(nxs_obj: h5py.Group | h5py.Dataset, names: Tuple, values: def set_dependency(dep_info: str, path: str = None): """ Define value for "depends_on" attribute. - If the attribute points to the head of the dependency chain, simply pass "." for dep_info. + If the attribute points to the head of the dependency chain, simply pass \ + "." for dep_info. Args: - dep_info (str): The name of the transformation upon which the current one depends on. - path (str): Where the transformation is. Set to None, if passed it points to location in the NeXus tree. + dep_info (str): The name of the transformation upon which the current \ + one depends on. + path (str): Where the transformation is. Set to None, if passed it \ + points to location in the NeXus tree. Returns: The value to be passed to the attribute "depends_on" """ @@ -70,9 +76,12 @@ def calculate_origin( """ Calculate the offset of the detector. - This function returns the detector origin array, which is saved as the vector attribute of the module_offset field. 
- The value to set the module_offset to is also returned: the magnitude of the displacement if the vector is normalized, 1.0 otherwise - Assumes that fast and slow axis vectors have already been converted to mcstas if needed. + This function returns the detector origin array, which is saved as the \ + vector attribute of the module_offset field. + The value to set the module_offset to is also returned: the magnitude of \ + the displacement if the vector is normalized, 1.0 otherwise + Assumes that fast and slow axis vectors have already been converted to \ + mcstas if needed. Args: beam_center_fs (List | Tuple): Beam center position in fast and slow direction. @@ -80,13 +89,16 @@ def calculate_origin( fast_axis_vector (Tuple): Fast axis vector. slow_axis_vector (Tuple): Slow axis vector. mode (str, optional): Decide how origin should be calculated. - If set to "1" the displacement vector is un-normalized and the offset value set to 1.0. - If set to "2" the displacement is normalized and the offset value is set to the magnitude of the displacement. + If set to "1" the displacement vector is un-normalized \ + and the offset value set to 1.0. + If set to "2" the displacement is normalized and the \ + offset value is set to the magnitude of the displacement. Defaults to "1". Returns: det_origin (List): Displacement of beam center, vector attribute of module_offset. - offset_val (float): Value to assign to module_offset, depending whether det_origin is normalized or not. + offset_val (float): Value to assign to module_offset, depending whether \ + det_origin is normalized or not. """ # what was calculate module_offset x_scaled = beam_center_fs[0] * fs_pixel_size[0] @@ -109,7 +121,8 @@ def find_number_of_images(datafile_list: List[Path], entry_key: str = "data") -> Args: datafile_list (List[Path]): List of paths to the input image files. - entry_key (str): Key for the location of the images inside the data files. Defaults to "data". + entry_key (str): Key for the location of the images inside the \ + data files. Defaults to "data". Returns: num_images (int): Total number of images. @@ -140,17 +153,18 @@ def mask_and_flatfield_writer( dset_data: str | ArrayLike, applied_val: bool, ): - """ Utility function to write mask or flatfield to NXdetector group for image data when not \ - already linked to the _meta.h5 file. - If the pixel_mask/flatfield data is passed as a string, it will be assumed to be a file path and \ - the writer will try to set up an external link to it. + """ Utility function to write mask or flatfield to NXdetector group for \ + image data when not already linked to the _meta.h5 file. + If the pixel_mask/flatfield data is passed as a string, it will be assumed \ + to be a file path and the writer will try to set up an external link to it. Args: nxdet_grp (h5py.Group): Handle to HDF5 NXdetector group. dset_name (str): Name of the new field/dataset to be written. - dset_data (str | ArrayLike): Dataset data to be written in the field. Can be a string or an \ - array-like dataset. If the data type is a numpy ndarray, it will be compressed before writing. - applied_val (bool): Value to write to the `{flatfield,pixel_mask}_applied` fields. + dset_data (str | ArrayLike): Dataset data to be written in the field. \ + Can be a string or an array-like dataset. \ + If the data type is a numpy ndarray, it will be compressed before writing. + applied_val (bool): Value to write to `{flatfield,pixel_mask}_applied` fields. 
""" if dset_data is None: NXclassUtils_logger.warning( @@ -239,25 +253,32 @@ def write_compressed_copy( **kwargs, ): """ - Write a compressed copy of some dataset in the desired HDF5 group, using the filter of choice with lz4 compression. Available filters \ - at this time include "Blosc" and "Bitshuffle" (default). - The main application for this function in nexgen is to write a compressed copy of a pixel mask or a flatfield file/dataset \ - directly into the NXdetector group of a NXmx NeXus file. - The data and filename arguments are mutually exclusive as only one of them can be used as input. - If a filename is passed, it is also required to pass the key for the relevant dataset to be copied. Failure to do so will result \ - in nothing being written to the NeXus file. + Write a compressed copy of some dataset in the desired HDF5 group, using \ + the filter of choice with lz4 compression. Available filters at this \ + time include "Blosc" and "Bitshuffle" (default). + The main application for this function in nexgen is to write a compressed \ + copy of a pixel mask or a flatfield file/dataset directly into the \ + NXdetector group of a NXmx NeXus file. + The data and filename arguments are mutually exclusive as only one of them \ + can be used as input. + If a filename is passed, it is also required to pass the key for the \ + relevant dataset to be copied. Failure to do so will result in nothing being \ + written to the NeXus file. Args: nxgroup (h5py.Group): Handle to HDF5 group. dset_name (str): Name of the new dataset to be written. data (ArrayLike, optional): Dataset to be compressed. Defaults to None. - filename (Path | str, optional): Filename containing the dataset to be compressed into the NeXus file. Defaults to None. - filter_choice (str, optional): Filter to be used for compression. Either blosc or bitshuffle. Defaults to bitshuffle. - dset_key (str, optional): Dataset name inside the passed file. Defaults to "image". + filename (Path | str, optional): Filename containing the dataset to be \ + compressed into the NeXus file. Defaults to None. + filter_choice (str, optional): Filter to be used for compression. \ + Either blosc or bitshuffle. Defaults to bitshuffle. + dset_key (str, optional): Dataset name inside the passed file. \ + Defaults to "image". Keyword Args: - block_size (int, optional): Number of elements per block, it needs to be divisible by 8. Needed for Bitshuffle filter. \ - Defaults to 0. + block_size (int, optional): Number of elements per block, it needs to \ + be divisible by 8. Needed for Bitshuffle filter. Defaults to 0. Raises: ValueError: If both a dataset and a filename have been passed to the function. @@ -296,3 +317,27 @@ def write_compressed_copy( NXclassUtils_logger.info( f"A compressed copy of the {dset_name} has been written into the NeXus file." ) + + +def add_sample_axis_groups(nxsample: h5py.Group, axis_list: List[Axis]): + """ + Add non-standard "sample_{phi,omega,...}" groups to NXsample. These may be needed for \ + some autoprocessing tools to work correctly. + + Args: + nxsample (h5py.Group): NeXus NXsample group. + axis_list (List[Axis]): List of goniometer axes. + """ + NXclassUtils_logger.debug("Add non-standard fields for autoPROC to work.") + nxtransf = nxsample["transformations"] + for ax in axis_list: + grp_name = f"sample_{ax.name[-1]}" if "sam" in ax.name else f"sample_{ax.name}" + nx_ax = nxsample.require_group(grp_name) + # NOTE: NX_class here set to NXtransformations instead of NXpositioner + # One step closer to standard. 
TO BE TESTED + create_attributes(nx_ax, ("NX_class",), ("NXtransformations",)) + nx_ax[ax.name] = nxtransf[ax.name] + if f"{ax.name}_end" in nxtransf.keys(): + nx_ax[f"{ax.name}_end"] = nxtransf[f"{ax.name}_end"] + if f"{ax.name}_increment_set" in nxtransf.keys(): + nx_ax[f"{ax.name}_increment_set"] = nxtransf[f"{ax.name}_increment_set"] diff --git a/tests/nxs_write/test_NXclassWriters.py b/tests/nxs_write/test_NXclassWriters.py index e74dd9fd..6fdda585 100644 --- a/tests/nxs_write/test_NXclassWriters.py +++ b/tests/nxs_write/test_NXclassWriters.py @@ -27,6 +27,7 @@ write_NXnote, write_NXsample, write_NXsource, + write_NXtransformations, ) test_module = {"fast_axis": [1, 0, 0], "slow_axis": [0, 1, 0]} @@ -35,7 +36,7 @@ def test_given_no_data_files_when_write_NXdata_then_assert_error(): mock_hdf5_file = MagicMock() with pytest.raises(OSError): - write_NXdata(mock_hdf5_file, [], [], "", "", []) + write_NXdata(mock_hdf5_file, [], "") def test_write_NXentry(dummy_nexus_file): @@ -64,73 +65,81 @@ def test_write_NXSource_with_probe(dummy_nexus_file, mock_source): assert dummy_nexus_file["/entry/source/probe"][()] == b"electron" -def test_given_no_data_type_specified_when_write_NXdata_then_exception_raised( +def test_write_NXtransformations_for_detector_axes(dummy_nexus_file): + det_axes = [ + Axis("two_theta", ".", "rotation", (0, 0, -1), start_pos=90), + Axis("det_z", "Two_theta", "translation", (0, 0, 1), start_pos=500), + ] + nxdet = dummy_nexus_file.require_group("/entry/instrument/detector/") + write_NXtransformations(nxdet, det_axes) + + assert "transformations" in nxdet.keys() + assert ( + "det_z" in nxdet["transformations"].keys() + and "two_theta" in nxdet["transformations"].keys() + ) + assert_array_equal(nxdet["transformations/det_z"][()], 500) + assert_array_equal(nxdet["transformations/two_theta"][()], 90) + + +def test_write_NXtransformations_for_sample_with_rotation_scan( dummy_nexus_file, mock_goniometer +): + nxsample = dummy_nexus_file.require_group("/entry/sample/") + write_NXtransformations(nxsample, mock_goniometer.axes_list, mock_goniometer.scan) + + assert "transformations" in nxsample.keys() + assert_array_equal(nxsample["transformations/omega"][()], np.arange(0, 90, 1)) + assert ( + "omega_increment_set" in nxsample["transformations"].keys() + and "omega_end" in nxsample["transformations"].keys() + ) + assert_array_equal(nxsample["transformations/sam_z"][()], 0.0) + + +def test_write_NXtransformations_for_sample_for_events(dummy_nexus_file): + axes_list = [Axis("phi", ".", "rotation", (0, 0, -1), start_pos=10)] + test_scan = {"phi": (10, 12)} + test_gonio = Goniometer(axes_list, test_scan) + nxsample = dummy_nexus_file.require_group("/entry/sample/") + write_NXtransformations(nxsample, test_gonio.axes_list, test_gonio.scan, "events") + + assert "phi_end" not in nxsample["transformations"].keys() + assert_array_equal(nxsample["transformations/phi"], test_scan["phi"]) + + +def test_given_no_data_type_specified_when_write_NXdata_then_exception_raised( + dummy_nexus_file, ): with pytest.raises(ValueError): write_NXdata( dummy_nexus_file, [Path("tmp")], - mock_goniometer.axes_list, "", - mock_goniometer.scan, ) -def test_given_one_data_file_when_write_NXdata_then_data_in_file( - dummy_nexus_file, mock_goniometer -): +def test_given_one_data_file_when_write_NXdata_then_data_in_file(dummy_nexus_file): write_NXdata( dummy_nexus_file, [Path("tmp")], - mock_goniometer.axes_list, "images", - mock_goniometer.scan, + "omega", ) assert dummy_nexus_file["/entry/data"].attrs["NX_class"] 
== b"NXdata" + assert dummy_nexus_file["/entry/data"].attrs["axes"] == b"omega" + assert dummy_nexus_file["/entry/data"].attrs["omega_indices"] == [0] assert "data_000001" in dummy_nexus_file["/entry/data"] -def test_given_scan_axis_when_write_NXdata_then_axis_in_data_entry_with_correct_data_and_attributes( - dummy_nexus_file, mock_goniometer -): - test_axis = "omega" - test_scan_range = np.arange(0, 90, 1) - axis_entry = f"/entry/data/{test_axis}" - - write_NXdata( - dummy_nexus_file, - [Path("tmp")], - mock_goniometer.axes_list, - "images", - mock_goniometer.scan, - ) - - assert test_axis in dummy_nexus_file["/entry/data"] - assert_array_equal(test_scan_range, dummy_nexus_file[axis_entry][:]) - assert dummy_nexus_file[axis_entry].attrs["depends_on"] == b"." - assert dummy_nexus_file[axis_entry].attrs["transformation_type"] == b"rotation" - assert dummy_nexus_file[axis_entry].attrs["units"] == b"deg" - assert_array_equal(dummy_nexus_file[axis_entry].attrs["vector"][:], [-1.0, 0.0, 0]) - - -def test_given_scan_axis_when_write_NXsample_then_scan_axis_data_copied_from_data_group_as_well_as_increment_set_and_end( +def test_given_scan_axis_when_write_NXsample_then_scan_axis_data_written_and_link_to_NXdata_created( dummy_nexus_file, mock_goniometer ): test_axis = "omega" test_scan_range = [0, 1, 2] - axis_entry = f"/entry/sample/sample_{test_axis}/{test_axis}" + axis_entry = f"/entry/sample/transformations/{test_axis}" osc_scan = {test_axis: test_scan_range} - # Doing this to write the scan axis data into the data group - write_NXdata( - dummy_nexus_file, - [Path("tmp")], - mock_goniometer.axes_list, - "images", - osc_scan, - ) - write_NXsample( dummy_nexus_file, mock_goniometer.axes_list, @@ -138,7 +147,7 @@ def test_given_scan_axis_when_write_NXsample_then_scan_axis_data_copied_from_dat osc_scan, ) - assert f"sample_{test_axis}" in dummy_nexus_file["/entry/sample"] + assert "transformations" in dummy_nexus_file["/entry/sample"] assert_array_equal(test_scan_range, dummy_nexus_file[axis_entry][:]) assert dummy_nexus_file[axis_entry].attrs["depends_on"] == b"." 
assert dummy_nexus_file[axis_entry].attrs["transformation_type"] == b"rotation" @@ -147,38 +156,33 @@ def test_given_scan_axis_when_write_NXsample_then_scan_axis_data_copied_from_dat assert_array_equal(dummy_nexus_file[axis_entry + "_increment_set"][()], 1) # assert_array_equal(dummy_nexus_file[axis_entry + "_increment_set"][:], [1] * 3) assert dummy_nexus_file[axis_entry + "_end"][1] == 2 + assert f"{test_axis}" in dummy_nexus_file["/entry/data"] -def test_given_reverse_rotation_scan_increment_set_and_axis_end_written_correctly( +def test_given_reverse_rotation_scan_increment_set_and_axis_end_written_correctly_and_old_fields_not_added( dummy_nexus_file, ): test_axis = Axis("phi", ".", TransformationType.ROTATION, (0, 0, -1)) test_rw_scan = {"phi": np.arange(10, 8, -0.5)} test_gonio = Goniometer([test_axis], test_rw_scan) - # Doing this to write the scan axis data into the data group - write_NXdata( - dummy_nexus_file, - [Path("tmp")], - test_gonio.axes_list, - "images", - test_gonio.scan, - ) - write_NXsample( dummy_nexus_file, test_gonio.axes_list, "images", test_rw_scan, sample_depends_on=test_axis.name, + add_nonstandard_fields=False, ) - axis_entry = f"/entry/sample/sample_{test_axis.name}/{test_axis.name}" + axis_entry = f"/entry/sample/transformations/{test_axis.name}" assert_array_equal(dummy_nexus_file[axis_entry][()], [10.0, 9.5, 9.0, 8.5]) assert_array_equal(dummy_nexus_file[axis_entry + "_increment_set"][()], -0.5) assert_array_equal(dummy_nexus_file[axis_entry + "_end"][()], [9.5, 9.0, 8.5, 8.0]) + assert "sample_phi" not in dummy_nexus_file["/entry/sample"].keys() + def test_sample_depends_on_written_correctly_in_NXsample( dummy_nexus_file, mock_goniometer @@ -187,15 +191,6 @@ def test_sample_depends_on_written_correctly_in_NXsample( test_scan_range = [0, 1, 2] osc_scan = {test_axis: test_scan_range} - # Doing this to write the scan axis data into the data group - write_NXdata( - dummy_nexus_file, - [Path("tmp")], - mock_goniometer.axes_list, - "images", - mock_goniometer.scan, - ) - write_NXsample( dummy_nexus_file, mock_goniometer.axes_list, @@ -221,24 +216,46 @@ def test_sample_depends_on_written_correctly_in_NXsample_when_value_not_passed( test_depends = f"/entry/sample/transformations/{mock_goniometer.axes_list[-1].name}" - # Doing this to write the scan axis data into the data group - write_NXdata( + write_NXsample( dummy_nexus_file, - [Path("tmp")], mock_goniometer.axes_list, "images", - mock_goniometer.scan, + osc_scan, ) + assert "depends_on" in dummy_nexus_file["/entry/sample"] + assert dummy_nexus_file["/entry/sample/depends_on"][()] == test_depends.encode() + + +def test_old_sample_groups_added_correctly_to_NXsample_for_rotation_scan( + dummy_nexus_file, +): + test_axes = [ + Axis("phi", ".", TransformationType.ROTATION, (0, 0, -1)), + Axis("sam_z", "phi", TransformationType.TRANSLATION, (0, 0, 1)), + ] + test_scan = {"phi": np.arange(0, 1, 0.1)} + test_gonio = Goniometer(test_axes, test_scan) + write_NXsample( dummy_nexus_file, - mock_goniometer.axes_list, + test_gonio.axes_list, "images", - osc_scan, + test_scan, + sample_depends_on=test_axes[0].name, ) - assert "depends_on" in dummy_nexus_file["/entry/sample"] - assert dummy_nexus_file["/entry/sample/depends_on"][()] == test_depends.encode() + sample_path = "/entry/sample/" + + assert "sample_phi" in dummy_nexus_file[sample_path].keys() + assert ( + "sample_z" in dummy_nexus_file[sample_path].keys() + and "sam_z" in dummy_nexus_file[sample_path]["sample_z"].keys() + ) + assert_array_equal( + 
dummy_nexus_file[sample_path]["transformations/phi"], + dummy_nexus_file[sample_path]["sample_phi/phi"][()], + ) def test_sample_details_in_NXsample(dummy_nexus_file, mock_goniometer): @@ -248,15 +265,6 @@ def test_sample_details_in_NXsample(dummy_nexus_file, mock_goniometer): test_scan_range = [0, 1, 2] osc_scan = {test_axis: test_scan_range} - # Doing this to write the scan axis data into the data group - write_NXdata( - dummy_nexus_file, - [Path("tmp")], - mock_goniometer.axes_list, - "images", - mock_goniometer.scan, - ) - write_NXsample( dummy_nexus_file, mock_goniometer.axes_list, @@ -432,8 +440,8 @@ def test_write_NXdetector_for_eiger_images_without_meta_file( # Check detector axis and distance tr = det + "transformations/" - assert "detector_z" in list(dummy_nexus_file[tr].keys()) - axis_entry = tr + "detector_z/det_z" + assert "det_z" in list(dummy_nexus_file[tr].keys()) + axis_entry = tr + "det_z" assert_array_equal( mock_eiger.detector_axes[0].start_pos, dummy_nexus_file[axis_entry][()] ) @@ -453,6 +461,7 @@ def test_write_NXdetector_for_eiger_images_without_meta_file( # Check that detector_z has also been written in /detector assert "detector_z" in list(dummy_nexus_file[det].keys()) + assert "det_z" in list(dummy_nexus_file[det + "detector_z"].keys()) @patch("nexgen.nxs_write.nxclass_writers.write_NXcollection") diff --git a/tests/nxs_write/test_write_utils.py b/tests/nxs_write/test_write_utils.py index 42dafc75..3710db51 100644 --- a/tests/nxs_write/test_write_utils.py +++ b/tests/nxs_write/test_write_utils.py @@ -5,7 +5,9 @@ import numpy as np import pytest +from nexgen.nxs_write.nxclass_writers import write_NXtransformations from nexgen.nxs_write.write_utils import ( + add_sample_axis_groups, calculate_estimated_end_time, calculate_origin, create_attributes, @@ -122,3 +124,16 @@ def test_calculate_estimated_end_time_from_datetime(): timestamp = datetime.strptime("2023-11-15T10:30:42", "%Y-%m-%dT%H:%M:%S") est_end_time = calculate_estimated_end_time(timestamp, 20) assert est_end_time == "2023-11-15T10:31:02Z" + + +def test_add_non_standard_fields_to_NXsample(dummy_nexus_file, mock_goniometer): + nxsample_path = "/entry/sample" + nxsample = dummy_nexus_file.require_group(nxsample_path) + write_NXtransformations(nxsample, mock_goniometer.axes_list, mock_goniometer.scan) + add_sample_axis_groups(nxsample, mock_goniometer.axes_list) + + assert "sample_omega" in list(dummy_nexus_file[nxsample_path].keys()) + assert "sample_z" in list(dummy_nexus_file[nxsample_path].keys()) + assert "omega_increment_set" in list( + dummy_nexus_file[nxsample_path]["sample_omega"] + )
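Finally, a sketch (hypothetical file name; import paths as in the tests above) of what the optional legacy groups look like when they are added: they now carry NX_class = NXtransformations rather than NXpositioner and simply link back to the canonical datasets under transformations/.

```python
# Sketch only: writing and inspecting the optional "sample_<axis>" groups.
import h5py
import numpy as np

from nexgen.nxs_utils import Axis
from nexgen.nxs_write.nxclass_writers import write_NXtransformations
from nexgen.nxs_write.write_utils import add_sample_axis_groups

axes = [
    Axis("omega", ".", "rotation", (-1, 0, 0)),
    Axis("sam_z", "omega", "translation", (0, 0, 1)),
]
scan = {"omega": np.arange(0, 3, 1)}

with h5py.File("legacy_groups_sketch.nxs", "w") as nxs:
    nxsample = nxs.require_group("/entry/sample")
    write_NXtransformations(nxsample, axes, scan)
    add_sample_axis_groups(nxsample, axes)

    # "sam_z" is shortened to "sample_z"; other axes keep their full name.
    assert sorted(k for k in nxsample if k.startswith("sample_")) == ["sample_omega", "sample_z"]
    # The legacy groups are NXtransformations and only link to the canonical data.
    assert nxsample["sample_omega"].attrs["NX_class"] == b"NXtransformations"
    assert (nxsample["sample_omega/omega"][()] == nxsample["transformations/omega"][()]).all()
```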