Fixes for 16bit and palette color LUTs
CPBridge committed Feb 1, 2024
1 parent 609a5f6 commit 8f104b1
Showing 3 changed files with 148 additions and 26 deletions.
4 changes: 2 additions & 2 deletions src/highdicom/content.py
@@ -2401,7 +2401,7 @@ def __init__(
'Length of argument "lut_data" must be no greater than '
'2^(bits per entry) elements.'
)
elif len_data == 2 ** bits_per_entry:
elif len_data == 2 ** 16:
# Per the standard, this is recorded as 0
number_of_entries = 0
else:
@@ -2614,7 +2614,7 @@ def __init__(
)

len_data = len(expanded_lut_values)
if len_data == 2 ** bits_per_entry:
if len_data == 2 ** 16:
number_of_entries = 0
else:
number_of_entries = len_data
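Editor's note: the two content.py changes encode a DICOM convention. The first value of the Palette Color Lookup Table Descriptor is a 16-bit field, so a LUT with exactly 2**16 entries records its length as 0; tying the check to 2**16 rather than 2**bits_per_entry avoids wrongly writing 0 for, say, a full 256-entry LUT with 8 bits per entry. A minimal sketch of the rule (the helper name below is hypothetical, not highdicom's implementation):

from typing import Sequence

def lut_number_of_entries(lut_data: Sequence[int]) -> int:
    # Sketch only: the descriptor field is 16 bits wide, so 65536 cannot be
    # stored directly and is recorded as 0 per the DICOM standard.
    len_data = len(lut_data)
    if len_data == 0 or len_data > 2 ** 16:
        raise ValueError('LUT must contain between 1 and 2**16 entries.')
    if len_data == 2 ** 16:
        return 0
    return len_data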
60 changes: 37 additions & 23 deletions src/highdicom/seg/sop.py
@@ -1635,6 +1635,7 @@ def __init__(
content_creator_identification

if self.SegmentationType == SegmentationTypeValues.BINARY.value:
dtype = np.uint8
self.BitsAllocated = 1
self.HighBit = 0
if self.file_meta.TransferSyntaxUID.is_encapsulated:
@@ -1644,6 +1645,7 @@ def __init__(
'is not compatible with the BINARY segmentation type'
)
elif self.SegmentationType == SegmentationTypeValues.FRACTIONAL.value:
dtype = np.uint8
self.BitsAllocated = 8
self.HighBit = 7
segmentation_fractional_type = SegmentationFractionalTypeValues(
@@ -1659,22 +1661,19 @@ def __init__(
# Decide on the output datatype and update the image metadata
# accordingly. Use the smallest possible type unless there is
# a palette color LUT that says otherwise.
labelmap_dtype = _get_unsigned_dtype(len(segment_descriptions))
if labelmap_dtype == np.uint32:
raise ValueError(
"Too many classes to represent with a 16 bit integer."
)
labelmap_bitdepth = np.iinfo(labelmap_dtype).bits
if palette_color_lut_transformation is not None:
lut_bitdepth = (
palette_color_lut_transformation.red_lut.bits_per_entry
)
if lut_bitdepth < labelmap_bitdepth:
labelmap_bitdepth = lut_bitdepth
dtype = np.dtype(f'u{labelmap_bitdepth // 8}')
else:
dtype = _get_unsigned_dtype(len(segment_descriptions))
if dtype == np.uint32:
raise ValueError(
'The labelmap provided does not have entries '
'to cover the number all specified classes.'
"Too many classes to represent with a 16 bit integer."
)
labelmap_bitdepth = lut_bitdepth
labelmap_bitdepth = np.iinfo(dtype).bits
self.BitsAllocated = labelmap_bitdepth
self.HighBit = self.BitsAllocated - 1
self.BitsStored = self.BitsAllocated
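Editor's note: as the diff reads, the LABELMAP branch now decides the pixel data type in one place. If a palette color LUT is supplied, the stored bit depth follows the LUT's bits per entry; otherwise the smallest unsigned integer type that can hold every segment number is used, and anything that would require 32 bits is rejected. A hedged sketch of that decision (choose_labelmap_dtype is hypothetical; _get_unsigned_dtype and the LUT attribute names are taken from the diff, and the real control flow may differ slightly):

import numpy as np

def choose_labelmap_dtype(number_of_segments, palette_color_lut_transformation=None):
    # Sketch of the dtype decision, not the exact highdicom code.
    if palette_color_lut_transformation is not None:
        # The stored bit depth must match the palette color LUT entries.
        bits = palette_color_lut_transformation.red_lut.bits_per_entry
        return np.dtype(f'u{bits // 8}')
    # Otherwise use the smallest unsigned type that holds all segment numbers.
    if number_of_segments < 2 ** 8:
        return np.dtype(np.uint8)
    if number_of_segments < 2 ** 16:
        return np.dtype(np.uint16)
    raise ValueError('Too many classes to represent with a 16 bit integer.')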
@@ -1737,6 +1736,13 @@ def __init__(
'to cover all segments.'
)

for desc in segment_descriptions:
if hasattr(desc, 'RecommendedDisplayCIELabValue'):
raise ValueError(
'Segment descriptions should not specify a display '
'color when using a palette color LUT.'
)

# Add the LUT to this instance
_add_palette_color_lookup_table_attributes(
self,
@@ -1873,6 +1879,7 @@ def __init__(
pixel_array,
number_of_segments,
segmentation_type,
dtype=dtype,
)
self.SegmentsOverlap = segments_overlap.value

@@ -1887,10 +1894,10 @@ def __init__(
if pixel_array.ndim == 4:
pixel_array = self._combine_segments(
pixel_array,
labelmap_dtype=labelmap_dtype
labelmap_dtype=dtype
)
else:
pixel_array = pixel_array.astype(labelmap_dtype)
pixel_array = pixel_array.astype(dtype)

if has_ref_frame_uid:
if tile_pixel_array:
@@ -2177,6 +2184,7 @@ def __init__(
number_of_segments=number_of_segments,
segmentation_type=segmentation_type,
max_fractional_value=max_fractional_value,
dtype=dtype,
)

# Even though completely empty planes were removed earlier,
@@ -2634,7 +2642,8 @@ def _check_dimension_organization_type(
def _check_and_cast_pixel_array(
pixel_array: np.ndarray,
number_of_segments: int,
segmentation_type: SegmentationTypeValues
segmentation_type: SegmentationTypeValues,
dtype: type,
) -> Tuple[np.ndarray, SegmentsOverlapValues]:
"""Checks on the shape and data type of the pixel array.
@@ -2649,6 +2658,8 @@ def _check_and_cast_pixel_array(
they were passed. 1D array of integers.
segmentation_type: highdicom.seg.SegmentationTypeValues
The segmentation_type parameter.
dtype: type
Pixel type of the output array.
Returns
-------
@@ -2733,7 +2744,7 @@ def _check_and_cast_pixel_array(
'0.0 or 1.0 in case of BINARY or LABELMAP segmentation '
'type.'
)
pixel_array = pixel_array.astype(np.uint8)
pixel_array = pixel_array.astype(dtype)

# Need to check whether or not segments overlap
if len(unique_values) == 1 and unique_values[0] == 0.0:
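Editor's note: in _check_and_cast_pixel_array, floating-point input is still required to contain only the values 0.0 and 1.0, but the cast now targets the dtype chosen for the segmentation instead of a hard-coded np.uint8, so 16-bit labelmaps survive this step. A simplified sketch (cast_binary_float_mask is hypothetical; the real function also handles integer and boolean input and determines whether segments overlap):

import numpy as np

def cast_binary_float_mask(pixel_array: np.ndarray, dtype) -> np.ndarray:
    # Sketch only: validate a float mask restricted to {0.0, 1.0} and cast it
    # to the target dtype chosen for the segmentation.
    unique_values = np.unique(pixel_array)
    if not np.isin(unique_values, [0.0, 1.0]).all():
        raise ValueError(
            'Floating point pixel values must be 0.0 or 1.0 for BINARY or '
            'LABELMAP segmentation types.'
        )
    return pixel_array.astype(dtype)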
@@ -2918,7 +2929,8 @@ def _get_segment_pixel_array(
segment_number: int,
number_of_segments: int,
segmentation_type: SegmentationTypeValues,
max_fractional_value: int
max_fractional_value: int,
dtype: type,
) -> np.ndarray:
"""Get pixel data array for a specific segment and plane.
@@ -2941,13 +2953,15 @@ def _get_segment_pixel_array(
Desired output segmentation type.
max_fractional_value: int
Value for scaling FRACTIONAL segmentations.
dtype: type
Data type of the returned pixel array.
Returns
-------
numpy.ndarray:
Pixel data array consisting of pixel data for a single segment for
a single plane. Output array has dtype np.uint8 and binary values
(0 or 1).
a single plane. Output array has the specified dtype and binary
values (0 or 1).
"""
if pixel_array.dtype in (np.float_, np.float32, np.float64):
@@ -2962,7 +2976,7 @@ def _get_segment_pixel_array(
segment_array = np.around(
segment_array * float(max_fractional_value)
)
segment_array = segment_array.astype(np.uint8)
segment_array = segment_array.astype(dtype)
else:
if pixel_array.ndim == 2:
# "Label maps" that must be converted to binary masks.
@@ -2971,18 +2985,18 @@ def _get_segment_pixel_array(
# operations here, for efficiency reasons. If there is only
# a single segment, the label map pixel array is already
# correct
if pixel_array.dtype != np.uint8:
segment_array = pixel_array.astype(np.uint8)
if pixel_array.dtype != dtype:
segment_array = pixel_array.astype(dtype)
else:
segment_array = pixel_array
else:
segment_array = (
pixel_array == segment_number
).astype(np.uint8)
).astype(dtype)
else:
segment_array = pixel_array[:, :, segment_number - 1]
if segment_array.dtype != np.uint8:
segment_array = segment_array.astype(np.uint8)
if segment_array.dtype != dtype:
segment_array = segment_array.astype(dtype)

# It may happen that a binary valued array is passed that should be
# stored as a fractional segmentation. In this case, we also need
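Editor's note: _get_segment_pixel_array gets the same treatment, with the extracted per-segment plane cast to the caller-supplied dtype rather than np.uint8. The core of the extraction, sketched under the assumption of a 2D label map or a 3D one-hot array (get_segment_plane is hypothetical; the real helper also handles fractional scaling and single-segment label maps):

import numpy as np

def get_segment_plane(pixel_array: np.ndarray, segment_number: int, dtype) -> np.ndarray:
    # Sketch only: build a binary mask for one segment at the target dtype.
    if pixel_array.ndim == 2:
        # Label map: pixels store the segment number directly.
        return (pixel_array == segment_number).astype(dtype)
    # One-hot encoding: segments are stacked along the last axis.
    return pixel_array[:, :, segment_number - 1].astype(dtype)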
110 changes: 109 additions & 1 deletion tests/test_seg.py
@@ -1582,7 +1582,7 @@ def test_construction_9(self):
assert hasattr(instance, 'BluePaletteColorLookupTableData')

def test_construction_10(self):
# A label with a palette color LUT and ICC Profile
# A labelmap with a palette color LUT and ICC Profile
instance = Segmentation(
self._ct_series,
self._ct_series_mask_array,
@@ -1608,6 +1608,114 @@ def test_construction_10(self):
assert hasattr(instance, 'BluePaletteColorLookupTableDescriptor')
assert hasattr(instance, 'BluePaletteColorLookupTableData')

def test_construction_large_labelmap_monochrome(self):
n_classes = 300 # force 16 bit
segment_descriptions = [
SegmentDescription(
segment_number=i,
segment_label=f'Segment #{i}',
segmented_property_category=self._segmented_property_category,
segmented_property_type=self._segmented_property_type,
algorithm_type=SegmentAlgorithmTypeValues.AUTOMATIC.value,
algorithm_identification=AlgorithmIdentificationSequence(
name='bla',
family=codes.DCM.ArtificialIntelligence,
version='v1'
)
)
for i in range(1, n_classes)
]

# A labelmap with a large number of classes to force 16 bit
instance = Segmentation(
self._ct_series,
self._ct_series_mask_array,
SegmentationTypeValues.LABELMAP.value,
segment_descriptions,
self._series_instance_uid,
self._series_number,
self._sop_instance_uid,
self._instance_number,
self._manufacturer,
self._manufacturer_model_name,
self._software_versions,
self._device_serial_number,
)
assert instance.PhotometricInterpretation == 'MONOCHROME2'
assert not hasattr(instance, 'ICCProfile')
assert not hasattr(instance, 'RedPaletteColorLookupTableDescriptor')
assert not hasattr(instance, 'RedPaletteColorLookupTableData')
assert not hasattr(instance, 'GreenPaletteColorLookupTableDescriptor')
assert not hasattr(instance, 'GreenPaletteColorLookupTableData')
assert not hasattr(instance, 'BluePaletteColorLookupTableDescriptor')
assert not hasattr(instance, 'BluePaletteColorLookupTableData')
assert instance.pixel_array.dtype == np.uint16
arr = self.get_array_after_writing(instance)
assert arr.dtype == np.uint16

def test_construction_large_labelmap_palettecolor(self):
n_classes = 300 # force 16 bit
segment_descriptions = [
SegmentDescription(
segment_number=i,
segment_label=f'Segment #{i}',
segmented_property_category=self._segmented_property_category,
segmented_property_type=self._segmented_property_type,
algorithm_type=SegmentAlgorithmTypeValues.AUTOMATIC.value,
algorithm_identification=AlgorithmIdentificationSequence(
name='bla',
family=codes.DCM.ArtificialIntelligence,
version='v1'
)
)
for i in range(1, n_classes)
]

r_lut_data = np.arange(10, 10 + n_classes, dtype=np.uint16)
g_lut_data = np.arange(20, 20 + n_classes, dtype=np.uint16)
b_lut_data = np.arange(30, 30 + n_classes, dtype=np.uint16)
r_first_mapped_value = 0
g_first_mapped_value = 0
b_first_mapped_value = 0
r_lut = PaletteColorLUT(r_first_mapped_value, r_lut_data, color='red')
g_lut = PaletteColorLUT(g_first_mapped_value, g_lut_data, color='green')
b_lut = PaletteColorLUT(b_first_mapped_value, b_lut_data, color='blue')
self._lut_transformation = PaletteColorLUTTransformation(
red_lut=r_lut,
green_lut=g_lut,
blue_lut=b_lut,
palette_color_lut_uid=UID(),
)

# A labelmap with a large number of classes to force 16 bit
instance = Segmentation(
self._ct_series,
self._ct_series_mask_array,
SegmentationTypeValues.LABELMAP.value,
segment_descriptions,
self._series_instance_uid,
self._series_number,
self._sop_instance_uid,
self._instance_number,
self._manufacturer,
self._manufacturer_model_name,
self._software_versions,
self._device_serial_number,
palette_color_lut_transformation=self._lut_transformation,
icc_profile=self._icc_profile,
)
assert instance.PhotometricInterpretation == 'PALETTE COLOR'
assert hasattr(instance, 'ICCProfile')
assert hasattr(instance, 'RedPaletteColorLookupTableDescriptor')
assert hasattr(instance, 'RedPaletteColorLookupTableData')
assert hasattr(instance, 'GreenPaletteColorLookupTableDescriptor')
assert hasattr(instance, 'GreenPaletteColorLookupTableData')
assert hasattr(instance, 'BluePaletteColorLookupTableDescriptor')
assert hasattr(instance, 'BluePaletteColorLookupTableData')
assert instance.pixel_array.dtype == np.uint16
arr = self.get_array_after_writing(instance)
assert arr.dtype == np.uint16

def test_construction_workers(self):
# Create a segmentation with multiple workers
Segmentation(
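Editor's note: the choice of 300 classes in the new tests is what forces the 16-bit path, since segment numbers above 255 cannot be stored in uint8 and the smallest sufficient unsigned type is uint16. A two-line illustration of the arithmetic:

import numpy as np

# 300 segments exceed the uint8 range but fit comfortably in uint16.
assert np.iinfo(np.uint8).max == 255
assert np.iinfo(np.uint16).max >= 300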
