Add non-static and kt sampling #280

Merged
merged 10 commits into from
Jul 3, 2024
1,660 changes: 1,360 additions & 300 deletions direct/common/subsample.py

Large diffs are not rendered by default.

17 changes: 10 additions & 7 deletions direct/common/subsample_config.py
@@ -1,20 +1,23 @@
# coding=utf-8
# Copyright (c) DIRECT Contributors

+from __future__ import annotations

from dataclasses import dataclass
-from typing import Optional, Tuple
+from typing import Optional

from omegaconf import MISSING

from direct.config.defaults import BaseConfig
+from direct.types import MaskFuncMode


@dataclass
class MaskingConfig(BaseConfig):
name: str = MISSING
-accelerations: Tuple[int, ...] = (5,) # Ideally Union[float, int].
-center_fractions: Optional[Tuple[float, ...]] = (0.1,) # Ideally Optional[Tuple[float, ...]]
+accelerations: tuple[float, ...] = (5.0,)
+center_fractions: Optional[tuple[float, ...]] = (0.1,)
uniform_range: bool = False
image_center_crop: bool = False
+mode: MaskFuncMode = MaskFuncMode.STATIC

-val_accelerations: Tuple[int, ...] = (5, 10)
-val_center_fractions: Optional[Tuple[float, ...]] = (0.1, 0.05)
+val_accelerations: tuple[float, ...] = (5.0, 10.0)
+val_center_fractions: Optional[tuple[float, ...]] = (0.1, 0.05)
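
For context, a minimal sketch (not part of the PR) of how the new mode field might be set when requesting non-static sampling; the field names follow the dataclass above, while the per-mode behaviour itself lives in direct/common/subsample.py, whose diff is not rendered here:

from direct.common.subsample_config import MaskingConfig
from direct.types import MaskFuncMode

# Sketch only: a FastMRI-style random mask redrawn along the time axis.
# "FastMRIRandom" is the mask name used in tests/test_train.py further down.
masking = MaskingConfig(
    name="FastMRIRandom",
    accelerations=(5.0, 10.0),
    center_fractions=(0.1, 0.05),
    mode=MaskFuncMode.DYNAMIC,
)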
5 changes: 1 addition & 4 deletions direct/data/mri_transforms.py
@@ -317,7 +317,7 @@ def __call__(self, sample: dict[str, Any]) -> dict[str, Any]:
Sample with `sampling_mask` key.
"""
if not self.shape:
-shape = sample["kspace"].shape[-3:]
+shape = sample["kspace"].shape[1:]
elif any(_ is None for _ in self.shape): # Allow None as values.
kspace_shape = list(sample["kspace"].shape[1:-1])
shape = tuple(_ if _ else kspace_shape[idx] for idx, _ in enumerate(self.shape)) + (2,)
@@ -328,9 +328,6 @@ def __call__(self, sample: dict[str, Any]) -> dict[str, Any]:

sampling_mask = self.mask_func(shape=shape, seed=seed, return_acs=False)

-if sample["kspace"].ndim == 5:
-    sampling_mask = sampling_mask.unsqueeze(0)

if "padding" in sample:
sampling_mask = T.apply_padding(sampling_mask, sample["padding"])

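The switch from shape[-3:] to shape[1:] is what lets the mask function see a time or slice axis at all. A small illustration, assuming the axis convention (coil, [time,] height, width, 2) implied by the surrounding code:

import torch

kspace_static = torch.zeros(8, 320, 320, 2)       # (coil, height, width, 2)
kspace_dynamic = torch.zeros(8, 12, 320, 320, 2)  # (coil, time, height, width, 2)

# Old behaviour: the time axis was always dropped.
print(kspace_dynamic.shape[-3:])  # torch.Size([320, 320, 2])
# New behaviour: the time axis is kept, so a DYNAMIC or MULTISLICE mask
# function can produce one mask per frame/slice; static data is unaffected.
print(kspace_dynamic.shape[1:])   # torch.Size([12, 320, 320, 2])
print(kspace_static.shape[1:])    # torch.Size([320, 320, 2])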
12 changes: 12 additions & 0 deletions direct/types.py
@@ -8,6 +8,7 @@
from enum import Enum
from typing import NewType, Union

import numpy as np
import torch
from omegaconf.omegaconf import DictConfig
from torch import nn as nn
@@ -19,6 +20,7 @@
FileOrUrl = NewType("FileOrUrl", PathOrString)
HasStateDict = Union[nn.Module, torch.optim.Optimizer, torch.optim.lr_scheduler._LRScheduler, GradScaler]
TensorOrNone = Union[None, torch.Tensor]
TensorOrNdarray = Union[torch.Tensor, np.ndarray]


class DirectEnum(str, Enum):
@@ -57,6 +59,16 @@ class TransformKey(DirectEnum):
SAMPLING_MASK = "sampling_mask"
ACS_MASK = "acs_mask"
SCALING_FACTOR = "scaling_factor"
REFERENCE_IMAGE = "reference_image"
MOVING_IMAGE = "moving_image"
WARPED_IMAGE = "warped_image"
DISPLACEMENT_FIELD = "displacement_field"


class MaskFuncMode(DirectEnum):
STATIC = "static"
DYNAMIC = "dynamic"
MULTISLICE = "multislice"


class IntegerListOrTupleStringMeta(type):
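Because DirectEnum derives from str, the new mode can be supplied either as an enum member or as a plain string in a config file. The helper below is purely hypothetical (the real logic sits in direct/common/subsample.py, whose diff is not rendered above); it only illustrates how the three modes could change the shape of a sampled column mask:

import numpy as np

from direct.types import MaskFuncMode

assert MaskFuncMode.DYNAMIC == "dynamic"  # str-valued enum, so plain strings compare equal

def sample_column_mask(shape, mode, rng, prob=0.2):
    # Hypothetical sketch, not the PR's code: STATIC draws a single column mask
    # reused for every frame/slice, DYNAMIC/MULTISLICE draw an independent mask
    # along the leading (time or slice) dimension of `shape`.
    num_cols = shape[-2]
    if mode == MaskFuncMode.STATIC:
        return rng.uniform(size=num_cols) < prob
    return rng.uniform(size=(shape[0], num_cols)) < prob

rng = np.random.default_rng(0)
print(sample_column_mask((320, 320, 2), MaskFuncMode.STATIC, rng).shape)       # (320,)
print(sample_column_mask((12, 320, 320, 2), MaskFuncMode.DYNAMIC, rng).shape)  # (12, 320)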
56 changes: 56 additions & 0 deletions direct/utils/__init__.py
@@ -535,3 +535,59 @@ def dict_flatten(in_dict: DictOrDictConfig, dict_out: Optional[DictOrDictConfig]
continue
dict_out[k] = v
return dict_out


def reshape_array_to_shape(array: np.ndarray, requested_shape: Tuple[int, ...]) -> np.ndarray:
"""Reshapes the given array to match the requested shape by adding dimensions of size 1 where necessary.

Parameters
----------
array : np.ndarray
The input array to be reshaped.
requested_shape : tuple of ints
The desired shape of the output array.

Returns
-------
np.ndarray
The reshaped array with the requested shape.

Example
-------
>>> array1 = np.random.rand(4, 5)
>>> requested_shape1 = (4, 5, 1)
>>> result1 = reshape_array_to_shape(array1, requested_shape1)
>>> print(result1.shape) # Output: (4, 5, 1)

>>> array2 = np.random.rand(4, 5)
>>> requested_shape2 = (1, 4, 5, 1)
>>> result2 = reshape_array_to_shape(array2, requested_shape2)
>>> print(result2.shape) # Output: (1, 4, 5, 1)

>>> array3 = np.random.rand(2, 4, 5)
>>> requested_shape3 = (2, 4, 5, 1)
>>> result3 = reshape_array_to_shape(array3, requested_shape3)
>>> print(result3.shape) # Output: (2, 4, 5, 1)
"""

# Get the current shape of the array
current_shape = array.shape

# Check if the current shape already matches the requested shape
if current_shape == requested_shape:
return array

# Initialize a new shape list with ones
new_shape = [1] * len(requested_shape)

# Fill in the new shape list with dimensions from the current shape where appropriate
j = 0 # Index for current shape
for i, dim in enumerate(requested_shape):
if j < len(current_shape) and dim == current_shape[j]:
new_shape[i] = current_shape[j]
j += 1

# Reshape the array to the new shape
reshaped_array = np.reshape(array, new_shape)

return reshaped_array
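A hypothetical use of this helper (not taken from the PR): lifting a 2-D spatial mask so that it broadcasts against dynamic k-space laid out as (time, height, width, complex):

import numpy as np

from direct.utils import reshape_array_to_shape

mask = np.random.rand(320, 320) < 0.2                  # (height, width)
mask = reshape_array_to_shape(mask, (1, 320, 320, 1))  # -> (1, height, width, 1)
kspace = np.zeros((12, 320, 320, 2))                   # (time, height, width, complex)
print((kspace * mask).shape)                           # (12, 320, 320, 2), broadcast over time and complex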
3 changes: 2 additions & 1 deletion tests/test_train.py
@@ -25,12 +25,13 @@
)
from direct.launch import launch
from direct.train import setup_train
+from direct.types import MaskFuncMode


def create_test_transform_cfg(transforms_type):
transforms_config = TransformsConfig(
normalization=NormalizationTransformConfig(scaling_key="masked_kspace"),
-masking=MaskingConfig(name="FastMRIRandom"),
+masking=MaskingConfig(name="FastMRIRandom", mode=MaskFuncMode.STATIC),
cropping=CropTransformConfig(crop="(32, 32)"),
sensitivity_map_estimation=SensitivityMapEstimationTransformConfig(estimate_sensitivity_maps=True),
transforms_type=transforms_type,