Manual close of all memmap BEFORE cleanup #69

Merged
1 commit merged on Oct 11, 2023
21 changes: 19 additions & 2 deletions trx/trx_file_memmap.py
@@ -19,9 +19,10 @@
import numpy as np

from trx.io import get_trx_tmpdir
-from trx.utils import (get_reference_info_wrapper,
+from trx.utils import (append_generator_to_dict,
+                       close_or_delete_mmap,
                        convert_data_dict_to_tractogram,
-                       append_generator_to_dict)
+                       get_reference_info_wrapper)

try:
import dipy
@@ -1736,6 +1737,22 @@ def to_sft(self, resize=False):
    def close(self) -> None:
        """Cleanup on-disk temporary folder and initialize an empty TrxFile"""
        if self._uncompressed_folder_handle is not None:
            close_or_delete_mmap(self.streamlines)

            # Close or delete attributes in dictionaries
            for key in self.data_per_vertex:
                close_or_delete_mmap(self.data_per_vertex[key])

            for key in self.data_per_streamline:
                close_or_delete_mmap(self.data_per_streamline[key])

            for key in self.groups:
                close_or_delete_mmap(self.groups[key])

            for key in self.data_per_group:
                for dpg in self.data_per_group[key]:
                    close_or_delete_mmap(self.data_per_group[key][dpg])

            try:
                self._uncompressed_folder_handle.cleanup()
            except PermissionError:
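Why the explicit closes matter: a temporary folder cannot be removed while memory-mapped files inside it still hold open handles (notably on Windows, which the existing except PermissionError guard hints at). Below is a minimal, self-contained sketch of the same pattern using plain tempfile and numpy rather than the TrxFile API; the file name and shape are made up for illustration.

import os
import tempfile

import numpy as np

tmpdir = tempfile.TemporaryDirectory()
path = os.path.join(tmpdir.name, 'positions.float32')  # hypothetical name

# Write through a memmap, then release the handle before removing the folder.
arr = np.memmap(path, dtype=np.float32, mode='w+', shape=(10, 3))
arr[:] = 1.0
arr.flush()

# Same idea as close_or_delete_mmap: close the underlying mmap handle and
# drop the reference so that cleanup() can delete the file on every platform.
if arr._mmap is not None:
    arr._mmap.close()
del arr

tmpdir.cleanup()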
33 changes: 27 additions & 6 deletions trx/utils.py
@@ -17,6 +17,28 @@
dipy_available = False


def close_or_delete_mmap(obj):
    """
    Close the underlying memory-mapped file of an object, if it has one.

    Recurses into the components of an ArraySequence, deletes the reference
    for a plain np.memmap, and logs a warning for anything else.

    Parameters
    ----------
    obj : object
        The object that potentially has a memory-mapped file to be closed.
    """
    if hasattr(obj, '_mmap') and obj._mmap is not None:
        obj._mmap.close()
    elif isinstance(obj, ArraySequence):
        close_or_delete_mmap(obj._data)
        close_or_delete_mmap(obj._offsets)
        close_or_delete_mmap(obj._lengths)
    elif isinstance(obj, np.memmap):
        del obj
    else:
        logging.warning('Object to be closed or deleted must be a np.memmap.')
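A hedged usage sketch of the helper above, mirroring how TrxFile keeps its arrays on disk; the temporary paths, dtypes, and shapes are invented for illustration, and ArraySequence is the nibabel.streamlines class the isinstance check refers to.

import os
import tempfile

import numpy as np
from nibabel.streamlines import ArraySequence

from trx.utils import close_or_delete_mmap

tmpdir = tempfile.TemporaryDirectory()

def new_mmap(name, dtype, shape):
    # Example-only helper that creates a writable memmap inside tmpdir.
    return np.memmap(os.path.join(tmpdir.name, name), dtype=dtype,
                     mode='w+', shape=shape)

# A plain memmap takes the first branch: its underlying _mmap is closed.
close_or_delete_mmap(new_mmap('dpv.float32', np.float32, (6, 1)))

# An ArraySequence backed by memmaps takes the ArraySequence branch,
# which recurses into _data, _offsets and _lengths.
seq = ArraySequence()
seq._data = new_mmap('positions.float32', np.float32, (9, 3))
seq._offsets = new_mmap('offsets.uint64', np.uint64, (3,))
seq._lengths = new_mmap('lengths.uint32', np.uint32, (3,))
close_or_delete_mmap(seq)

tmpdir.cleanup()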


def split_name_with_gz(filename):
"""
Returns the clean basename and extension of a file.
@@ -115,8 +137,7 @@ def get_reference_info_wrapper(reference):
voxel_sizes = header['voxel_sizes']
voxel_order = header['voxel_order']
elif is_sft:
-affine, dimensions, voxel_sizes, voxel_order =\
-    reference.space_attributes
+affine, dimensions, voxel_sizes, voxel_order = reference.space_attributes
elif is_trx:
affine = header['VOXEL_TO_RASMM']
dimensions = header['DIMENSIONS']
@@ -152,10 +173,10 @@ def is_header_compatible(reference_1, reference_2):
Do all the spatial attributes match
"""

-affine_1, dimensions_1, voxel_sizes_1, voxel_order_1 = \
-    get_reference_info_wrapper(reference_1)
-affine_2, dimensions_2, voxel_sizes_2, voxel_order_2 = \
-    get_reference_info_wrapper(reference_2)
+affine_1, dimensions_1, voxel_sizes_1, voxel_order_1 = get_reference_info_wrapper(
+    reference_1)
+affine_2, dimensions_2, voxel_sizes_2, voxel_order_2 = get_reference_info_wrapper(
+    reference_2)

identical_header = True
if not np.allclose(affine_1, affine_2, rtol=1e-03, atol=1e-03):
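For completeness, a hedged example of calling is_header_compatible from trx.utils; it assumes the function accepts anything get_reference_info_wrapper can read (for instance two NIfTI paths) and returns a boolean, and the file names are hypothetical.

from trx.utils import is_header_compatible

# Hypothetical inputs: two volumes expected to share the same grid.
if not is_header_compatible('anat.nii.gz', 'mask.nii.gz'):
    raise ValueError('Spatial attributes of the two references do not match.')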