From cb49860bf7c1c616594a847ee01a7b5f07088e65 Mon Sep 17 00:00:00 2001 From: Marcel Issler <147622604+Macl-I@users.noreply.github.com> Date: Mon, 19 Aug 2024 16:33:11 +0200 Subject: [PATCH] Update _widget.py Updated chunking to 'auto' instead of chunking by time steps. That way, larger data can be processed at the cost of speed. This was previously a problem with chunk sizes of approx. 6 GB on a 16 GB RAM machine. --- src/napari_fast4dreg/_widget.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/src/napari_fast4dreg/_widget.py b/src/napari_fast4dreg/_widget.py index a19a42f..f6d8912 100644 --- a/src/napari_fast4dreg/_widget.py +++ b/src/napari_fast4dreg/_widget.py @@ -142,9 +142,10 @@ def run_pipeline(image, ref_channel = len(data[0]) # read in raw data as dask array - new_shape = (np.shape(data)[0],1,np.shape(data)[-3],np.shape(data)[-2],np.shape(data)[-1]) - data = data.rechunk(new_shape) - + #new_shape = (np.shape(data)[0],1,np.shape(data)[-3],np.shape(data)[-2],np.shape(data)[-1]) + data = data.rechunk('auto') + new_shape = data.chunksize + # write data to tmp_file data = write_tmp_data_to_disk(tmp_path, data, new_shape) print('Imge imported')