Fixed to writing zarr v3
folterj committed Jul 1, 2024
1 parent d00e41d commit a3f0fcf
Showing 5 changed files with 71 additions and 13 deletions.
8 changes: 4 additions & 4 deletions OmeSliCC/OmeZarrSource.py
@@ -1,5 +1,5 @@
 import numpy as np
-from ome_zarr.io import parse_url, ZarrLocation
+from ome_zarr.io import parse_url
 from ome_zarr.reader import Reader
 
 from OmeSliCC.OmeSource import OmeSource
@@ -25,8 +25,7 @@ def __init__(self, filename: str,
         self.levels = []
         nchannels = 1
         try:
-            #location = parse_url(filename)
-            location = ZarrLocation(filename)
+            location = parse_url(filename)
             if location is None:
                 raise FileNotFoundError(f'Error parsing ome-zarr file {filename}')
             reader = Reader(location)
@@ -110,7 +109,8 @@ def _find_metadata(self):
         self.source_mag = 0
 
     def get_source_dask(self):
-        return self.levels
+        return [redimension_data(level, self.dimension_order, self.get_dimension_order())
+                for level in self.levels]
 
     def _asarray_level(self, level: int, **slicing) -> np.ndarray:
         redim = redimension_data(self.levels[level], self.dimension_order, self.get_dimension_order())
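For context, a minimal sketch of the axis reordering that the reworked get_source_dask applies to each pyramid level, assuming redimension_data essentially permutes the file's native dimension order into the requested order (the real helper may also add or drop axes; the function name, shapes and orders below are illustrative):

import numpy as np

def reorder_axes(data, old_order: str, new_order: str):
    # Permute axes so that axis i of the result corresponds to new_order[i].
    return np.transpose(data, [old_order.index(d) for d in new_order])

level = np.zeros((1, 3, 5, 64, 64))                  # tczyx
print(reorder_axes(level, 'tczyx', 'yxzct').shape)   # (64, 64, 5, 3, 1)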
16 changes: 8 additions & 8 deletions OmeSliCC/Zarr.py
@@ -24,18 +24,17 @@ def create(self, source, tile_size=None, npyramid_add=0, pyramid_downsample=2, c
         self.pyramid_downsample = pyramid_downsample
         if v3:
             import zarrita
-            store_path = zarrita.store.make_store_path(self.filename, mode='w')
+            store_path = zarrita.store.make_store_path(self.filename)
             if os.path.exists(self.filename):
                 shutil.rmtree(str(store_path.store.root))
-            self.zarr_root = zarr.Group.create(store=store_path)
+            self.zarr_root = zarrita.Group.create(store=store_path, exists_ok=True)
         else:
             file_url = pathlib.Path(self.filename, mode='w').as_uri()
             self.zarr_root = zarr.open_group(store=file_url, mode='w', storage_options={'dimension_separator': '/'})
         xyzct = source.get_size_xyzct()
         shape0 = [xyzct['xyzct'.index(dimension)] for dimension in dimension_order]
         dtype = source.pixel_types[0]
         pixel_size_um = source.get_pixel_size_micrometer()
-        compressor, compression_filters = create_compression_filter(compression)
         scale = 1
         datasets = []
         if tile_size:
@@ -50,11 +49,11 @@
                 import zarrita
                 shape = np.array(shape).tolist()  # convert to basic int
                 tile_size = np.array(tile_size).tolist()  # convert to basic int
-                #dataset = self.zarr_root.create_array(str(pathi), shape=shape, chunks=tile_size, dtype=dtype,
-                #                                      compressor=compressor, codecs=compression_filters)
-                dataset = zarrita.Array.create(store_path / str(pathi), shape=shape, chunks=tile_size, dtype=dtype,
-                                               compressor=compressor, codecs=compression_filters)
+                codecs = create_compression_codecs(compression)
+                dataset = self.zarr_root.create_array(str(pathi), shape=shape, chunk_shape=tile_size, dtype=dtype,
+                                                      codecs=codecs)
             else:
+                compressor, compression_filters = create_compression_filter(compression)
                 dataset = self.zarr_root.create_dataset(str(pathi), shape=shape, chunks=tile_size, dtype=dtype,
                                                         compressor=compressor, filters=compression_filters)
             self.data.append(dataset)
@@ -93,5 +92,6 @@ def set(self, data, x0=0, y0=0, x1=0, y1=0):
                     data1 = image_resize(data, new_size, dimension_order=self.dimension_order)
                 else:
                     data1 = data
-                self.data[pathi][..., sy0:sy1, sx0:sx1] = data1
+                #self.data[pathi][..., sy0:sy1, sx0:sx1] = data1
+                self.data[pathi] = data1
             scale /= self.pyramid_downsample
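For reference, a minimal sketch of the zarr v3 write path that the updated if v3: branch relies on, limited to the zarrita calls that appear in this commit (make_store_path, Group.create, create_array with chunk_shape, and array assignment); the output path and array shape are hypothetical, and exact signatures or indexing support may differ between zarrita versions:

import numpy as np
import zarrita

store_path = zarrita.store.make_store_path('test_v3.zarr')  # hypothetical output path
root = zarrita.Group.create(store=store_path, exists_ok=True)

data = np.zeros((3, 512, 512), dtype=np.uint8)
# chunk_shape replaces the zarr v2 'chunks' argument used in the non-v3 branch.
level0 = root.create_array('0', shape=data.shape, chunk_shape=(3, 256, 256), dtype=data.dtype)
level0[:, :, :] = data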
29 changes: 29 additions & 0 deletions OmeSliCC/image_util.py
@@ -290,6 +290,35 @@ def create_compression_filter(compression: list) -> tuple:
     return compressor, compression_filters
 
 
+def create_compression_codecs(compression: list) -> list:
+    codecs = None
+    compression = ensure_list(compression)
+    if compression is not None and len(compression) > 0:
+        compression_type = compression[0].lower()
+        if len(compression) > 1:
+            level = int(compression[1])
+        else:
+            level = None
+        if 'lzw' in compression_type:
+            from imagecodecs.numcodecs import Lzw
+            codecs = [Lzw()]
+        elif '2k' in compression_type or '2000' in compression_type:
+            from imagecodecs.numcodecs import Jpeg2k
+            codecs = [Jpeg2k(level=level)]
+        elif 'jpegls' in compression_type:
+            from imagecodecs.numcodecs import Jpegls
+            codecs = [Jpegls(level=level)]
+        elif 'jpegxr' in compression_type:
+            from imagecodecs.numcodecs import Jpegxr
+            codecs = [Jpegxr(level=level)]
+        elif 'jpegxl' in compression_type:
+            from imagecodecs.numcodecs import Jpegxl
+            codecs = [Jpegxl(level=level)]
+        else:
+            codecs = [compression]
+    return codecs
+
+
 def get_tiff_pages(tiff: TiffFile) -> list:
     pages = []
     found = False
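A hedged usage example for the new helper: a compression spec such as ['jpeg2000', 60] (hypothetical values) maps to an imagecodecs JPEG 2000 codec, which Zarr.create then passes as the codecs= argument of create_array:

from OmeSliCC.image_util import create_compression_codecs

# '2000' matches the Jpeg2k branch and 60 becomes the quality level.
codecs = create_compression_codecs(['jpeg2000', 60])
print(codecs)  # e.g. [Jpeg2k(level=60)]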
30 changes: 30 additions & 0 deletions environment_no_bioformats_omero.yml
@@ -0,0 +1,30 @@
name: omeslicc_env
channels:
- ome
- conda-forge
- defaults
dependencies:
- python=3.10
- pip
- numpy
- pandas
- scikit-image
- scikit-learn
- imagecodecs>=2023.3.16
- numcodecs
- dask
- tifffile>=2023.9.26
- zarr
- ome-zarr
- aiohttp
- rsa
- tqdm
- psutil
- PyYAML
- Pillow
- imageio
- validators
- toml
- pip:
- opencv-python
- xmltodict
1 change: 0 additions & 1 deletion tests/ome_zarr_test.py
@@ -42,7 +42,6 @@ def save_zarr_v3(source, data, filename, output_params):
 #filename = 'E:/Personal/Crick/slides/test_images/zarr test.zarr'
 filename = 'https://uk1s3.embassy.ebi.ac.uk/idr/zarr/v0.4/idr0062A/6001240.zarr'
 source = create_source(filename, {})
-#source = ZarrSource(filename)
 w, h = source.get_size()
 output_filename = 'D:/slides/test/' + os.path.basename(filename)
 npyramid_add = 4
