Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add support to TiledDataset for missing, irregular or overlapping tiles #487

Open
wants to merge 15 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions changelog/487.feature.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Add support to TiledDataset for mosaic datasets with missing tiles or where tiles are irregularly arranged.
15 changes: 11 additions & 4 deletions dkist/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -299,16 +299,23 @@ def eit_dataset():
return f.tree["dataset"]


@pytest.fixture(params=[False,
                        [[False, False],
                         [True, False]]],
                ids=["simple-nomask", "simple-masked"])
def simple_tiled_dataset(dataset, request):
    """
    A 2x2 `TiledDataset` built from four deep copies of the ``dataset`` fixture.

    Parametrized so each dependent test runs twice: once with no mask
    (``False``) and once with a single tile masked out, covering both
    regular mosaics and mosaics with missing tiles.
    """
    datasets = [copy.deepcopy(dataset) for _ in range(4)]
    for ds in datasets:
        # Every tile must reference the *same* inventory object passed to
        # TiledDataset below — its validation checks identity, not equality.
        ds.meta["inventory"] = dataset.meta["inventory"]
    dataset_array = np.array(datasets).reshape((2, 2))
    return TiledDataset(dataset_array, dataset.meta["inventory"], mask=request.param)


@pytest.fixture
@pytest.fixture(params=[False,
[[False, True, False],
[True, False, True],
[False, True, False]]],
ids=["large-nomask", "large-masked"])
def large_tiled_dataset(tmp_path_factory):
vbidir = tmp_path_factory.mktemp("data")
with gzip.open(Path(rootdir) / "large_vbi.asdf.gz", mode="rb") as gfo:
Expand Down
10 changes: 5 additions & 5 deletions dkist/dataset/tiled_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -82,8 +82,8 @@

return cls(datasets, inventory)

def __init__(self, dataset_array, inventory=None):
self._data = np.array(dataset_array, dtype=object)
def __init__(self, dataset_array, inventory=None, mask=False):
self._data = np.ma.masked_array(dataset_array, dtype=object, mask=mask)
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Can we auto-generate the mask based on the elements of dataset_array?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I expect so. We can readily assume anything that isn't a Dataset should be masked out, but we should probably also have some way of flagging invalid Datasets for masking as well.

Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Shouldn't the default value of mask be None?

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Either works fine. False is specifying to set the mask to be False (ie: not masked) everywhere. Which is the default if you pass None into masked_array anyway, but explicit is better than implicit.

self._inventory = inventory or {}
self._validate_component_datasets(self._data, inventory)

Expand All @@ -105,7 +105,7 @@

@staticmethod
def _validate_component_datasets(datasets, inventory):
datasets = datasets.flat
datasets = datasets.compressed()
inv_1 = datasets[0].meta["inventory"]
if inv_1 and inv_1 is not inventory:
raise ValueError("The inventory record of the first dataset does not match the one passed to TiledDataset")
Expand All @@ -122,7 +122,7 @@
"""
Represent this `.TiledDataset` as a 1D array.
"""
return type(self)(self._data.flat, self.inventory)
return type(self)(self._data.compressed(), self.inventory)

@property
def inventory(self):
Expand All @@ -137,7 +137,7 @@
A single `astropy.table.Table` containing all the FITS headers for all
files in this dataset.
"""
return vstack([ds.meta["headers"] for ds in self._data.flat])
return vstack([ds.meta["headers"] for ds in self._data.compressed()])

Check warning on line 140 in dkist/dataset/tiled_dataset.py

View check run for this annotation

Codecov / codecov/patch

dkist/dataset/tiled_dataset.py#L140

Added line #L140 was not covered by tests

@property
def shape(self):
Expand Down
6 changes: 5 additions & 1 deletion dkist/io/asdf/converters/tiled_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,16 +5,20 @@
tags = [
"tag:dkist.nso.edu:dkist/tiled_dataset-0.1.0",
"asdf://dkist.nso.edu/tags/tiled_dataset-1.0.0",
"asdf://dkist.nso.edu/tags/tiled_dataset-1.1.0",
]
types = ["dkist.dataset.tiled_dataset.TiledDataset"]

def from_yaml_tree(cls, node, tag, ctx):
    """
    Construct a `TiledDataset` from its ASDF node.

    Trees written by schema versions before ``tiled_dataset-1.1.0`` have no
    ``mask`` key; default to ``False`` (nothing masked), matching the
    default of ``TiledDataset.__init__``.
    """
    # Local import avoids a circular import at converter-registration time.
    from dkist.dataset.tiled_dataset import TiledDataset

    mask = node.get("mask", False)

    return TiledDataset(node["datasets"], node["inventory"], mask)

def to_yaml_tree(cls, tiled_dataset, tag, ctx):
    """
    Serialize a `TiledDataset` to an ASDF tree.

    ``datasets`` is produced by ``MaskedArray.tolist()``, which substitutes
    ``None`` for masked tiles; the mask itself is stored separately so the
    full tile grid can be reconstructed on read.
    """
    tree = {}
    tree["inventory"] = tiled_dataset._inventory
    tree["datasets"] = tiled_dataset._data.tolist()
    # NOTE(review): when nothing is masked, `.mask` is `np.ma.nomask` (the
    # scalar False) rather than a boolean array — confirm the schema accepts
    # both forms.
    tree["mask"] = tiled_dataset._data.mask
    return tree
2 changes: 2 additions & 0 deletions dkist/io/asdf/entry_points.py
Original file line number Diff line number Diff line change
Expand Up @@ -39,6 +39,8 @@ def get_extensions():
dkist_converters = [FileManagerConverter(), DatasetConverter(), TiledDatasetConverter()]
wcs_converters = [VaryingCelestialConverter(), CoupledCompoundConverter(), RavelConverter(), AsymmetricMappingConverter()]
return [
ManifestExtension.from_uri("asdf://dkist.nso.edu/manifests/dkist-1.3.0",
converters=dkist_converters),
ManifestExtension.from_uri("asdf://dkist.nso.edu/manifests/dkist-1.2.0",
converters=dkist_converters),
ManifestExtension.from_uri("asdf://dkist.nso.edu/manifests/dkist-1.1.0",
Expand Down
14 changes: 14 additions & 0 deletions dkist/io/asdf/resources/manifests/dkist-1.3.0.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
%YAML 1.1
---
# ASDF extension manifest for dkist 1.3.0.
# New relative to 1.2.0: tiled_dataset-1.1.0, which adds the `mask`
# property supporting mosaics with missing or irregularly arranged tiles.
id: asdf://dkist.nso.edu/manifests/dkist-1.3.0
extension_uri: asdf://dkist.nso.edu/dkist/extensions/dkist-1.3.0
title: DKIST extension
description: ASDF schemas and tags for DKIST classes.

# Each entry maps a schema URI to the tag URI it validates.
tags:
  - schema_uri: "asdf://dkist.nso.edu/schemas/file_manager-1.0.0"
    tag_uri: "asdf://dkist.nso.edu/tags/file_manager-1.0.0"
  - schema_uri: "asdf://dkist.nso.edu/schemas/dataset-1.1.0"
    tag_uri: "asdf://dkist.nso.edu/tags/dataset-1.2.0"
  - schema_uri: "asdf://dkist.nso.edu/schemas/tiled_dataset-1.1.0"
    tag_uri: "asdf://dkist.nso.edu/tags/tiled_dataset-1.1.0"
31 changes: 31 additions & 0 deletions dkist/io/asdf/resources/schemas/tiled_dataset-1.1.0.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
%YAML 1.1
---
$schema: "http://stsci.edu/schemas/yaml-schema/draft-01"
id: "asdf://dkist.nso.edu/schemas/tiled_dataset-1.1.0"

title: |
  A DKIST Tiled Dataset object.
description: |
  The container for a set of Dataset objects.

type: object
properties:
  datasets:
    description: A nested structure of Dataset objects
    type: array
    items:
      type: array
      items:
        anyOf:
          - tag: "asdf://dkist.nso.edu/tags/dataset-1.*"
          # Missing / masked tiles are serialized as null
          # (MaskedArray.tolist() substitutes None for masked elements).
          # `type: "null"` is the JSON-Schema spelling of a null-typed
          # value; `null: true` is not a schema keyword.
          - type: "null"
  inventory:
    description: A copy of the inventory record for this dataset.
    type: object
  mask:
    description: A mask to indicate if invalid or missing Datasets should be ignored.
    # The converter writes `MaskedArray.mask`, which is either the scalar
    # False (nothing masked) or a boolean array matching the shape of
    # `datasets` — so accept a plain boolean, a nested array, or a
    # serialized ndarray. (`datatype: bool8` on its own is an ndarray-schema
    # attribute and does not constrain a generic property.)
    anyOf:
      - type: boolean
      - type: array
      - tag: "tag:stsci.edu:asdf/core/ndarray-1.*"

required: [datasets, inventory, mask]
additionalProperties: false
...
9 changes: 5 additions & 4 deletions dkist/io/asdf/tests/test_dataset.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@

import numpy as np
import pytest
from pytest_lazy_fixtures import lf

import asdf
import astropy.table
Expand Down Expand Up @@ -68,9 +69,9 @@ def test_roundtrip_tiled_dataset(simple_tiled_dataset):
@pytest.mark.parametrize("tagobj",
[
"dataset",
"simple_tiled_dataset",
lf("simple_tiled_dataset"),
],
indirect=True)
indirect=False)
def test_save_dataset_without_file_schema(tagobj, tmp_path):
tree = {"dataset": tagobj}
with asdf.AsdfFile(tree) as afile:
Expand All @@ -96,9 +97,9 @@ def test_asdf_tags(dataset, tmp_path):
@pytest.mark.parametrize("tagobj",
[
"dataset",
"simple_tiled_dataset",
lf("simple_tiled_dataset"),
],
indirect=True)
indirect=False)
def test_save_dataset_with_file_schema(tagobj, tmpdir):
tree = {"dataset": tagobj}
with importlib_resources.as_file(importlib_resources.files("dkist.io") / "level_1_dataset_schema.yaml") as schema_path:
Expand Down
1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -55,6 +55,7 @@ tests = [
"pytest-filter-subpackage",
"pytest-benchmark",
"pytest-xdist",
"pytest-lazy-fixtures",
"hypothesis",
"tox",
"pydot",
Expand Down
Loading