
Commit

fix labels
mathleur committed Oct 28, 2024
2 parents dfa7e44 + 8014417 commit fb361d2
Showing 109 changed files with 525 additions and 421 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/ci.yaml
@@ -135,7 +135,7 @@ jobs:
python -m coverage report
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v4
with:
files: coverage.xml
deploy:
1 change: 1 addition & 0 deletions .gitignore
@@ -24,3 +24,4 @@ polytope_venv
polytope_venv_latest
new_updated_numpy_venv
newest-polytope-venv
serializedTree
3 changes: 3 additions & 0 deletions codecov.yml
@@ -1,3 +1,6 @@
codecov:
branch: develop # set new Default branch

ignore:
- "tests" # ignore tests folder
- "**/test*"
67 changes: 0 additions & 67 deletions polytope/options.py

This file was deleted.

1 change: 0 additions & 1 deletion polytope/version.py

This file was deleted.

File renamed without changes.
File renamed without changes.
File renamed without changes.
@@ -1,6 +1,6 @@
import logging
from abc import ABC, abstractmethod
from typing import Any
from typing import Any, Dict

from ...utility.combinatorics import validate_axes
from ..datacube_axis import DatacubeAxis
@@ -31,9 +31,10 @@ def __init__(self, axis_options=None, compressed_axes_options=[]):
self.merged_axes = []
self.unwanted_path = {}
self.compressed_axes = compressed_axes_options
self.grid_md5_hash = None

@abstractmethod
def get(self, requests: TensorIndexTree) -> Any:
def get(self, requests: TensorIndexTree, context: Dict) -> Any:
"""Return data given a set of request trees"""

@property
@@ -69,6 +70,7 @@ def _create_axes(self, name, values, transformation_type_key, transformation_opt
# TODO: do we use this?? This shouldn't work for a disk in lat/lon on a octahedral or other grid??
for compressed_grid_axis in transformation.compressed_grid_axes:
self.compressed_grid_axes.append(compressed_grid_axis)
self.grid_md5_hash = transformation.md5_hash
if len(final_axis_names) > 1:
self.coupled_axes.append(final_axis_names)
for axis in final_axis_names:
@@ -126,9 +128,10 @@ def get_indices(self, path: DatacubePath, axis, lower, upper, method=None):
"""
path = self.fit_path(path)
indexes = axis.find_indexes(path, self)

idx_between = axis.find_indices_between(indexes, lower, upper, self, method)

logging.info(f"For axis {axis.name} between {lower} and {upper}, found indices {idx_between}")
logging.debug(f"For axis {axis.name} between {lower} and {upper}, found indices {idx_between}")

return idx_between
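
As a sketch of what the updated abstract get signature above implies for backends (the subclass name and body are illustrative, not part of this commit, and the abstract base is assumed to be the Datacube class defined in this file), every concrete datacube now receives a context dict alongside the request tree:

from typing import Any, Dict

class MyBackendDatacube(Datacube):  # hypothetical subclass
    def get(self, requests: TensorIndexTree, context: Dict) -> Any:
        # context carries per-request metadata passed through to the data source;
        # this commit does not prescribe its keys, and callers default it to {}.
        ...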

@@ -77,7 +77,9 @@ def check_branching_axes(self, request):
for axis_name in axes_to_remove:
self._axes.pop(axis_name, None)

def get(self, requests: TensorIndexTree):
def get(self, requests: TensorIndexTree, context=None):
if context is None:
context = {}
requests.pprint()
if len(requests.children) == 0:
return requests
@@ -104,11 +106,11 @@ def get(self, requests: TensorIndexTree):
uncompressed_request = {}
for i, key in enumerate(compressed_request[0].keys()):
uncompressed_request[key] = combi[i]
complete_uncompressed_request = (uncompressed_request, compressed_request[1])
complete_uncompressed_request = (uncompressed_request, compressed_request[1], self.grid_md5_hash)
complete_list_complete_uncompressed_requests.append(complete_uncompressed_request)
complete_fdb_decoding_info.append(fdb_requests_decoding_info[j])
logging.debug("The requests we give GribJump are: %s", complete_list_complete_uncompressed_requests)
output_values = self.gj.extract(complete_list_complete_uncompressed_requests)
output_values = self.gj.extract(complete_list_complete_uncompressed_requests, context)
logging.debug("GribJump outputs: %s", output_values)
self.assign_fdb_output_to_nodes(output_values, complete_fdb_decoding_info)
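
For orientation, a sketch of the request shape now handed to GribJump (the path keys and values are illustrative; only the added third tuple element and the extra context argument come from this change):

# Each uncompressed request is now a 3-tuple: key/value path, index ranges, grid hash.
complete_uncompressed_request = (
    {"latitude": 51.0, "longitude": 0.0},  # uncompressed key/value path (illustrative keys)
    compressed_request[1],                 # index ranges to read, unchanged from before
    self.grid_md5_hash,                    # md5 hash of the grid, may be None
)
output_values = self.gj.extract(complete_list_complete_uncompressed_requests, context)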

@@ -124,7 +126,7 @@ def get_fdb_requests(

# First when request node is root, go to its children
if requests.axis.name == "root":
logging.info("Looking for data for the tree: %s", [leaf.flatten() for leaf in requests.leaves])
logging.debug("Looking for data for the tree: %s", [leaf.flatten() for leaf in requests.leaves])

for c in requests.children:
self.get_fdb_requests(c, fdb_requests, fdb_requests_decoding_info)
@@ -161,8 +163,8 @@ def remove_duplicates_in_request_ranges(self, fdb_node_ranges, current_start_idx
new_current_start_idx = []
for j, idx in enumerate(sub_lat_idxs):
if idx not in seen_indices:
# TODO: need to remove it from the values in the corresponding tree node
# TODO: need to read just the range we give to gj ... DONE?
# NOTE: need to remove it from the values in the corresponding tree node
# NOTE: need to read just the range we give to gj
original_fdb_node_range_vals.append(actual_fdb_node[0].values[j])
seen_indices.add(idx)
new_current_start_idx.append(idx)
@@ -187,8 +189,6 @@ def nearest_lat_lon_search(self, requests):

second_ax = requests.children[0].children[0].axis

# TODO: actually, here we should not remap the nearest_pts, we should instead unmap the
# found_latlon_pts and then remap them later once we have compared found_latlon_pts and nearest_pts
nearest_pts = [
[lat_val, second_ax._remap_val_to_axis_range(lon_val)]
for (lat_val, lon_val) in zip(
@@ -325,8 +325,6 @@ def sort_fdb_request_ranges(self, current_start_idx, lat_length, fdb_node_ranges
request_ranges_with_idx = list(enumerate(interm_request_ranges))
sorted_list = sorted(request_ranges_with_idx, key=lambda x: x[1][0])
original_indices, sorted_request_ranges = zip(*sorted_list)
logging.debug("We sorted the request ranges into: %s", sorted_request_ranges)
logging.debug("The sorted and unique leaf node ranges are: %s", new_fdb_node_ranges)
return (original_indices, sorted_request_ranges, new_fdb_node_ranges)

def datacube_natural_indexes(self, axis, subarray):
@@ -24,10 +24,12 @@ def __init__(self, dimensions, compressed_axes_options=[]):
self.stride[k] = stride_cumulative
stride_cumulative *= self.dimensions[k]

def get(self, requests: TensorIndexTree):
def get(self, requests: TensorIndexTree, context=None):
# Takes in a datacube and verifies the leaves of the tree are complete
# (ie it found values for all datacube axis)

if context is None:
context = {}
for r in requests.leaves:
path = r.flatten()
if len(path.items()) == len(self.dimensions.items()):
@@ -50,12 +50,14 @@ def __init__(self, dataarray: xr.DataArray, axis_options=None, compressed_axes_o
val = self._axes[name].type
self._check_and_add_axes(options, name, val)

def get(self, requests, leaf_path=None, axis_counter=0):
def get(self, requests, context=None, leaf_path=None, axis_counter=0):
if context is None:
context = {}
if leaf_path is None:
leaf_path = {}
if requests.axis.name == "root":
for c in requests.children:
self.get(c, leaf_path, axis_counter + 1)
self.get(c, context, leaf_path, axis_counter + 1)
else:
key_value_path = {requests.axis.name: requests.values}
ax = requests.axis
@@ -66,7 +68,7 @@ def get(self, requests, leaf_path=None, axis_counter=0):
if len(requests.children) != 0:
# We are not a leaf and we loop over
for c in requests.children:
self.get(c, leaf_path, axis_counter + 1)
self.get(c, context, leaf_path, axis_counter + 1)
else:
if self.axis_counter != axis_counter:
requests.remove_branch()
File renamed without changes.
File renamed without changes.
File renamed without changes.
@@ -106,6 +106,7 @@ def add_child(self, node):
def add_value(self, value):
new_values = list(self.values)
new_values.append(value)
new_values.sort()
self.values = tuple(new_values)
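
A small illustration of the added sort (the node and its values are hypothetical): values are now kept in ascending order regardless of insertion order.

node.values = (3.0,)
node.add_value(1.0)  # values become (1.0, 3.0)
node.add_value(2.0)  # values become (1.0, 2.0, 3.0), sorted on every insertion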

def create_child(self, axis, value, next_nodes, polytope_label=None):
@@ -15,17 +15,25 @@ def __init__(self, name, mapper_options):
self.local_area = []
if mapper_options.local is not None:
self.local_area = mapper_options.local
self._axis_reversed = None
if mapper_options.axis_reversed is not None:
self._axis_reversed = mapper_options.axis_reversed
self.old_axis = name
self._final_transformation = self.generate_final_transformation()
self._final_mapped_axes = self._final_transformation._mapped_axes
self._axis_reversed = self._final_transformation._axis_reversed
self.compressed_grid_axes = self._final_transformation.compressed_grid_axes
self.md5_hash = self._final_transformation.md5_hash

def generate_final_transformation(self):
map_type = _type_to_datacube_mapper_lookup[self.grid_type]
module = import_module("polytope.datacube.transformations.datacube_mappers.mapper_types." + self.grid_type)
module = import_module(
"polytope_feature.datacube.transformations.datacube_mappers.mapper_types." + self.grid_type
)
constructor = getattr(module, map_type)
transformation = deepcopy(constructor(self.old_axis, self.grid_axes, self.grid_resolution, self.local_area))
transformation = deepcopy(
constructor(self.old_axis, self.grid_axes, self.grid_resolution, self.local_area, self._axis_reversed)
)
return transformation
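
A hedged sketch of how the new axis_reversed option flows through (the options object shape, the axis names, and the DatacubeMapper class name are assumptions; only the attributes read in the constructor above and the forwarded argument appear in this diff):

# Hypothetical mapper options declaring the first grid axis as stored in decreasing order.
mapper_options.axis_reversed = {"latitude": True, "longitude": False}
mapper = DatacubeMapper("values", mapper_options)
# generate_final_transformation then forwards it to the concrete grid mapper:
#   constructor(old_axis, grid_axes, grid_resolution, local_area, axis_reversed)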

def blocked_axes(self):
@@ -5,14 +5,19 @@


class HealpixGridMapper(DatacubeMapper):
def __init__(self, base_axis, mapped_axes, resolution, local_area=[]):
def __init__(self, base_axis, mapped_axes, resolution, local_area=[], axis_reversed=None):
# TODO: if local area is not empty list, raise NotImplemented
self._mapped_axes = mapped_axes
self._base_axis = base_axis
self._resolution = resolution
self._axis_reversed = {mapped_axes[0]: True, mapped_axes[1]: False}
self._first_axis_vals = self.first_axis_vals()
self.compressed_grid_axes = [self._mapped_axes[1]]
self.md5_hash = md5_hash.get(resolution, None)
if self._axis_reversed[mapped_axes[1]]:
raise NotImplementedError("Healpix grid with second axis in decreasing order is not supported")
if not self._axis_reversed[mapped_axes[0]]:
raise NotImplementedError("Healpix grid with first axis in increasing order is not supported")

def first_axis_vals(self):
rad2deg = 180 / math.pi
@@ -133,3 +138,7 @@ def unmap(self, first_val, second_val):
second_idx = self.second_axis_vals(first_val).index(second_val)
healpix_index = self.axes_idx_to_healpix_idx(first_idx, second_idx)
return healpix_index


# md5 grid hash in form {resolution : hash}
md5_hash = {}
@@ -5,7 +5,7 @@


class NestedHealpixGridMapper(DatacubeMapper):
def __init__(self, base_axis, mapped_axes, resolution, local_area=[]):
def __init__(self, base_axis, mapped_axes, resolution, local_area=[], axis_reversed=None):
# TODO: if local area is not empty list, raise NotImplemented
self._mapped_axes = mapped_axes
self._base_axis = base_axis
@@ -17,6 +17,11 @@ def __init__(self, base_axis, mapped_axes, resolution, local_area=[]):
self.k = int(math.log2(self.Nside))
self.Npix = 12 * self.Nside * self.Nside
self.Ncap = (self.Nside * (self.Nside - 1)) << 1
self.md5_hash = md5_hash.get(resolution, None)
if self._axis_reversed[mapped_axes[1]]:
raise NotImplementedError("Healpix grid with second axis in decreasing order is not supported")
if not self._axis_reversed[mapped_axes[0]]:
raise NotImplementedError("Healpix grid with first axis in increasing order is not supported")

def first_axis_vals(self):
rad2deg = 180 / math.pi
@@ -211,3 +216,7 @@ def ring_to_nested(self, idx):

def int_sqrt(self, i):
return int(math.sqrt(i + 0.5))


# md5 grid hash in form {resolution : hash}
md5_hash = {}
@@ -4,7 +4,7 @@


class LocalRegularGridMapper(DatacubeMapper):
def __init__(self, base_axis, mapped_axes, resolution, local_area=[]):
def __init__(self, base_axis, mapped_axes, resolution, local_area=[], axis_reversed=None):
# TODO: if local area is not empty list, raise NotImplemented
self._mapped_axes = mapped_axes
self._base_axis = base_axis
@@ -15,17 +15,32 @@ def __init__(self, base_axis, mapped_axes, resolution, local_area=[]):
if not isinstance(resolution, list):
self.first_resolution = resolution
self.second_resolution = resolution
self.md5_hash = md5_hash.get(resolution, None)
else:
self.first_resolution = resolution[0]
self.second_resolution = resolution[1]
self.md5_hash = md5_hash.get(tuple(resolution), None)
self._first_deg_increment = (local_area[1] - local_area[0]) / self.first_resolution
self._second_deg_increment = (local_area[3] - local_area[2]) / self.second_resolution
self._axis_reversed = {mapped_axes[0]: True, mapped_axes[1]: False}
if axis_reversed is None:
self._axis_reversed = {mapped_axes[0]: False, mapped_axes[1]: False}
else:
assert set(axis_reversed.keys()) == set(mapped_axes)
self._axis_reversed = axis_reversed
self._first_axis_vals = self.first_axis_vals()
self.compressed_grid_axes = [self._mapped_axes[1]]
if self._axis_reversed[mapped_axes[1]]:
raise NotImplementedError("Local regular grid with second axis in decreasing order is not supported")

def first_axis_vals(self):
first_ax_vals = [self._first_axis_max - i * self._first_deg_increment for i in range(self.first_resolution + 1)]
if self._axis_reversed[self._mapped_axes[0]]:
first_ax_vals = [
self._first_axis_max - i * self._first_deg_increment for i in range(self.first_resolution + 1)
]
else:
first_ax_vals = [
self._first_axis_min + i * self._first_deg_increment for i in range(self.first_resolution + 1)
]
return first_ax_vals
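
A worked example of the two branches (numbers are illustrative; _first_axis_min and _first_axis_max are assumed to be the first-axis bounds taken from local_area):

# Assuming local_area = [40, 50, 0, 10] and first_resolution = 10,
# _first_deg_increment = (50 - 40) / 10 = 1.0:
#   axis reversed:      [50.0, 49.0, ..., 40.0]   (max - i * increment)
#   axis not reversed:  [40.0, 41.0, ..., 50.0]   (min + i * increment)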

def map_first_axis(self, lower, upper):
@@ -68,3 +83,7 @@ def unmap(self, first_val, second_val):
second_idx = self.second_axis_vals(first_val).index(second_val)
final_index = self.axes_idx_to_regular_idx(first_idx, second_idx)
return final_index


# md5 grid hash in form {resolution : hash}
md5_hash = {}
(Diff truncated: remaining changed files not shown.)
