
Commit

Bunch of stuff is still broken, but AV2 Demo 3D works for RGB + Lidar
kylevedder committed Mar 14, 2024
1 parent c21b82d commit 0e4ab40
Showing 25 changed files with 625 additions and 1,070 deletions.
8 changes: 5 additions & 3 deletions bucketed_scene_flow_eval/datasets/__init__.py
@@ -1,11 +1,13 @@
from bucketed_scene_flow_eval.datasets.argoverse2 import Argoverse2SceneFlow
from bucketed_scene_flow_eval.datasets.waymoopen import WaymoOpenSceneFlow
from bucketed_scene_flow_eval.interfaces import AbstractDataset

importable_classes = [Argoverse2SceneFlow, WaymoOpenSceneFlow]
# from bucketed_scene_flow_eval.datasets.waymoopen import WaymoOpenSceneFlow

importable_classes = [Argoverse2SceneFlow] # , WaymoOpenSceneFlow]
name_to_class_lookup = {cls.__name__.lower(): cls for cls in importable_classes}


def construct_dataset(name: str, args: dict):
def construct_dataset(name: str, args: dict) -> AbstractDataset:
name = name.lower()
if name not in name_to_class_lookup:
raise ValueError(f"Unknown dataset name: {name}")
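For reference, a minimal sketch of calling the updated factory after this change. Only the construct_dataset signature, the AbstractDataset return annotation, and the case-insensitive lookup via cls.__name__.lower() come from the diff above; the contents of the args dict (e.g. a root_dir key) are purely illustrative, since Argoverse2SceneFlow's constructor arguments are not shown in this commit.

from bucketed_scene_flow_eval.datasets import construct_dataset
from bucketed_scene_flow_eval.interfaces import AbstractDataset

# "Argoverse2SceneFlow".lower() matches the key built from cls.__name__.lower(),
# so the lookup is case-insensitive. The args dict below is hypothetical; the
# real constructor kwargs are not part of this diff.
dataset: AbstractDataset = construct_dataset(
    name="Argoverse2SceneFlow",
    args=dict(root_dir="/data/argoverse2/val"),
)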
36 changes: 20 additions & 16 deletions bucketed_scene_flow_eval/datasets/argoverse2/argoverse_raw_data.py
@@ -8,12 +8,6 @@
import pandas as pd
from scipy.spatial.transform import Rotation

from bucketed_scene_flow_eval.datasets.shared_datastructures import (
AbstractSequence,
AbstractSequenceLoader,
CachedSequenceLoader,
RawItem,
)
from bucketed_scene_flow_eval.datastructures import (
SE2,
SE3,
@@ -25,7 +19,10 @@
RGBFrame,
RGBFrameLookup,
RGBImage,
TimeSyncedAVLidarData,
TimeSyncedRawItem,
)
from bucketed_scene_flow_eval.interfaces import AbstractSequence, CachedSequenceLoader
from bucketed_scene_flow_eval.utils import load_json

GROUND_HEIGHT_THRESHOLD = 0.4 # 40 centimeters
@@ -356,7 +353,9 @@ def _load_pose(self, idx) -> SE3:
)
return se3

def load(self, idx: int, relative_to_idx: int) -> RawItem:
def load(
self, idx: int, relative_to_idx: int
) -> tuple[TimeSyncedRawItem, TimeSyncedAVLidarData]:
assert idx < len(self), f"idx {idx} out of range, len {len(self)} for {self.dataset_dir}"
timestamp = self.timestamp_list[idx]
ego_pc = self._load_pc(idx)
@@ -384,17 +383,22 @@ def load(self, idx: int, relative_to_idx: int) -> RawItem:
self.camera_names,
)

return RawItem(
pc=pc_frame,
rgbs=rgb_frames,
is_ground_points=is_ground_points,
in_range_mask=in_range_mask_with_ground,
log_id=self.log_id,
log_idx=idx,
log_timestamp=timestamp,
return (
TimeSyncedRawItem(
pc=pc_frame,
rgbs=rgb_frames,
log_id=self.log_id,
log_idx=idx,
log_timestamp=timestamp,
),
TimeSyncedAVLidarData(
is_ground_points=is_ground_points, in_range_mask=in_range_mask_with_ground
),
)

def load_frame_list(self, relative_to_idx: Optional[int]) -> list[RawItem]:
def load_frame_list(
self, relative_to_idx: Optional[int]
) -> list[tuple[TimeSyncedRawItem, TimeSyncedAVLidarData]]:
return [
self.load(idx, relative_to_idx if relative_to_idx is not None else idx)
for idx in range(len(self))
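To show what the new two-part return of ArgoverseRawSequence.load looks like to a caller, a short sketch follows. The attribute names (pc, rgbs on TimeSyncedRawItem; is_ground_points, in_range_mask on TimeSyncedAVLidarData; full_pc and points on the point cloud frame) are taken from the diff above. How the sequence object itself is constructed, and the assumption that both masks are boolean arrays over the full point cloud, are not shown in this commit.

import numpy as np

def first_frame_non_ground_points(seq):
    # `seq` is assumed to be an already-constructed ArgoverseRawSequence.
    frame, lidar_meta = seq.load(idx=0, relative_to_idx=0)

    # Frame payload (TimeSyncedRawItem): point cloud frame plus RGB lookup.
    pc_frame = frame.pc
    rgb_frames = frame.rgbs  # keyed by camera name; unused here

    # Per-point lidar metadata now travels separately (TimeSyncedAVLidarData).
    keep = np.logical_and(~lidar_meta.is_ground_points, lidar_meta.in_range_mask)
    return pc_frame.full_pc.points[keep]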
117 changes: 72 additions & 45 deletions bucketed_scene_flow_eval/datasets/argoverse2/argoverse_scene_flow.py
@@ -5,14 +5,20 @@

import numpy as np

from bucketed_scene_flow_eval.datasets.shared_datastructures import (
AbstractSequence,
AbstractSequenceLoader,
CachedSequenceLoader,
RawItem,
SceneFlowItem,
from bucketed_scene_flow_eval.datastructures import (
EgoLidarFlow,
MaskArray,
PointCloud,
PointCloudFrame,
SemanticClassId,
SemanticClassIdArray,
SupervisedPointCloudFrame,
TimeSyncedAVLidarData,
TimeSyncedRawItem,
TimeSyncedSceneFlowItem,
VectorArray,
)
from bucketed_scene_flow_eval.datastructures import PointCloud, PointCloudFrame
from bucketed_scene_flow_eval.interfaces import CachedSequenceLoader
from bucketed_scene_flow_eval.utils.loaders import load_feather

from . import ArgoverseRawSequence
@@ -81,22 +87,25 @@ def _prep_flow(self, flow_dir: Path):
self.timestamp_list = self.timestamp_list[: len(self.flow_data_files) + 1]

@staticmethod
def get_class_str(class_id: int) -> Optional[str]:
if class_id not in CATEGORY_MAP:
def get_class_str(class_id: SemanticClassId) -> Optional[str]:
class_id_int = int(class_id)
if class_id_int not in CATEGORY_MAP:
return None
return CATEGORY_MAP[class_id]
return CATEGORY_MAP[class_id_int]

def _make_default_classes(self, pc: PointCloud) -> np.ndarray:
return np.ones(len(pc.points), dtype=np.int32) * CATEGORY_MAP_INV["BACKGROUND"]
def _make_default_classes(self, pc: PointCloud) -> SemanticClassIdArray:
return np.ones(len(pc.points), dtype=SemanticClassId) * CATEGORY_MAP_INV["BACKGROUND"]

def _load_flow(
self, idx, classes_0: np.ndarray
) -> tuple[Optional[np.ndarray], Optional[np.ndarray], np.ndarray]:
def _load_flow_feather(
self, idx: int, classes_0: SemanticClassIdArray
) -> tuple[VectorArray, MaskArray, SemanticClassIdArray]:
assert idx < len(self), f"idx {idx} out of range, len {len(self)} for {self.dataset_dir}"
# There is no flow information for the last pointcloud in the sequence.

if idx == len(self) - 1 or idx == -1:
return None, None, classes_0
assert (
idx != len(self) - 1
), f"idx {idx} is the last frame in the sequence, which has no flow data"
assert idx >= 0, f"idx {idx} is out of range"
flow_data_file = self.flow_data_files[idx]
flow_data = load_feather(flow_data_file, verbose=False)
is_valid_arr = flow_data["is_valid"].values
@@ -113,28 +122,45 @@ def _load_flow(

return flow_0_1, is_valid_arr, classes_0

def _load_no_flow(self, raw_item: RawItem) -> SceneFlowItem:
classes_0 = self._make_default_classes(raw_item.pc.pc)

return SceneFlowItem(
**vars(raw_item), pc_classes=classes_0, flowed_pc=copy.deepcopy(raw_item.pc)
def _make_tssf_item(
self, raw_item: TimeSyncedRawItem, classes_0: SemanticClassIdArray, flow: EgoLidarFlow
) -> TimeSyncedSceneFlowItem:
supervised_pc = SupervisedPointCloudFrame(
**vars(raw_item.pc),
full_pc_classes=classes_0,
)
return TimeSyncedSceneFlowItem(
pc=supervised_pc,
rgbs=raw_item.rgbs,
log_id=raw_item.log_id,
log_idx=raw_item.log_idx,
log_timestamp=raw_item.log_timestamp,
flow=flow,
)

def _load_no_flow(
self, raw_item: TimeSyncedRawItem, metadata: TimeSyncedAVLidarData
) -> tuple[TimeSyncedSceneFlowItem, TimeSyncedAVLidarData]:
classes_0 = self._make_default_classes(raw_item.pc.pc)
flow = EgoLidarFlow.make_no_flow(len(classes_0))
return self._make_tssf_item(raw_item, classes_0, flow), metadata

def _load_with_flow(self, raw_item: RawItem, idx: int, relative_to_idx: int) -> SceneFlowItem:
def _load_with_flow(
self,
raw_item: TimeSyncedRawItem,
metadata: TimeSyncedAVLidarData,
idx: int,
relative_to_idx: int,
) -> tuple[TimeSyncedSceneFlowItem, TimeSyncedAVLidarData]:
start_pose = self._load_pose(relative_to_idx)
idx_pose = self._load_pose(idx)
relative_pose = start_pose.inverse().compose(idx_pose)

classes_0_with_ground = self._make_default_classes(raw_item.pc.pc)
(
relative_global_frame_flow_0_1_with_ground,
is_valid_flow_with_ground_arr,
classes_0_with_ground,
) = self._load_flow(idx, classes_0_with_ground)

assert (
relative_global_frame_flow_0_1_with_ground is not None
), f"Flow data missing for {idx}"
) = self._load_flow_feather(idx, self._make_default_classes(raw_item.pc.pc))

relative_global_frame_with_ground_flowed_pc = raw_item.pc.global_pc.copy()
relative_global_frame_with_ground_flowed_pc.points[
@@ -145,24 +171,23 @@ def _load_with_flow(self, raw_item: RawItem, idx: int, relative_to_idx: int) ->
relative_pose.inverse()
)

return SceneFlowItem(
**vars(raw_item),
pc_classes=classes_0_with_ground,
flowed_pc=PointCloudFrame(
full_pc=ego_flowed_pc_with_ground, pose=raw_item.pc.pose, mask=raw_item.pc.mask
),
)
delta_flow = ego_flowed_pc_with_ground.points - raw_item.pc.full_global_pc.points

flow = EgoLidarFlow(full_flow=delta_flow, mask=is_valid_flow_with_ground_arr)
return (self._make_tssf_item(raw_item, classes_0_with_ground, flow), metadata)

def load(self, idx: int, relative_to_idx: int, with_flow: bool = True) -> SceneFlowItem:
def load(
self, idx: int, relative_to_idx: int, with_flow: bool = True
) -> tuple[TimeSyncedSceneFlowItem, TimeSyncedAVLidarData]:
assert idx < len(self), f"idx {idx} out of range, len {len(self)} for {self.dataset_dir}"
raw_item = super().load(idx, relative_to_idx)
raw_item, metadata = super().load(idx, relative_to_idx)

if with_flow:
return self._load_with_flow(raw_item, idx, relative_to_idx)
return self._load_with_flow(raw_item, metadata, idx, relative_to_idx)
else:
return self._load_no_flow(raw_item)
return self._load_no_flow(raw_item, metadata)

def load_frame_list(self, relative_to_idx: Optional[int]) -> list[RawItem]:
def load_frame_list(self, relative_to_idx: Optional[int]) -> list[tuple[TimeSyncedSceneFlowItem, TimeSyncedAVLidarData]]:
return [
self.load(
idx=idx,
@@ -323,12 +348,14 @@ class ArgoverseNoFlowSequence(ArgoverseSceneFlowSequence):
def _prep_flow(self, flow_dir: Path):
pass

def _load_flow(
self, idx, classes_0: np.ndarray
) -> tuple[Optional[np.ndarray], Optional[np.ndarray], np.ndarray]:
def _load_flow_feather(
self, idx: int, classes_0: SemanticClassIdArray
) -> tuple[VectorArray, MaskArray, SemanticClassIdArray]:
raise NotImplementedError("No flow data available for ArgoverseNoFlowSequence")

def load(self, idx: int, relative_to_idx: int, with_flow: bool = True) -> SceneFlowItem:
def load(
self, idx: int, relative_to_idx: int, with_flow: bool = True
) -> tuple[TimeSyncedSceneFlowItem, TimeSyncedAVLidarData]:
return super().load(idx, relative_to_idx, with_flow=False)


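Finally, a sketch of consuming the reworked scene flow output from ArgoverseSceneFlowSequence.load. The field names (frame.flow, flow.full_flow, flow.mask, frame.pc.full_pc) come from the diff above; everything else, including which coordinate frame the flow vectors live in and how callers should treat invalid rows, is an assumption for illustration rather than a statement of the library's intended semantics. Note that ArgoverseNoFlowSequence forces with_flow=False and substitutes EgoLidarFlow.make_no_flow(...), so callers see the same tuple shape with or without supervision.

def flowed_points(frame, only_valid=True):
    # `frame` is assumed to be the TimeSyncedSceneFlowItem half of the tuple
    # returned by ArgoverseSceneFlowSequence.load(); the TimeSyncedAVLidarData
    # half is not needed here.
    flow = frame.flow                  # EgoLidarFlow
    pts_t0 = frame.pc.full_pc.points   # (N, 3) points at time t
    pts_t1 = pts_t0 + flow.full_flow   # add per-point flow vectors

    if only_valid:
        # Keep only rows the dataset marks as having valid flow.
        return pts_t1[flow.mask]
    return pts_t1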
