diff --git a/habitat-lab/habitat/datasets/rearrange/configs/all_receptacles_test.yaml b/habitat-lab/habitat/datasets/rearrange/configs/all_receptacles_test.yaml new file mode 100644 index 0000000000..67465be7c1 --- /dev/null +++ b/habitat-lab/habitat/datasets/rearrange/configs/all_receptacles_test.yaml @@ -0,0 +1,52 @@ +--- +## All receptacles and objects are used. +# Define your own dataset path, +dataset_path: "data/replica_cad/replicaCAD.scene_dataset_config.json" +additional_object_paths: + - "data/objects/ycb/configs/" +correct_unstable_results: True +scene_sets: + - + name: "all_scenes" + included_substrings: + - "" + excluded_substrings: ["NONE"] + +object_sets: + - + name: "simple_objects" + included_substrings: + - "002_master_chef_can" + - "003_cracker_box" + - "004_sugar_box" + - "005_tomato_soup_can" + - "007_tuna_fish_can" + - "008_pudding_box" + - "009_gelatin_box" + - "010_potted_meat_can" + - "024_bowl" + excluded_substrings: [] +receptacle_sets: + - + name: "all_receptacles" + included_object_substrings: + - "" + excluded_object_substrings: [] + included_receptacle_substrings: + - "" + excluded_receptacle_substrings: [] + +scene_sampler: + type: "subset" + params: + scene_sets: ["all_scenes"] + +object_samplers: + - + name: "simple_objects_sample" + type: "uniform" + params: + object_sets: ["simple_objects"] + receptacle_sets: ["all_receptacles"] + num_samples: [10, 20] + orientation_sampling: "up" diff --git a/habitat-lab/habitat/datasets/rearrange/rearrange_generator.py b/habitat-lab/habitat/datasets/rearrange/rearrange_generator.py index 853a61ffa0..3801f70579 100644 --- a/habitat-lab/habitat/datasets/rearrange/rearrange_generator.py +++ b/habitat-lab/habitat/datasets/rearrange/rearrange_generator.py @@ -5,6 +5,7 @@ # LICENSE file in the root directory of this source tree. import os.path as osp +import time from collections import defaultdict try: @@ -26,6 +27,7 @@ from habitat.datasets.rearrange.rearrange_dataset import RearrangeEpisode from habitat.datasets.rearrange.samplers.receptacle import ( OnTopOfReceptacle, + Receptacle, ReceptacleSet, ReceptacleTracker, find_receptacles, @@ -391,29 +393,15 @@ def generate_scene(self) -> str: def visualize_scene_receptacles(self) -> None: """ - Generate a wireframe bounding box for each receptacle in the scene, aim the camera at it and record 1 observation. + Generate a debug line representation for each receptacle in the scene, aim the camera at it and record 1 observation. """ logger.info("visualize_scene_receptacles processing") receptacles = find_receptacles(self.sim) for receptacle in receptacles: logger.info("receptacle processing") - viz_objects = receptacle.add_receptacle_visualization(self.sim) - - # sample points in the receptacles to display - # for sample in range(25): - # sample_point = receptacle.sample_uniform_global(self.sim, 1.0) - # sutils.add_viz_sphere(self.sim, 0.025, sample_point) - - if viz_objects: - # point the camera at the 1st viz_object for the Receptacle - self.vdb.look_at( - viz_objects[0].root_scene_node.absolute_translation - ) - self.vdb.get_observation() - else: - logger.warning( - f"visualize_scene_receptacles: no visualization object generated for Receptacle '{receptacle.name}'." 
- ) + receptacle.debug_draw(self.sim) + self.vdb.look_at(receptacle.sample_uniform_global(self.sim, 1.0)) + self.vdb.get_observation() def generate_episodes( self, num_episodes: int = 1, verbose: bool = False @@ -545,7 +533,7 @@ def generate_single_episode(self) -> Optional[RearrangeEpisode]: self.vdb.make_debug_video(prefix="receptacles_") # sample object placements - object_to_containing_receptacle = {} + self.object_to_containing_receptacle = {} for sampler_name, obj_sampler in self._obj_samplers.items(): object_sample_data = obj_sampler.sample( self.sim, @@ -558,7 +546,7 @@ def generate_single_episode(self) -> Optional[RearrangeEpisode]: return None new_objects, receptacles = zip(*object_sample_data) for obj, rec in zip(new_objects, receptacles): - object_to_containing_receptacle[obj.handle] = rec + self.object_to_containing_receptacle[obj.handle] = rec if sampler_name not in self.episode_data["sampled_objects"]: self.episode_data["sampled_objects"][ sampler_name @@ -574,9 +562,13 @@ def generate_single_episode(self) -> Optional[RearrangeEpisode]: ) # debug visualization showing each newly added object if self._render_debug_obs: + logger.info( + f"Generating debug images for {len(new_objects)} objects..." + ) for new_object in new_objects: self.vdb.look_at(new_object.translation) self.vdb.get_observation() + logger.info(" ... done") # simulate the world for a few seconds to validate the placements if not self.settle_sim(): @@ -613,7 +605,7 @@ def generate_single_episode(self) -> Optional[RearrangeEpisode]: vdb=self.vdb, target_receptacles=target_receptacles[obj_sampler_name], goal_receptacles=goal_receptacles[sampler_name], - object_to_containing_receptacle=object_to_containing_receptacle, + object_to_containing_receptacle=self.object_to_containing_receptacle, ) if new_target_objects is None: return None @@ -694,7 +686,7 @@ def extract_recep_info(recep): ] name_to_receptacle = { - k: v.name for k, v in object_to_containing_receptacle.items() + k: v.name for k, v in self.object_to_containing_receptacle.items() } return RearrangeEpisode( @@ -787,9 +779,12 @@ def initialize_sim(self, scene_name: str, dataset_path: str) -> None: self.sim.agents[0].scene_node.translation = scene_bb.center() # initialize the debug visualizer - self.vdb = DebugVisualizer( - self.sim, output_path="rearrange_ep_gen_output/" + output_path = ( + "rearrange_ep_gen_output/" + if self.vdb is None + else self.vdb.output_path ) + self.vdb = DebugVisualizer(self.sim, output_path=output_path) def settle_sim( self, duration: float = 5.0, make_video: bool = True @@ -800,7 +795,9 @@ def settle_sim( """ if len(self.ep_sampled_objects) == 0: return True - # assert len(self.ep_sampled_objects) > 0 + + settle_start_time = time.time() + logger.info("Running placement stability analysis...") scene_bb = ( self.sim.get_active_scene_graph().get_root_node().cumulative_bb @@ -824,11 +821,13 @@ def settle_sim( if self._render_debug_obs: self.vdb.get_observation(obs_cache=settle_db_obs) + logger.info(f" ...done in {time.time()-settle_start_time} seconds.") # check stability of placements logger.info("Computing placement stability report:") + logger.info("----------------------------------------") max_settle_displacement = 0 error_eps = 0.1 - unstable_placements = [] + unstable_placements: List[str] = [] # list of unstable object handles for new_object in self.ep_sampled_objects: error = ( spawn_positions[new_object.handle] - new_object.translation @@ -839,6 +838,21 @@ def settle_sim( logger.info( f" Object '{new_object.handle}' 
unstable. Moved {error} units from placement." ) + if self._render_debug_obs: + self.vdb.peek_rigid_object( + obj=new_object, + peek_all_axis=True, + additional_savefile_prefix="unstable_", + debug_lines=[ + ( + [ + spawn_positions[new_object.handle], + new_object.translation, + ], + mn.Color4.red(), + ) + ], + ) logger.info( f" : unstable={len(unstable_placements)}|{len(self.ep_sampled_objects)} ({len(unstable_placements)/len(self.ep_sampled_objects)*100}%) : {unstable_placements}." ) @@ -852,5 +866,72 @@ def settle_sim( prefix="settle_", fps=30, obs_cache=settle_db_obs ) + # detailed receptacle stability report + logger.info(" Detailed sampling stats:") + + # receptacle: [num_objects, num_unstable_objects] + rec_num_obj_vs_unstable: Dict[Receptacle, List[int]] = {} + for obj_name, rec in self.object_to_containing_receptacle.items(): + if rec not in rec_num_obj_vs_unstable: + rec_num_obj_vs_unstable[rec] = [0, 0] + rec_num_obj_vs_unstable[rec][0] += 1 + if obj_name in unstable_placements: + rec_num_obj_vs_unstable[rec][1] += 1 + for rec, details in rec_num_obj_vs_unstable.items(): + logger.info( + f" receptacle '{rec.name}': ({details[1]}/{details[0]}) (unstable/total) objects." + ) + + success = len(unstable_placements) == 0 + + # optionally salvage the episode by removing unstable objects + if self.cfg.correct_unstable_results and not success: + logger.info(" attempting to correct unstable placements...") + for sampler_name, objects in self.episode_data[ + "sampled_objects" + ].items(): + obj_names = [obj.handle for obj in objects] + sampler = self._obj_samplers[sampler_name] + unstable_subset = [ + obj_name + for obj_name in unstable_placements + if obj_name in obj_names + ] + # check that we have freedom to reject some objects + if ( + len(objects) - len(unstable_subset) + >= sampler.num_objects[0] + ): + # remove the unstable objects from datastructures + self.episode_data["sampled_objects"][sampler_name] = [ + obj + for obj in self.episode_data["sampled_objects"][ + sampler_name + ] + if obj.handle not in unstable_subset + ] + self.ep_sampled_objects = [ + obj + for obj in self.ep_sampled_objects + if obj.handle not in unstable_subset + ] + else: + logger.info( + f" ... could not remove all unstable placements without violating minimum object sampler requirements for {sampler_name}" + ) + logger.info("----------------------------------------") + return False + logger.info( + f" ... corrected unstable placements successfully. 
Final object count = {len(self.ep_sampled_objects)}" + ) + # we removed all unstable placements + success = True + + logger.info("----------------------------------------") + + if self._render_debug_obs and success: + for obj in self.ep_sampled_objects: + self.vdb.peek_rigid_object(obj, peek_all_axis=True) + # return success or failure - return len(unstable_placements) == 0 + return success diff --git a/habitat-lab/habitat/datasets/rearrange/run_episode_generator.py b/habitat-lab/habitat/datasets/rearrange/run_episode_generator.py index 00f5835969..1a62703404 100644 --- a/habitat-lab/habitat/datasets/rearrange/run_episode_generator.py +++ b/habitat-lab/habitat/datasets/rearrange/run_episode_generator.py @@ -50,6 +50,8 @@ class RearrangeEpisodeGeneratorConfig: additional_object_paths: List[str] = field( default_factory=lambda: ["data/objects/ycb/"] ) + # optionally correct unstable states by removing extra unstable objects (within minimum samples limitations) + correct_unstable_results: bool = False # ----- resource set definitions ------ # Define the sets of scenes, objects, and receptacles which can be sampled from. # The SceneDataset will be searched for resources of each type with handles containing ANY "included" substrings and NO "excluded" substrings. diff --git a/habitat-lab/habitat/datasets/rearrange/samplers/object_sampler.py b/habitat-lab/habitat/datasets/rearrange/samplers/object_sampler.py index 8c55f2ade6..356fdde3d9 100644 --- a/habitat-lab/habitat/datasets/rearrange/samplers/object_sampler.py +++ b/habitat-lab/habitat/datasets/rearrange/samplers/object_sampler.py @@ -6,6 +6,7 @@ import math import random +import time from collections import defaultdict from typing import Dict, List, Optional, Tuple @@ -388,6 +389,8 @@ def sample( f" Trying to sample {self.target_objects_number} from range {self.num_objects}" ) + sampling_start_time = time.time() + pairing_start_time = sampling_start_time while ( len(new_objects) < self.target_objects_number and num_pairing_tries < self.max_sample_attempts @@ -415,8 +418,18 @@ def sample( self.receptacle_candidates = None if new_object is not None: + # when an object placement is successful, reset the try counter. + logger.info( + f" found obj|receptacle pairing ({len(new_objects)}/{self.target_objects_number}) in {num_pairing_tries} attempts ({time.time()-pairing_start_time}sec)." + ) + num_pairing_tries = 0 + pairing_start_time = time.time() new_objects.append((new_object, receptacle)) + logger.info( + f" Sampling process completed in ({time.time()-sampling_start_time}sec)." + ) + if len(new_objects) >= self.num_objects[0]: return new_objects diff --git a/habitat-lab/habitat/datasets/rearrange/samplers/receptacle.py b/habitat-lab/habitat/datasets/rearrange/samplers/receptacle.py index c5b3d8e6ad..6f220aef3d 100644 --- a/habitat-lab/habitat/datasets/rearrange/samplers/receptacle.py +++ b/habitat-lab/habitat/datasets/rearrange/samplers/receptacle.py @@ -4,15 +4,18 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. 
+import os +import random from abc import ABC, abstractmethod from copy import deepcopy from dataclasses import dataclass -from typing import Dict, List, Optional, Union +from typing import Any, Dict, List, Optional, Tuple, Union import magnum as mn import numpy as np import habitat_sim +from habitat.core.logging import logger from habitat.sims.habitat_simulator.sim_utilities import add_wire_box @@ -66,11 +69,24 @@ def sample_uniform_local( :param sample_region_scale: defines a XZ scaling of the sample region around its center. For example to constrain object spawning toward the center of a receptacle. """ - @abstractmethod def get_global_transform(self, sim: habitat_sim.Simulator) -> mn.Matrix4: """ Isolates boilerplate necessary to extract receptacle global transform of the Receptacle at the current state. """ + if self.parent_object_handle is None: + # global identify by default + return mn.Matrix4.identity_init() + elif not self.is_parent_object_articulated: + obj_mgr = sim.get_rigid_object_manager() + obj = obj_mgr.get_object_by_handle(self.parent_object_handle) + # NOTE: we use absolute transformation from the 2nd visual node (scaling node) and root of all render assets to correctly account for any COM shifting, re-orienting, or scaling which has been applied. + return obj.visual_scene_nodes[1].absolute_transformation() + else: + ao_mgr = sim.get_articulated_object_manager() + obj = ao_mgr.get_object_by_handle(self.parent_object_handle) + return obj.get_link_scene_node( + self.parent_link + ).absolute_transformation() def sample_uniform_global( self, sim: habitat_sim.Simulator, sample_region_scale: float @@ -91,6 +107,14 @@ def add_receptacle_visualization( """ return [] + @abstractmethod + def debug_draw(self, sim, color=None) -> None: + """ + Render the Receptacle with DebugLineRender utility at the current frame. + Simulator must be provided. If color is provided, the debug render will use it. + Must be called after each frame is rendered, before querying the image data. + """ + class OnTopOfReceptacle(Receptacle): def __init__(self, name: str, places: List[str]): @@ -112,6 +136,14 @@ def get_global_transform(self, sim: habitat_sim.Simulator) -> mn.Matrix4: return mn.Matrix4([[targ_T[j][i] for j in range(4)] for i in range(4)]) + def debug_draw(self, sim, color=None) -> None: + """ + Render the Receptacle with DebugLineRender utility at the current frame. + Simulator must be provided. If color is provided, the debug render will use it. + Must be called after each frame is rendered, before querying the image data. + """ + # TODO: + class AABBReceptacle(Receptacle): """ @@ -161,6 +193,7 @@ def sample_uniform_local( def get_global_transform(self, sim: habitat_sim.Simulator) -> mn.Matrix4: """ Isolates boilerplate necessary to extract receptacle global transform of the Receptacle at the current state. + This specialization adds override rotation handling for global bounding box Receptacles. """ if self.parent_object_handle is None: # this is a global stage receptacle @@ -192,17 +225,8 @@ def get_global_transform(self, sim: habitat_sim.Simulator) -> mn.Matrix4: l2w4 = l2w4.__matmul__(T.__matmul__(R).__matmul__(T.inverted())) return l2w4 - elif not self.is_parent_object_articulated: - obj_mgr = sim.get_rigid_object_manager() - obj = obj_mgr.get_object_by_handle(self.parent_object_handle) - # NOTE: we use absolute transformation from the 2nd visual node (scaling node) and root of all render assets to correctly account for any COM shifting, re-orienting, or scaling which has been applied. 
-            return obj.visual_scene_nodes[1].absolute_transformation()
-        else:
-            ao_mgr = sim.get_articulated_object_manager()
-            obj = ao_mgr.get_object_by_handle(self.parent_object_handle)
-            return obj.get_link_scene_node(
-                self.parent_link
-            ).absolute_transformation()
+        # base class implements getting the transform from attached objects
+        return super().get_global_transform(sim)

     def add_receptacle_visualization(
         self, sim: habitat_sim.Simulator
@@ -241,6 +265,143 @@ def add_receptacle_visualization(
             )
             return [box_obj]
+    def debug_draw(self, sim, color=None):
+        """
+        Render the AABBReceptacle with DebugLineRender utility at the current frame.
+        Simulator must be provided. If color is provided, the debug render will use it.
+        Must be called after each frame is rendered, before querying the image data.
+        """
+        # draw the box
+        if color is None:
+            color = mn.Color4.magenta()
+        dblr = sim.get_debug_line_render()
+        dblr.push_transform(self.get_global_transform(sim))
+        dblr.draw_box(self.bounds.min, self.bounds.max, color)
+        dblr.pop_transform()
+        # TODO: test this
+
+
+class TriangleMeshReceptacle(Receptacle):
+    """
+    Defines a Receptacle surface as a triangle mesh.
+    TODO: configurable maximum height.
+    """
+
+    def __init__(
+        self,
+        name: str,
+        mesh_data: Tuple[List[Any], List[Any]],  # vertices, indices
+        parent_object_handle: str = None,
+        parent_link: Optional[int] = None,
+        up: Optional[mn.Vector3] = None,
+    ) -> None:
+        """
+        :param name: The name of the Receptacle. Should be unique and descriptive for any one object.
+        :param up: The "up" direction of the Receptacle in local AABB space. Used for optionally culling receptacles in un-supportive states such as inverted surfaces.
+        :param parent_object_handle: The rigid or articulated object instance handle for the parent object to which the Receptacle is attached. None for globally defined stage Receptacles.
+        :param parent_link: Index of the link to which the Receptacle is attached if the parent is an ArticulatedObject. -1 denotes the base link. None for rigid objects and stage Receptacles.
+        """
+        super().__init__(name, parent_object_handle, parent_link, up)
+        self.mesh_data = mesh_data
+        self.area_weighted_accumulator = (
+            []
+        )  # normalized float weights for each triangle for sampling
+        assert len(mesh_data[1]) % 3 == 0, "must be triangles"
+        self.total_area = 0
+        for f_ix in range(int(len(mesh_data[1]) / 3)):
+            v = self.get_face_verts(f_ix)
+            w1 = v[1] - v[0]
+            w2 = v[2] - v[1]
+            self.area_weighted_accumulator.append(
+                0.5 * np.linalg.norm(np.cross(w1, w2))
+            )
+            self.total_area += self.area_weighted_accumulator[-1]
+        for f_ix in range(len(self.area_weighted_accumulator)):
+            self.area_weighted_accumulator[f_ix] = (
+                self.area_weighted_accumulator[f_ix] / self.total_area
+            )
+            if f_ix > 0:
+                self.area_weighted_accumulator[
+                    f_ix
+                ] += self.area_weighted_accumulator[f_ix - 1]
+        # print(self.area_weighted_accumulator)
+
+    def get_face_verts(self, f_ix):
+        verts = []
+        for ix in range(3):
+            verts.append(
+                np.array(
+                    self.mesh_data[0][self.mesh_data[1][int(f_ix * 3 + ix)]]
+                )
+            )
+        return verts
+
+    def sample_area_weighted_triangle(self):
+        """
+        Isolates the area weighted triangle sampling code.
+ """ + + def find_ge(a, x): + "Find leftmost item greater than or equal to x" + from bisect import bisect_left + + i = bisect_left(a, x) + if i != len(a): + return i + raise ValueError + + # first area weighted sampling of a triangle + sample_val = random.random() + tri_index = find_ge(self.area_weighted_accumulator, sample_val) + return tri_index + + def sample_uniform_local( + self, sample_region_scale: float = 1.0 + ) -> mn.Vector3: + """ + Sample a uniform random point from the mesh. + + :param sample_region_scale: defines a XZ scaling of the sample region around its center. For example to constrain object spawning toward the center of a receptacle. + """ + + if sample_region_scale != 1.0: + logger.warning( + "TriangleMeshReceptacle does not support 'sample_region_scale' != 1.0." + ) + + tri_index = self.sample_area_weighted_triangle() + + # then sample a random point in the triangle + # https://math.stackexchange.com/questions/538458/how-to-sample-points-on-a-triangle-surface-in-3d + coef1 = random.random() + coef2 = random.random() + if coef1 + coef2 >= 1: + coef1 = 1 - coef1 + coef2 = 1 - coef2 + v = self.get_face_verts(f_ix=tri_index) + rand_point = v[0] + coef1 * (v[1] - v[0]) + coef2 * (v[2] - v[0]) + + return rand_point + + def debug_draw(self, sim, color=None): + """ + Render the Receptacle with DebugLineRender utility at the current frame. + Draws the Receptacle mesh. + Simulator must be provided. If color is provided, the debug render will use it. + Must be called after each frame is rendered, before querying the image data. + """ + # draw all mesh triangles + if color is None: + color = mn.Color4.magenta() + dblr = sim.get_debug_line_render() + assert len(self.mesh_data[1]) % 3 == 0, "must be triangles" + for face in range(int(len(self.mesh_data[1]) / 3)): + verts = self.get_face_verts(f_ix=face) + for edge in range(3): + dblr.draw_transformed_line( + verts[edge], verts[(edge + 1) % 3], color + ) + def get_all_scenedataset_receptacles(sim) -> Dict[str, Dict[str, List[str]]]: """ @@ -262,6 +423,9 @@ def get_all_scenedataset_receptacles(sim) -> Dict[str, Dict[str, List[str]]]: stage_template = stm.get_template_by_handle(template_handle) for item in stage_template.get_user_config().get_subconfig_keys(): if item.startswith("receptacle_"): + print( + f"template file_directory = {stage_template.file_directory}" + ) if template_handle not in receptacles["stage"]: receptacles["stage"][template_handle] = [] receptacles["stage"][template_handle].append(item) @@ -272,6 +436,9 @@ def get_all_scenedataset_receptacles(sim) -> Dict[str, Dict[str, List[str]]]: obj_template = rotm.get_template_by_handle(template_handle) for item in obj_template.get_user_config().get_subconfig_keys(): if item.startswith("receptacle_"): + print( + f"template file_directory = {obj_template.file_directory}" + ) if template_handle not in receptacles["rigid"]: receptacles["rigid"][template_handle] = [] receptacles["rigid"][template_handle].append(item) @@ -290,9 +457,55 @@ def get_all_scenedataset_receptacles(sim) -> Dict[str, Dict[str, List[str]]]: return receptacles +def import_tri_mesh_ply(ply_file: str) -> Tuple[List[mn.Vector3], List[int]]: + """ + Returns a Tuple of (verts,indices) from a ply mesh. + NOTE: the input PLY must contain only triangles. + TODO: This could be replaced by a standard importer, but I didn't want to add additional dependencies for such as small feature. 
+ """ + mesh_data: Tuple[List[mn.Vector3], List[int]] = ([], []) + with open(ply_file) as f: + lines = [line.rstrip() for line in f] + assert lines[0] == "ply", f"Must be PLY format. '{ply_file}'" + assert "format ascii" in lines[1], f"Must be ascii PLY. '{ply_file}'" + # parse the header + line_index = 2 + num_verts = 0 + num_faces = 0 + while line_index < len(lines): + if lines[line_index].startswith("element vertex"): + num_verts = int(lines[line_index][14:]) + print(f"num_verts = {num_verts}") + elif lines[line_index].startswith("element face"): + num_faces = int(lines[line_index][12:]) + print(f"num_faces = {num_faces}") + elif lines[line_index] == "end_header": + # done parsing header + line_index += 1 + break + line_index += 1 + assert ( + len(lines) - line_index == num_verts + num_faces + ), f"Lines after header ({len(lines) - line_index}) should agree with forward declared content. {num_verts} verts and {num_faces} faces expected. '{ply_file}'" + # parse the verts + for vert_line in range(line_index, num_verts + line_index): + coords = [float(x) for x in lines[vert_line].split(" ")] + mesh_data[0].append(mn.Vector3(coords)) + line_index += num_verts + for face_line in range(line_index, num_faces + line_index): + assert ( + int(lines[face_line][0]) == 3 + ), f"Faces must be triangles. '{ply_file}'" + indices = [int(x) for x in lines[face_line].split(" ")[1:]] + mesh_data[1].extend(indices) + + return mesh_data + + def parse_receptacles_from_user_config( user_subconfig: habitat_sim._ext.habitat_sim_bindings.Configuration, parent_object_handle: Optional[str] = None, + parent_template_directory: str = "", valid_link_names: Optional[List[str]] = None, ao_uniform_scaling: float = 1.0, ) -> List[Union[Receptacle, AABBReceptacle]]: @@ -307,11 +520,18 @@ def parse_receptacles_from_user_config( Construct and return a list of Receptacle objects. Multiple Receptacles can be defined in a single user subconfig. 
""" - receptacles: List[Union[Receptacle, AABBReceptacle]] = [] + receptacles: List[ + Union[Receptacle, AABBReceptacle, TriangleMeshReceptacle] + ] = [] + + # pre-define unique specifier strings for parsing receptacle types + receptacle_prefix_string = "receptacle_" + mesh_receptacle_id_string = "receptacle_mesh_" + aabb_receptacle_id_string = "receptacle_aabb_" # search the generic user subconfig metadata looking for receptacles for sub_config_key in user_subconfig.get_subconfig_keys(): - if sub_config_key.startswith("receptacle_"): + if sub_config_key.startswith(receptacle_prefix_string): sub_config = user_subconfig.get_subconfig(sub_config_key) # this is a receptacle, parse it assert sub_config.has_value("position") @@ -363,20 +583,43 @@ def parse_receptacles_from_user_config( ) receptacle_scale = ao_uniform_scaling * sub_config.get("scale") - # TODO: adding more receptacle types will require additional logic here - receptacles.append( - AABBReceptacle( - name=receptacle_name, - bounds=mn.Range3D.from_center( - receptacle_position, - receptacle_scale, - ), - rotation=rotation, - up=up, - parent_object_handle=parent_object_handle, - parent_link=parent_link_ix, + if aabb_receptacle_id_string in sub_config_key: + receptacles.append( + AABBReceptacle( + name=receptacle_name, + bounds=mn.Range3D.from_center( + receptacle_position, + receptacle_scale, + ), + rotation=rotation, + up=up, + parent_object_handle=parent_object_handle, + parent_link=parent_link_ix, + ) + ) + elif mesh_receptacle_id_string in sub_config_key: + mesh_file = os.path.join( + parent_template_directory, sub_config.get("mesh_filepath") + ) + assert os.path.exists( + mesh_file + ), f"Configured receptacle mesh asset '{mesh_file}' not found." + # TODO: build the mesh_data entry from scale and mesh + mesh_data = import_tri_mesh_ply(mesh_file) + + receptacles.append( + TriangleMeshReceptacle( + name=receptacle_name, + mesh_data=mesh_data, + up=up, + parent_object_handle=parent_object_handle, + parent_link=parent_link_ix, + ) + ) + else: + raise AssertionError( + f"Receptacle detected without a subtype specifier: '{mesh_receptacle_id_string}'" ) - ) return receptacles @@ -391,32 +634,45 @@ def find_receptacles( obj_mgr = sim.get_rigid_object_manager() ao_mgr = sim.get_articulated_object_manager() - receptacles: List[Union[Receptacle, AABBReceptacle]] = [] + receptacles: List[ + Union[Receptacle, AABBReceptacle, TriangleMeshReceptacle] + ] = [] # search for global receptacles included with the stage stage_config = sim.get_stage_initialization_template() if stage_config is not None: stage_user_attr = stage_config.get_user_config() - receptacles.extend(parse_receptacles_from_user_config(stage_user_attr)) + receptacles.extend( + parse_receptacles_from_user_config( + stage_user_attr, + parent_template_directory=stage_config.file_directory, + ) + ) # rigid object receptacles for obj_handle in obj_mgr.get_object_handles(): obj = obj_mgr.get_object_by_handle(obj_handle) + source_template_file = obj.creation_attributes.file_directory user_attr = obj.user_attributes receptacles.extend( parse_receptacles_from_user_config( - user_attr, parent_object_handle=obj_handle + user_attr, + parent_object_handle=obj_handle, + parent_template_directory=source_template_file, ) ) # articulated object receptacles for obj_handle in ao_mgr.get_object_handles(): obj = ao_mgr.get_object_by_handle(obj_handle) + # TODO: no way to get filepath from AO currently. Add this API. 
+ source_template_file = "" user_attr = obj.user_attributes receptacles.extend( parse_receptacles_from_user_config( user_attr, parent_object_handle=obj_handle, + parent_template_directory=source_template_file, valid_link_names=[ obj.get_link_name(link) for link in range(-1, obj.num_links) diff --git a/habitat-lab/habitat/sims/habitat_simulator/debug_visualizer.py b/habitat-lab/habitat/sims/habitat_simulator/debug_visualizer.py index 162a19b865..0752685edc 100644 --- a/habitat-lab/habitat/sims/habitat_simulator/debug_visualizer.py +++ b/habitat-lab/habitat/sims/habitat_simulator/debug_visualizer.py @@ -4,7 +4,8 @@ # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. -from typing import Any, List, Optional, Union +import os +from typing import Any, List, Optional, Tuple, Union import magnum as mn import numpy as np @@ -109,17 +110,40 @@ def save_observation( # filename format "prefixmonth_day_year_hourminutesecondmicrosecond.png" date_time = datetime.now().strftime("%m_%d_%Y_%H%M%S%f") - file_path = output_path + prefix + date_time + ".png" + file_path = os.path.join(output_path, prefix + date_time + ".png") image.save(file_path) if show: image.show() return file_path + def render_debug_lines( + self, + debug_lines: Optional[List[Tuple[List[mn.Vector3], mn.Color4]]] = None, + ): + """ + Draw a set of debug lines with accomanying colors. + """ + # support None input to make useage easier elsewhere + if debug_lines is not None: + for points, color in debug_lines: + for p_ix, point in enumerate(points): + if p_ix == 0: + continue + prev_point = points[p_ix - 1] + self.debug_line_render.draw_transformed_line( + prev_point, + point, + color, + ) + def peek_rigid_object( self, obj: habitat_sim.physics.ManagedRigidObject, cam_local_pos: Optional[mn.Vector3] = None, peek_all_axis: bool = False, + additional_savefile_prefix="", + debug_lines: Optional[List[Tuple[List[mn.Vector3], mn.Color4]]] = None, + show: bool = False, ) -> str: """ Specialization to peek a rigid object. @@ -131,6 +155,9 @@ def peek_rigid_object( obj.root_scene_node.cumulative_bb, cam_local_pos, peek_all_axis, + additional_savefile_prefix, + debug_lines, + show, ) def peek_articulated_object( @@ -138,6 +165,9 @@ def peek_articulated_object( obj: habitat_sim.physics.ManagedArticulatedObject, cam_local_pos: Optional[mn.Vector3] = None, peek_all_axis: bool = False, + additional_savefile_prefix="", + debug_lines: Optional[List[Tuple[List[mn.Vector3], mn.Color4]]] = None, + show: bool = False, ) -> str: """ Specialization to peek an articulated object. @@ -149,7 +179,15 @@ def peek_articulated_object( obj_bb = get_ao_global_bb(obj) - return self._peek_object(obj, obj_bb, cam_local_pos, peek_all_axis) + return self._peek_object( + obj, + obj_bb, + cam_local_pos, + peek_all_axis, + additional_savefile_prefix, + debug_lines, + show, + ) def _peek_object( self, @@ -160,11 +198,15 @@ def _peek_object( obj_bb: mn.Range3D, cam_local_pos: Optional[mn.Vector3] = None, peek_all_axis: bool = False, + additional_savefile_prefix="", + debug_lines: Optional[List[Tuple[List[mn.Vector3], mn.Color4]]] = None, + show: bool = False, ) -> str: """ - Compute a camera placement to view an ArticulatedObject and show/save an observation. + Compute a camera placement to view an object and show/save an observation. Return the filepath. If peek_all_axis, then create a merged 3x2 matrix of images looking at the object from all angles. 
+ debug_lines: optionally provide a list of debug line render tuples, each with a sequence of points and a color. These will be displayed in all peek images. """ obj_abs_transform = obj.root_scene_node.absolute_transformation() look_at = obj_abs_transform.translation @@ -187,42 +229,51 @@ def _peek_object( * distance + look_at ) + self.render_debug_lines(debug_lines) return self.save_observation( - prefix="peek_" + obj.handle, + prefix=additional_savefile_prefix + "peek_" + obj.handle, look_at=look_at, look_from=look_from, + show=show, ) - else: - # collect axis observations - axis_obs: List[Any] = [] - for axis in range(6): - axis_vec = mn.Vector3() - axis_vec[axis % 3] = 1 if axis // 3 == 0 else -1 - look_from = ( - obj_abs_transform.transform_vector(axis_vec).normalized() - * distance - + look_at - ) - self.get_observation(look_at, look_from, axis_obs) - # stitch images together - stitched_image = None - from PIL import Image - from habitat_sim.utils import viz_utils as vut + # collect axis observations + axis_obs: List[Any] = [] + for axis in range(6): + axis_vec = mn.Vector3() + axis_vec[axis % 3] = 1 if axis // 3 == 0 else -1 + look_from = ( + obj_abs_transform.transform_vector(axis_vec).normalized() + * distance + + look_at + ) + self.render_debug_lines(debug_lines) + self.get_observation(look_at, look_from, axis_obs) + # stitch images together + stitched_image = None + from PIL import Image - for ix, obs in enumerate(axis_obs): - image = vut.observation_to_image(obs["rgb"], "color") - if stitched_image is None: - stitched_image = Image.new( - image.mode, (image.size[0] * 3, image.size[1] * 2) - ) - location = ( - image.size[0] * (ix % 3), - image.size[1] * (0 if ix // 3 == 0 else 1), + from habitat_sim.utils import viz_utils as vut + + for ix, obs in enumerate(axis_obs): + image = vut.observation_to_image(obs["rgb"], "color") + if stitched_image is None: + stitched_image = Image.new( + image.mode, (image.size[0] * 3, image.size[1] * 2) ) - stitched_image.paste(image, location) + location = ( + image.size[0] * (ix % 3), + image.size[1] * (0 if ix // 3 == 0 else 1), + ) + stitched_image.paste(image, location) + if show: stitched_image.show() - return "" + save_path = os.path.join( + self.output_path, + additional_savefile_prefix + "peek_6x_" + obj.handle + ".png", + ) + stitched_image.save(save_path) + return save_path def make_debug_video( self, @@ -249,7 +300,7 @@ def make_debug_video( from habitat_sim.utils import viz_utils as vut - file_path = output_path + prefix + date_time + file_path = os.path.join(output_path, prefix + date_time) logger.info(f"DebugVisualizer: Saving debug video to {file_path}") vut.make_video( obs_cache, self.default_sensor_uuid, "color", file_path, fps=fps diff --git a/scripts/hab2_bench/README.md b/scripts/hab2_bench/README.md index 6290afeb94..9da49d72b4 100644 --- a/scripts/hab2_bench/README.md +++ b/scripts/hab2_bench/README.md @@ -29,5 +29,3 @@ were used with 8 processes assigned to each GPU. We used python-3.8 and gcc-9.3 H2.0. We report average SPS over 10 runs and a 95% confidence-interval computed via standard error of the mean. Note that 8 processes do not fully utilize a 2080 Ti and thus multi-process multi-GPU performance may be better on machines with more CPU cores. 
- -TODO: add some example results on diff --git a/scripts/receptacle_annotation/README.md b/scripts/receptacle_annotation/README.md new file mode 100644 index 0000000000..93f38a02c9 --- /dev/null +++ b/scripts/receptacle_annotation/README.md @@ -0,0 +1,165 @@ +Receptacle Automation Pipeline +============================== + +The utilities in this directory are intended to assist users with annotating receptacles for procedural clutter object placement (e.g. for Habitat 2.0 rearrangement tasks). + +*A **receptacle** is typically defined as an object or space used to contain something.* + +# Context + +Many Embodied AI (EAI) tasks (e.g. robotic object rearrangement) involve context rich scenes with a variety of small clutter objects placed in and around larger furniture objects and architectural features. For example, utensils and flatware in kitchen cabinets and drawers. + +While artists and users can produce individual arrangements of a scene using standard modeling software, an automated, generative approach is desirable for producing large scale variations (e.g. thousands to millions of variations) for use in training and testing AI models. + +[Existing tools in Habitat-lab](https://github.com/facebookresearch/habitat-lab/tree/main/habitat-lab/habitat/datasets/rearrange) depend on the pre-process of annotating receptacle metadata for each and every scene. Currently this process is manual, requiring an artist to place bounding boxes using a modeling software (e.g. Blender) and export a JSON configuration object which is parsed by Habitat sampling logic. See [“The Manual Process”](#the-manual-process) below for details. + +## Citation +[Habitat 2.0: Training Home Assistants to Rearrange their Habitat](https://arxiv.org/abs/2106.14405) Andrew Szot, Alex Clegg, Eric Undersander, Erik Wijmans, Yili Zhao, John Turner, Noah Maestre, Mustafa Mukadam, Devendra Chaplot, Oleksandr Maksymets, Aaron Gokaslan, Vladimir Vondrus, Sameer Dharur, Franziska Meier, Wojciech Galuba, Angel Chang, Zsolt Kira, Vladlen Koltun, Jitendra Malik, Manolis Savva, Dhruv Batra. Advances in Neural Information Processing Systems (NeurIPS), 2021. + +# The Semi-Automated Receptacle Annotation Process +This approach utilizes Habitat-sim’s [Recast|Detour NavMesh](https://aihabitat.org/docs/habitat-sim/habitat_sim.nav.PathFinder.html) integration to compute a set of surfaces which may support or contain the clutter objects. The resulting mesh data is then post-processed into mesh receptacle data structures and manually culled or adjusted by an artist or user in Blender. + +The final result is a set of [PLY](https://en.wikipedia.org/wiki/PLY_(file_format)) mesh files and a chunk of JSON metadata which can be included in the stage or object configuration files. + +## Setup +First install habitat-sim and habitat-lab with support for Bullet physics as described in the [installation section](https://github.com/facebookresearch/habitat-lab#installation) of Habitat-lab. + +- [Download Blender](https://www.blender.org/download/) (tested with v3.3) and install. + - *Note: run Blender from the terminal on OSX and Linux to see script debug output and errors.* +- Pepare your scene assets in SceneDataset format as described [here](https://aihabitat.org/docs/habitat-sim/attributesJSON.html). For an example, see [ReplicaCAD](https://aihabitat.org/datasets/replica_cad/). 
- Configure a custom [NavMeshSettings](https://aihabitat.org/docs/habitat-sim/habitat_sim.nav.NavMeshSettings.html) JSON file or use the provided *clutter_object.navmeshsettings.json* (for small rearrange objects like cans).
+
+## Annotation Process
+*NOTE: This process currently supports ONLY global receptacles. While mesh receptacles can be added to object configs and will be parsed by the generator code, this use case has not yet been tested.*
+
+### Overview:
+1. [Generate Receptacles:](#1-generate-receptacles) Generate a NavMesh for the scene and export all islands as [.OBJ](https://en.wikipedia.org/wiki/Wavefront_.obj_file) files. (*generate_receptacle_navmesh_objs.py*)
+1. [[Blender] Import Receptacle Proposals:](#2-blender-import-receptacle-proposals) Import receptacle meshes in Blender with *blender_receptacle_annotation.py* in "read" mode.
+1. [[Blender] Modify Receptacle Set:](#3-blender-modify-receptacle-set) Manually cull, name, and optionally modify the proposed receptacle mesh set.
+1. [[Blender] Export Receptacles:](#4-blender-export-receptacles) Export the final metadata JSON and receptacle mesh set as [PLY](https://en.wikipedia.org/wiki/PLY_(file_format)) files with *blender_receptacle_annotation.py* in "write" mode.
+1. [Copy Results into SceneDataset:](#5-copy-results-into-scenedataset) Copy the metadata and assets into the SceneDataset directories and files.
+
+### 1. Generate Receptacles:
+Generates navmeshes and island .obj files for all stages or scenes in the provided dataset.
+
+#### Basic use:
+Generates a default navmesh for a human-sized entity and places all output in the `navmeshes/` directory:
+```bash
+#from root habitat-lab/ directory
+python scripts/receptacle_annotation/generate_receptacle_navmesh_objs.py --dataset path/to/my_scene.scene_dataset.json
+```
+
+Optionally provide a modified path for script output:
+
+```bash
+--output-dir path/to/directory/
+```
+
+#### Custom NavMeshSettings:
+You can optionally configure a custom [NavMeshSettings](https://aihabitat.org/docs/habitat-sim/habitat_sim.nav.NavMeshSettings.html) JSON file.
+
+```bash
+#from root habitat-lab/ directory
+python scripts/receptacle_annotation/generate_receptacle_navmesh_objs.py --dataset path/to/my_scene.scene_dataset.json --navmesh-settings path/to/my_settings.navmesh_settings.json
+```
+
+The example *clutter_object.navmeshsettings.json* is provided pre-configured for reasonable receptacle generation results for small clutter objects such as [YCB](https://github.com/facebookresearch/habitat-sim/blob/main/DATASETS.md#ycb-benchmarks---object-and-model-set).
+
+See "*Habitat-Sim Basics for Navigation*" from the [ECCV tutorial series](https://aihabitat.org/tutorial/2020/) for more details on configurable navmesh parameters.
+
+### 2. [Blender] Import Receptacle Proposals:
+Given (1) the set of navmesh island .objs and (2) the stage asset path from [Generate Receptacles](#1-generate-receptacles), run the *blender_receptacle_annotation.py* script in "read" mode from within Blender.
+
+#### Configure Script Parameters:
+Set the path to your output directory from step 1:
+```python
+path_to_receptacle_navmesh_assets = "navmeshes/"
+```
+Modify:
+- `stage_index` to choose which scene from your dataset to load.
+- `reload_scene` to avoid costly asset re-load if iterating on a single scene.
+- `cull_floor_like_receptacles` to optionally remove any proposed receptacles with average height at floor level.
+ +```python +mode = "read" +reload_scene = True +stage_index = 0 #determines which asset will be loaded from the directory +cull_floor_like_receptacles = False +``` + +*NOTE: This process will only load the stage asset. Objects added to the scene in scene_instance.json files will not be loaded in Blender automatically.* + +After running this script, you should see your stage asset and accompanying island meshes named `receptacle_mesh_`, `receptacle_mesh_.001`, ... `receptacle_mesh_.xxx`. + +### 3. [Blender] Modify Receptacle Set: +The goal of this manual phase is to select which meshes will make-up the final receptacle set and choose semantically meaningful names. + +*NOTE: all names must begin with prefix 'receptacle_mesh_'.* + +#### **Blender UI Tips:** +1. Select an object in the outline and press `numpad-'.'` with cursor in the 3D view panel to center camera view on an object. With the cursor in the outline panel, `numpad-'.'` will center on the object's outline entry. +1. `'TAB'` with an object selected and cursor in the 3D view panel to toggle between `Edit` and `Object` modes. +1. In `Edit` mode you can add, delete, and modify the meshes. +1. If your meshes are not aligned when initially imported, it may be the case that Habitat configs for your scene define a global re-orientation from the base asset. Rotate the parent object of your scene named "scene_frame" to correctly align with the loaded meshes. + +#### **Common Operations:** +*NOTE: Any triangle mesh will export correctly. Any new mesh faces MUST be triangulated.* +1. Naming: Default names (e.g. `receptacle_mesh_.001`) are not very informative. Edit text (leaving the `receptacle_mesh_` prefix) to semantically name the receptacles. These will later be used to define receptacle sets for clutter generation. +1. Culling false positives: +Some meshes generated from step 1 will not be reasonable receptacles. Delete these objects in the Blender browser. +1. Modifying meshes: Sometimes a receptacle will have gaps or erronoues faces. Use `Edit` mode to clean up the meshes. +1. Adding new receptacles: Completely new receptacles can be added by duplicating existing meshs or creating new geometry. For example (`Add`->`Mesh`->`Plane`) will create a planar mesh to start with. Remember to triangulate any new geometry before export. + +### 4. [Blender] Export Receptacles: +After authoring the desired receptacle mesh set, run the *blender_receptacle_annotation.py* script in "write" mode from within Blender to export the final meshes and metadata. + +```python +output_dir = "mesh_receptacle_out/" +mesh_relative_path = "" +mode = "write" +``` + +All receptacles meshes will be exported as .ply into the configured directory along with *receptacle_metadata.json*. '`mesh_relative_path`' defines the expected relative path between the *.json* and *.ply* files in their final SceneDataset locations. + +### 5. Copy Results into SceneDataset: +To use the new annotations in Habitat, you should copy the meshes and metadata into your SceneDataset. + +- The contents of *receptacle_metadata.json* can be copied into the `user_defined{}` object of the *.stage_config.json* or *.scene_instance.json* files. +- Meshes should be copied into the scene dataset such that the relative path from the metadata correctly routes to them from the *.json* location. + +# The Manual Process +*NOTE: This process currently supports ONLY axis-aligned bounding box (aabb) receptacles.* +1. Load the object or scene in Blender +1. 
Load the provided metadata export script (*blender_export_aabb_receptacle.py*) +1. Create a new Cube mesh primitive +1. Translate, scale, and rotate the box into the desired position +1. Name the box with prefix "receptacle_aabb_" (e.g. “receptacle_aabb_table_top”, “receptacle_aabb_left_middle_drawer”) +1. Edit the script to choose either "global" or "object" export mode +1. Run an exporter script to produce a JSON +1. Copy JSON into the object or scene’s configuration file under the "*user_defined*" tag. + +# Testing Receptacle Annotations: +The easiest way to test your annotations is to run the [rearrange generator](https://github.com/facebookresearch/habitat-lab/tree/main/habitat-lab/habitat/datasets/rearrange) in visual debugging mode with a custom configuration. + +1. Direct `dataset_path:` in *habitat-lab/habitat/datasets/rearrange/configs/all_receptacles_test.yaml* to your SceneDataset config file. Optionally modify object sets, receptacle sets, and sampler paramters. +1. Run the generator `--list` for a quick view of your receptacle configuration: + ```python + python habitat-lab/habitat/datasets/rearrange/run_episode_generator.py --list --config habitat-lab/habitat/datasets/rearrange/configs/all_receptacles_test.yaml + ``` + The output should list all the receptacles you have configured in stage and object config files. +1. Run the generator in verbose debug mode for log output, videos, and images of the sampling process: + ```python + python habitat-lab/habitat/datasets/rearrange/run_episode_generator.py --run --debug --verbose --config habitat-lab/habitat/datasets/rearrange/configs/all_receptacles_test.yaml --out mesh_receptacle_out/rearrange_generator_out/ --db-output mesh_receptacle_out/rearrange_generator_out/ + ``` + *NOTE: optionally configure `--out` for generated episodes and `--db-output` for debugging media output.* + + Metrics produced include: + - process timing (e.g. time to sample N objects) + - sampling failure statistics + - dynamic stability analysis: run on after sampling all objects to ensure placements are stable. Issues can indicate poor receptacle support surfaces (e.g. a sloped or un-even bed cover) + + Visual debug output includes: + - Video showing debug renders of all active receptacles + - Video from stability test + - Images of all items sampled + - Images of all items identified as unstable (prefix "unstable_") diff --git a/scripts/receptacle_annotation/blender_export_aabb_receptacles.py b/scripts/receptacle_annotation/blender_export_aabb_receptacles.py new file mode 100644 index 0000000000..d4b211c005 --- /dev/null +++ b/scripts/receptacle_annotation/blender_export_aabb_receptacles.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python + +# Copyright (c) Meta Platforms, Inc. and its affiliates. +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import json +import math +import os + +import bpy +import mathutils + +# NOTE: tested with Blender 3.x+ +# This script should be run from within Blender script interface. 
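For orientation, the exporter below writes a JSON object mapping each annotated box's name to its metadata. In global mode a single entry looks roughly like the following sketch (object name and values are illustrative; the structure mirrors what `write_global_receptacles` builds):

```python
# Illustrative only: one exported entry, shown as the equivalent Python dict.
example_entry = {
    "receptacle_aabb_table_top": {
        "name": "receptacle_aabb_table_top",
        "position": [1.2, 0.8, -0.5],  # rotated into the Habitat coordinate frame
        "rotation": [1.0, 0.0, 0.0, 0.0],
        "scale": [0.4, 0.01, 0.3],  # half-extents of the annotated box
        "up": [0, 0, 1],
    }
}
```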
+ +# NOTE: modify this path to include desired output directory +output_filename = "receptacle_output.json" +# the generic prefix marking an object as an aabb receptacle +mesh_receptacle_id_string = "receptacle_aabb_" + +# transformation from Blender to Habitat coordinate system +to_hab = mathutils.Quaternion((1.0, 0.0, 0.0), math.radians(-90.0)) +# the JSON config dict to fill +user_defined = {} + + +def write_object_receptacles(): + """ + Write out all AABB Recetpacle metadata for receptacles attached to an object (e.g. a table). + Use this to export metadata for .object_config.json and .ao_config.json files. + """ + for obj in bpy.context.scene.objects: + if "receptacle_" in obj.name: + receptacle_info = { + "name": obj.name, + # NOTE: hardcoded for now, set this yourself + "parent_object": "kitchen_island", + "parent_link": obj.parent.name.split("link_")[-1], + "position": list(obj.location), + # NOTE: need half-extents for the final size + "scale": list(obj.scale * 0.5), + # NOTE: default hardcoded value for now + "up": [0, 1, 0], + } + + # get top level parent + # top_parent = obj.parent + # while top_parent.parent is not None: + # top_parent = top_parent.parent + + user_defined[obj.name] = receptacle_info + + +def write_global_receptacles(): + """ + Write out all AABB Recetpacle metadata for receptacles in the global scene space. + Use this to export metadata for .stage_config.json and .scene_instance.json files. + """ + for obj in bpy.context.scene.objects: + if "receptacle_" in obj.name: + receptacle_info = {"name": obj.name} + + location = obj.location.copy() + rotation = obj.rotation_quaternion.copy() + location.rotate(to_hab) + rotation.rotate(to_hab) + + receptacle_info["position"] = list(location) + + receptacle_info["rotation"] = list(rotation) + + # NOTE: need half-extents for the final size + receptacle_info["scale"] = list(obj.scale * 0.5) + + # NOTE: default hardcoded value for now + receptacle_info["up"] = [0, 0, 1] + + user_defined[obj.name] = receptacle_info + + +# main + +# pick your mode: +write_global_receptacles() +# write_object_receptacles() + +# write the metadata +output_dir = output_filename[: -len(output_filename.split("/")[-1])] +os.makedirs(output_dir, exist_ok=True) +with open(output_filename, "w") as f: + json.dump(user_defined, f, indent=4) diff --git a/scripts/receptacle_annotation/blender_receptacle_annotation.py b/scripts/receptacle_annotation/blender_receptacle_annotation.py new file mode 100644 index 0000000000..a9f3dd2bb4 --- /dev/null +++ b/scripts/receptacle_annotation/blender_receptacle_annotation.py @@ -0,0 +1,356 @@ +#!/usr/bin/env python + +# Copyright (c) Meta Platforms, Inc. and its affiliates. +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import os +import random + +import bpy +import mathutils + +# the generic prefix marking an object as a mesh receptacle +mesh_receptacle_id_string = "receptacle_mesh_" + + +def get_mesh_area_and_avg_height(mesh_obj): + """ + Compute and return the area of a mesh object and its average vertex Y value. + """ + assert mesh_obj.type == "MESH", "Given object is not a mesh." + + # compute the face area + mesh_area = 0 + avg_y = 0.0 + for face in mesh_obj.data.polygons: + indices = face.vertices + assert len(indices) == 3, "Mesh must be triangulated." 
+ mesh_area += mathutils.geometry.area_tri( + mesh_obj.data.vertices[indices[0]].co, + mesh_obj.data.vertices[indices[1]].co, + mesh_obj.data.vertices[indices[2]].co, + ) + for index in indices: + avg_y += mesh_obj.data.vertices[index].co[1] + avg_y /= len(mesh_obj.data.polygons) + return mesh_area, avg_y + + +def cull_floor_height_receptacles(eps: float = 0.05) -> None: + """ + Deletes receptacle meshes which are likely floor areas. + 1. Compute the area and Y average of all receptacle meshes. + 2. The largest area mesh is assumed to be the floor. + 3. The floor mesh and all other meshes with similar Y avg are deleted. + + :param eps: epsilon threshold for floor receptacle classification + """ + mesh_receptacles = get_mesh_receptacle_objects() + mesh_details = {} + floor_mesh_height = 0 + floor_mesh_area = 0 + for mesh in mesh_receptacles: + mesh_details[mesh.name] = get_mesh_area_and_avg_height(mesh) + if mesh_details[mesh.name][0] > floor_mesh_area: + floor_mesh_area = mesh_details[mesh.name][0] + floor_mesh_height = mesh_details[mesh.name][1] + + print(f"Floor area {floor_mesh_area} and height {floor_mesh_height}") + + # delete meshes with floor height + print("Meshes culled for floor height:") + for mesh_name, details in mesh_details.items(): + if abs(details[1] - floor_mesh_height) < eps: + print(f"{mesh_name} with height {details[1]} deleted.") + bpy.data.objects.remove( + bpy.data.objects[mesh_name], do_unlink=True + ) + + +def collect_stage_paths(data_dir: str): + """ + Recursive function to collect paths to all directories with island objs, navmesh, and render asset cache file + """ + dir_paths = [] + has_navmesh = False + has_render_cache = False + has_obj = False + for item in os.listdir(data_dir): + item_path = os.path.join(data_dir, item) + if os.path.isdir(item_path): + # recurse into directories + dir_paths.extend(collect_stage_paths(item_path)) + elif os.path.isfile(item_path): + if item.endswith(".navmesh"): + has_navmesh = True + elif item.endswith(".obj"): + has_obj = True + elif item.endswith("render_asset_path.txt"): + has_render_cache = True + if has_navmesh and has_render_cache and has_obj: + dir_paths.append(data_dir) + return dir_paths + + +def get_mesh_receptacle_objects(): + """ + Return a list of all mesh receptacle objects in the scene. + """ + mesh_receptacles = [ + x + for x in bpy.data.objects.values() + if mesh_receptacle_id_string in x.name + ] + return mesh_receptacles + + +def clear_scene(): + """ + Clear the entire scene of all meshes and resources. + """ + objs = bpy.data.objects + for objs_name in objs.keys(): + bpy.data.objects.remove(objs[objs_name], do_unlink=True) + + # remove stale data blocks from memory + for block in bpy.data.meshes: + if block.users == 0: + bpy.data.meshes.remove(block) + + for block in bpy.data.materials: + if block.users == 0: + bpy.data.materials.remove(block) + + for block in bpy.data.textures: + if block.users == 0: + bpy.data.textures.remove(block) + + for block in bpy.data.images: + if block.users == 0: + bpy.data.images.remove(block) + + +def clear_navmeshes(): + """ + Delete all mesh receptacle objects. + """ + mesh_receptacles = get_mesh_receptacle_objects() + for mesh_obj in mesh_receptacles: + bpy.data.objects.remove(mesh_obj, do_unlink=True) + + +def load_island_mesh(datapath): + """ + Load and name a single island mesh component. 
+ """ + if os.path.isfile(datapath): + if datapath.endswith(".obj"): + bpy.ops.import_scene.obj(filepath=datapath) + elif datapath.endswith(".ply"): + bpy.ops.import_mesh.ply(filepath=datapath) + else: + print( + f"Cannot process receptacles from this format '{datapath.split('.')[-1]}'. Use .ply or .obj" + ) + return + mesh_objects = bpy.context.selected_objects + for mesh_obj in mesh_objects: + mesh_obj.name = mesh_receptacle_id_string + + +def load_island_meshes(datapath): + """ + Load a set of island objs indexed 0-N from a directory. + """ + assert os.path.exists(datapath) + for entry in os.listdir(datapath): + entry_path = os.path.join(datapath, entry) + if os.path.isfile(entry_path) and entry.endswith(".obj"): + load_island_mesh(entry_path) + + +def load_render_asset_from_cache(render_asset_cache_path): + assert os.path.isfile( + render_asset_cache_path + ), f"'{render_asset_cache_path}' does not exist." + assert render_asset_cache_path.endswith( + ".txt" + ), "must be a txt file containing only the render asset path." + with open(render_asset_cache_path, "r") as f: + render_asset_path = f.readline().strip("\n") + assert os.path.isfile(render_asset_path) + if render_asset_path.endswith(".glb"): + bpy.ops.import_scene.gltf(filepath=render_asset_path) + elif render_asset_path.endswith(".obj"): + bpy.ops.import_scene.obj(filepath=render_asset_path) + elif render_asset_path.endswith(".ply"): + bpy.ops.export_mesh.ply(filepath=render_asset_path) + else: + raise AssertionError( + f"Import of filetype '{render_asset_path}' not supported currently, aborthing scene load." + ) + + objs = bpy.context.selected_objects + # create an empty frame and parent the object + bpy.ops.object.empty_add( + type="ARROWS", align="WORLD", location=(0, 0, 0), scale=(1, 1, 1) + ) + frame = bpy.context.selected_objects[0] + frame.name = "scene_frame" + frame.rotation_mode = "QUATERNION" + for obj in objs: + if obj.parent == None: + obj.parent = frame + + +def assign_random_material_colors_to_rec_meshes(): + """ + Assign random colors to all materials attached to 'mesh_receptacle' objects. + """ + # get all mesh receptacles + mesh_receptacles = get_mesh_receptacle_objects() + for mesh_obj in mesh_receptacles: + # get all materials attached to this object + material_slots = mesh_obj.material_slots + for m in material_slots: + mat = m.material + # manipulate the material nodes + if mat.node_tree is not None: + for node in mat.node_tree.nodes: + # print(f" {node.bl_label}") + if node.bl_label == "Principled BSDF": + # print(f" {dir(node)}") + node.inputs["Base Color"].default_value = ( + random.random(), + random.random(), + random.random(), + 1, + ) + + +def get_receptacle_metadata( + object_name, output_directory, mesh_relative_path="" +): + """ + Generate a JSON metadata dict for the provided receptacle object. + """ + assert ( + mesh_receptacle_id_string in object_name + ), f"Are you sure '{object_name}' is a mesh receptacle?" 
+    obj = bpy.data.objects[object_name]
+
+    receptacle_info = {
+        "name": obj.name,
+        # NOTE: default hardcoded values for now
+        "position": [0, 0, 0],
+        "rotation": [1, 0, 0, 0],
+        "scale": [1, 1, 1],
+        "up": [0, 1, 0],
+        # record the relative filepath to the exported .ply file
+        "mesh_filepath": mesh_relative_path + object_name + ".ply",
+    }
+
+    # write the .ply file
+    bpy.ops.object.select_all(action="DESELECT")
+    obj.select_set(True)
+
+    bpy.ops.export_mesh.ply(
+        filepath=os.path.join(
+            output_directory, receptacle_info["mesh_filepath"]
+        ),
+        use_selection=True,
+        use_ascii=True,
+        # don't need extra mesh features
+        use_colors=False,
+        use_uv_coords=False,
+        use_normals=False,
+        # convert to habitat-ready coordinate system
+        axis_forward="-Z",
+        axis_up="Y",
+    )
+
+    # TODO: object parented mesh receptacles
+    # E.g.
+    # receptacle_info["parent_object"] = "kitchen_island"
+    # receptacle_info["parent_link"] = obj.parent.name.split("link_")[-1]
+
+    return receptacle_info
+
+
+def write_receptacle_metadata(output_filename, mesh_relative_path=""):
+    """
+    Collect and write all receptacle metadata to a JSON file.
+    """
+    user_defined = {}
+
+    mesh_receptacles = get_mesh_receptacle_objects()
+
+    output_directory = output_filename[: -len(output_filename.split("/")[-1])]
+    os.makedirs(output_directory, exist_ok=True)
+
+    for rec_obj in mesh_receptacles:
+        user_defined[rec_obj.name] = get_receptacle_metadata(
+            rec_obj.name, output_directory, mesh_relative_path
+        )
+
+    import json
+
+    with open(output_filename, "w") as f:
+        json.dump(user_defined, f, indent=4)
+
+
+################################################
+# main
+# NOTE: this should be run through the Blender script window, editing parameters as necessary
+
+# NOTE: This should be the global system path of "output_dir" from "generate_receptacle_navmesh_objs.py"
+path_to_receptacle_navmesh_assets = (
+    "/home/alexclegg/Documents/dev/habitat-lab/navmeshes/"
+)
+
+# define the output directory for meshes and metadata
+output_dir = "/home/alexclegg/Documents/dev/habitat-lab/mesh_receptacle_out/"
+# Optionally specify a custom relative path between the metadata and meshes.
+# For example, "meshes/" for .ply files in a `meshes` sub-directory relative to the .json
+mesh_relative_path = ""
+
+# 1. load the assets
+mode = "read"
+reload_scene = False  # if True, clear all assets and load the scene assets, otherwise assume we're in the same scene and only reload mesh receptacles
+stage_index = 1  # determines which asset will be loaded from the directory
+cull_floor_like_receptacles = False  # if True, assume the largest navmesh island is the floor and remove any other islands with the same average height
+# 2. do manual annotation
+# 3. write the plys and metadata
+# mode = "write"
+
+if mode == "read":
+    # clear any existing island meshes
+    if reload_scene:
+        clear_scene()
+    clear_navmeshes()
+
+    stage_paths = collect_stage_paths(path_to_receptacle_navmesh_assets)
+    print(stage_paths)
+    assert (
+        len(stage_paths) > stage_index
+    ), f"Index {stage_index} out of range. {len(stage_paths)} available."
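+    # NOTE: pick 'stage_index' above from this printed list of candidate stage directories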
+ + # first load the islands and the render asset + load_island_meshes(stage_paths[stage_index]) + if cull_floor_like_receptacles: + cull_floor_height_receptacles() + assign_random_material_colors_to_rec_meshes() + + # load the stage render asset + if reload_scene: + load_render_asset_from_cache( + os.path.join(stage_paths[stage_index], "render_asset_path.txt") + ) +elif mode == "write": + # write the results + write_receptacle_metadata( + output_filename=os.path.join(output_dir, "receptacle_metadata.json"), + mesh_relative_path=mesh_relative_path, + ) diff --git a/scripts/receptacle_annotation/clutter_object.navmeshsettings.json b/scripts/receptacle_annotation/clutter_object.navmeshsettings.json new file mode 100644 index 0000000000..7fda4d07fd --- /dev/null +++ b/scripts/receptacle_annotation/clutter_object.navmeshsettings.json @@ -0,0 +1,18 @@ +{ + "cellSize": 0.01, + "cellHeight": 0.01, + "agentHeight": 0.275, + "agentRadius": 0.025, + "agentMaxClimb": 0.06, + "agentMaxSlope": 5.0, + "regionMinSize": 10.0, + "regionMergeSize": 20.0, + "edgeMaxLen": 12.0, + "edgeMaxError": 1.2999999, + "vertsPerPoly": 6.0, + "detailSampleDist": 6.0, + "detailSampleMaxError": 1.0, + "filterLowHangingObstacles": true, + "filterLedgeSpans": false, + "filterWalkableLowHeightSpans": false +} diff --git a/scripts/receptacle_annotation/generate_receptacle_navmesh_objs.py b/scripts/receptacle_annotation/generate_receptacle_navmesh_objs.py new file mode 100644 index 0000000000..97c9751541 --- /dev/null +++ b/scripts/receptacle_annotation/generate_receptacle_navmesh_objs.py @@ -0,0 +1,217 @@ +#!/usr/bin/env python + +# Copyright (c) Meta Platforms, Inc. and its affiliates. +# This source code is licensed under the MIT license found in the +# LICENSE file in the root directory of this source tree. + +import os +from typing import Any, List + +import git + +import habitat_sim +from habitat_sim.utils.settings import default_sim_settings, make_cfg + +# get the output directory and data path +repo = git.Repo(".", search_parent_directories=True) +dir_path = repo.working_tree_dir +data_path = os.path.join(dir_path, "data") + +# setup the scene settings +sim_settings = default_sim_settings.copy() +sim_settings["enable_physics"] = False # kinematics only +sim_settings["output_dir"] = "navmeshes/" +sim_settings["navmesh_settings"] = habitat_sim.nav.NavMeshSettings() + + +def save_navmesh_data(sim: habitat_sim.Simulator, output_dir: str) -> None: + """ + Iteratively save each navmesh island to a separate OBJ file in the configured output directory. + """ + os.makedirs(output_dir, exist_ok=True) + if sim.pathfinder.is_loaded: + for island in range(sim.pathfinder.num_islands): + vert_data = sim.pathfinder.build_navmesh_vertices(island) + index_data = sim.pathfinder.build_navmesh_vertex_indices(island) + export_navmesh_data_to_obj( + filename=os.path.join(output_dir, f"{island}.obj"), + vertex_data=vert_data, + index_data=index_data, + ) + else: + print("Cannot save navmesh data, no pathfinder loaded") + + +def export_navmesh_data_to_obj( + filename: str, vertex_data: List[Any], index_data: List[int] +) -> None: + """ + Export triangle mesh data in simple OBJ format. + NOTE: Could use an exporter framework, but this way is very simple and introduces no dependencies. 
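+
+    :param filename: output filepath for the .obj
+    :param vertex_data: list of navmesh vertex positions (3D points)
+    :param index_data: flat list of triangle vertex indices, 3 per face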
+    """
+    with open(filename, "w") as f:
+        file_data = ""
+        for vert in vertex_data:
+            file_data += (
+                "v "
+                + str(vert[0])
+                + " "
+                + str(vert[1])
+                + " "
+                + str(vert[2])
+                + "\n"
+            )
+        assert len(index_data) % 3 == 0, "must be triangles"
+        for ix in range(int(len(index_data) / 3)):
+            # NOTE: obj starts indexing at 1
+            file_data += (
+                "f "
+                + str(index_data[ix * 3] + 1)
+                + " "
+                + str(index_data[ix * 3 + 1] + 1)
+                + " "
+                + str(index_data[ix * 3 + 2] + 1)
+                + "\n"
+            )
+        f.write(file_data)
+
+
+def make_cfg_mm(settings):
+    """
+    Create a Configuration with an attached MetadataMediator for shared dataset access and re-use without instantiating the Simulator object first.
+    """
+    config = make_cfg(settings)
+
+    # create and attach a MetadataMediator
+    mm = habitat_sim.metadata.MetadataMediator(config.sim_cfg)
+
+    return habitat_sim.Configuration(config.sim_cfg, config.agents, mm)
+
+
+def iteratively_export_all_scenes_navmesh(
+    config_with_mm, recompute_navmesh=False
+):
+    """
+    Iterate over all stages registered with the MetadataMediator, computing (or loading) a navmesh for each and exporting it along with per-island OBJ files and the render asset path.
+    """
+    # generate a SceneDataset report for quick investigation
+    print("-------------------------------")
+    print(config_with_mm.metadata_mediator.dataset_report())
+    # list all registered scenes
+    print("SCENES")
+    for scene_handle in config_with_mm.metadata_mediator.get_scene_handles():
+        print(scene_handle)
+    # list all registered stages
+    print("STAGES")
+    stage_handles = (
+        config_with_mm.metadata_mediator.stage_template_manager.get_templates_by_handle_substring()
+    )
+    for stage_handle in stage_handles:
+        print(stage_handle)
+
+    failure_log = []
+    # iterate over all registered stages to generate navmeshes
+    # NOTE: this iteration could be customized to hit a subset of stages or any registered scenes.
+    for stage_handle in stage_handles:
+        print("=================================================")
+        print(f"    {stage_handle}")
+        config_with_mm.sim_cfg.scene_id = stage_handle
+        if stage_handle == "NONE":
+            # skip the empty "NONE" scene which is always present
+            continue
+        try:
+            with habitat_sim.Simulator(config_with_mm) as sim:
+                # instance the Simulator with a selected scene/stage and compute/export the navmesh
+                stage_filename = stage_handle.split("/")[-1]
+                stage_directory = stage_handle[: -len(stage_filename)]
+                stage_output_dir = os.path.join(
+                    sim_settings["output_dir"],
+                    stage_filename.split(".")[0] + "/",
+                )
+                os.makedirs(stage_output_dir, exist_ok=True)
+
+                # export the render asset path for later use in Blender
+                stage_template = sim.metadata_mediator.stage_template_manager.get_template_by_handle(
+                    stage_handle
+                )
+                render_asset_path = os.path.abspath(
+                    stage_template.render_asset_handle
+                )
+                render_asset_record_filepath = os.path.join(
+                    stage_output_dir, "render_asset_path.txt"
+                )
+                with open(render_asset_record_filepath, "w") as f:
+                    f.write(render_asset_path)
+
+                # recompute the navmesh if necessary
+                if recompute_navmesh or not sim.pathfinder.is_loaded:
+                    navmesh_filename = (
+                        stage_filename.split(".")[0] + ".navmesh"
+                    )
+                    sim.recompute_navmesh(
+                        sim.pathfinder, sim_settings["navmesh_settings"]
+                    )
+                    if os.path.exists(stage_directory):
+                        sim.pathfinder.save_nav_mesh(
+                            stage_output_dir + navmesh_filename
+                        )
+                    else:
+                        failure_log.append(
+                            (
+                                stage_handle,
+                                f"No target directory for navmesh: {stage_directory}",
+                            )
+                        )
+                # export the navmesh OBJs
+                save_navmesh_data(sim, output_dir=stage_output_dir)
+
+        except Exception as e:
+            failure_log.append((stage_handle, str(e)))
+    print("=================================================")
+    print(f"Failure log = {failure_log}")
+    print(
+        f"Tried {len(stage_handles)-1} stages."
+    )  # manually decrement to exclude the always-present "NONE" stage
+    print("-------------------------------")
+
+
+if __name__ == "__main__":
+    import argparse
+
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--dataset",
+        dest="dataset",
+        type=str,
+        help="The SceneDataset config file.",
+        required=True,
+    )
+    parser.add_argument(
+        "--output-dir",
+        dest="output_dir",
+        type=str,
+        default="navmeshes/",
+        help="The output directory for the navmesh .OBJ files. Sub-directories will be created for each stage/scene.",
+    )
+    parser.add_argument(
+        "--navmesh-settings",
+        dest="navmesh_settings",
+        type=str,
+        default="",
+        help="Optionally provide a path to a navmesh settings JSON file to use instead of the default settings.",
+    )
+    args, _ = parser.parse_known_args()
+
+    sim_settings["scene_dataset_config_file"] = args.dataset
+    sim_settings["output_dir"] = args.output_dir
+
+    # load user-defined NavMeshSettings from JSON
+    if args.navmesh_settings != "":
+        assert os.path.exists(
+            args.navmesh_settings
+        ), f"Provided NavMeshSettings config file '{args.navmesh_settings}' not found, aborting."
+        assert args.navmesh_settings.endswith(
+            ".json"
+        ), "args.navmesh_settings must be a NavMeshSettings JSON file."
+        sim_settings["navmesh_settings"].read_from_json(args.navmesh_settings)
+
+    iteratively_export_all_scenes_navmesh(
+        make_cfg_mm(sim_settings), recompute_navmesh=True
+    )
diff --git a/test/test_rearrange_task.py b/test/test_rearrange_task.py
index ed2fed6306..12a0d6e035 100644
--- a/test/test_rearrange_task.py
+++ b/test/test_rearrange_task.py
@@ -11,7 +11,9 @@
 import os.path as osp
 import time
 from glob import glob
+from typing import List
 
+import magnum as mn
 import pytest
 import torch
 import yaml
@@ -19,9 +21,11 @@
 
 import habitat
 import habitat.datasets.rearrange.run_episode_generator as rr_gen
+import habitat.datasets.rearrange.samplers.receptacle as hab_receptacle
 import habitat.tasks.rearrange.rearrange_sim
 import habitat.tasks.rearrange.rearrange_task
 import habitat.utils.env_utils
+import habitat_sim
+import habitat_sim.utils.settings
 from habitat.config.default import _HABITAT_CFG_DIR, get_config
 from habitat.core.embodied_task import Episode
 from habitat.core.environments import get_env_class
@@ -31,6 +35,7 @@
 from habitat_baselines.config.default import get_config as baselines_get_config
 from habitat_baselines.rl.ddppo.ddp_utils import find_free_port
 from habitat_baselines.run import run_exp
+from habitat_sim.utils.common import d3_40_colors_hex
 
 CFG_TEST = "benchmark/rearrange/pick.yaml"
 GEN_TEST_CFG = (
@@ -238,3 +243,242 @@ def test_tp_srl(test_cfg_path, mode):
     # Deinit processes group
     if torch.distributed.is_initialized():
         torch.distributed.destroy_process_group()
+
+
+def place_scene_topdown_camera(sim):
+    """
+    Place the camera above the scene center, looking straight down.
+    """
+    scene_bb = sim.get_active_scene_graph().get_root_node().cumulative_bb
+    look_down = mn.Quaternion.rotation(mn.Deg(-90), mn.Vector3.x_axis())
+    max_dim = max(scene_bb.size_x(), scene_bb.size_z())
+    cam_pos = scene_bb.center()
+    cam_pos[1] += 0.52 * max_dim + scene_bb.size_y() / 2.0
+    sim.agents[0].scene_node.translation = cam_pos
+    sim.agents[0].scene_node.rotation = look_down
+
+
+def place_scene_isometric_camera(sim):
+    """
+    Place the camera at the edge of the scene, looking toward the scene center from above.
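+    The eye is raised above the scene and shifted to the bounding box's minimum X extent so renders show both layout and height.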
+    """
+    scene_bb = sim.get_active_scene_graph().get_root_node().cumulative_bb
+    cam_pos = scene_bb.center()
+    max_dim = max(scene_bb.size_x(), scene_bb.size_z())
+    cam_pos[1] += 0.52 * max_dim + scene_bb.size_y() / 2.0
+    cam_pos[0] = scene_bb.left
+    look_at_center = mn.Quaternion.from_matrix(
+        mn.Matrix4.look_at(
+            eye=cam_pos, target=scene_bb.center(), up=mn.Vector3(0, 1, 0)
+        ).rotation()
+    )
+    sim.agents[0].scene_node.translation = cam_pos
+    sim.agents[0].scene_node.rotation = look_at_center
+
+
+# NOTE: set 'debug_visualization' = True to produce images showing receptacles
+@pytest.mark.skipif(
+    not osp.exists("data/scene_datasets/hm3d/example/"),
+    reason="This test requires the HM3D example scene dataset.",
+)
+@pytest.mark.parametrize("debug_visualization", [False])
+@pytest.mark.parametrize(
+    "scene_asset",
+    [
+        "GLAQ4DNUx5U",
+        # "NBg5UqG3di3",
+        # "CFVBbU9Rsyb"
+    ],
+)
+def test_mesh_receptacles(debug_visualization, scene_asset):
+    hm3d_data_path = "data/scene_datasets/hm3d/example/hm3d_example_basis.scene_dataset_config.json"
+
+    mm = habitat_sim.metadata.MetadataMediator()
+    mm.active_dataset = hm3d_data_path
+    # print(mm.summary)
+    # print(mm.dataset_report())
+    # print(mm.get_scene_handles())
+
+    ##########################
+    # Test Mesh Receptacles
+    ##########################
+    # 1. Load the parameterized scene
+    sim_settings = habitat_sim.utils.settings.default_sim_settings.copy()
+    sim_settings["scene"] = scene_asset
+    sim_settings["scene_dataset_config_file"] = hm3d_data_path
+    sim_settings["sensor_height"] = 0
+    cfg = habitat_sim.utils.settings.make_cfg(sim_settings)
+    with habitat_sim.Simulator(cfg) as sim:
+        place_scene_topdown_camera(sim)
+
+        # 2. Compute a navmesh
+        if not sim.pathfinder.is_loaded:
+            # compute a navmesh on the ground plane
+            navmesh_settings = habitat_sim.NavMeshSettings()
+            navmesh_settings.set_defaults()
+            sim.recompute_navmesh(sim.pathfinder, navmesh_settings, True)
+
+        # 3. Create receptacles from navmesh data
+        #    a) global receptacles
+        receptacles = []
+        # get navmesh data per-island, convert to lists, create Receptacles
+        for isl_ix in range(sim.pathfinder.num_islands):
+            island_verts = sim.pathfinder.build_navmesh_vertices(isl_ix)
+            island_ixs = sim.pathfinder.build_navmesh_vertex_indices(isl_ix)
+            mesh_receptacle = hab_receptacle.TriangleMeshReceptacle(
+                name=str(isl_ix), mesh_data=(island_verts, island_ixs)
+            )
+            receptacles.append(mesh_receptacle)
+
+        # 4. render receptacle debug visualization (vs. navmesh vis)
+        observations = []
+        if debug_visualization:
+            sim.navmesh_visualization = True
+            observations.append(sim.get_sensor_observations())
+            sim.navmesh_visualization = False
+            for isl_ix, mesh_rec in enumerate(receptacles):
+                isl_color = mn.Color4.from_srgb(
+                    int(d3_40_colors_hex[isl_ix], base=16)
+                )
+                mesh_rec.debug_draw(sim, color=isl_color)
+            observations.append(sim.get_sensor_observations())
+
+        # 5. sample from receptacles
+        samples_per_unit_area = 50
+
+        rec_samples: List[List[mn.Vector3]] = []
+        for isl_ix, mesh_rec in enumerate(receptacles):
+            rec_samples.append([])
+            num_samples = max(
+                1, int(mesh_rec.total_area * samples_per_unit_area)
+            )
+            for _samp_ix in range(num_samples):
+                rec_samples[-1].append(
+                    mesh_rec.sample_uniform_global(
+                        sim, sample_region_scale=1.0
+                    )
+                )
+                # test that the samples are on the source NavMesh
+                assert (
+                    sim.pathfinder.snap_point(
+                        rec_samples[-1][-1], island_index=isl_ix
+                    )
+                    - rec_samples[-1][-1]
+                ).length() < 0.01, "Sample is not on the island."
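+
+        # optionally draw the accepted samples, color-coded by island, for visual inspection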
+        if debug_visualization:
+            dblr = sim.get_debug_line_render()
+            # draw the samples
+            for isl_ix, samples in enumerate(rec_samples):
+                isl_color = mn.Color4.from_srgb(
+                    int(d3_40_colors_hex[isl_ix], base=16)
+                )
+                for sample in samples:
+                    dblr.draw_circle(sample, 0.05, isl_color)
+            observations.append(sim.get_sensor_observations())
+
+        # 6. test that sampling is correct (fraction of samples in each triangle matches its area weight)
+        samples_per_unit_area = 10000
+        for mesh_rec in receptacles:
+            num_samples = max(
+                1, int(mesh_rec.total_area * samples_per_unit_area)
+            )
+            tri_samples: List[int] = [
+                mesh_rec.sample_area_weighted_triangle()
+                for _samp_ix in range(num_samples)
+            ]
+            for tri_ix in range(len(mesh_rec.area_weighted_accumulator)):
+                # compute the weight from the cumulative weight accumulator
+                weight = mesh_rec.area_weighted_accumulator[tri_ix]
+                if tri_ix > 0:
+                    weight -= mesh_rec.area_weighted_accumulator[tri_ix - 1]
+                num_tri = tri_samples.count(tri_ix)
+                assert (
+                    abs(weight - num_tri / num_samples) < 0.005
+                ), "area weighting may be off"
+
+    # show observations
+    if debug_visualization:
+        from habitat_sim.utils import viz_utils as vut
+
+        for obs in observations:
+            vut.observation_to_image(obs["color_sensor"], "color").show()
+
+
+# NOTE: set 'debug_visualization' = True to produce images showing receptacles
+@pytest.mark.skipif(
+    not osp.exists("data/test_assets/"),
+    reason="This test requires habitat-sim test assets.",
+)
+@pytest.mark.parametrize("debug_visualization", [False])
+def test_receptacle_parsing(debug_visualization):
+    observations = []
+
+    ##########################
+    # Test Receptacle Parsing
+    ##########################
+    # 1. Load the test scene
+    sim_settings = habitat_sim.utils.settings.default_sim_settings.copy()
+    sim_settings[
+        "scene"
+    ] = "data/test_assets/scenes/simple_room.stage_config.json"
+    sim_settings["sensor_height"] = 0
+    cfg = habitat_sim.utils.settings.make_cfg(sim_settings)
+    cfg.sim_cfg.scene_light_setup = ""
+    cfg.sim_cfg.override_scene_light_defaults = True
+    with habitat_sim.Simulator(cfg) as sim:
+        place_scene_isometric_camera(sim)
+
+        # load test assets
+        sim.metadata_mediator.object_template_manager.load_configs(
+            "data/test_assets/objects/chair.object_config.json"
+        )
+        # TODO: add an AO w/ receptacles also
+
+        # test quick receptacle listing:
+        list_receptacles = hab_receptacle.get_all_scenedataset_receptacles(sim)
+        print(f"list_receptacles = {list_receptacles}")
+        # receptacles from stage configs:
+        # assert "receptacle_aabb_simpleroom_test" in list_receptacles["stage"]['data/test_assets/scenes/simple_room.stage_config.json']
+        # assert "receptacle_mesh_simpleroom_test" in list_receptacles["stage"]['data/test_assets/scenes/simple_room.stage_config.json']
+        # receptacles from rigid object configs:
+        # assert "receptacle_aabb_chair_test" in list_receptacles["rigid"]['data/test_assets/objects/chair.object_config.json']
+        # assert "receptacle_mesh_chair_test" in list_receptacles["rigid"]['data/test_assets/objects/chair.object_config.json']
+        # TODO: receptacles from articulated object configs:
+        # assert "" in list_receptacles["articulated"]
+
+        # parse the metadata into Receptacle objects and test them
+        test_receptacles = hab_receptacle.find_receptacles(sim)
+
+        # visualize all receptacles and test debug_draw
+        for rec in test_receptacles:
+            rec.debug_draw(sim)
+            observations.append(sim.get_sensor_observations())
+            # then sample from the receptacle and draw the samples
+            # TODO: is sampling necessary here?
+            rec_samples: List[mn.Vector3] = []
+            num_samples = 50
+            for _samp_ix in range(num_samples):
+                rec_samples.append(
+                    rec.sample_uniform_global(sim, sample_region_scale=1.0)
+                )
+
+            if debug_visualization:
+                dblr = sim.get_debug_line_render()
+                # draw the samples
+                for sample in rec_samples:
+                    dblr.draw_circle(
+                        translation=mn.Vector3(sample),
+                        radius=0.05,
+                        color=mn.Color4.magenta(),
+                    )
+                observations.append(sim.get_sensor_observations())
+
+    # show observations
+    if debug_visualization:
+        from habitat_sim.utils import viz_utils as vut
+
+        for obs in observations:
+            vut.observation_to_image(obs["color_sensor"], "color").show()