#!/bin/bash
# Preprocess the TUM freiburg2_desk sequence for ObVi-SLAM:
#   1. reformat raw ORB-SLAM stereo output,
#   2. initialize trajectory and features from the ORB output,
#   3. sparsify the trajectory using the given parameter config.
# Abort immediately if any stage fails so later stages never consume
# partial output.
set -e

SLAMDIR=/home/tiejean/Documents/projects/ut_vslam
DATADIR=/home/tiejean/Documents/mnt/oslam
ORB_OUT=$DATADIR/orb_out
ORB_POST_PROCESS=$DATADIR/orb_post_process

bagname=freiburg2_desk
configname=base7a_2_fallback_a_2

# BUGFIX: was ${base7a_2_fallback_a_2} — an undefined variable that expanded
# to the empty string, yielding ".../config/.json". Use ${configname}.
configfile=$SLAMDIR/config/${configname}.json
calibration_dir=$DATADIR/calibration/
orb_out_dir=$ORB_OUT/$bagname/
unsparsified_orb_out=$ORB_POST_PROCESS/unsparsified_ut_vslam_in/$bagname/
sparsified_orb_out=$ORB_POST_PROCESS/sparsified_ut_vslam_in/$configname/$bagname/

mkdir -p "$orb_out_dir"
mkdir -p "$unsparsified_orb_out"
mkdir -p "$sparsified_orb_out"

python3 src/data_preprocessing_utils/orb_stereo_reformat_data.py \
    -i "$orb_out_dir" -o "$unsparsified_orb_out"

./bin/initialize_traj_and_feats_from_orb_out \
    --raw_data_path "$orb_out_dir" \
    --calibration_path "$calibration_dir" \
    --processed_data_path "$unsparsified_orb_out"

./bin/orb_trajectory_sparsifier \
    --param_prefix "$bagname" \
    --input_processed_data_path "$unsparsified_orb_out" \
    --output_processed_data_path "$sparsified_orb_out" \
    --params_config_file "$configfile"
import json
import argparse

# COCO class ids -> human-readable names, restricted to the classes expected
# in the TUM desk scenes. Ids follow the 80-class YOLOv5 convention:
# https://github.com/ultralytics/yolov5/blob/master/data/coco.yaml
kIds2Clsnames = {
    0: "person",
    14: "bird",
    15: "cat",
    16: "dog",
    23: "giraffe",
    26: "handbag",
    32: "sports ball",
    39: "bottle",
    41: "cup",
    42: "fork",
    43: "knife",
    44: "spoon",
    45: "bowl",
    47: "apple",
    51: "carrot",
    54: "donut",
    56: "chair",
    58: "potted plant",
    # BUGFIX: was "dinning table"; the COCO class name for id 60 is
    # "dining table", and the wrong spelling was written into the CSV.
    60: "dining table",
    62: "tv",
    64: "mouse",
    65: "remote",
    66: "keyboard",
    67: "cell phone",
    73: "book",
    75: "vase",
    76: "scissors",
    77: "teddy bear",
}
kDelimiter = ", "


def double2Ints(time: float):
    """Split a floating-point timestamp into (seconds, nanoseconds).

    NOTE(review): the float->nanosecond conversion is only as precise as the
    double representation of the timestamp; acceptable here because the
    timestamps come from fixed-precision file names.
    """
    seconds = int(time)
    return seconds, int((time - seconds) * 1e9)


def parse_args():
    """Parse command-line arguments: input .json path and output .csv path."""
    parser = argparse.ArgumentParser(
        description="Convert object detections from OA-SLAM to ObVi-SLAM format")
    parser.add_argument("--inpath", required=True, type=str,
                        help="input detection file .json")
    parser.add_argument("--outpath", required=True, type=str,
                        help="output detection file .csv")
    return parser.parse_args()


if __name__ == "__main__":
    args = parse_args()
    # open() itself raises FileNotFoundError/OSError on failure (the old
    # `if fp.closed: raise ...` checks were dead code); `with` guarantees
    # both handles are closed even if parsing fails mid-way.
    with open(args.inpath, "r") as ifp, open(args.outpath, "w") as ofp:
        data = json.load(ifp)
        ofp.write("min_pixel_x, min_pixel_y, max_pixel_x, max_pixel_y, "
                  "semantic_class, seconds, nano_seconds, camera_id, "
                  "detection_confidence\n")
        for fdata in data:
            # File names are "<timestamp>.<3-char extension>"; strip the
            # trailing ".png"-style suffix to recover the timestamp.
            seconds, nano_seconds = double2Ints(float(fdata["file_name"][:-4]))
            camera_id = 1  # single-camera TUM sequences
            for detection in fdata["detections"]:
                minx, miny, maxx, maxy = detection["bbox"][:4]
                cls_id = detection["category_id"]
                if cls_id not in kIds2Clsnames:
                    # Surface unmapped class ids, then skip the detection.
                    print(cls_id)
                    continue
                row = [minx, miny, maxx, maxy, kIds2Clsnames[cls_id],
                       seconds, nano_seconds, camera_id,
                       detection["detection_score"]]
                ofp.write(kDelimiter.join(str(token) for token in row) + "\n")