-
Notifications
You must be signed in to change notification settings - Fork 10
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
add preprocessing script for TUM dataset
- Loading branch information
Showing
3 changed files
with
108 additions
and
1 deletion.
There are no files selected for viewing
31 changes: 31 additions & 0 deletions
31
convenience_scripts/tum/taijing/tum_freiburg2_desk_preprocess.sh
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,31 @@ | ||
#!/bin/bash
# Preprocess ORB-SLAM output for the TUM freiburg2_desk sequence:
# reformat the raw stereo output, initialize trajectory/features, and
# sparsify the trajectory into ObVi-SLAM input format.

SLAMDIR=/home/tiejean/Documents/projects/ut_vslam
DATADIR=/home/tiejean/Documents/mnt/oslam  # fixed typo: was DARADIR
ORB_OUT=$DATADIR/orb_out
ORB_POST_PROCESS=$DATADIR/orb_post_process

bagname=freiburg2_desk
configname=base7a_2_fallback_a_2

# BUG FIX: derive the config file from $configname. The original used
# ${base7a_2_fallback_a_2}, an undefined variable that expands to the empty
# string, yielding ".../config/.json".
configfile=$SLAMDIR/config/${configname}.json
calibration_dir=$DATADIR/calibration/
orb_out_dir=$ORB_OUT/$bagname/
unsparsified_orb_out=$ORB_POST_PROCESS/unsparsified_ut_vslam_in/$bagname/
sparsified_orb_out=$ORB_POST_PROCESS/sparsified_ut_vslam_in/$configname/$bagname/

mkdir -p $orb_out_dir
mkdir -p $unsparsified_orb_out
mkdir -p $sparsified_orb_out

# Step 1: reformat raw ORB stereo output into the unsparsified layout.
python3 src/data_preprocessing_utils/orb_stereo_reformat_data.py \
    -i $orb_out_dir -o $unsparsified_orb_out

# Step 2: build initial trajectory and feature estimates from the ORB output.
./bin/initialize_traj_and_feats_from_orb_out \
    --raw_data_path $orb_out_dir \
    --calibration_path $calibration_dir \
    --processed_data_path $unsparsified_orb_out

# Step 3: sparsify the trajectory using the chosen parameter config.
./bin/orb_trajectory_sparsifier \
    --param_prefix $bagname \
    --input_processed_data_path $unsparsified_orb_out \
    --output_processed_data_path $sparsified_orb_out \
    --params_config_file $configfile
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
76 changes: 76 additions & 0 deletions
76
src/evaluation/objects/convert_object_detections_from_json_to_csv.py
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,76 @@ | ||
import json | ||
import argparse | ||
import os | ||
|
||
# Mapping from COCO class id to class name (the subset of classes of interest).
# Reference: https://github.com/ultralytics/yolov5/blob/master/data/coco.yaml
# NOTE(fix): id 60 is "dining table" in coco.yaml; the original had the
# misspelling "dinning table". Downstream consumers matching on class name
# should use the corrected spelling.
kIds2Clsnames = {
    0: "person",
    14: "bird",
    15: "cat",
    16: "dog",
    23: "giraffe",
    26: "handbag",
    32: "sports ball",
    39: "bottle",
    41: "cup",
    42: "fork",
    43: "knife",
    44: "spoon",
    45: "bowl",
    47: "apple",
    51: "carrot",
    54: "donut",
    56: "chair",
    58: "potted plant",
    60: "dining table",
    62: "tv",
    64: "mouse",
    65: "remote",
    66: "keyboard",
    67: "cell phone",
    73: "book",
    75: "vase",
    76: "scissors",
    77: "teddy bear",
}
# Field separator for rows of the output CSV.
kDelimiter = ", "
|
||
def double2Ints(time: float) -> "tuple[int, int]":
    """Split a floating-point timestamp into whole (seconds, nanoseconds).

    Uses round() instead of truncation so fractions that are not exactly
    representable in binary floating point (e.g. 0.1) do not lose a
    nanosecond, and carries into the seconds field when the fraction
    rounds up to a full second.
    """
    seconds = int(time)
    nanos = round((time - seconds) * 1e9)
    if nanos >= 1_000_000_000:  # fraction rounded up to a whole second
        seconds += 1
        nanos = 0
    return seconds, nanos
|
||
def parse_args():
    """Parse command-line options for the detection-format converter.

    Returns the parsed namespace with `inpath` (input .json detections)
    and `outpath` (output .csv detections), both required strings.
    """
    parser = argparse.ArgumentParser(
        description="Convert object detections from OA-SLAM to ObVi-SLAM format")
    for flag, help_text in (
            ("--inpath", "input detection file .json"),
            ("--outpath", "output detection file .csv")):
        parser.add_argument(flag, required=True, type=str, help=help_text)
    return parser.parse_args()
|
||
if __name__ == "__main__":
    args = parse_args()
    # open() raises OSError/FileNotFoundError itself on failure, so the
    # original post-hoc `.closed` checks were dead code; `with` also
    # guarantees both handles are closed on any exception.
    with open(args.inpath, "r") as ifp, open(args.outpath, "w") as ofp:
        data = json.load(ifp)
        ofp.write("min_pixel_x, min_pixel_y, max_pixel_x, max_pixel_y, semantic_class, seconds, nano_seconds, camera_id, detection_confidence\n")
        for fdata in data:
            # file_name looks like "<float timestamp>.<3-char ext>"; strip the
            # extension and split the timestamp -- assumption, TODO confirm
            # against the OA-SLAM output naming.
            seconds, nano_seconds = double2Ints(float(fdata["file_name"][:-4]))
            camera_id = 1  # all detections attributed to camera 1
            for detection in fdata["detections"]:
                minx, miny, maxx, maxy = detection["bbox"][:4]
                cls_id = detection["category_id"]
                if cls_id not in kIds2Clsnames:
                    print(cls_id)  # surface unmapped COCO ids, then skip
                    continue
                row = [minx, miny, maxx, maxy, kIds2Clsnames[cls_id],
                       seconds, nano_seconds, camera_id,
                       detection["detection_score"]]
                ofp.write(kDelimiter.join(str(token) for token in row))
                ofp.write("\n")