From 096251bea921f38b91e34e2b2c38f90e945ed0a0 Mon Sep 17 00:00:00 2001
From: NMiklu
Date: Fri, 13 Oct 2023 12:59:31 -0400
Subject: [PATCH 1/6] Updated logging info | Included colmap_worker test
 boilerplate

---
 colmap/colmap_worker_unit_tests.py | 16 ++++++++++++++++
 colmap/configs/default.txt         |  5 +++++
 colmap/configs/local.txt           |  2 +-
 3 files changed, 22 insertions(+), 1 deletion(-)
 create mode 100644 colmap/colmap_worker_unit_tests.py

diff --git a/colmap/colmap_worker_unit_tests.py b/colmap/colmap_worker_unit_tests.py
new file mode 100644
index 00000000..f945d077
--- /dev/null
+++ b/colmap/colmap_worker_unit_tests.py
@@ -0,0 +1,16 @@
+import unittest
+import colmap_worker
+
+class ColmapWorkerTest(unittest.TestCase):
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def test_data(self):
+        pass
+
+
+if __name__ == "__main__":
+    unittest.main()
\ No newline at end of file
diff --git a/colmap/configs/default.txt b/colmap/configs/default.txt
index e69de29b..ef9728f5 100644
--- a/colmap/configs/default.txt
+++ b/colmap/configs/default.txt
@@ -0,0 +1,5 @@
+#set local run status / run colmap worker with or without webserver
+local_run = True
+
+#Specify input data file path used for local runs ONLY
+input_data_path = data/inputs/input.mp4
\ No newline at end of file
diff --git a/colmap/configs/local.txt b/colmap/configs/local.txt
index 7dae7b13..ef9728f5 100644
--- a/colmap/configs/local.txt
+++ b/colmap/configs/local.txt
@@ -2,4 +2,4 @@
 local_run = True
 
 #Specify input data file path used for local runs ONLY
-input_data_path = data/inputs/video/input.mp4
\ No newline at end of file
+input_data_path = data/inputs/input.mp4
\ No newline at end of file

From 7cdbda655a7304bd98bd0a53518cd3ea9c8dac8a Mon Sep 17 00:00:00 2001
From: NMiklu
Date: Fri, 13 Oct 2023 13:07:42 -0400
Subject: [PATCH 2/6] Logging

---
 colmap/main.py            | 39 +++++++++++++++++++++++++++------------
 colmap/video_to_images.py |  5 +++--
 2 files changed, 30 insertions(+), 14 deletions(-)

diff --git a/colmap/main.py b/colmap/main.py
index 8e1b72e3..3251d69e 100644
--- a/colmap/main.py
+++ b/colmap/main.py
@@ -15,6 +15,8 @@
 import argparse
 import sys
 
+import logging
+
 app = Flask(__name__)
 
 # base_url = "http://host.docker.internal:5000/"
@@ -35,7 +37,7 @@ def to_url(local_file_path: str):
     return base_url + local_file_path
 
 
-def run_full_sfm_pipeline(id, video_file_path, input_data_dir, output_data_dir):
+def run_full_sfm_pipeline(id, video_file_path, input_data_dir, output_data_dir, log):
     # run colmap and save data to custom directory
     # Create output directory under data/output_data_dir/id
     # TODO: use library to fix filepath joining
@@ -46,9 +48,11 @@ def run_full_sfm_pipeline(id, video_file_path, input_data_dir, output_data_dir):
 
     # (1) vid_to_images.py
     imgs_folder = os.path.join(output_path, "imgs")
-    print(video_file_path)
+    #print(video_file_path)
+    log.info("Video file path: {}".format(video_file_path))
+
 
-    split_video_into_frames(video_file_path, imgs_folder, 100)
+    split_video_into_frames(video_file_path, imgs_folder, 100, log)
 
     # imgs are now in output_data_dir/id
     # (2) colmap_runner.py
@@ -94,19 +98,25 @@ def colmap_worker():
     channel.queue_declare(queue="sfm-out")
 
     def process_colmap_job(ch, method, properties, body):
-        print("Starting New Job")
+        #print("Starting New Job")
+        logging.info("Starting New Job")
         print(body.decode())
         job_data = json.loads(body.decode())
         id = job_data["id"]
-        print(f"Running New Job With ID: {id}")
+        #print(f"Running New Job With ID: {id}")
+        logging.info(f"Running New Job With ID: {id}")
+
         # TODO: Handle exceptions and enable streaming to make this safer
         video = requests.get(job_data["file_path"], timeout=10)
-        print("Web server pinged")
+        #print("Web server pinged")
+        logging.info("Web server pinged")
         video_file_path = f"{input_data_dir}{id}.mp4"
-        print("Saving video to: {video_file_path}")
+        #print("Saving video to: {video_file_path}")
+        logging.info(f"Saving video to: {video_file_path}")
         open(video_file_path, "wb").write(video.content)
-        print("Video downloaded")
+        #print("Video downloaded")
+        logging.info("Video downloaded")
 
         # RUNS COLMAP AND CONVERSION CODE
         motion_data, imgs_folder = run_full_sfm_pipeline(
@@ -127,12 +137,14 @@ def process_colmap_job(ch, method, properties, body):
 
         # confirm to rabbitmq job is done
         ch.basic_ack(delivery_tag=method.delivery_tag)
-        print("Job complete")
+        #print("Job complete")
+        logging.info("Job complete")
 
     channel.basic_qos(prefetch_count=1)
     channel.basic_consume(queue="sfm-in", on_message_callback=process_colmap_job)
     channel.start_consuming()
-    print("should not get here")
+    #print("should not get here")
+    logging.critical("should not get here")
 
 if __name__ == "__main__":
     print("~SFM WORKER~")
@@ -141,15 +153,18 @@ def process_colmap_job(ch, method, properties, body):
     Path(f"{input_data_dir}").mkdir(parents=True, exist_ok=True)
     Path(f"{output_data_dir}").mkdir(parents=True, exist_ok=True)
 
+    logging.basicConfig(level=logging.DEBUG,filename="sfm-worker.log",format='%(asctime)s %(message)s',filemode='w')
+
     # Load args from config file
     args = config_parser()
 
     # Local run behavior
     if args.local_run == True:
         motion_data, imgs_folder = run_full_sfm_pipeline(
-            "Local_Test", args.input_data_path, input_data_dir, output_data_dir
+            "Local_Test", args.input_data_path, input_data_dir, output_data_dir, logging.getLogger()
         )
-        print(motion_data)
+        #print(motion_data)
+        logging.info(motion_data)
         json_motion_data = json.dumps(motion_data)
 
     # Standard webserver run behavior
diff --git a/colmap/video_to_images.py b/colmap/video_to_images.py
index 6ac40c61..e4153361 100644
--- a/colmap/video_to_images.py
+++ b/colmap/video_to_images.py
@@ -40,7 +40,7 @@
 # 2 = FileExistsError; happens when you try to create data in an already existing folder
 # 3 = FileNotFoundError; happens when you try to use an output folder that does not exist
 
-def split_video_into_frames(video_path, output_path, max_frames=200):
+def split_video_into_frames(video_path, output_path, max_frames=200, log=None):
     ## determines whether image is blurry or not.
     # uses the variance of a laplacian transform to check for edges and returns true
     # if the variance is less than the threshold and the video is determined to be blurry
@@ -71,7 +71,8 @@ def blurriness(image):
 
     ## sample up to max frame count
     sample_count = min(frame_count,max_frames)
-    print("SAMPLE COUNT:", sample_count)
+    #print("SAMPLE COUNT:", sample_count)
+    log.info("SAMPLE COUNT: {}".format(sample_count))
 
     #print(f"frames = {frame_count}")

From 5d35f8e8fdde15efd2d3643f13c618a8adb81498 Mon Sep 17 00:00:00 2001
From: NMiklu
Date: Tue, 17 Oct 2023 10:41:15 -0400
Subject: [PATCH 3/6] Updated logging of sfm-worker

---
 colmap/colmap_runner.py   | 13 ++++++++---
 colmap/log.py             | 16 ++++++++++++++
 colmap/main.py            | 45 +++++++++++++++++++++++----------------
 colmap/matrix.py          | 35 ++++++++++++++++++++++--------
 colmap/video_to_images.py | 14 +++++++++---
 5 files changed, 90 insertions(+), 33 deletions(-)
 create mode 100644 colmap/log.py

diff --git a/colmap/colmap_runner.py b/colmap/colmap_runner.py
index e3465354..32bee12c 100644
--- a/colmap/colmap_runner.py
+++ b/colmap/colmap_runner.py
@@ -1,6 +1,7 @@
 import subprocess
 import os
 import sys
+import logging
 from pathlib import Path
 
 #Usage: python colmap_runner.py --flags
@@ -39,11 +40,15 @@ def run_colmap(colmap_path, images_path, output_path):
         use_gpu = "false"
     Path(f"{output_path}").mkdir(parents=True, exist_ok=True)
 
+    # sfm-worker logger
+    logger = logging.getLogger('sfm-worker')
+
     #Creating a new database for colmap
     try:
         database_path = output_path + "/database.db"
         subprocess.call([colmap_path, "database_creator", "--database_path", database_path])
-        print("Created DB")
+        #print("Created DB")
+        logger.info("Created DB")
     except:
         return 1
 
@@ -52,13 +57,15 @@ def run_colmap(colmap_path, images_path, output_path):
         # --SiftExtraction.use_gpu=false for docker
         # TODO: make gpu use dynamic
         subprocess.call([colmap_path, "feature_extractor","--ImageReader.camera_model","PINHOLE",f"--SiftExtraction.use_gpu={use_gpu}","--ImageReader.single_camera=1", "--database_path", database_path, "--image_path", images_path])
-        print("Features Extracted")
+        #print("Features Extracted")
+        logger.info("Features Extracted")
     except:
         return 1
 
     #Feature matching
     try:
-        print("Feature Matching")
+        #print("Feature Matching")
+        logger.info("Feature Matching")
         subprocess.call([colmap_path, "exhaustive_matcher",f"--SiftMatching.use_gpu={use_gpu}", "--database_path", database_path])
     except:
         return 1
diff --git a/colmap/log.py b/colmap/log.py
new file mode 100644
index 00000000..f8277927
--- /dev/null
+++ b/colmap/log.py
@@ -0,0 +1,16 @@
+import logging
+
+def sfm_worker_logger(name='root'):
+    """
+    Initializer for a global sfm-worker logger.
+    To initialize use: 'logger = log.sfm_worker_logger(name)'
+    To retrieve in different context: 'logger = logging.getLogger(name)'
+    """
+    formatter = logging.Formatter(fmt='%(asctime)s - %(levelname)s - %(module)s - %(message)s')
+    handler = logging.FileHandler('sfm-worker.log', mode='w')
+    handler.setFormatter(formatter)
+
+    logger = logging.getLogger(name)
+    logger.setLevel(logging.DEBUG)
+    logger.addHandler(handler)
+    return logger
\ No newline at end of file
diff --git a/colmap/main.py b/colmap/main.py
index 3251d69e..8cbde62a 100644
--- a/colmap/main.py
+++ b/colmap/main.py
@@ -16,6 +16,7 @@
 import sys
 
 import logging
+from log import sfm_worker_logger
 
 app = Flask(__name__)
 
@@ -37,7 +38,7 @@ def to_url(local_file_path: str):
     return base_url + local_file_path
 
 
-def run_full_sfm_pipeline(id, video_file_path, input_data_dir, output_data_dir, log):
+def run_full_sfm_pipeline(id, video_file_path, input_data_dir, output_data_dir):
     # run colmap and save data to custom directory
     # Create output directory under data/output_data_dir/id
     # TODO: use library to fix filepath joining
@@ -46,22 +47,27 @@ def run_full_sfm_pipeline(id, video_file_path, input_data_dir, output_data_dir):
     output_path = output_data_dir + id
     Path(f"{output_path}").mkdir(parents=True, exist_ok=True)
 
+    # Get logger
+    logger = logging.getLogger('sfm-worker')
+
     # (1) vid_to_images.py
     imgs_folder = os.path.join(output_path, "imgs")
     #print(video_file_path)
-    log.info("Video file path: {}".format(video_file_path))
+    logger.info("Video file path: {}".format(video_file_path))
 
-    split_video_into_frames(video_file_path, imgs_folder, 100, log)
+    split_video_into_frames(video_file_path, imgs_folder, 100)
 
     # imgs are now in output_data_dir/id
     # (2) colmap_runner.py
     colmap_path = "/usr/local/bin/colmap"
     status = run_colmap(colmap_path, imgs_folder, output_path)
     if status == 0:
-        print("COLMAP ran successfully.")
+        #print("COLMAP ran successfully.")
+        logger.info("COLMAP ran successfully.")
     elif status == 1:
-        print("ERROR: There was an unknown error running COLMAP")
+        #print("ERROR: There was an unknown error running COLMAP")
+        logger.error("There was an unknown error running COLMAP")
 
     # (3) matrix.py
     initial_motion_path = os.path.join(output_path, "images.txt")
@@ -85,6 +91,7 @@ def colmap_worker():
     Path(f"{input_data_dir}").mkdir(parents=True, exist_ok=True)
     Path(f"{output_data_dir}").mkdir(parents=True, exist_ok=True)
 
+    logger = logging.getLogger('sfm-worker')
     rabbitmq_domain = "rabbitmq"
     credentials = pika.PlainCredentials("admin", "password123")
 
@@ -98,25 +105,27 @@ def colmap_worker():
     channel.queue_declare(queue="sfm-out")
 
     def process_colmap_job(ch, method, properties, body):
+        logger = logging.getLogger('sfm-worker')
         #print("Starting New Job")
-        logging.info("Starting New Job")
-        print(body.decode())
+        logger.info("Starting New Job")
+        #print(body.decode())
+        logger.info(body.decode())
         job_data = json.loads(body.decode())
         id = job_data["id"]
         #print(f"Running New Job With ID: {id}")
-        logging.info(f"Running New Job With ID: {id}")
+        logger.info(f"Running New Job With ID: {id}")
 
         # TODO: Handle exceptions and enable streaming to make this safer
         video = requests.get(job_data["file_path"], timeout=10)
         #print("Web server pinged")
-        logging.info("Web server pinged")
+        logger.info("Web server pinged")
         video_file_path = f"{input_data_dir}{id}.mp4"
         #print("Saving video to: {video_file_path}")
-        logging.info(f"Saving video to: {video_file_path}")
+        logger.info(f"Saving video to: {video_file_path}")
         open(video_file_path, "wb").write(video.content)
#print("Video downloaded") - logging.info("Video downloaded") + logger.info("Video downloaded") # RUNS COLMAP AND CONVERSION CODE motion_data, imgs_folder = run_full_sfm_pipeline( @@ -138,22 +147,23 @@ def process_colmap_job(ch, method, properties, body): # confirm to rabbitmq job is done ch.basic_ack(delivery_tag=method.delivery_tag) #print("Job complete") - logging.info("Job complete") + logger.info("Job complete") channel.basic_qos(prefetch_count=1) channel.basic_consume(queue="sfm-in", on_message_callback=process_colmap_job) channel.start_consuming() #print("should not get here") - logging.critical("should not get here") + logger.critical("should not get here") if __name__ == "__main__": - print("~SFM WORKER~") + logger = sfm_worker_logger('sfm-worker') + logger.info("~SFM WORKER~") + input_data_dir = "data/inputs/" output_data_dir = "data/outputs/" Path(f"{input_data_dir}").mkdir(parents=True, exist_ok=True) Path(f"{output_data_dir}").mkdir(parents=True, exist_ok=True) - logging.basicConfig(level=logging.DEBUG,filename="sfm-worker.log",format='%(asctime)s %(message)s',filemode='w') # Load args from config file args = config_parser() @@ -161,10 +171,9 @@ def process_colmap_job(ch, method, properties, body): # Local run behavior if args.local_run == True: motion_data, imgs_folder = run_full_sfm_pipeline( - "Local_Test", args.input_data_path, input_data_dir, output_data_dir, logging.getLogger() - ) + "Local_Test", args.input_data_path, input_data_dir, output_data_dir) #print(motion_data) - logging.info(motion_data) + logger.info(motion_data) json_motion_data = json.dumps(motion_data) # Standard webserver run behavior diff --git a/colmap/matrix.py b/colmap/matrix.py index d4de8812..4a824181 100644 --- a/colmap/matrix.py +++ b/colmap/matrix.py @@ -22,6 +22,7 @@ import image_position_extractor import json import os +import logging # https://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles @@ -122,6 +123,8 @@ def rotation_matrix_from_vectors(vec1, vec2): def get_extrinsic(center_point, fp: str = "parsed_data.csv"): + # sfm-worker logger + logger = logging.getLogger('sfm-worker') # contrains filepath and extrinsic matrix filepaths = [] @@ -173,10 +176,12 @@ def get_extrinsic(center_point, fp: str = "parsed_data.csv"): # stack all extrinsic to perform faster transformations to the whole stack extrinsic_matrices = np.stack(extrinsic_matrices,axis=0) - print(extrinsic_matrices.shape) + #print(extrinsic_matrices.shape) + logger.info(extrinsic_matrices.shape) avg_y_axis = np.sum(extrinsic_matrices[:,0:3,1], axis=0) avg_y_axis = avg_y_axis/np.linalg.norm(avg_y_axis) - print("Consensus Y axis: ",avg_y_axis) + #print("Consensus Y axis: ",avg_y_axis) + #logger.info("Consensus Y axis: ",avg_y_axis) # Find a matrix to rotate the average y axis with the y-axis unit vector thus aligning every extrinsic to point in the same direction Rot = np.zeros((4,4)) @@ -189,8 +194,10 @@ def get_extrinsic(center_point, fp: str = "parsed_data.csv"): # Adjust extrinsic to center around the central point #center_point = np.average(extrinsic_matrices[:,0:3,3],axis=0) - print(center_point.shape) - print("center point ",center_point) + #print(center_point.shape) + logger.info(center_point.shape) + #print("center point ",center_point) + logger.info("center point {}".format(center_point)) extrinsic_matrices[:,0:3,3] -= center_point # Z offset assuming cameras are never below the object @@ -199,15 +206,21 @@ def get_extrinsic(center_point, fp: str = "parsed_data.csv"): # Normalize extrinsic transformation 
     translation_magnitudes = np.linalg.norm(extrinsic_matrices[:,0:3,3],axis=1)
     avg_translation_magnitude = np.average(translation_magnitudes)
-    print("Translation mag: ",avg_translation_magnitude)
+    #print("Translation mag: ",avg_translation_magnitude)
+    logger.info("Translation mag: {}".format(avg_translation_magnitude))
     extrinsic_matrices[:,0:3,3] /= avg_translation_magnitude
 
     # scale back up TODO: make dynamic
     extrinsic_matrices[:,0:3,3] *= 4
 
-    print("Max ",extrinsic_matrices[:,0:3,3].max())
-    print("Min ",extrinsic_matrices[:,0:3,3].min())
-    print("avg ",np.average(extrinsic_matrices[:,0:3,3]))
+    #print("Max ",extrinsic_matrices[:,0:3,3].max())
+    #print("Min ",extrinsic_matrices[:,0:3,3].min())
+    #print("avg ",np.average(extrinsic_matrices[:,0:3,3]))
+
+    logger.info("Max {}".format(extrinsic_matrices[:,0:3,3].max()))
+    logger.info("Min {}".format(extrinsic_matrices[:,0:3,3].min()))
+    logger.info("avg {}".format(np.average(extrinsic_matrices[:,0:3,3])))
+
 
     # Convert to json
     frames = []
@@ -251,6 +264,9 @@ def get_intrinsic(fp: str = "cameras.txt"):
 
 # COLMAP TO NDC
 def get_extrinsics_center(fp: str = "points3D.txt"):
+    # sfm-worker logger
+    logger = logging.getLogger('sfm-worker')
+
     infile = open(fp, "r")
     lines = infile.readlines()
     point_count = 0
@@ -267,7 +283,8 @@ def get_extrinsics_center(fp: str = "points3D.txt"):
             point_count+=1
 
     central_point /= point_count
-    print("Central point: ", central_point)
+    #print("Central point: ", central_point)
+    logger.info("Central point: {}".format(central_point))
 
     return central_point
 
diff --git a/colmap/video_to_images.py b/colmap/video_to_images.py
index e4153361..a20f8911 100644
--- a/colmap/video_to_images.py
+++ b/colmap/video_to_images.py
@@ -2,6 +2,7 @@
 import subprocess
 import os
 import sys
+import logging
 from pathlib import Path
 
 # new imports
@@ -40,10 +41,14 @@
 # 2 = FileExistsError; happens when you try to create data in an already existing folder
 # 3 = FileNotFoundError; happens when you try to use an output folder that does not exist
 
-def split_video_into_frames(video_path, output_path, max_frames=200, log=None):
+def split_video_into_frames(video_path, output_path, max_frames=200):
     ## determines whether image is blurry or not.
     # uses the variance of a laplacian transform to check for edges and returns true
     # if the variance is less than the threshold and the video is determined to be blurry
+
+    # Get Logger:
+    logger = logging.getLogger('sfm-worker')
+
     def is_blurry(image, THRESHOLD):
         ## Convert image to grayscale
         gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
@@ -71,7 +76,7 @@ def blurriness(image):
 
     ## sample up to max frame count
     sample_count = min(frame_count,max_frames)
     #print("SAMPLE COUNT:", sample_count)
-    log.info("SAMPLE COUNT: {}".format(sample_count))
+    logger.info("SAMPLE COUNT: {}".format(sample_count))
 
     #print(f"frames = {frame_count}")
 
@@ -158,7 +163,8 @@ def blurriness(image):
         if (needs_adjust == True):
             image = cv2.resize(image, dimensions, interpolation=cv2.INTER_LANCZOS4)
         cv2.imwrite(f"{output_path}/img_{count}.png", image)
-        print('Saved image ', count)
+        #print("Saved image {}".format(count))
+        logger.info("Saved image {}".format(count))
 
         success, image = vidcap.read()
         count += 1
@@ -206,8 +212,10 @@ def test():
             print("ERROR: Unrecognized flag", sys.argv[i])
             quit()"""
 
+    # Calling split_video_into_frames
     status = split_video_into_frames(instance_name, output_path, ffmpeg_path, video_path, wanted_frames=200)
 
+
     if status == 0:
         print("ffmpeg ran successfully.")
     elif status == 1:

From f9419399a78159c084ea3be40e036af76559868a Mon Sep 17 00:00:00 2001
From: NMiklu
Date: Tue, 17 Oct 2023 16:29:16 -0400
Subject: [PATCH 4/6] Logging comments | updated logging info

---
 colmap/log.py             |  1 +
 colmap/main.py            | 26 ++++++++++++--------------
 colmap/matrix.py          | 13 ++-----------
 colmap/video_to_images.py |  8 --------
 4 files changed, 15 insertions(+), 33 deletions(-)

diff --git a/colmap/log.py b/colmap/log.py
index f8277927..42bcd77e 100644
--- a/colmap/log.py
+++ b/colmap/log.py
@@ -3,6 +3,7 @@ def sfm_worker_logger(name='root'):
     """
     Initializer for a global sfm-worker logger.
+
     To initialize use: 'logger = log.sfm_worker_logger(name)'
     To retrieve in different context: 'logger = logging.getLogger(name)'
diff --git a/colmap/main.py b/colmap/main.py
index 8cbde62a..0186b12d 100644
--- a/colmap/main.py
+++ b/colmap/main.py
@@ -52,9 +52,8 @@ def run_full_sfm_pipeline(id, video_file_path, input_data_dir, output_data_dir):
 
     # (1) vid_to_images.py
     imgs_folder = os.path.join(output_path, "imgs")
-    #print(video_file_path)
 
+    logger.info("Video file path: {}".format(video_file_path))
     split_video_into_frames(video_file_path, imgs_folder, 100)
 
     # imgs are now in output_data_dir/id
     # (2) colmap_runner.py
     colmap_path = "/usr/local/bin/colmap"
     status = run_colmap(colmap_path, imgs_folder, output_path)
     if status == 0:
-        #print("COLMAP ran successfully.")
         logger.info("COLMAP ran successfully.")
     elif status == 1:
-        #print("ERROR: There was an unknown error running COLMAP")
         logger.error("There was an unknown error running COLMAP")
 
     # (3) matrix.py
@@ -91,7 +88,7 @@ def colmap_worker():
     Path(f"{input_data_dir}").mkdir(parents=True, exist_ok=True)
     Path(f"{output_data_dir}").mkdir(parents=True, exist_ok=True)
 
-    logger = logging.getLogger('sfm-worker')
+    logger = sfm_worker_logger('sfm-worker')
 
     rabbitmq_domain = "rabbitmq"
     credentials = pika.PlainCredentials("admin", "password123")
@@ -106,25 +103,24 @@ def colmap_worker():
 
     def process_colmap_job(ch, method, properties, body):
         logger = logging.getLogger('sfm-worker')
-        #print("Starting New Job")
+
         logger.info("Starting New Job")
-        #print(body.decode())
         logger.info(body.decode())
         job_data = json.loads(body.decode())
         id = job_data["id"]
-        #print(f"Running New Job With ID: {id}")
+
         logger.info(f"Running New Job With ID: {id}")
 
         # TODO: Handle exceptions and enable streaming to make this safer
         video = requests.get(job_data["file_path"], timeout=10)
-        #print("Web server pinged")
+
         logger.info("Web server pinged")
         video_file_path = f"{input_data_dir}{id}.mp4"
-        #print("Saving video to: {video_file_path}")
+
         logger.info(f"Saving video to: {video_file_path}")
         open(video_file_path, "wb").write(video.content)
-        #print("Video downloaded")
+
         logger.info("Video downloaded")
 
         # RUNS COLMAP AND CONVERSION CODE
@@ -142,16 +138,19 @@ def process_colmap_job(ch, method, properties, body):
 
         # confirm to rabbitmq job is done
         ch.basic_ack(delivery_tag=method.delivery_tag)
-        #print("Job complete")
+
         logger.info("Job complete")
 
     channel.basic_qos(prefetch_count=1)
     channel.basic_consume(queue="sfm-in", on_message_callback=process_colmap_job)
     channel.start_consuming()
-    #print("should not get here")
+
     logger.critical("should not get here")
 
 if __name__ == "__main__":
+    """
+    NOTE: when run as a script, the logger is initialized here and writes to the working directory.
+ """ logger = sfm_worker_logger('sfm-worker') logger.info("~SFM WORKER~") @@ -172,7 +171,6 @@ def process_colmap_job(ch, method, properties, body): if args.local_run == True: motion_data, imgs_folder = run_full_sfm_pipeline( "Local_Test", args.input_data_path, input_data_dir, output_data_dir) - #print(motion_data) logger.info(motion_data) json_motion_data = json.dumps(motion_data) diff --git a/colmap/matrix.py b/colmap/matrix.py index 4a824181..c284ea67 100644 --- a/colmap/matrix.py +++ b/colmap/matrix.py @@ -176,12 +176,11 @@ def get_extrinsic(center_point, fp: str = "parsed_data.csv"): # stack all extrinsic to perform faster transformations to the whole stack extrinsic_matrices = np.stack(extrinsic_matrices,axis=0) - #print(extrinsic_matrices.shape) + logger.info(extrinsic_matrices.shape) avg_y_axis = np.sum(extrinsic_matrices[:,0:3,1], axis=0) avg_y_axis = avg_y_axis/np.linalg.norm(avg_y_axis) - #print("Consensus Y axis: ",avg_y_axis) - #logger.info("Consensus Y axis: ",avg_y_axis) + # Find a matrix to rotate the average y axis with the y-axis unit vector thus aligning every extrinsic to point in the same direction Rot = np.zeros((4,4)) @@ -194,9 +193,7 @@ def get_extrinsic(center_point, fp: str = "parsed_data.csv"): # Adjust extrinsic to center around the central point #center_point = np.average(extrinsic_matrices[:,0:3,3],axis=0) - #print(center_point.shape) logger.info(center_point.shape) - #print("center point ",center_point) logger.info("center point {}".format(center_point)) extrinsic_matrices[:,0:3,3] -= center_point @@ -206,17 +203,12 @@ def get_extrinsic(center_point, fp: str = "parsed_data.csv"): # Normalize extrinsic transformation to remain within bounding box translation_magnitudes = np.linalg.norm(extrinsic_matrices[:,0:3,3],axis=1) avg_translation_magnitude = np.average(translation_magnitudes) - #print("Translation mag: ",avg_translation_magnitude) logger.info("Translation mag: {}".format(avg_translation_magnitude)) extrinsic_matrices[:,0:3,3] /= avg_translation_magnitude # scale back up TODO: make dynamic extrinsic_matrices[:,0:3,3] *= 4 - #print("Max ",extrinsic_matrices[:,0:3,3].max()) - #print("Min ",extrinsic_matrices[:,0:3,3].min()) - #print("avg ",np.average(extrinsic_matrices[:,0:3,3])) - logger.info("Max {}".format(extrinsic_matrices[:,0:3,3].max())) logger.info("Min {}".format(extrinsic_matrices[:,0:3,3].min())) logger.info("avg {}".format(np.average(extrinsic_matrices[:,0:3,3]))) @@ -283,7 +275,6 @@ def get_extrinsics_center(fp: str = "points3D.txt"): point_count+=1 central_point /= point_count - #print("Central point: ", central_point) logger.info("Central point: {}".format(central_point)) return central_point diff --git a/colmap/video_to_images.py b/colmap/video_to_images.py index a20f8911..b6304215 100644 --- a/colmap/video_to_images.py +++ b/colmap/video_to_images.py @@ -76,10 +76,8 @@ def blurriness(image): ## sample up to max frame count sample_count = min(frame_count,max_frames) - #print("SAMPLE COUNT:", sample_count) logger.info("SAMPLE COUNT {}".format(sample_count)) - #print(f"frames = {frame_count}") success, image = vidcap.read() img_height = image.shape[0] @@ -124,9 +122,6 @@ def blurriness(image): needs_adjust = False ## determines if we need to adjust aspect_ratio = img_height / img_width - #print (f"aspect ratio: {aspect_ratio}") - #print (f"img_width: {img_width}") - #print (f"img_height: {img_height}") ## adjust as necessaryx MAX_WIDTH = 200 MAX_HEIGHT = 200 @@ -148,8 +143,6 @@ def blurriness(image): else: img_height = (int) (img_height * 
-        #print(f"new img height: {img_height}")
-        #print(f"new img width: {img_width}")
 
         dimensions = (img_width, img_height)
@@ -163,7 +156,6 @@ def blurriness(image):
         if (needs_adjust == True):
             image = cv2.resize(image, dimensions, interpolation=cv2.INTER_LANCZOS4)
         cv2.imwrite(f"{output_path}/img_{count}.png", image)
-        #print("Saved image {}".format(count))
         logger.info("Saved image {}".format(count))
 
         success, image = vidcap.read()

From f18f798712248fe47755fc96b9a5954bfd52392b Mon Sep 17 00:00:00 2001
From: NMiklu
Date: Tue, 17 Oct 2023 16:54:07 -0400
Subject: [PATCH 5/6] dos2unix

---
 colmap/log.py | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/colmap/log.py b/colmap/log.py
index 42bcd77e..dbc0d8aa 100644
--- a/colmap/log.py
+++ b/colmap/log.py
@@ -8,10 +8,17 @@ def sfm_worker_logger(name='root'):
     To retrieve in different context: 'logger = logging.getLogger(name)'
     """
     formatter = logging.Formatter(fmt='%(asctime)s - %(levelname)s - %(module)s - %(message)s')
-    handler = logging.FileHandler('sfm-worker.log', mode='w')
+    handler = logging.FileHandler(name+'.log', mode='w')
     handler.setFormatter(formatter)
 
     logger = logging.getLogger(name)
     logger.setLevel(logging.DEBUG)
     logger.addHandler(handler)
-    return logger
\ No newline at end of file
+    return logger
+
+if __name__ == "__main__":
+    theta = sfm_worker_logger('sfm-worker-test')
+    theta.info("info message")
+    theta.warning("warning message")
+    theta.error("error message")
+    theta.critical("critical message")
\ No newline at end of file

From 011ca87bbbf201b13abfda17bdbab59a00a8d9dd Mon Sep 17 00:00:00 2001
From: NMiklu
Date: Fri, 10 Nov 2023 16:18:04 -0500
Subject: [PATCH 6/6] Updated sfm-worker logging

---
 colmap/colmap_runner.py | 18 ++++++++++++++----
 1 file changed, 14 insertions(+), 4 deletions(-)

diff --git a/colmap/colmap_runner.py b/colmap/colmap_runner.py
index 32bee12c..92df1bce 100644
--- a/colmap/colmap_runner.py
+++ b/colmap/colmap_runner.py
@@ -43,13 +43,18 @@ def run_colmap(colmap_path, images_path, output_path):
 
     # sfm-worker logger
     logger = logging.getLogger('sfm-worker')
+
+    logger.info("run_colmap()-colmap_path: " + colmap_path)
+    logger.info("run_colmap()-images_path: " + images_path)
+    logger.info("run_colmap()-output_path: " + output_path)
+
     #Creating a new database for colmap
     try:
         database_path = output_path + "/database.db"
         subprocess.call([colmap_path, "database_creator", "--database_path", database_path])
-        #print("Created DB")
         logger.info("Created DB")
     except:
+        logger.error("DB creation failed")
         return 1
 
     #Feature extracting
@@ -57,32 +62,37 @@ def run_colmap(colmap_path, images_path, output_path):
         # --SiftExtraction.use_gpu=false for docker
        # TODO: make gpu use dynamic
         subprocess.call([colmap_path, "feature_extractor","--ImageReader.camera_model","PINHOLE",f"--SiftExtraction.use_gpu={use_gpu}","--ImageReader.single_camera=1", "--database_path", database_path, "--image_path", images_path])
-        #print("Features Extracted")
         logger.info("Features Extracted")
     except:
+        logger.error("Feature extraction failed")
         return 1
 
     #Feature matching
     try:
-        #print("Feature Matching")
-        logger.info("Feature Matching")
         subprocess.call([colmap_path, "exhaustive_matcher",f"--SiftMatching.use_gpu={use_gpu}", "--database_path", database_path])
+        logger.info("Features Matched")
     except:
+        logger.error("Feature matching failed")
         return 1
 
     #Generating model
     try:
         subprocess.call([colmap_path, "mapper", "--database_path", database_path, "--image_path", images_path, "--output_path", output_path])
+        logger.info("Model generated")
logger.info("Model generated") except: + logger.error("Model unable to be generated") return 1 #Getting model as text try: # TODO: no longer works on windows fix file paths or run in docker subprocess.call([colmap_path, "model_converter", "--input_path", output_path + r"/0", "--output_path", output_path, "--output_type", "TXT"]) + logger.info("Model as text successful") except: + logger.error("Model as text unsuccessful") return 1 + logger.info("run_colmap successfully executed") return 0