diff --git a/.dockerignore b/.dockerignore
index 057c696..f4ac0c1 100644
--- a/.dockerignore
+++ b/.dockerignore
@@ -25,4 +25,4 @@
 Dockerfile
 .vscode/
 results/*
-uploads/*
+
diff --git a/.gitignore b/.gitignore
index d74879c..c958bf3 100644
--- a/.gitignore
+++ b/.gitignore
@@ -16,7 +16,6 @@
 __pycache__
 **/*_cache
 results/*
-uploads/*
 .DS_Store
 
 src/ansible/roles/isaac/files/autorun.sh
diff --git a/Dockerfile b/Dockerfile
index e54a178..95f45a2 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -20,7 +20,9 @@ RUN apt-get update && apt-get install -qy \
     curl \
     wget \
     gpg \
-    jq
+    jq \
+    vim \
+    nano
 
 # hashicorp sources
 RUN wget -O- https://apt.releases.hashicorp.com/gpg | \
@@ -94,4 +96,5 @@
 WORKDIR /app
 
 ENTRYPOINT [ "/bin/sh", "-c" ]
+
 ENV VERSION="v3.0.0"
diff --git a/README.md b/README.md
index d6ded7e..97dec71 100644
--- a/README.md
+++ b/README.md
@@ -6,6 +6,10 @@ Isaac Automator allows a quick deployment of Isaac Sim and Isaac Sim-based appli
 The result is a fully configured remote-desktop cloud workstation, which can be used for development and testing of the robotic applications within minutes and on a budget. Isaac Automator supports varierty of GPU instances and stop-start functionality to save on cloud costs, provides tools to aid the workflow (uploading and downloading data, autorun, deployment management, etc).
 
+Isaac Sim Automator allows for quick deployment of Isaac Sim and Isaac Sim-based applications (like Orbit and Omniverse Isaac Gym Environments) onto the public clouds (AWS, GCP, Azure, and Alibaba Cloud are currently supported).
+
+The result is a fully configured remote-desktop cloud workstation, which can be used for development and testing of robotic applications within minutes and on a budget. Isaac Sim Automator supports a variety of GPU instances and stop-start functionality to save on cloud costs, and provides a variety of tools to aid the workflow (like uploading and downloading data, autorun, deployment management, etc.).
+
 - [Installation](#installation)
   - [Installing Docker](#installing-docker)
   - [Obtaining NGC API Key](#obtaining-ngc-api-key)
@@ -136,13 +140,19 @@
 You will need _AWS Access Key_ and _AWS Secret Key_ for an existing account. You can obtain those in Identity and Access Management (IAM) Section in the AWS console.
 
-If yoou have completed the above steps or already have your permissions and credentials set up, run the following command in the project root directory:
+If you have completed the above steps or already have your permissions and credentials set up, run the following command in the project root directory:
 
 ```sh
 # enter the automator container
 ./run
 
 # inside container:
 ./deploy-aws
+
+## To run with user-filled values ##
+# Please make sure to fill in the details inside the file "sdg-deploy.txt".
+## Update the "credentials" file under the "uploads/" folder with the AWS access and secret keys to be passed to the Isaac Sim container for uploading new data to S3.
+
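+# (sdg-deploy.txt ships with empty values for --deployment-name, --region,
+# --ngc-api-key, --aws-access-key-id and --aws-secret-access-key; xargs
+# expands its lines into arguments for ./deploy-aws)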
+cat sdg-deploy.txt | xargs ./deploy-aws
 ```
 
 Tip: Run `./deploy-aws --help` to see more options.
diff --git a/sdg-deploy.txt b/sdg-deploy.txt
new file mode 100644
index 0000000..8ce2776
--- /dev/null
+++ b/sdg-deploy.txt
@@ -0,0 +1,14 @@
+--deployment-name=
+--region ''
+--isaac
+--isaac-instance-type 'g5.12xlarge'
+--isaac-image 'nvcr.io/nvidia/isaac-sim:4.0.0'
+--oige 'no'
+--orbit 'no'
+--isaaclab 'no'
+--ngc-api-key ''
+--ngc-api-key-check
+--aws-access-key-id ''
+--aws-secret-access-key ''
+--no-ovami
+--existing 'ask'
diff --git a/src/ansible/roles/isaac/templates/isaacsim.sh b/src/ansible/roles/isaac/templates/isaacsim.sh
index b5c0327..d9e53ff 100755
--- a/src/ansible/roles/isaac/templates/isaacsim.sh
+++ b/src/ansible/roles/isaac/templates/isaacsim.sh
@@ -142,6 +142,7 @@ docker run \
     \
     -v "${OUT_DIR}":/results \
     -v "${UPLOADS_DIR}":/uploads \
+    -v "${UPLOADS_DIR}/credentials":/root/.aws/credentials \
     -v "${WORKSPACE_DIR}":/workspace \
     \
     -v "/tmp/.X11-unix:/tmp/.X11-unix" \
diff --git a/uploads/autorun.sh b/uploads/autorun.sh
index 7d27cbb..56a5a32 100644
--- a/uploads/autorun.sh
+++ b/uploads/autorun.sh
@@ -6,5 +6,8 @@
 
 # replace with your own command
 # for example:
-# ~/isaacsim.sh --cmd="/isaac-sim/kit/kit /isaac-sim/apps/omni.isaac.sim.kit --allow-root"
-~/isaacsim.sh
+
+# ~ubuntu/Desktop/isaacsim.sh --cmd="/isaac-sim/kit/kit /isaac-sim/apps/omni.isaac.sim.kit --allow-root"
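+# the command below runs /uploads/pass-creds-to-container.sh (which appends the
+# AWS credential env vars to ~/.bashrc inside the container), installs vim and
+# ffmpeg (ffmpeg is required by /uploads/post_processing.py for video creation),
+# then starts the Isaac Sim kit app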
+~/isaacsim.sh --cmd="bash /uploads/pass-creds-to-container.sh ; apt-get update && apt-get install -y vim ffmpeg && /isaac-sim/kit/kit /isaac-sim/apps/omni.isaac.sim.kit --ext-folder /isaac-sim/apps --allow-root"
+
+
diff --git a/uploads/create_image_metadata.py b/uploads/create_image_metadata.py
new file mode 100644
index 0000000..36687ed
--- /dev/null
+++ b/uploads/create_image_metadata.py
@@ -0,0 +1,49 @@
+'''
+Script for generating the image metadata file ('imageMetadata.json') from a calibration file.
+
+The generated file follows the schema required by the rtls app.
+'''
+
+import json
+import os
+import argparse
+
+image_metadata_file_name = "imageMetadata.json"
+metadata_dict = {"images": [{"place": "", "view": "plan-view", "fileName": "Top.png"}]}
+
+def generate_image_metadata(calibration_file_path, output_folder_path=None):
+    calibration_json = None
+    with open(calibration_file_path) as calibration_json_file:
+        calibration_json = json.load(calibration_json_file)
+
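+    # build a path-like location string from the first sensor's 'place' entries;
+    # e.g. with hypothetical calibration values [{"name": "building", "value": "B1"},
+    # {"name": "floor", "value": "F2"}] this yields "building=B1/floor=F2"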
+    location_string = '/'.join(["{}={}".format(item['name'], item['value']) for item in calibration_json["sensors"][0]['place']])
+
+    if not location_string:
+        print("Could not construct {} file from calibration file {}.".format(image_metadata_file_name, calibration_file_path))
+        return
+
+    metadata_dict["images"][0]["place"] = location_string
+
+    # Write output to file
+    if output_folder_path:
+        output_file_path = os.path.join(output_folder_path, image_metadata_file_name)
+    else:
+        curr_dir = os.getcwd()
+        output_file_path = os.path.join(curr_dir, image_metadata_file_name)
+
+    with open(output_file_path, "w") as outfile:
+        json.dump(metadata_dict, outfile)
+
+def get_args():
+    parser = argparse.ArgumentParser("Image Metadata Parser")
+    parser.add_argument('-c', '--calibration_file_path', required=True, help='Path of calibration file to read metadata information from.')
+    parser.add_argument('-d', '--destination_folder', required=False, help='Destination folder to write to.')
+    args = parser.parse_args()
+    return args
+
+def main():
+    args = get_args()
+    generate_image_metadata(args.calibration_file_path, args.destination_folder)
+
+if __name__ == "__main__":
+    main()
diff --git a/uploads/credentials b/uploads/credentials
new file mode 100644
index 0000000..948ea4b
--- /dev/null
+++ b/uploads/credentials
@@ -0,0 +1,3 @@
+[default]
+aws_access_key_id =
+aws_secret_access_key =
diff --git a/uploads/install-aws-cli.sh b/uploads/install-aws-cli.sh
new file mode 100644
index 0000000..dffcd7a
--- /dev/null
+++ b/uploads/install-aws-cli.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+
+# SPDX-FileCopyrightText: Copyright (c) 2024 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
+# SPDX-License-Identifier: LicenseRef-NvidiaProprietary
+#
+# NVIDIA CORPORATION, its affiliates and licensors retain all intellectual
+# property and proprietary rights in and to this material, related
+# documentation and any modifications thereto. Any use, reproduction,
+# disclosure or distribution of this material and related documentation
+# without an express license agreement from NVIDIA CORPORATION or
+# its affiliates is strictly prohibited.
+
+if ! hash aws 2>/dev/null; then
+  echo "Installing awscli"
+  {
+    rm -f /tmp/awscliv2.zip
+    curl --silent "https://awscli.amazonaws.com/awscli-exe-linux-x86_64.zip" -o /tmp/awscliv2.zip
+    rm -rf /tmp/aws
+    if ! hash unzip 2>/dev/null; then
+      apt-get -y update && apt-get -y install unzip
+    fi
+    unzip /tmp/awscliv2.zip -d /tmp
+    /tmp/aws/install
+  } > /dev/null
+fi
diff --git a/uploads/pass-creds-to-container.sh b/uploads/pass-creds-to-container.sh
new file mode 100644
index 0000000..5a3d163
--- /dev/null
+++ b/uploads/pass-creds-to-container.sh
@@ -0,0 +1,3 @@
+echo 'export AWS_CONFIG_FILE=/uploads/credentials' >> $HOME/.bashrc
+
+echo 'export AWS_SHARED_CREDENTIALS_FILE=/uploads/credentials' >> $HOME/.bashrc
diff --git a/uploads/post_processing.py b/uploads/post_processing.py
new file mode 100644
index 0000000..a5ec696
--- /dev/null
+++ b/uploads/post_processing.py
@@ -0,0 +1,133 @@
+import logging
+import boto3
+from botocore.exceptions import ClientError
+import os
+import argparse
+import glob
+import sys
+from subprocess import PIPE, STDOUT, Popen
+from pathlib import Path
+logging.basicConfig(stream=sys.stdout, level=logging.INFO)
+
+
+def upload_file(file_name, bucket, object_name=None):
+    """Upload a file to an S3 bucket
+
+    :param file_name: File to upload
+    :param bucket: Bucket to upload to
+    :param object_name: S3 object name. If not specified then file_name is used
+    :return: True if file was uploaded, else False
+    """
+
+    # If S3 object_name was not specified, use file_name
+    if not object_name:
+        object_name = os.path.basename(file_name)
+
+    # Upload the file
+    s3_client = boto3.client('s3')
+    try:
+        response = s3_client.upload_file(file_name, bucket, object_name)
+    except ClientError as e:
+        logging.error(e)
+        return False
+    return True
+
+
+def build_videos(source_directory):
+    """Builds videos from source_directory. The source directory must be the parent folder of the ground truth generated using the ORA extension.
+
+    :param source_directory: Source directory containing ground truth generated using the ORA extension.
+    :return: None
+    """
+    if os.path.isdir(source_directory):
+        # Grab all the rgb folders in the source directory
+        rgb_folders = glob.glob("{}/*/rgb/".format(source_directory))
+        build_process_list = []
+        curr_dir = os.getcwd()
+        logging.info("Creating videos from images in source_directory - {}. This might take a while based on the length of video to be generated. Logs for video creation are at {}.".format(source_directory, os.path.join(curr_dir, "video_creation_logs")))
+        try:
+            for folder in rgb_folders:
+                # Get parent folder name
+                rgb_path = Path(folder)
+                camera_folder_name = os.path.basename(rgb_path.parent.absolute())
+
+                # Split till the second occurrence of '_' and use the last portion of the string. That is, go from World_Cameras_Camera_01 to Camera_01
+                camera_folder_name = camera_folder_name.split('_', 2)[-1]
+
+                # Create log folder and log files for video generation
+                log_file_path = os.path.join(curr_dir, "video_creation_logs", "{}.log".format(camera_folder_name))
+                os.makedirs(os.path.dirname(log_file_path), exist_ok=True)
+                log_file = open(log_file_path, "w")
+
+                # Run ffmpeg command for creating videos
+                build_process = Popen(
+                    "ffmpeg -nostdin -r 30 -f image2 -s 1920x1080 -start_number 0 -y -i {}/%d.jpeg -vcodec libx264 -crf 23 -x264opts 'bframes=0:keyint=30' -pix_fmt yuv420p {}/{}.mp4".format(
+                        folder, folder, camera_folder_name
+                    ),
+                    shell=True, stdout=log_file, stderr=log_file
+                )
+                build_process_list.append(build_process)
+
+            # Wait for all ffmpeg processes to finish
+            for process in build_process_list:
+                process.wait()
+            logging.info("Finished creating videos from images.")
+        except Exception as e:
+            logging.error("Could not run ffmpeg command due to error - {}. \n Note, this operation requires ffmpeg to be installed".format(e))
+    else:
+        logging.error("Invalid source_directory passed.")
+
+
+def get_args():
+    parser = argparse.ArgumentParser("SDG Utils")
+    parser.add_argument('-sd', '--source_directory', required=False, help='Path to source folder to copy.')
+    parser.add_argument('-bu', '--build', required=False, action='store_true', help='Build videos in source folder.')
+    parser.add_argument('-f', '--format_to_copy', required=False, help='Format of files to match and upload; can be left empty for a single file.')
+    parser.add_argument('-dd', '--destination_directory', required=False, help='Destination folder in the S3 bucket.')
+    parser.add_argument('-sf', '--source_file', required=False, help='Path to source file to copy.')
+    parser.add_argument('-df', '--destination_file', required=False, help='Destination file in the S3 bucket.')
+    parser.add_argument('-b', '--bucket', required=False, help='S3 bucket to copy data to.')
+    args = parser.parse_args()
+    return args
+
+def main():
+    args = get_args()
+    source_directory = args.source_directory
+    destination_directory = args.destination_directory
+    format_to_copy = args.format_to_copy
+    source_file = args.source_file
+    destination_file = args.destination_file
+    bucket = args.bucket
+    build = args.build
+
+    # Build videos from images present in source directory
+    if build:
+        build_videos(source_directory)
+
+    # If S3 bucket is provided, upload content to S3 bucket.
+    if bucket:
+        # Upload multiple files matching a specific format from a directory.
+        if source_directory:
+            if os.path.isdir(source_directory):
+                files = glob.glob("{}/**/*.{}".format(source_directory, format_to_copy), recursive=True)
+                logging.info("Total files to copy - {}".format(len(files)))
+                for file in files:
+                    logging.info("Copying file {}".format(file))
+                    file_name = os.path.basename(file)
+                    destination_object = None
+                    if destination_directory:
+                        destination_object = destination_directory + "/" + file_name
+                    upload_file(file, bucket, destination_object)
+            else:
+                logging.error("Invalid source_directory passed.")
+        # Upload a single file to the s3 bucket.
+        if source_file:
+            if os.path.isfile(source_file):
+                logging.info("Copying file {}".format(source_file))
+                upload_file(source_file, bucket, destination_file)
+            else:
+                logging.error("Invalid source_file passed.")
+
+
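+# example usage (hypothetical paths and bucket name):
+#   python post_processing.py -bu -sd /results/run_01
+#   python post_processing.py -sd /results/run_01 -f mp4 -b my-sdg-bucket -dd videos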
+if __name__ == "__main__":
+    main()
\ No newline at end of file