Speed up reprojection and bounding box factors
mandi1267 committed Dec 12, 2023
1 parent c647cea commit fa2fea9
Showing 4 changed files with 178 additions and 51 deletions.
48 changes: 24 additions & 24 deletions include/refactoring/factors/bounding_box_factor.h
@@ -94,14 +94,14 @@ class BoundingBoxFactor {
}
return true;
}
if (debug_) {
if (obj_id_.has_value() && frame_id_.has_value() &&
camera_id_.has_value()) {
LOG(INFO) << "Corners for obj id " << obj_id_.value()
<< " at frame/cam " << frame_id_.value() << "/"
<< camera_id_.value() << ": " << corner_results;
}
}
// if (debug_) {
// if (obj_id_.has_value() && frame_id_.has_value() &&
// camera_id_.has_value()) {
// LOG(INFO) << "Corners for obj id " << obj_id_.value()
// << " at frame/cam " << frame_id_.value() << "/"
// << camera_id_.value() << ": " << corner_results;
// }
// }

Eigen::Matrix<T, 4, 1> deviation =
corner_results - rectified_corner_locations_.template cast<T>();
@@ -110,26 +110,26 @@ class BoundingBoxFactor {
// LOG(INFO) << "Sqrt inf mat bounding box "
// << sqrt_inf_mat_bounding_box_corners_;

if (debug_) {
if (obj_id_.has_value() && frame_id_.has_value() &&
camera_id_.has_value()) {
LOG(INFO) << "Deviation for obj id " << obj_id_.value()
<< " at frame/cam " << frame_id_.value() << "/"
<< camera_id_.value() << ": " << deviation;
}
}
// if (debug_) {
// if (obj_id_.has_value() && frame_id_.has_value() &&
// camera_id_.has_value()) {
// LOG(INFO) << "Deviation for obj id " << obj_id_.value()
// << " at frame/cam " << frame_id_.value() << "/"
// << camera_id_.value() << ": " << deviation;
// }
// }
Eigen::Map<Eigen::Matrix<T, 4, 1>> residuals(residuals_ptr);
residuals =
sqrt_inf_mat_bounding_box_corners_rectified_.template cast<T>() *
deviation;
if (debug_) {
if (obj_id_.has_value() && frame_id_.has_value() &&
camera_id_.has_value()) {
LOG(INFO) << "Residuals for obj id " << obj_id_.value()
<< " at frame/cam " << frame_id_.value() << "/"
<< camera_id_.value() << ": " << residuals;
}
}
// if (debug_) {
// if (obj_id_.has_value() && frame_id_.has_value() &&
// camera_id_.has_value()) {
// LOG(INFO) << "Residuals for obj id " << obj_id_.value()
// << " at frame/cam " << frame_id_.value() << "/"
// << camera_id_.value() << ": " << residuals;
// }
// }

// LOG(INFO) << "Residuals " << residuals;
return true;
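In equation form, the residual computation visible above whitens the deviation between the predicted and detected corner vector. The following is a sketch reconstructed from the snippet; the symbols are informal stand-ins for corner_results, rectified_corner_locations_, and sqrt_inf_mat_bounding_box_corners_rectified_, not identifiers from the codebase:

r \;=\; A_{\mathrm{rect}}\,(\hat{c} - c_{\mathrm{rect}}), \qquad A_{\mathrm{rect}} \;=\; \Sigma_{\mathrm{bb}}^{-1/2},

where \hat{c} \in \mathbb{R}^{4} stacks the predicted bounding box corner coordinates, c_{\mathrm{rect}} is the rectified detection, and \Sigma_{\mathrm{bb}} is the bounding box measurement covariance.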
2 changes: 0 additions & 2 deletions include/refactoring/factors/reprojection_cost_functor.h
@@ -82,8 +82,6 @@ class ReprojectionCostFunctor {
Eigen::Matrix<T, 2, 1> projected_pixel;
getProjectedPixelLocationRectified<T>(
pose, point, cam_to_robot_tf_inv_.cast<T>(), projected_pixel);
// LOG(INFO) << "Projected pixel location " << projected_pixel.x() << ",
// " << projected_pixel.y();

// Compute the residual.
residual[0] = T(rectified_error_multiplier_x_) *
81 changes: 66 additions & 15 deletions include/refactoring/types/ellipsoid_utils.h
@@ -162,38 +162,89 @@ bool getCornerLocationsVectorRectified(
const T *robot_pose,
const Eigen::Transform<T, 3, Eigen::Affine> &robot_to_cam_tf,
Eigen::Matrix<T, 4, 1> &corner_results) {
Eigen::Transform<T, 3, Eigen::Affine> robot_to_world_current =
PoseArrayToAffine(&(robot_pose[3]), &(robot_pose[0]));

// Some components of this are duplicated from other helper functions;
// they are inlined here because it is slightly faster

// Get the transform for the world relative to the robot pose --------------
// (inverse of robot pose)

// First get the inverse of the orientation
const Eigen::Map<const Eigen::Matrix<T, 3, 1>> rotation_axis(
&(robot_pose[3]));

const T rotation_angle = rotation_axis.norm();
Eigen::AngleAxis<T> inv_rotation_aa;
if (rotation_angle > T(kSmallAngleThreshold)) {
inv_rotation_aa =
Eigen::AngleAxis<T>(-rotation_angle, rotation_axis / rotation_angle);
} else {
inv_rotation_aa =
Eigen::AngleAxis<T>(T(0), Eigen::Matrix<T, 3, 1>{T(1), T(0), T(0)});
}

// Construct the inverse of the robot pose (T^-1 = [R^T | -R^T t])
Eigen::Map<const Eigen::Matrix<T, 3, 1>> translation_vec(&(robot_pose[0]));
Eigen::Translation<T, 3> translation_tf(
T(-1) * inv_rotation_aa.toRotationMatrix() * translation_vec);
const Eigen::Transform<T, 3, Eigen::Affine> robot_to_world_current_inv =
translation_tf * inv_rotation_aa;

// Robot to world defines the robot's pose in the world frame
// Cam to robot defines the camera pose in the robot's frame
// We want the world's pose in the camera frame
Eigen::Transform<T, 3, Eigen::Affine> world_to_camera =
robot_to_cam_tf * robot_to_world_current.inverse();
robot_to_cam_tf * robot_to_world_current_inv;

// Get the ellipsoid's pose in the world frame as a tf
Eigen::Matrix<T, 4, 4> dim_mat;
Eigen::Transform<T, 3, Eigen::Affine> ellipsoid_pose;

getEllipsoidDimMatAndTf(ellipsoid, dim_mat, ellipsoid_pose);
// Create the transform for the ellipsoid pose and the matrix that encodes
// the dimensions of an origin-centered axis-aligned ellipsoid
Eigen::Map<const Eigen::Matrix<T, 3, 1>> ellipsoid_transl_vec(ellipsoid);
Eigen::Translation<T, 3> ellipsoid_transl(ellipsoid_transl_vec);

Eigen::Matrix<T, 4, 1> diag_mat(
pow(ellipsoid[kEllipsoidPoseParameterizationSize] / T(2), 2) +
T(kDimensionRegularizationConstant),
pow(ellipsoid[kEllipsoidPoseParameterizationSize + 1] / T(2), 2) +
T(kDimensionRegularizationConstant),
pow(ellipsoid[kEllipsoidPoseParameterizationSize + 2] / T(2), 2) +
T(kDimensionRegularizationConstant),
T(-1));
dim_mat = diag_mat.asDiagonal();

#ifdef CONSTRAIN_ELLIPSOID_ORIENTATION
Eigen::AngleAxis<T> obj_axis_angle =
Eigen::AngleAxis<T>(ellipsoid[3], Eigen::Matrix<T, 3, 1>::UnitZ());
#else
const Eigen::Matrix<T, 3, 1> obj_rotation_axis(
ellipsoid[3], ellipsoid[4], ellipsoid[5]);
Eigen::AngleAxis<T> obj_axis_angle = VectorToAxisAngle(obj_rotation_axis);
#endif

ellipsoid_pose = ellipsoid_transl * Eigen::Quaternion<T>(obj_axis_angle);

// Get the transform for the ellipsoid relative to the camera
Eigen::Transform<T, 3, Eigen::AffineCompact> combined_tf_compact =
world_to_camera * ellipsoid_pose;

// Project the ellipsoid's dual quadric into the camera to get the dual
// conic of its image in rectified image coordinates
Eigen::Matrix<T, 3, 3> q_mat = combined_tf_compact.matrix() * dim_mat *
combined_tf_compact.matrix().transpose();

// // Check if behind camera
// T t_z = combined_tf_compact.translation().z();
// T q33_adjusted_sqrt = sqrt(q_mat(2, 2) + pow(t_z, 2));

// T plus_tan_z_plane = t_z + q33_adjusted_sqrt;
// T min_tan_z_plane = t_z - q33_adjusted_sqrt;
// if ((plus_tan_z_plane < T(0)) && (min_tan_z_plane < T(0))) {
// LOG(WARNING)
// << "Ellipsoid fully behind camera plane. Not physically possible.";
// // TODO should we sleep or exit here to make this error more evident?
// }

T q1_1 = q_mat(0, 0);
T q1_3 = q_mat(0, 2);
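The inlined code above leans on two identities; the following summary is reconstructed from the code and its comments (a sketch, not project documentation). First, the inverse of the rigid robot pose T = (R, t), where R comes from the axis-angle vector in the pose array, is built directly by negating the rotation angle:

T^{-1} \;=\; \begin{bmatrix} R^{\top} & -R^{\top} t \\ 0 & 1 \end{bmatrix},
\qquad R(\theta, \hat{n})^{-1} \;=\; R(-\theta, \hat{n}),

which is what the robot_to_world_current_inv block computes without calling Affine::inverse(). Second, with D = dim_mat the dual quadric of the origin-centered, axis-aligned ellipsoid and A the 3x4 compact transform world_to_camera * ellipsoid_pose (acting as a rectified projection matrix), the 3x3 matrix q_mat is the dual conic of the projected ellipse:

D \;=\; \operatorname{diag}\!\Big(\big(\tfrac{a}{2}\big)^{2} + \epsilon,\; \big(\tfrac{b}{2}\big)^{2} + \epsilon,\; \big(\tfrac{c}{2}\big)^{2} + \epsilon,\; -1\Big),
\qquad
C^{*} \;=\; A \, D \, A^{\top},

where a, b, c are the ellipsoid dimensions and \epsilon the regularization constant.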
98 changes: 88 additions & 10 deletions include/refactoring/types/vslam_math_util.h
@@ -32,7 +32,7 @@ template <typename T>
void VectorToAxisAngle(const Eigen::Matrix<T, 3, 1>& axis_angle_vec,
Eigen::AngleAxis<T>& angle_axis) {
const T rotation_angle = axis_angle_vec.norm();
if (rotation_angle > kSmallAngleThreshold) {
if (rotation_angle > T(kSmallAngleThreshold)) {
angle_axis =
Eigen::AngleAxis<T>(rotation_angle, axis_angle_vec / rotation_angle);
} else {
@@ -41,6 +41,32 @@ void VectorToAxisAngle(const Eigen::Matrix<T, 3, 1>& axis_angle_vec,
}
}

/**
* Convert from a vector that stores the axis-angle representation (with
* angle as the magnitude of the vector) to the Eigen AxisAngle
* representation of its inverse.
*
* @tparam T Type of each field.
* @param axis_angle_vec[in] Vector encoding the axis of rotation (as the
* direction) and the angle as the magnitude of the
* vector.
* @param angle_axis[out] Eigen AxisAngle representation for the
* rotation's inverse.
*/
template <typename T>
void VectorToAxisAngleInverse(
const Eigen::Map<const Eigen::Matrix<T, 3, 1>>& axis_angle_vec,
Eigen::AngleAxis<T>& angle_axis) {
const T rotation_angle = axis_angle_vec.norm();
if (rotation_angle > T(kSmallAngleThreshold)) {
angle_axis =
Eigen::AngleAxis<T>(-rotation_angle, axis_angle_vec / rotation_angle);
} else {
angle_axis =
Eigen::AngleAxis<T>(T(0), Eigen::Matrix<T, 3, 1>{T(1), T(0), T(0)});
}
}
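
The *Inverse helpers exploit the fact that negating the angle of an axis-angle rotation yields the inverse rotation, so no rotation matrix has to be formed and inverted. A minimal standalone sketch (plain Eigen, independent of this header) illustrating the identity:

// Standalone check (Eigen only): R(-theta, axis) == R(theta, axis)^T.
#include <Eigen/Geometry>
#include <iostream>

int main() {
  const Eigen::Vector3d axis_angle_vec(0.3, -0.2, 0.5);
  const double angle = axis_angle_vec.norm();
  const Eigen::Vector3d axis = axis_angle_vec / angle;

  const Eigen::AngleAxisd rotation(angle, axis);
  const Eigen::AngleAxisd inverse_rotation(-angle, axis);

  // Should print a value on the order of machine precision.
  std::cout << (inverse_rotation.toRotationMatrix() -
                rotation.toRotationMatrix().transpose())
                   .norm()
            << std::endl;
  return 0;
}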

/**
* Convert from a vector that stores the axis-angle representation (with
* angle as the magnitude of the vector) to the Eigen AxisAngle
@@ -61,6 +87,26 @@ Eigen::AngleAxis<T> VectorToAxisAngle(
return angle_axis;
}

/**
* Convert from a vector that stores the axis-angle representation (with
* angle as the magnitude of the vector) to the Eigen AxisAngle
* representation of its inverse.
*
* @tparam T Type of each field.
* @param axis_angle_vec Vector encoding the axis of rotation (as the
* direction) and the angle as the magnitude of the
* vector.
*
* @return Eigen AxisAngle representation for the rotation's inverse.
*/
template <typename T>
Eigen::AngleAxis<T> VectorToAxisAngleInverse(
const Eigen::Map<const Eigen::Matrix<T, 3, 1>>& axis_angle_vec) {
Eigen::AngleAxis<T> angle_axis;
VectorToAxisAngleInverse(axis_angle_vec, angle_axis);
return angle_axis;
}

/**
* Create an Eigen Affine transform from the rotation and translation.
*
@@ -94,6 +140,21 @@ Eigen::Transform<T, 3, Eigen::Affine> PoseArrayToAffine(const T* rotation,
return transform;
}

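/**
* Create an Eigen Affine transform for the inverse of the pose given by the
* rotation (axis-angle, with the angle as the magnitude) and translation,
* i.e. the transform mapping points from the frame the pose is expressed in
* into the pose's own frame.
*
* @tparam T Type of each field.
* @param rotation Pointer to the 3-entry axis-angle rotation.
* @param translation Pointer to the 3-entry translation.
*
* @return Affine transform for the inverse of the pose.
*/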
template <typename T>
Eigen::Transform<T, 3, Eigen::Affine> InversePoseArrayToAffine(
const T* rotation, const T* translation) {
const Eigen::Map<const Eigen::Matrix<T, 3, 1>> rotation_axis(rotation);

Eigen::AngleAxis<T> inv_rotation_aa = VectorToAxisAngleInverse(rotation_axis);

Eigen::Map<const Eigen::Matrix<T, 3, 1>> translation_vec(translation);
Eigen::Translation<T, 3> translation_tf(
T(-1) * inv_rotation_aa.toRotationMatrix() * translation_vec);
const Eigen::Transform<T, 3, Eigen::Affine> transform =
translation_tf * inv_rotation_aa;
return transform;
}

/**
* Calculate the essential matrix for the camera at two robot poses.
* @tparam T Type of the number in the computation. Needed to
@@ -289,19 +350,37 @@ void getProjectedPixelLocationRectified(
const T* point,
const Eigen::Transform<T, 3, Eigen::Affine>& cam_to_robot_tf_inv,
Eigen::Matrix<T, 2, 1>& pixel_results) {
// Transform from world to current robot pose
Eigen::Transform<T, 3, Eigen::Affine> world_to_robot_current =
vslam_types_refactor::PoseArrayToAffine(&(pose[3]), &(pose[0])).inverse();
// LOG(INFO) << "World to robot current " << world_to_robot_current.matrix();

// Some components of this are duplicated from other helper functions;
// they are inlined here because it is slightly faster

// Transform from world to current robot pose (inverse of robot's pose)
// Start with the inverse of the rotation
const Eigen::Map<const Eigen::Matrix<T, 3, 1>> rotation_axis(&(pose[3]));

Eigen::AngleAxis<T> inv_rotation_aa;
const T rotation_angle = rotation_axis.norm();
if (rotation_angle > T(kSmallAngleThreshold)) {
inv_rotation_aa =
Eigen::AngleAxis<T>(-rotation_angle, rotation_axis / rotation_angle);
} else {
inv_rotation_aa =
Eigen::AngleAxis<T>(T(0), Eigen::Matrix<T, 3, 1>{T(1), T(0), T(0)});
}

// Construct the inverse of the robot pose (T^-1 = [R^T | -R^T t])
Eigen::Map<const Eigen::Matrix<T, 3, 1>> translation_vec(&(pose[0]));
Eigen::Translation<T, 3> translation_tf(
T(-1) * inv_rotation_aa.toRotationMatrix() * translation_vec);
const Eigen::Transform<T, 3, Eigen::Affine> world_to_robot_current =
translation_tf * inv_rotation_aa;

// Point in world frame
Eigen::Matrix<T, 3, 1> point_world(point[0], point[1], point[2]);
// LOG(INFO) << "Point world " << point_world;
Eigen::Map<const Eigen::Matrix<T, 3, 1>> point_world(point);

// Transform the point from global coordinates to frame of current pose.
Eigen::Matrix<T, 3, 1> point_current =
cam_to_robot_tf_inv * world_to_robot_current * point_world;
// LOG(INFO) << "Point " << point_current;

// Project the 3D point into the current image.
// if (point_current.z() < T(0)) {
@@ -311,8 +390,7 @@ void getProjectedPixelLocationRectified(
// TODO should we sleep or exit here to make this error more evident?
// }

pixel_results.x() = point_current.x() / point_current.z();
pixel_results.y() = point_current.y() / point_current.z();
pixel_results = point_current.topRows(2) / point_current.z();
}
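
For reference, the final assignment above is the standard projection onto the normalized (rectified) image plane: for a camera-frame point p = (x, y, z)^T,

\begin{pmatrix} u \\ v \end{pmatrix} \;=\; \frac{1}{z} \begin{pmatrix} x \\ y \end{pmatrix},

so no intrinsic matrix appears here; as inferred from the function name and the surrounding code, results are expected in rectified/normalized coordinates.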

template <typename T>
