#include "y2022/vision/target_estimator.h"

#include "absl/strings/str_format.h"
#include "ceres/ceres.h"
#include "opencv2/core/core.hpp"
#include "opencv2/core/eigen.hpp"
#include "opencv2/features2d.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc.hpp"

#include "aos/time/time.h"
#include "frc971/control_loops/quaternion_utils.h"
#include "frc971/vision/geometry.h"
#include "y2022/constants.h"

DEFINE_bool(freeze_roll, false, "If true, don't solve for roll");
DEFINE_bool(freeze_pitch, false, "If true, don't solve for pitch");
DEFINE_bool(freeze_yaw, false, "If true, don't solve for yaw");
DEFINE_bool(freeze_camera_height, true,
            "If true, don't solve for camera height");
DEFINE_bool(freeze_angle_to_camera, true,
            "If true, don't solve for polar angle to camera");

DEFINE_uint64(max_solver_iterations, 200,
              "Maximum number of iterations for the ceres solver");
DEFINE_bool(solver_output, false,
            "If true, log the solver progress and results");
DEFINE_bool(draw_projected_hub, true,
            "If true, draw the projected hub when drawing an estimate");

namespace y2022::vision {

namespace {

constexpr size_t kNumPiecesOfTape = 16;
// Width and height of a piece of reflective tape (m)
constexpr double kTapePieceWidth = 0.13;
constexpr double kTapePieceHeight = 0.05;
// Height of the center of the tape (m)
constexpr double kTapeCenterHeight = 2.58 + (kTapePieceHeight / 2);
// Horizontal distance from tape to center of hub (m)
constexpr double kUpperHubRadius = 1.36 / 2;

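// Computes the 3d positions (in the hub frame) of the centers of the 5 pieces
// of tape visible to the camera, evenly spaced around the upper hub circle
// with the middle piece at 0 rad.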
std::vector<cv::Point3d> ComputeTapePoints() {
  std::vector<cv::Point3d> tape_points;

  constexpr size_t kNumVisiblePiecesOfTape = 5;
  for (size_t i = 0; i < kNumVisiblePiecesOfTape; i++) {
    // The center piece of tape is at 0 rad, so the angle indices are offset
    // by the number of pieces of tape on each side of it
    const double theta_index =
        static_cast<double>(i) - ((kNumVisiblePiecesOfTape - 1) / 2);
    // The polar angle is a multiple of the angle between tape centers
    double theta = theta_index * ((2.0 * M_PI) / kNumPiecesOfTape);
    tape_points.emplace_back(kUpperHubRadius * std::cos(theta),
                             kUpperHubRadius * std::sin(theta),
                             kTapeCenterHeight);
  }

  return tape_points;
}

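// Computes the four corners (top left, top right, bottom right, bottom left)
// of the middle piece of tape in the hub frame. These are used to compare the
// projected size of the tape against the size of the detected middle blob.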
std::array<cv::Point3d, 4> ComputeMiddleTapePiecePoints() {
  std::array<cv::Point3d, 4> tape_piece_points;

  // Angle that each piece of tape occupies on the hub
  constexpr double kTapePieceAngle =
      (kTapePieceWidth / (2.0 * M_PI * kUpperHubRadius)) * (2.0 * M_PI);

  constexpr double kThetaTapeLeft = -kTapePieceAngle / 2.0;
  constexpr double kThetaTapeRight = kTapePieceAngle / 2.0;

  constexpr double kTapeTopHeight =
      kTapeCenterHeight + (kTapePieceHeight / 2.0);
  constexpr double kTapeBottomHeight =
      kTapeCenterHeight - (kTapePieceHeight / 2.0);

  tape_piece_points[0] = {kUpperHubRadius * std::cos(kThetaTapeLeft),
                          kUpperHubRadius * std::sin(kThetaTapeLeft),
                          kTapeTopHeight};
  tape_piece_points[1] = {kUpperHubRadius * std::cos(kThetaTapeRight),
                          kUpperHubRadius * std::sin(kThetaTapeRight),
                          kTapeTopHeight};

  tape_piece_points[2] = {kUpperHubRadius * std::cos(kThetaTapeRight),
                          kUpperHubRadius * std::sin(kThetaTapeRight),
                          kTapeBottomHeight};
  tape_piece_points[3] = {kUpperHubRadius * std::cos(kThetaTapeLeft),
                          kUpperHubRadius * std::sin(kThetaTapeLeft),
                          kTapeBottomHeight};

  return tape_piece_points;
}

}  // namespace

const std::vector<cv::Point3d> TargetEstimator::kTapePoints =
    ComputeTapePoints();
const std::array<cv::Point3d, 4> TargetEstimator::kMiddleTapePiecePoints =
    ComputeMiddleTapePiecePoints();

namespace {
constexpr double kDefaultDistance = 3.0;
constexpr double kDefaultYaw = M_PI;
constexpr double kDefaultAngleToCamera = 0.0;
}  // namespace

TargetEstimator::TargetEstimator(cv::Mat intrinsics, cv::Mat extrinsics)
    : blob_stats_(),
      middle_blob_index_(0),
      max_blob_area_(0.0),
      image_(std::nullopt),
      roll_(0.0),
      pitch_(0.0),
      yaw_(kDefaultYaw),
      distance_(kDefaultDistance),
      angle_to_camera_(kDefaultAngleToCamera),
      // Seed camera height
      camera_height_(extrinsics.at<double>(2, 3) +
                     constants::Values::kImuHeight()) {
  cv::cv2eigen(intrinsics, intrinsics_);
  cv::cv2eigen(extrinsics, extrinsics_);
}

namespace {
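// Either freezes the parameter at its current value or bounds it to
// [min, max] for the solver.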
void SetBoundsOrFreeze(double *param, bool freeze, double min, double max,
                       ceres::Problem *problem) {
  if (freeze) {
    problem->SetParameterBlockConstant(param);
  } else {
    problem->SetParameterLowerBound(param, 0, min);
    problem->SetParameterUpperBound(param, 0, max);
  }
}

// With X, Y, Z being hub axes and x, y, z being camera axes,
// x = -Y, y = -Z, z = X
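// I.e. this is a pure rotation (determinant +1) that remaps into the standard
// pinhole camera convention used by the projection in ProjectToImage:
// z forward out of the lens, y down, x right.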
const Eigen::Matrix3d kHubToCameraAxes =
    (Eigen::Matrix3d() << 0.0, -1.0, 0.0, 0.0, 0.0, -1.0, 1.0, 0.0, 0.0)
        .finished();

}  // namespace

void TargetEstimator::Solve(
    const std::vector<BlobDetector::BlobStats> &blob_stats,
    std::optional<cv::Mat> image) {
  auto start = aos::monotonic_clock::now();

  blob_stats_ = blob_stats;
  image_ = image;

  // Do nothing if no blobs were detected
  if (blob_stats_.size() == 0) {
    confidence_ = 0.0;
    return;
  }

  CHECK_GE(blob_stats_.size(), 3) << "Expected at least 3 blobs";

  const auto circle = frc971::vision::Circle::Fit({blob_stats_[0].centroid,
                                                   blob_stats_[1].centroid,
                                                   blob_stats_[2].centroid});
  CHECK(circle.has_value());

  max_blob_area_ = 0.0;

  // Find the middle blob, which is the one with the angle closest to the
  // average
  double theta_avg = 0.0;
  for (const auto &stats : blob_stats_) {
    theta_avg += circle->AngleOf(stats.centroid);

    if (stats.area > max_blob_area_) {
      max_blob_area_ = stats.area;
    }
  }
  theta_avg /= blob_stats_.size();

  double min_diff = std::numeric_limits<double>::infinity();
  for (auto it = blob_stats_.begin(); it < blob_stats_.end(); it++) {
    const double diff = std::abs(circle->AngleOf(it->centroid) - theta_avg);
    if (diff < min_diff) {
      min_diff = diff;
      middle_blob_index_ = it - blob_stats_.begin();
    }
  }

  ceres::Problem problem;

  // x and y differences between projected centroids and blob centroids, as
  // well as width and height differences between the projected middle tape
  // piece and the detected blob
  const size_t num_residuals = (blob_stats_.size() * 2) + 2;

  // Set up the only cost function (also known as residual). This uses
  // auto-differentiation to obtain the derivative (jacobian).
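  // The six 1's are the sizes of the parameter blocks: roll, pitch, yaw,
  // distance, angle to camera, and camera height. The residual count is
  // dynamic because it depends on how many blobs were detected.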
  ceres::CostFunction *cost_function =
      new ceres::AutoDiffCostFunction<TargetEstimator, ceres::DYNAMIC, 1, 1, 1,
                                      1, 1, 1>(this, num_residuals,
                                               ceres::DO_NOT_TAKE_OWNERSHIP);

  // A Huber loss reduces the influence of outlier blobs on the solution.
  problem.AddResidualBlock(cost_function, new ceres::HuberLoss(2.0), &roll_,
                           &pitch_, &yaw_, &distance_, &angle_to_camera_,
                           &camera_height_);

  // Compute the estimated rotation of the camera using the robot rotation.
  const Eigen::Matrix3d extrinsics_rot =
      Eigen::Affine3d(extrinsics_).rotation() * kHubToCameraAxes;
  // asin returns a pitch in [-pi/2, pi/2], so these will be the correct euler
  // angles.
  const double pitch_seed = -std::asin(extrinsics_rot(2, 0));
  const double roll_seed =
      std::atan2(extrinsics_rot(2, 1) / std::cos(pitch_seed),
                 extrinsics_rot(2, 2) / std::cos(pitch_seed));
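  // These are the roll and pitch of the Z-Y-X euler angle decomposition of
  // extrinsics_rot; they seed the solver and bound roll_ and pitch_ below.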

  // TODO(milind): seed with localizer output as well

  // If we didn't solve well last time, seed everything at the defaults so we
  // don't get stuck in a bad state.
  // Copied from localizer.cc
  constexpr double kMinConfidence = 0.75;
  if (confidence_ < kMinConfidence) {
    roll_ = roll_seed;
    pitch_ = pitch_seed;
    yaw_ = kDefaultYaw;
    distance_ = kDefaultDistance;
    angle_to_camera_ = kDefaultAngleToCamera;
    camera_height_ = extrinsics_(2, 3) + constants::Values::kImuHeight();
  }

  // Constrain the rotation to be near the seeded rotation; otherwise there
  // can be multiple solutions. There shouldn't be much roll or pitch.
  if (FLAGS_freeze_roll) {
    roll_ = roll_seed;
  }
  constexpr double kMaxRollDelta = 0.1;
  SetBoundsOrFreeze(&roll_, FLAGS_freeze_roll, roll_seed - kMaxRollDelta,
                    roll_seed + kMaxRollDelta, &problem);

  if (FLAGS_freeze_pitch) {
    pitch_ = pitch_seed;
  }
  constexpr double kMaxPitchDelta = 0.15;
  SetBoundsOrFreeze(&pitch_, FLAGS_freeze_pitch, pitch_seed - kMaxPitchDelta,
                    pitch_seed + kMaxPitchDelta, &problem);
  // Constrain the yaw to where the target would be visible
  constexpr double kMaxYawDelta = M_PI / 4.0;
  SetBoundsOrFreeze(&yaw_, FLAGS_freeze_yaw, M_PI - kMaxYawDelta,
                    M_PI + kMaxYawDelta, &problem);

  constexpr double kMaxHeightDelta = 0.1;
  SetBoundsOrFreeze(&camera_height_, FLAGS_freeze_camera_height,
                    camera_height_ - kMaxHeightDelta,
                    camera_height_ + kMaxHeightDelta, &problem);

  // Distances shouldn't be too close to the target or too far
  constexpr double kMinDistance = 1.0;
  constexpr double kMaxDistance = 10.0;
  SetBoundsOrFreeze(&distance_, false, kMinDistance, kMaxDistance, &problem);

  // Keep the angle between +/- half of the angle between pieces of tape
  constexpr double kMaxAngle = ((2.0 * M_PI) / kNumPiecesOfTape) / 2.0;
  SetBoundsOrFreeze(&angle_to_camera_, FLAGS_freeze_angle_to_camera, -kMaxAngle,
                    kMaxAngle, &problem);

  ceres::Solver::Options options;
  options.minimizer_progress_to_stdout = FLAGS_solver_output;
  options.gradient_tolerance = 1e-12;
  options.function_tolerance = 1e-16;
  options.parameter_tolerance = 1e-12;
  options.max_num_iterations = FLAGS_max_solver_iterations;
  ceres::Solver::Summary summary;
  ceres::Solve(options, &problem, &summary);

  auto end = aos::monotonic_clock::now();
  VLOG(1) << "Target estimation elapsed time: "
          << std::chrono::duration<double, std::milli>(end - start).count()
          << " ms";

  // For computing the confidence, find the standard deviation in pixels.
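  // This is the root-mean-square of the (area-weighted) centroid residuals;
  // the final two size residuals are excluded.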
Milind Upadhyay | 8f38ad8 | 2022-03-03 10:06:18 -0800 | [diff] [blame] | 282 | std::vector<double> residual(num_residuals); |
| 283 | (*this)(&roll_, &pitch_, &yaw_, &distance_, &angle_to_camera_, |
| 284 | &camera_height_, residual.data()); |
| 285 | double std_dev = 0.0; |
| 286 | for (auto it = residual.begin(); it < residual.end() - 2; it++) { |
| 287 | std_dev += std::pow(*it, 2); |
Milind Upadhyay | 14279de | 2022-02-26 16:07:53 -0800 | [diff] [blame] | 288 | } |
Milind Upadhyay | 8f38ad8 | 2022-03-03 10:06:18 -0800 | [diff] [blame] | 289 | std_dev /= num_residuals - 2; |
| 290 | std_dev = std::sqrt(std_dev); |
| 291 | |
| 292 | // Use a sigmoid to convert the deviation into a confidence for the |
| 293 | // localizer. Fit a sigmoid to the points of (0, 1) and two other |
| 294 | // reasonable deviation-confidence combinations using |
Milind Upadhyay | da9a829 | 2022-04-02 18:00:04 -0700 | [diff] [blame] | 295 | // https://www.desmos.com/calculator/ha6fh9yw44 |
| 296 | constexpr double kSigmoidCapacity = 1.065; |
  // Stretch the sigmoid out correctly.
  // Currently, good estimates have deviations of a pixel or less.
  constexpr double kSigmoidScalar = 0.06496;
  constexpr double kSigmoidGrowthRate = -0.6221;
  confidence_ =
      kSigmoidCapacity /
      (1.0 + kSigmoidScalar * std::exp(-kSigmoidGrowthRate * std_dev));
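  // With these constants, a deviation of 0 px gives a confidence of ~1.0,
  // 1 px gives ~0.95, and 5 px gives ~0.43.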

  if (FLAGS_solver_output) {
    LOG(INFO) << summary.FullReport();

    LOG(INFO) << "roll: " << roll_;
    LOG(INFO) << "pitch: " << pitch_;
    LOG(INFO) << "yaw: " << yaw_;
    LOG(INFO) << "angle to target (based on yaw): " << angle_to_target();
    LOG(INFO) << "angle to camera (polar): " << angle_to_camera_;
    LOG(INFO) << "distance (polar): " << distance_;
    LOG(INFO) << "camera height: " << camera_height_;
    LOG(INFO) << "standard deviation (px): " << std_dev;
    LOG(INFO) << "confidence: " << confidence_;
  }
}

namespace {

// Hacks to extract a double from a scalar, which is either a ceres jet or a
// double. Only used for debugging and displaying.
template <typename S>
double ScalarToDouble(S s) {
  const double *ptr = reinterpret_cast<double *>(&s);
  return *ptr;
}

template <typename S>
cv::Point2d ScalarPointToDouble(cv::Point_<S> p) {
  return cv::Point2d(ScalarToDouble(p.x), ScalarToDouble(p.y));
}

}  // namespace

template <typename S>
bool TargetEstimator::operator()(const S *const roll, const S *const pitch,
                                 const S *const yaw, const S *const distance,
                                 const S *const theta,
                                 const S *const camera_height,
                                 S *residual) const {
  const auto H_hub_camera = ComputeHubCameraTransform(
      *roll, *pitch, *yaw, *distance, *theta, *camera_height);

  // Project tape points
  std::vector<cv::Point_<S>> tape_points_proj;
  for (cv::Point3d tape_point_hub : kTapePoints) {
    tape_points_proj.emplace_back(ProjectToImage(tape_point_hub, H_hub_camera));
    VLOG(2) << "Projected tape point: "
            << ScalarPointToDouble(
                   tape_points_proj[tape_points_proj.size() - 1]);
  }

  // Project the four corners of the middle piece of tape
  std::array<cv::Point_<S>, 4> middle_tape_piece_points_proj;
  for (auto tape_piece_it = kMiddleTapePiecePoints.begin();
       tape_piece_it < kMiddleTapePiecePoints.end(); tape_piece_it++) {
    middle_tape_piece_points_proj[tape_piece_it -
                                  kMiddleTapePiecePoints.begin()] =
        ProjectToImage(*tape_piece_it, H_hub_camera);
  }

  // Now, find the closest tape for each blob.
  // We don't normally see tape without matching blobs in the center, so
  // compress any gaps in the matched tape indices. This keeps the solver from
  // shrinking the projected goal and skipping tape pieces, and the resulting
  // accuracy is pretty good.
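  // For example, if the sorted matches are tape indices {1, 2, 4} with the
  // middle blob matched to 2, the gap compression below rewrites 4 to 3.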

  // Mapping from tape index to blob index.
  std::vector<std::pair<size_t, size_t>> tape_indices;
  for (size_t i = 0; i < blob_stats_.size(); i++) {
    tape_indices.emplace_back(ClosestTape(i, tape_points_proj), i);
    VLOG(2) << "Tape indices were " << tape_indices.back().first;
  }

  std::sort(
      tape_indices.begin(), tape_indices.end(),
      [](const std::pair<size_t, size_t> &a,
         const std::pair<size_t, size_t> &b) { return a.first < b.first; });

  std::optional<size_t> middle_tape_index = std::nullopt;
  for (size_t i = 0; i < tape_indices.size(); ++i) {
    if (tape_indices[i].second == middle_blob_index_) {
      middle_tape_index = i;
    }
  }
  CHECK(middle_tape_index.has_value()) << "Failed to find middle tape";

  if (VLOG_IS_ON(2)) {
    LOG(INFO) << "Middle tape is " << *middle_tape_index << ", blob "
              << middle_blob_index_;
    for (size_t i = 0; i < tape_indices.size(); ++i) {
      const auto distance = DistanceFromTapeIndex(
          tape_indices[i].second, tape_indices[i].first, tape_points_proj);
      LOG(INFO) << "Blob index " << tape_indices[i].second << " maps to "
                << tape_indices[i].first << " distance " << distance.x << " "
                << distance.y;
    }
  }

  {
    size_t offset = 0;
    for (size_t i = *middle_tape_index + 1; i < tape_indices.size(); ++i) {
      tape_indices[i].first -= offset;

      if (tape_indices[i].first > tape_indices[i - 1].first + 1) {
        offset += tape_indices[i].first - (tape_indices[i - 1].first + 1);
        VLOG(2) << "Offset now " << offset;
        tape_indices[i].first = tape_indices[i - 1].first + 1;
      }
    }
  }

  if (VLOG_IS_ON(2)) {
    LOG(INFO) << "Middle tape is " << *middle_tape_index << ", blob "
              << middle_blob_index_;
    for (size_t i = 0; i < tape_indices.size(); ++i) {
      const auto distance = DistanceFromTapeIndex(
          tape_indices[i].second, tape_indices[i].first, tape_points_proj);
      LOG(INFO) << "Blob index " << tape_indices[i].second << " maps to "
                << tape_indices[i].first << " distance " << distance.x << " "
                << distance.y;
    }
  }

  {
    size_t offset = 0;
    for (size_t i = *middle_tape_index; i > 0; --i) {
      tape_indices[i - 1].first -= offset;

      if (tape_indices[i - 1].first + 1 < tape_indices[i].first) {
        VLOG(2) << "Too big a gap. " << tape_indices[i].first << " and "
                << tape_indices[i - 1].first;

        offset += tape_indices[i].first - (tape_indices[i - 1].first + 1);
        tape_indices[i - 1].first = tape_indices[i].first - 1;
        VLOG(2) << "Offset now " << offset;
      }
    }
  }

  if (VLOG_IS_ON(2)) {
    LOG(INFO) << "Middle tape is " << *middle_tape_index << ", blob "
              << middle_blob_index_;
    for (size_t i = 0; i < tape_indices.size(); ++i) {
      const auto distance = DistanceFromTapeIndex(
          tape_indices[i].second, tape_indices[i].first, tape_points_proj);
      LOG(INFO) << "Blob index " << tape_indices[i].second << " maps to "
                << tape_indices[i].first << " distance " << distance.x << " "
                << distance.y;
    }
  }

  for (size_t i = 0; i < tape_indices.size(); ++i) {
    const auto distance = DistanceFromTapeIndex(
        tape_indices[i].second, tape_indices[i].first, tape_points_proj);
    // Scale the distance based on the blob area: larger blobs have less noise.
    const S distance_scalar =
        S(blob_stats_[tape_indices[i].second].area / max_blob_area_);
    VLOG(2) << "Blob index " << tape_indices[i].second << " maps to "
            << tape_indices[i].first << " distance " << distance.x << " "
            << distance.y << " distance scalar "
            << ScalarToDouble(distance_scalar);

    // Set the residual to the (x, y) distance of the centroid from the
    // matched projected piece of tape
    residual[i * 2] = distance_scalar * distance.x;
    residual[(i * 2) + 1] = distance_scalar * distance.y;
  }

  // Penalize based on the difference between the size of the projected middle
  // piece of tape and that of the detected middle blob.
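  // The width is measured along the bottom edge of the projected quad
  // (corners 3 to 2) and the height along its right edge (corners 2 to 1).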
  const S middle_tape_piece_width = ceres::hypot(
      middle_tape_piece_points_proj[2].x - middle_tape_piece_points_proj[3].x,
      middle_tape_piece_points_proj[2].y - middle_tape_piece_points_proj[3].y);
  const S middle_tape_piece_height = ceres::hypot(
      middle_tape_piece_points_proj[1].x - middle_tape_piece_points_proj[2].x,
      middle_tape_piece_points_proj[1].y - middle_tape_piece_points_proj[2].y);

  constexpr double kCenterBlobSizeScalar = 0.1;
  residual[blob_stats_.size() * 2] =
      kCenterBlobSizeScalar *
      (middle_tape_piece_width -
       static_cast<S>(blob_stats_[middle_blob_index_].size.width));
  residual[(blob_stats_.size() * 2) + 1] =
      kCenterBlobSizeScalar *
      (middle_tape_piece_height -
       static_cast<S>(blob_stats_[middle_blob_index_].size.height));

  if (image_.has_value()) {
    // Draw the current stage of the solving
    cv::Mat image = image_->clone();
    std::vector<cv::Point2d> tape_points_proj_double;
    for (auto point : tape_points_proj) {
      tape_points_proj_double.emplace_back(ScalarPointToDouble(point));
    }
    DrawProjectedHub(tape_points_proj_double, image);
    cv::imshow("image", image);
    cv::waitKey(10);
  }

  return true;
}

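// Computes the transform that takes points from the hub frame into the
// camera's frame. The camera position is given in hub-frame cylindrical
// coordinates (distance, theta, camera_height) and its orientation as Z-Y-X
// euler angles (yaw, pitch, roll); the remapping into camera axis conventions
// is applied separately by kHubToCameraAxes in ProjectToImage.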
template <typename S>
Eigen::Transform<S, 3, Eigen::Affine>
TargetEstimator::ComputeHubCameraTransform(S roll, S pitch, S yaw, S distance,
                                           S theta, S camera_height) const {
  using Vector3s = Eigen::Matrix<S, 3, 1>;
  using Affine3s = Eigen::Transform<S, 3, Eigen::Affine>;

  Eigen::AngleAxis<S> roll_angle(roll, Vector3s::UnitX());
  Eigen::AngleAxis<S> pitch_angle(pitch, Vector3s::UnitY());
  Eigen::AngleAxis<S> yaw_angle(yaw, Vector3s::UnitZ());
  // Construct the rotation and translation of the camera in the hub's frame
  Eigen::Quaternion<S> R_camera_hub = yaw_angle * pitch_angle * roll_angle;
  Vector3s T_camera_hub(distance * ceres::cos(theta),
                        distance * ceres::sin(theta), camera_height);

  Affine3s H_camera_hub = Eigen::Translation<S, 3>(T_camera_hub) * R_camera_hub;
  Affine3s H_hub_camera = H_camera_hub.inverse();

  return H_hub_camera;
}

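// Projects a 3d point in the hub frame into pixel coordinates: transform it
// into the camera frame, remap into camera axis conventions, multiply by the
// intrinsics, and divide by the z (depth) component.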
template <typename S>
cv::Point_<S> TargetEstimator::ProjectToImage(
    cv::Point3d tape_point_hub,
    const Eigen::Transform<S, 3, Eigen::Affine> &H_hub_camera) const {
  using Vector3s = Eigen::Matrix<S, 3, 1>;

  const Vector3s tape_point_hub_eigen =
      Vector3s(S(tape_point_hub.x), S(tape_point_hub.y), S(tape_point_hub.z));
  // Project the 3d tape point onto the image using the transformation and
  // intrinsics
  const Vector3s tape_point_proj =
      intrinsics_ * (kHubToCameraAxes * (H_hub_camera * tape_point_hub_eigen));

  // Normalize the projected point
  return {tape_point_proj.x() / tape_point_proj.z(),
          tape_point_proj.y() / tape_point_proj.z()};
}

namespace {
template <typename S>
cv::Point_<S> Distance(cv::Point p, cv::Point_<S> q) {
  return cv::Point_<S>(S(p.x) - q.x, S(p.y) - q.y);
}

template <typename S>
bool Less(cv::Point_<S> distance_1, cv::Point_<S> distance_2) {
  return (ceres::pow(distance_1.x, 2) + ceres::pow(distance_1.y, 2) <
          ceres::pow(distance_2.x, 2) + ceres::pow(distance_2.y, 2));
}
}  // namespace

template <typename S>
cv::Point_<S> TargetEstimator::DistanceFromTapeIndex(
    size_t blob_index, size_t tape_index,
    const std::vector<cv::Point_<S>> &tape_points) const {
  return Distance(blob_stats_[blob_index].centroid, tape_points[tape_index]);
}

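// Returns the index of the projected tape point closest to the given blob's
// centroid. The middle blob is always matched to the middle tape point (and
// other blobs never are) so the solver can't drift too far.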
template <typename S>
size_t TargetEstimator::ClosestTape(
    size_t blob_index, const std::vector<cv::Point_<S>> &tape_points) const {
  auto distance = cv::Point_<S>(std::numeric_limits<S>::infinity(),
                                std::numeric_limits<S>::infinity());
  std::optional<size_t> final_match = std::nullopt;
  if (blob_index == middle_blob_index_) {
    // Fix the middle blob so the solver can't go too far off
    final_match = tape_points.size() / 2;
    distance = DistanceFromTapeIndex(blob_index, *final_match, tape_points);
  } else {
    // Give the other blob_stats some freedom in case some are split into pieces
    for (auto it = tape_points.begin(); it < tape_points.end(); it++) {
      const size_t tape_index = std::distance(tape_points.begin(), it);
      const auto current_distance =
          DistanceFromTapeIndex(blob_index, tape_index, tape_points);
      if ((tape_index != (tape_points.size() / 2)) &&
          Less(current_distance, distance)) {
        final_match = tape_index;
        distance = current_distance;
      }
    }
  }

  CHECK(final_match.has_value());
  VLOG(2) << "Matched index " << blob_index << " to " << *final_match
          << " distance " << distance.x << " " << distance.y;

  return *final_match;
}

void TargetEstimator::DrawProjectedHub(
    const std::vector<cv::Point2d> &tape_points_proj,
    cv::Mat view_image) const {
  for (size_t i = 0; i < tape_points_proj.size() - 1; i++) {
    cv::line(view_image, ScalarPointToDouble(tape_points_proj[i]),
             ScalarPointToDouble(tape_points_proj[i + 1]),
             cv::Scalar(255, 255, 255));
    cv::circle(view_image, ScalarPointToDouble(tape_points_proj[i]), 2,
               cv::Scalar(255, 20, 147), cv::FILLED);
    cv::circle(view_image, ScalarPointToDouble(tape_points_proj[i + 1]), 2,
               cv::Scalar(255, 20, 147), cv::FILLED);
  }
}

void TargetEstimator::DrawEstimate(cv::Mat view_image) const {
  if (FLAGS_draw_projected_hub) {
    // Draw projected hub
    const auto H_hub_camera = ComputeHubCameraTransform(
        roll_, pitch_, yaw_, distance_, angle_to_camera_, camera_height_);
    std::vector<cv::Point2d> tape_points_proj;
    for (cv::Point3d tape_point_hub : kTapePoints) {
      tape_points_proj.emplace_back(
          ProjectToImage(tape_point_hub, H_hub_camera));
    }
    DrawProjectedHub(tape_points_proj, view_image);
  }

  constexpr int kTextX = 10;
  int text_y = 0;
  constexpr int kTextSpacing = 25;

  const auto kTextColor = cv::Scalar(0, 255, 255);
  constexpr double kFontScale = 0.6;

  cv::putText(view_image,
              absl::StrFormat("Distance: %.3f m (%.3f in)", distance_,
                              distance_ / 0.0254),
              cv::Point(kTextX, text_y += kTextSpacing),
              cv::FONT_HERSHEY_DUPLEX, kFontScale, kTextColor, 2);
  cv::putText(view_image,
              absl::StrFormat("Angle to target: %.3f", angle_to_target()),
              cv::Point(kTextX, text_y += kTextSpacing),
              cv::FONT_HERSHEY_DUPLEX, kFontScale, kTextColor, 2);
  cv::putText(view_image,
              absl::StrFormat("Angle to camera: %.3f", angle_to_camera_),
              cv::Point(kTextX, text_y += kTextSpacing),
              cv::FONT_HERSHEY_DUPLEX, kFontScale, kTextColor, 2);

  cv::putText(view_image,
              absl::StrFormat("Roll: %.3f, pitch: %.3f, yaw: %.3f", roll_,
                              pitch_, yaw_),
              cv::Point(kTextX, text_y += kTextSpacing),
              cv::FONT_HERSHEY_DUPLEX, kFontScale, kTextColor, 2);

  cv::putText(view_image, absl::StrFormat("Confidence: %.3f", confidence_),
              cv::Point(kTextX, text_y += kTextSpacing),
              cv::FONT_HERSHEY_DUPLEX, kFontScale, kTextColor, 2);
}

}  // namespace y2022::vision