Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 1 | #include <numeric> |
| 2 | |
| 3 | #include "aos/configuration.h" |
| 4 | #include "aos/events/logging/log_reader.h" |
| 5 | #include "aos/events/simulated_event_loop.h" |
| 6 | #include "aos/init.h" |
| 7 | #include "aos/util/mcap_logger.h" |
| 8 | #include "frc971/control_loops/pose.h" |
| 9 | #include "frc971/control_loops/quaternion_utils.h" |
| 10 | #include "frc971/vision/calibration_generated.h" |
| 11 | #include "frc971/vision/charuco_lib.h" |
| 12 | #include "frc971/vision/extrinsics_calibration.h" |
| 13 | #include "frc971/vision/target_mapper.h" |
| 14 | #include "frc971/vision/visualize_robot.h" |
| 15 | // clang-format off |
| 16 | // OpenCV eigen files must be included after Eigen includes |
| 17 | #include "opencv2/aruco.hpp" |
| 18 | #include "opencv2/calib3d.hpp" |
| 19 | #include "opencv2/core/eigen.hpp" |
| 20 | #include "opencv2/features2d.hpp" |
| 21 | #include "opencv2/highgui.hpp" |
| 22 | #include "opencv2/highgui/highgui.hpp" |
| 23 | #include "opencv2/imgproc.hpp" |
| 24 | // clang-format on |
| 25 | #include "frc971/constants/constants_sender_lib.h" |
| 26 | #include "frc971/vision/vision_util_lib.h" |
| 27 | #include "y2024/constants/simulated_constants_sender.h" |
| 28 | #include "y2024/vision/vision_util.h" |
| 29 | |
// Command-line flags for the multi-camera extrinsics calibration tool.
DEFINE_bool(alt_view, false,
            "If true, show visualization from field level, rather than above");
DEFINE_string(config, "",
              "If set, override the log's config file with this one.");
DEFINE_string(constants_path, "y2024/constants/constants.json",
              "Path to the constant file");
// Detection-quality filters: detections exceeding either threshold are
// discarded before being used for calibration.
DEFINE_double(max_pose_error, 5e-5,
              "Throw out target poses with a higher pose error than this");
DEFINE_double(
    max_pose_error_ratio, 0.4,
    "Throw out target poses with a higher pose error ratio than this");
DEFINE_string(output_folder, "/tmp",
              "Directory in which to store the updated calibration files");
DEFINE_string(target_type, "charuco_diamond",
              "Type of target being used [aruco, charuco, charuco_diamond]");
DEFINE_int32(team_number, 0,
             "Required: Use the calibration for a node with this team number");
DEFINE_uint64(
    wait_key, 1,
    "Time in ms to wait between images (0 to wait indefinitely until click)");

DEFINE_bool(robot, false,
            "If true we're calibrating extrinsics for the robot, use the "
            "correct node path for the robot.");

// Target id bounds are defined (with defaults) in another translation unit;
// declared here so the filtering code below can use them.
DECLARE_int32(min_target_id);
DECLARE_int32(max_target_id);
| 57 | |
| 58 | // Calibrate extrinsic relationship between cameras using two targets |
| 59 | // seen jointly between cameras. Uses two types of information: 1) |
| 60 | // when a single camera sees two targets, we estimate the pose between |
| 61 | // targets, and 2) when two separate cameras each see a target, we can |
| 62 | // use the pose between targets to estimate the pose between cameras. |
| 63 | |
| 64 | // We then create the extrinsics for the robot by starting with the |
| 65 | // given extrinsic for camera 1 (between imu/robot and camera frames) |
| 66 | // and then map each subsequent camera based on the data collected and |
| 67 | // the extrinsic poses computed here. |
| 68 | |
Jim Ostrowski | ddebdcb | 2024-02-29 22:25:36 -0800 | [diff] [blame] | 69 | // TODO<Jim>: Not currently using estimate from first camera to last camera-- |
| 70 | // should do full estimation, and probably also include camera->imu extrinsics |
| 71 | // from all cameras, not just first camera |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 72 | |
Jim Ostrowski | ddebdcb | 2024-02-29 22:25:36 -0800 | [diff] [blame] | 73 | namespace y2024::vision { |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 74 | using frc971::vision::DataAdapter; |
| 75 | using frc971::vision::ImageCallback; |
| 76 | using frc971::vision::PoseUtils; |
| 77 | using frc971::vision::TargetMap; |
| 78 | using frc971::vision::TargetMapper; |
| 79 | using frc971::vision::VisualizeRobot; |
| 80 | namespace calibration = frc971::vision::calibration; |
| 81 | |
// Image capture period in ms, based on an assumed 60 Hz camera frame rate.
// Used to decide whether two single-board observations are "simultaneous".
static constexpr double kImagePeriodMs =
    1.0 / 60.0 * 1000.0;  // Image capture period in ms
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 84 | |
| 85 | // Change reference frame from camera to robot |
| 86 | Eigen::Affine3d CameraToRobotDetection(Eigen::Affine3d H_camera_target, |
| 87 | Eigen::Affine3d extrinsics) { |
| 88 | const Eigen::Affine3d H_robot_camera = extrinsics; |
| 89 | const Eigen::Affine3d H_robot_target = H_robot_camera * H_camera_target; |
| 90 | return H_robot_target; |
| 91 | } |
| 92 | |
// A single target detection from one camera, stamped with the distributed
// clock so detections from different nodes can be compared in time.
// Also reused to store board-to-board transforms (see HandlePoses), in
// which case H_camera_target holds the boardA->boardB transform.
struct TimestampedPiDetection {
  // Capture time on the distributed clock.
  aos::distributed_clock::time_point time;
  // Transform from the camera frame to the target frame (i.e., the target
  // pose expressed in the camera frame).
  Eigen::Affine3d H_camera_target;
  // Name of the source camera channel (historically a "pi"; here it is the
  // camera channel name, e.g. "/orin1/camera0" — see HandleTargetMap caller).
  std::string pi_name;
  // Id of the detected target/board.
  int board_id;
};
| 101 | |
// Identifies one camera in the system by the node it runs on and its camera
// index on that node.
struct CameraNode {
  std::string node_name;
  int camera_number;

  // Returns the channel-name prefix for this camera, e.g. "/orin1/camera0".
  // Returns by (non-const) value so the result can be moved by callers;
  // `inline` is implied for in-class definitions and is omitted.
  std::string camera_name() const {
    return "/" + node_name + "/camera" + std::to_string(camera_number);
  }
};
| 110 | |
| 111 | std::vector<CameraNode> CreateNodeList() { |
| 112 | std::vector<CameraNode> list; |
| 113 | |
| 114 | list.push_back({.node_name = "orin1", .camera_number = 0}); |
| 115 | list.push_back({.node_name = "orin1", .camera_number = 1}); |
| 116 | list.push_back({.node_name = "imu", .camera_number = 0}); |
| 117 | list.push_back({.node_name = "imu", .camera_number = 1}); |
| 118 | |
| 119 | return list; |
| 120 | } |
| 121 | |
| 122 | std::vector<CameraNode> node_list(CreateNodeList()); |
| 123 | std::map<std::string, int> CreateOrderingMap() { |
| 124 | std::map<std::string, int> map; |
| 125 | |
| 126 | for (uint i = 0; i < node_list.size(); i++) { |
| 127 | map.insert({node_list.at(i).camera_name(), i}); |
| 128 | } |
| 129 | |
| 130 | return map; |
| 131 | } |
| 132 | std::map<std::string, int> ordering_map(CreateOrderingMap()); |
| 133 | |
// Most recent single-board observation; paired with the next single-board
// observation from another camera that arrives close enough in time.
TimestampedPiDetection last_observation;
// Pairs of near-simultaneous single-board detections from two cameras,
// ordered by the cameras' ordering_map indices.
std::vector<std::pair<TimestampedPiDetection, TimestampedPiDetection>>
    detection_list;
// Observations of the boardA->boardB transform, from frames where a single
// camera saw both boards at once.
std::vector<TimestampedPiDetection> two_board_extrinsics_list;
// Shared visualization canvas.
VisualizeRobot vis_robot_;
| 139 | |
| 140 | // TODO<jim>: Implement variance calcs |
| 141 | Eigen::Affine3d ComputeAveragePose( |
| 142 | std::vector<Eigen::Vector3d> &translation_list, |
| 143 | std::vector<Eigen::Vector4d> &rotation_list, |
| 144 | Eigen::Vector3d *translation_variance = nullptr, |
| 145 | Eigen::Vector3d *rotation_variance = nullptr) { |
| 146 | Eigen::Vector3d avg_translation = |
| 147 | std::accumulate(translation_list.begin(), translation_list.end(), |
| 148 | Eigen::Vector3d{0, 0, 0}) / |
| 149 | translation_list.size(); |
| 150 | |
| 151 | // TODO<Jim>: Use QuaternionMean from quaternion_utils.cc (but this |
| 152 | // requires a fixed number of quaternions to be averaged |
| 153 | Eigen::Vector4d avg_rotation = |
| 154 | std::accumulate(rotation_list.begin(), rotation_list.end(), |
| 155 | Eigen::Vector4d{0, 0, 0, 0}) / |
| 156 | rotation_list.size(); |
| 157 | // Normalize, so it's a valid quaternion |
| 158 | avg_rotation = avg_rotation / avg_rotation.norm(); |
| 159 | Eigen::Quaterniond avg_rotation_q{avg_rotation[0], avg_rotation[1], |
| 160 | avg_rotation[2], avg_rotation[3]}; |
| 161 | Eigen::Translation3d translation(avg_translation); |
| 162 | Eigen::Affine3d return_pose = translation * avg_rotation_q; |
| 163 | if (translation_variance != nullptr) { |
| 164 | *translation_variance = Eigen::Vector3d(); |
| 165 | } |
| 166 | if (rotation_variance != nullptr) { |
| 167 | LOG(INFO) << *rotation_variance; |
| 168 | } |
| 169 | |
| 170 | return return_pose; |
| 171 | } |
| 172 | |
| 173 | Eigen::Affine3d ComputeAveragePose( |
| 174 | std::vector<Eigen::Affine3d> &pose_list, |
| 175 | Eigen::Vector3d *translation_variance = nullptr, |
| 176 | Eigen::Vector3d *rotation_variance = nullptr) { |
| 177 | std::vector<Eigen::Vector3d> translation_list; |
| 178 | std::vector<Eigen::Vector4d> rotation_list; |
| 179 | |
| 180 | for (Eigen::Affine3d pose : pose_list) { |
| 181 | translation_list.push_back(pose.translation()); |
| 182 | Eigen::Quaterniond quat(pose.rotation().matrix()); |
| 183 | rotation_list.push_back( |
| 184 | Eigen::Vector4d(quat.w(), quat.x(), quat.y(), quat.z())); |
| 185 | } |
| 186 | |
| 187 | return ComputeAveragePose(translation_list, rotation_list, |
| 188 | translation_variance, rotation_variance); |
| 189 | } |
| 190 | |
| 191 | Eigen::Affine3d ComputeAveragePose( |
| 192 | std::vector<TimestampedPiDetection> &pose_list, |
| 193 | Eigen::Vector3d *translation_variance = nullptr, |
| 194 | Eigen::Vector3d *rotation_variance = nullptr) { |
| 195 | std::vector<Eigen::Vector3d> translation_list; |
| 196 | std::vector<Eigen::Vector4d> rotation_list; |
| 197 | |
| 198 | for (TimestampedPiDetection pose : pose_list) { |
| 199 | translation_list.push_back(pose.H_camera_target.translation()); |
| 200 | Eigen::Quaterniond quat(pose.H_camera_target.rotation().matrix()); |
| 201 | rotation_list.push_back( |
| 202 | Eigen::Vector4d(quat.w(), quat.x(), quat.y(), quat.z())); |
| 203 | } |
| 204 | return ComputeAveragePose(translation_list, rotation_list, |
| 205 | translation_variance, rotation_variance); |
| 206 | } |
| 207 | |
// Processes the set of target poses seen by one camera in a single frame.
//
// Two cases are handled:
//  1) Two boards in one image: store the boardA->boardB transform in
//     two_board_extrinsics_list, referenced to the lower-id board.
//  2) One board: pair this observation with the most recent single-board
//     observation (from last_observation) if the two are within half an
//     image period of each other, and append the pair to detection_list
//     for later camera-to-camera extrinsics estimation.
//
// rgb_image may be empty (e.g. when driven from TargetMap messages); it is
// only used for visualization.  node_name identifies the source camera.
void HandlePoses(cv::Mat rgb_image,
                 std::vector<TargetMapper::TargetPose> target_poses,
                 aos::distributed_clock::time_point distributed_eof,
                 std::string node_name) {
  // This is used to transform points for visualization
  // Assumes targets are aligned with x->right, y->up, z->out of board
  Eigen::Affine3d H_world_board;
  H_world_board = Eigen::Translation3d::Identity() *
                  Eigen::AngleAxisd(M_PI / 2.0, Eigen::Vector3d::UnitX());
  if (FLAGS_alt_view) {
    // Don't rotate -- this is like viewing from the side
    H_world_board = Eigen::Translation3d(0.0, 0.0, 3.0);
  }

  bool draw_vis = false;
  CHECK_LE(target_poses.size(), 2u)
      << "Can't handle more than two tags in field of view";
  if (target_poses.size() == 2) {
    draw_vis = true;
    VLOG(1) << "Saw two boards in same view from " << node_name;
    int from_index = 0;
    int to_index = 1;
    // Handle when we see two boards at once
    // We'll store them referenced to the lower id board
    if (target_poses[from_index].id > target_poses[to_index].id) {
      std::swap<int>(from_index, to_index);
    }

    // Create "from" (A) and "to" (B) transforms
    Eigen::Affine3d H_camera_boardA =
        PoseUtils::Pose3dToAffine3d(target_poses[from_index].pose);
    Eigen::Affine3d H_camera_boardB =
        PoseUtils::Pose3dToAffine3d(target_poses[to_index].pose);

    // boardA->boardB, computed through the shared camera frame.
    Eigen::Affine3d H_boardA_boardB =
        H_camera_boardA.inverse() * H_camera_boardB;

    // Note: H_camera_target here actually holds the boardA->boardB
    // transform, and board_id is the reference (lower) board id.
    TimestampedPiDetection boardA_boardB{
        .time = distributed_eof,
        .H_camera_target = H_boardA_boardB,
        .pi_name = node_name,
        .board_id = target_poses[from_index].id};

    VLOG(1) << "Map from board " << from_index << " to " << to_index << " is\n"
            << H_boardA_boardB.matrix();
    // Store this observation of the transform between two boards
    two_board_extrinsics_list.push_back(boardA_boardB);

    if (FLAGS_visualize) {
      vis_robot_.DrawFrameAxes(
          H_world_board,
          std::string("board ") + std::to_string(target_poses[from_index].id),
          cv::Scalar(0, 255, 0));
      vis_robot_.DrawFrameAxes(
          H_world_board * boardA_boardB.H_camera_target,
          std::string("board ") + std::to_string(target_poses[to_index].id),
          cv::Scalar(255, 0, 0));
      VLOG(2) << "Detection map from camera " << node_name << " to board "
              << target_poses[from_index].id << " is\n"
              << H_camera_boardA.matrix() << "\n and inverse is\n"
              << H_camera_boardA.inverse().matrix()
              << "\n and with world to board rotation is\n"
              << H_world_board * H_camera_boardA.inverse().matrix();
      vis_robot_.DrawRobotOutline(H_world_board * H_camera_boardA.inverse(),
                                  node_name, cv::Scalar(0, 0, 255));
    }
  } else if (target_poses.size() == 1) {
    VLOG(1) << "Saw single board in camera " << node_name;
    Eigen::Affine3d H_camera2_board2 =
        PoseUtils::Pose3dToAffine3d(target_poses[0].pose);
    TimestampedPiDetection new_observation{.time = distributed_eof,
                                           .H_camera_target = H_camera2_board2,
                                           .pi_name = node_name,
                                           .board_id = target_poses[0].id};

    // Only take two observations if they're within half an image cycle of each
    // other (i.e., as close in time as possible).  The 1e6 factor converts
    // kImagePeriodMs from ms to the clock's tick units (presumably ns --
    // TODO confirm the distributed clock duration is in nanoseconds).
    if (std::abs((distributed_eof - last_observation.time).count()) <
        kImagePeriodMs / 2.0 * 1000000.0) {
      // Sort by pi numbering, since this is how we will handle them
      std::pair<TimestampedPiDetection, TimestampedPiDetection> new_pair;
      // NOTE(review): if both observations have the same ordering index
      // (same camera), neither branch fires and a default-constructed pair
      // is stored below -- verify that paired observations always come from
      // distinct cameras.
      if (ordering_map.at(last_observation.pi_name) <
          ordering_map.at(new_observation.pi_name)) {
        new_pair = std::pair(last_observation, new_observation);
      } else if (ordering_map.at(last_observation.pi_name) >
                 ordering_map.at(new_observation.pi_name)) {
        new_pair = std::pair(new_observation, last_observation);
      }

      detection_list.push_back(new_pair);

      // This bit is just for visualization and checking purposes-- use the
      // last two-board observation to figure out the current estimate
      // between the two cameras
      if (FLAGS_visualize && two_board_extrinsics_list.size() > 0) {
        draw_vis = true;
        TimestampedPiDetection &last_two_board_ext =
            two_board_extrinsics_list[two_board_extrinsics_list.size() - 1];
        Eigen::Affine3d &H_boardA_boardB = last_two_board_ext.H_camera_target;
        int boardA_boardB_id = last_two_board_ext.board_id;

        TimestampedPiDetection camera1_boardA = new_pair.first;
        TimestampedPiDetection camera2_boardB = new_pair.second;
        // If camera1_boardA doesn't point to the first target, then swap
        // these two
        if (camera1_boardA.board_id != boardA_boardB_id) {
          camera1_boardA = new_pair.second;
          camera2_boardB = new_pair.first;
        }
        VLOG(1) << "Camera " << camera1_boardA.pi_name << " seeing board "
                << camera1_boardA.board_id << " and camera "
                << camera2_boardB.pi_name << " seeing board "
                << camera2_boardB.board_id;

        // Draw both cameras in the world frame: camera1 directly through
        // boardA, camera2 through boardA->boardB then boardB.
        vis_robot_.DrawRobotOutline(
            H_world_board * camera1_boardA.H_camera_target.inverse(),
            camera1_boardA.pi_name, cv::Scalar(0, 0, 255));
        vis_robot_.DrawRobotOutline(
            H_world_board * H_boardA_boardB *
                camera2_boardB.H_camera_target.inverse(),
            camera2_boardB.pi_name, cv::Scalar(128, 128, 0));
        vis_robot_.DrawFrameAxes(
            H_world_board,
            std::string("Board ") + std::to_string(last_two_board_ext.board_id),
            cv::Scalar(0, 255, 0));
        vis_robot_.DrawFrameAxes(H_world_board * H_boardA_boardB, "Board B",
                                 cv::Scalar(255, 0, 0));
      }
      VLOG(1) << "Storing observation between " << new_pair.first.pi_name
              << ", target " << new_pair.first.board_id << " and "
              << new_pair.second.pi_name << ", target "
              << new_pair.second.board_id;
    } else {
      // Too far apart in time to pair; remember this one for next time.
      VLOG(2) << "Storing observation for " << node_name << " at time "
              << distributed_eof;
      last_observation = new_observation;
    }
  }

  if (FLAGS_visualize) {
    if (!rgb_image.empty()) {
      std::string image_name = node_name + " Image";
      cv::Mat rgb_small;
      cv::resize(rgb_image, rgb_small, cv::Size(), 0.5, 0.5);
      cv::imshow(image_name, rgb_small);
      cv::waitKey(FLAGS_wait_key);
    }

    if (draw_vis) {
      cv::imshow("View", vis_robot_.image_);
      cv::waitKey(FLAGS_wait_key);
      vis_robot_.ClearImage();
    }
  }
}
| 363 | |
// Callback for a TargetMap (AprilTag detection) message from one camera.
// Filters out detections with out-of-range ids or excessive pose error /
// pose error ratio, then forwards the surviving target poses to HandlePoses
// for pairing and visualization.
void HandleTargetMap(const TargetMap &map,
                     aos::distributed_clock::time_point distributed_eof,
                     std::string node_name) {
  VLOG(1) << "Got april tag map call from node " << node_name;
  // Create empty RGB image in this case
  cv::Mat rgb_image;
  std::vector<TargetMapper::TargetPose> target_poses;

  for (const auto *target_pose_fbs : *map.target_poses()) {
    // Skip detections with invalid ids
    if (static_cast<TargetMapper::TargetId>(target_pose_fbs->id()) <
            FLAGS_min_target_id ||
        static_cast<TargetMapper::TargetId>(target_pose_fbs->id()) >
            FLAGS_max_target_id) {
      LOG(INFO) << "Skipping tag with invalid id of " << target_pose_fbs->id();
      continue;
    }

    // Skip detections with high pose errors
    if (target_pose_fbs->pose_error() > FLAGS_max_pose_error) {
      LOG(INFO) << "Skipping tag " << target_pose_fbs->id()
                << " due to pose error of " << target_pose_fbs->pose_error();
      continue;
    }
    // Skip detections with high pose error ratios
    if (target_pose_fbs->pose_error_ratio() > FLAGS_max_pose_error_ratio) {
      LOG(INFO) << "Skipping tag " << target_pose_fbs->id()
                << " due to pose error ratio of "
                << target_pose_fbs->pose_error_ratio();
      continue;
    }

    // Convert from the flatbuffer representation to a TargetPose.
    const TargetMapper::TargetPose target_pose =
        PoseUtils::TargetPoseFromFbs(*target_pose_fbs);

    target_poses.emplace_back(target_pose);

    Eigen::Affine3d H_camera_target =
        PoseUtils::Pose3dToAffine3d(target_pose.pose);
    VLOG(1) << node_name << " saw target " << target_pose.id
            << " from TargetMap at timestamp " << distributed_eof
            << " with pose = " << H_camera_target.matrix();
  }
  // Hand all surviving detections from this frame to the pairing logic.
  HandlePoses(rgb_image, target_poses, distributed_eof, node_name);
}
| 409 | |
// Processes a raw camera image through the charuco extractor, converts the
// resulting rotation/translation vectors into TargetPoses, and hands them
// to HandlePoses.  eof is the image's monotonic end-of-frame time on the
// capturing node; distributed_eof is the same instant on the distributed
// clock.
void HandleImage(aos::EventLoop *event_loop, cv::Mat rgb_image,
                 const aos::monotonic_clock::time_point eof,
                 aos::distributed_clock::time_point distributed_eof,
                 frc971::vision::CharucoExtractor &charuco_extractor,
                 std::string node_name) {
  // Outputs filled in by ProcessImage.
  std::vector<cv::Vec4i> charuco_ids;
  std::vector<std::vector<cv::Point2f>> charuco_corners;
  bool valid = false;
  std::vector<Eigen::Vector3d> rvecs_eigen;
  std::vector<Eigen::Vector3d> tvecs_eigen;
  // Why eof vs. distributed_eof?
  charuco_extractor.ProcessImage(rgb_image, eof, event_loop->monotonic_now(),
                                 charuco_ids, charuco_corners, valid,
                                 rvecs_eigen, tvecs_eigen);
  // Bail out if the extractor produced poses but flagged them as invalid.
  if (rvecs_eigen.size() > 0 && !valid) {
    LOG(WARNING) << "Charuco extractor returned not valid";
    return;
  }

  std::vector<TargetMapper::TargetPose> target_poses;
  for (size_t i = 0; i < tvecs_eigen.size(); i++) {
    // Convert the axis-angle rotation vector into a quaternion.
    Eigen::Quaterniond rotation(
        frc971::controls::ToQuaternionFromRotationVector(rvecs_eigen[i]));
    ceres::examples::Pose3d pose(Eigen::Vector3d(tvecs_eigen[i]), rotation);
    // charuco_ids[i][0] is used as the target id -- presumably the first
    // element of the Vec4i identifies the diamond; TODO confirm.
    TargetMapper::TargetPose target_pose{charuco_ids[i][0], pose};
    target_poses.emplace_back(target_pose);

    Eigen::Affine3d H_camera_target = PoseUtils::Pose3dToAffine3d(pose);
    VLOG(2) << node_name << " saw target " << target_pose.id
            << " from image at timestamp " << distributed_eof
            << " with pose = " << H_camera_target.matrix();
  }
  HandlePoses(rgb_image, target_poses, distributed_eof, node_name);
}
| 444 | |
| 445 | void ExtrinsicsMain(int argc, char *argv[]) { |
| 446 | vis_robot_ = VisualizeRobot(cv::Size(1000, 1000)); |
| 447 | vis_robot_.ClearImage(); |
| 448 | const double kFocalLength = 1000.0; |
| 449 | const int kImageWidth = 1000; |
| 450 | vis_robot_.SetDefaultViewpoint(kImageWidth, kFocalLength); |
| 451 | |
| 452 | std::optional<aos::FlatbufferDetachedBuffer<aos::Configuration>> config = |
| 453 | (FLAGS_config.empty() |
| 454 | ? std::nullopt |
| 455 | : std::make_optional(aos::configuration::ReadConfig(FLAGS_config))); |
| 456 | |
| 457 | // open logfiles |
| 458 | aos::logger::LogReader reader( |
| 459 | aos::logger::SortParts(aos::logger::FindLogs(argc, argv)), |
| 460 | config.has_value() ? &config->message() : nullptr); |
| 461 | |
Jim Ostrowski | ddebdcb | 2024-02-29 22:25:36 -0800 | [diff] [blame] | 462 | reader.RemapLoggedChannel("/imu/constants", "y2024.Constants"); |
| 463 | reader.RemapLoggedChannel("/orin1/constants", "y2024.Constants"); |
Maxwell Henderson | 9d7b186 | 2024-03-02 11:00:29 -0800 | [diff] [blame] | 464 | if (FLAGS_robot) { |
| 465 | reader.RemapLoggedChannel("/roborio/constants", "y2024.Constants"); |
| 466 | } |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 467 | reader.Register(); |
| 468 | |
| 469 | y2024::SendSimulationConstants(reader.event_loop_factory(), FLAGS_team_number, |
| 470 | FLAGS_constants_path); |
| 471 | |
| 472 | VLOG(1) << "Using target type " << FLAGS_target_type; |
Maxwell Henderson | 9d7b186 | 2024-03-02 11:00:29 -0800 | [diff] [blame] | 473 | |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 474 | std::vector<const calibration::CameraCalibration *> calibration_list; |
| 475 | |
| 476 | std::vector<std::unique_ptr<aos::EventLoop>> detection_event_loops; |
| 477 | std::vector<frc971::vision::CharucoExtractor *> charuco_extractors; |
| 478 | std::vector<frc971::vision::ImageCallback *> image_callbacks; |
| 479 | std::vector<Eigen::Affine3d> default_extrinsics; |
| 480 | |
Maxwell Henderson | 9d7b186 | 2024-03-02 11:00:29 -0800 | [diff] [blame] | 481 | for (const CameraNode &camera_node : node_list) { |
| 482 | const aos::Node *pi = aos::configuration::GetNode( |
| 483 | reader.configuration(), camera_node.node_name.c_str()); |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 484 | |
Maxwell Henderson | 9d7b186 | 2024-03-02 11:00:29 -0800 | [diff] [blame] | 485 | detection_event_loops.emplace_back( |
| 486 | reader.event_loop_factory()->MakeEventLoop( |
| 487 | (camera_node.camera_name() + "_detection").c_str(), pi)); |
| 488 | frc971::constants::ConstantsFetcher<y2024::Constants> constants_fetcher( |
| 489 | detection_event_loops.back().get()); |
| 490 | // Get the calibration for this orin/camera pair |
| 491 | const calibration::CameraCalibration *calibration = |
| 492 | y2024::vision::FindCameraCalibration(constants_fetcher.constants(), |
| 493 | camera_node.node_name, |
| 494 | camera_node.camera_number); |
| 495 | calibration_list.push_back(calibration); |
Maxwell Henderson | 69ec45e | 2024-03-01 22:52:21 -0800 | [diff] [blame] | 496 | |
Maxwell Henderson | 9d7b186 | 2024-03-02 11:00:29 -0800 | [diff] [blame] | 497 | // Extract the extrinsics from the calibration, and save as "defaults" |
| 498 | cv::Mat extrinsics_cv = |
| 499 | frc971::vision::CameraExtrinsics(calibration).value(); |
| 500 | Eigen::Matrix4d extrinsics_matrix; |
| 501 | cv::cv2eigen(extrinsics_cv, extrinsics_matrix); |
| 502 | const auto ext_H_robot_pi = Eigen::Affine3d(extrinsics_matrix); |
| 503 | default_extrinsics.emplace_back(ext_H_robot_pi); |
Maxwell Henderson | 69ec45e | 2024-03-01 22:52:21 -0800 | [diff] [blame] | 504 | |
Maxwell Henderson | 9d7b186 | 2024-03-02 11:00:29 -0800 | [diff] [blame] | 505 | VLOG(1) << "Got extrinsics for " << camera_node.camera_name() << " as\n" |
| 506 | << default_extrinsics.back().matrix(); |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 507 | |
Maxwell Henderson | 9d7b186 | 2024-03-02 11:00:29 -0800 | [diff] [blame] | 508 | detection_event_loops.back()->MakeWatcher( |
| 509 | camera_node.camera_name(), |
| 510 | [&reader, &detection_event_loops, camera_node](const TargetMap &map) { |
| 511 | aos::distributed_clock::time_point pi_distributed_time = |
| 512 | reader.event_loop_factory() |
| 513 | ->GetNodeEventLoopFactory( |
| 514 | detection_event_loops.back().get()->node()) |
| 515 | ->ToDistributedClock(aos::monotonic_clock::time_point( |
| 516 | aos::monotonic_clock::duration( |
| 517 | map.monotonic_timestamp_ns()))); |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 518 | |
Maxwell Henderson | 9d7b186 | 2024-03-02 11:00:29 -0800 | [diff] [blame] | 519 | HandleTargetMap(map, pi_distributed_time, camera_node.camera_name()); |
| 520 | }); |
| 521 | LOG(INFO) << "Created watcher for using the detection event loop for " |
| 522 | << camera_node.camera_name() << " and size " |
| 523 | << detection_event_loops.size(); |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 524 | } |
| 525 | |
| 526 | reader.event_loop_factory()->Run(); |
| 527 | |
| 528 | // Do quick check to see what averaged two-board pose for each pi is |
| 529 | // individually |
| 530 | CHECK_GT(two_board_extrinsics_list.size(), 0u) |
| 531 | << "Must have at least one view of both boards"; |
| 532 | int base_target_id = two_board_extrinsics_list[0].board_id; |
| 533 | VLOG(1) << "Base id for two_board_extrinsics_list is " << base_target_id; |
Maxwell Henderson | 9d7b186 | 2024-03-02 11:00:29 -0800 | [diff] [blame] | 534 | |
| 535 | for (auto camera_node : node_list) { |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 536 | std::vector<TimestampedPiDetection> pose_list; |
| 537 | for (auto ext : two_board_extrinsics_list) { |
| 538 | CHECK_EQ(base_target_id, ext.board_id) |
| 539 | << " All boards should have same reference id"; |
Maxwell Henderson | 9d7b186 | 2024-03-02 11:00:29 -0800 | [diff] [blame] | 540 | if (ext.pi_name == camera_node.camera_name()) { |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 541 | pose_list.push_back(ext); |
| 542 | } |
| 543 | } |
| 544 | Eigen::Affine3d avg_pose_from_pi = ComputeAveragePose(pose_list); |
Maxwell Henderson | 9d7b186 | 2024-03-02 11:00:29 -0800 | [diff] [blame] | 545 | VLOG(1) << "Estimate from " << camera_node.camera_name() << " with " |
| 546 | << pose_list.size() << " observations is:\n" |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 547 | << avg_pose_from_pi.matrix(); |
| 548 | } |
| 549 | Eigen::Affine3d H_boardA_boardB_avg = |
| 550 | ComputeAveragePose(two_board_extrinsics_list); |
| 551 | // TODO: Should probably do some outlier rejection |
Maxwell Henderson | 9d7b186 | 2024-03-02 11:00:29 -0800 | [diff] [blame] | 552 | VLOG(1) << "Estimate of two board pose using all nodes with " |
| 553 | << two_board_extrinsics_list.size() << " observations is:\n" |
| 554 | << H_boardA_boardB_avg.matrix() << "\n"; |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 555 | |
| 556 | // Next, compute the relative camera poses |
| 557 | LOG(INFO) << "Got " << detection_list.size() << " extrinsic observations"; |
| 558 | std::vector<Eigen::Affine3d> H_camera1_camera2_list; |
| 559 | std::vector<Eigen::Affine3d> updated_extrinsics; |
| 560 | // Use the first node's extrinsics as our base, and fix from there |
| 561 | updated_extrinsics.push_back(default_extrinsics[0]); |
Maxwell Henderson | 9d7b186 | 2024-03-02 11:00:29 -0800 | [diff] [blame] | 562 | LOG(INFO) << "Default extrinsic for camera " << node_list.at(0).camera_name() |
| 563 | << " is " << default_extrinsics[0].matrix(); |
| 564 | for (uint i = 0; i < node_list.size() - 1; i++) { |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 565 | H_camera1_camera2_list.clear(); |
| 566 | // Go through the list, and find successive pairs of cameras |
| 567 | for (auto [pose1, pose2] : detection_list) { |
Maxwell Henderson | 9d7b186 | 2024-03-02 11:00:29 -0800 | [diff] [blame] | 568 | if ((pose1.pi_name == node_list.at(i).camera_name()) && |
| 569 | (pose2.pi_name == node_list.at(i + 1).camera_name())) { |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 570 | Eigen::Affine3d H_camera1_boardA = pose1.H_camera_target; |
| 571 | // If pose1 isn't referenced to base_target_id, correct that |
| 572 | if (pose1.board_id != base_target_id) { |
| 573 | // pose1.H_camera_target references boardB, so map back to boardA |
| 574 | H_camera1_boardA = |
| 575 | pose1.H_camera_target * H_boardA_boardB_avg.inverse(); |
| 576 | } |
| 577 | |
| 578 | // Now, get the camera2->boardA map (notice it's boardA, same as |
| 579 | // camera1, so we can compute the difference based both on boardA) |
| 580 | Eigen::Affine3d H_camera2_boardA = pose2.H_camera_target; |
| 581 | // If pose2 isn't referenced to boardA (base_target_id), correct |
| 582 | // that |
| 583 | if (pose2.board_id != base_target_id) { |
| 584 | // pose2.H_camera_target references boardB, so map back to boardA |
| 585 | H_camera2_boardA = |
| 586 | pose2.H_camera_target * H_boardA_boardB_avg.inverse(); |
| 587 | } |
| 588 | |
| 589 | // Compute camera1->camera2 map |
| 590 | Eigen::Affine3d H_camera1_camera2 = |
| 591 | H_camera1_boardA * H_camera2_boardA.inverse(); |
| 592 | H_camera1_camera2_list.push_back(H_camera1_camera2); |
| 593 | VLOG(1) << "Map from camera " << pose1.pi_name << " and tag " |
| 594 | << pose1.board_id << " with observation: \n" |
| 595 | << pose1.H_camera_target.matrix() << "\n to camera " |
| 596 | << pose2.pi_name << " and tag " << pose2.board_id |
| 597 | << " with observation: \n" |
| 598 | << pose2.H_camera_target.matrix() << "\ngot map as\n" |
| 599 | << H_camera1_camera2.matrix(); |
| 600 | |
| 601 | Eigen::Affine3d H_world_board; |
| 602 | H_world_board = Eigen::Translation3d::Identity() * |
| 603 | Eigen::AngleAxisd(M_PI / 2.0, Eigen::Vector3d::UnitX()); |
| 604 | if (FLAGS_alt_view) { |
| 605 | H_world_board = Eigen::Translation3d(0.0, 0.0, 3.0); |
| 606 | } |
| 607 | |
| 608 | VLOG(2) << "Camera1 " << pose1.pi_name << " in world frame is \n" |
| 609 | << (H_world_board * H_camera1_boardA.inverse()).matrix(); |
| 610 | VLOG(2) << "Camera2 " << pose2.pi_name << " in world frame is \n" |
| 611 | << (H_world_board * H_camera2_boardA.inverse()).matrix(); |
| 612 | } |
| 613 | } |
| 614 | // TODO<Jim>: If we don't get any matches, we could just use default |
| 615 | // extrinsics |
| 616 | CHECK(H_camera1_camera2_list.size() > 0) |
Maxwell Henderson | 9d7b186 | 2024-03-02 11:00:29 -0800 | [diff] [blame] | 617 | << "Failed with zero poses for node " << node_list.at(i).camera_name() |
| 618 | << " and " << node_list.at(i + 1).camera_name(); |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 619 | if (H_camera1_camera2_list.size() > 0) { |
| 620 | Eigen::Affine3d H_camera1_camera2_avg = |
| 621 | ComputeAveragePose(H_camera1_camera2_list); |
Maxwell Henderson | 9d7b186 | 2024-03-02 11:00:29 -0800 | [diff] [blame] | 622 | LOG(INFO) << "From " << node_list.at(i).camera_name() << " to " |
| 623 | << node_list.at(i + 1).camera_name() << " found " |
| 624 | << H_camera1_camera2_list.size() |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 625 | << " observations, and the average pose is:\n" |
| 626 | << H_camera1_camera2_avg.matrix(); |
| 627 | Eigen::Affine3d H_camera1_camera2_default = |
| 628 | default_extrinsics[i].inverse() * default_extrinsics[i + 1]; |
| 629 | LOG(INFO) << "Compare this to that from default values:\n" |
| 630 | << H_camera1_camera2_default.matrix(); |
| 631 | Eigen::Affine3d H_camera1_camera2_diff = |
| 632 | H_camera1_camera2_avg * H_camera1_camera2_default.inverse(); |
| 633 | LOG(INFO) |
| 634 | << "Difference between averaged and default delta poses " |
| 635 | "has |T| = " |
| 636 | << H_camera1_camera2_diff.translation().norm() << "m and |R| = " |
| 637 | << Eigen::AngleAxisd(H_camera1_camera2_diff.rotation()).angle() |
| 638 | << " radians (" |
| 639 | << Eigen::AngleAxisd(H_camera1_camera2_diff.rotation()).angle() * |
| 640 | 180.0 / M_PI |
| 641 | << " degrees)"; |
| 642 | // Next extrinsic is just previous one * avg_delta_pose |
| 643 | Eigen::Affine3d next_extrinsic = |
| 644 | updated_extrinsics.back() * H_camera1_camera2_avg; |
| 645 | updated_extrinsics.push_back(next_extrinsic); |
Maxwell Henderson | 9d7b186 | 2024-03-02 11:00:29 -0800 | [diff] [blame] | 646 | LOG(INFO) << "Default Extrinsic for " << node_list.at(i + 1).camera_name() |
Jim Ostrowski | ddebdcb | 2024-02-29 22:25:36 -0800 | [diff] [blame] | 647 | << " is \n" |
Maxwell Henderson | 9d7b186 | 2024-03-02 11:00:29 -0800 | [diff] [blame] | 648 | << default_extrinsics[i + 1].matrix(); |
| 649 | LOG(INFO) << "--> Updated Extrinsic for " |
| 650 | << node_list.at(i + 1).camera_name() << " is \n" |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 651 | << next_extrinsic.matrix(); |
| 652 | |
| 653 | // Wirte out this extrinsic to a file |
| 654 | flatbuffers::FlatBufferBuilder fbb; |
| 655 | flatbuffers::Offset<flatbuffers::Vector<float>> data_offset = |
| 656 | fbb.CreateVector( |
| 657 | frc971::vision::MatrixToVector(next_extrinsic.matrix())); |
| 658 | calibration::TransformationMatrix::Builder matrix_builder(fbb); |
| 659 | matrix_builder.add_data(data_offset); |
| 660 | flatbuffers::Offset<calibration::TransformationMatrix> |
| 661 | extrinsic_calibration_offset = matrix_builder.Finish(); |
| 662 | |
| 663 | calibration::CameraCalibration::Builder calibration_builder(fbb); |
| 664 | calibration_builder.add_fixed_extrinsics(extrinsic_calibration_offset); |
| 665 | const aos::realtime_clock::time_point realtime_now = |
| 666 | aos::realtime_clock::now(); |
| 667 | calibration_builder.add_calibration_timestamp( |
| 668 | realtime_now.time_since_epoch().count()); |
| 669 | fbb.Finish(calibration_builder.Finish()); |
| 670 | aos::FlatbufferDetachedBuffer<calibration::CameraCalibration> |
| 671 | solved_extrinsics = fbb.Release(); |
| 672 | |
| 673 | aos::FlatbufferDetachedBuffer< |
| 674 | frc971::vision::calibration::CameraCalibration> |
| 675 | cal_copy = aos::RecursiveCopyFlatBuffer(calibration_list[i + 1]); |
| 676 | cal_copy.mutable_message()->clear_fixed_extrinsics(); |
| 677 | cal_copy.mutable_message()->clear_calibration_timestamp(); |
| 678 | aos::FlatbufferDetachedBuffer<calibration::CameraCalibration> |
| 679 | merged_calibration = aos::MergeFlatBuffers( |
| 680 | &cal_copy.message(), &solved_extrinsics.message()); |
| 681 | |
| 682 | std::stringstream time_ss; |
| 683 | time_ss << realtime_now; |
| 684 | |
Jim Ostrowski | ddebdcb | 2024-02-29 22:25:36 -0800 | [diff] [blame] | 685 | // TODO: This breaks because we're naming orin1 and imu as nodes |
| 686 | // Assumes camera_list name is of form "/orin#/cameraX" to create |
| 687 | // calibration filename |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 688 | const std::string calibration_filename = |
| 689 | FLAGS_output_folder + |
Maxwell Henderson | 9d7b186 | 2024-03-02 11:00:29 -0800 | [diff] [blame] | 690 | absl::StrFormat( |
| 691 | "/calibration_orin-%d-%s-%d_cam-%s_%s.json", FLAGS_team_number, |
| 692 | node_list.at(i + 1).camera_name().substr(5, 1), |
| 693 | calibration_list[i + 1]->camera_number(), |
| 694 | calibration_list[i + 1]->camera_id()->data(), time_ss.str()); |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 695 | |
| 696 | LOG(INFO) << calibration_filename << " -> " |
| 697 | << aos::FlatbufferToJson(merged_calibration, |
| 698 | {.multi_line = true}); |
| 699 | |
| 700 | aos::util::WriteStringToFileOrDie( |
| 701 | calibration_filename, |
| 702 | aos::FlatbufferToJson(merged_calibration, {.multi_line = true})); |
| 703 | } |
| 704 | } |
| 705 | |
| 706 | // Cleanup |
| 707 | for (uint i = 0; i < image_callbacks.size(); i++) { |
| 708 | delete charuco_extractors[i]; |
| 709 | delete image_callbacks[i]; |
| 710 | } |
| 711 | } |
Jim Ostrowski | ddebdcb | 2024-02-29 22:25:36 -0800 | [diff] [blame] | 712 | } // namespace y2024::vision |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 713 | |
| 714 | int main(int argc, char **argv) { |
| 715 | aos::InitGoogle(&argc, &argv); |
Jim Ostrowski | ddebdcb | 2024-02-29 22:25:36 -0800 | [diff] [blame] | 716 | y2024::vision::ExtrinsicsMain(argc, argv); |
Jim Ostrowski | 9bf206a | 2024-01-26 23:31:58 -0800 | [diff] [blame] | 717 | } |