blob: dee66469a46a93761ae96a70d06d2f62069ffd8c [file] [log] [blame]
Jim Ostrowski9bf206a2024-01-26 23:31:58 -08001#include <numeric>
2
3#include "aos/configuration.h"
4#include "aos/events/logging/log_reader.h"
5#include "aos/events/simulated_event_loop.h"
6#include "aos/init.h"
7#include "aos/util/mcap_logger.h"
8#include "frc971/control_loops/pose.h"
9#include "frc971/control_loops/quaternion_utils.h"
10#include "frc971/vision/calibration_generated.h"
11#include "frc971/vision/charuco_lib.h"
12#include "frc971/vision/extrinsics_calibration.h"
13#include "frc971/vision/target_mapper.h"
Jim Ostrowski33208982024-03-02 15:01:45 -080014#include "frc971/vision/vision_util_lib.h"
Jim Ostrowski9bf206a2024-01-26 23:31:58 -080015#include "frc971/vision/visualize_robot.h"
16// clang-format off
17// OpenCV eigen files must be included after Eigen includes
18#include "opencv2/aruco.hpp"
19#include "opencv2/calib3d.hpp"
20#include "opencv2/core/eigen.hpp"
21#include "opencv2/features2d.hpp"
22#include "opencv2/highgui.hpp"
23#include "opencv2/highgui/highgui.hpp"
24#include "opencv2/imgproc.hpp"
25// clang-format on
26#include "frc971/constants/constants_sender_lib.h"
27#include "frc971/vision/vision_util_lib.h"
28#include "y2024/constants/simulated_constants_sender.h"
29#include "y2024/vision/vision_util.h"
30
// Command-line flags controlling visualization, config overrides, target
// filtering thresholds, and output location for this calibration tool.
DEFINE_bool(alt_view, false,
            "If true, show visualization from field level, rather than above");
DEFINE_string(config, "",
              "If set, override the log's config file with this one.");
DEFINE_string(constants_path, "y2024/constants/constants.json",
              "Path to the constant file");
DEFINE_double(max_pose_error, 5e-5,
              "Throw out target poses with a higher pose error than this");
DEFINE_double(
    max_pose_error_ratio, 0.4,
    "Throw out target poses with a higher pose error ratio than this");
DEFINE_string(output_folder, "/tmp",
              "Directory in which to store the updated calibration files");
DEFINE_string(target_type, "charuco_diamond",
              "Type of target being used [aruco, charuco, charuco_diamond]");
DEFINE_int32(team_number, 0,
             "Required: Use the calibration for a node with this team number");
DEFINE_uint64(
    wait_key, 1,
    "Time in ms to wait between images (0 to wait indefinitely until click)");
DEFINE_bool(robot, false,
            "If true we're calibrating extrinsics for the robot, use the "
            "correct node path for the robot.");

// Defined in another translation unit (presumably charuco_lib -- TODO
// confirm); they bound the range of april tag ids accepted below.
DECLARE_int32(min_target_id);
DECLARE_int32(max_target_id);
57
58// Calibrate extrinsic relationship between cameras using two targets
59// seen jointly between cameras. Uses two types of information: 1)
60// when a single camera sees two targets, we estimate the pose between
61// targets, and 2) when two separate cameras each see a target, we can
62// use the pose between targets to estimate the pose between cameras.
63
64// We then create the extrinsics for the robot by starting with the
65// given extrinsic for camera 1 (between imu/robot and camera frames)
66// and then map each subsequent camera based on the data collected and
67// the extrinsic poses computed here.
68
Jim Ostrowskiddebdcb2024-02-29 22:25:36 -080069// TODO<Jim>: Not currently using estimate from first camera to last camera--
70// should do full estimation, and probably also include camera->imu extrinsics
71// from all cameras, not just first camera
Jim Ostrowski9bf206a2024-01-26 23:31:58 -080072
Jim Ostrowskiddebdcb2024-02-29 22:25:36 -080073namespace y2024::vision {
Jim Ostrowski9bf206a2024-01-26 23:31:58 -080074using frc971::vision::DataAdapter;
75using frc971::vision::ImageCallback;
76using frc971::vision::PoseUtils;
77using frc971::vision::TargetMap;
78using frc971::vision::TargetMapper;
79using frc971::vision::VisualizeRobot;
80namespace calibration = frc971::vision::calibration;
81
// Image capture period in ms (cameras run at 60 Hz).  Used to decide
// whether two single-board detections are close enough in time to pair.
static constexpr double kImagePeriodMs =
    1.0 / 60.0 * 1000.0;  // Image capture period in ms
Jim Ostrowski9bf206a2024-01-26 23:31:58 -080084
85// Change reference frame from camera to robot
86Eigen::Affine3d CameraToRobotDetection(Eigen::Affine3d H_camera_target,
87 Eigen::Affine3d extrinsics) {
88 const Eigen::Affine3d H_robot_camera = extrinsics;
89 const Eigen::Affine3d H_robot_target = H_robot_camera * H_camera_target;
90 return H_robot_target;
91}
92
// One target observation from one camera, stamped with the distributed
// clock time of the image it came from.  Also reused to store
// board-to-board transforms (see HandlePoses), in which case
// H_camera_target holds H_boardA_boardB.
struct TimestampedPiDetection {
  aos::distributed_clock::time_point time;
  // Pose of target relative to the camera (H_camera_target)
  Eigen::Affine3d H_camera_target;
  // Channel name of the camera that made the observation (e.g.
  // "/orin1/camera0"); historically named after the "pi" nodes.
  std::string pi_name;
  // Id of the (lower-id, for two-board observations) april tag board seen.
  int board_id;
};
101
// Identifies one camera as a (node name, camera index) pair, e.g.
// {"orin1", 0} -> channel name "/orin1/camera0".
struct CameraNode {
  std::string node_name;
  int camera_number;

  // Channel name for this camera, e.g. "/imu/camera1".
  // Returns a non-const value (was `const std::string`) so callers can
  // move from the result; const-value returns block the move and serve no
  // purpose (clang-tidy readability-const-return-type).
  std::string camera_name() const {
    return "/" + node_name + "/camera" + std::to_string(camera_number);
  }
};
110
111std::vector<CameraNode> CreateNodeList() {
112 std::vector<CameraNode> list;
113
114 list.push_back({.node_name = "orin1", .camera_number = 0});
115 list.push_back({.node_name = "orin1", .camera_number = 1});
116 list.push_back({.node_name = "imu", .camera_number = 0});
117 list.push_back({.node_name = "imu", .camera_number = 1});
118
119 return list;
120}
121
122std::vector<CameraNode> node_list(CreateNodeList());
123std::map<std::string, int> CreateOrderingMap() {
124 std::map<std::string, int> map;
125
126 for (uint i = 0; i < node_list.size(); i++) {
127 map.insert({node_list.at(i).camera_name(), i});
128 }
129
130 return map;
131}
132std::map<std::string, int> ordering_map(CreateOrderingMap());
133
// Most recent single-board detection; used to pair with the next
// single-board detection from a different camera that is close in time.
TimestampedPiDetection last_observation;
// Pairs of (nearly) simultaneous single-board detections from two
// different cameras, sorted into canonical camera order.
std::vector<std::pair<TimestampedPiDetection, TimestampedPiDetection>>
    detection_list;
// Observations of the board-A-to-board-B transform, collected whenever a
// single camera sees both boards in one image.
std::vector<TimestampedPiDetection> two_board_extrinsics_list;
// Shared visualization canvas (used when --visualize is set).
VisualizeRobot vis_robot_;
139
140// TODO<jim>: Implement variance calcs
141Eigen::Affine3d ComputeAveragePose(
142 std::vector<Eigen::Vector3d> &translation_list,
143 std::vector<Eigen::Vector4d> &rotation_list,
144 Eigen::Vector3d *translation_variance = nullptr,
145 Eigen::Vector3d *rotation_variance = nullptr) {
146 Eigen::Vector3d avg_translation =
147 std::accumulate(translation_list.begin(), translation_list.end(),
148 Eigen::Vector3d{0, 0, 0}) /
149 translation_list.size();
150
151 // TODO<Jim>: Use QuaternionMean from quaternion_utils.cc (but this
152 // requires a fixed number of quaternions to be averaged
153 Eigen::Vector4d avg_rotation =
154 std::accumulate(rotation_list.begin(), rotation_list.end(),
155 Eigen::Vector4d{0, 0, 0, 0}) /
156 rotation_list.size();
157 // Normalize, so it's a valid quaternion
158 avg_rotation = avg_rotation / avg_rotation.norm();
159 Eigen::Quaterniond avg_rotation_q{avg_rotation[0], avg_rotation[1],
160 avg_rotation[2], avg_rotation[3]};
161 Eigen::Translation3d translation(avg_translation);
162 Eigen::Affine3d return_pose = translation * avg_rotation_q;
163 if (translation_variance != nullptr) {
164 *translation_variance = Eigen::Vector3d();
165 }
166 if (rotation_variance != nullptr) {
167 LOG(INFO) << *rotation_variance;
168 }
169
170 return return_pose;
171}
172
173Eigen::Affine3d ComputeAveragePose(
174 std::vector<Eigen::Affine3d> &pose_list,
175 Eigen::Vector3d *translation_variance = nullptr,
176 Eigen::Vector3d *rotation_variance = nullptr) {
177 std::vector<Eigen::Vector3d> translation_list;
178 std::vector<Eigen::Vector4d> rotation_list;
179
180 for (Eigen::Affine3d pose : pose_list) {
181 translation_list.push_back(pose.translation());
182 Eigen::Quaterniond quat(pose.rotation().matrix());
183 rotation_list.push_back(
184 Eigen::Vector4d(quat.w(), quat.x(), quat.y(), quat.z()));
185 }
186
187 return ComputeAveragePose(translation_list, rotation_list,
188 translation_variance, rotation_variance);
189}
190
191Eigen::Affine3d ComputeAveragePose(
192 std::vector<TimestampedPiDetection> &pose_list,
193 Eigen::Vector3d *translation_variance = nullptr,
194 Eigen::Vector3d *rotation_variance = nullptr) {
195 std::vector<Eigen::Vector3d> translation_list;
196 std::vector<Eigen::Vector4d> rotation_list;
197
198 for (TimestampedPiDetection pose : pose_list) {
199 translation_list.push_back(pose.H_camera_target.translation());
200 Eigen::Quaterniond quat(pose.H_camera_target.rotation().matrix());
201 rotation_list.push_back(
202 Eigen::Vector4d(quat.w(), quat.x(), quat.y(), quat.z()));
203 }
204 return ComputeAveragePose(translation_list, rotation_list,
205 translation_variance, rotation_variance);
206}
207
// Processes the target detections found in one camera frame.
//
// Two cases are handled:
//  1) Two boards in one image: record the board-to-board transform
//     (referenced to the lower-id board) in two_board_extrinsics_list.
//  2) One board in the image: if the previous single-board observation
//     (global last_observation) is within half an image period of this
//     one, store the pair in detection_list (sorted into canonical camera
//     order via ordering_map) for later camera-to-camera estimation;
//     otherwise remember this observation as last_observation.
//
// rgb_image may be empty (it is when called from HandleTargetMap); it is
// only displayed when --visualize is set.
void HandlePoses(cv::Mat rgb_image,
                 std::vector<TargetMapper::TargetPose> target_poses,
                 aos::distributed_clock::time_point distributed_eof,
                 std::string node_name) {
  // This is used to transform points for visualization
  // Assumes targets are aligned with x->right, y->up, z->out of board
  Eigen::Affine3d H_world_board;
  H_world_board = Eigen::Translation3d::Identity() *
                  Eigen::AngleAxisd(M_PI / 2.0, Eigen::Vector3d::UnitX());
  if (FLAGS_alt_view) {
    // Don't rotate -- this is like viewing from the side
    H_world_board = Eigen::Translation3d(0.0, 0.0, 3.0);
  }

  bool draw_vis = false;
  CHECK_LE(target_poses.size(), 2u)
      << "Can't handle more than two tags in field of view";
  if (target_poses.size() == 2) {
    draw_vis = true;
    VLOG(1) << "Saw two boards in same view from " << node_name;
    int from_index = 0;
    int to_index = 1;
    // Handle when we see two boards at once
    // We'll store them referenced to the lower id board
    if (target_poses[from_index].id > target_poses[to_index].id) {
      std::swap<int>(from_index, to_index);
    }

    // Create "from" (A) and "to" (B) transforms
    Eigen::Affine3d H_camera_boardA =
        PoseUtils::Pose3dToAffine3d(target_poses[from_index].pose);
    Eigen::Affine3d H_camera_boardB =
        PoseUtils::Pose3dToAffine3d(target_poses[to_index].pose);

    // boardA -> boardB, computed by going through the camera frame.
    Eigen::Affine3d H_boardA_boardB =
        H_camera_boardA.inverse() * H_camera_boardB;

    TimestampedPiDetection boardA_boardB{
        .time = distributed_eof,
        .H_camera_target = H_boardA_boardB,
        .pi_name = node_name,
        .board_id = target_poses[from_index].id};

    VLOG(1) << "Map from board " << from_index << " to " << to_index << " is\n"
            << H_boardA_boardB.matrix();
    // Store this observation of the transform between two boards
    two_board_extrinsics_list.push_back(boardA_boardB);

    if (FLAGS_visualize) {
      vis_robot_.DrawFrameAxes(
          H_world_board,
          std::string("board ") + std::to_string(target_poses[from_index].id),
          cv::Scalar(0, 255, 0));
      vis_robot_.DrawFrameAxes(
          H_world_board * boardA_boardB.H_camera_target,
          std::string("board ") + std::to_string(target_poses[to_index].id),
          cv::Scalar(255, 0, 0));
      VLOG(2) << "Detection map from camera " << node_name << " to board "
              << target_poses[from_index].id << " is\n"
              << H_camera_boardA.matrix() << "\n and inverse is\n"
              << H_camera_boardA.inverse().matrix()
              << "\n and with world to board rotation is\n"
              << H_world_board * H_camera_boardA.inverse().matrix();
      vis_robot_.DrawRobotOutline(H_world_board * H_camera_boardA.inverse(),
                                  node_name, cv::Scalar(0, 0, 255));
    }
  } else if (target_poses.size() == 1) {
    VLOG(1) << "Saw single board in camera " << node_name;
    Eigen::Affine3d H_camera2_board2 =
        PoseUtils::Pose3dToAffine3d(target_poses[0].pose);
    TimestampedPiDetection new_observation{.time = distributed_eof,
                                           .H_camera_target = H_camera2_board2,
                                           .pi_name = node_name,
                                           .board_id = target_poses[0].id};

    // Only take two observations if they're within half an image cycle of each
    // other (i.e., as close in time as possible).  Times are in ns, hence
    // the 1e6 factor on kImagePeriodMs / 2.
    if (std::abs((distributed_eof - last_observation.time).count()) <
        kImagePeriodMs / 2.0 * 1000000.0) {
      // Sort by pi numbering, since this is how we will handle them
      // NOTE(review): if both observations came from the same camera
      // (equal ordering_map values), new_pair would stay default-constructed
      // and an empty pair would be stored -- presumably impossible given
      // the timing gate; verify.
      std::pair<TimestampedPiDetection, TimestampedPiDetection> new_pair;
      if (ordering_map.at(last_observation.pi_name) <
          ordering_map.at(new_observation.pi_name)) {
        new_pair = std::pair(last_observation, new_observation);
      } else if (ordering_map.at(last_observation.pi_name) >
                 ordering_map.at(new_observation.pi_name)) {
        new_pair = std::pair(new_observation, last_observation);
      }

      detection_list.push_back(new_pair);

      // This bit is just for visualization and checking purposes-- use the
      // last two-board observation to figure out the current estimate
      // between the two cameras
      if (FLAGS_visualize && two_board_extrinsics_list.size() > 0) {
        draw_vis = true;
        TimestampedPiDetection &last_two_board_ext =
            two_board_extrinsics_list[two_board_extrinsics_list.size() - 1];
        Eigen::Affine3d &H_boardA_boardB = last_two_board_ext.H_camera_target;
        int boardA_boardB_id = last_two_board_ext.board_id;

        TimestampedPiDetection camera1_boardA = new_pair.first;
        TimestampedPiDetection camera2_boardB = new_pair.second;
        // If camera1_boardA doesn't point to the first target, then swap
        // these two
        if (camera1_boardA.board_id != boardA_boardB_id) {
          camera1_boardA = new_pair.second;
          camera2_boardB = new_pair.first;
        }
        VLOG(1) << "Camera " << camera1_boardA.pi_name << " seeing board "
                << camera1_boardA.board_id << " and camera "
                << camera2_boardB.pi_name << " seeing board "
                << camera2_boardB.board_id;

        vis_robot_.DrawRobotOutline(
            H_world_board * camera1_boardA.H_camera_target.inverse(),
            camera1_boardA.pi_name, cv::Scalar(0, 0, 255));
        vis_robot_.DrawRobotOutline(
            H_world_board * H_boardA_boardB *
                camera2_boardB.H_camera_target.inverse(),
            camera2_boardB.pi_name, cv::Scalar(128, 128, 0));
        vis_robot_.DrawFrameAxes(
            H_world_board,
            std::string("Board ") + std::to_string(last_two_board_ext.board_id),
            cv::Scalar(0, 255, 0));
        vis_robot_.DrawFrameAxes(H_world_board * H_boardA_boardB, "Board B",
                                 cv::Scalar(255, 0, 0));
      }
      VLOG(1) << "Storing observation between " << new_pair.first.pi_name
              << ", target " << new_pair.first.board_id << " and "
              << new_pair.second.pi_name << ", target "
              << new_pair.second.board_id;
    } else {
      VLOG(2) << "Storing observation for " << node_name << " at time "
              << distributed_eof;
      last_observation = new_observation;
    }
  }

  if (FLAGS_visualize) {
    if (!rgb_image.empty()) {
      std::string image_name = node_name + " Image";
      cv::Mat rgb_small;
      cv::resize(rgb_image, rgb_small, cv::Size(), 0.5, 0.5);
      cv::imshow(image_name, rgb_small);
      cv::waitKey(FLAGS_wait_key);
    }

    if (draw_vis) {
      cv::imshow("View", vis_robot_.image_);
      cv::waitKey(FLAGS_wait_key);
      vis_robot_.ClearImage();
    }
  }
}
363
// Callback for april-tag TargetMap detections from one camera.  Filters
// out detections whose id is outside [min_target_id, max_target_id] or
// whose pose error / pose error ratio exceeds the flag thresholds, then
// converts the survivors to TargetPoses and forwards them to HandlePoses
// (with an empty image, since only pose data is available here).
void HandleTargetMap(const TargetMap &map,
                     aos::distributed_clock::time_point distributed_eof,
                     std::string node_name) {
  VLOG(1) << "Got april tag map call from camera " << node_name;
  // Create empty RGB image in this case
  cv::Mat rgb_image;
  std::vector<TargetMapper::TargetPose> target_poses;

  for (const auto *target_pose_fbs : *map.target_poses()) {
    // Skip detections with invalid ids
    if (static_cast<TargetMapper::TargetId>(target_pose_fbs->id()) <
            FLAGS_min_target_id ||
        static_cast<TargetMapper::TargetId>(target_pose_fbs->id()) >
            FLAGS_max_target_id) {
      LOG(INFO) << "Skipping tag with invalid id of " << target_pose_fbs->id();
      continue;
    }

    // Skip detections with high pose errors
    if (target_pose_fbs->pose_error() > FLAGS_max_pose_error) {
      LOG(INFO) << "Skipping tag " << target_pose_fbs->id()
                << " due to pose error of " << target_pose_fbs->pose_error();
      continue;
    }
    // Skip detections with high pose error ratios
    if (target_pose_fbs->pose_error_ratio() > FLAGS_max_pose_error_ratio) {
      LOG(INFO) << "Skipping tag " << target_pose_fbs->id()
                << " due to pose error ratio of "
                << target_pose_fbs->pose_error_ratio();
      continue;
    }

    const TargetMapper::TargetPose target_pose =
        PoseUtils::TargetPoseFromFbs(*target_pose_fbs);

    target_poses.emplace_back(target_pose);

    Eigen::Affine3d H_camera_target =
        PoseUtils::Pose3dToAffine3d(target_pose.pose);
    VLOG(1) << node_name << " saw target " << target_pose.id
            << " from TargetMap at timestamp " << distributed_eof
            << " with pose = " << H_camera_target.matrix();
  }
  HandlePoses(rgb_image, target_poses, distributed_eof, node_name);
}
409
// Callback for raw camera images: runs the charuco extractor on the image,
// converts each detected board pose (rvec/tvec) into a TargetPose, and
// forwards them -- along with the image, for visualization -- to
// HandlePoses.
// NOTE(review): no caller of this function is visible in this file;
// detections currently arrive via HandleTargetMap.  Verify before removal.
void HandleImage(aos::EventLoop *event_loop, cv::Mat rgb_image,
                 const aos::monotonic_clock::time_point eof,
                 aos::distributed_clock::time_point distributed_eof,
                 frc971::vision::CharucoExtractor &charuco_extractor,
                 std::string node_name) {
  std::vector<cv::Vec4i> charuco_ids;
  std::vector<std::vector<cv::Point2f>> charuco_corners;
  bool valid = false;
  std::vector<Eigen::Vector3d> rvecs_eigen;
  std::vector<Eigen::Vector3d> tvecs_eigen;
  // Why eof vs. distributed_eof?
  charuco_extractor.ProcessImage(rgb_image, eof, event_loop->monotonic_now(),
                                 charuco_ids, charuco_corners, valid,
                                 rvecs_eigen, tvecs_eigen);
  // Bail out if the extractor found poses but flagged the result invalid.
  if (rvecs_eigen.size() > 0 && !valid) {
    LOG(WARNING) << "Charuco extractor returned not valid";
    return;
  }

  std::vector<TargetMapper::TargetPose> target_poses;
  for (size_t i = 0; i < tvecs_eigen.size(); i++) {
    // Convert the rotation vector into a quaternion to build a Pose3d.
    Eigen::Quaterniond rotation(
        frc971::controls::ToQuaternionFromRotationVector(rvecs_eigen[i]));
    ceres::examples::Pose3d pose(Eigen::Vector3d(tvecs_eigen[i]), rotation);
    // The first element of the charuco id vector is used as the target id.
    TargetMapper::TargetPose target_pose{charuco_ids[i][0], pose};
    target_poses.emplace_back(target_pose);

    Eigen::Affine3d H_camera_target = PoseUtils::Pose3dToAffine3d(pose);
    VLOG(2) << node_name << " saw target " << target_pose.id
            << " from image at timestamp " << distributed_eof
            << " with pose = " << H_camera_target.matrix();
  }
  HandlePoses(rgb_image, target_poses, distributed_eof, node_name);
}
444
// Main routine: replays the given logfiles, collects target detections
// from every camera in node_list, estimates successive camera-to-camera
// transforms from paired observations, and writes an updated calibration
// (extrinsics) file for each camera after the first, chained off the first
// camera's default extrinsics.
void ExtrinsicsMain(int argc, char *argv[]) {
  vis_robot_ = VisualizeRobot(cv::Size(1000, 1000));
  vis_robot_.ClearImage();
  const double kFocalLength = 1000.0;
  const int kImageWidth = 1000;
  vis_robot_.SetDefaultViewpoint(kImageWidth, kFocalLength);

  // Optionally override the log's config with the --config flag.
  std::optional<aos::FlatbufferDetachedBuffer<aos::Configuration>> config =
      (FLAGS_config.empty()
           ? std::nullopt
           : std::make_optional(aos::configuration::ReadConfig(FLAGS_config)));

  // open logfiles
  aos::logger::LogReader reader(
      aos::logger::SortParts(aos::logger::FindLogs(argc, argv)),
      config.has_value() ? &config->message() : nullptr);

  // Remap constants channels so simulated constants can be injected below.
  reader.RemapLoggedChannel("/imu/constants", "y2024.Constants");
  reader.RemapLoggedChannel("/orin1/constants", "y2024.Constants");
  if (FLAGS_robot) {
    reader.RemapLoggedChannel("/roborio/constants", "y2024.Constants");
  }
  reader.Register();

  y2024::SendSimulationConstants(reader.event_loop_factory(), FLAGS_team_number,
                                 FLAGS_constants_path);

  VLOG(1) << "Using target type " << FLAGS_target_type;

  std::vector<const calibration::CameraCalibration *> calibration_list;

  std::vector<std::unique_ptr<aos::EventLoop>> detection_event_loops;
  // NOTE(review): charuco_extractors and image_callbacks are declared but
  // never populated in this function (see the cleanup loop at the bottom).
  std::vector<frc971::vision::CharucoExtractor *> charuco_extractors;
  std::vector<frc971::vision::ImageCallback *> image_callbacks;
  std::vector<Eigen::Affine3d> default_extrinsics;

  // Set up one detection event loop + TargetMap watcher per camera, and
  // collect each camera's default extrinsics from the constants.
  for (const CameraNode &camera_node : node_list) {
    const aos::Node *pi = aos::configuration::GetNode(
        reader.configuration(), camera_node.node_name.c_str());

    detection_event_loops.emplace_back(
        reader.event_loop_factory()->MakeEventLoop(
            (camera_node.camera_name() + "_detection").c_str(), pi));
    aos::EventLoop *const event_loop = detection_event_loops.back().get();
    frc971::constants::ConstantsFetcher<y2024::Constants> constants_fetcher(
        event_loop);
    // Get the calibration for this orin/camera pair
    const calibration::CameraCalibration *calibration =
        y2024::vision::FindCameraCalibration(constants_fetcher.constants(),
                                             camera_node.node_name,
                                             camera_node.camera_number);
    calibration_list.push_back(calibration);

    // Extract the extrinsics from the calibration, and save as "defaults"
    cv::Mat extrinsics_cv =
        frc971::vision::CameraExtrinsics(calibration).value();
    Eigen::Matrix4d extrinsics_matrix;
    cv::cv2eigen(extrinsics_cv, extrinsics_matrix);
    const auto ext_H_robot_pi = Eigen::Affine3d(extrinsics_matrix);
    default_extrinsics.emplace_back(ext_H_robot_pi);

    VLOG(1) << "Got extrinsics for " << camera_node.camera_name() << " as\n"
            << default_extrinsics.back().matrix();

    // Convert each TargetMap's capture timestamp to the distributed clock
    // and feed the detection to HandleTargetMap.
    event_loop->MakeWatcher(
        camera_node.camera_name(),
        [&reader, event_loop, camera_node](const TargetMap &map) {
          aos::distributed_clock::time_point pi_distributed_time =
              reader.event_loop_factory()
                  ->GetNodeEventLoopFactory(event_loop->node())
                  ->ToDistributedClock(aos::monotonic_clock::time_point(
                      aos::monotonic_clock::duration(
                          map.monotonic_timestamp_ns())));

          HandleTargetMap(map, pi_distributed_time, camera_node.camera_name());
        });
    VLOG(1) << "Created watcher for using the detection event loop for "
            << camera_node.camera_name() << " and size "
            << detection_event_loops.size();
  }

  // Replay the whole log; the watchers above populate the global
  // detection_list and two_board_extrinsics_list.
  reader.event_loop_factory()->Run();

  // Do quick check to see what averaged two-board pose for each pi is
  // individually
  CHECK_GT(two_board_extrinsics_list.size(), 0u)
      << "Must have at least one view of both boards";
  int base_target_id = two_board_extrinsics_list[0].board_id;
  VLOG(1) << "Base id for two_board_extrinsics_list is " << base_target_id;

  for (auto camera_node : node_list) {
    std::vector<TimestampedPiDetection> pose_list;
    for (auto ext : two_board_extrinsics_list) {
      CHECK_EQ(base_target_id, ext.board_id)
          << " All boards should have same reference id";
      if (ext.pi_name == camera_node.camera_name()) {
        pose_list.push_back(ext);
      }
    }
    Eigen::Affine3d avg_pose_from_pi = ComputeAveragePose(pose_list);
    VLOG(1) << "Estimate from " << camera_node.camera_name() << " with "
            << pose_list.size() << " observations is:\n"
            << avg_pose_from_pi.matrix();
  }
  // Average the board-to-board transform over all cameras' observations.
  Eigen::Affine3d H_boardA_boardB_avg =
      ComputeAveragePose(two_board_extrinsics_list);
  // TODO: Should probably do some outlier rejection
  VLOG(1) << "Estimate of two board pose using all nodes with "
          << two_board_extrinsics_list.size() << " observations is:\n"
          << H_boardA_boardB_avg.matrix() << "\n";

  // Next, compute the relative camera poses
  LOG(INFO) << "Got " << detection_list.size() << " extrinsic observations";
  std::vector<Eigen::Affine3d> H_camera1_camera2_list;
  std::vector<Eigen::Affine3d> updated_extrinsics;
  // Use the first node's extrinsics as our base, and fix from there
  updated_extrinsics.push_back(default_extrinsics[0]);
  LOG(INFO) << "Default extrinsic for camera " << node_list.at(0).camera_name()
            << " is " << default_extrinsics[0].matrix();
  for (uint i = 0; i < node_list.size() - 1; i++) {
    H_camera1_camera2_list.clear();
    // Go through the list, and find successive pairs of cameras
    // We'll be calculating and writing the second of the pair (the i+1'th
    // camera)
    for (auto [pose1, pose2] : detection_list) {
      if ((pose1.pi_name == node_list.at(i).camera_name()) &&
          (pose2.pi_name == node_list.at(i + 1).camera_name())) {
        Eigen::Affine3d H_camera1_boardA = pose1.H_camera_target;
        // If pose1 isn't referenced to base_target_id, correct that
        if (pose1.board_id != base_target_id) {
          // pose1.H_camera_target references boardB, so map back to boardA
          H_camera1_boardA =
              pose1.H_camera_target * H_boardA_boardB_avg.inverse();
        }

        // Now, get the camera2->boardA map (notice it's boardA, same as
        // camera1, so we can compute the difference between the cameras with
        // both referenced to boardA)
        Eigen::Affine3d H_camera2_boardA = pose2.H_camera_target;
        // If pose2 isn't referenced to boardA (base_target_id), correct
        // that
        if (pose2.board_id != base_target_id) {
          // pose2.H_camera_target references boardB, so map back to boardA
          H_camera2_boardA =
              pose2.H_camera_target * H_boardA_boardB_avg.inverse();
        }

        // Compute camera1->camera2 map
        Eigen::Affine3d H_camera1_camera2 =
            H_camera1_boardA * H_camera2_boardA.inverse();
        H_camera1_camera2_list.push_back(H_camera1_camera2);
        VLOG(1) << "Map from camera " << pose1.pi_name << " and tag "
                << pose1.board_id << " with observation: \n"
                << pose1.H_camera_target.matrix() << "\n to camera "
                << pose2.pi_name << " and tag " << pose2.board_id
                << " with observation: \n"
                << pose2.H_camera_target.matrix() << "\ngot map as\n"
                << H_camera1_camera2.matrix();

        Eigen::Affine3d H_world_board;
        H_world_board = Eigen::Translation3d::Identity() *
                        Eigen::AngleAxisd(M_PI / 2.0, Eigen::Vector3d::UnitX());
        if (FLAGS_alt_view) {
          H_world_board = Eigen::Translation3d(0.0, 0.0, 3.0);
        }

        VLOG(2) << "Camera1 " << pose1.pi_name << " in world frame is \n"
                << (H_world_board * H_camera1_boardA.inverse()).matrix();
        VLOG(2) << "Camera2 " << pose2.pi_name << " in world frame is \n"
                << (H_world_board * H_camera2_boardA.inverse()).matrix();
      }
    }
    // TODO<Jim>: If we don't get any matches, we could just use default
    // extrinsics
    // NOTE(review): the CHECK below makes the following `if` always true.
    CHECK(H_camera1_camera2_list.size() > 0)
        << "Failed with zero poses for node " << node_list.at(i).camera_name()
        << " and " << node_list.at(i + 1).camera_name();
    if (H_camera1_camera2_list.size() > 0) {
      Eigen::Affine3d H_camera1_camera2_avg =
          ComputeAveragePose(H_camera1_camera2_list);
      LOG(INFO) << "From " << node_list.at(i).camera_name() << " to "
                << node_list.at(i + 1).camera_name() << " found "
                << H_camera1_camera2_list.size()
                << " observations, and the average pose is:\n"
                << H_camera1_camera2_avg.matrix();
      Eigen::Affine3d H_camera1_camera2_default =
          default_extrinsics[i].inverse() * default_extrinsics[i + 1];
      LOG(INFO) << "Compare this to that from default values:\n"
                << H_camera1_camera2_default.matrix();
      Eigen::Affine3d H_camera1_camera2_diff =
          H_camera1_camera2_avg * H_camera1_camera2_default.inverse();
      LOG(INFO)
          << "Difference between averaged and default delta poses "
             "has |T| = "
          << H_camera1_camera2_diff.translation().norm() << "m and |R| = "
          << Eigen::AngleAxisd(H_camera1_camera2_diff.rotation()).angle()
          << " radians ("
          << Eigen::AngleAxisd(H_camera1_camera2_diff.rotation()).angle() *
                 180.0 / M_PI
          << " degrees)";
      // Next extrinsic is just previous one * avg_delta_pose
      Eigen::Affine3d next_extrinsic =
          updated_extrinsics.back() * H_camera1_camera2_avg;
      updated_extrinsics.push_back(next_extrinsic);
      LOG(INFO) << "Default Extrinsic for " << node_list.at(i + 1).camera_name()
                << " is \n"
                << default_extrinsics[i + 1].matrix();
      LOG(INFO) << "--> Updated Extrinsic for "
                << node_list.at(i + 1).camera_name() << " is \n"
                << next_extrinsic.matrix();

      // Write out this extrinsic to a file.  Build a flatbuffer holding
      // just the solved extrinsics and a fresh timestamp...
      flatbuffers::FlatBufferBuilder fbb;
      flatbuffers::Offset<flatbuffers::Vector<float>> data_offset =
          fbb.CreateVector(
              frc971::vision::MatrixToVector(next_extrinsic.matrix()));
      calibration::TransformationMatrix::Builder matrix_builder(fbb);
      matrix_builder.add_data(data_offset);
      flatbuffers::Offset<calibration::TransformationMatrix>
          extrinsic_calibration_offset = matrix_builder.Finish();

      calibration::CameraCalibration::Builder calibration_builder(fbb);
      calibration_builder.add_fixed_extrinsics(extrinsic_calibration_offset);
      const aos::realtime_clock::time_point realtime_now =
          aos::realtime_clock::now();
      calibration_builder.add_calibration_timestamp(
          realtime_now.time_since_epoch().count());
      fbb.Finish(calibration_builder.Finish());
      aos::FlatbufferDetachedBuffer<calibration::CameraCalibration>
          solved_extrinsics = fbb.Release();

      // ...then merge it into a copy of the camera's original calibration
      // with the stale extrinsics and timestamp cleared out.
      aos::FlatbufferDetachedBuffer<
          frc971::vision::calibration::CameraCalibration>
          cal_copy = aos::RecursiveCopyFlatBuffer(calibration_list[i + 1]);
      cal_copy.mutable_message()->clear_fixed_extrinsics();
      cal_copy.mutable_message()->clear_calibration_timestamp();
      aos::FlatbufferDetachedBuffer<calibration::CameraCalibration>
          merged_calibration = aos::MergeFlatBuffers(
              &cal_copy.message(), &solved_extrinsics.message());

      std::stringstream time_ss;
      time_ss << realtime_now;

      CameraNode &camera_node = node_list[i + 1];
      const std::string calibration_filename =
          frc971::vision::CalibrationFilename(
              FLAGS_output_folder, camera_node.node_name, FLAGS_team_number,
              camera_node.camera_number, cal_copy.message().camera_id()->data(),
              time_ss.str());
      LOG(INFO) << calibration_filename << " -> "
                << aos::FlatbufferToJson(merged_calibration,
                                         {.multi_line = true});

      aos::util::WriteStringToFileOrDie(
          calibration_filename,
          aos::FlatbufferToJson(merged_calibration, {.multi_line = true}));
    }
  }

  // Cleanup
  // NOTE(review): both vectors are empty (never populated above), so this
  // loop currently does nothing.
  for (uint i = 0; i < image_callbacks.size(); i++) {
    delete charuco_extractors[i];
    delete image_callbacks[i];
  }
}
Jim Ostrowskiddebdcb2024-02-29 22:25:36 -0800710} // namespace y2024::vision
Jim Ostrowski9bf206a2024-01-26 23:31:58 -0800711
712int main(int argc, char **argv) {
713 aos::InitGoogle(&argc, &argv);
Jim Ostrowskiddebdcb2024-02-29 22:25:36 -0800714 y2024::vision::ExtrinsicsMain(argc, argv);
Jim Ostrowski9bf206a2024-01-26 23:31:58 -0800715}