// blob: 48dffb95566924c3acc72165c8b3552787a00337 [file] [log] [blame]
Jim Ostrowski2f2685f2023-03-25 11:57:54 -07001#include <numeric>
2
3#include "aos/configuration.h"
4#include "aos/events/logging/log_reader.h"
5#include "aos/events/simulated_event_loop.h"
6#include "aos/init.h"
7#include "aos/util/mcap_logger.h"
8#include "frc971/control_loops/pose.h"
9#include "frc971/control_loops/quaternion_utils.h"
10#include "frc971/vision/calibration_generated.h"
11#include "frc971/vision/charuco_lib.h"
Jim Ostrowski68c321d2023-11-14 21:36:28 -080012#include "frc971/vision/extrinsics_calibration.h"
Jim Ostrowski2f2685f2023-03-25 11:57:54 -070013#include "frc971/vision/target_mapper.h"
14#include "frc971/vision/visualize_robot.h"
15// clang-format off
16// OpenCV eigen files must be included after Eigen includes
17#include "opencv2/aruco.hpp"
18#include "opencv2/calib3d.hpp"
19#include "opencv2/core/eigen.hpp"
20#include "opencv2/features2d.hpp"
21#include "opencv2/highgui.hpp"
22#include "opencv2/highgui/highgui.hpp"
23#include "opencv2/imgproc.hpp"
24// clang-format on
25#include "y2023/constants/simulated_constants_sender.h"
26#include "y2023/vision/aprilrobotics.h"
27#include "y2023/vision/vision_util.h"
28
// Command-line flags: visualization options, config/constants paths,
// detection-quality filters, and output location.
DEFINE_bool(alt_view, false,
            "If true, show visualization from field level, rather than above");
DEFINE_string(config, "",
              "If set, override the log's config file with this one.");
DEFINE_string(constants_path, "y2023/constants/constants.json",
              "Path to the constant file");
DEFINE_double(max_pose_error, 5e-5,
              "Throw out target poses with a higher pose error than this");
DEFINE_double(
    max_pose_error_ratio, 0.4,
    "Throw out target poses with a higher pose error ratio than this");
DEFINE_string(output_folder, "/tmp",
              "Directory in which to store the updated calibration files");
DEFINE_string(target_type, "charuco_diamond",
              "Type of target being used [aruco, charuco, charuco_diamond]");
DEFINE_int32(team_number, 0,
             "Required: Use the calibration for a node with this team number");
DEFINE_bool(use_full_logs, false,
            "If true, extract data from logs with images");
DEFINE_uint64(
    wait_key, 1,
    "Time in ms to wait between images (0 to wait indefinitely until click)");

// Defined elsewhere (presumably in the april/target-mapper code pulled in by
// the includes above — TODO confirm); used here to filter tag detections by id.
DECLARE_int32(min_target_id);
DECLARE_int32(max_target_id);
54
// Calibrate extrinsic relationship between cameras using two targets
// seen jointly between cameras. Uses two types of information: 1)
// when a single camera sees two targets, we estimate the pose between
// targets, and 2) when two separate cameras each see a target, we can
// use the pose between targets to estimate the pose between cameras.

// We then create the extrinsics for the robot by starting with the
// given extrinsic for camera 1 (between imu/robot and camera frames)
// and then map each subsequent camera based on the data collected and
// the extrinsic poses computed here.

// TODO<Jim>: Not currently using estimate from pi1->pi4 -- should do full
// estimation, and probably also include camera->imu extrinsics from all
// cameras, not just pi1
namespace y2023::vision {
// Short aliases for the frc971 vision helpers used throughout this file.
using frc971::vision::DataAdapter;
using frc971::vision::ImageCallback;
using frc971::vision::PoseUtils;
using frc971::vision::TargetMap;
using frc971::vision::TargetMapper;
using frc971::vision::VisualizeRobot;
namespace calibration = frc971::vision::calibration;

// Assumed camera frame period (30 fps), used to decide whether two
// detections from different cameras are "simultaneous".
static constexpr double kImagePeriodMs =
    1.0 / 30.0 * 1000.0;  // Image capture period in ms
81
82// Change reference frame from camera to robot
83Eigen::Affine3d CameraToRobotDetection(Eigen::Affine3d H_camera_target,
84 Eigen::Affine3d extrinsics) {
85 const Eigen::Affine3d H_robot_camera = extrinsics;
86 const Eigen::Affine3d H_robot_target = H_robot_camera * H_camera_target;
87 return H_robot_target;
88}
89
// One target observation from a single pi camera, stamped on the
// distributed clock so observations from different nodes can be compared.
struct TimestampedPiDetection {
  aos::distributed_clock::time_point time;
  // Pose of target relative to camera (H_camera_target), as the name says.
  // (For two-board observations this instead holds the boardA->boardB map.)
  Eigen::Affine3d H_camera_target;
  // name of pi
  std::string pi_name;
  int board_id;
};

// Most recent single-board observation; used to pair up near-simultaneous
// detections from two different pis.
TimestampedPiDetection last_observation;
// Pairs of near-simultaneous single-board detections from two cameras.
std::vector<std::pair<TimestampedPiDetection, TimestampedPiDetection>>
    detection_list;
// Observations where one camera saw both boards at once (boardA->boardB).
std::vector<TimestampedPiDetection> two_board_extrinsics_list;
// Shared visualization canvas.
VisualizeRobot vis_robot_;
Jim Ostrowski2f2685f2023-03-25 11:57:54 -0700104
105// TODO<jim>: Implement variance calcs
106Eigen::Affine3d ComputeAveragePose(
107 std::vector<Eigen::Vector3d> &translation_list,
108 std::vector<Eigen::Vector4d> &rotation_list,
109 Eigen::Vector3d *translation_variance = nullptr,
110 Eigen::Vector3d *rotation_variance = nullptr) {
111 Eigen::Vector3d avg_translation =
112 std::accumulate(translation_list.begin(), translation_list.end(),
113 Eigen::Vector3d{0, 0, 0}) /
114 translation_list.size();
115
116 // TODO<Jim>: Use QuaternionMean from quaternion_utils.cc (but this
117 // requires a fixed number of quaternions to be averaged
118 Eigen::Vector4d avg_rotation =
119 std::accumulate(rotation_list.begin(), rotation_list.end(),
120 Eigen::Vector4d{0, 0, 0, 0}) /
121 rotation_list.size();
122 // Normalize, so it's a valid quaternion
123 avg_rotation = avg_rotation / avg_rotation.norm();
124 Eigen::Quaterniond avg_rotation_q{avg_rotation[0], avg_rotation[1],
125 avg_rotation[2], avg_rotation[3]};
126 Eigen::Translation3d translation(avg_translation);
127 Eigen::Affine3d return_pose = translation * avg_rotation_q;
128 if (translation_variance != nullptr) {
129 *translation_variance = Eigen::Vector3d();
130 }
131 if (rotation_variance != nullptr) {
132 LOG(INFO) << *rotation_variance;
133 }
134
135 return return_pose;
136}
137
138Eigen::Affine3d ComputeAveragePose(
139 std::vector<Eigen::Affine3d> &pose_list,
140 Eigen::Vector3d *translation_variance = nullptr,
141 Eigen::Vector3d *rotation_variance = nullptr) {
142 std::vector<Eigen::Vector3d> translation_list;
143 std::vector<Eigen::Vector4d> rotation_list;
144
145 for (Eigen::Affine3d pose : pose_list) {
146 translation_list.push_back(pose.translation());
147 Eigen::Quaterniond quat(pose.rotation().matrix());
148 rotation_list.push_back(
149 Eigen::Vector4d(quat.w(), quat.x(), quat.y(), quat.z()));
150 }
151
152 return ComputeAveragePose(translation_list, rotation_list,
153 translation_variance, rotation_variance);
154}
155
156Eigen::Affine3d ComputeAveragePose(
157 std::vector<TimestampedPiDetection> &pose_list,
158 Eigen::Vector3d *translation_variance = nullptr,
159 Eigen::Vector3d *rotation_variance = nullptr) {
160 std::vector<Eigen::Vector3d> translation_list;
161 std::vector<Eigen::Vector4d> rotation_list;
162
163 for (TimestampedPiDetection pose : pose_list) {
164 translation_list.push_back(pose.H_camera_target.translation());
165 Eigen::Quaterniond quat(pose.H_camera_target.rotation().matrix());
166 rotation_list.push_back(
167 Eigen::Vector4d(quat.w(), quat.x(), quat.y(), quat.z()));
168 }
169 return ComputeAveragePose(translation_list, rotation_list,
170 translation_variance, rotation_variance);
171}
172
// Core per-frame handler for the target poses seen by one camera.
//
// Two targets in one view: record the boardA->boardB transform (stored in
// two_board_extrinsics_list, referenced to the lower-id board).  One target
// in view: try to pair it with a near-simultaneous single-target detection
// from a *different* camera (via the last_observation global) and append the
// pair to detection_list; those pairs later constrain camera->camera
// extrinsics.  Also drives the optional visualization windows.
//
// rgb_image may be empty (e.g. when called from HandleTargetMap).
void HandlePoses(cv::Mat rgb_image,
                 std::vector<TargetMapper::TargetPose> target_poses,
                 aos::distributed_clock::time_point distributed_eof,
                 std::string node_name) {
  // This is used to transform points for visualization
  // Assumes targets are aligned with x->right, y->up, z->out of board
  Eigen::Affine3d H_world_board;
  H_world_board = Eigen::Translation3d::Identity() *
                  Eigen::AngleAxisd(M_PI / 2.0, Eigen::Vector3d::UnitX());
  if (FLAGS_alt_view) {
    // Don't rotate -- this is like viewing from the side
    H_world_board = Eigen::Translation3d(0.0, 0.0, 3.0);
  }

  bool draw_vis = false;
  CHECK_LE(target_poses.size(), 2u)
      << "Can't handle more than two tags in field of view";
  if (target_poses.size() == 2) {
    draw_vis = true;
    VLOG(1) << "Saw two boards in same view from " << node_name;
    int from_index = 0;
    int to_index = 1;
    // Handle when we see two boards at once
    // We'll store them referenced to the lower id board
    if (target_poses[from_index].id > target_poses[to_index].id) {
      std::swap<int>(from_index, to_index);
    }

    // Create "from" (A) and "to" (B) transforms
    Eigen::Affine3d H_camera_boardA =
        PoseUtils::Pose3dToAffine3d(target_poses[from_index].pose);
    Eigen::Affine3d H_camera_boardB =
        PoseUtils::Pose3dToAffine3d(target_poses[to_index].pose);

    // boardA->boardB, derived from the two camera->board observations.
    Eigen::Affine3d H_boardA_boardB =
        H_camera_boardA.inverse() * H_camera_boardB;

    TimestampedPiDetection boardA_boardB{
        .time = distributed_eof,
        .H_camera_target = H_boardA_boardB,
        .pi_name = node_name,
        .board_id = target_poses[from_index].id};

    VLOG(1) << "Map from board " << from_index << " to " << to_index << " is\n"
            << H_boardA_boardB.matrix();
    // Store this observation of the transform between two boards
    two_board_extrinsics_list.push_back(boardA_boardB);

    if (FLAGS_visualize) {
      vis_robot_.DrawFrameAxes(
          H_world_board,
          std::string("board ") + std::to_string(target_poses[from_index].id),
          cv::Scalar(0, 255, 0));
      vis_robot_.DrawFrameAxes(
          H_world_board * boardA_boardB.H_camera_target,
          std::string("board ") + std::to_string(target_poses[to_index].id),
          cv::Scalar(255, 0, 0));
      VLOG(2) << "Detection map from camera " << node_name << " to board "
              << target_poses[from_index].id << " is\n"
              << H_camera_boardA.matrix() << "\n and inverse is\n"
              << H_camera_boardA.inverse().matrix()
              << "\n and with world to board rotation is\n"
              << H_world_board * H_camera_boardA.inverse().matrix();
      vis_robot_.DrawRobotOutline(H_world_board * H_camera_boardA.inverse(),
                                  node_name, cv::Scalar(0, 0, 255));
    }
  } else if (target_poses.size() == 1) {
    VLOG(1) << "Saw single board in camera " << node_name;
    Eigen::Affine3d H_camera2_board2 =
        PoseUtils::Pose3dToAffine3d(target_poses[0].pose);
    TimestampedPiDetection new_observation{.time = distributed_eof,
                                           .H_camera_target = H_camera2_board2,
                                           .pi_name = node_name,
                                           .board_id = target_poses[0].id};

    // Only take two observations if they're within half an image cycle of each
    // other (i.e., as close in time as possible)
    // NOTE(review): assumes the distributed clock ticks in nanoseconds,
    // hence the 1e6 factor against kImagePeriodMs -- TODO confirm.
    if (std::abs((distributed_eof - last_observation.time).count()) <
        kImagePeriodMs / 2.0 * 1000000.0) {
      // Sort by pi numbering, since this is how we will handle them
      std::pair<TimestampedPiDetection, TimestampedPiDetection> new_pair;
      if (last_observation.pi_name < new_observation.pi_name) {
        new_pair = std::pair(last_observation, new_observation);
      } else if (last_observation.pi_name > new_observation.pi_name) {
        new_pair = std::pair(new_observation, last_observation);
      } else {
        // NOTE(review): in this branch new_pair stays default-constructed
        // but is still pushed below -- looks like a latent bug; confirm.
        LOG(WARNING) << "Got 2 observations in a row from same pi. Probably "
                        "not too much of an issue???";
      }
      detection_list.push_back(new_pair);

      // This bit is just for visualization and checking purposes-- use the
      // last two-board observation to figure out the current estimate
      // between the two cameras
      if (FLAGS_visualize && two_board_extrinsics_list.size() > 0) {
        draw_vis = true;
        TimestampedPiDetection &last_two_board_ext =
            two_board_extrinsics_list[two_board_extrinsics_list.size() - 1];
        Eigen::Affine3d &H_boardA_boardB = last_two_board_ext.H_camera_target;
        int boardA_boardB_id = last_two_board_ext.board_id;

        TimestampedPiDetection camera1_boardA = new_pair.first;
        TimestampedPiDetection camera2_boardB = new_pair.second;
        // If camera1_boardA doesn't point to the first target, then swap
        // these two
        if (camera1_boardA.board_id != boardA_boardB_id) {
          camera1_boardA = new_pair.second;
          camera2_boardB = new_pair.first;
        }
        VLOG(1) << "Camera " << camera1_boardA.pi_name << " seeing board "
                << camera1_boardA.board_id << " and camera "
                << camera2_boardB.pi_name << " seeing board "
                << camera2_boardB.board_id;

        vis_robot_.DrawRobotOutline(
            H_world_board * camera1_boardA.H_camera_target.inverse(),
            camera1_boardA.pi_name, cv::Scalar(0, 0, 255));
        vis_robot_.DrawRobotOutline(
            H_world_board * H_boardA_boardB *
                camera2_boardB.H_camera_target.inverse(),
            camera2_boardB.pi_name, cv::Scalar(128, 128, 0));
        vis_robot_.DrawFrameAxes(
            H_world_board,
            std::string("Board ") + std::to_string(last_two_board_ext.board_id),
            cv::Scalar(0, 255, 0));
        vis_robot_.DrawFrameAxes(H_world_board * H_boardA_boardB, "Board B",
                                 cv::Scalar(255, 0, 0));
      }
      VLOG(1) << "Storing observation between " << new_pair.first.pi_name
              << ", target " << new_pair.first.board_id << " and "
              << new_pair.second.pi_name << ", target "
              << new_pair.second.board_id;
    } else {
      // Too far apart in time to pair; remember it for the next detection.
      VLOG(2) << "Storing observation for " << node_name << " at time "
              << distributed_eof;
      last_observation = new_observation;
    }
  }

  if (FLAGS_visualize) {
    if (!rgb_image.empty()) {
      std::string image_name = node_name + " Image";
      cv::Mat rgb_small;
      cv::resize(rgb_image, rgb_small, cv::Size(), 0.5, 0.5);
      cv::imshow(image_name, rgb_small);
      cv::waitKey(FLAGS_wait_key);
    }

    if (draw_vis) {
      cv::imshow("View", vis_robot_.image_);
      cv::waitKey(FLAGS_wait_key);
      vis_robot_.ClearImage();
    }
  }
}
328
Jim Ostrowski68c321d2023-11-14 21:36:28 -0800329void HandleTargetMap(const TargetMap &map,
330 aos::distributed_clock::time_point distributed_eof,
331 std::string node_name) {
332 VLOG(1) << "Got april tag map call from node " << node_name;
333 // Create empty RGB image in this case
334 cv::Mat rgb_image;
335 std::vector<TargetMapper::TargetPose> target_poses;
336
337 for (const auto *target_pose_fbs : *map.target_poses()) {
338 // Skip detections with invalid ids
339 if (static_cast<TargetMapper::TargetId>(target_pose_fbs->id()) <
340 FLAGS_min_target_id ||
341 static_cast<TargetMapper::TargetId>(target_pose_fbs->id()) >
342 FLAGS_max_target_id) {
343 LOG(INFO) << "Skipping tag with invalid id of " << target_pose_fbs->id();
344 continue;
345 }
346
347 // Skip detections with high pose errors
348 if (target_pose_fbs->pose_error() > FLAGS_max_pose_error) {
349 LOG(INFO) << "Skipping tag " << target_pose_fbs->id()
350 << " due to pose error of " << target_pose_fbs->pose_error();
351 continue;
352 }
353 // Skip detections with high pose error ratios
354 if (target_pose_fbs->pose_error_ratio() > FLAGS_max_pose_error_ratio) {
355 LOG(INFO) << "Skipping tag " << target_pose_fbs->id()
356 << " due to pose error ratio of "
357 << target_pose_fbs->pose_error_ratio();
358 continue;
359 }
360
361 const TargetMapper::TargetPose target_pose =
362 PoseUtils::TargetPoseFromFbs(*target_pose_fbs);
363
364 target_poses.emplace_back(target_pose);
365
366 Eigen::Affine3d H_camera_target =
367 PoseUtils::Pose3dToAffine3d(target_pose.pose);
368 VLOG(2) << node_name << " saw target " << target_pose.id
369 << " from TargetMap at timestamp " << distributed_eof
370 << " with pose = " << H_camera_target.matrix();
371 }
372 HandlePoses(rgb_image, target_poses, distributed_eof, node_name);
373}
374
375void HandleImage(aos::EventLoop *event_loop, cv::Mat rgb_image,
376 const aos::monotonic_clock::time_point eof,
377 aos::distributed_clock::time_point distributed_eof,
378 frc971::vision::CharucoExtractor &charuco_extractor,
379 std::string node_name) {
380 std::vector<cv::Vec4i> charuco_ids;
381 std::vector<std::vector<cv::Point2f>> charuco_corners;
382 bool valid = false;
383 std::vector<Eigen::Vector3d> rvecs_eigen;
384 std::vector<Eigen::Vector3d> tvecs_eigen;
385 // Why eof vs. distributed_eof?
386 charuco_extractor.ProcessImage(rgb_image, eof, event_loop->monotonic_now(),
387 charuco_ids, charuco_corners, valid,
388 rvecs_eigen, tvecs_eigen);
389 if (rvecs_eigen.size() > 0 && !valid) {
390 LOG(WARNING) << "Charuco extractor returned not valid";
391 return;
392 }
393
394 std::vector<TargetMapper::TargetPose> target_poses;
395 for (size_t i = 0; i < tvecs_eigen.size(); i++) {
396 Eigen::Quaterniond rotation(
397 frc971::controls::ToQuaternionFromRotationVector(rvecs_eigen[i]));
398 ceres::examples::Pose3d pose(Eigen::Vector3d(tvecs_eigen[i]), rotation);
399 TargetMapper::TargetPose target_pose{charuco_ids[i][0], pose};
400 target_poses.emplace_back(target_pose);
401
402 Eigen::Affine3d H_camera_target = PoseUtils::Pose3dToAffine3d(pose);
403 VLOG(2) << node_name << " saw target " << target_pose.id
404 << " from image at timestamp " << distributed_eof
405 << " with pose = " << H_camera_target.matrix();
406 }
407 HandlePoses(rgb_image, target_poses, distributed_eof, node_name);
408}
409
// Main driver: replays the given logs, collects single- and two-board
// detections from each pi (via HandleImage or HandleTargetMap), then chains
// camera->camera estimates off the first node's default extrinsics and
// writes one updated calibration JSON per downstream camera.
void ExtrinsicsMain(int argc, char *argv[]) {
  vis_robot_ = VisualizeRobot(cv::Size(1000, 1000));
  vis_robot_.ClearImage();
  const double kFocalLength = 1000.0;
  const int kImageWidth = 1000;
  vis_robot_.SetDefaultViewpoint(kImageWidth, kFocalLength);

  // Optional config override from --config; otherwise use the log's config.
  std::optional<aos::FlatbufferDetachedBuffer<aos::Configuration>> config =
      (FLAGS_config.empty()
           ? std::nullopt
           : std::make_optional(aos::configuration::ReadConfig(FLAGS_config)));

  // open logfiles
  aos::logger::LogReader reader(
      aos::logger::SortParts(aos::logger::FindLogs(argc, argv)),
      config.has_value() ? &config->message() : nullptr);

  // Remap the constants channels so SendSimulationConstants below can
  // inject fresh constants in their place.
  constexpr size_t kNumPis = 4;
  for (size_t i = 1; i <= kNumPis; i++) {
    reader.RemapLoggedChannel(absl::StrFormat("/pi%u/constants", i),
                              "y2023.Constants");
  }

  reader.RemapLoggedChannel("/imu/constants", "y2023.Constants");
  reader.RemapLoggedChannel("/logger/constants", "y2023.Constants");
  reader.RemapLoggedChannel("/roborio/constants", "y2023.Constants");
  reader.Register();

  SendSimulationConstants(reader.event_loop_factory(), FLAGS_team_number,
                          FLAGS_constants_path);

  VLOG(1) << "Using target type " << FLAGS_target_type;
  std::vector<std::string> node_list;
  node_list.push_back("pi1");
  node_list.push_back("pi2");
  node_list.push_back("pi3");
  node_list.push_back("pi4");
  std::vector<const calibration::CameraCalibration *> calibration_list;

  std::vector<std::unique_ptr<aos::EventLoop>> detection_event_loops;
  std::vector<frc971::vision::CharucoExtractor *> charuco_extractors;
  std::vector<frc971::vision::ImageCallback *> image_callbacks;
  std::vector<Eigen::Affine3d> default_extrinsics;

  // Per-node setup: event loop, calibration lookup, default extrinsics,
  // and either an image callback (--use_full_logs) or a TargetMap watcher.
  for (uint i = 0; i < node_list.size(); i++) {
    std::string node = node_list[i];
    const aos::Node *pi =
        aos::configuration::GetNode(reader.configuration(), node.c_str());

    detection_event_loops.emplace_back(
        reader.event_loop_factory()->MakeEventLoop(
            (node + "_detection").c_str(), pi));

    frc971::constants::ConstantsFetcher<y2023::Constants> constants_fetcher(
        detection_event_loops.back().get());

    const calibration::CameraCalibration *calibration =
        FindCameraCalibration(constants_fetcher.constants(), node);
    calibration_list.push_back(calibration);

    frc971::vision::TargetType target_type =
        frc971::vision::TargetTypeFromString(FLAGS_target_type);
    frc971::vision::CharucoExtractor *charuco_ext =
        new frc971::vision::CharucoExtractor(calibration, target_type);
    charuco_extractors.emplace_back(charuco_ext);

    // Default robot->camera extrinsics from the stored calibration.
    cv::Mat extrinsics_cv = CameraExtrinsics(calibration).value();
    Eigen::Matrix4d extrinsics_matrix;
    cv::cv2eigen(extrinsics_cv, extrinsics_matrix);
    const auto ext_H_robot_pi = Eigen::Affine3d(extrinsics_matrix);
    default_extrinsics.emplace_back(ext_H_robot_pi);

    VLOG(1) << "Got extrinsics for " << node << " as\n"
            << default_extrinsics.back().matrix();

    if (FLAGS_use_full_logs) {
      LOG(INFO) << "Set up image callback for node " << node_list[i];
      frc971::vision::ImageCallback *image_callback =
          new frc971::vision::ImageCallback(
              detection_event_loops[i].get(), "/" + node_list[i] + "/camera",
              [&reader, &charuco_extractors, &detection_event_loops, &node_list,
               i](cv::Mat rgb_image,
                  const aos::monotonic_clock::time_point eof) {
                aos::distributed_clock::time_point pi_distributed_time =
                    reader.event_loop_factory()
                        ->GetNodeEventLoopFactory(
                            detection_event_loops[i].get()->node())
                        ->ToDistributedClock(eof);
                HandleImage(detection_event_loops[i].get(), rgb_image, eof,
                            pi_distributed_time, *charuco_extractors[i],
                            node_list[i]);
              });

      image_callbacks.emplace_back(image_callback);
    } else {
      detection_event_loops[i]->MakeWatcher(
          "/camera", [&reader, &detection_event_loops, &node_list,
                      i](const TargetMap &map) {
            aos::distributed_clock::time_point pi_distributed_time =
                reader.event_loop_factory()
                    ->GetNodeEventLoopFactory(detection_event_loops[i]->node())
                    ->ToDistributedClock(aos::monotonic_clock::time_point(
                        aos::monotonic_clock::duration(
                            map.monotonic_timestamp_ns())));

            HandleTargetMap(map, pi_distributed_time, node_list[i]);
          });
      LOG(INFO) << "Created watcher for using the detection event loop for "
                << node_list[i] << " with i = " << i << " and size "
                << detection_event_loops.size();
    }
  }

  // Replay the whole log; the callbacks above fill the global lists.
  reader.event_loop_factory()->Run();

  // Do quick check to see what averaged two-board pose for each pi is
  // individually
  CHECK_GT(two_board_extrinsics_list.size(), 0u)
      << "Must have at least one view of both boards";
  int base_target_id = two_board_extrinsics_list[0].board_id;
  VLOG(1) << "Base id for two_board_extrinsics_list is " << base_target_id;
  for (auto node : node_list) {
    std::vector<TimestampedPiDetection> pose_list;
    for (auto ext : two_board_extrinsics_list) {
      CHECK_EQ(base_target_id, ext.board_id)
          << " All boards should have same reference id";
      if (ext.pi_name == node) {
        pose_list.push_back(ext);
      }
    }
    Eigen::Affine3d avg_pose_from_pi = ComputeAveragePose(pose_list);
    VLOG(1) << "Estimate from " << node << " with " << pose_list.size()
            << " observations is:\n"
            << avg_pose_from_pi.matrix();
  }
  // Global boardA->boardB estimate averaged over every camera's views.
  Eigen::Affine3d H_boardA_boardB_avg =
      ComputeAveragePose(two_board_extrinsics_list);
  // TODO: Should probably do some outlier rejection
  LOG(INFO) << "Estimate of two board pose using all nodes with "
            << two_board_extrinsics_list.size() << " observations is:\n"
            << H_boardA_boardB_avg.matrix() << "\n";

  // Next, compute the relative camera poses
  LOG(INFO) << "Got " << detection_list.size() << " extrinsic observations";
  std::vector<Eigen::Affine3d> H_camera1_camera2_list;
  std::vector<Eigen::Affine3d> updated_extrinsics;
  // Use the first node's extrinsics as our base, and fix from there
  updated_extrinsics.push_back(default_extrinsics[0]);
  LOG(INFO) << "Default extrinsic for node " << node_list[0] << " is "
            << default_extrinsics[0].matrix();
  // Chain pi1->pi2->pi3->pi4: each iteration estimates node i -> node i+1.
  for (uint i = 0; i < node_list.size() - 1; i++) {
    H_camera1_camera2_list.clear();
    // Go through the list, and find successive pairs of cameras
    for (auto [pose1, pose2] : detection_list) {
      if ((pose1.pi_name == node_list[i]) &&
          (pose2.pi_name == node_list[i + 1])) {
        Eigen::Affine3d H_camera1_boardA = pose1.H_camera_target;
        // If pose1 isn't referenced to base_target_id, correct that
        if (pose1.board_id != base_target_id) {
          // pose1.H_camera_target references boardB, so map back to boardA
          H_camera1_boardA =
              pose1.H_camera_target * H_boardA_boardB_avg.inverse();
        }

        // Now, get the camera2->boardA map (notice it's boardA, same as
        // camera1, so we can compute the difference based both on boardA)
        Eigen::Affine3d H_camera2_boardA = pose2.H_camera_target;
        // If pose2 isn't referenced to boardA (base_target_id), correct
        // that
        if (pose2.board_id != base_target_id) {
          // pose2.H_camera_target references boardB, so map back to boardA
          H_camera2_boardA =
              pose2.H_camera_target * H_boardA_boardB_avg.inverse();
        }

        // Compute camera1->camera2 map
        Eigen::Affine3d H_camera1_camera2 =
            H_camera1_boardA * H_camera2_boardA.inverse();
        H_camera1_camera2_list.push_back(H_camera1_camera2);
        VLOG(1) << "Map from camera " << pose1.pi_name << " and tag "
                << pose1.board_id << " with observation: \n"
                << pose1.H_camera_target.matrix() << "\n to camera "
                << pose2.pi_name << " and tag " << pose2.board_id
                << " with observation: \n"
                << pose2.H_camera_target.matrix() << "\ngot map as\n"
                << H_camera1_camera2.matrix();

        Eigen::Affine3d H_world_board;
        H_world_board = Eigen::Translation3d::Identity() *
                        Eigen::AngleAxisd(M_PI / 2.0, Eigen::Vector3d::UnitX());
        if (FLAGS_alt_view) {
          H_world_board = Eigen::Translation3d(0.0, 0.0, 3.0);
        }

        VLOG(2) << "Camera1 " << pose1.pi_name << " in world frame is \n"
                << (H_world_board * H_camera1_boardA.inverse()).matrix();
        VLOG(2) << "Camera2 " << pose2.pi_name << " in world frame is \n"
                << (H_world_board * H_camera2_boardA.inverse()).matrix();
      }
    }
    // TODO<Jim>: If we don't get any matches, we could just use default
    // extrinsics
    CHECK(H_camera1_camera2_list.size() > 0)
        << "Failed with zero poses for node " << node_list[i];
    if (H_camera1_camera2_list.size() > 0) {
      Eigen::Affine3d H_camera1_camera2_avg =
          ComputeAveragePose(H_camera1_camera2_list);
      LOG(INFO) << "From " << node_list[i] << " to " << node_list[i + 1]
                << " found " << H_camera1_camera2_list.size()
                << " observations, and the average pose is:\n"
                << H_camera1_camera2_avg.matrix();
      Eigen::Affine3d H_camera1_camera2_default =
          default_extrinsics[i].inverse() * default_extrinsics[i + 1];
      LOG(INFO) << "Compare this to that from default values:\n"
                << H_camera1_camera2_default.matrix();
      Eigen::Affine3d H_camera1_camera2_diff =
          H_camera1_camera2_avg * H_camera1_camera2_default.inverse();
      LOG(INFO)
          << "Difference between averaged and default delta poses "
             "has |T| = "
          << H_camera1_camera2_diff.translation().norm() << "m and |R| = "
          << Eigen::AngleAxisd(H_camera1_camera2_diff.rotation()).angle()
          << " radians ("
          << Eigen::AngleAxisd(H_camera1_camera2_diff.rotation()).angle() *
                 180.0 / M_PI
          << " degrees)";
      // Next extrinsic is just previous one * avg_delta_pose
      Eigen::Affine3d next_extrinsic =
          updated_extrinsics.back() * H_camera1_camera2_avg;
      updated_extrinsics.push_back(next_extrinsic);
      LOG(INFO) << "Default Extrinsic for " << node_list[i + 1] << " is \n"
                << default_extrinsics[i + 1].matrix();
      LOG(INFO) << "--> Updated Extrinsic for " << node_list[i + 1] << " is \n"
                << next_extrinsic.matrix();

      // Write out this extrinsic to a file
      flatbuffers::FlatBufferBuilder fbb;
      flatbuffers::Offset<flatbuffers::Vector<float>> data_offset =
          fbb.CreateVector(
              frc971::vision::MatrixToVector(next_extrinsic.matrix()));
      calibration::TransformationMatrix::Builder matrix_builder(fbb);
      matrix_builder.add_data(data_offset);
      flatbuffers::Offset<calibration::TransformationMatrix>
          extrinsic_calibration_offset = matrix_builder.Finish();

      calibration::CameraCalibration::Builder calibration_builder(fbb);
      calibration_builder.add_fixed_extrinsics(extrinsic_calibration_offset);
      const aos::realtime_clock::time_point realtime_now =
          aos::realtime_clock::now();
      calibration_builder.add_calibration_timestamp(
          realtime_now.time_since_epoch().count());
      fbb.Finish(calibration_builder.Finish());
      aos::FlatbufferDetachedBuffer<calibration::CameraCalibration>
          solved_extrinsics = fbb.Release();

      // Merge the solved extrinsics over a copy of the original calibration
      // with its extrinsics/timestamp cleared.
      aos::FlatbufferDetachedBuffer<
          frc971::vision::calibration::CameraCalibration>
          cal_copy = aos::RecursiveCopyFlatBuffer(calibration_list[i + 1]);
      cal_copy.mutable_message()->clear_fixed_extrinsics();
      cal_copy.mutable_message()->clear_calibration_timestamp();
      aos::FlatbufferDetachedBuffer<calibration::CameraCalibration>
          merged_calibration = aos::MergeFlatBuffers(
              &cal_copy.message(), &solved_extrinsics.message());

      std::stringstream time_ss;
      time_ss << realtime_now;

      // Assumes node_list name is of form "pi#" to create camera id
      const std::string calibration_filename =
          FLAGS_output_folder +
          absl::StrFormat("/calibration_pi-%d-%s_cam-%s_%s.json",
                          FLAGS_team_number, node_list[i + 1].substr(2, 3),
                          calibration_list[i + 1]->camera_id()->data(),
                          time_ss.str());

      LOG(INFO) << calibration_filename << " -> "
                << aos::FlatbufferToJson(merged_calibration,
                                         {.multi_line = true});

      aos::util::WriteStringToFileOrDie(
          calibration_filename,
          aos::FlatbufferToJson(merged_calibration, {.multi_line = true}));
    }
  }

  // Cleanup
  // NOTE(review): charuco_extractors has one entry per node, but this loop
  // is bounded by image_callbacks.size(), which is empty when
  // --use_full_logs=false -- the extractors leak in that mode; confirm.
  for (uint i = 0; i < image_callbacks.size(); i++) {
    delete charuco_extractors[i];
    delete image_callbacks[i];
  }
}
Stephan Pleinesf63bde82024-01-13 15:59:33 -0800701} // namespace y2023::vision
Jim Ostrowski2f2685f2023-03-25 11:57:54 -0700702
// Entry point: initializes AOS/gflags/glog, then runs the multi-camera
// extrinsics calibration over the log files given on the command line.
int main(int argc, char **argv) {
  aos::InitGoogle(&argc, &argv);
  y2023::vision::ExtrinsicsMain(argc, argv);
}