#include <numeric>

#include "absl/flags/declare.h"
#include "absl/flags/flag.h"

#include "aos/configuration.h"
#include "aos/events/logging/log_reader.h"
#include "aos/events/simulated_event_loop.h"
#include "aos/init.h"
#include "aos/util/mcap_logger.h"
#include "frc971/control_loops/pose.h"
#include "frc971/control_loops/quaternion_utils.h"
#include "frc971/vision/calibration_generated.h"
#include "frc971/vision/charuco_lib.h"
#include "frc971/vision/extrinsics_calibration.h"
#include "frc971/vision/target_mapper.h"
#include "frc971/vision/visualize_robot.h"
// clang-format off
// OpenCV eigen files must be included after Eigen includes
#include "opencv2/aruco.hpp"
#include "opencv2/calib3d.hpp"
#include "opencv2/core/eigen.hpp"
#include "opencv2/features2d.hpp"
#include "opencv2/highgui.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc.hpp"
// clang-format on
#include "y2023/constants/simulated_constants_sender.h"
#include "y2023/vision/aprilrobotics.h"
#include "y2023/vision/vision_util.h"

ABSL_FLAG(bool, alt_view, false,
          "If true, show visualization from field level, rather than above");
ABSL_FLAG(std::string, config, "",
          "If set, override the log's config file with this one.");
ABSL_FLAG(std::string, constants_path, "y2023/constants/constants.json",
          "Path to the constant file");
ABSL_FLAG(double, max_pose_error, 5e-5,
          "Throw out target poses with a higher pose error than this");
ABSL_FLAG(double, max_pose_error_ratio, 0.4,
          "Throw out target poses with a higher pose error ratio than this");
ABSL_FLAG(std::string, output_folder, "/tmp",
          "Directory in which to store the updated calibration files");
ABSL_FLAG(std::string, target_type, "charuco_diamond",
          "Type of target being used [aruco, charuco, charuco_diamond]");
ABSL_FLAG(int32_t, team_number, 0,
          "Required: Use the calibration for a node with this team number");
ABSL_FLAG(bool, use_full_logs, false,
          "If true, extract data from logs with images");
ABSL_FLAG(
    uint64_t, wait_key, 1,
    "Time in ms to wait between images (0 to wait indefinitely until click)");

ABSL_DECLARE_FLAG(int32_t, min_target_id);
ABSL_DECLARE_FLAG(int32_t, max_target_id);
// Calibrate extrinsic relationship between cameras using two targets
// seen jointly between cameras. Uses two types of information: 1)
// when a single camera sees two targets, we estimate the pose between
// targets, and 2) when two separate cameras each see a target, we can
// use the pose between targets to estimate the pose between cameras.

// We then create the extrinsics for the robot by starting with the
// given extrinsic for camera 1 (between imu/robot and camera frames)
// and then map each subsequent camera based on the data collected and
// the extrinsic poses computed here.

// TODO<Jim>: Not currently using estimate from pi1->pi4-- should do full
// estimation, and probably also include camera->imu extrinsics from all
// cameras, not just pi1
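
// Example invocation (the binary name and log path below are illustrative
// only; the flags are the ones defined in this file):
//   calibrate_multi_cameras --team_number=971 --target_type=charuco_diamond \
//       --output_folder=/tmp /path/to/logfile/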

namespace y2023::vision {
using frc971::vision::DataAdapter;
using frc971::vision::ImageCallback;
using frc971::vision::PoseUtils;
using frc971::vision::TargetMap;
using frc971::vision::TargetMapper;
using frc971::vision::VisualizeRobot;
namespace calibration = frc971::vision::calibration;

static constexpr double kImagePeriodMs =
    1.0 / 30.0 * 1000.0;  // Image capture period in ms

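// Naming convention used below: H_a_b is the homogeneous transform that maps
// points expressed in frame b into frame a, so transforms chain left to
// right, e.g. H_robot_target = H_robot_camera * H_camera_target.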
// Change reference frame from camera to robot
Eigen::Affine3d CameraToRobotDetection(Eigen::Affine3d H_camera_target,
                                       Eigen::Affine3d extrinsics) {
  const Eigen::Affine3d H_robot_camera = extrinsics;
  const Eigen::Affine3d H_robot_target = H_robot_camera * H_camera_target;
  return H_robot_target;
}

struct TimestampedPiDetection {
  aos::distributed_clock::time_point time;
  // Pose of the target relative to the camera (for two-board detections,
  // this instead holds the pose of board B relative to board A)
  Eigen::Affine3d H_camera_target;
  // Name of the pi
  std::string pi_name;
  int board_id;
};

TimestampedPiDetection last_observation;
std::vector<std::pair<TimestampedPiDetection, TimestampedPiDetection>>
    detection_list;
std::vector<TimestampedPiDetection> two_board_extrinsics_list;
VisualizeRobot vis_robot_;

// TODO<jim>: Implement variance calcs
Eigen::Affine3d ComputeAveragePose(
    std::vector<Eigen::Vector3d> &translation_list,
    std::vector<Eigen::Vector4d> &rotation_list,
    Eigen::Vector3d *translation_variance = nullptr,
    Eigen::Vector3d *rotation_variance = nullptr) {
  Eigen::Vector3d avg_translation =
      std::accumulate(translation_list.begin(), translation_list.end(),
                      Eigen::Vector3d{0, 0, 0}) /
      translation_list.size();

  // TODO<Jim>: Use QuaternionMean from quaternion_utils.cc (but this
  // requires a fixed number of quaternions to be averaged)
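  // Note: the simple component-wise average below is only well-defined when
  // all quaternions lie in the same hemisphere (q and -q represent the same
  // rotation); the observations averaged here are assumed to be close enough
  // together for that to hold.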
  Eigen::Vector4d avg_rotation =
      std::accumulate(rotation_list.begin(), rotation_list.end(),
                      Eigen::Vector4d{0, 0, 0, 0}) /
      rotation_list.size();
  // Normalize, so it's a valid quaternion
  avg_rotation = avg_rotation / avg_rotation.norm();
  Eigen::Quaterniond avg_rotation_q{avg_rotation[0], avg_rotation[1],
                                    avg_rotation[2], avg_rotation[3]};
  Eigen::Translation3d translation(avg_translation);
  Eigen::Affine3d return_pose = translation * avg_rotation_q;
  if (translation_variance != nullptr) {
    *translation_variance = Eigen::Vector3d();
  }
  if (rotation_variance != nullptr) {
    LOG(INFO) << *rotation_variance;
  }

  return return_pose;
}

Eigen::Affine3d ComputeAveragePose(
    std::vector<Eigen::Affine3d> &pose_list,
    Eigen::Vector3d *translation_variance = nullptr,
    Eigen::Vector3d *rotation_variance = nullptr) {
  std::vector<Eigen::Vector3d> translation_list;
  std::vector<Eigen::Vector4d> rotation_list;

  for (Eigen::Affine3d pose : pose_list) {
    translation_list.push_back(pose.translation());
    Eigen::Quaterniond quat(pose.rotation().matrix());
    rotation_list.push_back(
        Eigen::Vector4d(quat.w(), quat.x(), quat.y(), quat.z()));
  }

  return ComputeAveragePose(translation_list, rotation_list,
                            translation_variance, rotation_variance);
}

Eigen::Affine3d ComputeAveragePose(
    std::vector<TimestampedPiDetection> &pose_list,
    Eigen::Vector3d *translation_variance = nullptr,
    Eigen::Vector3d *rotation_variance = nullptr) {
  std::vector<Eigen::Vector3d> translation_list;
  std::vector<Eigen::Vector4d> rotation_list;

  for (TimestampedPiDetection pose : pose_list) {
    translation_list.push_back(pose.H_camera_target.translation());
    Eigen::Quaterniond quat(pose.H_camera_target.rotation().matrix());
    rotation_list.push_back(
        Eigen::Vector4d(quat.w(), quat.x(), quat.y(), quat.z()));
  }
  return ComputeAveragePose(translation_list, rotation_list,
                            translation_variance, rotation_variance);
}

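// Handles the set of target detections from one camera at one timestamp: if
// two boards are visible at once, records the board-to-board pose; if a
// single board is visible, pairs it with a near-simultaneous single-board
// detection from another camera; optionally draws the visualization.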
void HandlePoses(cv::Mat rgb_image,
                 std::vector<TargetMapper::TargetPose> target_poses,
                 aos::distributed_clock::time_point distributed_eof,
                 std::string node_name) {
  // This is used to transform points for visualization
  // Assumes targets are aligned with x->right, y->up, z->out of board
  Eigen::Affine3d H_world_board;
  H_world_board = Eigen::Translation3d::Identity() *
                  Eigen::AngleAxisd(M_PI / 2.0, Eigen::Vector3d::UnitX());
  if (absl::GetFlag(FLAGS_alt_view)) {
    // Don't rotate -- this is like viewing from the side
    H_world_board = Eigen::Translation3d(0.0, 0.0, 3.0);
  }

  bool draw_vis = false;
  CHECK_LE(target_poses.size(), 2u)
      << "Can't handle more than two tags in field of view";
  if (target_poses.size() == 2) {
    draw_vis = true;
    VLOG(1) << "Saw two boards in same view from " << node_name;
    int from_index = 0;
    int to_index = 1;
    // Handle when we see two boards at once
    // We'll store them referenced to the lower id board
    if (target_poses[from_index].id > target_poses[to_index].id) {
      std::swap<int>(from_index, to_index);
    }

    // Create "from" (A) and "to" (B) transforms
    Eigen::Affine3d H_camera_boardA =
        PoseUtils::Pose3dToAffine3d(target_poses[from_index].pose);
    Eigen::Affine3d H_camera_boardB =
        PoseUtils::Pose3dToAffine3d(target_poses[to_index].pose);

    Eigen::Affine3d H_boardA_boardB =
        H_camera_boardA.inverse() * H_camera_boardB;

    TimestampedPiDetection boardA_boardB{
        .time = distributed_eof,
        .H_camera_target = H_boardA_boardB,
        .pi_name = node_name,
        .board_id = target_poses[from_index].id};

    VLOG(1) << "Map from board " << from_index << " to " << to_index << " is\n"
            << H_boardA_boardB.matrix();
    // Store this observation of the transform between two boards
    two_board_extrinsics_list.push_back(boardA_boardB);

    if (absl::GetFlag(FLAGS_visualize)) {
      vis_robot_.DrawFrameAxes(
          H_world_board,
          std::string("board ") + std::to_string(target_poses[from_index].id),
          cv::Scalar(0, 255, 0));
      vis_robot_.DrawFrameAxes(
          H_world_board * boardA_boardB.H_camera_target,
          std::string("board ") + std::to_string(target_poses[to_index].id),
          cv::Scalar(255, 0, 0));
      VLOG(2) << "Detection map from camera " << node_name << " to board "
              << target_poses[from_index].id << " is\n"
              << H_camera_boardA.matrix() << "\n and inverse is\n"
              << H_camera_boardA.inverse().matrix()
              << "\n and with world to board rotation is\n"
              << H_world_board * H_camera_boardA.inverse().matrix();
      vis_robot_.DrawRobotOutline(H_world_board * H_camera_boardA.inverse(),
                                  node_name, cv::Scalar(0, 0, 255));
    }
  } else if (target_poses.size() == 1) {
    VLOG(1) << "Saw single board in camera " << node_name;
    Eigen::Affine3d H_camera2_board2 =
        PoseUtils::Pose3dToAffine3d(target_poses[0].pose);
    TimestampedPiDetection new_observation{.time = distributed_eof,
                                           .H_camera_target = H_camera2_board2,
                                           .pi_name = node_name,
                                           .board_id = target_poses[0].id};

    // Only take two observations if they're within half an image cycle of each
    // other (i.e., as close in time as possible)
    if (std::abs((distributed_eof - last_observation.time).count()) <
        kImagePeriodMs / 2.0 * 1000000.0) {
      // Sort by pi numbering, since this is how we will handle them
      std::pair<TimestampedPiDetection, TimestampedPiDetection> new_pair;
      if (last_observation.pi_name < new_observation.pi_name) {
        new_pair = std::pair(last_observation, new_observation);
      } else if (last_observation.pi_name > new_observation.pi_name) {
        new_pair = std::pair(new_observation, last_observation);
      } else {
        LOG(WARNING) << "Got 2 observations in a row from same pi. Probably "
                        "not too much of an issue???";
      }
      detection_list.push_back(new_pair);

      // This bit is just for visualization and checking purposes-- use the
      // last two-board observation to figure out the current estimate
      // between the two cameras
      if (absl::GetFlag(FLAGS_visualize) &&
          two_board_extrinsics_list.size() > 0) {
        draw_vis = true;
        TimestampedPiDetection &last_two_board_ext =
            two_board_extrinsics_list[two_board_extrinsics_list.size() - 1];
        Eigen::Affine3d &H_boardA_boardB = last_two_board_ext.H_camera_target;
        int boardA_boardB_id = last_two_board_ext.board_id;

        TimestampedPiDetection camera1_boardA = new_pair.first;
        TimestampedPiDetection camera2_boardB = new_pair.second;
        // If camera1_boardA doesn't point to the first target, then swap
        // these two
        if (camera1_boardA.board_id != boardA_boardB_id) {
          camera1_boardA = new_pair.second;
          camera2_boardB = new_pair.first;
        }
        VLOG(1) << "Camera " << camera1_boardA.pi_name << " seeing board "
                << camera1_boardA.board_id << " and camera "
                << camera2_boardB.pi_name << " seeing board "
                << camera2_boardB.board_id;

        vis_robot_.DrawRobotOutline(
            H_world_board * camera1_boardA.H_camera_target.inverse(),
            camera1_boardA.pi_name, cv::Scalar(0, 0, 255));
        vis_robot_.DrawRobotOutline(
            H_world_board * H_boardA_boardB *
                camera2_boardB.H_camera_target.inverse(),
            camera2_boardB.pi_name, cv::Scalar(128, 128, 0));
        vis_robot_.DrawFrameAxes(
            H_world_board,
            std::string("Board ") + std::to_string(last_two_board_ext.board_id),
            cv::Scalar(0, 255, 0));
        vis_robot_.DrawFrameAxes(H_world_board * H_boardA_boardB, "Board B",
                                 cv::Scalar(255, 0, 0));
      }
      VLOG(1) << "Storing observation between " << new_pair.first.pi_name
              << ", target " << new_pair.first.board_id << " and "
              << new_pair.second.pi_name << ", target "
              << new_pair.second.board_id;
    } else {
      VLOG(2) << "Storing observation for " << node_name << " at time "
              << distributed_eof;
      last_observation = new_observation;
    }
  }

  if (absl::GetFlag(FLAGS_visualize)) {
    if (!rgb_image.empty()) {
      std::string image_name = node_name + " Image";
      cv::Mat rgb_small;
      cv::resize(rgb_image, rgb_small, cv::Size(), 0.5, 0.5);
      cv::imshow(image_name, rgb_small);
      cv::waitKey(absl::GetFlag(FLAGS_wait_key));
    }

    if (draw_vis) {
      cv::imshow("View", vis_robot_.image_);
      cv::waitKey(absl::GetFlag(FLAGS_wait_key));
      vis_robot_.ClearImage();
    }
  }
}

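// Callback for TargetMap (AprilTag) detections: filters out detections with
// invalid ids or high pose error, then hands the remaining poses to
// HandlePoses.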
void HandleTargetMap(const TargetMap &map,
                     aos::distributed_clock::time_point distributed_eof,
                     std::string node_name) {
  VLOG(1) << "Got april tag map call from node " << node_name;
  // Create empty RGB image in this case
  cv::Mat rgb_image;
  std::vector<TargetMapper::TargetPose> target_poses;

  for (const auto *target_pose_fbs : *map.target_poses()) {
    // Skip detections with invalid ids
    if (static_cast<TargetMapper::TargetId>(target_pose_fbs->id()) <
            absl::GetFlag(FLAGS_min_target_id) ||
        static_cast<TargetMapper::TargetId>(target_pose_fbs->id()) >
            absl::GetFlag(FLAGS_max_target_id)) {
      LOG(INFO) << "Skipping tag with invalid id of " << target_pose_fbs->id();
      continue;
    }

    // Skip detections with high pose errors
    if (target_pose_fbs->pose_error() > absl::GetFlag(FLAGS_max_pose_error)) {
      LOG(INFO) << "Skipping tag " << target_pose_fbs->id()
                << " due to pose error of " << target_pose_fbs->pose_error();
      continue;
    }
    // Skip detections with high pose error ratios
    if (target_pose_fbs->pose_error_ratio() >
        absl::GetFlag(FLAGS_max_pose_error_ratio)) {
      LOG(INFO) << "Skipping tag " << target_pose_fbs->id()
                << " due to pose error ratio of "
                << target_pose_fbs->pose_error_ratio();
      continue;
    }

    const TargetMapper::TargetPose target_pose =
        PoseUtils::TargetPoseFromFbs(*target_pose_fbs);

    target_poses.emplace_back(target_pose);

    Eigen::Affine3d H_camera_target =
        PoseUtils::Pose3dToAffine3d(target_pose.pose);
    VLOG(2) << node_name << " saw target " << target_pose.id
            << " from TargetMap at timestamp " << distributed_eof
            << " with pose = " << H_camera_target.matrix();
  }
  HandlePoses(rgb_image, target_poses, distributed_eof, node_name);
}

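// Callback for full image logs: runs the charuco extractor on the image and
// converts its rotation/translation outputs into TargetPoses for HandlePoses.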
void HandleImage(aos::EventLoop *event_loop, cv::Mat rgb_image,
                 const aos::monotonic_clock::time_point eof,
                 aos::distributed_clock::time_point distributed_eof,
                 frc971::vision::CharucoExtractor &charuco_extractor,
                 std::string node_name) {
  std::vector<cv::Vec4i> charuco_ids;
  std::vector<std::vector<cv::Point2f>> charuco_corners;
  bool valid = false;
  std::vector<Eigen::Vector3d> rvecs_eigen;
  std::vector<Eigen::Vector3d> tvecs_eigen;
  // Why eof vs. distributed_eof?
  charuco_extractor.ProcessImage(rgb_image, eof, event_loop->monotonic_now(),
                                 charuco_ids, charuco_corners, valid,
                                 rvecs_eigen, tvecs_eigen);
  if (rvecs_eigen.size() > 0 && !valid) {
    LOG(WARNING) << "Charuco extractor returned not valid";
    return;
  }

  std::vector<TargetMapper::TargetPose> target_poses;
  for (size_t i = 0; i < tvecs_eigen.size(); i++) {
    Eigen::Quaterniond rotation(
        frc971::controls::ToQuaternionFromRotationVector(rvecs_eigen[i]));
    ceres::examples::Pose3d pose(Eigen::Vector3d(tvecs_eigen[i]), rotation);
    TargetMapper::TargetPose target_pose{charuco_ids[i][0], pose};
    target_poses.emplace_back(target_pose);

    Eigen::Affine3d H_camera_target = PoseUtils::Pose3dToAffine3d(pose);
    VLOG(2) << node_name << " saw target " << target_pose.id
            << " from image at timestamp " << distributed_eof
            << " with pose = " << H_camera_target.matrix();
  }
  HandlePoses(rgb_image, target_poses, distributed_eof, node_name);
}

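// Sets up log replay with one detection event loop per pi, collects board
// observations from the log, then chains the averaged camera-to-camera poses
// off of the first camera's extrinsics and writes updated calibration files.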
void ExtrinsicsMain(int argc, char *argv[]) {
  vis_robot_ = VisualizeRobot(cv::Size(1000, 1000));
  vis_robot_.ClearImage();
  const double kFocalLength = 1000.0;
  const int kImageWidth = 1000;
  vis_robot_.SetDefaultViewpoint(kImageWidth, kFocalLength);

  std::optional<aos::FlatbufferDetachedBuffer<aos::Configuration>> config =
      (absl::GetFlag(FLAGS_config).empty()
           ? std::nullopt
           : std::make_optional(
                 aos::configuration::ReadConfig(absl::GetFlag(FLAGS_config))));

  // Open logfiles
  aos::logger::LogReader reader(
      aos::logger::SortParts(aos::logger::FindLogs(argc, argv)),
      config.has_value() ? &config->message() : nullptr);

  constexpr size_t kNumPis = 4;
  for (size_t i = 1; i <= kNumPis; i++) {
    reader.RemapLoggedChannel(absl::StrFormat("/pi%u/constants", i),
                              "y2023.Constants");
  }

  reader.RemapLoggedChannel("/imu/constants", "y2023.Constants");
  reader.RemapLoggedChannel("/logger/constants", "y2023.Constants");
  reader.RemapLoggedChannel("/roborio/constants", "y2023.Constants");
  reader.Register();

  SendSimulationConstants(reader.event_loop_factory(),
                          absl::GetFlag(FLAGS_team_number),
                          absl::GetFlag(FLAGS_constants_path));

  VLOG(1) << "Using target type " << absl::GetFlag(FLAGS_target_type);
  std::vector<std::string> node_list;
  node_list.push_back("pi1");
  node_list.push_back("pi2");
  node_list.push_back("pi3");
  node_list.push_back("pi4");
  std::vector<const calibration::CameraCalibration *> calibration_list;

  std::vector<std::unique_ptr<aos::EventLoop>> detection_event_loops;
  std::vector<frc971::vision::CharucoExtractor *> charuco_extractors;
  std::vector<frc971::vision::ImageCallback *> image_callbacks;
  std::vector<Eigen::Affine3d> default_extrinsics;

  for (uint i = 0; i < node_list.size(); i++) {
    std::string node = node_list[i];
    const aos::Node *pi =
        aos::configuration::GetNode(reader.configuration(), node.c_str());

    detection_event_loops.emplace_back(
        reader.event_loop_factory()->MakeEventLoop(
            (node + "_detection").c_str(), pi));

    frc971::constants::ConstantsFetcher<y2023::Constants> constants_fetcher(
        detection_event_loops.back().get());

    const calibration::CameraCalibration *calibration =
        FindCameraCalibration(constants_fetcher.constants(), node);
    calibration_list.push_back(calibration);

    frc971::vision::TargetType target_type =
        frc971::vision::TargetTypeFromString(absl::GetFlag(FLAGS_target_type));
    frc971::vision::CharucoExtractor *charuco_ext =
        new frc971::vision::CharucoExtractor(calibration, target_type);
    charuco_extractors.emplace_back(charuco_ext);

    cv::Mat extrinsics_cv = CameraExtrinsics(calibration).value();
    Eigen::Matrix4d extrinsics_matrix;
    cv::cv2eigen(extrinsics_cv, extrinsics_matrix);
    const auto ext_H_robot_pi = Eigen::Affine3d(extrinsics_matrix);
    default_extrinsics.emplace_back(ext_H_robot_pi);

    VLOG(1) << "Got extrinsics for " << node << " as\n"
            << default_extrinsics.back().matrix();

    if (absl::GetFlag(FLAGS_use_full_logs)) {
      LOG(INFO) << "Set up image callback for node " << node_list[i];
      frc971::vision::ImageCallback *image_callback =
          new frc971::vision::ImageCallback(
              detection_event_loops[i].get(), "/" + node_list[i] + "/camera",
              [&reader, &charuco_extractors, &detection_event_loops, &node_list,
               i](cv::Mat rgb_image,
                  const aos::monotonic_clock::time_point eof) {
                aos::distributed_clock::time_point pi_distributed_time =
                    reader.event_loop_factory()
                        ->GetNodeEventLoopFactory(
                            detection_event_loops[i].get()->node())
                        ->ToDistributedClock(eof);
                HandleImage(detection_event_loops[i].get(), rgb_image, eof,
                            pi_distributed_time, *charuco_extractors[i],
                            node_list[i]);
              });

      image_callbacks.emplace_back(image_callback);
    } else {
      detection_event_loops[i]->MakeWatcher(
          "/camera", [&reader, &detection_event_loops, &node_list,
                      i](const TargetMap &map) {
            aos::distributed_clock::time_point pi_distributed_time =
                reader.event_loop_factory()
                    ->GetNodeEventLoopFactory(detection_event_loops[i]->node())
                    ->ToDistributedClock(aos::monotonic_clock::time_point(
                        aos::monotonic_clock::duration(
                            map.monotonic_timestamp_ns())));

            HandleTargetMap(map, pi_distributed_time, node_list[i]);
          });
      LOG(INFO) << "Created watcher using the detection event loop for "
                << node_list[i] << " with i = " << i << " and size "
                << detection_event_loops.size();
    }
  }

  reader.event_loop_factory()->Run();

  // Do quick check to see what averaged two-board pose for each pi is
  // individually
  CHECK_GT(two_board_extrinsics_list.size(), 0u)
      << "Must have at least one view of both boards";
  int base_target_id = two_board_extrinsics_list[0].board_id;
  VLOG(1) << "Base id for two_board_extrinsics_list is " << base_target_id;
  for (auto node : node_list) {
    std::vector<TimestampedPiDetection> pose_list;
    for (auto ext : two_board_extrinsics_list) {
      CHECK_EQ(base_target_id, ext.board_id)
          << " All boards should have same reference id";
      if (ext.pi_name == node) {
        pose_list.push_back(ext);
      }
    }
    Eigen::Affine3d avg_pose_from_pi = ComputeAveragePose(pose_list);
    VLOG(1) << "Estimate from " << node << " with " << pose_list.size()
            << " observations is:\n"
            << avg_pose_from_pi.matrix();
  }
  Eigen::Affine3d H_boardA_boardB_avg =
      ComputeAveragePose(two_board_extrinsics_list);
  // TODO: Should probably do some outlier rejection
  LOG(INFO) << "Estimate of two board pose using all nodes with "
            << two_board_extrinsics_list.size() << " observations is:\n"
            << H_boardA_boardB_avg.matrix() << "\n";

  // Next, compute the relative camera poses
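  // When successive cameras each see a single board at (nearly) the same
  // time, both observations are first mapped back to boardA using
  // H_boardA_boardB_avg, and the camera-to-camera transform then follows as
  //   H_camera1_camera2 = H_camera1_boardA * H_camera2_boardA.inverse()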
  LOG(INFO) << "Got " << detection_list.size() << " extrinsic observations";
  std::vector<Eigen::Affine3d> H_camera1_camera2_list;
  std::vector<Eigen::Affine3d> updated_extrinsics;
  // Use the first node's extrinsics as our base, and fix from there
  updated_extrinsics.push_back(default_extrinsics[0]);
  LOG(INFO) << "Default extrinsic for node " << node_list[0] << " is "
            << default_extrinsics[0].matrix();
  for (uint i = 0; i < node_list.size() - 1; i++) {
    H_camera1_camera2_list.clear();
    // Go through the list, and find successive pairs of cameras
    for (auto [pose1, pose2] : detection_list) {
      if ((pose1.pi_name == node_list[i]) &&
          (pose2.pi_name == node_list[i + 1])) {
        Eigen::Affine3d H_camera1_boardA = pose1.H_camera_target;
        // If pose1 isn't referenced to base_target_id, correct that
        if (pose1.board_id != base_target_id) {
          // pose1.H_camera_target references boardB, so map back to boardA
          H_camera1_boardA =
              pose1.H_camera_target * H_boardA_boardB_avg.inverse();
        }

        // Now, get the camera2->boardA map (notice it's boardA, same as
        // camera1, so we can compute the difference based both on boardA)
        Eigen::Affine3d H_camera2_boardA = pose2.H_camera_target;
        // If pose2 isn't referenced to boardA (base_target_id), correct
        // that
        if (pose2.board_id != base_target_id) {
          // pose2.H_camera_target references boardB, so map back to boardA
          H_camera2_boardA =
              pose2.H_camera_target * H_boardA_boardB_avg.inverse();
        }

        // Compute camera1->camera2 map
        Eigen::Affine3d H_camera1_camera2 =
            H_camera1_boardA * H_camera2_boardA.inverse();
        H_camera1_camera2_list.push_back(H_camera1_camera2);
        VLOG(1) << "Map from camera " << pose1.pi_name << " and tag "
                << pose1.board_id << " with observation: \n"
                << pose1.H_camera_target.matrix() << "\n to camera "
                << pose2.pi_name << " and tag " << pose2.board_id
                << " with observation: \n"
                << pose2.H_camera_target.matrix() << "\ngot map as\n"
                << H_camera1_camera2.matrix();

        Eigen::Affine3d H_world_board;
        H_world_board = Eigen::Translation3d::Identity() *
                        Eigen::AngleAxisd(M_PI / 2.0, Eigen::Vector3d::UnitX());
        if (absl::GetFlag(FLAGS_alt_view)) {
          H_world_board = Eigen::Translation3d(0.0, 0.0, 3.0);
        }

        VLOG(2) << "Camera1 " << pose1.pi_name << " in world frame is \n"
                << (H_world_board * H_camera1_boardA.inverse()).matrix();
        VLOG(2) << "Camera2 " << pose2.pi_name << " in world frame is \n"
                << (H_world_board * H_camera2_boardA.inverse()).matrix();
      }
    }
    // TODO<Jim>: If we don't get any matches, we could just use default
    // extrinsics
    CHECK(H_camera1_camera2_list.size() > 0)
        << "Failed with zero poses for node " << node_list[i];
    if (H_camera1_camera2_list.size() > 0) {
      Eigen::Affine3d H_camera1_camera2_avg =
          ComputeAveragePose(H_camera1_camera2_list);
      LOG(INFO) << "From " << node_list[i] << " to " << node_list[i + 1]
                << " found " << H_camera1_camera2_list.size()
                << " observations, and the average pose is:\n"
                << H_camera1_camera2_avg.matrix();
      Eigen::Affine3d H_camera1_camera2_default =
          default_extrinsics[i].inverse() * default_extrinsics[i + 1];
      LOG(INFO) << "Compare this to that from default values:\n"
                << H_camera1_camera2_default.matrix();
      Eigen::Affine3d H_camera1_camera2_diff =
          H_camera1_camera2_avg * H_camera1_camera2_default.inverse();
      LOG(INFO)
          << "Difference between averaged and default delta poses "
             "has |T| = "
          << H_camera1_camera2_diff.translation().norm() << "m and |R| = "
          << Eigen::AngleAxisd(H_camera1_camera2_diff.rotation()).angle()
          << " radians ("
          << Eigen::AngleAxisd(H_camera1_camera2_diff.rotation()).angle() *
                 180.0 / M_PI
          << " degrees)";
      // Next extrinsic is just previous one * avg_delta_pose
      Eigen::Affine3d next_extrinsic =
          updated_extrinsics.back() * H_camera1_camera2_avg;
      updated_extrinsics.push_back(next_extrinsic);
      LOG(INFO) << "Default Extrinsic for " << node_list[i + 1] << " is \n"
                << default_extrinsics[i + 1].matrix();
      LOG(INFO) << "--> Updated Extrinsic for " << node_list[i + 1] << " is \n"
                << next_extrinsic.matrix();

      // Write out this extrinsic to a file
      flatbuffers::FlatBufferBuilder fbb;
      flatbuffers::Offset<flatbuffers::Vector<float>> data_offset =
          fbb.CreateVector(
              frc971::vision::MatrixToVector(next_extrinsic.matrix()));
      calibration::TransformationMatrix::Builder matrix_builder(fbb);
      matrix_builder.add_data(data_offset);
      flatbuffers::Offset<calibration::TransformationMatrix>
          extrinsic_calibration_offset = matrix_builder.Finish();

      calibration::CameraCalibration::Builder calibration_builder(fbb);
      calibration_builder.add_fixed_extrinsics(extrinsic_calibration_offset);
      const aos::realtime_clock::time_point realtime_now =
          aos::realtime_clock::now();
      calibration_builder.add_calibration_timestamp(
          realtime_now.time_since_epoch().count());
      fbb.Finish(calibration_builder.Finish());
      aos::FlatbufferDetachedBuffer<calibration::CameraCalibration>
          solved_extrinsics = fbb.Release();

      aos::FlatbufferDetachedBuffer<
          frc971::vision::calibration::CameraCalibration>
          cal_copy = aos::RecursiveCopyFlatBuffer(calibration_list[i + 1]);
      cal_copy.mutable_message()->clear_fixed_extrinsics();
      cal_copy.mutable_message()->clear_calibration_timestamp();
      aos::FlatbufferDetachedBuffer<calibration::CameraCalibration>
          merged_calibration = aos::MergeFlatBuffers(
              &cal_copy.message(), &solved_extrinsics.message());

      std::stringstream time_ss;
      time_ss << realtime_now;

      // Assumes node_list name is of form "pi#" to create camera id
      const std::string calibration_filename =
          absl::GetFlag(FLAGS_output_folder) +
          absl::StrFormat(
              "/calibration_pi-%d-%s_cam-%s_%s.json",
              absl::GetFlag(FLAGS_team_number), node_list[i + 1].substr(2, 3),
              calibration_list[i + 1]->camera_id()->data(), time_ss.str());

      LOG(INFO) << calibration_filename << " -> "
                << aos::FlatbufferToJson(merged_calibration,
                                         {.multi_line = true});

      aos::util::WriteStringToFileOrDie(
          calibration_filename,
          aos::FlatbufferToJson(merged_calibration, {.multi_line = true}));
    }
  }

  // Cleanup
  for (uint i = 0; i < image_callbacks.size(); i++) {
    delete charuco_extractors[i];
    delete image_callbacks[i];
  }
}
}  // namespace y2023::vision

int main(int argc, char **argv) {
  aos::InitGoogle(&argc, &argv);
  y2023::vision::ExtrinsicsMain(argc, argv);
}