#include <algorithm>
#include <chrono>
#include <string>

#include "Eigen/Dense"
#include "opencv2/aruco.hpp"
#include "opencv2/calib3d.hpp"
#include "opencv2/core/eigen.hpp"
#include "opencv2/features2d.hpp"
#include "opencv2/highgui.hpp"
#include "opencv2/imgproc.hpp"

#include "absl/strings/str_format.h"
#include "aos/configuration.h"
#include "aos/events/logging/log_reader.h"
#include "aos/events/simulated_event_loop.h"
#include "aos/init.h"
#include "aos/util/mcap_logger.h"
#include "frc971/constants/constants_sender_lib.h"
#include "frc971/control_loops/pose.h"
#include "frc971/vision/calibration_generated.h"
#include "frc971/vision/charuco_lib.h"
#include "frc971/vision/target_mapper.h"
#include "frc971/vision/vision_generated.h"
#include "frc971/vision/vision_util_lib.h"
#include "frc971/vision/visualize_robot.h"
#include "y2024_swerve/constants/simulated_constants_sender.h"
#include "y2024_swerve/vision/vision_util.h"

DEFINE_string(config, "",
              "If set, override the log's config file with this one.");
DEFINE_string(constants_path, "y2024_swerve/constants/constants.json",
              "Path to the constants file");
DEFINE_string(dump_constraints_to, "/tmp/mapping_constraints.txt",
              "Write the target constraints to this path");
DEFINE_string(dump_stats_to, "/tmp/mapping_stats.txt",
              "Write the mapping stats to this path");
DEFINE_string(field_name, "crescendo",
              "Field name, for the output json filename and flatbuffer field");
DEFINE_string(json_path, "y2024_swerve/vision/maps/target_map.json",
              "Specify path for json with initial pose guesses.");
DEFINE_double(max_pose_error, 1e-6,
              "Throw out target poses with a higher pose error than this");
DEFINE_double(
    max_pose_error_ratio, 0.4,
    "Throw out target poses with a higher pose error ratio than this");
DEFINE_string(mcap_output_path, "", "If set, path to write an MCAP log to.");
DEFINE_string(output_dir, "y2024_swerve/vision/maps",
              "Directory to write solved target map to");
DEFINE_double(pause_on_distance, 2.0,
              "Pause if two consecutive implied robot positions differ by more "
              "than this many meters");
DEFINE_string(orin, "orin1",
              "Orin name to generate mcap log for; defaults to orin1.");
DEFINE_uint64(skip_to, 1,
              "Start at combined image of this number (1 is the first image)");
DEFINE_bool(solve, true, "Whether to solve for the field's target map.");
DEFINE_bool(split_field, false,
            "Whether to break solve into two sides of field");
DEFINE_int32(team_number, 0,
             "Required: Use the calibration for a node with this team number");
DEFINE_uint64(wait_key, 1,
              "Time in ms to wait between images, if no click (0 to wait "
              "indefinitely until click).");

DECLARE_int32(frozen_target_id);
DECLARE_int32(min_target_id);
DECLARE_int32(max_target_id);
DECLARE_bool(visualize_solver);
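
// Example invocation (binary name, team number, and log path are
// illustrative):
//   target_mapping --team_number=971 --visualize_solver /tmp/logs/...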

namespace y2024_swerve::vision {
using frc971::vision::DataAdapter;
using frc971::vision::ImageCallback;
using frc971::vision::PoseUtils;
using frc971::vision::TargetMap;
using frc971::vision::TargetMapper;
using frc971::vision::VisualizeRobot;
namespace calibration = frc971::vision::calibration;

// Class to handle reading target poses from a replayed log,
// displaying various debug info, and passing the poses to
// frc971::vision::TargetMapper for field mapping.
class TargetMapperReplay {
 public:
  TargetMapperReplay(aos::logger::LogReader *reader);

  // Solves for the target poses with the accumulated detections if
  // FLAGS_solve.
  void MaybeSolve();

 private:
  static constexpr int kImageWidth = 1280;

  // Contains fixed target poses without solving, for use with visualization
  static const TargetMapper kFixedTargetMapper;

  // Map of TargetId to alliance "color" for splitting field
  static std::map<uint, std::string> kIdAllianceMap;

  // Change reference frame from camera to robot
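  // (H_a_b denotes the homogeneous transform mapping coordinates in frame b
  // to frame a, so H_robot_target = H_robot_camera * H_camera_target.)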
  static Eigen::Affine3d CameraToRobotDetection(Eigen::Affine3d H_camera_target,
                                                Eigen::Affine3d extrinsics);

  // Adds april tag detections into the detection list, and handles
  // visualization
  void HandleAprilTags(const TargetMap &map,
                       aos::distributed_clock::time_point node_distributed_time,
                       std::string camera_name, Eigen::Affine3d extrinsics);
  // Watches the target map channel of the given orin camera and passes
  // AprilTag poses to HandleAprilTags()
  void HandleNodeCaptures(
      aos::EventLoop *mapping_event_loop,
      frc971::constants::ConstantsFetcher<y2024_swerve::Constants>
          *constants_fetcher,
      int camera_number);

  aos::logger::LogReader *reader_;
  // April tag detections from all orins
  std::vector<DataAdapter::TimestampedDetection> timestamped_target_detections_;

  VisualizeRobot vis_robot_;
  // Set of camera names which are currently drawn on the display
  std::set<std::string> drawn_cameras_;
  // Number of frames displayed
  size_t display_count_;
  // Last time we drew onto the display image.
  // This is different from when we actually call imshow() to update
  // the display window
  aos::distributed_clock::time_point last_draw_time_;

  Eigen::Affine3d last_H_world_robot_;
  // Maximum distance between consecutive T_world_robot's in one display frame,
  // used to determine if we need to pause for the user to see this frame
  // clearly
  double max_delta_T_world_robot_;
  // Number of detections ignored for being too far from the camera.
  size_t ignore_count_ = 0;

  std::vector<std::unique_ptr<aos::EventLoop>> mapping_event_loops_;

  std::unique_ptr<aos::EventLoop> mcap_event_loop_;
  std::unique_ptr<aos::McapLogger> relogger_;
};

std::vector<CameraNode> node_list(y2024_swerve::vision::CreateNodeList());

std::map<std::string, int> camera_ordering_map(
    y2024_swerve::vision::CreateOrderingMap(node_list));

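// Alliance side of each tag id on the 2024 (Crescendo) field, used when
// --split_field is set to solve the two halves separately.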
std::map<uint, std::string> TargetMapperReplay::kIdAllianceMap = {
    {1, "red"},   {2, "red"},   {3, "red"},   {4, "red"},
    {5, "red"},   {6, "blue"},  {7, "blue"},  {8, "blue"},
    {9, "blue"},  {10, "blue"}, {11, "red"},  {12, "red"},
    {13, "red"},  {14, "blue"}, {15, "blue"}, {16, "blue"}};

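// Note: this is initialized at static-init time, before flags are parsed, so
// it always uses the default value of --json_path.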
const auto TargetMapperReplay::kFixedTargetMapper =
    TargetMapper(FLAGS_json_path, ceres::examples::VectorOfConstraints{});

Eigen::Affine3d TargetMapperReplay::CameraToRobotDetection(
    Eigen::Affine3d H_camera_target, Eigen::Affine3d extrinsics) {
  const Eigen::Affine3d H_robot_camera = extrinsics;
  const Eigen::Affine3d H_robot_target = H_robot_camera * H_camera_target;
  return H_robot_target;
}

TargetMapperReplay::TargetMapperReplay(aos::logger::LogReader *reader)
    : reader_(reader),
      timestamped_target_detections_(),
      vis_robot_(cv::Size(1280, 1000)),
      drawn_cameras_(),
      display_count_(0),
      last_draw_time_(aos::distributed_clock::min_time),
      last_H_world_robot_(Eigen::Matrix4d::Identity()),
      max_delta_T_world_robot_(0.0) {
  reader_->RemapLoggedChannel("/orin1/constants", "y2024_swerve.Constants");
  reader_->RemapLoggedChannel("/imu/constants", "y2024_swerve.Constants");
  // The roborio constants channel may be absent (e.g. on a Box of Orins), so
  // only remap it if it exists
  reader_->MaybeRemapLoggedChannel<Constants>("/roborio/constants");
  reader_->Register();

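  // Publish constants for the requested team into the simulation, so that
  // camera calibrations can be looked up during the replay.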
  SendSimulationConstants(reader_->event_loop_factory(), FLAGS_team_number,
                          FLAGS_constants_path);

  if (FLAGS_visualize_solver) {
    vis_robot_.ClearImage();
    // Set focal length to zoomed in, to view extrinsics
    const double kFocalLength = 1500.0;
    vis_robot_.SetDefaultViewpoint(kImageWidth, kFocalLength);
  }

  for (const CameraNode &camera_node : node_list) {
    const aos::Node *node = aos::configuration::GetNode(
        reader_->configuration(), camera_node.node_name.c_str());

    mapping_event_loops_.emplace_back(
        reader_->event_loop_factory()->MakeEventLoop(
            camera_node.node_name + "mapping", node));

    frc971::constants::ConstantsFetcher<y2024_swerve::Constants>
        constants_fetcher(mapping_event_loops_.back().get());
    HandleNodeCaptures(mapping_event_loops_.back().get(), &constants_fetcher,
                       camera_node.camera_number);

    if (FLAGS_visualize_solver) {
      // Draw each camera's extrinsic calibration at startup, so it can be
      // visually confirmed before mapping begins
      const auto *calibration = FindCameraCalibration(
          constants_fetcher.constants(),
          mapping_event_loops_.back()->node()->name()->string_view(),
          camera_node.camera_number);
      cv::Mat extrinsics_cv =
          frc971::vision::CameraExtrinsics(calibration).value();
      Eigen::Matrix4d extrinsics_matrix;
      cv::cv2eigen(extrinsics_cv, extrinsics_matrix);
      const auto extrinsics = Eigen::Affine3d(extrinsics_matrix);

      vis_robot_.DrawRobotOutline(extrinsics, camera_node.camera_name(),
                                  kOrinColors.at(camera_node.camera_name()));
    }
  }

  if (FLAGS_visualize_solver) {
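    // Show the accumulated extrinsics for all cameras, and block until a key
    // is pressed before starting the replay.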
    cv::imshow("Extrinsics", vis_robot_.image_);
    cv::waitKey(0);
    vis_robot_.ClearImage();
    // Reset focal length to more zoomed out view for field
    const double kFocalLength = 500.0;
    vis_robot_.SetDefaultViewpoint(kImageWidth, kFocalLength);
  }
}

// Add detected apriltag poses relative to the robot to
// timestamped_target_detections
void TargetMapperReplay::HandleAprilTags(
    const TargetMap &map,
    aos::distributed_clock::time_point node_distributed_time,
    std::string camera_name, Eigen::Affine3d extrinsics) {
  bool drew = false;
  std::stringstream label;
  label << camera_name << " - ";

  if (map.target_poses()->size() == 0) {
    VLOG(2) << "Got 0 AprilTags for camera " << camera_name;
    return;
  }

  for (const auto *target_pose_fbs : *map.target_poses()) {
    // Skip detections with invalid ids
    if (static_cast<TargetMapper::TargetId>(target_pose_fbs->id()) <
            FLAGS_min_target_id ||
        static_cast<TargetMapper::TargetId>(target_pose_fbs->id()) >
            FLAGS_max_target_id) {
      VLOG(1) << "Skipping tag with invalid id of " << target_pose_fbs->id();
      continue;
    }

    // Skip detections with high pose errors
    if (target_pose_fbs->pose_error() > FLAGS_max_pose_error) {
      VLOG(1) << "Skipping tag " << target_pose_fbs->id()
              << " due to pose error of " << target_pose_fbs->pose_error();
      continue;
    }
    // Skip detections with high pose error ratios
    if (target_pose_fbs->pose_error_ratio() > FLAGS_max_pose_error_ratio) {
      VLOG(1) << "Skipping tag " << target_pose_fbs->id()
              << " due to pose error ratio of "
              << target_pose_fbs->pose_error_ratio();
      continue;
    }

    const TargetMapper::TargetPose target_pose =
        PoseUtils::TargetPoseFromFbs(*target_pose_fbs);

    Eigen::Affine3d H_camera_target =
        Eigen::Translation3d(target_pose.pose.p) * target_pose.pose.q;
    Eigen::Affine3d H_robot_target =
        CameraToRobotDetection(H_camera_target, extrinsics);

    ceres::examples::Pose3d target_pose_camera =
        PoseUtils::Affine3dToPose3d(H_camera_target);
    double distance_from_camera = target_pose_camera.p.norm();
    double distortion_factor = target_pose_fbs->distortion_factor();

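    // Distant detections give much noisier pose estimates, so drop anything
    // farther away than this threshold (in meters).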
    double distance_threshold = 5.0;
    if (distance_from_camera > distance_threshold) {
      ignore_count_++;
      LOG(INFO) << "Ignored " << ignore_count_ << " AprilTags with distance "
                << distance_from_camera << " > " << distance_threshold;
      continue;
    }

    CHECK(map.has_monotonic_timestamp_ns())
        << "Need detection timestamps for mapping";

    // Detection is usable, so store it
    timestamped_target_detections_.emplace_back(
        DataAdapter::TimestampedDetection{
            .time = node_distributed_time,
            .H_robot_target = H_robot_target,
            .distance_from_camera = distance_from_camera,
            .distortion_factor = distortion_factor,
            .id = static_cast<TargetMapper::TargetId>(target_pose.id)});

    if (FLAGS_visualize_solver) {
      // If we've already drawn this camera_name in the current image,
      // display the image before clearing and adding the new poses
      if (drawn_cameras_.count(camera_name) != 0) {
        display_count_++;
        cv::putText(vis_robot_.image_,
                    "Poses #" + std::to_string(display_count_),
                    cv::Point(600, 10), cv::FONT_HERSHEY_PLAIN, 1.0,
                    cv::Scalar(255, 255, 255));

        if (display_count_ >= FLAGS_skip_to) {
          VLOG(1) << "Showing image for camera " << camera_name
                  << " since we've drawn it already";
          cv::imshow("View", vis_robot_.image_);
          // Pause if delta_T is too large, but only after the first image (to
          // make sure the deltas are correct)
          if (max_delta_T_world_robot_ > FLAGS_pause_on_distance &&
              display_count_ > 1) {
            LOG(INFO) << "Pausing since the delta between robot estimates is "
                      << max_delta_T_world_robot_ << " which is > threshold of "
                      << FLAGS_pause_on_distance;
            cv::waitKey(0);
          } else {
            cv::waitKey(FLAGS_wait_key);
          }
          max_delta_T_world_robot_ = 0.0;
        } else {
          VLOG(2) << "At poses #" << std::to_string(display_count_);
        }
        vis_robot_.ClearImage();
        drawn_cameras_.clear();
      }

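      // Compute the robot pose implied by the seed map's pose for this
      // target; this is only used for visualization, not by the solver.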
      Eigen::Affine3d H_world_target = PoseUtils::Pose3dToAffine3d(
          kFixedTargetMapper.GetTargetPoseById(target_pose_fbs->id())->pose);
      Eigen::Affine3d H_world_robot = H_world_target * H_robot_target.inverse();
      VLOG(2) << camera_name << ", id " << target_pose_fbs->id()
              << ", t = " << node_distributed_time
              << ", pose_error = " << target_pose_fbs->pose_error()
              << ", pose_error_ratio = " << target_pose_fbs->pose_error_ratio()
              << ", robot_pos (x,y,z) = "
              << H_world_robot.translation().transpose();

      label << "id " << target_pose_fbs->id()
            << ": err (% of max): " << target_pose_fbs->pose_error() << " ("
            << (target_pose_fbs->pose_error() / FLAGS_max_pose_error)
            << ") err_ratio: " << target_pose_fbs->pose_error_ratio() << " ";

      vis_robot_.DrawRobotOutline(H_world_robot, camera_name,
                                  kOrinColors.at(camera_name));
      vis_robot_.DrawFrameAxes(H_world_target,
                               std::to_string(target_pose_fbs->id()),
                               kOrinColors.at(camera_name));

      double delta_T_world_robot =
          (H_world_robot.translation() - last_H_world_robot_.translation())
              .norm();
      max_delta_T_world_robot_ =
          std::max(delta_T_world_robot, max_delta_T_world_robot_);

      VLOG(1) << "Drew in info for camera " << camera_name << " and target #"
              << target_pose_fbs->id();
      drew = true;
      last_draw_time_ = node_distributed_time;
      last_H_world_robot_ = H_world_robot;
    }
  }
  if (FLAGS_visualize_solver) {
    if (drew) {
      // Collect all the labels from a given camera, and add the text
      // TODO: Need to fix this one
      int position_number = camera_ordering_map[camera_name];
      cv::putText(vis_robot_.image_, label.str(),
                  cv::Point(10, 30 + 20 * position_number),
                  cv::FONT_HERSHEY_PLAIN, 1.0, kOrinColors.at(camera_name));
      drawn_cameras_.emplace(camera_name);
    } else if (node_distributed_time - last_draw_time_ >
                   std::chrono::milliseconds(30) &&
               display_count_ >= FLAGS_skip_to) {
      // TODO: Check on 30ms value-- does this make sense?
      double delta_t = (node_distributed_time - last_draw_time_).count() / 1e6;
      VLOG(1) << "Last result was " << delta_t << "ms ago";
      cv::putText(vis_robot_.image_, "No detections in last 30ms",
                  cv::Point(10, 0), cv::FONT_HERSHEY_PLAIN, 1.0,
                  kOrinColors.at(camera_name));
      // Display and clear the image if we haven't drawn in a while
      VLOG(1) << "Displaying image due to time lapse";
      cv::imshow("View", vis_robot_.image_);
      cv::waitKey(FLAGS_wait_key);
      max_delta_T_world_robot_ = 0.0;
      drawn_cameras_.clear();
    }
  }
}

void TargetMapperReplay::HandleNodeCaptures(
    aos::EventLoop *mapping_event_loop,
    frc971::constants::ConstantsFetcher<y2024_swerve::Constants>
        *constants_fetcher,
    int camera_number) {
  // Get the camera extrinsics
  std::string node_name =
      std::string(mapping_event_loop->node()->name()->string_view());
  const auto *calibration = FindCameraCalibration(
      constants_fetcher->constants(), node_name, camera_number);
  cv::Mat extrinsics_cv = frc971::vision::CameraExtrinsics(calibration).value();
  Eigen::Matrix4d extrinsics_matrix;
  cv::cv2eigen(extrinsics_cv, extrinsics_matrix);
  const auto extrinsics = Eigen::Affine3d(extrinsics_matrix);
  std::string camera_name = absl::StrFormat(
      "/%s/camera%d", mapping_event_loop->node()->name()->str(), camera_number);

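  // Watch this camera's TargetMap channel. Each detection's timestamp is
  // converted to the distributed clock so detections from different orins
  // share a common timebase.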
  mapping_event_loop->MakeWatcher(
      camera_name.c_str(), [this, mapping_event_loop, extrinsics,
                            camera_name](const TargetMap &map) {
        aos::distributed_clock::time_point node_distributed_time =
            reader_->event_loop_factory()
                ->GetNodeEventLoopFactory(mapping_event_loop->node())
                ->ToDistributedClock(aos::monotonic_clock::time_point(
                    aos::monotonic_clock::duration(
                        map.monotonic_timestamp_ns())));

        HandleAprilTags(map, node_distributed_time, camera_name, extrinsics);
      });
}

void TargetMapperReplay::MaybeSolve() {
  if (FLAGS_solve) {
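    // Turn the accumulated timestamped detections into pairwise constraints
    // between targets for the solver.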
    auto target_constraints =
        DataAdapter::MatchTargetDetections(timestamped_target_detections_);

    if (FLAGS_split_field) {
      // Remove constraints between the two sides of the field - these are
      // basically garbage because of how far away the cameras are. We will
      // use seeding below to connect the two sides
      target_constraints.erase(
          std::remove_if(
              target_constraints.begin(), target_constraints.end(),
              [](const auto &constraint) {
                return (
                    kIdAllianceMap[static_cast<uint>(constraint.id_begin)] !=
                    kIdAllianceMap[static_cast<uint>(constraint.id_end)]);
              }),
          target_constraints.end());
    }

    LOG(INFO) << "Solving for locations of tags with "
              << target_constraints.size() << " constraints";
    TargetMapper mapper(FLAGS_json_path, target_constraints);
    mapper.Solve(FLAGS_field_name, FLAGS_output_dir);

    if (!FLAGS_dump_constraints_to.empty()) {
      mapper.DumpConstraints(FLAGS_dump_constraints_to);
    }
    if (!FLAGS_dump_stats_to.empty()) {
      mapper.DumpStats(FLAGS_dump_stats_to);
    }
    mapper.PrintDiffs();
  }
}

void MappingMain(int argc, char *argv[]) {
  std::optional<aos::FlatbufferDetachedBuffer<aos::Configuration>> config =
      (FLAGS_config.empty()
           ? std::nullopt
           : std::make_optional(aos::configuration::ReadConfig(FLAGS_config)));

  // Open logfiles
  aos::logger::LogReader reader(
      aos::logger::SortParts(aos::logger::FindLogs(argc, argv)),
      config.has_value() ? &config->message() : nullptr);

  TargetMapperReplay mapper_replay(&reader);
  reader.event_loop_factory()->Run();
  mapper_replay.MaybeSolve();
}

}  // namespace y2024_swerve::vision

int main(int argc, char **argv) {
  aos::InitGoogle(&argc, &argv);
  y2024_swerve::vision::MappingMain(argc, argv);
}