#include "y2023/vision/aprilrobotics.h"

DEFINE_bool(
    debug, false,
    "If true, dump a ton of debug and crash on the first valid detection.");

DEFINE_int32(team_number, 971,
             "Use the calibration for a node with this team number");

namespace y2023 {
namespace vision {

AprilRoboticsDetector::AprilRoboticsDetector(aos::EventLoop *event_loop,
                                             std::string_view channel_name)
    : calibration_data_(CalibrationData()),
      ftrace_(),
      image_callback_(event_loop, channel_name,
                      [&](cv::Mat image_color_mat,
                          const aos::monotonic_clock::time_point /*eof*/) {
                        HandleImage(image_color_mat);
                      }),
      target_map_sender_(
          event_loop->MakeSender<frc971::vision::TargetMap>("/camera")) {
  tag_family_ = tag16h5_create();
  tag_detector_ = apriltag_detector_create();
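
  // Configure the detector: decode 16h5 tags with up to 1 bit of error
  // correction, spread quad detection across 6 worker threads, and require at
  // least 5 counts of white/black contrast when thresholding candidate quads.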
  apriltag_detector_add_family_bits(tag_detector_, tag_family_, 1);
  tag_detector_->nthreads = 6;
  tag_detector_->wp = workerpool_create(tag_detector_->nthreads);
  tag_detector_->qtp.min_white_black_diff = 5;
  tag_detector_->debug = FLAGS_debug;

  std::string hostname = aos::network::GetHostname();

  // Check that the hostname parses into a valid pi number and team number.
  std::optional<uint16_t> pi_number = aos::network::ParsePiNumber(hostname);
  std::optional<uint16_t> team_number =
      aos::network::team_number_internal::ParsePiTeamNumber(hostname);
  CHECK(pi_number) << "Unable to parse pi number from '" << hostname << "'";
  CHECK(team_number);
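
  // Look up the calibration for this pi and cache its intrinsics and
  // distortion coefficients; the intrinsics are used for tag pose estimation
  // in DetectTags().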
  calibration_ = FindCameraCalibration(&calibration_data_.message(),
                                       "pi" + std::to_string(*pi_number));
  intrinsics_ = CameraIntrinsics(calibration_);
  camera_distortion_coeffs_ = CameraDistCoeffs(calibration_);

  image_callback_.set_format(frc971::vision::ImageCallback::Format::GRAYSCALE);
}

AprilRoboticsDetector::~AprilRoboticsDetector() {
  apriltag_detector_destroy(tag_detector_);
  // Use the matching destroy function so the family's internal code tables are
  // freed as well.
  tag16h5_destroy(tag_family_);
}
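
// Pins each apriltag worker pool thread to its own CPU and gives it real-time
// (SCHED_FIFO, priority 20) scheduling.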
void AprilRoboticsDetector::SetWorkerpoolAffinities() {
  for (int i = 0; i < tag_detector_->wp->nthreads; i++) {
    cpu_set_t affinity;
    CPU_ZERO(&affinity);
    CPU_SET(i, &affinity);
    pthread_setaffinity_np(tag_detector_->wp->threads[i], sizeof(affinity),
                           &affinity);
    struct sched_param param;
    param.sched_priority = 20;
    int res = pthread_setschedparam(tag_detector_->wp->threads[i], SCHED_FIFO,
                                    &param);
    PCHECK(res == 0) << "Failed to set priority of threadpool threads";
  }
}
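
// Runs detection and pose estimation on one frame and publishes the results as
// a TargetMap flatbuffer on /camera.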
void AprilRoboticsDetector::HandleImage(cv::Mat image_color_mat) {
  std::vector<std::pair<apriltag_detection_t, apriltag_pose_t>> detections =
      DetectTags(image_color_mat);

  auto builder = target_map_sender_.MakeBuilder();
  std::vector<flatbuffers::Offset<frc971::vision::TargetPoseFbs>> target_poses;
  for (const auto &[detection, pose] : detections) {
    target_poses.emplace_back(
        BuildTargetPose(pose, detection.id, builder.fbb()));
  }
  const auto target_poses_offset = builder.fbb()->CreateVector(target_poses);
  auto target_map_builder = builder.MakeBuilder<frc971::vision::TargetMap>();

  target_map_builder.add_target_poses(target_poses_offset);
  builder.CheckOk(builder.Send(target_map_builder.Finish()));
}
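
// Packs an apriltag pose (rotation matrix plus translation vector) into a
// TargetPoseFbs offset: translation as x/y/z and rotation as roll/pitch/yaw.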
flatbuffers::Offset<frc971::vision::TargetPoseFbs>
AprilRoboticsDetector::BuildTargetPose(
    const apriltag_pose_t &pose,
    frc971::vision::TargetMapper::TargetId target_id,
    flatbuffers::FlatBufferBuilder *fbb) {
  const auto T =
      Eigen::Translation3d(pose.t->data[0], pose.t->data[1], pose.t->data[2]);
  const auto rpy = frc971::vision::PoseUtils::RotationMatrixToEulerAngles(
      Eigen::Matrix3d(pose.R->data));
  return frc971::vision::CreateTargetPoseFbs(*fbb, target_id, T.x(), T.y(),
                                             T.z(), rpy(0), rpy(1), rpy(2));
}
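
// Runs the apriltag detector on a grayscale image and estimates a pose for
// every sufficiently confident detection, returning (detection, pose) pairs.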
std::vector<std::pair<apriltag_detection_t, apriltag_pose_t>>
AprilRoboticsDetector::DetectTags(cv::Mat image) {
  const aos::monotonic_clock::time_point start_time =
      aos::monotonic_clock::now();
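
  // Wrap the grayscale OpenCV image in an apriltag image_u8_t without copying;
  // stride == width because the callback delivers a single-channel, densely
  // packed image.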
  image_u8_t im = {
      .width = image.cols,
      .height = image.rows,
      .stride = image.cols,
      .buf = image.data,
  };

  ftrace_.FormatMessage("Starting detect\n");
  zarray_t *detections = apriltag_detector_detect(tag_detector_, &im);
  ftrace_.FormatMessage("Done detecting\n");

  std::vector<std::pair<apriltag_detection_t, apriltag_pose_t>> results;

  for (int i = 0; i < zarray_size(detections); i++) {
    apriltag_detection_t *det;
    zarray_get(detections, i, &det);
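
    // Only keep confident decodes; decision_margin measures how far the data
    // bits were from the decision threshold, with larger values meaning a
    // cleaner decode.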
    if (det->decision_margin > 30) {
      VLOG(1) << "Found tag number " << det->id << " hamming: " << det->hamming
              << " margin: " << det->decision_margin;

      const aos::monotonic_clock::time_point before_pose_estimation =
          aos::monotonic_clock::now();
      // Fill out an apriltag_detection_info_t with the known tag size and
      // camera intrinsics so the library can estimate the tag's pose.
      apriltag_detection_info_t info;
      info.det = det;
      // Tag edge length in meters (0.1524 m == 6 inches).
      info.tagsize = 0.1524;
      // Focal lengths and principal point from the calibrated camera matrix.
      info.fx = intrinsics_.at<double>(0, 0);
      info.fy = intrinsics_.at<double>(1, 1);
      info.cx = intrinsics_.at<double>(0, 2);
      info.cy = intrinsics_.at<double>(1, 2);

      apriltag_pose_t pose;
      double err = estimate_tag_pose(&info, &pose);

      VLOG(1) << "err: " << err;

      results.emplace_back(*det, pose);

      const aos::monotonic_clock::time_point after_pose_estimation =
          aos::monotonic_clock::now();

      VLOG(1) << "Took "
              << std::chrono::duration<double>(after_pose_estimation -
                                               before_pose_estimation)
                     .count()
              << " seconds for pose estimation";
    }
  }
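
  // results holds value copies of the detection structs, so the plain fields
  // (id, hamming, margin, corners) stay valid after the zarray is destroyed;
  // pointer members such as the homography do not.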
  apriltag_detections_destroy(detections);

  const aos::monotonic_clock::time_point end_time = aos::monotonic_clock::now();

  timeprofile_display(tag_detector_->tp);

  VLOG(1) << "Took "
          << std::chrono::duration<double>(end_time - start_time).count()
          << " seconds to detect overall";

  return results;
}

}  // namespace vision
}  // namespace y2023