blob: d73d171a6c8a2f71ca0a9e904c557cc15e578edb [file] [log] [blame]
James Kuszmaul313e9ce2024-02-11 17:47:33 -08001#include "y2024/localizer/localizer.h"
2
3#include "gflags/gflags.h"
4
5#include "aos/containers/sized_array.h"
6#include "frc971/control_loops/drivetrain/localizer_generated.h"
7#include "frc971/control_loops/pose.h"
James Kuszmaul86116c22024-03-15 22:50:34 -07008#include "frc971/math/flatbuffers_matrix.h"
James Kuszmaul313e9ce2024-02-11 17:47:33 -08009#include "frc971/vision/target_map_utils.h"
10#include "y2024/constants.h"
11
James Kuszmaul86116c22024-03-15 22:50:34 -070012DEFINE_double(max_pose_error, 1e-5,
James Kuszmaul313e9ce2024-02-11 17:47:33 -080013 "Throw out target poses with a higher pose error than this");
Austin Schuh98b732d2024-03-22 19:59:09 -070014DEFINE_double(max_distortion, 1000.0, "");
James Kuszmaul313e9ce2024-02-11 17:47:33 -080015DEFINE_double(
16 max_pose_error_ratio, 0.4,
17 "Throw out target poses with a higher pose error ratio than this");
James Kuszmaul86116c22024-03-15 22:50:34 -070018DEFINE_double(distortion_noise_scalar, 4.0,
James Kuszmaul313e9ce2024-02-11 17:47:33 -080019 "Scale the target pose distortion factor by this when computing "
20 "the noise.");
21DEFINE_double(
James Kuszmaul86116c22024-03-15 22:50:34 -070022 max_implied_yaw_error, 5.0,
James Kuszmaul313e9ce2024-02-11 17:47:33 -080023 "Reject target poses that imply a robot yaw of more than this many degrees "
24 "off from our estimate.");
25DEFINE_double(
26 max_implied_teleop_yaw_error, 30.0,
27 "Reject target poses that imply a robot yaw of more than this many degrees "
28 "off from our estimate.");
29DEFINE_double(max_distance_to_target, 5.0,
30 "Reject target poses that have a 3d distance of more than this "
31 "many meters.");
James Kuszmaul592055e2024-03-23 20:12:59 -070032DEFINE_double(max_auto_image_robot_speed, 5.0,
James Kuszmaul313e9ce2024-02-11 17:47:33 -080033 "Reject target poses when the robot is travelling faster than "
34 "this speed in auto.");
James Kuszmaul86116c22024-03-15 22:50:34 -070035DEFINE_bool(
James Kuszmaul592055e2024-03-23 20:12:59 -070036 do_xytheta_corrections, false,
James Kuszmaul86116c22024-03-15 22:50:34 -070037 "If set, uses the x/y/theta corrector rather than a heading/distance/skew "
38 "one. This is better conditioned currently, but is theoretically worse due "
39 "to not capturing noise effectively.");
40DEFINE_bool(
41 always_use_extra_tags, true,
42 "If set, we will use the \"deweighted\" tags even in auto mode (this "
43 "affects april tags whose field positions we do not trust as much).");
James Kuszmaul313e9ce2024-02-11 17:47:33 -080044
45namespace y2024::localizer {
46namespace {
47constexpr std::array<std::string_view, Localizer::kNumCameras>
Maxwell Henderson3279bc52024-03-01 09:50:53 -080048 kDetectionChannels{"/orin1/camera0", "/orin1/camera1", "/imu/camera0",
49 "/imu/camera1"};
James Kuszmaul313e9ce2024-02-11 17:47:33 -080050
51size_t CameraIndexForName(std::string_view name) {
52 for (size_t index = 0; index < kDetectionChannels.size(); ++index) {
53 if (name == kDetectionChannels.at(index)) {
54 return index;
55 }
56 }
57 LOG(FATAL) << "No camera channel named " << name;
58}
59
60std::map<uint64_t, Localizer::Transform> GetTargetLocations(
61 const Constants &constants) {
62 CHECK(constants.has_common());
63 CHECK(constants.common()->has_target_map());
64 CHECK(constants.common()->target_map()->has_target_poses());
65 std::map<uint64_t, Localizer::Transform> transforms;
66 for (const frc971::vision::TargetPoseFbs *target :
67 *constants.common()->target_map()->target_poses()) {
68 CHECK(target->has_id());
69 CHECK(target->has_position());
70 CHECK(target->has_orientation());
71 CHECK_EQ(0u, transforms.count(target->id()));
72 transforms[target->id()] = PoseToTransform(target);
73 }
74 return transforms;
75}
James Kuszmaul86116c22024-03-15 22:50:34 -070076
// Returns the "nominal" covariance of localizer---i.e., the values to which it
// tends to converge during normal operation. By initializing the localizer's
// covariance this way, we reduce the likelihood that the first few corrections
// we receive will result in insane jumps in robot state.
Eigen::Matrix<double, Localizer::HybridEkf::kNStates,
              Localizer::HybridEkf::kNStates>
NominalCovariance() {
  // The literal below was captured column-major (hence "P_transpose"); we
  // transpose it on return so the result is the covariance itself.
  Eigen::Matrix<double, Localizer::HybridEkf::kNStates,
                Localizer::HybridEkf::kNStates>
      P_transpose;
  // Grabbed from when the robot was in a steady-state.
  P_transpose << 0.00478504226469438, 0.000253940126278529,
      -0.000162526741742492, 2.25403185759796e-09, 0.0101734987442698,
      2.25403195618803e-09, 0.0101734987442698, 0.0253922208811703,
      0.0253922210268363, -2.21692792749728e-10, 1.30552506376491e-05,
      8.24314992005184e-07, 0.000253940126278532, 0.00189751717312843,
      0.000513974713526466, 2.03445653416419e-10, 0.00091777414692514,
      2.03445505573468e-10, 0.00091777414692514, 0.002190445323373,
      0.00219044511582939, 3.32473307499143e-10, 1.45178014834701e-06,
      1.71788107973058e-05, -0.000162526741742491, 0.000513974713526467,
      0.000241235997378754, -2.30353529071927e-12, -1.03627077991455e-05,
      -2.30350039899681e-12, -1.03627077991157e-05, -6.36337811958761e-06,
      -6.62065263890835e-06, 1.24447423005307e-09, -1.228397466134e-07,
      2.45695800192927e-06, 2.25403185760077e-09, 2.0344565341686e-10,
      -2.30353529071687e-12, 4.99964876555835e-09, 4.09452976434092e-08,
      -1.11086080247582e-15, 4.09452976433419e-08, 1.61945884581856e-07,
      1.61950413812579e-07, 4.58556491207338e-08, -1.0257731581937e-12,
      3.0118336328036e-13, 0.0101734987442698, 0.000917774146925141,
      -1.03627077991456e-05, 4.09452976433247e-08, 0.186711669156372,
      4.09452978206736e-08, 0.186711669156351, 0.747606782854604,
      0.747606783311591, -3.98476625129118e-10, 4.53292935526394e-05,
      1.34809505728832e-06, 2.2540319561823e-09, 2.03445505573217e-10,
      -2.30350039893596e-12, -1.11086077014401e-15, 4.09452978206229e-08,
      4.99964876557609e-09, 4.0945297820556e-08, 1.61950414014674e-07,
      1.61945884988215e-07, -4.58556492982177e-08, -1.02577287448067e-12,
      3.01180296453645e-13, 0.0101734987442698, 0.000917774146925141,
      -1.03627077991158e-05, 4.09452976433153e-08, 0.186711669156351,
      4.09452978206613e-08, 0.186711669156372, 0.747606782852986,
      0.747606783313209, -3.98476449084643e-10, 4.53292935526394e-05,
      1.34809505728832e-06, 0.0253922208811701, 0.00219044532337299,
      -6.36337811958279e-06, 1.61945884583411e-07, 0.747606782854602,
      1.61950414014798e-07, 0.747606782852984, 4.36530695987946,
      4.17234874741425, 7.37989263565032e-07, 0.000112905097332305,
      1.0761727407346e-06, 0.025392221026836, 0.00219044511582942,
      -6.62065263891535e-06, 1.61950413812353e-07, 0.747606783311594,
      1.61945884987625e-07, 0.747606783313212, 4.17234874741427,
      4.36530696204959, -7.39350829913324e-07, 0.000112905097765367,
      1.07616825738023e-06, -2.21692793550929e-10, 3.32473307500738e-10,
      1.24447423005688e-09, 4.58556491207295e-08, -3.98476620685685e-10,
      -4.58556492984426e-08, -3.98476445724907e-10, 7.3798926341289e-07,
      -7.39350829974392e-07, 0.212257282137077, -6.38021734059486e-13,
      6.89673203238e-12, 1.30552506376492e-05, 1.451780148347e-06,
      -1.22839746613403e-07, -1.02577315819363e-12, 4.53292935526395e-05,
      -1.02577287448185e-12, 4.53292935526395e-05, 0.000112905097332305,
      0.000112905097765368, -6.38021733687597e-13, 4.99487202342848e-05,
      7.45706935797857e-09, 8.24314992005172e-07, 1.7178810797306e-05,
      2.45695800192931e-06, 3.01183363281006e-13, 1.34809505728833e-06,
      3.01180296453493e-13, 1.34809505728833e-06, 1.07617274073465e-06,
      1.07616825738027e-06, 6.89673203233812e-12, 7.45706935797858e-09,
      4.97065161286885e-05;
  return P_transpose.transpose();
}
James Kuszmaul313e9ce2024-02-11 17:47:33 -0800139} // namespace
140
// Builds the per-camera state (extrinsics + debug sender) from the robot
// constants. Every camera listed in the constants must have fixed (non-turret)
// extrinsics and must map, via its node name and camera number, onto one of
// the channels in kDetectionChannels; crashes otherwise.
std::array<Localizer::CameraState, Localizer::kNumCameras>
Localizer::MakeCameras(const Constants &constants, aos::EventLoop *event_loop) {
  CHECK(constants.has_cameras());
  std::array<Localizer::CameraState, Localizer::kNumCameras> cameras;
  for (const CameraConfiguration *camera : *constants.cameras()) {
    CHECK(camera->has_calibration());
    const frc971::vision::calibration::CameraCalibration *calibration =
        camera->calibration();
    CHECK(!calibration->has_turret_extrinsics())
        << "The 2024 robot does not have cameras on a turret.";
    CHECK(calibration->has_node_name());
    // Channel names follow the "/<node>/camera<N>" convention used by
    // kDetectionChannels.
    const std::string channel_name =
        absl::StrFormat("/%s/camera%d", calibration->node_name()->string_view(),
                        calibration->camera_number());
    const size_t index = CameraIndexForName(channel_name);
    // We default-construct the extrinsics matrix to all-zeros; use that to
    // sanity-check whether we have populated the matrix yet or not.
    CHECK(cameras.at(index).extrinsics.norm() == 0)
        << "Got multiple calibrations for "
        << calibration->node_name()->string_view();
    CHECK(calibration->has_fixed_extrinsics());
    cameras.at(index).extrinsics =
        frc971::control_loops::drivetrain::FlatbufferToTransformationMatrix(
            *calibration->fixed_extrinsics());
    cameras.at(index).debug_sender =
        event_loop->MakeSender<VisualizationStatic>(channel_name);
  }
  // Every camera slot must have been filled in by exactly one calibration.
  for (const CameraState &camera : cameras) {
    CHECK(camera.extrinsics.norm() != 0) << "Missing a camera calibration.";
  }
  return cameras;
}
173
// Sets up the localizer: loads drivetrain/camera/target configuration from the
// constants fetcher, constructs the EKF and its correctors, and registers
// watchers for the camera target detections, IMU data (via imu_watcher_), and
// LocalizerControl reset messages, plus a 20 ms phased loop for output.
Localizer::Localizer(aos::EventLoop *event_loop)
    : event_loop_(event_loop),
      constants_fetcher_(event_loop),
      dt_config_(
          frc971::control_loops::drivetrain::DrivetrainConfig<double>::
              FromFlatbuffer(*CHECK_NOTNULL(
                  constants_fetcher_.constants().common()->drivetrain()))),
      cameras_(MakeCameras(constants_fetcher_.constants(), event_loop)),
      target_poses_(GetTargetLocations(constants_fetcher_.constants())),
      down_estimator_(dt_config_),
      // Force the dt to 1 ms (the nominal IMU frequency) since we have observed
      // issues with timing on the orins.
      // TODO(james): Ostensibly, we should be able to use the timestamps from
      // the IMU board itself for exactly this; however, I am currently worried
      // about the impacts of clock drift in using that.
      ekf_(dt_config_, std::chrono::milliseconds(1)),
      observations_(&ekf_),
      xyz_observations_(&ekf_),
      imu_watcher_(event_loop, dt_config_,
                   y2024::constants::Values::DrivetrainEncoderToMeters(1),
                   std::bind(&Localizer::HandleImu, this, std::placeholders::_1,
                             std::placeholders::_2, std::placeholders::_3,
                             std::placeholders::_4, std::placeholders::_5),
                   frc971::controls::ImuWatcher::TimestampSource::kPi),
      utils_(event_loop),
      status_sender_(event_loop->MakeSender<Status>("/localizer")),
      output_sender_(event_loop->MakeSender<frc971::controls::LocalizerOutput>(
          "/localizer")),
      server_statistics_fetcher_(
          event_loop_->MakeFetcher<aos::message_bridge::ServerStatistics>(
              "/aos")),
      client_statistics_fetcher_(
          event_loop_->MakeFetcher<aos::message_bridge::ClientStatistics>(
              "/aos")),
      control_fetcher_(event_loop_->MakeFetcher<
                       frc971::control_loops::drivetrain::LocalizerControl>(
          "/drivetrain")) {
  if (dt_config_.is_simulated) {
    down_estimator_.assume_perfect_gravity();
  }

  // One watcher per camera detection channel; each incoming TargetMap gets
  // its timestamp translated into the local clock domain and is then fed
  // through HandleTarget() per detection.
  for (size_t camera_index = 0; camera_index < kNumCameras; ++camera_index) {
    const std::string_view channel_name = kDetectionChannels.at(camera_index);
    const aos::Channel *const channel = CHECK_NOTNULL(
        event_loop->GetChannel<frc971::vision::TargetMap>(channel_name));
    event_loop->MakeWatcher(
        channel_name, [this, channel,
                       camera_index](const frc971::vision::TargetMap &targets) {
          CHECK(targets.has_target_poses());
          CHECK(targets.has_monotonic_timestamp_ns());
          // Without a message-bridge clock offset we cannot translate the
          // capture time into our clock domain, so drop the whole image.
          const std::optional<aos::monotonic_clock::duration> clock_offset =
              utils_.ClockOffset(channel->source_node()->string_view());
          if (!clock_offset.has_value()) {
            VLOG(1) << "Rejecting image due to disconnected message bridge at "
                    << event_loop_->monotonic_now();
            cameras_.at(camera_index)
                .rejection_counter.IncrementError(
                    RejectionReason::MESSAGE_BRIDGE_DISCONNECTED);
            return;
          }
          const aos::monotonic_clock::time_point orin_capture_time(
              std::chrono::nanoseconds(targets.monotonic_timestamp_ns()) -
              clock_offset.value());
          // A capture time later than our current event time indicates clock
          // translation problems; reject rather than corrupt the EKF history.
          // NOTE(review): "pf" in the log string below looks like a typo for
          // "of" (log text only; left unchanged here).
          if (orin_capture_time > event_loop_->context().monotonic_event_time) {
            VLOG(1) << "Rejecting image due to being from future at "
                    << event_loop_->monotonic_now() << " with timestamp of "
                    << orin_capture_time << " and event time pf "
                    << event_loop_->context().monotonic_event_time;
            cameras_.at(camera_index)
                .rejection_counter.IncrementError(
                    RejectionReason::IMAGE_FROM_FUTURE);
            return;
          }
          auto debug_builder =
              cameras_.at(camera_index).debug_sender.MakeStaticBuilder();
          auto target_debug_list = debug_builder->add_targets();
          // The static_length should already be 20.
          CHECK(target_debug_list->reserve(20));
          for (const frc971::vision::TargetPoseFbs *target :
               *targets.target_poses()) {
            VLOG(1) << "Handling target from " << camera_index;
            HandleTarget(camera_index, orin_capture_time, *target,
                         target_debug_list->emplace_back());
          }
          StatisticsForCamera(cameras_.at(camera_index),
                              debug_builder->add_statistics());
          debug_builder.CheckOk(debug_builder.Send());
          SendStatus();
        });
  }

  // Publish the LocalizerOutput at a fixed 50 Hz cadence.
  event_loop_->AddPhasedLoop([this](int) { SendOutput(); },
                             std::chrono::milliseconds(20));

  event_loop_->MakeWatcher(
      "/drivetrain",
      [this](
          const frc971::control_loops::drivetrain::LocalizerControl &control) {
        HandleControl(control);
      });

  ekf_.set_ignore_accel(true);
  // Priority should be lower than the imu reading process, but non-zero.
  event_loop->SetRuntimeRealtimePriority(10);
  event_loop->OnRun([this, event_loop]() {
    // Start from the zero state with the nominal covariance, then apply any
    // reset request that arrived before we started running.
    ekf_.ResetInitialState(event_loop->monotonic_now(),
                           HybridEkf::State::Zero(), NominalCovariance());
    if (control_fetcher_.Fetch()) {
      HandleControl(*control_fetcher_.get());
    }
  });
}
286
James Kuszmaul86116c22024-03-15 22:50:34 -0700287void Localizer::HandleControl(
288 const frc971::control_loops::drivetrain::LocalizerControl &control) {
289 // This is triggered whenever we need to force the X/Y/(maybe theta)
290 // position of the robot to a particular point---e.g., during pre-match
291 // setup, or when commanded by a button on the driverstation.
292
293 // For some forms of reset, we choose to keep our current yaw estimate
294 // rather than overriding it from the control message.
295 const double theta = control.keep_current_theta()
296 ? ekf_.X_hat(StateIdx::kTheta)
297 : control.theta();
298 // Encoder values need to be reset based on the current values to ensure
299 // that we don't get weird corrections on the next encoder update.
300 const double left_encoder = ekf_.X_hat(StateIdx::kLeftEncoder);
301 const double right_encoder = ekf_.X_hat(StateIdx::kRightEncoder);
302 ekf_.ResetInitialState(t_,
303 (HybridEkf::State() << control.x(), control.y(), theta,
304 left_encoder, 0, right_encoder, 0, 0, 0, 0, 0, 0)
305 .finished(),
306 NominalCovariance());
307 VLOG(1) << "Reset state";
308}
309
James Kuszmaul313e9ce2024-02-11 17:47:33 -0800310void Localizer::HandleImu(aos::monotonic_clock::time_point /*sample_time_pico*/,
311 aos::monotonic_clock::time_point sample_time_orin,
312 std::optional<Eigen::Vector2d> /*encoders*/,
313 Eigen::Vector3d gyro, Eigen::Vector3d accel) {
314 std::optional<Eigen::Vector2d> encoders = utils_.Encoders(sample_time_orin);
315 last_encoder_readings_ = encoders;
James Kuszmaul86116c22024-03-15 22:50:34 -0700316 VLOG(1) << "Got encoders";
James Kuszmaul313e9ce2024-02-11 17:47:33 -0800317 if (t_ == aos::monotonic_clock::min_time) {
318 t_ = sample_time_orin;
319 }
320 if (t_ + 10 * frc971::controls::ImuWatcher::kNominalDt < sample_time_orin) {
321 t_ = sample_time_orin;
322 ++clock_resets_;
323 }
324 const aos::monotonic_clock::duration dt = sample_time_orin - t_;
325 t_ = sample_time_orin;
326 // We don't actually use the down estimator currently, but it's really
327 // convenient for debugging.
328 down_estimator_.Predict(gyro, accel, dt);
329 const double yaw_rate = (dt_config_.imu_transform * gyro)(2);
James Kuszmaul2700e0f2024-03-16 16:45:48 -0700330 ekf_.UpdateEncodersAndGyro(
331 encoders.has_value() ? std::make_optional<double>(encoders.value()(0))
332 : std::nullopt,
333 encoders.has_value() ? std::make_optional<double>(encoders.value()(1))
334 : std::nullopt,
335 yaw_rate, utils_.VoltageOrZero(sample_time_orin), accel, t_);
James Kuszmaul313e9ce2024-02-11 17:47:33 -0800336 SendStatus();
337}
338
339void Localizer::RejectImage(int camera_index, RejectionReason reason,
340 TargetEstimateDebugStatic *builder) {
341 if (builder != nullptr) {
342 builder->set_accepted(false);
343 builder->set_rejection_reason(reason);
344 }
345 cameras_.at(camera_index).rejection_counter.IncrementError(reason);
346}
347
348// Only use april tags present in the target map; this method has also been used
349// (in the past) for ignoring april tags that tend to produce problematic
350// readings.
351bool Localizer::UseAprilTag(uint64_t target_id) {
James Kuszmaul86116c22024-03-15 22:50:34 -0700352 if (target_poses_.count(target_id) == 0) {
353 return false;
354 }
355 return true;
356}
357
358bool Localizer::DeweightAprilTag(uint64_t target_id) {
359 const flatbuffers::Vector<uint64_t> *ignore_tags = nullptr;
360
361 switch (utils_.Alliance()) {
362 case aos::Alliance::kRed:
363 ignore_tags = CHECK_NOTNULL(
364 constants_fetcher_.constants().common()->ignore_targets()->red());
365 break;
366 case aos::Alliance::kBlue:
367 ignore_tags = CHECK_NOTNULL(
368 constants_fetcher_.constants().common()->ignore_targets()->blue());
369 break;
370 case aos::Alliance::kInvalid:
371 return false;
372 }
373 return std::find(ignore_tags->begin(), ignore_tags->end(), target_id) !=
374 ignore_tags->end();
James Kuszmaul313e9ce2024-02-11 17:47:33 -0800375}
376
377namespace {
378// Converts a camera transformation matrix from treating the +Z axis from
379// pointing straight out the lens to having the +X pointing straight out the
380// lens, with +Z going "up" (i.e., -Y in the normal convention) and +Y going
381// leftwards (i.e., -X in the normal convention).
382Localizer::Transform ZToXCamera(const Localizer::Transform &transform) {
383 return transform *
384 Eigen::Matrix4d{
385 {0, -1, 0, 0}, {0, 0, -1, 0}, {1, 0, 0, 0}, {0, 0, 0, 1}};
386}
387} // namespace
388
// Processes a single april tag detection: runs it through a series of sanity
// checks (known target, pose error, distortion, implied yaw, distance, robot
// speed), computes a measurement-noise matrix scaled by detection quality, and
// if accepted applies an EKF correction (either heading/distance/skew or
// x/y/theta depending on FLAGS_do_xytheta_corrections). Debug information is
// written into `debug_builder` when one is available.
void Localizer::HandleTarget(
    int camera_index, const aos::monotonic_clock::time_point capture_time,
    const frc971::vision::TargetPoseFbs &target,
    TargetEstimateDebugStatic *debug_builder) {
  ++total_candidate_targets_;
  ++cameras_.at(camera_index).total_candidate_targets;
  const uint64_t target_id = target.id();

  if (debug_builder == nullptr) {
    AOS_LOG(ERROR, "Dropped message from debug vector.");
  } else {
    debug_builder->set_camera(camera_index);
    debug_builder->set_image_age_sec(aos::time::DurationInSeconds(
        event_loop_->monotonic_now() - capture_time));
    debug_builder->set_image_monotonic_timestamp_ns(
        std::chrono::duration_cast<std::chrono::nanoseconds>(
            capture_time.time_since_epoch())
            .count());
    debug_builder->set_april_tag(target_id);
  }
  VLOG(2) << aos::FlatbufferToJson(&target);
  if (!UseAprilTag(target_id)) {
    VLOG(1) << "Rejecting target due to invalid ID " << target_id;
    RejectImage(camera_index, RejectionReason::NO_SUCH_TARGET, debug_builder);
    return;
  }
  // Deweighted tags get their measurement noise inflated; in auto they are
  // rejected outright unless --always_use_extra_tags is set.
  double april_tag_noise_scalar = 1.0;
  if (DeweightAprilTag(target_id)) {
    if (!FLAGS_always_use_extra_tags && utils_.MaybeInAutonomous()) {
      VLOG(1) << "Rejecting target due to auto invalid ID " << target_id;
      RejectImage(camera_index, RejectionReason::NO_SUCH_TARGET, debug_builder);
      return;
    } else {
      if (utils_.MaybeInAutonomous()) {
        april_tag_noise_scalar = 1.5;
      } else {
        // NOTE(review): both arms of this conditional assign the same value,
        // so the target_id 13/14 special case is currently a no-op. It looks
        // like a leftover from tuning different scalars per tag---either the
        // values should differ or the conditional should be collapsed.
        if (target_id == 13 || target_id == 14) {
          april_tag_noise_scalar = 5.0;
        } else {
          april_tag_noise_scalar = 5.0;
        }
      }
    }
  }

  const Transform &H_field_target = target_poses_.at(target_id);
  const Transform &H_robot_camera = cameras_.at(camera_index).extrinsics;

  const Transform H_camera_target = PoseToTransform(&target);

  // In order to do the EKF correction, we determine the expected state based
  // on the state at the time the image was captured; however, we insert the
  // correction update itself at the current time. This is technically not
  // quite correct, but saves substantial CPU usage & code complexity by
  // making it so that we don't have to constantly rewind the entire EKF
  // history.
  const std::optional<State> state_at_capture =
      ekf_.LastStateBeforeTime(capture_time);

  if (!state_at_capture.has_value()) {
    VLOG(1) << "Rejecting image due to being too old.";
    return RejectImage(camera_index, RejectionReason::IMAGE_TOO_OLD,
                       debug_builder);
  } else if (target.pose_error() > FLAGS_max_pose_error) {
    VLOG(1) << "Rejecting target due to high pose error "
            << target.pose_error();
    return RejectImage(camera_index, RejectionReason::HIGH_POSE_ERROR,
                       debug_builder);
  } else if (target.pose_error_ratio() > FLAGS_max_pose_error_ratio) {
    VLOG(1) << "Rejecting target due to high pose error ratio "
            << target.pose_error_ratio();
    return RejectImage(camera_index, RejectionReason::HIGH_POSE_ERROR_RATIO,
                       debug_builder);
  }

  // Average of left/right velocities at capture time; used both to scale
  // noise and to reject images taken while moving fast in auto.
  const double robot_speed =
      (state_at_capture.value()(StateIdx::kLeftVelocity) +
       state_at_capture.value()(StateIdx::kRightVelocity)) /
      2.0;

  Corrector corrector(state_at_capture.value(), H_field_target, H_robot_camera,
                      H_camera_target);
  const double distance_to_target = corrector.observed()(Corrector::kDistance);

  // Heading, distance, skew at 1 meter.
  Eigen::Matrix<double, 3, 1> noises(0.03, 0.25, 0.15);
  noises *= 2.0;
  // Distance and skew noise grow quadratically with distance (capped below
  // 1 m so close-up detections are not over-trusted).
  const double distance_noise_scalar =
      std::min(1.0, std::pow(distance_to_target, 2.0));
  noises(Corrector::kDistance) *= distance_noise_scalar;
  noises(Corrector::kSkew) *= distance_noise_scalar;
  // TODO(james): This is leftover from last year; figure out if we want it.
  // Scale noise by the distortion factor for this detection
  noises *= (1.0 + FLAGS_distortion_noise_scalar * target.distortion_factor());
  noises *= april_tag_noise_scalar;
  noises *= (1.0 + std::abs(robot_speed));

  Eigen::Matrix3d R = Eigen::Matrix3d::Zero();
  R.diagonal() = noises.cwiseAbs2();
  const Eigen::Vector3d camera_position =
      corrector.observed_camera_pose().abs_pos();
  // Calculate the camera-to-robot transformation matrix ignoring the
  // pitch/roll of the camera.
  const Transform H_camera_robot_stripped =
      frc971::control_loops::Pose(ZToXCamera(H_robot_camera))
          .AsTransformationMatrix()
          .inverse();
  // Robot pose implied by this single camera observation.
  const frc971::control_loops::Pose measured_pose(
      corrector.observed_camera_pose().AsTransformationMatrix() *
      H_camera_robot_stripped);
  if (debug_builder != nullptr) {
    debug_builder->set_camera_x(camera_position.x());
    debug_builder->set_camera_y(camera_position.y());
    debug_builder->set_camera_theta(
        corrector.observed_camera_pose().abs_theta());
    debug_builder->set_implied_robot_x(measured_pose.rel_pos().x());
    debug_builder->set_implied_robot_y(measured_pose.rel_pos().y());
    debug_builder->set_implied_robot_theta(measured_pose.rel_theta());

    Corrector::PopulateMeasurement(corrector.expected(),
                                   debug_builder->add_expected_observation());
    Corrector::PopulateMeasurement(corrector.observed(),
                                   debug_builder->add_actual_observation());
    Corrector::PopulateMeasurement(noises, debug_builder->add_modeled_noise());
  }

  // Difference between the camera yaw we expected from the EKF state and the
  // yaw implied by the observation; large values indicate a bad detection.
  const double camera_yaw_error =
      aos::math::NormalizeAngle(corrector.expected_camera_pose().abs_theta() -
                                corrector.observed_camera_pose().abs_theta());
  constexpr double kDegToRad = M_PI / 180.0;
  // Teleop tolerates a much larger implied-yaw error than auto.
  const double yaw_threshold =
      (utils_.MaybeInAutonomous() ? FLAGS_max_implied_yaw_error
                                  : FLAGS_max_implied_teleop_yaw_error) *
      kDegToRad;

  if (target.distortion_factor() > FLAGS_max_distortion) {
    VLOG(1) << "Rejecting target due to high distortion.";
    return RejectImage(camera_index, RejectionReason::HIGH_DISTORTION,
                       debug_builder);
  } else if (utils_.MaybeInAutonomous() &&
             (std::abs(robot_speed) > FLAGS_max_auto_image_robot_speed)) {
    return RejectImage(camera_index, RejectionReason::ROBOT_TOO_FAST,
                       debug_builder);
  } else if (std::abs(camera_yaw_error) > yaw_threshold) {
    return RejectImage(camera_index, RejectionReason::HIGH_IMPLIED_YAW_ERROR,
                       debug_builder);
  } else if (distance_to_target > FLAGS_max_distance_to_target) {
    return RejectImage(camera_index, RejectionReason::HIGH_DISTANCE_TO_TARGET,
                       debug_builder);
  }

  const Input U = ekf_.MostRecentInput();
  VLOG(1) << "previous state " << ekf_.X_hat().transpose();
  const State prior_state = ekf_.X_hat();
  // For the correction step, instead of passing in the measurement directly,
  // we pass in (0, 0, 0) as the measurement and then for the expected
  // measurement (Zhat) we calculate the error between the pose implied by
  // the camera measurement and the current estimate of the
  // pose. This doesn't affect any of the math, it just makes the code a bit
  // more convenient to write given the Correct() interface we already have.
  if (FLAGS_do_xytheta_corrections) {
    Eigen::Vector3d Z(measured_pose.rel_pos().x(), measured_pose.rel_pos().y(),
                      measured_pose.rel_theta());
    Eigen::Matrix<double, 3, 1> xyz_noises(0.2, 0.2, 0.5);
    xyz_noises *= distance_noise_scalar;
    xyz_noises *= april_tag_noise_scalar;
    // Scale noise by the distortion factor for this detection
    xyz_noises *=
        (1.0 + FLAGS_distortion_noise_scalar * target.distortion_factor());

    Eigen::Matrix3d R_xyz = Eigen::Matrix3d::Zero();
    R_xyz.diagonal() = xyz_noises.cwiseAbs2();
    xyz_observations_.CorrectKnownH(Eigen::Vector3d::Zero(), &U,
                                    XyzCorrector(state_at_capture.value(), Z),
                                    R_xyz, t_);
  } else {
    observations_.CorrectKnownH(Eigen::Vector3d::Zero(), &U, corrector, R, t_);
  }
  ++total_accepted_targets_;
  ++cameras_.at(camera_index).total_accepted_targets;
  VLOG(1) << "new state " << ekf_.X_hat().transpose();
  if (debug_builder != nullptr) {
    // Report how far the correction moved the estimate, plus the post-update
    // pose.
    debug_builder->set_correction_x(ekf_.X_hat()(StateIdx::kX) -
                                    prior_state(StateIdx::kX));
    debug_builder->set_correction_y(ekf_.X_hat()(StateIdx::kY) -
                                    prior_state(StateIdx::kY));
    debug_builder->set_correction_theta(ekf_.X_hat()(StateIdx::kTheta) -
                                        prior_state(StateIdx::kTheta));
    debug_builder->set_accepted(true);
    debug_builder->set_expected_robot_x(ekf_.X_hat()(StateIdx::kX));
    debug_builder->set_expected_robot_y(ekf_.X_hat()(StateIdx::kY));
    debug_builder->set_expected_robot_theta(
        aos::math::NormalizeAngle(ekf_.X_hat()(StateIdx::kTheta)));
  }
}
584
// Publishes the LocalizerOutput message (called from a 20 ms phased loop):
// current 2D pose, zeroing state, accepted-image count, a 3D orientation
// combining the EKF yaw with the down estimator, and whether all orins are
// connected per message-bridge statistics.
void Localizer::SendOutput() {
  auto builder = output_sender_.MakeBuilder();
  frc971::controls::LocalizerOutput::Builder output_builder =
      builder.MakeBuilder<frc971::controls::LocalizerOutput>();
  output_builder.add_monotonic_timestamp_ns(
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          event_loop_->context().monotonic_event_time.time_since_epoch())
          .count());
  output_builder.add_x(ekf_.X_hat(StateIdx::kX));
  output_builder.add_y(ekf_.X_hat(StateIdx::kY));
  output_builder.add_theta(ekf_.X_hat(StateIdx::kTheta));
  output_builder.add_zeroed(imu_watcher_.zeroer().Zeroed());
  output_builder.add_image_accepted_count(total_accepted_targets_);
  // Full 3D orientation: EKF yaw about Z composed with the down estimator's
  // roll/pitch estimate.
  const Eigen::Quaterniond &orientation =
      Eigen::AngleAxis<double>(ekf_.X_hat(StateIdx::kTheta),
                               Eigen::Vector3d::UnitZ()) *
      down_estimator_.X_hat();
  frc971::controls::Quaternion quaternion;
  quaternion.mutate_x(orientation.x());
  quaternion.mutate_y(orientation.y());
  quaternion.mutate_z(orientation.z());
  quaternion.mutate_w(orientation.w());
  output_builder.add_orientation(&quaternion);
  server_statistics_fetcher_.Fetch();
  client_statistics_fetcher_.Fetch();

  // Consider the orins connected only if no server/client connection reports
  // DISCONNECTED.
  bool orins_connected = true;

  if (server_statistics_fetcher_.get()) {
    for (const auto *orin_server_status :
         *server_statistics_fetcher_->connections()) {
      if (orin_server_status->state() ==
          aos::message_bridge::State::DISCONNECTED) {
        orins_connected = false;
      }
    }
  }

  if (client_statistics_fetcher_.get()) {
    for (const auto *pi_client_status :
         *client_statistics_fetcher_->connections()) {
      if (pi_client_status->state() ==
          aos::message_bridge::State::DISCONNECTED) {
        orins_connected = false;
      }
    }
  }

  // The output message is year-agnostic, and retains "pi" naming for
  // historical reasons.
  output_builder.add_all_pis_connected(orins_connected);
  builder.CheckOk(builder.Send(output_builder.Finish()));
}
638
// Serializes the full EKF state vector into a LocalizerState table. Theta is
// passed through aos::math::NormalizeAngle before being written out.
flatbuffers::Offset<frc971::control_loops::drivetrain::LocalizerState>
Localizer::PopulateState(const State &X_hat,
                         flatbuffers::FlatBufferBuilder *fbb) {
  frc971::control_loops::drivetrain::LocalizerState::Builder builder(*fbb);
  builder.add_x(X_hat(StateIdx::kX));
  builder.add_y(X_hat(StateIdx::kY));
  builder.add_theta(aos::math::NormalizeAngle(X_hat(StateIdx::kTheta)));
  builder.add_left_velocity(X_hat(StateIdx::kLeftVelocity));
  builder.add_right_velocity(X_hat(StateIdx::kRightVelocity));
  builder.add_left_encoder(X_hat(StateIdx::kLeftEncoder));
  builder.add_right_encoder(X_hat(StateIdx::kRightEncoder));
  builder.add_left_voltage_error(X_hat(StateIdx::kLeftVoltageError));
  builder.add_right_voltage_error(X_hat(StateIdx::kRightVoltageError));
  builder.add_angular_error(X_hat(StateIdx::kAngularError));
  builder.add_longitudinal_velocity_offset(
      X_hat(StateIdx::kLongitudinalVelocityOffset));
  builder.add_lateral_velocity(X_hat(StateIdx::kLateralVelocity));
  return builder.Finish();
}
658
659flatbuffers::Offset<ImuStatus> Localizer::PopulateImu(
660 flatbuffers::FlatBufferBuilder *fbb) const {
661 const auto zeroer_offset = imu_watcher_.zeroer().PopulateStatus(fbb);
662 const auto failures_offset = imu_watcher_.PopulateImuFailures(fbb);
663 ImuStatus::Builder builder(*fbb);
664 builder.add_zeroed(imu_watcher_.zeroer().Zeroed());
665 builder.add_faulted_zero(imu_watcher_.zeroer().Faulted());
666 builder.add_zeroing(zeroer_offset);
667 if (imu_watcher_.pico_offset().has_value()) {
668 builder.add_board_offset_ns(imu_watcher_.pico_offset().value().count());
669 builder.add_board_offset_error_ns(imu_watcher_.pico_offset_error().count());
670 }
671 if (last_encoder_readings_.has_value()) {
672 builder.add_left_encoder(last_encoder_readings_.value()(0));
673 builder.add_right_encoder(last_encoder_readings_.value()(1));
674 }
675 builder.add_imu_failures(failures_offset);
676 return builder.Finish();
677}
678
679flatbuffers::Offset<CumulativeStatistics> Localizer::StatisticsForCamera(
680 const CameraState &camera, flatbuffers::FlatBufferBuilder *fbb) {
681 const auto counts_offset = camera.rejection_counter.PopulateCounts(fbb);
682 CumulativeStatistics::Builder stats_builder(*fbb);
683 stats_builder.add_total_accepted(camera.total_accepted_targets);
684 stats_builder.add_total_candidates(camera.total_candidate_targets);
685 stats_builder.add_rejection_reasons(counts_offset);
686 return stats_builder.Finish();
687}
688
689void Localizer::StatisticsForCamera(const CameraState &camera,
690 CumulativeStatisticsStatic *builder) {
691 camera.rejection_counter.PopulateCountsStaticFbs(
692 builder->add_rejection_reasons());
693 builder->set_total_accepted(camera.total_accepted_targets);
694 builder->set_total_candidates(camera.total_candidate_targets);
695}
696
697void Localizer::SendStatus() {
698 auto builder = status_sender_.MakeBuilder();
699 std::array<flatbuffers::Offset<CumulativeStatistics>, kNumCameras>
700 stats_offsets;
701 for (size_t ii = 0; ii < kNumCameras; ++ii) {
702 stats_offsets.at(ii) = StatisticsForCamera(cameras_.at(ii), builder.fbb());
703 }
704 auto stats_offset =
705 builder.fbb()->CreateVector(stats_offsets.data(), stats_offsets.size());
706 auto down_estimator_offset =
707 down_estimator_.PopulateStatus(builder.fbb(), t_);
708 auto imu_offset = PopulateImu(builder.fbb());
709 auto state_offset = PopulateState(ekf_.X_hat(), builder.fbb());
James Kuszmaul86116c22024-03-15 22:50:34 -0700710 // covariance is a square; we use the number of rows in the state as the rows
711 // and cols of the covariance.
712 auto covariance_offset =
713 frc971::FromEigen<State::RowsAtCompileTime, State::RowsAtCompileTime>(
714 ekf_.P(), builder.fbb());
James Kuszmaul313e9ce2024-02-11 17:47:33 -0800715 Status::Builder status_builder = builder.MakeBuilder<Status>();
716 status_builder.add_state(state_offset);
717 status_builder.add_down_estimator(down_estimator_offset);
718 status_builder.add_imu(imu_offset);
719 status_builder.add_statistics(stats_offset);
James Kuszmaul86116c22024-03-15 22:50:34 -0700720 status_builder.add_ekf_covariance(covariance_offset);
James Kuszmaul313e9ce2024-02-11 17:47:33 -0800721 builder.CheckOk(builder.Send(status_builder.Finish()));
722}
723
724Eigen::Vector3d Localizer::Corrector::HeadingDistanceSkew(
725 const Pose &relative_pose) {
726 const double heading = relative_pose.heading();
727 const double distance = relative_pose.xy_norm();
728 const double skew =
729 ::aos::math::NormalizeAngle(relative_pose.rel_theta() - heading);
730 return {heading, distance, skew};
731}
732
Localizer::Corrector Localizer::Corrector::CalculateHeadingDistanceSkewH(
    const State &state_at_capture, const Transform &H_field_target,
    const Transform &H_robot_camera, const Transform &H_camera_target) {
  // Builds a Corrector that compares the target position implied by the
  // actual camera observation (H_camera_target) against the one implied by
  // the EKF state at the image capture time.
  // Observed camera pose on the field: field->target composed with
  // target->camera (the inverse of the camera->target observation).
  const Transform H_field_camera = H_field_target * H_camera_target.inverse();
  // 2-D robot pose taken from the EKF state at capture time.
  const Pose expected_robot_pose(
      {state_at_capture(StateIdx::kX), state_at_capture(StateIdx::kY), 0.0},
      state_at_capture(StateIdx::kTheta));
  // Observed position on the field, reduced to just the 2-D pose.
  const Pose observed_camera(ZToXCamera(H_field_camera));
  // Where the camera would be if the EKF state were exactly right.
  const Pose expected_camera(expected_robot_pose.AsTransformationMatrix() *
                             ZToXCamera(H_robot_camera));
  const Pose nominal_target(ZToXCamera(H_field_target));
  // Express the (fixed) field target relative to each camera pose so that
  // both can be reduced to heading/distance/skew measurements.
  const Pose observed_target = nominal_target.Rebase(&observed_camera);
  const Pose expected_target = nominal_target.Rebase(&expected_camera);
  return Localizer::Corrector{
      expected_robot_pose,
      observed_camera,
      expected_camera,
      HeadingDistanceSkew(expected_target),
      HeadingDistanceSkew(observed_target),
      frc971::control_loops::drivetrain::HMatrixForCameraHeadingDistanceSkew(
          nominal_target, observed_camera)};
}
756
// Convenience constructor: delegates all of the heading/distance/skew
// calculation to CalculateHeadingDistanceSkewH.
Localizer::Corrector::Corrector(const State &state_at_capture,
                                const Transform &H_field_target,
                                const Transform &H_robot_camera,
                                const Transform &H_camera_target)
    : Corrector(CalculateHeadingDistanceSkewH(
          state_at_capture, H_field_target, H_robot_camera, H_camera_target)) {}
763
764Localizer::Output Localizer::Corrector::H(const State &, const Input &) {
765 return expected_ - observed_;
766}
767
James Kuszmaul86116c22024-03-15 22:50:34 -0700768Localizer::Output Localizer::XyzCorrector::H(const State &, const Input &) {
769 CHECK(Z_.allFinite());
770 Eigen::Vector3d Zhat = H_ * state_at_capture_ - Z_;
771 // Rewrap angle difference to put it back in range.
772 Zhat(2) = aos::math::NormalizeAngle(Zhat(2));
773 VLOG(1) << "Zhat " << Zhat.transpose() << " Z_ " << Z_.transpose()
774 << " state " << (H_ * state_at_capture_).transpose();
775 return Zhat;
776}
777
James Kuszmaul313e9ce2024-02-11 17:47:33 -0800778} // namespace y2024::localizer