#include "y2023/localizer/localizer.h"

#include "absl/flags/flag.h"

#include "aos/containers/sized_array.h"
#include "frc971/control_loops/drivetrain/localizer_generated.h"
#include "frc971/control_loops/pose.h"
#include "frc971/vision/target_map_utils.h"
#include "y2023/constants.h"

ABSL_FLAG(double, max_pose_error, 1e-6,
          "Throw out target poses with a higher pose error than this");
ABSL_FLAG(double, max_pose_error_ratio, 0.4,
          "Throw out target poses with a higher pose error ratio than this");
ABSL_FLAG(double, distortion_noise_scalar, 1.0,
          "Scale the target pose distortion factor by this when computing "
          "the noise.");
ABSL_FLAG(
    double, max_implied_yaw_error, 3.0,
    "Reject target poses that imply a robot yaw of more than this many degrees "
    "off from our estimate.");
ABSL_FLAG(
    double, max_implied_teleop_yaw_error, 30.0,
    "Reject target poses that imply a robot yaw of more than this many degrees "
    "off from our estimate.");
ABSL_FLAG(double, max_distance_to_target, 5.0,
          "Reject target poses that have a 3d distance of more than this "
          "many meters.");
ABSL_FLAG(double, max_auto_image_robot_speed, 2.0,
          "Reject target poses when the robot is travelling faster than "
          "this speed in auto.");

namespace y2023::localizer {
namespace {
constexpr std::array<std::string_view, Localizer::kNumCameras> kPisToUse{
    "pi1", "pi2", "pi3", "pi4"};

size_t CameraIndexForName(std::string_view name) {
  for (size_t index = 0; index < kPisToUse.size(); ++index) {
    if (name == kPisToUse.at(index)) {
      return index;
    }
  }
  LOG(FATAL) << "No camera named " << name;
}

std::map<uint64_t, Localizer::Transform> GetTargetLocations(
    const Constants &constants) {
  CHECK(constants.has_target_map());
  CHECK(constants.target_map()->has_target_poses());
  std::map<uint64_t, Localizer::Transform> transforms;
  for (const frc971::vision::TargetPoseFbs *target :
       *constants.target_map()->target_poses()) {
    CHECK(target->has_id());
    CHECK(target->has_position());
    CHECK(target->has_orientation());
    CHECK_EQ(0u, transforms.count(target->id()));
    transforms[target->id()] = PoseToTransform(target);
  }
  return transforms;
}
}  // namespace

std::array<Localizer::CameraState, Localizer::kNumCameras>
Localizer::MakeCameras(const Constants &constants, aos::EventLoop *event_loop) {
  CHECK(constants.has_cameras());
  std::array<Localizer::CameraState, Localizer::kNumCameras> cameras;
  for (const CameraConfiguration *camera : *constants.cameras()) {
    CHECK(camera->has_calibration());
    const frc971::vision::calibration::CameraCalibration *calibration =
        camera->calibration();
    CHECK(!calibration->has_turret_extrinsics())
        << "The 2023 robot does not have a turret.";
    CHECK(calibration->has_node_name());
    const size_t index =
        CameraIndexForName(calibration->node_name()->string_view());
    // We default-construct the extrinsics matrix to all-zeros; use that to
    // sanity-check whether we have populated the matrix yet or not.
    CHECK(cameras.at(index).extrinsics.norm() == 0)
        << "Got multiple calibrations for "
        << calibration->node_name()->string_view();
    CHECK(calibration->has_fixed_extrinsics());
    cameras.at(index).extrinsics =
        frc971::control_loops::drivetrain::FlatbufferToTransformationMatrix(
            *calibration->fixed_extrinsics());
    cameras.at(index).debug_sender = event_loop->MakeSender<Visualization>(
        absl::StrCat("/", calibration->node_name()->string_view(), "/camera"));
  }
  for (const CameraState &camera : cameras) {
    CHECK(camera.extrinsics.norm() != 0) << "Missing a camera calibration.";
  }
  return cameras;
}

Localizer::Localizer(
    aos::EventLoop *event_loop,
    const frc971::control_loops::drivetrain::DrivetrainConfig<double> dt_config)
    : event_loop_(event_loop),
      dt_config_(dt_config),
      constants_fetcher_(event_loop),
      cameras_(MakeCameras(constants_fetcher_.constants(), event_loop)),
      target_poses_(GetTargetLocations(constants_fetcher_.constants())),
      down_estimator_(dt_config),
      ekf_(dt_config),
      observations_(&ekf_),
      imu_watcher_(event_loop, dt_config,
                   y2023::constants::Values::DrivetrainEncoderToMeters(1),
                   std::bind(&Localizer::HandleImu, this, std::placeholders::_1,
                             std::placeholders::_2, std::placeholders::_3,
                             std::placeholders::_4, std::placeholders::_5),
                   frc971::controls::ImuWatcher::TimestampSource::kPi),
      utils_(event_loop),
      status_sender_(event_loop->MakeSender<Status>("/localizer")),
      output_sender_(event_loop->MakeSender<frc971::controls::LocalizerOutput>(
          "/localizer")),
      server_statistics_fetcher_(
          event_loop_->MakeFetcher<aos::message_bridge::ServerStatistics>(
              "/aos")),
      client_statistics_fetcher_(
          event_loop_->MakeFetcher<aos::message_bridge::ClientStatistics>(
              "/aos")) {
  if (dt_config_.is_simulated) {
    down_estimator_.assume_perfect_gravity();
  }

  for (size_t camera_index = 0; camera_index < kNumCameras; ++camera_index) {
    const std::string_view pi_name = kPisToUse.at(camera_index);
    event_loop->MakeWatcher(
        absl::StrCat("/", pi_name, "/camera"),
        [this, pi_name,
         camera_index](const frc971::vision::TargetMap &targets) {
          CHECK(targets.has_target_poses());
          CHECK(targets.has_monotonic_timestamp_ns());
          const std::optional<aos::monotonic_clock::duration> clock_offset =
              utils_.ClockOffset(pi_name);
          if (!clock_offset.has_value()) {
            VLOG(1) << "Rejecting image due to disconnected message bridge at "
                    << event_loop_->monotonic_now();
            cameras_.at(camera_index)
                .rejection_counter.IncrementError(
                    RejectionReason::MESSAGE_BRIDGE_DISCONNECTED);
            return;
          }
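          // Translate the image timestamp from the camera pi's clock into our
          // local monotonic clock using the message-bridge clock offset, then
          // correct it by the pico offset error, since the EKF timeline is
          // driven by IMU samples timestamped on the pico.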
          const aos::monotonic_clock::time_point pi_capture_time(
              std::chrono::nanoseconds(targets.monotonic_timestamp_ns()) -
              clock_offset.value());
          const aos::monotonic_clock::time_point capture_time =
              pi_capture_time - imu_watcher_.pico_offset_error();
          VLOG(2) << "Capture time of "
                  << targets.monotonic_timestamp_ns() * 1e-9
                  << " clock offset of "
                  << aos::time::DurationInSeconds(clock_offset.value())
                  << " pico error "
                  << aos::time::DurationInSeconds(
                         imu_watcher_.pico_offset_error());
          if (pi_capture_time > event_loop_->context().monotonic_event_time) {
            VLOG(1) << "Rejecting image due to being from the future at "
                    << event_loop_->monotonic_now() << " with timestamp of "
                    << pi_capture_time << " and event time of "
                    << event_loop_->context().monotonic_event_time;
            cameras_.at(camera_index)
                .rejection_counter.IncrementError(
                    RejectionReason::IMAGE_FROM_FUTURE);
            return;
          }
          auto builder = cameras_.at(camera_index).debug_sender.MakeBuilder();
          aos::SizedArray<flatbuffers::Offset<TargetEstimateDebug>, 20>
              debug_offsets;
          for (const frc971::vision::TargetPoseFbs *target :
               *targets.target_poses()) {
            VLOG(1) << "Handling target from " << camera_index;
            auto offset = HandleTarget(camera_index, capture_time, *target,
                                       builder.fbb());
            if (debug_offsets.size() < debug_offsets.capacity()) {
              debug_offsets.push_back(offset);
            } else {
              AOS_LOG(ERROR, "Dropped message from debug vector.");
            }
          }
          auto vector_offset = builder.fbb()->CreateVector(
              debug_offsets.data(), debug_offsets.size());
          auto stats_offset =
              StatisticsForCamera(cameras_.at(camera_index), builder.fbb());
          Visualization::Builder visualize_builder(*builder.fbb());
          visualize_builder.add_targets(vector_offset);
          visualize_builder.add_statistics(stats_offset);
          builder.CheckOk(builder.Send(visualize_builder.Finish()));
          SendStatus();
        });
  }

  event_loop_->AddPhasedLoop([this](int) { SendOutput(); },
                             std::chrono::milliseconds(20));

  event_loop_->MakeWatcher(
      "/drivetrain",
      [this](
          const frc971::control_loops::drivetrain::LocalizerControl &control) {
        const double theta = control.keep_current_theta()
                                 ? ekf_.X_hat(StateIdx::kTheta)
                                 : control.theta();
        const double left_encoder = ekf_.X_hat(StateIdx::kLeftEncoder);
        const double right_encoder = ekf_.X_hat(StateIdx::kRightEncoder);
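        // Reset the (x, y, theta) portion of the state to the requested pose
        // while keeping the current encoder estimates and covariance, so the
        // EKF does not see a spurious encoder discontinuity.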
        ekf_.ResetInitialState(
            t_,
            (HybridEkf::State() << control.x(), control.y(), theta,
             left_encoder, 0, right_encoder, 0, 0, 0, 0, 0, 0)
                .finished(),
            ekf_.P());
      });

  ekf_.set_ignore_accel(true);
  // Priority should be lower than the imu reading process, but non-zero.
  event_loop->SetRuntimeRealtimePriority(10);
  event_loop->OnRun([this, event_loop]() {
    ekf_.ResetInitialState(event_loop->monotonic_now(),
                           HybridEkf::State::Zero(), ekf_.P());
  });
}

void Localizer::HandleImu(aos::monotonic_clock::time_point sample_time_pico,
                          aos::monotonic_clock::time_point sample_time_pi,
                          std::optional<Eigen::Vector2d> encoders,
                          Eigen::Vector3d gyro, Eigen::Vector3d accel) {
  last_encoder_readings_ = encoders;
  // Ignore invalid readings; the HybridEkf will handle it reasonably.
  if (!encoders.has_value()) {
    return;
  }
  if (t_ == aos::monotonic_clock::min_time) {
    t_ = sample_time_pico;
  }
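  // If we have gone more than ten nominal IMU periods without a sample,
  // assume the pico clock jumped (or we missed data) and restart our time
  // tracking from this sample rather than integrating across the gap.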
  if (t_ + 10 * frc971::controls::ImuWatcher::kNominalDt < sample_time_pico) {
    t_ = sample_time_pico;
    ++clock_resets_;
  }
  const aos::monotonic_clock::duration dt = sample_time_pico - t_;
  t_ = sample_time_pico;
  // We don't actually use the down estimator currently, but it's really
  // convenient for debugging.
  down_estimator_.Predict(gyro, accel, dt);
  const double yaw_rate = (dt_config_.imu_transform * gyro)(2);
  ekf_.UpdateEncodersAndGyro(encoders.value()(0), encoders.value()(1), yaw_rate,
                             utils_.VoltageOrZero(sample_time_pi), accel, t_);
  SendStatus();
}

flatbuffers::Offset<TargetEstimateDebug> Localizer::RejectImage(
    int camera_index, RejectionReason reason,
    TargetEstimateDebug::Builder *builder) {
  builder->add_accepted(false);
  builder->add_rejection_reason(reason);
  cameras_.at(camera_index).rejection_counter.IncrementError(reason);
  return builder->Finish();
}

bool Localizer::UseAprilTag(uint64_t target_id) {
  if (target_poses_.count(target_id) == 0) {
    return false;
  }

  const flatbuffers::Vector<uint64_t> *ignore_tags = nullptr;

  switch (utils_.Alliance()) {
    case aos::Alliance::kRed:
      ignore_tags = constants_fetcher_.constants().ignore_targets()->red();
      CHECK(ignore_tags != nullptr);
      break;
    case aos::Alliance::kBlue:
      ignore_tags = constants_fetcher_.constants().ignore_targets()->blue();
      CHECK(ignore_tags != nullptr);
      break;
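    // If we do not know which alliance we are on yet, err on the side of
    // using every tag that is in the map.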
    case aos::Alliance::kInvalid:
      return true;
  }
  return std::find(ignore_tags->begin(), ignore_tags->end(), target_id) ==
         ignore_tags->end();
}

flatbuffers::Offset<TargetEstimateDebug> Localizer::HandleTarget(
    int camera_index, const aos::monotonic_clock::time_point capture_time,
    const frc971::vision::TargetPoseFbs &target,
    flatbuffers::FlatBufferBuilder *debug_fbb) {
  ++total_candidate_targets_;
  ++cameras_.at(camera_index).total_candidate_targets;

  TargetEstimateDebug::Builder builder(*debug_fbb);
  builder.add_camera(camera_index);
  builder.add_image_age_sec(aos::time::DurationInSeconds(
      event_loop_->monotonic_now() - capture_time));
  builder.add_image_monotonic_timestamp_ns(
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          capture_time.time_since_epoch())
          .count());

  const uint64_t target_id = target.id();
  builder.add_april_tag(target_id);
  VLOG(2) << aos::FlatbufferToJson(&target);
  if (!UseAprilTag(target_id)) {
    VLOG(1) << "Rejecting target due to invalid ID " << target_id;
    return RejectImage(camera_index, RejectionReason::NO_SUCH_TARGET, &builder);
  }

  const Transform &H_field_target = target_poses_.at(target_id);
  const Transform &H_robot_camera = cameras_.at(camera_index).extrinsics;

  const Transform H_camera_target = PoseToTransform(&target);

  const Transform H_field_camera = H_field_target * H_camera_target.inverse();
  // Back out the robot position that is implied by the current camera
  // reading. Note that the Pose object ignores any roll/pitch components, so
  // if the camera's extrinsics for pitch/roll are off, this should just
  // ignore it.
  const frc971::control_loops::Pose measured_camera_pose(H_field_camera);
  builder.add_camera_x(measured_camera_pose.rel_pos().x());
  builder.add_camera_y(measured_camera_pose.rel_pos().y());
  // Because the camera uses Z as forwards rather than X, just calculate the
  // debugging theta value using the transformation matrix directly.
  builder.add_camera_theta(
      std::atan2(H_field_camera(1, 2), H_field_camera(0, 2)));
  // Calculate the camera-to-robot transformation matrix ignoring the
  // pitch/roll of the camera.
  const Transform H_camera_robot_stripped =
      frc971::control_loops::Pose(H_robot_camera)
          .AsTransformationMatrix()
          .inverse();
  const frc971::control_loops::Pose measured_pose(
      measured_camera_pose.AsTransformationMatrix() * H_camera_robot_stripped);
  // This "Z" is the robot pose directly implied by the camera results.
  const Eigen::Matrix<double, 3, 1> Z(measured_pose.rel_pos().x(),
                                      measured_pose.rel_pos().y(),
                                      measured_pose.rel_theta());
  builder.add_implied_robot_x(Z(Corrector::kX));
  builder.add_implied_robot_y(Z(Corrector::kY));
  builder.add_implied_robot_theta(Z(Corrector::kTheta));

  Eigen::AngleAxisd rvec_camera_target(
      Eigen::Affine3d(H_camera_target).rotation());
  // Use y angle (around vertical axis) to compute skew.
  double skew = rvec_camera_target.axis().y() * rvec_camera_target.angle();
  builder.add_skew(skew);

  double distance_to_target =
      Eigen::Affine3d(H_camera_target).translation().norm();

  // TODO(james): Tune this. Also, gain schedule for auto mode?
  Eigen::Matrix<double, 3, 1> noises(1.0, 1.0, 0.5);
  noises /= 4.0;
  // Scale noise by the distortion factor for this detection.
  noises *=
      (1.0 + absl::GetFlag(FLAGS_distortion_noise_scalar) *
                 target.distortion_factor() * std::exp(distance_to_target));
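  // The distortion term is weighted by e^distance, so distorted detections of
  // far-away targets end up with much larger measurement noise than clean,
  // close-up detections.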

  Eigen::Matrix3d R = Eigen::Matrix3d::Zero();
  R.diagonal() = noises.cwiseAbs2();
  // In order to do the EKF correction, we determine the expected state based
  // on the state at the time the image was captured; however, we insert the
  // correction update itself at the current time. This is technically not
  // quite correct, but saves substantial CPU usage & code complexity by
  // making it so that we don't have to constantly rewind the entire EKF
  // history.
  const std::optional<State> state_at_capture =
      ekf_.LastStateBeforeTime(capture_time);

  if (!state_at_capture.has_value()) {
    VLOG(1) << "Rejecting image due to being too old.";
    return RejectImage(camera_index, RejectionReason::IMAGE_TOO_OLD, &builder);
  } else if (target.pose_error() > absl::GetFlag(FLAGS_max_pose_error)) {
    VLOG(1) << "Rejecting target due to high pose error "
            << target.pose_error();
    return RejectImage(camera_index, RejectionReason::HIGH_POSE_ERROR,
                       &builder);
  } else if (target.pose_error_ratio() >
             absl::GetFlag(FLAGS_max_pose_error_ratio)) {
    VLOG(1) << "Rejecting target due to high pose error ratio "
            << target.pose_error_ratio();
    return RejectImage(camera_index, RejectionReason::HIGH_POSE_ERROR_RATIO,
                       &builder);
  }

  double theta_at_capture = state_at_capture.value()(StateIdx::kTheta);
  double camera_implied_theta = Z(Corrector::kTheta);
  constexpr double kDegToRad = M_PI / 180.0;

  const double robot_speed =
      (state_at_capture.value()(StateIdx::kLeftVelocity) +
       state_at_capture.value()(StateIdx::kRightVelocity)) /
      2.0;
  const double yaw_threshold =
      (utils_.MaybeInAutonomous()
           ? absl::GetFlag(FLAGS_max_implied_yaw_error)
           : absl::GetFlag(FLAGS_max_implied_teleop_yaw_error)) *
      kDegToRad;
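  // In autonomous we apply the much tighter implied-yaw threshold and also
  // reject images taken while the robot is driving quickly; in teleop only
  // the looser yaw threshold applies.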
  if (utils_.MaybeInAutonomous() &&
      (std::abs(robot_speed) >
       absl::GetFlag(FLAGS_max_auto_image_robot_speed))) {
    return RejectImage(camera_index, RejectionReason::ROBOT_TOO_FAST, &builder);
  } else if (std::abs(aos::math::NormalizeAngle(
                 camera_implied_theta - theta_at_capture)) > yaw_threshold) {
    return RejectImage(camera_index, RejectionReason::HIGH_IMPLIED_YAW_ERROR,
                       &builder);
  } else if (distance_to_target > absl::GetFlag(FLAGS_max_distance_to_target)) {
    return RejectImage(camera_index, RejectionReason::HIGH_DISTANCE_TO_TARGET,
                       &builder);
  }

  const Input U = ekf_.MostRecentInput();
  VLOG(1) << "previous state " << ekf_.X_hat().topRows<3>().transpose();
  const State prior_state = ekf_.X_hat();
  // For the correction step, instead of passing in the measurement directly,
  // we pass in (0, 0, 0) as the measurement and then for the expected
  // measurement (Zhat) we calculate the error between the pose implied by
  // the camera measurement and the current estimate of the
  // pose. This doesn't affect any of the math, it just makes the code a bit
  // more convenient to write given the Correct() interface we already have.
  observations_.CorrectKnownH(Eigen::Vector3d::Zero(), &U,
                              Corrector(state_at_capture.value(), Z), R, t_);
  ++total_accepted_targets_;
  ++cameras_.at(camera_index).total_accepted_targets;
  VLOG(1) << "new state " << ekf_.X_hat().topRows<3>().transpose();
  builder.add_correction_x(ekf_.X_hat()(StateIdx::kX) -
                           prior_state(StateIdx::kX));
  builder.add_correction_y(ekf_.X_hat()(StateIdx::kY) -
                           prior_state(StateIdx::kY));
  builder.add_correction_theta(ekf_.X_hat()(StateIdx::kTheta) -
                               prior_state(StateIdx::kTheta));
  builder.add_accepted(true);
  return builder.Finish();
}

Localizer::Output Localizer::Corrector::H(const State &, const Input &) {
  CHECK(Z_.allFinite());
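  // The "expected measurement" handed back to the EKF is the residual between
  // the state estimate at capture time and the camera-implied pose Z_; the
  // matching "measurement" passed to Correct() is all zeros (see
  // HandleTarget()).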
  Eigen::Vector3d Zhat = H_ * state_at_capture_ - Z_;
  // Rewrap angle difference to put it back in range.
  Zhat(2) = aos::math::NormalizeAngle(Zhat(2));
  VLOG(1) << "Zhat " << Zhat.transpose() << " Z_ " << Z_.transpose()
          << " state " << (H_ * state_at_capture_).transpose();
  return Zhat;
}

void Localizer::SendOutput() {
  auto builder = output_sender_.MakeBuilder();
  frc971::controls::LocalizerOutput::Builder output_builder =
      builder.MakeBuilder<frc971::controls::LocalizerOutput>();
  output_builder.add_monotonic_timestamp_ns(
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          event_loop_->context().monotonic_event_time.time_since_epoch())
          .count());
  output_builder.add_x(ekf_.X_hat(StateIdx::kX));
  output_builder.add_y(ekf_.X_hat(StateIdx::kY));
  output_builder.add_theta(ekf_.X_hat(StateIdx::kTheta));
  output_builder.add_zeroed(imu_watcher_.zeroer().Zeroed());
  output_builder.add_image_accepted_count(total_accepted_targets_);
  const Eigen::Quaterniond &orientation =
      Eigen::AngleAxis<double>(ekf_.X_hat(StateIdx::kTheta),
                               Eigen::Vector3d::UnitZ()) *
      down_estimator_.X_hat();
  frc971::controls::Quaternion quaternion;
  quaternion.mutate_x(orientation.x());
  quaternion.mutate_y(orientation.y());
  quaternion.mutate_z(orientation.z());
  quaternion.mutate_w(orientation.w());
  output_builder.add_orientation(&quaternion);
  server_statistics_fetcher_.Fetch();
  client_statistics_fetcher_.Fetch();

  bool pis_connected = true;

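  // Only report that all pis are connected if every non-logger node has both
  // its server and client message bridge connections up.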
  if (server_statistics_fetcher_.get()) {
    for (const auto *pi_server_status :
         *server_statistics_fetcher_->connections()) {
      if (pi_server_status->state() ==
              aos::message_bridge::State::DISCONNECTED &&
          pi_server_status->node()->name()->string_view() != "logger") {
        pis_connected = false;
      }
    }
  }

  if (client_statistics_fetcher_.get()) {
    for (const auto *pi_client_status :
         *client_statistics_fetcher_->connections()) {
      if (pi_client_status->state() ==
              aos::message_bridge::State::DISCONNECTED &&
          pi_client_status->node()->name()->string_view() != "logger") {
        pis_connected = false;
      }
    }
  }

  output_builder.add_all_pis_connected(pis_connected);
  builder.CheckOk(builder.Send(output_builder.Finish()));
}

flatbuffers::Offset<frc971::control_loops::drivetrain::LocalizerState>
Localizer::PopulateState(const State &X_hat,
                         flatbuffers::FlatBufferBuilder *fbb) {
  frc971::control_loops::drivetrain::LocalizerState::Builder builder(*fbb);
  builder.add_x(X_hat(StateIdx::kX));
  builder.add_y(X_hat(StateIdx::kY));
  builder.add_theta(X_hat(StateIdx::kTheta));
  builder.add_left_velocity(X_hat(StateIdx::kLeftVelocity));
  builder.add_right_velocity(X_hat(StateIdx::kRightVelocity));
  builder.add_left_encoder(X_hat(StateIdx::kLeftEncoder));
  builder.add_right_encoder(X_hat(StateIdx::kRightEncoder));
  builder.add_left_voltage_error(X_hat(StateIdx::kLeftVoltageError));
  builder.add_right_voltage_error(X_hat(StateIdx::kRightVoltageError));
  builder.add_angular_error(X_hat(StateIdx::kAngularError));
  builder.add_longitudinal_velocity_offset(
      X_hat(StateIdx::kLongitudinalVelocityOffset));
  builder.add_lateral_velocity(X_hat(StateIdx::kLateralVelocity));
  return builder.Finish();
}

flatbuffers::Offset<ImuStatus> Localizer::PopulateImu(
    flatbuffers::FlatBufferBuilder *fbb) const {
  const auto zeroer_offset = imu_watcher_.zeroer().PopulateStatus(fbb);
  const auto failures_offset = imu_watcher_.PopulateImuFailures(fbb);
  ImuStatus::Builder builder(*fbb);
  builder.add_zeroed(imu_watcher_.zeroer().Zeroed());
  builder.add_faulted_zero(imu_watcher_.zeroer().Faulted());
  builder.add_zeroing(zeroer_offset);
  if (imu_watcher_.pico_offset().has_value()) {
    builder.add_pico_offset_ns(imu_watcher_.pico_offset().value().count());
    builder.add_pico_offset_error_ns(imu_watcher_.pico_offset_error().count());
  }
  if (last_encoder_readings_.has_value()) {
    builder.add_left_encoder(last_encoder_readings_.value()(0));
    builder.add_right_encoder(last_encoder_readings_.value()(1));
  }
  builder.add_imu_failures(failures_offset);
  return builder.Finish();
}

flatbuffers::Offset<CumulativeStatistics> Localizer::StatisticsForCamera(
    const CameraState &camera, flatbuffers::FlatBufferBuilder *fbb) {
  const auto counts_offset = camera.rejection_counter.PopulateCounts(fbb);
  CumulativeStatistics::Builder stats_builder(*fbb);
  stats_builder.add_total_accepted(camera.total_accepted_targets);
  stats_builder.add_total_candidates(camera.total_candidate_targets);
  stats_builder.add_rejection_reasons(counts_offset);
  return stats_builder.Finish();
}

void Localizer::SendStatus() {
  auto builder = status_sender_.MakeBuilder();
  std::array<flatbuffers::Offset<CumulativeStatistics>, kNumCameras>
      stats_offsets;
  for (size_t ii = 0; ii < kNumCameras; ++ii) {
    stats_offsets.at(ii) = StatisticsForCamera(cameras_.at(ii), builder.fbb());
  }
  auto stats_offset =
      builder.fbb()->CreateVector(stats_offsets.data(), stats_offsets.size());
  auto down_estimator_offset =
      down_estimator_.PopulateStatus(builder.fbb(), t_);
  auto imu_offset = PopulateImu(builder.fbb());
  auto state_offset = PopulateState(ekf_.X_hat(), builder.fbb());
  Status::Builder status_builder = builder.MakeBuilder<Status>();
  status_builder.add_state(state_offset);
  status_builder.add_down_estimator(down_estimator_offset);
  status_builder.add_imu(imu_offset);
  status_builder.add_statistics(stats_offset);
  builder.CheckOk(builder.Send(status_builder.Finish()));
}

}  // namespace y2023::localizer