blob: e64f90aefe2653d5937797a0d89c2f94d7191e42 [file] [log] [blame]
James Kuszmaul04a343c2023-02-20 16:38:22 -08001#include "y2023/localizer/localizer.h"
2
3#include "aos/containers/sized_array.h"
4#include "frc971/control_loops/drivetrain/localizer_generated.h"
5#include "frc971/control_loops/pose.h"
milind-u7a7f6662023-02-26 16:41:29 -08006#include "gflags/gflags.h"
James Kuszmaul04a343c2023-02-20 16:38:22 -08007#include "y2023/constants.h"
James Kuszmaul18008f82023-02-23 20:52:50 -08008#include "y2023/localizer/utils.h"
James Kuszmaul04a343c2023-02-20 16:38:22 -08009
milind-u7a7f6662023-02-26 16:41:29 -080010DEFINE_double(max_pose_error, 1e-6,
11 "Throw out target poses with a higher pose error than this");
milind-ua4f44ac2023-02-26 17:03:43 -080012DEFINE_double(distortion_noise_scalar, 1.0,
13 "Scale the target pose distortion factor by this when computing "
14 "the noise.");
milind-u48601192023-03-11 19:44:46 -080015DEFINE_double(
James Kuszmaul3c6a9682023-03-23 20:36:53 -070016 max_implied_yaw_error, 3.0,
milind-u48601192023-03-11 19:44:46 -080017 "Reject target poses that imply a robot yaw of more than this many degrees "
18 "off from our estimate.");
James Kuszmaul3c6a9682023-03-23 20:36:53 -070019DEFINE_double(
20 max_implied_teleop_yaw_error, 30.0,
21 "Reject target poses that imply a robot yaw of more than this many degrees "
22 "off from our estimate.");
23DEFINE_double(max_distance_to_target, 3.5,
milind-u48601192023-03-11 19:44:46 -080024 "Reject target poses that have a 3d distance of more than this "
25 "many meters.");
James Kuszmaul3c6a9682023-03-23 20:36:53 -070026DEFINE_double(max_auto_image_robot_speed, 2.0,
27 "Reject target poses when the robot is travelling faster than "
28 "this speed in auto.");
milind-u7a7f6662023-02-26 16:41:29 -080029
namespace y2023::localizer {
namespace {
// Node names of the raspberry pis whose target detections we consume, in
// camera-index order (index into the Localizer::CameraState array).
constexpr std::array<std::string_view, Localizer::kNumCameras> kPisToUse{
    "pi1", "pi2", "pi3", "pi4"};

35size_t CameraIndexForName(std::string_view name) {
36 for (size_t index = 0; index < kPisToUse.size(); ++index) {
37 if (name == kPisToUse.at(index)) {
38 return index;
39 }
40 }
41 LOG(FATAL) << "No camera named " << name;
42}
43
44std::map<uint64_t, Localizer::Transform> GetTargetLocations(
45 const Constants &constants) {
46 CHECK(constants.has_target_map());
47 CHECK(constants.target_map()->has_target_poses());
48 std::map<uint64_t, Localizer::Transform> transforms;
49 for (const frc971::vision::TargetPoseFbs *target :
50 *constants.target_map()->target_poses()) {
51 CHECK(target->has_id());
52 CHECK(target->has_position());
53 CHECK(target->has_orientation());
54 CHECK_EQ(0u, transforms.count(target->id()));
55 transforms[target->id()] = PoseToTransform(target);
56 }
57 return transforms;
58}
59} // namespace
60
// Constructs the per-camera state (fixed extrinsics + debug message sender)
// from the camera calibrations in the constants. Validates that every camera
// index gets exactly one calibration and that no calibration carries turret
// extrinsics (the 2023 robot has no turret).
std::array<Localizer::CameraState, Localizer::kNumCameras>
Localizer::MakeCameras(const Constants &constants, aos::EventLoop *event_loop) {
  CHECK(constants.has_cameras());
  std::array<Localizer::CameraState, Localizer::kNumCameras> cameras;
  for (const CameraConfiguration *camera : *constants.cameras()) {
    CHECK(camera->has_calibration());
    const frc971::vision::calibration::CameraCalibration *calibration =
        camera->calibration();
    CHECK(!calibration->has_turret_extrinsics())
        << "The 2023 robot does not have a turret.";
    CHECK(calibration->has_node_name());
    // The node name determines which camera slot this calibration fills.
    const size_t index =
        CameraIndexForName(calibration->node_name()->string_view());
    // We default-construct the extrinsics matrix to all-zeros; use that to
    // sanity-check whether we have populated the matrix yet or not.
    CHECK(cameras.at(index).extrinsics.norm() == 0)
        << "Got multiple calibrations for "
        << calibration->node_name()->string_view();
    CHECK(calibration->has_fixed_extrinsics());
    cameras.at(index).extrinsics =
        frc971::control_loops::drivetrain::FlatbufferToTransformationMatrix(
            *calibration->fixed_extrinsics());
    // Debug visualizations are published on the camera's own channel.
    cameras.at(index).debug_sender = event_loop->MakeSender<Visualization>(
        absl::StrCat("/", calibration->node_name()->string_view(), "/camera"));
  }
  // Every camera slot must have been filled by the loop above.
  for (const CameraState &camera : cameras) {
    CHECK(camera.extrinsics.norm() != 0) << "Missing a camera calibration.";
  }
  return cameras;
}
91
92Localizer::Localizer(
93 aos::EventLoop *event_loop,
94 const frc971::control_loops::drivetrain::DrivetrainConfig<double> dt_config)
95 : event_loop_(event_loop),
96 dt_config_(dt_config),
97 constants_fetcher_(event_loop),
98 cameras_(MakeCameras(constants_fetcher_.constants(), event_loop)),
99 target_poses_(GetTargetLocations(constants_fetcher_.constants())),
100 down_estimator_(dt_config),
101 ekf_(dt_config),
102 observations_(&ekf_),
103 imu_watcher_(event_loop, dt_config,
104 y2023::constants::Values::DrivetrainEncoderToMeters(1),
105 std::bind(&Localizer::HandleImu, this, std::placeholders::_1,
106 std::placeholders::_2, std::placeholders::_3,
James Kuszmaul54fe9d02023-03-23 20:32:40 -0700107 std::placeholders::_4, std::placeholders::_5),
108 frc971::controls::ImuWatcher::TimestampSource::kPi),
James Kuszmaul04a343c2023-02-20 16:38:22 -0800109 utils_(event_loop),
110 status_sender_(event_loop->MakeSender<Status>("/localizer")),
111 output_sender_(event_loop->MakeSender<frc971::controls::LocalizerOutput>(
112 "/localizer")) {
113 if (dt_config_.is_simulated) {
114 down_estimator_.assume_perfect_gravity();
115 }
116
117 for (size_t camera_index = 0; camera_index < kNumCameras; ++camera_index) {
118 const std::string_view pi_name = kPisToUse.at(camera_index);
119 event_loop->MakeWatcher(
120 absl::StrCat("/", pi_name, "/camera"),
121 [this, pi_name,
122 camera_index](const frc971::vision::TargetMap &targets) {
123 CHECK(targets.has_target_poses());
124 CHECK(targets.has_monotonic_timestamp_ns());
125 const std::optional<aos::monotonic_clock::duration> clock_offset =
126 utils_.ClockOffset(pi_name);
127 if (!clock_offset.has_value()) {
128 VLOG(1) << "Rejecting image due to disconnected message bridge at "
129 << event_loop_->monotonic_now();
130 cameras_.at(camera_index)
131 .rejection_counter.IncrementError(
132 RejectionReason::MESSAGE_BRIDGE_DISCONNECTED);
133 return;
134 }
135 const aos::monotonic_clock::time_point pi_capture_time(
136 std::chrono::nanoseconds(targets.monotonic_timestamp_ns()) -
137 clock_offset.value());
138 const aos::monotonic_clock::time_point capture_time =
139 pi_capture_time - imu_watcher_.pico_offset_error();
140 VLOG(2) << "Capture time of "
141 << targets.monotonic_timestamp_ns() * 1e-9
142 << " clock offset of "
143 << aos::time::DurationInSeconds(clock_offset.value())
144 << " pico error "
145 << aos::time::DurationInSeconds(
146 imu_watcher_.pico_offset_error());
147 if (pi_capture_time > event_loop_->context().monotonic_event_time) {
148 VLOG(1) << "Rejecting image due to being from future at "
149 << event_loop_->monotonic_now() << " with timestamp of "
150 << pi_capture_time << " and event time pf "
151 << event_loop_->context().monotonic_event_time;
152 cameras_.at(camera_index)
153 .rejection_counter.IncrementError(
154 RejectionReason::IMAGE_FROM_FUTURE);
155 return;
156 }
157 auto builder = cameras_.at(camera_index).debug_sender.MakeBuilder();
158 aos::SizedArray<flatbuffers::Offset<TargetEstimateDebug>, 20>
159 debug_offsets;
160 for (const frc971::vision::TargetPoseFbs *target :
161 *targets.target_poses()) {
162 VLOG(1) << "Handling target from " << camera_index;
163 auto offset = HandleTarget(camera_index, capture_time, *target,
164 builder.fbb());
165 if (debug_offsets.size() < debug_offsets.capacity()) {
166 debug_offsets.push_back(offset);
167 } else {
168 AOS_LOG(ERROR, "Dropped message from debug vector.");
169 }
170 }
171 auto vector_offset = builder.fbb()->CreateVector(
172 debug_offsets.data(), debug_offsets.size());
James Kuszmaulfb894572023-02-23 17:25:06 -0800173 auto stats_offset =
174 StatisticsForCamera(cameras_.at(camera_index), builder.fbb());
James Kuszmaul04a343c2023-02-20 16:38:22 -0800175 Visualization::Builder visualize_builder(*builder.fbb());
176 visualize_builder.add_targets(vector_offset);
James Kuszmaulfb894572023-02-23 17:25:06 -0800177 visualize_builder.add_statistics(stats_offset);
James Kuszmaul04a343c2023-02-20 16:38:22 -0800178 builder.CheckOk(builder.Send(visualize_builder.Finish()));
179 SendStatus();
180 });
181 }
182
183 event_loop_->AddPhasedLoop([this](int) { SendOutput(); },
James Kuszmaul456774b2023-03-08 21:29:15 -0800184 std::chrono::milliseconds(20));
James Kuszmaul04a343c2023-02-20 16:38:22 -0800185
186 event_loop_->MakeWatcher(
187 "/drivetrain",
188 [this](
189 const frc971::control_loops::drivetrain::LocalizerControl &control) {
190 const double theta = control.keep_current_theta()
191 ? ekf_.X_hat(StateIdx::kTheta)
192 : control.theta();
193 const double left_encoder = ekf_.X_hat(StateIdx::kLeftEncoder);
194 const double right_encoder = ekf_.X_hat(StateIdx::kRightEncoder);
195 ekf_.ResetInitialState(
196 t_,
197 (HybridEkf::State() << control.x(), control.y(), theta,
198 left_encoder, 0, right_encoder, 0, 0, 0, 0, 0, 0)
199 .finished(),
200 ekf_.P());
201 });
202
203 ekf_.set_ignore_accel(true);
204 // Priority should be lower than the imu reading process, but non-zero.
205 event_loop->SetRuntimeRealtimePriority(10);
206 event_loop->OnRun([this, event_loop]() {
207 ekf_.ResetInitialState(event_loop->monotonic_now(),
208 HybridEkf::State::Zero(), ekf_.P());
209 });
210}
211
// Processes one IMU sample. sample_time_pico is the sample time on the pico
// clock (the EKF timebase, tracked in t_); sample_time_pi is the matching
// pi-clock time, used only to look up the battery voltage. encoders is
// nullopt when the encoder readings were invalid.
void Localizer::HandleImu(aos::monotonic_clock::time_point sample_time_pico,
                          aos::monotonic_clock::time_point sample_time_pi,
                          std::optional<Eigen::Vector2d> encoders,
                          Eigen::Vector3d gyro, Eigen::Vector3d accel) {
  last_encoder_readings_ = encoders;
  // Ignore invalid readings; the HybridEkf will handle it reasonably.
  if (!encoders.has_value()) {
    return;
  }
  // First sample ever: initialize the timebase.
  if (t_ == aos::monotonic_clock::min_time) {
    t_ = sample_time_pico;
  }
  // If we jumped more than 10 nominal IMU periods, assume the clock reset and
  // re-anchor rather than integrating across the gap.
  if (t_ + 10 * frc971::controls::ImuWatcher::kNominalDt < sample_time_pico) {
    t_ = sample_time_pico;
    ++clock_resets_;
  }
  const aos::monotonic_clock::duration dt = sample_time_pico - t_;
  t_ = sample_time_pico;
  // We don't actually use the down estimator currently, but it's really
  // convenient for debugging.
  down_estimator_.Predict(gyro, accel, dt);
  // Rotate the gyro into the robot frame and take the z component as yaw rate.
  const double yaw_rate = (dt_config_.imu_transform * gyro)(2);
  ekf_.UpdateEncodersAndGyro(encoders.value()(0), encoders.value()(1), yaw_rate,
                             utils_.VoltageOrZero(sample_time_pi), accel, t_);
  SendStatus();
}
238
239flatbuffers::Offset<TargetEstimateDebug> Localizer::RejectImage(
240 int camera_index, RejectionReason reason,
241 TargetEstimateDebug::Builder *builder) {
242 builder->add_accepted(false);
243 builder->add_rejection_reason(reason);
244 cameras_.at(camera_index).rejection_counter.IncrementError(reason);
245 return builder->Finish();
246}
247
// Processes a single AprilTag detection: backs out the robot pose implied by
// the detection, applies a series of sanity checks (known ID, image age, pose
// error, implied yaw, robot speed, distance), and if accepted feeds the
// implied pose into the EKF as a correction. Always returns a
// TargetEstimateDebug offset describing what happened.
flatbuffers::Offset<TargetEstimateDebug> Localizer::HandleTarget(
    int camera_index, const aos::monotonic_clock::time_point capture_time,
    const frc971::vision::TargetPoseFbs &target,
    flatbuffers::FlatBufferBuilder *debug_fbb) {
  ++total_candidate_targets_;
  ++cameras_.at(camera_index).total_candidate_targets;

  TargetEstimateDebug::Builder builder(*debug_fbb);
  builder.add_camera(camera_index);
  builder.add_image_age_sec(aos::time::DurationInSeconds(
      event_loop_->monotonic_now() - capture_time));
  builder.add_image_monotonic_timestamp_ns(
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          capture_time.time_since_epoch())
          .count());

  const uint64_t target_id = target.id();
  builder.add_april_tag(target_id);
  VLOG(2) << aos::FlatbufferToJson(&target);
  // Only accept tags that are in the field map.
  if (target_poses_.count(target_id) == 0) {
    VLOG(1) << "Rejecting target due to invalid ID " << target_id;
    return RejectImage(camera_index, RejectionReason::NO_SUCH_TARGET, &builder);
  }

  const Transform &H_field_target = target_poses_.at(target_id);
  const Transform &H_robot_camera = cameras_.at(camera_index).extrinsics;

  const Transform H_camera_target = PoseToTransform(&target);

  const Transform H_field_camera = H_field_target * H_camera_target.inverse();
  // Back out the robot position that is implied by the current camera
  // reading. Note that the Pose object ignores any roll/pitch components, so
  // if the camera's extrinsics for pitch/roll are off, this should just
  // ignore it.
  const frc971::control_loops::Pose measured_camera_pose(H_field_camera);
  builder.add_camera_x(measured_camera_pose.rel_pos().x());
  builder.add_camera_y(measured_camera_pose.rel_pos().y());
  // Because the camera uses Z as forwards rather than X, just calculate the
  // debugging theta value using the transformation matrix directly.
  builder.add_camera_theta(
      std::atan2(H_field_camera(1, 2), H_field_camera(0, 2)));
  // Calculate the camera-to-robot transformation matrix ignoring the
  // pitch/roll of the camera.
  const Transform H_camera_robot_stripped =
      frc971::control_loops::Pose(H_robot_camera)
          .AsTransformationMatrix()
          .inverse();
  const frc971::control_loops::Pose measured_pose(
      measured_camera_pose.AsTransformationMatrix() * H_camera_robot_stripped);
  // This "Z" is the robot pose directly implied by the camera results.
  const Eigen::Matrix<double, 3, 1> Z(measured_pose.rel_pos().x(),
                                      measured_pose.rel_pos().y(),
                                      measured_pose.rel_theta());
  builder.add_implied_robot_x(Z(Corrector::kX));
  builder.add_implied_robot_y(Z(Corrector::kY));
  builder.add_implied_robot_theta(Z(Corrector::kTheta));

  double distance_to_target =
      Eigen::Affine3d(H_camera_target).translation().norm();

  // TODO(james): Tune this. Also, gain schedule for auto mode?
  Eigen::Matrix<double, 3, 1> noises(1.0, 1.0, 0.5);
  noises /= 4.0;
  // Scale noise by the distortion factor for this detection, growing
  // exponentially with distance to the target.
  noises *= (1.0 + FLAGS_distortion_noise_scalar * target.distortion_factor() *
                       std::exp(distance_to_target));

  Eigen::Matrix3d R = Eigen::Matrix3d::Zero();
  R.diagonal() = noises.cwiseAbs2();
  // In order to do the EKF correction, we determine the expected state based
  // on the state at the time the image was captured; however, we insert the
  // correction update itself at the current time. This is technically not
  // quite correct, but saves substantial CPU usage & code complexity by
  // making it so that we don't have to constantly rewind the entire EKF
  // history.
  const std::optional<State> state_at_capture =
      ekf_.LastStateBeforeTime(capture_time);

  if (!state_at_capture.has_value()) {
    VLOG(1) << "Rejecting image due to being too old.";
    return RejectImage(camera_index, RejectionReason::IMAGE_TOO_OLD, &builder);
  } else if (target.pose_error() > FLAGS_max_pose_error) {
    VLOG(1) << "Rejecting target due to high pose error "
            << target.pose_error();
    return RejectImage(camera_index, RejectionReason::HIGH_POSE_ERROR,
                       &builder);
  }

  double theta_at_capture = state_at_capture.value()(StateIdx::kTheta);
  double camera_implied_theta = Z(Corrector::kTheta);
  constexpr double kDegToRad = M_PI / 180.0;

  const double robot_speed =
      (state_at_capture.value()(StateIdx::kLeftVelocity) +
       state_at_capture.value()(StateIdx::kRightVelocity)) /
      2.0;
  // Tighter yaw threshold in autonomous than in teleop.
  const double yaw_threshold =
      (utils_.MaybeInAutonomous() ? FLAGS_max_implied_yaw_error
                                  : FLAGS_max_implied_teleop_yaw_error) *
      kDegToRad;
  // The speed gate only applies in autonomous.
  if (utils_.MaybeInAutonomous() &&
      (std::abs(robot_speed) > FLAGS_max_auto_image_robot_speed)) {
    return RejectImage(camera_index, RejectionReason::ROBOT_TOO_FAST, &builder);
  } else if (std::abs(aos::math::NormalizeAngle(
                 camera_implied_theta - theta_at_capture)) > yaw_threshold) {
    return RejectImage(camera_index, RejectionReason::HIGH_IMPLIED_YAW_ERROR,
                       &builder);
  } else if (distance_to_target > FLAGS_max_distance_to_target) {
    return RejectImage(camera_index, RejectionReason::HIGH_DISTANCE_TO_TARGET,
                       &builder);
  }

  const Input U = ekf_.MostRecentInput();
  VLOG(1) << "previous state " << ekf_.X_hat().topRows<3>().transpose();
  const State prior_state = ekf_.X_hat();
  // For the correction step, instead of passing in the measurement directly,
  // we pass in (0, 0, 0) as the measurement and then for the expected
  // measurement (Zhat) we calculate the error between the pose implied by
  // the camera measurement and the current estimate of the
  // pose. This doesn't affect any of the math, it just makes the code a bit
  // more convenient to write given the Correct() interface we already have.
  observations_.CorrectKnownH(Eigen::Vector3d::Zero(), &U,
                              Corrector(state_at_capture.value(), Z), R, t_);
  ++total_accepted_targets_;
  ++cameras_.at(camera_index).total_accepted_targets;
  VLOG(1) << "new state " << ekf_.X_hat().topRows<3>().transpose();
  // Record how much this correction actually moved the estimate.
  builder.add_correction_x(ekf_.X_hat()(StateIdx::kX) -
                           prior_state(StateIdx::kX));
  builder.add_correction_y(ekf_.X_hat()(StateIdx::kY) -
                           prior_state(StateIdx::kY));
  builder.add_correction_theta(ekf_.X_hat()(StateIdx::kTheta) -
                               prior_state(StateIdx::kTheta));
  builder.add_accepted(true);
  return builder.Finish();
}
383
384Localizer::Output Localizer::Corrector::H(const State &, const Input &) {
385 CHECK(Z_.allFinite());
386 Eigen::Vector3d Zhat = H_ * state_at_capture_ - Z_;
387 // Rewrap angle difference to put it back in range.
388 Zhat(2) = aos::math::NormalizeAngle(Zhat(2));
389 VLOG(1) << "Zhat " << Zhat.transpose() << " Z_ " << Z_.transpose()
390 << " state " << (H_ * state_at_capture_).transpose();
391 return Zhat;
392}
393
// Publishes the LocalizerOutput message (2d pose, zeroing state, accepted
// image count, and full 3d orientation) on /localizer.
void Localizer::SendOutput() {
  auto builder = output_sender_.MakeBuilder();
  frc971::controls::LocalizerOutput::Builder output_builder =
      builder.MakeBuilder<frc971::controls::LocalizerOutput>();
  output_builder.add_monotonic_timestamp_ns(
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          event_loop_->context().monotonic_event_time.time_since_epoch())
          .count());
  output_builder.add_x(ekf_.X_hat(StateIdx::kX));
  output_builder.add_y(ekf_.X_hat(StateIdx::kY));
  output_builder.add_theta(ekf_.X_hat(StateIdx::kTheta));
  output_builder.add_zeroed(imu_watcher_.zeroer().Zeroed());
  output_builder.add_image_accepted_count(total_accepted_targets_);
  // Compose the EKF yaw with the down estimator's orientation to get a full
  // 3d orientation quaternion.
  const Eigen::Quaterniond &orientation =
      Eigen::AngleAxis<double>(ekf_.X_hat(StateIdx::kTheta),
                               Eigen::Vector3d::UnitZ()) *
      down_estimator_.X_hat();
  frc971::controls::Quaternion quaternion;
  quaternion.mutate_x(orientation.x());
  quaternion.mutate_y(orientation.y());
  quaternion.mutate_z(orientation.z());
  quaternion.mutate_w(orientation.w());
  output_builder.add_orientation(&quaternion);
  builder.CheckOk(builder.Send(output_builder.Finish()));
}
419
// Serializes the given EKF state vector into a LocalizerState flatbuffer,
// copying each state index into its corresponding field.
flatbuffers::Offset<frc971::control_loops::drivetrain::LocalizerState>
Localizer::PopulateState(const State &X_hat,
                         flatbuffers::FlatBufferBuilder *fbb) {
  frc971::control_loops::drivetrain::LocalizerState::Builder builder(*fbb);
  builder.add_x(X_hat(StateIdx::kX));
  builder.add_y(X_hat(StateIdx::kY));
  builder.add_theta(X_hat(StateIdx::kTheta));
  builder.add_left_velocity(X_hat(StateIdx::kLeftVelocity));
  builder.add_right_velocity(X_hat(StateIdx::kRightVelocity));
  builder.add_left_encoder(X_hat(StateIdx::kLeftEncoder));
  builder.add_right_encoder(X_hat(StateIdx::kRightEncoder));
  builder.add_left_voltage_error(X_hat(StateIdx::kLeftVoltageError));
  builder.add_right_voltage_error(X_hat(StateIdx::kRightVoltageError));
  builder.add_angular_error(X_hat(StateIdx::kAngularError));
  builder.add_longitudinal_velocity_offset(
      X_hat(StateIdx::kLongitudinalVelocityOffset));
  builder.add_lateral_velocity(X_hat(StateIdx::kLateralVelocity));
  return builder.Finish();
}
439
// Serializes IMU-related status (zeroing state, pico clock offsets, last
// encoder readings, and IMU failure counters) into an ImuStatus flatbuffer.
flatbuffers::Offset<ImuStatus> Localizer::PopulateImu(
    flatbuffers::FlatBufferBuilder *fbb) const {
  // Child tables must be serialized before starting the ImuStatus builder.
  const auto zeroer_offset = imu_watcher_.zeroer().PopulateStatus(fbb);
  const auto failures_offset = imu_watcher_.PopulateImuFailures(fbb);
  ImuStatus::Builder builder(*fbb);
  builder.add_zeroed(imu_watcher_.zeroer().Zeroed());
  builder.add_faulted_zero(imu_watcher_.zeroer().Faulted());
  builder.add_zeroing(zeroer_offset);
  // The pico offset is only available once it has been measured.
  if (imu_watcher_.pico_offset().has_value()) {
    builder.add_pico_offset_ns(imu_watcher_.pico_offset().value().count());
    builder.add_pico_offset_error_ns(imu_watcher_.pico_offset_error().count());
  }
  if (last_encoder_readings_.has_value()) {
    builder.add_left_encoder(last_encoder_readings_.value()(0));
    builder.add_right_encoder(last_encoder_readings_.value()(1));
  }
  builder.add_imu_failures(failures_offset);
  return builder.Finish();
}
459
James Kuszmaulfb894572023-02-23 17:25:06 -0800460flatbuffers::Offset<CumulativeStatistics> Localizer::StatisticsForCamera(
461 const CameraState &camera, flatbuffers::FlatBufferBuilder *fbb) {
462 const auto counts_offset = camera.rejection_counter.PopulateCounts(fbb);
463 CumulativeStatistics::Builder stats_builder(*fbb);
464 stats_builder.add_total_accepted(camera.total_accepted_targets);
465 stats_builder.add_total_candidates(camera.total_candidate_targets);
466 stats_builder.add_rejection_reasons(counts_offset);
467 return stats_builder.Finish();
468}
469
James Kuszmaul04a343c2023-02-20 16:38:22 -0800470void Localizer::SendStatus() {
471 auto builder = status_sender_.MakeBuilder();
472 std::array<flatbuffers::Offset<CumulativeStatistics>, kNumCameras>
473 stats_offsets;
474 for (size_t ii = 0; ii < kNumCameras; ++ii) {
James Kuszmaulfb894572023-02-23 17:25:06 -0800475 stats_offsets.at(ii) = StatisticsForCamera(cameras_.at(ii), builder.fbb());
James Kuszmaul04a343c2023-02-20 16:38:22 -0800476 }
477 auto stats_offset =
478 builder.fbb()->CreateVector(stats_offsets.data(), stats_offsets.size());
479 auto down_estimator_offset =
480 down_estimator_.PopulateStatus(builder.fbb(), t_);
481 auto imu_offset = PopulateImu(builder.fbb());
James Kuszmaule600d172023-03-12 10:19:44 -0700482 auto state_offset = PopulateState(ekf_.X_hat(), builder.fbb());
James Kuszmaul04a343c2023-02-20 16:38:22 -0800483 Status::Builder status_builder = builder.MakeBuilder<Status>();
484 status_builder.add_state(state_offset);
485 status_builder.add_down_estimator(down_estimator_offset);
486 status_builder.add_imu(imu_offset);
487 status_builder.add_statistics(stats_offset);
488 builder.CheckOk(builder.Send(status_builder.Finish()));
489}
490
491} // namespace y2023::localizer