blob: 4378f617d5bbf333bc313f6e59416fc85808b315 [file] [log] [blame]
James Kuszmaul04a343c2023-02-20 16:38:22 -08001#include "y2023/localizer/localizer.h"
2
3#include "aos/containers/sized_array.h"
4#include "frc971/control_loops/drivetrain/localizer_generated.h"
5#include "frc971/control_loops/pose.h"
6#include "y2023/constants.h"
James Kuszmaul18008f82023-02-23 20:52:50 -08007#include "y2023/localizer/utils.h"
James Kuszmaul04a343c2023-02-20 16:38:22 -08008
9namespace y2023::localizer {
10namespace {
// Node-name prefixes of the Raspberry Pis whose cameras feed the localizer.
// The position in this array is the camera index used throughout this file
// (see CameraIndexForName()), so its size must match Localizer::kNumCameras.
constexpr std::array<std::string_view, Localizer::kNumCameras> kPisToUse{
    "pi1", "pi2", "pi3", "pi4"};
13
14size_t CameraIndexForName(std::string_view name) {
15 for (size_t index = 0; index < kPisToUse.size(); ++index) {
16 if (name == kPisToUse.at(index)) {
17 return index;
18 }
19 }
20 LOG(FATAL) << "No camera named " << name;
21}
22
23std::map<uint64_t, Localizer::Transform> GetTargetLocations(
24 const Constants &constants) {
25 CHECK(constants.has_target_map());
26 CHECK(constants.target_map()->has_target_poses());
27 std::map<uint64_t, Localizer::Transform> transforms;
28 for (const frc971::vision::TargetPoseFbs *target :
29 *constants.target_map()->target_poses()) {
30 CHECK(target->has_id());
31 CHECK(target->has_position());
32 CHECK(target->has_orientation());
33 CHECK_EQ(0u, transforms.count(target->id()));
34 transforms[target->id()] = PoseToTransform(target);
35 }
36 return transforms;
37}
38} // namespace
39
James Kuszmaul04a343c2023-02-20 16:38:22 -080040
// Constructs the per-camera state (extrinsics + debug sender) for every
// camera listed in the constants. Validates that each calibration names a
// known pi, has fixed (non-turret) extrinsics, and that every camera index
// ends up populated exactly once; any violation is fatal.
std::array<Localizer::CameraState, Localizer::kNumCameras>
Localizer::MakeCameras(const Constants &constants, aos::EventLoop *event_loop) {
  CHECK(constants.has_cameras());
  std::array<Localizer::CameraState, Localizer::kNumCameras> cameras;
  for (const CameraConfiguration *camera : *constants.cameras()) {
    CHECK(camera->has_calibration());
    const frc971::vision::calibration::CameraCalibration *calibration =
        camera->calibration();
    CHECK(!calibration->has_turret_extrinsics())
        << "The 2023 robot does not have a turret.";
    CHECK(calibration->has_node_name());
    // The node name ("piN") determines which slot this calibration fills.
    const size_t index =
        CameraIndexForName(calibration->node_name()->string_view());
    // We default-construct the extrinsics matrix to all-zeros; use that to
    // sanity-check whether we have populated the matrix yet or not.
    CHECK(cameras.at(index).extrinsics.norm() == 0)
        << "Got multiple calibrations for "
        << calibration->node_name()->string_view();
    CHECK(calibration->has_fixed_extrinsics());
    cameras.at(index).extrinsics =
        frc971::control_loops::drivetrain::FlatbufferToTransformationMatrix(
            *calibration->fixed_extrinsics());
    // Debug/visualization output goes on the camera's own channel.
    cameras.at(index).debug_sender = event_loop->MakeSender<Visualization>(
        absl::StrCat("/", calibration->node_name()->string_view(), "/camera"));
  }
  // Every camera slot must have been filled by exactly one calibration; the
  // all-zeros sentinel detects a missing one.
  for (const CameraState &camera : cameras) {
    CHECK(camera.extrinsics.norm() != 0) << "Missing a camera calibration.";
  }
  return cameras;
}
71
72Localizer::Localizer(
73 aos::EventLoop *event_loop,
74 const frc971::control_loops::drivetrain::DrivetrainConfig<double> dt_config)
75 : event_loop_(event_loop),
76 dt_config_(dt_config),
77 constants_fetcher_(event_loop),
78 cameras_(MakeCameras(constants_fetcher_.constants(), event_loop)),
79 target_poses_(GetTargetLocations(constants_fetcher_.constants())),
80 down_estimator_(dt_config),
81 ekf_(dt_config),
82 observations_(&ekf_),
83 imu_watcher_(event_loop, dt_config,
84 y2023::constants::Values::DrivetrainEncoderToMeters(1),
85 std::bind(&Localizer::HandleImu, this, std::placeholders::_1,
86 std::placeholders::_2, std::placeholders::_3,
87 std::placeholders::_4, std::placeholders::_5)),
88 utils_(event_loop),
89 status_sender_(event_loop->MakeSender<Status>("/localizer")),
90 output_sender_(event_loop->MakeSender<frc971::controls::LocalizerOutput>(
91 "/localizer")) {
92 if (dt_config_.is_simulated) {
93 down_estimator_.assume_perfect_gravity();
94 }
95
96 for (size_t camera_index = 0; camera_index < kNumCameras; ++camera_index) {
97 const std::string_view pi_name = kPisToUse.at(camera_index);
98 event_loop->MakeWatcher(
99 absl::StrCat("/", pi_name, "/camera"),
100 [this, pi_name,
101 camera_index](const frc971::vision::TargetMap &targets) {
102 CHECK(targets.has_target_poses());
103 CHECK(targets.has_monotonic_timestamp_ns());
104 const std::optional<aos::monotonic_clock::duration> clock_offset =
105 utils_.ClockOffset(pi_name);
106 if (!clock_offset.has_value()) {
107 VLOG(1) << "Rejecting image due to disconnected message bridge at "
108 << event_loop_->monotonic_now();
109 cameras_.at(camera_index)
110 .rejection_counter.IncrementError(
111 RejectionReason::MESSAGE_BRIDGE_DISCONNECTED);
112 return;
113 }
114 const aos::monotonic_clock::time_point pi_capture_time(
115 std::chrono::nanoseconds(targets.monotonic_timestamp_ns()) -
116 clock_offset.value());
117 const aos::monotonic_clock::time_point capture_time =
118 pi_capture_time - imu_watcher_.pico_offset_error();
119 VLOG(2) << "Capture time of "
120 << targets.monotonic_timestamp_ns() * 1e-9
121 << " clock offset of "
122 << aos::time::DurationInSeconds(clock_offset.value())
123 << " pico error "
124 << aos::time::DurationInSeconds(
125 imu_watcher_.pico_offset_error());
126 if (pi_capture_time > event_loop_->context().monotonic_event_time) {
127 VLOG(1) << "Rejecting image due to being from future at "
128 << event_loop_->monotonic_now() << " with timestamp of "
129 << pi_capture_time << " and event time pf "
130 << event_loop_->context().monotonic_event_time;
131 cameras_.at(camera_index)
132 .rejection_counter.IncrementError(
133 RejectionReason::IMAGE_FROM_FUTURE);
134 return;
135 }
136 auto builder = cameras_.at(camera_index).debug_sender.MakeBuilder();
137 aos::SizedArray<flatbuffers::Offset<TargetEstimateDebug>, 20>
138 debug_offsets;
139 for (const frc971::vision::TargetPoseFbs *target :
140 *targets.target_poses()) {
141 VLOG(1) << "Handling target from " << camera_index;
142 auto offset = HandleTarget(camera_index, capture_time, *target,
143 builder.fbb());
144 if (debug_offsets.size() < debug_offsets.capacity()) {
145 debug_offsets.push_back(offset);
146 } else {
147 AOS_LOG(ERROR, "Dropped message from debug vector.");
148 }
149 }
150 auto vector_offset = builder.fbb()->CreateVector(
151 debug_offsets.data(), debug_offsets.size());
James Kuszmaulfb894572023-02-23 17:25:06 -0800152 auto stats_offset =
153 StatisticsForCamera(cameras_.at(camera_index), builder.fbb());
James Kuszmaul04a343c2023-02-20 16:38:22 -0800154 Visualization::Builder visualize_builder(*builder.fbb());
155 visualize_builder.add_targets(vector_offset);
James Kuszmaulfb894572023-02-23 17:25:06 -0800156 visualize_builder.add_statistics(stats_offset);
James Kuszmaul04a343c2023-02-20 16:38:22 -0800157 builder.CheckOk(builder.Send(visualize_builder.Finish()));
158 SendStatus();
159 });
160 }
161
162 event_loop_->AddPhasedLoop([this](int) { SendOutput(); },
163 std::chrono::milliseconds(5));
164
165 event_loop_->MakeWatcher(
166 "/drivetrain",
167 [this](
168 const frc971::control_loops::drivetrain::LocalizerControl &control) {
169 const double theta = control.keep_current_theta()
170 ? ekf_.X_hat(StateIdx::kTheta)
171 : control.theta();
172 const double left_encoder = ekf_.X_hat(StateIdx::kLeftEncoder);
173 const double right_encoder = ekf_.X_hat(StateIdx::kRightEncoder);
174 ekf_.ResetInitialState(
175 t_,
176 (HybridEkf::State() << control.x(), control.y(), theta,
177 left_encoder, 0, right_encoder, 0, 0, 0, 0, 0, 0)
178 .finished(),
179 ekf_.P());
180 });
181
182 ekf_.set_ignore_accel(true);
183 // Priority should be lower than the imu reading process, but non-zero.
184 event_loop->SetRuntimeRealtimePriority(10);
185 event_loop->OnRun([this, event_loop]() {
186 ekf_.ResetInitialState(event_loop->monotonic_now(),
187 HybridEkf::State::Zero(), ekf_.P());
188 });
189}
190
// Consumes one IMU sample (plus optional encoder readings) from the
// ImuWatcher and feeds it into the down estimator and the EKF. Timestamps
// are tracked on the pico clock (sample_time_pico); sample_time_pi is used
// only for looking up the battery voltage.
void Localizer::HandleImu(aos::monotonic_clock::time_point sample_time_pico,
                          aos::monotonic_clock::time_point sample_time_pi,
                          std::optional<Eigen::Vector2d> encoders,
                          Eigen::Vector3d gyro, Eigen::Vector3d accel) {
  last_encoder_readings_ = encoders;
  // Ignore invalid readings; the HybridEkf will handle it reasonably.
  if (!encoders.has_value()) {
    return;
  }
  // First sample ever: initialize our clock.
  if (t_ == aos::monotonic_clock::min_time) {
    t_ = sample_time_pico;
  }
  // If more than ~10 nominal sample periods have elapsed, assume the clock
  // jumped (e.g. the pico reset) and resynchronize rather than integrating
  // across the gap.
  if (t_ + 10 * frc971::controls::ImuWatcher::kNominalDt < sample_time_pico) {
    t_ = sample_time_pico;
    ++clock_resets_;
  }
  const aos::monotonic_clock::duration dt = sample_time_pico - t_;
  t_ = sample_time_pico;
  // We don't actually use the down estimator currently, but it's really
  // convenient for debugging.
  down_estimator_.Predict(gyro, accel, dt);
  // Rotate the gyro into the robot frame and take the z (yaw) component.
  const double yaw_rate = (dt_config_.imu_transform * gyro)(2);
  ekf_.UpdateEncodersAndGyro(encoders.value()(0), encoders.value()(1), yaw_rate,
                             utils_.VoltageOrZero(sample_time_pi), accel, t_);
  SendStatus();
}
217
218flatbuffers::Offset<TargetEstimateDebug> Localizer::RejectImage(
219 int camera_index, RejectionReason reason,
220 TargetEstimateDebug::Builder *builder) {
221 builder->add_accepted(false);
222 builder->add_rejection_reason(reason);
223 cameras_.at(camera_index).rejection_counter.IncrementError(reason);
224 return builder->Finish();
225}
226
// Processes a single AprilTag detection: backs out the robot pose implied by
// the camera-to-target transform and applies it as an EKF correction.
// Returns a debug flatbuffer describing whether the target was accepted and,
// if rejected, why.
flatbuffers::Offset<TargetEstimateDebug> Localizer::HandleTarget(
    int camera_index, const aos::monotonic_clock::time_point capture_time,
    const frc971::vision::TargetPoseFbs &target,
    flatbuffers::FlatBufferBuilder *debug_fbb) {
  ++total_candidate_targets_;
  ++cameras_.at(camera_index).total_candidate_targets;

  TargetEstimateDebug::Builder builder(*debug_fbb);
  builder.add_camera(camera_index);
  builder.add_image_age_sec(aos::time::DurationInSeconds(
      event_loop_->monotonic_now() - capture_time));

  // Reject detections of tags that are not in the field's target map.
  const uint64_t target_id = target.id();
  VLOG(2) << aos::FlatbufferToJson(&target);
  if (target_poses_.count(target_id) == 0) {
    VLOG(1) << "Rejecting target due to invalid ID " << target_id;
    return RejectImage(camera_index, RejectionReason::NO_SUCH_TARGET, &builder);
  }

  const Transform &H_field_target = target_poses_.at(target_id);
  const Transform &H_robot_camera = cameras_.at(camera_index).extrinsics;

  const Transform H_camera_target = PoseToTransform(&target);

  // Field-relative camera pose implied by this detection.
  const Transform H_field_camera = H_field_target * H_camera_target.inverse();
  // Back out the robot position that is implied by the current camera
  // reading. Note that the Pose object ignores any roll/pitch components, so
  // if the camera's extrinsics for pitch/roll are off, this should just
  // ignore it.
  const frc971::control_loops::Pose measured_camera_pose(H_field_camera);
  builder.add_camera_x(measured_camera_pose.rel_pos().x());
  builder.add_camera_y(measured_camera_pose.rel_pos().y());
  // Because the camera uses Z as forwards rather than X, just calculate the
  // debugging theta value using the transformation matrix directly.
  builder.add_camera_theta(
      std::atan2(H_field_camera(1, 2), H_field_camera(0, 2)));
  // Calculate the camera-to-robot transformation matrix ignoring the
  // pitch/roll of the camera.
  const Transform H_camera_robot_stripped =
      frc971::control_loops::Pose(H_robot_camera)
          .AsTransformationMatrix()
          .inverse();
  const frc971::control_loops::Pose measured_pose(
      measured_camera_pose.AsTransformationMatrix() * H_camera_robot_stripped);
  // This "Z" is the robot pose directly implied by the camera results.
  const Eigen::Matrix<double, 3, 1> Z(measured_pose.rel_pos().x(),
                                      measured_pose.rel_pos().y(),
                                      measured_pose.rel_theta());
  builder.add_implied_robot_x(Z(Corrector::kX));
  builder.add_implied_robot_y(Z(Corrector::kY));
  builder.add_implied_robot_theta(Z(Corrector::kTheta));

  // Measurement noise covariance (diagonal of squared standard deviations).
  // TODO(james): Tune this. Also, gain schedule for auto mode?
  Eigen::Matrix<double, 3, 1> noises(1.0, 1.0, 0.5);

  Eigen::Matrix3d R = Eigen::Matrix3d::Zero();
  R.diagonal() = noises.cwiseAbs2();
  // In order to do the EKF correction, we determine the expected state based
  // on the state at the time the image was captured; however, we insert the
  // correction update itself at the current time. This is technically not
  // quite correct, but saves substantial CPU usage & code complexity by
  // making it so that we don't have to constantly rewind the entire EKF
  // history.
  const std::optional<State> state_at_capture =
      ekf_.LastStateBeforeTime(capture_time);

  if (!state_at_capture.has_value()) {
    VLOG(1) << "Rejecting image due to being too old.";
    return RejectImage(camera_index, RejectionReason::IMAGE_TOO_OLD, &builder);
  }

  const Input U = ekf_.MostRecentInput();
  VLOG(1) << "previous state " << ekf_.X_hat().topRows<3>().transpose();
  // For the correction step, instead of passing in the measurement directly,
  // we pass in (0, 0, 0) as the measurement and then for the expected
  // measurement (Zhat) we calculate the error between the pose implied by
  // the camera measurement and the current estimate of the
  // pose. This doesn't affect any of the math, it just makes the code a bit
  // more convenient to write given the Correct() interface we already have.
  observations_.CorrectKnownH(Eigen::Vector3d::Zero(), &U,
                              Corrector(state_at_capture.value(), Z), R, t_);
  ++total_accepted_targets_;
  ++cameras_.at(camera_index).total_accepted_targets;
  VLOG(1) << "new state " << ekf_.X_hat().topRows<3>().transpose();
  builder.add_accepted(true);
  return builder.Finish();
}
314
315Localizer::Output Localizer::Corrector::H(const State &, const Input &) {
316 CHECK(Z_.allFinite());
317 Eigen::Vector3d Zhat = H_ * state_at_capture_ - Z_;
318 // Rewrap angle difference to put it back in range.
319 Zhat(2) = aos::math::NormalizeAngle(Zhat(2));
320 VLOG(1) << "Zhat " << Zhat.transpose() << " Z_ " << Z_.transpose()
321 << " state " << (H_ * state_at_capture_).transpose();
322 return Zhat;
323}
324
// Publishes the LocalizerOutput message consumed by the rest of the robot
// code: current x/y/theta estimate, zeroing state, accepted-image count,
// and a full 3D orientation quaternion (yaw from the EKF combined with
// roll/pitch from the down estimator).
void Localizer::SendOutput() {
  auto builder = output_sender_.MakeBuilder();
  frc971::controls::LocalizerOutput::Builder output_builder =
      builder.MakeBuilder<frc971::controls::LocalizerOutput>();
  output_builder.add_monotonic_timestamp_ns(
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          event_loop_->context().monotonic_event_time.time_since_epoch())
          .count());
  output_builder.add_x(ekf_.X_hat(StateIdx::kX));
  output_builder.add_y(ekf_.X_hat(StateIdx::kY));
  output_builder.add_theta(ekf_.X_hat(StateIdx::kTheta));
  output_builder.add_zeroed(imu_watcher_.zeroer().Zeroed());
  output_builder.add_image_accepted_count(total_accepted_targets_);
  // Compose the EKF's yaw (rotation about Z) with the down estimator's
  // attitude to produce a full orientation.
  const Eigen::Quaterniond &orientation =
      Eigen::AngleAxis<double>(ekf_.X_hat(StateIdx::kTheta),
                               Eigen::Vector3d::UnitZ()) *
      down_estimator_.X_hat();
  frc971::controls::Quaternion quaternion;
  quaternion.mutate_x(orientation.x());
  quaternion.mutate_y(orientation.y());
  quaternion.mutate_z(orientation.z());
  quaternion.mutate_w(orientation.w());
  output_builder.add_orientation(&quaternion);
  builder.CheckOk(builder.Send(output_builder.Finish()));
}
350
// Serializes the full EKF state vector into a LocalizerState flatbuffer for
// status/debugging output.
flatbuffers::Offset<frc971::control_loops::drivetrain::LocalizerState>
Localizer::PopulateState(flatbuffers::FlatBufferBuilder *fbb) const {
  frc971::control_loops::drivetrain::LocalizerState::Builder builder(*fbb);
  builder.add_x(ekf_.X_hat(StateIdx::kX));
  builder.add_y(ekf_.X_hat(StateIdx::kY));
  builder.add_theta(ekf_.X_hat(StateIdx::kTheta));
  builder.add_left_velocity(ekf_.X_hat(StateIdx::kLeftVelocity));
  builder.add_right_velocity(ekf_.X_hat(StateIdx::kRightVelocity));
  builder.add_left_encoder(ekf_.X_hat(StateIdx::kLeftEncoder));
  builder.add_right_encoder(ekf_.X_hat(StateIdx::kRightEncoder));
  builder.add_left_voltage_error(ekf_.X_hat(StateIdx::kLeftVoltageError));
  builder.add_right_voltage_error(ekf_.X_hat(StateIdx::kRightVoltageError));
  builder.add_angular_error(ekf_.X_hat(StateIdx::kAngularError));
  builder.add_longitudinal_velocity_offset(
      ekf_.X_hat(StateIdx::kLongitudinalVelocityOffset));
  builder.add_lateral_velocity(ekf_.X_hat(StateIdx::kLateralVelocity));
  return builder.Finish();
}
369
// Serializes IMU-related status: zeroing state, pico clock offsets (when
// available), the last raw encoder readings (when available), and any IMU
// failure counters.
flatbuffers::Offset<ImuStatus> Localizer::PopulateImu(
    flatbuffers::FlatBufferBuilder *fbb) const {
  // Child tables must be serialized before starting the ImuStatus builder.
  const auto zeroer_offset = imu_watcher_.zeroer().PopulateStatus(fbb);
  const auto failures_offset = imu_watcher_.PopulateImuFailures(fbb);
  ImuStatus::Builder builder(*fbb);
  builder.add_zeroed(imu_watcher_.zeroer().Zeroed());
  builder.add_faulted_zero(imu_watcher_.zeroer().Faulted());
  builder.add_zeroing(zeroer_offset);
  if (imu_watcher_.pico_offset().has_value()) {
    builder.add_pico_offset_ns(imu_watcher_.pico_offset().value().count());
    builder.add_pico_offset_error_ns(imu_watcher_.pico_offset_error().count());
  }
  if (last_encoder_readings_.has_value()) {
    builder.add_left_encoder(last_encoder_readings_.value()(0));
    builder.add_right_encoder(last_encoder_readings_.value()(1));
  }
  builder.add_imu_failures(failures_offset);
  return builder.Finish();
}
389
James Kuszmaulfb894572023-02-23 17:25:06 -0800390flatbuffers::Offset<CumulativeStatistics> Localizer::StatisticsForCamera(
391 const CameraState &camera, flatbuffers::FlatBufferBuilder *fbb) {
392 const auto counts_offset = camera.rejection_counter.PopulateCounts(fbb);
393 CumulativeStatistics::Builder stats_builder(*fbb);
394 stats_builder.add_total_accepted(camera.total_accepted_targets);
395 stats_builder.add_total_candidates(camera.total_candidate_targets);
396 stats_builder.add_rejection_reasons(counts_offset);
397 return stats_builder.Finish();
398}
399
James Kuszmaul04a343c2023-02-20 16:38:22 -0800400void Localizer::SendStatus() {
401 auto builder = status_sender_.MakeBuilder();
402 std::array<flatbuffers::Offset<CumulativeStatistics>, kNumCameras>
403 stats_offsets;
404 for (size_t ii = 0; ii < kNumCameras; ++ii) {
James Kuszmaulfb894572023-02-23 17:25:06 -0800405 stats_offsets.at(ii) = StatisticsForCamera(cameras_.at(ii), builder.fbb());
James Kuszmaul04a343c2023-02-20 16:38:22 -0800406 }
407 auto stats_offset =
408 builder.fbb()->CreateVector(stats_offsets.data(), stats_offsets.size());
409 auto down_estimator_offset =
410 down_estimator_.PopulateStatus(builder.fbb(), t_);
411 auto imu_offset = PopulateImu(builder.fbb());
412 auto state_offset = PopulateState(builder.fbb());
413 Status::Builder status_builder = builder.MakeBuilder<Status>();
414 status_builder.add_state(state_offset);
415 status_builder.add_down_estimator(down_estimator_offset);
416 status_builder.add_imu(imu_offset);
417 status_builder.add_statistics(stats_offset);
418 builder.CheckOk(builder.Send(status_builder.Finish()));
419}
420
421} // namespace y2023::localizer