#include "y2023/localizer/localizer.h"

#include "aos/containers/sized_array.h"
#include "frc971/control_loops/drivetrain/localizer_generated.h"
#include "frc971/control_loops/pose.h"
#include "gflags/gflags.h"
#include "y2023/constants.h"
#include "y2023/localizer/utils.h"

DEFINE_double(max_pose_error, 1e-6,
              "Throw out target poses with a higher pose error than this");
DEFINE_double(distortion_noise_scalar, 1.0,
              "Scale the target pose distortion factor by this when computing "
              "the noise.");
DEFINE_double(max_implied_yaw_error, 30.0,
              "Reject target poses that imply a robot yaw of this many "
              "degrees off from our estimate.");

namespace y2023::localizer {
namespace {
constexpr std::array<std::string_view, Localizer::kNumCameras> kPisToUse{
    "pi1", "pi2", "pi3", "pi4"};

size_t CameraIndexForName(std::string_view name) {
  for (size_t index = 0; index < kPisToUse.size(); ++index) {
    if (name == kPisToUse.at(index)) {
      return index;
    }
  }
  LOG(FATAL) << "No camera named " << name;
}

std::map<uint64_t, Localizer::Transform> GetTargetLocations(
    const Constants &constants) {
  CHECK(constants.has_target_map());
  CHECK(constants.target_map()->has_target_poses());
  std::map<uint64_t, Localizer::Transform> transforms;
  for (const frc971::vision::TargetPoseFbs *target :
       *constants.target_map()->target_poses()) {
    CHECK(target->has_id());
    CHECK(target->has_position());
    CHECK(target->has_orientation());
    CHECK_EQ(0u, transforms.count(target->id()));
    transforms[target->id()] = PoseToTransform(target);
  }
  return transforms;
}
}  // namespace

std::array<Localizer::CameraState, Localizer::kNumCameras>
Localizer::MakeCameras(const Constants &constants, aos::EventLoop *event_loop) {
  CHECK(constants.has_cameras());
  std::array<Localizer::CameraState, Localizer::kNumCameras> cameras;
  for (const CameraConfiguration *camera : *constants.cameras()) {
    CHECK(camera->has_calibration());
    const frc971::vision::calibration::CameraCalibration *calibration =
        camera->calibration();
    CHECK(!calibration->has_turret_extrinsics())
        << "The 2023 robot does not have a turret.";
    CHECK(calibration->has_node_name());
    const size_t index =
        CameraIndexForName(calibration->node_name()->string_view());
    // We default-construct the extrinsics matrix to all-zeros; use that to
    // sanity-check whether we have populated the matrix yet or not.
    CHECK(cameras.at(index).extrinsics.norm() == 0)
        << "Got multiple calibrations for "
        << calibration->node_name()->string_view();
    CHECK(calibration->has_fixed_extrinsics());
    cameras.at(index).extrinsics =
        frc971::control_loops::drivetrain::FlatbufferToTransformationMatrix(
            *calibration->fixed_extrinsics());
    cameras.at(index).debug_sender = event_loop->MakeSender<Visualization>(
        absl::StrCat("/", calibration->node_name()->string_view(), "/camera"));
  }
  for (const CameraState &camera : cameras) {
    CHECK(camera.extrinsics.norm() != 0) << "Missing a camera calibration.";
  }
  return cameras;
}

Localizer::Localizer(
    aos::EventLoop *event_loop,
    const frc971::control_loops::drivetrain::DrivetrainConfig<double> dt_config)
    : event_loop_(event_loop),
      dt_config_(dt_config),
      constants_fetcher_(event_loop),
      cameras_(MakeCameras(constants_fetcher_.constants(), event_loop)),
      target_poses_(GetTargetLocations(constants_fetcher_.constants())),
      down_estimator_(dt_config),
      ekf_(dt_config),
      observations_(&ekf_),
      imu_watcher_(event_loop, dt_config,
                   y2023::constants::Values::DrivetrainEncoderToMeters(1),
                   std::bind(&Localizer::HandleImu, this, std::placeholders::_1,
                             std::placeholders::_2, std::placeholders::_3,
                             std::placeholders::_4, std::placeholders::_5)),
      utils_(event_loop),
      status_sender_(event_loop->MakeSender<Status>("/localizer")),
      output_sender_(event_loop->MakeSender<frc971::controls::LocalizerOutput>(
          "/localizer")) {
  if (dt_config_.is_simulated) {
    down_estimator_.assume_perfect_gravity();
  }

  for (size_t camera_index = 0; camera_index < kNumCameras; ++camera_index) {
    const std::string_view pi_name = kPisToUse.at(camera_index);
    event_loop->MakeWatcher(
        absl::StrCat("/", pi_name, "/camera"),
        [this, pi_name,
         camera_index](const frc971::vision::TargetMap &targets) {
          CHECK(targets.has_target_poses());
          CHECK(targets.has_monotonic_timestamp_ns());
          const std::optional<aos::monotonic_clock::duration> clock_offset =
              utils_.ClockOffset(pi_name);
          if (!clock_offset.has_value()) {
            VLOG(1) << "Rejecting image due to disconnected message bridge at "
                    << event_loop_->monotonic_now();
            cameras_.at(camera_index)
                .rejection_counter.IncrementError(
                    RejectionReason::MESSAGE_BRIDGE_DISCONNECTED);
            return;
          }
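          // Translate the image capture timestamp from the pi's clock into
          // our local monotonic clock, then subtract the current pico offset
          // error so that it lines up with the time base used for the IMU
          // samples feeding the EKF.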
          const aos::monotonic_clock::time_point pi_capture_time(
              std::chrono::nanoseconds(targets.monotonic_timestamp_ns()) -
              clock_offset.value());
          const aos::monotonic_clock::time_point capture_time =
              pi_capture_time - imu_watcher_.pico_offset_error();
          VLOG(2) << "Capture time of "
                  << targets.monotonic_timestamp_ns() * 1e-9
                  << " clock offset of "
                  << aos::time::DurationInSeconds(clock_offset.value())
                  << " pico error "
                  << aos::time::DurationInSeconds(
                         imu_watcher_.pico_offset_error());
          if (pi_capture_time > event_loop_->context().monotonic_event_time) {
            VLOG(1) << "Rejecting image due to being from future at "
                    << event_loop_->monotonic_now() << " with timestamp of "
                    << pi_capture_time << " and event time of "
                    << event_loop_->context().monotonic_event_time;
            cameras_.at(camera_index)
                .rejection_counter.IncrementError(
                    RejectionReason::IMAGE_FROM_FUTURE);
            return;
          }
          auto builder = cameras_.at(camera_index).debug_sender.MakeBuilder();
          aos::SizedArray<flatbuffers::Offset<TargetEstimateDebug>, 20>
              debug_offsets;
          for (const frc971::vision::TargetPoseFbs *target :
               *targets.target_poses()) {
            VLOG(1) << "Handling target from " << camera_index;
            auto offset = HandleTarget(camera_index, capture_time, *target,
                                       builder.fbb());
            if (debug_offsets.size() < debug_offsets.capacity()) {
              debug_offsets.push_back(offset);
            } else {
              AOS_LOG(ERROR, "Dropped message from debug vector.");
            }
          }
          auto vector_offset = builder.fbb()->CreateVector(
              debug_offsets.data(), debug_offsets.size());
          auto stats_offset =
              StatisticsForCamera(cameras_.at(camera_index), builder.fbb());
          Visualization::Builder visualize_builder(*builder.fbb());
          visualize_builder.add_targets(vector_offset);
          visualize_builder.add_statistics(stats_offset);
          builder.CheckOk(builder.Send(visualize_builder.Finish()));
          SendStatus();
        });
  }

  event_loop_->AddPhasedLoop([this](int) { SendOutput(); },
                             std::chrono::milliseconds(20));

  event_loop_->MakeWatcher(
      "/drivetrain",
      [this](
          const frc971::control_loops::drivetrain::LocalizerControl &control) {
        const double theta = control.keep_current_theta()
                                 ? ekf_.X_hat(StateIdx::kTheta)
                                 : control.theta();
        const double left_encoder = ekf_.X_hat(StateIdx::kLeftEncoder);
        const double right_encoder = ekf_.X_hat(StateIdx::kRightEncoder);
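        // Overwrite the pose (x, y, theta) while preserving the current
        // encoder estimates; all of the velocity and error states are reset
        // to zero.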
        ekf_.ResetInitialState(
            t_,
            (HybridEkf::State() << control.x(), control.y(), theta,
             left_encoder, 0, right_encoder, 0, 0, 0, 0, 0, 0)
                .finished(),
            ekf_.P());
      });

  ekf_.set_ignore_accel(true);
  // Priority should be lower than the imu reading process, but non-zero.
  event_loop->SetRuntimeRealtimePriority(10);
  event_loop->OnRun([this, event_loop]() {
    ekf_.ResetInitialState(event_loop->monotonic_now(),
                           HybridEkf::State::Zero(), ekf_.P());
  });
}

void Localizer::HandleImu(aos::monotonic_clock::time_point sample_time_pico,
                          aos::monotonic_clock::time_point sample_time_pi,
                          std::optional<Eigen::Vector2d> encoders,
                          Eigen::Vector3d gyro, Eigen::Vector3d accel) {
  last_encoder_readings_ = encoders;
  // Ignore invalid readings; the HybridEkf will handle them reasonably.
  if (!encoders.has_value()) {
    return;
  }
  if (t_ == aos::monotonic_clock::min_time) {
    t_ = sample_time_pico;
  }
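  // If far more than the nominal IMU period has elapsed since the last
  // sample, assume we missed a significant stretch of readings and reset the
  // time base rather than integrating across the gap.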
  if (t_ + 10 * frc971::controls::ImuWatcher::kNominalDt < sample_time_pico) {
    t_ = sample_time_pico;
    ++clock_resets_;
  }
  const aos::monotonic_clock::duration dt = sample_time_pico - t_;
  t_ = sample_time_pico;
  // We don't actually use the down estimator currently, but it's really
  // convenient for debugging.
  down_estimator_.Predict(gyro, accel, dt);
  const double yaw_rate = (dt_config_.imu_transform * gyro)(2);
  ekf_.UpdateEncodersAndGyro(encoders.value()(0), encoders.value()(1), yaw_rate,
                             utils_.VoltageOrZero(sample_time_pi), accel, t_);
  SendStatus();
}

flatbuffers::Offset<TargetEstimateDebug> Localizer::RejectImage(
    int camera_index, RejectionReason reason,
    TargetEstimateDebug::Builder *builder) {
  builder->add_accepted(false);
  builder->add_rejection_reason(reason);
  cameras_.at(camera_index).rejection_counter.IncrementError(reason);
  return builder->Finish();
}

flatbuffers::Offset<TargetEstimateDebug> Localizer::HandleTarget(
    int camera_index, const aos::monotonic_clock::time_point capture_time,
    const frc971::vision::TargetPoseFbs &target,
    flatbuffers::FlatBufferBuilder *debug_fbb) {
  ++total_candidate_targets_;
  ++cameras_.at(camera_index).total_candidate_targets;

  TargetEstimateDebug::Builder builder(*debug_fbb);
  builder.add_camera(camera_index);
  builder.add_image_age_sec(aos::time::DurationInSeconds(
      event_loop_->monotonic_now() - capture_time));

  const uint64_t target_id = target.id();
  VLOG(2) << aos::FlatbufferToJson(&target);
  if (target_poses_.count(target_id) == 0) {
    VLOG(1) << "Rejecting target due to invalid ID " << target_id;
    return RejectImage(camera_index, RejectionReason::NO_SUCH_TARGET, &builder);
  }

  const Transform &H_field_target = target_poses_.at(target_id);
  const Transform &H_robot_camera = cameras_.at(camera_index).extrinsics;

  const Transform H_camera_target = PoseToTransform(&target);

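  // Chain the transforms: the field-to-camera transform is the known
  // field-to-target transform composed with the inverse of the detected
  // camera-to-target transform.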
  const Transform H_field_camera = H_field_target * H_camera_target.inverse();
  // Back out the robot position that is implied by the current camera
  // reading. Note that the Pose object ignores any roll/pitch components, so
  // if the camera's extrinsics for pitch/roll are off, this should just
  // ignore it.
  const frc971::control_loops::Pose measured_camera_pose(H_field_camera);
  builder.add_camera_x(measured_camera_pose.rel_pos().x());
  builder.add_camera_y(measured_camera_pose.rel_pos().y());
  // Because the camera uses Z as forwards rather than X, just calculate the
  // debugging theta value using the transformation matrix directly.
  builder.add_camera_theta(
      std::atan2(H_field_camera(1, 2), H_field_camera(0, 2)));
  // Calculate the camera-to-robot transformation matrix ignoring the
  // pitch/roll of the camera.
  const Transform H_camera_robot_stripped =
      frc971::control_loops::Pose(H_robot_camera)
          .AsTransformationMatrix()
          .inverse();
  const frc971::control_loops::Pose measured_pose(
      measured_camera_pose.AsTransformationMatrix() * H_camera_robot_stripped);
  // This "Z" is the robot pose directly implied by the camera results.
  const Eigen::Matrix<double, 3, 1> Z(measured_pose.rel_pos().x(),
                                      measured_pose.rel_pos().y(),
                                      measured_pose.rel_theta());
  builder.add_implied_robot_x(Z(Corrector::kX));
  builder.add_implied_robot_y(Z(Corrector::kY));
  builder.add_implied_robot_theta(Z(Corrector::kTheta));

  // TODO(james): Tune this. Also, gain schedule for auto mode?
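  // Standard deviations for the implied (x, y, theta) measurement, in meters
  // and radians; these get squared into the diagonal of R below.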
  Eigen::Matrix<double, 3, 1> noises(1.0, 1.0, 0.5);
  noises /= 4.0;
  // Scale noise by the distortion factor for this detection.
  noises *= (1.0 + FLAGS_distortion_noise_scalar * target.distortion_factor());

  Eigen::Matrix3d R = Eigen::Matrix3d::Zero();
  R.diagonal() = noises.cwiseAbs2();
  // In order to do the EKF correction, we determine the expected state based
  // on the state at the time the image was captured; however, we insert the
  // correction update itself at the current time. This is technically not
  // quite correct, but saves substantial CPU usage & code complexity by
  // making it so that we don't have to constantly rewind the entire EKF
  // history.
  const std::optional<State> state_at_capture =
      ekf_.LastStateBeforeTime(capture_time);

  if (!state_at_capture.has_value()) {
    VLOG(1) << "Rejecting image due to being too old.";
    return RejectImage(camera_index, RejectionReason::IMAGE_TOO_OLD, &builder);
  } else if (target.pose_error() > FLAGS_max_pose_error) {
    VLOG(1) << "Rejecting target due to high pose error "
            << target.pose_error();
    return RejectImage(camera_index, RejectionReason::HIGH_POSE_ERROR,
                       &builder);
  } else {
    double theta_at_capture = state_at_capture.value()(StateIdx::kTheta);
    double camera_implied_theta = Z(Corrector::kTheta);
    constexpr double kDegToRad = M_PI / 180.0;
    if (std::abs(camera_implied_theta - theta_at_capture) >
        FLAGS_max_implied_yaw_error * kDegToRad) {
      return RejectImage(camera_index, RejectionReason::HIGH_IMPLIED_YAW_ERROR,
                         &builder);
    }
  }

  const Input U = ekf_.MostRecentInput();
  VLOG(1) << "previous state " << ekf_.X_hat().topRows<3>().transpose();
  // For the correction step, instead of passing in the measurement directly,
  // we pass in (0, 0, 0) as the measurement and then for the expected
  // measurement (Zhat) we calculate the error between the pose implied by
  // the camera measurement and the current estimate of the pose. This
  // doesn't affect any of the math, it just makes the code a bit more
  // convenient to write given the Correct() interface we already have.
  observations_.CorrectKnownH(Eigen::Vector3d::Zero(), &U,
                              Corrector(state_at_capture.value(), Z), R, t_);
  ++total_accepted_targets_;
  ++cameras_.at(camera_index).total_accepted_targets;
  VLOG(1) << "new state " << ekf_.X_hat().topRows<3>().transpose();
  builder.add_accepted(true);
  return builder.Finish();
}

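// Returns the residual between the camera-implied pose (Z_) and the state
// estimate at the capture time. Because HandleTarget passes a zero vector as
// the measurement, this difference is exactly the error that the EKF ends up
// correcting against.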
Localizer::Output Localizer::Corrector::H(const State &, const Input &) {
  CHECK(Z_.allFinite());
  Eigen::Vector3d Zhat = H_ * state_at_capture_ - Z_;
  // Rewrap angle difference to put it back in range.
  Zhat(2) = aos::math::NormalizeAngle(Zhat(2));
  VLOG(1) << "Zhat " << Zhat.transpose() << " Z_ " << Z_.transpose()
          << " state " << (H_ * state_at_capture_).transpose();
  return Zhat;
}

void Localizer::SendOutput() {
  auto builder = output_sender_.MakeBuilder();
  frc971::controls::LocalizerOutput::Builder output_builder =
      builder.MakeBuilder<frc971::controls::LocalizerOutput>();
  output_builder.add_monotonic_timestamp_ns(
      std::chrono::duration_cast<std::chrono::nanoseconds>(
          event_loop_->context().monotonic_event_time.time_since_epoch())
          .count());
  output_builder.add_x(ekf_.X_hat(StateIdx::kX));
  output_builder.add_y(ekf_.X_hat(StateIdx::kY));
  output_builder.add_theta(ekf_.X_hat(StateIdx::kTheta));
  output_builder.add_zeroed(imu_watcher_.zeroer().Zeroed());
  output_builder.add_image_accepted_count(total_accepted_targets_);
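  // Compose the EKF's yaw estimate with the down estimator's orientation to
  // produce a full 3D orientation quaternion.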
  const Eigen::Quaterniond &orientation =
      Eigen::AngleAxis<double>(ekf_.X_hat(StateIdx::kTheta),
                               Eigen::Vector3d::UnitZ()) *
      down_estimator_.X_hat();
  frc971::controls::Quaternion quaternion;
  quaternion.mutate_x(orientation.x());
  quaternion.mutate_y(orientation.y());
  quaternion.mutate_z(orientation.z());
  quaternion.mutate_w(orientation.w());
  output_builder.add_orientation(&quaternion);
  builder.CheckOk(builder.Send(output_builder.Finish()));
}

flatbuffers::Offset<frc971::control_loops::drivetrain::LocalizerState>
Localizer::PopulateState(flatbuffers::FlatBufferBuilder *fbb) const {
  frc971::control_loops::drivetrain::LocalizerState::Builder builder(*fbb);
  builder.add_x(ekf_.X_hat(StateIdx::kX));
  builder.add_y(ekf_.X_hat(StateIdx::kY));
  builder.add_theta(ekf_.X_hat(StateIdx::kTheta));
  builder.add_left_velocity(ekf_.X_hat(StateIdx::kLeftVelocity));
  builder.add_right_velocity(ekf_.X_hat(StateIdx::kRightVelocity));
  builder.add_left_encoder(ekf_.X_hat(StateIdx::kLeftEncoder));
  builder.add_right_encoder(ekf_.X_hat(StateIdx::kRightEncoder));
  builder.add_left_voltage_error(ekf_.X_hat(StateIdx::kLeftVoltageError));
  builder.add_right_voltage_error(ekf_.X_hat(StateIdx::kRightVoltageError));
  builder.add_angular_error(ekf_.X_hat(StateIdx::kAngularError));
  builder.add_longitudinal_velocity_offset(
      ekf_.X_hat(StateIdx::kLongitudinalVelocityOffset));
  builder.add_lateral_velocity(ekf_.X_hat(StateIdx::kLateralVelocity));
  return builder.Finish();
}

flatbuffers::Offset<ImuStatus> Localizer::PopulateImu(
    flatbuffers::FlatBufferBuilder *fbb) const {
  const auto zeroer_offset = imu_watcher_.zeroer().PopulateStatus(fbb);
  const auto failures_offset = imu_watcher_.PopulateImuFailures(fbb);
  ImuStatus::Builder builder(*fbb);
  builder.add_zeroed(imu_watcher_.zeroer().Zeroed());
  builder.add_faulted_zero(imu_watcher_.zeroer().Faulted());
  builder.add_zeroing(zeroer_offset);
  if (imu_watcher_.pico_offset().has_value()) {
    builder.add_pico_offset_ns(imu_watcher_.pico_offset().value().count());
    builder.add_pico_offset_error_ns(imu_watcher_.pico_offset_error().count());
  }
  if (last_encoder_readings_.has_value()) {
    builder.add_left_encoder(last_encoder_readings_.value()(0));
    builder.add_right_encoder(last_encoder_readings_.value()(1));
  }
  builder.add_imu_failures(failures_offset);
  return builder.Finish();
}

flatbuffers::Offset<CumulativeStatistics> Localizer::StatisticsForCamera(
    const CameraState &camera, flatbuffers::FlatBufferBuilder *fbb) {
  const auto counts_offset = camera.rejection_counter.PopulateCounts(fbb);
  CumulativeStatistics::Builder stats_builder(*fbb);
  stats_builder.add_total_accepted(camera.total_accepted_targets);
  stats_builder.add_total_candidates(camera.total_candidate_targets);
  stats_builder.add_rejection_reasons(counts_offset);
  return stats_builder.Finish();
}

void Localizer::SendStatus() {
  auto builder = status_sender_.MakeBuilder();
  std::array<flatbuffers::Offset<CumulativeStatistics>, kNumCameras>
      stats_offsets;
  for (size_t ii = 0; ii < kNumCameras; ++ii) {
    stats_offsets.at(ii) = StatisticsForCamera(cameras_.at(ii), builder.fbb());
  }
  auto stats_offset =
      builder.fbb()->CreateVector(stats_offsets.data(), stats_offsets.size());
  auto down_estimator_offset =
      down_estimator_.PopulateStatus(builder.fbb(), t_);
  auto imu_offset = PopulateImu(builder.fbb());
  auto state_offset = PopulateState(builder.fbb());
  Status::Builder status_builder = builder.MakeBuilder<Status>();
  status_builder.add_state(state_offset);
  status_builder.add_down_estimator(down_estimator_offset);
  status_builder.add_imu(imu_offset);
  status_builder.add_statistics(stats_offset);
  builder.CheckOk(builder.Send(status_builder.Finish()));
}

}  // namespace y2023::localizer