#include "y2022/localizer/localizer.h"

#include "aos/json_to_flatbuffer.h"
#include "frc971/control_loops/c2d.h"
#include "frc971/wpilib/imu_batch_generated.h"
#include "y2022/constants.h"

DEFINE_bool(ignore_accelerometer, false,
            "If set, ignores the accelerometer readings.");

namespace frc971::controls {

namespace {
constexpr double kG = 9.80665;
constexpr std::chrono::microseconds kNominalDt(500);

// Field position of the target (the 2022 target is conveniently in the middle
// of the field....).
constexpr double kVisionTargetX = 0.0;
constexpr double kVisionTargetY = 0.0;

// Minimum confidence to require to use a target match.
constexpr double kMinTargetEstimateConfidence = 0.75;

template <int N>
Eigen::Matrix<double, N, 1> MakeState(std::vector<double> values) {
  CHECK_EQ(static_cast<size_t>(N), values.size());
  Eigen::Matrix<double, N, 1> vector;
  for (int ii = 0; ii < N; ++ii) {
    vector(ii, 0) = values[ii];
  }
  return vector;
}
}  // namespace

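// Sets up the continuous-time dynamics for both filters: the A/B matrices for
// the encoder/gyro-driven model filter and for the accelerometer-driven
// filter, along with their process noise. The accel filter is LTI, so its
// discretization is precomputed once here.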
ModelBasedLocalizer::ModelBasedLocalizer(
    const control_loops::drivetrain::DrivetrainConfig<double> &dt_config)
    : dt_config_(dt_config),
      velocity_drivetrain_coefficients_(
          dt_config.make_hybrid_drivetrain_velocity_loop()
              .plant()
              .coefficients()),
      down_estimator_(dt_config) {
  statistics_.rejection_counts.fill(0);
  CHECK_EQ(branches_.capacity(),
           static_cast<size_t>(std::chrono::seconds(1) / kNominalDt /
                               kBranchPeriod));
  if (dt_config_.is_simulated) {
    down_estimator_.assume_perfect_gravity();
  }
  A_continuous_accel_.setZero();
  A_continuous_model_.setZero();
  B_continuous_accel_.setZero();
  B_continuous_model_.setZero();

  A_continuous_accel_(kX, kVelocityX) = 1.0;
  A_continuous_accel_(kY, kVelocityY) = 1.0;

  const double diameter = 2.0 * dt_config_.robot_radius;

  A_continuous_model_(kTheta, kLeftVelocity) = -1.0 / diameter;
  A_continuous_model_(kTheta, kRightVelocity) = 1.0 / diameter;
  A_continuous_model_(kLeftEncoder, kLeftVelocity) = 1.0;
  A_continuous_model_(kRightEncoder, kRightVelocity) = 1.0;
  const auto &vel_coefs = velocity_drivetrain_coefficients_;
  A_continuous_model_(kLeftVelocity, kLeftVelocity) =
      vel_coefs.A_continuous(0, 0);
  A_continuous_model_(kLeftVelocity, kRightVelocity) =
      vel_coefs.A_continuous(0, 1);
  A_continuous_model_(kRightVelocity, kLeftVelocity) =
      vel_coefs.A_continuous(1, 0);
  A_continuous_model_(kRightVelocity, kRightVelocity) =
      vel_coefs.A_continuous(1, 1);

  A_continuous_model_(kLeftVelocity, kLeftVoltageError) =
      1 * vel_coefs.B_continuous(0, 0);
  A_continuous_model_(kLeftVelocity, kRightVoltageError) =
      1 * vel_coefs.B_continuous(0, 1);
  A_continuous_model_(kRightVelocity, kLeftVoltageError) =
      1 * vel_coefs.B_continuous(1, 0);
  A_continuous_model_(kRightVelocity, kRightVoltageError) =
      1 * vel_coefs.B_continuous(1, 1);

  B_continuous_model_.block<1, 2>(kLeftVelocity, kLeftVoltage) =
      vel_coefs.B_continuous.row(0);
  B_continuous_model_.block<1, 2>(kRightVelocity, kLeftVoltage) =
      vel_coefs.B_continuous.row(1);

  B_continuous_accel_(kVelocityX, kAccelX) = 1.0;
  B_continuous_accel_(kVelocityY, kAccelY) = 1.0;
  B_continuous_accel_(kTheta, kThetaRate) = 1.0;

  Q_continuous_model_.setZero();
  Q_continuous_model_.diagonal() << 1e-2, 1e-2, 1e-8, 1e-2, 1e-0, 1e-0, 1e-2,
      1e-0, 1e-0;

  Q_continuous_accel_.setZero();
  Q_continuous_accel_.diagonal() << 1e-2, 1e-2, 1e-20, 1e-4, 1e-4;

  P_model_ = Q_continuous_model_ * aos::time::DurationInSeconds(kNominalDt);

  // We can precalculate the discretizations of the accel model because it is
  // actually LTI.

  DiscretizeQAFast(Q_continuous_accel_, A_continuous_accel_, kNominalDt,
                   &Q_discrete_accel_, &A_discrete_accel_);
  P_accel_ = Q_discrete_accel_;

  led_outputs_.fill(LedOutput::ON);
}

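// Returns the linearized continuous-time dynamics matrix for the model state,
// evaluated about the provided state (the X/Y rows depend on the current
// heading and velocity).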
Eigen::Matrix<double, ModelBasedLocalizer::kNModelStates,
              ModelBasedLocalizer::kNModelStates>
ModelBasedLocalizer::AModel(
    const ModelBasedLocalizer::ModelState &state) const {
  Eigen::Matrix<double, kNModelStates, kNModelStates> A = A_continuous_model_;
  const double theta = state(kTheta);
  const double stheta = std::sin(theta);
  const double ctheta = std::cos(theta);
  const double velocity = (state(kLeftVelocity) + state(kRightVelocity)) / 2.0;
  A(kX, kTheta) = -stheta * velocity;
  A(kX, kLeftVelocity) = ctheta / 2.0;
  A(kX, kRightVelocity) = ctheta / 2.0;
  A(kY, kTheta) = ctheta * velocity;
  A(kY, kLeftVelocity) = stheta / 2.0;
  A(kY, kRightVelocity) = stheta / 2.0;
  return A;
}

Eigen::Matrix<double, ModelBasedLocalizer::kNAccelStates,
              ModelBasedLocalizer::kNAccelStates>
ModelBasedLocalizer::AAccel() const {
  return A_continuous_accel_;
}

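// Continuous-time derivative of the model state. The X/Y rates are overridden
// to use the exact nonlinear kinematics rather than the linearization in
// AModel().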
ModelBasedLocalizer::ModelState ModelBasedLocalizer::DiffModel(
    const ModelBasedLocalizer::ModelState &state,
    const ModelBasedLocalizer::ModelInput &U) const {
  ModelState x_dot = AModel(state) * state + B_continuous_model_ * U;
  const double theta = state(kTheta);
  const double stheta = std::sin(theta);
  const double ctheta = std::cos(theta);
  const double velocity = (state(kLeftVelocity) + state(kRightVelocity)) / 2.0;
  x_dot(kX) = ctheta * velocity;
  x_dot(kY) = stheta * velocity;
  return x_dot;
}

ModelBasedLocalizer::AccelState ModelBasedLocalizer::DiffAccel(
    const ModelBasedLocalizer::AccelState &state,
    const ModelBasedLocalizer::AccelInput &U) const {
  return AAccel() * state + B_continuous_accel_ * U;
}

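// The two Update* methods integrate their respective states forward by dt
// using Runge-Kutta integration of the Diff* dynamics above.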
ModelBasedLocalizer::ModelState ModelBasedLocalizer::UpdateModel(
    const ModelBasedLocalizer::ModelState &model,
    const ModelBasedLocalizer::ModelInput &input,
    const aos::monotonic_clock::duration dt) const {
  return control_loops::RungeKutta(
      std::bind(&ModelBasedLocalizer::DiffModel, this, std::placeholders::_1,
                input),
      model, aos::time::DurationInSeconds(dt));
}

ModelBasedLocalizer::AccelState ModelBasedLocalizer::UpdateAccel(
    const ModelBasedLocalizer::AccelState &accel,
    const ModelBasedLocalizer::AccelInput &input,
    const aos::monotonic_clock::duration dt) const {
  return control_loops::RungeKutta(
      std::bind(&ModelBasedLocalizer::DiffAccel, this, std::placeholders::_1,
                input),
      accel, aos::time::DurationInSeconds(dt));
}

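// Converts a model state into the equivalent accel-based state. The lateral
// speed term accounts for the IMU sitting at a longitudinal offset
// (long_offset_) from the center of rotation.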
ModelBasedLocalizer::AccelState ModelBasedLocalizer::AccelStateForModelState(
    const ModelBasedLocalizer::ModelState &state) const {
  const double robot_speed =
      (state(kLeftVelocity) + state(kRightVelocity)) / 2.0;
  const double lat_speed = (AModel(state) * state)(kTheta)*long_offset_;
  const double velocity_x = std::cos(state(kTheta)) * robot_speed -
                            std::sin(state(kTheta)) * lat_speed;
  const double velocity_y = std::sin(state(kTheta)) * robot_speed +
                            std::cos(state(kTheta)) * lat_speed;
  return (AccelState() << state(0), state(1), state(2), velocity_x, velocity_y)
      .finished();
}

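// Converts an accel-based state back into a model state, passing the encoder
// readings through directly and splitting the robot speed and yaw rate into
// left/right wheel velocities. Voltage errors are reset to zero.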
ModelBasedLocalizer::ModelState ModelBasedLocalizer::ModelStateForAccelState(
    const ModelBasedLocalizer::AccelState &state,
    const Eigen::Vector2d &encoders, const double yaw_rate) const {
  const double robot_speed = state(kVelocityX) * std::cos(state(kTheta)) +
                             state(kVelocityY) * std::sin(state(kTheta));
  const double radius = dt_config_.robot_radius;
  const double left_velocity = robot_speed - yaw_rate * radius;
  const double right_velocity = robot_speed + yaw_rate * radius;
  return (ModelState() << state(0), state(1), state(2), encoders(0),
          left_velocity, 0.0, encoders(1), right_velocity, 0.0)
      .finished();
}

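// Returns a scalar residual summarizing how much the accel-based and
// model-based states disagree, combining normalized velocity, yaw-rate, and
// acceleration differences. The individual residual terms are stashed for the
// status message.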
double ModelBasedLocalizer::ModelDivergence(
    const ModelBasedLocalizer::CombinedState &state,
    const ModelBasedLocalizer::AccelInput &accel_inputs,
    const Eigen::Vector2d &filtered_accel,
    const ModelBasedLocalizer::ModelInput &model_inputs) {
  // Convert the model state into the acceleration-based state-space and check
  // the distance between the two (should really be a weighted norm, but all the
  // numbers are on ~the same scale).
  // TODO(james): Maybe weight lateral velocity divergence different than
  // longitudinal? Seems like we tend to get false-positives currently when in
  // sharp turns.
  // TODO(james): For off-center gyros, maybe reduce noise when turning?
  VLOG(2) << "divergence: "
          << (state.accel_state - AccelStateForModelState(state.model_state))
                 .transpose();
  const AccelState diff_accel = DiffAccel(state.accel_state, accel_inputs);
  const ModelState diff_model = DiffModel(state.model_state, model_inputs);
  const double model_lng_velocity =
      (state.model_state(kLeftVelocity) + state.model_state(kRightVelocity)) /
      2.0;
  const double model_lng_accel =
      (diff_model(kLeftVelocity) + diff_model(kRightVelocity)) / 2.0 -
      diff_model(kTheta) * diff_model(kTheta) * long_offset_;
  const double model_lat_accel = diff_model(kTheta) * model_lng_velocity;
  const Eigen::Vector2d robot_frame_accel(model_lng_accel, model_lat_accel);
  const Eigen::Vector2d model_accel =
      Eigen::AngleAxisd(state.model_state(kTheta), Eigen::Vector3d::UnitZ())
          .toRotationMatrix()
          .block<2, 2>(0, 0) *
      robot_frame_accel;
  const double accel_diff = (model_accel - filtered_accel).norm();
  const double theta_rate_diff =
      std::abs(diff_accel(kTheta) - diff_model(kTheta));

  const Eigen::Vector2d accel_vel = state.accel_state.bottomRows<2>();
  Eigen::Vector2d model_vel =
      AccelStateForModelState(state.model_state).bottomRows<2>();
  velocity_residual_ = (accel_vel - model_vel).norm() /
                       (1.0 + accel_vel.norm() + model_vel.norm());
  theta_rate_residual_ = theta_rate_diff;
  accel_residual_ = accel_diff / 4.0;
  return velocity_residual_ + theta_rate_residual_ + accel_residual_;
}

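// Propagates a single branch forward by dt: integrates both the accel-based
// and model-based states, decays the accel-frame velocities if the IMU has
// been stationary for a while, and applies the encoder/gyro Kalman correction
// to the model state.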
void ModelBasedLocalizer::UpdateState(
    CombinedState *state,
    const Eigen::Matrix<double, kNModelStates, kNModelOutputs> &K,
    const Eigen::Matrix<double, kNModelOutputs, 1> &Z,
    const Eigen::Matrix<double, kNModelOutputs, kNModelStates> &H,
    const AccelInput &accel_input, const ModelInput &model_input,
    aos::monotonic_clock::duration dt) {
  state->accel_state = UpdateAccel(state->accel_state, accel_input, dt);
  if (down_estimator_.consecutive_still() > 500.0) {
    state->accel_state(kVelocityX) *= 0.9;
    state->accel_state(kVelocityY) *= 0.9;
  }
  state->model_state = UpdateModel(state->model_state, model_input, dt);
  state->model_state += K * (Z - H * state->model_state);
}

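// Main per-IMU-sample update. Runs the down estimator, rotates the
// accelerometer reading into the field frame, predicts and corrects the
// model-based filter with the encoder/gyro measurements, propagates every
// stored branch, and then decides (with hysteresis) whether to trust the
// model-based or accel-based state.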
void ModelBasedLocalizer::HandleImu(aos::monotonic_clock::time_point t,
                                    const Eigen::Vector3d &gyro,
                                    const Eigen::Vector3d &accel,
                                    const Eigen::Vector2d encoders,
                                    const Eigen::Vector2d voltage) {
  VLOG(2) << t;
  if (t_ == aos::monotonic_clock::min_time) {
    t_ = t;
  }
  if (t_ + 2 * kNominalDt < t) {
    t_ = t;
    ++clock_resets_;
  }
  const aos::monotonic_clock::duration dt = t - t_;
  t_ = t;
  down_estimator_.Predict(gyro, accel, dt);
  // TODO(james): Should we prefer this or use the down-estimator corrected
  // version? Using the down estimator is more principled, but does create more
  // opportunities for subtle biases.
  const double yaw_rate = (dt_config_.imu_transform * gyro)(2);
  const double diameter = 2.0 * dt_config_.robot_radius;

  const Eigen::AngleAxis<double> orientation(
      Eigen::AngleAxis<double>(xytheta()(kTheta), Eigen::Vector3d::UnitZ()) *
      down_estimator_.X_hat());
  last_orientation_ = orientation;

  const Eigen::Vector3d absolute_accel =
      orientation * dt_config_.imu_transform * kG * accel;
  abs_accel_ = absolute_accel;

  VLOG(2) << "abs accel " << absolute_accel.transpose();
  VLOG(2) << "dt " << aos::time::DurationInSeconds(dt);

  // Update all the branched states.
  const AccelInput accel_input(absolute_accel.x(), absolute_accel.y(),
                               yaw_rate);
  const ModelInput model_input(voltage);

  const Eigen::Matrix<double, kNModelStates, kNModelStates> A_continuous =
      AModel(current_state_.model_state);

  Eigen::Matrix<double, kNModelStates, kNModelStates> A_discrete;
  Eigen::Matrix<double, kNModelStates, kNModelStates> Q_discrete;

  DiscretizeQAFast(Q_continuous_model_, A_continuous, dt, &Q_discrete,
                   &A_discrete);

  P_model_ = A_discrete * P_model_ * A_discrete.transpose() + Q_discrete;
  P_accel_ = A_discrete_accel_ * P_accel_ * A_discrete_accel_.transpose() +
             Q_discrete_accel_;

  Eigen::Matrix<double, kNModelOutputs, kNModelStates> H;
  Eigen::Matrix<double, kNModelOutputs, kNModelOutputs> R;
  {
    H.setZero();
    R.setZero();
    H(0, kLeftEncoder) = 1.0;
    H(1, kRightEncoder) = 1.0;
    H(2, kRightVelocity) = 1.0 / diameter;
    H(2, kLeftVelocity) = -1.0 / diameter;

    R.diagonal() << 1e-9, 1e-9, 1e-13;
  }

  const Eigen::Matrix<double, kNModelOutputs, 1> Z(encoders(0), encoders(1),
                                                   yaw_rate);

  if (branches_.empty()) {
    VLOG(2) << "Initializing";
    current_state_.model_state(kLeftEncoder) = encoders(0);
    current_state_.model_state(kRightEncoder) = encoders(1);
    current_state_.branch_time = t;
    branches_.Push(current_state_);
  }

  const Eigen::Matrix<double, kNModelStates, kNModelOutputs> K =
      P_model_ * H.transpose() * (H * P_model_ * H.transpose() + R).inverse();
  P_model_ = (Eigen::Matrix<double, kNModelStates, kNModelStates>::Identity() -
              K * H) *
             P_model_;
  VLOG(2) << "K\n" << K;
  VLOG(2) << "Z\n" << Z.transpose();

  for (CombinedState &state : branches_) {
    UpdateState(&state, K, Z, H, accel_input, model_input, dt);
  }
  UpdateState(&current_state_, K, Z, H, accel_input, model_input, dt);

  VLOG(2) << "oldest accel " << branches_[0].accel_state.transpose();
  VLOG(2) << "oldest accel diff "
          << DiffAccel(branches_[0].accel_state, accel_input).transpose();
  VLOG(2) << "oldest model " << branches_[0].model_state.transpose();

  // Determine whether to switch modes--if we are currently in model-based
  // mode, swap to accel-based if the two states have diverged meaningfully in
  // the oldest branch. If we are currently in accel-based mode, swap back to
  // model-based once the filtered residual has stayed below the model
  // threshold for long enough.
  filtered_residual_accel_ +=
      0.01 * (accel_input.topRows<2>() - filtered_residual_accel_);
  const double model_divergence =
      branches_.full() ? ModelDivergence(branches_[0], accel_input,
                                         filtered_residual_accel_, model_input)
                       : 0.0;
  filtered_residual_ +=
      (1.0 - std::exp(-aos::time::DurationInSeconds(kNominalDt) / 0.0095)) *
      (model_divergence - filtered_residual_);
  // TODO(james): Tune this more. Currently set to generally trust the model,
  // perhaps a bit too much.
  // When the residual exceeds the accel threshold, we start using the inertials
  // alone; when it drops back below the model threshold, we go back to being
  // model-based.
  constexpr double kUseAccelThreshold = 2.0;
  constexpr double kUseModelThreshold = 0.5;
  constexpr size_t kShareStates = kNModelStates;
  static_assert(kUseModelThreshold < kUseAccelThreshold);
  if (using_model_) {
    if (!FLAGS_ignore_accelerometer &&
        filtered_residual_ > kUseAccelThreshold) {
      hysteresis_count_++;
    } else {
      hysteresis_count_ = 0;
    }
    if (hysteresis_count_ > 0) {
      using_model_ = false;
      // Grab the accel-based state from back when we started diverging.
      // TODO(james): This creates a problematic selection bias, because
      // we will tend to bias towards deliberately out-of-tune measurements.
      current_state_.accel_state = branches_[0].accel_state;
      current_state_.model_state = branches_[0].model_state;
      current_state_.model_state = ModelStateForAccelState(
          current_state_.accel_state, encoders, yaw_rate);
    } else {
      VLOG(2) << "Normal branching";
      current_state_.accel_state =
          AccelStateForModelState(current_state_.model_state);
      current_state_.branch_time = t;
    }
    hysteresis_count_ = 0;
  } else {
    if (filtered_residual_ < kUseModelThreshold) {
      hysteresis_count_++;
    } else {
      hysteresis_count_ = 0;
    }
    if (hysteresis_count_ > 100) {
      using_model_ = true;
      // Grab the model-based state from back when we stopped diverging.
      current_state_.model_state.topRows<kShareStates>() =
          ModelStateForAccelState(branches_[0].accel_state, encoders, yaw_rate)
              .topRows<kShareStates>();
      current_state_.accel_state =
          AccelStateForModelState(current_state_.model_state);
    } else {
      // TODO(james): Why was I leaving the encoders/wheel velocities in place?
      current_state_.model_state = ModelStateForAccelState(
          current_state_.accel_state, encoders, yaw_rate);
      current_state_.branch_time = t;
    }
  }

  // Generate a new branch, with the accel state reset based on the model-based
  // state (really, just getting rid of the lateral velocity).
  // By resetting the accel state in the new branch, this tries to minimize the
  // odds of runaway lateral velocities. This doesn't help with runaway
  // longitudinal velocities, however.
  CombinedState new_branch = current_state_;
  new_branch.accel_state = AccelStateForModelState(new_branch.model_state);
  new_branch.accumulated_divergence = 0.0;

  ++branch_counter_;
  if (branch_counter_ % kBranchPeriod == 0) {
    branches_.Push(new_branch);
    old_positions_.Push(OldPosition{t, xytheta(), latest_turret_position_,
                                    latest_turret_velocity_});
    branch_counter_ = 0;
  }

  last_residual_ = model_divergence;

  VLOG(2) << "Using " << (using_model_ ? "model" : "accel");
  VLOG(2) << "Residual " << last_residual_;
  VLOG(2) << "Filtered Residual " << filtered_residual_;
  VLOG(2) << "buffer size " << branches_.size();
  VLOG(2) << "Model state " << current_state_.model_state.transpose();
  VLOG(2) << "Accel state " << current_state_.accel_state.transpose();
  VLOG(2) << "Accel state for model "
          << AccelStateForModelState(current_state_.model_state).transpose();
  VLOG(2) << "Input accel " << accel.transpose();
  VLOG(2) << "Input gyro " << gyro.transpose();
  VLOG(2) << "Input voltage " << voltage.transpose();
  VLOG(2) << "Input encoder " << encoders.transpose();
  VLOG(2) << "yaw rate " << yaw_rate;

  CHECK(std::isfinite(last_residual_));
}

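// Returns the buffered historical state whose sample time is closest to the
// requested time. Used to look up where the robot was when a camera image was
// actually captured.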
const ModelBasedLocalizer::OldPosition ModelBasedLocalizer::GetStateForTime(
    aos::monotonic_clock::time_point time) {
  if (old_positions_.empty()) {
    return OldPosition{};
  }

  aos::monotonic_clock::duration lowest_time_error =
      aos::monotonic_clock::duration::max();
  const OldPosition *best_match = nullptr;
  for (const OldPosition &sample : old_positions_) {
    const aos::monotonic_clock::duration time_error =
        std::chrono::abs(sample.sample_time - time);
    if (time_error < lowest_time_error) {
      lowest_time_error = time_error;
      best_match = &sample;
    }
  }
  return *best_match;
}

namespace {
// Converts a flatbuffer TransformationMatrix to an Eigen matrix. Technically,
// this should be able to do a single memcpy, but the extra verbosity here seems
// appropriate.
Eigen::Matrix<double, 4, 4> FlatbufferToTransformationMatrix(
    const frc971::vision::calibration::TransformationMatrix &flatbuffer) {
  CHECK_EQ(16u, CHECK_NOTNULL(flatbuffer.data())->size());
  Eigen::Matrix<double, 4, 4> result;
  result.setIdentity();
  for (int row = 0; row < 4; ++row) {
    for (int col = 0; col < 4; ++col) {
      result(row, col) = (*flatbuffer.data())[row * 4 + col];
    }
  }
  return result;
}

// Node names of the pis to listen for cameras from.
constexpr std::array<std::string_view, ModelBasedLocalizer::kNumPis> kPisToUse{
    "pi1", "pi2", "pi3", "pi4"};
}  // namespace

const Eigen::Matrix<double, 4, 4> ModelBasedLocalizer::CameraTransform(
    const OldPosition &state,
    const frc971::vision::calibration::CameraCalibration *calibration,
    std::optional<RejectionReason> *rejection_reason) const {
  CHECK_NOTNULL(rejection_reason);
  CHECK_NOTNULL(calibration);
  // Per the CameraCalibration specification, we can actually determine whether
  // the camera is the turret camera just from the presence of the
  // turret_extrinsics member.
  const bool is_turret = calibration->has_turret_extrinsics();
  // Ignore readings when the turret is spinning too fast, on the assumption
  // that the odds of screwing up the time compensation are higher.
  // Note that the current number here is chosen pretty arbitrarily--1 rad / sec
  // seems reasonable, but may be unnecessarily low or high.
  constexpr double kMaxTurretVelocity = 1.0;
  if (is_turret && std::abs(state.turret_velocity) > kMaxTurretVelocity &&
      !rejection_reason->has_value()) {
    *rejection_reason = RejectionReason::TURRET_TOO_FAST;
  }
  CHECK(calibration->has_fixed_extrinsics());
  const Eigen::Matrix<double, 4, 4> fixed_extrinsics =
      FlatbufferToTransformationMatrix(*calibration->fixed_extrinsics());

  // Calculate the pose of the camera relative to the robot origin.
  Eigen::Matrix<double, 4, 4> H_robot_camera = fixed_extrinsics;
  if (is_turret) {
    H_robot_camera =
        H_robot_camera *
        frc971::control_loops::TransformationMatrixForYaw<double>(
            state.turret_position) *
        FlatbufferToTransformationMatrix(*calibration->turret_extrinsics());
  }
  return H_robot_camera;
}

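// Given a historical robot state and a single TargetEstimate, computes the
// implied field X/Y position of the robot. Returns nullopt (with
// rejection_reason set) if the estimate cannot be used, and also outputs the
// measured camera pose in the field frame for debugging.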
const std::optional<Eigen::Vector2d>
ModelBasedLocalizer::CameraMeasuredRobotPosition(
    const OldPosition &state, const y2022::vision::TargetEstimate *target,
    std::optional<RejectionReason> *rejection_reason,
    Eigen::Matrix<double, 4, 4> *H_field_camera_measured) const {
  if (!target->has_camera_calibration()) {
    *rejection_reason = RejectionReason::NO_CALIBRATION;
    return std::nullopt;
  }
  const Eigen::Matrix<double, 4, 4> H_robot_camera =
      CameraTransform(state, target->camera_calibration(), rejection_reason);
  const control_loops::Pose robot_pose(
      {state.xytheta(0), state.xytheta(1), 0.0}, state.xytheta(2));
  const Eigen::Matrix<double, 4, 4> H_field_robot =
      robot_pose.AsTransformationMatrix();
  // Current estimated pose of the camera in the global frame.
  // Note that this is all really just an elaborate way of extracting the
  // current estimated camera yaw, and nothing else.
  const Eigen::Matrix<double, 4, 4> H_field_camera =
      H_field_robot * H_robot_camera;
  // Grab the implied yaw of the camera (the +Z axis is coming out of the front
  // of the cameras).
  const Eigen::Vector3d rotated_camera_z =
      H_field_camera.block<3, 3>(0, 0) * Eigen::Vector3d(0, 0, 1);
  const double camera_yaw =
      std::atan2(rotated_camera_z.y(), rotated_camera_z.x());
  // All right, now we need to use the heading and distance from the
  // TargetEstimate, plus the yaw embedded in the camera_pose, to determine what
  // the implied X/Y position of the robot is. To do this, we calculate the
  // heading/distance from the target to the robot. The distance is easy, since
  // that's the same as the distance from the robot to the target. The heading
  // isn't too hard, but is obnoxious to think about, since the heading from the
  // target to the robot is distinct from the heading from the robot to the
  // target.

  // Just to walk through examples to confirm that the below calculation is
  // correct:
  // * If yaw = 0, and angle_to_target = 0, we are at 180 deg relative to the
  //   target.
  // * If yaw = 90 deg, and angle_to_target = 0, we are at -90 deg relative to
  //   the target.
  // * If yaw = 0, and angle_to_target = 90 deg, we are at -90 deg relative to
  //   the target.
  const double heading_from_target =
      aos::math::NormalizeAngle(M_PI + camera_yaw + target->angle_to_target());
  const double distance_from_target = target->distance();
  // Extract the implied camera position on the field.
  *H_field_camera_measured = H_field_camera;
  // TODO(james): Are we going to need to evict the roll/pitch components of the
  // camera extrinsics this year as well?
  (*H_field_camera_measured)(0, 3) =
      distance_from_target * std::cos(heading_from_target) + kVisionTargetX;
  (*H_field_camera_measured)(1, 3) =
      distance_from_target * std::sin(heading_from_target) + kVisionTargetY;
  const Eigen::Matrix<double, 4, 4> H_field_robot_measured =
      *H_field_camera_measured * H_robot_camera.inverse();
  return H_field_robot_measured.block<2, 1>(0, 3);
}

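// Incorporates a single vision TargetEstimate: rejects low-confidence or
// otherwise unusable estimates, converts the estimate into an implied field
// X/Y position, and applies the resulting Kalman correction to both the
// model-based and accel-based states (on every branch as well as the current
// state). Also records debug information for the visualization message.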
void ModelBasedLocalizer::HandleImageMatch(
    aos::monotonic_clock::time_point sample_time,
    const y2022::vision::TargetEstimate *target, int camera_index) {
  std::optional<RejectionReason> rejection_reason;

  if (target->confidence() < kMinTargetEstimateConfidence) {
    rejection_reason = RejectionReason::LOW_CONFIDENCE;
    TallyRejection(rejection_reason.value());
    return;
  }

  const OldPosition &state = GetStateForTime(sample_time);
  Eigen::Matrix<double, 4, 4> H_field_camera_measured;
  const std::optional<Eigen::Vector2d> measured_robot_position =
      CameraMeasuredRobotPosition(state, target, &rejection_reason,
                                  &H_field_camera_measured);
  // Technically, rejection_reason should always be set if
  // measured_robot_position is nullopt, but in the future we may have more
  // recoverable rejection reasons that we wish to allow to propagate further
  // into the process.
  if (!measured_robot_position || rejection_reason.has_value()) {
    CHECK(rejection_reason.has_value());
    TallyRejection(rejection_reason.value());
    return;
  }

  // Next, go through and do the actual Kalman corrections for the x/y
  // measurement, for both the accel state and the model-based state.
  const Eigen::Matrix<double, kNModelStates, kNModelStates> A_continuous_model =
      AModel(current_state_.model_state);

  Eigen::Matrix<double, kNModelStates, kNModelStates> A_discrete_model;
  Eigen::Matrix<double, kNModelStates, kNModelStates> Q_discrete_model;

  DiscretizeQAFast(Q_continuous_model_, A_continuous_model, kNominalDt,
                   &Q_discrete_model, &A_discrete_model);

  Eigen::Matrix<double, 2, kNModelStates> H_model;
  H_model.setZero();
  Eigen::Matrix<double, 2, kNAccelStates> H_accel;
  H_accel.setZero();
  Eigen::Matrix<double, 2, 2> R;
  R.setZero();
  H_model(0, kX) = 1.0;
  H_model(1, kY) = 1.0;
  H_accel(0, kX) = 1.0;
  H_accel(1, kY) = 1.0;
  R.diagonal() << 1e-2, 1e-2;

  const Eigen::Matrix<double, kNModelStates, 2> K_model =
      P_model_ * H_model.transpose() *
      (H_model * P_model_ * H_model.transpose() + R).inverse();
  const Eigen::Matrix<double, kNAccelStates, 2> K_accel =
      P_accel_ * H_accel.transpose() *
      (H_accel * P_accel_ * H_accel.transpose() + R).inverse();
  P_model_ = (Eigen::Matrix<double, kNModelStates, kNModelStates>::Identity() -
              K_model * H_model) *
             P_model_;
  P_accel_ = (Eigen::Matrix<double, kNAccelStates, kNAccelStates>::Identity() -
              K_accel * H_accel) *
             P_accel_;
  // And now we have to correct *everything* on all the branches:
  for (CombinedState &state : branches_) {
    state.model_state += K_model * (measured_robot_position.value() -
                                    H_model * state.model_state);
    state.accel_state += K_accel * (measured_robot_position.value() -
                                    H_accel * state.accel_state);
  }
  current_state_.model_state +=
      K_model *
      (measured_robot_position.value() - H_model * current_state_.model_state);
  current_state_.accel_state +=
      K_accel *
      (measured_robot_position.value() - H_accel * current_state_.accel_state);

  statistics_.total_accepted++;
  statistics_.total_candidates++;

  const Eigen::Vector3d camera_z_in_field =
      H_field_camera_measured.block<3, 3>(0, 0) * Eigen::Vector3d::UnitZ();
  const double camera_yaw =
      std::atan2(camera_z_in_field.y(), camera_z_in_field.x());

  // TODO(milind): actually control this
  led_outputs_[camera_index] = LedOutput::ON;

  TargetEstimateDebugT debug;
  debug.camera = static_cast<uint8_t>(camera_index);
  debug.camera_x = H_field_camera_measured(0, 3);
  debug.camera_y = H_field_camera_measured(1, 3);
  debug.camera_theta = camera_yaw;
  debug.implied_robot_x = measured_robot_position.value().x();
  debug.implied_robot_y = measured_robot_position.value().y();
  debug.implied_robot_theta = xytheta()(2);
  debug.implied_turret_goal =
      aos::math::NormalizeAngle(camera_yaw + target->angle_to_target());
  debug.accepted = true;
  debug.image_age_sec = aos::time::DurationInSeconds(t_ - sample_time);
  CHECK_LT(image_debugs_.size(), kDebugBufferSize);
  image_debugs_.push_back(debug);
}

void ModelBasedLocalizer::HandleTurret(
    aos::monotonic_clock::time_point sample_time, double turret_position,
    double turret_velocity) {
  last_turret_update_ = sample_time;
  latest_turret_position_ = turret_position;
  latest_turret_velocity_ = turret_velocity;
}

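// Resets the localizer to the provided field position, preserving the current
// encoder values but zeroing velocities, voltage errors, and the accumulated
// residuals.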
void ModelBasedLocalizer::HandleReset(aos::monotonic_clock::time_point now,
                                      const Eigen::Vector3d &xytheta) {
  branches_.Reset();
  t_ = now;
  using_model_ = true;
  current_state_.model_state << xytheta(0), xytheta(1), xytheta(2),
      current_state_.model_state(kLeftEncoder), 0.0, 0.0,
      current_state_.model_state(kRightEncoder), 0.0, 0.0;
  current_state_.accel_state =
      AccelStateForModelState(current_state_.model_state);
  last_residual_ = 0.0;
  filtered_residual_ = 0.0;
  filtered_residual_accel_.setZero();
  abs_accel_.setZero();
}

flatbuffers::Offset<AccelBasedState> ModelBasedLocalizer::BuildAccelState(
    flatbuffers::FlatBufferBuilder *fbb, const AccelState &state) {
  AccelBasedState::Builder accel_state_builder(*fbb);
  accel_state_builder.add_x(state(kX));
  accel_state_builder.add_y(state(kY));
  accel_state_builder.add_theta(state(kTheta));
  accel_state_builder.add_velocity_x(state(kVelocityX));
  accel_state_builder.add_velocity_y(state(kVelocityY));
  return accel_state_builder.Finish();
}

flatbuffers::Offset<ModelBasedState> ModelBasedLocalizer::BuildModelState(
    flatbuffers::FlatBufferBuilder *fbb, const ModelState &state) {
  ModelBasedState::Builder model_state_builder(*fbb);
  model_state_builder.add_x(state(kX));
  model_state_builder.add_y(state(kY));
  model_state_builder.add_theta(state(kTheta));
  model_state_builder.add_left_encoder(state(kLeftEncoder));
  model_state_builder.add_left_velocity(state(kLeftVelocity));
  model_state_builder.add_left_voltage_error(state(kLeftVoltageError));
  model_state_builder.add_right_encoder(state(kRightEncoder));
  model_state_builder.add_right_velocity(state(kRightVelocity));
  model_state_builder.add_right_voltage_error(state(kRightVoltageError));
  return model_state_builder.Finish();
}

flatbuffers::Offset<CumulativeStatistics>
ModelBasedLocalizer::PopulateStatistics(flatbuffers::FlatBufferBuilder *fbb) {
  const auto rejections_offset = fbb->CreateVector(
      statistics_.rejection_counts.data(), statistics_.rejection_counts.size());

  CumulativeStatistics::Builder stats_builder(*fbb);
  stats_builder.add_total_accepted(statistics_.total_accepted);
  stats_builder.add_total_candidates(statistics_.total_candidates);
  stats_builder.add_rejection_reason_count(rejections_offset);
  return stats_builder.Finish();
}

flatbuffers::Offset<ModelBasedStatus> ModelBasedLocalizer::PopulateStatus(
    flatbuffers::FlatBufferBuilder *fbb) {
  const flatbuffers::Offset<CumulativeStatistics> stats_offset =
      PopulateStatistics(fbb);

  const flatbuffers::Offset<control_loops::drivetrain::DownEstimatorState>
      down_estimator_offset = down_estimator_.PopulateStatus(fbb, t_);

  const CombinedState &state = current_state_;

  const flatbuffers::Offset<ModelBasedState> model_state_offset =
      BuildModelState(fbb, state.model_state);

  const flatbuffers::Offset<AccelBasedState> accel_state_offset =
      BuildAccelState(fbb, state.accel_state);

  const flatbuffers::Offset<AccelBasedState> oldest_accel_state_offset =
      branches_.empty() ? flatbuffers::Offset<AccelBasedState>()
                        : BuildAccelState(fbb, branches_[0].accel_state);

  const flatbuffers::Offset<ModelBasedState> oldest_model_state_offset =
      branches_.empty() ? flatbuffers::Offset<ModelBasedState>()
                        : BuildModelState(fbb, branches_[0].model_state);

  ModelBasedStatus::Builder builder(*fbb);
  builder.add_accel_state(accel_state_offset);
  builder.add_oldest_accel_state(oldest_accel_state_offset);
  builder.add_oldest_model_state(oldest_model_state_offset);
  builder.add_model_state(model_state_offset);
  builder.add_using_model(using_model_);
  builder.add_residual(last_residual_);
  builder.add_filtered_residual(filtered_residual_);
  builder.add_velocity_residual(velocity_residual_);
  builder.add_accel_residual(accel_residual_);
  builder.add_theta_rate_residual(theta_rate_residual_);
  builder.add_down_estimator(down_estimator_offset);
  builder.add_x(xytheta()(0));
  builder.add_y(xytheta()(1));
  builder.add_theta(xytheta()(2));
  builder.add_implied_accel_x(abs_accel_(0));
  builder.add_implied_accel_y(abs_accel_(1));
  builder.add_implied_accel_z(abs_accel_(2));
  builder.add_clock_resets(clock_resets_);
  builder.add_statistics(stats_offset);
  return builder.Finish();
}

flatbuffers::Offset<LocalizerVisualization>
ModelBasedLocalizer::PopulateVisualization(
    flatbuffers::FlatBufferBuilder *fbb) {
  const flatbuffers::Offset<CumulativeStatistics> stats_offset =
      PopulateStatistics(fbb);

  aos::SizedArray<flatbuffers::Offset<TargetEstimateDebug>, kDebugBufferSize>
      debug_offsets;

  for (const TargetEstimateDebugT &debug : image_debugs_) {
    debug_offsets.push_back(PackTargetEstimateDebug(debug, fbb));
  }

  image_debugs_.clear();

  const flatbuffers::Offset<
      flatbuffers::Vector<flatbuffers::Offset<TargetEstimateDebug>>>
      debug_offset =
          fbb->CreateVector(debug_offsets.data(), debug_offsets.size());

  LocalizerVisualization::Builder builder(*fbb);
  builder.add_statistics(stats_offset);
  builder.add_targets(debug_offset);
  return builder.Finish();
}

void ModelBasedLocalizer::TallyRejection(const RejectionReason reason) {
  statistics_.total_candidates++;
  statistics_.rejection_counts[static_cast<size_t>(reason)]++;
  TargetEstimateDebugT debug;
  debug.accepted = false;
  debug.rejection_reason = reason;
  CHECK_LT(image_debugs_.size(), kDebugBufferSize);
  image_debugs_.push_back(debug);
}

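// Serializes a TargetEstimateDebugT. For rejected targets only the accepted
// flag and rejection reason are populated; for accepted targets the full
// struct is packed and the rejection_reason field is then cleared, since it is
// not meaningful for an accepted target.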
flatbuffers::Offset<TargetEstimateDebug>
ModelBasedLocalizer::PackTargetEstimateDebug(
    const TargetEstimateDebugT &debug, flatbuffers::FlatBufferBuilder *fbb) {
  if (!debug.accepted) {
    TargetEstimateDebug::Builder builder(*fbb);
    builder.add_accepted(debug.accepted);
    builder.add_rejection_reason(debug.rejection_reason);
    return builder.Finish();
  } else {
    flatbuffers::Offset<TargetEstimateDebug> offset =
        TargetEstimateDebug::Pack(*fbb, &debug);
    flatbuffers::GetMutableTemporaryPointer(*fbb, offset)
        ->clear_rejection_reason();
    return offset;
  }
}

namespace {
// Period at which the encoder readings from the IMU board wrap.
static double DrivetrainWrapPeriod() {
  return y2022::constants::Values::DrivetrainEncoderToMeters(1 << 16);
}
}  // namespace

EventLoopLocalizer::EventLoopLocalizer(
    aos::EventLoop *event_loop,
    const control_loops::drivetrain::DrivetrainConfig<double> &dt_config)
    : event_loop_(event_loop),
      dt_config_(dt_config),
      model_based_(dt_config),
      status_sender_(event_loop_->MakeSender<LocalizerStatus>("/localizer")),
      output_sender_(event_loop_->MakeSender<LocalizerOutput>("/localizer")),
      visualization_sender_(
          event_loop_->MakeSender<LocalizerVisualization>("/localizer")),
      output_fetcher_(
          event_loop_->MakeFetcher<frc971::control_loops::drivetrain::Output>(
              "/drivetrain")),
      clock_offset_fetcher_(
          event_loop_->MakeFetcher<aos::message_bridge::ServerStatistics>(
              "/aos")),
      superstructure_fetcher_(
          event_loop_
              ->MakeFetcher<y2022::control_loops::superstructure::Status>(
                  "/superstructure")),
      zeroer_(zeroing::ImuZeroer::FaultBehavior::kTemporary),
      left_encoder_(-DrivetrainWrapPeriod() / 2.0, DrivetrainWrapPeriod()),
      right_encoder_(-DrivetrainWrapPeriod() / 2.0, DrivetrainWrapPeriod()) {
  event_loop_->MakeWatcher(
      "/drivetrain",
      [this](
          const frc971::control_loops::drivetrain::LocalizerControl &control) {
        const double theta = control.keep_current_theta()
                                 ? model_based_.xytheta()(2)
                                 : control.theta();
        model_based_.HandleReset(event_loop_->monotonic_now(),
                                 {control.x(), control.y(), theta});
      });
  aos::TimerHandler *superstructure_timer = event_loop_->AddTimer([this]() {
    if (superstructure_fetcher_.Fetch()) {
      const y2022::control_loops::superstructure::Status &status =
          *superstructure_fetcher_.get();
      if (!status.has_turret()) {
        return;
      }
      CHECK(status.has_turret());
      model_based_.HandleTurret(
          superstructure_fetcher_.context().monotonic_event_time,
          status.turret()->position(), status.turret()->velocity());
    }
  });
  event_loop_->OnRun([this, superstructure_timer]() {
    superstructure_timer->Setup(event_loop_->monotonic_now(),
                                std::chrono::milliseconds(20));
  });

  for (size_t camera_index = 0; camera_index < kPisToUse.size();
       ++camera_index) {
    CHECK_LT(camera_index, target_estimate_fetchers_.size());
    target_estimate_fetchers_[camera_index] =
        event_loop_->MakeFetcher<y2022::vision::TargetEstimate>(
            absl::StrCat("/", kPisToUse[camera_index], "/camera"));
  }
  aos::TimerHandler *estimate_timer = event_loop_->AddTimer([this]() {
    for (size_t camera_index = 0; camera_index < kPisToUse.size();
         ++camera_index) {
      if (model_based_.NumQueuedImageDebugs() ==
              ModelBasedLocalizer::kDebugBufferSize ||
          (last_visualization_send_ + kMinVisualizationPeriod <
           event_loop_->monotonic_now())) {
        auto builder = visualization_sender_.MakeBuilder();
        visualization_sender_.CheckOk(
            builder.Send(model_based_.PopulateVisualization(builder.fbb())));
      }
      if (target_estimate_fetchers_[camera_index].Fetch()) {
        const std::optional<aos::monotonic_clock::duration> monotonic_offset =
            ClockOffset(kPisToUse[camera_index]);
        if (!monotonic_offset.has_value()) {
          continue;
        }
        // TODO(james): Get timestamp from message contents.
        aos::monotonic_clock::time_point capture_time(
            target_estimate_fetchers_[camera_index]
                .context()
                .monotonic_remote_time -
            monotonic_offset.value());
        if (capture_time > target_estimate_fetchers_[camera_index]
                               .context()
                               .monotonic_event_time) {
          model_based_.TallyRejection(RejectionReason::IMAGE_FROM_FUTURE);
          continue;
        }
        model_based_.HandleImageMatch(
            capture_time, target_estimate_fetchers_[camera_index].get(),
            camera_index);
      }
    }
  });
  event_loop_->OnRun([this, estimate_timer]() {
    estimate_timer->Setup(event_loop_->monotonic_now(),
                          std::chrono::milliseconds(100));
  });
  event_loop_->MakeWatcher(
      "/localizer", [this](const frc971::IMUValuesBatch &values) {
        CHECK(values.has_readings());
        output_fetcher_.Fetch();
        for (const IMUValues *value : *values.readings()) {
          zeroer_.InsertAndProcessMeasurement(*value);
          const Eigen::Vector2d encoders{
              left_encoder_.Unwrap(value->left_encoder()),
              right_encoder_.Unwrap(value->right_encoder())};
          {
            const aos::monotonic_clock::time_point pico_timestamp{
                std::chrono::microseconds(value->pico_timestamp_us())};
            // TODO(james): If we get large enough drift off of the pico,
            // actually do something about it.
            if (!pico_offset_.has_value()) {
              pico_offset_ =
                  event_loop_->context().monotonic_event_time - pico_timestamp;
              last_pico_timestamp_ = pico_timestamp;
            }
            if (pico_timestamp < last_pico_timestamp_) {
              pico_offset_.value() += std::chrono::microseconds(1ULL << 32);
            }
            const aos::monotonic_clock::time_point sample_timestamp =
                pico_offset_.value() + pico_timestamp;
            pico_offset_error_ =
                event_loop_->context().monotonic_event_time - sample_timestamp;
            const bool disabled =
                (output_fetcher_.get() == nullptr) ||
                (output_fetcher_.context().monotonic_event_time +
                     std::chrono::milliseconds(10) <
                 event_loop_->context().monotonic_event_time);
            const bool zeroed = zeroer_.Zeroed();
            model_based_.HandleImu(
                sample_timestamp,
                zeroed ? zeroer_.ZeroedGyro().value() : Eigen::Vector3d::Zero(),
                zeroed ? zeroer_.ZeroedAccel().value()
                       : dt_config_.imu_transform.transpose() *
                             Eigen::Vector3d::UnitZ(),
                encoders,
                disabled ? Eigen::Vector2d::Zero()
                         : Eigen::Vector2d{output_fetcher_->left_voltage(),
                                           output_fetcher_->right_voltage()});
            last_pico_timestamp_ = pico_timestamp;
          }
          {
            auto builder = status_sender_.MakeBuilder();
            const flatbuffers::Offset<ModelBasedStatus> model_based_status =
                model_based_.PopulateStatus(builder.fbb());
            const flatbuffers::Offset<control_loops::drivetrain::ImuZeroerState>
                zeroer_status = zeroer_.PopulateStatus(builder.fbb());
            LocalizerStatus::Builder status_builder =
                builder.MakeBuilder<LocalizerStatus>();
            status_builder.add_model_based(model_based_status);
            status_builder.add_zeroed(zeroer_.Zeroed());
            status_builder.add_faulted_zero(zeroer_.Faulted());
            status_builder.add_zeroing(zeroer_status);
            status_builder.add_left_encoder(encoders(0));
            status_builder.add_right_encoder(encoders(1));
            if (pico_offset_.has_value()) {
              status_builder.add_pico_offset_ns(pico_offset_.value().count());
              status_builder.add_pico_offset_error_ns(
                  pico_offset_error_.count());
            }
            builder.CheckOk(builder.Send(status_builder.Finish()));
          }
          if (last_output_send_ + std::chrono::milliseconds(5) <
              event_loop_->context().monotonic_event_time) {
            auto builder = output_sender_.MakeBuilder();

            const auto led_outputs_offset =
                builder.fbb()->CreateVector(model_based_.led_outputs().data(),
                                            model_based_.led_outputs().size());

            LocalizerOutput::Builder output_builder =
                builder.MakeBuilder<LocalizerOutput>();
            // TODO(james): Should we bother to try to estimate time offsets
            // for the pico?
            output_builder.add_monotonic_timestamp_ns(
                value->monotonic_timestamp_ns());
            output_builder.add_x(model_based_.xytheta()(0));
            output_builder.add_y(model_based_.xytheta()(1));
            output_builder.add_theta(model_based_.xytheta()(2));
            output_builder.add_zeroed(zeroer_.Zeroed());
            const Eigen::Quaterniond &orientation = model_based_.orientation();
            Quaternion quaternion;
            quaternion.mutate_x(orientation.x());
            quaternion.mutate_y(orientation.y());
            quaternion.mutate_z(orientation.z());
            quaternion.mutate_w(orientation.w());
            output_builder.add_orientation(&quaternion);
            output_builder.add_led_outputs(led_outputs_offset);
            builder.CheckOk(builder.Send(output_builder.Finish()));
            last_output_send_ = event_loop_->monotonic_now();
          }
        }
      });
}

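// Looks up the estimated monotonic clock offset between this node and the
// given pi from the message_bridge ServerStatistics. Returns nullopt (and
// tallies a rejection) if the pi is not currently connected.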
std::optional<aos::monotonic_clock::duration> EventLoopLocalizer::ClockOffset(
    std::string_view pi) {
  std::optional<aos::monotonic_clock::duration> monotonic_offset;
  clock_offset_fetcher_.Fetch();
  if (clock_offset_fetcher_.get() != nullptr) {
    for (const auto connection : *clock_offset_fetcher_->connections()) {
      if (connection->has_node() && connection->node()->has_name() &&
          connection->node()->name()->string_view() == pi) {
        if (connection->has_monotonic_offset()) {
          monotonic_offset =
              std::chrono::nanoseconds(connection->monotonic_offset());
        } else {
          // If we don't have a monotonic offset, that means we aren't
          // connected.
          model_based_.TallyRejection(
              RejectionReason::MESSAGE_BRIDGE_DISCONNECTED);
          return std::nullopt;
        }
        break;
      }
    }
  }
  CHECK(monotonic_offset.has_value());
  return monotonic_offset;
}

}  // namespace frc971::controls