#include "y2022/localizer/localizer.h"

#include "aos/json_to_flatbuffer.h"
#include "frc971/control_loops/c2d.h"
#include "frc971/wpilib/imu_batch_generated.h"
#include "y2022/constants.h"

DEFINE_bool(ignore_accelerometer, false,
            "If set, ignores the accelerometer readings.");

namespace frc971::controls {

namespace {
constexpr double kG = 9.80665;
static constexpr std::chrono::microseconds kNominalDt = ImuWatcher::kNominalDt;
// Field position of the target (the 2022 target is conveniently in the middle
// of the field....).
constexpr double kVisionTargetX = 0.0;
constexpr double kVisionTargetY = 0.0;

// Minimum confidence to require to use a target match.
constexpr double kMinTargetEstimateConfidence = 0.75;

template <int N>
Eigen::Matrix<double, N, 1> MakeState(std::vector<double> values) {
  CHECK_EQ(static_cast<size_t>(N), values.size());
  Eigen::Matrix<double, N, 1> vector;
  for (int ii = 0; ii < N; ++ii) {
    vector(ii, 0) = values[ii];
  }
  return vector;
}
}  // namespace

ModelBasedLocalizer::ModelBasedLocalizer(
    const control_loops::drivetrain::DrivetrainConfig<double> &dt_config)
    : dt_config_(dt_config),
      velocity_drivetrain_coefficients_(
          dt_config.make_hybrid_drivetrain_velocity_loop()
              .plant()
              .coefficients()),
      down_estimator_(dt_config) {
  statistics_.rejection_counts.fill(0);
  CHECK_EQ(branches_.capacity(),
           static_cast<size_t>(std::chrono::seconds(1) / kNominalDt /
                               kBranchPeriod));
  if (dt_config_.is_simulated) {
    down_estimator_.assume_perfect_gravity();
  }
  A_continuous_accel_.setZero();
  A_continuous_model_.setZero();
  B_continuous_accel_.setZero();
  B_continuous_model_.setZero();

  A_continuous_accel_(kX, kVelocityX) = 1.0;
  A_continuous_accel_(kY, kVelocityY) = 1.0;

  const double diameter = 2.0 * dt_config_.robot_radius;

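  // Model-based dynamics: the yaw rate is the difference of the wheel
  // velocities over the track width, and the encoder states integrate the
  // corresponding wheel velocities.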
  A_continuous_model_(kTheta, kLeftVelocity) = -1.0 / diameter;
  A_continuous_model_(kTheta, kRightVelocity) = 1.0 / diameter;
  A_continuous_model_(kLeftEncoder, kLeftVelocity) = 1.0;
  A_continuous_model_(kRightEncoder, kRightVelocity) = 1.0;
  const auto &vel_coefs = velocity_drivetrain_coefficients_;
  A_continuous_model_(kLeftVelocity, kLeftVelocity) =
      vel_coefs.A_continuous(0, 0);
  A_continuous_model_(kLeftVelocity, kRightVelocity) =
      vel_coefs.A_continuous(0, 1);
  A_continuous_model_(kRightVelocity, kLeftVelocity) =
      vel_coefs.A_continuous(1, 0);
  A_continuous_model_(kRightVelocity, kRightVelocity) =
      vel_coefs.A_continuous(1, 1);

  A_continuous_model_(kLeftVelocity, kLeftVoltageError) =
      1 * vel_coefs.B_continuous(0, 0);
  A_continuous_model_(kLeftVelocity, kRightVoltageError) =
      1 * vel_coefs.B_continuous(0, 1);
  A_continuous_model_(kRightVelocity, kLeftVoltageError) =
      1 * vel_coefs.B_continuous(1, 0);
  A_continuous_model_(kRightVelocity, kRightVoltageError) =
      1 * vel_coefs.B_continuous(1, 1);

  B_continuous_model_.block<1, 2>(kLeftVelocity, kLeftVoltage) =
      vel_coefs.B_continuous.row(0);
  B_continuous_model_.block<1, 2>(kRightVelocity, kLeftVoltage) =
      vel_coefs.B_continuous.row(1);

  B_continuous_accel_(kVelocityX, kAccelX) = 1.0;
  B_continuous_accel_(kVelocityY, kAccelY) = 1.0;
  B_continuous_accel_(kTheta, kThetaRate) = 1.0;

  Q_continuous_model_.setZero();
  Q_continuous_model_.diagonal() << 1e-2, 1e-2, 1e-8, 1e-2, 1e-0, 1e-0, 1e-2,
      1e-0, 1e-0;

  Q_continuous_accel_.setZero();
  Q_continuous_accel_.diagonal() << 1e-2, 1e-2, 1e-20, 1e-4, 1e-4;

  P_model_ = Q_continuous_model_ * aos::time::DurationInSeconds(kNominalDt);

  // We can precalculate the discretizations of the accel model because it is
  // actually LTI.

  DiscretizeQAFast(Q_continuous_accel_, A_continuous_accel_, kNominalDt,
                   &Q_discrete_accel_, &A_discrete_accel_);
  P_accel_ = Q_discrete_accel_;

  led_outputs_.fill(LedOutput::ON);
}

Eigen::Matrix<double, ModelBasedLocalizer::kNModelStates,
              ModelBasedLocalizer::kNModelStates>
ModelBasedLocalizer::AModel(
    const ModelBasedLocalizer::ModelState &state) const {
  Eigen::Matrix<double, kNModelStates, kNModelStates> A = A_continuous_model_;
  const double theta = state(kTheta);
  const double stheta = std::sin(theta);
  const double ctheta = std::cos(theta);
  const double velocity = (state(kLeftVelocity) + state(kRightVelocity)) / 2.0;
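  // Linearize the nonlinear kinematics (x_dot = v * cos(theta),
  // y_dot = v * sin(theta), with v the average of the wheel velocities) about
  // the provided state.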
  A(kX, kTheta) = -stheta * velocity;
  A(kX, kLeftVelocity) = ctheta / 2.0;
  A(kX, kRightVelocity) = ctheta / 2.0;
  A(kY, kTheta) = ctheta * velocity;
  A(kY, kLeftVelocity) = stheta / 2.0;
  A(kY, kRightVelocity) = stheta / 2.0;
  return A;
}

Eigen::Matrix<double, ModelBasedLocalizer::kNAccelStates,
              ModelBasedLocalizer::kNAccelStates>
ModelBasedLocalizer::AAccel() const {
  return A_continuous_accel_;
}

ModelBasedLocalizer::ModelState ModelBasedLocalizer::DiffModel(
    const ModelBasedLocalizer::ModelState &state,
    const ModelBasedLocalizer::ModelInput &U) const {
  ModelState x_dot = AModel(state) * state + B_continuous_model_ * U;
  const double theta = state(kTheta);
  const double stheta = std::sin(theta);
  const double ctheta = std::cos(theta);
  const double velocity = (state(kLeftVelocity) + state(kRightVelocity)) / 2.0;
  x_dot(kX) = ctheta * velocity;
  x_dot(kY) = stheta * velocity;
  return x_dot;
}

ModelBasedLocalizer::AccelState ModelBasedLocalizer::DiffAccel(
    const ModelBasedLocalizer::AccelState &state,
    const ModelBasedLocalizer::AccelInput &U) const {
  return AAccel() * state + B_continuous_accel_ * U;
}

ModelBasedLocalizer::ModelState ModelBasedLocalizer::UpdateModel(
    const ModelBasedLocalizer::ModelState &model,
    const ModelBasedLocalizer::ModelInput &input,
    const aos::monotonic_clock::duration dt) const {
  return control_loops::RungeKutta(
      std::bind(&ModelBasedLocalizer::DiffModel, this, std::placeholders::_1,
                input),
      model, aos::time::DurationInSeconds(dt));
}

ModelBasedLocalizer::AccelState ModelBasedLocalizer::UpdateAccel(
    const ModelBasedLocalizer::AccelState &accel,
    const ModelBasedLocalizer::AccelInput &input,
    const aos::monotonic_clock::duration dt) const {
  return control_loops::RungeKutta(
      std::bind(&ModelBasedLocalizer::DiffAccel, this, std::placeholders::_1,
                input),
      accel, aos::time::DurationInSeconds(dt));
}

ModelBasedLocalizer::AccelState ModelBasedLocalizer::AccelStateForModelState(
    const ModelBasedLocalizer::ModelState &state) const {
  const double robot_speed =
      (state(kLeftVelocity) + state(kRightVelocity)) / 2.0;
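  // The yaw rate implied by the model, multiplied by the longitudinal offset,
  // gives the lateral velocity seen at the IMU (assuming long_offset_ is the
  // IMU's offset along the robot's x axis).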
  const double lat_speed = (AModel(state) * state)(kTheta)*long_offset_;
  const double velocity_x = std::cos(state(kTheta)) * robot_speed -
                            std::sin(state(kTheta)) * lat_speed;
  const double velocity_y = std::sin(state(kTheta)) * robot_speed +
                            std::cos(state(kTheta)) * lat_speed;
  return (AccelState() << state(0), state(1), state(2), velocity_x, velocity_y)
      .finished();
}

ModelBasedLocalizer::ModelState ModelBasedLocalizer::ModelStateForAccelState(
    const ModelBasedLocalizer::AccelState &state,
    const Eigen::Vector2d &encoders, const double yaw_rate) const {
  const double robot_speed = state(kVelocityX) * std::cos(state(kTheta)) +
                             state(kVelocityY) * std::sin(state(kTheta));
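  // Differential-drive inverse kinematics: each wheel moves at the robot's
  // forward speed plus or minus the yaw rate times half the track width.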
  const double radius = dt_config_.robot_radius;
  const double left_velocity = robot_speed - yaw_rate * radius;
  const double right_velocity = robot_speed + yaw_rate * radius;
  return (ModelState() << state(0), state(1), state(2), encoders(0),
          left_velocity, 0.0, encoders(1), right_velocity, 0.0)
      .finished();
}

double ModelBasedLocalizer::ModelDivergence(
    const ModelBasedLocalizer::CombinedState &state,
    const ModelBasedLocalizer::AccelInput &accel_inputs,
    const Eigen::Vector2d &filtered_accel,
    const ModelBasedLocalizer::ModelInput &model_inputs) {
  // Convert the model state into the acceleration-based state-space and check
  // the distance between the two (should really be a weighted norm, but all
  // the numbers are on ~the same scale).
  // TODO(james): Maybe weight lateral velocity divergence differently than
  // longitudinal? Seems like we tend to get false-positives currently when in
  // sharp turns.
  // TODO(james): For off-center gyros, maybe reduce noise when turning?
  VLOG(2) << "divergence: "
          << (state.accel_state - AccelStateForModelState(state.model_state))
                 .transpose();
  const AccelState diff_accel = DiffAccel(state.accel_state, accel_inputs);
  const ModelState diff_model = DiffModel(state.model_state, model_inputs);
  const double model_lng_velocity =
      (state.model_state(kLeftVelocity) + state.model_state(kRightVelocity)) /
      2.0;
  const double model_lng_accel =
      (diff_model(kLeftVelocity) + diff_model(kRightVelocity)) / 2.0 -
      diff_model(kTheta) * diff_model(kTheta) * long_offset_;
  const double model_lat_accel = diff_model(kTheta) * model_lng_velocity;
  const Eigen::Vector2d robot_frame_accel(model_lng_accel, model_lat_accel);
  const Eigen::Vector2d model_accel =
      Eigen::AngleAxisd(state.model_state(kTheta), Eigen::Vector3d::UnitZ())
          .toRotationMatrix()
          .block<2, 2>(0, 0) *
      robot_frame_accel;
  const double accel_diff = (model_accel - filtered_accel).norm();
  const double theta_rate_diff =
      std::abs(diff_accel(kTheta) - diff_model(kTheta));

  const Eigen::Vector2d accel_vel = state.accel_state.bottomRows<2>();
  Eigen::Vector2d model_vel =
      AccelStateForModelState(state.model_state).bottomRows<2>();
  velocity_residual_ = (accel_vel - model_vel).norm() /
                       (1.0 + accel_vel.norm() + model_vel.norm());
  theta_rate_residual_ = theta_rate_diff;
  accel_residual_ = accel_diff / 4.0;
  return velocity_residual_ + theta_rate_residual_ + accel_residual_;
}

void ModelBasedLocalizer::UpdateState(
    CombinedState *state,
    const Eigen::Matrix<double, kNModelStates, kNModelOutputs> &K,
    const Eigen::Matrix<double, kNModelOutputs, 1> &Z,
    const Eigen::Matrix<double, kNModelOutputs, kNModelStates> &H,
    const AccelInput &accel_input, const ModelInput &model_input,
    aos::monotonic_clock::duration dt) {
  state->accel_state = UpdateAccel(state->accel_state, accel_input, dt);
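  // If the down estimator has seen a long stretch of stillness, bleed off any
  // velocity that the accel-based state has accumulated (presumably to limit
  // integration drift while stationary).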
  if (down_estimator_.consecutive_still() > 500.0) {
    state->accel_state(kVelocityX) *= 0.9;
    state->accel_state(kVelocityY) *= 0.9;
  }
  state->model_state = UpdateModel(state->model_state, model_input, dt);
  state->model_state += K * (Z - H * state->model_state);
}

void ModelBasedLocalizer::HandleImu(
    aos::monotonic_clock::time_point t, const Eigen::Vector3d &gyro,
    const Eigen::Vector3d &accel, const std::optional<Eigen::Vector2d> encoders,
    const Eigen::Vector2d voltage) {
  VLOG(2) << t;
  if (t_ == aos::monotonic_clock::min_time) {
    t_ = t;
  }
  if (t_ + 10 * kNominalDt < t) {
    t_ = t;
    ++clock_resets_;
  }
  const aos::monotonic_clock::duration dt = t - t_;
  t_ = t;
  down_estimator_.Predict(gyro, accel, dt);
  // TODO(james): Should we prefer this or use the down-estimator corrected
  // version? Using the down estimator is more principled, but does create more
  // opportunities for subtle biases.
  const double yaw_rate = (dt_config_.imu_transform * gyro)(2);
  const double diameter = 2.0 * dt_config_.robot_radius;

  const Eigen::AngleAxis<double> orientation(
      Eigen::AngleAxis<double>(xytheta()(kTheta), Eigen::Vector3d::UnitZ()) *
      down_estimator_.X_hat());
  last_orientation_ = orientation;

  const Eigen::Vector3d absolute_accel =
      orientation * dt_config_.imu_transform * kG * accel;
  abs_accel_ = absolute_accel;

  VLOG(2) << "abs accel " << absolute_accel.transpose();
  VLOG(2) << "dt " << aos::time::DurationInSeconds(dt);

  // Update all the branched states.
  const AccelInput accel_input(absolute_accel.x(), absolute_accel.y(),
                               yaw_rate);
  const ModelInput model_input(voltage);

  const Eigen::Matrix<double, kNModelStates, kNModelStates> A_continuous =
      AModel(current_state_.model_state);

  Eigen::Matrix<double, kNModelStates, kNModelStates> A_discrete;
  Eigen::Matrix<double, kNModelStates, kNModelStates> Q_discrete;

  DiscretizeQAFast(Q_continuous_model_, A_continuous, dt, &Q_discrete,
                   &A_discrete);

  P_model_ = A_discrete * P_model_ * A_discrete.transpose() + Q_discrete;
  P_accel_ = A_discrete_accel_ * P_accel_ * A_discrete_accel_.transpose() +
             Q_discrete_accel_;

  Eigen::Matrix<double, kNModelOutputs, kNModelStates> H;
  Eigen::Matrix<double, kNModelOutputs, kNModelOutputs> R;
  {
    H.setZero();
    R.setZero();
    H(0, kLeftEncoder) = 1.0;
    H(1, kRightEncoder) = 1.0;
    H(2, kRightVelocity) = 1.0 / diameter;
    H(2, kLeftVelocity) = -1.0 / diameter;

    R.diagonal() << 1e-9, 1e-9, 1e-13;
  }

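  // If the encoder values are unavailable this cycle, substitute the current
  // model estimates for the measurement so the filter coasts on the model
  // rather than treating the missing data as zero.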
  const Eigen::Matrix<double, kNModelOutputs, 1> Z =
      encoders.has_value()
          ? Eigen::Vector3d(encoders.value()(0), encoders.value()(1), yaw_rate)
          : Eigen::Vector3d(current_state_.model_state(kLeftEncoder),
                            current_state_.model_state(kRightEncoder),
                            yaw_rate);

  if (branches_.empty()) {
    VLOG(2) << "Initializing";
    current_state_.model_state(kLeftEncoder) = Z(0);
    current_state_.model_state(kRightEncoder) = Z(1);
    current_state_.branch_time = t;
    branches_.Push(current_state_);
  }

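  // Standard Kalman measurement update for the model filter:
  // K = P H^T (H P H^T + R)^-1 and P = (I - K H) P.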
  const Eigen::Matrix<double, kNModelStates, kNModelOutputs> K =
      P_model_ * H.transpose() * (H * P_model_ * H.transpose() + R).inverse();
  P_model_ = (Eigen::Matrix<double, kNModelStates, kNModelStates>::Identity() -
              K * H) *
             P_model_;
  VLOG(2) << "K\n" << K;
  VLOG(2) << "Z\n" << Z.transpose();

  for (CombinedState &state : branches_) {
    UpdateState(&state, K, Z, H, accel_input, model_input, dt);
  }
  UpdateState(&current_state_, K, Z, H, accel_input, model_input, dt);

352 VLOG(2) << "oildest accel " << branches_[0].accel_state.transpose();
353 VLOG(2) << "oildest accel diff "
354 << DiffAccel(branches_[0].accel_state, accel_input).transpose();
355 VLOG(2) << "oildest model " << branches_[0].model_state.transpose();
356
357 // Determine whether to switch modes--if we are currently in model-based mode,
358 // swap to accel-based if the two states have divergeed meaningfully in the
359 // oldest branch. If we are currently in accel-based, then swap back to model
360 // if the oldest model branch matches has matched the
  filtered_residual_accel_ +=
      0.01 * (accel_input.topRows<2>() - filtered_residual_accel_);
  const double model_divergence =
      branches_.full() ? ModelDivergence(branches_[0], accel_input,
                                         filtered_residual_accel_, model_input)
                       : 0.0;
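  // Low-pass filter the divergence metric (roughly a 10 ms time constant)
  // before applying the hysteresis thresholds below.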
  filtered_residual_ +=
      (1.0 - std::exp(-aos::time::DurationInSeconds(kNominalDt) / 0.0095)) *
      (model_divergence - filtered_residual_);
  // TODO(james): Tune this more. Currently set to generally trust the model,
  // perhaps a bit too much.
  // When the residual exceeds the accel threshold, we start using the
  // inertials alone; when it drops back below the model threshold, we go back
  // to being model-based.
  constexpr double kUseAccelThreshold = 2.0;
  constexpr double kUseModelThreshold = 0.5;
  constexpr size_t kShareStates = kNModelStates;
  static_assert(kUseModelThreshold < kUseAccelThreshold);
  if (using_model_) {
    if (!FLAGS_ignore_accelerometer &&
        filtered_residual_ > kUseAccelThreshold) {
      hysteresis_count_++;
    } else {
      hysteresis_count_ = 0;
    }
    if (hysteresis_count_ > 0) {
      using_model_ = false;
      // Grab the accel-based state from back when we started diverging.
      // TODO(james): This creates a problematic selection bias, because
      // we will tend to bias towards deliberately out-of-tune measurements.
      current_state_.accel_state = branches_[0].accel_state;
      current_state_.model_state = branches_[0].model_state;
      current_state_.model_state = ModelStateForAccelState(
          current_state_.accel_state, Z.topRows<2>(), yaw_rate);
    } else {
      VLOG(2) << "Normal branching";
      current_state_.accel_state =
          AccelStateForModelState(current_state_.model_state);
      current_state_.branch_time = t;
    }
    hysteresis_count_ = 0;
  } else {
    if (filtered_residual_ < kUseModelThreshold) {
      hysteresis_count_++;
    } else {
      hysteresis_count_ = 0;
    }
    if (hysteresis_count_ > 100) {
      using_model_ = true;
      // Grab the model-based state from back when we stopped diverging.
      current_state_.model_state.topRows<kShareStates>() =
          ModelStateForAccelState(branches_[0].accel_state, Z.topRows<2>(),
                                  yaw_rate)
              .topRows<kShareStates>();
      current_state_.accel_state =
          AccelStateForModelState(current_state_.model_state);
    } else {
      // TODO(james): Why was I leaving the encoders/wheel velocities in place?
      current_state_.model_state = ModelStateForAccelState(
          current_state_.accel_state, Z.topRows<2>(), yaw_rate);
      current_state_.branch_time = t;
    }
  }

  // Generate a new branch, with the accel state reset based on the model-based
  // state (really, just getting rid of the lateral velocity).
  // By resetting the accel state in the new branch, this tries to minimize the
  // odds of runaway lateral velocities. This doesn't help with runaway
  // longitudinal velocities, however.
  CombinedState new_branch = current_state_;
  new_branch.accel_state = AccelStateForModelState(new_branch.model_state);
  new_branch.accumulated_divergence = 0.0;

  ++branch_counter_;
  if (branch_counter_ % kBranchPeriod == 0) {
    branches_.Push(new_branch);
    old_positions_.Push(OldPosition{t, xytheta(), latest_turret_position_,
                                    latest_turret_velocity_});
    branch_counter_ = 0;
  }

  last_residual_ = model_divergence;

  VLOG(2) << "Using " << (using_model_ ? "model" : "accel");
  VLOG(2) << "Residual " << last_residual_;
  VLOG(2) << "Filtered Residual " << filtered_residual_;
  VLOG(2) << "buffer size " << branches_.size();
  VLOG(2) << "Model state " << current_state_.model_state.transpose();
  VLOG(2) << "Accel state " << current_state_.accel_state.transpose();
  VLOG(2) << "Accel state for model "
          << AccelStateForModelState(current_state_.model_state).transpose();
  VLOG(2) << "Input accel " << accel.transpose();
453 VLOG(2) << "Input gyro " << gyro.transpose();
454 VLOG(2) << "Input voltage " << voltage.transpose();
James Kuszmaul5a5a7832022-03-16 23:15:08 -0700455 VLOG(2) << "Input encoder " << Z.topRows<2>().transpose();
James Kuszmaul29c59522022-02-12 16:44:26 -0800456 VLOG(2) << "yaw rate " << yaw_rate;
457
458 CHECK(std::isfinite(last_residual_));
459}
460
James Kuszmaul8c4f6592022-02-26 15:49:30 -0800461const ModelBasedLocalizer::OldPosition ModelBasedLocalizer::GetStateForTime(
462 aos::monotonic_clock::time_point time) {
463 if (old_positions_.empty()) {
464 return OldPosition{};
465 }
466
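  // Linear scan for the buffered sample closest in time to the requested
  // timestamp.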
  aos::monotonic_clock::duration lowest_time_error =
      aos::monotonic_clock::duration::max();
  const OldPosition *best_match = nullptr;
  for (const OldPosition &sample : old_positions_) {
    const aos::monotonic_clock::duration time_error =
        std::chrono::abs(sample.sample_time - time);
    if (time_error < lowest_time_error) {
      lowest_time_error = time_error;
      best_match = &sample;
    }
  }
  return *best_match;
}

namespace {
// Converts a flatbuffer TransformationMatrix to an Eigen matrix. Technically,
// this should be able to do a single memcpy, but the extra verbosity here
// seems appropriate.
Eigen::Matrix<double, 4, 4> FlatbufferToTransformationMatrix(
    const frc971::vision::calibration::TransformationMatrix &flatbuffer) {
  CHECK_EQ(16u, CHECK_NOTNULL(flatbuffer.data())->size());
  Eigen::Matrix<double, 4, 4> result;
  result.setIdentity();
  for (int row = 0; row < 4; ++row) {
    for (int col = 0; col < 4; ++col) {
      result(row, col) = (*flatbuffer.data())[row * 4 + col];
    }
  }
  return result;
}

// Node names of the pis to listen for cameras from.
constexpr std::array<std::string_view, ModelBasedLocalizer::kNumPis> kPisToUse{
    "pi1", "pi2", "pi3", "pi4"};
}  // namespace

const Eigen::Matrix<double, 4, 4> ModelBasedLocalizer::CameraTransform(
    const OldPosition &state,
    const frc971::vision::calibration::CameraCalibration *calibration,
    std::optional<RejectionReason> *rejection_reason) const {
  CHECK_NOTNULL(rejection_reason);
  CHECK_NOTNULL(calibration);
  // Per the CameraCalibration specification, we can actually determine whether
  // the camera is the turret camera just from the presence of the
  // turret_extrinsics member.
  const bool is_turret = calibration->has_turret_extrinsics();
  // Ignore readings when the turret is spinning too fast, on the assumption
  // that the odds of screwing up the time compensation are higher.
  // Note that the current number here is chosen pretty arbitrarily--1 rad / sec
  // seems reasonable, but may be unnecessarily low or high.
  constexpr double kMaxTurretVelocity = 1.0;
  if (is_turret && std::abs(state.turret_velocity) > kMaxTurretVelocity &&
      !rejection_reason->has_value()) {
    *rejection_reason = RejectionReason::TURRET_TOO_FAST;
  }
  CHECK(calibration->has_fixed_extrinsics());
  const Eigen::Matrix<double, 4, 4> fixed_extrinsics =
      FlatbufferToTransformationMatrix(*calibration->fixed_extrinsics());

  // Calculate the pose of the camera relative to the robot origin.
  Eigen::Matrix<double, 4, 4> H_robot_camera = fixed_extrinsics;
  if (is_turret) {
    H_robot_camera =
        H_robot_camera *
        frc971::control_loops::TransformationMatrixForYaw<double>(
            state.turret_position) *
        FlatbufferToTransformationMatrix(*calibration->turret_extrinsics());
  }
  return H_robot_camera;
}

const std::optional<Eigen::Vector2d>
ModelBasedLocalizer::CameraMeasuredRobotPosition(
    const OldPosition &state, const y2022::vision::TargetEstimate *target,
    std::optional<RejectionReason> *rejection_reason,
    Eigen::Matrix<double, 4, 4> *H_field_camera_measured) const {
  if (!target->has_camera_calibration()) {
    *rejection_reason = RejectionReason::NO_CALIBRATION;
    return std::nullopt;
  }
  const Eigen::Matrix<double, 4, 4> H_robot_camera =
      CameraTransform(state, target->camera_calibration(), rejection_reason);
  const control_loops::Pose robot_pose(
      {state.xytheta(0), state.xytheta(1), 0.0}, state.xytheta(2));
  const Eigen::Matrix<double, 4, 4> H_field_robot =
      robot_pose.AsTransformationMatrix();
  // Current estimated pose of the camera in the global frame.
  // Note that this is all really just an elaborate way of extracting the
  // current estimated camera yaw, and nothing else.
  const Eigen::Matrix<double, 4, 4> H_field_camera =
      H_field_robot * H_robot_camera;
  // Grab the implied yaw of the camera (the +Z axis is coming out of the front
  // of the cameras).
  const Eigen::Vector3d rotated_camera_z =
      H_field_camera.block<3, 3>(0, 0) * Eigen::Vector3d(0, 0, 1);
  const double camera_yaw =
      std::atan2(rotated_camera_z.y(), rotated_camera_z.x());
  // All right, now we need to use the heading and distance from the
  // TargetEstimate, plus the yaw embedded in the camera_pose, to determine what
  // the implied X/Y position of the robot is. To do this, we calculate the
  // heading/distance from the target to the robot. The distance is easy, since
  // that's the same as the distance from the robot to the target. The heading
  // isn't too hard, but is obnoxious to think about, since the heading from the
  // target to the robot is distinct from the heading from the robot to the
  // target.

  // Just to walk through examples to confirm that the below calculation is
  // correct:
  // * If yaw = 0, and angle_to_target = 0, we are at 180 deg relative to the
  //   target.
  // * If yaw = 90 deg, and angle_to_target = 0, we are at -90 deg relative to
  //   the target.
  // * If yaw = 0, and angle_to_target = 90 deg, we are at -90 deg relative to
  //   the target.
  const double heading_from_target =
      aos::math::NormalizeAngle(M_PI + camera_yaw + target->angle_to_target());
  const double distance_from_target = target->distance();
  // Extract the implied camera position on the field.
  *H_field_camera_measured = H_field_camera;
  // TODO(james): Are we going to need to evict the roll/pitch components of the
  // camera extrinsics this year as well?
  (*H_field_camera_measured)(0, 3) =
      distance_from_target * std::cos(heading_from_target) + kVisionTargetX;
  (*H_field_camera_measured)(1, 3) =
      distance_from_target * std::sin(heading_from_target) + kVisionTargetY;
  const Eigen::Matrix<double, 4, 4> H_field_robot_measured =
      *H_field_camera_measured * H_robot_camera.inverse();
  return H_field_robot_measured.block<2, 1>(0, 3);
}

void ModelBasedLocalizer::HandleImageMatch(
    aos::monotonic_clock::time_point sample_time,
    const y2022::vision::TargetEstimate *target, int camera_index) {
  std::optional<RejectionReason> rejection_reason;

  if (target->confidence() < kMinTargetEstimateConfidence) {
    rejection_reason = RejectionReason::LOW_CONFIDENCE;
    TallyRejection(rejection_reason.value());
    return;
  }

  const OldPosition &state = GetStateForTime(sample_time);
  Eigen::Matrix<double, 4, 4> H_field_camera_measured;
  const std::optional<Eigen::Vector2d> measured_robot_position =
      CameraMeasuredRobotPosition(state, target, &rejection_reason,
                                  &H_field_camera_measured);
  // Technically, rejection_reason should always be set if
  // measured_robot_position is nullopt, but in the future we may have more
  // recoverable rejection reasons that we wish to allow to propagate further
  // into the process.
  if (!measured_robot_position || rejection_reason.has_value()) {
    CHECK(rejection_reason.has_value());
    TallyRejection(rejection_reason.value());
    return;
  }

  // Next, go through and do the actual Kalman corrections for the x/y
  // measurement, for both the accel state and the model-based state.
  const Eigen::Matrix<double, kNModelStates, kNModelStates> A_continuous_model =
      AModel(current_state_.model_state);

  Eigen::Matrix<double, kNModelStates, kNModelStates> A_discrete_model;
  Eigen::Matrix<double, kNModelStates, kNModelStates> Q_discrete_model;

  DiscretizeQAFast(Q_continuous_model_, A_continuous_model, kNominalDt,
                   &Q_discrete_model, &A_discrete_model);

  Eigen::Matrix<double, 2, kNModelStates> H_model;
  H_model.setZero();
  Eigen::Matrix<double, 2, kNAccelStates> H_accel;
  H_accel.setZero();
  Eigen::Matrix<double, 2, 2> R;
  R.setZero();
  H_model(0, kX) = 1.0;
  H_model(1, kY) = 1.0;
  H_accel(0, kX) = 1.0;
  H_accel(1, kY) = 1.0;
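  // Trust the camera more (tighter measurement noise) when aggressive
  // corrections are enabled; this is set externally based on whether we
  // appear to be in autonomous.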
  if (aggressive_corrections_) {
    R.diagonal() << 1e-2, 1e-2;
  } else {
    R.diagonal() << 1e-0, 1e-0;
  }

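  // Compute separate Kalman gains for the model- and accel-based filters; the
  // same x/y correction is then applied to every branch as well as to the
  // current state.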
  const Eigen::Matrix<double, kNModelStates, 2> K_model =
      P_model_ * H_model.transpose() *
      (H_model * P_model_ * H_model.transpose() + R).inverse();
  const Eigen::Matrix<double, kNAccelStates, 2> K_accel =
      P_accel_ * H_accel.transpose() *
      (H_accel * P_accel_ * H_accel.transpose() + R).inverse();
  P_model_ = (Eigen::Matrix<double, kNModelStates, kNModelStates>::Identity() -
              K_model * H_model) *
             P_model_;
  P_accel_ = (Eigen::Matrix<double, kNAccelStates, kNAccelStates>::Identity() -
              K_accel * H_accel) *
             P_accel_;
  // And now we have to correct *everything* on all the branches:
  for (CombinedState &state : branches_) {
    state.model_state += K_model * (measured_robot_position.value() -
                                    H_model * state.model_state);
    state.accel_state += K_accel * (measured_robot_position.value() -
                                    H_accel * state.accel_state);
  }
  current_state_.model_state +=
      K_model *
      (measured_robot_position.value() - H_model * current_state_.model_state);
  current_state_.accel_state +=
      K_accel *
      (measured_robot_position.value() - H_accel * current_state_.accel_state);

  statistics_.total_accepted++;
  statistics_.total_candidates++;

  const Eigen::Vector3d camera_z_in_field =
      H_field_camera_measured.block<3, 3>(0, 0) * Eigen::Vector3d::UnitZ();
  const double camera_yaw =
      std::atan2(camera_z_in_field.y(), camera_z_in_field.x());

  // TODO(milind): actually control this
  led_outputs_[camera_index] = LedOutput::ON;

  TargetEstimateDebugT debug;
  debug.camera = static_cast<uint8_t>(camera_index);
  debug.camera_x = H_field_camera_measured(0, 3);
  debug.camera_y = H_field_camera_measured(1, 3);
  debug.camera_theta = camera_yaw;
  debug.implied_robot_x = measured_robot_position.value().x();
  debug.implied_robot_y = measured_robot_position.value().y();
  debug.implied_robot_theta = xytheta()(2);
  debug.implied_turret_goal =
      aos::math::NormalizeAngle(camera_yaw + target->angle_to_target());
  debug.accepted = true;
  debug.image_age_sec = aos::time::DurationInSeconds(t_ - sample_time);
  CHECK_LT(image_debugs_.size(), kDebugBufferSize);
  image_debugs_.push_back(debug);
}

void ModelBasedLocalizer::HandleTurret(
    aos::monotonic_clock::time_point sample_time, double turret_position,
    double turret_velocity) {
  last_turret_update_ = sample_time;
  latest_turret_position_ = turret_position;
  latest_turret_velocity_ = turret_velocity;
}

void ModelBasedLocalizer::HandleReset(aos::monotonic_clock::time_point now,
                                      const Eigen::Vector3d &xytheta) {
  branches_.Reset();
  t_ = now;
  using_model_ = true;
  current_state_.model_state << xytheta(0), xytheta(1), xytheta(2),
      current_state_.model_state(kLeftEncoder), 0.0, 0.0,
      current_state_.model_state(kRightEncoder), 0.0, 0.0;
  current_state_.accel_state =
      AccelStateForModelState(current_state_.model_state);
  last_residual_ = 0.0;
  filtered_residual_ = 0.0;
  filtered_residual_accel_.setZero();
  abs_accel_.setZero();
}

flatbuffers::Offset<AccelBasedState> ModelBasedLocalizer::BuildAccelState(
    flatbuffers::FlatBufferBuilder *fbb, const AccelState &state) {
  AccelBasedState::Builder accel_state_builder(*fbb);
  accel_state_builder.add_x(state(kX));
  accel_state_builder.add_y(state(kY));
  accel_state_builder.add_theta(state(kTheta));
  accel_state_builder.add_velocity_x(state(kVelocityX));
  accel_state_builder.add_velocity_y(state(kVelocityY));
  return accel_state_builder.Finish();
}

flatbuffers::Offset<ModelBasedState> ModelBasedLocalizer::BuildModelState(
    flatbuffers::FlatBufferBuilder *fbb, const ModelState &state) {
  ModelBasedState::Builder model_state_builder(*fbb);
  model_state_builder.add_x(state(kX));
  model_state_builder.add_y(state(kY));
  model_state_builder.add_theta(state(kTheta));
  model_state_builder.add_left_encoder(state(kLeftEncoder));
  model_state_builder.add_left_velocity(state(kLeftVelocity));
  model_state_builder.add_left_voltage_error(state(kLeftVoltageError));
  model_state_builder.add_right_encoder(state(kRightEncoder));
  model_state_builder.add_right_velocity(state(kRightVelocity));
  model_state_builder.add_right_voltage_error(state(kRightVoltageError));
  return model_state_builder.Finish();
}

flatbuffers::Offset<CumulativeStatistics>
ModelBasedLocalizer::PopulateStatistics(flatbuffers::FlatBufferBuilder *fbb) {
  const auto rejections_offset = fbb->CreateVector(
      statistics_.rejection_counts.data(), statistics_.rejection_counts.size());

  CumulativeStatistics::Builder stats_builder(*fbb);
  stats_builder.add_total_accepted(statistics_.total_accepted);
  stats_builder.add_total_candidates(statistics_.total_candidates);
  stats_builder.add_rejection_reason_count(rejections_offset);
  return stats_builder.Finish();
}

flatbuffers::Offset<ModelBasedStatus> ModelBasedLocalizer::PopulateStatus(
    flatbuffers::FlatBufferBuilder *fbb) {
  const flatbuffers::Offset<CumulativeStatistics> stats_offset =
      PopulateStatistics(fbb);

  const flatbuffers::Offset<control_loops::drivetrain::DownEstimatorState>
      down_estimator_offset = down_estimator_.PopulateStatus(fbb, t_);

  const CombinedState &state = current_state_;

  const flatbuffers::Offset<ModelBasedState> model_state_offset =
      BuildModelState(fbb, state.model_state);

  const flatbuffers::Offset<AccelBasedState> accel_state_offset =
      BuildAccelState(fbb, state.accel_state);

  const flatbuffers::Offset<AccelBasedState> oldest_accel_state_offset =
      branches_.empty() ? flatbuffers::Offset<AccelBasedState>()
                        : BuildAccelState(fbb, branches_[0].accel_state);

  const flatbuffers::Offset<ModelBasedState> oldest_model_state_offset =
      branches_.empty() ? flatbuffers::Offset<ModelBasedState>()
                        : BuildModelState(fbb, branches_[0].model_state);

  ModelBasedStatus::Builder builder(*fbb);
  builder.add_accel_state(accel_state_offset);
  builder.add_oldest_accel_state(oldest_accel_state_offset);
  builder.add_oldest_model_state(oldest_model_state_offset);
  builder.add_model_state(model_state_offset);
  builder.add_using_model(using_model_);
  builder.add_residual(last_residual_);
  builder.add_filtered_residual(filtered_residual_);
  builder.add_velocity_residual(velocity_residual_);
  builder.add_accel_residual(accel_residual_);
  builder.add_theta_rate_residual(theta_rate_residual_);
  builder.add_down_estimator(down_estimator_offset);
  builder.add_x(xytheta()(0));
  builder.add_y(xytheta()(1));
  builder.add_theta(xytheta()(2));
  builder.add_implied_accel_x(abs_accel_(0));
  builder.add_implied_accel_y(abs_accel_(1));
  builder.add_implied_accel_z(abs_accel_(2));
  builder.add_clock_resets(clock_resets_);
  builder.add_statistics(stats_offset);
  return builder.Finish();
}

flatbuffers::Offset<LocalizerVisualization>
ModelBasedLocalizer::PopulateVisualization(
    flatbuffers::FlatBufferBuilder *fbb) {
  const flatbuffers::Offset<CumulativeStatistics> stats_offset =
      PopulateStatistics(fbb);

  aos::SizedArray<flatbuffers::Offset<TargetEstimateDebug>, kDebugBufferSize>
      debug_offsets;

  for (const TargetEstimateDebugT &debug : image_debugs_) {
    debug_offsets.push_back(PackTargetEstimateDebug(debug, fbb));
  }

  image_debugs_.clear();

  const flatbuffers::Offset<
      flatbuffers::Vector<flatbuffers::Offset<TargetEstimateDebug>>>
      debug_offset =
          fbb->CreateVector(debug_offsets.data(), debug_offsets.size());

  LocalizerVisualization::Builder builder(*fbb);
  builder.add_statistics(stats_offset);
  builder.add_targets(debug_offset);
  return builder.Finish();
}

void ModelBasedLocalizer::TallyRejection(const RejectionReason reason) {
  statistics_.total_candidates++;
  statistics_.rejection_counts[static_cast<size_t>(reason)]++;
  TargetEstimateDebugT debug;
  debug.accepted = false;
  debug.rejection_reason = reason;
  CHECK_LT(image_debugs_.size(), kDebugBufferSize);
  image_debugs_.push_back(debug);
}

flatbuffers::Offset<TargetEstimateDebug>
ModelBasedLocalizer::PackTargetEstimateDebug(
    const TargetEstimateDebugT &debug, flatbuffers::FlatBufferBuilder *fbb) {
  if (!debug.accepted) {
    TargetEstimateDebug::Builder builder(*fbb);
    builder.add_accepted(debug.accepted);
    builder.add_rejection_reason(debug.rejection_reason);
    return builder.Finish();
  } else {
    flatbuffers::Offset<TargetEstimateDebug> offset =
        TargetEstimateDebug::Pack(*fbb, &debug);
    flatbuffers::GetMutableTemporaryPointer(*fbb, offset)
        ->clear_rejection_reason();
    return offset;
  }
}

EventLoopLocalizer::EventLoopLocalizer(
    aos::EventLoop *event_loop,
    const control_loops::drivetrain::DrivetrainConfig<double> &dt_config)
    : event_loop_(event_loop),
      model_based_(dt_config),
      status_sender_(event_loop_->MakeSender<LocalizerStatus>("/localizer")),
      output_sender_(event_loop_->MakeSender<LocalizerOutput>("/localizer")),
      visualization_sender_(
          event_loop_->MakeSender<LocalizerVisualization>("/localizer")),
      output_fetcher_(
          event_loop_->MakeFetcher<frc971::control_loops::drivetrain::Output>(
              "/drivetrain")),
      clock_offset_fetcher_(
          event_loop_->MakeFetcher<aos::message_bridge::ServerStatistics>(
              "/aos")),
      superstructure_fetcher_(
          event_loop_
              ->MakeFetcher<y2022::control_loops::superstructure::Status>(
                  "/superstructure")),
      joystick_state_fetcher_(
          event_loop_->MakeFetcher<aos::JoystickState>("/roborio/aos")),
      imu_watcher_(event_loop, dt_config,
                   y2022::constants::Values::DrivetrainEncoderToMeters(1),
                   [this](aos::monotonic_clock::time_point sample_time_pico,
                          aos::monotonic_clock::time_point sample_time_pi,
                          std::optional<Eigen::Vector2d> encoders,
                          Eigen::Vector3d gyro, Eigen::Vector3d accel) {
                     HandleImu(sample_time_pico, sample_time_pi, encoders, gyro,
                               accel);
                   }) {
  event_loop->SetRuntimeRealtimePriority(10);
  event_loop_->MakeWatcher(
      "/drivetrain",
      [this](
          const frc971::control_loops::drivetrain::LocalizerControl &control) {
        const double theta = control.keep_current_theta()
                                 ? model_based_.xytheta()(2)
                                 : control.theta();
        model_based_.HandleReset(event_loop_->monotonic_now(),
                                 {control.x(), control.y(), theta});
      });
  aos::TimerHandler *superstructure_timer = event_loop_->AddTimer([this]() {
    if (superstructure_fetcher_.Fetch()) {
      const y2022::control_loops::superstructure::Status &status =
          *superstructure_fetcher_.get();
      if (!status.has_turret()) {
        return;
      }
      CHECK(status.has_turret());
      model_based_.HandleTurret(
          superstructure_fetcher_.context().monotonic_event_time,
          status.turret()->position(), status.turret()->velocity());
    }
  });
  event_loop_->OnRun([this, superstructure_timer]() {
    superstructure_timer->Setup(event_loop_->monotonic_now(),
                                std::chrono::milliseconds(20));
  });

  for (size_t camera_index = 0; camera_index < kPisToUse.size();
       ++camera_index) {
    CHECK_LT(camera_index, target_estimate_fetchers_.size());
    target_estimate_fetchers_[camera_index] =
        event_loop_->MakeFetcher<y2022::vision::TargetEstimate>(
            absl::StrCat("/", kPisToUse[camera_index], "/camera"));
  }
  aos::TimerHandler *estimate_timer = event_loop_->AddTimer([this]() {
    joystick_state_fetcher_.Fetch();
    const bool maybe_in_auto = (joystick_state_fetcher_.get() != nullptr)
                                   ? joystick_state_fetcher_->autonomous()
                                   : true;
    model_based_.set_use_aggressive_image_corrections(!maybe_in_auto);
    for (size_t camera_index = 0; camera_index < kPisToUse.size();
         ++camera_index) {
      if (model_based_.NumQueuedImageDebugs() ==
              ModelBasedLocalizer::kDebugBufferSize ||
          (last_visualization_send_ + kMinVisualizationPeriod <
           event_loop_->monotonic_now())) {
        auto builder = visualization_sender_.MakeBuilder();
        visualization_sender_.CheckOk(
            builder.Send(model_based_.PopulateVisualization(builder.fbb())));
      }
      if (target_estimate_fetchers_[camera_index].Fetch()) {
        const std::optional<aos::monotonic_clock::duration> monotonic_offset =
            ClockOffset(kPisToUse[camera_index]);
        if (!monotonic_offset.has_value()) {
          continue;
        }
        // TODO(james): Get timestamp from message contents.
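        // Convert the image capture time from the pi's clock to this node's
        // monotonic clock using the message-bridge offset.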
        aos::monotonic_clock::time_point capture_time(
            target_estimate_fetchers_[camera_index]
                .context()
                .monotonic_remote_time -
            monotonic_offset.value());
        if (capture_time > target_estimate_fetchers_[camera_index]
                               .context()
                               .monotonic_event_time) {
          model_based_.TallyRejection(RejectionReason::IMAGE_FROM_FUTURE);
          continue;
        }
        capture_time -= imu_watcher_.pico_offset_error();
        model_based_.HandleImageMatch(
            capture_time, target_estimate_fetchers_[camera_index].get(),
            camera_index);
      }
    }
  });
  event_loop_->OnRun([this, estimate_timer]() {
    estimate_timer->Setup(event_loop_->monotonic_now(),
                          std::chrono::milliseconds(100));
  });
}

void EventLoopLocalizer::HandleImu(
    aos::monotonic_clock::time_point sample_time_pico,
    aos::monotonic_clock::time_point sample_time_pi,
    std::optional<Eigen::Vector2d> encoders, Eigen::Vector3d gyro,
    Eigen::Vector3d accel) {
  output_fetcher_.Fetch();
  {
    const bool disabled = (output_fetcher_.get() == nullptr) ||
                          (output_fetcher_.context().monotonic_event_time +
                               std::chrono::milliseconds(10) <
                           event_loop_->context().monotonic_event_time);
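    // If the drivetrain output is stale or missing, assume the robot is
    // disabled and that zero voltage is being applied.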
    // For gyros, use the most recent gyro reading if we aren't zeroed,
    // to avoid missing integration cycles.
    model_based_.HandleImu(
        sample_time_pico, gyro, accel, encoders,
        disabled ? Eigen::Vector2d::Zero()
                 : Eigen::Vector2d{output_fetcher_->left_voltage(),
                                   output_fetcher_->right_voltage()});
  }
  {
    auto builder = status_sender_.MakeBuilder();
    const flatbuffers::Offset<ModelBasedStatus> model_based_status =
        model_based_.PopulateStatus(builder.fbb());
    const flatbuffers::Offset<control_loops::drivetrain::ImuZeroerState>
        zeroer_status = imu_watcher_.zeroer().PopulateStatus(builder.fbb());
    const flatbuffers::Offset<ImuFailures> imu_failures =
        imu_watcher_.PopulateImuFailures(builder.fbb());
    LocalizerStatus::Builder status_builder =
        builder.MakeBuilder<LocalizerStatus>();
    status_builder.add_model_based(model_based_status);
    status_builder.add_zeroed(imu_watcher_.zeroer().Zeroed());
    status_builder.add_faulted_zero(imu_watcher_.zeroer().Faulted());
    status_builder.add_zeroing(zeroer_status);
    status_builder.add_imu_failures(imu_failures);
    if (encoders.has_value()) {
      status_builder.add_left_encoder(encoders.value()(0));
      status_builder.add_right_encoder(encoders.value()(1));
    }
    if (imu_watcher_.pico_offset().has_value()) {
      status_builder.add_pico_offset_ns(
          imu_watcher_.pico_offset().value().count());
      status_builder.add_pico_offset_error_ns(
          imu_watcher_.pico_offset_error().count());
    }
    builder.CheckOk(builder.Send(status_builder.Finish()));
  }
  if (last_output_send_ + std::chrono::milliseconds(5) <
      event_loop_->context().monotonic_event_time) {
    auto builder = output_sender_.MakeBuilder();

    const auto led_outputs_offset = builder.fbb()->CreateVector(
        model_based_.led_outputs().data(), model_based_.led_outputs().size());

    LocalizerOutput::Builder output_builder =
        builder.MakeBuilder<LocalizerOutput>();
    output_builder.add_monotonic_timestamp_ns(
        std::chrono::duration_cast<std::chrono::nanoseconds>(
            sample_time_pi.time_since_epoch())
            .count());
    output_builder.add_x(model_based_.xytheta()(0));
    output_builder.add_y(model_based_.xytheta()(1));
    output_builder.add_theta(model_based_.xytheta()(2));
    output_builder.add_zeroed(imu_watcher_.zeroer().Zeroed());
    output_builder.add_image_accepted_count(model_based_.total_accepted());
    const Eigen::Quaterniond &orientation = model_based_.orientation();
    Quaternion quaternion;
    quaternion.mutate_x(orientation.x());
    quaternion.mutate_y(orientation.y());
    quaternion.mutate_z(orientation.z());
    quaternion.mutate_w(orientation.w());
    output_builder.add_orientation(&quaternion);
    output_builder.add_led_outputs(led_outputs_offset);
    builder.CheckOk(builder.Send(output_builder.Finish()));
    last_output_send_ = event_loop_->monotonic_now();
  }
}

std::optional<aos::monotonic_clock::duration> EventLoopLocalizer::ClockOffset(
    std::string_view pi) {
  std::optional<aos::monotonic_clock::duration> monotonic_offset;
  clock_offset_fetcher_.Fetch();
  if (clock_offset_fetcher_.get() != nullptr) {
    for (const auto connection : *clock_offset_fetcher_->connections()) {
      if (connection->has_node() && connection->node()->has_name() &&
          connection->node()->name()->string_view() == pi) {
        if (connection->has_monotonic_offset()) {
          monotonic_offset =
              std::chrono::nanoseconds(connection->monotonic_offset());
        } else {
          // If we don't have a monotonic offset, that means we aren't
          // connected.
          model_based_.TallyRejection(
              RejectionReason::MESSAGE_BRIDGE_DISCONNECTED);
          return std::nullopt;
        }
        break;
      }
    }
  }
  CHECK(monotonic_offset.has_value());
  return monotonic_offset;
}

}  // namespace frc971::controls