James Kuszmaul | 51fa1ae | 2022-02-26 00:49:57 -0800 | [diff] [blame] | 1 | #include "y2022/localizer/localizer.h" |
James Kuszmaul | 29c5952 | 2022-02-12 16:44:26 -0800 | [diff] [blame] | 2 | |
| 3 | #include "frc971/control_loops/c2d.h" |
| 4 | #include "frc971/wpilib/imu_batch_generated.h" |
James Kuszmaul | 5ed29dd | 2022-02-13 18:32:06 -0800 | [diff] [blame] | 5 | #include "y2022/constants.h" |
James Kuszmaul | 8c4f659 | 2022-02-26 15:49:30 -0800 | [diff] [blame] | 6 | #include "aos/json_to_flatbuffer.h" |
James Kuszmaul | 29c5952 | 2022-02-12 16:44:26 -0800 | [diff] [blame] | 7 | |
| 8 | namespace frc971::controls { |
| 9 | |
| 10 | namespace { |
| 11 | constexpr double kG = 9.80665; |
| 12 | constexpr std::chrono::microseconds kNominalDt(500); |
| 13 | |
James Kuszmaul | 8c4f659 | 2022-02-26 15:49:30 -0800 | [diff] [blame] | 14 | // Field position of the target (the 2022 target is conveniently in the middle |
| 15 | // of the field....). |
| 16 | constexpr double kVisionTargetX = 0.0; |
| 17 | constexpr double kVisionTargetY = 0.0; |
| 18 | |
James Kuszmaul | aa39d96 | 2022-03-06 14:54:28 -0800 | [diff] [blame] | 19 | // Minimum confidence to require to use a target match. |
James Kuszmaul | 1b918d8 | 2022-03-12 18:27:41 -0800 | [diff] [blame^] | 20 | constexpr double kMinTargetEstimateConfidence = 0.75; |
James Kuszmaul | aa39d96 | 2022-03-06 14:54:28 -0800 | [diff] [blame] | 21 | |
James Kuszmaul | 29c5952 | 2022-02-12 16:44:26 -0800 | [diff] [blame] | 22 | template <int N> |
| 23 | Eigen::Matrix<double, N, 1> MakeState(std::vector<double> values) { |
| 24 | CHECK_EQ(static_cast<size_t>(N), values.size()); |
| 25 | Eigen::Matrix<double, N, 1> vector; |
| 26 | for (int ii = 0; ii < N; ++ii) { |
| 27 | vector(ii, 0) = values[ii]; |
| 28 | } |
| 29 | return vector; |
| 30 | } |
James Kuszmaul | 29c5952 | 2022-02-12 16:44:26 -0800 | [diff] [blame] | 31 | } // namespace |
| 32 | |
// Constructs the localizer from the drivetrain configuration: builds the
// continuous-time dynamics matrices for both the model-based (encoder/voltage)
// state and the accelerometer-based state, and seeds the covariances.
ModelBasedLocalizer::ModelBasedLocalizer(
    const control_loops::drivetrain::DrivetrainConfig<double> &dt_config)
    : dt_config_(dt_config),
      velocity_drivetrain_coefficients_(
          dt_config.make_hybrid_drivetrain_velocity_loop()
              .plant()
              .coefficients()),
      down_estimator_(dt_config) {
  statistics_.rejection_counts.fill(0);
  // The branch buffer is sized to hold exactly one second of branches at the
  // nominal IMU rate, given that we only branch every kBranchPeriod samples.
  CHECK_EQ(branches_.capacity(), static_cast<size_t>(std::chrono::seconds(1) /
                                                     kNominalDt / kBranchPeriod));
  if (dt_config_.is_simulated) {
    down_estimator_.assume_perfect_gravity();
  }
  A_continuous_accel_.setZero();
  A_continuous_model_.setZero();
  B_continuous_accel_.setZero();
  B_continuous_model_.setZero();

  // Accel-based state: position states integrate the velocity states.
  A_continuous_accel_(kX, kVelocityX) = 1.0;
  A_continuous_accel_(kY, kVelocityY) = 1.0;

  const double diameter = 2.0 * dt_config_.robot_radius;

  // Model-based state: heading rate comes from the wheel-speed difference
  // across the track width, and each encoder integrates its wheel velocity.
  A_continuous_model_(kTheta, kLeftVelocity) = -1.0 / diameter;
  A_continuous_model_(kTheta, kRightVelocity) = 1.0 / diameter;
  A_continuous_model_(kLeftEncoder, kLeftVelocity) = 1.0;
  A_continuous_model_(kRightEncoder, kRightVelocity) = 1.0;
  // Wheel-velocity dynamics are copied from the drivetrain velocity loop.
  const auto &vel_coefs = velocity_drivetrain_coefficients_;
  A_continuous_model_(kLeftVelocity, kLeftVelocity) =
      vel_coefs.A_continuous(0, 0);
  A_continuous_model_(kLeftVelocity, kRightVelocity) =
      vel_coefs.A_continuous(0, 1);
  A_continuous_model_(kRightVelocity, kLeftVelocity) =
      vel_coefs.A_continuous(1, 0);
  A_continuous_model_(kRightVelocity, kRightVelocity) =
      vel_coefs.A_continuous(1, 1);

  // Voltage-error states feed into the wheel velocities exactly the way the
  // actual applied voltages do (same B matrix entries).
  A_continuous_model_(kLeftVelocity, kLeftVoltageError) =
      1 * vel_coefs.B_continuous(0, 0);
  A_continuous_model_(kLeftVelocity, kRightVoltageError) =
      1 * vel_coefs.B_continuous(0, 1);
  A_continuous_model_(kRightVelocity, kLeftVoltageError) =
      1 * vel_coefs.B_continuous(1, 0);
  A_continuous_model_(kRightVelocity, kRightVoltageError) =
      1 * vel_coefs.B_continuous(1, 1);

  B_continuous_model_.block<1, 2>(kLeftVelocity, kLeftVoltage) =
      vel_coefs.B_continuous.row(0);
  B_continuous_model_.block<1, 2>(kRightVelocity, kLeftVoltage) =
      vel_coefs.B_continuous.row(1);

  // The accel-based state is driven directly by the measured accelerations
  // and yaw rate.
  B_continuous_accel_(kVelocityX, kAccelX) = 1.0;
  B_continuous_accel_(kVelocityY, kAccelY) = 1.0;
  B_continuous_accel_(kTheta, kThetaRate) = 1.0;

  // Process noise. NOTE(review): entry order presumably follows the model
  // state enum (x, y, theta, encoders/velocities/voltage errors) -- confirm
  // against the state definitions in the header.
  Q_continuous_model_.setZero();
  Q_continuous_model_.diagonal() << 1e-2, 1e-2, 1e-8, 1e-2, 1e-0, 1e-0, 1e-2,
      1e-0, 1e-0;

  Q_continuous_accel_.setZero();
  Q_continuous_accel_.diagonal() << 1e-2, 1e-2, 1e-20, 1e-4, 1e-4;

  P_model_ = Q_continuous_model_ * aos::time::DurationInSeconds(kNominalDt);

  // We can precalculate the discretizations of the accel model because it is
  // actually LTI.

  DiscretizeQAFast(Q_continuous_accel_, A_continuous_accel_, kNominalDt,
                   &Q_discrete_accel_, &A_discrete_accel_);
  P_accel_ = Q_discrete_accel_;
}
| 105 | |
| 106 | Eigen::Matrix<double, ModelBasedLocalizer::kNModelStates, |
| 107 | ModelBasedLocalizer::kNModelStates> |
| 108 | ModelBasedLocalizer::AModel( |
| 109 | const ModelBasedLocalizer::ModelState &state) const { |
| 110 | Eigen::Matrix<double, kNModelStates, kNModelStates> A = A_continuous_model_; |
| 111 | const double theta = state(kTheta); |
| 112 | const double stheta = std::sin(theta); |
| 113 | const double ctheta = std::cos(theta); |
| 114 | const double velocity = (state(kLeftVelocity) + state(kRightVelocity)) / 2.0; |
| 115 | A(kX, kTheta) = -stheta * velocity; |
| 116 | A(kX, kLeftVelocity) = ctheta / 2.0; |
| 117 | A(kX, kRightVelocity) = ctheta / 2.0; |
| 118 | A(kY, kTheta) = ctheta * velocity; |
| 119 | A(kY, kLeftVelocity) = stheta / 2.0; |
| 120 | A(kY, kRightVelocity) = stheta / 2.0; |
| 121 | return A; |
| 122 | } |
| 123 | |
// Returns the continuous-time dynamics matrix for the accel-based state.
// Unlike AModel(), this is constant (the accel model is LTI), so no
// linearization about the current state is required.
Eigen::Matrix<double, ModelBasedLocalizer::kNAccelStates,
              ModelBasedLocalizer::kNAccelStates>
ModelBasedLocalizer::AAccel() const {
  return A_continuous_accel_;
}
| 129 | |
| 130 | ModelBasedLocalizer::ModelState ModelBasedLocalizer::DiffModel( |
| 131 | const ModelBasedLocalizer::ModelState &state, |
| 132 | const ModelBasedLocalizer::ModelInput &U) const { |
| 133 | ModelState x_dot = AModel(state) * state + B_continuous_model_ * U; |
| 134 | const double theta = state(kTheta); |
| 135 | const double stheta = std::sin(theta); |
| 136 | const double ctheta = std::cos(theta); |
| 137 | const double velocity = (state(kLeftVelocity) + state(kRightVelocity)) / 2.0; |
| 138 | x_dot(kX) = ctheta * velocity; |
| 139 | x_dot(kY) = stheta * velocity; |
| 140 | return x_dot; |
| 141 | } |
| 142 | |
// Computes the time derivative of the accel-based state: pure linear dynamics
// driven by the measured acceleration/yaw-rate input U.
ModelBasedLocalizer::AccelState ModelBasedLocalizer::DiffAccel(
    const ModelBasedLocalizer::AccelState &state,
    const ModelBasedLocalizer::AccelInput &U) const {
  return AAccel() * state + B_continuous_accel_ * U;
}
| 148 | |
| 149 | ModelBasedLocalizer::ModelState ModelBasedLocalizer::UpdateModel( |
| 150 | const ModelBasedLocalizer::ModelState &model, |
| 151 | const ModelBasedLocalizer::ModelInput &input, |
| 152 | const aos::monotonic_clock::duration dt) const { |
| 153 | return control_loops::RungeKutta( |
| 154 | std::bind(&ModelBasedLocalizer::DiffModel, this, std::placeholders::_1, |
| 155 | input), |
| 156 | model, aos::time::DurationInSeconds(dt)); |
| 157 | } |
| 158 | |
| 159 | ModelBasedLocalizer::AccelState ModelBasedLocalizer::UpdateAccel( |
| 160 | const ModelBasedLocalizer::AccelState &accel, |
| 161 | const ModelBasedLocalizer::AccelInput &input, |
| 162 | const aos::monotonic_clock::duration dt) const { |
| 163 | return control_loops::RungeKutta( |
| 164 | std::bind(&ModelBasedLocalizer::DiffAccel, this, std::placeholders::_1, |
| 165 | input), |
| 166 | accel, aos::time::DurationInSeconds(dt)); |
| 167 | } |
| 168 | |
| 169 | ModelBasedLocalizer::AccelState ModelBasedLocalizer::AccelStateForModelState( |
| 170 | const ModelBasedLocalizer::ModelState &state) const { |
| 171 | const double robot_speed = |
| 172 | (state(kLeftVelocity) + state(kRightVelocity)) / 2.0; |
James Kuszmaul | 5ed29dd | 2022-02-13 18:32:06 -0800 | [diff] [blame] | 173 | const double lat_speed = (AModel(state) * state)(kTheta) * long_offset_; |
| 174 | const double velocity_x = std::cos(state(kTheta)) * robot_speed - |
| 175 | std::sin(state(kTheta)) * lat_speed; |
| 176 | const double velocity_y = std::sin(state(kTheta)) * robot_speed + |
| 177 | std::cos(state(kTheta)) * lat_speed; |
James Kuszmaul | 29c5952 | 2022-02-12 16:44:26 -0800 | [diff] [blame] | 178 | return (AccelState() << state(0), state(1), state(2), velocity_x, velocity_y) |
| 179 | .finished(); |
| 180 | } |
| 181 | |
| 182 | ModelBasedLocalizer::ModelState ModelBasedLocalizer::ModelStateForAccelState( |
| 183 | const ModelBasedLocalizer::AccelState &state, |
| 184 | const Eigen::Vector2d &encoders, const double yaw_rate) const { |
| 185 | const double robot_speed = state(kVelocityX) * std::cos(state(kTheta)) + |
| 186 | state(kVelocityY) * std::sin(state(kTheta)); |
| 187 | const double radius = dt_config_.robot_radius; |
| 188 | const double left_velocity = robot_speed - yaw_rate * radius; |
| 189 | const double right_velocity = robot_speed + yaw_rate * radius; |
| 190 | return (ModelState() << state(0), state(1), state(2), encoders(0), |
| 191 | left_velocity, 0.0, encoders(1), right_velocity, 0.0) |
| 192 | .finished(); |
| 193 | } |
| 194 | |
// Computes a scalar residual measuring how badly the drivetrain model
// disagrees with the inertial measurements for the given branch. The three
// components (velocity, theta-rate, and acceleration residuals) are also
// stashed in member variables for logging/debugging. A large return value
// suggests the model is failing (e.g., wheels slipping or the robot being
// pushed).
double ModelBasedLocalizer::ModelDivergence(
    const ModelBasedLocalizer::CombinedState &state,
    const ModelBasedLocalizer::AccelInput &accel_inputs,
    const Eigen::Vector2d &filtered_accel,
    const ModelBasedLocalizer::ModelInput &model_inputs) {
  // Convert the model state into the acceleration-based state-space and check
  // the distance between the two (should really be a weighted norm, but all the
  // numbers are on ~the same scale).
  // TODO(james): Maybe weight lateral velocity divergence different than
  // longitudinal? Seems like we tend to get false-positives currently when in
  // sharp turns.
  // TODO(james): For off-center gyros, maybe reduce noise when turning?
  VLOG(2) << "divergence: "
          << (state.accel_state - AccelStateForModelState(state.model_state))
                 .transpose();
  const AccelState diff_accel = DiffAccel(state.accel_state, accel_inputs);
  const ModelState diff_model = DiffModel(state.model_state, model_inputs);
  // Robot-frame acceleration implied by the model. The long_offset_ term is
  // presumably a centripetal correction for an off-center IMU -- confirm
  // against where long_offset_ is set.
  const double model_lng_velocity =
      (state.model_state(kLeftVelocity) + state.model_state(kRightVelocity)) /
      2.0;
  const double model_lng_accel =
      (diff_model(kLeftVelocity) + diff_model(kRightVelocity)) / 2.0 -
      diff_model(kTheta) * diff_model(kTheta) * long_offset_;
  const double model_lat_accel = diff_model(kTheta) * model_lng_velocity;
  const Eigen::Vector2d robot_frame_accel(model_lng_accel, model_lat_accel);
  // Rotate into the world frame so it is comparable with filtered_accel.
  const Eigen::Vector2d model_accel =
      Eigen::AngleAxisd(state.model_state(kTheta), Eigen::Vector3d::UnitZ())
          .toRotationMatrix()
          .block<2, 2>(0, 0) *
      robot_frame_accel;
  const double accel_diff = (model_accel - filtered_accel).norm();
  const double theta_rate_diff =
      std::abs(diff_accel(kTheta) - diff_model(kTheta));

  // Normalize the velocity residual by the magnitudes involved so it does not
  // blow up at high speeds.
  const Eigen::Vector2d accel_vel = state.accel_state.bottomRows<2>();
  Eigen::Vector2d model_vel =
      AccelStateForModelState(state.model_state).bottomRows<2>();
  velocity_residual_ = (accel_vel - model_vel).norm() /
                       (1.0 + accel_vel.norm() + model_vel.norm());
  theta_rate_residual_ = theta_rate_diff;
  // De-weight the raw acceleration disagreement relative to the other terms.
  accel_residual_ = accel_diff / 4.0;
  return velocity_residual_ + theta_rate_residual_ + accel_residual_;
}
| 238 | |
// Advances a single branched state by one timestep: the accel state is
// integrated open-loop (with a velocity bleed-off when the robot has been
// stationary for a long stretch), while the model state is integrated and
// then corrected against the encoder/gyro measurement Z using Kalman gain K.
void ModelBasedLocalizer::UpdateState(
    CombinedState *state,
    const Eigen::Matrix<double, kNModelStates, kNModelOutputs> &K,
    const Eigen::Matrix<double, kNModelOutputs, 1> &Z,
    const Eigen::Matrix<double, kNModelOutputs, kNModelStates> &H,
    const AccelInput &accel_input, const ModelInput &model_input,
    aos::monotonic_clock::duration dt) {
  state->accel_state = UpdateAccel(state->accel_state, accel_input, dt);
  // If the down estimator has observed the robot still for long enough, decay
  // the accel-based velocity to keep integration drift from accumulating.
  if (down_estimator_.consecutive_still() > 500.0) {
    state->accel_state(kVelocityX) *= 0.9;
    state->accel_state(kVelocityY) *= 0.9;
  }
  state->model_state = UpdateModel(state->model_state, model_input, dt);
  // Standard Kalman measurement-update step for the model state.
  state->model_state += K * (Z - H * state->model_state);
}
| 254 | |
// Processes one IMU sample: runs the down estimator, performs the Kalman
// predict/correct cycle on both the model-based and accel-based filters for
// every branch, decides (with hysteresis) whether to trust the drivetrain
// model or the raw inertials, and periodically spawns a new branch plus a
// history sample for vision latency compensation.
void ModelBasedLocalizer::HandleImu(aos::monotonic_clock::time_point t,
                                    const Eigen::Vector3d &gyro,
                                    const Eigen::Vector3d &accel,
                                    const Eigen::Vector2d encoders,
                                    const Eigen::Vector2d voltage) {
  VLOG(2) << t;
  // First sample ever: establish the clock baseline.
  if (t_ == aos::monotonic_clock::min_time) {
    t_ = t;
  }
  // If we missed more than one nominal period, treat it as a clock reset
  // rather than integrating over the gap.
  if (t_ + 2 * kNominalDt < t) {
    t_ = t;
    ++clock_resets_;
  }
  const aos::monotonic_clock::duration dt = t - t_;
  t_ = t;
  down_estimator_.Predict(gyro, accel, dt);
  // TODO(james): Should we prefer this or use the down-estimator corrected
  // version? Using the down estimator is more principled, but does create more
  // opportunities for subtle biases.
  const double yaw_rate = (dt_config_.imu_transform * gyro)(2);
  const double diameter = 2.0 * dt_config_.robot_radius;

  // Full 3-D orientation: current yaw estimate composed with the down
  // estimator's roll/pitch.
  const Eigen::AngleAxis<double> orientation(
      Eigen::AngleAxis<double>(xytheta()(kTheta), Eigen::Vector3d::UnitZ()) *
      down_estimator_.X_hat());
  last_orientation_ = orientation;

  // Rotate the accelerometer reading into the world frame and scale from g's
  // to m/s^2.
  const Eigen::Vector3d absolute_accel =
      orientation * dt_config_.imu_transform * kG * accel;
  abs_accel_ = absolute_accel;

  VLOG(2) << "abs accel " << absolute_accel.transpose();
  VLOG(2) << "dt " << aos::time::DurationInSeconds(dt);

  // Update all the branched states.
  const AccelInput accel_input(absolute_accel.x(), absolute_accel.y(),
                               yaw_rate);
  const ModelInput model_input(voltage);

  // Linearize about the current estimate and discretize for this timestep.
  const Eigen::Matrix<double, kNModelStates, kNModelStates> A_continuous =
      AModel(current_state_.model_state);

  Eigen::Matrix<double, kNModelStates, kNModelStates> A_discrete;
  Eigen::Matrix<double, kNModelStates, kNModelStates> Q_discrete;

  DiscretizeQAFast(Q_continuous_model_, A_continuous, dt, &Q_discrete,
                   &A_discrete);

  // Covariance predict step for both filters.
  P_model_ = A_discrete * P_model_ * A_discrete.transpose() + Q_discrete;
  P_accel_ = A_discrete_accel_ * P_accel_ * A_discrete_accel_.transpose() +
             Q_discrete_accel_;

  // Measurement model: left encoder, right encoder, and gyro yaw rate
  // (expressed as the wheel-speed difference over the track width).
  Eigen::Matrix<double, kNModelOutputs, kNModelStates> H;
  Eigen::Matrix<double, kNModelOutputs, kNModelOutputs> R;
  {
    H.setZero();
    R.setZero();
    H(0, kLeftEncoder) = 1.0;
    H(1, kRightEncoder) = 1.0;
    H(2, kRightVelocity) = 1.0 / diameter;
    H(2, kLeftVelocity) = -1.0 / diameter;

    R.diagonal() << 1e-9, 1e-9, 1e-13;
  }

  const Eigen::Matrix<double, kNModelOutputs, 1> Z(encoders(0), encoders(1),
                                                   yaw_rate);

  // On the very first sample, seed the encoder states and the first branch.
  if (branches_.empty()) {
    VLOG(2) << "Initializing";
    current_state_.model_state(kLeftEncoder) = encoders(0);
    current_state_.model_state(kRightEncoder) = encoders(1);
    current_state_.branch_time = t;
    branches_.Push(current_state_);
  }

  // Kalman gain and covariance correction for the model filter.
  const Eigen::Matrix<double, kNModelStates, kNModelOutputs> K =
      P_model_ * H.transpose() * (H * P_model_ * H.transpose() + R).inverse();
  P_model_ = (Eigen::Matrix<double, kNModelStates, kNModelStates>::Identity() -
              K * H) *
             P_model_;
  VLOG(2) << "K\n" << K;
  VLOG(2) << "Z\n" << Z.transpose();

  // Propagate + correct every branch, then the current state itself.
  for (CombinedState &state : branches_) {
    UpdateState(&state, K, Z, H, accel_input, model_input, dt);
  }
  UpdateState(&current_state_, K, Z, H, accel_input, model_input, dt);

  VLOG(2) << "oildest accel " << branches_[0].accel_state.transpose();
  VLOG(2) << "oildest accel diff "
          << DiffAccel(branches_[0].accel_state, accel_input).transpose();
  VLOG(2) << "oildest model " << branches_[0].model_state.transpose();

  // Determine whether to switch modes--if we are currently in model-based
  // mode, swap to accel-based if the two states have diverged meaningfully in
  // the oldest branch. If we are currently in accel-based mode, then swap back
  // to model-based once the oldest branch's model state has converged back
  // towards the accel state.
  filtered_residual_accel_ +=
      0.01 * (accel_input.topRows<2>() - filtered_residual_accel_);
  // Only compute divergence once the branch buffer is full; otherwise the
  // oldest branch is too young to be meaningful.
  const double model_divergence =
      branches_.full() ? ModelDivergence(branches_[0], accel_input,
                                         filtered_residual_accel_, model_input)
                       : 0.0;
  // Low-pass the divergence with a ~9.5 ms time constant.
  filtered_residual_ +=
      (1.0 - std::exp(-aos::time::DurationInSeconds(kNominalDt) / 0.0095)) *
      (model_divergence - filtered_residual_);
  // TODO(james): Tune this more. Currently set to generally trust the model,
  // perhaps a bit too much.
  // When the residual exceeds the accel threshold, we start using the inertials
  // alone; when it drops back below the model threshold, we go back to being
  // model-based.
  constexpr double kUseAccelThreshold = 2.0;
  constexpr double kUseModelThreshold = 0.5;
  constexpr size_t kShareStates = kNModelStates;
  static_assert(kUseModelThreshold < kUseAccelThreshold);
  if (using_model_) {
    if (filtered_residual_ > kUseAccelThreshold) {
      hysteresis_count_++;
    } else {
      hysteresis_count_ = 0;
    }
    // Note the asymmetry: we abandon the model on the very first sample over
    // threshold, but (below) require 100 consecutive samples to return to it.
    if (hysteresis_count_ > 0) {
      using_model_ = false;
      // Grab the accel-based state from back when we started diverging.
      // TODO(james): This creates a problematic selection bias, because
      // we will tend to bias towards deliberately out-of-tune measurements.
      current_state_.accel_state = branches_[0].accel_state;
      current_state_.model_state = branches_[0].model_state;
      current_state_.model_state = ModelStateForAccelState(
          current_state_.accel_state, encoders, yaw_rate);
    } else {
      VLOG(2) << "Normal branching";
      current_state_.accel_state =
          AccelStateForModelState(current_state_.model_state);
      current_state_.branch_time = t;
    }
    hysteresis_count_ = 0;
  } else {
    if (filtered_residual_ < kUseModelThreshold) {
      hysteresis_count_++;
    } else {
      hysteresis_count_ = 0;
    }
    if (hysteresis_count_ > 100) {
      using_model_ = true;
      // Grab the model-based state from back when we stopped diverging.
      current_state_.model_state.topRows<kShareStates>() =
          ModelStateForAccelState(branches_[0].accel_state, encoders, yaw_rate)
              .topRows<kShareStates>();
      current_state_.accel_state =
          AccelStateForModelState(current_state_.model_state);
    } else {
      // TODO(james): Why was I leaving the encoders/wheel velocities in place?
      current_state_.model_state = ModelStateForAccelState(
          current_state_.accel_state, encoders, yaw_rate);
      current_state_.branch_time = t;
    }
  }

  // Generate a new branch, with the accel state reset based on the model-based
  // state (really, just getting rid of the lateral velocity).
  // By resetting the accel state in the new branch, this tries to minimize the
  // odds of runaway lateral velocities. This doesn't help with runaway
  // longitudinal velocities, however.
  CombinedState new_branch = current_state_;
  new_branch.accel_state = AccelStateForModelState(new_branch.model_state);
  new_branch.accumulated_divergence = 0.0;

  // Only actually record a branch (and a history sample for vision latency
  // compensation) every kBranchPeriod IMU samples.
  ++branch_counter_;
  if (branch_counter_ % kBranchPeriod == 0) {
    branches_.Push(new_branch);
    old_positions_.Push(OldPosition{t, xytheta(), latest_turret_position_,
                                    latest_turret_velocity_});
    branch_counter_ = 0;
  }

  last_residual_ = model_divergence;

  VLOG(2) << "Using " << (using_model_ ? "model" : "accel");
  VLOG(2) << "Residual " << last_residual_;
  VLOG(2) << "Filtered Residual " << filtered_residual_;
  VLOG(2) << "buffer size " << branches_.size();
  VLOG(2) << "Model state " << current_state_.model_state.transpose();
  VLOG(2) << "Accel state " << current_state_.accel_state.transpose();
  VLOG(2) << "Accel state for model "
          << AccelStateForModelState(current_state_.model_state).transpose();
  VLOG(2) << "Input acce " << accel.transpose();
  VLOG(2) << "Input gyro " << gyro.transpose();
  VLOG(2) << "Input voltage " << voltage.transpose();
  VLOG(2) << "Input encoder " << encoders.transpose();
  VLOG(2) << "yaw rate " << yaw_rate;

  CHECK(std::isfinite(last_residual_));
}
| 450 | |
James Kuszmaul | 8c4f659 | 2022-02-26 15:49:30 -0800 | [diff] [blame] | 451 | const ModelBasedLocalizer::OldPosition ModelBasedLocalizer::GetStateForTime( |
| 452 | aos::monotonic_clock::time_point time) { |
| 453 | if (old_positions_.empty()) { |
| 454 | return OldPosition{}; |
| 455 | } |
| 456 | |
| 457 | aos::monotonic_clock::duration lowest_time_error = |
| 458 | aos::monotonic_clock::duration::max(); |
| 459 | const OldPosition *best_match = nullptr; |
| 460 | for (const OldPosition &sample : old_positions_) { |
| 461 | const aos::monotonic_clock::duration time_error = |
| 462 | std::chrono::abs(sample.sample_time - time); |
| 463 | if (time_error < lowest_time_error) { |
| 464 | lowest_time_error = time_error; |
| 465 | best_match = &sample; |
| 466 | } |
| 467 | } |
| 468 | return *best_match; |
| 469 | } |
| 470 | |
| 471 | namespace { |
| 472 | // Converts a flatbuffer TransformationMatrix to an Eigen matrix. Technically, |
| 473 | // this should be able to do a single memcpy, but the extra verbosity here seems |
| 474 | // appropriate. |
| 475 | Eigen::Matrix<double, 4, 4> FlatbufferToTransformationMatrix( |
| 476 | const frc971::vision::calibration::TransformationMatrix &flatbuffer) { |
| 477 | CHECK_EQ(16u, CHECK_NOTNULL(flatbuffer.data())->size()); |
| 478 | Eigen::Matrix<double, 4, 4> result; |
| 479 | result.setIdentity(); |
| 480 | for (int row = 0; row < 4; ++row) { |
| 481 | for (int col = 0; col < 4; ++col) { |
| 482 | result(row, col) = (*flatbuffer.data())[row * 4 + col]; |
| 483 | } |
| 484 | } |
| 485 | return result; |
| 486 | } |
| 487 | |
| 488 | // Node names of the pis to listen for cameras from. |
James Kuszmaul | 0dedb5e | 2022-03-05 16:02:20 -0800 | [diff] [blame] | 489 | const std::array<std::string_view, 4> kPisToUse{"pi1", "pi2", "pi3", "pi4"}; |
James Kuszmaul | 8c4f659 | 2022-02-26 15:49:30 -0800 | [diff] [blame] | 490 | } |
| 491 | |
// Computes the robot-origin-to-camera transform (H_robot_camera) for the given
// historical state, composing in the turret rotation when the camera is
// turret-mounted. If the turret was spinning too fast at the sample time, the
// rejection reason is recorded (unless one is already set), but the transform
// is still computed and returned.
const Eigen::Matrix<double, 4, 4> ModelBasedLocalizer::CameraTransform(
    const OldPosition &state,
    const frc971::vision::calibration::CameraCalibration *calibration,
    std::optional<RejectionReason> *rejection_reason) const {
  CHECK_NOTNULL(rejection_reason);
  CHECK_NOTNULL(calibration);
  // Per the CameraCalibration specification, we can actually determine whether
  // the camera is the turret camera just from the presence of the
  // turret_extrinsics member.
  const bool is_turret = calibration->has_turret_extrinsics();
  // Ignore readings when the turret is spinning too fast, on the assumption
  // that the odds of screwing up the time compensation are higher.
  // Note that the current number here is chosen pretty arbitrarily--1 rad / sec
  // seems reasonable, but may be unnecessarily low or high.
  constexpr double kMaxTurretVelocity = 1.0;
  if (is_turret && std::abs(state.turret_velocity) > kMaxTurretVelocity &&
      !rejection_reason->has_value()) {
    *rejection_reason = RejectionReason::TURRET_TOO_FAST;
  }
  CHECK(calibration->has_fixed_extrinsics());
  const Eigen::Matrix<double, 4, 4> fixed_extrinsics =
      FlatbufferToTransformationMatrix(*calibration->fixed_extrinsics());

  // Calculate the pose of the camera relative to the robot origin.
  Eigen::Matrix<double, 4, 4> H_robot_camera = fixed_extrinsics;
  if (is_turret) {
    // Chain robot -> turret (yaw at the sampled turret position) -> camera.
    H_robot_camera =
        H_robot_camera *
        frc971::control_loops::TransformationMatrixForYaw<double>(
            state.turret_position) *
        FlatbufferToTransformationMatrix(*calibration->turret_extrinsics());
  }
  return H_robot_camera;
}
| 526 | |
// Uses a camera TargetEstimate (heading and distance to the fixed vision
// target) together with the historical robot/turret state to back out the
// field X/Y position of the robot implied by that image. Returns nullopt (and
// sets *rejection_reason) when the estimate carries no camera calibration.
// The measured camera pose in the field frame is also written to
// *H_field_camera_measured for logging/debugging.
const std::optional<Eigen::Vector2d>
ModelBasedLocalizer::CameraMeasuredRobotPosition(
    const OldPosition &state, const y2022::vision::TargetEstimate *target,
    std::optional<RejectionReason> *rejection_reason,
    Eigen::Matrix<double, 4, 4> *H_field_camera_measured) const {
  if (!target->has_camera_calibration()) {
    *rejection_reason = RejectionReason::NO_CALIBRATION;
    return std::nullopt;
  }
  const Eigen::Matrix<double, 4, 4> H_robot_camera =
      CameraTransform(state, target->camera_calibration(), rejection_reason);
  const control_loops::Pose robot_pose(
      {state.xytheta(0), state.xytheta(1), 0.0}, state.xytheta(2));
  const Eigen::Matrix<double, 4, 4> H_field_robot =
      robot_pose.AsTransformationMatrix();
  // Current estimated pose of the camera in the global frame.
  // Note that this is all really just an elaborate way of extracting the
  // current estimated camera yaw, and nothing else.
  const Eigen::Matrix<double, 4, 4> H_field_camera =
      H_field_robot * H_robot_camera;
  // Grab the implied yaw of the camera (the +Z axis is coming out of the front
  // of the cameras).
  const Eigen::Vector3d rotated_camera_z =
      H_field_camera.block<3, 3>(0, 0) * Eigen::Vector3d(0, 0, 1);
  const double camera_yaw =
      std::atan2(rotated_camera_z.y(), rotated_camera_z.x());
  // All right, now we need to use the heading and distance from the
  // TargetEstimate, plus the yaw embedded in the camera_pose, to determine what
  // the implied X/Y position of the robot is. To do this, we calculate the
  // heading/distance from the target to the robot. The distance is easy, since
  // that's the same as the distance from the robot to the target. The heading
  // isn't too hard, but is obnoxious to think about, since the heading from the
  // target to the robot is distinct from the heading from the robot to the
  // target.

  // Just to walk through examples to confirm that the below calculation is
  // correct:
  // * If yaw = 0, and angle_to_target = 0, we are at 180 deg relative to the
  //   target.
  // * If yaw = 90 deg, and angle_to_target = 0, we are at -90 deg relative to
  //   the target.
  // * If yaw = 0, and angle_to_target = 90 deg, we are at -90 deg relative to
  //   the target.
  const double heading_from_target =
      aos::math::NormalizeAngle(M_PI + camera_yaw + target->angle_to_target());
  const double distance_from_target = target->distance();
  // Extract the implied camera position on the field.
  *H_field_camera_measured = H_field_camera;
  // TODO(james): Are we going to need to evict the roll/pitch components of the
  // camera extrinsics this year as well?
  (*H_field_camera_measured)(0, 3) =
      distance_from_target * std::cos(heading_from_target) + kVisionTargetX;
  (*H_field_camera_measured)(1, 3) =
      distance_from_target * std::sin(heading_from_target) + kVisionTargetY;
  // Back the measured camera pose out to the implied robot pose and return
  // its translation.
  const Eigen::Matrix<double, 4, 4> H_field_robot_measured =
      *H_field_camera_measured * H_robot_camera.inverse();
  return H_field_robot_measured.block<2, 1>(0, 3);
}
| 585 | |
void ModelBasedLocalizer::HandleImageMatch(
    aos::monotonic_clock::time_point sample_time,
    const y2022::vision::TargetEstimate *target, int camera_index) {
  // Ingests one camera TargetEstimate: converts it into an implied field x/y
  // position for the robot (via CameraMeasuredRobotPosition) and, if the
  // estimate survives the rejection checks, applies an x/y Kalman correction
  // to both the model-based and accel-based filters -- on the current state
  // and on every queued branch. A TargetEstimateDebug entry is queued for the
  // visualization output in both the rejected (via TallyRejection) and
  // accepted (at the bottom of this function) cases.
  std::optional<RejectionReason> rejection_reason;

  // Cheaply discard matches that the vision pipeline itself is not confident
  // in before doing any geometry.
  if (target->confidence() < kMinTargetEstimateConfidence) {
    rejection_reason = RejectionReason::LOW_CONFIDENCE;
    TallyRejection(rejection_reason.value());
    return;
  }

  // Use the historical state from when the image was captured, since the
  // image is delayed relative to the present filter state.
  const OldPosition &state = GetStateForTime(sample_time);
  Eigen::Matrix<double, 4, 4> H_field_camera_measured;
  const std::optional<Eigen::Vector2d> measured_robot_position =
      CameraMeasuredRobotPosition(state, target, &rejection_reason,
                                  &H_field_camera_measured);
  // Technically, rejection_reason should always be set if
  // measured_robot_position is nullopt, but in the future we may have more
  // recoverable rejection reasons that we wish to allow to propagate further
  // into the process.
  if (!measured_robot_position || rejection_reason.has_value()) {
    CHECK(rejection_reason.has_value());
    TallyRejection(rejection_reason.value());
    return;
  }

  // Next, go through and do the actual Kalman corrections for the x/y
  // measurement, for both the accel state and the model-based state.
  const Eigen::Matrix<double, kNModelStates, kNModelStates> A_continuous_model =
      AModel(current_state_.model_state);

  Eigen::Matrix<double, kNModelStates, kNModelStates> A_discrete_model;
  Eigen::Matrix<double, kNModelStates, kNModelStates> Q_discrete_model;

  // NOTE(review): A_discrete_model/Q_discrete_model are computed here but do
  // not appear to be used below -- confirm whether this discretization is
  // still needed or is leftover work.
  DiscretizeQAFast(Q_continuous_model_, A_continuous_model, kNominalDt,
                   &Q_discrete_model, &A_discrete_model);

  // The camera measurement directly observes the x/y components of each
  // state, so the measurement matrices just select those rows.
  Eigen::Matrix<double, 2, kNModelStates> H_model;
  H_model.setZero();
  Eigen::Matrix<double, 2, kNAccelStates> H_accel;
  H_accel.setZero();
  Eigen::Matrix<double, 2, 2> R;
  R.setZero();
  H_model(0, kX) = 1.0;
  H_model(1, kY) = 1.0;
  H_accel(0, kX) = 1.0;
  H_accel(1, kY) = 1.0;
  R.diagonal() << 1e-2, 1e-2;

  // Standard Kalman gain: K = P H^T (H P H^T + R)^-1.
  const Eigen::Matrix<double, kNModelStates, 2> K_model =
      P_model_ * H_model.transpose() *
      (H_model * P_model_ * H_model.transpose() + R).inverse();
  const Eigen::Matrix<double, kNAccelStates, 2> K_accel =
      P_accel_ * H_accel.transpose() *
      (H_accel * P_accel_ * H_accel.transpose() + R).inverse();
  // Covariance update P = (I - K H) P; this must use the same P that was used
  // to compute K above, so the order of these statements matters.
  P_model_ = (Eigen::Matrix<double, kNModelStates, kNModelStates>::Identity() -
              K_model * H_model) *
             P_model_;
  P_accel_ = (Eigen::Matrix<double, kNAccelStates, kNAccelStates>::Identity() -
              K_accel * H_accel) *
             P_accel_;
  // And now we have to correct *everything* on all the branches:
  for (CombinedState &state : branches_) {
    state.model_state += K_model * (measured_robot_position.value() -
                                    H_model * state.model_state);
    state.accel_state += K_accel * (measured_robot_position.value() -
                                    H_accel * state.accel_state);
  }
  current_state_.model_state +=
      K_model *
      (measured_robot_position.value() - H_model * current_state_.model_state);
  current_state_.accel_state +=
      K_accel *
      (measured_robot_position.value() - H_accel * current_state_.accel_state);

  statistics_.total_accepted++;
  statistics_.total_candidates++;

  // Recover the camera yaw from the measured camera pose for the debug
  // output (the camera's +Z axis points out of the lens).
  const Eigen::Vector3d camera_z_in_field =
      H_field_camera_measured.block<3, 3>(0, 0) * Eigen::Vector3d::UnitZ();
  const double camera_yaw =
      std::atan2(camera_z_in_field.y(), camera_z_in_field.x());

  // Queue a debug entry describing this (accepted) image for the
  // visualization message; the queue is drained by PopulateVisualization.
  TargetEstimateDebugT debug;
  debug.camera = static_cast<uint8_t>(camera_index);
  debug.camera_x = H_field_camera_measured(0, 3);
  debug.camera_y = H_field_camera_measured(1, 3);
  debug.camera_theta = camera_yaw;
  debug.implied_robot_x = measured_robot_position.value().x();
  debug.implied_robot_y = measured_robot_position.value().y();
  debug.implied_robot_theta = xytheta()(2);
  debug.implied_turret_goal =
      aos::math::NormalizeAngle(camera_yaw + target->angle_to_target());
  debug.accepted = true;
  debug.image_age_sec = aos::time::DurationInSeconds(t_ - sample_time);
  CHECK_LT(image_debugs_.size(), kDebugBufferSize);
  image_debugs_.push_back(debug);
}
| 684 | |
| 685 | void ModelBasedLocalizer::HandleTurret( |
| 686 | aos::monotonic_clock::time_point sample_time, double turret_position, |
| 687 | double turret_velocity) { |
| 688 | last_turret_update_ = sample_time; |
| 689 | latest_turret_position_ = turret_position; |
| 690 | latest_turret_velocity_ = turret_velocity; |
| 691 | } |
| 692 | |
void ModelBasedLocalizer::HandleReset(aos::monotonic_clock::time_point now,
                                      const Eigen::Vector3d &xytheta) {
  // Forces the localizer to the provided field x/y/theta pose, discarding all
  // queued branches and residual tracking, while preserving the raw encoder
  // readings (which reflect physical sensor state, not pose).
  branches_.Reset();
  t_ = now;
  using_model_ = true;
  // The kLeftEncoder/kRightEncoder entries are read out of the existing state
  // and written back in place via the comma initializer, so the encoder
  // readings survive the reset; velocities and voltage errors are zeroed.
  current_state_.model_state << xytheta(0), xytheta(1), xytheta(2),
      current_state_.model_state(kLeftEncoder), 0.0, 0.0,
      current_state_.model_state(kRightEncoder), 0.0, 0.0;
  // Keep the accel-based state consistent with the freshly reset model state.
  current_state_.accel_state =
      AccelStateForModelState(current_state_.model_state);
  last_residual_ = 0.0;
  filtered_residual_ = 0.0;
  filtered_residual_accel_.setZero();
  abs_accel_.setZero();
}
| 708 | |
// Serializes an accel-based filter state into an AccelBasedState flatbuffer
// table for status/debug output.
flatbuffers::Offset<AccelBasedState> ModelBasedLocalizer::BuildAccelState(
    flatbuffers::FlatBufferBuilder *fbb, const AccelState &state) {
  AccelBasedState::Builder accel_state_builder(*fbb);
  accel_state_builder.add_x(state(kX));
  accel_state_builder.add_y(state(kY));
  accel_state_builder.add_theta(state(kTheta));
  accel_state_builder.add_velocity_x(state(kVelocityX));
  accel_state_builder.add_velocity_y(state(kVelocityY));
  return accel_state_builder.Finish();
}
| 719 | |
// Serializes a model-based filter state (pose, per-side encoder/velocity, and
// the estimated voltage errors) into a ModelBasedState flatbuffer table.
flatbuffers::Offset<ModelBasedState> ModelBasedLocalizer::BuildModelState(
    flatbuffers::FlatBufferBuilder *fbb, const ModelState &state) {
  ModelBasedState::Builder model_state_builder(*fbb);
  model_state_builder.add_x(state(kX));
  model_state_builder.add_y(state(kY));
  model_state_builder.add_theta(state(kTheta));
  model_state_builder.add_left_encoder(state(kLeftEncoder));
  model_state_builder.add_left_velocity(state(kLeftVelocity));
  model_state_builder.add_left_voltage_error(state(kLeftVoltageError));
  model_state_builder.add_right_encoder(state(kRightEncoder));
  model_state_builder.add_right_velocity(state(kRightVelocity));
  model_state_builder.add_right_voltage_error(state(kRightVoltageError));
  return model_state_builder.Finish();
}
| 734 | |
// Serializes the running image acceptance/rejection counters into a
// CumulativeStatistics table.
flatbuffers::Offset<CumulativeStatistics>
ModelBasedLocalizer::PopulateStatistics(flatbuffers::FlatBufferBuilder *fbb) {
  // The rejection-count vector must be created before the table builder is
  // constructed, since flatbuffers forbids nested construction.
  const auto rejections_offset = fbb->CreateVector(
      statistics_.rejection_counts.data(), statistics_.rejection_counts.size());

  CumulativeStatistics::Builder stats_builder(*fbb);
  stats_builder.add_total_accepted(statistics_.total_accepted);
  stats_builder.add_total_candidates(statistics_.total_candidates);
  stats_builder.add_rejection_reason_count(rejections_offset);
  return stats_builder.Finish();
}
| 746 | |
// Builds the complete ModelBasedStatus table: cumulative statistics, down
// estimator state, the current model/accel states, the oldest queued branch
// states (useful for seeing drift across the branch window), residuals, and
// the consolidated x/y/theta estimate. All child tables are built before the
// ModelBasedStatus::Builder is constructed, as flatbuffers requires.
flatbuffers::Offset<ModelBasedStatus> ModelBasedLocalizer::PopulateStatus(
    flatbuffers::FlatBufferBuilder *fbb) {
  const flatbuffers::Offset<CumulativeStatistics> stats_offset =
      PopulateStatistics(fbb);

  const flatbuffers::Offset<control_loops::drivetrain::DownEstimatorState>
      down_estimator_offset = down_estimator_.PopulateStatus(fbb, t_);

  const CombinedState &state = current_state_;

  const flatbuffers::Offset<ModelBasedState> model_state_offset =
      BuildModelState(fbb, state.model_state);

  const flatbuffers::Offset<AccelBasedState> accel_state_offset =
      BuildAccelState(fbb, state.accel_state);

  // If there are no branches queued yet, emit null offsets for the "oldest"
  // states rather than fabricating data.
  const flatbuffers::Offset<AccelBasedState> oldest_accel_state_offset =
      branches_.empty() ? flatbuffers::Offset<AccelBasedState>()
                        : BuildAccelState(fbb, branches_[0].accel_state);

  const flatbuffers::Offset<ModelBasedState> oldest_model_state_offset =
      branches_.empty() ? flatbuffers::Offset<ModelBasedState>()
                        : BuildModelState(fbb, branches_[0].model_state);

  ModelBasedStatus::Builder builder(*fbb);
  builder.add_accel_state(accel_state_offset);
  builder.add_oldest_accel_state(oldest_accel_state_offset);
  builder.add_oldest_model_state(oldest_model_state_offset);
  builder.add_model_state(model_state_offset);
  builder.add_using_model(using_model_);
  builder.add_residual(last_residual_);
  builder.add_filtered_residual(filtered_residual_);
  builder.add_velocity_residual(velocity_residual_);
  builder.add_accel_residual(accel_residual_);
  builder.add_theta_rate_residual(theta_rate_residual_);
  builder.add_down_estimator(down_estimator_offset);
  builder.add_x(xytheta()(0));
  builder.add_y(xytheta()(1));
  builder.add_theta(xytheta()(2));
  builder.add_implied_accel_x(abs_accel_(0));
  builder.add_implied_accel_y(abs_accel_(1));
  builder.add_implied_accel_z(abs_accel_(2));
  builder.add_clock_resets(clock_resets_);
  builder.add_statistics(stats_offset);
  return builder.Finish();
}
| 793 | |
// Builds the LocalizerVisualization message: cumulative statistics plus every
// queued per-image debug entry. Draining image_debugs_ here is what keeps the
// queue below kDebugBufferSize for the CHECKs on the insertion paths.
flatbuffers::Offset<LocalizerVisualization>
ModelBasedLocalizer::PopulateVisualization(
    flatbuffers::FlatBufferBuilder *fbb) {
  const flatbuffers::Offset<CumulativeStatistics> stats_offset =
      PopulateStatistics(fbb);

  aos::SizedArray<flatbuffers::Offset<TargetEstimateDebug>, kDebugBufferSize>
      debug_offsets;

  // Pack each queued debug entry into the buffer before building the vector.
  for (const TargetEstimateDebugT& debug : image_debugs_) {
    debug_offsets.push_back(PackTargetEstimateDebug(debug, fbb));
  }

  // All entries are now serialized; empty the queue for the next batch.
  image_debugs_.clear();

  const flatbuffers::Offset<
      flatbuffers::Vector<flatbuffers::Offset<TargetEstimateDebug>>>
      debug_offset =
          fbb->CreateVector(debug_offsets.data(), debug_offsets.size());

  LocalizerVisualization::Builder builder(*fbb);
  builder.add_statistics(stats_offset);
  builder.add_targets(debug_offset);
  return builder.Finish();
}
| 819 | |
| 820 | void ModelBasedLocalizer::TallyRejection(const RejectionReason reason) { |
| 821 | statistics_.total_candidates++; |
| 822 | statistics_.rejection_counts[static_cast<size_t>(reason)]++; |
| 823 | TargetEstimateDebugT debug; |
| 824 | debug.accepted = false; |
| 825 | debug.rejection_reason = reason; |
James Kuszmaul | 2b2f877 | 2022-03-12 15:25:35 -0800 | [diff] [blame] | 826 | CHECK_LT(image_debugs_.size(), kDebugBufferSize); |
James Kuszmaul | 0dedb5e | 2022-03-05 16:02:20 -0800 | [diff] [blame] | 827 | image_debugs_.push_back(debug); |
| 828 | } |
| 829 | |
| 830 | flatbuffers::Offset<TargetEstimateDebug> |
| 831 | ModelBasedLocalizer::PackTargetEstimateDebug( |
| 832 | const TargetEstimateDebugT &debug, flatbuffers::FlatBufferBuilder *fbb) { |
| 833 | if (!debug.accepted) { |
| 834 | TargetEstimateDebug::Builder builder(*fbb); |
| 835 | builder.add_accepted(debug.accepted); |
| 836 | builder.add_rejection_reason(debug.rejection_reason); |
| 837 | return builder.Finish(); |
| 838 | } else { |
| 839 | flatbuffers::Offset<TargetEstimateDebug> offset = |
| 840 | TargetEstimateDebug::Pack(*fbb, &debug); |
| 841 | flatbuffers::GetMutableTemporaryPointer(*fbb, offset) |
| 842 | ->clear_rejection_reason(); |
| 843 | return offset; |
| 844 | } |
| 845 | } |
| 846 | |
namespace {
// Period at which the encoder readings from the IMU board wrap. The board
// reports encoder counts as 16-bit values, so they wrap every 2^16 ticks;
// this converts that wrap interval into meters.
static double DrivetrainWrapPeriod() {
  return y2022::constants::Values::DrivetrainEncoderToMeters(1 << 16);
}
}  // namespace
| 853 | |
James Kuszmaul | 29c5952 | 2022-02-12 16:44:26 -0800 | [diff] [blame] | 854 | EventLoopLocalizer::EventLoopLocalizer( |
| 855 | aos::EventLoop *event_loop, |
| 856 | const control_loops::drivetrain::DrivetrainConfig<double> &dt_config) |
| 857 | : event_loop_(event_loop), |
| 858 | model_based_(dt_config), |
| 859 | status_sender_(event_loop_->MakeSender<LocalizerStatus>("/localizer")), |
| 860 | output_sender_(event_loop_->MakeSender<LocalizerOutput>("/localizer")), |
James Kuszmaul | 0dedb5e | 2022-03-05 16:02:20 -0800 | [diff] [blame] | 861 | visualization_sender_( |
| 862 | event_loop_->MakeSender<LocalizerVisualization>("/localizer")), |
James Kuszmaul | 29c5952 | 2022-02-12 16:44:26 -0800 | [diff] [blame] | 863 | output_fetcher_( |
| 864 | event_loop_->MakeFetcher<frc971::control_loops::drivetrain::Output>( |
James Kuszmaul | 5ed29dd | 2022-02-13 18:32:06 -0800 | [diff] [blame] | 865 | "/drivetrain")), |
James Kuszmaul | 8c4f659 | 2022-02-26 15:49:30 -0800 | [diff] [blame] | 866 | clock_offset_fetcher_( |
| 867 | event_loop_->MakeFetcher<aos::message_bridge::ServerStatistics>( |
| 868 | "/aos")), |
James Kuszmaul | 2b2f877 | 2022-03-12 15:25:35 -0800 | [diff] [blame] | 869 | superstructure_fetcher_(event_loop_->MakeFetcher<y2022::control_loops::superstructure::Status>( |
| 870 | "/superstructure")), |
James Kuszmaul | 5ed29dd | 2022-02-13 18:32:06 -0800 | [diff] [blame] | 871 | left_encoder_(-DrivetrainWrapPeriod() / 2.0, DrivetrainWrapPeriod()), |
| 872 | right_encoder_(-DrivetrainWrapPeriod() / 2.0, DrivetrainWrapPeriod()) { |
James Kuszmaul | 29c5952 | 2022-02-12 16:44:26 -0800 | [diff] [blame] | 873 | event_loop_->MakeWatcher( |
| 874 | "/drivetrain", |
| 875 | [this]( |
| 876 | const frc971::control_loops::drivetrain::LocalizerControl &control) { |
| 877 | const double theta = control.keep_current_theta() |
| 878 | ? model_based_.xytheta()(2) |
| 879 | : control.theta(); |
| 880 | model_based_.HandleReset(event_loop_->monotonic_now(), |
James Kuszmaul | 8c4f659 | 2022-02-26 15:49:30 -0800 | [diff] [blame] | 881 | {control.x(), control.y(), theta}); |
James Kuszmaul | 29c5952 | 2022-02-12 16:44:26 -0800 | [diff] [blame] | 882 | }); |
James Kuszmaul | 2b2f877 | 2022-03-12 15:25:35 -0800 | [diff] [blame] | 883 | aos::TimerHandler *superstructure_timer = event_loop_->AddTimer([this]() { |
| 884 | if (superstructure_fetcher_.Fetch()) { |
| 885 | const y2022::control_loops::superstructure::Status &status = |
| 886 | *superstructure_fetcher_.get(); |
| 887 | if (!status.has_turret()) { |
| 888 | return; |
| 889 | } |
| 890 | CHECK(status.has_turret()); |
| 891 | model_based_.HandleTurret( |
| 892 | superstructure_fetcher_.context().monotonic_event_time, |
| 893 | status.turret()->position(), status.turret()->velocity()); |
| 894 | } |
| 895 | }); |
| 896 | event_loop_->OnRun([this, superstructure_timer]() { |
| 897 | superstructure_timer->Setup(event_loop_->monotonic_now(), |
| 898 | std::chrono::milliseconds(20)); |
| 899 | }); |
James Kuszmaul | 8c4f659 | 2022-02-26 15:49:30 -0800 | [diff] [blame] | 900 | |
James Kuszmaul | 0dedb5e | 2022-03-05 16:02:20 -0800 | [diff] [blame] | 901 | for (size_t camera_index = 0; camera_index < kPisToUse.size(); ++camera_index) { |
James Kuszmaul | 2b2f877 | 2022-03-12 15:25:35 -0800 | [diff] [blame] | 902 | CHECK_LT(camera_index, target_estimate_fetchers_.size()); |
| 903 | target_estimate_fetchers_[camera_index] = |
| 904 | event_loop_->MakeFetcher<y2022::vision::TargetEstimate>( |
| 905 | absl::StrCat("/", kPisToUse[camera_index], "/camera")); |
| 906 | } |
| 907 | aos::TimerHandler *estimate_timer = event_loop_->AddTimer( |
| 908 | [this]() { |
| 909 | for (size_t camera_index = 0; camera_index < kPisToUse.size(); |
| 910 | ++camera_index) { |
James Kuszmaul | 0dedb5e | 2022-03-05 16:02:20 -0800 | [diff] [blame] | 911 | if (model_based_.NumQueuedImageDebugs() == |
| 912 | ModelBasedLocalizer::kDebugBufferSize || |
| 913 | (last_visualization_send_ + kMinVisualizationPeriod < |
| 914 | event_loop_->monotonic_now())) { |
| 915 | auto builder = visualization_sender_.MakeBuilder(); |
James Kuszmaul | 2b2f877 | 2022-03-12 15:25:35 -0800 | [diff] [blame] | 916 | visualization_sender_.CheckOk(builder.Send( |
| 917 | model_based_.PopulateVisualization(builder.fbb()))); |
James Kuszmaul | 0dedb5e | 2022-03-05 16:02:20 -0800 | [diff] [blame] | 918 | } |
James Kuszmaul | 2b2f877 | 2022-03-12 15:25:35 -0800 | [diff] [blame] | 919 | if (target_estimate_fetchers_[camera_index].Fetch()) { |
| 920 | const std::optional<aos::monotonic_clock::duration> |
| 921 | monotonic_offset = ClockOffset(kPisToUse[camera_index]); |
| 922 | if (!monotonic_offset.has_value()) { |
| 923 | continue; |
| 924 | } |
| 925 | // TODO(james): Get timestamp from message contents. |
| 926 | aos::monotonic_clock::time_point capture_time( |
| 927 | target_estimate_fetchers_[camera_index] |
| 928 | .context() |
| 929 | .monotonic_remote_time - |
| 930 | monotonic_offset.value()); |
| 931 | if (capture_time > target_estimate_fetchers_[camera_index] |
| 932 | .context() |
| 933 | .monotonic_event_time) { |
| 934 | model_based_.TallyRejection(RejectionReason::IMAGE_FROM_FUTURE); |
| 935 | continue; |
| 936 | } |
| 937 | model_based_.HandleImageMatch( |
| 938 | capture_time, target_estimate_fetchers_[camera_index].get(), |
| 939 | camera_index); |
| 940 | } |
| 941 | } |
| 942 | }); |
| 943 | event_loop_->OnRun([this, estimate_timer]() { |
| 944 | estimate_timer->Setup(event_loop_->monotonic_now(), |
| 945 | std::chrono::milliseconds(100)); |
| 946 | }); |
James Kuszmaul | 8c4f659 | 2022-02-26 15:49:30 -0800 | [diff] [blame] | 947 | event_loop_->MakeWatcher( |
James Kuszmaul | 29c5952 | 2022-02-12 16:44:26 -0800 | [diff] [blame] | 948 | "/localizer", [this](const frc971::IMUValuesBatch &values) { |
| 949 | CHECK(values.has_readings()); |
James Kuszmaul | 29c5952 | 2022-02-12 16:44:26 -0800 | [diff] [blame] | 950 | output_fetcher_.Fetch(); |
| 951 | for (const IMUValues *value : *values.readings()) { |
| 952 | zeroer_.InsertAndProcessMeasurement(*value); |
James Kuszmaul | 5ed29dd | 2022-02-13 18:32:06 -0800 | [diff] [blame] | 953 | const Eigen::Vector2d encoders{ |
| 954 | left_encoder_.Unwrap(value->left_encoder()), |
| 955 | right_encoder_.Unwrap(value->right_encoder())}; |
James Kuszmaul | 29c5952 | 2022-02-12 16:44:26 -0800 | [diff] [blame] | 956 | if (zeroer_.Zeroed()) { |
James Kuszmaul | 5ed29dd | 2022-02-13 18:32:06 -0800 | [diff] [blame] | 957 | const aos::monotonic_clock::time_point pico_timestamp{ |
| 958 | std::chrono::microseconds(value->pico_timestamp_us())}; |
| 959 | // TODO(james): If we get large enough drift off of the pico, |
| 960 | // actually do something about it. |
| 961 | if (!pico_offset_.has_value()) { |
| 962 | pico_offset_ = |
| 963 | event_loop_->context().monotonic_event_time - pico_timestamp; |
| 964 | last_pico_timestamp_ = pico_timestamp; |
James Kuszmaul | e5f67dd | 2022-02-12 20:08:29 -0800 | [diff] [blame] | 965 | } |
James Kuszmaul | 5ed29dd | 2022-02-13 18:32:06 -0800 | [diff] [blame] | 966 | if (pico_timestamp < last_pico_timestamp_) { |
| 967 | pico_offset_.value() += std::chrono::microseconds(1ULL << 32); |
| 968 | } |
| 969 | const aos::monotonic_clock::time_point sample_timestamp = |
| 970 | pico_offset_.value() + pico_timestamp; |
| 971 | pico_offset_error_ = |
| 972 | event_loop_->context().monotonic_event_time - sample_timestamp; |
| 973 | const bool disabled = |
| 974 | (output_fetcher_.get() == nullptr) || |
| 975 | (output_fetcher_.context().monotonic_event_time + |
| 976 | std::chrono::milliseconds(10) < |
| 977 | event_loop_->context().monotonic_event_time); |
| 978 | model_based_.HandleImu( |
| 979 | sample_timestamp, |
| 980 | zeroer_.ZeroedGyro(), zeroer_.ZeroedAccel(), encoders, |
| 981 | disabled ? Eigen::Vector2d::Zero() |
| 982 | : Eigen::Vector2d{output_fetcher_->left_voltage(), |
| 983 | output_fetcher_->right_voltage()}); |
| 984 | last_pico_timestamp_ = pico_timestamp; |
James Kuszmaul | 29c5952 | 2022-02-12 16:44:26 -0800 | [diff] [blame] | 985 | } |
| 986 | { |
| 987 | auto builder = status_sender_.MakeBuilder(); |
| 988 | const flatbuffers::Offset<ModelBasedStatus> model_based_status = |
| 989 | model_based_.PopulateStatus(builder.fbb()); |
James Kuszmaul | 5ed29dd | 2022-02-13 18:32:06 -0800 | [diff] [blame] | 990 | const flatbuffers::Offset<control_loops::drivetrain::ImuZeroerState> |
| 991 | zeroer_status = zeroer_.PopulateStatus(builder.fbb()); |
James Kuszmaul | 29c5952 | 2022-02-12 16:44:26 -0800 | [diff] [blame] | 992 | LocalizerStatus::Builder status_builder = |
| 993 | builder.MakeBuilder<LocalizerStatus>(); |
| 994 | status_builder.add_model_based(model_based_status); |
| 995 | status_builder.add_zeroed(zeroer_.Zeroed()); |
| 996 | status_builder.add_faulted_zero(zeroer_.Faulted()); |
James Kuszmaul | 5ed29dd | 2022-02-13 18:32:06 -0800 | [diff] [blame] | 997 | status_builder.add_zeroing(zeroer_status); |
| 998 | status_builder.add_left_encoder(encoders(0)); |
| 999 | status_builder.add_right_encoder(encoders(1)); |
| 1000 | if (pico_offset_.has_value()) { |
| 1001 | status_builder.add_pico_offset_ns(pico_offset_.value().count()); |
| 1002 | status_builder.add_pico_offset_error_ns( |
| 1003 | pico_offset_error_.count()); |
| 1004 | } |
James Kuszmaul | 29c5952 | 2022-02-12 16:44:26 -0800 | [diff] [blame] | 1005 | builder.CheckOk(builder.Send(status_builder.Finish())); |
| 1006 | } |
| 1007 | if (last_output_send_ + std::chrono::milliseconds(5) < |
| 1008 | event_loop_->context().monotonic_event_time) { |
| 1009 | auto builder = output_sender_.MakeBuilder(); |
| 1010 | LocalizerOutput::Builder output_builder = |
| 1011 | builder.MakeBuilder<LocalizerOutput>(); |
James Kuszmaul | 1798c07 | 2022-02-13 15:32:11 -0800 | [diff] [blame] | 1012 | // TODO(james): Should we bother to try to estimate time offsets for |
| 1013 | // the pico? |
| 1014 | output_builder.add_monotonic_timestamp_ns( |
| 1015 | value->monotonic_timestamp_ns()); |
James Kuszmaul | 29c5952 | 2022-02-12 16:44:26 -0800 | [diff] [blame] | 1016 | output_builder.add_x(model_based_.xytheta()(0)); |
| 1017 | output_builder.add_y(model_based_.xytheta()(1)); |
| 1018 | output_builder.add_theta(model_based_.xytheta()(2)); |
James Kuszmaul | f3ef9e1 | 2022-03-05 17:13:00 -0800 | [diff] [blame] | 1019 | output_builder.add_zeroed(zeroer_.Zeroed()); |
James Kuszmaul | 10d3fd4 | 2022-02-25 21:57:36 -0800 | [diff] [blame] | 1020 | const Eigen::Quaterniond &orientation = model_based_.orientation(); |
| 1021 | Quaternion quaternion; |
| 1022 | quaternion.mutate_x(orientation.x()); |
| 1023 | quaternion.mutate_y(orientation.y()); |
| 1024 | quaternion.mutate_z(orientation.z()); |
| 1025 | quaternion.mutate_w(orientation.w()); |
| 1026 | output_builder.add_orientation(&quaternion); |
James Kuszmaul | 29c5952 | 2022-02-12 16:44:26 -0800 | [diff] [blame] | 1027 | builder.CheckOk(builder.Send(output_builder.Finish())); |
| 1028 | last_output_send_ = event_loop_->monotonic_now(); |
| 1029 | } |
| 1030 | } |
| 1031 | }); |
| 1032 | } |
| 1033 | |
// Returns the message bridge's current estimate of the clock offset between
// this node and the named pi, or nullopt (after tallying a rejection) if the
// pi is listed in the connections but currently has no offset (i.e., it is
// disconnected).
std::optional<aos::monotonic_clock::duration> EventLoopLocalizer::ClockOffset(
    std::string_view pi) {
  std::optional<aos::monotonic_clock::duration> monotonic_offset;
  clock_offset_fetcher_.Fetch();
  if (clock_offset_fetcher_.get() != nullptr) {
    // Scan the connection list for the entry matching this pi.
    for (const auto connection : *clock_offset_fetcher_->connections()) {
      if (connection->has_node() && connection->node()->has_name() &&
          connection->node()->name()->string_view() == pi) {
        if (connection->has_monotonic_offset()) {
          monotonic_offset =
              std::chrono::nanoseconds(connection->monotonic_offset());
        } else {
          // If we don't have a monotonic offset, that means we aren't
          // connected.
          model_based_.TallyRejection(
              RejectionReason::MESSAGE_BRIDGE_DISCONNECTED);
          return std::nullopt;
        }
        break;
      }
    }
  }
  // NOTE(review): this CHECK fires if no ServerStatistics message has arrived
  // yet or if the requested pi never appears in the connection list -- confirm
  // both are impossible by the time images are being processed.
  CHECK(monotonic_offset.has_value());
  return monotonic_offset;
}
| 1059 | |
James Kuszmaul | 29c5952 | 2022-02-12 16:44:26 -0800 | [diff] [blame] | 1060 | } // namespace frc971::controls |