#include "y2022/localizer/localizer.h"

#include "aos/json_to_flatbuffer.h"
#include "frc971/control_loops/c2d.h"
#include "frc971/wpilib/imu_batch_generated.h"
#include "y2022/constants.h"

DEFINE_bool(ignore_accelerometer, false,
            "If set, ignores the accelerometer readings.");

namespace frc971::controls {

namespace {
constexpr double kG = 9.80665;
static constexpr std::chrono::microseconds kNominalDt = ImuWatcher::kNominalDt;
// Field position of the target (the 2022 target is conveniently in the middle
// of the field...).
constexpr double kVisionTargetX = 0.0;
constexpr double kVisionTargetY = 0.0;

// Minimum confidence to require to use a target match.
constexpr double kMinTargetEstimateConfidence = 0.75;

template <int N>
Eigen::Matrix<double, N, 1> MakeState(std::vector<double> values) {
  CHECK_EQ(static_cast<size_t>(N), values.size());
  Eigen::Matrix<double, N, 1> vector;
  for (int ii = 0; ii < N; ++ii) {
    vector(ii, 0) = values[ii];
  }
  return vector;
}
}  // namespace

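// The localizer runs two estimators in parallel: a 9-state "model" filter
// (x, y, theta, left encoder, left velocity, left voltage error, right
// encoder, right velocity, right voltage error) driven by the drivetrain
// dynamics, and a 5-state "accel" estimator (x, y, theta, x velocity,
// y velocity) that simply integrates the IMU. The state orderings listed here
// are inferred from ModelStateForAccelState()/BuildModelState() below.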
ModelBasedLocalizer::ModelBasedLocalizer(
    const control_loops::drivetrain::DrivetrainConfig<double> &dt_config)
    : dt_config_(dt_config),
      velocity_drivetrain_coefficients_(
          dt_config.make_hybrid_drivetrain_velocity_loop()
              .plant()
              .coefficients()),
      down_estimator_(dt_config) {
  statistics_.rejection_counts.fill(0);
  CHECK_EQ(branches_.capacity(),
           static_cast<size_t>(std::chrono::seconds(1) / kNominalDt /
                               kBranchPeriod));
  if (dt_config_.is_simulated) {
    down_estimator_.assume_perfect_gravity();
  }
  A_continuous_accel_.setZero();
  A_continuous_model_.setZero();
  B_continuous_accel_.setZero();
  B_continuous_model_.setZero();

  A_continuous_accel_(kX, kVelocityX) = 1.0;
  A_continuous_accel_(kY, kVelocityY) = 1.0;

  const double diameter = 2.0 * dt_config_.robot_radius;

  A_continuous_model_(kTheta, kLeftVelocity) = -1.0 / diameter;
  A_continuous_model_(kTheta, kRightVelocity) = 1.0 / diameter;
  A_continuous_model_(kLeftEncoder, kLeftVelocity) = 1.0;
  A_continuous_model_(kRightEncoder, kRightVelocity) = 1.0;
  const auto &vel_coefs = velocity_drivetrain_coefficients_;
  A_continuous_model_(kLeftVelocity, kLeftVelocity) =
      vel_coefs.A_continuous(0, 0);
  A_continuous_model_(kLeftVelocity, kRightVelocity) =
      vel_coefs.A_continuous(0, 1);
  A_continuous_model_(kRightVelocity, kLeftVelocity) =
      vel_coefs.A_continuous(1, 0);
  A_continuous_model_(kRightVelocity, kRightVelocity) =
      vel_coefs.A_continuous(1, 1);

  A_continuous_model_(kLeftVelocity, kLeftVoltageError) =
      1 * vel_coefs.B_continuous(0, 0);
  A_continuous_model_(kLeftVelocity, kRightVoltageError) =
      1 * vel_coefs.B_continuous(0, 1);
  A_continuous_model_(kRightVelocity, kLeftVoltageError) =
      1 * vel_coefs.B_continuous(1, 0);
  A_continuous_model_(kRightVelocity, kRightVoltageError) =
      1 * vel_coefs.B_continuous(1, 1);

  B_continuous_model_.block<1, 2>(kLeftVelocity, kLeftVoltage) =
      vel_coefs.B_continuous.row(0);
  B_continuous_model_.block<1, 2>(kRightVelocity, kLeftVoltage) =
      vel_coefs.B_continuous.row(1);

  B_continuous_accel_(kVelocityX, kAccelX) = 1.0;
  B_continuous_accel_(kVelocityY, kAccelY) = 1.0;
  B_continuous_accel_(kTheta, kThetaRate) = 1.0;

  Q_continuous_model_.setZero();
  Q_continuous_model_.diagonal() << 1e-2, 1e-2, 1e-8, 1e-2, 1e-0, 1e-0, 1e-2,
      1e-0, 1e-0;

  Q_continuous_accel_.setZero();
  Q_continuous_accel_.diagonal() << 1e-2, 1e-2, 1e-20, 1e-4, 1e-4;

  P_model_ = Q_continuous_model_ * aos::time::DurationInSeconds(kNominalDt);

  // We can precalculate the discretizations of the accel model because it is
  // actually LTI.

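  // DiscretizeQAFast() (from frc971/control_loops/c2d.h) converts the
  // continuous-time (A, Q) pair into discrete-time equivalents over a fixed
  // timestep -- conceptually A_d = exp(A * dt) and
  // Q_d = integral from 0 to dt of exp(A * tau) * Q * exp(A^T * tau) dtau,
  // with the "Fast" variant approximating those matrix exponentials.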
  DiscretizeQAFast(Q_continuous_accel_, A_continuous_accel_, kNominalDt,
                   &Q_discrete_accel_, &A_discrete_accel_);
  P_accel_ = Q_discrete_accel_;

  led_outputs_.fill(LedOutput::ON);
}

Eigen::Matrix<double, ModelBasedLocalizer::kNModelStates,
              ModelBasedLocalizer::kNModelStates>
ModelBasedLocalizer::AModel(
    const ModelBasedLocalizer::ModelState &state) const {
  Eigen::Matrix<double, kNModelStates, kNModelStates> A = A_continuous_model_;
  const double theta = state(kTheta);
  const double stheta = std::sin(theta);
  const double ctheta = std::cos(theta);
  const double velocity = (state(kLeftVelocity) + state(kRightVelocity)) / 2.0;
  A(kX, kTheta) = -stheta * velocity;
  A(kX, kLeftVelocity) = ctheta / 2.0;
  A(kX, kRightVelocity) = ctheta / 2.0;
  A(kY, kTheta) = ctheta * velocity;
  A(kY, kLeftVelocity) = stheta / 2.0;
  A(kY, kRightVelocity) = stheta / 2.0;
  return A;
}

Eigen::Matrix<double, ModelBasedLocalizer::kNAccelStates,
              ModelBasedLocalizer::kNAccelStates>
ModelBasedLocalizer::AAccel() const {
  return A_continuous_accel_;
}

ModelBasedLocalizer::ModelState ModelBasedLocalizer::DiffModel(
    const ModelBasedLocalizer::ModelState &state,
    const ModelBasedLocalizer::ModelInput &U) const {
  ModelState x_dot = AModel(state) * state + B_continuous_model_ * U;
  const double theta = state(kTheta);
  const double stheta = std::sin(theta);
  const double ctheta = std::cos(theta);
  const double velocity = (state(kLeftVelocity) + state(kRightVelocity)) / 2.0;
  x_dot(kX) = ctheta * velocity;
  x_dot(kY) = stheta * velocity;
  return x_dot;
}

ModelBasedLocalizer::AccelState ModelBasedLocalizer::DiffAccel(
    const ModelBasedLocalizer::AccelState &state,
    const ModelBasedLocalizer::AccelInput &U) const {
  return AAccel() * state + B_continuous_accel_ * U;
}

ModelBasedLocalizer::ModelState ModelBasedLocalizer::UpdateModel(
    const ModelBasedLocalizer::ModelState &model,
    const ModelBasedLocalizer::ModelInput &input,
    const aos::monotonic_clock::duration dt) const {
  return control_loops::RungeKutta(
      std::bind(&ModelBasedLocalizer::DiffModel, this, std::placeholders::_1,
                input),
      model, aos::time::DurationInSeconds(dt));
}

ModelBasedLocalizer::AccelState ModelBasedLocalizer::UpdateAccel(
    const ModelBasedLocalizer::AccelState &accel,
    const ModelBasedLocalizer::AccelInput &input,
    const aos::monotonic_clock::duration dt) const {
  return control_loops::RungeKutta(
      std::bind(&ModelBasedLocalizer::DiffAccel, this, std::placeholders::_1,
                input),
      accel, aos::time::DurationInSeconds(dt));
}

ModelBasedLocalizer::AccelState ModelBasedLocalizer::AccelStateForModelState(
    const ModelBasedLocalizer::ModelState &state) const {
  const double robot_speed =
      (state(kLeftVelocity) + state(kRightVelocity)) / 2.0;
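  // The IMU sits long_offset_ ahead of (or behind) the robot's center of
  // rotation, so while turning it sees a lateral velocity of
  // yaw_rate * long_offset_; fold that into the field-frame velocity below.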
  const double lat_speed = (AModel(state) * state)(kTheta) * long_offset_;
  const double velocity_x = std::cos(state(kTheta)) * robot_speed -
                            std::sin(state(kTheta)) * lat_speed;
  const double velocity_y = std::sin(state(kTheta)) * robot_speed +
                            std::cos(state(kTheta)) * lat_speed;
  return (AccelState() << state(0), state(1), state(2), velocity_x, velocity_y)
      .finished();
}

ModelBasedLocalizer::ModelState ModelBasedLocalizer::ModelStateForAccelState(
    const ModelBasedLocalizer::AccelState &state,
    const Eigen::Vector2d &encoders, const double yaw_rate) const {
  const double robot_speed = state(kVelocityX) * std::cos(state(kTheta)) +
                             state(kVelocityY) * std::sin(state(kTheta));
  const double radius = dt_config_.robot_radius;
  const double left_velocity = robot_speed - yaw_rate * radius;
  const double right_velocity = robot_speed + yaw_rate * radius;
  return (ModelState() << state(0), state(1), state(2), encoders(0),
          left_velocity, 0.0, encoders(1), right_velocity, 0.0)
      .finished();
}

double ModelBasedLocalizer::ModelDivergence(
    const ModelBasedLocalizer::CombinedState &state,
    const ModelBasedLocalizer::AccelInput &accel_inputs,
    const Eigen::Vector2d &filtered_accel,
    const ModelBasedLocalizer::ModelInput &model_inputs) {
  // Convert the model state into the acceleration-based state-space and check
  // the distance between the two (should really be a weighted norm, but all
  // the numbers are on ~the same scale).
  // TODO(james): Maybe weight lateral velocity divergence differently than
  // longitudinal? Seems like we tend to get false-positives currently when in
  // sharp turns.
  // TODO(james): For off-center gyros, maybe reduce noise when turning?
  VLOG(2) << "divergence: "
          << (state.accel_state - AccelStateForModelState(state.model_state))
                 .transpose();
  const AccelState diff_accel = DiffAccel(state.accel_state, accel_inputs);
  const ModelState diff_model = DiffModel(state.model_state, model_inputs);
  const double model_lng_velocity =
      (state.model_state(kLeftVelocity) + state.model_state(kRightVelocity)) /
      2.0;
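  // Reconstruct the robot-frame acceleration implied by the model:
  // longitudinal acceleration is the mean wheel acceleration, minus the
  // centripetal term that an IMU mounted long_offset_ from the center of
  // rotation sees, and lateral acceleration is the usual centripetal term
  // yaw_rate * velocity.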
  const double model_lng_accel =
      (diff_model(kLeftVelocity) + diff_model(kRightVelocity)) / 2.0 -
      diff_model(kTheta) * diff_model(kTheta) * long_offset_;
  const double model_lat_accel = diff_model(kTheta) * model_lng_velocity;
  const Eigen::Vector2d robot_frame_accel(model_lng_accel, model_lat_accel);
  const Eigen::Vector2d model_accel =
      Eigen::AngleAxisd(state.model_state(kTheta), Eigen::Vector3d::UnitZ())
          .toRotationMatrix()
          .block<2, 2>(0, 0) *
      robot_frame_accel;
  const double accel_diff = (model_accel - filtered_accel).norm();
  const double theta_rate_diff =
      std::abs(diff_accel(kTheta) - diff_model(kTheta));

  const Eigen::Vector2d accel_vel = state.accel_state.bottomRows<2>();
  Eigen::Vector2d model_vel =
      AccelStateForModelState(state.model_state).bottomRows<2>();
  velocity_residual_ = (accel_vel - model_vel).norm() /
                       (1.0 + accel_vel.norm() + model_vel.norm());
  theta_rate_residual_ = theta_rate_diff;
  accel_residual_ = accel_diff / 4.0;
  return velocity_residual_ + theta_rate_residual_ + accel_residual_;
}

void ModelBasedLocalizer::UpdateState(
    CombinedState *state,
    const Eigen::Matrix<double, kNModelStates, kNModelOutputs> &K,
    const Eigen::Matrix<double, kNModelOutputs, 1> &Z,
    const Eigen::Matrix<double, kNModelOutputs, kNModelStates> &H,
    const AccelInput &accel_input, const ModelInput &model_input,
    aos::monotonic_clock::duration dt) {
  state->accel_state = UpdateAccel(state->accel_state, accel_input, dt);
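  // If the down estimator has seen a long run of consecutive "still" samples,
  // bleed the accel-filter velocities towards zero so that accelerometer
  // integration drift cannot accumulate while the robot is parked.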
  if (down_estimator_.consecutive_still() > 500.0) {
    state->accel_state(kVelocityX) *= 0.9;
    state->accel_state(kVelocityY) *= 0.9;
  }
  state->model_state = UpdateModel(state->model_state, model_input, dt);
  state->model_state += K * (Z - H * state->model_state);
}

void ModelBasedLocalizer::HandleImu(
    aos::monotonic_clock::time_point t, const Eigen::Vector3d &gyro,
    const Eigen::Vector3d &accel, const std::optional<Eigen::Vector2d> encoders,
    const Eigen::Vector2d voltage) {
  VLOG(2) << t;
  if (t_ == aos::monotonic_clock::min_time) {
    t_ = t;
  }
  if (t_ + 10 * kNominalDt < t) {
    t_ = t;
    ++clock_resets_;
  }
  const aos::monotonic_clock::duration dt = t - t_;
  t_ = t;
  down_estimator_.Predict(gyro, accel, dt);
  // TODO(james): Should we prefer this or use the down-estimator corrected
  // version? Using the down estimator is more principled, but does create more
  // opportunities for subtle biases.
  const double yaw_rate = (dt_config_.imu_transform * gyro)(2);
  const double diameter = 2.0 * dt_config_.robot_radius;

  const Eigen::AngleAxis<double> orientation(
      Eigen::AngleAxis<double>(xytheta()(kTheta), Eigen::Vector3d::UnitZ()) *
      down_estimator_.X_hat());
  last_orientation_ = orientation;

  const Eigen::Vector3d absolute_accel =
      orientation * dt_config_.imu_transform * kG * accel;
  abs_accel_ = absolute_accel;

  VLOG(2) << "abs accel " << absolute_accel.transpose();
  VLOG(2) << "dt " << aos::time::DurationInSeconds(dt);

  // Update all the branched states.
  const AccelInput accel_input(absolute_accel.x(), absolute_accel.y(),
                               yaw_rate);
  const ModelInput model_input(voltage);

  const Eigen::Matrix<double, kNModelStates, kNModelStates> A_continuous =
      AModel(current_state_.model_state);

  Eigen::Matrix<double, kNModelStates, kNModelStates> A_discrete;
  Eigen::Matrix<double, kNModelStates, kNModelStates> Q_discrete;

  DiscretizeQAFast(Q_continuous_model_, A_continuous, dt, &Q_discrete,
                   &A_discrete);

  P_model_ = A_discrete * P_model_ * A_discrete.transpose() + Q_discrete;
  P_accel_ = A_discrete_accel_ * P_accel_ * A_discrete_accel_.transpose() +
             Q_discrete_accel_;

  Eigen::Matrix<double, kNModelOutputs, kNModelStates> H;
  Eigen::Matrix<double, kNModelOutputs, kNModelOutputs> R;
  {
    H.setZero();
    R.setZero();
    H(0, kLeftEncoder) = 1.0;
    H(1, kRightEncoder) = 1.0;
    H(2, kRightVelocity) = 1.0 / diameter;
    H(2, kLeftVelocity) = -1.0 / diameter;

    R.diagonal() << 1e-9, 1e-9, 1e-13;
  }

  const Eigen::Matrix<double, kNModelOutputs, 1> Z =
      encoders.has_value()
          ? Eigen::Vector3d(encoders.value()(0), encoders.value()(1), yaw_rate)
          : Eigen::Vector3d(current_state_.model_state(kLeftEncoder),
                            current_state_.model_state(kRightEncoder),
                            yaw_rate);

  if (branches_.empty()) {
    VLOG(2) << "Initializing";
    current_state_.model_state(kLeftEncoder) = Z(0);
    current_state_.model_state(kRightEncoder) = Z(1);
    current_state_.branch_time = t;
    branches_.Push(current_state_);
  }

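  // Standard Kalman measurement update for the model filter:
  //   K = P * H^T * (H * P * H^T + R)^-1,   P = (I - K * H) * P.
  // The same gain and measurement are then applied to every stored branch
  // (and to the current state) so that all of the candidate histories stay
  // consistent with the encoder and gyro measurements.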
  const Eigen::Matrix<double, kNModelStates, kNModelOutputs> K =
      P_model_ * H.transpose() * (H * P_model_ * H.transpose() + R).inverse();
  P_model_ = (Eigen::Matrix<double, kNModelStates, kNModelStates>::Identity() -
              K * H) *
             P_model_;
  VLOG(2) << "K\n" << K;
  VLOG(2) << "Z\n" << Z.transpose();

  for (CombinedState &state : branches_) {
    UpdateState(&state, K, Z, H, accel_input, model_input, dt);
  }
  UpdateState(&current_state_, K, Z, H, accel_input, model_input, dt);

  VLOG(2) << "oldest accel " << branches_[0].accel_state.transpose();
  VLOG(2) << "oldest accel diff "
          << DiffAccel(branches_[0].accel_state, accel_input).transpose();
  VLOG(2) << "oldest model " << branches_[0].model_state.transpose();

  // Determine whether to switch modes--if we are currently in model-based
  // mode, swap to accel-based if the two states have diverged meaningfully in
  // the oldest branch. If we are currently in accel-based mode, swap back to
  // model-based once the oldest model branch has re-converged with the
  // accel-based estimate.
  filtered_residual_accel_ +=
      0.01 * (accel_input.topRows<2>() - filtered_residual_accel_);
  const double model_divergence =
      branches_.full() ? ModelDivergence(branches_[0], accel_input,
                                         filtered_residual_accel_, model_input)
                       : 0.0;
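  // Low-pass filter the divergence metric; the gain below gives the filter a
  // time constant of roughly 9.5 ms regardless of the exact kNominalDt.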
  filtered_residual_ +=
      (1.0 - std::exp(-aos::time::DurationInSeconds(kNominalDt) / 0.0095)) *
      (model_divergence - filtered_residual_);
  // TODO(james): Tune this more. Currently set to generally trust the model,
  // perhaps a bit too much.
  // When the residual exceeds the accel threshold, we start using the
  // inertials alone; when it drops back below the model threshold, we go back
  // to being model-based.
  constexpr double kUseAccelThreshold = 2.0;
  constexpr double kUseModelThreshold = 0.5;
  constexpr size_t kShareStates = kNModelStates;
  static_assert(kUseModelThreshold < kUseAccelThreshold);
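  // Note the asymmetric hysteresis below: a single cycle above
  // kUseAccelThreshold is enough to fall back to the accelerometer-only
  // estimate, but we require 100 consecutive IMU cycles below
  // kUseModelThreshold before trusting the model again.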
  if (using_model_) {
    if (!FLAGS_ignore_accelerometer &&
        filtered_residual_ > kUseAccelThreshold) {
      hysteresis_count_++;
    } else {
      hysteresis_count_ = 0;
    }
    if (hysteresis_count_ > 0) {
      using_model_ = false;
      // Grab the accel-based state from back when we started diverging.
      // TODO(james): This creates a problematic selection bias, because
      // we will tend to bias towards deliberately out-of-tune measurements.
      current_state_.accel_state = branches_[0].accel_state;
      current_state_.model_state = branches_[0].model_state;
      current_state_.model_state = ModelStateForAccelState(
          current_state_.accel_state, Z.topRows<2>(), yaw_rate);
    } else {
      VLOG(2) << "Normal branching";
      current_state_.accel_state =
          AccelStateForModelState(current_state_.model_state);
      current_state_.branch_time = t;
    }
    hysteresis_count_ = 0;
  } else {
    if (filtered_residual_ < kUseModelThreshold) {
      hysteresis_count_++;
    } else {
      hysteresis_count_ = 0;
    }
    if (hysteresis_count_ > 100) {
      using_model_ = true;
      // Grab the model-based state from back when we stopped diverging.
      current_state_.model_state.topRows<kShareStates>() =
          ModelStateForAccelState(branches_[0].accel_state, Z.topRows<2>(),
                                  yaw_rate)
              .topRows<kShareStates>();
      current_state_.accel_state =
          AccelStateForModelState(current_state_.model_state);
    } else {
      // TODO(james): Why was I leaving the encoders/wheel velocities in place?
      current_state_.model_state = ModelStateForAccelState(
          current_state_.accel_state, Z.topRows<2>(), yaw_rate);
      current_state_.branch_time = t;
    }
  }

  // Generate a new branch, with the accel state reset based on the model-based
  // state (really, just getting rid of the lateral velocity).
  // By resetting the accel state in the new branch, this tries to minimize the
  // odds of runaway lateral velocities. This doesn't help with runaway
  // longitudinal velocities, however.
  CombinedState new_branch = current_state_;
  new_branch.accel_state = AccelStateForModelState(new_branch.model_state);
  new_branch.accumulated_divergence = 0.0;

  ++branch_counter_;
  if (branch_counter_ % kBranchPeriod == 0) {
    branches_.Push(new_branch);
    old_positions_.Push(OldPosition{t, xytheta(), latest_turret_position_,
                                    latest_turret_velocity_});
    branch_counter_ = 0;
  }

  last_residual_ = model_divergence;

  VLOG(2) << "Using " << (using_model_ ? "model" : "accel");
  VLOG(2) << "Residual " << last_residual_;
  VLOG(2) << "Filtered Residual " << filtered_residual_;
  VLOG(2) << "buffer size " << branches_.size();
  VLOG(2) << "Model state " << current_state_.model_state.transpose();
  VLOG(2) << "Accel state " << current_state_.accel_state.transpose();
  VLOG(2) << "Accel state for model "
          << AccelStateForModelState(current_state_.model_state).transpose();
  VLOG(2) << "Input accel " << accel.transpose();
  VLOG(2) << "Input gyro " << gyro.transpose();
  VLOG(2) << "Input voltage " << voltage.transpose();
  VLOG(2) << "Input encoder " << Z.topRows<2>().transpose();
  VLOG(2) << "yaw rate " << yaw_rate;

  CHECK(std::isfinite(last_residual_));
}

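// Returns the buffered pose/turret sample whose timestamp is closest to the
// requested time. Camera frames arrive with noticeable latency, so image
// corrections are computed against the state the robot was in when the image
// was captured rather than against the current state.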
const ModelBasedLocalizer::OldPosition ModelBasedLocalizer::GetStateForTime(
    aos::monotonic_clock::time_point time) {
  if (old_positions_.empty()) {
    return OldPosition{};
  }

  aos::monotonic_clock::duration lowest_time_error =
      aos::monotonic_clock::duration::max();
  const OldPosition *best_match = nullptr;
  for (const OldPosition &sample : old_positions_) {
    const aos::monotonic_clock::duration time_error =
        std::chrono::abs(sample.sample_time - time);
    if (time_error < lowest_time_error) {
      lowest_time_error = time_error;
      best_match = &sample;
    }
  }
  return *best_match;
}

namespace {
// Converts a flatbuffer TransformationMatrix to an Eigen matrix. Technically,
// this should be able to do a single memcpy, but the extra verbosity here
// seems appropriate.
Eigen::Matrix<double, 4, 4> FlatbufferToTransformationMatrix(
    const frc971::vision::calibration::TransformationMatrix &flatbuffer) {
  CHECK_EQ(16u, CHECK_NOTNULL(flatbuffer.data())->size());
  Eigen::Matrix<double, 4, 4> result;
  result.setIdentity();
  for (int row = 0; row < 4; ++row) {
    for (int col = 0; col < 4; ++col) {
      result(row, col) = (*flatbuffer.data())[row * 4 + col];
    }
  }
  return result;
}

// Node names of the pis to listen for cameras from.
constexpr std::array<std::string_view, ModelBasedLocalizer::kNumPis> kPisToUse{
    "pi1", "pi2", "pi3", "pi4"};
}  // namespace

const Eigen::Matrix<double, 4, 4> ModelBasedLocalizer::CameraTransform(
    const OldPosition &state,
    const frc971::vision::calibration::CameraCalibration *calibration,
    std::optional<RejectionReason> *rejection_reason) const {
  CHECK_NOTNULL(rejection_reason);
  CHECK_NOTNULL(calibration);
  // Per the CameraCalibration specification, we can actually determine whether
  // the camera is the turret camera just from the presence of the
  // turret_extrinsics member.
  const bool is_turret = calibration->has_turret_extrinsics();
  // Ignore readings when the turret is spinning too fast, on the assumption
  // that the odds of screwing up the time compensation are higher.
  // Note that the current number here is chosen pretty arbitrarily--1 rad / sec
  // seems reasonable, but may be unnecessarily low or high.
  constexpr double kMaxTurretVelocity = 1.0;
  if (is_turret && std::abs(state.turret_velocity) > kMaxTurretVelocity &&
      !rejection_reason->has_value()) {
    *rejection_reason = RejectionReason::TURRET_TOO_FAST;
  }
  CHECK(calibration->has_fixed_extrinsics());
  const Eigen::Matrix<double, 4, 4> fixed_extrinsics =
      FlatbufferToTransformationMatrix(*calibration->fixed_extrinsics());

  // Calculate the pose of the camera relative to the robot origin.
  Eigen::Matrix<double, 4, 4> H_robot_camera = fixed_extrinsics;
  if (is_turret) {
    H_robot_camera =
        H_robot_camera *
        frc971::control_loops::TransformationMatrixForYaw<double>(
            state.turret_position) *
        FlatbufferToTransformationMatrix(*calibration->turret_extrinsics());
  }
  return H_robot_camera;
}

const std::optional<Eigen::Vector2d>
ModelBasedLocalizer::CameraMeasuredRobotPosition(
    const OldPosition &state, const y2022::vision::TargetEstimate *target,
    std::optional<RejectionReason> *rejection_reason,
    Eigen::Matrix<double, 4, 4> *H_field_camera_measured) const {
  if (!target->has_camera_calibration()) {
    *rejection_reason = RejectionReason::NO_CALIBRATION;
    return std::nullopt;
  }
  const Eigen::Matrix<double, 4, 4> H_robot_camera =
      CameraTransform(state, target->camera_calibration(), rejection_reason);
  const control_loops::Pose robot_pose(
      {state.xytheta(0), state.xytheta(1), 0.0}, state.xytheta(2));
  const Eigen::Matrix<double, 4, 4> H_field_robot =
      robot_pose.AsTransformationMatrix();
  // Current estimated pose of the camera in the global frame.
  // Note that this is all really just an elaborate way of extracting the
  // current estimated camera yaw, and nothing else.
  const Eigen::Matrix<double, 4, 4> H_field_camera =
      H_field_robot * H_robot_camera;
  // Grab the implied yaw of the camera (the +Z axis is coming out of the front
  // of the cameras).
  const Eigen::Vector3d rotated_camera_z =
      H_field_camera.block<3, 3>(0, 0) * Eigen::Vector3d(0, 0, 1);
  const double camera_yaw =
      std::atan2(rotated_camera_z.y(), rotated_camera_z.x());
  // All right, now we need to use the heading and distance from the
  // TargetEstimate, plus the yaw embedded in the camera_pose, to determine
  // what the implied X/Y position of the robot is. To do this, we calculate
  // the heading/distance from the target to the robot. The distance is easy,
  // since that's the same as the distance from the robot to the target. The
  // heading isn't too hard, but is obnoxious to think about, since the heading
  // from the target to the robot is distinct from the heading from the robot
  // to the target.

  // Just to walk through examples to confirm that the below calculation is
  // correct:
  // * If yaw = 0, and angle_to_target = 0, we are at 180 deg relative to the
  //   target.
  // * If yaw = 90 deg, and angle_to_target = 0, we are at -90 deg relative to
  //   the target.
  // * If yaw = 0, and angle_to_target = 90 deg, we are at -90 deg relative to
  //   the target.
  const double heading_from_target =
      aos::math::NormalizeAngle(M_PI + camera_yaw + target->angle_to_target());
  const double distance_from_target = target->distance();
  // Extract the implied camera position on the field.
  *H_field_camera_measured = H_field_camera;
  // TODO(james): Are we going to need to evict the roll/pitch components of
  // the camera extrinsics this year as well?
  (*H_field_camera_measured)(0, 3) =
      distance_from_target * std::cos(heading_from_target) + kVisionTargetX;
  (*H_field_camera_measured)(1, 3) =
      distance_from_target * std::sin(heading_from_target) + kVisionTargetY;
  const Eigen::Matrix<double, 4, 4> H_field_robot_measured =
      *H_field_camera_measured * H_robot_camera.inverse();
  return H_field_robot_measured.block<2, 1>(0, 3);
}

void ModelBasedLocalizer::HandleImageMatch(
    aos::monotonic_clock::time_point sample_time,
    const y2022::vision::TargetEstimate *target, int camera_index) {
  std::optional<RejectionReason> rejection_reason;

  if (target->confidence() < kMinTargetEstimateConfidence) {
    rejection_reason = RejectionReason::LOW_CONFIDENCE;
    TallyRejection(rejection_reason.value());
    return;
  }

  const OldPosition &state = GetStateForTime(sample_time);
  Eigen::Matrix<double, 4, 4> H_field_camera_measured;
  const std::optional<Eigen::Vector2d> measured_robot_position =
      CameraMeasuredRobotPosition(state, target, &rejection_reason,
                                  &H_field_camera_measured);
  // Technically, rejection_reason should always be set if
  // measured_robot_position is nullopt, but in the future we may have more
  // recoverable rejection reasons that we wish to allow to propagate further
  // into the process.
  if (!measured_robot_position || rejection_reason.has_value()) {
    CHECK(rejection_reason.has_value());
    TallyRejection(rejection_reason.value());
    return;
  }

  // Next, go through and do the actual Kalman corrections for the x/y
  // measurement, for both the accel state and the model-based state.
  const Eigen::Matrix<double, kNModelStates, kNModelStates> A_continuous_model =
      AModel(current_state_.model_state);

  Eigen::Matrix<double, kNModelStates, kNModelStates> A_discrete_model;
  Eigen::Matrix<double, kNModelStates, kNModelStates> Q_discrete_model;

  DiscretizeQAFast(Q_continuous_model_, A_continuous_model, kNominalDt,
                   &Q_discrete_model, &A_discrete_model);

  Eigen::Matrix<double, 2, kNModelStates> H_model;
  H_model.setZero();
  Eigen::Matrix<double, 2, kNAccelStates> H_accel;
  H_accel.setZero();
  Eigen::Matrix<double, 2, 2> R;
  R.setZero();
  H_model(0, kX) = 1.0;
  H_model(1, kY) = 1.0;
  H_accel(0, kX) = 1.0;
  H_accel(1, kY) = 1.0;
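  // aggressive_corrections_ is driven by the joystick state (see
  // EventLoopLocalizer): when we believe we are in teleop, use a small
  // measurement variance so camera corrections pull the estimate around
  // quickly; during autonomous, stay conservative and lean on odometry.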
  if (aggressive_corrections_) {
    R.diagonal() << 1e-2, 1e-2;
  } else {
    R.diagonal() << 1e-0, 1e-0;
  }

  const Eigen::Matrix<double, kNModelStates, 2> K_model =
      P_model_ * H_model.transpose() *
      (H_model * P_model_ * H_model.transpose() + R).inverse();
  const Eigen::Matrix<double, kNAccelStates, 2> K_accel =
      P_accel_ * H_accel.transpose() *
      (H_accel * P_accel_ * H_accel.transpose() + R).inverse();
  P_model_ = (Eigen::Matrix<double, kNModelStates, kNModelStates>::Identity() -
              K_model * H_model) *
             P_model_;
  P_accel_ = (Eigen::Matrix<double, kNAccelStates, kNAccelStates>::Identity() -
              K_accel * H_accel) *
             P_accel_;
  // And now we have to correct *everything* on all the branches:
  for (CombinedState &state : branches_) {
    state.model_state += K_model * (measured_robot_position.value() -
                                    H_model * state.model_state);
    state.accel_state += K_accel * (measured_robot_position.value() -
                                    H_accel * state.accel_state);
  }
  current_state_.model_state +=
      K_model *
      (measured_robot_position.value() - H_model * current_state_.model_state);
  current_state_.accel_state +=
      K_accel *
      (measured_robot_position.value() - H_accel * current_state_.accel_state);

  statistics_.total_accepted++;
  statistics_.total_candidates++;

  const Eigen::Vector3d camera_z_in_field =
      H_field_camera_measured.block<3, 3>(0, 0) * Eigen::Vector3d::UnitZ();
  const double camera_yaw =
      std::atan2(camera_z_in_field.y(), camera_z_in_field.x());

  // TODO(milind): actually control this
  led_outputs_[camera_index] = LedOutput::ON;

  TargetEstimateDebugT debug;
  debug.camera = static_cast<uint8_t>(camera_index);
  debug.camera_x = H_field_camera_measured(0, 3);
  debug.camera_y = H_field_camera_measured(1, 3);
  debug.camera_theta = camera_yaw;
  debug.implied_robot_x = measured_robot_position.value().x();
  debug.implied_robot_y = measured_robot_position.value().y();
  debug.implied_robot_theta = xytheta()(2);
  debug.implied_turret_goal =
      aos::math::NormalizeAngle(camera_yaw + target->angle_to_target());
  debug.accepted = true;
  debug.image_age_sec = aos::time::DurationInSeconds(t_ - sample_time);
  CHECK_LT(image_debugs_.size(), kDebugBufferSize);
  image_debugs_.push_back(debug);
}

void ModelBasedLocalizer::HandleTurret(
    aos::monotonic_clock::time_point sample_time, double turret_position,
    double turret_velocity) {
  last_turret_update_ = sample_time;
  latest_turret_position_ = turret_position;
  latest_turret_velocity_ = turret_velocity;
}

void ModelBasedLocalizer::HandleReset(aos::monotonic_clock::time_point now,
                                      const Eigen::Vector3d &xytheta) {
  branches_.Reset();
  t_ = now;
  using_model_ = true;
  current_state_.model_state << xytheta(0), xytheta(1), xytheta(2),
      current_state_.model_state(kLeftEncoder), 0.0, 0.0,
      current_state_.model_state(kRightEncoder), 0.0, 0.0;
  current_state_.accel_state =
      AccelStateForModelState(current_state_.model_state);
  last_residual_ = 0.0;
  filtered_residual_ = 0.0;
  filtered_residual_accel_.setZero();
  abs_accel_.setZero();
}

flatbuffers::Offset<AccelBasedState> ModelBasedLocalizer::BuildAccelState(
    flatbuffers::FlatBufferBuilder *fbb, const AccelState &state) {
  AccelBasedState::Builder accel_state_builder(*fbb);
  accel_state_builder.add_x(state(kX));
  accel_state_builder.add_y(state(kY));
  accel_state_builder.add_theta(state(kTheta));
  accel_state_builder.add_velocity_x(state(kVelocityX));
  accel_state_builder.add_velocity_y(state(kVelocityY));
  return accel_state_builder.Finish();
}

flatbuffers::Offset<ModelBasedState> ModelBasedLocalizer::BuildModelState(
    flatbuffers::FlatBufferBuilder *fbb, const ModelState &state) {
  ModelBasedState::Builder model_state_builder(*fbb);
  model_state_builder.add_x(state(kX));
  model_state_builder.add_y(state(kY));
  model_state_builder.add_theta(state(kTheta));
  model_state_builder.add_left_encoder(state(kLeftEncoder));
  model_state_builder.add_left_velocity(state(kLeftVelocity));
  model_state_builder.add_left_voltage_error(state(kLeftVoltageError));
  model_state_builder.add_right_encoder(state(kRightEncoder));
  model_state_builder.add_right_velocity(state(kRightVelocity));
  model_state_builder.add_right_voltage_error(state(kRightVoltageError));
  return model_state_builder.Finish();
}

flatbuffers::Offset<CumulativeStatistics>
ModelBasedLocalizer::PopulateStatistics(flatbuffers::FlatBufferBuilder *fbb) {
  const auto rejections_offset = fbb->CreateVector(
      statistics_.rejection_counts.data(), statistics_.rejection_counts.size());

  CumulativeStatistics::Builder stats_builder(*fbb);
  stats_builder.add_total_accepted(statistics_.total_accepted);
  stats_builder.add_total_candidates(statistics_.total_candidates);
  stats_builder.add_rejection_reason_count(rejections_offset);
  return stats_builder.Finish();
}

flatbuffers::Offset<ModelBasedStatus> ModelBasedLocalizer::PopulateStatus(
    flatbuffers::FlatBufferBuilder *fbb) {
  const flatbuffers::Offset<CumulativeStatistics> stats_offset =
      PopulateStatistics(fbb);

  const flatbuffers::Offset<control_loops::drivetrain::DownEstimatorState>
      down_estimator_offset = down_estimator_.PopulateStatus(fbb, t_);

  const CombinedState &state = current_state_;

  const flatbuffers::Offset<ModelBasedState> model_state_offset =
      BuildModelState(fbb, state.model_state);

  const flatbuffers::Offset<AccelBasedState> accel_state_offset =
      BuildAccelState(fbb, state.accel_state);

  const flatbuffers::Offset<AccelBasedState> oldest_accel_state_offset =
      branches_.empty() ? flatbuffers::Offset<AccelBasedState>()
                        : BuildAccelState(fbb, branches_[0].accel_state);

  const flatbuffers::Offset<ModelBasedState> oldest_model_state_offset =
      branches_.empty() ? flatbuffers::Offset<ModelBasedState>()
                        : BuildModelState(fbb, branches_[0].model_state);

  ModelBasedStatus::Builder builder(*fbb);
  builder.add_accel_state(accel_state_offset);
  builder.add_oldest_accel_state(oldest_accel_state_offset);
  builder.add_oldest_model_state(oldest_model_state_offset);
  builder.add_model_state(model_state_offset);
  builder.add_using_model(using_model_);
  builder.add_residual(last_residual_);
  builder.add_filtered_residual(filtered_residual_);
  builder.add_velocity_residual(velocity_residual_);
  builder.add_accel_residual(accel_residual_);
  builder.add_theta_rate_residual(theta_rate_residual_);
  builder.add_down_estimator(down_estimator_offset);
  builder.add_x(xytheta()(0));
  builder.add_y(xytheta()(1));
  builder.add_theta(xytheta()(2));
  builder.add_implied_accel_x(abs_accel_(0));
  builder.add_implied_accel_y(abs_accel_(1));
  builder.add_implied_accel_z(abs_accel_(2));
  builder.add_clock_resets(clock_resets_);
  builder.add_statistics(stats_offset);
  return builder.Finish();
}

flatbuffers::Offset<LocalizerVisualization>
ModelBasedLocalizer::PopulateVisualization(
    flatbuffers::FlatBufferBuilder *fbb) {
  const flatbuffers::Offset<CumulativeStatistics> stats_offset =
      PopulateStatistics(fbb);

  aos::SizedArray<flatbuffers::Offset<TargetEstimateDebug>, kDebugBufferSize>
      debug_offsets;

  for (const TargetEstimateDebugT &debug : image_debugs_) {
    debug_offsets.push_back(PackTargetEstimateDebug(debug, fbb));
  }

  image_debugs_.clear();

  const flatbuffers::Offset<
      flatbuffers::Vector<flatbuffers::Offset<TargetEstimateDebug>>>
      debug_offset =
          fbb->CreateVector(debug_offsets.data(), debug_offsets.size());

  LocalizerVisualization::Builder builder(*fbb);
  builder.add_statistics(stats_offset);
  builder.add_targets(debug_offset);
  return builder.Finish();
}

void ModelBasedLocalizer::TallyRejection(const RejectionReason reason) {
  statistics_.total_candidates++;
  statistics_.rejection_counts[static_cast<size_t>(reason)]++;
  TargetEstimateDebugT debug;
  debug.accepted = false;
  debug.rejection_reason = reason;
  CHECK_LT(image_debugs_.size(), kDebugBufferSize);
  image_debugs_.push_back(debug);
}

flatbuffers::Offset<TargetEstimateDebug>
ModelBasedLocalizer::PackTargetEstimateDebug(
    const TargetEstimateDebugT &debug, flatbuffers::FlatBufferBuilder *fbb) {
  if (!debug.accepted) {
    TargetEstimateDebug::Builder builder(*fbb);
    builder.add_accepted(debug.accepted);
    builder.add_rejection_reason(debug.rejection_reason);
    return builder.Finish();
  } else {
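    // Pack the full debug struct, then clear the rejection_reason field so
    // that an accepted target does not report a meaningless default value.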
    flatbuffers::Offset<TargetEstimateDebug> offset =
        TargetEstimateDebug::Pack(*fbb, &debug);
    flatbuffers::GetMutableTemporaryPointer(*fbb, offset)
        ->clear_rejection_reason();
    return offset;
  }
}

EventLoopLocalizer::EventLoopLocalizer(
    aos::EventLoop *event_loop,
    const control_loops::drivetrain::DrivetrainConfig<double> &dt_config)
    : event_loop_(event_loop),
      model_based_(dt_config),
      status_sender_(event_loop_->MakeSender<LocalizerStatus>("/localizer")),
      output_sender_(event_loop_->MakeSender<LocalizerOutput>("/localizer")),
      visualization_sender_(
          event_loop_->MakeSender<LocalizerVisualization>("/localizer")),
      output_fetcher_(
          event_loop_->MakeFetcher<frc971::control_loops::drivetrain::Output>(
              "/drivetrain")),
      clock_offset_fetcher_(
          event_loop_->MakeFetcher<aos::message_bridge::ServerStatistics>(
              "/aos")),
      superstructure_fetcher_(
          event_loop_
              ->MakeFetcher<y2022::control_loops::superstructure::Status>(
                  "/superstructure")),
      joystick_state_fetcher_(
          event_loop_->MakeFetcher<aos::JoystickState>("/roborio/aos")),
      imu_watcher_(event_loop, dt_config,
                   y2022::constants::Values::DrivetrainEncoderToMeters(1),
                   [this](aos::monotonic_clock::time_point sample_time_pico,
                          aos::monotonic_clock::time_point sample_time_pi,
                          std::optional<Eigen::Vector2d> encoders,
                          Eigen::Vector3d gyro, Eigen::Vector3d accel) {
                     HandleImu(sample_time_pico, sample_time_pi, encoders, gyro,
                               accel);
                   }) {
  event_loop->SetRuntimeRealtimePriority(10);
  event_loop_->MakeWatcher(
      "/drivetrain",
      [this](
          const frc971::control_loops::drivetrain::LocalizerControl &control) {
        const double theta = control.keep_current_theta()
                                 ? model_based_.xytheta()(2)
                                 : control.theta();
        model_based_.HandleReset(event_loop_->monotonic_now(),
                                 {control.x(), control.y(), theta});
      });
  aos::TimerHandler *superstructure_timer = event_loop_->AddTimer([this]() {
    if (superstructure_fetcher_.Fetch()) {
      const y2022::control_loops::superstructure::Status &status =
          *superstructure_fetcher_.get();
      if (!status.has_turret()) {
        return;
      }
      CHECK(status.has_turret());
      model_based_.HandleTurret(
          superstructure_fetcher_.context().monotonic_event_time,
          status.turret()->position(), status.turret()->velocity());
    }
  });
  event_loop_->OnRun([this, superstructure_timer]() {
    superstructure_timer->Setup(event_loop_->monotonic_now(),
                                std::chrono::milliseconds(20));
  });

  for (size_t camera_index = 0; camera_index < kPisToUse.size();
       ++camera_index) {
    CHECK_LT(camera_index, target_estimate_fetchers_.size());
    target_estimate_fetchers_[camera_index] =
        event_loop_->MakeFetcher<y2022::vision::TargetEstimate>(
            absl::StrCat("/", kPisToUse[camera_index], "/camera"));
  }
  aos::TimerHandler *estimate_timer = event_loop_->AddTimer([this]() {
    joystick_state_fetcher_.Fetch();
    const bool maybe_in_auto = (joystick_state_fetcher_.get() != nullptr)
                                   ? joystick_state_fetcher_->autonomous()
                                   : true;
    model_based_.set_use_aggressive_image_corrections(!maybe_in_auto);
    for (size_t camera_index = 0; camera_index < kPisToUse.size();
         ++camera_index) {
      if (model_based_.NumQueuedImageDebugs() ==
              ModelBasedLocalizer::kDebugBufferSize ||
          (last_visualization_send_ + kMinVisualizationPeriod <
           event_loop_->monotonic_now())) {
        auto builder = visualization_sender_.MakeBuilder();
        visualization_sender_.CheckOk(
            builder.Send(model_based_.PopulateVisualization(builder.fbb())));
      }
      if (target_estimate_fetchers_[camera_index].Fetch()) {
        const std::optional<aos::monotonic_clock::duration> monotonic_offset =
            ClockOffset(kPisToUse[camera_index]);
        if (!monotonic_offset.has_value()) {
          continue;
        }
        // TODO(james): Get timestamp from message contents.
        aos::monotonic_clock::time_point capture_time(
            target_estimate_fetchers_[camera_index]
                .context()
                .monotonic_remote_time -
            monotonic_offset.value());
        if (capture_time > target_estimate_fetchers_[camera_index]
                               .context()
                               .monotonic_event_time) {
          model_based_.TallyRejection(RejectionReason::IMAGE_FROM_FUTURE);
          continue;
        }
        capture_time -= imu_watcher_.pico_offset_error();
        model_based_.HandleImageMatch(
            capture_time, target_estimate_fetchers_[camera_index].get(),
            camera_index);
      }
    }
  });
  event_loop_->OnRun([this, estimate_timer]() {
    estimate_timer->Setup(event_loop_->monotonic_now(),
                          std::chrono::milliseconds(100));
  });
}

void EventLoopLocalizer::HandleImu(
    aos::monotonic_clock::time_point sample_time_pico,
    aos::monotonic_clock::time_point sample_time_pi,
    std::optional<Eigen::Vector2d> encoders, Eigen::Vector3d gyro,
    Eigen::Vector3d accel) {
  output_fetcher_.Fetch();
  {
    const bool disabled = (output_fetcher_.get() == nullptr) ||
                          (output_fetcher_.context().monotonic_event_time +
                               std::chrono::milliseconds(10) <
                           event_loop_->context().monotonic_event_time);
    // For gyros, use the most recent gyro reading if we aren't zeroed,
    // to avoid missing integration cycles.
    model_based_.HandleImu(
        sample_time_pico, gyro, accel, encoders,
        disabled ? Eigen::Vector2d::Zero()
                 : Eigen::Vector2d{output_fetcher_->left_voltage(),
                                   output_fetcher_->right_voltage()});
  }
  {
    auto builder = status_sender_.MakeBuilder();
    const flatbuffers::Offset<ModelBasedStatus> model_based_status =
        model_based_.PopulateStatus(builder.fbb());
    const flatbuffers::Offset<control_loops::drivetrain::ImuZeroerState>
        zeroer_status = imu_watcher_.zeroer().PopulateStatus(builder.fbb());
    const flatbuffers::Offset<ImuFailures> imu_failures =
        imu_watcher_.PopulateImuFailures(builder.fbb());
    LocalizerStatus::Builder status_builder =
        builder.MakeBuilder<LocalizerStatus>();
    status_builder.add_model_based(model_based_status);
    status_builder.add_zeroed(imu_watcher_.zeroer().Zeroed());
    status_builder.add_faulted_zero(imu_watcher_.zeroer().Faulted());
    status_builder.add_zeroing(zeroer_status);
    status_builder.add_imu_failures(imu_failures);
    if (encoders.has_value()) {
      status_builder.add_left_encoder(encoders.value()(0));
      status_builder.add_right_encoder(encoders.value()(1));
    }
    if (imu_watcher_.pico_offset().has_value()) {
      status_builder.add_pico_offset_ns(
          imu_watcher_.pico_offset().value().count());
      status_builder.add_pico_offset_error_ns(
          imu_watcher_.pico_offset_error().count());
    }
    builder.CheckOk(builder.Send(status_builder.Finish()));
  }
  if (last_output_send_ + std::chrono::milliseconds(5) <
      event_loop_->context().monotonic_event_time) {
    auto builder = output_sender_.MakeBuilder();

    const auto led_outputs_offset = builder.fbb()->CreateVector(
        model_based_.led_outputs().data(), model_based_.led_outputs().size());

    LocalizerOutput::Builder output_builder =
        builder.MakeBuilder<LocalizerOutput>();
    output_builder.add_monotonic_timestamp_ns(
        std::chrono::duration_cast<std::chrono::nanoseconds>(
            sample_time_pi.time_since_epoch())
            .count());
    output_builder.add_x(model_based_.xytheta()(0));
    output_builder.add_y(model_based_.xytheta()(1));
    output_builder.add_theta(model_based_.xytheta()(2));
    output_builder.add_zeroed(imu_watcher_.zeroer().Zeroed());
    output_builder.add_image_accepted_count(model_based_.total_accepted());
    const Eigen::Quaterniond &orientation = model_based_.orientation();
    Quaternion quaternion;
    quaternion.mutate_x(orientation.x());
    quaternion.mutate_y(orientation.y());
    quaternion.mutate_z(orientation.z());
    quaternion.mutate_w(orientation.w());
    output_builder.add_orientation(&quaternion);
    output_builder.add_led_outputs(led_outputs_offset);
    builder.CheckOk(builder.Send(output_builder.Finish()));
    last_output_send_ = event_loop_->monotonic_now();
  }
}

std::optional<aos::monotonic_clock::duration> EventLoopLocalizer::ClockOffset(
    std::string_view pi) {
  std::optional<aos::monotonic_clock::duration> monotonic_offset;
  clock_offset_fetcher_.Fetch();
  if (clock_offset_fetcher_.get() != nullptr) {
    for (const auto connection : *clock_offset_fetcher_->connections()) {
      if (connection->has_node() && connection->node()->has_name() &&
          connection->node()->name()->string_view() == pi) {
        if (connection->has_monotonic_offset()) {
          monotonic_offset =
              std::chrono::nanoseconds(connection->monotonic_offset());
        } else {
          // If we don't have a monotonic offset, that means we aren't
          // connected.
          model_based_.TallyRejection(
              RejectionReason::MESSAGE_BRIDGE_DISCONNECTED);
          return std::nullopt;
        }
        break;
      }
    }
  }
  CHECK(monotonic_offset.has_value());
  return monotonic_offset;
}

}  // namespace frc971::controls