#include "y2022/localizer/localizer.h"

#include "aos/json_to_flatbuffer.h"
#include "frc971/control_loops/c2d.h"
#include "frc971/wpilib/imu_batch_generated.h"
#include "y2022/constants.h"

namespace frc971::controls {

namespace {
constexpr double kG = 9.80665;
constexpr std::chrono::microseconds kNominalDt(500);

// Field position of the target (the 2022 target is conveniently in the middle
// of the field...).
constexpr double kVisionTargetX = 0.0;
constexpr double kVisionTargetY = 0.0;

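// Converts a std::vector of doubles into a fixed-size Eigen column vector,
// CHECK-failing if the number of provided values does not match N.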
template <int N>
Eigen::Matrix<double, N, 1> MakeState(std::vector<double> values) {
  CHECK_EQ(static_cast<size_t>(N), values.size());
  Eigen::Matrix<double, N, 1> vector;
  for (int ii = 0; ii < N; ++ii) {
    vector(ii, 0) = values[ii];
  }
  return vector;
}
}  // namespace

ModelBasedLocalizer::ModelBasedLocalizer(
    const control_loops::drivetrain::DrivetrainConfig<double> &dt_config)
    : dt_config_(dt_config),
      velocity_drivetrain_coefficients_(
          dt_config.make_hybrid_drivetrain_velocity_loop()
              .plant()
              .coefficients()),
      down_estimator_(dt_config) {
  statistics_.rejection_counts.fill(0);
  CHECK_EQ(branches_.capacity(), static_cast<size_t>(std::chrono::seconds(1) /
                                                     kNominalDt / kBranchPeriod));
  if (dt_config_.is_simulated) {
    down_estimator_.assume_perfect_gravity();
  }
  A_continuous_accel_.setZero();
  A_continuous_model_.setZero();
  B_continuous_accel_.setZero();
  B_continuous_model_.setZero();

  A_continuous_accel_(kX, kVelocityX) = 1.0;
  A_continuous_accel_(kY, kVelocityY) = 1.0;

  const double diameter = 2.0 * dt_config_.robot_radius;

  A_continuous_model_(kTheta, kLeftVelocity) = -1.0 / diameter;
  A_continuous_model_(kTheta, kRightVelocity) = 1.0 / diameter;
  A_continuous_model_(kLeftEncoder, kLeftVelocity) = 1.0;
  A_continuous_model_(kRightEncoder, kRightVelocity) = 1.0;
  const auto &vel_coefs = velocity_drivetrain_coefficients_;
  A_continuous_model_(kLeftVelocity, kLeftVelocity) =
      vel_coefs.A_continuous(0, 0);
  A_continuous_model_(kLeftVelocity, kRightVelocity) =
      vel_coefs.A_continuous(0, 1);
  A_continuous_model_(kRightVelocity, kLeftVelocity) =
      vel_coefs.A_continuous(1, 0);
  A_continuous_model_(kRightVelocity, kRightVelocity) =
      vel_coefs.A_continuous(1, 1);

  A_continuous_model_(kLeftVelocity, kLeftVoltageError) =
      1 * vel_coefs.B_continuous(0, 0);
  A_continuous_model_(kLeftVelocity, kRightVoltageError) =
      1 * vel_coefs.B_continuous(0, 1);
  A_continuous_model_(kRightVelocity, kLeftVoltageError) =
      1 * vel_coefs.B_continuous(1, 0);
  A_continuous_model_(kRightVelocity, kRightVoltageError) =
      1 * vel_coefs.B_continuous(1, 1);

  B_continuous_model_.block<1, 2>(kLeftVelocity, kLeftVoltage) =
      vel_coefs.B_continuous.row(0);
  B_continuous_model_.block<1, 2>(kRightVelocity, kLeftVoltage) =
      vel_coefs.B_continuous.row(1);

  B_continuous_accel_(kVelocityX, kAccelX) = 1.0;
  B_continuous_accel_(kVelocityY, kAccelY) = 1.0;
  B_continuous_accel_(kTheta, kThetaRate) = 1.0;

  Q_continuous_model_.setZero();
  Q_continuous_model_.diagonal() << 1e-2, 1e-2, 1e-8, 1e-2, 1e-0, 1e-0, 1e-2,
      1e-0, 1e-0;

  Q_continuous_accel_.setZero();
  Q_continuous_accel_.diagonal() << 1e-2, 1e-2, 1e-20, 1e-4, 1e-4;

  P_model_ = Q_continuous_model_ * aos::time::DurationInSeconds(kNominalDt);

  // We can precalculate the discretizations of the accel model because it is
  // actually LTI.

  DiscretizeQAFast(Q_continuous_accel_, A_continuous_accel_, kNominalDt,
                   &Q_discrete_accel_, &A_discrete_accel_);
  P_accel_ = Q_discrete_accel_;
}

Eigen::Matrix<double, ModelBasedLocalizer::kNModelStates,
              ModelBasedLocalizer::kNModelStates>
ModelBasedLocalizer::AModel(
    const ModelBasedLocalizer::ModelState &state) const {
  Eigen::Matrix<double, kNModelStates, kNModelStates> A = A_continuous_model_;
  const double theta = state(kTheta);
  const double stheta = std::sin(theta);
  const double ctheta = std::cos(theta);
  const double velocity = (state(kLeftVelocity) + state(kRightVelocity)) / 2.0;
  A(kX, kTheta) = -stheta * velocity;
  A(kX, kLeftVelocity) = ctheta / 2.0;
  A(kX, kRightVelocity) = ctheta / 2.0;
  A(kY, kTheta) = ctheta * velocity;
  A(kY, kLeftVelocity) = stheta / 2.0;
  A(kY, kRightVelocity) = stheta / 2.0;
  return A;
}

Eigen::Matrix<double, ModelBasedLocalizer::kNAccelStates,
              ModelBasedLocalizer::kNAccelStates>
ModelBasedLocalizer::AAccel() const {
  return A_continuous_accel_;
}

ModelBasedLocalizer::ModelState ModelBasedLocalizer::DiffModel(
    const ModelBasedLocalizer::ModelState &state,
    const ModelBasedLocalizer::ModelInput &U) const {
  ModelState x_dot = AModel(state) * state + B_continuous_model_ * U;
  const double theta = state(kTheta);
  const double stheta = std::sin(theta);
  const double ctheta = std::cos(theta);
  const double velocity = (state(kLeftVelocity) + state(kRightVelocity)) / 2.0;
  x_dot(kX) = ctheta * velocity;
  x_dot(kY) = stheta * velocity;
  return x_dot;
}

ModelBasedLocalizer::AccelState ModelBasedLocalizer::DiffAccel(
    const ModelBasedLocalizer::AccelState &state,
    const ModelBasedLocalizer::AccelInput &U) const {
  return AAccel() * state + B_continuous_accel_ * U;
}

ModelBasedLocalizer::ModelState ModelBasedLocalizer::UpdateModel(
    const ModelBasedLocalizer::ModelState &model,
    const ModelBasedLocalizer::ModelInput &input,
    const aos::monotonic_clock::duration dt) const {
  return control_loops::RungeKutta(
      std::bind(&ModelBasedLocalizer::DiffModel, this, std::placeholders::_1,
                input),
      model, aos::time::DurationInSeconds(dt));
}

ModelBasedLocalizer::AccelState ModelBasedLocalizer::UpdateAccel(
    const ModelBasedLocalizer::AccelState &accel,
    const ModelBasedLocalizer::AccelInput &input,
    const aos::monotonic_clock::duration dt) const {
  return control_loops::RungeKutta(
      std::bind(&ModelBasedLocalizer::DiffAccel, this, std::placeholders::_1,
                input),
      accel, aos::time::DurationInSeconds(dt));
}

ModelBasedLocalizer::AccelState ModelBasedLocalizer::AccelStateForModelState(
    const ModelBasedLocalizer::ModelState &state) const {
  const double robot_speed =
      (state(kLeftVelocity) + state(kRightVelocity)) / 2.0;
  const double lat_speed = (AModel(state) * state)(kTheta) * long_offset_;
  const double velocity_x = std::cos(state(kTheta)) * robot_speed -
                            std::sin(state(kTheta)) * lat_speed;
  const double velocity_y = std::sin(state(kTheta)) * robot_speed +
                            std::cos(state(kTheta)) * lat_speed;
  return (AccelState() << state(0), state(1), state(2), velocity_x, velocity_y)
      .finished();
}

ModelBasedLocalizer::ModelState ModelBasedLocalizer::ModelStateForAccelState(
    const ModelBasedLocalizer::AccelState &state,
    const Eigen::Vector2d &encoders, const double yaw_rate) const {
  const double robot_speed = state(kVelocityX) * std::cos(state(kTheta)) +
                             state(kVelocityY) * std::sin(state(kTheta));
  const double radius = dt_config_.robot_radius;
  const double left_velocity = robot_speed - yaw_rate * radius;
  const double right_velocity = robot_speed + yaw_rate * radius;
  return (ModelState() << state(0), state(1), state(2), encoders(0),
          left_velocity, 0.0, encoders(1), right_velocity, 0.0)
      .finished();
}

double ModelBasedLocalizer::ModelDivergence(
    const ModelBasedLocalizer::CombinedState &state,
    const ModelBasedLocalizer::AccelInput &accel_inputs,
    const Eigen::Vector2d &filtered_accel,
    const ModelBasedLocalizer::ModelInput &model_inputs) {
  // Convert the model state into the acceleration-based state-space and check
  // the distance between the two (should really be a weighted norm, but all
  // the numbers are on ~the same scale).
  // TODO(james): Maybe weight lateral velocity divergence differently than
  // longitudinal? Seems like we tend to get false-positives currently when in
  // sharp turns.
  // TODO(james): For off-center gyros, maybe reduce noise when turning?
  VLOG(2) << "divergence: "
          << (state.accel_state - AccelStateForModelState(state.model_state))
                 .transpose();
  const AccelState diff_accel = DiffAccel(state.accel_state, accel_inputs);
  const ModelState diff_model = DiffModel(state.model_state, model_inputs);
  const double model_lng_velocity =
      (state.model_state(kLeftVelocity) + state.model_state(kRightVelocity)) /
      2.0;
  const double model_lng_accel =
      (diff_model(kLeftVelocity) + diff_model(kRightVelocity)) / 2.0 -
      diff_model(kTheta) * diff_model(kTheta) * long_offset_;
  const double model_lat_accel = diff_model(kTheta) * model_lng_velocity;
  const Eigen::Vector2d robot_frame_accel(model_lng_accel, model_lat_accel);
  const Eigen::Vector2d model_accel =
      Eigen::AngleAxisd(state.model_state(kTheta), Eigen::Vector3d::UnitZ())
          .toRotationMatrix()
          .block<2, 2>(0, 0) *
      robot_frame_accel;
  const double accel_diff = (model_accel - filtered_accel).norm();
  const double theta_rate_diff =
      std::abs(diff_accel(kTheta) - diff_model(kTheta));

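  // Combine three normalized residuals: the difference between the two
  // velocity estimates (normalized by their magnitudes), the difference in
  // yaw rates, and a scaled difference between the model's implied
  // acceleration and the filtered measured acceleration.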
  const Eigen::Vector2d accel_vel = state.accel_state.bottomRows<2>();
  Eigen::Vector2d model_vel =
      AccelStateForModelState(state.model_state).bottomRows<2>();
  velocity_residual_ = (accel_vel - model_vel).norm() /
                       (1.0 + accel_vel.norm() + model_vel.norm());
  theta_rate_residual_ = theta_rate_diff;
  accel_residual_ = accel_diff / 4.0;
  return velocity_residual_ + theta_rate_residual_ + accel_residual_;
}

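// Propagates a single filter branch forward by dt: integrates the accel-based
// state (bleeding off velocity when the down estimator reports that we have
// been still for a while), integrates the model-based state, and then applies
// the Kalman correction for the encoder/gyro measurement Z.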
void ModelBasedLocalizer::UpdateState(
    CombinedState *state,
    const Eigen::Matrix<double, kNModelStates, kNModelOutputs> &K,
    const Eigen::Matrix<double, kNModelOutputs, 1> &Z,
    const Eigen::Matrix<double, kNModelOutputs, kNModelStates> &H,
    const AccelInput &accel_input, const ModelInput &model_input,
    aos::monotonic_clock::duration dt) {
  state->accel_state = UpdateAccel(state->accel_state, accel_input, dt);
  if (down_estimator_.consecutive_still() > 500.0) {
    state->accel_state(kVelocityX) *= 0.9;
    state->accel_state(kVelocityY) *= 0.9;
  }
  state->model_state = UpdateModel(state->model_state, model_input, dt);
  state->model_state += K * (Z - H * state->model_state);
}

void ModelBasedLocalizer::HandleImu(aos::monotonic_clock::time_point t,
                                    const Eigen::Vector3d &gyro,
                                    const Eigen::Vector3d &accel,
                                    const Eigen::Vector2d encoders,
                                    const Eigen::Vector2d voltage) {
  VLOG(2) << t;
  if (t_ == aos::monotonic_clock::min_time) {
    t_ = t;
  }
  if (t_ + 2 * kNominalDt < t) {
    t_ = t;
    ++clock_resets_;
  }
  const aos::monotonic_clock::duration dt = t - t_;
  t_ = t;
  down_estimator_.Predict(gyro, accel, dt);
  // TODO(james): Should we prefer this or use the down-estimator corrected
  // version? Using the down estimator is more principled, but does create more
  // opportunities for subtle biases.
  const double yaw_rate = (dt_config_.imu_transform * gyro)(2);
  const double diameter = 2.0 * dt_config_.robot_radius;

  const Eigen::AngleAxis<double> orientation(
      Eigen::AngleAxis<double>(xytheta()(kTheta), Eigen::Vector3d::UnitZ()) *
      down_estimator_.X_hat());
  last_orientation_ = orientation;

  const Eigen::Vector3d absolute_accel =
      orientation * dt_config_.imu_transform * kG * accel;
  abs_accel_ = absolute_accel;

  VLOG(2) << "abs accel " << absolute_accel.transpose();
  VLOG(2) << "dt " << aos::time::DurationInSeconds(dt);

  // Update all the branched states.
  const AccelInput accel_input(absolute_accel.x(), absolute_accel.y(),
                               yaw_rate);
  const ModelInput model_input(voltage);

  const Eigen::Matrix<double, kNModelStates, kNModelStates> A_continuous =
      AModel(current_state_.model_state);

  Eigen::Matrix<double, kNModelStates, kNModelStates> A_discrete;
  Eigen::Matrix<double, kNModelStates, kNModelStates> Q_discrete;

  DiscretizeQAFast(Q_continuous_model_, A_continuous, dt, &Q_discrete,
                   &A_discrete);

  P_model_ = A_discrete * P_model_ * A_discrete.transpose() + Q_discrete;
  P_accel_ = A_discrete_accel_ * P_accel_ * A_discrete_accel_.transpose() +
             Q_discrete_accel_;

  Eigen::Matrix<double, kNModelOutputs, kNModelStates> H;
  Eigen::Matrix<double, kNModelOutputs, kNModelOutputs> R;
  {
    H.setZero();
    R.setZero();
    H(0, kLeftEncoder) = 1.0;
    H(1, kRightEncoder) = 1.0;
    H(2, kRightVelocity) = 1.0 / diameter;
    H(2, kLeftVelocity) = -1.0 / diameter;

    R.diagonal() << 1e-9, 1e-9, 1e-13;
  }

  const Eigen::Matrix<double, kNModelOutputs, 1> Z(encoders(0), encoders(1),
                                                   yaw_rate);

  if (branches_.empty()) {
    VLOG(2) << "Initializing";
    current_state_.model_state(kLeftEncoder) = encoders(0);
    current_state_.model_state(kRightEncoder) = encoders(1);
    current_state_.branch_time = t;
    branches_.Push(current_state_);
  }

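  // Standard Kalman measurement update for the encoder/gyro measurement:
  // compute the gain from the current model covariance, shrink the
  // covariance, and then apply the same gain to every branch below.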
  const Eigen::Matrix<double, kNModelStates, kNModelOutputs> K =
      P_model_ * H.transpose() * (H * P_model_ * H.transpose() + R).inverse();
  P_model_ = (Eigen::Matrix<double, kNModelStates, kNModelStates>::Identity() -
              K * H) *
             P_model_;
  VLOG(2) << "K\n" << K;
  VLOG(2) << "Z\n" << Z.transpose();

  for (CombinedState &state : branches_) {
    UpdateState(&state, K, Z, H, accel_input, model_input, dt);
  }
  UpdateState(&current_state_, K, Z, H, accel_input, model_input, dt);

  VLOG(2) << "oldest accel " << branches_[0].accel_state.transpose();
  VLOG(2) << "oldest accel diff "
          << DiffAccel(branches_[0].accel_state, accel_input).transpose();
  VLOG(2) << "oldest model " << branches_[0].model_state.transpose();

  // Determine whether to switch modes--if we are currently in model-based
  // mode, swap to accel-based if the two states have diverged meaningfully in
  // the oldest branch. If we are currently in accel-based mode, swap back to
  // model-based once the states in the oldest branch have converged back
  // towards one another (i.e., once the filtered residual has stayed below
  // the model threshold for long enough).
  filtered_residual_accel_ +=
      0.01 * (accel_input.topRows<2>() - filtered_residual_accel_);
  const double model_divergence =
      branches_.full() ? ModelDivergence(branches_[0], accel_input,
                                         filtered_residual_accel_, model_input)
                       : 0.0;
  filtered_residual_ +=
      (1.0 - std::exp(-aos::time::DurationInSeconds(kNominalDt) / 0.0095)) *
      (model_divergence - filtered_residual_);
  // TODO(james): Tune this more. Currently set to generally trust the model,
  // perhaps a bit too much.
  // When the residual exceeds the accel threshold, we start using the
  // inertials alone; when it drops back below the model threshold, we go back
  // to being model-based.
  constexpr double kUseAccelThreshold = 2.0;
  constexpr double kUseModelThreshold = 0.5;
  constexpr size_t kShareStates = kNModelStates;
  static_assert(kUseModelThreshold < kUseAccelThreshold);
  if (using_model_) {
    if (filtered_residual_ > kUseAccelThreshold) {
      hysteresis_count_++;
    } else {
      hysteresis_count_ = 0;
    }
    if (hysteresis_count_ > 0) {
      using_model_ = false;
      // Grab the accel-based state from back when we started diverging.
      // TODO(james): This creates a problematic selection bias, because
      // we will tend to bias towards deliberately out-of-tune measurements.
      current_state_.accel_state = branches_[0].accel_state;
      current_state_.model_state = branches_[0].model_state;
      current_state_.model_state = ModelStateForAccelState(
          current_state_.accel_state, encoders, yaw_rate);
    } else {
      VLOG(2) << "Normal branching";
      current_state_.accel_state =
          AccelStateForModelState(current_state_.model_state);
      current_state_.branch_time = t;
    }
    hysteresis_count_ = 0;
  } else {
    if (filtered_residual_ < kUseModelThreshold) {
      hysteresis_count_++;
    } else {
      hysteresis_count_ = 0;
    }
    if (hysteresis_count_ > 100) {
      using_model_ = true;
      // Grab the model-based state from back when we stopped diverging.
      current_state_.model_state.topRows<kShareStates>() =
          ModelStateForAccelState(branches_[0].accel_state, encoders, yaw_rate)
              .topRows<kShareStates>();
      current_state_.accel_state =
          AccelStateForModelState(current_state_.model_state);
    } else {
      // TODO(james): Why was I leaving the encoders/wheel velocities in place?
      current_state_.model_state = ModelStateForAccelState(
          current_state_.accel_state, encoders, yaw_rate);
      current_state_.branch_time = t;
    }
  }

  // Generate a new branch, with the accel state reset based on the model-based
  // state (really, just getting rid of the lateral velocity).
  // By resetting the accel state in the new branch, this tries to minimize the
  // odds of runaway lateral velocities. This doesn't help with runaway
  // longitudinal velocities, however.
  CombinedState new_branch = current_state_;
  new_branch.accel_state = AccelStateForModelState(new_branch.model_state);
  new_branch.accumulated_divergence = 0.0;

  ++branch_counter_;
  if (branch_counter_ % kBranchPeriod == 0) {
    branches_.Push(new_branch);
    old_positions_.Push(OldPosition{t, xytheta(), latest_turret_position_,
                                    latest_turret_velocity_});
    branch_counter_ = 0;
  }

  last_residual_ = model_divergence;

  VLOG(2) << "Using " << (using_model_ ? "model" : "accel");
  VLOG(2) << "Residual " << last_residual_;
  VLOG(2) << "Filtered Residual " << filtered_residual_;
  VLOG(2) << "buffer size " << branches_.size();
  VLOG(2) << "Model state " << current_state_.model_state.transpose();
  VLOG(2) << "Accel state " << current_state_.accel_state.transpose();
  VLOG(2) << "Accel state for model "
          << AccelStateForModelState(current_state_.model_state).transpose();
  VLOG(2) << "Input accel " << accel.transpose();
  VLOG(2) << "Input gyro " << gyro.transpose();
  VLOG(2) << "Input voltage " << voltage.transpose();
  VLOG(2) << "Input encoder " << encoders.transpose();
  VLOG(2) << "yaw rate " << yaw_rate;

  CHECK(std::isfinite(last_residual_));
}

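// Returns the buffered pose sample whose timestamp is closest to the requested
// time, or a default-constructed OldPosition if no samples have been recorded
// yet.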
const ModelBasedLocalizer::OldPosition ModelBasedLocalizer::GetStateForTime(
    aos::monotonic_clock::time_point time) {
  if (old_positions_.empty()) {
    return OldPosition{};
  }

  aos::monotonic_clock::duration lowest_time_error =
      aos::monotonic_clock::duration::max();
  const OldPosition *best_match = nullptr;
  for (const OldPosition &sample : old_positions_) {
    const aos::monotonic_clock::duration time_error =
        std::chrono::abs(sample.sample_time - time);
    if (time_error < lowest_time_error) {
      lowest_time_error = time_error;
      best_match = &sample;
    }
  }
  return *best_match;
}

namespace {
// Converts a flatbuffer TransformationMatrix to an Eigen matrix. Technically,
// this should be able to do a single memcpy, but the extra verbosity here
// seems appropriate.
Eigen::Matrix<double, 4, 4> FlatbufferToTransformationMatrix(
    const frc971::vision::calibration::TransformationMatrix &flatbuffer) {
  CHECK_EQ(16u, CHECK_NOTNULL(flatbuffer.data())->size());
  Eigen::Matrix<double, 4, 4> result;
  result.setIdentity();
  for (int row = 0; row < 4; ++row) {
    for (int col = 0; col < 4; ++col) {
      result(row, col) = (*flatbuffer.data())[row * 4 + col];
    }
  }
  return result;
}

// Node names of the pis to listen for cameras from.
const std::array<std::string_view, 4> kPisToUse{"pi1", "pi2", "pi3", "pi4"};
}  // namespace

const Eigen::Matrix<double, 4, 4> ModelBasedLocalizer::CameraTransform(
    const OldPosition &state,
    const frc971::vision::calibration::CameraCalibration *calibration,
    std::optional<RejectionReason> *rejection_reason) const {
  CHECK_NOTNULL(rejection_reason);
  CHECK_NOTNULL(calibration);
  // Per the CameraCalibration specification, we can actually determine whether
  // the camera is the turret camera just from the presence of the
  // turret_extrinsics member.
  const bool is_turret = calibration->has_turret_extrinsics();
  // Ignore readings when the turret is spinning too fast, on the assumption
  // that the odds of screwing up the time compensation are higher.
  // Note that the current number here is chosen pretty arbitrarily--1 rad /
  // sec seems reasonable, but may be unnecessarily low or high.
  constexpr double kMaxTurretVelocity = 1.0;
  if (is_turret && std::abs(state.turret_velocity) > kMaxTurretVelocity &&
      !rejection_reason->has_value()) {
    *rejection_reason = RejectionReason::TURRET_TOO_FAST;
  }
  CHECK(calibration->has_fixed_extrinsics());
  const Eigen::Matrix<double, 4, 4> fixed_extrinsics =
      FlatbufferToTransformationMatrix(*calibration->fixed_extrinsics());

  // Calculate the pose of the camera relative to the robot origin.
  Eigen::Matrix<double, 4, 4> H_robot_camera = fixed_extrinsics;
  if (is_turret) {
    H_robot_camera =
        H_robot_camera *
        frc971::control_loops::TransformationMatrixForYaw<double>(
            state.turret_position) *
        FlatbufferToTransformationMatrix(*calibration->turret_extrinsics());
  }
  return H_robot_camera;
}

const std::optional<Eigen::Vector2d>
ModelBasedLocalizer::CameraMeasuredRobotPosition(
    const OldPosition &state, const y2022::vision::TargetEstimate *target,
    std::optional<RejectionReason> *rejection_reason,
    Eigen::Matrix<double, 4, 4> *H_field_camera_measured) const {
  if (!target->has_camera_calibration()) {
    *rejection_reason = RejectionReason::NO_CALIBRATION;
    return std::nullopt;
  }
  const Eigen::Matrix<double, 4, 4> H_robot_camera =
      CameraTransform(state, target->camera_calibration(), rejection_reason);
  const control_loops::Pose robot_pose(
      {state.xytheta(0), state.xytheta(1), 0.0}, state.xytheta(2));
  const Eigen::Matrix<double, 4, 4> H_field_robot =
      robot_pose.AsTransformationMatrix();
  // Current estimated pose of the camera in the global frame.
  // Note that this is all really just an elaborate way of extracting the
  // current estimated camera yaw, and nothing else.
  const Eigen::Matrix<double, 4, 4> H_field_camera =
      H_field_robot * H_robot_camera;
  // Grab the implied yaw of the camera (the +Z axis is coming out of the front
  // of the cameras).
  const Eigen::Vector3d rotated_camera_z =
      H_field_camera.block<3, 3>(0, 0) * Eigen::Vector3d(0, 0, 1);
  const double camera_yaw =
      std::atan2(rotated_camera_z.y(), rotated_camera_z.x());
  // All right, now we need to use the heading and distance from the
  // TargetEstimate, plus the yaw embedded in the camera_pose, to determine
  // what the implied X/Y position of the robot is. To do this, we calculate
  // the heading/distance from the target to the robot. The distance is easy,
  // since that's the same as the distance from the robot to the target. The
  // heading isn't too hard, but is obnoxious to think about, since the
  // heading from the target to the robot is distinct from the heading from
  // the robot to the target.

  // Just to walk through examples to confirm that the below calculation is
  // correct:
  // * If yaw = 0, and angle_to_target = 0, we are at 180 deg relative to the
  //   target.
  // * If yaw = 90 deg, and angle_to_target = 0, we are at -90 deg relative to
  //   the target.
  // * If yaw = 0, and angle_to_target = 90 deg, we are at -90 deg relative to
  //   the target.
  const double heading_from_target =
      aos::math::NormalizeAngle(M_PI + camera_yaw + target->angle_to_target());
  const double distance_from_target = target->distance();
  // Extract the implied camera position on the field.
  *H_field_camera_measured = H_field_camera;
  // TODO(james): Are we going to need to evict the roll/pitch components of
  // the camera extrinsics this year as well?
  (*H_field_camera_measured)(0, 3) =
      distance_from_target * std::cos(heading_from_target) + kVisionTargetX;
  (*H_field_camera_measured)(1, 3) =
      distance_from_target * std::sin(heading_from_target) + kVisionTargetY;
  const Eigen::Matrix<double, 4, 4> H_field_robot_measured =
      *H_field_camera_measured * H_robot_camera.inverse();
  return H_field_robot_measured.block<2, 1>(0, 3);
}

void ModelBasedLocalizer::HandleImageMatch(
    aos::monotonic_clock::time_point sample_time,
    const y2022::vision::TargetEstimate *target, int camera_index) {
  std::optional<RejectionReason> rejection_reason;

  const OldPosition &state = GetStateForTime(sample_time);
  Eigen::Matrix<double, 4, 4> H_field_camera_measured;
  const std::optional<Eigen::Vector2d> measured_robot_position =
      CameraMeasuredRobotPosition(state, target, &rejection_reason,
                                  &H_field_camera_measured);
  // Technically, rejection_reason should always be set if
  // measured_robot_position is nullopt, but in the future we may have more
  // recoverable rejection reasons that we wish to allow to propagate further
  // into the process.
  if (!measured_robot_position || rejection_reason.has_value()) {
    CHECK(rejection_reason.has_value());
    TallyRejection(rejection_reason.value());
    return;
  }

  // Next, go through and do the actual Kalman corrections for the x/y
  // measurement, for both the accel state and the model-based state.
  const Eigen::Matrix<double, kNModelStates, kNModelStates> A_continuous_model =
      AModel(current_state_.model_state);

  Eigen::Matrix<double, kNModelStates, kNModelStates> A_discrete_model;
  Eigen::Matrix<double, kNModelStates, kNModelStates> Q_discrete_model;

  DiscretizeQAFast(Q_continuous_model_, A_continuous_model, kNominalDt,
                   &Q_discrete_model, &A_discrete_model);

  Eigen::Matrix<double, 2, kNModelStates> H_model;
  H_model.setZero();
  Eigen::Matrix<double, 2, kNAccelStates> H_accel;
  H_accel.setZero();
  Eigen::Matrix<double, 2, 2> R;
  R.setZero();
  H_model(0, kX) = 1.0;
  H_model(1, kY) = 1.0;
  H_accel(0, kX) = 1.0;
  H_accel(1, kY) = 1.0;
  R.diagonal() << 1e-2, 1e-2;

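  // Compute independent Kalman gains for the model-based and accel-based
  // filters and shrink their covariances; the same x/y measurement is then
  // applied to both filters and to every buffered branch.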
  const Eigen::Matrix<double, kNModelStates, 2> K_model =
      P_model_ * H_model.transpose() *
      (H_model * P_model_ * H_model.transpose() + R).inverse();
  const Eigen::Matrix<double, kNAccelStates, 2> K_accel =
      P_accel_ * H_accel.transpose() *
      (H_accel * P_accel_ * H_accel.transpose() + R).inverse();
  P_model_ = (Eigen::Matrix<double, kNModelStates, kNModelStates>::Identity() -
              K_model * H_model) *
             P_model_;
  P_accel_ = (Eigen::Matrix<double, kNAccelStates, kNAccelStates>::Identity() -
              K_accel * H_accel) *
             P_accel_;
  // And now we have to correct *everything* on all the branches:
  for (CombinedState &state : branches_) {
    state.model_state += K_model * (measured_robot_position.value() -
                                    H_model * state.model_state);
    state.accel_state += K_accel * (measured_robot_position.value() -
                                    H_accel * state.accel_state);
  }
  current_state_.model_state +=
      K_model *
      (measured_robot_position.value() - H_model * current_state_.model_state);
  current_state_.accel_state +=
      K_accel *
      (measured_robot_position.value() - H_accel * current_state_.accel_state);

  statistics_.total_accepted++;
  statistics_.total_candidates++;

  const Eigen::Vector3d camera_z_in_field =
      H_field_camera_measured.block<3, 3>(0, 0) * Eigen::Vector3d::UnitZ();
  const double camera_yaw =
      std::atan2(camera_z_in_field.y(), camera_z_in_field.x());

  TargetEstimateDebugT debug;
  debug.camera = static_cast<uint8_t>(camera_index);
  debug.camera_x = H_field_camera_measured(0, 3);
  debug.camera_y = H_field_camera_measured(1, 3);
  debug.camera_theta = camera_yaw;
  debug.implied_robot_x = measured_robot_position.value().x();
  debug.implied_robot_y = measured_robot_position.value().y();
  debug.implied_robot_theta = xytheta()(2);
  debug.implied_turret_goal =
      aos::math::NormalizeAngle(camera_yaw + target->angle_to_target());
  debug.accepted = true;
  debug.image_age_sec = aos::time::DurationInSeconds(t_ - sample_time);
  image_debugs_.push_back(debug);
}

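// Records the most recent turret sample; the buffered turret position and
// velocity are used to compensate images taken by the turret-mounted camera.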
void ModelBasedLocalizer::HandleTurret(
    aos::monotonic_clock::time_point sample_time, double turret_position,
    double turret_velocity) {
  last_turret_update_ = sample_time;
  latest_turret_position_ = turret_position;
  latest_turret_velocity_ = turret_velocity;
}

void ModelBasedLocalizer::HandleReset(aos::monotonic_clock::time_point now,
                                      const Eigen::Vector3d &xytheta) {
  branches_.Reset();
  t_ = now;
  using_model_ = true;
  current_state_.model_state << xytheta(0), xytheta(1), xytheta(2),
      current_state_.model_state(kLeftEncoder), 0.0, 0.0,
      current_state_.model_state(kRightEncoder), 0.0, 0.0;
  current_state_.accel_state =
      AccelStateForModelState(current_state_.model_state);
  last_residual_ = 0.0;
  filtered_residual_ = 0.0;
  filtered_residual_accel_.setZero();
  abs_accel_.setZero();
}

flatbuffers::Offset<AccelBasedState> ModelBasedLocalizer::BuildAccelState(
    flatbuffers::FlatBufferBuilder *fbb, const AccelState &state) {
  AccelBasedState::Builder accel_state_builder(*fbb);
  accel_state_builder.add_x(state(kX));
  accel_state_builder.add_y(state(kY));
  accel_state_builder.add_theta(state(kTheta));
  accel_state_builder.add_velocity_x(state(kVelocityX));
  accel_state_builder.add_velocity_y(state(kVelocityY));
  return accel_state_builder.Finish();
}

flatbuffers::Offset<ModelBasedState> ModelBasedLocalizer::BuildModelState(
    flatbuffers::FlatBufferBuilder *fbb, const ModelState &state) {
  ModelBasedState::Builder model_state_builder(*fbb);
  model_state_builder.add_x(state(kX));
  model_state_builder.add_y(state(kY));
  model_state_builder.add_theta(state(kTheta));
  model_state_builder.add_left_encoder(state(kLeftEncoder));
  model_state_builder.add_left_velocity(state(kLeftVelocity));
  model_state_builder.add_left_voltage_error(state(kLeftVoltageError));
  model_state_builder.add_right_encoder(state(kRightEncoder));
  model_state_builder.add_right_velocity(state(kRightVelocity));
  model_state_builder.add_right_voltage_error(state(kRightVoltageError));
  return model_state_builder.Finish();
}

flatbuffers::Offset<CumulativeStatistics>
ModelBasedLocalizer::PopulateStatistics(flatbuffers::FlatBufferBuilder *fbb) {
  const auto rejections_offset = fbb->CreateVector(
      statistics_.rejection_counts.data(), statistics_.rejection_counts.size());

  CumulativeStatistics::Builder stats_builder(*fbb);
  stats_builder.add_total_accepted(statistics_.total_accepted);
  stats_builder.add_total_candidates(statistics_.total_candidates);
  stats_builder.add_rejection_reason_count(rejections_offset);
  return stats_builder.Finish();
}

flatbuffers::Offset<ModelBasedStatus> ModelBasedLocalizer::PopulateStatus(
    flatbuffers::FlatBufferBuilder *fbb) {
  const flatbuffers::Offset<CumulativeStatistics> stats_offset =
      PopulateStatistics(fbb);

  const flatbuffers::Offset<control_loops::drivetrain::DownEstimatorState>
      down_estimator_offset = down_estimator_.PopulateStatus(fbb, t_);

  const CombinedState &state = current_state_;

  const flatbuffers::Offset<ModelBasedState> model_state_offset =
      BuildModelState(fbb, state.model_state);

  const flatbuffers::Offset<AccelBasedState> accel_state_offset =
      BuildAccelState(fbb, state.accel_state);

  const flatbuffers::Offset<AccelBasedState> oldest_accel_state_offset =
      branches_.empty() ? flatbuffers::Offset<AccelBasedState>()
                        : BuildAccelState(fbb, branches_[0].accel_state);

  const flatbuffers::Offset<ModelBasedState> oldest_model_state_offset =
      branches_.empty() ? flatbuffers::Offset<ModelBasedState>()
                        : BuildModelState(fbb, branches_[0].model_state);

  ModelBasedStatus::Builder builder(*fbb);
  builder.add_accel_state(accel_state_offset);
  builder.add_oldest_accel_state(oldest_accel_state_offset);
  builder.add_oldest_model_state(oldest_model_state_offset);
  builder.add_model_state(model_state_offset);
  builder.add_using_model(using_model_);
  builder.add_residual(last_residual_);
  builder.add_filtered_residual(filtered_residual_);
  builder.add_velocity_residual(velocity_residual_);
  builder.add_accel_residual(accel_residual_);
  builder.add_theta_rate_residual(theta_rate_residual_);
  builder.add_down_estimator(down_estimator_offset);
  builder.add_x(xytheta()(0));
  builder.add_y(xytheta()(1));
  builder.add_theta(xytheta()(2));
  builder.add_implied_accel_x(abs_accel_(0));
  builder.add_implied_accel_y(abs_accel_(1));
  builder.add_implied_accel_z(abs_accel_(2));
  builder.add_clock_resets(clock_resets_);
  builder.add_statistics(stats_offset);
  return builder.Finish();
}

flatbuffers::Offset<LocalizerVisualization>
ModelBasedLocalizer::PopulateVisualization(
    flatbuffers::FlatBufferBuilder *fbb) {
  const flatbuffers::Offset<CumulativeStatistics> stats_offset =
      PopulateStatistics(fbb);

  aos::SizedArray<flatbuffers::Offset<TargetEstimateDebug>, kDebugBufferSize>
      debug_offsets;

  for (const TargetEstimateDebugT &debug : image_debugs_) {
    debug_offsets.push_back(PackTargetEstimateDebug(debug, fbb));
  }

  image_debugs_.clear();

  const flatbuffers::Offset<
      flatbuffers::Vector<flatbuffers::Offset<TargetEstimateDebug>>>
      debug_offset =
          fbb->CreateVector(debug_offsets.data(), debug_offsets.size());

  LocalizerVisualization::Builder builder(*fbb);
  builder.add_statistics(stats_offset);
  builder.add_targets(debug_offset);
  return builder.Finish();
}

void ModelBasedLocalizer::TallyRejection(const RejectionReason reason) {
  statistics_.total_candidates++;
  statistics_.rejection_counts[static_cast<size_t>(reason)]++;
  TargetEstimateDebugT debug;
  debug.accepted = false;
  debug.rejection_reason = reason;
  image_debugs_.push_back(debug);
}

flatbuffers::Offset<TargetEstimateDebug>
ModelBasedLocalizer::PackTargetEstimateDebug(
    const TargetEstimateDebugT &debug, flatbuffers::FlatBufferBuilder *fbb) {
  if (!debug.accepted) {
    TargetEstimateDebug::Builder builder(*fbb);
    builder.add_accepted(debug.accepted);
    builder.add_rejection_reason(debug.rejection_reason);
    return builder.Finish();
  } else {
    flatbuffers::Offset<TargetEstimateDebug> offset =
        TargetEstimateDebug::Pack(*fbb, &debug);
    flatbuffers::GetMutableTemporaryPointer(*fbb, offset)
        ->clear_rejection_reason();
    return offset;
  }
}

namespace {
// Period at which the encoder readings from the IMU board wrap.
static double DrivetrainWrapPeriod() {
  return y2022::constants::Values::DrivetrainEncoderToMeters(1 << 16);
}
}  // namespace

EventLoopLocalizer::EventLoopLocalizer(
    aos::EventLoop *event_loop,
    const control_loops::drivetrain::DrivetrainConfig<double> &dt_config)
    : event_loop_(event_loop),
      model_based_(dt_config),
      status_sender_(event_loop_->MakeSender<LocalizerStatus>("/localizer")),
      output_sender_(event_loop_->MakeSender<LocalizerOutput>("/localizer")),
      visualization_sender_(
          event_loop_->MakeSender<LocalizerVisualization>("/localizer")),
      output_fetcher_(
          event_loop_->MakeFetcher<frc971::control_loops::drivetrain::Output>(
              "/drivetrain")),
      clock_offset_fetcher_(
          event_loop_->MakeFetcher<aos::message_bridge::ServerStatistics>(
              "/aos")),
      left_encoder_(-DrivetrainWrapPeriod() / 2.0, DrivetrainWrapPeriod()),
      right_encoder_(-DrivetrainWrapPeriod() / 2.0, DrivetrainWrapPeriod()) {
  event_loop_->MakeWatcher(
      "/drivetrain",
      [this](
          const frc971::control_loops::drivetrain::LocalizerControl &control) {
        const double theta = control.keep_current_theta()
                                 ? model_based_.xytheta()(2)
                                 : control.theta();
        model_based_.HandleReset(event_loop_->monotonic_now(),
                                 {control.x(), control.y(), theta});
      });
  event_loop_->MakeWatcher(
      "/superstructure",
      [this](const y2022::control_loops::superstructure::Status &status) {
        if (!status.has_turret()) {
          return;
        }
        CHECK(status.has_turret());
        model_based_.HandleTurret(event_loop_->context().monotonic_event_time,
                                  status.turret()->position(),
                                  status.turret()->velocity());
      });

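  // Watch for target estimates from each pi's camera. Each image is shifted
  // into our monotonic clock using the message-bridge clock offset before
  // being handed to the model-based localizer.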
  for (size_t camera_index = 0; camera_index < kPisToUse.size();
       ++camera_index) {
    event_loop_->MakeWatcher(
        absl::StrCat("/", kPisToUse[camera_index], "/camera"),
        [this, camera_index](const y2022::vision::TargetEstimate &target) {
          const std::optional<aos::monotonic_clock::duration>
              monotonic_offset = ClockOffset(kPisToUse[camera_index]);
          if (!monotonic_offset.has_value()) {
            return;
          }
          // TODO(james): Get timestamp from message contents.
          aos::monotonic_clock::time_point capture_time(
              event_loop_->context().monotonic_remote_time -
              monotonic_offset.value());
          if (capture_time > event_loop_->context().monotonic_event_time) {
            model_based_.TallyRejection(RejectionReason::IMAGE_FROM_FUTURE);
            return;
          }
          model_based_.HandleImageMatch(capture_time, &target, camera_index);
          if (model_based_.NumQueuedImageDebugs() ==
                  ModelBasedLocalizer::kDebugBufferSize ||
              (last_visualization_send_ + kMinVisualizationPeriod <
               event_loop_->monotonic_now())) {
            auto builder = visualization_sender_.MakeBuilder();
            visualization_sender_.CheckOk(
                builder.Send(model_based_.PopulateVisualization(builder.fbb())));
          }
        });
  }
  event_loop_->MakeWatcher(
      "/localizer", [this](const frc971::IMUValuesBatch &values) {
        CHECK(values.has_readings());
        output_fetcher_.Fetch();
        for (const IMUValues *value : *values.readings()) {
          zeroer_.InsertAndProcessMeasurement(*value);
          const Eigen::Vector2d encoders{
              left_encoder_.Unwrap(value->left_encoder()),
              right_encoder_.Unwrap(value->right_encoder())};
          if (zeroer_.Zeroed()) {
            const aos::monotonic_clock::time_point pico_timestamp{
                std::chrono::microseconds(value->pico_timestamp_us())};
            // TODO(james): If we get large enough drift off of the pico,
            // actually do something about it.
            if (!pico_offset_.has_value()) {
              pico_offset_ =
                  event_loop_->context().monotonic_event_time - pico_timestamp;
              last_pico_timestamp_ = pico_timestamp;
            }
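            // The pico reports a 32-bit microsecond timestamp, so when it
            // goes backwards we assume it wrapped and bump the offset by 2^32
            // microseconds to keep the reconstructed sample time monotonic.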
            if (pico_timestamp < last_pico_timestamp_) {
              pico_offset_.value() += std::chrono::microseconds(1ULL << 32);
            }
            const aos::monotonic_clock::time_point sample_timestamp =
                pico_offset_.value() + pico_timestamp;
            pico_offset_error_ =
                event_loop_->context().monotonic_event_time - sample_timestamp;
            const bool disabled =
                (output_fetcher_.get() == nullptr) ||
                (output_fetcher_.context().monotonic_event_time +
                     std::chrono::milliseconds(10) <
                 event_loop_->context().monotonic_event_time);
            model_based_.HandleImu(
                sample_timestamp, zeroer_.ZeroedGyro(), zeroer_.ZeroedAccel(),
                encoders,
                disabled ? Eigen::Vector2d::Zero()
                         : Eigen::Vector2d{output_fetcher_->left_voltage(),
                                           output_fetcher_->right_voltage()});
            last_pico_timestamp_ = pico_timestamp;
          }
          {
            auto builder = status_sender_.MakeBuilder();
            const flatbuffers::Offset<ModelBasedStatus> model_based_status =
                model_based_.PopulateStatus(builder.fbb());
            const flatbuffers::Offset<control_loops::drivetrain::ImuZeroerState>
                zeroer_status = zeroer_.PopulateStatus(builder.fbb());
            LocalizerStatus::Builder status_builder =
                builder.MakeBuilder<LocalizerStatus>();
            status_builder.add_model_based(model_based_status);
            status_builder.add_zeroed(zeroer_.Zeroed());
            status_builder.add_faulted_zero(zeroer_.Faulted());
            status_builder.add_zeroing(zeroer_status);
            status_builder.add_left_encoder(encoders(0));
            status_builder.add_right_encoder(encoders(1));
            if (pico_offset_.has_value()) {
              status_builder.add_pico_offset_ns(pico_offset_.value().count());
              status_builder.add_pico_offset_error_ns(
                  pico_offset_error_.count());
            }
            builder.CheckOk(builder.Send(status_builder.Finish()));
          }
          if (last_output_send_ + std::chrono::milliseconds(5) <
              event_loop_->context().monotonic_event_time) {
            auto builder = output_sender_.MakeBuilder();
            LocalizerOutput::Builder output_builder =
                builder.MakeBuilder<LocalizerOutput>();
            // TODO(james): Should we bother to try to estimate time offsets
            // for the pico?
            output_builder.add_monotonic_timestamp_ns(
                value->monotonic_timestamp_ns());
            output_builder.add_x(model_based_.xytheta()(0));
            output_builder.add_y(model_based_.xytheta()(1));
            output_builder.add_theta(model_based_.xytheta()(2));
            output_builder.add_zeroed(zeroer_.Zeroed());
            const Eigen::Quaterniond &orientation = model_based_.orientation();
            Quaternion quaternion;
            quaternion.mutate_x(orientation.x());
            quaternion.mutate_y(orientation.y());
            quaternion.mutate_z(orientation.z());
            quaternion.mutate_w(orientation.w());
            output_builder.add_orientation(&quaternion);
            builder.CheckOk(builder.Send(output_builder.Finish()));
            last_output_send_ = event_loop_->monotonic_now();
          }
        }
      });
}

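// Looks up the estimated offset between our monotonic clock and the given
// pi's monotonic clock from the message_bridge ServerStatistics. Tallies a
// rejection and returns nullopt if the pi is not currently connected.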
std::optional<aos::monotonic_clock::duration> EventLoopLocalizer::ClockOffset(
    std::string_view pi) {
  std::optional<aos::monotonic_clock::duration> monotonic_offset;
  clock_offset_fetcher_.Fetch();
  if (clock_offset_fetcher_.get() != nullptr) {
    for (const auto connection : *clock_offset_fetcher_->connections()) {
      if (connection->has_node() && connection->node()->has_name() &&
          connection->node()->name()->string_view() == pi) {
        if (connection->has_monotonic_offset()) {
          monotonic_offset =
              std::chrono::nanoseconds(connection->monotonic_offset());
        } else {
          // If we don't have a monotonic offset, that means we aren't
          // connected.
          model_based_.TallyRejection(
              RejectionReason::MESSAGE_BRIDGE_DISCONNECTED);
          return std::nullopt;
        }
        break;
      }
    }
  }
  CHECK(monotonic_offset.has_value());
  return monotonic_offset;
}

}  // namespace frc971::controls