#include "y2022/localizer/localizer.h"

#include "aos/json_to_flatbuffer.h"
#include "frc971/control_loops/c2d.h"
#include "frc971/wpilib/imu_batch_generated.h"
#include "y2022/constants.h"

namespace frc971::controls {

namespace {
constexpr double kG = 9.80665;
constexpr std::chrono::microseconds kNominalDt(500);

// Field position of the target (the 2022 target is conveniently in the middle
// of the field...).
constexpr double kVisionTargetX = 0.0;
constexpr double kVisionTargetY = 0.0;

// Minimum confidence required to use a target match.
constexpr double kMinTargetEstimateConfidence = 0.75;

template <int N>
Eigen::Matrix<double, N, 1> MakeState(std::vector<double> values) {
  CHECK_EQ(static_cast<size_t>(N), values.size());
  Eigen::Matrix<double, N, 1> vector;
  for (int ii = 0; ii < N; ++ii) {
    vector(ii, 0) = values[ii];
  }
  return vector;
}
}  // namespace

ModelBasedLocalizer::ModelBasedLocalizer(
    const control_loops::drivetrain::DrivetrainConfig<double> &dt_config)
    : dt_config_(dt_config),
      velocity_drivetrain_coefficients_(
          dt_config.make_hybrid_drivetrain_velocity_loop()
              .plant()
              .coefficients()),
      down_estimator_(dt_config) {
  statistics_.rejection_counts.fill(0);
  CHECK_EQ(branches_.capacity(),
           static_cast<size_t>(std::chrono::seconds(1) / kNominalDt /
                               kBranchPeriod));
  if (dt_config_.is_simulated) {
    down_estimator_.assume_perfect_gravity();
  }
  A_continuous_accel_.setZero();
  A_continuous_model_.setZero();
  B_continuous_accel_.setZero();
  B_continuous_model_.setZero();

  A_continuous_accel_(kX, kVelocityX) = 1.0;
  A_continuous_accel_(kY, kVelocityY) = 1.0;

  const double diameter = 2.0 * dt_config_.robot_radius;

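  // Wire up the continuous-time model dynamics: the heading integrates the
  // differential wheel velocities (theta_dot = (v_right - v_left) / diameter),
  // the encoder states integrate their respective wheel velocities, and the
  // wheel-velocity dynamics (including the voltage-error states, which act
  // like extra inputs) come from the hybrid velocity drivetrain plant.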
  A_continuous_model_(kTheta, kLeftVelocity) = -1.0 / diameter;
  A_continuous_model_(kTheta, kRightVelocity) = 1.0 / diameter;
  A_continuous_model_(kLeftEncoder, kLeftVelocity) = 1.0;
  A_continuous_model_(kRightEncoder, kRightVelocity) = 1.0;
  const auto &vel_coefs = velocity_drivetrain_coefficients_;
  A_continuous_model_(kLeftVelocity, kLeftVelocity) =
      vel_coefs.A_continuous(0, 0);
  A_continuous_model_(kLeftVelocity, kRightVelocity) =
      vel_coefs.A_continuous(0, 1);
  A_continuous_model_(kRightVelocity, kLeftVelocity) =
      vel_coefs.A_continuous(1, 0);
  A_continuous_model_(kRightVelocity, kRightVelocity) =
      vel_coefs.A_continuous(1, 1);

  A_continuous_model_(kLeftVelocity, kLeftVoltageError) =
      1 * vel_coefs.B_continuous(0, 0);
  A_continuous_model_(kLeftVelocity, kRightVoltageError) =
      1 * vel_coefs.B_continuous(0, 1);
  A_continuous_model_(kRightVelocity, kLeftVoltageError) =
      1 * vel_coefs.B_continuous(1, 0);
  A_continuous_model_(kRightVelocity, kRightVoltageError) =
      1 * vel_coefs.B_continuous(1, 1);

  B_continuous_model_.block<1, 2>(kLeftVelocity, kLeftVoltage) =
      vel_coefs.B_continuous.row(0);
  B_continuous_model_.block<1, 2>(kRightVelocity, kLeftVoltage) =
      vel_coefs.B_continuous.row(1);

  B_continuous_accel_(kVelocityX, kAccelX) = 1.0;
  B_continuous_accel_(kVelocityY, kAccelY) = 1.0;
  B_continuous_accel_(kTheta, kThetaRate) = 1.0;

  Q_continuous_model_.setZero();
  Q_continuous_model_.diagonal() << 1e-2, 1e-2, 1e-8, 1e-2, 1e-0, 1e-0, 1e-2,
      1e-0, 1e-0;

  Q_continuous_accel_.setZero();
  Q_continuous_accel_.diagonal() << 1e-2, 1e-2, 1e-20, 1e-4, 1e-4;

  P_model_ = Q_continuous_model_ * aos::time::DurationInSeconds(kNominalDt);

  // We can precalculate the discretizations of the accel model because it is
  // actually LTI.

  DiscretizeQAFast(Q_continuous_accel_, A_continuous_accel_, kNominalDt,
                   &Q_discrete_accel_, &A_discrete_accel_);
  P_accel_ = Q_discrete_accel_;

  led_outputs_.fill(LedOutput::ON);
}

Eigen::Matrix<double, ModelBasedLocalizer::kNModelStates,
              ModelBasedLocalizer::kNModelStates>
ModelBasedLocalizer::AModel(
    const ModelBasedLocalizer::ModelState &state) const {
  Eigen::Matrix<double, kNModelStates, kNModelStates> A = A_continuous_model_;
  const double theta = state(kTheta);
  const double stheta = std::sin(theta);
  const double ctheta = std::cos(theta);
  const double velocity = (state(kLeftVelocity) + state(kRightVelocity)) / 2.0;
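  // The nonlinear position kinematics are x_dot = v * cos(theta) and
  // y_dot = v * sin(theta), with v = (v_left + v_right) / 2. The entries below
  // are the partial derivatives of those equations with respect to theta and
  // the wheel velocities, i.e. the Jacobian used to linearize about the
  // current state.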
  A(kX, kTheta) = -stheta * velocity;
  A(kX, kLeftVelocity) = ctheta / 2.0;
  A(kX, kRightVelocity) = ctheta / 2.0;
  A(kY, kTheta) = ctheta * velocity;
  A(kY, kLeftVelocity) = stheta / 2.0;
  A(kY, kRightVelocity) = stheta / 2.0;
  return A;
}

Eigen::Matrix<double, ModelBasedLocalizer::kNAccelStates,
              ModelBasedLocalizer::kNAccelStates>
ModelBasedLocalizer::AAccel() const {
  return A_continuous_accel_;
}

ModelBasedLocalizer::ModelState ModelBasedLocalizer::DiffModel(
    const ModelBasedLocalizer::ModelState &state,
    const ModelBasedLocalizer::ModelInput &U) const {
  ModelState x_dot = AModel(state) * state + B_continuous_model_ * U;
  const double theta = state(kTheta);
  const double stheta = std::sin(theta);
  const double ctheta = std::cos(theta);
  const double velocity = (state(kLeftVelocity) + state(kRightVelocity)) / 2.0;
  x_dot(kX) = ctheta * velocity;
  x_dot(kY) = stheta * velocity;
  return x_dot;
}

ModelBasedLocalizer::AccelState ModelBasedLocalizer::DiffAccel(
    const ModelBasedLocalizer::AccelState &state,
    const ModelBasedLocalizer::AccelInput &U) const {
  return AAccel() * state + B_continuous_accel_ * U;
}

ModelBasedLocalizer::ModelState ModelBasedLocalizer::UpdateModel(
    const ModelBasedLocalizer::ModelState &model,
    const ModelBasedLocalizer::ModelInput &input,
    const aos::monotonic_clock::duration dt) const {
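  // Integrate the nonlinear model dynamics forward by dt using the shared
  // control_loops::RungeKutta integrator (rather than a single Euler step).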
  return control_loops::RungeKutta(
      std::bind(&ModelBasedLocalizer::DiffModel, this, std::placeholders::_1,
                input),
      model, aos::time::DurationInSeconds(dt));
}

ModelBasedLocalizer::AccelState ModelBasedLocalizer::UpdateAccel(
    const ModelBasedLocalizer::AccelState &accel,
    const ModelBasedLocalizer::AccelInput &input,
    const aos::monotonic_clock::duration dt) const {
  return control_loops::RungeKutta(
      std::bind(&ModelBasedLocalizer::DiffAccel, this, std::placeholders::_1,
                input),
      accel, aos::time::DurationInSeconds(dt));
}

ModelBasedLocalizer::AccelState ModelBasedLocalizer::AccelStateForModelState(
    const ModelBasedLocalizer::ModelState &state) const {
  const double robot_speed =
      (state(kLeftVelocity) + state(kRightVelocity)) / 2.0;
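  // (AModel(state) * state)(kTheta) is just the yaw rate; multiplying it by
  // long_offset_ (presumably the longitudinal offset of the IMU from the
  // robot's center of rotation) gives the lateral speed seen at the IMU.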
  const double lat_speed = (AModel(state) * state)(kTheta) * long_offset_;
  const double velocity_x = std::cos(state(kTheta)) * robot_speed -
                            std::sin(state(kTheta)) * lat_speed;
  const double velocity_y = std::sin(state(kTheta)) * robot_speed +
                            std::cos(state(kTheta)) * lat_speed;
  return (AccelState() << state(0), state(1), state(2), velocity_x, velocity_y)
      .finished();
}

ModelBasedLocalizer::ModelState ModelBasedLocalizer::ModelStateForAccelState(
    const ModelBasedLocalizer::AccelState &state,
    const Eigen::Vector2d &encoders, const double yaw_rate) const {
  const double robot_speed = state(kVelocityX) * std::cos(state(kTheta)) +
                             state(kVelocityY) * std::sin(state(kTheta));
  const double radius = dt_config_.robot_radius;
  const double left_velocity = robot_speed - yaw_rate * radius;
  const double right_velocity = robot_speed + yaw_rate * radius;
  return (ModelState() << state(0), state(1), state(2), encoders(0),
          left_velocity, 0.0, encoders(1), right_velocity, 0.0)
      .finished();
}

double ModelBasedLocalizer::ModelDivergence(
    const ModelBasedLocalizer::CombinedState &state,
    const ModelBasedLocalizer::AccelInput &accel_inputs,
    const Eigen::Vector2d &filtered_accel,
    const ModelBasedLocalizer::ModelInput &model_inputs) {
  // Convert the model state into the acceleration-based state-space and check
  // the distance between the two (should really be a weighted norm, but all
  // the numbers are on ~the same scale).
  // TODO(james): Maybe weight lateral velocity divergence differently than
  // longitudinal? Seems like we tend to get false-positives currently when in
  // sharp turns.
  // TODO(james): For off-center gyros, maybe reduce noise when turning?
  VLOG(2) << "divergence: "
          << (state.accel_state - AccelStateForModelState(state.model_state))
                 .transpose();
  const AccelState diff_accel = DiffAccel(state.accel_state, accel_inputs);
  const ModelState diff_model = DiffModel(state.model_state, model_inputs);
  const double model_lng_velocity =
      (state.model_state(kLeftVelocity) + state.model_state(kRightVelocity)) /
      2.0;
  const double model_lng_accel =
      (diff_model(kLeftVelocity) + diff_model(kRightVelocity)) / 2.0 -
      diff_model(kTheta) * diff_model(kTheta) * long_offset_;
  const double model_lat_accel = diff_model(kTheta) * model_lng_velocity;
  const Eigen::Vector2d robot_frame_accel(model_lng_accel, model_lat_accel);
  const Eigen::Vector2d model_accel =
      Eigen::AngleAxisd(state.model_state(kTheta), Eigen::Vector3d::UnitZ())
          .toRotationMatrix()
          .block<2, 2>(0, 0) *
      robot_frame_accel;
  const double accel_diff = (model_accel - filtered_accel).norm();
  const double theta_rate_diff =
      std::abs(diff_accel(kTheta) - diff_model(kTheta));

  const Eigen::Vector2d accel_vel = state.accel_state.bottomRows<2>();
  Eigen::Vector2d model_vel =
      AccelStateForModelState(state.model_state).bottomRows<2>();
  velocity_residual_ = (accel_vel - model_vel).norm() /
                       (1.0 + accel_vel.norm() + model_vel.norm());
  theta_rate_residual_ = theta_rate_diff;
  accel_residual_ = accel_diff / 4.0;
  return velocity_residual_ + theta_rate_residual_ + accel_residual_;
}

void ModelBasedLocalizer::UpdateState(
    CombinedState *state,
    const Eigen::Matrix<double, kNModelStates, kNModelOutputs> &K,
    const Eigen::Matrix<double, kNModelOutputs, 1> &Z,
    const Eigen::Matrix<double, kNModelOutputs, kNModelStates> &H,
    const AccelInput &accel_input, const ModelInput &model_input,
    aos::monotonic_clock::duration dt) {
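  // Propagate the accel-based state open loop (decaying its velocities when
  // the down estimator reports that we have been stationary for a while, so
  // accelerometer drift bleeds off), then propagate the model-based state and
  // apply the Kalman correction for the encoder/gyro measurement Z.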
  state->accel_state = UpdateAccel(state->accel_state, accel_input, dt);
  if (down_estimator_.consecutive_still() > 500.0) {
    state->accel_state(kVelocityX) *= 0.9;
    state->accel_state(kVelocityY) *= 0.9;
  }
  state->model_state = UpdateModel(state->model_state, model_input, dt);
  state->model_state += K * (Z - H * state->model_state);
}

void ModelBasedLocalizer::HandleImu(aos::monotonic_clock::time_point t,
                                    const Eigen::Vector3d &gyro,
                                    const Eigen::Vector3d &accel,
                                    const Eigen::Vector2d encoders,
                                    const Eigen::Vector2d voltage) {
  VLOG(2) << t;
  if (t_ == aos::monotonic_clock::min_time) {
    t_ = t;
  }
  if (t_ + 2 * kNominalDt < t) {
    t_ = t;
    ++clock_resets_;
  }
  const aos::monotonic_clock::duration dt = t - t_;
  t_ = t;
  down_estimator_.Predict(gyro, accel, dt);
  // TODO(james): Should we prefer this or use the down-estimator corrected
  // version? Using the down estimator is more principled, but does create more
  // opportunities for subtle biases.
  const double yaw_rate = (dt_config_.imu_transform * gyro)(2);
  const double diameter = 2.0 * dt_config_.robot_radius;

  const Eigen::AngleAxis<double> orientation(
      Eigen::AngleAxis<double>(xytheta()(kTheta), Eigen::Vector3d::UnitZ()) *
      down_estimator_.X_hat());
  last_orientation_ = orientation;

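  // Rotate the measured acceleration into the field frame and scale it from
  // g's to m/s^2 (kG), so that it can drive the accel-based state directly.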
  const Eigen::Vector3d absolute_accel =
      orientation * dt_config_.imu_transform * kG * accel;
  abs_accel_ = absolute_accel;

  VLOG(2) << "abs accel " << absolute_accel.transpose();
  VLOG(2) << "dt " << aos::time::DurationInSeconds(dt);

  // Update all the branched states.
  const AccelInput accel_input(absolute_accel.x(), absolute_accel.y(),
                               yaw_rate);
  const ModelInput model_input(voltage);

  const Eigen::Matrix<double, kNModelStates, kNModelStates> A_continuous =
      AModel(current_state_.model_state);

  Eigen::Matrix<double, kNModelStates, kNModelStates> A_discrete;
  Eigen::Matrix<double, kNModelStates, kNModelStates> Q_discrete;

  DiscretizeQAFast(Q_continuous_model_, A_continuous, dt, &Q_discrete,
                   &A_discrete);

  P_model_ = A_discrete * P_model_ * A_discrete.transpose() + Q_discrete;
  P_accel_ = A_discrete_accel_ * P_accel_ * A_discrete_accel_.transpose() +
             Q_discrete_accel_;

  Eigen::Matrix<double, kNModelOutputs, kNModelStates> H;
  Eigen::Matrix<double, kNModelOutputs, kNModelOutputs> R;
  {
    H.setZero();
    R.setZero();
    H(0, kLeftEncoder) = 1.0;
    H(1, kRightEncoder) = 1.0;
    H(2, kRightVelocity) = 1.0 / diameter;
    H(2, kLeftVelocity) = -1.0 / diameter;

    R.diagonal() << 1e-9, 1e-9, 1e-13;
  }

  const Eigen::Matrix<double, kNModelOutputs, 1> Z(encoders(0), encoders(1),
                                                   yaw_rate);

  if (branches_.empty()) {
    VLOG(2) << "Initializing";
    current_state_.model_state(kLeftEncoder) = encoders(0);
    current_state_.model_state(kRightEncoder) = encoders(1);
    current_state_.branch_time = t;
    branches_.Push(current_state_);
  }

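  // Standard Kalman correction for the encoder/gyro measurement:
  //   K = P * H^T * (H * P * H^T + R)^-1
  //   P = (I - K * H) * P
  // The same gain K is then applied to every branch as well as to the current
  // state.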
  const Eigen::Matrix<double, kNModelStates, kNModelOutputs> K =
      P_model_ * H.transpose() * (H * P_model_ * H.transpose() + R).inverse();
  P_model_ = (Eigen::Matrix<double, kNModelStates, kNModelStates>::Identity() -
              K * H) *
             P_model_;
  VLOG(2) << "K\n" << K;
  VLOG(2) << "Z\n" << Z.transpose();

  for (CombinedState &state : branches_) {
    UpdateState(&state, K, Z, H, accel_input, model_input, dt);
  }
  UpdateState(&current_state_, K, Z, H, accel_input, model_input, dt);

  VLOG(2) << "oldest accel " << branches_[0].accel_state.transpose();
  VLOG(2) << "oldest accel diff "
          << DiffAccel(branches_[0].accel_state, accel_input).transpose();
  VLOG(2) << "oldest model " << branches_[0].model_state.transpose();

  // Determine whether to switch modes--if we are currently in model-based
  // mode, swap to accel-based if the two states have diverged meaningfully in
  // the oldest branch. If we are currently in accel-based mode, swap back to
  // model-based once the model state in the oldest branch has matched the
  // accel-based state again.
  filtered_residual_accel_ +=
      0.01 * (accel_input.topRows<2>() - filtered_residual_accel_);
  const double model_divergence =
      branches_.full() ? ModelDivergence(branches_[0], accel_input,
                                         filtered_residual_accel_, model_input)
                       : 0.0;
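  // Low-pass filter the divergence metric with a first-order filter; the
  // 0.0095 below is the filter time constant in seconds, so the residual is
  // smoothed over roughly 10 ms worth of samples.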
  filtered_residual_ +=
      (1.0 - std::exp(-aos::time::DurationInSeconds(kNominalDt) / 0.0095)) *
      (model_divergence - filtered_residual_);
  // TODO(james): Tune this more. Currently set to generally trust the model,
  // perhaps a bit too much.
  // When the residual exceeds the accel threshold, we start using the
  // inertials alone; when it drops back below the model threshold, we go back
  // to being model-based.
  constexpr double kUseAccelThreshold = 2.0;
  constexpr double kUseModelThreshold = 0.5;
  constexpr size_t kShareStates = kNModelStates;
  static_assert(kUseModelThreshold < kUseAccelThreshold);
  if (using_model_) {
    if (filtered_residual_ > kUseAccelThreshold) {
      hysteresis_count_++;
    } else {
      hysteresis_count_ = 0;
    }
    if (hysteresis_count_ > 0) {
      using_model_ = false;
      // Grab the accel-based state from back when we started diverging.
      // TODO(james): This creates a problematic selection bias, because
      // we will tend to bias towards deliberately out-of-tune measurements.
      current_state_.accel_state = branches_[0].accel_state;
      current_state_.model_state = branches_[0].model_state;
      current_state_.model_state = ModelStateForAccelState(
          current_state_.accel_state, encoders, yaw_rate);
    } else {
      VLOG(2) << "Normal branching";
      current_state_.accel_state =
          AccelStateForModelState(current_state_.model_state);
      current_state_.branch_time = t;
    }
    hysteresis_count_ = 0;
  } else {
    if (filtered_residual_ < kUseModelThreshold) {
      hysteresis_count_++;
    } else {
      hysteresis_count_ = 0;
    }
    if (hysteresis_count_ > 100) {
      using_model_ = true;
      // Grab the model-based state from back when we stopped diverging.
      current_state_.model_state.topRows<kShareStates>() =
          ModelStateForAccelState(branches_[0].accel_state, encoders, yaw_rate)
              .topRows<kShareStates>();
      current_state_.accel_state =
          AccelStateForModelState(current_state_.model_state);
    } else {
      // TODO(james): Why was I leaving the encoders/wheel velocities in place?
      current_state_.model_state = ModelStateForAccelState(
          current_state_.accel_state, encoders, yaw_rate);
      current_state_.branch_time = t;
    }
  }

  // Generate a new branch, with the accel state reset based on the model-based
  // state (really, just getting rid of the lateral velocity).
  // By resetting the accel state in the new branch, this tries to minimize the
  // odds of runaway lateral velocities. This doesn't help with runaway
  // longitudinal velocities, however.
  CombinedState new_branch = current_state_;
  new_branch.accel_state = AccelStateForModelState(new_branch.model_state);
  new_branch.accumulated_divergence = 0.0;

  ++branch_counter_;
  if (branch_counter_ % kBranchPeriod == 0) {
    branches_.Push(new_branch);
    old_positions_.Push(OldPosition{t, xytheta(), latest_turret_position_,
                                    latest_turret_velocity_});
    branch_counter_ = 0;
  }

  last_residual_ = model_divergence;

  VLOG(2) << "Using " << (using_model_ ? "model" : "accel");
  VLOG(2) << "Residual " << last_residual_;
  VLOG(2) << "Filtered Residual " << filtered_residual_;
  VLOG(2) << "buffer size " << branches_.size();
  VLOG(2) << "Model state " << current_state_.model_state.transpose();
  VLOG(2) << "Accel state " << current_state_.accel_state.transpose();
  VLOG(2) << "Accel state for model "
          << AccelStateForModelState(current_state_.model_state).transpose();
  VLOG(2) << "Input accel " << accel.transpose();
  VLOG(2) << "Input gyro " << gyro.transpose();
  VLOG(2) << "Input voltage " << voltage.transpose();
  VLOG(2) << "Input encoder " << encoders.transpose();
  VLOG(2) << "yaw rate " << yaw_rate;

  CHECK(std::isfinite(last_residual_));
}

const ModelBasedLocalizer::OldPosition ModelBasedLocalizer::GetStateForTime(
    aos::monotonic_clock::time_point time) {
  if (old_positions_.empty()) {
    return OldPosition{};
  }

  aos::monotonic_clock::duration lowest_time_error =
      aos::monotonic_clock::duration::max();
  const OldPosition *best_match = nullptr;
  for (const OldPosition &sample : old_positions_) {
    const aos::monotonic_clock::duration time_error =
        std::chrono::abs(sample.sample_time - time);
    if (time_error < lowest_time_error) {
      lowest_time_error = time_error;
      best_match = &sample;
    }
  }
  return *best_match;
}

namespace {
// Converts a flatbuffer TransformationMatrix to an Eigen matrix. Technically,
// this should be able to do a single memcpy, but the extra verbosity here
// seems appropriate.
Eigen::Matrix<double, 4, 4> FlatbufferToTransformationMatrix(
    const frc971::vision::calibration::TransformationMatrix &flatbuffer) {
  CHECK_EQ(16u, CHECK_NOTNULL(flatbuffer.data())->size());
  Eigen::Matrix<double, 4, 4> result;
  result.setIdentity();
  for (int row = 0; row < 4; ++row) {
    for (int col = 0; col < 4; ++col) {
      result(row, col) = (*flatbuffer.data())[row * 4 + col];
    }
  }
  return result;
}

// Node names of the pis to listen for cameras from.
constexpr std::array<std::string_view, ModelBasedLocalizer::kNumPis> kPisToUse{
    "pi1", "pi2", "pi3", "pi4"};
}  // namespace

const Eigen::Matrix<double, 4, 4> ModelBasedLocalizer::CameraTransform(
    const OldPosition &state,
    const frc971::vision::calibration::CameraCalibration *calibration,
    std::optional<RejectionReason> *rejection_reason) const {
  CHECK_NOTNULL(rejection_reason);
  CHECK_NOTNULL(calibration);
  // Per the CameraCalibration specification, we can actually determine whether
  // the camera is the turret camera just from the presence of the
  // turret_extrinsics member.
  const bool is_turret = calibration->has_turret_extrinsics();
  // Ignore readings when the turret is spinning too fast, on the assumption
  // that the odds of screwing up the time compensation are higher.
  // Note that the current number here is chosen pretty arbitrarily--1 rad / sec
  // seems reasonable, but may be unnecessarily low or high.
  constexpr double kMaxTurretVelocity = 1.0;
  if (is_turret && std::abs(state.turret_velocity) > kMaxTurretVelocity &&
      !rejection_reason->has_value()) {
    *rejection_reason = RejectionReason::TURRET_TOO_FAST;
  }
  CHECK(calibration->has_fixed_extrinsics());
  const Eigen::Matrix<double, 4, 4> fixed_extrinsics =
      FlatbufferToTransformationMatrix(*calibration->fixed_extrinsics());

  // Calculate the pose of the camera relative to the robot origin.
  Eigen::Matrix<double, 4, 4> H_robot_camera = fixed_extrinsics;
  if (is_turret) {
    H_robot_camera =
        H_robot_camera *
        frc971::control_loops::TransformationMatrixForYaw<double>(
            state.turret_position) *
        FlatbufferToTransformationMatrix(*calibration->turret_extrinsics());
  }
  return H_robot_camera;
}

const std::optional<Eigen::Vector2d>
ModelBasedLocalizer::CameraMeasuredRobotPosition(
    const OldPosition &state, const y2022::vision::TargetEstimate *target,
    std::optional<RejectionReason> *rejection_reason,
    Eigen::Matrix<double, 4, 4> *H_field_camera_measured) const {
  if (!target->has_camera_calibration()) {
    *rejection_reason = RejectionReason::NO_CALIBRATION;
    return std::nullopt;
  }
  const Eigen::Matrix<double, 4, 4> H_robot_camera =
      CameraTransform(state, target->camera_calibration(), rejection_reason);
  const control_loops::Pose robot_pose(
      {state.xytheta(0), state.xytheta(1), 0.0}, state.xytheta(2));
  const Eigen::Matrix<double, 4, 4> H_field_robot =
      robot_pose.AsTransformationMatrix();
  // Current estimated pose of the camera in the global frame.
  // Note that this is all really just an elaborate way of extracting the
  // current estimated camera yaw, and nothing else.
  const Eigen::Matrix<double, 4, 4> H_field_camera =
      H_field_robot * H_robot_camera;
  // Grab the implied yaw of the camera (the +Z axis is coming out of the front
  // of the cameras).
  const Eigen::Vector3d rotated_camera_z =
      H_field_camera.block<3, 3>(0, 0) * Eigen::Vector3d(0, 0, 1);
  const double camera_yaw =
      std::atan2(rotated_camera_z.y(), rotated_camera_z.x());
  // All right, now we need to use the heading and distance from the
  // TargetEstimate, plus the yaw embedded in the camera_pose, to determine what
  // the implied X/Y position of the robot is. To do this, we calculate the
  // heading/distance from the target to the robot. The distance is easy, since
  // that's the same as the distance from the robot to the target. The heading
  // isn't too hard, but is obnoxious to think about, since the heading from the
  // target to the robot is distinct from the heading from the robot to the
  // target.

  // Just to walk through examples to confirm that the below calculation is
  // correct:
  // * If yaw = 0, and angle_to_target = 0, we are at 180 deg relative to the
  //   target.
  // * If yaw = 90 deg, and angle_to_target = 0, we are at -90 deg relative to
  //   the target.
  // * If yaw = 0, and angle_to_target = 90 deg, we are at -90 deg relative to
  //   the target.
  const double heading_from_target =
      aos::math::NormalizeAngle(M_PI + camera_yaw + target->angle_to_target());
  const double distance_from_target = target->distance();
  // Extract the implied camera position on the field.
  *H_field_camera_measured = H_field_camera;
  // TODO(james): Are we going to need to evict the roll/pitch components of the
  // camera extrinsics this year as well?
  (*H_field_camera_measured)(0, 3) =
      distance_from_target * std::cos(heading_from_target) + kVisionTargetX;
  (*H_field_camera_measured)(1, 3) =
      distance_from_target * std::sin(heading_from_target) + kVisionTargetY;
  const Eigen::Matrix<double, 4, 4> H_field_robot_measured =
      *H_field_camera_measured * H_robot_camera.inverse();
  return H_field_robot_measured.block<2, 1>(0, 3);
}

void ModelBasedLocalizer::HandleImageMatch(
    aos::monotonic_clock::time_point sample_time,
    const y2022::vision::TargetEstimate *target, int camera_index) {
  std::optional<RejectionReason> rejection_reason;

  if (target->confidence() < kMinTargetEstimateConfidence) {
    rejection_reason = RejectionReason::LOW_CONFIDENCE;
    TallyRejection(rejection_reason.value());
    return;
  }

  const OldPosition &state = GetStateForTime(sample_time);
  Eigen::Matrix<double, 4, 4> H_field_camera_measured;
  const std::optional<Eigen::Vector2d> measured_robot_position =
      CameraMeasuredRobotPosition(state, target, &rejection_reason,
                                  &H_field_camera_measured);
  // Technically, rejection_reason should always be set if
  // measured_robot_position is nullopt, but in the future we may have more
  // recoverable rejection reasons that we wish to allow to propagate further
  // into the process.
  if (!measured_robot_position || rejection_reason.has_value()) {
    CHECK(rejection_reason.has_value());
    TallyRejection(rejection_reason.value());
    return;
  }

  // Next, go through and do the actual Kalman corrections for the x/y
  // measurement, for both the accel state and the model-based state.
  const Eigen::Matrix<double, kNModelStates, kNModelStates> A_continuous_model =
      AModel(current_state_.model_state);

  Eigen::Matrix<double, kNModelStates, kNModelStates> A_discrete_model;
  Eigen::Matrix<double, kNModelStates, kNModelStates> Q_discrete_model;

  DiscretizeQAFast(Q_continuous_model_, A_continuous_model, kNominalDt,
                   &Q_discrete_model, &A_discrete_model);

  Eigen::Matrix<double, 2, kNModelStates> H_model;
  H_model.setZero();
  Eigen::Matrix<double, 2, kNAccelStates> H_accel;
  H_accel.setZero();
  Eigen::Matrix<double, 2, 2> R;
  R.setZero();
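  // The camera match implies a field-frame X/Y position of the robot, so the
  // measurement matrices just select the x/y rows of each state; R is the
  // assumed noise on that implied position measurement.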
  H_model(0, kX) = 1.0;
  H_model(1, kY) = 1.0;
  H_accel(0, kX) = 1.0;
  H_accel(1, kY) = 1.0;
  R.diagonal() << 1e-2, 1e-2;

  const Eigen::Matrix<double, kNModelStates, 2> K_model =
      P_model_ * H_model.transpose() *
      (H_model * P_model_ * H_model.transpose() + R).inverse();
  const Eigen::Matrix<double, kNAccelStates, 2> K_accel =
      P_accel_ * H_accel.transpose() *
      (H_accel * P_accel_ * H_accel.transpose() + R).inverse();
  P_model_ = (Eigen::Matrix<double, kNModelStates, kNModelStates>::Identity() -
              K_model * H_model) *
             P_model_;
  P_accel_ = (Eigen::Matrix<double, kNAccelStates, kNAccelStates>::Identity() -
              K_accel * H_accel) *
             P_accel_;
  // And now we have to correct *everything* on all the branches:
  for (CombinedState &state : branches_) {
    state.model_state += K_model * (measured_robot_position.value() -
                                    H_model * state.model_state);
    state.accel_state += K_accel * (measured_robot_position.value() -
                                    H_accel * state.accel_state);
  }
  current_state_.model_state +=
      K_model *
      (measured_robot_position.value() - H_model * current_state_.model_state);
  current_state_.accel_state +=
      K_accel *
      (measured_robot_position.value() - H_accel * current_state_.accel_state);

  statistics_.total_accepted++;
  statistics_.total_candidates++;

  const Eigen::Vector3d camera_z_in_field =
      H_field_camera_measured.block<3, 3>(0, 0) * Eigen::Vector3d::UnitZ();
  const double camera_yaw =
      std::atan2(camera_z_in_field.y(), camera_z_in_field.x());

  // TODO(milind): actually control this
  led_outputs_[camera_index] = LedOutput::ON;

  TargetEstimateDebugT debug;
  debug.camera = static_cast<uint8_t>(camera_index);
  debug.camera_x = H_field_camera_measured(0, 3);
  debug.camera_y = H_field_camera_measured(1, 3);
  debug.camera_theta = camera_yaw;
  debug.implied_robot_x = measured_robot_position.value().x();
  debug.implied_robot_y = measured_robot_position.value().y();
  debug.implied_robot_theta = xytheta()(2);
  debug.implied_turret_goal =
      aos::math::NormalizeAngle(camera_yaw + target->angle_to_target());
  debug.accepted = true;
  debug.image_age_sec = aos::time::DurationInSeconds(t_ - sample_time);
  CHECK_LT(image_debugs_.size(), kDebugBufferSize);
  image_debugs_.push_back(debug);
}

void ModelBasedLocalizer::HandleTurret(
    aos::monotonic_clock::time_point sample_time, double turret_position,
    double turret_velocity) {
  last_turret_update_ = sample_time;
  latest_turret_position_ = turret_position;
  latest_turret_velocity_ = turret_velocity;
}

void ModelBasedLocalizer::HandleReset(aos::monotonic_clock::time_point now,
                                      const Eigen::Vector3d &xytheta) {
  branches_.Reset();
  t_ = now;
  using_model_ = true;
  current_state_.model_state << xytheta(0), xytheta(1), xytheta(2),
      current_state_.model_state(kLeftEncoder), 0.0, 0.0,
      current_state_.model_state(kRightEncoder), 0.0, 0.0;
  current_state_.accel_state =
      AccelStateForModelState(current_state_.model_state);
  last_residual_ = 0.0;
  filtered_residual_ = 0.0;
  filtered_residual_accel_.setZero();
  abs_accel_.setZero();
}

flatbuffers::Offset<AccelBasedState> ModelBasedLocalizer::BuildAccelState(
    flatbuffers::FlatBufferBuilder *fbb, const AccelState &state) {
  AccelBasedState::Builder accel_state_builder(*fbb);
  accel_state_builder.add_x(state(kX));
  accel_state_builder.add_y(state(kY));
  accel_state_builder.add_theta(state(kTheta));
  accel_state_builder.add_velocity_x(state(kVelocityX));
  accel_state_builder.add_velocity_y(state(kVelocityY));
  return accel_state_builder.Finish();
}

flatbuffers::Offset<ModelBasedState> ModelBasedLocalizer::BuildModelState(
    flatbuffers::FlatBufferBuilder *fbb, const ModelState &state) {
  ModelBasedState::Builder model_state_builder(*fbb);
  model_state_builder.add_x(state(kX));
  model_state_builder.add_y(state(kY));
  model_state_builder.add_theta(state(kTheta));
  model_state_builder.add_left_encoder(state(kLeftEncoder));
  model_state_builder.add_left_velocity(state(kLeftVelocity));
  model_state_builder.add_left_voltage_error(state(kLeftVoltageError));
  model_state_builder.add_right_encoder(state(kRightEncoder));
  model_state_builder.add_right_velocity(state(kRightVelocity));
  model_state_builder.add_right_voltage_error(state(kRightVoltageError));
  return model_state_builder.Finish();
}

flatbuffers::Offset<CumulativeStatistics>
ModelBasedLocalizer::PopulateStatistics(flatbuffers::FlatBufferBuilder *fbb) {
  const auto rejections_offset = fbb->CreateVector(
      statistics_.rejection_counts.data(), statistics_.rejection_counts.size());

  CumulativeStatistics::Builder stats_builder(*fbb);
  stats_builder.add_total_accepted(statistics_.total_accepted);
  stats_builder.add_total_candidates(statistics_.total_candidates);
  stats_builder.add_rejection_reason_count(rejections_offset);
  return stats_builder.Finish();
}

flatbuffers::Offset<ModelBasedStatus> ModelBasedLocalizer::PopulateStatus(
    flatbuffers::FlatBufferBuilder *fbb) {
  const flatbuffers::Offset<CumulativeStatistics> stats_offset =
      PopulateStatistics(fbb);

  const flatbuffers::Offset<control_loops::drivetrain::DownEstimatorState>
      down_estimator_offset = down_estimator_.PopulateStatus(fbb, t_);

  const CombinedState &state = current_state_;

  const flatbuffers::Offset<ModelBasedState> model_state_offset =
      BuildModelState(fbb, state.model_state);

  const flatbuffers::Offset<AccelBasedState> accel_state_offset =
      BuildAccelState(fbb, state.accel_state);

  const flatbuffers::Offset<AccelBasedState> oldest_accel_state_offset =
      branches_.empty() ? flatbuffers::Offset<AccelBasedState>()
                        : BuildAccelState(fbb, branches_[0].accel_state);

  const flatbuffers::Offset<ModelBasedState> oldest_model_state_offset =
      branches_.empty() ? flatbuffers::Offset<ModelBasedState>()
                        : BuildModelState(fbb, branches_[0].model_state);

  ModelBasedStatus::Builder builder(*fbb);
  builder.add_accel_state(accel_state_offset);
  builder.add_oldest_accel_state(oldest_accel_state_offset);
  builder.add_oldest_model_state(oldest_model_state_offset);
  builder.add_model_state(model_state_offset);
  builder.add_using_model(using_model_);
  builder.add_residual(last_residual_);
  builder.add_filtered_residual(filtered_residual_);
  builder.add_velocity_residual(velocity_residual_);
  builder.add_accel_residual(accel_residual_);
  builder.add_theta_rate_residual(theta_rate_residual_);
  builder.add_down_estimator(down_estimator_offset);
  builder.add_x(xytheta()(0));
  builder.add_y(xytheta()(1));
  builder.add_theta(xytheta()(2));
  builder.add_implied_accel_x(abs_accel_(0));
  builder.add_implied_accel_y(abs_accel_(1));
  builder.add_implied_accel_z(abs_accel_(2));
  builder.add_clock_resets(clock_resets_);
  builder.add_statistics(stats_offset);
  return builder.Finish();
}

flatbuffers::Offset<LocalizerVisualization>
ModelBasedLocalizer::PopulateVisualization(
    flatbuffers::FlatBufferBuilder *fbb) {
  const flatbuffers::Offset<CumulativeStatistics> stats_offset =
      PopulateStatistics(fbb);

  aos::SizedArray<flatbuffers::Offset<TargetEstimateDebug>, kDebugBufferSize>
      debug_offsets;

  for (const TargetEstimateDebugT &debug : image_debugs_) {
    debug_offsets.push_back(PackTargetEstimateDebug(debug, fbb));
  }

  image_debugs_.clear();

  const flatbuffers::Offset<
      flatbuffers::Vector<flatbuffers::Offset<TargetEstimateDebug>>>
      debug_offset =
          fbb->CreateVector(debug_offsets.data(), debug_offsets.size());

  LocalizerVisualization::Builder builder(*fbb);
  builder.add_statistics(stats_offset);
  builder.add_targets(debug_offset);
  return builder.Finish();
}

void ModelBasedLocalizer::TallyRejection(const RejectionReason reason) {
  statistics_.total_candidates++;
  statistics_.rejection_counts[static_cast<size_t>(reason)]++;
  TargetEstimateDebugT debug;
  debug.accepted = false;
  debug.rejection_reason = reason;
  CHECK_LT(image_debugs_.size(), kDebugBufferSize);
  image_debugs_.push_back(debug);
}

flatbuffers::Offset<TargetEstimateDebug>
ModelBasedLocalizer::PackTargetEstimateDebug(
    const TargetEstimateDebugT &debug, flatbuffers::FlatBufferBuilder *fbb) {
  if (!debug.accepted) {
    TargetEstimateDebug::Builder builder(*fbb);
    builder.add_accepted(debug.accepted);
    builder.add_rejection_reason(debug.rejection_reason);
    return builder.Finish();
  } else {
    flatbuffers::Offset<TargetEstimateDebug> offset =
        TargetEstimateDebug::Pack(*fbb, &debug);
    flatbuffers::GetMutableTemporaryPointer(*fbb, offset)
        ->clear_rejection_reason();
    return offset;
  }
}

namespace {
// Period at which the encoder readings from the IMU board wrap.
static double DrivetrainWrapPeriod() {
  return y2022::constants::Values::DrivetrainEncoderToMeters(1 << 16);
}
}  // namespace

EventLoopLocalizer::EventLoopLocalizer(
    aos::EventLoop *event_loop,
    const control_loops::drivetrain::DrivetrainConfig<double> &dt_config)
    : event_loop_(event_loop),
      dt_config_(dt_config),
      model_based_(dt_config),
      status_sender_(event_loop_->MakeSender<LocalizerStatus>("/localizer")),
      output_sender_(event_loop_->MakeSender<LocalizerOutput>("/localizer")),
      visualization_sender_(
          event_loop_->MakeSender<LocalizerVisualization>("/localizer")),
      output_fetcher_(
          event_loop_->MakeFetcher<frc971::control_loops::drivetrain::Output>(
              "/drivetrain")),
      clock_offset_fetcher_(
          event_loop_->MakeFetcher<aos::message_bridge::ServerStatistics>(
              "/aos")),
      superstructure_fetcher_(
          event_loop_
              ->MakeFetcher<y2022::control_loops::superstructure::Status>(
                  "/superstructure")),
      left_encoder_(-DrivetrainWrapPeriod() / 2.0, DrivetrainWrapPeriod()),
      right_encoder_(-DrivetrainWrapPeriod() / 2.0, DrivetrainWrapPeriod()) {
  event_loop_->MakeWatcher(
      "/drivetrain",
      [this](
          const frc971::control_loops::drivetrain::LocalizerControl &control) {
        const double theta = control.keep_current_theta()
                                 ? model_based_.xytheta()(2)
                                 : control.theta();
        model_based_.HandleReset(event_loop_->monotonic_now(),
                                 {control.x(), control.y(), theta});
      });
  aos::TimerHandler *superstructure_timer = event_loop_->AddTimer([this]() {
    if (superstructure_fetcher_.Fetch()) {
      const y2022::control_loops::superstructure::Status &status =
          *superstructure_fetcher_.get();
      if (!status.has_turret()) {
        return;
      }
      CHECK(status.has_turret());
      model_based_.HandleTurret(
          superstructure_fetcher_.context().monotonic_event_time,
          status.turret()->position(), status.turret()->velocity());
    }
  });
  event_loop_->OnRun([this, superstructure_timer]() {
    superstructure_timer->Setup(event_loop_->monotonic_now(),
                                std::chrono::milliseconds(20));
  });

  for (size_t camera_index = 0; camera_index < kPisToUse.size();
       ++camera_index) {
    CHECK_LT(camera_index, target_estimate_fetchers_.size());
    target_estimate_fetchers_[camera_index] =
        event_loop_->MakeFetcher<y2022::vision::TargetEstimate>(
            absl::StrCat("/", kPisToUse[camera_index], "/camera"));
  }
  aos::TimerHandler *estimate_timer = event_loop_->AddTimer([this]() {
    for (size_t camera_index = 0; camera_index < kPisToUse.size();
         ++camera_index) {
      if (model_based_.NumQueuedImageDebugs() ==
              ModelBasedLocalizer::kDebugBufferSize ||
          (last_visualization_send_ + kMinVisualizationPeriod <
           event_loop_->monotonic_now())) {
        auto builder = visualization_sender_.MakeBuilder();
        visualization_sender_.CheckOk(
            builder.Send(model_based_.PopulateVisualization(builder.fbb())));
      }
      if (target_estimate_fetchers_[camera_index].Fetch()) {
        const std::optional<aos::monotonic_clock::duration> monotonic_offset =
            ClockOffset(kPisToUse[camera_index]);
        if (!monotonic_offset.has_value()) {
          continue;
        }
        // TODO(james): Get timestamp from message contents.
        aos::monotonic_clock::time_point capture_time(
            target_estimate_fetchers_[camera_index]
                .context()
                .monotonic_remote_time -
            monotonic_offset.value());
        if (capture_time > target_estimate_fetchers_[camera_index]
                               .context()
                               .monotonic_event_time) {
          model_based_.TallyRejection(RejectionReason::IMAGE_FROM_FUTURE);
          continue;
        }
        model_based_.HandleImageMatch(
            capture_time, target_estimate_fetchers_[camera_index].get(),
            camera_index);
      }
    }
  });
  event_loop_->OnRun([this, estimate_timer]() {
    estimate_timer->Setup(event_loop_->monotonic_now(),
                          std::chrono::milliseconds(100));
  });
  event_loop_->MakeWatcher(
      "/localizer", [this](const frc971::IMUValuesBatch &values) {
        CHECK(values.has_readings());
        output_fetcher_.Fetch();
        for (const IMUValues *value : *values.readings()) {
          zeroer_.InsertAndProcessMeasurement(*value);
          const Eigen::Vector2d encoders{
              left_encoder_.Unwrap(value->left_encoder()),
              right_encoder_.Unwrap(value->right_encoder())};
          {
            const aos::monotonic_clock::time_point pico_timestamp{
                std::chrono::microseconds(value->pico_timestamp_us())};
            // TODO(james): If we get large enough drift off of the pico,
            // actually do something about it.
            if (!pico_offset_.has_value()) {
              pico_offset_ =
                  event_loop_->context().monotonic_event_time - pico_timestamp;
              last_pico_timestamp_ = pico_timestamp;
            }
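            // The pico timestamp is a 32-bit microsecond counter, so when it
            // wraps around, bump the offset by 2^32 microseconds to keep the
            // reconstructed sample timestamps monotonic.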
            if (pico_timestamp < last_pico_timestamp_) {
              pico_offset_.value() += std::chrono::microseconds(1ULL << 32);
            }
            const aos::monotonic_clock::time_point sample_timestamp =
                pico_offset_.value() + pico_timestamp;
            pico_offset_error_ =
                event_loop_->context().monotonic_event_time - sample_timestamp;
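            // Treat the robot as disabled (and so feed zero drivetrain voltage
            // into the model) if we have no recent drivetrain output message
            // to pull voltages from.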
            const bool disabled =
                (output_fetcher_.get() == nullptr) ||
                (output_fetcher_.context().monotonic_event_time +
                     std::chrono::milliseconds(10) <
                 event_loop_->context().monotonic_event_time);
            const bool zeroed = zeroer_.Zeroed();
            model_based_.HandleImu(
                sample_timestamp,
                zeroed ? zeroer_.ZeroedGyro().value() : Eigen::Vector3d::Zero(),
                zeroed ? zeroer_.ZeroedAccel().value()
                       : dt_config_.imu_transform.transpose() *
                             Eigen::Vector3d::UnitZ(),
                encoders,
                disabled ? Eigen::Vector2d::Zero()
                         : Eigen::Vector2d{output_fetcher_->left_voltage(),
                                           output_fetcher_->right_voltage()});
            last_pico_timestamp_ = pico_timestamp;
          }
          {
            auto builder = status_sender_.MakeBuilder();
            const flatbuffers::Offset<ModelBasedStatus> model_based_status =
                model_based_.PopulateStatus(builder.fbb());
            const flatbuffers::Offset<control_loops::drivetrain::ImuZeroerState>
                zeroer_status = zeroer_.PopulateStatus(builder.fbb());
            LocalizerStatus::Builder status_builder =
                builder.MakeBuilder<LocalizerStatus>();
            status_builder.add_model_based(model_based_status);
            status_builder.add_zeroed(zeroer_.Zeroed());
            status_builder.add_faulted_zero(zeroer_.Faulted());
            status_builder.add_zeroing(zeroer_status);
            status_builder.add_left_encoder(encoders(0));
            status_builder.add_right_encoder(encoders(1));
            if (pico_offset_.has_value()) {
              status_builder.add_pico_offset_ns(pico_offset_.value().count());
              status_builder.add_pico_offset_error_ns(
                  pico_offset_error_.count());
            }
            builder.CheckOk(builder.Send(status_builder.Finish()));
          }
          if (last_output_send_ + std::chrono::milliseconds(5) <
              event_loop_->context().monotonic_event_time) {
            auto builder = output_sender_.MakeBuilder();

            const auto led_outputs_offset =
                builder.fbb()->CreateVector(model_based_.led_outputs().data(),
                                            model_based_.led_outputs().size());

            LocalizerOutput::Builder output_builder =
                builder.MakeBuilder<LocalizerOutput>();
            // TODO(james): Should we bother to try to estimate time offsets
            // for the pico?
            output_builder.add_monotonic_timestamp_ns(
                value->monotonic_timestamp_ns());
            output_builder.add_x(model_based_.xytheta()(0));
            output_builder.add_y(model_based_.xytheta()(1));
            output_builder.add_theta(model_based_.xytheta()(2));
            output_builder.add_zeroed(zeroer_.Zeroed());
            const Eigen::Quaterniond &orientation = model_based_.orientation();
            Quaternion quaternion;
            quaternion.mutate_x(orientation.x());
            quaternion.mutate_y(orientation.y());
            quaternion.mutate_z(orientation.z());
            quaternion.mutate_w(orientation.w());
            output_builder.add_orientation(&quaternion);
            output_builder.add_led_outputs(led_outputs_offset);
            builder.CheckOk(builder.Send(output_builder.Finish()));
            last_output_send_ = event_loop_->monotonic_now();
          }
        }
      });
}

std::optional<aos::monotonic_clock::duration> EventLoopLocalizer::ClockOffset(
    std::string_view pi) {
  std::optional<aos::monotonic_clock::duration> monotonic_offset;
  clock_offset_fetcher_.Fetch();
  if (clock_offset_fetcher_.get() != nullptr) {
    for (const auto connection : *clock_offset_fetcher_->connections()) {
      if (connection->has_node() && connection->node()->has_name() &&
          connection->node()->name()->string_view() == pi) {
        if (connection->has_monotonic_offset()) {
          monotonic_offset =
              std::chrono::nanoseconds(connection->monotonic_offset());
        } else {
          // If we don't have a monotonic offset, that means we aren't
          // connected.
          model_based_.TallyRejection(
              RejectionReason::MESSAGE_BRIDGE_DISCONNECTED);
          return std::nullopt;
        }
        break;
      }
    }
  }
  CHECK(monotonic_offset.has_value());
  return monotonic_offset;
}

}  // namespace frc971::controls