James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 1 | #ifndef Y2019_CONTROL_LOOPS_DRIVETRAIN_LOCALIZATER_H_ |
| 2 | #define Y2019_CONTROL_LOOPS_DRIVETRAIN_LOCALIZATER_H_ |
| 3 | |
| 4 | #include <cmath> |
| 5 | #include <memory> |
| 6 | |
James Kuszmaul | f4ede20 | 2020-02-14 08:47:40 -0800 | [diff] [blame] | 7 | #include "frc971/control_loops/drivetrain/camera.h" |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 8 | #include "frc971/control_loops/drivetrain/hybrid_ekf.h" |
James Kuszmaul | 19612ab | 2024-02-17 20:45:58 -0800 | [diff] [blame^] | 9 | #include "frc971/control_loops/drivetrain/localization/utils.h" |
James Kuszmaul | 2971b5a | 2023-01-29 15:49:32 -0800 | [diff] [blame] | 10 | #include "frc971/control_loops/pose.h" |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 11 | |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 12 | #if !defined(__clang__) && defined(__GNUC__) |
// GCC mis-detects that when zero is set to true, the member variables could be
// uninitialized. Rather than spend the CPU to initialize them in addition to
// the memory for no good reason, tell GCC to stop doing that. Clang appears to
// get it.
| 17 | #pragma GCC diagnostic push |
| 18 | #pragma GCC diagnostic ignored "-Wmaybe-uninitialized" |
| 19 | #endif |
| 20 | |
Stephan Pleines | d99b1ee | 2024-02-02 20:56:44 -0800 | [diff] [blame] | 21 | namespace y2019::control_loops { |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 22 | |
| 23 | template <int num_cameras, int num_targets, int num_obstacles, |
| 24 | int max_targets_per_frame, typename Scalar = double> |
| 25 | class TypedLocalizer |
| 26 | : public ::frc971::control_loops::drivetrain::HybridEkf<Scalar> { |
| 27 | public: |
James Kuszmaul | f4ede20 | 2020-02-14 08:47:40 -0800 | [diff] [blame] | 28 | typedef frc971::control_loops::TypedCamera<num_targets, num_obstacles, Scalar> |
| 29 | Camera; |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 30 | typedef typename Camera::TargetView TargetView; |
| 31 | typedef typename Camera::Pose Pose; |
James Kuszmaul | f4ede20 | 2020-02-14 08:47:40 -0800 | [diff] [blame] | 32 | typedef typename frc971::control_loops::Target Target; |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 33 | typedef ::frc971::control_loops::drivetrain::HybridEkf<Scalar> HybridEkf; |
| 34 | typedef typename HybridEkf::State State; |
| 35 | typedef typename HybridEkf::StateSquare StateSquare; |
| 36 | typedef typename HybridEkf::Input Input; |
| 37 | typedef typename HybridEkf::Output Output; |
| 38 | using HybridEkf::kNInputs; |
| 39 | using HybridEkf::kNOutputs; |
| 40 | using HybridEkf::kNStates; |
| 41 | |
| 42 | // robot_pose should be the object that is used by the cameras, such that when |
| 43 | // we update robot_pose, the cameras will change what they report the relative |
| 44 | // position of the targets as. |
| 45 | // Note that the parameters for the cameras should be set to allow slightly |
| 46 | // larger fields of view and slightly longer range than the true cameras so |
| 47 | // that we can identify potential matches for targets even when we have slight |
| 48 | // modelling errors. |
| 49 | TypedLocalizer( |
| 50 | const ::frc971::control_loops::drivetrain::DrivetrainConfig<Scalar> |
| 51 | &dt_config, |
| 52 | Pose *robot_pose) |
| 53 | : ::frc971::control_loops::drivetrain::HybridEkf<Scalar>(dt_config), |
James Kuszmaul | 2971b5a | 2023-01-29 15:49:32 -0800 | [diff] [blame] | 54 | robot_pose_(robot_pose), |
| 55 | h_queue_(this), |
| 56 | make_h_queue_(this) {} |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 57 | |
| 58 | // Performs a kalman filter correction with a single camera frame, consisting |
| 59 | // of up to max_targets_per_frame targets and taken at time t. |
| 60 | // camera is the Camera used to take the images. |
| 61 | void UpdateTargets( |
| 62 | const Camera &camera, |
| 63 | const ::aos::SizedArray<TargetView, max_targets_per_frame> &targets, |
| 64 | ::aos::monotonic_clock::time_point t) { |
| 65 | if (targets.empty()) { |
| 66 | return; |
| 67 | } |
| 68 | |
James Kuszmaul | 6f941b7 | 2019-03-08 18:12:25 -0800 | [diff] [blame] | 69 | if (!SanitizeTargets(targets)) { |
Austin Schuh | f257f3c | 2019-10-27 21:00:43 -0700 | [diff] [blame] | 70 | AOS_LOG(ERROR, "Throwing out targets due to in insane values.\n"); |
James Kuszmaul | 6f941b7 | 2019-03-08 18:12:25 -0800 | [diff] [blame] | 71 | return; |
| 72 | } |
| 73 | |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 74 | if (t > HybridEkf::latest_t()) { |
Austin Schuh | f257f3c | 2019-10-27 21:00:43 -0700 | [diff] [blame] | 75 | AOS_LOG(ERROR, |
| 76 | "target observations must be older than most recent encoder/gyro " |
| 77 | "update.\n"); |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 78 | return; |
| 79 | } |
| 80 | |
| 81 | Output z; |
| 82 | Eigen::Matrix<Scalar, kNOutputs, kNOutputs> R; |
| 83 | TargetViewToMatrices(targets[0], &z, &R); |
| 84 | |
| 85 | // In order to perform the correction steps for the targets, we will |
| 86 | // separately perform a Correct step for each following target. |
| 87 | // This way, we can have the first correction figure out the mappings |
| 88 | // between targets in the image and targets on the field, and then re-use |
| 89 | // those mappings for all the remaining corrections. |
| 90 | // As such, we need to store the EKF functions that the remaining targets |
| 91 | // will need in arrays: |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 92 | ::aos::SizedArray<HFunction, max_targets_per_frame> h_functions; |
| 93 | ::aos::SizedArray<Eigen::Matrix<Scalar, kNOutputs, kNStates>, |
James Kuszmaul | 2971b5a | 2023-01-29 15:49:32 -0800 | [diff] [blame] | 94 | max_targets_per_frame> |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 95 | dhdx; |
James Kuszmaul | 2971b5a | 2023-01-29 15:49:32 -0800 | [diff] [blame] | 96 | make_h_queue_.CorrectKnownHBuilder( |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 97 | z, nullptr, |
Philipp Schrader | 790cb54 | 2023-07-05 21:06:52 -0700 | [diff] [blame] | 98 | ExpectedObservationBuilder(this, camera, targets, &h_functions, &dhdx), |
James Kuszmaul | 2971b5a | 2023-01-29 15:49:32 -0800 | [diff] [blame] | 99 | R, t); |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 100 | // Fetch cache: |
| 101 | for (size_t ii = 1; ii < targets.size(); ++ii) { |
| 102 | TargetViewToMatrices(targets[ii], &z, &R); |
James Kuszmaul | 2971b5a | 2023-01-29 15:49:32 -0800 | [diff] [blame] | 103 | h_queue_.CorrectKnownH( |
Philipp Schrader | 790cb54 | 2023-07-05 21:06:52 -0700 | [diff] [blame] | 104 | z, nullptr, ExpectedObservationFunctor(h_functions[ii], dhdx[ii]), R, |
James Kuszmaul | 2971b5a | 2023-01-29 15:49:32 -0800 | [diff] [blame] | 105 | t); |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 106 | } |
| 107 | } |
| 108 | |
| 109 | private: |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 110 | class HFunction { |
| 111 | public: |
| 112 | HFunction() : zero_(true) {} |
| 113 | HFunction(const Camera *camera, const TargetView &best_view, |
| 114 | const TargetView &target_view, TypedLocalizer *localizer) |
| 115 | : zero_(false), |
| 116 | camera_(camera), |
| 117 | best_view_(best_view), |
| 118 | target_view_(target_view), |
| 119 | localizer_(localizer) {} |
| 120 | Output operator()(const State &X, const Input &) { |
| 121 | if (zero_) { |
| 122 | return Output::Zero(); |
| 123 | } |
| 124 | |
| 125 | // This function actually handles determining what the Output should |
| 126 | // be at a given state, now that we have chosen the target that |
| 127 | // we want to match to. |
| 128 | *localizer_->robot_pose_->mutable_pos() << X(0, 0), X(1, 0), 0.0; |
| 129 | localizer_->robot_pose_->set_theta(X(2, 0)); |
| 130 | const Pose relative_pose = |
| 131 | best_view_.target->pose().Rebase(&camera_->pose()); |
| 132 | const Scalar heading = relative_pose.heading(); |
| 133 | const Scalar distance = relative_pose.xy_norm(); |
| 134 | const Scalar skew = |
| 135 | ::aos::math::NormalizeAngle(relative_pose.rel_theta() - heading); |
| 136 | return Output(heading, distance, skew); |
| 137 | } |
| 138 | |
| 139 | private: |
| 140 | bool zero_ = false; |
| 141 | |
| 142 | const Camera *camera_; |
| 143 | TargetView best_view_; |
| 144 | TargetView target_view_; |
| 145 | TypedLocalizer *localizer_; |
| 146 | }; |
| 147 | |
| 148 | friend class HFunction; |
| 149 | |
James Kuszmaul | 2971b5a | 2023-01-29 15:49:32 -0800 | [diff] [blame] | 150 | class ExpectedObservationFunctor |
| 151 | : public HybridEkf::ExpectedObservationFunctor { |
| 152 | public: |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 153 | ExpectedObservationFunctor(const HFunction &h, |
| 154 | Eigen::Matrix<Scalar, kNOutputs, kNStates> dhdx) |
James Kuszmaul | 2971b5a | 2023-01-29 15:49:32 -0800 | [diff] [blame] | 155 | : h_(h), dhdx_(dhdx) {} |
| 156 | |
| 157 | Output H(const State &state, const Input &input) final { |
| 158 | return h_(state, input); |
| 159 | } |
| 160 | |
| 161 | virtual Eigen::Matrix<Scalar, kNOutputs, kNStates> DHDX( |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 162 | const State &) final { |
| 163 | return dhdx_; |
James Kuszmaul | 2971b5a | 2023-01-29 15:49:32 -0800 | [diff] [blame] | 164 | } |
| 165 | |
| 166 | private: |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 167 | HFunction h_; |
| 168 | Eigen::Matrix<Scalar, kNOutputs, kNStates> dhdx_; |
James Kuszmaul | 2971b5a | 2023-01-29 15:49:32 -0800 | [diff] [blame] | 169 | }; |
| 170 | class ExpectedObservationBuilder |
| 171 | : public HybridEkf::ExpectedObservationBuilder { |
| 172 | public: |
| 173 | ExpectedObservationBuilder( |
| 174 | TypedLocalizer *localizer, const Camera &camera, |
| 175 | const ::aos::SizedArray<TargetView, max_targets_per_frame> |
| 176 | &target_views, |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 177 | ::aos::SizedArray<HFunction, max_targets_per_frame> *h_functions, |
| 178 | ::aos::SizedArray<Eigen::Matrix<Scalar, kNOutputs, kNStates>, |
| 179 | max_targets_per_frame> *dhdx) |
James Kuszmaul | 2971b5a | 2023-01-29 15:49:32 -0800 | [diff] [blame] | 180 | : localizer_(localizer), |
| 181 | camera_(camera), |
| 182 | target_views_(target_views), |
| 183 | h_functions_(h_functions), |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 184 | dhdx_(dhdx) {} |
James Kuszmaul | 2971b5a | 2023-01-29 15:49:32 -0800 | [diff] [blame] | 185 | |
| 186 | virtual ExpectedObservationFunctor *MakeExpectedObservations( |
| 187 | const State &state, const StateSquare &P) { |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 188 | HFunction h; |
| 189 | Eigen::Matrix<Scalar, kNOutputs, kNStates> dhdx; |
Philipp Schrader | 790cb54 | 2023-07-05 21:06:52 -0700 | [diff] [blame] | 190 | localizer_->MakeH(camera_, target_views_, h_functions_, dhdx_, state, P, |
| 191 | &h, &dhdx); |
James Kuszmaul | 2971b5a | 2023-01-29 15:49:32 -0800 | [diff] [blame] | 192 | functor_.emplace(h, dhdx); |
| 193 | return &functor_.value(); |
| 194 | } |
| 195 | |
| 196 | private: |
| 197 | TypedLocalizer *localizer_; |
| 198 | const Camera &camera_; |
| 199 | const ::aos::SizedArray<TargetView, max_targets_per_frame> &target_views_; |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 200 | ::aos::SizedArray<HFunction, max_targets_per_frame> *h_functions_; |
| 201 | ::aos::SizedArray<Eigen::Matrix<Scalar, kNOutputs, kNStates>, |
| 202 | max_targets_per_frame> *dhdx_; |
James Kuszmaul | 2971b5a | 2023-01-29 15:49:32 -0800 | [diff] [blame] | 203 | std::optional<ExpectedObservationFunctor> functor_; |
| 204 | }; |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 205 | |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 206 | // The threshold to use for completely rejecting potentially bad target |
| 207 | // matches. |
| 208 | // TODO(james): Tune |
Austin Schuh | 113a85d | 2019-03-28 17:18:08 -0700 | [diff] [blame] | 209 | static constexpr Scalar kRejectionScore = 1.0; |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 210 | |
James Kuszmaul | 6f941b7 | 2019-03-08 18:12:25 -0800 | [diff] [blame] | 211 | // Checks that the targets coming in make some sense--mostly to prevent NaNs |
| 212 | // or the such from propagating. |
| 213 | bool SanitizeTargets( |
| 214 | const ::aos::SizedArray<TargetView, max_targets_per_frame> &targets) { |
| 215 | for (const TargetView &view : targets) { |
| 216 | const typename TargetView::Reading reading = view.reading; |
| 217 | if (!(::std::isfinite(reading.heading) && |
| 218 | ::std::isfinite(reading.distance) && |
| 219 | ::std::isfinite(reading.skew) && ::std::isfinite(reading.height))) { |
Austin Schuh | f257f3c | 2019-10-27 21:00:43 -0700 | [diff] [blame] | 220 | AOS_LOG(ERROR, "Got non-finite values in target.\n"); |
James Kuszmaul | 6f941b7 | 2019-03-08 18:12:25 -0800 | [diff] [blame] | 221 | return false; |
| 222 | } |
| 223 | if (reading.distance < 0) { |
Austin Schuh | f257f3c | 2019-10-27 21:00:43 -0700 | [diff] [blame] | 224 | AOS_LOG(ERROR, "Got negative distance.\n"); |
James Kuszmaul | 6f941b7 | 2019-03-08 18:12:25 -0800 | [diff] [blame] | 225 | return false; |
| 226 | } |
| 227 | if (::std::abs(::aos::math::NormalizeAngle(reading.skew)) > M_PI_2) { |
Austin Schuh | f257f3c | 2019-10-27 21:00:43 -0700 | [diff] [blame] | 228 | AOS_LOG(ERROR, "Got skew > pi / 2.\n"); |
James Kuszmaul | 6f941b7 | 2019-03-08 18:12:25 -0800 | [diff] [blame] | 229 | return false; |
| 230 | } |
| 231 | } |
| 232 | return true; |
| 233 | } |
| 234 | |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 235 | // Computes the measurement (z) and noise covariance (R) matrices for a given |
| 236 | // TargetView. |
| 237 | void TargetViewToMatrices(const TargetView &view, Output *z, |
| 238 | Eigen::Matrix<Scalar, kNOutputs, kNOutputs> *R) { |
James Kuszmaul | 6f941b7 | 2019-03-08 18:12:25 -0800 | [diff] [blame] | 239 | *z << view.reading.heading, view.reading.distance, |
| 240 | ::aos::math::NormalizeAngle(view.reading.skew); |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 241 | // TODO(james): R should account as well for our confidence in the target |
| 242 | // matching. However, handling that properly requires thing a lot more about |
| 243 | // the probabilities. |
| 244 | R->setZero(); |
| 245 | R->diagonal() << ::std::pow(view.noise.heading, 2), |
| 246 | ::std::pow(view.noise.distance, 2), ::std::pow(view.noise.skew, 2); |
| 247 | } |
| 248 | |
| 249 | // This is the function that will be called once the Ekf has inserted the |
| 250 | // measurement into the right spot in the measurement queue and needs the |
| 251 | // output functions to actually perform the corrections. |
| 252 | // Specifically, this will take the estimate of the state at that time and |
| 253 | // figure out how the targets seen by the camera best map onto the actual |
| 254 | // targets on the field. |
| 255 | // It then fills in the h and dhdx functions that are called by the Ekf. |
| 256 | void MakeH( |
| 257 | const Camera &camera, |
| 258 | const ::aos::SizedArray<TargetView, max_targets_per_frame> &target_views, |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 259 | ::aos::SizedArray<HFunction, max_targets_per_frame> *h_functions, |
| 260 | ::aos::SizedArray<Eigen::Matrix<Scalar, kNOutputs, kNStates>, |
| 261 | max_targets_per_frame> *dhdx, |
| 262 | const State &X_hat, const StateSquare &P, HFunction *h, |
| 263 | Eigen::Matrix<Scalar, kNOutputs, kNStates> *current_dhdx) { |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 264 | // Because we need to match camera targets ("views") to actual field |
| 265 | // targets, and because we want to take advantage of the correlations |
| 266 | // between the targets (i.e., if we see two targets in the image, they |
| 267 | // probably correspond to different on-field targets), the matching problem |
| 268 | // here is somewhat non-trivial. Some of the methods we use only work |
| 269 | // because we are dealing with very small N (e.g., handling the correlations |
| 270 | // between multiple views has combinatoric complexity, but since N = 3, |
| 271 | // it's not an issue). |
| 272 | // |
| 273 | // High-level steps: |
| 274 | // 1) Set the base robot pose for the cameras to the Pose implied by X_hat. |
| 275 | // 2) Fetch all the expected target views from the camera. |
| 276 | // 3) Determine the "magnitude" of the Kalman correction from each potential |
| 277 | // view/target pair. |
| 278 | // 4) Match based on the combination of targets with the smallest |
| 279 | // corrections. |
| 280 | // 5) Calculate h and dhdx for each pair of targets. |
| 281 | // |
| 282 | // For the "magnitude" of the correction, we do not directly use the |
| 283 | // standard Kalman correction formula. Instead, we calculate the correction |
| 284 | // we would get from each component of the measurement and take the L2 norm |
| 285 | // of those. This prevents situations where a target matches very poorly but |
| 286 | // produces an overall correction of near-zero. |
| 287 | // TODO(james): I do not know if this is strictly the correct method to |
| 288 | // minimize likely error, but should be reasonable. |
| 289 | // |
| 290 | // For the matching, we do the following (see MatchFrames): |
| 291 | // 1. Compute the best max_targets_per_frame matches for each view. |
| 292 | // 2. Exhaust every possible combination of view/target pairs and |
| 293 | // choose the best one. |
| 294 | // When we don't think the camera should be able to see as many targets as |
| 295 | // we actually got in the frame, then we do permit doubling/tripling/etc. |
| 296 | // up on potential targets once we've exhausted all the targets we think |
| 297 | // we can see. |
| 298 | |
| 299 | // Set the current robot pose so that the cameras know where they are |
| 300 | // (all the cameras have robot_pose_ as their base): |
| 301 | *robot_pose_->mutable_pos() << X_hat(0, 0), X_hat(1, 0), 0.0; |
| 302 | robot_pose_->set_theta(X_hat(2, 0)); |
| 303 | |
| 304 | // Compute the things we *think* the camera should be seeing. |
| 305 | // Note: Because we will not try to match to any targets that are not |
| 306 | // returned by this function, we generally want the modelled camera to have |
| 307 | // a slightly larger field of view than the real camera, and be able to see |
| 308 | // slightly smaller targets. |
| 309 | const ::aos::SizedArray<TargetView, num_targets> camera_views = |
| 310 | camera.target_views(); |
| 311 | |
| 312 | // Each row contains the scores for each pair of target view and camera |
| 313 | // target view. Values in each row will not be populated past |
| 314 | // camera.target_views().size(); of the rows, only the first |
| 315 | // target_views.size() shall be populated. |
| 316 | // Higher scores imply a worse match. Zero implies a perfect match. |
| 317 | Eigen::Matrix<Scalar, max_targets_per_frame, num_targets> scores; |
| 318 | scores.setConstant(::std::numeric_limits<Scalar>::infinity()); |
| 319 | // Each row contains the indices of the best matches per view, where |
| 320 | // index 0 is the best, 1 the second best, and 2 the third, etc. |
| 321 | // -1 indicates an unfilled field. |
| 322 | Eigen::Matrix<int, max_targets_per_frame, max_targets_per_frame> |
| 323 | best_matches; |
| 324 | best_matches.setConstant(-1); |
| 325 | // The H matrices for each potential matching. This has the same structure |
| 326 | // as the scores matrix. |
| 327 | ::std::array<::std::array<Eigen::Matrix<Scalar, kNOutputs, kNStates>, |
| 328 | max_targets_per_frame>, |
James Kuszmaul | 2971b5a | 2023-01-29 15:49:32 -0800 | [diff] [blame] | 329 | num_targets> |
| 330 | all_H_matrices; |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 331 | |
| 332 | // Iterate through and fill out the scores for each potential pairing: |
| 333 | for (size_t ii = 0; ii < target_views.size(); ++ii) { |
| 334 | const TargetView &target_view = target_views[ii]; |
| 335 | Output z; |
| 336 | Eigen::Matrix<Scalar, kNOutputs, kNOutputs> R; |
| 337 | TargetViewToMatrices(target_view, &z, &R); |
| 338 | |
| 339 | for (size_t jj = 0; jj < camera_views.size(); ++jj) { |
| 340 | // Compute the ckalman update for this step: |
| 341 | const TargetView &view = camera_views[jj]; |
| 342 | const Eigen::Matrix<Scalar, kNOutputs, kNStates> H = |
James Kuszmaul | 46f3a21 | 2019-03-10 10:14:24 -0700 | [diff] [blame] | 343 | HMatrix(*view.target, camera.pose()); |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 344 | const Eigen::Matrix<Scalar, kNStates, kNOutputs> PH = P * H.transpose(); |
| 345 | const Eigen::Matrix<Scalar, kNOutputs, kNOutputs> S = H * PH + R; |
| 346 | // Note: The inverse here should be very cheap so long as kNOutputs = 3. |
| 347 | const Eigen::Matrix<Scalar, kNStates, kNOutputs> K = PH * S.inverse(); |
| 348 | const Output err = z - Output(view.reading.heading, |
| 349 | view.reading.distance, view.reading.skew); |
| 350 | // In order to compute the actual score, we want to consider each |
| 351 | // component of the error separately, as well as considering the impacts |
| 352 | // on the each of the states separately. As such, we calculate what |
| 353 | // the separate updates from each error component would be, and sum |
| 354 | // the impacts on the states. |
| 355 | Output scorer; |
| 356 | for (size_t kk = 0; kk < kNOutputs; ++kk) { |
| 357 | // TODO(james): squaredNorm or norm or L1-norm? Do we care about the |
| 358 | // square root? Do we prefer a quadratic or linear response? |
| 359 | scorer(kk, 0) = (K.col(kk) * err(kk, 0)).squaredNorm(); |
| 360 | } |
| 361 | // Compute the overall score--note that we add in a term for the height, |
| 362 | // scaled by a manual fudge-factor. The height is not accounted for |
| 363 | // in the Kalman update because we are not trying to estimate the height |
| 364 | // of the robot directly. |
| 365 | Scalar score = |
| 366 | scorer.squaredNorm() + |
| 367 | ::std::pow((view.reading.height - target_view.reading.height) / |
| 368 | target_view.noise.height / 20.0, |
| 369 | 2); |
| 370 | scores(ii, jj) = score; |
| 371 | all_H_matrices[ii][jj] = H; |
| 372 | |
| 373 | // Update the best_matches matrix: |
| 374 | int insert_target = jj; |
| 375 | for (size_t kk = 0; kk < max_targets_per_frame; ++kk) { |
| 376 | int idx = best_matches(ii, kk); |
| 377 | // Note that -1 indicates an unfilled value. |
| 378 | if (idx == -1 || scores(ii, idx) > scores(ii, insert_target)) { |
| 379 | best_matches(ii, kk) = insert_target; |
| 380 | insert_target = idx; |
| 381 | if (idx == -1) { |
| 382 | break; |
| 383 | } |
| 384 | } |
| 385 | } |
| 386 | } |
| 387 | } |
| 388 | |
| 389 | if (camera_views.size() == 0) { |
Austin Schuh | f257f3c | 2019-10-27 21:00:43 -0700 | [diff] [blame] | 390 | AOS_LOG(DEBUG, "Unable to identify potential target matches.\n"); |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 391 | // If we can't get a match, provide H = zero, which will make this |
| 392 | // correction step a nop. |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 393 | *h = HFunction(); |
| 394 | *current_dhdx = Eigen::Matrix<Scalar, kNOutputs, kNStates>::Zero(); |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 395 | for (size_t ii = 0; ii < target_views.size(); ++ii) { |
| 396 | h_functions->push_back(*h); |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 397 | dhdx->push_back(*current_dhdx); |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 398 | } |
| 399 | } else { |
| 400 | // Go through and brute force the issue of what the best combination of |
| 401 | // target matches are. The worst case for this algorithm will be |
| 402 | // max_targets_per_frame!, which is awful for any N > ~4, but since |
| 403 | // max_targets_per_frame = 3, I'm not really worried. |
| 404 | ::std::array<int, max_targets_per_frame> best_frames = |
| 405 | MatchFrames(scores, best_matches, target_views.size()); |
| 406 | for (size_t ii = 0; ii < target_views.size(); ++ii) { |
James Kuszmaul | 6f941b7 | 2019-03-08 18:12:25 -0800 | [diff] [blame] | 407 | size_t view_idx = best_frames[ii]; |
James Kuszmaul | 9776b39 | 2023-01-14 14:08:08 -0800 | [diff] [blame] | 408 | if (view_idx >= camera_views.size()) { |
Austin Schuh | f257f3c | 2019-10-27 21:00:43 -0700 | [diff] [blame] | 409 | AOS_LOG(ERROR, "Somehow, the view scorer failed.\n"); |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 410 | h_functions->emplace_back(); |
Philipp Schrader | 790cb54 | 2023-07-05 21:06:52 -0700 | [diff] [blame] | 411 | dhdx->push_back(Eigen::Matrix<Scalar, kNOutputs, kNStates>::Zero()); |
James Kuszmaul | 6f941b7 | 2019-03-08 18:12:25 -0800 | [diff] [blame] | 412 | continue; |
| 413 | } |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 414 | const Eigen::Matrix<Scalar, kNOutputs, kNStates> best_H = |
| 415 | all_H_matrices[ii][view_idx]; |
| 416 | const TargetView best_view = camera_views[view_idx]; |
| 417 | const TargetView target_view = target_views[ii]; |
| 418 | const Scalar match_score = scores(ii, view_idx); |
| 419 | if (match_score > kRejectionScore) { |
Austin Schuh | f257f3c | 2019-10-27 21:00:43 -0700 | [diff] [blame] | 420 | AOS_LOG(DEBUG, |
| 421 | "Rejecting target at (%f, %f, %f, %f) due to high score.\n", |
| 422 | target_view.reading.heading, target_view.reading.distance, |
| 423 | target_view.reading.skew, target_view.reading.height); |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 424 | h_functions->emplace_back(); |
| 425 | dhdx->push_back(Eigen::Matrix<Scalar, kNOutputs, kNStates>::Zero()); |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 426 | } else { |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 427 | h_functions->emplace_back(&camera, best_view, target_view, this); |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 428 | |
| 429 | // TODO(james): Experiment to better understand whether we want to |
| 430 | // recalculate H or not. |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 431 | dhdx->push_back(best_H); |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 432 | } |
| 433 | } |
| 434 | *h = h_functions->at(0); |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 435 | *current_dhdx = dhdx->at(0); |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 436 | } |
| 437 | } |
| 438 | |
James Kuszmaul | 2971b5a | 2023-01-29 15:49:32 -0800 | [diff] [blame] | 439 | Eigen::Matrix<Scalar, kNOutputs, kNStates> HMatrix(const Target &target, |
| 440 | const Pose &camera_pose) { |
James Kuszmaul | 19612ab | 2024-02-17 20:45:58 -0800 | [diff] [blame^] | 441 | return frc971::control_loops::drivetrain:: |
| 442 | HMatrixForCameraHeadingDistanceSkew(target.pose(), camera_pose); |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 443 | } |
| 444 | |
| 445 | // A helper function for the fuller version of MatchFrames; this just |
| 446 | // removes some of the arguments that are only needed during the recursion. |
| 447 | // n_views is the number of targets actually seen in the camera image (i.e., |
| 448 | // the number of rows in scores/best_matches that are actually populated). |
| 449 | ::std::array<int, max_targets_per_frame> MatchFrames( |
| 450 | const Eigen::Matrix<Scalar, max_targets_per_frame, num_targets> &scores, |
James Kuszmaul | 2971b5a | 2023-01-29 15:49:32 -0800 | [diff] [blame] | 451 | const Eigen::Matrix<int, max_targets_per_frame, max_targets_per_frame> |
| 452 | &best_matches, |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 453 | int n_views) { |
| 454 | ::std::array<int, max_targets_per_frame> best_set; |
James Kuszmaul | 6f941b7 | 2019-03-08 18:12:25 -0800 | [diff] [blame] | 455 | best_set.fill(-1); |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 456 | Scalar best_score; |
| 457 | // We start out without having "used" any views/targets: |
| 458 | ::aos::SizedArray<bool, max_targets_per_frame> used_views; |
| 459 | for (int ii = 0; ii < n_views; ++ii) { |
| 460 | used_views.push_back(false); |
| 461 | } |
| 462 | MatchFrames(scores, best_matches, used_views, {{false}}, &best_set, |
| 463 | &best_score); |
| 464 | return best_set; |
| 465 | } |
| 466 | |
| 467 | // Recursively iterates over every plausible combination of targets/views |
| 468 | // that there is and determines the lowest-scoring combination. |
| 469 | // used_views and used_targets indicate which rows/columns of the |
| 470 | // scores/best_matches matrices should be ignored. When used_views is all |
| 471 | // true, that means that we are done recursing. |
| 472 | void MatchFrames( |
| 473 | const Eigen::Matrix<Scalar, max_targets_per_frame, num_targets> &scores, |
James Kuszmaul | 2971b5a | 2023-01-29 15:49:32 -0800 | [diff] [blame] | 474 | const Eigen::Matrix<int, max_targets_per_frame, max_targets_per_frame> |
| 475 | &best_matches, |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 476 | const ::aos::SizedArray<bool, max_targets_per_frame> &used_views, |
| 477 | const ::std::array<bool, num_targets> &used_targets, |
| 478 | ::std::array<int, max_targets_per_frame> *best_set, Scalar *best_score) { |
| 479 | *best_score = ::std::numeric_limits<Scalar>::infinity(); |
| 480 | // Iterate by letting each target in the camera frame (that isn't in |
| 481 | // used_views) choose it's best match that isn't already taken. We then set |
| 482 | // the appropriate flags in used_views and used_targets and call MatchFrames |
| 483 | // to let all the other views sort themselves out. |
| 484 | for (size_t ii = 0; ii < used_views.size(); ++ii) { |
| 485 | if (used_views[ii]) { |
| 486 | continue; |
| 487 | } |
| 488 | int best_match = -1; |
| 489 | for (size_t jj = 0; jj < max_targets_per_frame; ++jj) { |
| 490 | if (best_matches(ii, jj) == -1) { |
| 491 | // If we run out of potential targets from the camera, then there |
| 492 | // are more targets in the frame than we think there should be. |
| 493 | // In this case, we are going to be doubling/tripling/etc. up |
| 494 | // anyhow. So we just give everyone their top choice: |
| 495 | // TODO(james): If we ever are dealing with larger numbers of |
| 496 | // targets per frame, do something to minimize doubling-up. |
| 497 | best_match = best_matches(ii, 0); |
| 498 | break; |
| 499 | } |
| 500 | best_match = best_matches(ii, jj); |
| 501 | if (!used_targets[best_match]) { |
| 502 | break; |
| 503 | } |
| 504 | } |
| 505 | // If we reach here and best_match = -1, that means that no potential |
| 506 | // targets were generated by the camera, and we should never have gotten |
| 507 | // here. |
Austin Schuh | f257f3c | 2019-10-27 21:00:43 -0700 | [diff] [blame] | 508 | AOS_CHECK(best_match != -1); |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 509 | ::aos::SizedArray<bool, max_targets_per_frame> sub_views = used_views; |
| 510 | sub_views[ii] = true; |
| 511 | ::std::array<bool, num_targets> sub_targets = used_targets; |
| 512 | sub_targets[best_match] = true; |
| 513 | ::std::array<int, max_targets_per_frame> sub_best_set; |
| 514 | Scalar score; |
| 515 | MatchFrames(scores, best_matches, sub_views, sub_targets, &sub_best_set, |
| 516 | &score); |
| 517 | score += scores(ii, best_match); |
| 518 | sub_best_set[ii] = best_match; |
| 519 | if (score < *best_score) { |
| 520 | *best_score = score; |
| 521 | *best_set = sub_best_set; |
| 522 | } |
| 523 | } |
| 524 | // best_score will be infinite if we did not find a result due to there |
| 525 | // being no targets that weren't set in used_vies; this is the |
| 526 | // base case of the recursion and so we set best_score to zero: |
| 527 | if (!::std::isfinite(*best_score)) { |
| 528 | *best_score = 0.0; |
| 529 | } |
| 530 | } |
| 531 | |
| 532 | // The pose that is used by the cameras to determine the location of the robot |
| 533 | // and thus the expected view of the targets. |
| 534 | Pose *robot_pose_; |
James Kuszmaul | 2971b5a | 2023-01-29 15:49:32 -0800 | [diff] [blame] | 535 | |
| 536 | typename HybridEkf::template ExpectedObservationAllocator< |
| 537 | ExpectedObservationFunctor> |
| 538 | h_queue_; |
| 539 | typename HybridEkf::template ExpectedObservationAllocator< |
| 540 | ExpectedObservationBuilder> |
| 541 | make_h_queue_; |
| 542 | |
| 543 | friend class ExpectedObservationBuilder; |
Austin Schuh | 9f45d70 | 2023-05-06 22:18:10 -0700 | [diff] [blame] | 544 | }; |
| 545 | |
| 546 | #if !defined(__clang__) && defined(__GNUC__) |
| 547 | #pragma GCC diagnostic pop |
| 548 | #endif |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 549 | |
Stephan Pleines | d99b1ee | 2024-02-02 20:56:44 -0800 | [diff] [blame] | 550 | } // namespace y2019::control_loops |
James Kuszmaul | 1057ce8 | 2019-02-09 17:58:24 -0800 | [diff] [blame] | 551 | |
| 552 | #endif // Y2019_CONTROL_LOOPS_DRIVETRAIN_LOCALIZATER_H_ |