blob: 2e7c3a5bcfbab0896b8b219931a07a7115d197dc [file] [log] [blame]
James Kuszmaul1057ce82019-02-09 17:58:24 -08001#ifndef Y2019_CONTROL_LOOPS_DRIVETRAIN_LOCALIZATER_H_
2#define Y2019_CONTROL_LOOPS_DRIVETRAIN_LOCALIZATER_H_
3
4#include <cmath>
5#include <memory>
6
James Kuszmaulf4ede202020-02-14 08:47:40 -08007#include "frc971/control_loops/drivetrain/camera.h"
James Kuszmaul1057ce82019-02-09 17:58:24 -08008#include "frc971/control_loops/drivetrain/hybrid_ekf.h"
James Kuszmaul2971b5a2023-01-29 15:49:32 -08009#include "frc971/control_loops/pose.h"
James Kuszmaul1057ce82019-02-09 17:58:24 -080010
Austin Schuh9f45d702023-05-06 22:18:10 -070011#if !defined(__clang__) && defined(__GNUC__)
12// GCC miss-detects that when zero is set to true, the member variables could be
13// uninitialized. Rather than spend the CPU to initialize them in addition to
14// the memory for no good reason, tell GCC to stop doing that. Clang appears to
15// get it.
16#pragma GCC diagnostic push
17#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
18#endif
19
James Kuszmaul1057ce82019-02-09 17:58:24 -080020namespace y2019 {
21namespace control_loops {
22
// Localizer that fuses camera observations of known field targets into the
// drivetrain HybridEkf. It owns the data-association problem: deciding which
// physical target each camera "view" corresponds to, scoring candidate
// matches by the size of the Kalman correction they would induce, and then
// feeding the chosen (h, dhdx) measurement functions into the EKF.
//
// Template parameters:
//   num_cameras: number of cameras on the robot (currently informational).
//   num_targets: number of known targets on the field.
//   num_obstacles: number of view-blocking obstacles modelled by the camera.
//   max_targets_per_frame: maximum number of target views in one camera frame.
//   Scalar: floating-point type used throughout (double by default).
template <int num_cameras, int num_targets, int num_obstacles,
          int max_targets_per_frame, typename Scalar = double>
class TypedLocalizer
    : public ::frc971::control_loops::drivetrain::HybridEkf<Scalar> {
 public:
  typedef frc971::control_loops::TypedCamera<num_targets, num_obstacles, Scalar>
      Camera;
  typedef typename Camera::TargetView TargetView;
  typedef typename Camera::Pose Pose;
  typedef typename frc971::control_loops::Target Target;
  typedef ::frc971::control_loops::drivetrain::HybridEkf<Scalar> HybridEkf;
  typedef typename HybridEkf::State State;
  typedef typename HybridEkf::StateSquare StateSquare;
  typedef typename HybridEkf::Input Input;
  typedef typename HybridEkf::Output Output;
  using HybridEkf::kNInputs;
  using HybridEkf::kNOutputs;
  using HybridEkf::kNStates;

  // robot_pose should be the object that is used by the cameras, such that when
  // we update robot_pose, the cameras will change what they report the relative
  // position of the targets as.
  // Note that the parameters for the cameras should be set to allow slightly
  // larger fields of view and slightly longer range than the true cameras so
  // that we can identify potential matches for targets even when we have slight
  // modelling errors.
  TypedLocalizer(
      const ::frc971::control_loops::drivetrain::DrivetrainConfig<Scalar>
          &dt_config,
      Pose *robot_pose)
      : ::frc971::control_loops::drivetrain::HybridEkf<Scalar>(dt_config),
        robot_pose_(robot_pose),
        h_queue_(this),
        make_h_queue_(this) {}

  // Performs a kalman filter correction with a single camera frame, consisting
  // of up to max_targets_per_frame targets and taken at time t.
  // camera is the Camera used to take the images.
  // Silently drops frames that are empty, fail sanity checks, or are newer
  // than the most recent encoder/gyro update (the EKF can only insert
  // observations into its past).
  void UpdateTargets(
      const Camera &camera,
      const ::aos::SizedArray<TargetView, max_targets_per_frame> &targets,
      ::aos::monotonic_clock::time_point t) {
    if (targets.empty()) {
      return;
    }

    if (!SanitizeTargets(targets)) {
      AOS_LOG(ERROR, "Throwing out targets due to in insane values.\n");
      return;
    }

    if (t > HybridEkf::latest_t()) {
      AOS_LOG(ERROR,
              "target observations must be older than most recent encoder/gyro "
              "update.\n");
      return;
    }

    Output z;
    Eigen::Matrix<Scalar, kNOutputs, kNOutputs> R;
    TargetViewToMatrices(targets[0], &z, &R);

    // In order to perform the correction steps for the targets, we will
    // separately perform a Correct step for each following target.
    // This way, we can have the first correction figure out the mappings
    // between targets in the image and targets on the field, and then re-use
    // those mappings for all the remaining corrections.
    // As such, we need to store the EKF functions that the remaining targets
    // will need in arrays:
    ::aos::SizedArray<HFunction, max_targets_per_frame> h_functions;
    ::aos::SizedArray<Eigen::Matrix<Scalar, kNOutputs, kNStates>,
                      max_targets_per_frame>
        dhdx;
    // The builder-based correction for targets[0] runs MakeH, which fills
    // h_functions/dhdx for *all* views in this frame.
    make_h_queue_.CorrectKnownHBuilder(
        z, nullptr,
        ExpectedObservationBuilder(this, camera, targets, &h_functions, &dhdx),
        R, t);
    // Fetch cache: the remaining targets reuse the (h, dhdx) pairs computed
    // by the first correction rather than re-running the matching.
    for (size_t ii = 1; ii < targets.size(); ++ii) {
      TargetViewToMatrices(targets[ii], &z, &R);
      h_queue_.CorrectKnownH(
          z, nullptr, ExpectedObservationFunctor(h_functions[ii], dhdx[ii]), R,
          t);
    }
  }

 private:
  // Expected-measurement function h(X, U) for one matched view/target pair.
  // A default-constructed HFunction (zero_ == true) deliberately leaves the
  // other members uninitialized (see the -Wmaybe-uninitialized pragma at the
  // top of this file) and always returns Output::Zero(), turning the
  // corresponding correction step into a no-op.
  class HFunction {
   public:
    HFunction() : zero_(true) {}
    HFunction(const Camera *camera, const TargetView &best_view,
              const TargetView &target_view, TypedLocalizer *localizer)
        : zero_(false),
          camera_(camera),
          best_view_(best_view),
          target_view_(target_view),
          localizer_(localizer) {}
    Output operator()(const State &X, const Input &) {
      if (zero_) {
        return Output::Zero();
      }

      // This function actually handles determining what the Output should
      // be at a given state, now that we have chosen the target that
      // we want to match to.
      // Moving robot_pose_ re-bases every camera, so the Rebase below sees
      // the camera pose implied by X.
      *localizer_->robot_pose_->mutable_pos() << X(0, 0), X(1, 0), 0.0;
      localizer_->robot_pose_->set_theta(X(2, 0));
      const Pose relative_pose =
          best_view_.target->pose().Rebase(&camera_->pose());
      const Scalar heading = relative_pose.heading();
      const Scalar distance = relative_pose.xy_norm();
      const Scalar skew =
          ::aos::math::NormalizeAngle(relative_pose.rel_theta() - heading);
      return Output(heading, distance, skew);
    }

   private:
    bool zero_ = false;

    const Camera *camera_;
    TargetView best_view_;
    TargetView target_view_;
    TypedLocalizer *localizer_;
  };

  friend class HFunction;

  // Adapter handed to the EKF for corrections where (h, dhdx) are already
  // known; simply forwards to the stored HFunction and Jacobian.
  class ExpectedObservationFunctor
      : public HybridEkf::ExpectedObservationFunctor {
   public:
    ExpectedObservationFunctor(const HFunction &h,
                               Eigen::Matrix<Scalar, kNOutputs, kNStates> dhdx)
        : h_(h), dhdx_(dhdx) {}

    Output H(const State &state, const Input &input) final {
      return h_(state, input);
    }

    virtual Eigen::Matrix<Scalar, kNOutputs, kNStates> DHDX(
        const State &) final {
      // The Jacobian was computed once at matching time; it is treated as
      // constant over the correction.
      return dhdx_;
    }

   private:
    HFunction h_;
    Eigen::Matrix<Scalar, kNOutputs, kNStates> dhdx_;
  };
  // Builder used for the first correction of a frame: when the EKF asks for
  // the expected observations, it runs MakeH to solve the view/target
  // matching problem and caches (h, dhdx) for every view via the pointers
  // passed in, so subsequent corrections can reuse them.
  class ExpectedObservationBuilder
      : public HybridEkf::ExpectedObservationBuilder {
   public:
    ExpectedObservationBuilder(
        TypedLocalizer *localizer, const Camera &camera,
        const ::aos::SizedArray<TargetView, max_targets_per_frame>
            &target_views,
        ::aos::SizedArray<HFunction, max_targets_per_frame> *h_functions,
        ::aos::SizedArray<Eigen::Matrix<Scalar, kNOutputs, kNStates>,
                          max_targets_per_frame> *dhdx)
        : localizer_(localizer),
          camera_(camera),
          target_views_(target_views),
          h_functions_(h_functions),
          dhdx_(dhdx) {}

    virtual ExpectedObservationFunctor *MakeExpectedObservations(
        const State &state, const StateSquare &P) {
      HFunction h;
      Eigen::Matrix<Scalar, kNOutputs, kNStates> dhdx;
      localizer_->MakeH(camera_, target_views_, h_functions_, dhdx_, state, P,
                        &h, &dhdx);
      functor_.emplace(h, dhdx);
      return &functor_.value();
    }

   private:
    TypedLocalizer *localizer_;
    const Camera &camera_;
    const ::aos::SizedArray<TargetView, max_targets_per_frame> &target_views_;
    ::aos::SizedArray<HFunction, max_targets_per_frame> *h_functions_;
    ::aos::SizedArray<Eigen::Matrix<Scalar, kNOutputs, kNStates>,
                      max_targets_per_frame> *dhdx_;
    std::optional<ExpectedObservationFunctor> functor_;
  };

  // The threshold to use for completely rejecting potentially bad target
  // matches.
  // TODO(james): Tune
  static constexpr Scalar kRejectionScore = 1.0;

  // Checks that the targets coming in make some sense--mostly to prevent NaNs
  // or the such from propagating.
  // Returns false (and logs why) on the first reading that is non-finite,
  // has a negative distance, or has |normalized skew| > pi/2.
  bool SanitizeTargets(
      const ::aos::SizedArray<TargetView, max_targets_per_frame> &targets) {
    for (const TargetView &view : targets) {
      const typename TargetView::Reading reading = view.reading;
      if (!(::std::isfinite(reading.heading) &&
            ::std::isfinite(reading.distance) &&
            ::std::isfinite(reading.skew) && ::std::isfinite(reading.height))) {
        AOS_LOG(ERROR, "Got non-finite values in target.\n");
        return false;
      }
      if (reading.distance < 0) {
        AOS_LOG(ERROR, "Got negative distance.\n");
        return false;
      }
      if (::std::abs(::aos::math::NormalizeAngle(reading.skew)) > M_PI_2) {
        AOS_LOG(ERROR, "Got skew > pi / 2.\n");
        return false;
      }
    }
    return true;
  }

  // Computes the measurement (z) and noise covariance (R) matrices for a given
  // TargetView. z is (heading, distance, normalized skew); R is diagonal,
  // built from the per-reading noise standard deviations squared.
  void TargetViewToMatrices(const TargetView &view, Output *z,
                            Eigen::Matrix<Scalar, kNOutputs, kNOutputs> *R) {
    *z << view.reading.heading, view.reading.distance,
        ::aos::math::NormalizeAngle(view.reading.skew);
    // TODO(james): R should account as well for our confidence in the target
    // matching. However, handling that properly requires us to think a lot
    // more about the probabilities.
    R->setZero();
    R->diagonal() << ::std::pow(view.noise.heading, 2),
        ::std::pow(view.noise.distance, 2), ::std::pow(view.noise.skew, 2);
  }

  // This is the function that will be called once the Ekf has inserted the
  // measurement into the right spot in the measurement queue and needs the
  // output functions to actually perform the corrections.
  // Specifically, this will take the estimate of the state at that time and
  // figure out how the targets seen by the camera best map onto the actual
  // targets on the field.
  // It then fills in the h and dhdx functions that are called by the Ekf.
  // h/current_dhdx receive the pair for target_views[0]; h_functions/dhdx are
  // filled with one entry per view so later corrections can reuse them.
  void MakeH(
      const Camera &camera,
      const ::aos::SizedArray<TargetView, max_targets_per_frame> &target_views,
      ::aos::SizedArray<HFunction, max_targets_per_frame> *h_functions,
      ::aos::SizedArray<Eigen::Matrix<Scalar, kNOutputs, kNStates>,
                        max_targets_per_frame> *dhdx,
      const State &X_hat, const StateSquare &P, HFunction *h,
      Eigen::Matrix<Scalar, kNOutputs, kNStates> *current_dhdx) {
    // Because we need to match camera targets ("views") to actual field
    // targets, and because we want to take advantage of the correlations
    // between the targets (i.e., if we see two targets in the image, they
    // probably correspond to different on-field targets), the matching problem
    // here is somewhat non-trivial. Some of the methods we use only work
    // because we are dealing with very small N (e.g., handling the correlations
    // between multiple views has combinatoric complexity, but since N = 3,
    // it's not an issue).
    //
    // High-level steps:
    // 1) Set the base robot pose for the cameras to the Pose implied by X_hat.
    // 2) Fetch all the expected target views from the camera.
    // 3) Determine the "magnitude" of the Kalman correction from each potential
    //    view/target pair.
    // 4) Match based on the combination of targets with the smallest
    //    corrections.
    // 5) Calculate h and dhdx for each pair of targets.
    //
    // For the "magnitude" of the correction, we do not directly use the
    // standard Kalman correction formula. Instead, we calculate the correction
    // we would get from each component of the measurement and take the L2 norm
    // of those. This prevents situations where a target matches very poorly but
    // produces an overall correction of near-zero.
    // TODO(james): I do not know if this is strictly the correct method to
    // minimize likely error, but should be reasonable.
    //
    // For the matching, we do the following (see MatchFrames):
    // 1. Compute the best max_targets_per_frame matches for each view.
    // 2. Exhaust every possible combination of view/target pairs and
    //    choose the best one.
    // When we don't think the camera should be able to see as many targets as
    // we actually got in the frame, then we do permit doubling/tripling/etc.
    // up on potential targets once we've exhausted all the targets we think
    // we can see.

    // Set the current robot pose so that the cameras know where they are
    // (all the cameras have robot_pose_ as their base):
    *robot_pose_->mutable_pos() << X_hat(0, 0), X_hat(1, 0), 0.0;
    robot_pose_->set_theta(X_hat(2, 0));

    // Compute the things we *think* the camera should be seeing.
    // Note: Because we will not try to match to any targets that are not
    // returned by this function, we generally want the modelled camera to have
    // a slightly larger field of view than the real camera, and be able to see
    // slightly smaller targets.
    const ::aos::SizedArray<TargetView, num_targets> camera_views =
        camera.target_views();

    // Each row contains the scores for each pair of target view and camera
    // target view. Values in each row will not be populated past
    // camera.target_views().size(); of the rows, only the first
    // target_views.size() shall be populated.
    // Higher scores imply a worse match. Zero implies a perfect match.
    Eigen::Matrix<Scalar, max_targets_per_frame, num_targets> scores;
    scores.setConstant(::std::numeric_limits<Scalar>::infinity());
    // Each row contains the indices of the best matches per view, where
    // index 0 is the best, 1 the second best, and 2 the third, etc.
    // -1 indicates an unfilled field.
    Eigen::Matrix<int, max_targets_per_frame, max_targets_per_frame>
        best_matches;
    best_matches.setConstant(-1);
    // The H matrices for each potential matching. This has the same structure
    // as the scores matrix.
    ::std::array<::std::array<Eigen::Matrix<Scalar, kNOutputs, kNStates>,
                              max_targets_per_frame>,
                 num_targets>
        all_H_matrices;

    // Iterate through and fill out the scores for each potential pairing:
    for (size_t ii = 0; ii < target_views.size(); ++ii) {
      const TargetView &target_view = target_views[ii];
      Output z;
      Eigen::Matrix<Scalar, kNOutputs, kNOutputs> R;
      TargetViewToMatrices(target_view, &z, &R);

      for (size_t jj = 0; jj < camera_views.size(); ++jj) {
        // Compute the Kalman update for this step:
        const TargetView &view = camera_views[jj];
        const Eigen::Matrix<Scalar, kNOutputs, kNStates> H =
            HMatrix(*view.target, camera.pose());
        const Eigen::Matrix<Scalar, kNStates, kNOutputs> PH = P * H.transpose();
        const Eigen::Matrix<Scalar, kNOutputs, kNOutputs> S = H * PH + R;
        // Note: The inverse here should be very cheap so long as kNOutputs = 3.
        const Eigen::Matrix<Scalar, kNStates, kNOutputs> K = PH * S.inverse();
        const Output err = z - Output(view.reading.heading,
                                      view.reading.distance, view.reading.skew);
        // In order to compute the actual score, we want to consider each
        // component of the error separately, as well as considering the impacts
        // on the each of the states separately. As such, we calculate what
        // the separate updates from each error component would be, and sum
        // the impacts on the states.
        Output scorer;
        for (size_t kk = 0; kk < kNOutputs; ++kk) {
          // TODO(james): squaredNorm or norm or L1-norm? Do we care about the
          // square root? Do we prefer a quadratic or linear response?
          scorer(kk, 0) = (K.col(kk) * err(kk, 0)).squaredNorm();
        }
        // Compute the overall score--note that we add in a term for the height,
        // scaled by a manual fudge-factor. The height is not accounted for
        // in the Kalman update because we are not trying to estimate the height
        // of the robot directly.
        Scalar score =
            scorer.squaredNorm() +
            ::std::pow((view.reading.height - target_view.reading.height) /
                           target_view.noise.height / 20.0,
                       2);
        scores(ii, jj) = score;
        all_H_matrices[ii][jj] = H;

        // Update the best_matches matrix: insertion-sort jj into row ii,
        // keeping the row ordered best-first.
        int insert_target = jj;
        for (size_t kk = 0; kk < max_targets_per_frame; ++kk) {
          int idx = best_matches(ii, kk);
          // Note that -1 indicates an unfilled value.
          if (idx == -1 || scores(ii, idx) > scores(ii, insert_target)) {
            best_matches(ii, kk) = insert_target;
            insert_target = idx;
            if (idx == -1) {
              break;
            }
          }
        }
      }
    }

    if (camera_views.size() == 0) {
      AOS_LOG(DEBUG, "Unable to identify potential target matches.\n");
      // If we can't get a match, provide H = zero, which will make this
      // correction step a nop.
      *h = HFunction();
      *current_dhdx = Eigen::Matrix<Scalar, kNOutputs, kNStates>::Zero();
      for (size_t ii = 0; ii < target_views.size(); ++ii) {
        h_functions->push_back(*h);
        dhdx->push_back(*current_dhdx);
      }
    } else {
      // Go through and brute force the issue of what the best combination of
      // target matches are. The worst case for this algorithm will be
      // max_targets_per_frame!, which is awful for any N > ~4, but since
      // max_targets_per_frame = 3, I'm not really worried.
      ::std::array<int, max_targets_per_frame> best_frames =
          MatchFrames(scores, best_matches, target_views.size());
      for (size_t ii = 0; ii < target_views.size(); ++ii) {
        size_t view_idx = best_frames[ii];
        if (view_idx >= camera_views.size()) {
          AOS_LOG(ERROR, "Somehow, the view scorer failed.\n");
          // Fall back to a no-op correction for this view.
          h_functions->emplace_back();
          dhdx->push_back(Eigen::Matrix<Scalar, kNOutputs, kNStates>::Zero());
          continue;
        }
        const Eigen::Matrix<Scalar, kNOutputs, kNStates> best_H =
            all_H_matrices[ii][view_idx];
        const TargetView best_view = camera_views[view_idx];
        const TargetView target_view = target_views[ii];
        const Scalar match_score = scores(ii, view_idx);
        if (match_score > kRejectionScore) {
          AOS_LOG(DEBUG,
                  "Rejecting target at (%f, %f, %f, %f) due to high score.\n",
                  target_view.reading.heading, target_view.reading.distance,
                  target_view.reading.skew, target_view.reading.height);
          h_functions->emplace_back();
          dhdx->push_back(Eigen::Matrix<Scalar, kNOutputs, kNStates>::Zero());
        } else {
          h_functions->emplace_back(&camera, best_view, target_view, this);

          // TODO(james): Experiment to better understand whether we want to
          // recalculate H or not.
          dhdx->push_back(best_H);
        }
      }
      *h = h_functions->at(0);
      *current_dhdx = dhdx->at(0);
    }
  }

  // Analytic measurement Jacobian d(heading, distance, skew)/d(x, y, theta)
  // for a given target as seen from camera_pose. Columns past the first three
  // states are zero.
  Eigen::Matrix<Scalar, kNOutputs, kNStates> HMatrix(const Target &target,
                                                     const Pose &camera_pose) {
    // To calculate dheading/d{x,y,theta}:
    // heading = arctan2(target_pos - camera_pos) - camera_theta
    Eigen::Matrix<Scalar, 3, 1> target_pos = target.pose().abs_pos();
    Eigen::Matrix<Scalar, 3, 1> camera_pos = camera_pose.abs_pos();
    Scalar diffx = target_pos.x() - camera_pos.x();
    Scalar diffy = target_pos.y() - camera_pos.y();
    Scalar norm2 = diffx * diffx + diffy * diffy;
    Scalar dheadingdx = diffy / norm2;
    Scalar dheadingdy = -diffx / norm2;
    Scalar dheadingdtheta = -1.0;

    // To calculate ddistance/d{x,y}:
    // distance = sqrt(diffx^2 + diffy^2)
    Scalar distance = ::std::sqrt(norm2);
    Scalar ddistdx = -diffx / distance;
    Scalar ddistdy = -diffy / distance;

    // Skew = target.theta - camera.theta - heading
    //      = target.theta - arctan2(target_pos - camera_pos)
    Scalar dskewdx = -dheadingdx;
    Scalar dskewdy = -dheadingdy;
    Eigen::Matrix<Scalar, kNOutputs, kNStates> H;
    H.setZero();
    H(0, 0) = dheadingdx;
    H(0, 1) = dheadingdy;
    H(0, 2) = dheadingdtheta;
    H(1, 0) = ddistdx;
    H(1, 1) = ddistdy;
    H(2, 0) = dskewdx;
    H(2, 1) = dskewdy;
    return H;
  }

  // A helper function for the fuller version of MatchFrames; this just
  // removes some of the arguments that are only needed during the recursion.
  // n_views is the number of targets actually seen in the camera image (i.e.,
  // the number of rows in scores/best_matches that are actually populated).
  // Returns, per view, the index of the chosen camera view (-1 if unmatched).
  ::std::array<int, max_targets_per_frame> MatchFrames(
      const Eigen::Matrix<Scalar, max_targets_per_frame, num_targets> &scores,
      const Eigen::Matrix<int, max_targets_per_frame, max_targets_per_frame>
          &best_matches,
      int n_views) {
    ::std::array<int, max_targets_per_frame> best_set;
    best_set.fill(-1);
    Scalar best_score;
    // We start out without having "used" any views/targets:
    ::aos::SizedArray<bool, max_targets_per_frame> used_views;
    for (int ii = 0; ii < n_views; ++ii) {
      used_views.push_back(false);
    }
    MatchFrames(scores, best_matches, used_views, {{false}}, &best_set,
                &best_score);
    return best_set;
  }

  // Recursively iterates over every plausible combination of targets/views
  // that there is and determines the lowest-scoring combination.
  // used_views and used_targets indicate which rows/columns of the
  // scores/best_matches matrices should be ignored. When used_views is all
  // true, that means that we are done recursing.
  void MatchFrames(
      const Eigen::Matrix<Scalar, max_targets_per_frame, num_targets> &scores,
      const Eigen::Matrix<int, max_targets_per_frame, max_targets_per_frame>
          &best_matches,
      const ::aos::SizedArray<bool, max_targets_per_frame> &used_views,
      const ::std::array<bool, num_targets> &used_targets,
      ::std::array<int, max_targets_per_frame> *best_set, Scalar *best_score) {
    *best_score = ::std::numeric_limits<Scalar>::infinity();
    // Iterate by letting each target in the camera frame (that isn't in
    // used_views) choose its best match that isn't already taken. We then set
    // the appropriate flags in used_views and used_targets and call MatchFrames
    // to let all the other views sort themselves out.
    for (size_t ii = 0; ii < used_views.size(); ++ii) {
      if (used_views[ii]) {
        continue;
      }
      int best_match = -1;
      for (size_t jj = 0; jj < max_targets_per_frame; ++jj) {
        if (best_matches(ii, jj) == -1) {
          // If we run out of potential targets from the camera, then there
          // are more targets in the frame than we think there should be.
          // In this case, we are going to be doubling/tripling/etc. up
          // anyhow. So we just give everyone their top choice:
          // TODO(james): If we ever are dealing with larger numbers of
          // targets per frame, do something to minimize doubling-up.
          best_match = best_matches(ii, 0);
          break;
        }
        best_match = best_matches(ii, jj);
        if (!used_targets[best_match]) {
          break;
        }
      }
      // If we reach here and best_match = -1, that means that no potential
      // targets were generated by the camera, and we should never have gotten
      // here.
      AOS_CHECK(best_match != -1);
      ::aos::SizedArray<bool, max_targets_per_frame> sub_views = used_views;
      sub_views[ii] = true;
      ::std::array<bool, num_targets> sub_targets = used_targets;
      sub_targets[best_match] = true;
      ::std::array<int, max_targets_per_frame> sub_best_set;
      Scalar score;
      MatchFrames(scores, best_matches, sub_views, sub_targets, &sub_best_set,
                  &score);
      score += scores(ii, best_match);
      sub_best_set[ii] = best_match;
      if (score < *best_score) {
        *best_score = score;
        *best_set = sub_best_set;
      }
    }
    // best_score will be infinite if we did not find a result due to there
    // being no targets that weren't set in used_views; this is the
    // base case of the recursion and so we set best_score to zero:
    if (!::std::isfinite(*best_score)) {
      *best_score = 0.0;
    }
  }

  // The pose that is used by the cameras to determine the location of the robot
  // and thus the expected view of the targets.
  Pose *robot_pose_;

  // Allocators that hand the EKF measurement queue the functors above.
  // h_queue_ serves corrections with precomputed (h, dhdx); make_h_queue_
  // serves the first correction of each frame, which runs the matcher.
  typename HybridEkf::template ExpectedObservationAllocator<
      ExpectedObservationFunctor>
      h_queue_;
  typename HybridEkf::template ExpectedObservationAllocator<
      ExpectedObservationBuilder>
      make_h_queue_;

  friend class ExpectedObservationBuilder;
};
574
575#if !defined(__clang__) && defined(__GNUC__)
576#pragma GCC diagnostic pop
577#endif
James Kuszmaul1057ce82019-02-09 17:58:24 -0800578
579} // namespace control_loops
580} // namespace y2019
581
582#endif // Y2019_CONTROL_LOOPS_DRIVETRAIN_LOCALIZATER_H_