blob: f4c7a8a7d859e843d2b895219bdc90aae192d856 [file] [log] [blame]
#ifndef Y2019_CONTROL_LOOPS_DRIVETRAIN_LOCALIZATER_H_
#define Y2019_CONTROL_LOOPS_DRIVETRAIN_LOCALIZATER_H_

#include <cmath>
#include <memory>

#include "frc971/control_loops/drivetrain/camera.h"
#include "frc971/control_loops/drivetrain/hybrid_ekf.h"
#include "frc971/control_loops/pose.h"
James Kuszmaul1057ce82019-02-09 17:58:24 -080010
#if !defined(__clang__) && defined(__GNUC__)
// GCC mis-detects that when zero is set to true, the member variables could be
// uninitialized. Rather than spend the CPU to initialize them in addition to
// the memory for no good reason, tell GCC to stop doing that. Clang appears to
// get it.
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wmaybe-uninitialized"
#endif

namespace y2019::control_loops {
James Kuszmaul1057ce82019-02-09 17:58:24 -080021
22template <int num_cameras, int num_targets, int num_obstacles,
23 int max_targets_per_frame, typename Scalar = double>
24class TypedLocalizer
25 : public ::frc971::control_loops::drivetrain::HybridEkf<Scalar> {
26 public:
James Kuszmaulf4ede202020-02-14 08:47:40 -080027 typedef frc971::control_loops::TypedCamera<num_targets, num_obstacles, Scalar>
28 Camera;
James Kuszmaul1057ce82019-02-09 17:58:24 -080029 typedef typename Camera::TargetView TargetView;
30 typedef typename Camera::Pose Pose;
James Kuszmaulf4ede202020-02-14 08:47:40 -080031 typedef typename frc971::control_loops::Target Target;
James Kuszmaul1057ce82019-02-09 17:58:24 -080032 typedef ::frc971::control_loops::drivetrain::HybridEkf<Scalar> HybridEkf;
33 typedef typename HybridEkf::State State;
34 typedef typename HybridEkf::StateSquare StateSquare;
35 typedef typename HybridEkf::Input Input;
36 typedef typename HybridEkf::Output Output;
37 using HybridEkf::kNInputs;
38 using HybridEkf::kNOutputs;
39 using HybridEkf::kNStates;
40
41 // robot_pose should be the object that is used by the cameras, such that when
42 // we update robot_pose, the cameras will change what they report the relative
43 // position of the targets as.
44 // Note that the parameters for the cameras should be set to allow slightly
45 // larger fields of view and slightly longer range than the true cameras so
46 // that we can identify potential matches for targets even when we have slight
47 // modelling errors.
48 TypedLocalizer(
49 const ::frc971::control_loops::drivetrain::DrivetrainConfig<Scalar>
50 &dt_config,
51 Pose *robot_pose)
52 : ::frc971::control_loops::drivetrain::HybridEkf<Scalar>(dt_config),
James Kuszmaul2971b5a2023-01-29 15:49:32 -080053 robot_pose_(robot_pose),
54 h_queue_(this),
55 make_h_queue_(this) {}
James Kuszmaul1057ce82019-02-09 17:58:24 -080056
57 // Performs a kalman filter correction with a single camera frame, consisting
58 // of up to max_targets_per_frame targets and taken at time t.
59 // camera is the Camera used to take the images.
60 void UpdateTargets(
61 const Camera &camera,
62 const ::aos::SizedArray<TargetView, max_targets_per_frame> &targets,
63 ::aos::monotonic_clock::time_point t) {
64 if (targets.empty()) {
65 return;
66 }
67
James Kuszmaul6f941b72019-03-08 18:12:25 -080068 if (!SanitizeTargets(targets)) {
Austin Schuhf257f3c2019-10-27 21:00:43 -070069 AOS_LOG(ERROR, "Throwing out targets due to in insane values.\n");
James Kuszmaul6f941b72019-03-08 18:12:25 -080070 return;
71 }
72
James Kuszmaul1057ce82019-02-09 17:58:24 -080073 if (t > HybridEkf::latest_t()) {
Austin Schuhf257f3c2019-10-27 21:00:43 -070074 AOS_LOG(ERROR,
75 "target observations must be older than most recent encoder/gyro "
76 "update.\n");
James Kuszmaul1057ce82019-02-09 17:58:24 -080077 return;
78 }
79
80 Output z;
81 Eigen::Matrix<Scalar, kNOutputs, kNOutputs> R;
82 TargetViewToMatrices(targets[0], &z, &R);
83
84 // In order to perform the correction steps for the targets, we will
85 // separately perform a Correct step for each following target.
86 // This way, we can have the first correction figure out the mappings
87 // between targets in the image and targets on the field, and then re-use
88 // those mappings for all the remaining corrections.
89 // As such, we need to store the EKF functions that the remaining targets
90 // will need in arrays:
Austin Schuh9f45d702023-05-06 22:18:10 -070091 ::aos::SizedArray<HFunction, max_targets_per_frame> h_functions;
92 ::aos::SizedArray<Eigen::Matrix<Scalar, kNOutputs, kNStates>,
James Kuszmaul2971b5a2023-01-29 15:49:32 -080093 max_targets_per_frame>
Austin Schuh9f45d702023-05-06 22:18:10 -070094 dhdx;
James Kuszmaul2971b5a2023-01-29 15:49:32 -080095 make_h_queue_.CorrectKnownHBuilder(
James Kuszmaul1057ce82019-02-09 17:58:24 -080096 z, nullptr,
Philipp Schrader790cb542023-07-05 21:06:52 -070097 ExpectedObservationBuilder(this, camera, targets, &h_functions, &dhdx),
James Kuszmaul2971b5a2023-01-29 15:49:32 -080098 R, t);
James Kuszmaul1057ce82019-02-09 17:58:24 -080099 // Fetch cache:
100 for (size_t ii = 1; ii < targets.size(); ++ii) {
101 TargetViewToMatrices(targets[ii], &z, &R);
James Kuszmaul2971b5a2023-01-29 15:49:32 -0800102 h_queue_.CorrectKnownH(
Philipp Schrader790cb542023-07-05 21:06:52 -0700103 z, nullptr, ExpectedObservationFunctor(h_functions[ii], dhdx[ii]), R,
James Kuszmaul2971b5a2023-01-29 15:49:32 -0800104 t);
James Kuszmaul1057ce82019-02-09 17:58:24 -0800105 }
106 }
107
108 private:
Austin Schuh9f45d702023-05-06 22:18:10 -0700109 class HFunction {
110 public:
111 HFunction() : zero_(true) {}
112 HFunction(const Camera *camera, const TargetView &best_view,
113 const TargetView &target_view, TypedLocalizer *localizer)
114 : zero_(false),
115 camera_(camera),
116 best_view_(best_view),
117 target_view_(target_view),
118 localizer_(localizer) {}
119 Output operator()(const State &X, const Input &) {
120 if (zero_) {
121 return Output::Zero();
122 }
123
124 // This function actually handles determining what the Output should
125 // be at a given state, now that we have chosen the target that
126 // we want to match to.
127 *localizer_->robot_pose_->mutable_pos() << X(0, 0), X(1, 0), 0.0;
128 localizer_->robot_pose_->set_theta(X(2, 0));
129 const Pose relative_pose =
130 best_view_.target->pose().Rebase(&camera_->pose());
131 const Scalar heading = relative_pose.heading();
132 const Scalar distance = relative_pose.xy_norm();
133 const Scalar skew =
134 ::aos::math::NormalizeAngle(relative_pose.rel_theta() - heading);
135 return Output(heading, distance, skew);
136 }
137
138 private:
139 bool zero_ = false;
140
141 const Camera *camera_;
142 TargetView best_view_;
143 TargetView target_view_;
144 TypedLocalizer *localizer_;
145 };
146
147 friend class HFunction;
148
James Kuszmaul2971b5a2023-01-29 15:49:32 -0800149 class ExpectedObservationFunctor
150 : public HybridEkf::ExpectedObservationFunctor {
151 public:
Austin Schuh9f45d702023-05-06 22:18:10 -0700152 ExpectedObservationFunctor(const HFunction &h,
153 Eigen::Matrix<Scalar, kNOutputs, kNStates> dhdx)
James Kuszmaul2971b5a2023-01-29 15:49:32 -0800154 : h_(h), dhdx_(dhdx) {}
155
156 Output H(const State &state, const Input &input) final {
157 return h_(state, input);
158 }
159
160 virtual Eigen::Matrix<Scalar, kNOutputs, kNStates> DHDX(
Austin Schuh9f45d702023-05-06 22:18:10 -0700161 const State &) final {
162 return dhdx_;
James Kuszmaul2971b5a2023-01-29 15:49:32 -0800163 }
164
165 private:
Austin Schuh9f45d702023-05-06 22:18:10 -0700166 HFunction h_;
167 Eigen::Matrix<Scalar, kNOutputs, kNStates> dhdx_;
James Kuszmaul2971b5a2023-01-29 15:49:32 -0800168 };
169 class ExpectedObservationBuilder
170 : public HybridEkf::ExpectedObservationBuilder {
171 public:
172 ExpectedObservationBuilder(
173 TypedLocalizer *localizer, const Camera &camera,
174 const ::aos::SizedArray<TargetView, max_targets_per_frame>
175 &target_views,
Austin Schuh9f45d702023-05-06 22:18:10 -0700176 ::aos::SizedArray<HFunction, max_targets_per_frame> *h_functions,
177 ::aos::SizedArray<Eigen::Matrix<Scalar, kNOutputs, kNStates>,
178 max_targets_per_frame> *dhdx)
James Kuszmaul2971b5a2023-01-29 15:49:32 -0800179 : localizer_(localizer),
180 camera_(camera),
181 target_views_(target_views),
182 h_functions_(h_functions),
Austin Schuh9f45d702023-05-06 22:18:10 -0700183 dhdx_(dhdx) {}
James Kuszmaul2971b5a2023-01-29 15:49:32 -0800184
185 virtual ExpectedObservationFunctor *MakeExpectedObservations(
186 const State &state, const StateSquare &P) {
Austin Schuh9f45d702023-05-06 22:18:10 -0700187 HFunction h;
188 Eigen::Matrix<Scalar, kNOutputs, kNStates> dhdx;
Philipp Schrader790cb542023-07-05 21:06:52 -0700189 localizer_->MakeH(camera_, target_views_, h_functions_, dhdx_, state, P,
190 &h, &dhdx);
James Kuszmaul2971b5a2023-01-29 15:49:32 -0800191 functor_.emplace(h, dhdx);
192 return &functor_.value();
193 }
194
195 private:
196 TypedLocalizer *localizer_;
197 const Camera &camera_;
198 const ::aos::SizedArray<TargetView, max_targets_per_frame> &target_views_;
Austin Schuh9f45d702023-05-06 22:18:10 -0700199 ::aos::SizedArray<HFunction, max_targets_per_frame> *h_functions_;
200 ::aos::SizedArray<Eigen::Matrix<Scalar, kNOutputs, kNStates>,
201 max_targets_per_frame> *dhdx_;
James Kuszmaul2971b5a2023-01-29 15:49:32 -0800202 std::optional<ExpectedObservationFunctor> functor_;
203 };
Austin Schuh9f45d702023-05-06 22:18:10 -0700204
James Kuszmaul1057ce82019-02-09 17:58:24 -0800205 // The threshold to use for completely rejecting potentially bad target
206 // matches.
207 // TODO(james): Tune
Austin Schuh113a85d2019-03-28 17:18:08 -0700208 static constexpr Scalar kRejectionScore = 1.0;
James Kuszmaul1057ce82019-02-09 17:58:24 -0800209
James Kuszmaul6f941b72019-03-08 18:12:25 -0800210 // Checks that the targets coming in make some sense--mostly to prevent NaNs
211 // or the such from propagating.
212 bool SanitizeTargets(
213 const ::aos::SizedArray<TargetView, max_targets_per_frame> &targets) {
214 for (const TargetView &view : targets) {
215 const typename TargetView::Reading reading = view.reading;
216 if (!(::std::isfinite(reading.heading) &&
217 ::std::isfinite(reading.distance) &&
218 ::std::isfinite(reading.skew) && ::std::isfinite(reading.height))) {
Austin Schuhf257f3c2019-10-27 21:00:43 -0700219 AOS_LOG(ERROR, "Got non-finite values in target.\n");
James Kuszmaul6f941b72019-03-08 18:12:25 -0800220 return false;
221 }
222 if (reading.distance < 0) {
Austin Schuhf257f3c2019-10-27 21:00:43 -0700223 AOS_LOG(ERROR, "Got negative distance.\n");
James Kuszmaul6f941b72019-03-08 18:12:25 -0800224 return false;
225 }
226 if (::std::abs(::aos::math::NormalizeAngle(reading.skew)) > M_PI_2) {
Austin Schuhf257f3c2019-10-27 21:00:43 -0700227 AOS_LOG(ERROR, "Got skew > pi / 2.\n");
James Kuszmaul6f941b72019-03-08 18:12:25 -0800228 return false;
229 }
230 }
231 return true;
232 }
233
James Kuszmaul1057ce82019-02-09 17:58:24 -0800234 // Computes the measurement (z) and noise covariance (R) matrices for a given
235 // TargetView.
236 void TargetViewToMatrices(const TargetView &view, Output *z,
237 Eigen::Matrix<Scalar, kNOutputs, kNOutputs> *R) {
James Kuszmaul6f941b72019-03-08 18:12:25 -0800238 *z << view.reading.heading, view.reading.distance,
239 ::aos::math::NormalizeAngle(view.reading.skew);
James Kuszmaul1057ce82019-02-09 17:58:24 -0800240 // TODO(james): R should account as well for our confidence in the target
241 // matching. However, handling that properly requires thing a lot more about
242 // the probabilities.
243 R->setZero();
244 R->diagonal() << ::std::pow(view.noise.heading, 2),
245 ::std::pow(view.noise.distance, 2), ::std::pow(view.noise.skew, 2);
246 }
247
248 // This is the function that will be called once the Ekf has inserted the
249 // measurement into the right spot in the measurement queue and needs the
250 // output functions to actually perform the corrections.
251 // Specifically, this will take the estimate of the state at that time and
252 // figure out how the targets seen by the camera best map onto the actual
253 // targets on the field.
254 // It then fills in the h and dhdx functions that are called by the Ekf.
255 void MakeH(
256 const Camera &camera,
257 const ::aos::SizedArray<TargetView, max_targets_per_frame> &target_views,
Austin Schuh9f45d702023-05-06 22:18:10 -0700258 ::aos::SizedArray<HFunction, max_targets_per_frame> *h_functions,
259 ::aos::SizedArray<Eigen::Matrix<Scalar, kNOutputs, kNStates>,
260 max_targets_per_frame> *dhdx,
261 const State &X_hat, const StateSquare &P, HFunction *h,
262 Eigen::Matrix<Scalar, kNOutputs, kNStates> *current_dhdx) {
James Kuszmaul1057ce82019-02-09 17:58:24 -0800263 // Because we need to match camera targets ("views") to actual field
264 // targets, and because we want to take advantage of the correlations
265 // between the targets (i.e., if we see two targets in the image, they
266 // probably correspond to different on-field targets), the matching problem
267 // here is somewhat non-trivial. Some of the methods we use only work
268 // because we are dealing with very small N (e.g., handling the correlations
269 // between multiple views has combinatoric complexity, but since N = 3,
270 // it's not an issue).
271 //
272 // High-level steps:
273 // 1) Set the base robot pose for the cameras to the Pose implied by X_hat.
274 // 2) Fetch all the expected target views from the camera.
275 // 3) Determine the "magnitude" of the Kalman correction from each potential
276 // view/target pair.
277 // 4) Match based on the combination of targets with the smallest
278 // corrections.
279 // 5) Calculate h and dhdx for each pair of targets.
280 //
281 // For the "magnitude" of the correction, we do not directly use the
282 // standard Kalman correction formula. Instead, we calculate the correction
283 // we would get from each component of the measurement and take the L2 norm
284 // of those. This prevents situations where a target matches very poorly but
285 // produces an overall correction of near-zero.
286 // TODO(james): I do not know if this is strictly the correct method to
287 // minimize likely error, but should be reasonable.
288 //
289 // For the matching, we do the following (see MatchFrames):
290 // 1. Compute the best max_targets_per_frame matches for each view.
291 // 2. Exhaust every possible combination of view/target pairs and
292 // choose the best one.
293 // When we don't think the camera should be able to see as many targets as
294 // we actually got in the frame, then we do permit doubling/tripling/etc.
295 // up on potential targets once we've exhausted all the targets we think
296 // we can see.
297
298 // Set the current robot pose so that the cameras know where they are
299 // (all the cameras have robot_pose_ as their base):
300 *robot_pose_->mutable_pos() << X_hat(0, 0), X_hat(1, 0), 0.0;
301 robot_pose_->set_theta(X_hat(2, 0));
302
303 // Compute the things we *think* the camera should be seeing.
304 // Note: Because we will not try to match to any targets that are not
305 // returned by this function, we generally want the modelled camera to have
306 // a slightly larger field of view than the real camera, and be able to see
307 // slightly smaller targets.
308 const ::aos::SizedArray<TargetView, num_targets> camera_views =
309 camera.target_views();
310
311 // Each row contains the scores for each pair of target view and camera
312 // target view. Values in each row will not be populated past
313 // camera.target_views().size(); of the rows, only the first
314 // target_views.size() shall be populated.
315 // Higher scores imply a worse match. Zero implies a perfect match.
316 Eigen::Matrix<Scalar, max_targets_per_frame, num_targets> scores;
317 scores.setConstant(::std::numeric_limits<Scalar>::infinity());
318 // Each row contains the indices of the best matches per view, where
319 // index 0 is the best, 1 the second best, and 2 the third, etc.
320 // -1 indicates an unfilled field.
321 Eigen::Matrix<int, max_targets_per_frame, max_targets_per_frame>
322 best_matches;
323 best_matches.setConstant(-1);
324 // The H matrices for each potential matching. This has the same structure
325 // as the scores matrix.
326 ::std::array<::std::array<Eigen::Matrix<Scalar, kNOutputs, kNStates>,
327 max_targets_per_frame>,
James Kuszmaul2971b5a2023-01-29 15:49:32 -0800328 num_targets>
329 all_H_matrices;
James Kuszmaul1057ce82019-02-09 17:58:24 -0800330
331 // Iterate through and fill out the scores for each potential pairing:
332 for (size_t ii = 0; ii < target_views.size(); ++ii) {
333 const TargetView &target_view = target_views[ii];
334 Output z;
335 Eigen::Matrix<Scalar, kNOutputs, kNOutputs> R;
336 TargetViewToMatrices(target_view, &z, &R);
337
338 for (size_t jj = 0; jj < camera_views.size(); ++jj) {
339 // Compute the ckalman update for this step:
340 const TargetView &view = camera_views[jj];
341 const Eigen::Matrix<Scalar, kNOutputs, kNStates> H =
James Kuszmaul46f3a212019-03-10 10:14:24 -0700342 HMatrix(*view.target, camera.pose());
James Kuszmaul1057ce82019-02-09 17:58:24 -0800343 const Eigen::Matrix<Scalar, kNStates, kNOutputs> PH = P * H.transpose();
344 const Eigen::Matrix<Scalar, kNOutputs, kNOutputs> S = H * PH + R;
345 // Note: The inverse here should be very cheap so long as kNOutputs = 3.
346 const Eigen::Matrix<Scalar, kNStates, kNOutputs> K = PH * S.inverse();
347 const Output err = z - Output(view.reading.heading,
348 view.reading.distance, view.reading.skew);
349 // In order to compute the actual score, we want to consider each
350 // component of the error separately, as well as considering the impacts
351 // on the each of the states separately. As such, we calculate what
352 // the separate updates from each error component would be, and sum
353 // the impacts on the states.
354 Output scorer;
355 for (size_t kk = 0; kk < kNOutputs; ++kk) {
356 // TODO(james): squaredNorm or norm or L1-norm? Do we care about the
357 // square root? Do we prefer a quadratic or linear response?
358 scorer(kk, 0) = (K.col(kk) * err(kk, 0)).squaredNorm();
359 }
360 // Compute the overall score--note that we add in a term for the height,
361 // scaled by a manual fudge-factor. The height is not accounted for
362 // in the Kalman update because we are not trying to estimate the height
363 // of the robot directly.
364 Scalar score =
365 scorer.squaredNorm() +
366 ::std::pow((view.reading.height - target_view.reading.height) /
367 target_view.noise.height / 20.0,
368 2);
369 scores(ii, jj) = score;
370 all_H_matrices[ii][jj] = H;
371
372 // Update the best_matches matrix:
373 int insert_target = jj;
374 for (size_t kk = 0; kk < max_targets_per_frame; ++kk) {
375 int idx = best_matches(ii, kk);
376 // Note that -1 indicates an unfilled value.
377 if (idx == -1 || scores(ii, idx) > scores(ii, insert_target)) {
378 best_matches(ii, kk) = insert_target;
379 insert_target = idx;
380 if (idx == -1) {
381 break;
382 }
383 }
384 }
385 }
386 }
387
388 if (camera_views.size() == 0) {
Austin Schuhf257f3c2019-10-27 21:00:43 -0700389 AOS_LOG(DEBUG, "Unable to identify potential target matches.\n");
James Kuszmaul1057ce82019-02-09 17:58:24 -0800390 // If we can't get a match, provide H = zero, which will make this
391 // correction step a nop.
Austin Schuh9f45d702023-05-06 22:18:10 -0700392 *h = HFunction();
393 *current_dhdx = Eigen::Matrix<Scalar, kNOutputs, kNStates>::Zero();
James Kuszmaul1057ce82019-02-09 17:58:24 -0800394 for (size_t ii = 0; ii < target_views.size(); ++ii) {
395 h_functions->push_back(*h);
Austin Schuh9f45d702023-05-06 22:18:10 -0700396 dhdx->push_back(*current_dhdx);
James Kuszmaul1057ce82019-02-09 17:58:24 -0800397 }
398 } else {
399 // Go through and brute force the issue of what the best combination of
400 // target matches are. The worst case for this algorithm will be
401 // max_targets_per_frame!, which is awful for any N > ~4, but since
402 // max_targets_per_frame = 3, I'm not really worried.
403 ::std::array<int, max_targets_per_frame> best_frames =
404 MatchFrames(scores, best_matches, target_views.size());
405 for (size_t ii = 0; ii < target_views.size(); ++ii) {
James Kuszmaul6f941b72019-03-08 18:12:25 -0800406 size_t view_idx = best_frames[ii];
James Kuszmaul9776b392023-01-14 14:08:08 -0800407 if (view_idx >= camera_views.size()) {
Austin Schuhf257f3c2019-10-27 21:00:43 -0700408 AOS_LOG(ERROR, "Somehow, the view scorer failed.\n");
Austin Schuh9f45d702023-05-06 22:18:10 -0700409 h_functions->emplace_back();
Philipp Schrader790cb542023-07-05 21:06:52 -0700410 dhdx->push_back(Eigen::Matrix<Scalar, kNOutputs, kNStates>::Zero());
James Kuszmaul6f941b72019-03-08 18:12:25 -0800411 continue;
412 }
James Kuszmaul1057ce82019-02-09 17:58:24 -0800413 const Eigen::Matrix<Scalar, kNOutputs, kNStates> best_H =
414 all_H_matrices[ii][view_idx];
415 const TargetView best_view = camera_views[view_idx];
416 const TargetView target_view = target_views[ii];
417 const Scalar match_score = scores(ii, view_idx);
418 if (match_score > kRejectionScore) {
Austin Schuhf257f3c2019-10-27 21:00:43 -0700419 AOS_LOG(DEBUG,
420 "Rejecting target at (%f, %f, %f, %f) due to high score.\n",
421 target_view.reading.heading, target_view.reading.distance,
422 target_view.reading.skew, target_view.reading.height);
Austin Schuh9f45d702023-05-06 22:18:10 -0700423 h_functions->emplace_back();
424 dhdx->push_back(Eigen::Matrix<Scalar, kNOutputs, kNStates>::Zero());
James Kuszmaul1057ce82019-02-09 17:58:24 -0800425 } else {
Austin Schuh9f45d702023-05-06 22:18:10 -0700426 h_functions->emplace_back(&camera, best_view, target_view, this);
James Kuszmaul1057ce82019-02-09 17:58:24 -0800427
428 // TODO(james): Experiment to better understand whether we want to
429 // recalculate H or not.
Austin Schuh9f45d702023-05-06 22:18:10 -0700430 dhdx->push_back(best_H);
James Kuszmaul1057ce82019-02-09 17:58:24 -0800431 }
432 }
433 *h = h_functions->at(0);
Austin Schuh9f45d702023-05-06 22:18:10 -0700434 *current_dhdx = dhdx->at(0);
James Kuszmaul1057ce82019-02-09 17:58:24 -0800435 }
436 }
437
James Kuszmaul2971b5a2023-01-29 15:49:32 -0800438 Eigen::Matrix<Scalar, kNOutputs, kNStates> HMatrix(const Target &target,
439 const Pose &camera_pose) {
James Kuszmaul1057ce82019-02-09 17:58:24 -0800440 // To calculate dheading/d{x,y,theta}:
441 // heading = arctan2(target_pos - camera_pos) - camera_theta
442 Eigen::Matrix<Scalar, 3, 1> target_pos = target.pose().abs_pos();
James Kuszmaul46f3a212019-03-10 10:14:24 -0700443 Eigen::Matrix<Scalar, 3, 1> camera_pos = camera_pose.abs_pos();
James Kuszmaul1057ce82019-02-09 17:58:24 -0800444 Scalar diffx = target_pos.x() - camera_pos.x();
445 Scalar diffy = target_pos.y() - camera_pos.y();
446 Scalar norm2 = diffx * diffx + diffy * diffy;
447 Scalar dheadingdx = diffy / norm2;
448 Scalar dheadingdy = -diffx / norm2;
449 Scalar dheadingdtheta = -1.0;
450
451 // To calculate ddistance/d{x,y}:
452 // distance = sqrt(diffx^2 + diffy^2)
453 Scalar distance = ::std::sqrt(norm2);
454 Scalar ddistdx = -diffx / distance;
455 Scalar ddistdy = -diffy / distance;
456
James Kuszmaul289756f2019-03-05 21:52:10 -0800457 // Skew = target.theta - camera.theta - heading
458 // = target.theta - arctan2(target_pos - camera_pos)
459 Scalar dskewdx = -dheadingdx;
460 Scalar dskewdy = -dheadingdy;
James Kuszmaul1057ce82019-02-09 17:58:24 -0800461 Eigen::Matrix<Scalar, kNOutputs, kNStates> H;
462 H.setZero();
463 H(0, 0) = dheadingdx;
464 H(0, 1) = dheadingdy;
465 H(0, 2) = dheadingdtheta;
466 H(1, 0) = ddistdx;
467 H(1, 1) = ddistdy;
James Kuszmaul289756f2019-03-05 21:52:10 -0800468 H(2, 0) = dskewdx;
469 H(2, 1) = dskewdy;
James Kuszmaul1057ce82019-02-09 17:58:24 -0800470 return H;
471 }
472
473 // A helper function for the fuller version of MatchFrames; this just
474 // removes some of the arguments that are only needed during the recursion.
475 // n_views is the number of targets actually seen in the camera image (i.e.,
476 // the number of rows in scores/best_matches that are actually populated).
477 ::std::array<int, max_targets_per_frame> MatchFrames(
478 const Eigen::Matrix<Scalar, max_targets_per_frame, num_targets> &scores,
James Kuszmaul2971b5a2023-01-29 15:49:32 -0800479 const Eigen::Matrix<int, max_targets_per_frame, max_targets_per_frame>
480 &best_matches,
James Kuszmaul1057ce82019-02-09 17:58:24 -0800481 int n_views) {
482 ::std::array<int, max_targets_per_frame> best_set;
James Kuszmaul6f941b72019-03-08 18:12:25 -0800483 best_set.fill(-1);
James Kuszmaul1057ce82019-02-09 17:58:24 -0800484 Scalar best_score;
485 // We start out without having "used" any views/targets:
486 ::aos::SizedArray<bool, max_targets_per_frame> used_views;
487 for (int ii = 0; ii < n_views; ++ii) {
488 used_views.push_back(false);
489 }
490 MatchFrames(scores, best_matches, used_views, {{false}}, &best_set,
491 &best_score);
492 return best_set;
493 }
494
495 // Recursively iterates over every plausible combination of targets/views
496 // that there is and determines the lowest-scoring combination.
497 // used_views and used_targets indicate which rows/columns of the
498 // scores/best_matches matrices should be ignored. When used_views is all
499 // true, that means that we are done recursing.
500 void MatchFrames(
501 const Eigen::Matrix<Scalar, max_targets_per_frame, num_targets> &scores,
James Kuszmaul2971b5a2023-01-29 15:49:32 -0800502 const Eigen::Matrix<int, max_targets_per_frame, max_targets_per_frame>
503 &best_matches,
James Kuszmaul1057ce82019-02-09 17:58:24 -0800504 const ::aos::SizedArray<bool, max_targets_per_frame> &used_views,
505 const ::std::array<bool, num_targets> &used_targets,
506 ::std::array<int, max_targets_per_frame> *best_set, Scalar *best_score) {
507 *best_score = ::std::numeric_limits<Scalar>::infinity();
508 // Iterate by letting each target in the camera frame (that isn't in
509 // used_views) choose it's best match that isn't already taken. We then set
510 // the appropriate flags in used_views and used_targets and call MatchFrames
511 // to let all the other views sort themselves out.
512 for (size_t ii = 0; ii < used_views.size(); ++ii) {
513 if (used_views[ii]) {
514 continue;
515 }
516 int best_match = -1;
517 for (size_t jj = 0; jj < max_targets_per_frame; ++jj) {
518 if (best_matches(ii, jj) == -1) {
519 // If we run out of potential targets from the camera, then there
520 // are more targets in the frame than we think there should be.
521 // In this case, we are going to be doubling/tripling/etc. up
522 // anyhow. So we just give everyone their top choice:
523 // TODO(james): If we ever are dealing with larger numbers of
524 // targets per frame, do something to minimize doubling-up.
525 best_match = best_matches(ii, 0);
526 break;
527 }
528 best_match = best_matches(ii, jj);
529 if (!used_targets[best_match]) {
530 break;
531 }
532 }
533 // If we reach here and best_match = -1, that means that no potential
534 // targets were generated by the camera, and we should never have gotten
535 // here.
Austin Schuhf257f3c2019-10-27 21:00:43 -0700536 AOS_CHECK(best_match != -1);
James Kuszmaul1057ce82019-02-09 17:58:24 -0800537 ::aos::SizedArray<bool, max_targets_per_frame> sub_views = used_views;
538 sub_views[ii] = true;
539 ::std::array<bool, num_targets> sub_targets = used_targets;
540 sub_targets[best_match] = true;
541 ::std::array<int, max_targets_per_frame> sub_best_set;
542 Scalar score;
543 MatchFrames(scores, best_matches, sub_views, sub_targets, &sub_best_set,
544 &score);
545 score += scores(ii, best_match);
546 sub_best_set[ii] = best_match;
547 if (score < *best_score) {
548 *best_score = score;
549 *best_set = sub_best_set;
550 }
551 }
552 // best_score will be infinite if we did not find a result due to there
553 // being no targets that weren't set in used_vies; this is the
554 // base case of the recursion and so we set best_score to zero:
555 if (!::std::isfinite(*best_score)) {
556 *best_score = 0.0;
557 }
558 }
559
560 // The pose that is used by the cameras to determine the location of the robot
561 // and thus the expected view of the targets.
562 Pose *robot_pose_;
James Kuszmaul2971b5a2023-01-29 15:49:32 -0800563
564 typename HybridEkf::template ExpectedObservationAllocator<
565 ExpectedObservationFunctor>
566 h_queue_;
567 typename HybridEkf::template ExpectedObservationAllocator<
568 ExpectedObservationBuilder>
569 make_h_queue_;
570
571 friend class ExpectedObservationBuilder;
Austin Schuh9f45d702023-05-06 22:18:10 -0700572};

#if !defined(__clang__) && defined(__GNUC__)
#pragma GCC diagnostic pop
#endif

}  // namespace y2019::control_loops

#endif  // Y2019_CONTROL_LOOPS_DRIVETRAIN_LOCALIZATER_H_