Stop using std::function in HybridEkf

Removes mallocs from the HybridEkf and the localizer as a whole.

This solution feels a bit inelegant, but that's true of the entire
HybridEkf class (I may simplify some of this once we delete y2019).
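To illustrate the pattern (a minimal sketch only; the names ObservationFunctor,
Corrector, and the one-double signature below are illustrative, while the real
interface added here is HybridEkf::ExpectedObservationFunctor with H()/DHDX()
plus the ExpectedObservationAllocator member, as the diff shows): a capturing
lambda wrapped in std::function will typically heap-allocate once its captures
exceed the small-buffer optimization, whereas a concrete functor constructed in
storage the caller already owns does not.

    #include <cstdio>
    #include <functional>

    // Old pattern: callback passed as std::function. With enough captured
    // state the type-erased wrapper heap-allocates on construction, which is
    // exactly what die_on_malloc flags in tests.
    double CorrectWithStdFunction(const std::function<double(double)> &h,
                                  double x) {
      return h(x);
    }

    // New pattern: a small virtual interface. The corrector is constructed in
    // preallocated storage (stack here), so the hot path performs no malloc.
    class ObservationFunctor {
     public:
      virtual ~ObservationFunctor() = default;
      virtual double H(double x) = 0;
    };

    class Corrector final : public ObservationFunctor {
     public:
      explicit Corrector(double offset) : offset_(offset) {}
      double H(double x) override { return x - offset_; }

     private:
      double offset_;
    };

    int main() {
      // Capturing 64 bytes typically defeats std::function's small-buffer
      // optimization and forces an allocation.
      double big_capture[8] = {1.0};
      std::printf("%f\n",
                  CorrectWithStdFunction(
                      [big_capture](double x) { return x - big_capture[0]; },
                      2.0));

      Corrector corrector(1.0);  // lives on the stack; no allocation
      std::printf("%f\n", corrector.H(2.0));
      return 0;
    }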

Change-Id: I2deb5b1221ea17b08baad7e8bb46d6bbd1b987a6
Signed-off-by: James Kuszmaul <jabukuszmaul+collab@gmail.com>
diff --git a/y2020/control_loops/drivetrain/localizer.cc b/y2020/control_loops/drivetrain/localizer.cc
index 505a598..8f8bf87 100644
--- a/y2020/control_loops/drivetrain/localizer.cc
+++ b/y2020/control_loops/drivetrain/localizer.cc
@@ -89,6 +89,7 @@
     : event_loop_(event_loop),
       dt_config_(dt_config),
       ekf_(dt_config),
+      observations_(&ekf_),
       clock_offset_fetcher_(
           event_loop_->MakeFetcher<aos::message_bridge::ServerStatistics>(
               "/aos")),
@@ -244,7 +245,7 @@
     rejection_reason = RejectionReason::IMAGE_FROM_FUTURE;
   }
   if (!result.has_camera_calibration()) {
-    LOG(WARNING) << "Got camera frame without calibration data.";
+    AOS_LOG(WARNING, "Got camera frame without calibration data.\n");
     ImageMatchDebug::Builder builder(*fbb);
     builder.add_camera(camera_index);
     builder.add_accepted(false);
@@ -401,11 +402,6 @@
 
     Eigen::Matrix3f R = Eigen::Matrix3f::Zero();
     R.diagonal() = noises.cwiseAbs2();
-    Eigen::Matrix<float, HybridEkf::kNOutputs, HybridEkf::kNStates> H;
-    H.setZero();
-    H(0, StateIdx::kX) = 1;
-    H(1, StateIdx::kY) = 1;
-    H(2, StateIdx::kTheta) = 1;
     VLOG(1) << "Pose implied by target: " << Z.transpose()
             << " and current pose " << x() << ", " << y() << ", " << theta()
             << " Heading/dist/skew implied by target: "
@@ -447,47 +443,11 @@
     // more convenient to write given the Correct() interface we already have.
     // Note: If we start going back to doing back-in-time rewinds, then we can't
     // get away with passing things by reference.
-    ekf_.Correct(
-        Eigen::Vector3f::Zero(), &U, {},
-        [H, H_field_target, pose_robot_target, state_at_capture, Z,
-         &correction_rejection](const State &,
-                                const Input &) -> Eigen::Vector3f {
-          // Weighting for how much to use the current robot heading estimate
-          // vs. the heading estimate from the image results. A value of 1.0
-          // completely ignores the measured heading, but will prioritize turret
-          // aiming above all else. A value of 0.0 will prioritize correcting
-          // any gyro heading drift.
-          constexpr float kImpliedWeight = 0.99;
-          const float z_yaw_diff = aos::math::NormalizeAngle(
-              state_at_capture.value()(Localizer::StateIdx::kTheta) - Z(2));
-          const float z_yaw = Z(2) + kImpliedWeight * z_yaw_diff;
-          const Eigen::Vector3f Z_implied =
-              CalculateImpliedPose(z_yaw, H_field_target, pose_robot_target);
-          const Eigen::Vector3f Z_used = Z_implied;
-          // Just in case we ever do encounter any, drop measurements if they
-          // have non-finite numbers.
-          if (!Z.allFinite()) {
-            AOS_LOG(WARNING, "Got measurement with infinites or NaNs.\n");
-            correction_rejection = RejectionReason::NONFINITE_MEASUREMENT;
-            return Eigen::Vector3f::Zero();
-          }
-          Eigen::Vector3f Zhat = H * state_at_capture.value() - Z_used;
-          // Rewrap angle difference to put it back in range. Note that this
-          // component of the error is currently ignored (see definition of H
-          // above).
-          Zhat(2) = aos::math::NormalizeAngle(Zhat(2));
-          // If the measurement implies that we are too far from the current
-          // estimate, then ignore it.
-          // Note that I am not entirely sure how much effect this actually has,
-          // because I primarily introduced it to make sure that any grossly
-          // invalid measurements get thrown out.
-          if (Zhat.squaredNorm() > std::pow(10.0, 2)) {
-            correction_rejection = RejectionReason::CORRECTION_TOO_LARGE;
-            return Eigen::Vector3f::Zero();
-          }
-          return Zhat;
-        },
-        [H](const State &) { return H; }, R, now);
+    observations_.CorrectKnownH(
+        Eigen::Vector3f::Zero(), &U,
+        Corrector(H_field_target, pose_robot_target, state_at_capture.value(),
+                  Z, &correction_rejection),
+        R, now);
     if (correction_rejection) {
       builder.add_accepted(false);
       builder.add_rejection_reason(*correction_rejection);
@@ -502,6 +462,43 @@
   return debug_offsets;
 }
 
+Localizer::Output Localizer::Corrector::H(const State &, const Input &) {
+  // Weighting for how much to use the current robot heading estimate
+  // vs. the heading estimate from the image results. A value of 1.0
+  // completely ignores the measured heading, but will prioritize turret
+  // aiming above all else. A value of 0.0 will prioritize correcting
+  // any gyro heading drift.
+  constexpr float kImpliedWeight = 0.99;
+  const float z_yaw_diff = aos::math::NormalizeAngle(
+      state_at_capture_(Localizer::StateIdx::kTheta) - Z_(2));
+  const float z_yaw = Z_(2) + kImpliedWeight * z_yaw_diff;
+  const Eigen::Vector3f Z_implied =
+      CalculateImpliedPose(z_yaw, H_field_target_, pose_robot_target_);
+  const Eigen::Vector3f Z_used = Z_implied;
+  // Just in case we ever do encounter any, drop measurements if they
+  // have non-finite numbers.
+  if (!Z_.allFinite()) {
+    AOS_LOG(WARNING, "Got measurement with infinites or NaNs.\n");
+    *correction_rejection_ = RejectionReason::NONFINITE_MEASUREMENT;
+    return Eigen::Vector3f::Zero();
+  }
+  Eigen::Vector3f Zhat = H_ * state_at_capture_ - Z_used;
+  // Rewrap angle difference to put it back in range. Note that this
+  // component of the error is currently ignored (see definition of H
+  // above).
+  Zhat(2) = aos::math::NormalizeAngle(Zhat(2));
+  // If the measurement implies that we are too far from the current
+  // estimate, then ignore it.
+  // Note that I am not entirely sure how much effect this actually has,
+  // because I primarily introduced it to make sure that any grossly
+  // invalid measurements get thrown out.
+  if (Zhat.squaredNorm() > std::pow(10.0, 2)) {
+    *correction_rejection_ = RejectionReason::CORRECTION_TOO_LARGE;
+    return Eigen::Vector3f::Zero();
+  }
+  return Zhat;
+}
+
 }  // namespace drivetrain
 }  // namespace control_loops
 }  // namespace y2020
diff --git a/y2020/control_loops/drivetrain/localizer.h b/y2020/control_loops/drivetrain/localizer.h
index 5458797..de57091 100644
--- a/y2020/control_loops/drivetrain/localizer.h
+++ b/y2020/control_loops/drivetrain/localizer.h
@@ -9,8 +9,8 @@
 #include "aos/network/message_bridge_server_generated.h"
 #include "frc971/control_loops/drivetrain/hybrid_ekf.h"
 #include "frc971/control_loops/drivetrain/localizer.h"
-#include "y2020/control_loops/superstructure/superstructure_status_generated.h"
 #include "y2020/control_loops/drivetrain/localizer_debug_generated.h"
+#include "y2020/control_loops/superstructure/superstructure_status_generated.h"
 #include "y2020/vision/sift/sift_generated.h"
 
 namespace y2020 {
@@ -95,6 +95,37 @@
     std::array<int, kNumRejectionReasons> rejection_counts;
   };
 
+  class Corrector : public HybridEkf::ExpectedObservationFunctor {
+   public:
+    Corrector(const Eigen::Matrix<float, 4, 4> &H_field_target,
+              const Pose &pose_robot_target, const State &state_at_capture,
+              const Eigen::Vector3f &Z,
+              std::optional<RejectionReason> *correction_rejection)
+        : H_field_target_(H_field_target),
+          pose_robot_target_(pose_robot_target),
+          state_at_capture_(state_at_capture),
+          Z_(Z),
+          correction_rejection_(correction_rejection) {
+      H_.setZero();
+      H_(0, StateIdx::kX) = 1;
+      H_(1, StateIdx::kY) = 1;
+      H_(2, StateIdx::kTheta) = 1;
+    }
+    Output H(const State &, const Input &) final;
+    Eigen::Matrix<float, HybridEkf::kNOutputs, HybridEkf::kNStates> DHDX(
+        const State &) final {
+      return H_;
+    }
+
+   private:
+    Eigen::Matrix<float, HybridEkf::kNOutputs, HybridEkf::kNStates> H_;
+    const Eigen::Matrix<float, 4, 4> H_field_target_;
+    Pose pose_robot_target_;
+    const State state_at_capture_;
+    const Eigen::Vector3f &Z_;
+    std::optional<RejectionReason> *correction_rejection_;
+  };
+
   // Processes new image data from the given pi and updates the EKF.
   aos::SizedArray<flatbuffers::Offset<ImageMatchDebug>, 5> HandleImageMatch(
       size_t camera_index, std::string_view pi,
@@ -113,6 +144,7 @@
   aos::EventLoop *const event_loop_;
   const frc971::control_loops::drivetrain::DrivetrainConfig<double> dt_config_;
   HybridEkf ekf_;
+  HybridEkf::ExpectedObservationAllocator<Corrector> observations_;
 
   std::vector<aos::Fetcher<frc971::vision::sift::ImageMatchResult>>
       image_fetchers_;
diff --git a/y2020/control_loops/drivetrain/localizer_test.cc b/y2020/control_loops/drivetrain/localizer_test.cc
index 9e9e7dd..d280523 100644
--- a/y2020/control_loops/drivetrain/localizer_test.cc
+++ b/y2020/control_loops/drivetrain/localizer_test.cc
@@ -15,6 +15,7 @@
 
 DEFINE_string(output_file, "",
               "If set, logs all channels to the provided logfile.");
+DECLARE_bool(die_on_malloc);
 
 // This file tests that the full 2020 localizer behaves sanely.
 
@@ -146,6 +147,7 @@
     CHECK_EQ(aos::configuration::GetNodeIndex(configuration(), pi1_), 1);
     set_team_id(frc971::control_loops::testing::kTeamNumber);
     set_battery_voltage(12.0);
+    FLAGS_die_on_malloc = true;
 
     if (!FLAGS_output_file.empty()) {
       logger_event_loop_ = MakeEventLoop("logger", roborio_);