Add y2020 LocalizerDebug message

Add detailed debugging information for every image match that the
localizer processes.

Change-Id: Ic09df456972952f3ed4c7227eb70cda638e348a2
Signed-off-by: James Kuszmaul <jabukuszmaul@gmail.com>
diff --git a/y2020/BUILD b/y2020/BUILD
index 45164b4..19debdc 100644
--- a/y2020/BUILD
+++ b/y2020/BUILD
@@ -232,6 +232,7 @@
         "//aos/network:timestamp_fbs",
         "//y2020/control_loops/superstructure:superstructure_goal_fbs",
         "//y2019/control_loops/drivetrain:target_selector_fbs",
+        "//y2020/control_loops/drivetrain:localizer_debug_fbs",
         "//y2020/control_loops/superstructure:superstructure_output_fbs",
         "//y2020/control_loops/superstructure:superstructure_position_fbs",
         "//y2020/control_loops/superstructure:superstructure_status_fbs",
diff --git a/y2020/control_loops/drivetrain/BUILD b/y2020/control_loops/drivetrain/BUILD
index 89d8044..b2a5901 100644
--- a/y2020/control_loops/drivetrain/BUILD
+++ b/y2020/control_loops/drivetrain/BUILD
@@ -56,13 +56,22 @@
     ],
 )
 
+flatbuffer_cc_library(
+    name = "localizer_debug_fbs",
+    srcs = ["localizer_debug.fbs"],
+    gen_reflections = 1,
+    visibility = ["//visibility:public"],
+)
+
 cc_library(
     name = "localizer",
     srcs = ["localizer.cc"],
     hdrs = ["localizer.h"],
     target_compatible_with = ["@platforms//os:linux"],
     deps = [
+        ":localizer_debug_fbs",
         "//aos/containers:ring_buffer",
+        "//aos/containers:sized_array",
         "//aos/network:message_bridge_server_fbs",
         "//frc971/control_loops:profiled_subsystem_fbs",
         "//frc971/control_loops/drivetrain:hybrid_ekf",
diff --git a/y2020/control_loops/drivetrain/drivetrain_replay.cc b/y2020/control_loops/drivetrain/drivetrain_replay.cc
index 65fc0c4..913fbda 100644
--- a/y2020/control_loops/drivetrain/drivetrain_replay.cc
+++ b/y2020/control_loops/drivetrain/drivetrain_replay.cc
@@ -75,6 +75,8 @@
                             "frc971.control_loops.drivetrain.Status");
   reader.RemapLoggedChannel("/drivetrain",
                             "frc971.control_loops.drivetrain.Output");
+  reader.RemapLoggedChannel("/drivetrain",
+                            "y2020.control_loops.drivetrain.LocalizerDebug");
   reader.RemapLoggedChannel("/superstructure",
                             "y2020.control_loops.superstructure.Status");
   reader.RemapLoggedChannel("/superstructure",
diff --git a/y2020/control_loops/drivetrain/localizer.cc b/y2020/control_loops/drivetrain/localizer.cc
index 906bb4b..55bf16a 100644
--- a/y2020/control_loops/drivetrain/localizer.cc
+++ b/y2020/control_loops/drivetrain/localizer.cc
@@ -72,7 +72,11 @@
       ekf_(dt_config),
       clock_offset_fetcher_(
           event_loop_->MakeFetcher<aos::message_bridge::ServerStatistics>(
-              "/aos")) {
+              "/aos")),
+      debug_sender_(
+          event_loop_
+              ->MakeSender<y2020::control_loops::drivetrain::LocalizerDebug>(
+                  "/drivetrain")) {
   // TODO(james): The down estimator has trouble handling situations where the
   // robot is constantly wiggling but not actually moving much, and can cause
   // drift when using accelerometer readings.
@@ -145,17 +149,32 @@
                        double gyro_rate, const Eigen::Vector3d &accel) {
   ekf_.UpdateEncodersAndGyro(left_encoder, right_encoder, gyro_rate,
                              U.cast<float>(), accel.cast<float>(), now);
+  auto builder = debug_sender_.MakeBuilder();
+  aos::SizedArray<flatbuffers::Offset<ImageMatchDebug>, 25> debug_offsets;
   for (size_t ii = 0; ii < kPisToUse.size(); ++ii) {
     auto &image_fetcher = image_fetchers_[ii];
     while (image_fetcher.FetchNext()) {
-      HandleImageMatch(kPisToUse[ii], *image_fetcher, now);
+      const auto offsets = HandleImageMatch(ii, kPisToUse[ii], *image_fetcher,
+                                            now, builder.fbb());
+      for (const auto offset : offsets) {
+        debug_offsets.push_back(offset);
+      }
     }
   }
+  const auto vector_offset =
+      builder.fbb()->CreateVector(debug_offsets.data(), debug_offsets.size());
+  LocalizerDebug::Builder debug_builder = builder.MakeBuilder<LocalizerDebug>();
+  debug_builder.add_matches(vector_offset);
+  CHECK(builder.Send(debug_builder.Finish()));
 }
 
-void Localizer::HandleImageMatch(
-    std::string_view pi, const frc971::vision::sift::ImageMatchResult &result,
-    aos::monotonic_clock::time_point now) {
+aos::SizedArray<flatbuffers::Offset<ImageMatchDebug>, 5>
+Localizer::HandleImageMatch(
+    size_t camera_index, std::string_view pi,
+    const frc971::vision::sift::ImageMatchResult &result,
+    aos::monotonic_clock::time_point now, flatbuffers::FlatBufferBuilder *fbb) {
+  aos::SizedArray<flatbuffers::Offset<ImageMatchDebug>, 5> debug_offsets;
+
   std::chrono::nanoseconds monotonic_offset(0);
   clock_offset_fetcher_.Fetch();
   if (clock_offset_fetcher_.get() != nullptr) {
@@ -177,11 +196,21 @@
           << capture_time;
   if (capture_time > now) {
     LOG(WARNING) << "Got camera frame from the future.";
-    return;
+    ImageMatchDebug::Builder builder(*fbb);
+    builder.add_camera(camera_index);
+    builder.add_accepted(false);
+    builder.add_rejection_reason(RejectionReason::IMAGE_FROM_FUTURE);
+    debug_offsets.push_back(builder.Finish());
+    return debug_offsets;
   }
   if (!result.has_camera_calibration()) {
     LOG(WARNING) << "Got camera frame without calibration data.";
-    return;
+    ImageMatchDebug::Builder builder(*fbb);
+    builder.add_camera(camera_index);
+    builder.add_accepted(false);
+    builder.add_rejection_reason(RejectionReason::NO_CALIBRATION);
+    debug_offsets.push_back(builder.Finish());
+    return debug_offsets;
   }
   // Per the ImageMatchResult specification, we can actually determine whether
   // the camera is the turret camera just from the presence of the
@@ -194,7 +223,12 @@
   // seems reasonable, but may be unnecessarily low or high.
   constexpr float kMaxTurretVelocity = 1.0;
   if (is_turret && std::abs(turret_data.velocity) > kMaxTurretVelocity) {
-    return;
+    ImageMatchDebug::Builder builder(*fbb);
+    builder.add_camera(camera_index);
+    builder.add_accepted(false);
+    builder.add_rejection_reason(RejectionReason::TURRET_TOO_FAST);
+    debug_offsets.push_back(builder.Finish());
+    return debug_offsets;
   }
   CHECK(result.camera_calibration()->has_fixed_extrinsics());
   const Eigen::Matrix<float, 4, 4> fixed_extrinsics =
@@ -212,13 +246,32 @@
   }
 
   if (!result.has_camera_poses()) {
-    return;
+    ImageMatchDebug::Builder builder(*fbb);
+    builder.add_camera(camera_index);
+    builder.add_accepted(false);
+    builder.add_rejection_reason(RejectionReason::NO_RESULTS);
+    debug_offsets.push_back(builder.Finish());
+    return debug_offsets;
   }
 
+  int index = -1;
   for (const frc971::vision::sift::CameraPose *vision_result :
        *result.camera_poses()) {
+    ++index;
+
+    ImageMatchDebug::Builder builder(*fbb);
+    builder.add_camera(camera_index);
+    builder.add_pose_index(index);
+    builder.add_local_image_capture_time_ns(result.image_monotonic_timestamp_ns());
+    builder.add_roborio_image_capture_time_ns(
+        capture_time.time_since_epoch().count());
+    builder.add_image_age_sec(aos::time::DurationInSeconds(now - capture_time));
+
     if (!vision_result->has_camera_to_target() ||
         !vision_result->has_field_to_target()) {
+      builder.add_accepted(false);
+      builder.add_rejection_reason(RejectionReason::NO_TRANSFORMS);
+      debug_offsets.push_back(builder.Finish());
       continue;
     }
     const Eigen::Matrix<float, 4, 4> H_camera_target =
@@ -282,6 +335,9 @@
     // and don't use the correction.
     if (std::abs(aos::math::DiffAngle<float>(theta(), Z(2))) > M_PI_2) {
       AOS_LOG(WARNING, "Dropped image match due to heading mismatch.\n");
+      builder.add_accepted(false);
+      builder.add_rejection_reason(RejectionReason::HIGH_THETA_DIFFERENCE);
+      debug_offsets.push_back(builder.Finish());
       continue;
     }
     // In order to do the EKF correction, we determine the expected state based
@@ -293,17 +349,32 @@
         ekf_.LastStateBeforeTime(capture_time);
     if (!state_at_capture.has_value()) {
       AOS_LOG(WARNING, "Dropped image match due to age of image.\n");
+      builder.add_accepted(false);
+      builder.add_rejection_reason(RejectionReason::IMAGE_TOO_OLD);
+      debug_offsets.push_back(builder.Finish());
       continue;
     }
+
+    builder.add_implied_robot_x(Z(0));
+    builder.add_implied_robot_y(Z(1));
+    builder.add_implied_robot_theta(Z(2));
+
+    // Turret is zero when pointed backwards.
+    builder.add_implied_turret_goal(
+        aos::math::NormalizeAngle(M_PI + pose_robot_target.heading()));
+
+    std::optional<RejectionReason> correction_rejection;
     const Input U = ekf_.MostRecentInput();
     // For the correction step, instead of passing in the measurement directly,
     // we pass in (0, 0, 0) as the measurement and then for the expected
     // measurement (Zhat) we calculate the error between the implied and actual
     // poses. This doesn't affect any of the math, it just makes the code a bit
     // more convenient to write given the Correct() interface we already have.
+    // Note: If we start going back to doing back-in-time rewinds, then we can't
+    // get away with passing things by reference.
     ekf_.Correct(
         Eigen::Vector3f::Zero(), &U, {},
-        [H, H_field_target, pose_robot_target, state_at_capture](
+        [H, H_field_target, pose_robot_target, state_at_capture, &correction_rejection](
             const State &, const Input &) -> Eigen::Vector3f {
           const Eigen::Vector3f Z =
               CalculateImpliedPose(H_field_target, pose_robot_target);
@@ -311,6 +382,7 @@
           // have non-finite numbers.
           if (!Z.allFinite()) {
             AOS_LOG(WARNING, "Got measurement with infinites or NaNs.\n");
+            correction_rejection = RejectionReason::NONFINITE_MEASUREMENT;
             return Eigen::Vector3f::Zero();
           }
           Eigen::Vector3f Zhat = H * state_at_capture.value() - Z;
@@ -324,12 +396,21 @@
           // because I primarily introduced it to make sure that any grossly
           // invalid measurements get thrown out.
           if (Zhat.squaredNorm() > std::pow(10.0, 2)) {
+            correction_rejection = RejectionReason::CORRECTION_TOO_LARGE;
             return Eigen::Vector3f::Zero();
           }
           return Zhat;
         },
         [H](const State &) { return H; }, R, now);
+    if (correction_rejection) {
+      builder.add_accepted(false);
+      builder.add_rejection_reason(*correction_rejection);
+    } else {
+      builder.add_accepted(true);
+    }
+    debug_offsets.push_back(builder.Finish());
   }
+  return debug_offsets;
 }
 
 }  // namespace drivetrain
diff --git a/y2020/control_loops/drivetrain/localizer.h b/y2020/control_loops/drivetrain/localizer.h
index c3b6464..19ee4ad 100644
--- a/y2020/control_loops/drivetrain/localizer.h
+++ b/y2020/control_loops/drivetrain/localizer.h
@@ -4,11 +4,13 @@
 #include <string_view>
 
 #include "aos/containers/ring_buffer.h"
+#include "aos/containers/sized_array.h"
 #include "aos/events/event_loop.h"
 #include "aos/network/message_bridge_server_generated.h"
 #include "frc971/control_loops/drivetrain/hybrid_ekf.h"
 #include "frc971/control_loops/drivetrain/localizer.h"
 #include "y2020/control_loops/superstructure/superstructure_status_generated.h"
+#include "y2020/control_loops/drivetrain/localizer_debug_generated.h"
 #include "y2020/vision/sift/sift_generated.h"
 
 namespace y2020 {
@@ -77,9 +79,11 @@
   };
 
   // Processes new image data from the given pi and updates the EKF.
-  void HandleImageMatch(std::string_view pi,
-                        const frc971::vision::sift::ImageMatchResult &result,
-                        aos::monotonic_clock::time_point now);
+  aos::SizedArray<flatbuffers::Offset<ImageMatchDebug>, 5> HandleImageMatch(
+      size_t camera_index, std::string_view pi,
+      const frc971::vision::sift::ImageMatchResult &result,
+      aos::monotonic_clock::time_point now,
+      flatbuffers::FlatBufferBuilder *fbb);
 
   // Processes the most recent turret position and stores it in the turret_data_
   // buffer.
@@ -98,6 +102,8 @@
 
   aos::Fetcher<aos::message_bridge::ServerStatistics> clock_offset_fetcher_;
 
+  aos::Sender<y2020::control_loops::drivetrain::LocalizerDebug> debug_sender_;
+
   // Buffer of recent turret data--this is used so that when we receive a camera
   // frame from the turret, we can back out what the turret angle was at that
   // time.
diff --git a/y2020/control_loops/drivetrain/localizer_debug.fbs b/y2020/control_loops/drivetrain/localizer_debug.fbs
new file mode 100644
index 0000000..89aea68
--- /dev/null
+++ b/y2020/control_loops/drivetrain/localizer_debug.fbs
@@ -0,0 +1,34 @@
+namespace y2020.control_loops.drivetrain;
+
+enum RejectionReason : byte {
+  IMAGE_FROM_FUTURE = 0,
+  NO_CALIBRATION = 1,
+  TURRET_TOO_FAST = 2,
+  NO_RESULTS = 3,
+  NO_TRANSFORMS = 4,
+  HIGH_THETA_DIFFERENCE = 5,
+  IMAGE_TOO_OLD = 6,
+  NONFINITE_MEASUREMENT = 7,
+  CORRECTION_TOO_LARGE = 8,
+}
+
+table ImageMatchDebug {
+  camera:uint8 (id: 0);
+  pose_index: uint8 (id: 1);
+  local_image_capture_time_ns:long (id: 2);
+  roborio_image_capture_time_ns:long (id: 3);
+  implied_robot_x:float (id: 4);
+  implied_robot_y:float (id: 5);
+  implied_robot_theta:float (id: 6);
+  implied_turret_goal:float (id: 7);
+  accepted:bool (id: 8);
+  rejection_reason:RejectionReason  (id: 9);
+  // Image age (more human-readable than trying to interpret roborio_image_capture_time_ns).
+  image_age_sec:float (id: 10);
+}
+
+table LocalizerDebug {
+  matches:[ImageMatchDebug] (id: 0);
+}
+
+root_type LocalizerDebug;
diff --git a/y2020/y2020_roborio.json b/y2020/y2020_roborio.json
index 5243d5a..db5da14 100644
--- a/y2020/y2020_roborio.json
+++ b/y2020/y2020_roborio.json
@@ -196,6 +196,14 @@
     },
     {
       "name": "/drivetrain",
+      "type": "y2020.control_loops.drivetrain.LocalizerDebug",
+      "source_node": "roborio",
+      "frequency": 250,
+      "max_size": 512,
+      "num_senders": 2
+    },
+    {
+      "name": "/drivetrain",
       "type": "frc971.IMUValuesBatch",
       "source_node": "roborio",
       "frequency": 250,