Create y2023 localizer

This creates a localizer implementation for y2023, along with simple
tests. The implementation is actually closer to the y2020 iteration than
to y2022, as I backed away from trying to maintain the more complex
y2022 logic (since I wasn't actually using the features of that code).
Notable changes from past years:
* New target format using the TargetMap that the april robotics detector
  sends out.
* Does not attempt to entirely ignore the yaw implied by the target
  detections, since unlike in 2020/2022 we are not shooting at the
  target but instead attempting to go somewhere offset from it (see the
  sketch below).

There is currently no way of getting confidence values from the april
robotics code, so all detections are treated the same. This will need
tuning.
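
For reference, the image corrections back an implied robot pose out of
each detection via the transform chain sketched below. This is only a
sketch: the ImpliedRobotPose name is made up, and the real code in
localizer.cc also strips the camera roll/pitch via the Pose helper
before applying the extrinsics.

  #include <Eigen/Dense>

  using Transform = Eigen::Matrix<double, 4, 4>;

  // H_field_target comes from the TargetMap in the constants,
  // H_camera_target from the april robotics detection, and
  // H_robot_camera from the camera extrinsics calibration.
  Transform ImpliedRobotPose(const Transform &H_field_target,
                             const Transform &H_camera_target,
                             const Transform &H_robot_camera) {
    const Transform H_field_camera =
        H_field_target * H_camera_target.inverse();
    return H_field_camera * H_robot_camera.inverse();
  }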

Change-Id: I072cd3fb2657081bca74c55570842960c5ad7b1b
Signed-off-by: James Kuszmaul <jabukuszmaul+collab@gmail.com>
diff --git a/y2023/localizer/BUILD b/y2023/localizer/BUILD
new file mode 100644
index 0000000..8f1237c
--- /dev/null
+++ b/y2023/localizer/BUILD
@@ -0,0 +1,101 @@
+load("@com_github_google_flatbuffers//:build_defs.bzl", "flatbuffer_cc_library")
+load("@com_github_google_flatbuffers//:typescript.bzl", "flatbuffer_ts_library")
+
+flatbuffer_cc_library(
+    name = "status_fbs",
+    srcs = [
+        "status.fbs",
+    ],
+    gen_reflections = True,
+    target_compatible_with = ["@platforms//os:linux"],
+    visibility = ["//visibility:public"],
+    deps = [
+        "//frc971/control_loops/drivetrain:drivetrain_status_fbs",
+        "//frc971/imu_reader:imu_failures_fbs",
+    ],
+)
+
+flatbuffer_ts_library(
+    name = "status_ts_fbs",
+    srcs = ["status.fbs"],
+    visibility = ["//visibility:public"],
+    deps = [
+        "//frc971/control_loops/drivetrain:drivetrain_status_ts_fbs",
+        "//frc971/imu_reader:imu_failures_ts_fbs",
+    ],
+)
+
+flatbuffer_cc_library(
+    name = "visualization_fbs",
+    srcs = [
+        "visualization.fbs",
+    ],
+    gen_reflections = True,
+    target_compatible_with = ["@platforms//os:linux"],
+    visibility = ["//visibility:public"],
+    deps = [
+        ":status_fbs",
+    ],
+)
+
+flatbuffer_ts_library(
+    name = "visualization_ts_fbs",
+    srcs = ["visualization.fbs"],
+    visibility = ["//visibility:public"],
+    deps = [
+        ":status_ts_fbs",
+    ],
+)
+
+cc_library(
+    name = "localizer",
+    srcs = ["localizer.cc"],
+    hdrs = ["localizer.h"],
+    deps = [
+        ":status_fbs",
+        ":visualization_fbs",
+        "//aos/containers:sized_array",
+        "//aos/events:event_loop",
+        "//frc971/constants:constants_sender_lib",
+        "//frc971/control_loops:pose",
+        "//frc971/control_loops/drivetrain:hybrid_ekf",
+        "//frc971/control_loops/drivetrain:improved_down_estimator",
+        "//frc971/control_loops/drivetrain:localizer_fbs",
+        "//frc971/control_loops/drivetrain/localization:localizer_output_fbs",
+        "//frc971/control_loops/drivetrain/localization:utils",
+        "//frc971/imu_reader:imu_watcher",
+        "//frc971/vision:target_map_fbs",
+        "//y2023:constants",
+        "//y2023/constants:constants_fbs",
+    ],
+)
+
+cc_test(
+    name = "localizer_test",
+    srcs = ["localizer_test.cc"],
+    data = ["//y2023:aos_config"],
+    deps = [
+        ":localizer",
+        ":status_fbs",
+        "//aos/events:simulated_event_loop",
+        "//aos/events/logging:log_writer",
+        "//aos/testing:googletest",
+        "//frc971/control_loops/drivetrain:drivetrain_test_lib",
+        "//frc971/control_loops/drivetrain:localizer_fbs",
+        "//y2023/constants:simulated_constants_sender",
+        "//y2023/control_loops/drivetrain:drivetrain_base",
+    ],
+)
+
+cc_binary(
+    name = "localizer_main",
+    srcs = ["localizer_main.cc"],
+    visibility = ["//visibility:public"],
+    deps = [
+        ":localizer",
+        "//aos:init",
+        "//aos/events:shm_event_loop",
+        "//frc971/constants:constants_sender_lib",
+        "//y2023/control_loops/drivetrain:drivetrain_base",
+    ],
+)
diff --git a/y2023/localizer/localizer.cc b/y2023/localizer/localizer.cc
new file mode 100644
index 0000000..87063ad
--- /dev/null
+++ b/y2023/localizer/localizer.cc
@@ -0,0 +1,423 @@
+#include "y2023/localizer/localizer.h"
+
+#include "aos/containers/sized_array.h"
+#include "frc971/control_loops/drivetrain/localizer_generated.h"
+#include "frc971/control_loops/pose.h"
+#include "y2023/constants.h"
+
+namespace y2023::localizer {
+namespace {
+constexpr std::array<std::string_view, Localizer::kNumCameras> kPisToUse{
+    "pi1", "pi2", "pi3", "pi4"};
+
+size_t CameraIndexForName(std::string_view name) {
+  for (size_t index = 0; index < kPisToUse.size(); ++index) {
+    if (name == kPisToUse.at(index)) {
+      return index;
+    }
+  }
+  LOG(FATAL) << "No camera named " << name;
+}
+
+std::map<uint64_t, Localizer::Transform> GetTargetLocations(
+    const Constants &constants) {
+  CHECK(constants.has_target_map());
+  CHECK(constants.target_map()->has_target_poses());
+  std::map<uint64_t, Localizer::Transform> transforms;
+  for (const frc971::vision::TargetPoseFbs *target :
+       *constants.target_map()->target_poses()) {
+    CHECK(target->has_id());
+    CHECK(target->has_position());
+    CHECK(target->has_orientation());
+    CHECK_EQ(0u, transforms.count(target->id()));
+    transforms[target->id()] = PoseToTransform(target);
+  }
+  return transforms;
+}
+}  // namespace
+
+Localizer::Transform PoseToTransform(
+    const frc971::vision::TargetPoseFbs *pose) {
+  const frc971::vision::Position *position = pose->position();
+  const frc971::vision::Quaternion *quaternion = pose->orientation();
+  return (Eigen::Translation3d(
+              Eigen::Vector3d(position->x(), position->y(), position->z())) *
+          Eigen::Quaterniond(quaternion->w(), quaternion->x(), quaternion->y(),
+                             quaternion->z()))
+      .matrix();
+}
+
+std::array<Localizer::CameraState, Localizer::kNumCameras>
+Localizer::MakeCameras(const Constants &constants, aos::EventLoop *event_loop) {
+  CHECK(constants.has_cameras());
+  std::array<Localizer::CameraState, Localizer::kNumCameras> cameras;
+  for (const CameraConfiguration *camera : *constants.cameras()) {
+    CHECK(camera->has_calibration());
+    const frc971::vision::calibration::CameraCalibration *calibration =
+        camera->calibration();
+    CHECK(!calibration->has_turret_extrinsics())
+        << "The 2023 robot does not have a turret.";
+    CHECK(calibration->has_node_name());
+    const size_t index =
+        CameraIndexForName(calibration->node_name()->string_view());
+    // We default-construct the extrinsics matrix to all-zeros; use that to
+    // sanity-check whether we have populated the matrix yet or not.
+    CHECK(cameras.at(index).extrinsics.norm() == 0)
+        << "Got multiple calibrations for "
+        << calibration->node_name()->string_view();
+    CHECK(calibration->has_fixed_extrinsics());
+    cameras.at(index).extrinsics =
+        frc971::control_loops::drivetrain::FlatbufferToTransformationMatrix(
+            *calibration->fixed_extrinsics());
+    cameras.at(index).debug_sender = event_loop->MakeSender<Visualization>(
+        absl::StrCat("/", calibration->node_name()->string_view(), "/camera"));
+  }
+  for (const CameraState &camera : cameras) {
+    CHECK(camera.extrinsics.norm() != 0) << "Missing a camera calibration.";
+  }
+  return cameras;
+}
+
+Localizer::Localizer(
+    aos::EventLoop *event_loop,
+    const frc971::control_loops::drivetrain::DrivetrainConfig<double> dt_config)
+    : event_loop_(event_loop),
+      dt_config_(dt_config),
+      constants_fetcher_(event_loop),
+      cameras_(MakeCameras(constants_fetcher_.constants(), event_loop)),
+      target_poses_(GetTargetLocations(constants_fetcher_.constants())),
+      down_estimator_(dt_config),
+      ekf_(dt_config),
+      observations_(&ekf_),
+      imu_watcher_(event_loop, dt_config,
+                   y2023::constants::Values::DrivetrainEncoderToMeters(1),
+                   std::bind(&Localizer::HandleImu, this, std::placeholders::_1,
+                             std::placeholders::_2, std::placeholders::_3,
+                             std::placeholders::_4, std::placeholders::_5)),
+      utils_(event_loop),
+      status_sender_(event_loop->MakeSender<Status>("/localizer")),
+      output_sender_(event_loop->MakeSender<frc971::controls::LocalizerOutput>(
+          "/localizer")) {
+  if (dt_config_.is_simulated) {
+    down_estimator_.assume_perfect_gravity();
+  }
+
+  for (size_t camera_index = 0; camera_index < kNumCameras; ++camera_index) {
+    const std::string_view pi_name = kPisToUse.at(camera_index);
+    event_loop->MakeWatcher(
+        absl::StrCat("/", pi_name, "/camera"),
+        [this, pi_name,
+         camera_index](const frc971::vision::TargetMap &targets) {
+          CHECK(targets.has_target_poses());
+          CHECK(targets.has_monotonic_timestamp_ns());
+          const std::optional<aos::monotonic_clock::duration> clock_offset =
+              utils_.ClockOffset(pi_name);
+          if (!clock_offset.has_value()) {
+            VLOG(1) << "Rejecting image due to disconnected message bridge at "
+                    << event_loop_->monotonic_now();
+            cameras_.at(camera_index)
+                .rejection_counter.IncrementError(
+                    RejectionReason::MESSAGE_BRIDGE_DISCONNECTED);
+            return;
+          }
+          const aos::monotonic_clock::time_point pi_capture_time(
+              std::chrono::nanoseconds(targets.monotonic_timestamp_ns()) -
+              clock_offset.value());
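+          // The EKF updates are timestamped off of the pico-derived IMU
+          // sample times (see HandleImu), so adjust the capture time by the
+          // tracked pi/pico offset error to keep the two timelines
+          // consistent.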
+          const aos::monotonic_clock::time_point capture_time =
+              pi_capture_time - imu_watcher_.pico_offset_error();
+          VLOG(2) << "Capture time of "
+                  << targets.monotonic_timestamp_ns() * 1e-9
+                  << " clock offset of "
+                  << aos::time::DurationInSeconds(clock_offset.value())
+                  << " pico error "
+                  << aos::time::DurationInSeconds(
+                         imu_watcher_.pico_offset_error());
+          if (pi_capture_time > event_loop_->context().monotonic_event_time) {
+            VLOG(1) << "Rejecting image due to being from the future at "
+                    << event_loop_->monotonic_now() << " with timestamp of "
+                    << pi_capture_time << " and event time of "
+                    << event_loop_->context().monotonic_event_time;
+            cameras_.at(camera_index)
+                .rejection_counter.IncrementError(
+                    RejectionReason::IMAGE_FROM_FUTURE);
+            return;
+          }
+          auto builder = cameras_.at(camera_index).debug_sender.MakeBuilder();
+          aos::SizedArray<flatbuffers::Offset<TargetEstimateDebug>, 20>
+              debug_offsets;
+          for (const frc971::vision::TargetPoseFbs *target :
+               *targets.target_poses()) {
+            VLOG(1) << "Handling target from " << camera_index;
+            auto offset = HandleTarget(camera_index, capture_time, *target,
+                                       builder.fbb());
+            if (debug_offsets.size() < debug_offsets.capacity()) {
+              debug_offsets.push_back(offset);
+            } else {
+              AOS_LOG(ERROR, "Dropped message from debug vector.");
+            }
+          }
+          auto vector_offset = builder.fbb()->CreateVector(
+              debug_offsets.data(), debug_offsets.size());
+          Visualization::Builder visualize_builder(*builder.fbb());
+          visualize_builder.add_targets(vector_offset);
+          builder.CheckOk(builder.Send(visualize_builder.Finish()));
+          SendStatus();
+        });
+  }
+
+  event_loop_->AddPhasedLoop([this](int) { SendOutput(); },
+                             std::chrono::milliseconds(5));
+
+  event_loop_->MakeWatcher(
+      "/drivetrain",
+      [this](
+          const frc971::control_loops::drivetrain::LocalizerControl &control) {
+        const double theta = control.keep_current_theta()
+                                 ? ekf_.X_hat(StateIdx::kTheta)
+                                 : control.theta();
+        const double left_encoder = ekf_.X_hat(StateIdx::kLeftEncoder);
+        const double right_encoder = ekf_.X_hat(StateIdx::kRightEncoder);
+        ekf_.ResetInitialState(
+            t_,
+            (HybridEkf::State() << control.x(), control.y(), theta,
+             left_encoder, 0, right_encoder, 0, 0, 0, 0, 0, 0)
+                .finished(),
+            ekf_.P());
+      });
+
+  ekf_.set_ignore_accel(true);
+  // Priority should be lower than the imu reading process, but non-zero.
+  event_loop->SetRuntimeRealtimePriority(10);
+  event_loop->OnRun([this, event_loop]() {
+    ekf_.ResetInitialState(event_loop->monotonic_now(),
+                           HybridEkf::State::Zero(), ekf_.P());
+  });
+}
+
+void Localizer::HandleImu(aos::monotonic_clock::time_point sample_time_pico,
+                          aos::monotonic_clock::time_point sample_time_pi,
+                          std::optional<Eigen::Vector2d> encoders,
+                          Eigen::Vector3d gyro, Eigen::Vector3d accel) {
+  last_encoder_readings_ = encoders;
+  // Ignore invalid readings; the HybridEkf will handle it reasonably.
+  if (!encoders.has_value()) {
+    return;
+  }
+  if (t_ == aos::monotonic_clock::min_time) {
+    t_ = sample_time_pico;
+  }
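+  // If there has been a large gap since the last IMU sample (more than 10
+  // nominal sample periods), don't integrate across the gap; just reset our
+  // timebase and count the reset for debugging.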
+  if (t_ + 10 * frc971::controls::ImuWatcher::kNominalDt < sample_time_pico) {
+    t_ = sample_time_pico;
+    ++clock_resets_;
+  }
+  const aos::monotonic_clock::duration dt = sample_time_pico - t_;
+  t_ = sample_time_pico;
+  // We don't actually use the down estimator currently, but it's really
+  // convenient for debugging.
+  down_estimator_.Predict(gyro, accel, dt);
+  const double yaw_rate = (dt_config_.imu_transform * gyro)(2);
+  ekf_.UpdateEncodersAndGyro(encoders.value()(0), encoders.value()(1), yaw_rate,
+                             utils_.VoltageOrZero(sample_time_pi), accel, t_);
+  SendStatus();
+}
+
+flatbuffers::Offset<TargetEstimateDebug> Localizer::RejectImage(
+    int camera_index, RejectionReason reason,
+    TargetEstimateDebug::Builder *builder) {
+  builder->add_accepted(false);
+  builder->add_rejection_reason(reason);
+  cameras_.at(camera_index).rejection_counter.IncrementError(reason);
+  return builder->Finish();
+}
+
+flatbuffers::Offset<TargetEstimateDebug> Localizer::HandleTarget(
+    int camera_index, const aos::monotonic_clock::time_point capture_time,
+    const frc971::vision::TargetPoseFbs &target,
+    flatbuffers::FlatBufferBuilder *debug_fbb) {
+  ++total_candidate_targets_;
+  ++cameras_.at(camera_index).total_candidate_targets;
+
+  TargetEstimateDebug::Builder builder(*debug_fbb);
+  builder.add_camera(camera_index);
+  builder.add_image_age_sec(aos::time::DurationInSeconds(
+      event_loop_->monotonic_now() - capture_time));
+
+  const uint64_t target_id = target.id();
+  VLOG(2) << aos::FlatbufferToJson(&target);
+  if (target_poses_.count(target_id) == 0) {
+    VLOG(1) << "Rejecting target due to invalid ID " << target_id;
+    return RejectImage(camera_index, RejectionReason::NO_SUCH_TARGET, &builder);
+  }
+
+  const Transform &H_field_target = target_poses_.at(target_id);
+  const Transform &H_robot_camera = cameras_.at(camera_index).extrinsics;
+
+  const Transform H_camera_target = PoseToTransform(&target);
+
+  const Transform H_field_camera = H_field_target * H_camera_target.inverse();
+  // Back out the robot position that is implied by the current camera
+  // reading. Note that the Pose object ignores any roll/pitch components, so
+  // if the camera's pitch/roll extrinsics are off, this should just ignore
+  // that error.
+  const frc971::control_loops::Pose measured_camera_pose(H_field_camera);
+  builder.add_camera_x(measured_camera_pose.rel_pos().x());
+  builder.add_camera_y(measured_camera_pose.rel_pos().y());
+  // Because the camera uses Z as forwards rather than X, just calculate the
+  // debugging theta value using the transformation matrix directly.
+  builder.add_camera_theta(
+      std::atan2(H_field_camera(1, 2), H_field_camera(0, 2)));
+  // Calculate the camera-to-robot transformation matrix ignoring the
+  // pitch/roll of the camera.
+  const Transform H_camera_robot_stripped =
+      frc971::control_loops::Pose(H_robot_camera)
+          .AsTransformationMatrix()
+          .inverse();
+  const frc971::control_loops::Pose measured_pose(
+      measured_camera_pose.AsTransformationMatrix() * H_camera_robot_stripped);
+  // This "Z" is the robot pose directly implied by the camera results.
+  const Eigen::Matrix<double, 3, 1> Z(measured_pose.rel_pos().x(),
+                                      measured_pose.rel_pos().y(),
+                                      measured_pose.rel_theta());
+  builder.add_implied_robot_x(Z(Corrector::kX));
+  builder.add_implied_robot_y(Z(Corrector::kY));
+  builder.add_implied_robot_theta(Z(Corrector::kTheta));
+
+  // TODO(james): Tune this. Also, gain schedule for auto mode?
+  Eigen::Matrix<double, 3, 1> noises(1.0, 1.0, 0.5);
+
+  Eigen::Matrix3d R = Eigen::Matrix3d::Zero();
+  R.diagonal() = noises.cwiseAbs2();
+  // In order to do the EKF correction, we determine the expected state based
+  // on the state at the time the image was captured; however, we insert the
+  // correction update itself at the current time. This is technically not
+  // quite correct, but saves substantial CPU usage & code complexity by
+  // making it so that we don't have to constantly rewind the entire EKF
+  // history.
+  const std::optional<State> state_at_capture =
+      ekf_.LastStateBeforeTime(capture_time);
+
+  if (!state_at_capture.has_value()) {
+    VLOG(1) << "Rejecting image due to being too old.";
+    return RejectImage(camera_index, RejectionReason::IMAGE_TOO_OLD, &builder);
+  }
+
+  const Input U = ekf_.MostRecentInput();
+  VLOG(1) << "previous state " << ekf_.X_hat().topRows<3>().transpose();
+  // For the correction step, instead of passing in the measurement directly,
+  // we pass in (0, 0, 0) as the measurement and then for the expected
+  // measurement (Zhat) we calculate the error between the pose implied by
+  // the camera measurement and the current estimate of the
+  // pose. This doesn't affect any of the math, it just makes the code a bit
+  // more convenient to write given the Correct() interface we already have.
+  observations_.CorrectKnownH(Eigen::Vector3d::Zero(), &U,
+                              Corrector(state_at_capture.value(), Z), R, t_);
+  ++total_accepted_targets_;
+  ++cameras_.at(camera_index).total_accepted_targets;
+  VLOG(1) << "new state " << ekf_.X_hat().topRows<3>().transpose();
+  return builder.Finish();
+}
+
+Localizer::Output Localizer::Corrector::H(const State &, const Input &) {
+  CHECK(Z_.allFinite());
+  Eigen::Vector3d Zhat = H_ * state_at_capture_ - Z_;
+  // Rewrap angle difference to put it back in range.
+  Zhat(2) = aos::math::NormalizeAngle(Zhat(2));
+  VLOG(1) << "Zhat " << Zhat.transpose() << " Z_ " << Z_.transpose()
+          << " state " << (H_ * state_at_capture_).transpose();
+  return Zhat;
+}
+
+void Localizer::SendOutput() {
+  auto builder = output_sender_.MakeBuilder();
+  frc971::controls::LocalizerOutput::Builder output_builder =
+      builder.MakeBuilder<frc971::controls::LocalizerOutput>();
+  output_builder.add_monotonic_timestamp_ns(
+      std::chrono::duration_cast<std::chrono::nanoseconds>(
+          event_loop_->context().monotonic_event_time.time_since_epoch())
+          .count());
+  output_builder.add_x(ekf_.X_hat(StateIdx::kX));
+  output_builder.add_y(ekf_.X_hat(StateIdx::kY));
+  output_builder.add_theta(ekf_.X_hat(StateIdx::kTheta));
+  output_builder.add_zeroed(imu_watcher_.zeroer().Zeroed());
+  output_builder.add_image_accepted_count(total_accepted_targets_);
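+  // Combine the EKF's yaw estimate with the down estimator's roll/pitch
+  // estimate to produce a full 3-d orientation for consumers of the output.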
+  const Eigen::Quaterniond &orientation =
+      Eigen::AngleAxis<double>(ekf_.X_hat(StateIdx::kTheta),
+                               Eigen::Vector3d::UnitZ()) *
+      down_estimator_.X_hat();
+  frc971::controls::Quaternion quaternion;
+  quaternion.mutate_x(orientation.x());
+  quaternion.mutate_y(orientation.y());
+  quaternion.mutate_z(orientation.z());
+  quaternion.mutate_w(orientation.w());
+  output_builder.add_orientation(&quaternion);
+  builder.CheckOk(builder.Send(output_builder.Finish()));
+}
+
+flatbuffers::Offset<frc971::control_loops::drivetrain::LocalizerState>
+Localizer::PopulateState(flatbuffers::FlatBufferBuilder *fbb) const {
+  frc971::control_loops::drivetrain::LocalizerState::Builder builder(*fbb);
+  builder.add_x(ekf_.X_hat(StateIdx::kX));
+  builder.add_y(ekf_.X_hat(StateIdx::kY));
+  builder.add_theta(ekf_.X_hat(StateIdx::kTheta));
+  builder.add_left_velocity(ekf_.X_hat(StateIdx::kLeftVelocity));
+  builder.add_right_velocity(ekf_.X_hat(StateIdx::kRightVelocity));
+  builder.add_left_encoder(ekf_.X_hat(StateIdx::kLeftEncoder));
+  builder.add_right_encoder(ekf_.X_hat(StateIdx::kRightEncoder));
+  builder.add_left_voltage_error(ekf_.X_hat(StateIdx::kLeftVoltageError));
+  builder.add_right_voltage_error(ekf_.X_hat(StateIdx::kRightVoltageError));
+  builder.add_angular_error(ekf_.X_hat(StateIdx::kAngularError));
+  builder.add_longitudinal_velocity_offset(
+      ekf_.X_hat(StateIdx::kLongitudinalVelocityOffset));
+  builder.add_lateral_velocity(ekf_.X_hat(StateIdx::kLateralVelocity));
+  return builder.Finish();
+}
+
+flatbuffers::Offset<ImuStatus> Localizer::PopulateImu(
+    flatbuffers::FlatBufferBuilder *fbb) const {
+  const auto zeroer_offset = imu_watcher_.zeroer().PopulateStatus(fbb);
+  const auto failures_offset = imu_watcher_.PopulateImuFailures(fbb);
+  ImuStatus::Builder builder(*fbb);
+  builder.add_zeroed(imu_watcher_.zeroer().Zeroed());
+  builder.add_faulted_zero(imu_watcher_.zeroer().Faulted());
+  builder.add_zeroing(zeroer_offset);
+  if (imu_watcher_.pico_offset().has_value()) {
+    builder.add_pico_offset_ns(imu_watcher_.pico_offset().value().count());
+    builder.add_pico_offset_error_ns(imu_watcher_.pico_offset_error().count());
+  }
+  if (last_encoder_readings_.has_value()) {
+    builder.add_left_encoder(last_encoder_readings_.value()(0));
+    builder.add_right_encoder(last_encoder_readings_.value()(1));
+  }
+  builder.add_imu_failures(failures_offset);
+  return builder.Finish();
+}
+
+void Localizer::SendStatus() {
+  auto builder = status_sender_.MakeBuilder();
+  std::array<flatbuffers::Offset<CumulativeStatistics>, kNumCameras>
+      stats_offsets;
+  for (size_t ii = 0; ii < kNumCameras; ++ii) {
+    const auto counts_offset =
+        cameras_.at(ii).rejection_counter.PopulateCounts(builder.fbb());
+    CumulativeStatistics::Builder stats_builder =
+        builder.MakeBuilder<CumulativeStatistics>();
+    stats_builder.add_total_accepted(cameras_.at(ii).total_accepted_targets);
+    stats_builder.add_total_candidates(cameras_.at(ii).total_candidate_targets);
+    stats_builder.add_rejection_reasons(counts_offset);
+    stats_offsets.at(ii) = stats_builder.Finish();
+  }
+  auto stats_offset =
+      builder.fbb()->CreateVector(stats_offsets.data(), stats_offsets.size());
+  auto down_estimator_offset =
+      down_estimator_.PopulateStatus(builder.fbb(), t_);
+  auto imu_offset = PopulateImu(builder.fbb());
+  auto state_offset = PopulateState(builder.fbb());
+  Status::Builder status_builder = builder.MakeBuilder<Status>();
+  status_builder.add_state(state_offset);
+  status_builder.add_down_estimator(down_estimator_offset);
+  status_builder.add_imu(imu_offset);
+  status_builder.add_statistics(stats_offset);
+  builder.CheckOk(builder.Send(status_builder.Finish()));
+}
+
+}  // namespace y2023::localizer
diff --git a/y2023/localizer/localizer.h b/y2023/localizer/localizer.h
new file mode 100644
index 0000000..b89d222
--- /dev/null
+++ b/y2023/localizer/localizer.h
@@ -0,0 +1,121 @@
+#ifndef Y2023_LOCALIZER_LOCALIZER_H_
+#define Y2023_LOCALIZER_LOCALIZER_H_
+
+#include <array>
+#include <map>
+
+#include "frc971/constants/constants_sender_lib.h"
+#include "frc971/control_loops/drivetrain/hybrid_ekf.h"
+#include "frc971/control_loops/drivetrain/improved_down_estimator.h"
+#include "frc971/control_loops/drivetrain/localization/localizer_output_generated.h"
+#include "frc971/control_loops/drivetrain/localization/utils.h"
+#include "frc971/imu_reader/imu_watcher.h"
+#include "frc971/vision/target_map_generated.h"
+#include "y2023/constants/constants_generated.h"
+#include "y2023/localizer/status_generated.h"
+#include "y2023/localizer/visualization_generated.h"
+
+namespace y2023::localizer {
+
+class Localizer {
+ public:
+  static constexpr size_t kNumCameras = 4;
+  typedef Eigen::Matrix<double, 4, 4> Transform;
+  typedef frc971::control_loops::drivetrain::HybridEkf<double> HybridEkf;
+  typedef HybridEkf::State State;
+  typedef HybridEkf::Output Output;
+  typedef HybridEkf::Input Input;
+  typedef HybridEkf::StateIdx StateIdx;
+  Localizer(aos::EventLoop *event_loop,
+            const frc971::control_loops::drivetrain::DrivetrainConfig<double>
+                dt_config);
+
+ private:
+  class Corrector : public HybridEkf::ExpectedObservationFunctor {
+   public:
+    // Indices used for each of the members of the output vector for this
+    // Corrector.
+    enum OutputIdx {
+      kX = 0,
+      kY = 1,
+      kTheta = 2,
+    };
+    Corrector(const State &state_at_capture, const Eigen::Vector3d &Z)
+        : state_at_capture_(state_at_capture), Z_(Z) {
+      H_.setZero();
+      H_(kX, StateIdx::kX) = 1;
+      H_(kY, StateIdx::kY) = 1;
+      H_(kTheta, StateIdx::kTheta) = 1;
+    }
+    Output H(const State &, const Input &) final;
+    Eigen::Matrix<double, HybridEkf::kNOutputs, HybridEkf::kNStates> DHDX(
+        const State &) final {
+      return H_;
+    }
+
+   private:
+    Eigen::Matrix<double, HybridEkf::kNOutputs, HybridEkf::kNStates> H_;
+    const State state_at_capture_;
+    const Eigen::Vector3d &Z_;
+  };
+
+  struct CameraState {
+    aos::Sender<Visualization> debug_sender;
+    Transform extrinsics = Transform::Zero();
+    aos::util::ArrayErrorCounter<RejectionReason, RejectionCount>
+        rejection_counter;
+    size_t total_candidate_targets = 0;
+    size_t total_accepted_targets = 0;
+  };
+
+  static std::array<CameraState, kNumCameras> MakeCameras(
+      const Constants &constants, aos::EventLoop *event_loop);
+  flatbuffers::Offset<TargetEstimateDebug> HandleTarget(
+      int camera_index, const aos::monotonic_clock::time_point capture_time,
+      const frc971::vision::TargetPoseFbs &target,
+      flatbuffers::FlatBufferBuilder *debug_fbb);
+  void HandleImu(aos::monotonic_clock::time_point sample_time_pico,
+                 aos::monotonic_clock::time_point sample_time_pi,
+                 std::optional<Eigen::Vector2d> encoders, Eigen::Vector3d gyro,
+                 Eigen::Vector3d accel);
+  flatbuffers::Offset<TargetEstimateDebug> RejectImage(
+      int camera_index, RejectionReason reason,
+      TargetEstimateDebug::Builder *builder);
+
+  void SendOutput();
+  flatbuffers::Offset<frc971::control_loops::drivetrain::LocalizerState>
+  PopulateState(flatbuffers::FlatBufferBuilder *fbb) const;
+  flatbuffers::Offset<ImuStatus> PopulateImu(
+      flatbuffers::FlatBufferBuilder *fbb) const;
+  void SendStatus();
+
+  aos::EventLoop *const event_loop_;
+  const frc971::control_loops::drivetrain::DrivetrainConfig<double> dt_config_;
+  frc971::constants::ConstantsFetcher<Constants> constants_fetcher_;
+  std::array<CameraState, kNumCameras> cameras_;
+  const std::array<Transform, kNumCameras> camera_extrinsics_;
+  const std::map<uint64_t, Transform> target_poses_;
+
+  frc971::control_loops::drivetrain::DrivetrainUkf down_estimator_;
+  HybridEkf ekf_;
+  HybridEkf::ExpectedObservationAllocator<Corrector> observations_;
+
+  frc971::controls::ImuWatcher imu_watcher_;
+  frc971::control_loops::drivetrain::LocalizationUtils utils_;
+
+  aos::Sender<Status> status_sender_;
+  aos::Sender<frc971::controls::LocalizerOutput> output_sender_;
+  aos::monotonic_clock::time_point t_ = aos::monotonic_clock::min_time;
+  size_t clock_resets_ = 0;
+
+  size_t total_candidate_targets_ = 0;
+  size_t total_accepted_targets_ = 0;
+
+  // For the status message.
+  std::optional<Eigen::Vector2d> last_encoder_readings_;
+};
+
+// Converts a TargetPoseFbs into a transformation matrix.
+Localizer::Transform PoseToTransform(const frc971::vision::TargetPoseFbs *pose);
+}  // namespace y2023::localizer
+#endif  // Y2023_LOCALIZER_LOCALIZER_H_
diff --git a/y2023/localizer/localizer_main.cc b/y2023/localizer/localizer_main.cc
new file mode 100644
index 0000000..98f0e81
--- /dev/null
+++ b/y2023/localizer/localizer_main.cc
@@ -0,0 +1,24 @@
+#include "aos/events/shm_event_loop.h"
+#include "aos/init.h"
+#include "frc971/constants/constants_sender_lib.h"
+#include "y2023/control_loops/drivetrain/drivetrain_base.h"
+#include "y2023/localizer/localizer.h"
+
+DEFINE_string(config, "aos_config.json", "Path to the config file to use.");
+
+int main(int argc, char *argv[]) {
+  aos::InitGoogle(&argc, &argv);
+
+  aos::FlatbufferDetachedBuffer<aos::Configuration> config =
+      aos::configuration::ReadConfig(FLAGS_config);
+
+  frc971::constants::WaitForConstants<y2023::Constants>(&config.message());
+
+  aos::ShmEventLoop event_loop(&config.message());
+  y2023::localizer::Localizer localizer(
+      &event_loop, ::y2023::control_loops::drivetrain::GetDrivetrainConfig());
+
+  event_loop.Run();
+
+  return 0;
+}
diff --git a/y2023/localizer/localizer_test.cc b/y2023/localizer/localizer_test.cc
new file mode 100644
index 0000000..77af69b
--- /dev/null
+++ b/y2023/localizer/localizer_test.cc
@@ -0,0 +1,459 @@
+#include "y2023/localizer/localizer.h"
+
+#include "aos/events/logging/log_writer.h"
+#include "aos/events/simulated_event_loop.h"
+#include "frc971/control_loops/drivetrain/drivetrain_test_lib.h"
+#include "frc971/control_loops/drivetrain/localizer_generated.h"
+#include "frc971/control_loops/pose.h"
+#include "frc971/vision/target_map_generated.h"
+#include "gtest/gtest.h"
+#include "y2023/constants/simulated_constants_sender.h"
+#include "y2023/control_loops/drivetrain/drivetrain_base.h"
+#include "y2023/localizer/status_generated.h"
+
+DEFINE_string(output_folder, "",
+              "If set, logs all channels to the provided logfile.");
+DECLARE_bool(die_on_malloc);
+
+namespace y2023::localizer::testing {
+
+using frc971::control_loops::drivetrain::Output;
+
+class LocalizerTest : public ::testing::Test {
+ protected:
+  static constexpr uint64_t kTargetId = 1;
+  LocalizerTest()
+      : configuration_(aos::configuration::ReadConfig("y2023/aos_config.json")),
+        event_loop_factory_(&configuration_.message()),
+        roborio_node_([this]() {
+          // Get the constants sent before anything else happens. This has
+          // nothing to do with the roborio node; it just needs to happen
+          // before the rest of the test infrastructure is constructed.
+          SendSimulationConstants(&event_loop_factory_, 7971,
+                                  "y2023/constants/test_constants.json");
+          return aos::configuration::GetNode(&configuration_.message(),
+                                             "roborio");
+        }()),
+        imu_node_(
+            aos::configuration::GetNode(&configuration_.message(), "imu")),
+        camera_node_(
+            aos::configuration::GetNode(&configuration_.message(), "pi1")),
+        dt_config_(frc971::control_loops::drivetrain::testing::
+                       GetTestDrivetrainConfig()),
+        localizer_event_loop_(
+            event_loop_factory_.MakeEventLoop("localizer", imu_node_)),
+        localizer_(localizer_event_loop_.get(), dt_config_),
+        drivetrain_plant_event_loop_(event_loop_factory_.MakeEventLoop(
+            "drivetrain_plant", roborio_node_)),
+        drivetrain_plant_imu_event_loop_(
+            event_loop_factory_.MakeEventLoop("drivetrain_plant", imu_node_)),
+        drivetrain_plant_(drivetrain_plant_event_loop_.get(),
+                          drivetrain_plant_imu_event_loop_.get(), dt_config_,
+                          std::chrono::microseconds(500)),
+        roborio_test_event_loop_(
+            event_loop_factory_.MakeEventLoop("test", roborio_node_)),
+        imu_test_event_loop_(
+            event_loop_factory_.MakeEventLoop("test", imu_node_)),
+        camera_test_event_loop_(
+            event_loop_factory_.MakeEventLoop("test", camera_node_)),
+        logger_test_event_loop_(
+            event_loop_factory_.GetNodeEventLoopFactory("logger")
+                ->MakeEventLoop("test")),
+        constants_fetcher_(imu_test_event_loop_.get()),
+        output_sender_(
+            roborio_test_event_loop_->MakeSender<Output>("/drivetrain")),
+        target_sender_(
+            camera_test_event_loop_->MakeSender<frc971::vision::TargetMap>(
+                "/camera")),
+        control_sender_(roborio_test_event_loop_->MakeSender<
+                        frc971::control_loops::drivetrain::LocalizerControl>(
+            "/drivetrain")),
+        output_fetcher_(
+            roborio_test_event_loop_
+                ->MakeFetcher<frc971::controls::LocalizerOutput>("/localizer")),
+        status_fetcher_(
+            imu_test_event_loop_->MakeFetcher<Status>("/localizer")) {
+    FLAGS_die_on_malloc = true;
+    {
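+      // Send the requested drivetrain voltages (output_voltages_) out on
+      // /drivetrain every 5 ms so that the simulated drivetrain plant
+      // actually gets driven.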
+      aos::TimerHandler *timer = roborio_test_event_loop_->AddTimer([this]() {
+        {
+          auto builder = output_sender_.MakeBuilder();
+          auto output_builder = builder.MakeBuilder<Output>();
+          output_builder.add_left_voltage(output_voltages_(0));
+          output_builder.add_right_voltage(output_voltages_(1));
+          builder.CheckOk(builder.Send(output_builder.Finish()));
+        }
+      });
+      roborio_test_event_loop_->OnRun([timer, this]() {
+        timer->Setup(roborio_test_event_loop_->monotonic_now(),
+                     std::chrono::milliseconds(5));
+      });
+    }
+    {
+      // Sanity check that the test calibration files look like what we
+      // expect.
+      CHECK_EQ("pi1", constants_fetcher_.constants()
+                          .cameras()
+                          ->Get(0)
+                          ->calibration()
+                          ->node_name()
+                          ->string_view());
+      const Eigen::Matrix<double, 4, 4> H_robot_camera =
+          frc971::control_loops::drivetrain::FlatbufferToTransformationMatrix(
+              *constants_fetcher_.constants()
+                   .cameras()
+                   ->Get(0)
+                   ->calibration()
+                   ->fixed_extrinsics());
+
+      CHECK_EQ(kTargetId, constants_fetcher_.constants()
+                              .target_map()
+                              ->target_poses()
+                              ->Get(0)
+                              ->id());
+      const Eigen::Matrix<double, 4, 4> H_field_target = PoseToTransform(
+          constants_fetcher_.constants().target_map()->target_poses()->Get(0));
+      // For reference, the camera should be pointed straight forwards on the
+      // robot, offset by 1 meter.
+      aos::TimerHandler *timer = camera_test_event_loop_->AddTimer(
+          [this, H_robot_camera, H_field_target]() {
+            if (!send_targets_) {
+              return;
+            }
+            const frc971::control_loops::Pose robot_pose(
+                {drivetrain_plant_.GetPosition().x(),
+                 drivetrain_plant_.GetPosition().y(), 0.0},
+                drivetrain_plant_.state()(2, 0));
+
+            const Eigen::Matrix<double, 4, 4> H_field_camera =
+                robot_pose.AsTransformationMatrix() * H_robot_camera;
+            const Eigen::Matrix<double, 4, 4> H_camera_target =
+                H_field_camera.inverse() * H_field_target;
+
+            const Eigen::Quaterniond quat(H_camera_target.block<3, 3>(0, 0));
+            const Eigen::Vector3d translation(
+                H_camera_target.block<3, 1>(0, 3));
+
+            auto builder = target_sender_.MakeBuilder();
+            frc971::vision::Quaternion::Builder quat_builder(*builder.fbb());
+            quat_builder.add_w(quat.w());
+            quat_builder.add_x(quat.x());
+            quat_builder.add_y(quat.y());
+            quat_builder.add_z(quat.z());
+            auto quat_offset = quat_builder.Finish();
+            frc971::vision::Position::Builder position_builder(*builder.fbb());
+            position_builder.add_x(translation.x());
+            position_builder.add_y(translation.y());
+            position_builder.add_z(translation.z());
+            auto position_offset = position_builder.Finish();
+
+            frc971::vision::TargetPoseFbs::Builder target_builder(
+                *builder.fbb());
+            target_builder.add_id(send_target_id_);
+            target_builder.add_position(position_offset);
+            target_builder.add_orientation(quat_offset);
+            auto target_offset = target_builder.Finish();
+
+            auto targets_offset = builder.fbb()->CreateVector({target_offset});
+            frc971::vision::TargetMap::Builder map_builder(*builder.fbb());
+            map_builder.add_target_poses(targets_offset);
+            map_builder.add_monotonic_timestamp_ns(
+                std::chrono::duration_cast<std::chrono::nanoseconds>(
+                    camera_test_event_loop_->monotonic_now().time_since_epoch())
+                    .count());
+
+            builder.CheckOk(builder.Send(map_builder.Finish()));
+          });
+      camera_test_event_loop_->OnRun([timer, this]() {
+        timer->Setup(camera_test_event_loop_->monotonic_now(),
+                     std::chrono::milliseconds(50));
+      });
+    }
+
+    localizer_control_send_timer_ =
+        roborio_test_event_loop_->AddTimer([this]() {
+          auto builder = control_sender_.MakeBuilder();
+          auto control_builder = builder.MakeBuilder<
+              frc971::control_loops::drivetrain::LocalizerControl>();
+          control_builder.add_x(localizer_control_x_);
+          control_builder.add_y(localizer_control_y_);
+          control_builder.add_theta(localizer_control_theta_);
+          control_builder.add_theta_uncertainty(0.01);
+          control_builder.add_keep_current_theta(false);
+          builder.CheckOk(builder.Send(control_builder.Finish()));
+        });
+
+    // Get things zeroed.
+    event_loop_factory_.RunFor(std::chrono::seconds(10));
+    CHECK(status_fetcher_.Fetch());
+    CHECK(status_fetcher_->imu()->zeroed());
+
+    if (!FLAGS_output_folder.empty()) {
+      logger_event_loop_ =
+          event_loop_factory_.MakeEventLoop("logger", imu_node_);
+      logger_ = std::make_unique<aos::logger::Logger>(logger_event_loop_.get());
+      logger_->StartLoggingOnRun(FLAGS_output_folder);
+    }
+  }
+
+  void SendLocalizerControl(double x, double y, double theta) {
+    localizer_control_x_ = x;
+    localizer_control_y_ = y;
+    localizer_control_theta_ = theta;
+    localizer_control_send_timer_->Setup(
+        roborio_test_event_loop_->monotonic_now());
+  }
+  ::testing::AssertionResult IsNear(double expected, double actual,
+                                    double epsilon) {
+    if (std::abs(expected - actual) < epsilon) {
+      return ::testing::AssertionSuccess();
+    } else {
+      return ::testing::AssertionFailure()
+             << "Expected " << expected << " but got " << actual
+             << " with a max difference of " << epsilon
+             << " and an actual difference of " << std::abs(expected - actual);
+    }
+  }
+  ::testing::AssertionResult VerifyEstimatorAccurate(double eps) {
+    const Eigen::Matrix<double, 5, 1> true_state = drivetrain_plant_.state();
+    ::testing::AssertionResult result(true);
+    status_fetcher_.Fetch();
+    if (!(result = IsNear(status_fetcher_->state()->x(), true_state(0), eps))) {
+      return result;
+    }
+    if (!(result = IsNear(status_fetcher_->state()->y(), true_state(1), eps))) {
+      return result;
+    }
+    if (!(result =
+              IsNear(status_fetcher_->state()->theta(), true_state(2), eps))) {
+      return result;
+    }
+    return result;
+  }
+
+  aos::FlatbufferDetachedBuffer<aos::Configuration> configuration_;
+  aos::SimulatedEventLoopFactory event_loop_factory_;
+  const aos::Node *const roborio_node_;
+  const aos::Node *const imu_node_;
+  const aos::Node *const camera_node_;
+  const frc971::control_loops::drivetrain::DrivetrainConfig<double> dt_config_;
+  std::unique_ptr<aos::EventLoop> localizer_event_loop_;
+  Localizer localizer_;
+
+  std::unique_ptr<aos::EventLoop> drivetrain_plant_event_loop_;
+  std::unique_ptr<aos::EventLoop> drivetrain_plant_imu_event_loop_;
+  frc971::control_loops::drivetrain::testing::DrivetrainSimulation
+      drivetrain_plant_;
+
+  std::unique_ptr<aos::EventLoop> roborio_test_event_loop_;
+  std::unique_ptr<aos::EventLoop> imu_test_event_loop_;
+  std::unique_ptr<aos::EventLoop> camera_test_event_loop_;
+  std::unique_ptr<aos::EventLoop> logger_test_event_loop_;
+
+  frc971::constants::ConstantsFetcher<Constants> constants_fetcher_;
+
+  aos::Sender<Output> output_sender_;
+  aos::Sender<frc971::vision::TargetMap> target_sender_;
+  aos::Sender<frc971::control_loops::drivetrain::LocalizerControl>
+      control_sender_;
+  aos::Fetcher<frc971::controls::LocalizerOutput> output_fetcher_;
+  aos::Fetcher<Status> status_fetcher_;
+
+  Eigen::Vector2d output_voltages_ = Eigen::Vector2d::Zero();
+
+  aos::TimerHandler *localizer_control_send_timer_;
+
+  bool send_targets_ = false;
+
+  double localizer_control_x_ = 0.0;
+  double localizer_control_y_ = 0.0;
+  double localizer_control_theta_ = 0.0;
+
+  std::unique_ptr<aos::EventLoop> logger_event_loop_;
+  std::unique_ptr<aos::logger::Logger> logger_;
+
+  uint64_t send_target_id_ = kTargetId;
+
+  gflags::FlagSaver flag_saver_;
+};
+
+// Test a simple scenario with no errors where the robot should just drive
+// straight forwards.
+TEST_F(LocalizerTest, Nominal) {
+  output_voltages_ << 1.0, 1.0;
+  event_loop_factory_.RunFor(std::chrono::seconds(2));
+  CHECK(output_fetcher_.Fetch());
+  CHECK(status_fetcher_.Fetch());
+  // The two can be different because they may've been sent at different
+  // times.
+  EXPECT_NEAR(output_fetcher_->x(), status_fetcher_->state()->x(), 1e-2);
+  EXPECT_NEAR(output_fetcher_->y(), status_fetcher_->state()->y(), 1e-6);
+  EXPECT_NEAR(output_fetcher_->theta(), status_fetcher_->state()->theta(),
+              1e-6);
+  // Confirm that we did indeed drive forwards (and straight), as expected.
+  EXPECT_LT(0.1, output_fetcher_->x());
+  EXPECT_NEAR(0.0, output_fetcher_->y(), 1e-10);
+  EXPECT_NEAR(0.0, output_fetcher_->theta(), 1e-10);
+  EXPECT_NEAR(0.0, status_fetcher_->state()->left_voltage_error(), 1e-1);
+  EXPECT_NEAR(0.0, status_fetcher_->state()->right_voltage_error(), 1e-1);
+
+  // And check that we actually think that we are near where the simulator
+  // says we are.
+  EXPECT_TRUE(VerifyEstimatorAccurate(1e-2));
+}
+
+// Confirm that when the robot drives backwards that we localize correctly.
+TEST_F(LocalizerTest, NominalReverse) {
+  output_voltages_ << -1.0, -1.0;
+  event_loop_factory_.RunFor(std::chrono::seconds(2));
+  CHECK(output_fetcher_.Fetch());
+  CHECK(status_fetcher_.Fetch());
+  // The two can be different because they may've been sent at different
+  // times.
+  EXPECT_NEAR(output_fetcher_->x(), status_fetcher_->state()->x(), 1e-2);
+  EXPECT_NEAR(output_fetcher_->y(), status_fetcher_->state()->y(), 1e-6);
+  EXPECT_NEAR(output_fetcher_->theta(), status_fetcher_->state()->theta(),
+              1e-6);
+  // Confirm that we did indeed drive backwards (and straight), as expected.
+  EXPECT_GT(-0.1, output_fetcher_->x());
+  EXPECT_NEAR(0.0, output_fetcher_->y(), 1e-10);
+  EXPECT_NEAR(0.0, output_fetcher_->theta(), 1e-10);
+  EXPECT_NEAR(0.0, status_fetcher_->state()->left_voltage_error(), 1e-1);
+  EXPECT_NEAR(0.0, status_fetcher_->state()->right_voltage_error(), 1e-1);
+
+  // And check that we actually think that we are near where the simulator
+  // says we are.
+  EXPECT_TRUE(VerifyEstimatorAccurate(1e-2));
+}
+
+// Confirm that when the robot turns counter-clockwise that we localize
+// correctly.
+TEST_F(LocalizerTest, NominalSpinInPlace) {
+  output_voltages_ << -1.0, 1.0;
+  event_loop_factory_.RunFor(std::chrono::seconds(2));
+  CHECK(output_fetcher_.Fetch());
+  CHECK(status_fetcher_.Fetch());
+  // The two can be different because they may've been sent at different
+  // times.
+  EXPECT_NEAR(output_fetcher_->x(), status_fetcher_->state()->x(), 1e-6);
+  EXPECT_NEAR(output_fetcher_->y(), status_fetcher_->state()->y(), 1e-6);
+  EXPECT_NEAR(output_fetcher_->theta(), status_fetcher_->state()->theta(),
+              1e-2);
+  // Confirm that we did indeed turn counter-clockwise.
+  EXPECT_NEAR(0.0, output_fetcher_->x(), 1e-10);
+  EXPECT_NEAR(0.0, output_fetcher_->y(), 1e-10);
+  EXPECT_LT(0.1, output_fetcher_->theta());
+  EXPECT_NEAR(0.0, status_fetcher_->state()->left_voltage_error(), 1e-1);
+  EXPECT_NEAR(0.0, status_fetcher_->state()->right_voltage_error(), 1e-1);
+
+  // And check that we actually think that we are near where the simulator
+  // says we are.
+  EXPECT_TRUE(VerifyEstimatorAccurate(1e-2));
+}
+
+// Confirm that when the robot drives in a curve that we localize
+// successfully.
+TEST_F(LocalizerTest, NominalCurve) {
+  output_voltages_ << 2.0, 3.0;
+  event_loop_factory_.RunFor(std::chrono::seconds(4));
+  CHECK(output_fetcher_.Fetch());
+  CHECK(status_fetcher_.Fetch());
+  // The two can be different because they may've been sent at different
+  // times.
+  EXPECT_NEAR(output_fetcher_->x(), status_fetcher_->state()->x(), 1e-2);
+  EXPECT_NEAR(output_fetcher_->y(), status_fetcher_->state()->y(), 1e-2);
+  EXPECT_NEAR(output_fetcher_->theta(), status_fetcher_->state()->theta(),
+              1e-2);
+  // Confirm that we did indeed drive in a rough, counter-clockwise, curve.
+  EXPECT_LT(0.1, output_fetcher_->x());
+  EXPECT_LT(0.1, output_fetcher_->y());
+  EXPECT_LT(0.1, output_fetcher_->theta());
+
+  // And check that we actually think that we are near where the simulator
+  // says we are.
+  EXPECT_TRUE(VerifyEstimatorAccurate(1e-2));
+}
+
+// Tests that, in the presence of a non-zero voltage error, we correct
+// for it.
+TEST_F(LocalizerTest, VoltageErrorDisabled) {
+  output_voltages_ << 0.0, 0.0;
+  drivetrain_plant_.set_left_voltage_offset(2.0);
+  drivetrain_plant_.set_right_voltage_offset(2.0);
+
+  event_loop_factory_.RunFor(std::chrono::seconds(2));
+  CHECK(output_fetcher_.Fetch());
+  CHECK(status_fetcher_.Fetch());
+  // We should've just ended up driving straight forwards.
+  EXPECT_LT(0.1, output_fetcher_->x());
+  EXPECT_NEAR(0.0, output_fetcher_->y(), 1e-10);
+  EXPECT_NEAR(0.0, output_fetcher_->theta(), 1e-10);
+  EXPECT_NEAR(2.0, status_fetcher_->state()->left_voltage_error(), 1.0);
+  EXPECT_NEAR(2.0, status_fetcher_->state()->right_voltage_error(), 1.0);
+
+  // And check that we actually think that we are near where the simulator
+  // says we are.
+  EXPECT_TRUE(VerifyEstimatorAccurate(0.05));
+}
+
+// Tests that image corrections in the nominal case (no errors) cause no
+// issues.
+TEST_F(LocalizerTest, NominalImageCorrections) {
+  output_voltages_ << 3.0, 2.0;
+  send_targets_ = true;
+
+  event_loop_factory_.RunFor(std::chrono::seconds(4));
+  CHECK(status_fetcher_.Fetch());
+  EXPECT_TRUE(VerifyEstimatorAccurate(1e-2));
+  ASSERT_TRUE(status_fetcher_->has_statistics());
+  ASSERT_EQ(4u /* number of cameras */, status_fetcher_->statistics()->size());
+  ASSERT_EQ(status_fetcher_->statistics()->Get(0)->total_candidates(),
+            status_fetcher_->statistics()->Get(0)->total_accepted());
+  ASSERT_LT(10, status_fetcher_->statistics()->Get(0)->total_candidates());
+}
+
+// Tests that, when there is an error at the start, image corrections
+// result in us actually getting corrected over time.
+TEST_F(LocalizerTest, ImageCorrections) {
+  output_voltages_ << 0.0, 0.0;
+  drivetrain_plant_.mutable_state()->x() = 2.0;
+  drivetrain_plant_.mutable_state()->y() = 2.0;
+  SendLocalizerControl(5.0, 3.0, 0.0);
+  event_loop_factory_.RunFor(std::chrono::seconds(4));
+  CHECK(output_fetcher_.Fetch());
+  ASSERT_NEAR(5.0, output_fetcher_->x(), 1e-5);
+  ASSERT_NEAR(3.0, output_fetcher_->y(), 1e-5);
+  ASSERT_NEAR(0.0, output_fetcher_->theta(), 1e-5);
+
+  send_targets_ = true;
+
+  event_loop_factory_.RunFor(std::chrono::seconds(4));
+  CHECK(status_fetcher_.Fetch());
+  EXPECT_TRUE(VerifyEstimatorAccurate(0.1));
+  ASSERT_TRUE(status_fetcher_->has_statistics());
+  ASSERT_EQ(4u /* number of cameras */, status_fetcher_->statistics()->size());
+  ASSERT_EQ(status_fetcher_->statistics()->Get(0)->total_candidates(),
+            status_fetcher_->statistics()->Get(0)->total_accepted());
+  ASSERT_LT(10, status_fetcher_->statistics()->Get(0)->total_candidates());
+}
+
+// Tests that we correctly reject an invalid target.
+TEST_F(LocalizerTest, InvalidTargetId) {
+  output_voltages_ << 0.0, 0.0;
+  send_targets_ = true;
+  send_target_id_ = 100;
+
+  event_loop_factory_.RunFor(std::chrono::seconds(4));
+  CHECK(status_fetcher_.Fetch());
+  ASSERT_TRUE(status_fetcher_->has_statistics());
+  ASSERT_EQ(4u /* number of cameras */, status_fetcher_->statistics()->size());
+  ASSERT_EQ(0, status_fetcher_->statistics()->Get(0)->total_accepted());
+  ASSERT_LT(10, status_fetcher_->statistics()->Get(0)->total_candidates());
+  ASSERT_EQ(status_fetcher_->statistics()
+                ->Get(0)
+                ->rejection_reasons()
+                ->Get(static_cast<size_t>(RejectionReason::NO_SUCH_TARGET))
+                ->count(),
+            status_fetcher_->statistics()->Get(0)->total_candidates());
+}
+
+}  // namespace y2023::localizer::testing
diff --git a/y2023/localizer/status.fbs b/y2023/localizer/status.fbs
new file mode 100644
index 0000000..4d0a9c1
--- /dev/null
+++ b/y2023/localizer/status.fbs
@@ -0,0 +1,59 @@
+include "frc971/control_loops/drivetrain/drivetrain_status.fbs";
+include "frc971/imu_reader/imu_failures.fbs";
+
+namespace y2023.localizer;
+
+enum RejectionReason : uint8 {
+  // For some reason, the image timestamp indicates that the image was taken
+  // in the future.
+  IMAGE_FROM_FUTURE = 0,
+  // The image was too old for the buffer of old state estimates that we
+  // maintain.
+  IMAGE_TOO_OLD = 1,
+  // Message bridge is not yet connected, and so we can't get accurate
+  // time offsets between nodes.
+  MESSAGE_BRIDGE_DISCONNECTED = 2,
+  // The target ID does not exist.
+  NO_SUCH_TARGET = 3,
+}
+
+table RejectionCount {
+  error:RejectionReason (id: 0);
+  count:uint (id: 1);
+}
+
+table CumulativeStatistics {
+  total_accepted:int (id: 0);
+  total_candidates:int (id: 1);
+  rejection_reasons:[RejectionCount] (id: 2);
+}
+
+table ImuStatus {
+  // Whether the IMU is zeroed or not.
+  zeroed:bool (id: 0);
+  // Whether the IMU zeroing is faulted or not.
+  faulted_zero:bool (id: 1);
+  zeroing:frc971.control_loops.drivetrain.ImuZeroerState (id: 2);
+  // Offset between the pico clock and the pi clock, such that
+  // pico_timestamp + pico_offset_ns = pi_timestamp
+  pico_offset_ns:int64 (id: 3);
+  // Error in the offset, if we assume that the pi/pico clocks are identical and
+  // that there is a perfectly consistent latency between the two. Will be zero
+  // for the very first cycle, and then referenced off of the initial offset
+  // thereafter. If greater than zero, implies that the pico is "behind",
+  // whether due to unusually large latency or due to clock drift.
+  pico_offset_error_ns:int64 (id: 4);
+  left_encoder:double (id: 5);
+  right_encoder:double (id: 6);
+  imu_failures:frc971.controls.ImuFailures (id: 7);
+}
+
+table Status {
+  state: frc971.control_loops.drivetrain.LocalizerState (id: 0);
+  down_estimator:frc971.control_loops.drivetrain.DownEstimatorState (id: 1);
+  imu:ImuStatus (id: 2);
+  // Statistics are per-camera, by camera index.
+  statistics:[CumulativeStatistics] (id: 3);
+}
+
+root_type Status;
diff --git a/y2023/localizer/visualization.fbs b/y2023/localizer/visualization.fbs
new file mode 100644
index 0000000..2785872
--- /dev/null
+++ b/y2023/localizer/visualization.fbs
@@ -0,0 +1,25 @@
+include "y2023/localizer/status.fbs";
+
+namespace y2023.localizer;
+
+table TargetEstimateDebug {
+  camera:uint8 (id: 0);
+  camera_x:double (id: 1);
+  camera_y:double (id: 2);
+  camera_theta:double (id: 3);
+  implied_robot_x:double (id: 4);
+  implied_robot_y:double (id: 5);
+  implied_robot_theta:double (id: 6);
+  accepted:bool (id: 7);
+  rejection_reason:RejectionReason (id: 8);
+  // Image age (more human-readable than trying to interpret raw nanosecond
+  // values).
+  image_age_sec:double (id: 9);
+}
+
+table Visualization {
+  targets:[TargetEstimateDebug] (id: 0);
+  statistics:CumulativeStatistics (id: 1);
+}
+
+root_type Visualization;