Create y2024 localizer

This is primarily a copy of the y2023 localizer, with updates to
better characterize the noise of the AprilTag readings (by separating
out the heading/distance/skew measurements).
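
For reference, a minimal sketch of the separated measurement model
(mirroring Corrector::HeadingDistanceSkew in this patch, where rel_*
is the target pose relative to the camera):

  const double heading = std::atan2(rel_y, rel_x);
  const double distance = std::hypot(rel_x, rel_y);
  const double skew = aos::math::NormalizeAngle(rel_theta - heading);

Each component then gets its own noise term, rather than sharing a
single noise model.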

It also listens to the drivetrain Position message for encoder readings
rather than relying on the IMU board to send them.
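
Concretely, the IMU callback now ignores the encoder values forwarded
by the IMU board and instead queries the drivetrain Position message
(via the LocalizationUtils helper), roughly:

  std::optional<Eigen::Vector2d> encoders =
      utils_.Encoders(sample_time_orin);

If no Position message is available yet, that sample is skipped.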

This adds a few things:
* The main localizer libraries and processes themselves.
* Updates to the AOS configs to pull in the appropriate localization
  channels.
* TypeScript plots for localization debugging.
* Dummy camera extrinsics for use in the tests.
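
The dummy extrinsics are 4x4 row-major homogeneous transforms giving
each camera's pose in the robot frame, using the convention where +Z
points out of the camera lens (see ZToXCamera() in localizer.cc).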

Change-Id: I58d5c1da0d3dc2dad98bd2a9fc10965db51c4f84
Signed-off-by: James Kuszmaul <jabukuszmaul+collab@gmail.com>
diff --git a/frc971/analysis/BUILD b/frc971/analysis/BUILD
index b179ad9..c896ce6 100644
--- a/frc971/analysis/BUILD
+++ b/frc971/analysis/BUILD
@@ -31,6 +31,8 @@
         "//y2023/control_loops/superstructure:superstructure_plotter",
         "//y2023/localizer:corrections_plotter",
         "//y2023/localizer:localizer_plotter",
+        "//y2024/localizer:corrections_plotter",
+        "//y2024/localizer:localizer_plotter",
     ],
 )
 
diff --git a/frc971/analysis/plot_index.ts b/frc971/analysis/plot_index.ts
index 1ee85d1..455c7e9 100644
--- a/frc971/analysis/plot_index.ts
+++ b/frc971/analysis/plot_index.ts
@@ -56,10 +56,14 @@
     '../../y2022/localizer/localizer_plotter'
 import {plotLocalizer as plot2023Localizer} from
     '../../y2023/localizer/localizer_plotter'
+import {plotLocalizer as plot2024Localizer} from
+    '../../y2024/localizer/localizer_plotter'
 import {plotVision as plot2022Vision} from
     '../../y2022/vision/vision_plotter'
 import {plotVision as plot2023Corrections} from
     '../../y2023/localizer/corrections_plotter'
+import {plotVision as plot2024Corrections} from
+    '../../y2024/localizer/corrections_plotter'
 import {plotDemo} from '../../aos/network/www/demo_plot';
 
 const rootDiv = document.createElement('div');
@@ -118,6 +122,8 @@
   ['Spline Debug', new PlotState(plotDiv, plotSpline)],
   ['Down Estimator', new PlotState(plotDiv, plotDownEstimator)],
   ['Robot State', new PlotState(plotDiv, plotRobotState)],
+  ['2024 Vision', new PlotState(plotDiv, plot2024Corrections)],
+  ['2024 Localizer', new PlotState(plotDiv, plot2024Localizer)],
   ['2023 Vision', new PlotState(plotDiv, plot2023Corrections)],
   ['2023 Localizer', new PlotState(plotDiv, plot2023Localizer)],
   ['2023 Superstructure', new PlotState(plotDiv, plot2023Superstructure)],
diff --git a/frc971/control_loops/drivetrain/hybrid_ekf.h b/frc971/control_loops/drivetrain/hybrid_ekf.h
index f784f2e..2f89a0c 100644
--- a/frc971/control_loops/drivetrain/hybrid_ekf.h
+++ b/frc971/control_loops/drivetrain/hybrid_ekf.h
@@ -757,8 +757,9 @@
     aos::monotonic_clock::time_point t) {
   CHECK(!observations_.empty());
   if (!observations_.full() && t < observations_.begin()->t) {
-    LOG(ERROR) << "Dropped an observation that was received before we "
-                  "initialized.\n";
+    AOS_LOG(ERROR,
+            "Dropped an observation that was received before we "
+            "initialized.\n");
     return;
   }
   auto cur_it = observations_.PushFromBottom(
diff --git a/y2024/BUILD b/y2024/BUILD
index f9102f9..f710e7b 100644
--- a/y2024/BUILD
+++ b/y2024/BUILD
@@ -58,6 +58,7 @@
         "//frc971/vision:intrinsics_calibration",
         "//y2024/vision:viewer",
         "//y2024/constants:constants_sender",
+        "//y2024/localizer:localizer_main",
         "//y2024/vision:foxglove_image_converter",
     ],
     data = [
@@ -121,6 +122,8 @@
         "//y2024/constants:constants_fbs",
         "//frc971/control_loops/drivetrain/localization:localizer_output_fbs",
         "//frc971/can_logger:can_logging_fbs",
+        "//y2024/localizer:status_fbs",
+        "//y2024/localizer:visualization_fbs",
         "//aos/network:timestamp_fbs",
         "//aos/network:remote_message_fbs",
     ],
@@ -149,6 +152,7 @@
         "//y2024/control_loops/superstructure:superstructure_can_position_fbs",
         "//y2024/control_loops/superstructure:superstructure_output_fbs",
         "//y2024/control_loops/superstructure:superstructure_position_fbs",
+        "//frc971/control_loops/drivetrain/localization:localizer_output_fbs",
         "//y2024/control_loops/superstructure:superstructure_status_fbs",
         "//frc971/can_logger:can_logging_fbs",
     ],
@@ -175,6 +179,7 @@
             "//frc971/vision:calibration_fbs",
             "//frc971/vision:target_map_fbs",
             "//frc971/vision:vision_fbs",
+            "//y2024/localizer:visualization_fbs",
             "@com_github_foxglove_schemas//:schemas",
         ],
         target_compatible_with = ["@platforms//os:linux"],
diff --git a/y2024/constants/BUILD b/y2024/constants/BUILD
index 1d18a63..b07851e 100644
--- a/y2024/constants/BUILD
+++ b/y2024/constants/BUILD
@@ -117,6 +117,6 @@
 
 constants_json(
     name = "test_constants_json",
-    src = ":constants_unvalidated.json",
+    src = ":test_constants_unvalidated.json",
     out = "test_constants.json",
 )
diff --git a/y2024/constants/test_data/calibration_cam-1.json b/y2024/constants/test_data/calibration_cam-1.json
new file mode 100644
index 0000000..2d81347
--- /dev/null
+++ b/y2024/constants/test_data/calibration_cam-1.json
@@ -0,0 +1,46 @@
+{
+  "node_name": "orin1",
+  "camera_number": 0,
+  "team_number": 7971,
+  "intrinsics": [
+    893.759521,
+    0,
+    645.470764,
+    0,
+    893.222351,
+    388.150269,
+    0,
+    0,
+    1
+  ],
+  "fixed_extrinsics": {
+    "data": [
+      0.0,
+      0.0,
+      1.0,
+      1.0,
+
+      -1.0,
+      0.0,
+      0.0,
+      0.0,
+
+      0.0,
+      -1.0,
+      0.0,
+      0.0,
+
+      0.0,
+      0.0,
+      0.0,
+      1.0
+    ]
+  },
+  "dist_coeffs": [
+    -0.44902,
+    0.248409,
+    -0.000537,
+    -0.000112,
+    -0.076989
+  ]
+}
diff --git a/y2024/constants/test_data/calibration_cam-2.json b/y2024/constants/test_data/calibration_cam-2.json
new file mode 100644
index 0000000..1128799
--- /dev/null
+++ b/y2024/constants/test_data/calibration_cam-2.json
@@ -0,0 +1,46 @@
+{
+  "node_name": "orin1",
+  "camera_number": 1,
+  "team_number": 7971,
+  "intrinsics": [
+    893.759521,
+    0,
+    645.470764,
+    0,
+    893.222351,
+    388.150269,
+    0,
+    0,
+    1
+  ],
+  "fixed_extrinsics": {
+    "data": [
+      1.0,
+      0.0,
+      0.0,
+      1.0,
+
+      0.0,
+      0.0,
+      -1.0,
+      0.0,
+
+      0.0,
+      1.0,
+      0.0,
+      0.0,
+
+      0.0,
+      0.0,
+      0.0,
+      1.0
+    ]
+  },
+  "dist_coeffs": [
+    -0.44902,
+    0.248409,
+    -0.000537,
+    -0.000112,
+    -0.076989
+  ]
+}
diff --git a/y2024/constants/test_data/calibration_cam-3.json b/y2024/constants/test_data/calibration_cam-3.json
new file mode 100644
index 0000000..01d1ac0
--- /dev/null
+++ b/y2024/constants/test_data/calibration_cam-3.json
@@ -0,0 +1,46 @@
+{
+  "node_name": "orin2",
+  "camera_number": 0,
+  "team_number": 7971,
+  "intrinsics": [
+    893.759521,
+    0,
+    645.470764,
+    0,
+    893.222351,
+    388.150269,
+    0,
+    0,
+    1
+  ],
+  "fixed_extrinsics": {
+    "data": [
+      0.0,
+      1.0,
+      0.0,
+      1.0,
+
+      0.0,
+      0.0,
+      -1.0,
+      0.0,
+
+      -1.0,
+      0.0,
+      0.0,
+      0.0,
+
+      0.0,
+      0.0,
+      0.0,
+      1.0
+    ]
+  },
+  "dist_coeffs": [
+    -0.44902,
+    0.248409,
+    -0.000537,
+    -0.000112,
+    -0.076989
+  ]
+}
diff --git a/y2024/constants/test_data/calibration_cam-4.json b/y2024/constants/test_data/calibration_cam-4.json
new file mode 100644
index 0000000..3360781
--- /dev/null
+++ b/y2024/constants/test_data/calibration_cam-4.json
@@ -0,0 +1,46 @@
+{
+  "node_name": "orin2",
+  "camera_number": 1,
+  "team_number": 7971,
+  "intrinsics": [
+    893.759521,
+    0,
+    645.470764,
+    0,
+    893.222351,
+    388.150269,
+    0,
+    0,
+    1
+  ],
+  "fixed_extrinsics": {
+    "data": [
+      -1.0,
+      0.0,
+      0.0,
+      1.0,
+
+      0.0,
+      0.0,
+      -1.0,
+      0.0,
+
+      0.0,
+      -1.0,
+      0.0,
+      0.0,
+
+      0.0,
+      0.0,
+      0.0,
+      1.0
+    ]
+  },
+  "dist_coeffs": [
+    -0.44902,
+    0.248409,
+    -0.000537,
+    -0.000112,
+    -0.076989
+  ]
+}
diff --git a/y2024/constants/test_data/test_team.json b/y2024/constants/test_data/test_team.json
index ec59033..b717224 100644
--- a/y2024/constants/test_data/test_team.json
+++ b/y2024/constants/test_data/test_team.json
@@ -8,7 +8,16 @@
 {
   "cameras": [
     {
-      "calibration": {% include 'y2024/constants/calib_files/calibration_orin-971-1_cam-24-00.json' %}
+      "calibration": {% include 'y2024/constants/test_data/calibration_cam-1.json' %}
+    },
+    {
+      "calibration": {% include 'y2024/constants/test_data/calibration_cam-2.json' %}
+    },
+    {
+      "calibration": {% include 'y2024/constants/test_data/calibration_cam-3.json' %}
+    },
+    {
+      "calibration": {% include 'y2024/constants/test_data/calibration_cam-4.json' %}
     }
   ],
   "robot": {
diff --git a/y2024/localizer/BUILD b/y2024/localizer/BUILD
new file mode 100644
index 0000000..38c8c9d
--- /dev/null
+++ b/y2024/localizer/BUILD
@@ -0,0 +1,149 @@
+load("//aos/flatbuffers:generate.bzl", "static_flatbuffer")
+load("//tools/build_rules:js.bzl", "ts_project")
+load("@com_github_google_flatbuffers//:typescript.bzl", "flatbuffer_ts_library")
+
+ts_project(
+    name = "localizer_plotter",
+    srcs = ["localizer_plotter.ts"],
+    target_compatible_with = ["@platforms//os:linux"],
+    visibility = ["//visibility:public"],
+    deps = [
+        "//aos/network/www:aos_plotter",
+        "//aos/network/www:colors",
+        "//aos/network/www:proxy",
+        "//frc971/wpilib:imu_plot_utils",
+    ],
+)
+
+static_flatbuffer(
+    name = "status_fbs",
+    srcs = [
+        "status.fbs",
+    ],
+    target_compatible_with = ["@platforms//os:linux"],
+    visibility = ["//visibility:public"],
+    deps = [
+        "//frc971/control_loops/drivetrain:drivetrain_status_fbs",
+        "//frc971/imu_reader:imu_failures_fbs",
+    ],
+)
+
+flatbuffer_ts_library(
+    name = "status_ts_fbs",
+    srcs = ["status.fbs"],
+    visibility = ["//visibility:public"],
+    deps = [
+        "//frc971/control_loops/drivetrain:drivetrain_status_ts_fbs",
+        "//frc971/imu_reader:imu_failures_ts_fbs",
+    ],
+)
+
+static_flatbuffer(
+    name = "visualization_fbs",
+    srcs = [
+        "visualization.fbs",
+    ],
+    visibility = ["//visibility:public"],
+    deps = [
+        ":status_fbs",
+    ],
+)
+
+flatbuffer_ts_library(
+    name = "visualization_ts_fbs",
+    srcs = ["visualization.fbs"],
+    visibility = ["//visibility:public"],
+    deps = [
+        ":status_ts_fbs",
+    ],
+)
+
+cc_library(
+    name = "localizer",
+    srcs = ["localizer.cc"],
+    hdrs = ["localizer.h"],
+    visibility = ["//visibility:public"],
+    deps = [
+        ":status_fbs",
+        ":visualization_fbs",
+        "//aos/containers:sized_array",
+        "//aos/events:event_loop",
+        "//aos/network:message_bridge_client_fbs",
+        "//frc971/constants:constants_sender_lib",
+        "//frc971/control_loops:pose",
+        "//frc971/control_loops/drivetrain:hybrid_ekf",
+        "//frc971/control_loops/drivetrain:improved_down_estimator",
+        "//frc971/control_loops/drivetrain:localizer_fbs",
+        "//frc971/control_loops/drivetrain/localization:localizer_output_fbs",
+        "//frc971/control_loops/drivetrain/localization:utils",
+        "//frc971/imu_reader:imu_watcher",
+        "//frc971/vision:target_map_fbs",
+        "//frc971/vision:target_map_utils",
+        "//y2024:constants",
+        "//y2024/constants:constants_fbs",
+    ],
+)
+
+cc_test(
+    name = "localizer_test",
+    srcs = ["localizer_test.cc"],
+    data = ["//y2024:aos_config"],
+    deps = [
+        ":localizer",
+        ":status_fbs",
+        "//aos/events:simulated_event_loop",
+        "//aos/events/logging:log_writer",
+        "//aos/testing:googletest",
+        "//frc971/control_loops/drivetrain:drivetrain_test_lib",
+        "//frc971/control_loops/drivetrain:localizer_fbs",
+        "//frc971/control_loops/drivetrain/localization:utils",
+        "//y2024/constants:simulated_constants_sender",
+        "//y2024/control_loops/drivetrain:drivetrain_base",
+    ],
+)
+
+cc_binary(
+    name = "localizer_main",
+    srcs = ["localizer_main.cc"],
+    visibility = ["//visibility:public"],
+    deps = [
+        ":localizer",
+        "//aos:init",
+        "//aos/events:shm_event_loop",
+        "//frc971/constants:constants_sender_lib",
+        "//y2024/control_loops/drivetrain:drivetrain_base",
+    ],
+)
+
+ts_project(
+    name = "corrections_plotter",
+    srcs = ["corrections_plotter.ts"],
+    target_compatible_with = ["@platforms//os:linux"],
+    visibility = ["//visibility:public"],
+    deps = [
+        ":visualization_ts_fbs",
+        "//aos/network/www:aos_plotter",
+        "//aos/network/www:colors",
+        "//aos/network/www:proxy",
+    ],
+)
+
+cc_binary(
+    name = "localizer_replay",
+    srcs = ["localizer_replay.cc"],
+    data = [
+        "//y2024:aos_config",
+    ],
+    target_compatible_with = ["@platforms//os:linux"],
+    deps = [
+        ":localizer",
+        "//aos:configuration",
+        "//aos:init",
+        "//aos:json_to_flatbuffer",
+        "//aos/events:simulated_event_loop",
+        "//aos/events/logging:log_reader",
+        "//aos/events/logging:log_writer",
+        "//aos/util:simulation_logger",
+        "//y2024/control_loops/drivetrain:drivetrain_base",
+    ],
+)
diff --git a/y2024/localizer/corrections_plotter.ts b/y2024/localizer/corrections_plotter.ts
new file mode 100644
index 0000000..dbfbda6
--- /dev/null
+++ b/y2024/localizer/corrections_plotter.ts
@@ -0,0 +1,177 @@
+import {ByteBuffer} from 'flatbuffers';
+import {AosPlotter} from '../../aos/network/www/aos_plotter';
+import {MessageHandler, TimestampedMessage} from '../../aos/network/www/aos_plotter';
+import {BLUE, BROWN, CYAN, GREEN, PINK, RED, WHITE} from '../../aos/network/www/colors';
+import {Connection} from '../../aos/network/www/proxy';
+import {Table} from '../../aos/network/www/reflection';
+import {Schema} from 'flatbuffers_reflection/reflection_generated';
+import {Visualization, TargetEstimateDebug} from './visualization_generated';
+
+
+const TIME = AosPlotter.TIME;
+// magenta, yellow, cyan, black
+const PI_COLORS = [[255, 0, 255], [255, 255, 0], [0, 255, 255], [0, 0, 0]];
+
+class VisionMessageHandler extends MessageHandler {
+  constructor(private readonly schema: Schema) {
+    super(schema);
+  }
+
+  private readScalar(table: Table, fieldName: string): number|BigInt|null {
+    return this.parser.readScalar(table, fieldName);
+  }
+
+  addMessage(data: Uint8Array, time: number): void {
+    const message = Visualization.getRootAsVisualization(new ByteBuffer(data));
+    for (let ii = 0; ii < message.targetsLength(); ++ii) {
+      const target = message.targets(ii);
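+      // Use the per-target image capture timestamp rather than the message
+      // send time, so points line up with when the frame was actually taken.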
+      const time = Number(target.imageMonotonicTimestampNs()) * 1e-9;
+      if (time == 0) {
+        console.log('Dropping message without populated time?');
+        continue;
+      }
+      const table = Table.getNamedTable(
+          target.bb, this.schema, 'y2024.localizer.TargetEstimateDebug', target.bb_pos);
+      this.messages.push(new TimestampedMessage(table, time));
+    }
+  }
+}
+
+export function plotVision(conn: Connection, element: Element): void {
+  const aosPlotter = new AosPlotter(conn);
+
+  const targets = [];
+  const targetLabels = [];
+  for (const orin of ['orin1', 'orin2']) {
+    for (const camera of ['camera0', 'camera1']) {
+      targetLabels.push(orin + ' ' + camera);
+      targets.push(aosPlotter.addRawMessageSource(
+          '/' + orin + '/' + camera, 'y2024.localizer.Visualization',
+          new VisionMessageHandler(
+              conn.getSchema('y2024.localizer.Visualization'))));
+    }
+  }
+  const localizerStatus = aosPlotter.addMessageSource(
+      '/localizer', 'y2024.localizer.Status');
+  const localizerOutput = aosPlotter.addMessageSource(
+      '/localizer', 'frc971.controls.LocalizerOutput');
+
+  const statsPlot = aosPlotter.addPlot(element);
+  statsPlot.plot.getAxisLabels().setTitle('Statistics');
+  statsPlot.plot.getAxisLabels().setXLabel(TIME);
+  statsPlot.plot.getAxisLabels().setYLabel('[count]');
+
+  statsPlot
+      .addMessageLine(localizerStatus, ['statistics[]', 'total_accepted'])
+      .setDrawLine(false)
+      .setColor(BLUE);
+  statsPlot
+      .addMessageLine(localizerStatus, ['statistics[]', 'total_candidates'])
+      .setDrawLine(false)
+      .setColor(RED);
+
+  const rejectionPlot = aosPlotter.addPlot(element);
+  rejectionPlot.plot.getAxisLabels().setTitle('Rejection Reasons');
+  rejectionPlot.plot.getAxisLabels().setXLabel(TIME);
+  rejectionPlot.plot.getAxisLabels().setYLabel('[enum]');
+
+  for (let ii = 0; ii < targets.length; ++ii) {
+    rejectionPlot.addMessageLine(targets[ii], ['rejection_reason'])
+        .setDrawLine(false)
+        .setColor(PI_COLORS[ii])
+        .setLabel(targetLabels[ii]);
+  }
+
+  const xPlot = aosPlotter.addPlot(element);
+  xPlot.plot.getAxisLabels().setTitle('X Position');
+  xPlot.plot.getAxisLabels().setXLabel(TIME);
+  xPlot.plot.getAxisLabels().setYLabel('[m]');
+
+  for (let ii = 0; ii < targets.length; ++ii) {
+    xPlot.addMessageLine(targets[ii], ['implied_robot_x'])
+        .setDrawLine(false)
+        .setColor(PI_COLORS[ii])
+        .setLabel(targetLabels[ii]);
+  }
+  xPlot.addMessageLine(localizerOutput, ['x'])
+      .setDrawLine(false)
+      .setColor(BLUE);
+
+  const correctionXPlot = aosPlotter.addPlot(element);
+  correctionXPlot.plot.getAxisLabels().setTitle('X Corrections');
+  correctionXPlot.plot.getAxisLabels().setXLabel(TIME);
+  correctionXPlot.plot.getAxisLabels().setYLabel('[m]');
+
+  for (let ii = 0; ii < targets.length; ++ii) {
+    correctionXPlot.addMessageLine(targets[ii], ['correction_x'])
+        .setDrawLine(false)
+        .setColor(PI_COLORS[ii])
+        .setLabel(targetLabels[ii]);
+  }
+
+  const yPlot = aosPlotter.addPlot(element);
+  yPlot.plot.getAxisLabels().setTitle('Y Position');
+  yPlot.plot.getAxisLabels().setXLabel(TIME);
+  yPlot.plot.getAxisLabels().setYLabel('[m]');
+
+  for (let ii = 0; ii < targets.length; ++ii) {
+    yPlot.addMessageLine(targets[ii], ['implied_robot_y'])
+        .setDrawLine(false)
+        .setColor(PI_COLORS[ii])
+        .setLabel(targetLabels[ii]);
+  }
+  yPlot.addMessageLine(localizerOutput, ['y'])
+      .setDrawLine(false)
+      .setColor(BLUE);
+
+  const correctionYPlot = aosPlotter.addPlot(element);
+  correctionYPlot.plot.getAxisLabels().setTitle('Y Corrections');
+  correctionYPlot.plot.getAxisLabels().setXLabel(TIME);
+  correctionYPlot.plot.getAxisLabels().setYLabel('[m]');
+
+  for (let ii = 0; ii < targets.length; ++ii) {
+    correctionYPlot.addMessageLine(targets[ii], ['correction_y'])
+        .setDrawLine(false)
+        .setColor(PI_COLORS[ii])
+        .setLabel(targetLabels[ii]);
+  }
+
+  const thetaPlot = aosPlotter.addPlot(element);
+  thetaPlot.plot.getAxisLabels().setTitle('Yaw');
+  thetaPlot.plot.getAxisLabels().setXLabel(TIME);
+  thetaPlot.plot.getAxisLabels().setYLabel('[rad]');
+
+  for (let ii = 0; ii < targets.length; ++ii) {
+    thetaPlot.addMessageLine(targets[ii], ['implied_robot_theta'])
+        .setDrawLine(false)
+        .setColor(PI_COLORS[ii])
+        .setLabel(targetLabels[ii]);
+  }
+  thetaPlot.addMessageLine(localizerOutput, ['theta'])
+      .setDrawLine(false)
+      .setColor(BLUE);
+
+  const aprilTagPlot = aosPlotter.addPlot(element);
+  aprilTagPlot.plot.getAxisLabels().setTitle('April Tag IDs');
+  aprilTagPlot.plot.getAxisLabels().setXLabel(TIME);
+  aprilTagPlot.plot.getAxisLabels().setYLabel('[id]');
+
+  for (let ii = 0; ii < targets.length; ++ii) {
+    aprilTagPlot.addMessageLine(targets[ii], ['april_tag'])
+        .setDrawLine(false)
+        .setColor(PI_COLORS[ii])
+        .setLabel(targetLabels[ii]);
+  }
+
+  const imageAgePlot = aosPlotter.addPlot(element);
+  imageAgePlot.plot.getAxisLabels().setTitle('Image Age');
+  imageAgePlot.plot.getAxisLabels().setXLabel(TIME);
+  imageAgePlot.plot.getAxisLabels().setYLabel('[sec]');
+
+  for (let ii = 0; ii < targets.length; ++ii) {
+    imageAgePlot.addMessageLine(targets[ii], ['image_age_sec'])
+        .setDrawLine(false)
+        .setColor(PI_COLORS[ii])
+        .setLabel(targetLabels[ii]);
+  }
+}
diff --git a/y2024/localizer/localizer.cc b/y2024/localizer/localizer.cc
new file mode 100644
index 0000000..30bb108
--- /dev/null
+++ b/y2024/localizer/localizer.cc
@@ -0,0 +1,605 @@
+#include "y2024/localizer/localizer.h"
+
+#include "gflags/gflags.h"
+
+#include "aos/containers/sized_array.h"
+#include "frc971/control_loops/drivetrain/localizer_generated.h"
+#include "frc971/control_loops/pose.h"
+#include "frc971/vision/target_map_utils.h"
+#include "y2024/constants.h"
+
+DEFINE_double(max_pose_error, 1e-6,
+              "Throw out target poses with a higher pose error than this");
+DEFINE_double(
+    max_pose_error_ratio, 0.4,
+    "Throw out target poses with a higher pose error ratio than this");
+DEFINE_double(distortion_noise_scalar, 1.0,
+              "Scale the target pose distortion factor by this when computing "
+              "the noise.");
+DEFINE_double(
+    max_implied_yaw_error, 3.0,
+    "Reject target poses that imply a robot yaw of more than this many degrees "
+    "off from our estimate.");
+DEFINE_double(
+    max_implied_teleop_yaw_error, 30.0,
+    "Reject target poses that imply a robot yaw of more than this many degrees "
+    "off from our estimate.");
+DEFINE_double(max_distance_to_target, 5.0,
+              "Reject target poses that have a 3d distance of more than this "
+              "many meters.");
+DEFINE_double(max_auto_image_robot_speed, 2.0,
+              "Reject target poses when the robot is travelling faster than "
+              "this speed in auto.");
+
+namespace y2024::localizer {
+namespace {
+constexpr std::array<std::string_view, Localizer::kNumCameras>
+    kDetectionChannels{"/orin1/camera0", "/orin1/camera1", "/orin2/camera0",
+                       "/orin2/camera1"};
+
+size_t CameraIndexForName(std::string_view name) {
+  for (size_t index = 0; index < kDetectionChannels.size(); ++index) {
+    if (name == kDetectionChannels.at(index)) {
+      return index;
+    }
+  }
+  LOG(FATAL) << "No camera channel named " << name;
+}
+
+std::map<uint64_t, Localizer::Transform> GetTargetLocations(
+    const Constants &constants) {
+  CHECK(constants.has_common());
+  CHECK(constants.common()->has_target_map());
+  CHECK(constants.common()->target_map()->has_target_poses());
+  std::map<uint64_t, Localizer::Transform> transforms;
+  for (const frc971::vision::TargetPoseFbs *target :
+       *constants.common()->target_map()->target_poses()) {
+    CHECK(target->has_id());
+    CHECK(target->has_position());
+    CHECK(target->has_orientation());
+    CHECK_EQ(0u, transforms.count(target->id()));
+    transforms[target->id()] = PoseToTransform(target);
+  }
+  return transforms;
+}
+}  // namespace
+
+std::array<Localizer::CameraState, Localizer::kNumCameras>
+Localizer::MakeCameras(const Constants &constants, aos::EventLoop *event_loop) {
+  CHECK(constants.has_cameras());
+  std::array<Localizer::CameraState, Localizer::kNumCameras> cameras;
+  for (const CameraConfiguration *camera : *constants.cameras()) {
+    CHECK(camera->has_calibration());
+    const frc971::vision::calibration::CameraCalibration *calibration =
+        camera->calibration();
+    CHECK(!calibration->has_turret_extrinsics())
+        << "The 2024 robot does not have cameras on a turret.";
+    CHECK(calibration->has_node_name());
+    const std::string channel_name =
+        absl::StrFormat("/%s/camera%d", calibration->node_name()->string_view(),
+                        calibration->camera_number());
+    const size_t index = CameraIndexForName(channel_name);
+    // We default-construct the extrinsics matrix to all-zeros; use that to
+    // sanity-check whether we have populated the matrix yet or not.
+    CHECK(cameras.at(index).extrinsics.norm() == 0)
+        << "Got multiple calibrations for "
+        << calibration->node_name()->string_view();
+    CHECK(calibration->has_fixed_extrinsics());
+    cameras.at(index).extrinsics =
+        frc971::control_loops::drivetrain::FlatbufferToTransformationMatrix(
+            *calibration->fixed_extrinsics());
+    cameras.at(index).debug_sender =
+        event_loop->MakeSender<VisualizationStatic>(channel_name);
+  }
+  for (const CameraState &camera : cameras) {
+    CHECK(camera.extrinsics.norm() != 0) << "Missing a camera calibration.";
+  }
+  return cameras;
+}
+
+Localizer::Localizer(aos::EventLoop *event_loop)
+    : event_loop_(event_loop),
+      constants_fetcher_(event_loop),
+      dt_config_(
+          frc971::control_loops::drivetrain::DrivetrainConfig<double>::
+              FromFlatbuffer(*CHECK_NOTNULL(
+                  constants_fetcher_.constants().common()->drivetrain()))),
+      cameras_(MakeCameras(constants_fetcher_.constants(), event_loop)),
+      target_poses_(GetTargetLocations(constants_fetcher_.constants())),
+      down_estimator_(dt_config_),
+      ekf_(dt_config_),
+      observations_(&ekf_),
+      imu_watcher_(event_loop, dt_config_,
+                   y2024::constants::Values::DrivetrainEncoderToMeters(1),
+                   std::bind(&Localizer::HandleImu, this, std::placeholders::_1,
+                             std::placeholders::_2, std::placeholders::_3,
+                             std::placeholders::_4, std::placeholders::_5),
+                   frc971::controls::ImuWatcher::TimestampSource::kPi),
+      utils_(event_loop),
+      status_sender_(event_loop->MakeSender<Status>("/localizer")),
+      output_sender_(event_loop->MakeSender<frc971::controls::LocalizerOutput>(
+          "/localizer")),
+      server_statistics_fetcher_(
+          event_loop_->MakeFetcher<aos::message_bridge::ServerStatistics>(
+              "/aos")),
+      client_statistics_fetcher_(
+          event_loop_->MakeFetcher<aos::message_bridge::ClientStatistics>(
+              "/aos")) {
+  if (dt_config_.is_simulated) {
+    down_estimator_.assume_perfect_gravity();
+  }
+
+  for (size_t camera_index = 0; camera_index < kNumCameras; ++camera_index) {
+    const std::string_view channel_name = kDetectionChannels.at(camera_index);
+    const aos::Channel *const channel = CHECK_NOTNULL(
+        event_loop->GetChannel<frc971::vision::TargetMap>(channel_name));
+    event_loop->MakeWatcher(
+        channel_name, [this, channel,
+                       camera_index](const frc971::vision::TargetMap &targets) {
+          CHECK(targets.has_target_poses());
+          CHECK(targets.has_monotonic_timestamp_ns());
+          const std::optional<aos::monotonic_clock::duration> clock_offset =
+              utils_.ClockOffset(channel->source_node()->string_view());
+          if (!clock_offset.has_value()) {
+            VLOG(1) << "Rejecting image due to disconnected message bridge at "
+                    << event_loop_->monotonic_now();
+            cameras_.at(camera_index)
+                .rejection_counter.IncrementError(
+                    RejectionReason::MESSAGE_BRIDGE_DISCONNECTED);
+            return;
+          }
+          const aos::monotonic_clock::time_point orin_capture_time(
+              std::chrono::nanoseconds(targets.monotonic_timestamp_ns()) -
+              clock_offset.value());
+          if (orin_capture_time > event_loop_->context().monotonic_event_time) {
+            VLOG(1) << "Rejecting image due to being from future at "
+                    << event_loop_->monotonic_now() << " with timestamp of "
+                    << orin_capture_time << " and event time pf "
+                    << event_loop_->context().monotonic_event_time;
+            cameras_.at(camera_index)
+                .rejection_counter.IncrementError(
+                    RejectionReason::IMAGE_FROM_FUTURE);
+            return;
+          }
+          auto debug_builder =
+              cameras_.at(camera_index).debug_sender.MakeStaticBuilder();
+          auto target_debug_list = debug_builder->add_targets();
+          // The static_length should already be 20.
+          CHECK(target_debug_list->reserve(20));
+          for (const frc971::vision::TargetPoseFbs *target :
+               *targets.target_poses()) {
+            VLOG(1) << "Handling target from " << camera_index;
+            HandleTarget(camera_index, orin_capture_time, *target,
+                         target_debug_list->emplace_back());
+          }
+          StatisticsForCamera(cameras_.at(camera_index),
+                              debug_builder->add_statistics());
+          debug_builder.CheckOk(debug_builder.Send());
+          SendStatus();
+        });
+  }
+
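+  // Publish the localizer output at a fixed 50 Hz (20 ms), independent of
+  // the incoming camera and IMU message rates.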
+  event_loop_->AddPhasedLoop([this](int) { SendOutput(); },
+                             std::chrono::milliseconds(20));
+
+  event_loop_->MakeWatcher(
+      "/drivetrain",
+      [this](
+          const frc971::control_loops::drivetrain::LocalizerControl &control) {
+        // This is triggered whenever we need to force the X/Y/(maybe theta)
+        // position of the robot to a particular point---e.g., during pre-match
+        // setup, or when commanded by a button on the driverstation.
+
+        // For some forms of reset, we choose to keep our current yaw estimate
+        // rather than overriding it from the control message.
+        const double theta = control.keep_current_theta()
+                                 ? ekf_.X_hat(StateIdx::kTheta)
+                                 : control.theta();
+        // Encoder values need to be reset based on the current values to ensure
+        // that we don't get weird corrections on the next encoder update.
+        const double left_encoder = ekf_.X_hat(StateIdx::kLeftEncoder);
+        const double right_encoder = ekf_.X_hat(StateIdx::kRightEncoder);
+        ekf_.ResetInitialState(
+            t_,
+            (HybridEkf::State() << control.x(), control.y(), theta,
+             left_encoder, 0, right_encoder, 0, 0, 0, 0, 0, 0)
+                .finished(),
+            ekf_.P());
+      });
+
+  ekf_.set_ignore_accel(true);
+  // Priority should be lower than the imu reading process, but non-zero.
+  event_loop->SetRuntimeRealtimePriority(10);
+  event_loop->OnRun([this, event_loop]() {
+    ekf_.ResetInitialState(event_loop->monotonic_now(),
+                           HybridEkf::State::Zero(), ekf_.P());
+  });
+}
+
+void Localizer::HandleImu(aos::monotonic_clock::time_point /*sample_time_pico*/,
+                          aos::monotonic_clock::time_point sample_time_orin,
+                          std::optional<Eigen::Vector2d> /*encoders*/,
+                          Eigen::Vector3d gyro, Eigen::Vector3d accel) {
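+  // Note that the encoders argument forwarded from the IMU board is ignored;
+  // per this change, we read the encoders from the drivetrain Position
+  // message instead.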
+  std::optional<Eigen::Vector2d> encoders = utils_.Encoders(sample_time_orin);
+  last_encoder_readings_ = encoders;
+  // Ignore invalid readings; the HybridEkf will handle it reasonably.
+  if (!encoders.has_value()) {
+    return;
+  }
+  if (t_ == aos::monotonic_clock::min_time) {
+    t_ = sample_time_orin;
+  }
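+  // If we went a long time (10 nominal IMU periods) without a sample, assume
+  // the clock jumped and reset our time base rather than integrating over a
+  // bogus dt.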
+  if (t_ + 10 * frc971::controls::ImuWatcher::kNominalDt < sample_time_orin) {
+    t_ = sample_time_orin;
+    ++clock_resets_;
+  }
+  const aos::monotonic_clock::duration dt = sample_time_orin - t_;
+  t_ = sample_time_orin;
+  // We don't actually use the down estimator currently, but it's really
+  // convenient for debugging.
+  down_estimator_.Predict(gyro, accel, dt);
+  const double yaw_rate = (dt_config_.imu_transform * gyro)(2);
+  ekf_.UpdateEncodersAndGyro(encoders.value()(0), encoders.value()(1), yaw_rate,
+                             utils_.VoltageOrZero(sample_time_orin), accel, t_);
+  SendStatus();
+}
+
+void Localizer::RejectImage(int camera_index, RejectionReason reason,
+                            TargetEstimateDebugStatic *builder) {
+  if (builder != nullptr) {
+    builder->set_accepted(false);
+    builder->set_rejection_reason(reason);
+  }
+  cameras_.at(camera_index).rejection_counter.IncrementError(reason);
+}
+
+// Only use AprilTags present in the target map; this method has also been
+// used (in the past) for ignoring AprilTags that tend to produce problematic
+// readings.
+bool Localizer::UseAprilTag(uint64_t target_id) {
+  return target_poses_.count(target_id) != 0;
+}
+
+namespace {
+// Converts a camera transformation matrix from treating the +Z axis from
+// pointing straight out the lens to having the +X pointing straight out the
+// lens, with +Z going "up" (i.e., -Y in the normal convention) and +Y going
+// leftwards (i.e., -X in the normal convention).
+Localizer::Transform ZToXCamera(const Localizer::Transform &transform) {
+  return transform *
+         Eigen::Matrix4d{
+             {0, -1, 0, 0}, {0, 0, -1, 0}, {1, 0, 0, 0}, {0, 0, 0, 1}};
+}
+}  // namespace
+
+void Localizer::HandleTarget(
+    int camera_index, const aos::monotonic_clock::time_point capture_time,
+    const frc971::vision::TargetPoseFbs &target,
+    TargetEstimateDebugStatic *debug_builder) {
+  ++total_candidate_targets_;
+  ++cameras_.at(camera_index).total_candidate_targets;
+  const uint64_t target_id = target.id();
+
+  if (debug_builder == nullptr) {
+    AOS_LOG(ERROR, "Dropped message from debug vector.");
+  } else {
+    debug_builder->set_camera(camera_index);
+    debug_builder->set_image_age_sec(aos::time::DurationInSeconds(
+        event_loop_->monotonic_now() - capture_time));
+    debug_builder->set_image_monotonic_timestamp_ns(
+        std::chrono::duration_cast<std::chrono::nanoseconds>(
+            capture_time.time_since_epoch())
+            .count());
+    debug_builder->set_april_tag(target_id);
+  }
+  VLOG(2) << aos::FlatbufferToJson(&target);
+  if (!UseAprilTag(target_id)) {
+    VLOG(1) << "Rejecting target due to invalid ID " << target_id;
+    RejectImage(camera_index, RejectionReason::NO_SUCH_TARGET, debug_builder);
+    return;
+  }
+
+  const Transform &H_field_target = target_poses_.at(target_id);
+  const Transform &H_robot_camera = cameras_.at(camera_index).extrinsics;
+
+  const Transform H_camera_target = PoseToTransform(&target);
+
+  // In order to do the EKF correction, we determine the expected state based
+  // on the state at the time the image was captured; however, we insert the
+  // correction update itself at the current time. This is technically not
+  // quite correct, but saves substantial CPU usage & code complexity by
+  // making it so that we don't have to constantly rewind the entire EKF
+  // history.
+  const std::optional<State> state_at_capture =
+      ekf_.LastStateBeforeTime(capture_time);
+
+  if (!state_at_capture.has_value()) {
+    VLOG(1) << "Rejecting image due to being too old.";
+    return RejectImage(camera_index, RejectionReason::IMAGE_TOO_OLD,
+                       debug_builder);
+  } else if (target.pose_error() > FLAGS_max_pose_error) {
+    VLOG(1) << "Rejecting target due to high pose error "
+            << target.pose_error();
+    return RejectImage(camera_index, RejectionReason::HIGH_POSE_ERROR,
+                       debug_builder);
+  } else if (target.pose_error_ratio() > FLAGS_max_pose_error_ratio) {
+    VLOG(1) << "Rejecting target due to high pose error ratio "
+            << target.pose_error_ratio();
+    return RejectImage(camera_index, RejectionReason::HIGH_POSE_ERROR_RATIO,
+                       debug_builder);
+  }
+
+  Corrector corrector(state_at_capture.value(), H_field_target, H_robot_camera,
+                      H_camera_target);
+  const double distance_to_target = corrector.observed()(Corrector::kDistance);
+
+  // Noise standard deviations for heading, distance, and skew at 1 meter.
+  Eigen::Matrix<double, 3, 1> noises(0.01, 0.05, 0.05);
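+  // Scale the distance and skew noise quadratically with distance to the
+  // target; the heading noise is left unscaled.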
+  const double distance_noise_scalar = std::pow(distance_to_target, 2.0);
+  noises(Corrector::kDistance) *= distance_noise_scalar;
+  noises(Corrector::kSkew) *= distance_noise_scalar;
+  // TODO(james): This is leftover from last year; figure out if we want it.
+  // Scale noise by the distortion factor for this detection
+  noises *= (1.0 + FLAGS_distortion_noise_scalar * target.distortion_factor());
+
+  Eigen::Matrix3d R = Eigen::Matrix3d::Zero();
+  R.diagonal() = noises.cwiseAbs2();
+  if (debug_builder != nullptr) {
+    const Eigen::Vector3d camera_position =
+        corrector.observed_camera_pose().abs_pos();
+    debug_builder->set_camera_x(camera_position.x());
+    debug_builder->set_camera_y(camera_position.y());
+    debug_builder->set_camera_theta(
+        corrector.observed_camera_pose().abs_theta());
+    // Calculate the camera-to-robot transformation matrix ignoring the
+    // pitch/roll of the camera.
+    const Transform H_camera_robot_stripped =
+        frc971::control_loops::Pose(ZToXCamera(H_robot_camera))
+            .AsTransformationMatrix()
+            .inverse();
+    const frc971::control_loops::Pose measured_pose(
+        corrector.observed_camera_pose().AsTransformationMatrix() *
+        H_camera_robot_stripped);
+    debug_builder->set_implied_robot_x(measured_pose.rel_pos().x());
+    debug_builder->set_implied_robot_y(measured_pose.rel_pos().y());
+    debug_builder->set_implied_robot_theta(measured_pose.rel_theta());
+
+    Corrector::PopulateMeasurement(corrector.expected(),
+                                   debug_builder->add_expected_observation());
+    Corrector::PopulateMeasurement(corrector.observed(),
+                                   debug_builder->add_actual_observation());
+    Corrector::PopulateMeasurement(noises, debug_builder->add_modeled_noise());
+  }
+
+  const double camera_yaw_error =
+      aos::math::NormalizeAngle(corrector.expected_camera_pose().abs_theta() -
+                                corrector.observed_camera_pose().abs_theta());
+  constexpr double kDegToRad = M_PI / 180.0;
+
+  const double robot_speed =
+      (state_at_capture.value()(StateIdx::kLeftVelocity) +
+       state_at_capture.value()(StateIdx::kRightVelocity)) /
+      2.0;
+  const double yaw_threshold =
+      (utils_.MaybeInAutonomous() ? FLAGS_max_implied_yaw_error
+                                  : FLAGS_max_implied_teleop_yaw_error) *
+      kDegToRad;
+
+  if (utils_.MaybeInAutonomous() &&
+      (std::abs(robot_speed) > FLAGS_max_auto_image_robot_speed)) {
+    return RejectImage(camera_index, RejectionReason::ROBOT_TOO_FAST,
+                       debug_builder);
+  } else if (std::abs(camera_yaw_error) > yaw_threshold) {
+    return RejectImage(camera_index, RejectionReason::HIGH_IMPLIED_YAW_ERROR,
+                       debug_builder);
+  } else if (distance_to_target > FLAGS_max_distance_to_target) {
+    return RejectImage(camera_index, RejectionReason::HIGH_DISTANCE_TO_TARGET,
+                       debug_builder);
+  }
+
+  const Input U = ekf_.MostRecentInput();
+  VLOG(1) << "previous state " << ekf_.X_hat().topRows<3>().transpose();
+  const State prior_state = ekf_.X_hat();
+  // For the correction step, instead of passing in the measurement directly,
+  // we pass in (0, 0, 0) as the measurement and then for the expected
+  // measurement (Zhat) we calculate the error between the pose implied by
+  // the camera measurement and the current estimate of the
+  // pose. This doesn't affect any of the math, it just makes the code a bit
+  // more convenient to write given the Correct() interface we already have.
+  observations_.CorrectKnownH(
+      Eigen::Vector3d::Zero(), &U,
+      Corrector(state_at_capture.value(), H_field_target, H_robot_camera,
+                H_camera_target),
+      R, t_);
+  ++total_accepted_targets_;
+  ++cameras_.at(camera_index).total_accepted_targets;
+  VLOG(1) << "new state " << ekf_.X_hat().topRows<3>().transpose();
+  if (debug_builder != nullptr) {
+    debug_builder->set_correction_x(ekf_.X_hat()(StateIdx::kX) -
+                                    prior_state(StateIdx::kX));
+    debug_builder->set_correction_y(ekf_.X_hat()(StateIdx::kY) -
+                                    prior_state(StateIdx::kY));
+    debug_builder->set_correction_theta(ekf_.X_hat()(StateIdx::kTheta) -
+                                        prior_state(StateIdx::kTheta));
+    debug_builder->set_accepted(true);
+  }
+}
+
+void Localizer::SendOutput() {
+  auto builder = output_sender_.MakeBuilder();
+  frc971::controls::LocalizerOutput::Builder output_builder =
+      builder.MakeBuilder<frc971::controls::LocalizerOutput>();
+  output_builder.add_monotonic_timestamp_ns(
+      std::chrono::duration_cast<std::chrono::nanoseconds>(
+          event_loop_->context().monotonic_event_time.time_since_epoch())
+          .count());
+  output_builder.add_x(ekf_.X_hat(StateIdx::kX));
+  output_builder.add_y(ekf_.X_hat(StateIdx::kY));
+  output_builder.add_theta(ekf_.X_hat(StateIdx::kTheta));
+  output_builder.add_zeroed(imu_watcher_.zeroer().Zeroed());
+  output_builder.add_image_accepted_count(total_accepted_targets_);
+  const Eigen::Quaterniond &orientation =
+      Eigen::AngleAxis<double>(ekf_.X_hat(StateIdx::kTheta),
+                               Eigen::Vector3d::UnitZ()) *
+      down_estimator_.X_hat();
+  frc971::controls::Quaternion quaternion;
+  quaternion.mutate_x(orientation.x());
+  quaternion.mutate_y(orientation.y());
+  quaternion.mutate_z(orientation.z());
+  quaternion.mutate_w(orientation.w());
+  output_builder.add_orientation(&quaternion);
+  server_statistics_fetcher_.Fetch();
+  client_statistics_fetcher_.Fetch();
+
+  bool orins_connected = true;
+
+  if (server_statistics_fetcher_.get()) {
+    for (const auto *orin_server_status :
+         *server_statistics_fetcher_->connections()) {
+      if (orin_server_status->state() ==
+          aos::message_bridge::State::DISCONNECTED) {
+        orins_connected = false;
+      }
+    }
+  }
+
+  if (client_statistics_fetcher_.get()) {
+    for (const auto *pi_client_status :
+         *client_statistics_fetcher_->connections()) {
+      if (pi_client_status->state() ==
+          aos::message_bridge::State::DISCONNECTED) {
+        orins_connected = false;
+      }
+    }
+  }
+
+  // The output message is year-agnostic, and retains "pi" naming for histrocial
+  // reasons.
+  output_builder.add_all_pis_connected(orins_connected);
+  builder.CheckOk(builder.Send(output_builder.Finish()));
+}
+
+flatbuffers::Offset<frc971::control_loops::drivetrain::LocalizerState>
+Localizer::PopulateState(const State &X_hat,
+                         flatbuffers::FlatBufferBuilder *fbb) {
+  frc971::control_loops::drivetrain::LocalizerState::Builder builder(*fbb);
+  builder.add_x(X_hat(StateIdx::kX));
+  builder.add_y(X_hat(StateIdx::kY));
+  builder.add_theta(X_hat(StateIdx::kTheta));
+  builder.add_left_velocity(X_hat(StateIdx::kLeftVelocity));
+  builder.add_right_velocity(X_hat(StateIdx::kRightVelocity));
+  builder.add_left_encoder(X_hat(StateIdx::kLeftEncoder));
+  builder.add_right_encoder(X_hat(StateIdx::kRightEncoder));
+  builder.add_left_voltage_error(X_hat(StateIdx::kLeftVoltageError));
+  builder.add_right_voltage_error(X_hat(StateIdx::kRightVoltageError));
+  builder.add_angular_error(X_hat(StateIdx::kAngularError));
+  builder.add_longitudinal_velocity_offset(
+      X_hat(StateIdx::kLongitudinalVelocityOffset));
+  builder.add_lateral_velocity(X_hat(StateIdx::kLateralVelocity));
+  return builder.Finish();
+}
+
+flatbuffers::Offset<ImuStatus> Localizer::PopulateImu(
+    flatbuffers::FlatBufferBuilder *fbb) const {
+  const auto zeroer_offset = imu_watcher_.zeroer().PopulateStatus(fbb);
+  const auto failures_offset = imu_watcher_.PopulateImuFailures(fbb);
+  ImuStatus::Builder builder(*fbb);
+  builder.add_zeroed(imu_watcher_.zeroer().Zeroed());
+  builder.add_faulted_zero(imu_watcher_.zeroer().Faulted());
+  builder.add_zeroing(zeroer_offset);
+  if (imu_watcher_.pico_offset().has_value()) {
+    builder.add_board_offset_ns(imu_watcher_.pico_offset().value().count());
+    builder.add_board_offset_error_ns(imu_watcher_.pico_offset_error().count());
+  }
+  if (last_encoder_readings_.has_value()) {
+    builder.add_left_encoder(last_encoder_readings_.value()(0));
+    builder.add_right_encoder(last_encoder_readings_.value()(1));
+  }
+  builder.add_imu_failures(failures_offset);
+  return builder.Finish();
+}
+
+flatbuffers::Offset<CumulativeStatistics> Localizer::StatisticsForCamera(
+    const CameraState &camera, flatbuffers::FlatBufferBuilder *fbb) {
+  const auto counts_offset = camera.rejection_counter.PopulateCounts(fbb);
+  CumulativeStatistics::Builder stats_builder(*fbb);
+  stats_builder.add_total_accepted(camera.total_accepted_targets);
+  stats_builder.add_total_candidates(camera.total_candidate_targets);
+  stats_builder.add_rejection_reasons(counts_offset);
+  return stats_builder.Finish();
+}
+
+void Localizer::StatisticsForCamera(const CameraState &camera,
+                                    CumulativeStatisticsStatic *builder) {
+  camera.rejection_counter.PopulateCountsStaticFbs(
+      builder->add_rejection_reasons());
+  builder->set_total_accepted(camera.total_accepted_targets);
+  builder->set_total_candidates(camera.total_candidate_targets);
+}
+
+void Localizer::SendStatus() {
+  auto builder = status_sender_.MakeBuilder();
+  std::array<flatbuffers::Offset<CumulativeStatistics>, kNumCameras>
+      stats_offsets;
+  for (size_t ii = 0; ii < kNumCameras; ++ii) {
+    stats_offsets.at(ii) = StatisticsForCamera(cameras_.at(ii), builder.fbb());
+  }
+  auto stats_offset =
+      builder.fbb()->CreateVector(stats_offsets.data(), stats_offsets.size());
+  auto down_estimator_offset =
+      down_estimator_.PopulateStatus(builder.fbb(), t_);
+  auto imu_offset = PopulateImu(builder.fbb());
+  auto state_offset = PopulateState(ekf_.X_hat(), builder.fbb());
+  Status::Builder status_builder = builder.MakeBuilder<Status>();
+  status_builder.add_state(state_offset);
+  status_builder.add_down_estimator(down_estimator_offset);
+  status_builder.add_imu(imu_offset);
+  status_builder.add_statistics(stats_offset);
+  builder.CheckOk(builder.Send(status_builder.Finish()));
+}
+
+Eigen::Vector3d Localizer::Corrector::HeadingDistanceSkew(
+    const Pose &relative_pose) {
+  const double heading = relative_pose.heading();
+  const double distance = relative_pose.xy_norm();
+  const double skew =
+      ::aos::math::NormalizeAngle(relative_pose.rel_theta() - heading);
+  return {heading, distance, skew};
+}
+
+Localizer::Corrector Localizer::Corrector::CalculateHeadingDistanceSkewH(
+    const State &state_at_capture, const Transform &H_field_target,
+    const Transform &H_robot_camera, const Transform &H_camera_target) {
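+  // The camera's pose on the field is the target's field pose composed with
+  // the inverse of the observed camera-to-target transform.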
+  const Transform H_field_camera = H_field_target * H_camera_target.inverse();
+  const Pose expected_robot_pose(
+      {state_at_capture(StateIdx::kX), state_at_capture(StateIdx::kY), 0.0},
+      state_at_capture(StateIdx::kTheta));
+  // Observed position on the field, reduced to just the 2-D pose.
+  const Pose observed_camera(ZToXCamera(H_field_camera));
+  const Pose expected_camera(expected_robot_pose.AsTransformationMatrix() *
+                             ZToXCamera(H_robot_camera));
+  const Pose nominal_target(ZToXCamera(H_field_target));
+  const Pose observed_target = nominal_target.Rebase(&observed_camera);
+  const Pose expected_target = nominal_target.Rebase(&expected_camera);
+  return Localizer::Corrector{
+      expected_robot_pose,
+      observed_camera,
+      expected_camera,
+      HeadingDistanceSkew(expected_target),
+      HeadingDistanceSkew(observed_target),
+      frc971::control_loops::drivetrain::HMatrixForCameraHeadingDistanceSkew(
+          nominal_target, observed_camera)};
+}
+
+Localizer::Corrector::Corrector(const State &state_at_capture,
+                                const Transform &H_field_target,
+                                const Transform &H_robot_camera,
+                                const Transform &H_camera_target)
+    : Corrector(CalculateHeadingDistanceSkewH(
+          state_at_capture, H_field_target, H_robot_camera, H_camera_target)) {}
+
+Localizer::Output Localizer::Corrector::H(const State &, const Input &) {
+  return expected_ - observed_;
+}
+
+}  // namespace y2024::localizer
diff --git a/y2024/localizer/localizer.h b/y2024/localizer/localizer.h
new file mode 100644
index 0000000..8235f13
--- /dev/null
+++ b/y2024/localizer/localizer.h
@@ -0,0 +1,158 @@
+#ifndef Y2024_LOCALIZER_LOCALIZER_H_
+#define Y2024_LOCALIZER_LOCALIZER_H_
+
+#include <array>
+#include <map>
+
+#include "aos/network/message_bridge_client_generated.h"
+#include "aos/network/message_bridge_server_generated.h"
+#include "frc971/constants/constants_sender_lib.h"
+#include "frc971/control_loops/drivetrain/hybrid_ekf.h"
+#include "frc971/control_loops/drivetrain/improved_down_estimator.h"
+#include "frc971/control_loops/drivetrain/localization/localizer_output_generated.h"
+#include "frc971/control_loops/drivetrain/localization/utils.h"
+#include "frc971/imu_reader/imu_watcher.h"
+#include "frc971/vision/target_map_generated.h"
+#include "y2024/constants/constants_generated.h"
+#include "y2024/localizer/status_generated.h"
+#include "y2024/localizer/visualization_static.h"
+
+namespace y2024::localizer {
+
+class Localizer {
+ public:
+  static constexpr size_t kNumCameras = 4;
+  using Pose = frc971::control_loops::Pose;
+  typedef Eigen::Matrix<double, 4, 4> Transform;
+  typedef frc971::control_loops::drivetrain::HybridEkf<double> HybridEkf;
+  typedef HybridEkf::State State;
+  typedef HybridEkf::Output Output;
+  typedef HybridEkf::Input Input;
+  typedef HybridEkf::StateIdx StateIdx;
+  Localizer(aos::EventLoop *event_loop);
+
+ private:
+  class Corrector : public HybridEkf::ExpectedObservationFunctor {
+   public:
+    // Indices used for each of the members of the output vector for this
+    // Corrector.
+    enum OutputIdx {
+      kHeading = 0,
+      kDistance = 1,
+      kSkew = 2,
+    };
+    Corrector(const State &state_at_capture, const Transform &H_field_target,
+              const Transform &H_robot_camera,
+              const Transform &H_camera_target);
+
+    using HMatrix = Eigen::Matrix<double, Localizer::HybridEkf::kNOutputs,
+                                  Localizer::HybridEkf::kNStates>;
+
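+    // Returns expected_ - observed_. Because the caller hands CorrectKnownH()
+    // an all-zero "measurement", this difference is what drives the
+    // correction.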
+    Output H(const State &, const Input &) final;
+    HMatrix DHDX(const State &) final { return H_; }
+    const Eigen::Vector3d &observed() const { return observed_; }
+    const Eigen::Vector3d &expected() const { return expected_; }
+    const Pose &expected_robot_pose() const { return expected_robot_pose_; }
+    const Pose &expected_camera_pose() const { return expected_camera_; }
+    const Pose &observed_camera_pose() const { return observed_camera_; }
+
+    static Eigen::Vector3d HeadingDistanceSkew(const Pose &relative_pose);
+
+    static Corrector CalculateHeadingDistanceSkewH(
+        const State &state_at_capture, const Transform &H_field_target,
+        const Transform &H_robot_camera, const Transform &H_camera_target);
+
+    static void PopulateMeasurement(const Eigen::Vector3d &vector,
+                                    MeasurementStatic *builder) {
+      builder->set_heading(vector(kHeading));
+      builder->set_distance(vector(kDistance));
+      builder->set_skew(vector(kSkew));
+    }
+
+   private:
+    Corrector(const Pose &expected_robot_pose, const Pose &observed_camera,
+              const Pose &expected_camera, const Eigen::Vector3d &expected,
+              const Eigen::Vector3d &observed, const HMatrix &H)
+        : expected_robot_pose_(expected_robot_pose),
+          observed_camera_(observed_camera),
+          expected_camera_(expected_camera),
+          expected_(expected),
+          observed_(observed),
+          H_(H) {}
+    // For debugging.
+    const Pose expected_robot_pose_;
+    const Pose observed_camera_;
+    const Pose expected_camera_;
+    // Actually used.
+    const Eigen::Vector3d expected_;
+    const Eigen::Vector3d observed_;
+    const HMatrix H_;
+  };
+
+  struct CameraState {
+    aos::Sender<VisualizationStatic> debug_sender;
+    Transform extrinsics = Transform::Zero();
+    aos::util::ArrayErrorCounter<RejectionReason, RejectionCount>
+        rejection_counter;
+    size_t total_candidate_targets = 0;
+    size_t total_accepted_targets = 0;
+  };
+
+  static std::array<CameraState, kNumCameras> MakeCameras(
+      const Constants &constants, aos::EventLoop *event_loop);
+  void HandleTarget(int camera_index,
+                    const aos::monotonic_clock::time_point capture_time,
+                    const frc971::vision::TargetPoseFbs &target,
+                    TargetEstimateDebugStatic *debug_builder);
+  void HandleImu(aos::monotonic_clock::time_point sample_time_pico,
+                 aos::monotonic_clock::time_point sample_time_pi,
+                 std::optional<Eigen::Vector2d> encoders, Eigen::Vector3d gyro,
+                 Eigen::Vector3d accel);
+  void RejectImage(int camera_index, RejectionReason reason,
+                   TargetEstimateDebugStatic *builder);
+
+  void SendOutput();
+  static flatbuffers::Offset<frc971::control_loops::drivetrain::LocalizerState>
+  PopulateState(const State &X_hat, flatbuffers::FlatBufferBuilder *fbb);
+  flatbuffers::Offset<ImuStatus> PopulateImu(
+      flatbuffers::FlatBufferBuilder *fbb) const;
+  void SendStatus();
+  static flatbuffers::Offset<CumulativeStatistics> StatisticsForCamera(
+      const CameraState &camera, flatbuffers::FlatBufferBuilder *fbb);
+  static void StatisticsForCamera(const CameraState &camera,
+                                  CumulativeStatisticsStatic *builder);
+
+  bool UseAprilTag(uint64_t target_id);
+
+  aos::EventLoop *const event_loop_;
+  frc971::constants::ConstantsFetcher<Constants> constants_fetcher_;
+  const frc971::control_loops::drivetrain::DrivetrainConfig<double> dt_config_;
+  std::array<CameraState, kNumCameras> cameras_;
+  const std::array<Transform, kNumCameras> camera_extrinsics_;
+  const std::map<uint64_t, Transform> target_poses_;
+
+  frc971::control_loops::drivetrain::DrivetrainUkf down_estimator_;
+  HybridEkf ekf_;
+  HybridEkf::ExpectedObservationAllocator<Corrector> observations_;
+
+  frc971::controls::ImuWatcher imu_watcher_;
+  frc971::control_loops::drivetrain::LocalizationUtils utils_;
+
+  aos::Sender<Status> status_sender_;
+  aos::Sender<frc971::controls::LocalizerOutput> output_sender_;
+  aos::monotonic_clock::time_point t_ = aos::monotonic_clock::min_time;
+  size_t clock_resets_ = 0;
+
+  size_t total_candidate_targets_ = 0;
+  size_t total_accepted_targets_ = 0;
+
+  // For the status message.
+  std::optional<Eigen::Vector2d> last_encoder_readings_;
+
+  aos::Fetcher<aos::message_bridge::ServerStatistics>
+      server_statistics_fetcher_;
+  aos::Fetcher<aos::message_bridge::ClientStatistics>
+      client_statistics_fetcher_;
+};
+}  // namespace y2024::localizer
+#endif  // Y2024_LOCALIZER_LOCALIZER_H_
diff --git a/y2024/localizer/localizer_main.cc b/y2024/localizer/localizer_main.cc
new file mode 100644
index 0000000..25466d2
--- /dev/null
+++ b/y2024/localizer/localizer_main.cc
@@ -0,0 +1,23 @@
+#include "aos/events/shm_event_loop.h"
+#include "aos/init.h"
+#include "frc971/constants/constants_sender_lib.h"
+#include "y2024/control_loops/drivetrain/drivetrain_base.h"
+#include "y2024/localizer/localizer.h"
+
+DEFINE_string(config, "aos_config.json", "Path to the config file to use.");
+
+int main(int argc, char *argv[]) {
+  aos::InitGoogle(&argc, &argv);
+
+  aos::FlatbufferDetachedBuffer<aos::Configuration> config =
+      aos::configuration::ReadConfig(FLAGS_config);
+
+  frc971::constants::WaitForConstants<y2024::Constants>(&config.message());
+
+  aos::ShmEventLoop event_loop(&config.message());
+  y2024::localizer::Localizer localizer(&event_loop);
+
+  event_loop.Run();
+
+  return 0;
+}
diff --git a/y2024/localizer/localizer_plotter.ts b/y2024/localizer/localizer_plotter.ts
new file mode 100644
index 0000000..b5e764b
--- /dev/null
+++ b/y2024/localizer/localizer_plotter.ts
@@ -0,0 +1,203 @@
+// Provides plots for debugging localizer- and drivetrain-related issues.
+import {AosPlotter} from '../../aos/network/www/aos_plotter';
+import {ImuMessageHandler} from '../../frc971/wpilib/imu_plot_utils';
+import * as proxy from '../../aos/network/www/proxy';
+import {BLUE, BROWN, CYAN, GREEN, PINK, RED, WHITE} from '../../aos/network/www/colors';
+
+import Connection = proxy.Connection;
+
+const TIME = AosPlotter.TIME;
+const DEFAULT_WIDTH = AosPlotter.DEFAULT_WIDTH;
+const DEFAULT_HEIGHT = AosPlotter.DEFAULT_HEIGHT;
+
+export function plotLocalizer(conn: Connection, element: Element): void {
+  const aosPlotter = new AosPlotter(conn);
+
+  const position = aosPlotter.addMessageSource("/drivetrain",
+      "frc971.control_loops.drivetrain.Position");
+  const status = aosPlotter.addMessageSource(
+      '/drivetrain', 'frc971.control_loops.drivetrain.Status');
+  const output = aosPlotter.addMessageSource(
+      '/drivetrain', 'frc971.control_loops.drivetrain.Output');
+  const localizer = aosPlotter.addMessageSource(
+      '/localizer', 'y2024.localizer.Status');
+  const imu = aosPlotter.addRawMessageSource(
+      '/localizer', 'frc971.IMUValuesBatch',
+      new ImuMessageHandler(conn.getSchema('frc971.IMUValuesBatch')));
+
+  // Drivetrain Status estimated relative position
+  const positionPlot = aosPlotter.addPlot(element);
+  positionPlot.plot.getAxisLabels().setTitle("Estimated Relative Position " +
+                                             "of the Drivetrain");
+  positionPlot.plot.getAxisLabels().setXLabel(TIME);
+  positionPlot.plot.getAxisLabels().setYLabel("Relative Position (m)");
+  const leftPosition =
+      positionPlot.addMessageLine(status, ["estimated_left_position"]);
+  leftPosition.setColor(RED);
+  const rightPosition =
+      positionPlot.addMessageLine(status, ["estimated_right_position"]);
+  rightPosition.setColor(GREEN);
+  positionPlot.addMessageLine(position, ['left_encoder'])
+      .setColor(BROWN)
+      .setDrawLine(false);
+  positionPlot.addMessageLine(position, ['right_encoder'])
+      .setColor(CYAN)
+      .setDrawLine(false);
+
+  // Drivetrain Velocities
+  const velocityPlot = aosPlotter.addPlot(element);
+  velocityPlot.plot.getAxisLabels().setTitle('Velocity Plots');
+  velocityPlot.plot.getAxisLabels().setXLabel(TIME);
+  velocityPlot.plot.getAxisLabels().setYLabel('Wheel Velocity (m/s)');
+
+  const leftVelocity =
+      velocityPlot.addMessageLine(status, ['estimated_left_velocity']);
+  leftVelocity.setColor(RED);
+  const rightVelocity =
+      velocityPlot.addMessageLine(status, ['estimated_right_velocity']);
+  rightVelocity.setColor(GREEN);
+
+  const leftSpeed = velocityPlot.addMessageLine(position, ["left_speed"]);
+  leftSpeed.setColor(BLUE);
+  const rightSpeed = velocityPlot.addMessageLine(position, ["right_speed"]);
+  rightSpeed.setColor(BROWN);
+
+  const ekfLeftVelocity = velocityPlot.addMessageLine(
+      localizer, ['state', 'left_velocity']);
+  ekfLeftVelocity.setColor(RED);
+  ekfLeftVelocity.setPointSize(0.0);
+  const ekfRightVelocity = velocityPlot.addMessageLine(
+      localizer, ['state', 'right_velocity']);
+  ekfRightVelocity.setColor(GREEN);
+  ekfRightVelocity.setPointSize(0.0);
+
+  // Lateral velocity
+  const lateralPlot = aosPlotter.addPlot(element);
+  lateralPlot.plot.getAxisLabels().setTitle('Lateral Velocity');
+  lateralPlot.plot.getAxisLabels().setXLabel(TIME);
+  lateralPlot.plot.getAxisLabels().setYLabel('Velocity (m/s)');
+
+  lateralPlot.addMessageLine(localizer, ['state', 'lateral_velocity']).setColor(CYAN);
+
+  // Drivetrain Voltage
+  const voltagePlot = aosPlotter.addPlot(element);
+  voltagePlot.plot.getAxisLabels().setTitle('Voltage Plots');
+  voltagePlot.plot.getAxisLabels().setXLabel(TIME);
+  voltagePlot.plot.getAxisLabels().setYLabel('Voltage (V)');
+
+  voltagePlot.addMessageLine(localizer, ['state', 'left_voltage_error'])
+      .setColor(RED)
+      .setDrawLine(false);
+  voltagePlot.addMessageLine(localizer, ['state', 'right_voltage_error'])
+      .setColor(GREEN)
+      .setDrawLine(false);
+  voltagePlot.addMessageLine(output, ['left_voltage'])
+      .setColor(RED)
+      .setPointSize(0);
+  voltagePlot.addMessageLine(output, ['right_voltage'])
+      .setColor(GREEN)
+      .setPointSize(0);
+
+  // Heading
+  const yawPlot = aosPlotter.addPlot(element);
+  yawPlot.plot.getAxisLabels().setTitle('Robot Yaw');
+  yawPlot.plot.getAxisLabels().setXLabel(TIME);
+  yawPlot.plot.getAxisLabels().setYLabel('Yaw (rad)');
+
+  yawPlot.addMessageLine(status, ['localizer', 'theta']).setColor(GREEN);
+
+  yawPlot.addMessageLine(localizer, ['down_estimator', 'yaw']).setColor(BLUE);
+
+  yawPlot.addMessageLine(localizer, ['state', 'theta']).setColor(RED);
+
+  // Pitch/Roll
+  const orientationPlot = aosPlotter.addPlot(element);
+  orientationPlot.plot.getAxisLabels().setTitle('Orientation');
+  orientationPlot.plot.getAxisLabels().setXLabel(TIME);
+  orientationPlot.plot.getAxisLabels().setYLabel('Angle (rad)');
+
+  orientationPlot.addMessageLine(localizer, ['down_estimator', 'lateral_pitch'])
+      .setColor(RED)
+      .setLabel('roll');
+  orientationPlot
+      .addMessageLine(localizer, ['down_estimator', 'longitudinal_pitch'])
+      .setColor(GREEN)
+      .setLabel('pitch');
+
+  const stillPlot = aosPlotter.addPlot(element, [DEFAULT_WIDTH, DEFAULT_HEIGHT / 3]);
+  stillPlot.plot.getAxisLabels().setTitle('Still Plot');
+  stillPlot.plot.getAxisLabels().setXLabel(TIME);
+  stillPlot.plot.getAxisLabels().setYLabel('bool, g\'s');
+  stillPlot.plot.setDefaultYRange([-0.1, 1.1]);
+
+  stillPlot.addMessageLine(localizer, ['down_estimator', 'gravity_magnitude'])
+      .setColor(WHITE)
+      .setDrawLine(false);
+
+  // Absolute X Position
+  const xPositionPlot = aosPlotter.addPlot(element);
+  xPositionPlot.plot.getAxisLabels().setTitle('X Position');
+  xPositionPlot.plot.getAxisLabels().setXLabel(TIME);
+  xPositionPlot.plot.getAxisLabels().setYLabel('X Position (m)');
+
+  xPositionPlot.addMessageLine(status, ['x']).setColor(RED);
+  xPositionPlot.addMessageLine(localizer, ['down_estimator', 'position_x'])
+      .setColor(BLUE);
+  xPositionPlot.addMessageLine(localizer, ['state', 'x']).setColor(CYAN);
+
+  // Absolute Y Position
+  const yPositionPlot = aosPlotter.addPlot(element);
+  yPositionPlot.plot.getAxisLabels().setTitle('Y Position');
+  yPositionPlot.plot.getAxisLabels().setXLabel(TIME);
+  yPositionPlot.plot.getAxisLabels().setYLabel('Y Position (m)');
+
+  const localizerY = yPositionPlot.addMessageLine(status, ['y']);
+  localizerY.setColor(RED);
+  yPositionPlot.addMessageLine(localizer, ['down_estimator', 'position_y'])
+      .setColor(BLUE);
+  yPositionPlot.addMessageLine(localizer, ['state', 'y']).setColor(CYAN);
+
+  // Gyro
+  const gyroPlot = aosPlotter.addPlot(element);
+  gyroPlot.plot.getAxisLabels().setTitle('Gyro Readings');
+  gyroPlot.plot.getAxisLabels().setYLabel('Angular Velocity (rad / sec)');
+  gyroPlot.plot.getAxisLabels().setXLabel('Monotonic Reading Time (sec)');
+
+  const gyroX = gyroPlot.addMessageLine(imu, ['gyro_x']);
+  gyroX.setColor(RED);
+  const gyroY = gyroPlot.addMessageLine(imu, ['gyro_y']);
+  gyroY.setColor(GREEN);
+  const gyroZ = gyroPlot.addMessageLine(imu, ['gyro_z']);
+  gyroZ.setColor(BLUE);
+
+  const timingPlot =
+      aosPlotter.addPlot(element, [DEFAULT_WIDTH, DEFAULT_HEIGHT]);
+  timingPlot.plot.getAxisLabels().setTitle('Fault Counting');
+  timingPlot.plot.getAxisLabels().setXLabel(TIME);
+
+  timingPlot
+      .addMessageLine(
+          localizer, ['imu', 'imu_failures', 'imu_to_pico_checksum_mismatch'])
+      .setColor(BLUE)
+      .setDrawLine(false);
+
+  timingPlot
+      .addMessageLine(
+          localizer, ['imu', 'imu_failures', 'pico_to_pi_checksum_mismatch'])
+      .setColor(RED)
+      .setDrawLine(false);
+
+  timingPlot
+      .addMessageLine(
+          localizer, ['imu', 'imu_failures', 'other_zeroing_faults'])
+      .setColor(CYAN)
+      .setDrawLine(false);
+
+  timingPlot
+      .addMessageLine(
+          localizer, ['imu', 'imu_failures', 'missed_messages'])
+      .setColor(PINK)
+      .setDrawLine(false);
+}
diff --git a/y2024/localizer/localizer_replay.cc b/y2024/localizer/localizer_replay.cc
new file mode 100644
index 0000000..763ad36
--- /dev/null
+++ b/y2024/localizer/localizer_replay.cc
@@ -0,0 +1,68 @@
+#include "gflags/gflags.h"
+
+#include "aos/configuration.h"
+#include "aos/events/logging/log_reader.h"
+#include "aos/events/logging/log_writer.h"
+#include "aos/events/simulated_event_loop.h"
+#include "aos/init.h"
+#include "aos/json_to_flatbuffer.h"
+#include "aos/network/team_number.h"
+#include "aos/util/simulation_logger.h"
+#include "y2024/control_loops/drivetrain/drivetrain_base.h"
+#include "y2024/localizer/localizer.h"
+
+DEFINE_string(config, "y2024/aos_config.json",
+              "Name of the config file to replay using.");
+DEFINE_int32(team, 9971, "Team number to use for logfile replay.");
+DEFINE_string(output_folder, "/tmp/replayed",
+              "Name of the folder to write replayed logs to.");
+
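+// Illustrative invocation (the logfile path below is hypothetical):
+//   localizer_replay --team=9971 --output_folder=/tmp/replayed /path/to/log/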
+int main(int argc, char **argv) {
+  aos::InitGoogle(&argc, &argv);
+
+  aos::network::OverrideTeamNumber(FLAGS_team);
+
+  const aos::FlatbufferDetachedBuffer<aos::Configuration> config =
+      aos::configuration::ReadConfig(FLAGS_config);
+
+  // Sort the logfiles.
+  const std::vector<aos::logger::LogFile> logfiles =
+      aos::logger::SortParts(aos::logger::FindLogs(argc, argv));
+
+  // Open the logfiles.
+  aos::logger::LogReader reader(logfiles, &config.message());
+
+  reader.RemapLoggedChannel("/localizer", "y2024.localizer.Status");
+  for (const auto orin : {"orin1", "orin2"}) {
+    for (const auto camera : {"camera0", "camera1"}) {
+      reader.RemapLoggedChannel(absl::StrCat("/", orin, "/", camera),
+                                "y2024.localizer.Visualization");
+    }
+  }
+  reader.RemapLoggedChannel("/localizer", "frc971.controls.LocalizerOutput");
+
+  auto factory =
+      std::make_unique<aos::SimulatedEventLoopFactory>(reader.configuration());
+
+  reader.RegisterWithoutStarting(factory.get());
+
+  const aos::Node *node = nullptr;
+  if (aos::configuration::MultiNode(reader.configuration())) {
+    node = aos::configuration::GetNode(reader.configuration(), "imu");
+  }
+  std::vector<std::unique_ptr<aos::util::LoggerState>> loggers;
+
+  reader.OnStart(node, [&factory, node, &loggers]() {
+    aos::NodeEventLoopFactory *node_factory =
+        factory->GetNodeEventLoopFactory(node);
+    node_factory->AlwaysStart<y2024::localizer::Localizer>("localizer");
+    loggers.push_back(std::make_unique<aos::util::LoggerState>(
+        factory.get(), node, FLAGS_output_folder));
+  });
+
+  reader.event_loop_factory()->Run();
+
+  reader.Deregister();
+
+  return 0;
+}
diff --git a/y2024/localizer/localizer_test.cc b/y2024/localizer/localizer_test.cc
new file mode 100644
index 0000000..25c0bab
--- /dev/null
+++ b/y2024/localizer/localizer_test.cc
@@ -0,0 +1,554 @@
+#include "y2024/localizer/localizer.h"
+
+#include "gtest/gtest.h"
+
+#include "aos/events/logging/log_writer.h"
+#include "aos/events/simulated_event_loop.h"
+#include "frc971/control_loops/drivetrain/drivetrain_test_lib.h"
+#include "frc971/control_loops/drivetrain/localizer_generated.h"
+#include "frc971/control_loops/pose.h"
+#include "frc971/vision/target_map_generated.h"
+#include "frc971/vision/target_map_utils.h"
+#include "y2024/constants/simulated_constants_sender.h"
+#include "y2024/control_loops/drivetrain/drivetrain_base.h"
+#include "y2024/localizer/status_generated.h"
+
+DEFINE_string(output_folder, "",
+              "If set, logs all channels to the provided logfile.");
+DECLARE_double(max_distance_to_target);
+
+namespace y2024::localizer::testing {
+
+using frc971::control_loops::drivetrain::Output;
+
+class LocalizerTest : public ::testing::Test {
+ protected:
+  static constexpr uint64_t kTargetId = 1;
+  LocalizerTest()
+      : configuration_(aos::configuration::ReadConfig("y2024/aos_config.json")),
+        event_loop_factory_(&configuration_.message()),
+        roborio_node_([this]() {
+          // Send the constants before anything else happens; this has
+          // nothing to do with the roborio node specifically.
+          SendSimulationConstants(&event_loop_factory_, 7971,
+                                  "y2024/constants/test_constants.json");
+          return aos::configuration::GetNode(&configuration_.message(),
+                                             "roborio");
+        }()),
+        imu_node_(
+            aos::configuration::GetNode(&configuration_.message(), "imu")),
+        camera_node_(
+            aos::configuration::GetNode(&configuration_.message(), "orin1")),
+        roborio_test_event_loop_(
+            event_loop_factory_.MakeEventLoop("test", roborio_node_)),
+        dt_config_(y2024::control_loops::drivetrain::GetDrivetrainConfig(
+            roborio_test_event_loop_.get())),
+        localizer_event_loop_(
+            event_loop_factory_.MakeEventLoop("localizer", imu_node_)),
+        localizer_(localizer_event_loop_.get()),
+        drivetrain_plant_event_loop_(event_loop_factory_.MakeEventLoop(
+            "drivetrain_plant", roborio_node_)),
+        drivetrain_plant_imu_event_loop_(
+            event_loop_factory_.MakeEventLoop("drivetrain_plant", imu_node_)),
+        drivetrain_plant_(drivetrain_plant_event_loop_.get(),
+                          drivetrain_plant_imu_event_loop_.get(), dt_config_,
+                          std::chrono::microseconds(1000)),
+        imu_test_event_loop_(
+            event_loop_factory_.MakeEventLoop("test", imu_node_)),
+        camera_test_event_loop_(
+            event_loop_factory_.MakeEventLoop("test", camera_node_)),
+        constants_fetcher_(imu_test_event_loop_.get()),
+        output_sender_(
+            roborio_test_event_loop_->MakeSender<Output>("/drivetrain")),
+        target_sender_(
+            camera_test_event_loop_->MakeSender<frc971::vision::TargetMap>(
+                "/camera0")),
+        control_sender_(roborio_test_event_loop_->MakeSender<
+                        frc971::control_loops::drivetrain::LocalizerControl>(
+            "/drivetrain")),
+        output_fetcher_(
+            roborio_test_event_loop_
+                ->MakeFetcher<frc971::controls::LocalizerOutput>("/localizer")),
+        status_fetcher_(
+            imu_test_event_loop_->MakeFetcher<Status>("/localizer")) {
+    FLAGS_max_distance_to_target = 100.0;
+    {
+      aos::TimerHandler *timer = roborio_test_event_loop_->AddTimer([this]() {
+        {
+          auto builder = output_sender_.MakeBuilder();
+          auto output_builder = builder.MakeBuilder<Output>();
+          output_builder.add_left_voltage(output_voltages_(0));
+          output_builder.add_right_voltage(output_voltages_(1));
+          builder.CheckOk(builder.Send(output_builder.Finish()));
+        }
+      });
+      roborio_test_event_loop_->OnRun([timer, this]() {
+        timer->Schedule(roborio_test_event_loop_->monotonic_now(),
+                        std::chrono::milliseconds(5));
+      });
+    }
+    {
+      // Sanity check that the test calibration files look like what we
+      // expect.
+      CHECK_EQ("orin1", constants_fetcher_.constants()
+                            .cameras()
+                            ->Get(0)
+                            ->calibration()
+                            ->node_name()
+                            ->string_view());
+      CHECK_EQ(0, constants_fetcher_.constants()
+                      .cameras()
+                      ->Get(0)
+                      ->calibration()
+                      ->camera_number());
+      const Eigen::Matrix<double, 4, 4> H_robot_camera =
+          frc971::control_loops::drivetrain::FlatbufferToTransformationMatrix(
+              *constants_fetcher_.constants()
+                   .cameras()
+                   ->Get(0)
+                   ->calibration()
+                   ->fixed_extrinsics());
+
+      CHECK(constants_fetcher_.constants().common()->has_target_map());
+      CHECK(constants_fetcher_.constants()
+                .common()
+                ->target_map()
+                ->has_target_poses());
+      CHECK_LE(1u, constants_fetcher_.constants()
+                       .common()
+                       ->target_map()
+                       ->target_poses()
+                       ->size());
+      CHECK_EQ(kTargetId, constants_fetcher_.constants()
+                              .common()
+                              ->target_map()
+                              ->target_poses()
+                              ->Get(0)
+                              ->id());
+      const Eigen::Matrix<double, 4, 4> H_field_target =
+          PoseToTransform(constants_fetcher_.constants()
+                              .common()
+                              ->target_map()
+                              ->target_poses()
+                              ->Get(0));
+      // For reference, the camera should be pointed straight forwards on the
+      // robot, offset by 1 meter.
+      aos::TimerHandler *timer = camera_test_event_loop_->AddTimer(
+          [this, H_robot_camera, H_field_target]() {
+            if (!send_targets_) {
+              return;
+            }
+            const frc971::control_loops::Pose robot_pose(
+                {drivetrain_plant_.GetPosition().x(),
+                 drivetrain_plant_.GetPosition().y(), 0.0},
+                drivetrain_plant_.state()(2, 0) + implied_yaw_error_);
+
+            const Eigen::Matrix<double, 4, 4> H_field_camera =
+                robot_pose.AsTransformationMatrix() * H_robot_camera;
+            const Eigen::Matrix<double, 4, 4> H_camera_target =
+                H_field_camera.inverse() * H_field_target;
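+            // H_field_camera.inverse() is H_camera_field, so this yields the
+            // target pose expressed in the camera frame, matching what a
+            // real TargetMap detection would report.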
+
+            Eigen::Quaterniond quat(H_camera_target.block<3, 3>(0, 0));
+            quat.normalize();
+            const Eigen::Vector3d translation(
+                H_camera_target.block<3, 1>(0, 3));
+
+            auto builder = target_sender_.MakeBuilder();
+            frc971::vision::Quaternion::Builder quat_builder(*builder.fbb());
+            quat_builder.add_w(quat.w());
+            quat_builder.add_x(quat.x());
+            quat_builder.add_y(quat.y());
+            quat_builder.add_z(quat.z());
+            auto quat_offset = quat_builder.Finish();
+            frc971::vision::Position::Builder position_builder(*builder.fbb());
+            position_builder.add_x(translation.x());
+            position_builder.add_y(translation.y());
+            position_builder.add_z(translation.z());
+            auto position_offset = position_builder.Finish();
+
+            frc971::vision::TargetPoseFbs::Builder target_builder(
+                *builder.fbb());
+            target_builder.add_id(send_target_id_);
+            target_builder.add_position(position_offset);
+            target_builder.add_orientation(quat_offset);
+            target_builder.add_pose_error(pose_error_);
+            target_builder.add_pose_error_ratio(pose_error_ratio_);
+            auto target_offset = target_builder.Finish();
+
+            auto targets_offset = builder.fbb()->CreateVector({target_offset});
+            frc971::vision::TargetMap::Builder map_builder(*builder.fbb());
+            map_builder.add_target_poses(targets_offset);
+            map_builder.add_monotonic_timestamp_ns(
+                std::chrono::duration_cast<std::chrono::nanoseconds>(
+                    camera_test_event_loop_->monotonic_now().time_since_epoch())
+                    .count());
+
+            builder.CheckOk(builder.Send(map_builder.Finish()));
+          });
+      camera_test_event_loop_->OnRun([timer, this]() {
+        timer->Schedule(camera_test_event_loop_->monotonic_now(),
+                        std::chrono::milliseconds(50));
+      });
+    }
+
+    localizer_control_send_timer_ =
+        roborio_test_event_loop_->AddTimer([this]() {
+          auto builder = control_sender_.MakeBuilder();
+          auto control_builder = builder.MakeBuilder<
+              frc971::control_loops::drivetrain::LocalizerControl>();
+          control_builder.add_x(localizer_control_x_);
+          control_builder.add_y(localizer_control_y_);
+          control_builder.add_theta(localizer_control_theta_);
+          control_builder.add_theta_uncertainty(0.01);
+          control_builder.add_keep_current_theta(false);
+          builder.CheckOk(builder.Send(control_builder.Finish()));
+        });
+
+    // Get things zeroed.
+    event_loop_factory_.RunFor(std::chrono::seconds(10));
+    CHECK(status_fetcher_.Fetch());
+    CHECK(status_fetcher_->imu()->zeroed());
+
+    if (!FLAGS_output_folder.empty()) {
+      logger_event_loop_ =
+          event_loop_factory_.MakeEventLoop("logger", imu_node_);
+      logger_ = std::make_unique<aos::logger::Logger>(logger_event_loop_.get());
+      logger_->StartLoggingOnRun(FLAGS_output_folder);
+    }
+  }
+
+  void SendLocalizerControl(double x, double y, double theta) {
+    localizer_control_x_ = x;
+    localizer_control_y_ = y;
+    localizer_control_theta_ = theta;
+    localizer_control_send_timer_->Schedule(
+        roborio_test_event_loop_->monotonic_now());
+  }
+
+  ::testing::AssertionResult IsNear(double expected, double actual,
+                                    double epsilon) {
+    if (std::abs(expected - actual) < epsilon) {
+      return ::testing::AssertionSuccess();
+    } else {
+      return ::testing::AssertionFailure()
+             << "Expected " << expected << " but got " << actual
+             << " with a max difference of " << epsilon
+             << " and an actual difference of " << std::abs(expected - actual);
+    }
+  }
+  ::testing::AssertionResult VerifyEstimatorAccurate(double eps) {
+    const Eigen::Matrix<double, 5, 1> true_state = drivetrain_plant_.state();
+    ::testing::AssertionResult result(true);
+    status_fetcher_.Fetch();
+    if (!(result = IsNear(status_fetcher_->state()->x(), true_state(0), eps))) {
+      return result;
+    }
+    if (!(result = IsNear(status_fetcher_->state()->y(), true_state(1), eps))) {
+      return result;
+    }
+    if (!(result =
+              IsNear(status_fetcher_->state()->theta(), true_state(2), eps))) {
+      return result;
+    }
+    return result;
+  }
+
+  aos::FlatbufferDetachedBuffer<aos::Configuration> configuration_;
+  aos::SimulatedEventLoopFactory event_loop_factory_;
+  const aos::Node *const roborio_node_;
+  const aos::Node *const imu_node_;
+  const aos::Node *const camera_node_;
+  std::unique_ptr<aos::EventLoop> roborio_test_event_loop_;
+  const frc971::control_loops::drivetrain::DrivetrainConfig<double> dt_config_;
+  std::unique_ptr<aos::EventLoop> localizer_event_loop_;
+  Localizer localizer_;
+
+  std::unique_ptr<aos::EventLoop> drivetrain_plant_event_loop_;
+  std::unique_ptr<aos::EventLoop> drivetrain_plant_imu_event_loop_;
+  frc971::control_loops::drivetrain::testing::DrivetrainSimulation
+      drivetrain_plant_;
+
+  std::unique_ptr<aos::EventLoop> imu_test_event_loop_;
+  std::unique_ptr<aos::EventLoop> camera_test_event_loop_;
+
+  frc971::constants::ConstantsFetcher<Constants> constants_fetcher_;
+
+  aos::Sender<Output> output_sender_;
+  aos::Sender<frc971::vision::TargetMap> target_sender_;
+  aos::Sender<frc971::control_loops::drivetrain::LocalizerControl>
+      control_sender_;
+  aos::Fetcher<frc971::controls::LocalizerOutput> output_fetcher_;
+  aos::Fetcher<Status> status_fetcher_;
+
+  Eigen::Vector2d output_voltages_ = Eigen::Vector2d::Zero();
+
+  aos::TimerHandler *localizer_control_send_timer_;
+
+  bool send_targets_ = false;
+
+  double localizer_control_x_ = 0.0;
+  double localizer_control_y_ = 0.0;
+  double localizer_control_theta_ = 0.0;
+
+  std::unique_ptr<aos::EventLoop> logger_event_loop_;
+  std::unique_ptr<aos::logger::Logger> logger_;
+
+  uint64_t send_target_id_ = kTargetId;
+  double pose_error_ = 1e-7;
+  double pose_error_ratio_ = 0.1;
+  double implied_yaw_error_ = 0.0;
+
+  gflags::FlagSaver flag_saver_;
+};
+
+// Test a simple scenario with no errors where the robot should just drive
+// straight forwards.
+TEST_F(LocalizerTest, Nominal) {
+  output_voltages_ << 1.0, 1.0;
+  event_loop_factory_.RunFor(std::chrono::seconds(2));
+  CHECK(output_fetcher_.Fetch());
+  CHECK(status_fetcher_.Fetch());
+  // The two can differ because they may have been sent at different times.
+  EXPECT_NEAR(output_fetcher_->x(), status_fetcher_->state()->x(), 1e-2);
+  EXPECT_NEAR(output_fetcher_->y(), status_fetcher_->state()->y(), 1e-6);
+  EXPECT_NEAR(output_fetcher_->theta(), status_fetcher_->state()->theta(),
+              1e-6);
+  // Confirm that we did indeed drive forwards (and straight), as expected.
+  EXPECT_LT(0.1, output_fetcher_->x());
+  EXPECT_NEAR(0.0, output_fetcher_->y(), 1e-10);
+  EXPECT_NEAR(0.0, output_fetcher_->theta(), 1e-10);
+  EXPECT_NEAR(0.0, status_fetcher_->state()->left_voltage_error(), 1e-1);
+  EXPECT_NEAR(0.0, status_fetcher_->state()->right_voltage_error(), 1e-1);
+
+  // And check that we actually think that we are near where the simulator
+  // says we are.
+  EXPECT_TRUE(VerifyEstimatorAccurate(1e-2));
+}
+
+// Confirm that when the robot drives backwards we localize correctly.
+TEST_F(LocalizerTest, NominalReverse) {
+  output_voltages_ << -1.0, -1.0;
+  event_loop_factory_.RunFor(std::chrono::seconds(2));
+  CHECK(output_fetcher_.Fetch());
+  CHECK(status_fetcher_.Fetch());
+  // The two can differ because they may have been sent at different times.
+  EXPECT_NEAR(output_fetcher_->x(), status_fetcher_->state()->x(), 1e-2);
+  EXPECT_NEAR(output_fetcher_->y(), status_fetcher_->state()->y(), 1e-6);
+  EXPECT_NEAR(output_fetcher_->theta(), status_fetcher_->state()->theta(),
+              1e-6);
+  // Confirm that we did indeed drive backwards (and straight), as expected.
+  EXPECT_GT(-0.1, output_fetcher_->x());
+  EXPECT_NEAR(0.0, output_fetcher_->y(), 1e-10);
+  EXPECT_NEAR(0.0, output_fetcher_->theta(), 1e-10);
+  EXPECT_NEAR(0.0, status_fetcher_->state()->left_voltage_error(), 1e-1);
+  EXPECT_NEAR(0.0, status_fetcher_->state()->right_voltage_error(), 1e-1);
+
+  // And check that we actually think that we are near where the simulator
+  // says we are.
+  EXPECT_TRUE(VerifyEstimatorAccurate(1e-2));
+}
+
+// Confirm that when the robot turns counter-clockwise we localize
+// correctly.
+TEST_F(LocalizerTest, NominalSpinInPlace) {
+  output_voltages_ << -1.0, 1.0;
+  // Go 1 ms over 2 sec to make sure we actually see relatively recent messages
+  // on each channel.
+  event_loop_factory_.RunFor(std::chrono::milliseconds(2001));
+  CHECK(output_fetcher_.Fetch());
+  CHECK(status_fetcher_.Fetch());
+  // The two can differ because they may have been sent at different times.
+  EXPECT_NEAR(output_fetcher_->x(), status_fetcher_->state()->x(), 1e-6);
+  EXPECT_NEAR(output_fetcher_->y(), status_fetcher_->state()->y(), 1e-6);
+  EXPECT_NEAR(output_fetcher_->theta(), status_fetcher_->state()->theta(),
+              1e-2);
+  // Confirm that we did indeed turn counter-clockwise.
+  EXPECT_NEAR(0.0, output_fetcher_->x(), 1e-10);
+  EXPECT_NEAR(0.0, output_fetcher_->y(), 1e-10);
+  EXPECT_LT(0.1, output_fetcher_->theta());
+  EXPECT_NEAR(0.0, status_fetcher_->state()->left_voltage_error(), 1e-1);
+  EXPECT_NEAR(0.0, status_fetcher_->state()->right_voltage_error(), 1e-1);
+
+  // And check that we actually think that we are near where the simulator
+  // says we are.
+  EXPECT_TRUE(VerifyEstimatorAccurate(1e-2));
+}
+
+// Confirm that when the robot drives in a curve we localize successfully.
+TEST_F(LocalizerTest, NominalCurve) {
+  output_voltages_ << 2.0, 3.0;
+  event_loop_factory_.RunFor(std::chrono::seconds(4));
+  CHECK(output_fetcher_.Fetch());
+  CHECK(status_fetcher_.Fetch());
+  // The two can differ because they may have been sent at different times.
+  EXPECT_NEAR(output_fetcher_->x(), status_fetcher_->state()->x(), 2e-2);
+  EXPECT_NEAR(output_fetcher_->y(), status_fetcher_->state()->y(), 2e-2);
+  EXPECT_NEAR(output_fetcher_->theta(), status_fetcher_->state()->theta(),
+              2e-2);
+  // Confirm that we did indeed drive in a rough, counter-clockwise, curve.
+  EXPECT_LT(0.1, output_fetcher_->x());
+  EXPECT_LT(0.1, output_fetcher_->y());
+  EXPECT_LT(0.1, output_fetcher_->theta());
+
+  // And check that we actually think that we are near where the simulator
+  // says we are.
+  EXPECT_TRUE(VerifyEstimatorAccurate(1e-2));
+}
+
+// Tests that, in the presence of a non-zero voltage error, we correct for
+// it.
+TEST_F(LocalizerTest, VoltageErrorDisabled) {
+  output_voltages_ << 0.0, 0.0;
+  drivetrain_plant_.set_left_voltage_offset(2.0);
+  drivetrain_plant_.set_right_voltage_offset(2.0);
+
+  event_loop_factory_.RunFor(std::chrono::seconds(2));
+  CHECK(output_fetcher_.Fetch());
+  CHECK(status_fetcher_.Fetch());
+  // We should've just ended up driving straight forwards.
+  EXPECT_LT(0.1, output_fetcher_->x());
+  EXPECT_NEAR(0.0, output_fetcher_->y(), 1e-10);
+  EXPECT_NEAR(0.0, output_fetcher_->theta(), 1e-10);
+  EXPECT_NEAR(2.0, status_fetcher_->state()->left_voltage_error(), 1.0);
+  EXPECT_NEAR(2.0, status_fetcher_->state()->right_voltage_error(), 1.0);
+
+  // And check that we actually think that we are near where the simulator
+  // says we are.
+  EXPECT_TRUE(VerifyEstimatorAccurate(0.05));
+}
+
+// Tests that image corrections in the nominal case (no errors) cause no
+// issues.
+TEST_F(LocalizerTest, NominalImageCorrections) {
+  output_voltages_ << 3.0, 2.0;
+  send_targets_ = true;
+
+  event_loop_factory_.RunFor(std::chrono::seconds(4));
+  CHECK(status_fetcher_.Fetch());
+  EXPECT_TRUE(VerifyEstimatorAccurate(1e-2));
+  ASSERT_TRUE(status_fetcher_->has_statistics());
+  ASSERT_EQ(4u /* number of cameras */, status_fetcher_->statistics()->size());
+  ASSERT_EQ(status_fetcher_->statistics()->Get(0)->total_candidates(),
+            status_fetcher_->statistics()->Get(0)->total_accepted());
+  ASSERT_LT(10, status_fetcher_->statistics()->Get(0)->total_candidates());
+}
+
+// Tests that image corrections, when there is an error at the start, result
+// in the estimate actually getting corrected over time.
+TEST_F(LocalizerTest, ImageCorrections) {
+  output_voltages_ << 0.0, 0.0;
+  // Put ourselves somewhat near the target so that we don't ignore its
+  // corrections too much.
+  drivetrain_plant_.mutable_state()->x() = 3.0;
+  drivetrain_plant_.mutable_state()->y() = -2.0;
+  SendLocalizerControl(5.0, 0.0, 0.0);
+  event_loop_factory_.RunFor(std::chrono::seconds(4));
+  ASSERT_TRUE(output_fetcher_.Fetch());
+  EXPECT_NEAR(5.0, output_fetcher_->x(), 1e-5);
+  EXPECT_NEAR(0.0, output_fetcher_->y(), 1e-5);
+  EXPECT_NEAR(0.0, output_fetcher_->theta(), 1e-5);
+
+  send_targets_ = true;
+
+  event_loop_factory_.RunFor(std::chrono::seconds(10));
+  CHECK(status_fetcher_.Fetch());
+  EXPECT_TRUE(VerifyEstimatorAccurate(0.1));
+  ASSERT_TRUE(status_fetcher_->has_statistics());
+  ASSERT_EQ(4u /* number of cameras */, status_fetcher_->statistics()->size());
+  ASSERT_EQ(status_fetcher_->statistics()->Get(0)->total_candidates(),
+            status_fetcher_->statistics()->Get(0)->total_accepted());
+  ASSERT_LT(10, status_fetcher_->statistics()->Get(0)->total_candidates());
+}
+
+// Tests that we correctly reject an invalid target.
+TEST_F(LocalizerTest, InvalidTargetId) {
+  output_voltages_ << 0.0, 0.0;
+  send_targets_ = true;
+  send_target_id_ = 100;
+
+  event_loop_factory_.RunFor(std::chrono::seconds(4));
+  CHECK(status_fetcher_.Fetch());
+  ASSERT_TRUE(status_fetcher_->has_statistics());
+  ASSERT_EQ(4u /* number of cameras */, status_fetcher_->statistics()->size());
+  ASSERT_EQ(0, status_fetcher_->statistics()->Get(0)->total_accepted());
+  ASSERT_LT(10, status_fetcher_->statistics()->Get(0)->total_candidates());
+  ASSERT_EQ(status_fetcher_->statistics()
+                ->Get(0)
+                ->rejection_reasons()
+                ->Get(static_cast<size_t>(RejectionReason::NO_SUCH_TARGET))
+                ->count(),
+            status_fetcher_->statistics()->Get(0)->total_candidates());
+}
+
+// Tests that we correctly reject a detection with a high pose error.
+TEST_F(LocalizerTest, HighPoseError) {
+  output_voltages_ << 0.0, 0.0;
+  send_targets_ = true;
+  // Send the minimum pose error that will get rejected.
+  constexpr double kEps = 1e-9;
+  pose_error_ = 1e-6 + kEps;
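+  // (Assumes the localizer's maximum acceptable pose error is 1e-6.)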
+
+  event_loop_factory_.RunFor(std::chrono::seconds(4));
+  CHECK(status_fetcher_.Fetch());
+  ASSERT_TRUE(status_fetcher_->has_statistics());
+  ASSERT_EQ(4u /* number of cameras */, status_fetcher_->statistics()->size());
+  ASSERT_EQ(0, status_fetcher_->statistics()->Get(0)->total_accepted());
+  ASSERT_LT(10, status_fetcher_->statistics()->Get(0)->total_candidates());
+  ASSERT_EQ(status_fetcher_->statistics()
+                ->Get(0)
+                ->rejection_reasons()
+                ->Get(static_cast<size_t>(RejectionReason::HIGH_POSE_ERROR))
+                ->count(),
+            status_fetcher_->statistics()->Get(0)->total_candidates());
+}
+
+// Tests that we correctly reject a detection with a high implied yaw error.
+TEST_F(LocalizerTest, HighImpliedYawError) {
+  output_voltages_ << 0.0, 0.0;
+  send_targets_ = true;
+  implied_yaw_error_ = 31.0 * M_PI / 180.0;
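+  // 31 degrees: assumed to be just past a 30 degree implied-yaw cutoff.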
+
+  event_loop_factory_.RunFor(std::chrono::seconds(4));
+  CHECK(status_fetcher_.Fetch());
+  ASSERT_TRUE(status_fetcher_->has_statistics());
+  ASSERT_EQ(4u /* number of cameras */, status_fetcher_->statistics()->size());
+  ASSERT_EQ(0, status_fetcher_->statistics()->Get(0)->total_accepted());
+  ASSERT_LT(10, status_fetcher_->statistics()->Get(0)->total_candidates());
+  ASSERT_EQ(
+      status_fetcher_->statistics()
+          ->Get(0)
+          ->rejection_reasons()
+          ->Get(static_cast<size_t>(RejectionReason::HIGH_IMPLIED_YAW_ERROR))
+          ->count(),
+      status_fetcher_->statistics()->Get(0)->total_candidates());
+}
+
+// Tests that we correctly reject a detection with a high pose error ratio.
+TEST_F(LocalizerTest, HighPoseErrorRatio) {
+  output_voltages_ << 0.0, 0.0;
+  send_targets_ = true;
+  // Send the minimum pose error ratio that will get rejected.
+  constexpr double kEps = 1e-9;
+  pose_error_ratio_ = 0.4 + kEps;
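+  // (Assumes the localizer's maximum acceptable pose error ratio is 0.4.)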
+
+  event_loop_factory_.RunFor(std::chrono::seconds(4));
+  CHECK(status_fetcher_.Fetch());
+  ASSERT_TRUE(status_fetcher_->has_statistics());
+  ASSERT_EQ(4u /* number of cameras */, status_fetcher_->statistics()->size());
+  ASSERT_EQ(0, status_fetcher_->statistics()->Get(0)->total_accepted());
+  ASSERT_LT(10, status_fetcher_->statistics()->Get(0)->total_candidates());
+  ASSERT_EQ(
+      status_fetcher_->statistics()
+          ->Get(0)
+          ->rejection_reasons()
+          ->Get(static_cast<size_t>(RejectionReason::HIGH_POSE_ERROR_RATIO))
+          ->count(),
+      status_fetcher_->statistics()->Get(0)->total_candidates());
+}
+
+}  // namespace y2024::localizer::testing
diff --git a/y2024/localizer/status.fbs b/y2024/localizer/status.fbs
new file mode 100644
index 0000000..ad2ac72
--- /dev/null
+++ b/y2024/localizer/status.fbs
@@ -0,0 +1,73 @@
+include "frc971/control_loops/drivetrain/drivetrain_status.fbs";
+include "frc971/imu_reader/imu_failures.fbs";
+
+namespace y2024.localizer;
+
+attribute "static_length";
+
+enum RejectionReason : uint8 {
+  // For some reason, the image timestamp indicates that the image was taken
+  // in the future.
+  IMAGE_FROM_FUTURE = 0,
+  // The image was too old for the buffer of old state estimates that we
+  // maintain.
+  IMAGE_TOO_OLD = 1,
+  // Message bridge is not yet connected, and so we can't get accurate
+  // time offsets between nodes.
+  MESSAGE_BRIDGE_DISCONNECTED = 2,
+  // The target ID does not exist.
+  NO_SUCH_TARGET = 3,
+  // Pose estimation error was higher than any normal detection.
+  HIGH_POSE_ERROR = 4,
+  // Pose estimate implied a robot yaw far off from our estimate.
+  HIGH_IMPLIED_YAW_ERROR = 5,
+  // Pose estimate had a high distance to target.
+  // We don't trust estimates very far out.
+  HIGH_DISTANCE_TO_TARGET = 6,
+  // The robot was travelling too fast; we don't trust the target.
+  ROBOT_TOO_FAST = 7,
+  // Pose estimation error ratio was higher than any normal detection.
+  HIGH_POSE_ERROR_RATIO = 8,
+}
+
+table RejectionCount {
+  error:RejectionReason (id: 0);
+  count:uint (id: 1);
+}
+
+table CumulativeStatistics {
+  total_accepted:int (id: 0);
+  total_candidates:int (id: 1);
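+  // The static_length of 9 matches the number of RejectionReason values.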
+  rejection_reasons:[RejectionCount] (id: 2, static_length: 9);
+}
+
+table ImuStatus {
+  // Whether the IMU is zeroed or not.
+  zeroed:bool (id: 0);
+  // Whether the IMU zeroing is faulted or not.
+  faulted_zero:bool (id: 1);
+  zeroing:frc971.control_loops.drivetrain.ImuZeroerState (id: 2);
+  // Offset between the IMU board clock and the orin clock, such that
+  // board_timestamp + board_offset_ns = orin_timestamp
+  board_offset_ns:int64 (id: 3);
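+  // For example, with board_offset_ns = 1000000, a sample taken at 5 ms on
+  // the board clock occurred at 6 ms on the orin clock.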
+  // Error in the offset, if we assume that the orin/board clocks are
+  // identical and that there is a perfectly consistent latency between the
+  // two. Will be zero for the very first cycle, and then referenced off of
+  // the initial offset thereafter. If greater than zero, implies that the
+  // board is "behind", whether due to unusually large latency or due to
+  // clock drift.
+  board_offset_error_ns:int64 (id: 4);
+  left_encoder:double (id: 5);
+  right_encoder:double (id: 6);
+  imu_failures:frc971.controls.ImuFailures (id: 7);
+}
+
+table Status {
+  state: frc971.control_loops.drivetrain.LocalizerState (id: 0);
+  down_estimator:frc971.control_loops.drivetrain.DownEstimatorState (id: 1);
+  imu:ImuStatus (id: 2);
+  // Statistics are per-camera, by camera index.
+  statistics:[CumulativeStatistics] (id: 3);
+}
+
+root_type Status;
diff --git a/y2024/localizer/visualization.fbs b/y2024/localizer/visualization.fbs
new file mode 100644
index 0000000..d903e3a
--- /dev/null
+++ b/y2024/localizer/visualization.fbs
@@ -0,0 +1,47 @@
+include "y2024/localizer/status.fbs";
+
+namespace y2024.localizer;
+
+attribute "static_length";
+
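+// A single camera-to-target observation, decomposed into heading, distance,
+// and skew components so that the noise of each can be modeled separately.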
+table Measurement {
+  heading:double (id: 0);
+  distance:double (id: 1);
+  skew:double (id: 2);
+}
+
+table TargetEstimateDebug {
+  camera:uint8 (id: 0);
+  camera_x:double (id: 1);
+  camera_y:double (id: 2);
+  camera_theta:double (id: 3);
+  implied_robot_x:double (id: 4);
+  implied_robot_y:double (id: 5);
+  implied_robot_theta:double (id: 6);
+  accepted:bool (id: 7);
+  rejection_reason:RejectionReason (id: 8);
+  // Image age (more human-readable than trying to interpret raw nanosecond
+  // values).
+  image_age_sec:double (id: 9);
+  // Time at which the image was captured.
+  image_monotonic_timestamp_ns:uint64 (id: 10);
+  // April tag ID used for this image detection.
+  april_tag:uint (id: 11);
+  // If the image was accepted, the total correction that occurred as a
+  // result. These numbers are equal to (value after correction) - (value
+  // before correction).
+  correction_x: double (id: 12);
+  correction_y: double (id: 13);
+  correction_theta: double (id: 14);
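+  // For example, if the x estimate was 1.00 m before the correction and
+  // 1.05 m after, correction_x will be 0.05.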
+  // The expected observation given the current estimate of the robot pose.
+  expected_observation:Measurement (id: 15);
+  actual_observation:Measurement (id: 16);
+  modeled_noise:Measurement (id: 17);
+}
+
+table Visualization {
+  targets:[TargetEstimateDebug] (id: 0, static_length: 20);
+  statistics:CumulativeStatistics (id: 1);
+}
+
+root_type Visualization;
diff --git a/y2024/y2024_imu.json b/y2024/y2024_imu.json
index 72dc1f7..ff8f625 100644
--- a/y2024/y2024_imu.json
+++ b/y2024/y2024_imu.json
@@ -246,6 +246,14 @@
       "max_size": 200
     },
     {
+      "name": "/localizer",
+      "type": "y2024.localizer.Status",
+      "source_node": "imu",
+      "frequency": 1600,
+      "max_size": 1600,
+      "num_senders": 2
+    },
+    {
       "name": "/imu/constants",
       "type": "y2024.Constants",
       "source_node": "imu",
@@ -285,6 +293,14 @@
       ]
     },
     {
+      "name": "localizer",
+      "executable_name": "localizer_main",
+      "user": "pi",
+      "nodes": [
+        "imu"
+      ]
+    },
+    {
       "name": "localizer_logger",
       "executable_name": "logger_main",
       "args": [
diff --git a/y2024/y2024_orin_template.json b/y2024/y2024_orin_template.json
index acbe568..ac7a49f 100644
--- a/y2024/y2024_orin_template.json
+++ b/y2024/y2024_orin_template.json
@@ -163,6 +163,20 @@
     },
     {
       "name": "/orin{{ NUM }}/camera0",
+      "type": "y2024.localizer.Visualization",
+      "source_node": "imu",
+      "frequency": 65,
+      "max_size": 50000
+    },
+    {
+      "name": "/orin{{ NUM }}/camera1",
+      "type": "y2024.localizer.Visualization",
+      "source_node": "imu",
+      "frequency": 65,
+      "max_size": 50000
+    },
+    {
+      "name": "/orin{{ NUM }}/camera0",
       "type": "frc971.vision.TargetMap",
       "source_node": "orin{{ NUM }}",
       "frequency": 65,
diff --git a/y2024/y2024_roborio.json b/y2024/y2024_roborio.json
index b5b831f..30af4a2 100644
--- a/y2024/y2024_roborio.json
+++ b/y2024/y2024_roborio.json
@@ -221,7 +221,18 @@
       "source_node": "roborio",
       "frequency": 400,
       "max_size": 128,
-      "num_senders": 2
+      "num_senders": 2,
+      "logger": "LOCAL_AND_REMOTE_LOGGER",
+      "logger_nodes": [
+        "imu"
+      ],
+      "destination_nodes": [
+        {
+          "name": "imu",
+          "priority": 5,
+          "time_to_live": 5000000
+        }
+      ]
     },
     {
       "name": "/drivetrain",